repository_name | func_path_in_repository | func_name | whole_func_string | language | func_code_url |
---|---|---|---|---|---|
astropy/astropy-helpers | astropy_helpers/utils.py | extends_doc | def extends_doc(extended_func):
"""
A function decorator for use when wrapping an existing function but adding
additional functionality. This copies the docstring from the original
function, and appends to it (along with a newline) the docstring of the
wrapper function.
Examples
--------
>>> def foo():
... '''Hello.'''
...
>>> @extends_doc(foo)
... def bar():
... '''Goodbye.'''
...
>>> print(bar.__doc__)
Hello.
Goodbye.
"""
def decorator(func):
if not (extended_func.__doc__ is None or func.__doc__ is None):
func.__doc__ = '\n\n'.join([extended_func.__doc__.rstrip('\n'),
func.__doc__.lstrip('\n')])
return func
return decorator | python | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/utils.py#L264-L294 |
astropy/astropy-helpers | astropy_helpers/utils.py | find_data_files | def find_data_files(package, pattern):
"""
Include files matching ``pattern`` inside ``package``.
Parameters
----------
package : str
The package inside which to look for data files
pattern : str
Pattern (glob-style) to match for the data files (e.g. ``*.dat``).
This supports the ``**`` recursive syntax. For example, ``**/*.fits``
matches all files ending with ``.fits`` recursively. Only one
instance of ``**`` can be included in the pattern.
"""
return glob.glob(os.path.join(package, pattern), recursive=True) | python | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/utils.py#L297-L312 |
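A minimal usage sketch (illustrative only; the package name and data layout are hypothetical): collect every ``.fits`` file below ``mypackage/data`` at any depth using the ``**`` syntax.

```python
from astropy_helpers.utils import find_data_files

# Returns paths such as 'mypackage/data/subdir/image.fits' (hypothetical layout).
fits_files = find_data_files('mypackage', 'data/**/*.fits')
```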
astropy/astropy-helpers | astropy_helpers/version_helpers.py | _version_split | def _version_split(version):
"""
Split a version string into major, minor, and bugfix numbers. If any of
those numbers are missing the default is zero. Any pre/post release
modifiers are ignored.
Examples
========
>>> _version_split('1.2.3')
(1, 2, 3)
>>> _version_split('1.2')
(1, 2, 0)
>>> _version_split('1.2rc1')
(1, 2, 0)
>>> _version_split('1')
(1, 0, 0)
>>> _version_split('')
(0, 0, 0)
"""
parsed_version = pkg_resources.parse_version(version)
if hasattr(parsed_version, 'base_version'):
# New version parsing for setuptools >= 8.0
if parsed_version.base_version:
parts = [int(part)
for part in parsed_version.base_version.split('.')]
else:
parts = []
else:
parts = []
for part in parsed_version:
if part.startswith('*'):
# Ignore any .dev, a, b, rc, etc.
break
parts.append(int(part))
if len(parts) < 3:
parts += [0] * (3 - len(parts))
# In principle a version could have more parts (like 1.2.3.4) but we only
# support <major>.<minor>.<micro>
return tuple(parts[:3]) | python | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/version_helpers.py#L43-L85 |
astropy/astropy-helpers | astropy_helpers/version_helpers.py | _generate_git_header | def _generate_git_header(packagename, version, githash):
"""
Generates a header to the version.py module that includes utilities for
probing the git repository for updates (to the current git hash, etc.)
These utilities should only be available in development versions, and not
in release builds.
If this fails for any reason an empty string is returned.
"""
loader = pkgutil.get_loader(git_helpers)
source = loader.get_source(git_helpers.__name__) or ''
source_lines = source.splitlines()
if not source_lines:
log.warn('Cannot get source code for astropy_helpers.git_helpers; '
'git support disabled.')
return ''
idx = 0
for idx, line in enumerate(source_lines):
if line.startswith('# BEGIN'):
break
git_helpers_py = '\n'.join(source_lines[idx + 1:])
verstr = version
new_githash = git_helpers.get_git_devstr(sha=True, show_warning=False)
if new_githash:
githash = new_githash
return _FROZEN_VERSION_PY_WITH_GIT_HEADER.format(
git_helpers=git_helpers_py, packagename=packagename,
verstr=verstr, githash=githash) | python | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/version_helpers.py#L182-L215 |
astropy/astropy-helpers | astropy_helpers/version_helpers.py | generate_version_py | def generate_version_py(packagename=None, version=None, release=None, debug=None,
uses_git=None, srcdir='.'):
"""
Generate a version.py file in the package with version information, and
update developer version strings.
This function should normally be called without any arguments. In this case
the package name and version are read in from the ``setup.cfg`` file (from
the ``name`` or ``package_name`` entry and the ``version`` entry in the
``[metadata]`` section).
If the version is a developer version (of the form ``3.2.dev``), the
version string will automatically be expanded to include a sequential
number as a suffix (e.g. ``3.2.dev13312``), and the updated version string
will be returned by this function.
Based on this updated version string, a ``version.py`` file will be
generated inside the package, containing the version string as well as more
detailed information (for example the major, minor, and bugfix version
numbers, a ``release`` flag indicating whether the current version is a
stable or developer version, and so on).
"""
if packagename is not None:
warnings.warn('The packagename argument to generate_version_py has '
'been deprecated and will be removed in future. Specify '
'the package name in setup.cfg instead', AstropyDeprecationWarning)
if version is not None:
warnings.warn('The version argument to generate_version_py has '
'been deprecated and will be removed in future. Specify '
'the version number in setup.cfg instead', AstropyDeprecationWarning)
if release is not None:
warnings.warn('The release argument to generate_version_py has '
'been deprecated and will be removed in future. We now '
'use the presence of the "dev" string in the version to '
'determine whether this is a release', AstropyDeprecationWarning)
# We use ConfigParser instead of read_configuration here because the latter
# only reads in keys recognized by setuptools, but we need to access
# package_name below.
conf = ConfigParser()
conf.read('setup.cfg')
if conf.has_option('metadata', 'name'):
packagename = conf.get('metadata', 'name')
elif conf.has_option('metadata', 'package_name'):
# The package-template used package_name instead of name for a while
warnings.warn('Specifying the package name using the "package_name" '
'option in setup.cfg is deprecated - use the "name" '
'option instead.', AstropyDeprecationWarning)
packagename = conf.get('metadata', 'package_name')
elif packagename is not None: # deprecated
pass
else:
sys.stderr.write('ERROR: Could not read package name from setup.cfg\n')
sys.exit(1)
if conf.has_option('metadata', 'version'):
version = conf.get('metadata', 'version')
add_git_devstr = True
elif version is not None: # deprecated
add_git_devstr = False
else:
sys.stderr.write('ERROR: Could not read package version from setup.cfg\n')
sys.exit(1)
if release is None:
release = 'dev' not in version
if not release and add_git_devstr:
version += get_git_devstr(False)
if uses_git is None:
uses_git = not release
# In some cases the package name contains a '-' where the corresponding
# module name uses a '_'. Since we are only interested in the module here,
# we replace '-' with '_'.
packagename = packagename.replace('-', '_')
try:
version_module = get_pkg_version_module(packagename)
try:
last_generated_version = version_module._last_generated_version
except AttributeError:
last_generated_version = version_module.version
try:
last_githash = version_module._last_githash
except AttributeError:
last_githash = version_module.githash
current_release = version_module.release
current_debug = version_module.debug
except ImportError:
version_module = None
last_generated_version = None
last_githash = None
current_release = None
current_debug = None
if release is None:
# Keep whatever the current value is, if it exists
release = bool(current_release)
if debug is None:
# Likewise, keep whatever the current value is, if it exists
debug = bool(current_debug)
package_srcdir = os.path.join(srcdir, *packagename.split('.'))
version_py = os.path.join(package_srcdir, 'version.py')
if (last_generated_version != version or current_release != release or
current_debug != debug):
if '-q' not in sys.argv and '--quiet' not in sys.argv:
log.set_threshold(log.INFO)
if is_distutils_display_option():
# Always silence unnecessary log messages when display options are
# being used
log.set_threshold(log.WARN)
log.info('Freezing version number to {0}'.format(version_py))
with open(version_py, 'w') as f:
# This overwrites the actual version.py
f.write(_get_version_py_str(packagename, version, last_githash,
release, debug, uses_git=uses_git))
return version | python | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/version_helpers.py#L218-L349 |
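A minimal setup.py sketch, assuming a setup.cfg whose ``[metadata]`` section defines hypothetical ``name = mypackage`` and ``version = 3.2.dev`` entries.

```python
from astropy_helpers.version_helpers import generate_version_py

# Reads name/version from setup.cfg, expands a developer version with a git
# dev suffix, and freezes the result into mypackage/version.py.
version = generate_version_py()
```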
astropy/astropy-helpers | astropy_helpers/version_helpers.py | get_pkg_version_module | def get_pkg_version_module(packagename, fromlist=None):
"""Returns the package's .version module generated by
`astropy_helpers.version_helpers.generate_version_py`. Raises an
ImportError if the version module is not found.
If ``fromlist`` is an iterable, return a tuple of the members of the
version module corresponding to the member names given in ``fromlist``.
Raises an `AttributeError` if any of these module members are not found.
"""
version = import_file(os.path.join(packagename, 'version.py'), name='version')
if fromlist:
return tuple(getattr(version, member) for member in fromlist)
else:
return version | python | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/version_helpers.py#L352-L367 |
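A hedged sketch of reading members back from a previously generated version module; the package name and member list are assumptions for illustration.

```python
from astropy_helpers.version_helpers import get_pkg_version_module

version, githash = get_pkg_version_module('mypackage',
                                           fromlist=['version', 'githash'])
```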
astropy/astropy-helpers | astropy_helpers/setup_helpers.py | setup | def setup(**kwargs):
"""
A wrapper around setuptools' setup() function that automatically sets up
custom commands, generates a version file, and customizes the setup process
via the ``setup_package.py`` files.
"""
# DEPRECATED: store the package name in a built-in variable so it's easy
# to get from other parts of the setup infrastructure. We should phase this
# out in packages that use it - the cookiecutter template should now be
# able to put the right package name where needed.
conf = read_configuration('setup.cfg')
builtins._ASTROPY_PACKAGE_NAME_ = conf['metadata']['name']
# Create a dictionary with setup command overrides. Note that this gets
# information about the package (name and version) from the setup.cfg file.
cmdclass = register_commands()
# Freeze build information in version.py. Note that this gets information
# about the package (name and version) from the setup.cfg file.
version = generate_version_py()
# Get configuration information from all of the various subpackages.
# See the docstring for setup_helpers.update_package_files for more
# details.
package_info = get_package_info()
package_info['cmdclass'] = cmdclass
package_info['version'] = version
# Override using any specified keyword arguments
package_info.update(kwargs)
setuptools_setup(**package_info) | python | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/setup_helpers.py#L72-L104 |
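A minimal setup.py sketch using this wrapper; it assumes the package name and version are declared in setup.cfg.

```python
from astropy_helpers.setup_helpers import setup

# Registers the custom commands, generates version.py and collects
# setup_package.py information before delegating to setuptools.
setup()
```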
astropy/astropy-helpers | astropy_helpers/setup_helpers.py | get_debug_option | def get_debug_option(packagename):
""" Determines if the build is in debug mode.
Returns
-------
debug : bool
True if the current build was started with the debug option, False
otherwise.
"""
try:
current_debug = get_pkg_version_module(packagename,
fromlist=['debug'])[0]
except (ImportError, AttributeError):
current_debug = None
# Only modify the debug flag if one of the build commands was explicitly
# run (i.e. not as a sub-command of something else)
dist = get_dummy_distribution()
if any(cmd in dist.commands for cmd in ['build', 'build_ext']):
debug = bool(get_distutils_build_option('debug'))
else:
debug = bool(current_debug)
if current_debug is not None and current_debug != debug:
build_ext_cmd = dist.get_command_class('build_ext')
build_ext_cmd._force_rebuild = True
return debug | python | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/setup_helpers.py#L114-L143 |
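A short usage sketch (the package name is hypothetical): query the effective debug flag for the current build.

```python
from astropy_helpers.setup_helpers import get_debug_option

debug = get_debug_option('mypackage')
```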
astropy/astropy-helpers | astropy_helpers/setup_helpers.py | register_commands | def register_commands(package=None, version=None, release=None, srcdir='.'):
"""
This function generates a dictionary containing customized commands that
can then be passed to the ``cmdclass`` argument in ``setup()``.
"""
if package is not None:
warnings.warn('The package argument to register_commands has '
'been deprecated and will be removed in future. Specify '
'the package name in setup.cfg instead', AstropyDeprecationWarning)
if version is not None:
warnings.warn('The version argument to register_commands has '
'been deprecated and will be removed in future. Specify '
'the version number in setup.cfg instead', AstropyDeprecationWarning)
if release is not None:
warnings.warn('The release argument to register_commands has '
'been deprecated and will be removed in future. We now '
'use the presence of the "dev" string in the version to '
'determine whether this is a release', AstropyDeprecationWarning)
# We use ConfigParser instead of read_configuration here because the latter
# only reads in keys recognized by setuptools, but we need to access
# package_name below.
conf = ConfigParser()
conf.read('setup.cfg')
if conf.has_option('metadata', 'name'):
package = conf.get('metadata', 'name')
elif conf.has_option('metadata', 'package_name'):
# The package-template used package_name instead of name for a while
warnings.warn('Specifying the package name using the "package_name" '
'option in setup.cfg is deprecated - use the "name" '
'option instead.', AstropyDeprecationWarning)
package = conf.get('metadata', 'package_name')
elif package is not None: # deprecated
pass
else:
sys.stderr.write('ERROR: Could not read package name from setup.cfg\n')
sys.exit(1)
if _module_state['registered_commands'] is not None:
return _module_state['registered_commands']
if _module_state['have_sphinx']:
try:
from .commands.build_sphinx import (AstropyBuildSphinx,
AstropyBuildDocs)
except ImportError:
AstropyBuildSphinx = AstropyBuildDocs = FakeBuildSphinx
else:
AstropyBuildSphinx = AstropyBuildDocs = FakeBuildSphinx
_module_state['registered_commands'] = registered_commands = {
'test': generate_test_command(package),
# Use distutils' sdist because it respects package_data.
# setuptools/distribute's sdist requires duplication of information in
# MANIFEST.in
'sdist': DistutilsSdist,
'build_ext': AstropyHelpersBuildExt,
'build_sphinx': AstropyBuildSphinx,
'build_docs': AstropyBuildDocs
}
# Need to override the __name__ here so that the commandline options are
# presented as being related to the "build" command, for example; normally
# this wouldn't be necessary since commands also have a command_name
# attribute, but there is a bug in distutils' help display code that it
# uses __name__ instead of command_name. Yay distutils!
for name, cls in registered_commands.items():
cls.__name__ = name
# Add a few custom options; more of these can be added by specific packages
# later
for option in [
('use-system-libraries',
"Use system libraries whenever possible", True)]:
add_command_option('build', *option)
add_command_option('install', *option)
add_command_hooks(registered_commands, srcdir=srcdir)
return registered_commands | python | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/setup_helpers.py#L156-L241 |
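A sketch of using the returned command dictionary directly with setuptools, for a hypothetical package that does not go through the ``setup()`` wrapper above.

```python
from setuptools import setup
from astropy_helpers.setup_helpers import register_commands

cmdclass = register_commands()  # name and version are read from setup.cfg
setup(name='mypackage', cmdclass=cmdclass)
```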
astropy/astropy-helpers | astropy_helpers/setup_helpers.py | add_command_hooks | def add_command_hooks(commands, srcdir='.'):
"""
Look through setup_package.py modules for functions with names like
``pre_<command_name>_hook`` and ``post_<command_name>_hook`` where
``<command_name>`` is the name of a ``setup.py`` command (e.g. build_ext).
If either hook is present this adds a wrapped version of that command to
the passed in ``commands`` `dict`. ``commands`` may be pre-populated with
other custom distutils command classes that should be wrapped if there are
hooks for them (e.g. `AstropyBuildPy`).
"""
hook_re = re.compile(r'^(pre|post)_(.+)_hook$')
# Distutils commands have a method of the same name, but it is not a
# *classmethod* (which probably didn't exist when distutils was first
# written)
def get_command_name(cmdcls):
if hasattr(cmdcls, 'command_name'):
return cmdcls.command_name
else:
return cmdcls.__name__
packages = find_packages(srcdir)
dist = get_dummy_distribution()
hooks = collections.defaultdict(dict)
for setuppkg in iter_setup_packages(srcdir, packages):
for name, obj in vars(setuppkg).items():
match = hook_re.match(name)
if not match:
continue
hook_type = match.group(1)
cmd_name = match.group(2)
if hook_type not in hooks[cmd_name]:
hooks[cmd_name][hook_type] = []
hooks[cmd_name][hook_type].append((setuppkg.__name__, obj))
for cmd_name, cmd_hooks in hooks.items():
commands[cmd_name] = generate_hooked_command(
cmd_name, dist.get_command_class(cmd_name), cmd_hooks) | python | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/setup_helpers.py#L244-L288 |
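A sketch of the hook convention this function discovers: a ``setup_package.py`` module (for a hypothetical subpackage) defining pre/post hooks for the ``build_ext`` command.

```python
def pre_build_ext_hook(cmd_obj):
    # Runs before build_ext; cmd_obj is the finalized command instance,
    # so its options can be inspected or adjusted here.
    print('about to run', cmd_obj.get_command_name())


def post_build_ext_hook(cmd_obj):
    # Runs after build_ext completes.
    print('finished', cmd_obj.get_command_name())
```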
astropy/astropy-helpers | astropy_helpers/setup_helpers.py | generate_hooked_command | def generate_hooked_command(cmd_name, cmd_cls, hooks):
"""
Returns a generated subclass of ``cmd_cls`` that runs the pre- and
post-command hooks for that command before and after the ``cmd_cls.run``
method.
"""
def run(self, orig_run=cmd_cls.run):
self.run_command_hooks('pre_hooks')
orig_run(self)
self.run_command_hooks('post_hooks')
return type(cmd_name, (cmd_cls, object),
{'run': run, 'run_command_hooks': run_command_hooks,
'pre_hooks': hooks.get('pre', []),
'post_hooks': hooks.get('post', [])}) | python | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/setup_helpers.py#L291-L306 |
astropy/astropy-helpers | astropy_helpers/setup_helpers.py | run_command_hooks | def run_command_hooks(cmd_obj, hook_kind):
"""Run hooks registered for that command and phase.
*cmd_obj* is a finalized command object; *hook_kind* is either
'pre_hooks' or 'post_hooks'.
"""
hooks = getattr(cmd_obj, hook_kind, None)
if not hooks:
return
for modname, hook in hooks:
if isinstance(hook, str):
try:
hook_obj = resolve_name(hook)
except ImportError as exc:
raise DistutilsModuleError(
'cannot find hook {0}: {1}'.format(hook, exc))
else:
hook_obj = hook
if not callable(hook_obj):
raise DistutilsOptionError('hook {0!r} is not callable'.format(hook))
log.info('running {0} from {1} for {2} command'.format(
hook_kind.rstrip('s'), modname, cmd_obj.get_command_name()))
try:
hook_obj(cmd_obj)
except Exception:
log.error('{0} command hook {1} raised an exception:\n'.format(
hook_obj.__name__, cmd_obj.get_command_name()))
log.error(traceback.format_exc())
sys.exit(1) | python | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/setup_helpers.py#L309-L343 |
astropy/astropy-helpers | astropy_helpers/setup_helpers.py | update_package_files | def update_package_files(srcdir, extensions, package_data, packagenames,
package_dirs):
"""
This function is deprecated and maintained for backward compatibility
with affiliated packages. Affiliated packages should update their
setup.py to use `get_package_info` instead.
"""
info = get_package_info(srcdir)
extensions.extend(info['ext_modules'])
package_data.update(info['package_data'])
packagenames = list(set(packagenames + info['packages']))
package_dirs.update(info['package_dir']) | python | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/setup_helpers.py#L357-L369 |
astropy/astropy-helpers | astropy_helpers/setup_helpers.py | get_package_info | def get_package_info(srcdir='.', exclude=()):
"""
Collates all of the information for building all subpackages
and returns a dictionary of keyword arguments that can
be passed directly to `distutils.setup`.
The purpose of this function is to allow subpackages to update the
arguments to the package's ``setup()`` function in its setup.py
script, rather than having to specify all extensions/package data
directly in the ``setup.py``. See Astropy's own
``setup.py`` for example usage and the Astropy development docs
for more details.
This function obtains that information by iterating through all
packages in ``srcdir`` and locating a ``setup_package.py`` module.
This module can contain the following functions:
``get_extensions()``, ``get_package_data()``,
``get_build_options()``, and ``get_external_libraries()``.
Each of those functions takes no arguments.
- ``get_extensions`` returns a list of
`distutils.extension.Extension` objects.
- ``get_package_data()`` returns a dict formatted as required by
the ``package_data`` argument to ``setup()``.
- ``get_build_options()`` returns a list of tuples describing the
extra build options to add.
- ``get_external_libraries()`` returns
a list of libraries that can optionally be built using external
dependencies.
"""
ext_modules = []
packages = []
package_dir = {}
# Read in existing package data, and add to it below
setup_cfg = os.path.join(srcdir, 'setup.cfg')
if os.path.exists(setup_cfg):
conf = read_configuration(setup_cfg)
if 'options' in conf and 'package_data' in conf['options']:
package_data = conf['options']['package_data']
else:
package_data = {}
else:
package_data = {}
if exclude:
warnings.warn(
"Use of the exclude parameter is no longer supported since it does "
"not work as expected. Use add_exclude_packages instead. Note that "
"it must be called prior to any other calls from setup helpers.",
AstropyDeprecationWarning)
# Use the find_packages tool to locate all packages and modules
packages = find_packages(srcdir, exclude=exclude)
# Update package_dir if the package lies in a subdirectory
if srcdir != '.':
package_dir[''] = srcdir
# For each of the setup_package.py modules, extract any
# information that is needed to install them. The build options
# are extracted first, so that their values will be available in
# subsequent calls to `get_extensions`, etc.
for setuppkg in iter_setup_packages(srcdir, packages):
if hasattr(setuppkg, 'get_build_options'):
options = setuppkg.get_build_options()
for option in options:
add_command_option('build', *option)
if hasattr(setuppkg, 'get_external_libraries'):
libraries = setuppkg.get_external_libraries()
for library in libraries:
add_external_library(library)
for setuppkg in iter_setup_packages(srcdir, packages):
# get_extensions must include any Cython extensions by their .pyx
# filename.
if hasattr(setuppkg, 'get_extensions'):
ext_modules.extend(setuppkg.get_extensions())
if hasattr(setuppkg, 'get_package_data'):
package_data.update(setuppkg.get_package_data())
# Locate any .pyx files not already specified, and add their extensions in.
# The default include dirs include numpy to facilitate numerical work.
ext_modules.extend(get_cython_extensions(srcdir, packages, ext_modules,
['numpy']))
# Now remove extensions that have the special name 'skip_cython', as they
# exist only to indicate that the Cython extensions shouldn't be built
for i, ext in reversed(list(enumerate(ext_modules))):
if ext.name == 'skip_cython':
del ext_modules[i]
# On Microsoft compilers, we need to pass the '/MANIFEST'
# commandline argument. This was the default on MSVC 9.0, but is
# now required on MSVC 10.0, but it doesn't seem to hurt to add
# it unconditionally.
if get_compiler_option() == 'msvc':
for ext in ext_modules:
ext.extra_link_args.append('/MANIFEST')
return {
'ext_modules': ext_modules,
'packages': packages,
'package_dir': package_dir,
'package_data': package_data,
} | python | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/setup_helpers.py#L372-L481 |
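A hedged sketch of a ``setup_package.py`` module of the kind this function collects information from; the extension name and data files are hypothetical.

```python
from distutils.extension import Extension


def get_extensions():
    # Cython sources are listed by their .pyx file name.
    return [Extension('mypackage.fastmath', ['mypackage/fastmath.pyx'])]


def get_package_data():
    return {'mypackage': ['data/*.dat']}
```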
astropy/astropy-helpers | astropy_helpers/setup_helpers.py | iter_setup_packages | def iter_setup_packages(srcdir, packages):
""" A generator that finds and imports all of the ``setup_package.py``
modules in the source packages.
Returns
-------
modgen : generator
A generator that yields the imported ``setup_package`` module for each
package that provides one.
"""
for packagename in packages:
package_parts = packagename.split('.')
package_path = os.path.join(srcdir, *package_parts)
setup_package = os.path.relpath(
os.path.join(package_path, 'setup_package.py'))
if os.path.isfile(setup_package):
module = import_file(setup_package,
name=packagename + '.setup_package')
yield module | python | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/setup_helpers.py#L484-L505 |
astropy/astropy-helpers | astropy_helpers/setup_helpers.py | iter_pyx_files | def iter_pyx_files(package_dir, package_name):
"""
A generator that yields Cython source files (ending in '.pyx') in the
source packages.
Returns
-------
pyxgen : generator
A generator that yields (extmod, fullfn) where `extmod` is the
full name of the module that the .pyx file would live in based
on the source directory structure, and `fullfn` is the path to
the .pyx file.
"""
for dirpath, dirnames, filenames in walk_skip_hidden(package_dir):
for fn in filenames:
if fn.endswith('.pyx'):
fullfn = os.path.relpath(os.path.join(dirpath, fn))
# Package must match file name
extmod = '.'.join([package_name, fn[:-4]])
yield (extmod, fullfn)
break | python | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/setup_helpers.py#L508-L529 |
astropy/astropy-helpers | astropy_helpers/setup_helpers.py | get_cython_extensions | def get_cython_extensions(srcdir, packages, prevextensions=tuple(),
extincludedirs=None):
"""
Looks for Cython files and generates Extensions if needed.
Parameters
----------
srcdir : str
Path to the root of the source directory to search.
prevextensions : list of `~distutils.core.Extension` objects
The extensions that are already defined. Any .pyx files already here
will be ignored.
extincludedirs : list of str or None
Directories to include as the `include_dirs` argument to the generated
`~distutils.core.Extension` objects.
Returns
-------
exts : list of `~distutils.core.Extension` objects
The new extensions that are needed to compile all .pyx files (does not
include any already in `prevextensions`).
"""
# Vanilla setuptools and old versions of distribute include Cython files
# as .c files in the sources, not .pyx, so we cannot simply look for
# existing .pyx sources in the previous sources, but we should also check
# for .c files with the same remaining filename. So we look for .pyx and
# .c files, and we strip the extension.
prevsourcepaths = []
ext_modules = []
for ext in prevextensions:
for s in ext.sources:
if s.endswith(('.pyx', '.c', '.cpp')):
sourcepath = os.path.realpath(os.path.splitext(s)[0])
prevsourcepaths.append(sourcepath)
for package_name in packages:
package_parts = package_name.split('.')
package_path = os.path.join(srcdir, *package_parts)
for extmod, pyxfn in iter_pyx_files(package_path, package_name):
sourcepath = os.path.realpath(os.path.splitext(pyxfn)[0])
if sourcepath not in prevsourcepaths:
ext_modules.append(Extension(extmod, [pyxfn],
include_dirs=extincludedirs))
return ext_modules | python | def get_cython_extensions(srcdir, packages, prevextensions=tuple(),
extincludedirs=None):
"""
Looks for Cython files and generates Extensions if needed.
Parameters
----------
srcdir : str
Path to the root of the source directory to search.
prevextensions : list of `~distutils.core.Extension` objects
The extensions that are already defined. Any .pyx files already here
will be ignored.
extincludedirs : list of str or None
Directories to include as the `include_dirs` argument to the generated
`~distutils.core.Extension` objects.
Returns
-------
exts : list of `~distutils.core.Extension` objects
The new extensions that are needed to compile all .pyx files (does not
include any already in `prevextensions`).
"""
# Vanilla setuptools and old versions of distribute include Cython files
# as .c files in the sources, not .pyx, so we cannot simply look for
# existing .pyx sources in the previous sources, but we should also check
# for .c files with the same remaining filename. So we look for .pyx and
# .c files, and we strip the extension.
prevsourcepaths = []
ext_modules = []
for ext in prevextensions:
for s in ext.sources:
if s.endswith(('.pyx', '.c', '.cpp')):
sourcepath = os.path.realpath(os.path.splitext(s)[0])
prevsourcepaths.append(sourcepath)
for package_name in packages:
package_parts = package_name.split('.')
package_path = os.path.join(srcdir, *package_parts)
for extmod, pyxfn in iter_pyx_files(package_path, package_name):
sourcepath = os.path.realpath(os.path.splitext(pyxfn)[0])
if sourcepath not in prevsourcepaths:
ext_modules.append(Extension(extmod, [pyxfn],
include_dirs=extincludedirs))
return ext_modules | Looks for Cython files and generates Extensions if needed.
Parameters
----------
srcdir : str
Path to the root of the source directory to search.
prevextensions : list of `~distutils.core.Extension` objects
The extensions that are already defined. Any .pyx files already here
will be ignored.
extincludedirs : list of str or None
Directories to include as the `include_dirs` argument to the generated
`~distutils.core.Extension` objects.
Returns
-------
exts : list of `~distutils.core.Extension` objects
The new extensions that are needed to compile all .pyx files (does not
include any already in `prevextensions`). | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/setup_helpers.py#L532-L579 |
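A minimal usage sketch for get_cython_extensions; the package name 'mypackage' below is hypothetical, and numpy is used only to supply an include directory:

import numpy
from astropy_helpers.setup_helpers import get_cython_extensions

ext_modules = get_cython_extensions(
    srcdir='.',                      # root of the source tree
    packages=['mypackage'],          # packages to scan for .pyx files
    prevextensions=(),               # Extensions already declared elsewhere
    extincludedirs=[numpy.get_include()])
# Each returned Extension wraps one .pyx file that is not already covered
# by an entry in prevextensions.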
astropy/astropy-helpers | astropy_helpers/setup_helpers.py | pkg_config | def pkg_config(packages, default_libraries, executable='pkg-config'):
"""
Uses pkg-config to update a set of distutils Extension arguments
to include the flags necessary to link against the given packages.
If the pkg-config lookup fails, default_libraries is applied to
libraries.
Parameters
----------
packages : list of str
A list of pkg-config packages to look up.
default_libraries : list of str
A list of library names to use if the pkg-config lookup fails.
Returns
-------
config : dict
A dictionary containing keyword arguments to
`distutils.Extension`. These entries include:
- ``include_dirs``: A list of include directories
- ``library_dirs``: A list of library directories
- ``libraries``: A list of libraries
- ``define_macros``: A list of macro defines
- ``undef_macros``: A list of macros to undefine
- ``extra_compile_args``: A list of extra arguments to pass to
the compiler
"""
flag_map = {'-I': 'include_dirs', '-L': 'library_dirs', '-l': 'libraries',
'-D': 'define_macros', '-U': 'undef_macros'}
command = "{0} --libs --cflags {1}".format(executable, ' '.join(packages)),
result = DistutilsExtensionArgs()
try:
pipe = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
output = pipe.communicate()[0].strip()
except subprocess.CalledProcessError as e:
lines = [
("{0} failed. This may cause the build to fail below."
.format(executable)),
" command: {0}".format(e.cmd),
" returncode: {0}".format(e.returncode),
" output: {0}".format(e.output)
]
log.warn('\n'.join(lines))
result['libraries'].extend(default_libraries)
else:
if pipe.returncode != 0:
lines = [
"pkg-config could not lookup up package(s) {0}.".format(
", ".join(packages)),
"This may cause the build to fail below."
]
log.warn('\n'.join(lines))
result['libraries'].extend(default_libraries)
else:
for token in output.split():
# It's not clear what encoding the output of
# pkg-config will come to us in. It will probably be
# some combination of pure ASCII (for the compiler
# flags) and the filesystem encoding (for any argument
# that includes directories or filenames), but this is
# just conjecture, as the pkg-config documentation
# doesn't seem to address it.
arg = token[:2].decode('ascii')
value = token[2:].decode(sys.getfilesystemencoding())
if arg in flag_map:
if arg == '-D':
value = tuple(value.split('=', 1))
result[flag_map[arg]].append(value)
else:
result['extra_compile_args'].append(value)
return result | python | def pkg_config(packages, default_libraries, executable='pkg-config'):
"""
Uses pkg-config to update a set of distutils Extension arguments
to include the flags necessary to link against the given packages.
If the pkg-config lookup fails, default_libraries is applied to
libraries.
Parameters
----------
packages : list of str
A list of pkg-config packages to look up.
default_libraries : list of str
A list of library names to use if the pkg-config lookup fails.
Returns
-------
config : dict
A dictionary containing keyword arguments to
`distutils.Extension`. These entries include:
- ``include_dirs``: A list of include directories
- ``library_dirs``: A list of library directories
- ``libraries``: A list of libraries
- ``define_macros``: A list of macro defines
- ``undef_macros``: A list of macros to undefine
- ``extra_compile_args``: A list of extra arguments to pass to
the compiler
"""
flag_map = {'-I': 'include_dirs', '-L': 'library_dirs', '-l': 'libraries',
'-D': 'define_macros', '-U': 'undef_macros'}
command = "{0} --libs --cflags {1}".format(executable, ' '.join(packages)),
result = DistutilsExtensionArgs()
try:
pipe = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
output = pipe.communicate()[0].strip()
except subprocess.CalledProcessError as e:
lines = [
("{0} failed. This may cause the build to fail below."
.format(executable)),
" command: {0}".format(e.cmd),
" returncode: {0}".format(e.returncode),
" output: {0}".format(e.output)
]
log.warn('\n'.join(lines))
result['libraries'].extend(default_libraries)
else:
if pipe.returncode != 0:
lines = [
"pkg-config could not lookup up package(s) {0}.".format(
", ".join(packages)),
"This may cause the build to fail below."
]
log.warn('\n'.join(lines))
result['libraries'].extend(default_libraries)
else:
for token in output.split():
# It's not clear what encoding the output of
# pkg-config will come to us in. It will probably be
# some combination of pure ASCII (for the compiler
# flags) and the filesystem encoding (for any argument
# that includes directories or filenames), but this is
# just conjecture, as the pkg-config documentation
# doesn't seem to address it.
arg = token[:2].decode('ascii')
value = token[2:].decode(sys.getfilesystemencoding())
if arg in flag_map:
if arg == '-D':
value = tuple(value.split('=', 1))
result[flag_map[arg]].append(value)
else:
result['extra_compile_args'].append(value)
return result | Uses pkg-config to update a set of distutils Extension arguments
to include the flags necessary to link against the given packages.
If the pkg-config lookup fails, default_libraries is applied to
libraries.
Parameters
----------
packages : list of str
A list of pkg-config packages to look up.
default_libraries : list of str
A list of library names to use if the pkg-config lookup fails.
Returns
-------
config : dict
A dictionary containing keyword arguments to
`distutils.Extension`. These entries include:
- ``include_dirs``: A list of include directories
- ``library_dirs``: A list of library directories
- ``libraries``: A list of libraries
- ``define_macros``: A list of macro defines
- ``undef_macros``: A list of macros to undefine
- ``extra_compile_args``: A list of extra arguments to pass to
the compiler | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/setup_helpers.py#L602-L679 |
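A hedged usage sketch for pkg_config; 'cfitsio' and the extension name are illustrative choices, not requirements of this helper:

from distutils.core import Extension
from astropy_helpers.setup_helpers import pkg_config

cfg = pkg_config(['cfitsio'], default_libraries=['cfitsio'])
# cfg holds lists for include_dirs, library_dirs, libraries, etc., so it can
# be splatted straight into an Extension definition.
ext = Extension('mypackage._fits', sources=['mypackage/_fits.c'], **cfg)
# If pkg-config is unavailable or the lookup fails, cfg falls back to
# {'libraries': ['cfitsio']} and the warning described above is logged.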
astropy/astropy-helpers | astropy_helpers/setup_helpers.py | add_external_library | def add_external_library(library):
"""
Add a build option for selecting the internal or system copy of a library.
Parameters
----------
library : str
The name of the library. If the library is `foo`, the build
option will be called `--use-system-foo`.
"""
for command in ['build', 'build_ext', 'install']:
add_command_option(command, str('use-system-' + library),
'Use the system {0} library'.format(library),
is_bool=True) | python | def add_external_library(library):
"""
Add a build option for selecting the internal or system copy of a library.
Parameters
----------
library : str
The name of the library. If the library is `foo`, the build
option will be called `--use-system-foo`.
"""
for command in ['build', 'build_ext', 'install']:
add_command_option(command, str('use-system-' + library),
'Use the system {0} library'.format(library),
is_bool=True) | Add a build option for selecting the internal or system copy of a library.
Parameters
----------
library : str
The name of the library. If the library is `foo`, the build
option will be called `--use-system-foo`. | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/setup_helpers.py#L682-L696 |
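A short sketch of how this might be used in a setup script; 'foo' is a placeholder library name:

from astropy_helpers.setup_helpers import add_external_library

add_external_library('foo')
# After this call, `python setup.py build --use-system-foo` (and the same
# option on build_ext / install) is accepted, and the package's
# setup_package.py can branch on it to pick the bundled or the system copy.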
astropy/astropy-helpers | astropy_helpers/setup_helpers.py | find_packages | def find_packages(where='.', exclude=(), invalidate_cache=False):
"""
This version of ``find_packages`` caches previous results to speed up
    subsequent calls. Use ``invalidate_cache=True`` to ignore cached results
from previous ``find_packages`` calls, and repeat the package search.
"""
if exclude:
warnings.warn(
"Use of the exclude parameter is no longer supported since it does "
"not work as expected. Use add_exclude_packages instead. Note that "
"it must be called prior to any other calls from setup helpers.",
AstropyDeprecationWarning)
# Calling add_exclude_packages after this point will have no effect
_module_state['excludes_too_late'] = True
if not invalidate_cache and _module_state['package_cache'] is not None:
return _module_state['package_cache']
packages = _find_packages(
where=where, exclude=list(_module_state['exclude_packages']))
_module_state['package_cache'] = packages
return packages | python | def find_packages(where='.', exclude=(), invalidate_cache=False):
"""
This version of ``find_packages`` caches previous results to speed up
    subsequent calls. Use ``invalidate_cache=True`` to ignore cached results
from previous ``find_packages`` calls, and repeat the package search.
"""
if exclude:
warnings.warn(
"Use of the exclude parameter is no longer supported since it does "
"not work as expected. Use add_exclude_packages instead. Note that "
"it must be called prior to any other calls from setup helpers.",
AstropyDeprecationWarning)
# Calling add_exclude_packages after this point will have no effect
_module_state['excludes_too_late'] = True
if not invalidate_cache and _module_state['package_cache'] is not None:
return _module_state['package_cache']
packages = _find_packages(
where=where, exclude=list(_module_state['exclude_packages']))
_module_state['package_cache'] = packages
return packages | This version of ``find_packages`` caches previous results to speed up
    subsequent calls. Use ``invalidate_cache=True`` to ignore cached results
from previous ``find_packages`` calls, and repeat the package search. | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/setup_helpers.py#L725-L749 |
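A quick usage sketch showing the caching behaviour:

from astropy_helpers.setup_helpers import find_packages

packages = find_packages('.')                          # result is cached
packages = find_packages('.', invalidate_cache=True)   # force a fresh search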
astropy/astropy-helpers | astropy_helpers/commands/build_ext.py | should_build_with_cython | def should_build_with_cython(previous_cython_version, is_release):
"""
Returns the previously used Cython version (or 'unknown' if not
previously built) if Cython should be used to build extension modules from
pyx files.
"""
# Only build with Cython if, of course, Cython is installed, we're in a
# development version (i.e. not release) or the Cython-generated source
# files haven't been created yet (cython_version == 'unknown'). The latter
# case can happen even when release is True if checking out a release tag
# from the repository
have_cython = False
try:
from Cython import __version__ as cython_version # noqa
have_cython = True
except ImportError:
pass
if have_cython and (not is_release or previous_cython_version == 'unknown'):
return cython_version
else:
return False | python | def should_build_with_cython(previous_cython_version, is_release):
"""
Returns the previously used Cython version (or 'unknown' if not
previously built) if Cython should be used to build extension modules from
pyx files.
"""
# Only build with Cython if, of course, Cython is installed, we're in a
# development version (i.e. not release) or the Cython-generated source
# files haven't been created yet (cython_version == 'unknown'). The latter
# case can happen even when release is True if checking out a release tag
# from the repository
have_cython = False
try:
from Cython import __version__ as cython_version # noqa
have_cython = True
except ImportError:
pass
if have_cython and (not is_release or previous_cython_version == 'unknown'):
return cython_version
else:
return False | Returns the previously used Cython version (or 'unknown' if not
previously built) if Cython should be used to build extension modules from
pyx files. | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/commands/build_ext.py#L15-L37 |
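A minimal sketch of how the return value might be consumed; the 'unknown' sentinel matches the docstring above:

from astropy_helpers.commands.build_ext import should_build_with_cython

cython_version = should_build_with_cython('unknown', is_release=True)
if cython_version:
    print('Cythonizing .pyx sources with Cython', cython_version)
else:
    print('Falling back to the pre-generated C sources')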
astropy/astropy-helpers | astropy_helpers/commands/build_ext.py | AstropyHelpersBuildExt._check_cython_sources | def _check_cython_sources(self, extension):
"""
Where relevant, make sure that the .c files associated with .pyx
modules are present (if building without Cython installed).
"""
# Determine the compiler we'll be using
if self.compiler is None:
compiler = get_default_compiler()
else:
compiler = self.compiler
# Replace .pyx with C-equivalents, unless c files are missing
for jdx, src in enumerate(extension.sources):
base, ext = os.path.splitext(src)
pyxfn = base + '.pyx'
cfn = base + '.c'
cppfn = base + '.cpp'
if not os.path.isfile(pyxfn):
continue
if self._uses_cython:
extension.sources[jdx] = pyxfn
else:
if os.path.isfile(cfn):
extension.sources[jdx] = cfn
elif os.path.isfile(cppfn):
extension.sources[jdx] = cppfn
else:
msg = (
'Could not find C/C++ file {0}.(c/cpp) for Cython '
'file {1} when building extension {2}. Cython '
'must be installed to build from a git '
'checkout.'.format(base, pyxfn, extension.name))
raise IOError(errno.ENOENT, msg, cfn)
# Cython (at least as of 0.29.2) uses deprecated Numpy API features
# the use of which produces a few warnings when compiling.
# These additional flags should squelch those warnings.
# TODO: Feel free to remove this if/when a Cython update
# removes use of the deprecated Numpy API
if compiler == 'unix':
extension.extra_compile_args.extend([
'-Wp,-w', '-Wno-unused-function']) | python | def _check_cython_sources(self, extension):
"""
Where relevant, make sure that the .c files associated with .pyx
modules are present (if building without Cython installed).
"""
# Determine the compiler we'll be using
if self.compiler is None:
compiler = get_default_compiler()
else:
compiler = self.compiler
# Replace .pyx with C-equivalents, unless c files are missing
for jdx, src in enumerate(extension.sources):
base, ext = os.path.splitext(src)
pyxfn = base + '.pyx'
cfn = base + '.c'
cppfn = base + '.cpp'
if not os.path.isfile(pyxfn):
continue
if self._uses_cython:
extension.sources[jdx] = pyxfn
else:
if os.path.isfile(cfn):
extension.sources[jdx] = cfn
elif os.path.isfile(cppfn):
extension.sources[jdx] = cppfn
else:
msg = (
'Could not find C/C++ file {0}.(c/cpp) for Cython '
'file {1} when building extension {2}. Cython '
'must be installed to build from a git '
'checkout.'.format(base, pyxfn, extension.name))
raise IOError(errno.ENOENT, msg, cfn)
# Cython (at least as of 0.29.2) uses deprecated Numpy API features
# the use of which produces a few warnings when compiling.
# These additional flags should squelch those warnings.
# TODO: Feel free to remove this if/when a Cython update
# removes use of the deprecated Numpy API
if compiler == 'unix':
extension.extra_compile_args.extend([
'-Wp,-w', '-Wno-unused-function']) | Where relevant, make sure that the .c files associated with .pyx
modules are present (if building without Cython installed). | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/commands/build_ext.py#L162-L206 |
astropy/astropy-helpers | astropy_helpers/openmp_helpers.py | _get_flag_value_from_var | def _get_flag_value_from_var(flag, var, delim=' '):
"""
Extract flags from an environment variable.
Parameters
----------
flag : str
The flag to extract, for example '-I' or '-L'
var : str
The environment variable to extract the flag from, e.g. CFLAGS or LDFLAGS.
delim : str, optional
The delimiter separating flags inside the environment variable
Examples
--------
    Let's assume LDFLAGS is set to '-L/usr/local/include -customflag'. This
function will then return the following:
>>> _get_flag_value_from_var('-L', 'LDFLAGS')
'/usr/local/include'
Notes
-----
Environment variables are first checked in ``os.environ[var]``, then in
``distutils.sysconfig.get_config_var(var)``.
This function is not supported on Windows.
"""
if sys.platform.startswith('win'):
return None
# Simple input validation
if not var or not flag:
return None
flag_length = len(flag)
if not flag_length:
return None
    # Look for var in os.environ then in get_config_var
if var in os.environ:
flags = os.environ[var]
else:
try:
flags = get_config_var(var)
except KeyError:
return None
# Extract flag from {var:value}
if flags:
for item in flags.split(delim):
if item.startswith(flag):
return item[flag_length:] | python | def _get_flag_value_from_var(flag, var, delim=' '):
"""
Extract flags from an environment variable.
Parameters
----------
flag : str
The flag to extract, for example '-I' or '-L'
var : str
The environment variable to extract the flag from, e.g. CFLAGS or LDFLAGS.
delim : str, optional
The delimiter separating flags inside the environment variable
Examples
--------
    Let's assume LDFLAGS is set to '-L/usr/local/include -customflag'. This
function will then return the following:
>>> _get_flag_value_from_var('-L', 'LDFLAGS')
'/usr/local/include'
Notes
-----
Environment variables are first checked in ``os.environ[var]``, then in
``distutils.sysconfig.get_config_var(var)``.
This function is not supported on Windows.
"""
if sys.platform.startswith('win'):
return None
# Simple input validation
if not var or not flag:
return None
flag_length = len(flag)
if not flag_length:
return None
    # Look for var in os.environ then in get_config_var
if var in os.environ:
flags = os.environ[var]
else:
try:
flags = get_config_var(var)
except KeyError:
return None
# Extract flag from {var:value}
if flags:
for item in flags.split(delim):
if item.startswith(flag):
return item[flag_length:] | Extract flags from an environment variable.
Parameters
----------
flag : str
The flag to extract, for example '-I' or '-L'
var : str
The environment variable to extract the flag from, e.g. CFLAGS or LDFLAGS.
delim : str, optional
The delimiter separating flags inside the environment variable
Examples
--------
    Let's assume LDFLAGS is set to '-L/usr/local/include -customflag'. This
function will then return the following:
>>> _get_flag_value_from_var('-L', 'LDFLAGS')
'/usr/local/include'
Notes
-----
Environment variables are first checked in ``os.environ[var]``, then in
``distutils.sysconfig.get_config_var(var)``.
This function is not supported on Windows. | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/openmp_helpers.py#L52-L104 |
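A small sketch of the private helper's behaviour on a POSIX system (the path below is a made-up example, and this is not a public API):

import os
from astropy_helpers.openmp_helpers import _get_flag_value_from_var

os.environ['LDFLAGS'] = '-L/opt/libomp/lib -customflag'
print(_get_flag_value_from_var('-L', 'LDFLAGS'))   # '/opt/libomp/lib'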
astropy/astropy-helpers | astropy_helpers/openmp_helpers.py | get_openmp_flags | def get_openmp_flags():
"""
Utility for returning compiler and linker flags possibly needed for
OpenMP support.
Returns
-------
result : `{'compiler_flags':<flags>, 'linker_flags':<flags>}`
Notes
-----
The flags returned are not tested for validity, use
`check_openmp_support(openmp_flags=get_openmp_flags())` to do so.
"""
compile_flags = []
link_flags = []
if get_compiler_option() == 'msvc':
compile_flags.append('-openmp')
else:
include_path = _get_flag_value_from_var('-I', 'CFLAGS')
if include_path:
compile_flags.append('-I' + include_path)
lib_path = _get_flag_value_from_var('-L', 'LDFLAGS')
if lib_path:
link_flags.append('-L' + lib_path)
link_flags.append('-Wl,-rpath,' + lib_path)
compile_flags.append('-fopenmp')
link_flags.append('-fopenmp')
return {'compiler_flags': compile_flags, 'linker_flags': link_flags} | python | def get_openmp_flags():
"""
Utility for returning compiler and linker flags possibly needed for
OpenMP support.
Returns
-------
result : `{'compiler_flags':<flags>, 'linker_flags':<flags>}`
Notes
-----
The flags returned are not tested for validity, use
`check_openmp_support(openmp_flags=get_openmp_flags())` to do so.
"""
compile_flags = []
link_flags = []
if get_compiler_option() == 'msvc':
compile_flags.append('-openmp')
else:
include_path = _get_flag_value_from_var('-I', 'CFLAGS')
if include_path:
compile_flags.append('-I' + include_path)
lib_path = _get_flag_value_from_var('-L', 'LDFLAGS')
if lib_path:
link_flags.append('-L' + lib_path)
link_flags.append('-Wl,-rpath,' + lib_path)
compile_flags.append('-fopenmp')
link_flags.append('-fopenmp')
return {'compiler_flags': compile_flags, 'linker_flags': link_flags} | Utility for returning compiler and linker flags possibly needed for
OpenMP support.
Returns
-------
result : `{'compiler_flags':<flags>, 'linker_flags':<flags>}`
Notes
-----
The flags returned are not tested for validity, use
`check_openmp_support(openmp_flags=get_openmp_flags())` to do so. | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/openmp_helpers.py#L107-L141 |
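A brief sketch of what this helper returns; the printed dict is typical for GCC but not guaranteed on other toolchains:

from astropy_helpers.openmp_helpers import get_openmp_flags

flags = get_openmp_flags()
print(flags)   # e.g. {'compiler_flags': ['-fopenmp'], 'linker_flags': ['-fopenmp']}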
astropy/astropy-helpers | astropy_helpers/openmp_helpers.py | check_openmp_support | def check_openmp_support(openmp_flags=None):
"""
Check whether OpenMP test code can be compiled and run.
Parameters
----------
openmp_flags : dict, optional
This should be a dictionary with keys ``compiler_flags`` and
        ``linker_flags`` giving the compilation and linking flags respectively.
These are passed as `extra_postargs` to `compile()` and
`link_executable()` respectively. If this is not set, the flags will
be automatically determined using environment variables.
Returns
-------
result : bool
`True` if the test passed, `False` otherwise.
"""
ccompiler = new_compiler()
customize_compiler(ccompiler)
if not openmp_flags:
# customize_compiler() extracts info from os.environ. If certain keys
# exist it uses these plus those from sysconfig.get_config_vars().
# If the key is missing in os.environ it is not extracted from
        # sysconfig.get_config_var(). E.g. 'LDFLAGS' gets left out, preventing
# clang from finding libomp.dylib because -L<path> is not passed to
# linker. Call get_openmp_flags() to get flags missed by
# customize_compiler().
openmp_flags = get_openmp_flags()
compile_flags = openmp_flags.get('compiler_flags')
link_flags = openmp_flags.get('linker_flags')
# Pass -coverage flag to linker.
# https://github.com/astropy/astropy-helpers/pull/374
if '-coverage' in compile_flags and '-coverage' not in link_flags:
link_flags.append('-coverage')
tmp_dir = tempfile.mkdtemp()
start_dir = os.path.abspath('.')
try:
os.chdir(tmp_dir)
# Write test program
with open('test_openmp.c', 'w') as f:
f.write(CCODE)
os.mkdir('objects')
# Compile, test program
ccompiler.compile(['test_openmp.c'], output_dir='objects',
extra_postargs=compile_flags)
# Link test program
objects = glob.glob(os.path.join('objects', '*' + ccompiler.obj_extension))
ccompiler.link_executable(objects, 'test_openmp',
extra_postargs=link_flags)
# Run test program
output = subprocess.check_output('./test_openmp')
output = output.decode(sys.stdout.encoding or 'utf-8').splitlines()
if 'nthreads=' in output[0]:
nthreads = int(output[0].strip().split('=')[1])
if len(output) == nthreads:
is_openmp_supported = True
else:
log.warn("Unexpected number of lines from output of test OpenMP "
"program (output was {0})".format(output))
is_openmp_supported = False
else:
log.warn("Unexpected output from test OpenMP "
"program (output was {0})".format(output))
is_openmp_supported = False
except (CompileError, LinkError, subprocess.CalledProcessError):
is_openmp_supported = False
finally:
os.chdir(start_dir)
return is_openmp_supported | python | def check_openmp_support(openmp_flags=None):
"""
Check whether OpenMP test code can be compiled and run.
Parameters
----------
openmp_flags : dict, optional
This should be a dictionary with keys ``compiler_flags`` and
        ``linker_flags`` giving the compilation and linking flags respectively.
These are passed as `extra_postargs` to `compile()` and
`link_executable()` respectively. If this is not set, the flags will
be automatically determined using environment variables.
Returns
-------
result : bool
`True` if the test passed, `False` otherwise.
"""
ccompiler = new_compiler()
customize_compiler(ccompiler)
if not openmp_flags:
# customize_compiler() extracts info from os.environ. If certain keys
# exist it uses these plus those from sysconfig.get_config_vars().
# If the key is missing in os.environ it is not extracted from
        # sysconfig.get_config_var(). E.g. 'LDFLAGS' gets left out, preventing
# clang from finding libomp.dylib because -L<path> is not passed to
# linker. Call get_openmp_flags() to get flags missed by
# customize_compiler().
openmp_flags = get_openmp_flags()
compile_flags = openmp_flags.get('compiler_flags')
link_flags = openmp_flags.get('linker_flags')
# Pass -coverage flag to linker.
# https://github.com/astropy/astropy-helpers/pull/374
if '-coverage' in compile_flags and '-coverage' not in link_flags:
link_flags.append('-coverage')
tmp_dir = tempfile.mkdtemp()
start_dir = os.path.abspath('.')
try:
os.chdir(tmp_dir)
# Write test program
with open('test_openmp.c', 'w') as f:
f.write(CCODE)
os.mkdir('objects')
# Compile, test program
ccompiler.compile(['test_openmp.c'], output_dir='objects',
extra_postargs=compile_flags)
# Link test program
objects = glob.glob(os.path.join('objects', '*' + ccompiler.obj_extension))
ccompiler.link_executable(objects, 'test_openmp',
extra_postargs=link_flags)
# Run test program
output = subprocess.check_output('./test_openmp')
output = output.decode(sys.stdout.encoding or 'utf-8').splitlines()
if 'nthreads=' in output[0]:
nthreads = int(output[0].strip().split('=')[1])
if len(output) == nthreads:
is_openmp_supported = True
else:
log.warn("Unexpected number of lines from output of test OpenMP "
"program (output was {0})".format(output))
is_openmp_supported = False
else:
log.warn("Unexpected output from test OpenMP "
"program (output was {0})".format(output))
is_openmp_supported = False
except (CompileError, LinkError, subprocess.CalledProcessError):
is_openmp_supported = False
finally:
os.chdir(start_dir)
return is_openmp_supported | Check whether OpenMP test code can be compiled and run.
Parameters
----------
openmp_flags : dict, optional
This should be a dictionary with keys ``compiler_flags`` and
        ``linker_flags`` giving the compilation and linking flags respectively.
These are passed as `extra_postargs` to `compile()` and
`link_executable()` respectively. If this is not set, the flags will
be automatically determined using environment variables.
Returns
-------
result : bool
`True` if the test passed, `False` otherwise. | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/openmp_helpers.py#L144-L227 |
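A hedged sketch passing custom flags, e.g. for clang with libomp installed under a hypothetical prefix (the paths and flags below are placeholders, not defaults of this module):

from astropy_helpers.openmp_helpers import check_openmp_support

custom = {'compiler_flags': ['-Xpreprocessor', '-fopenmp', '-I/opt/libomp/include'],
          'linker_flags': ['-L/opt/libomp/lib', '-lomp']}
print(check_openmp_support(openmp_flags=custom))
# True only if the small OpenMP test program both compiles and runs.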
astropy/astropy-helpers | astropy_helpers/openmp_helpers.py | is_openmp_supported | def is_openmp_supported():
"""
Determine whether the build compiler has OpenMP support.
"""
log_threshold = log.set_threshold(log.FATAL)
ret = check_openmp_support()
log.set_threshold(log_threshold)
return ret | python | def is_openmp_supported():
"""
Determine whether the build compiler has OpenMP support.
"""
log_threshold = log.set_threshold(log.FATAL)
ret = check_openmp_support()
log.set_threshold(log_threshold)
return ret | Determine whether the build compiler has OpenMP support. | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/openmp_helpers.py#L230-L237 |
astropy/astropy-helpers | astropy_helpers/openmp_helpers.py | add_openmp_flags_if_available | def add_openmp_flags_if_available(extension):
"""
Add OpenMP compilation flags, if supported (if not a warning will be
printed to the console and no flags will be added.)
Returns `True` if the flags were added, `False` otherwise.
"""
if _ASTROPY_DISABLE_SETUP_WITH_OPENMP_:
log.info("OpenMP support has been explicitly disabled.")
return False
openmp_flags = get_openmp_flags()
using_openmp = check_openmp_support(openmp_flags=openmp_flags)
if using_openmp:
compile_flags = openmp_flags.get('compiler_flags')
link_flags = openmp_flags.get('linker_flags')
log.info("Compiling Cython/C/C++ extension with OpenMP support")
extension.extra_compile_args.extend(compile_flags)
extension.extra_link_args.extend(link_flags)
else:
log.warn("Cannot compile Cython/C/C++ extension with OpenMP, reverting "
"to non-parallel code")
return using_openmp | python | def add_openmp_flags_if_available(extension):
"""
Add OpenMP compilation flags, if supported (if not a warning will be
printed to the console and no flags will be added.)
Returns `True` if the flags were added, `False` otherwise.
"""
if _ASTROPY_DISABLE_SETUP_WITH_OPENMP_:
log.info("OpenMP support has been explicitly disabled.")
return False
openmp_flags = get_openmp_flags()
using_openmp = check_openmp_support(openmp_flags=openmp_flags)
if using_openmp:
compile_flags = openmp_flags.get('compiler_flags')
link_flags = openmp_flags.get('linker_flags')
log.info("Compiling Cython/C/C++ extension with OpenMP support")
extension.extra_compile_args.extend(compile_flags)
extension.extra_link_args.extend(link_flags)
else:
log.warn("Cannot compile Cython/C/C++ extension with OpenMP, reverting "
"to non-parallel code")
return using_openmp | Add OpenMP compilation flags, if supported (if not a warning will be
printed to the console and no flags will be added.)
Returns `True` if the flags were added, `False` otherwise. | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/openmp_helpers.py#L240-L265 |
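A typical-use sketch inside a get_extensions() hook; the extension name and source file are placeholders:

from distutils.core import Extension
from astropy_helpers.openmp_helpers import add_openmp_flags_if_available

ext = Extension('mypackage._fast', sources=['mypackage/_fast.c'])
if not add_openmp_flags_if_available(ext):
    print('Building the serial code path only')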
astropy/astropy-helpers | astropy_helpers/openmp_helpers.py | generate_openmp_enabled_py | def generate_openmp_enabled_py(packagename, srcdir='.', disable_openmp=None):
"""
Generate ``package.openmp_enabled.is_openmp_enabled``, which can then be used
to determine, post build, whether the package was built with or without
OpenMP support.
"""
if packagename.lower() == 'astropy':
packagetitle = 'Astropy'
else:
packagetitle = packagename
epoch = int(os.environ.get('SOURCE_DATE_EPOCH', time.time()))
timestamp = datetime.datetime.utcfromtimestamp(epoch)
if disable_openmp is not None:
import builtins
builtins._ASTROPY_DISABLE_SETUP_WITH_OPENMP_ = disable_openmp
if _ASTROPY_DISABLE_SETUP_WITH_OPENMP_:
log.info("OpenMP support has been explicitly disabled.")
openmp_support = False if _ASTROPY_DISABLE_SETUP_WITH_OPENMP_ else is_openmp_supported()
src = _IS_OPENMP_ENABLED_SRC.format(packagetitle=packagetitle,
timestamp=timestamp,
return_bool=openmp_support)
package_srcdir = os.path.join(srcdir, *packagename.split('.'))
is_openmp_enabled_py = os.path.join(package_srcdir, 'openmp_enabled.py')
with open(is_openmp_enabled_py, 'w') as f:
f.write(src) | python | def generate_openmp_enabled_py(packagename, srcdir='.', disable_openmp=None):
"""
Generate ``package.openmp_enabled.is_openmp_enabled``, which can then be used
to determine, post build, whether the package was built with or without
OpenMP support.
"""
if packagename.lower() == 'astropy':
packagetitle = 'Astropy'
else:
packagetitle = packagename
epoch = int(os.environ.get('SOURCE_DATE_EPOCH', time.time()))
timestamp = datetime.datetime.utcfromtimestamp(epoch)
if disable_openmp is not None:
import builtins
builtins._ASTROPY_DISABLE_SETUP_WITH_OPENMP_ = disable_openmp
if _ASTROPY_DISABLE_SETUP_WITH_OPENMP_:
log.info("OpenMP support has been explicitly disabled.")
openmp_support = False if _ASTROPY_DISABLE_SETUP_WITH_OPENMP_ else is_openmp_supported()
src = _IS_OPENMP_ENABLED_SRC.format(packagetitle=packagetitle,
timestamp=timestamp,
return_bool=openmp_support)
package_srcdir = os.path.join(srcdir, *packagename.split('.'))
is_openmp_enabled_py = os.path.join(package_srcdir, 'openmp_enabled.py')
with open(is_openmp_enabled_py, 'w') as f:
f.write(src) | Generate ``package.openmp_enabled.is_openmp_enabled``, which can then be used
to determine, post build, whether the package was built with or without
OpenMP support. | https://github.com/astropy/astropy-helpers/blob/f5a27d3f84a98ea0eebb85e0cf3e7214c6bc0d09/astropy_helpers/openmp_helpers.py#L279-L308 |
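A short sketch; 'mypackage' is a placeholder package name:

from astropy_helpers.openmp_helpers import generate_openmp_enabled_py

generate_openmp_enabled_py('mypackage', srcdir='.')
# The generated module can then be queried after installation:
#   from mypackage.openmp_enabled import is_openmp_enabled
#   is_openmp_enabled()   # True or False, fixed at build time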
PmagPy/PmagPy | dialogs/magic_grid3.py | HugeTable.GetValue | def GetValue(self, row, col):
"""
Find the matching value from pandas DataFrame,
return it.
"""
if len(self.dataframe):
return str(self.dataframe.iloc[row, col])
return '' | python | def GetValue(self, row, col):
"""
Find the matching value from pandas DataFrame,
return it.
"""
if len(self.dataframe):
return str(self.dataframe.iloc[row, col])
return '' | Find the matching value from pandas DataFrame,
return it. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/dialogs/magic_grid3.py#L43-L50 |
PmagPy/PmagPy | dialogs/magic_grid3.py | HugeTable.SetValue | def SetValue(self, row, col, value):
"""
Set value in the pandas DataFrame
"""
self.dataframe.iloc[row, col] = value | python | def SetValue(self, row, col, value):
"""
Set value in the pandas DataFrame
"""
self.dataframe.iloc[row, col] = value | Set value in the pandas DataFrame | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/dialogs/magic_grid3.py#L52-L56 |
PmagPy/PmagPy | dialogs/magic_grid3.py | HugeTable.SetColumnValues | def SetColumnValues(self, col, value):
"""
Custom method to efficiently set all values
in a column.
Parameters
----------
col : str or int
name or index position of column
value : list-like
values to assign to all cells in the column
"""
try:
self.dataframe.iloc[:, col] = value
except ValueError:
self.dataframe.loc[:, col] = value | python | def SetColumnValues(self, col, value):
"""
Custom method to efficiently set all values
in a column.
Parameters
----------
col : str or int
name or index position of column
value : list-like
values to assign to all cells in the column
"""
try:
self.dataframe.iloc[:, col] = value
except ValueError:
self.dataframe.loc[:, col] = value | Custom method to efficiently set all values
in a column.
Parameters
----------
col : str or int
name or index position of column
value : list-like
values to assign to all cells in the column | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/dialogs/magic_grid3.py#L58-L73 |
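The iloc-then-loc fallback above can be illustrated with plain pandas, without any wx machinery (the frame below is made up):

import pandas as pd

df = pd.DataFrame({'a': ['1', '2'], 'b': ['x', 'y']})
df.iloc[:, 1] = ['new1', 'new2']   # positional assignment, tried first
df.loc[:, 'b'] = 'same'            # label-based assignment, used if iloc raises ValueError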
PmagPy/PmagPy | dialogs/magic_grid3.py | HugeTable.GetColLabelValue | def GetColLabelValue(self, col):
"""
Get col label from dataframe
"""
if len(self.dataframe):
return self.dataframe.columns[col]
return '' | python | def GetColLabelValue(self, col):
"""
Get col label from dataframe
"""
if len(self.dataframe):
return self.dataframe.columns[col]
return '' | Get col label from dataframe | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/dialogs/magic_grid3.py#L75-L81 |
PmagPy/PmagPy | dialogs/magic_grid3.py | HugeTable.SetColLabelValue | def SetColLabelValue(self, col, value):
"""
Set col label value in dataframe
"""
if len(self.dataframe):
col_name = str(self.dataframe.columns[col])
self.dataframe.rename(columns={col_name: str(value)}, inplace=True)
return None | python | def SetColLabelValue(self, col, value):
"""
Set col label value in dataframe
"""
if len(self.dataframe):
col_name = str(self.dataframe.columns[col])
self.dataframe.rename(columns={col_name: str(value)}, inplace=True)
return None | Set col label value in dataframe | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/dialogs/magic_grid3.py#L83-L90 |
PmagPy/PmagPy | dialogs/magic_grid3.py | BaseMagicGrid.set_scrollbars | def set_scrollbars(self):
"""
Set to always have vertical scrollbar.
Have horizontal scrollbar unless grid has very few rows.
Older versions of wxPython will choke on this,
in which case nothing happens.
"""
try:
if len(self.row_labels) < 5:
show_horizontal = wx.SHOW_SB_NEVER
else:
show_horizontal = wx.SHOW_SB_DEFAULT
self.ShowScrollbars(show_horizontal, wx.SHOW_SB_DEFAULT)
except AttributeError:
pass | python | def set_scrollbars(self):
"""
Set to always have vertical scrollbar.
Have horizontal scrollbar unless grid has very few rows.
Older versions of wxPython will choke on this,
in which case nothing happens.
"""
try:
if len(self.row_labels) < 5:
show_horizontal = wx.SHOW_SB_NEVER
else:
show_horizontal = wx.SHOW_SB_DEFAULT
self.ShowScrollbars(show_horizontal, wx.SHOW_SB_DEFAULT)
except AttributeError:
pass | Set to always have vertical scrollbar.
Have horizontal scrollbar unless grid has very few rows.
Older versions of wxPython will choke on this,
in which case nothing happens. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/dialogs/magic_grid3.py#L144-L158 |
PmagPy/PmagPy | dialogs/magic_grid3.py | BaseMagicGrid.add_items | def add_items(self, dataframe, hide_cols=()):
"""
Add items and/or update existing items in grid
"""
# replace "None" values with ""
dataframe = dataframe.fillna("")
# remove any columns that shouldn't be shown
for col in hide_cols:
if col in dataframe.columns:
del dataframe[col]
# add more rows
self.AppendRows(len(dataframe))
columns = dataframe.columns
row_num = -1
# fill in all rows with appropriate values
for ind, row in dataframe.iterrows():
row_num += 1
for col_num, col in enumerate(columns):
value = row[col]
self.SetCellValue(row_num, col_num, str(value))
# set citation default value
if col == 'citations':
citation = row['citations']
if (citation is None) or (citation is np.nan):
self.SetCellValue(row_num, col_num, 'This study')
else:
if 'This study' not in citation:
if len(citation):
citation += ':'
citation += 'This study'
self.SetCellValue(row_num, col_num, citation)
self.row_labels.extend(dataframe.index) | python | def add_items(self, dataframe, hide_cols=()):
"""
Add items and/or update existing items in grid
"""
# replace "None" values with ""
dataframe = dataframe.fillna("")
# remove any columns that shouldn't be shown
for col in hide_cols:
if col in dataframe.columns:
del dataframe[col]
# add more rows
self.AppendRows(len(dataframe))
columns = dataframe.columns
row_num = -1
# fill in all rows with appropriate values
for ind, row in dataframe.iterrows():
row_num += 1
for col_num, col in enumerate(columns):
value = row[col]
self.SetCellValue(row_num, col_num, str(value))
# set citation default value
if col == 'citations':
citation = row['citations']
if (citation is None) or (citation is np.nan):
self.SetCellValue(row_num, col_num, 'This study')
else:
if 'This study' not in citation:
if len(citation):
citation += ':'
citation += 'This study'
self.SetCellValue(row_num, col_num, citation)
self.row_labels.extend(dataframe.index) | Add items and/or update existing items in grid | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/dialogs/magic_grid3.py#L160-L191 |
PmagPy/PmagPy | dialogs/magic_grid3.py | BaseMagicGrid.save_items | def save_items(self, rows=None, verbose=False):
"""
Return a dictionary of row data for selected rows:
{1: {col1: val1, col2: val2}, ...}
If a list of row numbers isn't provided, get data for all.
"""
if rows:
rows = rows
else:
rows = list(range(self.GetNumberRows()))
cols = list(range(self.GetNumberCols()))
data = {}
for row in rows:
data[row] = {}
for col in cols:
col_name = self.GetColLabelValue(col)
if verbose:
print(col_name, ":", self.GetCellValue(row, col))
data[row][col_name] = self.GetCellValue(row, col)
return data | python | def save_items(self, rows=None, verbose=False):
"""
Return a dictionary of row data for selected rows:
{1: {col1: val1, col2: val2}, ...}
If a list of row numbers isn't provided, get data for all.
"""
if rows:
rows = rows
else:
rows = list(range(self.GetNumberRows()))
cols = list(range(self.GetNumberCols()))
data = {}
for row in rows:
data[row] = {}
for col in cols:
col_name = self.GetColLabelValue(col)
if verbose:
print(col_name, ":", self.GetCellValue(row, col))
data[row][col_name] = self.GetCellValue(row, col)
return data | Return a dictionary of row data for selected rows:
{1: {col1: val1, col2: val2}, ...}
If a list of row numbers isn't provided, get data for all. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/dialogs/magic_grid3.py#L194-L213 |
PmagPy/PmagPy | dialogs/magic_grid3.py | BaseMagicGrid.on_edit_grid | def on_edit_grid(self, event):
"""sets self.changes to true when user edits the grid.
provides down and up key functionality for exiting the editor"""
if not self.changes:
self.changes = {event.Row}
else:
self.changes.add(event.Row)
#self.changes = True
try:
editor = event.GetControl()
editor.Bind(wx.EVT_KEY_DOWN, self.onEditorKey)
except AttributeError:
# if it's a EVT_GRID_EDITOR_SHOWN, it doesn't have the GetControl method
pass | python | def on_edit_grid(self, event):
"""sets self.changes to true when user edits the grid.
provides down and up key functionality for exiting the editor"""
if not self.changes:
self.changes = {event.Row}
else:
self.changes.add(event.Row)
#self.changes = True
try:
editor = event.GetControl()
editor.Bind(wx.EVT_KEY_DOWN, self.onEditorKey)
except AttributeError:
# if it's a EVT_GRID_EDITOR_SHOWN, it doesn't have the GetControl method
pass | sets self.changes to true when user edits the grid.
provides down and up key functionality for exiting the editor | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/dialogs/magic_grid3.py#L236-L249 |
PmagPy/PmagPy | dialogs/magic_grid3.py | BaseMagicGrid.do_paste | def do_paste(self, event):
"""
Read clipboard into dataframe
Paste data into grid, adding extra rows if needed
and ignoring extra columns.
"""
# find where the user has clicked
col_ind = self.GetGridCursorCol()
row_ind = self.GetGridCursorRow()
# read in clipboard text
text_df = pd.read_clipboard(header=None, sep='\t').fillna('')
        # add extra rows if needed to accommodate clipboard text
row_length_diff = len(text_df) - (len(self.row_labels) - row_ind)
if row_length_diff > 0:
for n in range(row_length_diff):
self.add_row()
# ignore excess columns if present
col_length_diff = len(text_df.columns) - (len(self.col_labels) - col_ind)
if col_length_diff > 0:
text_df = text_df.iloc[:, :-col_length_diff].copy()
# go through copied text and parse it into the grid rows
for label, row_data in text_df.iterrows():
col_range = list(range(col_ind, col_ind + len(row_data)))
if len(row_data) > 1:
cols = list(zip(col_range, row_data.index))
for column in cols:
value = row_data[column[1]]
this_col = column[0]
self.SetCellValue(row_ind, this_col, str(value))
else:
value = row_data[0]
self.SetCellValue(row_ind, col_ind, str(value))
row_ind += 1
# could instead use wxPython clipboard here
# see old git history for that
self.size_grid()
event.Skip() | python | def do_paste(self, event):
"""
Read clipboard into dataframe
Paste data into grid, adding extra rows if needed
and ignoring extra columns.
"""
# find where the user has clicked
col_ind = self.GetGridCursorCol()
row_ind = self.GetGridCursorRow()
# read in clipboard text
text_df = pd.read_clipboard(header=None, sep='\t').fillna('')
# add extra rows if need to accomadate clipboard text
row_length_diff = len(text_df) - (len(self.row_labels) - row_ind)
if row_length_diff > 0:
for n in range(row_length_diff):
self.add_row()
# ignore excess columns if present
col_length_diff = len(text_df.columns) - (len(self.col_labels) - col_ind)
if col_length_diff > 0:
text_df = text_df.iloc[:, :-col_length_diff].copy()
# go through copied text and parse it into the grid rows
for label, row_data in text_df.iterrows():
col_range = list(range(col_ind, col_ind + len(row_data)))
if len(row_data) > 1:
cols = list(zip(col_range, row_data.index))
for column in cols:
value = row_data[column[1]]
this_col = column[0]
self.SetCellValue(row_ind, this_col, str(value))
else:
value = row_data[0]
self.SetCellValue(row_ind, col_ind, str(value))
row_ind += 1
# could instead use wxPython clipboard here
# see old git history for that
self.size_grid()
event.Skip() | Read clipboard into dataframe
Paste data into grid, adding extra rows if needed
and ignoring extra columns. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/dialogs/magic_grid3.py#L281-L317 |
PmagPy/PmagPy | dialogs/magic_grid3.py | BaseMagicGrid.update_changes_after_row_delete | def update_changes_after_row_delete(self, row_num):
"""
Update self.changes so that row numbers for edited rows are still correct.
I.e., if row 4 was edited and then row 2 was deleted, row 4 becomes row 3.
This function updates self.changes to reflect that.
"""
if row_num in self.changes.copy():
self.changes.remove(row_num)
updated_rows = []
for changed_row in self.changes:
if changed_row == -1:
updated_rows.append(-1)
if changed_row > row_num:
updated_rows.append(changed_row - 1)
if changed_row < row_num:
updated_rows.append(changed_row)
self.changes = set(updated_rows) | python | def update_changes_after_row_delete(self, row_num):
"""
Update self.changes so that row numbers for edited rows are still correct.
I.e., if row 4 was edited and then row 2 was deleted, row 4 becomes row 3.
This function updates self.changes to reflect that.
"""
if row_num in self.changes.copy():
self.changes.remove(row_num)
updated_rows = []
for changed_row in self.changes:
if changed_row == -1:
updated_rows.append(-1)
if changed_row > row_num:
updated_rows.append(changed_row - 1)
if changed_row < row_num:
updated_rows.append(changed_row)
self.changes = set(updated_rows) | Update self.changes so that row numbers for edited rows are still correct.
I.e., if row 4 was edited and then row 2 was deleted, row 4 becomes row 3.
This function updates self.changes to reflect that. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/dialogs/magic_grid3.py#L345-L361 |
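The renumbering can be illustrated standalone, without a grid (the values are made up):

changes = {1, 4, -1}      # previously edited rows; -1 marks a newly added row
deleted = 2               # row that was just removed
updated = {r - 1 if r > deleted else r for r in changes if r != deleted}
print(updated)            # {1, 3, -1}: old row 4 has become row 3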
PmagPy/PmagPy | dialogs/magic_grid3.py | BaseMagicGrid.paint_invalid_cell | def paint_invalid_cell(self, row, col, color='MEDIUM VIOLET RED',
skip_cell=False):
"""
Take row, column, and turn it color
"""
self.SetColLabelRenderer(col, MyColLabelRenderer('#1101e0'))
# SetCellRenderer doesn't work with table-based grid (HugeGrid class)
if not skip_cell:
self.SetCellRenderer(row, col, MyCustomRenderer(color)) | python | def paint_invalid_cell(self, row, col, color='MEDIUM VIOLET RED',
skip_cell=False):
"""
Take row, column, and turn it color
"""
self.SetColLabelRenderer(col, MyColLabelRenderer('#1101e0'))
# SetCellRenderer doesn't work with table-based grid (HugeGrid class)
if not skip_cell:
self.SetCellRenderer(row, col, MyCustomRenderer(color)) | Take row, column, and turn it color | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/dialogs/magic_grid3.py#L443-L451 |
PmagPy/PmagPy | dialogs/magic_grid3.py | HugeMagicGrid.add_col | def add_col(self, label):
"""
Update table dataframe, and append a new column
Parameters
----------
label : str
Returns
---------
last_col: int
index column number of added col
"""
self.table.dataframe[label] = ''
self.AppendCols(1, updateLabels=False)
last_col = self.table.GetNumberCols() - 1
self.SetColLabelValue(last_col, label)
self.col_labels.append(label)
self.size_grid()
return last_col | python | def add_col(self, label):
"""
Update table dataframe, and append a new column
Parameters
----------
label : str
Returns
---------
last_col: int
index column number of added col
"""
self.table.dataframe[label] = ''
self.AppendCols(1, updateLabels=False)
last_col = self.table.GetNumberCols() - 1
self.SetColLabelValue(last_col, label)
self.col_labels.append(label)
self.size_grid()
return last_col | Update table dataframe, and append a new column
Parameters
----------
label : str
Returns
---------
last_col: int
index column number of added col | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/dialogs/magic_grid3.py#L597-L616 |
PmagPy/PmagPy | dialogs/magic_grid3.py | HugeMagicGrid.remove_col | def remove_col(self, col_num):
"""
update table dataframe, and remove a column.
resize grid to display correctly
"""
label_value = self.GetColLabelValue(col_num).strip('**').strip('^^')
self.col_labels.remove(label_value)
del self.table.dataframe[label_value]
result = self.DeleteCols(pos=col_num, numCols=1, updateLabels=True)
self.size_grid()
return result | python | def remove_col(self, col_num):
"""
update table dataframe, and remove a column.
resize grid to display correctly
"""
label_value = self.GetColLabelValue(col_num).strip('**').strip('^^')
self.col_labels.remove(label_value)
del self.table.dataframe[label_value]
result = self.DeleteCols(pos=col_num, numCols=1, updateLabels=True)
self.size_grid()
return result | update table dataframe, and remove a column.
resize grid to display correctly | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/dialogs/magic_grid3.py#L619-L629 |
PmagPy/PmagPy | programs/plotdi_a.py | main | def main():
"""
NAME
plotdi_a.py
DESCRIPTION
plots equal area projection from dec inc data and fisher mean, cone of confidence
INPUT FORMAT
takes dec, inc, alpha95 as first three columns in space delimited file
SYNTAX
plotdi_a.py [-i][-f FILE]
OPTIONS
-f FILE to read file name from command line
-fmt [png,jpg,eps,pdf,svg] set plot file format ['svg' is default]
-sav save plot and quit
"""
fmt,plot='svg',0
if len(sys.argv) > 0:
if '-h' in sys.argv: # check if help is needed
print(main.__doc__)
sys.exit() # graceful quit
if '-fmt' in sys.argv:
ind=sys.argv.index('-fmt')
fmt=sys.argv[ind+1]
if '-sav' in sys.argv:plot=1
if '-f' in sys.argv:
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
f=open(file,'r')
data=f.readlines()
else:
data=sys.stdin.readlines() # read in data from standard input
DIs,Pars=[],[]
for line in data: # read in the data from standard input
pars=[]
rec=line.split() # split each line on space to get records
DIs.append([float(rec[0]),float(rec[1])])
pars.append(float(rec[0]))
pars.append(float(rec[1]))
pars.append(float(rec[2]))
pars.append(float(rec[0]))
isign=abs(float(rec[1])) / float(rec[1])
pars.append(float(rec[1])-isign*90.) #Beta inc
pars.append(float(rec[2])) # gamma
pars.append(float(rec[0])+90.) # Beta dec
pars.append(0.) #Beta inc
Pars.append(pars)
#
EQ={'eq':1} # make plot dictionary
pmagplotlib.plot_init(EQ['eq'],5,5)
title='Equal area projection'
pmagplotlib.plot_eq(EQ['eq'],DIs,title)# plot directions
for k in range(len(Pars)):
pmagplotlib.plot_ell(EQ['eq'],Pars[k],'b',0,1) # plot ellipses
files={}
for key in list(EQ.keys()):
files[key]=key+'.'+fmt
titles={}
titles['eq']='Equal Area Plot'
if pmagplotlib.isServer:
black = '#000000'
purple = '#800080'
EQ = pmagplotlib.add_borders(EQ,titles,black,purple)
pmagplotlib.save_plots(EQ,files)
elif plot==0:
pmagplotlib.draw_figs(EQ)
ans=input(" S[a]ve to save plot, [q]uit, Return to continue: ")
if ans=="q": sys.exit()
if ans=="a":
pmagplotlib.save_plots(EQ,files)
else:
pmagplotlib.save_plots(EQ,files) | python | def main():
"""
NAME
plotdi_a.py
DESCRIPTION
plots equal area projection from dec inc data and fisher mean, cone of confidence
INPUT FORMAT
takes dec, inc, alpha95 as first three columns in space delimited file
SYNTAX
plotdi_a.py [-i][-f FILE]
OPTIONS
-f FILE to read file name from command line
-fmt [png,jpg,eps,pdf,svg] set plot file format ['svg' is default]
-sav save plot and quit
"""
fmt,plot='svg',0
if len(sys.argv) > 0:
if '-h' in sys.argv: # check if help is needed
print(main.__doc__)
sys.exit() # graceful quit
if '-fmt' in sys.argv:
ind=sys.argv.index('-fmt')
fmt=sys.argv[ind+1]
if '-sav' in sys.argv:plot=1
if '-f' in sys.argv:
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
f=open(file,'r')
data=f.readlines()
else:
data=sys.stdin.readlines() # read in data from standard input
DIs,Pars=[],[]
for line in data: # read in the data from standard input
pars=[]
rec=line.split() # split each line on space to get records
DIs.append([float(rec[0]),float(rec[1])])
pars.append(float(rec[0]))
pars.append(float(rec[1]))
pars.append(float(rec[2]))
pars.append(float(rec[0]))
isign=abs(float(rec[1])) / float(rec[1])
pars.append(float(rec[1])-isign*90.) #Beta inc
pars.append(float(rec[2])) # gamma
pars.append(float(rec[0])+90.) # Beta dec
pars.append(0.) #Beta inc
Pars.append(pars)
#
EQ={'eq':1} # make plot dictionary
pmagplotlib.plot_init(EQ['eq'],5,5)
title='Equal area projection'
pmagplotlib.plot_eq(EQ['eq'],DIs,title)# plot directions
for k in range(len(Pars)):
pmagplotlib.plot_ell(EQ['eq'],Pars[k],'b',0,1) # plot ellipses
files={}
for key in list(EQ.keys()):
files[key]=key+'.'+fmt
titles={}
titles['eq']='Equal Area Plot'
if pmagplotlib.isServer:
black = '#000000'
purple = '#800080'
EQ = pmagplotlib.add_borders(EQ,titles,black,purple)
pmagplotlib.save_plots(EQ,files)
elif plot==0:
pmagplotlib.draw_figs(EQ)
ans=input(" S[a]ve to save plot, [q]uit, Return to continue: ")
if ans=="q": sys.exit()
if ans=="a":
pmagplotlib.save_plots(EQ,files)
else:
pmagplotlib.save_plots(EQ,files) | NAME
plotdi_a.py
DESCRIPTION
plots equal area projection from dec inc data and fisher mean, cone of confidence
INPUT FORMAT
takes dec, inc, alpha95 as first three columns in space delimited file
SYNTAX
plotdi_a.py [-i][-f FILE]
OPTIONS
-f FILE to read file name from command line
-fmt [png,jpg,eps,pdf,svg] set plot file format ['svg' is default]
-sav save plot and quit | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/plotdi_a.py#L9-L84 |
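A worked example of the per-record parameter list Pars built in the loop above, for a hypothetical input line '350.0 -45.0 5.0' (dec inc alpha95):

dec, inc, a95 = 350.0, -45.0, 5.0
isign = abs(inc) / inc                     # -1.0 for a negative inclination
pars = [dec, inc, a95,                     # direction and alpha95
        dec, inc - isign * 90.,            # 'Beta' direction, as in the source comments
        a95,                               # gamma
        dec + 90., 0.]                     # second axis of the ellipse
print(pars)   # [350.0, -45.0, 5.0, 350.0, 45.0, 5.0, 440.0, 0.0]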
PmagPy/PmagPy | programs/di_rot.py | main | def main():
"""
NAME
di_rot.py
DESCRIPTION
rotates set of directions to new coordinate system
SYNTAX
di_rot.py [command line options]
OPTIONS
-h prints help message and quits
-f specify input file, default is standard input
-F specify output file, default is standard output
-D D specify Dec of new coordinate system, default is 0
-I I specify Inc of new coordinate system, default is 90
    INPUT/OUTPUT
dec inc [space delimited]
"""
D,I=0.,90.
outfile=""
infile=""
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-f' in sys.argv:
ind=sys.argv.index('-f')
infile=sys.argv[ind+1]
data=numpy.loadtxt(infile)
else:
data=numpy.loadtxt(sys.stdin,dtype=numpy.float)
if '-F' in sys.argv:
ind=sys.argv.index('-F')
outfile=sys.argv[ind+1]
out=open(outfile,'w')
if '-D' in sys.argv:
ind=sys.argv.index('-D')
D=float(sys.argv[ind+1])
if '-I' in sys.argv:
ind=sys.argv.index('-I')
I=float(sys.argv[ind+1])
if len(data.shape)>1: # 2-D array
N=data.shape[0]
DipDir,Dip=numpy.ones(N,dtype=numpy.float).transpose()*(D-180.),numpy.ones(N,dtype=numpy.float).transpose()*(90.-I)
data=data.transpose()
data=numpy.array([data[0],data[1],DipDir ,Dip]).transpose()
drot,irot=pmag.dotilt_V(data)
drot=(drot-180.)%360. #
for k in range(N):
if outfile=="":
print('%7.1f %7.1f ' % (drot[k],irot[k]))
else:
out.write('%7.1f %7.1f\n' % (drot[k],irot[k]))
else:
d,i=pmag.dotilt(data[0],data[1],(D-180.),90.-I)
if outfile=="":
print('%7.1f %7.1f ' % ((d-180.)%360.,i))
else:
out.write('%7.1f %7.1f\n' % ((d-180.)%360.,i)) | python | def main():
"""
NAME
di_rot.py
DESCRIPTION
rotates set of directions to new coordinate system
SYNTAX
di_rot.py [command line options]
OPTIONS
-h prints help message and quits
-f specify input file, default is standard input
-F specify output file, default is standard output
-D D specify Dec of new coordinate system, default is 0
-I I specify Inc of new coordinate system, default is 90
    INPUT/OUTPUT
dec inc [space delimited]
"""
D,I=0.,90.
outfile=""
infile=""
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-f' in sys.argv:
ind=sys.argv.index('-f')
infile=sys.argv[ind+1]
data=numpy.loadtxt(infile)
else:
data=numpy.loadtxt(sys.stdin,dtype=numpy.float)
if '-F' in sys.argv:
ind=sys.argv.index('-F')
outfile=sys.argv[ind+1]
out=open(outfile,'w')
if '-D' in sys.argv:
ind=sys.argv.index('-D')
D=float(sys.argv[ind+1])
if '-I' in sys.argv:
ind=sys.argv.index('-I')
I=float(sys.argv[ind+1])
if len(data.shape)>1: # 2-D array
N=data.shape[0]
DipDir,Dip=numpy.ones(N,dtype=numpy.float).transpose()*(D-180.),numpy.ones(N,dtype=numpy.float).transpose()*(90.-I)
data=data.transpose()
data=numpy.array([data[0],data[1],DipDir ,Dip]).transpose()
drot,irot=pmag.dotilt_V(data)
drot=(drot-180.)%360. #
for k in range(N):
if outfile=="":
print('%7.1f %7.1f ' % (drot[k],irot[k]))
else:
out.write('%7.1f %7.1f\n' % (drot[k],irot[k]))
else:
d,i=pmag.dotilt(data[0],data[1],(D-180.),90.-I)
if outfile=="":
print('%7.1f %7.1f ' % ((d-180.)%360.,i))
else:
out.write('%7.1f %7.1f\n' % ((d-180.)%360.,i)) | NAME
di_rot.py
DESCRIPTION
rotates set of directions to new coordinate system
SYNTAX
di_rot.py [command line options]
OPTIONS
-h prints help message and quits
-f specify input file, default is standard input
-F specify output file, default is standard output
-D D specify Dec of new coordinate system, default is 0
-I I specify Inc of new coordinate system, default is 90
    INPUT/OUTPUT
dec inc [space delimited] | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/di_rot.py#L11-L72 |
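A sketch of the single-direction branch above, assuming the pmagpy package is installed and that pmag.dotilt keeps the signature used in the script; dec, inc and the D, I values below are example numbers:

from pmagpy import pmag

dec, inc = 11.0, 63.0
D, I = 0.0, 90.0                       # new coordinate system (script defaults)
d, i = pmag.dotilt(dec, inc, D - 180., 90. - I)
print('%7.1f %7.1f' % ((d - 180.) % 360., i))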
PmagPy/PmagPy | programs/conversion_scripts2/_2g_bin_magic2.py | main | def main(command_line=True, **kwargs):
"""
NAME
_2g_bin_magic.py
DESCRIPTION
        takes the binary 2g format magnetometer files and converts them to magic_measurements, er_samples.txt and er_sites.txt files
SYNTAX
2g_bin_magic.py [command line options]
OPTIONS
-f FILE: specify input 2g (binary) file
-F FILE: specify magic_measurements output file, default is: magic_measurements.txt
-Fsa FILE: specify output file, default is: er_samples.txt
-Fsi FILE: specify output file, default is: er_sites.txt
-ncn NCON: specify naming convention: default is #2 below
-ocn OCON: specify orientation convention, default is #5 below
-mcd: specify sampling method codes as a colon delimited string: [default is: FS-FD:SO-POM]
FS-FD field sampling done with a drill
FS-H field sampling done with hand samples
FS-LOC-GPS field location done with GPS
FS-LOC-MAP field location done with map
SO-POM a Pomeroy orientation device was used
SO-ASC an ASC orientation device was used
SO-MAG orientation with magnetic compass
SO-SUN orientation with sun compass
-loc: location name, default="unknown"
-spc NUM : specify number of characters to designate a specimen, default = 0
        -ins INST : specify instrument name
-a: average replicate measurements
INPUT FORMAT
Input files are horrible mag binary format (who knows why?)
Orientation convention:
[1] Lab arrow azimuth= mag_azimuth; Lab arrow dip=-field_dip
i.e., field_dip is degrees from vertical down - the hade [default]
[2] Lab arrow azimuth = mag_azimuth-90; Lab arrow dip = -field_dip
i.e., mag_azimuth is strike and field_dip is hade
[3] Lab arrow azimuth = mag_azimuth; Lab arrow dip = 90-field_dip
i.e., lab arrow same as field arrow, but field_dip was a hade.
[4] lab azimuth and dip are same as mag_azimuth, field_dip
[5] lab azimuth is same as mag_azimuth,lab arrow dip=field_dip-90
[6] Lab arrow azimuth = mag_azimuth-90; Lab arrow dip = 90-field_dip
        [7] all others you will have to either customize yourself or e-mail ltauxe@ucsd.edu for help.
Magnetic declination convention:
Az will use supplied declination to correct azimuth
Sample naming convention:
[1] XXXXY: where XXXX is an arbitrary length site designation and Y
is the single character sample designation. e.g., TG001a is the
first sample from site TG001. [default]
        [2] XXXX-YY: YY sample from site XXXX (XXX, YY of arbitrary length)
        [3] XXXX.YY: YY sample from site XXXX (XXX, YY of arbitrary length)
[4-Z] XXXX[YYY]: YYY is sample designation with Z characters from site XXX
[5] site name = sample name
[6] site name entered in site_name column in the orient.txt format input file -- NOT CURRENTLY SUPPORTED
[7-Z] [XXX]YYY: XXX is site designation with Z characters from samples XXXYYY
        NB: all others you will have to either customize yourself or e-mail ltauxe@ucsd.edu for help.
OUTPUT
output saved in magic_measurements.txt & er_samples.txt formatted files
will overwrite any existing files
"""
#
# initialize variables
#
mag_file = ''
specnum = 0
ub_file, samp_file, or_con, corr, meas_file = "", "er_samples.txt", "3", "1", "magic_measurements.txt"
pos_file, site_file = "", "er_sites.txt"
noave = 1
args = sys.argv
bed_dip, bed_dip_dir = "", ""
samp_con, Z, average_bedding = "2", 1, "0"
meths = 'FS-FD'
sclass, lithology, _type = "", "", ""
user, inst = "", ""
DecCorr = 0.
location_name = "unknown"
months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
gmeths = ""
#
#
dir_path = '.'
if command_line:
if '-WD' in args:
ind = args.index("-WD")
dir_path = sys.argv[ind + 1]
if "-h" in args:
print(main.__doc__)
return False
if "-f" in args:
ind = args.index("-f")
mag_file = sys.argv[ind + 1]
if "-fpos" in args:
ind = args.index("-fpos")
pos_file = sys.argv[ind + 1]
if "-F" in args:
ind = args.index("-F")
meas_file = sys.argv[ind + 1]
if "-Fsa" in args:
ind = args.index("-Fsa")
samp_file = sys.argv[ind + 1]
if "-Fsi" in args:
ind = args.index("-Fsi")
site_file = sys.argv[ind + 1]
if "-ocn" in args:
ind = args.index("-ocn")
or_con = sys.argv[ind + 1]
if "-ncn" in args:
ind = args.index("-ncn")
samp_con = sys.argv[ind + 1]
if "-mcd" in args:
ind = args.index("-mcd")
gmeths = (sys.argv[ind + 1])
if "-loc" in args:
ind = args.index("-loc")
location_name = (sys.argv[ind + 1])
if "-spc" in args:
ind = args.index("-spc")
specnum = int(args[ind + 1])
if "-ins" in args:
ind = args.index("-ins")
inst = args[ind + 1]
if "-a" in args:
noave = 0
#
ID = False
if '-ID' in args:
ind = args.index('-ID')
ID = args[ind + 1]
#
if not command_line:
dir_path = kwargs.get('dir_path', '.')
mag_file = kwargs.get('mag_file', '')
pos_file = kwargs.get('pos_file', '')
meas_file = kwargs.get('meas_file', 'magic_measurements.txt')
samp_file = kwargs.get('samp_file', 'er_samples.txt')
site_file = kwargs.get('site_file', 'er_sites.txt')
or_con = kwargs.get('or_con', '3')
samp_con = kwargs.get('samp_con', '2')
corr = kwargs.get('corr', '1')
gmeths = kwargs.get('gmeths', '')
location_name = kwargs.get('location_name', '')
specnum = int(kwargs.get('specnum', 0))
inst = kwargs.get('inst', '')
noave = kwargs.get('noave', 1) # default is DO average
ID = kwargs.get('ID', '')
# format and fix variables acquired from command line args or input with
# **kwargs
if specnum != 0:
specnum = -specnum
if ID:
input_dir_path = ID
else:
input_dir_path = dir_path
if samp_con:
if "4" in samp_con:
if "-" not in samp_con:
print("option [4] must be in form 4-Z where Z is an integer")
return False, "option [4] must be in form 4-Z where Z is an integer"
else:
Z = samp_con.split("-")[1]
samp_con = "4"
if "7" in samp_con:
if "-" not in samp_con:
print("option [7] must be in form 7-Z where Z is an integer")
return False, "option [7] must be in form 7-Z where Z is an integer"
else:
Z = samp_con.split("-")[1]
samp_con = "7"
if "6" in samp_con:
print('Naming convention option [6] not currently supported')
return False, 'Naming convention option [6] not currently supported'
try:
Samps, file_type = pmag.magic_read(
os.path.join(input_dir_path, 'er_samples.txt'))
except:
print(
"there is no er_samples.txt file in your input directory - you can't use naming convention #6")
return False, "there is no er_samples.txt file in your input directory - you can't use naming convention #6"
if file_type == 'bad_file':
print(
"there is no er_samples.txt file in your input directory - you can't use naming convention #6")
return False, "there is no er_samples.txt file in your input directory - you can't use naming convention #6"
if not mag_file:
print("mag file is required input")
return False, "mag file is required input"
output_dir_path = dir_path
mag_file = os.path.join(input_dir_path, mag_file)
samp_file = output_dir_path + '/' + samp_file
site_file = output_dir_path + '/' + site_file
meas_file = output_dir_path + '/' + meas_file
samplist = []
try:
Samps, file_type = pmag.magic_read(samp_file)
for samp in Samps:
if samp['er_sample_name'] not in samplist:
samplist.append(samp['er_sample_name'])
except:
Samps = []
MagRecs = []
try:
f = open(mag_file, 'br')
input = str(f.read()).strip("b '")
f.close()
except Exception as ex:
print('ex', ex)
print("bad mag file")
return False, "bad mag file"
firstline, date = 1, ""
d = input.split('\\xcd')
for line in d:
rec = line.split('\\x00')
if firstline == 1:
firstline = 0
spec, vol = "", 1
el = 51
while line[el:el + 1] != "\\":
spec = spec + line[el]
el += 1
# check for bad sample name
test = spec.split('.')
date = ""
if len(test) > 1:
spec = test[0]
kk = 24
while line[kk] != '\\x01' and line[kk] != '\\x00':
kk += 1
vcc = line[24:kk]
el = 10
while rec[el].strip() != '':
el += 1
date, comments = rec[el + 7], []
else:
el = 9
while rec[el] != '\\x01':
el += 1
vcc, date, comments = rec[el - 3], rec[el + 7], []
specname = spec.lower()
print('importing ', specname)
el += 8
while rec[el].isdigit() == False:
comments.append(rec[el])
el += 1
while rec[el] == "":
el += 1
az = float(rec[el])
el += 1
while rec[el] == "":
el += 1
pl = float(rec[el])
el += 1
while rec[el] == "":
el += 1
bed_dip_dir = float(rec[el])
el += 1
while rec[el] == "":
el += 1
bed_dip = float(rec[el])
el += 1
while rec[el] == "":
el += 1
if rec[el] == '\\x01':
bed_dip = 180. - bed_dip
el += 1
while rec[el] == "":
el += 1
fold_az = float(rec[el])
el += 1
while rec[el] == "":
el += 1
fold_pl = rec[el]
el += 1
while rec[el] == "":
el += 1
if rec[el] != "" and rec[el] != '\\x02' and rec[el] != '\\x01':
deccorr = float(rec[el])
az += deccorr
bed_dip_dir += deccorr
fold_az += deccorr
if bed_dip_dir >= 360:
bed_dip_dir = bed_dip_dir - 360.
if az >= 360.:
az = az - 360.
if fold_az >= 360.:
fold_az = fold_az - 360.
else:
deccorr = 0
if specnum != 0:
sample = specname[:specnum]
else:
sample = specname
SampRec = {}
SampRec["er_sample_name"] = sample
SampRec["er_location_name"] = location_name
SampRec["er_citation_names"] = "This study"
# convert to labaz, labpl
labaz, labdip = pmag.orient(az, pl, or_con)
#
# parse information common to all orientation methods
#
SampRec["sample_bed_dip"] = '%7.1f' % (bed_dip)
SampRec["sample_bed_dip_direction"] = '%7.1f' % (bed_dip_dir)
SampRec["sample_dip"] = '%7.1f' % (labdip)
SampRec["sample_azimuth"] = '%7.1f' % (labaz)
if vcc.strip() != "":
vol = float(vcc) * 1e-6 # convert to m^3 from cc
SampRec["sample_volume"] = '%10.3e' % (vol)
SampRec["sample_class"] = sclass
SampRec["sample_lithology"] = lithology
SampRec["sample_type"] = _type
SampRec["sample_declination_correction"] = '%7.1f' % (deccorr)
methods = gmeths.split(':')
            if deccorr != 0:
if 'SO-MAG' in methods:
del methods[methods.index('SO-MAG')]
methods.append('SO-CMD-NORTH')
meths = ""
for meth in methods:
meths = meths + meth + ":"
meths = meths[:-1]
SampRec["magic_method_codes"] = meths
if int(samp_con) < 6 or int(samp_con) == 7:
# parse out the site name
site = pmag.parse_site(SampRec["er_sample_name"], samp_con, Z)
SampRec["er_site_name"] = site
elif len(Samps) > 1:
site, location = "", ""
for samp in Samps:
if samp["er_sample_name"] == SampRec["er_sample_name"]:
site = samp["er_site_name"]
location = samp["er_location_name"]
break
SampRec["er_location_name"] = samp["er_location_name"]
SampRec["er_site_name"] = samp["er_site_name"]
if sample not in samplist:
samplist.append(sample)
Samps.append(SampRec)
else:
MagRec = {}
MagRec["treatment_temp"] = '%8.3e' % (273) # room temp in kelvin
MagRec["measurement_temp"] = '%8.3e' % (273) # room temp in kelvin
MagRec["treatment_ac_field"] = '0'
MagRec["treatment_dc_field"] = '0'
MagRec["treatment_dc_field_phi"] = '0'
MagRec["treatment_dc_field_theta"] = '0'
meas_type = "LT-NO"
MagRec["measurement_flag"] = 'g'
MagRec["measurement_standard"] = 'u'
MagRec["measurement_number"] = '1'
MagRec["er_specimen_name"] = specname
MagRec["er_sample_name"] = SampRec['er_sample_name']
MagRec["er_site_name"] = SampRec['er_site_name']
MagRec["er_location_name"] = location_name
el, demag = 1, ''
treat = rec[el]
if treat[-1] == 'C':
demag = 'T'
elif treat != 'NRM':
demag = 'AF'
el += 1
while rec[el] == "":
el += 1
MagRec["measurement_dec"] = rec[el]
cdec = float(rec[el])
el += 1
while rec[el] == "":
el += 1
MagRec["measurement_inc"] = rec[el]
cinc = float(rec[el])
el += 1
while rec[el] == "":
el += 1
gdec = rec[el]
el += 1
while rec[el] == "":
el += 1
ginc = rec[el]
el = skip(2, el, rec) # skip bdec,binc
# el=skip(4,el,rec) # skip gdec,ginc,bdec,binc
# print 'moment emu: ',rec[el]
MagRec["measurement_magn_moment"] = '%10.3e' % (
float(rec[el]) * 1e-3) # moment in Am^2 (from emu)
MagRec["measurement_magn_volume"] = '%10.3e' % (
float(rec[el]) * 1e-3 / vol) # magnetization in A/m
el = skip(2, el, rec) # skip to xsig
MagRec["measurement_sd_x"] = '%10.3e' % (
float(rec[el]) * 1e-3) # convert from emu
el = skip(3, el, rec) # skip to ysig
MagRec["measurement_sd_y"] = '%10.3e' % (
float(rec[el]) * 1e-3) # convert from emu
el = skip(3, el, rec) # skip to zsig
MagRec["measurement_sd_z"] = '%10.3e' % (
float(rec[el]) * 1e-3) # convert from emu
el += 1 # skip to positions
MagRec["measurement_positions"] = rec[el]
# el=skip(5,el,rec) # skip to date
# mm=str(months.index(date[0]))
# if len(mm)==1:
# mm='0'+str(mm)
# else:
# mm=str(mm)
# dstring=date[2]+':'+mm+':'+date[1]+":"+date[3]
# MagRec['measurement_date']=dstring
MagRec["magic_instrument_codes"] = inst
MagRec["er_analyst_mail_names"] = ""
MagRec["er_citation_names"] = "This study"
MagRec["magic_method_codes"] = meas_type
if demag == "AF":
MagRec["treatment_ac_field"] = '%8.3e' % (
float(treat[:-2]) * 1e-3) # peak field in tesla
meas_type = "LT-AF-Z"
MagRec["treatment_dc_field"] = '0'
elif demag == "T":
MagRec["treatment_temp"] = '%8.3e' % (
float(treat[:-1]) + 273.) # temp in kelvin
meas_type = "LT-T-Z"
MagRec['magic_method_codes'] = meas_type
MagRecs.append(MagRec)
MagOuts = pmag.measurements_methods(MagRecs, noave)
MagOuts, keylist = pmag.fillkeys(MagOuts)
pmag.magic_write(meas_file, MagOuts, 'magic_measurements')
print("Measurements put in ", meas_file)
SampsOut, sampkeys = pmag.fillkeys(Samps)
pmag.magic_write(samp_file, SampsOut, "er_samples")
Sites = []
for samp in Samps:
SiteRec = {}
SiteRec['er_site_name'] = samp['er_site_name']
SiteRec['er_location_name'] = samp['er_location_name']
SiteRec['site_definition'] = 's'
SiteRec['er_citation_names'] = 'This study'
if 'sample_class' in list(samp.keys()):
SiteRec['site_class'] = samp['sample_class']
if 'sample_lithology' in list(samp.keys()):
SiteRec['site_lithology'] = samp['sample_lithology']
if 'sample_type' in list(samp.keys()):
SiteRec['site_lithology'] = samp['sample_lithology']
if 'sample_lat' in list(samp.keys()):
SiteRec['site_lat'] = samp['sample_lat']
else:
SiteRec['site_lat'] = "-999"
if 'sample_lon' in list(samp.keys()):
SiteRec['site_lon'] = samp['sample_lon']
else:
SiteRec['site_lon'] = "-999"
if 'sample_height' in list(samp.keys()):
SiteRec['site_height'] = samp['sample_height']
Sites.append(SiteRec)
pmag.magic_write(site_file, Sites, 'er_sites')
return True, meas_file | python | def main(command_line=True, **kwargs):
"""
NAME
_2g_bin_magic.py
DESCRIPTION
        takes the binary 2g format magnetometer files and converts them to magic_measurements, er_samples.txt and er_sites.txt files
SYNTAX
2g_bin_magic.py [command line options]
OPTIONS
-f FILE: specify input 2g (binary) file
-F FILE: specify magic_measurements output file, default is: magic_measurements.txt
-Fsa FILE: specify output file, default is: er_samples.txt
-Fsi FILE: specify output file, default is: er_sites.txt
-ncn NCON: specify naming convention: default is #2 below
-ocn OCON: specify orientation convention, default is #5 below
-mcd: specify sampling method codes as a colon delimited string: [default is: FS-FD:SO-POM]
FS-FD field sampling done with a drill
FS-H field sampling done with hand samples
FS-LOC-GPS field location done with GPS
FS-LOC-MAP field location done with map
SO-POM a Pomeroy orientation device was used
SO-ASC an ASC orientation device was used
SO-MAG orientation with magnetic compass
SO-SUN orientation with sun compass
-loc: location name, default="unknown"
-spc NUM : specify number of characters to designate a specimen, default = 0
        -ins INST : specify instrument name
-a: average replicate measurements
INPUT FORMAT
Input files are horrible mag binary format (who knows why?)
Orientation convention:
[1] Lab arrow azimuth= mag_azimuth; Lab arrow dip=-field_dip
i.e., field_dip is degrees from vertical down - the hade [default]
[2] Lab arrow azimuth = mag_azimuth-90; Lab arrow dip = -field_dip
i.e., mag_azimuth is strike and field_dip is hade
[3] Lab arrow azimuth = mag_azimuth; Lab arrow dip = 90-field_dip
i.e., lab arrow same as field arrow, but field_dip was a hade.
[4] lab azimuth and dip are same as mag_azimuth, field_dip
[5] lab azimuth is same as mag_azimuth,lab arrow dip=field_dip-90
[6] Lab arrow azimuth = mag_azimuth-90; Lab arrow dip = 90-field_dip
        [7] all others you will have to either customize yourself or e-mail ltauxe@ucsd.edu for help.
Magnetic declination convention:
Az will use supplied declination to correct azimuth
Sample naming convention:
[1] XXXXY: where XXXX is an arbitrary length site designation and Y
is the single character sample designation. e.g., TG001a is the
first sample from site TG001. [default]
        [2] XXXX-YY: YY sample from site XXXX (XXX, YY of arbitrary length)
        [3] XXXX.YY: YY sample from site XXXX (XXX, YY of arbitrary length)
[4-Z] XXXX[YYY]: YYY is sample designation with Z characters from site XXX
[5] site name = sample name
[6] site name entered in site_name column in the orient.txt format input file -- NOT CURRENTLY SUPPORTED
[7-Z] [XXX]YYY: XXX is site designation with Z characters from samples XXXYYY
        NB: all others you will have to either customize yourself or e-mail ltauxe@ucsd.edu for help.
OUTPUT
output saved in magic_measurements.txt & er_samples.txt formatted files
will overwrite any existing files
"""
#
# initialize variables
#
mag_file = ''
specnum = 0
ub_file, samp_file, or_con, corr, meas_file = "", "er_samples.txt", "3", "1", "magic_measurements.txt"
pos_file, site_file = "", "er_sites.txt"
noave = 1
args = sys.argv
bed_dip, bed_dip_dir = "", ""
samp_con, Z, average_bedding = "2", 1, "0"
meths = 'FS-FD'
sclass, lithology, _type = "", "", ""
user, inst = "", ""
DecCorr = 0.
location_name = "unknown"
months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
gmeths = ""
#
#
dir_path = '.'
if command_line:
if '-WD' in args:
ind = args.index("-WD")
dir_path = sys.argv[ind + 1]
if "-h" in args:
print(main.__doc__)
return False
if "-f" in args:
ind = args.index("-f")
mag_file = sys.argv[ind + 1]
if "-fpos" in args:
ind = args.index("-fpos")
pos_file = sys.argv[ind + 1]
if "-F" in args:
ind = args.index("-F")
meas_file = sys.argv[ind + 1]
if "-Fsa" in args:
ind = args.index("-Fsa")
samp_file = sys.argv[ind + 1]
if "-Fsi" in args:
ind = args.index("-Fsi")
site_file = sys.argv[ind + 1]
if "-ocn" in args:
ind = args.index("-ocn")
or_con = sys.argv[ind + 1]
if "-ncn" in args:
ind = args.index("-ncn")
samp_con = sys.argv[ind + 1]
if "-mcd" in args:
ind = args.index("-mcd")
gmeths = (sys.argv[ind + 1])
if "-loc" in args:
ind = args.index("-loc")
location_name = (sys.argv[ind + 1])
if "-spc" in args:
ind = args.index("-spc")
specnum = int(args[ind + 1])
if "-ins" in args:
ind = args.index("-ins")
inst = args[ind + 1]
if "-a" in args:
noave = 0
#
ID = False
if '-ID' in args:
ind = args.index('-ID')
ID = args[ind + 1]
#
if not command_line:
dir_path = kwargs.get('dir_path', '.')
mag_file = kwargs.get('mag_file', '')
pos_file = kwargs.get('pos_file', '')
meas_file = kwargs.get('meas_file', 'magic_measurements.txt')
samp_file = kwargs.get('samp_file', 'er_samples.txt')
site_file = kwargs.get('site_file', 'er_sites.txt')
or_con = kwargs.get('or_con', '3')
samp_con = kwargs.get('samp_con', '2')
corr = kwargs.get('corr', '1')
gmeths = kwargs.get('gmeths', '')
location_name = kwargs.get('location_name', '')
specnum = int(kwargs.get('specnum', 0))
inst = kwargs.get('inst', '')
noave = kwargs.get('noave', 1) # default is DO average
ID = kwargs.get('ID', '')
# format and fix variables acquired from command line args or input with
# **kwargs
if specnum != 0:
specnum = -specnum
if ID:
input_dir_path = ID
else:
input_dir_path = dir_path
if samp_con:
if "4" in samp_con:
if "-" not in samp_con:
print("option [4] must be in form 4-Z where Z is an integer")
return False, "option [4] must be in form 4-Z where Z is an integer"
else:
Z = samp_con.split("-")[1]
samp_con = "4"
if "7" in samp_con:
if "-" not in samp_con:
print("option [7] must be in form 7-Z where Z is an integer")
return False, "option [7] must be in form 7-Z where Z is an integer"
else:
Z = samp_con.split("-")[1]
samp_con = "7"
if "6" in samp_con:
print('Naming convention option [6] not currently supported')
return False, 'Naming convention option [6] not currently supported'
try:
Samps, file_type = pmag.magic_read(
os.path.join(input_dir_path, 'er_samples.txt'))
except:
print(
"there is no er_samples.txt file in your input directory - you can't use naming convention #6")
return False, "there is no er_samples.txt file in your input directory - you can't use naming convention #6"
if file_type == 'bad_file':
print(
"there is no er_samples.txt file in your input directory - you can't use naming convention #6")
return False, "there is no er_samples.txt file in your input directory - you can't use naming convention #6"
if not mag_file:
print("mag file is required input")
return False, "mag file is required input"
output_dir_path = dir_path
mag_file = os.path.join(input_dir_path, mag_file)
samp_file = output_dir_path + '/' + samp_file
site_file = output_dir_path + '/' + site_file
meas_file = output_dir_path + '/' + meas_file
samplist = []
try:
Samps, file_type = pmag.magic_read(samp_file)
for samp in Samps:
if samp['er_sample_name'] not in samplist:
samplist.append(samp['er_sample_name'])
except:
Samps = []
MagRecs = []
try:
f = open(mag_file, 'br')
input = str(f.read()).strip("b '")
f.close()
except Exception as ex:
print('ex', ex)
print("bad mag file")
return False, "bad mag file"
firstline, date = 1, ""
d = input.split('\\xcd')
for line in d:
rec = line.split('\\x00')
if firstline == 1:
firstline = 0
spec, vol = "", 1
el = 51
while line[el:el + 1] != "\\":
spec = spec + line[el]
el += 1
# check for bad sample name
test = spec.split('.')
date = ""
if len(test) > 1:
spec = test[0]
kk = 24
while line[kk] != '\\x01' and line[kk] != '\\x00':
kk += 1
vcc = line[24:kk]
el = 10
while rec[el].strip() != '':
el += 1
date, comments = rec[el + 7], []
else:
el = 9
while rec[el] != '\\x01':
el += 1
vcc, date, comments = rec[el - 3], rec[el + 7], []
specname = spec.lower()
print('importing ', specname)
el += 8
while rec[el].isdigit() == False:
comments.append(rec[el])
el += 1
while rec[el] == "":
el += 1
az = float(rec[el])
el += 1
while rec[el] == "":
el += 1
pl = float(rec[el])
el += 1
while rec[el] == "":
el += 1
bed_dip_dir = float(rec[el])
el += 1
while rec[el] == "":
el += 1
bed_dip = float(rec[el])
el += 1
while rec[el] == "":
el += 1
if rec[el] == '\\x01':
bed_dip = 180. - bed_dip
el += 1
while rec[el] == "":
el += 1
fold_az = float(rec[el])
el += 1
while rec[el] == "":
el += 1
fold_pl = rec[el]
el += 1
while rec[el] == "":
el += 1
if rec[el] != "" and rec[el] != '\\x02' and rec[el] != '\\x01':
deccorr = float(rec[el])
az += deccorr
bed_dip_dir += deccorr
fold_az += deccorr
if bed_dip_dir >= 360:
bed_dip_dir = bed_dip_dir - 360.
if az >= 360.:
az = az - 360.
if fold_az >= 360.:
fold_az = fold_az - 360.
else:
deccorr = 0
if specnum != 0:
sample = specname[:specnum]
else:
sample = specname
SampRec = {}
SampRec["er_sample_name"] = sample
SampRec["er_location_name"] = location_name
SampRec["er_citation_names"] = "This study"
# convert to labaz, labpl
labaz, labdip = pmag.orient(az, pl, or_con)
#
# parse information common to all orientation methods
#
SampRec["sample_bed_dip"] = '%7.1f' % (bed_dip)
SampRec["sample_bed_dip_direction"] = '%7.1f' % (bed_dip_dir)
SampRec["sample_dip"] = '%7.1f' % (labdip)
SampRec["sample_azimuth"] = '%7.1f' % (labaz)
if vcc.strip() != "":
vol = float(vcc) * 1e-6 # convert to m^3 from cc
SampRec["sample_volume"] = '%10.3e' % (vol)
SampRec["sample_class"] = sclass
SampRec["sample_lithology"] = lithology
SampRec["sample_type"] = _type
SampRec["sample_declination_correction"] = '%7.1f' % (deccorr)
methods = gmeths.split(':')
            if deccorr != 0:
if 'SO-MAG' in methods:
del methods[methods.index('SO-MAG')]
methods.append('SO-CMD-NORTH')
meths = ""
for meth in methods:
meths = meths + meth + ":"
meths = meths[:-1]
SampRec["magic_method_codes"] = meths
if int(samp_con) < 6 or int(samp_con) == 7:
# parse out the site name
site = pmag.parse_site(SampRec["er_sample_name"], samp_con, Z)
SampRec["er_site_name"] = site
elif len(Samps) > 1:
site, location = "", ""
for samp in Samps:
if samp["er_sample_name"] == SampRec["er_sample_name"]:
site = samp["er_site_name"]
location = samp["er_location_name"]
break
SampRec["er_location_name"] = samp["er_location_name"]
SampRec["er_site_name"] = samp["er_site_name"]
if sample not in samplist:
samplist.append(sample)
Samps.append(SampRec)
else:
MagRec = {}
MagRec["treatment_temp"] = '%8.3e' % (273) # room temp in kelvin
MagRec["measurement_temp"] = '%8.3e' % (273) # room temp in kelvin
MagRec["treatment_ac_field"] = '0'
MagRec["treatment_dc_field"] = '0'
MagRec["treatment_dc_field_phi"] = '0'
MagRec["treatment_dc_field_theta"] = '0'
meas_type = "LT-NO"
MagRec["measurement_flag"] = 'g'
MagRec["measurement_standard"] = 'u'
MagRec["measurement_number"] = '1'
MagRec["er_specimen_name"] = specname
MagRec["er_sample_name"] = SampRec['er_sample_name']
MagRec["er_site_name"] = SampRec['er_site_name']
MagRec["er_location_name"] = location_name
el, demag = 1, ''
treat = rec[el]
if treat[-1] == 'C':
demag = 'T'
elif treat != 'NRM':
demag = 'AF'
el += 1
while rec[el] == "":
el += 1
MagRec["measurement_dec"] = rec[el]
cdec = float(rec[el])
el += 1
while rec[el] == "":
el += 1
MagRec["measurement_inc"] = rec[el]
cinc = float(rec[el])
el += 1
while rec[el] == "":
el += 1
gdec = rec[el]
el += 1
while rec[el] == "":
el += 1
ginc = rec[el]
el = skip(2, el, rec) # skip bdec,binc
# el=skip(4,el,rec) # skip gdec,ginc,bdec,binc
# print 'moment emu: ',rec[el]
MagRec["measurement_magn_moment"] = '%10.3e' % (
float(rec[el]) * 1e-3) # moment in Am^2 (from emu)
MagRec["measurement_magn_volume"] = '%10.3e' % (
float(rec[el]) * 1e-3 / vol) # magnetization in A/m
el = skip(2, el, rec) # skip to xsig
MagRec["measurement_sd_x"] = '%10.3e' % (
float(rec[el]) * 1e-3) # convert from emu
el = skip(3, el, rec) # skip to ysig
MagRec["measurement_sd_y"] = '%10.3e' % (
float(rec[el]) * 1e-3) # convert from emu
el = skip(3, el, rec) # skip to zsig
MagRec["measurement_sd_z"] = '%10.3e' % (
float(rec[el]) * 1e-3) # convert from emu
el += 1 # skip to positions
MagRec["measurement_positions"] = rec[el]
# el=skip(5,el,rec) # skip to date
# mm=str(months.index(date[0]))
# if len(mm)==1:
# mm='0'+str(mm)
# else:
# mm=str(mm)
# dstring=date[2]+':'+mm+':'+date[1]+":"+date[3]
# MagRec['measurement_date']=dstring
MagRec["magic_instrument_codes"] = inst
MagRec["er_analyst_mail_names"] = ""
MagRec["er_citation_names"] = "This study"
MagRec["magic_method_codes"] = meas_type
if demag == "AF":
MagRec["treatment_ac_field"] = '%8.3e' % (
float(treat[:-2]) * 1e-3) # peak field in tesla
meas_type = "LT-AF-Z"
MagRec["treatment_dc_field"] = '0'
elif demag == "T":
MagRec["treatment_temp"] = '%8.3e' % (
float(treat[:-1]) + 273.) # temp in kelvin
meas_type = "LT-T-Z"
MagRec['magic_method_codes'] = meas_type
MagRecs.append(MagRec)
MagOuts = pmag.measurements_methods(MagRecs, noave)
MagOuts, keylist = pmag.fillkeys(MagOuts)
pmag.magic_write(meas_file, MagOuts, 'magic_measurements')
print("Measurements put in ", meas_file)
SampsOut, sampkeys = pmag.fillkeys(Samps)
pmag.magic_write(samp_file, SampsOut, "er_samples")
Sites = []
for samp in Samps:
SiteRec = {}
SiteRec['er_site_name'] = samp['er_site_name']
SiteRec['er_location_name'] = samp['er_location_name']
SiteRec['site_definition'] = 's'
SiteRec['er_citation_names'] = 'This study'
if 'sample_class' in list(samp.keys()):
SiteRec['site_class'] = samp['sample_class']
if 'sample_lithology' in list(samp.keys()):
SiteRec['site_lithology'] = samp['sample_lithology']
if 'sample_type' in list(samp.keys()):
SiteRec['site_lithology'] = samp['sample_lithology']
if 'sample_lat' in list(samp.keys()):
SiteRec['site_lat'] = samp['sample_lat']
else:
SiteRec['site_lat'] = "-999"
if 'sample_lon' in list(samp.keys()):
SiteRec['site_lon'] = samp['sample_lon']
else:
SiteRec['site_lon'] = "-999"
if 'sample_height' in list(samp.keys()):
SiteRec['site_height'] = samp['sample_height']
Sites.append(SiteRec)
pmag.magic_write(site_file, Sites, 'er_sites')
return True, meas_file | NAME
_2g_bin_magic.py
DESCRIPTION
    takes the binary 2g format magnetometer files and converts them to magic_measurements, er_samples.txt and er_sites.txt files
SYNTAX
2g_bin_magic.py [command line options]
OPTIONS
-f FILE: specify input 2g (binary) file
-F FILE: specify magic_measurements output file, default is: magic_measurements.txt
-Fsa FILE: specify output file, default is: er_samples.txt
-Fsi FILE: specify output file, default is: er_sites.txt
-ncn NCON: specify naming convention: default is #2 below
-ocn OCON: specify orientation convention, default is #5 below
-mcd: specify sampling method codes as a colon delimited string: [default is: FS-FD:SO-POM]
FS-FD field sampling done with a drill
FS-H field sampling done with hand samples
FS-LOC-GPS field location done with GPS
FS-LOC-MAP field location done with map
SO-POM a Pomeroy orientation device was used
SO-ASC an ASC orientation device was used
SO-MAG orientation with magnetic compass
SO-SUN orientation with sun compass
-loc: location name, default="unknown"
-spc NUM : specify number of characters to designate a specimen, default = 0
    -ins INST : specify instrument name
-a: average replicate measurements
INPUT FORMAT
Input files are horrible mag binary format (who knows why?)
Orientation convention:
[1] Lab arrow azimuth= mag_azimuth; Lab arrow dip=-field_dip
i.e., field_dip is degrees from vertical down - the hade [default]
[2] Lab arrow azimuth = mag_azimuth-90; Lab arrow dip = -field_dip
i.e., mag_azimuth is strike and field_dip is hade
[3] Lab arrow azimuth = mag_azimuth; Lab arrow dip = 90-field_dip
i.e., lab arrow same as field arrow, but field_dip was a hade.
[4] lab azimuth and dip are same as mag_azimuth, field_dip
[5] lab azimuth is same as mag_azimuth,lab arrow dip=field_dip-90
[6] Lab arrow azimuth = mag_azimuth-90; Lab arrow dip = 90-field_dip
    [7] all others you will have to either customize yourself or e-mail ltauxe@ucsd.edu for help.
Magnetic declination convention:
Az will use supplied declination to correct azimuth
Sample naming convention:
[1] XXXXY: where XXXX is an arbitrary length site designation and Y
is the single character sample designation. e.g., TG001a is the
first sample from site TG001. [default]
    [2] XXXX-YY: YY sample from site XXXX (XXX, YY of arbitrary length)
    [3] XXXX.YY: YY sample from site XXXX (XXX, YY of arbitrary length)
[4-Z] XXXX[YYY]: YYY is sample designation with Z characters from site XXX
[5] site name = sample name
[6] site name entered in site_name column in the orient.txt format input file -- NOT CURRENTLY SUPPORTED
[7-Z] [XXX]YYY: XXX is site designation with Z characters from samples XXXYYY
    NB: all others you will have to either customize yourself or e-mail ltauxe@ucsd.edu for help.
OUTPUT
output saved in magic_measurements.txt & er_samples.txt formatted files
will overwrite any existing files | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/conversion_scripts2/_2g_bin_magic2.py#L20-L482 |
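A hedged usage sketch of the two entry points shown above; the data file, naming/orientation conventions, and location name below are hypothetical:

# from the command line, using the documented options:
#   python _2g_bin_magic2.py -f mydata.dat -ncn 2 -ocn 5 -loc "Snake River" -spc 1 -a
# from Python, via the **kwargs branch of main():
ok, outfile = main(command_line=False, mag_file='mydata.dat', dir_path='.',
                   samp_con='2', or_con='5', location_name='Snake River',
                   specnum=1, noave=0)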
PmagPy/PmagPy | programs/unsquish.py | main | def main():
"""
NAME
unsquish.py
DESCRIPTION
takes dec/inc data and "unsquishes" with specified flattening factor, flt
using formula tan(If)=(1/flt)*tan(Io)
INPUT
declination inclination
    OUTPUT
        "unsquished" declination inclination
SYNTAX
unsquish.py [command line options] [< filename]
OPTIONS
-h print help and quit
-f FILE, input file
-F FILE, output file
-flt FLT, flattening factor [required]
"""
ofile=""
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-F' in sys.argv:
ind=sys.argv.index('-F')
ofile=sys.argv[ind+1]
out=open(ofile,'w')
if '-flt' in sys.argv:
ind=sys.argv.index('-flt')
flt=float(sys.argv[ind+1])
else:
print(main.__doc__)
sys.exit()
if '-f' in sys.argv:
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
input=numpy.loadtxt(file)
else:
input=numpy.loadtxt(sys.stdin,dtype=numpy.float)
# read in inclination data
for line in input:
dec=float(line[0])
inc=float(line[1])*numpy.pi/180.
tincnew=(old_div(1,flt))*numpy.tan(inc)
incnew=numpy.arctan(tincnew)*180./numpy.pi
if ofile=="":
print('%7.1f %7.1f'% (dec,incnew))
else:
out.write('%7.1f %7.1f'% (dec,incnew)+'\n') | python | def main():
"""
NAME
unsquish.py
DESCRIPTION
takes dec/inc data and "unsquishes" with specified flattening factor, flt
using formula tan(If)=(1/flt)*tan(Io)
INPUT
declination inclination
    OUTPUT
        "unsquished" declination inclination
SYNTAX
unsquish.py [command line options] [< filename]
OPTIONS
-h print help and quit
-f FILE, input file
-F FILE, output file
-flt FLT, flattening factor [required]
"""
ofile=""
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-F' in sys.argv:
ind=sys.argv.index('-F')
ofile=sys.argv[ind+1]
out=open(ofile,'w')
if '-flt' in sys.argv:
ind=sys.argv.index('-flt')
flt=float(sys.argv[ind+1])
else:
print(main.__doc__)
sys.exit()
if '-f' in sys.argv:
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
input=numpy.loadtxt(file)
else:
input=numpy.loadtxt(sys.stdin,dtype=numpy.float)
# read in inclination data
for line in input:
dec=float(line[0])
inc=float(line[1])*numpy.pi/180.
tincnew=(old_div(1,flt))*numpy.tan(inc)
incnew=numpy.arctan(tincnew)*180./numpy.pi
if ofile=="":
print('%7.1f %7.1f'% (dec,incnew))
else:
out.write('%7.1f %7.1f'% (dec,incnew)+'\n') | NAME
unsquish.py
DESCRIPTION
takes dec/inc data and "unsquishes" with specified flattening factor, flt
using formula tan(If)=(1/flt)*tan(Io)
INPUT
declination inclination
    OUTPUT
        "unsquished" declination inclination
SYNTAX
unsquish.py [command line options] [< filename]
OPTIONS
-h print help and quit
-f FILE, input file
-F FILE, output file
-flt FLT, flattening factor [required] | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/unsquish.py#L7-L60 |
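A minimal sketch of the unflattening formula tan(If) = (1/flt)*tan(Io), using hypothetical values:

import numpy

flt = 0.6                                   # flattening factor
inc_observed = 30.0                         # flattened (observed) inclination, degrees
inc_unsquished = numpy.degrees(numpy.arctan(numpy.tan(numpy.radians(inc_observed)) / flt))
print('%7.1f' % inc_unsquished)             # ~43.9 degrees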
PmagPy/PmagPy | programs/conversion_scripts2/iodp_jr6_magic2.py | main | def main(command_line=True, **kwargs):
"""
NAME
iodp_jr6_magic.py
DESCRIPTION
converts shipboard .jr6 format files to magic_measurements format files
SYNTAX
iodp_jr6_magic.py [command line options]
OPTIONS
-h: prints the help message and quits.
-f FILE: specify input file, or
-F FILE: specify output file, default is magic_measurements.txt
-fsa FILE: specify er_samples.txt file for sample name lookup ,
default is 'er_samples.txt'
-loc HOLE : specify hole name (U1456A)
-A: don't average replicate measurements
INPUT
JR6 .jr6 format file
"""
def fix_separation(filename, new_filename):
old_file = open(filename, 'r')
data = old_file.readlines()
new_data = []
for line in data:
new_line = line.replace('-', ' -')
new_line = new_line.replace(' ', ' ')
new_data.append(new_line)
new_file = open(new_filename, 'w')
for s in new_data:
new_file.write(s)
old_file.close()
new_file.close()
return new_filename
def old_fix_separation(filename, new_filename):
old_file = open(filename, 'r')
data = old_file.readlines()
new_data = []
for line in data:
new_line = []
for i in line.split():
if '-' in i[1:]:
lead_char = '-' if i[0] == '-' else ''
if lead_char:
v = i[1:].split('-')
else:
v = i.split('-')
new_line.append(lead_char + v[0])
new_line.append('-' + v[1])
else:
new_line.append(i)
new_line = (' '.join(new_line)) + '\n'
new_data.append(new_line)
new_file = open(new_filename, 'w')
for s in new_data:
new_file.write(s)
new_file.close()
old_file.close()
return new_filename
# initialize some stuff
noave=0
volume=2.5**3 #default volume is a 2.5cm cube
inst=""
samp_con,Z='5',""
missing=1
demag="N"
er_location_name="unknown"
citation='This study'
args=sys.argv
meth_code="LP-NO"
version_num=pmag.get_version()
dir_path='.'
MagRecs=[]
samp_file = 'er_samples.txt'
meas_file = 'magic_measurements.txt'
mag_file = ''
#
# get command line arguments
#
if command_line:
if '-WD' in sys.argv:
ind = sys.argv.index('-WD')
dir_path=sys.argv[ind+1]
if '-ID' in sys.argv:
ind = sys.argv.index('-ID')
input_dir_path = sys.argv[ind+1]
else:
input_dir_path = dir_path
output_dir_path = dir_path
if "-h" in args:
print(main.__doc__)
return False
if '-F' in args:
ind=args.index("-F")
meas_file = args[ind+1]
if '-fsa' in args:
ind = args.index("-fsa")
samp_file = args[ind+1]
if samp_file[0]!='/':
samp_file = os.path.join(input_dir_path, samp_file)
try:
open(samp_file,'r')
ErSamps,file_type=pmag.magic_read(samp_file)
except:
print(samp_file,' not found: ')
print(' download csv file and import to MagIC with iodp_samples_magic.py')
if '-f' in args:
ind = args.index("-f")
mag_file= args[ind+1]
if "-loc" in args:
ind=args.index("-loc")
er_location_name=args[ind+1]
if "-A" in args:
noave=1
if not command_line:
dir_path = kwargs.get('dir_path', '.')
input_dir_path = kwargs.get('input_dir_path', dir_path)
output_dir_path = dir_path
meas_file = kwargs.get('meas_file', 'magic_measurements.txt')
mag_file = kwargs.get('mag_file', '')
samp_file = kwargs.get('samp_file', 'er_samples.txt')
specnum = kwargs.get('specnum', 1)
samp_con = kwargs.get('samp_con', '1')
if len(str(samp_con)) > 1:
samp_con, Z = samp_con.split('-')
else:
Z = ''
er_location_name = kwargs.get('er_location_name', '')
noave = kwargs.get('noave', 0) # default (0) means DO average
meth_code = kwargs.get('meth_code', "LP-NO")
# format variables
meth_code=meth_code+":FS-C-DRILL-IODP:SP-SS-C:SO-V"
meth_code=meth_code.strip(":")
if mag_file:
mag_file = os.path.join(input_dir_path, mag_file)
samp_file = os.path.join(input_dir_path, samp_file)
meas_file = os.path.join(output_dir_path, meas_file)
# validate variables
if not mag_file:
print("You must provide an IODP_jr6 format file")
return False, "You must provide an IODP_jr6 format file"
if not os.path.exists(mag_file):
print('The input file you provided: {} does not exist.\nMake sure you have specified the correct filename AND correct input directory name.'.format(mag_file))
return False, 'The input file you provided: {} does not exist.\nMake sure you have specified the correct filename AND correct input directory name.'.format(mag_file)
if not os.path.exists(samp_file):
print("Your input directory:\n{}\nmust contain an er_samples.txt file, or you must explicitly provide one".format(input_dir_path))
return False, "Your input directory:\n{}\nmust contain an er_samples.txt file, or you must explicitly provide one".format(input_dir_path)
# parse data
temp = os.path.join(output_dir_path, 'temp.txt')
fix_separation(mag_file, temp)
samples, filetype = pmag.magic_read(samp_file)
with open(temp, 'r') as finput:
lines = finput.readlines()
os.remove(temp)
for line in lines:
MagRec = {}
line = line.split()
spec_text_id = line[0].split('_')[1]
SampRecs=pmag.get_dictitem(samples,'er_sample_alternatives',spec_text_id,'has')
if len(SampRecs)>0: # found one
MagRec['er_specimen_name']=SampRecs[0]['er_sample_name']
MagRec['er_sample_name']=MagRec['er_specimen_name']
MagRec['er_site_name']=MagRec['er_specimen_name']
MagRec["er_citation_names"]="This study"
MagRec['er_location_name']=er_location_name
MagRec['magic_software_packages']=version_num
MagRec["treatment_temp"]='%8.3e' % (273) # room temp in kelvin
MagRec["measurement_temp"]='%8.3e' % (273) # room temp in kelvin
MagRec["measurement_flag"]='g'
MagRec["measurement_standard"]='u'
MagRec["measurement_number"]='1'
MagRec["treatment_ac_field"]='0'
volume=float(SampRecs[0]['sample_volume'])
x = float(line[4])
y = float(line[3])
negz = float(line[2])
cart=np.array([x,y,-negz]).transpose()
direction = pmag.cart2dir(cart).transpose()
expon = float(line[5])
magn_volume = direction[2] * (10.0**expon)
moment = magn_volume * volume
MagRec["measurement_magn_moment"]=str(moment)
MagRec["measurement_magn_volume"]=str(magn_volume)#str(direction[2] * (10.0 ** expon))
MagRec["measurement_dec"]='%7.1f'%(direction[0])
MagRec["measurement_inc"]='%7.1f'%(direction[1])
step = line[1]
if step == 'NRM':
meas_type="LT-NO"
elif step[0:2] == 'AD':
meas_type="LT-AF-Z"
treat=float(step[2:])
MagRec["treatment_ac_field"]='%8.3e' %(treat*1e-3) # convert from mT to tesla
elif step[0:2] == 'TD':
meas_type="LT-T-Z"
treat=float(step[2:])
MagRec["treatment_temp"]='%8.3e' % (treat+273.) # temp in kelvin
elif step[0:3]=='ARM': #
meas_type="LT-AF-I"
                treat=float(step[3:])
MagRec["treatment_ac_field"]='%8.3e' %(treat*1e-3) # convert from mT to tesla
MagRec["treatment_dc_field"]='%8.3e' %(50e-6) # assume 50uT DC field
MagRec["measurement_description"]='Assumed DC field - actual unknown'
elif step[0:3]=='IRM': #
meas_type="LT-IRM"
treat=float(step[3:])
MagRec["treatment_dc_field"]='%8.3e' %(treat*1e-3) # convert from mT to tesla
else:
                print('unknown treatment type for ', step)
                return False, 'unknown treatment type for ' + str(step)
MagRec['magic_method_codes']=meas_type
MagRecs.append(MagRec.copy())
else:
            print('sample name not found: ', spec_text_id)
MagOuts=pmag.measurements_methods(MagRecs,noave)
file_created, error_message = pmag.magic_write(meas_file,MagOuts,'magic_measurements')
if file_created:
return True, meas_file
else:
return False, 'Results not written to file' | python | def main(command_line=True, **kwargs):
"""
NAME
iodp_jr6_magic.py
DESCRIPTION
converts shipboard .jr6 format files to magic_measurements format files
SYNTAX
iodp_jr6_magic.py [command line options]
OPTIONS
-h: prints the help message and quits.
-f FILE: specify input file, or
-F FILE: specify output file, default is magic_measurements.txt
-fsa FILE: specify er_samples.txt file for sample name lookup ,
default is 'er_samples.txt'
-loc HOLE : specify hole name (U1456A)
-A: don't average replicate measurements
INPUT
JR6 .jr6 format file
"""
def fix_separation(filename, new_filename):
old_file = open(filename, 'r')
data = old_file.readlines()
new_data = []
for line in data:
new_line = line.replace('-', ' -')
new_line = new_line.replace(' ', ' ')
new_data.append(new_line)
new_file = open(new_filename, 'w')
for s in new_data:
new_file.write(s)
old_file.close()
new_file.close()
return new_filename
def old_fix_separation(filename, new_filename):
old_file = open(filename, 'r')
data = old_file.readlines()
new_data = []
for line in data:
new_line = []
for i in line.split():
if '-' in i[1:]:
lead_char = '-' if i[0] == '-' else ''
if lead_char:
v = i[1:].split('-')
else:
v = i.split('-')
new_line.append(lead_char + v[0])
new_line.append('-' + v[1])
else:
new_line.append(i)
new_line = (' '.join(new_line)) + '\n'
new_data.append(new_line)
new_file = open(new_filename, 'w')
for s in new_data:
new_file.write(s)
new_file.close()
old_file.close()
return new_filename
# initialize some stuff
noave=0
volume=2.5**3 #default volume is a 2.5cm cube
inst=""
samp_con,Z='5',""
missing=1
demag="N"
er_location_name="unknown"
citation='This study'
args=sys.argv
meth_code="LP-NO"
version_num=pmag.get_version()
dir_path='.'
MagRecs=[]
samp_file = 'er_samples.txt'
meas_file = 'magic_measurements.txt'
mag_file = ''
#
# get command line arguments
#
if command_line:
if '-WD' in sys.argv:
ind = sys.argv.index('-WD')
dir_path=sys.argv[ind+1]
if '-ID' in sys.argv:
ind = sys.argv.index('-ID')
input_dir_path = sys.argv[ind+1]
else:
input_dir_path = dir_path
output_dir_path = dir_path
if "-h" in args:
print(main.__doc__)
return False
if '-F' in args:
ind=args.index("-F")
meas_file = args[ind+1]
if '-fsa' in args:
ind = args.index("-fsa")
samp_file = args[ind+1]
if samp_file[0]!='/':
samp_file = os.path.join(input_dir_path, samp_file)
try:
open(samp_file,'r')
ErSamps,file_type=pmag.magic_read(samp_file)
except:
print(samp_file,' not found: ')
print(' download csv file and import to MagIC with iodp_samples_magic.py')
if '-f' in args:
ind = args.index("-f")
mag_file= args[ind+1]
if "-loc" in args:
ind=args.index("-loc")
er_location_name=args[ind+1]
if "-A" in args:
noave=1
if not command_line:
dir_path = kwargs.get('dir_path', '.')
input_dir_path = kwargs.get('input_dir_path', dir_path)
output_dir_path = dir_path
meas_file = kwargs.get('meas_file', 'magic_measurements.txt')
mag_file = kwargs.get('mag_file', '')
samp_file = kwargs.get('samp_file', 'er_samples.txt')
specnum = kwargs.get('specnum', 1)
samp_con = kwargs.get('samp_con', '1')
if len(str(samp_con)) > 1:
samp_con, Z = samp_con.split('-')
else:
Z = ''
er_location_name = kwargs.get('er_location_name', '')
noave = kwargs.get('noave', 0) # default (0) means DO average
meth_code = kwargs.get('meth_code', "LP-NO")
# format variables
meth_code=meth_code+":FS-C-DRILL-IODP:SP-SS-C:SO-V"
meth_code=meth_code.strip(":")
if mag_file:
mag_file = os.path.join(input_dir_path, mag_file)
samp_file = os.path.join(input_dir_path, samp_file)
meas_file = os.path.join(output_dir_path, meas_file)
# validate variables
if not mag_file:
print("You must provide an IODP_jr6 format file")
return False, "You must provide an IODP_jr6 format file"
if not os.path.exists(mag_file):
print('The input file you provided: {} does not exist.\nMake sure you have specified the correct filename AND correct input directory name.'.format(mag_file))
return False, 'The input file you provided: {} does not exist.\nMake sure you have specified the correct filename AND correct input directory name.'.format(mag_file)
if not os.path.exists(samp_file):
print("Your input directory:\n{}\nmust contain an er_samples.txt file, or you must explicitly provide one".format(input_dir_path))
return False, "Your input directory:\n{}\nmust contain an er_samples.txt file, or you must explicitly provide one".format(input_dir_path)
# parse data
temp = os.path.join(output_dir_path, 'temp.txt')
fix_separation(mag_file, temp)
samples, filetype = pmag.magic_read(samp_file)
with open(temp, 'r') as finput:
lines = finput.readlines()
os.remove(temp)
for line in lines:
MagRec = {}
line = line.split()
spec_text_id = line[0].split('_')[1]
SampRecs=pmag.get_dictitem(samples,'er_sample_alternatives',spec_text_id,'has')
if len(SampRecs)>0: # found one
MagRec['er_specimen_name']=SampRecs[0]['er_sample_name']
MagRec['er_sample_name']=MagRec['er_specimen_name']
MagRec['er_site_name']=MagRec['er_specimen_name']
MagRec["er_citation_names"]="This study"
MagRec['er_location_name']=er_location_name
MagRec['magic_software_packages']=version_num
MagRec["treatment_temp"]='%8.3e' % (273) # room temp in kelvin
MagRec["measurement_temp"]='%8.3e' % (273) # room temp in kelvin
MagRec["measurement_flag"]='g'
MagRec["measurement_standard"]='u'
MagRec["measurement_number"]='1'
MagRec["treatment_ac_field"]='0'
volume=float(SampRecs[0]['sample_volume'])
x = float(line[4])
y = float(line[3])
negz = float(line[2])
cart=np.array([x,y,-negz]).transpose()
direction = pmag.cart2dir(cart).transpose()
expon = float(line[5])
magn_volume = direction[2] * (10.0**expon)
moment = magn_volume * volume
MagRec["measurement_magn_moment"]=str(moment)
MagRec["measurement_magn_volume"]=str(magn_volume)#str(direction[2] * (10.0 ** expon))
MagRec["measurement_dec"]='%7.1f'%(direction[0])
MagRec["measurement_inc"]='%7.1f'%(direction[1])
step = line[1]
if step == 'NRM':
meas_type="LT-NO"
elif step[0:2] == 'AD':
meas_type="LT-AF-Z"
treat=float(step[2:])
MagRec["treatment_ac_field"]='%8.3e' %(treat*1e-3) # convert from mT to tesla
elif step[0:2] == 'TD':
meas_type="LT-T-Z"
treat=float(step[2:])
MagRec["treatment_temp"]='%8.3e' % (treat+273.) # temp in kelvin
elif step[0:3]=='ARM': #
meas_type="LT-AF-I"
                treat=float(step[3:])
MagRec["treatment_ac_field"]='%8.3e' %(treat*1e-3) # convert from mT to tesla
MagRec["treatment_dc_field"]='%8.3e' %(50e-6) # assume 50uT DC field
MagRec["measurement_description"]='Assumed DC field - actual unknown'
elif step[0:3]=='IRM': #
meas_type="LT-IRM"
treat=float(step[3:])
MagRec["treatment_dc_field"]='%8.3e' %(treat*1e-3) # convert from mT to tesla
else:
                print('unknown treatment type for ', step)
                return False, 'unknown treatment type for ' + str(step)
MagRec['magic_method_codes']=meas_type
MagRecs.append(MagRec.copy())
else:
            print('sample name not found: ', spec_text_id)
MagOuts=pmag.measurements_methods(MagRecs,noave)
file_created, error_message = pmag.magic_write(meas_file,MagOuts,'magic_measurements')
if file_created:
return True, meas_file
else:
return False, 'Results not written to file' | NAME
iodp_jr6_magic.py
DESCRIPTION
converts shipboard .jr6 format files to magic_measurements format files
SYNTAX
iodp_jr6_magic.py [command line options]
OPTIONS
-h: prints the help message and quits.
-f FILE: specify input file, or
-F FILE: specify output file, default is magic_measurements.txt
-fsa FILE: specify er_samples.txt file for sample name lookup ,
default is 'er_samples.txt'
-loc HOLE : specify hole name (U1456A)
-A: don't average replicate measurements
INPUT
JR6 .jr6 format file | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/conversion_scripts2/iodp_jr6_magic2.py#L9-L245 |
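A hedged usage sketch; the hole and file names below are hypothetical, and the er_samples.txt file must already hold the IODP sample names for the lookup step:

# command line, per the options above:
#   python iodp_jr6_magic2.py -f U1456A.jr6 -fsa er_samples.txt -F magic_measurements.txt -loc U1456A
# or from Python, via the **kwargs branch of main():
ok, meas_file = main(command_line=False, mag_file='U1456A.jr6',
                     samp_file='er_samples.txt', er_location_name='U1456A', noave=0)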
PmagPy/PmagPy | SPD/lib/lib_ptrm_statistics.py | get_n_ptrm | def get_n_ptrm(tmin, tmax, ptrm_temps, ptrm_starting_temps):
"""
input: tmin, tmax, ptrm_temps, ptrm_starting_temps
returns number of ptrm_checks included in best fit segment.
excludes checks if temp exceeds tmax OR if starting temp exceeds tmax.
output: n_ptrm, ptrm_checks_included_temperatures
"""
# does not exclude ptrm checks that are less than tmin
ptrm_checks_included_temps= []
for num, check in enumerate(ptrm_temps):
if check > tmax:
pass
elif ptrm_starting_temps[num] > tmax: # or ptrm_starting_temps[num] < tmin:
pass
else:
ptrm_checks_included_temps.append(check)
return len(ptrm_checks_included_temps), ptrm_checks_included_temps | python | def get_n_ptrm(tmin, tmax, ptrm_temps, ptrm_starting_temps):
"""
input: tmin, tmax, ptrm_temps, ptrm_starting_temps
returns number of ptrm_checks included in best fit segment.
excludes checks if temp exceeds tmax OR if starting temp exceeds tmax.
output: n_ptrm, ptrm_checks_included_temperatures
"""
# does not exclude ptrm checks that are less than tmin
ptrm_checks_included_temps= []
for num, check in enumerate(ptrm_temps):
if check > tmax:
pass
elif ptrm_starting_temps[num] > tmax: # or ptrm_starting_temps[num] < tmin:
pass
else:
ptrm_checks_included_temps.append(check)
return len(ptrm_checks_included_temps), ptrm_checks_included_temps | input: tmin, tmax, ptrm_temps, ptrm_starting_temps
returns number of ptrm_checks included in best fit segment.
excludes checks if temp exceeds tmax OR if starting temp exceeds tmax.
output: n_ptrm, ptrm_checks_included_temperatures | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/SPD/lib/lib_ptrm_statistics.py#L11-L27 |
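A small worked example with hypothetical check temperatures (kelvin), illustrating the two exclusion rules:

ptrm_temps = [373., 473., 573., 673.]           # temperatures of the pTRM checks
ptrm_starting_temps = [473., 573., 673., 773.]  # temperatures the checks were started from
n, included = get_n_ptrm(273., 623., ptrm_temps, ptrm_starting_temps)
# n == 2, included == [373., 473.]: the 573. check is dropped because its starting
# temperature (673.) exceeds tmax, and the 673. check exceeds tmax itself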
PmagPy/PmagPy | SPD/lib/lib_ptrm_statistics.py | get_max_ptrm_check | def get_max_ptrm_check(ptrm_checks_included_temps, ptrm_checks_all_temps, ptrm_x, t_Arai, x_Arai):
"""
input: ptrm_checks_included_temps, ptrm_checks_all_temps, ptrm_x, t_Arai, x_Arai.
sorts through included ptrm_checks and finds the largest ptrm check diff,
the sum of the total diffs,
and the percentage of the largest check / original measurement at that temperature step
output: max_diff, sum_diffs, check_percent, sum_abs_diffs.
"""
if not ptrm_checks_included_temps:
return [], float('nan'), float('nan'), float('nan'), float('nan')
diffs = []
abs_diffs = []
x_Arai_compare = []
ptrm_compare = []
check_percents = []
ptrm_checks_all_temps = list(ptrm_checks_all_temps)
for check in ptrm_checks_included_temps: # goes through each included temperature step
ptrm_ind = ptrm_checks_all_temps.index(check) # indexes the number of the check
ptrm_check = ptrm_x[ptrm_ind] # x value at that temperature step
ptrm_compare.append(ptrm_check) #
arai_ind = t_Arai.index(check)
ptrm_orig = x_Arai[arai_ind]
x_Arai_compare.append(ptrm_orig)
diff = ptrm_orig - ptrm_check
diffs.append(diff)
abs_diffs.append(abs(diff))
if ptrm_orig == 0:
check_percents.append(0)
else:
check_percents.append((old_div(abs(diff), ptrm_orig)) * 100)
max_diff = max(abs_diffs)
check_percent = max(check_percents)
sum_diffs = abs(sum(diffs))
sum_abs_diffs = sum(abs_diffs)
return diffs, max_diff, sum_diffs, check_percent, sum_abs_diffs | python | def get_max_ptrm_check(ptrm_checks_included_temps, ptrm_checks_all_temps, ptrm_x, t_Arai, x_Arai):
"""
input: ptrm_checks_included_temps, ptrm_checks_all_temps, ptrm_x, t_Arai, x_Arai.
sorts through included ptrm_checks and finds the largest ptrm check diff,
the sum of the total diffs,
and the percentage of the largest check / original measurement at that temperature step
output: max_diff, sum_diffs, check_percent, sum_abs_diffs.
"""
if not ptrm_checks_included_temps:
return [], float('nan'), float('nan'), float('nan'), float('nan')
diffs = []
abs_diffs = []
x_Arai_compare = []
ptrm_compare = []
check_percents = []
ptrm_checks_all_temps = list(ptrm_checks_all_temps)
for check in ptrm_checks_included_temps: # goes through each included temperature step
ptrm_ind = ptrm_checks_all_temps.index(check) # indexes the number of the check
ptrm_check = ptrm_x[ptrm_ind] # x value at that temperature step
ptrm_compare.append(ptrm_check) #
arai_ind = t_Arai.index(check)
ptrm_orig = x_Arai[arai_ind]
x_Arai_compare.append(ptrm_orig)
diff = ptrm_orig - ptrm_check
diffs.append(diff)
abs_diffs.append(abs(diff))
if ptrm_orig == 0:
check_percents.append(0)
else:
check_percents.append((old_div(abs(diff), ptrm_orig)) * 100)
max_diff = max(abs_diffs)
check_percent = max(check_percents)
sum_diffs = abs(sum(diffs))
sum_abs_diffs = sum(abs_diffs)
return diffs, max_diff, sum_diffs, check_percent, sum_abs_diffs | input: ptrm_checks_included_temps, ptrm_checks_all_temps, ptrm_x, t_Arai, x_Arai.
sorts through included ptrm_checks and finds the largest ptrm check diff,
the sum of the total diffs,
and the percentage of the largest check / original measurement at that temperature step
output: max_diff, sum_diffs, check_percent, sum_abs_diffs. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/SPD/lib/lib_ptrm_statistics.py#L29-L63 |
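A small worked example with hypothetical Arai-plot values (arbitrary units):

t_Arai = [273., 373., 473., 573.]
x_Arai = [0.0, 0.2, 0.5, 0.9]                   # pTRM gained at each step
ptrm_checks_all_temps = [373., 473.]
ptrm_x = [0.25, 0.45]                           # pTRM check values at those steps
diffs, max_diff, sum_diffs, check_percent, sum_abs_diffs = get_max_ptrm_check(
    [373., 473.], ptrm_checks_all_temps, ptrm_x, t_Arai, x_Arai)
# diffs ~ [-0.05, 0.05]; max_diff ~ 0.05; sum_diffs ~ 0 (signed diffs cancel);
# sum_abs_diffs ~ 0.1; check_percent ~ 25.0 (|-0.05| / 0.2 * 100 at the 373 K step)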
PmagPy/PmagPy | SPD/lib/lib_ptrm_statistics.py | get_DRAT | def get_DRAT(delta_x_prime, delta_y_prime, max_ptrm_check):
"""
Input: TRM length of best fit line (delta_x_prime),
NRM length of best fit line,
max_ptrm_check
Output: DRAT (maximum difference produced by a ptrm check normed by best fit line),
length best fit line
"""
L = numpy.sqrt(delta_x_prime**2 + delta_y_prime**2)
DRAT = (old_div(max_ptrm_check, L)) * 100
return DRAT, L | python | def get_DRAT(delta_x_prime, delta_y_prime, max_ptrm_check):
"""
Input: TRM length of best fit line (delta_x_prime),
NRM length of best fit line,
max_ptrm_check
Output: DRAT (maximum difference produced by a ptrm check normed by best fit line),
length best fit line
"""
L = numpy.sqrt(delta_x_prime**2 + delta_y_prime**2)
DRAT = (old_div(max_ptrm_check, L)) * 100
return DRAT, L | Input: TRM length of best fit line (delta_x_prime),
NRM length of best fit line,
max_ptrm_check
Output: DRAT (maximum difference produced by a ptrm check normed by best fit line),
length best fit line | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/SPD/lib/lib_ptrm_statistics.py#L73-L83 |
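A numeric sketch of the DRAT normalization, with hypothetical best-fit line components:

DRAT, L = get_DRAT(delta_x_prime=0.8, delta_y_prime=0.6, max_ptrm_check=0.05)
# L ~ 1.0 (sqrt(0.8**2 + 0.6**2)) and DRAT ~ 5.0 (0.05 / 1.0 * 100)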
PmagPy/PmagPy | SPD/lib/lib_ptrm_statistics.py | get_CDRAT | def get_CDRAT(L, sum_ptrm_checks, sum_abs_ptrm_checks):
"""
input: best_fit line length, sum of ptrm check diffs,
sum of absolute value of ptrm check diffs
output: CDRAT (uses sum of diffs), CDRAT_prime (uses sum of absolute diffs)
"""
CDRAT = (old_div(sum_ptrm_checks, L)) * 100.
CDRAT_prime = (old_div(sum_abs_ptrm_checks, L)) * 100.
return CDRAT, CDRAT_prime | python | def get_CDRAT(L, sum_ptrm_checks, sum_abs_ptrm_checks):
"""
input: best_fit line length, sum of ptrm check diffs,
sum of absolute value of ptrm check diffs
output: CDRAT (uses sum of diffs), CDRAT_prime (uses sum of absolute diffs)
"""
CDRAT = (old_div(sum_ptrm_checks, L)) * 100.
CDRAT_prime = (old_div(sum_abs_ptrm_checks, L)) * 100.
return CDRAT, CDRAT_prime | input: best_fit line length, sum of ptrm check diffs,
sum of absolute value of ptrm check diffs
output: CDRAT (uses sum of diffs), CDRAT_prime (uses sum of absolute diffs) | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/SPD/lib/lib_ptrm_statistics.py#L96-L104 |
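Continuing the hypothetical numbers from the sketches above:

CDRAT, CDRAT_prime = get_CDRAT(L=1.0, sum_ptrm_checks=0.0, sum_abs_ptrm_checks=0.1)
# CDRAT == 0.0 (signed check differences cancel), CDRAT_prime ~ 10.0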
PmagPy/PmagPy | SPD/lib/lib_ptrm_statistics.py | get_DRATS | def get_DRATS(sum_ptrm_checks, sum_abs_ptrm_checks, x_Arai, end):
"""
input: sum of ptrm check diffs, sum of absolute value of ptrm check diffs,
x_Arai set of points, end.
output: DRATS (uses sum of diffs), DRATS_prime (uses sum of absolute diffs)
"""
DRATS = (old_div(sum_ptrm_checks, x_Arai[end])) * 100.
DRATS_prime = (old_div(sum_abs_ptrm_checks, x_Arai[end])) * 100.
return DRATS, DRATS_prime | python | def get_DRATS(sum_ptrm_checks, sum_abs_ptrm_checks, x_Arai, end):
"""
input: sum of ptrm check diffs, sum of absolute value of ptrm check diffs,
x_Arai set of points, end.
output: DRATS (uses sum of diffs), DRATS_prime (uses sum of absolute diffs)
"""
DRATS = (old_div(sum_ptrm_checks, x_Arai[end])) * 100.
DRATS_prime = (old_div(sum_abs_ptrm_checks, x_Arai[end])) * 100.
return DRATS, DRATS_prime | input: sum of ptrm check diffs, sum of absolute value of ptrm check diffs,
x_Arai set of points, end.
output: DRATS (uses sum of diffs), DRATS_prime (uses sum of absolute diffs) | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/SPD/lib/lib_ptrm_statistics.py#L106-L114 |
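A numeric sketch; here the normalization is the pTRM gained at the last step of the chosen segment rather than the best-fit line length (values hypothetical):

x_Arai = [0.0, 0.2, 0.5, 0.9]
DRATS, DRATS_prime = get_DRATS(sum_ptrm_checks=0.0, sum_abs_ptrm_checks=0.1,
                               x_Arai=x_Arai, end=2)
# both are normalized by x_Arai[2] == 0.5, so DRATS == 0.0 and DRATS_prime ~ 20.0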
PmagPy/PmagPy | SPD/lib/lib_ptrm_statistics.py | get_mean_DRAT | def get_mean_DRAT(sum_ptrm_checks, sum_abs_ptrm_checks, n_pTRM, L):
"""
input: sum_ptrm_checks, sum_abs_ptrm_checks, n_pTRM, L
output: mean DRAT (the average difference produced by a pTRM check,
normalized by the length of the best-fit line)
"""
if not n_pTRM:
return float('nan'), float('nan')
mean_DRAT = ((old_div(1., n_pTRM)) * (old_div(sum_ptrm_checks, L))) * 100
mean_DRAT_prime = ((old_div(1., n_pTRM)) * (old_div(sum_abs_ptrm_checks, L))) * 100
return mean_DRAT, mean_DRAT_prime | python | def get_mean_DRAT(sum_ptrm_checks, sum_abs_ptrm_checks, n_pTRM, L):
"""
input: sum_ptrm_checks, sum_abs_ptrm_checks, n_pTRM, L
output: mean DRAT (the average difference produced by a pTRM check,
normalized by the length of the best-fit line)
"""
if not n_pTRM:
return float('nan'), float('nan')
mean_DRAT = ((old_div(1., n_pTRM)) * (old_div(sum_ptrm_checks, L))) * 100
mean_DRAT_prime = ((old_div(1., n_pTRM)) * (old_div(sum_abs_ptrm_checks, L))) * 100
return mean_DRAT, mean_DRAT_prime | input: sum_ptrm_checks, sum_abs_ptrm_checks, n_pTRM, L
output: mean DRAT (the average difference produced by a pTRM check,
normalized by the length of the best-fit line) | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/SPD/lib/lib_ptrm_statistics.py#L116-L126 |
PmagPy/PmagPy | SPD/lib/lib_ptrm_statistics.py | get_mean_DEV | def get_mean_DEV(sum_ptrm_checks, sum_abs_ptrm_checks, n_pTRM, delta_x_prime):
"""
input: sum_ptrm_checks, sum_abs_ptrm_checks, n_pTRM, delta_x_prime
output: Mean deviation of a pTRM check
"""
if not n_pTRM:
return float('nan'), float('nan')
mean_DEV = ((old_div(1., n_pTRM)) * (old_div(sum_ptrm_checks, delta_x_prime))) * 100
mean_DEV_prime= ((old_div(1., n_pTRM)) * (old_div(sum_abs_ptrm_checks, delta_x_prime))) * 100
return mean_DEV, mean_DEV_prime | python | def get_mean_DEV(sum_ptrm_checks, sum_abs_ptrm_checks, n_pTRM, delta_x_prime):
"""
input: sum_ptrm_checks, sum_abs_ptrm_checks, n_pTRM, delta_x_prime
output: Mean deviation of a pTRM check
"""
if not n_pTRM:
return float('nan'), float('nan')
mean_DEV = ((old_div(1., n_pTRM)) * (old_div(sum_ptrm_checks, delta_x_prime))) * 100
mean_DEV_prime= ((old_div(1., n_pTRM)) * (old_div(sum_abs_ptrm_checks, delta_x_prime))) * 100
return mean_DEV, mean_DEV_prime | input: sum_ptrm_checks, sum_abs_ptrm_checks, n_pTRM, delta_x_prime
output: Mean deviation of a pTRM check | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/SPD/lib/lib_ptrm_statistics.py#L128-L137 |
PmagPy/PmagPy | SPD/lib/lib_ptrm_statistics.py | get_delta_pal_vectors | def get_delta_pal_vectors(PTRMS, PTRM_Checks, NRM):
""" takes in PTRM data in this format: [temp, dec, inc, moment, ZI or IZ] -- and PTRM_check data in this format: [temp, dec, inc, moment]. Returns them in vector form (cartesian). """
PTRMS = numpy.array(PTRMS)
PTRM_Checks = numpy.array(PTRM_Checks)
TRM_1 = lib_direct.dir2cart(PTRMS[0,1:3])
PTRMS_cart = []
Checks_cart = []
for num, ptrm in enumerate(PTRMS):
ptrm_cart = lib_direct.dir2cart([PTRMS[num][1], PTRMS[num][2], old_div(PTRMS[num][3], NRM)])
PTRMS_cart.append(ptrm_cart)
for num, check in enumerate(PTRM_Checks):
check_cart = lib_direct.dir2cart([PTRM_Checks[num][1], PTRM_Checks[num][2], old_div(PTRM_Checks[num][3], NRM)])
Checks_cart.append(check_cart)
return PTRMS_cart, Checks_cart, TRM_1 | python | def get_delta_pal_vectors(PTRMS, PTRM_Checks, NRM):
""" takes in PTRM data in this format: [temp, dec, inc, moment, ZI or IZ] -- and PTRM_check data in this format: [temp, dec, inc, moment]. Returns them in vector form (cartesian). """
PTRMS = numpy.array(PTRMS)
PTRM_Checks = numpy.array(PTRM_Checks)
TRM_1 = lib_direct.dir2cart(PTRMS[0,1:3])
PTRMS_cart = []
Checks_cart = []
for num, ptrm in enumerate(PTRMS):
ptrm_cart = lib_direct.dir2cart([PTRMS[num][1], PTRMS[num][2], old_div(PTRMS[num][3], NRM)])
PTRMS_cart.append(ptrm_cart)
for num, check in enumerate(PTRM_Checks):
check_cart = lib_direct.dir2cart([PTRM_Checks[num][1], PTRM_Checks[num][2], old_div(PTRM_Checks[num][3], NRM)])
Checks_cart.append(check_cart)
return PTRMS_cart, Checks_cart, TRM_1 | takes in PTRM data in this format: [temp, dec, inc, moment, ZI or IZ] -- and PTRM_check data in this format: [temp, dec, inc, moment]. Returns them in vector form (cartesian). | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/SPD/lib/lib_ptrm_statistics.py#L139-L152 |
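get_delta_pal_vectors relies on lib_direct.dir2cart to turn (dec, inc, moment/NRM) triples into cartesian vectors; the sketch below shows the conventional direction-to-cartesian conversion with plain numpy, as an assumed stand-in for the SPD routine rather than its actual implementation:

import numpy

def dir2cart_sketch(dec, inc, intensity=1.0):
    # x north, y east, z down; dec/inc in degrees
    dec_r, inc_r = numpy.radians(dec), numpy.radians(inc)
    return intensity * numpy.array([numpy.cos(dec_r) * numpy.cos(inc_r),
                                    numpy.sin(dec_r) * numpy.cos(inc_r),
                                    numpy.sin(inc_r)])

# hypothetical pTRM step: dec=10, inc=45, moment already normalized by NRM
print(dir2cart_sketch(10., 45., 0.8))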
PmagPy/PmagPy | SPD/lib/lib_ptrm_statistics.py | get_diffs | def get_diffs(ptrms_vectors, ptrm_checks_vectors, ptrms_orig, checks_orig):
"""
input: ptrms_vectors, ptrm_checks_vectors, ptrms_orig, checks_orig
output: vector diffs between original and ptrm check, C
"""
ptrm_temps = numpy.array(ptrms_orig)[:,0]
check_temps = numpy.array(checks_orig)[:,0]
index = numpy.zeros(len(ptrm_temps))
for num, temp in enumerate(ptrm_temps):
if len(numpy.where(check_temps == temp)[0]):
index[num] = numpy.where(check_temps == temp)[0][0]
else:
index[num] = float('nan')
diffs = numpy.zeros((len(ptrms_vectors), 3))
for num, ptrm in enumerate(ptrms_vectors):
if numpy.isnan(index[num]):
diffs[num] = numpy.array([0,0,0])
else:
diffs[num] = ptrm_checks_vectors[int(index[num])] - ptrm
C = numpy.cumsum(diffs, 0)
#print "diffs (should be same as to_sum"
#print diffs
#print "C (should be same as dpal_sum)"
#print C
return diffs, C | python | def get_diffs(ptrms_vectors, ptrm_checks_vectors, ptrms_orig, checks_orig):
"""
input: ptrms_vectors, ptrm_checks_vectors, ptrms_orig, checks_orig
output: vector diffs between original and ptrm check, C
"""
ptrm_temps = numpy.array(ptrms_orig)[:,0]
check_temps = numpy.array(checks_orig)[:,0]
index = numpy.zeros(len(ptrm_temps))
for num, temp in enumerate(ptrm_temps):
if len(numpy.where(check_temps == temp)[0]):
index[num] = numpy.where(check_temps == temp)[0][0]
else:
index[num] = float('nan')
diffs = numpy.zeros((len(ptrms_vectors), 3))
for num, ptrm in enumerate(ptrms_vectors):
if numpy.isnan(index[num]):
diffs[num] = numpy.array([0,0,0])
else:
diffs[num] = ptrm_checks_vectors[int(index[num])] - ptrm
C = numpy.cumsum(diffs, 0)
#print "diffs (should be same as to_sum"
#print diffs
#print "C (should be same as dpal_sum)"
#print C
return diffs, C | input: ptrms_vectors, ptrm_checks_vectors, ptrms_orig, checks_orig
output: vector diffs between original and ptrm check, C | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/SPD/lib/lib_ptrm_statistics.py#L154-L178 |
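The temperature matching and running sum in get_diffs can be followed on a tiny hand-made example (hypothetical vectors; only the bookkeeping is of interest):

import numpy
ptrm_temps  = numpy.array([373., 473., 573.])                         # pTRM acquisition temperatures
check_temps = numpy.array([373., 573.])                               # checks exist at only two of them
ptrm_vecs   = numpy.array([[0.10, 0., 0.], [0.30, 0., 0.], [0.60, 0., 0.]])
check_vecs  = numpy.array([[0.12, 0., 0.], [0.55, 0., 0.]])
diffs = numpy.zeros((len(ptrm_vecs), 3))
for i, t in enumerate(ptrm_temps):
    hits = numpy.where(check_temps == t)[0]
    if len(hits):                                                     # a check was run at this temperature
        diffs[i] = check_vecs[hits[0]] - ptrm_vecs[i]                 # otherwise the row stays [0, 0, 0]
C = numpy.cumsum(diffs, 0)                                            # cumulative correction used downstream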
PmagPy/PmagPy | SPD/lib/lib_ptrm_statistics.py | get_TRM_star | def get_TRM_star(C, ptrms_vectors, start, end):
"""
input: C, ptrms_vectors, start, end
output: TRM_star, x_star (for delta_pal statistic)
"""
TRM_star = numpy.zeros([len(ptrms_vectors), 3])
TRM_star[0] = [0., 0., 0.]
x_star = numpy.zeros(len(ptrms_vectors))
for num, vec in enumerate(ptrms_vectors[1:]):
TRM_star[num+1] = vec + C[num]
# print 'vec', vec
# print 'C', C[num]
for num, trm in enumerate(TRM_star):
x_star[num] = numpy.linalg.norm(trm)
#print "x_star (should match corr_TRM / NRM)"
#print x_star[start:end+1]
return TRM_star[start:end+1], x_star[start:end+1] | python | def get_TRM_star(C, ptrms_vectors, start, end):
"""
input: C, ptrms_vectors, start, end
output: TRM_star, x_star (for delta_pal statistic)
"""
TRM_star = numpy.zeros([len(ptrms_vectors), 3])
TRM_star[0] = [0., 0., 0.]
x_star = numpy.zeros(len(ptrms_vectors))
for num, vec in enumerate(ptrms_vectors[1:]):
TRM_star[num+1] = vec + C[num]
# print 'vec', vec
# print 'C', C[num]
for num, trm in enumerate(TRM_star):
x_star[num] = numpy.linalg.norm(trm)
#print "x_star (should match corr_TRM / NRM)"
#print x_star[start:end+1]
return TRM_star[start:end+1], x_star[start:end+1] | input: C, ptrms_vectors, start, end
output: TRM_star, x_star (for delta_pal statistic) | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/SPD/lib/lib_ptrm_statistics.py#L180-L196 |
PmagPy/PmagPy | SPD/lib/lib_ptrm_statistics.py | get_b_star | def get_b_star(x_star, y_err, y_mean, y_segment):
"""
input: x_star, y_err, y_mean, y_segment
output: b_star (corrected slope for delta_pal statistic)
"""
#print "x_star, should be same as Xcorr / NRM"
#print x_star
x_star_mean = numpy.mean(x_star)
x_err = x_star - x_star_mean
b_star = -1* numpy.sqrt( old_div(sum(numpy.array(y_err)**2), sum(numpy.array(x_err)**2)) ) # averaged slope
#print "y_segment", y_segment
b_star = numpy.sign(sum(x_err * y_err)) * numpy.std(y_segment, ddof=1) / numpy.std(x_star, ddof=1)
#print "b_star (should be same as corr_slope)"
#print b_star
return b_star | python | def get_b_star(x_star, y_err, y_mean, y_segment):
"""
input: x_star, y_err, y_mean, y_segment
output: b_star (corrected slope for delta_pal statistic)
"""
#print "x_star, should be same as Xcorr / NRM"
#print x_star
x_star_mean = numpy.mean(x_star)
x_err = x_star - x_star_mean
b_star = -1* numpy.sqrt( old_div(sum(numpy.array(y_err)**2), sum(numpy.array(x_err)**2)) ) # averaged slope
#print "y_segment", y_segment
b_star = numpy.sign(sum(x_err * y_err)) * numpy.std(y_segment, ddof=1) / numpy.std(x_star, ddof=1)
#print "b_star (should be same as corr_slope)"
#print b_star
return b_star | input: x_star, y_err, y_mean, y_segment
output: b_star (corrected slope for delta_pal statistic) | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/SPD/lib/lib_ptrm_statistics.py#L198-L212 |
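The final b_star above is the standard Arai-plot slope estimator (sign of the covariance times the ratio of sample standard deviations), applied to the check-corrected x values; a stand-alone sketch with synthetic data:

import numpy
x_star    = numpy.array([0.0, 0.2, 0.45, 0.7, 1.0])   # hypothetical corrected TRM values
y_segment = numpy.array([1.0, 0.8, 0.55, 0.3, 0.0])   # matching NRM values (here exactly 1 - x)
x_err = x_star - numpy.mean(x_star)
y_err = y_segment - numpy.mean(y_segment)
b_star = numpy.sign(numpy.sum(x_err * y_err)) * numpy.std(y_segment, ddof=1) / numpy.std(x_star, ddof=1)
# b_star == -1.0 for this perfectly anti-correlated segment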
PmagPy/PmagPy | SPD/lib/lib_ptrm_statistics.py | get_delta_pal | def get_delta_pal(b, b_star):
"""
input: b, b_star (actual and corrected slope)
output: delta_pal
"""
delta_pal = numpy.abs(old_div((b - b_star), b)) * 100
return delta_pal | python | def get_delta_pal(b, b_star):
"""
input: b, b_star (actual and corrected slope)
output: delta_pal
"""
delta_pal = numpy.abs(old_div((b - b_star), b)) * 100
return delta_pal | input: b, b_star (actual and corrected slope)
output: delta_pal | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/SPD/lib/lib_ptrm_statistics.py#L215-L221 |
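delta_pal is just the relative change between the uncorrected and check-corrected slopes; for example, with hypothetical slopes b = -1.10 and b_star = -1.04:

import numpy
b, b_star = -1.10, -1.04                         # hypothetical Arai slopes
delta_pal = numpy.abs((b - b_star) / b) * 100.   # about 5.5% change in slope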
PmagPy/PmagPy | SPD/lib/lib_ptrm_statistics.py | get_full_delta_pal | def get_full_delta_pal(PTRMS, PTRM_Checks, NRM, y_err, y_mean, b, start, end, y_segment):
"""
input: PTRMS, PTRM_Checks, NRM, y_err, y_mean, b, start, end, y_segment
runs full sequence necessary to get delta_pal
"""
#print "-------"
#print "calling get_full_delta_pal in lib"
# return 0
PTRMS_cart, checks, TRM_1 = get_delta_pal_vectors(PTRMS, PTRM_Checks, NRM)
# print "PTRMS_Cart", PTRMS_cart
diffs, C = get_diffs(PTRMS_cart, checks, PTRMS, PTRM_Checks)
# print "C", C
TRM_star, x_star = get_TRM_star(C, PTRMS_cart, start, end)
# print "x_star", x_star
# print type(x_star)
b_star = get_b_star(x_star, y_err, y_mean, y_segment)
delta_pal = get_delta_pal(b, b_star)
return delta_pal | python | def get_full_delta_pal(PTRMS, PTRM_Checks, NRM, y_err, y_mean, b, start, end, y_segment):
"""
input: PTRMS, PTRM_Checks, NRM, y_err, y_mean, b, start, end, y_segment
runs full sequence necessary to get delta_pal
"""
#print "-------"
#print "calling get_full_delta_pal in lib"
# return 0
PTRMS_cart, checks, TRM_1 = get_delta_pal_vectors(PTRMS, PTRM_Checks, NRM)
# print "PTRMS_Cart", PTRMS_cart
diffs, C = get_diffs(PTRMS_cart, checks, PTRMS, PTRM_Checks)
# print "C", C
TRM_star, x_star = get_TRM_star(C, PTRMS_cart, start, end)
# print "x_star", x_star
# print type(x_star)
b_star = get_b_star(x_star, y_err, y_mean, y_segment)
delta_pal = get_delta_pal(b, b_star)
return delta_pal | input: PTRMS, PTRM_Checks, NRM, y_err, y_mean, b, start, end, y_segment
runs full sequence necessary to get delta_pal | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/SPD/lib/lib_ptrm_statistics.py#L224-L241 |
PmagPy/PmagPy | SPD/lib/lib_ptrm_statistics.py | get_segments | def get_segments(ptrms, ptrm_checks, tmax):
"""
input: ptrms, ptrm_checks, tmax
grabs ptrms that are done below tmax
grabs ptrm checks that are done below tmax AND whose starting temp is below tmax
output: ptrms_included, checks_included
"""
ptrms_included = []
checks_included = []
ptrms = numpy.array(ptrms)
for ptrm in ptrms:
if ptrm[0] <= tmax:
ptrms_included.append(ptrm)
for check in ptrm_checks:
if check[0] <= tmax:
checks_included.append(check)
#print "checks", ptrm_checks
#print "checks_included", checks_included
return ptrms_included, checks_included | python | def get_segments(ptrms, ptrm_checks, tmax):
"""
input: ptrms, ptrm_checks, tmax
grabs ptrms that are done below tmax
grabs ptrm checks that are done below tmax AND whose starting temp is below tmax
output: ptrms_included, checks_included
"""
ptrms_included = []
checks_included = []
ptrms = numpy.array(ptrms)
for ptrm in ptrms:
if ptrm[0] <= tmax:
ptrms_included.append(ptrm)
for check in ptrm_checks:
if check[0] <= tmax:
checks_included.append(check)
#print "checks", ptrm_checks
#print "checks_included", checks_included
return ptrms_included, checks_included | input: ptrms, ptrm_checks, tmax
grabs ptrms that are done below tmax
grabs ptrm checks that are done below tmax AND whose starting temp is below tmax
output: ptrms_included, checks_included | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/SPD/lib/lib_ptrm_statistics.py#L243-L261 |
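The tmax filtering in get_segments, illustrated on hypothetical [temp, dec, inc, moment] rows:

ptrms  = [[373., 10., 45., 1e-5], [473., 11., 44., 2e-5], [573., 12., 43., 3e-5]]
checks = [[373., 10., 45., 1.1e-5], [573., 12., 43., 2.9e-5]]
tmax = 500.
ptrms_included  = [p for p in ptrms  if p[0] <= tmax]   # keeps the 373 and 473 steps
checks_included = [c for c in checks if c[0] <= tmax]   # keeps only the 373 check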
PmagPy/PmagPy | pmagpy/Fit.py | Fit.select | def select(self):
"""
        Makes this fit the selected fit on the GUI that is its parent
(Note: may be moved into GUI soon)
"""
if self.GUI==None: return
self.GUI.current_fit = self
if self.tmax != None and self.tmin != None:
self.GUI.update_bounds_boxes()
if self.PCA_type != None:
self.GUI.update_PCA_box()
try: self.GUI.zijplot
except AttributeError: self.GUI.draw_figure(self.GUI.s)
self.GUI.fit_box.SetStringSelection(self.name)
self.GUI.get_new_PCA_parameters(-1) | python | def select(self):
"""
        Makes this fit the selected fit on the GUI that is its parent
(Note: may be moved into GUI soon)
"""
if self.GUI==None: return
self.GUI.current_fit = self
if self.tmax != None and self.tmin != None:
self.GUI.update_bounds_boxes()
if self.PCA_type != None:
self.GUI.update_PCA_box()
try: self.GUI.zijplot
except AttributeError: self.GUI.draw_figure(self.GUI.s)
self.GUI.fit_box.SetStringSelection(self.name)
    self.GUI.get_new_PCA_parameters(-1) | Makes this fit the selected fit on the GUI that is its parent
(Note: may be moved into GUI soon) | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/Fit.py#L58-L72 |
PmagPy/PmagPy | pmagpy/Fit.py | Fit.get | def get(self,coordinate_system):
"""
        Return the pmagpy parameters dictionary associated with this fit and the given
coordinate system
        @param: coordinate_system -> the coordinate system whose parameters to return
"""
if coordinate_system == 'DA-DIR' or coordinate_system == 'specimen':
return self.pars
elif coordinate_system == 'DA-DIR-GEO' or coordinate_system == 'geographic':
return self.geopars
elif coordinate_system == 'DA-DIR-TILT' or coordinate_system == 'tilt-corrected':
return self.tiltpars
else:
print("-E- no such parameters to fetch for " + coordinate_system + " in fit: " + self.name)
return None | python | def get(self,coordinate_system):
"""
        Return the pmagpy parameters dictionary associated with this fit and the given
coordinate system
        @param: coordinate_system -> the coordinate system whose parameters to return
"""
if coordinate_system == 'DA-DIR' or coordinate_system == 'specimen':
return self.pars
elif coordinate_system == 'DA-DIR-GEO' or coordinate_system == 'geographic':
return self.geopars
elif coordinate_system == 'DA-DIR-TILT' or coordinate_system == 'tilt-corrected':
return self.tiltpars
else:
print("-E- no such parameters to fetch for " + coordinate_system + " in fit: " + self.name)
            return None | Return the pmagpy parameters dictionary associated with this fit and the given
coordinate system
        @param: coordinate_system -> the coordinate system whose parameters to return | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/Fit.py#L74-L88 |
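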
PmagPy/PmagPy | pmagpy/Fit.py | Fit.put | def put(self,specimen,coordinate_system,new_pars):
"""
Given a coordinate system and a new parameters dictionary that follows pmagpy
        convention given by the pmag.py/domean function, it alters this fit's bounds and
parameters such that it matches the new data.
@param: specimen -> None if fit is for a site or a sample or a valid specimen from self.GUI
@param: coordinate_system -> the coordinate system to alter
        @param: new_pars -> the new parameters to change your fit to
@alters: tmin, tmax, pars, geopars, tiltpars, PCA_type
"""
if specimen != None:
if type(new_pars)==dict:
if 'er_specimen_name' not in list(new_pars.keys()): new_pars['er_specimen_name'] = specimen
if 'specimen_comp_name' not in list(new_pars.keys()): new_pars['specimen_comp_name'] = self.name
if type(new_pars) != dict or 'measurement_step_min' not in list(new_pars.keys()) or 'measurement_step_max' not in list(new_pars.keys()) or 'calculation_type' not in list(new_pars.keys()):
print("-E- invalid parameters cannot assign to fit %s for specimen %s - was given:\n%s"%(self.name,specimen,str(new_pars)))
return self.get(coordinate_system)
self.tmin = new_pars['measurement_step_min']
self.tmax = new_pars['measurement_step_max']
self.PCA_type = new_pars['calculation_type']
if self.GUI!=None:
steps = self.GUI.Data[specimen]['zijdblock_steps']
tl = [self.tmin,self.tmax]
for i,t in enumerate(tl):
if str(t) in steps: tl[i] = str(t)
elif str(int(t)) in steps: tl[i] = str(int(t))
elif "%.1fmT"%t in steps: tl[i] = "%.1fmT"%t
elif "%.0fC"%t in steps: tl[i] = "%.0fC"%t
else:
print("-E- Step " + str(tl[i]) + " does not exsist (func: Fit.put)")
tl[i] = str(t)
self.tmin,self.tmax = tl
elif meas_data != None:
steps = meas_data[specimen]['zijdblock_steps']
tl = [self.tmin,self.tmax]
for i,t in enumerate(tl):
if str(t) in steps: tl[i] = str(t)
elif str(int(t)) in steps: tl[i] = str(int(t))
elif "%.1fmT"%t in steps: tl[i] = "%.1fmT"%t
elif "%.0fC"%t in steps: tl[i] = "%.0fC"%t
else:
print("-E- Step " + str(tl[i]) + " does not exsist (func: Fit.put)")
tl[i] = str(t)
self.tmin,self.tmax = tl
else: self.tmin,self.tmax = list(map(str, tl))
if coordinate_system == 'DA-DIR' or coordinate_system == 'specimen':
self.pars = new_pars
elif coordinate_system == 'DA-DIR-GEO' or coordinate_system == 'geographic':
self.geopars = new_pars
elif coordinate_system == 'DA-DIR-TILT' or coordinate_system == 'tilt-corrected':
self.tiltpars = new_pars
else:
print('-E- no such coordinate system could not assign those parameters to fit') | python | def put(self,specimen,coordinate_system,new_pars):
"""
Given a coordinate system and a new parameters dictionary that follows pmagpy
        convention given by the pmag.py/domean function, it alters this fit's bounds and
parameters such that it matches the new data.
@param: specimen -> None if fit is for a site or a sample or a valid specimen from self.GUI
@param: coordinate_system -> the coordinate system to alter
        @param: new_pars -> the new parameters to change your fit to
@alters: tmin, tmax, pars, geopars, tiltpars, PCA_type
"""
if specimen != None:
if type(new_pars)==dict:
if 'er_specimen_name' not in list(new_pars.keys()): new_pars['er_specimen_name'] = specimen
if 'specimen_comp_name' not in list(new_pars.keys()): new_pars['specimen_comp_name'] = self.name
if type(new_pars) != dict or 'measurement_step_min' not in list(new_pars.keys()) or 'measurement_step_max' not in list(new_pars.keys()) or 'calculation_type' not in list(new_pars.keys()):
print("-E- invalid parameters cannot assign to fit %s for specimen %s - was given:\n%s"%(self.name,specimen,str(new_pars)))
return self.get(coordinate_system)
self.tmin = new_pars['measurement_step_min']
self.tmax = new_pars['measurement_step_max']
self.PCA_type = new_pars['calculation_type']
if self.GUI!=None:
steps = self.GUI.Data[specimen]['zijdblock_steps']
tl = [self.tmin,self.tmax]
for i,t in enumerate(tl):
if str(t) in steps: tl[i] = str(t)
elif str(int(t)) in steps: tl[i] = str(int(t))
elif "%.1fmT"%t in steps: tl[i] = "%.1fmT"%t
elif "%.0fC"%t in steps: tl[i] = "%.0fC"%t
else:
print("-E- Step " + str(tl[i]) + " does not exsist (func: Fit.put)")
tl[i] = str(t)
self.tmin,self.tmax = tl
elif meas_data != None:
steps = meas_data[specimen]['zijdblock_steps']
tl = [self.tmin,self.tmax]
for i,t in enumerate(tl):
if str(t) in steps: tl[i] = str(t)
elif str(int(t)) in steps: tl[i] = str(int(t))
elif "%.1fmT"%t in steps: tl[i] = "%.1fmT"%t
elif "%.0fC"%t in steps: tl[i] = "%.0fC"%t
else:
print("-E- Step " + str(tl[i]) + " does not exsist (func: Fit.put)")
tl[i] = str(t)
self.tmin,self.tmax = tl
else: self.tmin,self.tmax = list(map(str, tl))
if coordinate_system == 'DA-DIR' or coordinate_system == 'specimen':
self.pars = new_pars
elif coordinate_system == 'DA-DIR-GEO' or coordinate_system == 'geographic':
self.geopars = new_pars
elif coordinate_system == 'DA-DIR-TILT' or coordinate_system == 'tilt-corrected':
self.tiltpars = new_pars
else:
print('-E- no such coordinate system could not assign those parameters to fit') | Given a coordinate system and a new parameters dictionary that follows pmagpy
        convention given by the pmag.py/domean function, it alters this fit's bounds and
parameters such that it matches the new data.
@param: specimen -> None if fit is for a site or a sample or a valid specimen from self.GUI
@param: coordinate_system -> the coordinate system to alter
        @param: new_pars -> the new parameters to change your fit to
@alters: tmin, tmax, pars, geopars, tiltpars, PCA_type | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/Fit.py#L90-L146 |
PmagPy/PmagPy | pmagpy/Fit.py | Fit.has_values | def has_values(self, name, tmin, tmax):
"""
A basic fit equality checker compares name and bounds of 2 fits
@param: name -> name of the other fit
@param: tmin -> lower bound of the other fit
@param: tmax -> upper bound of the other fit
        @return: boolean comparing 2 fits
"""
return str(self.name) == str(name) and str(self.tmin) == str(tmin) and str(self.tmax) == str(tmax) | python | def has_values(self, name, tmin, tmax):
"""
A basic fit equality checker compares name and bounds of 2 fits
@param: name -> name of the other fit
@param: tmin -> lower bound of the other fit
@param: tmax -> upper bound of the other fit
        @return: boolean comparing 2 fits
"""
return str(self.name) == str(name) and str(self.tmin) == str(tmin) and str(self.tmax) == str(tmax) | A basic fit equality checker compares name and bounds of 2 fits
@param: name -> name of the other fit
@param: tmin -> lower bound of the other fit
@param: tmax -> upper bound of the other fit
@return: boolean comaparing 2 fits | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/pmagpy/Fit.py#L160-L168 |
PmagPy/PmagPy | SPD/lib/lib_tail_check_statistics.py | get_n_tail | def get_n_tail(tmax, tail_temps):
"""determines number of included tail checks in best fit segment"""
#print "tail_temps: {0}, tmax: {0}".format(tail_temps, tmax)
t_index = 0
adj_tmax = 0
if tmax < tail_temps[0]:
return 0
try:
t_index = list(tail_temps).index(tmax)
except: # finds correct tmax if there was no tail check performed at tmax
for temp in tail_temps:
if temp <= tmax:
adj_tmax = temp
t_index = list(tail_temps).index(adj_tmax)
incl_temps = tail_temps[0:t_index+1] # b/c not inclusive
return len(incl_temps) | python | def get_n_tail(tmax, tail_temps):
"""determines number of included tail checks in best fit segment"""
#print "tail_temps: {0}, tmax: {0}".format(tail_temps, tmax)
t_index = 0
adj_tmax = 0
if tmax < tail_temps[0]:
return 0
try:
t_index = list(tail_temps).index(tmax)
except: # finds correct tmax if there was no tail check performed at tmax
for temp in tail_temps:
if temp <= tmax:
adj_tmax = temp
t_index = list(tail_temps).index(adj_tmax)
incl_temps = tail_temps[0:t_index+1] # b/c not inclusive
return len(incl_temps) | determines number of included tail checks in best fit segment | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/SPD/lib/lib_tail_check_statistics.py#L7-L22 |
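The fallback branch of get_n_tail (no tail check run exactly at tmax) amounts to walking up to the highest tail-check temperature at or below tmax; with hypothetical temperatures:

tail_temps = [373., 473., 573., 673.]
tmax = 600.                              # no tail check at exactly 600
adj_tmax = 0
for temp in tail_temps:
    if temp <= tmax:
        adj_tmax = temp                  # ends up as 573
t_index = tail_temps.index(adj_tmax)
n_tail = len(tail_temps[:t_index + 1])   # 3 tail checks fall inside the segment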
PmagPy/PmagPy | SPD/lib/lib_tail_check_statistics.py | get_max_tail_check | def get_max_tail_check(y_Arai, y_tail, t_Arai, tail_temps, n_tail):
"""
input: y_Arai, y_tail, t_Arai, tail_temps, n_tail
output: max_check, diffs
"""
if not n_tail:
return float('nan'), []
tail_compare = []
y_Arai_compare = []
for temp in tail_temps[:n_tail]:
tail_index = list(tail_temps).index(temp)
tail_check = y_tail[tail_index]
tail_compare.append(tail_check)
arai_index = list(t_Arai).index(temp)
nrm_orig = y_Arai[arai_index]
y_Arai_compare.append(nrm_orig)
diffs = numpy.array(y_Arai_compare) - numpy.array(tail_compare)
abs_diffs = abs(diffs)
max_check = max(abs_diffs)
return max_check, diffs | python | def get_max_tail_check(y_Arai, y_tail, t_Arai, tail_temps, n_tail):
"""
input: y_Arai, y_tail, t_Arai, tail_temps, n_tail
output: max_check, diffs
"""
if not n_tail:
return float('nan'), []
tail_compare = []
y_Arai_compare = []
for temp in tail_temps[:n_tail]:
tail_index = list(tail_temps).index(temp)
tail_check = y_tail[tail_index]
tail_compare.append(tail_check)
arai_index = list(t_Arai).index(temp)
nrm_orig = y_Arai[arai_index]
y_Arai_compare.append(nrm_orig)
diffs = numpy.array(y_Arai_compare) - numpy.array(tail_compare)
abs_diffs = abs(diffs)
max_check = max(abs_diffs)
return max_check, diffs | input: y_Arai, y_tail, t_Arai, tail_temps, n_tail
output: max_check, diffs | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/SPD/lib/lib_tail_check_statistics.py#L25-L44 |
PmagPy/PmagPy | SPD/lib/lib_tail_check_statistics.py | get_DRAT_tail | def get_DRAT_tail(max_check, L):
"""
input: tail_check_max, best fit line length
output: DRAT_tail
"""
if max_check == 0:
return float('nan')
DRAT_tail = (old_div(max_check, L)) * 100.
return DRAT_tail | python | def get_DRAT_tail(max_check, L):
"""
input: tail_check_max, best fit line length
output: DRAT_tail
"""
if max_check == 0:
return float('nan')
DRAT_tail = (old_div(max_check, L)) * 100.
return DRAT_tail | input: tail_check_max, best fit line length
output: DRAT_tail | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/SPD/lib/lib_tail_check_statistics.py#L47-L55 |
PmagPy/PmagPy | SPD/lib/lib_tail_check_statistics.py | get_delta_TR | def get_delta_TR(tail_check_max, y_int):
"""
input: tail_check_max, y_intercept
output: delta_TR
"""
if tail_check_max == 0 or numpy.isnan(tail_check_max):
return float('nan')
delta_TR = (old_div(tail_check_max, abs(y_int))) * 100.
return delta_TR | python | def get_delta_TR(tail_check_max, y_int):
"""
input: tail_check_max, y_intercept
output: delta_TR
"""
if tail_check_max == 0 or numpy.isnan(tail_check_max):
return float('nan')
delta_TR = (old_div(tail_check_max, abs(y_int))) * 100.
return delta_TR | input: tail_check_max, y_intercept
output: delta_TR | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/SPD/lib/lib_tail_check_statistics.py#L58-L66 |
PmagPy/PmagPy | SPD/lib/lib_tail_check_statistics.py | get_MD_VDS | def get_MD_VDS(tail_check_max, vds):
"""
input: tail_check_max, vector difference sum
output: MD_VDS
"""
if tail_check_max == 0 or numpy.isnan(tail_check_max):
return float('nan')
MD_VDS = (old_div(tail_check_max, vds)) * 100
return MD_VDS | python | def get_MD_VDS(tail_check_max, vds):
"""
input: tail_check_max, vector difference sum
output: MD_VDS
"""
if tail_check_max == 0 or numpy.isnan(tail_check_max):
return float('nan')
MD_VDS = (old_div(tail_check_max, vds)) * 100
return MD_VDS | input: tail_check_max, vector difference sum
output: MD_VDS | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/SPD/lib/lib_tail_check_statistics.py#L69-L77 |
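The three tail-check statistics above differ only in the normalizer (best-fit line length, y-intercept, or vector difference sum); with hypothetical values:

tail_check_max = 0.04              # hypothetical largest NRM tail-check difference
L, y_int, vds = 1.0, 0.95, 1.2     # hypothetical line length, y-intercept, vector difference sum
DRAT_tail = (tail_check_max / L) * 100.           # 4.0
delta_TR  = (tail_check_max / abs(y_int)) * 100.  # about 4.2
MD_VDS    = (tail_check_max / vds) * 100.         # about 3.3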
PmagPy/PmagPy | programs/dir_redo.py | main | def main():
"""
NAME
dir_redo.py
DESCRIPTION
converts the Cogne DIR format to PmagPy redo file
SYNTAX
dir_redo.py [-h] [command line options]
OPTIONS
-h: prints help message and quits
-f FILE: specify input file
-F FILE: specify output file, default is 'zeq_redo'
"""
dir_path='.'
zfile='zeq_redo'
if '-WD' in sys.argv:
ind=sys.argv.index('-WD')
dir_path=sys.argv[ind+1]
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-f' in sys.argv:
ind=sys.argv.index('-f')
inspec=sys.argv[ind+1]
if '-F' in sys.argv:
ind=sys.argv.index('-F')
zfile=sys.argv[ind+1]
inspec=dir_path+"/"+inspec
zfile=dir_path+"/"+zfile
zredo=open(zfile,"w")
#
# read in DIR file
#
specs=[]
prior_spec_data=open(inspec,'r').readlines()
for line in prior_spec_data:
line=line.replace("Dir"," Dir")
line=line.replace("OKir"," OKir")
line=line.replace("Fish"," Fish")
line=line.replace("Man"," Man")
line=line.replace("GC"," GC")
line=line.replace("-T"," - T")
line=line.replace("-M"," - M")
rec=line.split()
if len(rec)<2:
sys.exit()
if rec[1]=='Dir' or rec[1]=='GC': # skip all the other stuff
spec=rec[0]
specs.append(spec)
comp_name=string.uppercase[specs.count(spec)-1] # assign component names
calculation_type="DE-FM"
if rec[1]=='Dir' and rec[2]=="Kir": calculation_type="DE-BFL" # assume default calculation type is best-fit line
if rec[1]=='Dir' and rec[2]=="OKir": calculation_type="DE-BFL-A" # anchored best-fit line
if rec[1]=='Dir' and rec[2]=="Fish": calculation_type="DE-FM" # fisher mean
if rec[1]=='GC' : calculation_type="DE-BFP" # best-fit plane
min,max=rec[3],rec[5]
beg,end="",""
if min=="NRM": beg=0
if min[0]=='M':
beg=float(min[1:])*1e-3 # convert to T from mT
elif min[0]=='T':
                beg=float(min[1:])+273 # convert from C to kelvin
if max[0]=='M':
end=float(max[1:])*1e-3 # convert to T from mT
elif max[0]=='T':
                end=float(max[1:])+273 # convert from C to kelvin
if beg==0:beg=273
outstring='%s %s %s %s %s \n'%(spec,calculation_type,beg,end,comp_name)
zredo.write(outstring) | python | def main():
"""
NAME
dir_redo.py
DESCRIPTION
converts the Cogne DIR format to PmagPy redo file
SYNTAX
dir_redo.py [-h] [command line options]
OPTIONS
-h: prints help message and quits
-f FILE: specify input file
-F FILE: specify output file, default is 'zeq_redo'
"""
dir_path='.'
zfile='zeq_redo'
if '-WD' in sys.argv:
ind=sys.argv.index('-WD')
dir_path=sys.argv[ind+1]
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-f' in sys.argv:
ind=sys.argv.index('-f')
inspec=sys.argv[ind+1]
if '-F' in sys.argv:
ind=sys.argv.index('-F')
zfile=sys.argv[ind+1]
inspec=dir_path+"/"+inspec
zfile=dir_path+"/"+zfile
zredo=open(zfile,"w")
#
# read in DIR file
#
specs=[]
prior_spec_data=open(inspec,'r').readlines()
for line in prior_spec_data:
line=line.replace("Dir"," Dir")
line=line.replace("OKir"," OKir")
line=line.replace("Fish"," Fish")
line=line.replace("Man"," Man")
line=line.replace("GC"," GC")
line=line.replace("-T"," - T")
line=line.replace("-M"," - M")
rec=line.split()
if len(rec)<2:
sys.exit()
if rec[1]=='Dir' or rec[1]=='GC': # skip all the other stuff
spec=rec[0]
specs.append(spec)
comp_name=string.uppercase[specs.count(spec)-1] # assign component names
calculation_type="DE-FM"
if rec[1]=='Dir' and rec[2]=="Kir": calculation_type="DE-BFL" # assume default calculation type is best-fit line
if rec[1]=='Dir' and rec[2]=="OKir": calculation_type="DE-BFL-A" # anchored best-fit line
if rec[1]=='Dir' and rec[2]=="Fish": calculation_type="DE-FM" # fisher mean
if rec[1]=='GC' : calculation_type="DE-BFP" # best-fit plane
min,max=rec[3],rec[5]
beg,end="",""
if min=="NRM": beg=0
if min[0]=='M':
beg=float(min[1:])*1e-3 # convert to T from mT
elif min[0]=='T':
            beg=float(min[1:])+273 # convert from C to kelvin
if max[0]=='M':
end=float(max[1:])*1e-3 # convert to T from mT
elif max[0]=='T':
            end=float(max[1:])+273 # convert from C to kelvin
if beg==0:beg=273
outstring='%s %s %s %s %s \n'%(spec,calculation_type,beg,end,comp_name)
zredo.write(outstring) | NAME
dir_redo.py
DESCRIPTION
converts the Cogne DIR format to PmagPy redo file
SYNTAX
dir_redo.py [-h] [command line options]
OPTIONS
-h: prints help message and quits
-f FILE: specify input file
-F FILE: specify output file, default is 'zeq_redo' | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/dir_redo.py#L5-L76 |
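The step-label conversions in dir_redo.py (millitesla to tesla, degrees C to kelvin) are easy to check in isolation; the helper below is a hypothetical sketch mirroring those conversions, not part of the script itself:

def parse_step_sketch(label):
    # hypothetical helper; mirrors the M.../T.../NRM handling above
    if label == "NRM":
        return 273.                      # NRM treated as room temperature in kelvin
    if label[0] == 'M':
        return float(label[1:]) * 1e-3   # millitesla -> tesla
    if label[0] == 'T':
        return float(label[1:]) + 273.   # degrees C -> kelvin
    return None

print(parse_step_sketch("M20"))    # 0.02 (tesla)
print(parse_step_sketch("T550"))   # 823.0 (kelvin)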
PmagPy/PmagPy | programs/pmag_gui.py | MagMainFrame.set_dm | def set_dm(self, num):
"""
Make GUI changes based on data model num.
Get info from WD in appropriate format.
"""
#enable or disable self.btn1a
if self.data_model_num == 3:
self.btn1a.Enable()
else:
self.btn1a.Disable()
#
# set pmag_gui_dialogs
global pmag_gui_dialogs
if self.data_model_num == 2:
pmag_gui_dialogs = pgd2
wx.CallAfter(self.get_wd_data2)
elif self.data_model_num == 3:
pmag_gui_dialogs = pgd3
wx.CallAfter(self.get_wd_data)
# do / re-do menubar
menubar = pmag_gui_menu.MagICMenu(self, data_model_num=self.data_model_num)
self.SetMenuBar(menubar)
self.menubar = menubar | python | def set_dm(self, num):
"""
Make GUI changes based on data model num.
Get info from WD in appropriate format.
"""
#enable or disable self.btn1a
if self.data_model_num == 3:
self.btn1a.Enable()
else:
self.btn1a.Disable()
#
# set pmag_gui_dialogs
global pmag_gui_dialogs
if self.data_model_num == 2:
pmag_gui_dialogs = pgd2
wx.CallAfter(self.get_wd_data2)
elif self.data_model_num == 3:
pmag_gui_dialogs = pgd3
wx.CallAfter(self.get_wd_data)
# do / re-do menubar
menubar = pmag_gui_menu.MagICMenu(self, data_model_num=self.data_model_num)
self.SetMenuBar(menubar)
self.menubar = menubar | Make GUI changes based on data model num.
Get info from WD in appropriate format. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/pmag_gui.py#L105-L128 |
PmagPy/PmagPy | programs/pmag_gui.py | MagMainFrame.get_wd_data | def get_wd_data(self):
"""
Show dialog to get user input for which directory
to set as working directory.
Called by self.get_dm_and_wd
"""
wait = wx.BusyInfo('Reading in data from current working directory, please wait...')
#wx.Yield()
print('-I- Read in any available data from working directory')
self.contribution = cb.Contribution(self.WD, dmodel=self.data_model)
del wait | python | def get_wd_data(self):
"""
Show dialog to get user input for which directory
to set as working directory.
Called by self.get_dm_and_wd
"""
wait = wx.BusyInfo('Reading in data from current working directory, please wait...')
#wx.Yield()
print('-I- Read in any available data from working directory')
self.contribution = cb.Contribution(self.WD, dmodel=self.data_model)
del wait | Show dialog to get user input for which directory
to set as working directory.
Called by self.get_dm_and_wd | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/pmag_gui.py#L130-L140 |
PmagPy/PmagPy | programs/pmag_gui.py | MagMainFrame.get_wd_data2 | def get_wd_data2(self):
"""
Get 2.5 data from self.WD and put it into
ErMagicBuilder object.
Called by get_dm_and_wd
"""
wait = wx.BusyInfo('Reading in data from current working directory, please wait...')
#wx.Yield()
print('-I- Read in any available data from working directory (data model 2)')
self.er_magic = builder.ErMagicBuilder(self.WD,
data_model=self.data_model)
del wait | python | def get_wd_data2(self):
"""
Get 2.5 data from self.WD and put it into
ErMagicBuilder object.
Called by get_dm_and_wd
"""
wait = wx.BusyInfo('Reading in data from current working directory, please wait...')
#wx.Yield()
print('-I- Read in any available data from working directory (data model 2)')
self.er_magic = builder.ErMagicBuilder(self.WD,
data_model=self.data_model)
del wait | Get 2.5 data from self.WD and put it into
ErMagicBuilder object.
Called by get_dm_and_wd | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/pmag_gui.py#L142-L154 |
PmagPy/PmagPy | programs/pmag_gui.py | MagMainFrame.InitUI | def InitUI(self):
"""
Build the mainframe
"""
menubar = pmag_gui_menu.MagICMenu(self, data_model_num=self.data_model_num)
self.SetMenuBar(menubar)
#pnl = self.panel
#---sizer logo ----
#start_image = wx.Image("/Users/ronshaar/PmagPy/images/logo2.png")
#start_image = wx.Image("/Users/Python/simple_examples/001.png")
#start_image.Rescale(start_image.GetWidth(), start_image.GetHeight())
#image = wx.BitmapFromImage(start_image)
#self.logo = wx.StaticBitmap(self.panel, -1, image)
#---sizer 0 ----
bSizer0 = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "Choose MagIC project directory"), wx.HORIZONTAL)
self.dir_path = wx.TextCtrl(self.panel, id=-1, size=(600,25), style=wx.TE_READONLY)
self.change_dir_button = buttons.GenButton(self.panel, id=-1, label="change directory",size=(-1, -1))
self.change_dir_button.SetBackgroundColour("#F8F8FF")
self.change_dir_button.InitColours()
self.Bind(wx.EVT_BUTTON, self.on_change_dir_button, self.change_dir_button)
bSizer0.Add(self.change_dir_button, wx.ALIGN_LEFT)
bSizer0.AddSpacer(40)
bSizer0.Add(self.dir_path,wx.ALIGN_CENTER_VERTICAL)
# not fully implemented method for saving/reverting WD
# last saved: []
#bSizer0_1 = wx.StaticBoxSizer( wx.StaticBox( self.panel, wx.ID_ANY, "Save MagIC project directory in current state or revert to last-saved state" ), wx.HORIZONTAL )
#saved_label = wx.StaticText(self.panel, -1, "Last saved:", (20, 120))
#self.last_saved_time = wx.TextCtrl(self.panel, id=-1, size=(100,25), style=wx.TE_READONLY)
#now = datetime.datetime.now()
#now_string = "{}:{}:{}".format(now.hour, now.minute, now.second)
#self.last_saved_time.write(now_string)
#self.save_dir_button = buttons.GenButton(self.panel, id=-1, label = "save dir", size=(-1, -1))
#self.revert_dir_button = buttons.GenButton(self.panel, id=-1, label = "revert dir", size=(-1, -1))
#self.Bind(wx.EVT_BUTTON, self.on_revert_dir_button, self.revert_dir_button)
#self.Bind(wx.EVT_BUTTON, self.on_save_dir_button, self.save_dir_button)
#bSizer0_1.Add(saved_label, flag=wx.RIGHT, border=10)
#bSizer0_1.Add(self.last_saved_time, flag=wx.RIGHT, border=10)
#bSizer0_1.Add(self.save_dir_button,flag=wx.ALIGN_LEFT|wx.RIGHT, border=10)
#bSizer0_1.Add(self.revert_dir_button,wx.ALIGN_LEFT)
#
#---sizer 1 ----
bSizer1 = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "Import data to working directory"), wx.HORIZONTAL)
text = "1. Convert magnetometer files to MagIC format"
self.btn1 = buttons.GenButton(self.panel, id=-1, label=text,
size=(450, 50), name='step 1')
self.btn1.SetBackgroundColour("#FDC68A")
self.btn1.InitColours()
self.Bind(wx.EVT_BUTTON, self.on_convert_file, self.btn1)
text = "2. (optional) Calculate geographic/tilt-corrected directions"
self.btn2 = buttons.GenButton(self.panel, id=-1, label=text, size=(450, 50), name='step 2')
self.btn2.SetBackgroundColour("#FDC68A")
self.btn2.InitColours()
self.Bind(wx.EVT_BUTTON, self.on_btn_orientation, self.btn2)
text = "3. (optional) Add MagIC metadata for uploading data to MagIC "
self.btn3 = buttons.GenButton(self.panel, id=-1, label=text, size=(450, 50), name='step 3')
self.btn3.SetBackgroundColour("#FDC68A")
self.btn3.InitColours()
self.Bind(wx.EVT_BUTTON, self.on_btn_metadata, self.btn3)
text = "Unpack txt file downloaded from MagIC"
self.btn4 = buttons.GenButton(self.panel, id=-1, label=text, size=(330, 50))
self.btn4.SetBackgroundColour("#FDC68A")
self.btn4.InitColours()
self.Bind(wx.EVT_BUTTON, self.on_btn_unpack, self.btn4)
text = "Convert directory to 3.0. format (legacy data only)"
self.btn1a = buttons.GenButton(self.panel, id=-1, label=text,
size=(330, 50), name='step 1a')
self.btn1a.SetBackgroundColour("#FDC68A")
self.btn1a.InitColours()
self.Bind(wx.EVT_BUTTON, self.on_btn_convert_3, self.btn1a)
#str = "OR"
OR = wx.StaticText(self.panel, -1, "or", (20, 120))
font = wx.Font(18, wx.SWISS, wx.NORMAL, wx.NORMAL)
OR.SetFont(font)
#bSizer0.Add(self.panel,self.btn1,wx.ALIGN_TOP)
bSizer1_1 = wx.BoxSizer(wx.VERTICAL)
bSizer1_1.AddSpacer(20)
bSizer1_1.Add(self.btn1, wx.ALIGN_TOP)
bSizer1_1.AddSpacer(20)
bSizer1_1.Add(self.btn2, wx.ALIGN_TOP)
bSizer1_1.AddSpacer(20)
bSizer1_1.Add(self.btn3, wx.ALIGN_TOP)
bSizer1_1.AddSpacer(20)
bSizer1.Add(bSizer1_1, wx.ALIGN_CENTER, wx.EXPAND)
bSizer1.AddSpacer(20)
bSizer1.Add(OR, 0, wx.ALIGN_CENTER, 0)
bSizer1.AddSpacer(20)
bSizer1_2 = wx.BoxSizer(wx.VERTICAL)
spacing = 60 #if self.data_model_num == 3 else 90
bSizer1_2.AddSpacer(spacing)
bSizer1_2.Add(self.btn4, 0, wx.ALIGN_CENTER, 0)
bSizer1_2.AddSpacer(20)
bSizer1_2.Add(self.btn1a, 0, wx.ALIGN_CENTER, 0)
bSizer1_2.AddSpacer(20)
bSizer1.Add(bSizer1_2)
bSizer1.AddSpacer(20)
#---sizer 2 ----
bSizer2 = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "Analysis and plots" ), wx.HORIZONTAL)
text = "Demag GUI"
self.btn_demag_gui = buttons.GenButton(self.panel, id=-1, label=text, size=(300, 50), name='demag gui')
self.btn_demag_gui.SetBackgroundColour("#6ECFF6")
self.btn_demag_gui.InitColours()
self.Bind(wx.EVT_BUTTON, self.on_btn_demag_gui, self.btn_demag_gui)
text = "Thellier GUI"
self.btn_thellier_gui = buttons.GenButton(self.panel, id=-1, label=text, size=(300, 50), name='thellier gui')
self.btn_thellier_gui.SetBackgroundColour("#6ECFF6")
self.btn_thellier_gui.InitColours()
self.Bind(wx.EVT_BUTTON, self.on_btn_thellier_gui, self.btn_thellier_gui)
bSizer2.AddSpacer(20)
bSizer2.Add(self.btn_demag_gui, 0, wx.ALIGN_CENTER, 0)
bSizer2.AddSpacer(20)
bSizer2.Add(self.btn_thellier_gui, 0, wx.ALIGN_CENTER, 0)
bSizer2.AddSpacer(20)
#---sizer 3 ----
bSizer3 = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "Create file for upload to MagIC database"), wx.HORIZONTAL)
text = "Create MagIC txt file for upload"
self.btn_upload = buttons.GenButton(self.panel, id=-1, label=text, size=(300, 50))
self.btn_upload.SetBackgroundColour("#C4DF9B")
self.btn_upload.InitColours()
bSizer3.AddSpacer(20)
bSizer3.Add(self.btn_upload, 0, wx.ALIGN_CENTER, 0)
bSizer3.AddSpacer(20)
self.Bind(wx.EVT_BUTTON, self.on_btn_upload, self.btn_upload)
        #---arrange sizers ----
hbox = wx.BoxSizer(wx.HORIZONTAL)
vbox = wx.BoxSizer(wx.VERTICAL)
vbox.AddSpacer(5)
#vbox.Add(self.logo,0,wx.ALIGN_CENTER,0)
vbox.AddSpacer(5)
vbox.Add(bSizer0, 0, wx.ALIGN_CENTER, 0)
vbox.AddSpacer(10)
#vbox.Add(bSizer0_1, 0, wx.ALIGN_CENTER, 0)
#vbox.AddSpacer(10)
vbox.Add(bSizer1, 0, wx.ALIGN_CENTER, 0)
vbox.AddSpacer(10)
vbox.Add(bSizer2, 0, wx.ALIGN_CENTER, 0)
vbox.AddSpacer(10)
vbox.Add(bSizer3, 0, wx.ALIGN_CENTER, 0)
vbox.AddSpacer(10)
hbox.AddSpacer(10)
hbox.Add(vbox, 0, wx.ALIGN_CENTER, 0)
hbox.AddSpacer(5)
self.panel.SetSizer(hbox)
hbox.Fit(self) | python | def InitUI(self):
"""
Build the mainframe
"""
menubar = pmag_gui_menu.MagICMenu(self, data_model_num=self.data_model_num)
self.SetMenuBar(menubar)
#pnl = self.panel
#---sizer logo ----
#start_image = wx.Image("/Users/ronshaar/PmagPy/images/logo2.png")
#start_image = wx.Image("/Users/Python/simple_examples/001.png")
#start_image.Rescale(start_image.GetWidth(), start_image.GetHeight())
#image = wx.BitmapFromImage(start_image)
#self.logo = wx.StaticBitmap(self.panel, -1, image)
#---sizer 0 ----
bSizer0 = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "Choose MagIC project directory"), wx.HORIZONTAL)
self.dir_path = wx.TextCtrl(self.panel, id=-1, size=(600,25), style=wx.TE_READONLY)
self.change_dir_button = buttons.GenButton(self.panel, id=-1, label="change directory",size=(-1, -1))
self.change_dir_button.SetBackgroundColour("#F8F8FF")
self.change_dir_button.InitColours()
self.Bind(wx.EVT_BUTTON, self.on_change_dir_button, self.change_dir_button)
bSizer0.Add(self.change_dir_button, wx.ALIGN_LEFT)
bSizer0.AddSpacer(40)
bSizer0.Add(self.dir_path,wx.ALIGN_CENTER_VERTICAL)
# not fully implemented method for saving/reverting WD
# last saved: []
#bSizer0_1 = wx.StaticBoxSizer( wx.StaticBox( self.panel, wx.ID_ANY, "Save MagIC project directory in current state or revert to last-saved state" ), wx.HORIZONTAL )
#saved_label = wx.StaticText(self.panel, -1, "Last saved:", (20, 120))
#self.last_saved_time = wx.TextCtrl(self.panel, id=-1, size=(100,25), style=wx.TE_READONLY)
#now = datetime.datetime.now()
#now_string = "{}:{}:{}".format(now.hour, now.minute, now.second)
#self.last_saved_time.write(now_string)
#self.save_dir_button = buttons.GenButton(self.panel, id=-1, label = "save dir", size=(-1, -1))
#self.revert_dir_button = buttons.GenButton(self.panel, id=-1, label = "revert dir", size=(-1, -1))
#self.Bind(wx.EVT_BUTTON, self.on_revert_dir_button, self.revert_dir_button)
#self.Bind(wx.EVT_BUTTON, self.on_save_dir_button, self.save_dir_button)
#bSizer0_1.Add(saved_label, flag=wx.RIGHT, border=10)
#bSizer0_1.Add(self.last_saved_time, flag=wx.RIGHT, border=10)
#bSizer0_1.Add(self.save_dir_button,flag=wx.ALIGN_LEFT|wx.RIGHT, border=10)
#bSizer0_1.Add(self.revert_dir_button,wx.ALIGN_LEFT)
#
#---sizer 1 ----
bSizer1 = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "Import data to working directory"), wx.HORIZONTAL)
text = "1. Convert magnetometer files to MagIC format"
self.btn1 = buttons.GenButton(self.panel, id=-1, label=text,
size=(450, 50), name='step 1')
self.btn1.SetBackgroundColour("#FDC68A")
self.btn1.InitColours()
self.Bind(wx.EVT_BUTTON, self.on_convert_file, self.btn1)
text = "2. (optional) Calculate geographic/tilt-corrected directions"
self.btn2 = buttons.GenButton(self.panel, id=-1, label=text, size=(450, 50), name='step 2')
self.btn2.SetBackgroundColour("#FDC68A")
self.btn2.InitColours()
self.Bind(wx.EVT_BUTTON, self.on_btn_orientation, self.btn2)
text = "3. (optional) Add MagIC metadata for uploading data to MagIC "
self.btn3 = buttons.GenButton(self.panel, id=-1, label=text, size=(450, 50), name='step 3')
self.btn3.SetBackgroundColour("#FDC68A")
self.btn3.InitColours()
self.Bind(wx.EVT_BUTTON, self.on_btn_metadata, self.btn3)
text = "Unpack txt file downloaded from MagIC"
self.btn4 = buttons.GenButton(self.panel, id=-1, label=text, size=(330, 50))
self.btn4.SetBackgroundColour("#FDC68A")
self.btn4.InitColours()
self.Bind(wx.EVT_BUTTON, self.on_btn_unpack, self.btn4)
text = "Convert directory to 3.0. format (legacy data only)"
self.btn1a = buttons.GenButton(self.panel, id=-1, label=text,
size=(330, 50), name='step 1a')
self.btn1a.SetBackgroundColour("#FDC68A")
self.btn1a.InitColours()
self.Bind(wx.EVT_BUTTON, self.on_btn_convert_3, self.btn1a)
#str = "OR"
OR = wx.StaticText(self.panel, -1, "or", (20, 120))
font = wx.Font(18, wx.SWISS, wx.NORMAL, wx.NORMAL)
OR.SetFont(font)
#bSizer0.Add(self.panel,self.btn1,wx.ALIGN_TOP)
bSizer1_1 = wx.BoxSizer(wx.VERTICAL)
bSizer1_1.AddSpacer(20)
bSizer1_1.Add(self.btn1, wx.ALIGN_TOP)
bSizer1_1.AddSpacer(20)
bSizer1_1.Add(self.btn2, wx.ALIGN_TOP)
bSizer1_1.AddSpacer(20)
bSizer1_1.Add(self.btn3, wx.ALIGN_TOP)
bSizer1_1.AddSpacer(20)
bSizer1.Add(bSizer1_1, wx.ALIGN_CENTER, wx.EXPAND)
bSizer1.AddSpacer(20)
bSizer1.Add(OR, 0, wx.ALIGN_CENTER, 0)
bSizer1.AddSpacer(20)
bSizer1_2 = wx.BoxSizer(wx.VERTICAL)
spacing = 60 #if self.data_model_num == 3 else 90
bSizer1_2.AddSpacer(spacing)
bSizer1_2.Add(self.btn4, 0, wx.ALIGN_CENTER, 0)
bSizer1_2.AddSpacer(20)
bSizer1_2.Add(self.btn1a, 0, wx.ALIGN_CENTER, 0)
bSizer1_2.AddSpacer(20)
bSizer1.Add(bSizer1_2)
bSizer1.AddSpacer(20)
#---sizer 2 ----
bSizer2 = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "Analysis and plots" ), wx.HORIZONTAL)
text = "Demag GUI"
self.btn_demag_gui = buttons.GenButton(self.panel, id=-1, label=text, size=(300, 50), name='demag gui')
self.btn_demag_gui.SetBackgroundColour("#6ECFF6")
self.btn_demag_gui.InitColours()
self.Bind(wx.EVT_BUTTON, self.on_btn_demag_gui, self.btn_demag_gui)
text = "Thellier GUI"
self.btn_thellier_gui = buttons.GenButton(self.panel, id=-1, label=text, size=(300, 50), name='thellier gui')
self.btn_thellier_gui.SetBackgroundColour("#6ECFF6")
self.btn_thellier_gui.InitColours()
self.Bind(wx.EVT_BUTTON, self.on_btn_thellier_gui, self.btn_thellier_gui)
bSizer2.AddSpacer(20)
bSizer2.Add(self.btn_demag_gui, 0, wx.ALIGN_CENTER, 0)
bSizer2.AddSpacer(20)
bSizer2.Add(self.btn_thellier_gui, 0, wx.ALIGN_CENTER, 0)
bSizer2.AddSpacer(20)
#---sizer 3 ----
bSizer3 = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "Create file for upload to MagIC database"), wx.HORIZONTAL)
text = "Create MagIC txt file for upload"
self.btn_upload = buttons.GenButton(self.panel, id=-1, label=text, size=(300, 50))
self.btn_upload.SetBackgroundColour("#C4DF9B")
self.btn_upload.InitColours()
bSizer3.AddSpacer(20)
bSizer3.Add(self.btn_upload, 0, wx.ALIGN_CENTER, 0)
bSizer3.AddSpacer(20)
self.Bind(wx.EVT_BUTTON, self.on_btn_upload, self.btn_upload)
        #---arrange sizers ----
hbox = wx.BoxSizer(wx.HORIZONTAL)
vbox = wx.BoxSizer(wx.VERTICAL)
vbox.AddSpacer(5)
#vbox.Add(self.logo,0,wx.ALIGN_CENTER,0)
vbox.AddSpacer(5)
vbox.Add(bSizer0, 0, wx.ALIGN_CENTER, 0)
vbox.AddSpacer(10)
#vbox.Add(bSizer0_1, 0, wx.ALIGN_CENTER, 0)
#vbox.AddSpacer(10)
vbox.Add(bSizer1, 0, wx.ALIGN_CENTER, 0)
vbox.AddSpacer(10)
vbox.Add(bSizer2, 0, wx.ALIGN_CENTER, 0)
vbox.AddSpacer(10)
vbox.Add(bSizer3, 0, wx.ALIGN_CENTER, 0)
vbox.AddSpacer(10)
hbox.AddSpacer(10)
hbox.Add(vbox, 0, wx.ALIGN_CENTER, 0)
hbox.AddSpacer(5)
self.panel.SetSizer(hbox)
hbox.Fit(self) | Build the mainframe | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/pmag_gui.py#L156-L333 |
PmagPy/PmagPy | programs/pmag_gui.py | MagMainFrame.get_dir | def get_dir(self):
"""
Choose a working directory dialog.
Called by self.get_dm_and_wd.
"""
if "-WD" in sys.argv and self.FIRST_RUN:
ind = sys.argv.index('-WD')
self.WD = os.path.abspath(sys.argv[ind+1])
os.chdir(self.WD)
self.WD = os.getcwd()
self.dir_path.SetValue(self.WD)
else:
self.on_change_dir_button(None)
#self.WD = os.getcwd()
self.FIRST_RUN = False | python | def get_dir(self):
"""
Choose a working directory dialog.
Called by self.get_dm_and_wd.
"""
if "-WD" in sys.argv and self.FIRST_RUN:
ind = sys.argv.index('-WD')
self.WD = os.path.abspath(sys.argv[ind+1])
os.chdir(self.WD)
self.WD = os.getcwd()
self.dir_path.SetValue(self.WD)
else:
self.on_change_dir_button(None)
#self.WD = os.getcwd()
self.FIRST_RUN = False | Choose a working directory dialog.
Called by self.get_dm_and_wd. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/pmag_gui.py#L337-L352 |
PmagPy/PmagPy | programs/pmag_gui.py | MagMainFrame.on_btn_thellier_gui | def on_btn_thellier_gui(self, event):
"""
Open Thellier GUI
"""
if not self.check_for_meas_file():
return
if not self.check_for_uncombined_files():
return
outstring = "thellier_gui.py -WD %s"%self.WD
print("-I- running python script:\n %s"%(outstring))
if self.data_model_num == 2.5:
thellier_gui.main(self.WD, standalone_app=False, parent=self, DM=self.data_model_num)
else:
# disable and hide Pmag GUI mainframe
self.Disable()
self.Hide()
# show busyinfo
wait = wx.BusyInfo('Compiling required data, please wait...')
wx.SafeYield()
# create custom Thellier GUI closing event and bind it
ThellierGuiExitEvent, EVT_THELLIER_GUI_EXIT = newevent.NewCommandEvent()
self.Bind(EVT_THELLIER_GUI_EXIT, self.on_analysis_gui_exit)
# make and show the Thellier GUI frame
thellier_gui_frame = thellier_gui.Arai_GUI(self.WD, self,
standalone=False,
DM=self.data_model_num,
evt_quit=ThellierGuiExitEvent)
            if not thellier_gui_frame: print("Thellier GUI failed to start, aborting"); del wait; return
thellier_gui_frame.Centre()
thellier_gui_frame.Show()
del wait | python | def on_btn_thellier_gui(self, event):
"""
Open Thellier GUI
"""
if not self.check_for_meas_file():
return
if not self.check_for_uncombined_files():
return
outstring = "thellier_gui.py -WD %s"%self.WD
print("-I- running python script:\n %s"%(outstring))
if self.data_model_num == 2.5:
thellier_gui.main(self.WD, standalone_app=False, parent=self, DM=self.data_model_num)
else:
# disable and hide Pmag GUI mainframe
self.Disable()
self.Hide()
# show busyinfo
wait = wx.BusyInfo('Compiling required data, please wait...')
wx.SafeYield()
# create custom Thellier GUI closing event and bind it
ThellierGuiExitEvent, EVT_THELLIER_GUI_EXIT = newevent.NewCommandEvent()
self.Bind(EVT_THELLIER_GUI_EXIT, self.on_analysis_gui_exit)
# make and show the Thellier GUI frame
thellier_gui_frame = thellier_gui.Arai_GUI(self.WD, self,
standalone=False,
DM=self.data_model_num,
evt_quit=ThellierGuiExitEvent)
            if not thellier_gui_frame: print("Thellier GUI failed to start, aborting"); del wait; return
thellier_gui_frame.Centre()
thellier_gui_frame.Show()
del wait | Open Thellier GUI | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/pmag_gui.py#L441-L471 |
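The custom exit event created here follows the usual wx.lib.newevent pattern: build an event class and binder pair, bind a handler in the parent, and have the child post the event when it quits. A minimal sketch, assuming wxPython is installed (class and handler names are hypothetical):

import wx
from wx.lib.newevent import NewCommandEvent

GuiExitEvent, EVT_GUI_EXIT = NewCommandEvent()   # event class + binder pair

class ParentFrame(wx.Frame):
    def __init__(self):
        super().__init__(None, title="parent")
        self.Bind(EVT_GUI_EXIT, self.on_child_exit)

    def on_child_exit(self, event):
        self.Enable()                    # re-enable and show the parent once the child closes
        self.Show()

# from the child frame, on quit:
#     wx.PostEvent(self.GetParent(), GuiExitEvent(self.GetId()))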
PmagPy/PmagPy | programs/pmag_gui.py | MagMainFrame.on_btn_demag_gui | def on_btn_demag_gui(self, event):
"""
Open Demag GUI
"""
if not self.check_for_meas_file():
return
if not self.check_for_uncombined_files():
return
outstring = "demag_gui.py -WD %s"%self.WD
print("-I- running python script:\n %s"%(outstring))
if self.data_model_num == 2:
demag_gui.start(self.WD, standalone_app=False, parent=self, DM=self.data_model_num)
else:
# disable and hide Pmag GUI mainframe
self.Disable()
self.Hide()
# show busyinfo
wait = wx.BusyInfo('Compiling required data, please wait...')
wx.SafeYield()
# create custom Demag GUI closing event and bind it
DemagGuiExitEvent, EVT_DEMAG_GUI_EXIT = newevent.NewCommandEvent()
self.Bind(EVT_DEMAG_GUI_EXIT, self.on_analysis_gui_exit)
# make and show the Demag GUI frame
demag_gui_frame = demag_gui.Demag_GUI(self.WD, self,
write_to_log_file=False,
data_model=self.data_model_num,
evt_quit=DemagGuiExitEvent)
demag_gui_frame.Centre()
demag_gui_frame.Show()
del wait | python | def on_btn_demag_gui(self, event):
"""
Open Demag GUI
"""
if not self.check_for_meas_file():
return
if not self.check_for_uncombined_files():
return
outstring = "demag_gui.py -WD %s"%self.WD
print("-I- running python script:\n %s"%(outstring))
if self.data_model_num == 2:
demag_gui.start(self.WD, standalone_app=False, parent=self, DM=self.data_model_num)
else:
# disable and hide Pmag GUI mainframe
self.Disable()
self.Hide()
# show busyinfo
wait = wx.BusyInfo('Compiling required data, please wait...')
wx.SafeYield()
# create custom Demag GUI closing event and bind it
DemagGuiExitEvent, EVT_DEMAG_GUI_EXIT = newevent.NewCommandEvent()
self.Bind(EVT_DEMAG_GUI_EXIT, self.on_analysis_gui_exit)
# make and show the Demag GUI frame
demag_gui_frame = demag_gui.Demag_GUI(self.WD, self,
write_to_log_file=False,
data_model=self.data_model_num,
evt_quit=DemagGuiExitEvent)
demag_gui_frame.Centre()
demag_gui_frame.Show()
del wait | Open Demag GUI | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/pmag_gui.py#L474-L504 |
PmagPy/PmagPy | programs/pmag_gui.py | MagMainFrame.on_btn_convert_3 | def on_btn_convert_3(self, event):
"""
Open dialog for rough conversion of
2.5 files to 3.0 files.
Offer link to earthref for proper upgrade.
"""
dia = pw.UpgradeDialog(None)
dia.Center()
res = dia.ShowModal()
if res == wx.ID_CANCEL:
webbrowser.open("https://www2.earthref.org/MagIC/upgrade", new=2)
return
## more nicely styled way, but doesn't link to earthref
#msg = "This tool is meant for relatively simple upgrades (for instance, a measurement file, a sample file, and a criteria file).\nIf you have a more complex contribution to upgrade, and you want maximum accuracy, use the upgrade tool at https://www2.earthref.org/MagIC/upgrade.\n\nDo you want to continue?"
#result = pw.warning_with_override(msg)
#if result == wx.ID_NO:
#webbrowser.open("https://www2.earthref.org/MagIC/upgrade", new=2)
#return
# turn files from 2.5 --> 3.0 (rough translation)
meas, upgraded, no_upgrade = pmag.convert_directory_2_to_3('magic_measurements.txt',
input_dir=self.WD, output_dir=self.WD,
data_model=self.contribution.data_model)
if not meas:
wx.MessageBox('2.5 --> 3.0 failed. Do you have a magic_measurements.txt file in your working directory?',
'Info', wx.OK | wx.ICON_INFORMATION)
return
# create a contribution
self.contribution = cb.Contribution(self.WD)
# make skeleton files with specimen, sample, site, location data
self.contribution.propagate_measurement_info()
# pop up
upgraded_string = ", ".join(upgraded)
if no_upgrade:
no_upgrade_string = ", ".join(no_upgrade)
msg = '2.5 --> 3.0 translation completed!\n\nThese 3.0 format files were created: {}.\n\nHowever, these 2.5 format files could not be upgraded: {}.\n\nTo convert all 2.5 files, use the MagIC upgrade tool: https://www2.earthref.org/MagIC/upgrade\n'.format(upgraded_string, no_upgrade_string)
if 'criteria.txt' in upgraded:
msg += '\nNote: Please check your criteria file for completeness and accuracy, as not all 2.5 files will be fully upgraded.'
if 'pmag_criteria.txt' in no_upgrade:
msg += '\nNote: Not all criteria files can be upgraded, even on the MagIC site. You may need to recreate an old pmag_criteria file from scratch in Thellier GUI or Demag GUI.'
wx.MessageBox(msg, 'Warning', wx.OK | wx.ICON_INFORMATION)
else:
msg = '2.5 --> 3.0 translation completed!\nThese files were converted: {}'.format(upgraded_string)
wx.MessageBox(msg, 'Info', wx.OK | wx.ICON_INFORMATION) | python | def on_btn_convert_3(self, event):
"""
Open dialog for rough conversion of
2.5 files to 3.0 files.
Offer link to earthref for proper upgrade.
"""
dia = pw.UpgradeDialog(None)
dia.Center()
res = dia.ShowModal()
if res == wx.ID_CANCEL:
webbrowser.open("https://www2.earthref.org/MagIC/upgrade", new=2)
return
## more nicely styled way, but doesn't link to earthref
#msg = "This tool is meant for relatively simple upgrades (for instance, a measurement file, a sample file, and a criteria file).\nIf you have a more complex contribution to upgrade, and you want maximum accuracy, use the upgrade tool at https://www2.earthref.org/MagIC/upgrade.\n\nDo you want to continue?"
#result = pw.warning_with_override(msg)
#if result == wx.ID_NO:
#webbrowser.open("https://www2.earthref.org/MagIC/upgrade", new=2)
#return
# turn files from 2.5 --> 3.0 (rough translation)
meas, upgraded, no_upgrade = pmag.convert_directory_2_to_3('magic_measurements.txt',
input_dir=self.WD, output_dir=self.WD,
data_model=self.contribution.data_model)
if not meas:
wx.MessageBox('2.5 --> 3.0 failed. Do you have a magic_measurements.txt file in your working directory?',
'Info', wx.OK | wx.ICON_INFORMATION)
return
# create a contribution
self.contribution = cb.Contribution(self.WD)
# make skeleton files with specimen, sample, site, location data
self.contribution.propagate_measurement_info()
# pop up
upgraded_string = ", ".join(upgraded)
if no_upgrade:
no_upgrade_string = ", ".join(no_upgrade)
msg = '2.5 --> 3.0 translation completed!\n\nThese 3.0 format files were created: {}.\n\nHowever, these 2.5 format files could not be upgraded: {}.\n\nTo convert all 2.5 files, use the MagIC upgrade tool: https://www2.earthref.org/MagIC/upgrade\n'.format(upgraded_string, no_upgrade_string)
if 'criteria.txt' in upgraded:
msg += '\nNote: Please check your criteria file for completeness and accuracy, as not all 2.5 files will be fully upgraded.'
if 'pmag_criteria.txt' in no_upgrade:
msg += '\nNote: Not all criteria files can be upgraded, even on the MagIC site. You may need to recreate an old pmag_criteria file from scratch in Thellier GUI or Demag GUI.'
wx.MessageBox(msg, 'Warning', wx.OK | wx.ICON_INFORMATION)
else:
msg = '2.5 --> 3.0 translation completed!\nThese files were converted: {}'.format(upgraded_string)
wx.MessageBox(msg, 'Info', wx.OK | wx.ICON_INFORMATION) | Open dialog for rough conversion of
2.5 files to 3.0 files.
Offer link to earthref for proper upgrade. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/pmag_gui.py#L528-L571 |
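A minimal, non-GUI sketch of the same rough 2.5 --> 3.0 conversion step, assuming the standard pmagpy package layout; the working directory is a placeholder and data_model is left to the function's default instead of being taken from an existing contribution as in the handler above.

import pmagpy.pmag as pmag
import pmagpy.contribution_builder as cb

wd = "."  # hypothetical working directory containing magic_measurements.txt
meas, upgraded, no_upgrade = pmag.convert_directory_2_to_3('magic_measurements.txt',
                                                           input_dir=wd, output_dir=wd)
if meas:
    contribution = cb.Contribution(wd)          # build a 3.0 contribution from the new files
    contribution.propagate_measurement_info()   # skeleton specimen/sample/site/location files
    print("upgraded:", ", ".join(upgraded))
    if no_upgrade:
        print("could not upgrade:", ", ".join(no_upgrade))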
PmagPy/PmagPy | programs/pmag_gui.py | MagMainFrame.on_btn_metadata | def on_btn_metadata(self, event):
"""
Initiate the series of windows to add metadata
to the contribution.
"""
# make sure we have a measurements file
if not self.check_for_meas_file():
return
# make sure all files of the same type have been combined
if not self.check_for_uncombined_files():
return
if self.data_model_num == 2:
wait = wx.BusyInfo('Compiling required data, please wait...')
wx.SafeYield()
self.ErMagic_frame = ErMagicBuilder.MagIC_model_builder(self.WD, self, self.er_magic)
elif self.data_model_num == 3:
wait = wx.BusyInfo('Compiling required data, please wait...')
wx.SafeYield()
self.ErMagic_frame = ErMagicBuilder.MagIC_model_builder3(self.WD, self, self.contribution)
#
self.ErMagic_frame.Show()
self.ErMagic_frame.Center()
        # gets total available screen space - 30%
size = wx.DisplaySize()
size = (size[0] - 0.3 * size[0], size[1] - 0.3 * size[1])
self.ErMagic_frame.Raise()
del wait | python | def on_btn_metadata(self, event):
"""
Initiate the series of windows to add metadata
to the contribution.
"""
# make sure we have a measurements file
if not self.check_for_meas_file():
return
# make sure all files of the same type have been combined
if not self.check_for_uncombined_files():
return
if self.data_model_num == 2:
wait = wx.BusyInfo('Compiling required data, please wait...')
wx.SafeYield()
self.ErMagic_frame = ErMagicBuilder.MagIC_model_builder(self.WD, self, self.er_magic)
elif self.data_model_num == 3:
wait = wx.BusyInfo('Compiling required data, please wait...')
wx.SafeYield()
self.ErMagic_frame = ErMagicBuilder.MagIC_model_builder3(self.WD, self, self.contribution)
#
self.ErMagic_frame.Show()
self.ErMagic_frame.Center()
        # gets total available screen space - 30%
size = wx.DisplaySize()
size = (size[0] - 0.3 * size[0], size[1] - 0.3 * size[1])
self.ErMagic_frame.Raise()
del wait | Initiate the series of windows to add metadata
to the contribution. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/pmag_gui.py#L574-L600 |
PmagPy/PmagPy | programs/pmag_gui.py | MagMainFrame.init_check_window2 | def init_check_window2(self):
"""
initiates the object that will control steps 1-6
of checking headers, filling in cell values, etc.
"""
self.check_dia = pmag_er_magic_dialogs.ErMagicCheckFrame(self, 'Check Data',
self.WD, self.er_magic) | python | def init_check_window2(self):
"""
initiates the object that will control steps 1-6
of checking headers, filling in cell values, etc.
"""
self.check_dia = pmag_er_magic_dialogs.ErMagicCheckFrame(self, 'Check Data',
self.WD, self.er_magic) | initiates the object that will control steps 1-6
of checking headers, filling in cell values, etc. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/pmag_gui.py#L602-L608 |
PmagPy/PmagPy | programs/pmag_gui.py | MagMainFrame.init_check_window | def init_check_window(self):
"""
initiates the object that will control steps 1-6
of checking headers, filling in cell values, etc.
"""
self.check_dia = pmag_er_magic_dialogs.ErMagicCheckFrame3(self, 'Check Data',
self.WD, self.contribution) | python | def init_check_window(self):
"""
initiates the object that will control steps 1-6
of checking headers, filling in cell values, etc.
"""
self.check_dia = pmag_er_magic_dialogs.ErMagicCheckFrame3(self, 'Check Data',
self.WD, self.contribution) | initiates the object that will control steps 1-6
of checking headers, filling in cell values, etc. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/pmag_gui.py#L610-L616 |
PmagPy/PmagPy | programs/pmag_gui.py | MagMainFrame.on_btn_orientation | def on_btn_orientation(self, event):
"""
Create and fill wxPython grid for entering
orientation data.
"""
wait = wx.BusyInfo('Compiling required data, please wait...')
wx.SafeYield()
#dw, dh = wx.DisplaySize()
size = wx.DisplaySize()
size = (size[0]-0.1 * size[0], size[1]-0.1 * size[1])
if self.data_model_num == 3:
frame = pmag_gui_dialogs.OrientFrameGrid3(self, -1, 'demag_orient.txt',
self.WD, self.contribution,
size)
else:
frame = pmag_gui_dialogs.OrientFrameGrid(self, -1, 'demag_orient.txt',
self.WD, self.er_magic, size)
frame.Show(True)
frame.Centre()
self.Hide()
del wait | python | def on_btn_orientation(self, event):
"""
Create and fill wxPython grid for entering
orientation data.
"""
wait = wx.BusyInfo('Compiling required data, please wait...')
wx.SafeYield()
#dw, dh = wx.DisplaySize()
size = wx.DisplaySize()
size = (size[0]-0.1 * size[0], size[1]-0.1 * size[1])
if self.data_model_num == 3:
frame = pmag_gui_dialogs.OrientFrameGrid3(self, -1, 'demag_orient.txt',
self.WD, self.contribution,
size)
else:
frame = pmag_gui_dialogs.OrientFrameGrid(self, -1, 'demag_orient.txt',
self.WD, self.er_magic, size)
frame.Show(True)
frame.Centre()
self.Hide()
del wait | Create and fill wxPython grid for entering
orientation data. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/pmag_gui.py#L619-L639 |
PmagPy/PmagPy | programs/pmag_gui.py | MagMainFrame.on_btn_unpack | def on_btn_unpack(self, event):
"""
Create dialog to choose a file to unpack
with download magic.
Then run download_magic and create self.contribution.
"""
dlg = wx.FileDialog(
None, message = "choose txt file to unpack",
defaultDir=self.WD,
defaultFile="",
style=wx.FD_OPEN #| wx.FD_CHANGE_DIR
)
if dlg.ShowModal() == wx.ID_OK:
FILE = dlg.GetPath()
input_dir, f = os.path.split(FILE)
else:
return False
outstring="download_magic.py -f {} -WD {} -ID {} -DM {}".format(f, self.WD, input_dir, self.data_model_num)
# run as module:
print("-I- running python script:\n %s"%(outstring))
wait = wx.BusyInfo("Please wait, working...")
wx.SafeYield()
ex = None
try:
if ipmag.download_magic(f, self.WD, input_dir, overwrite=True, data_model=self.data_model):
text = "Successfully ran download_magic.py program.\nMagIC files were saved in your working directory.\nSee Terminal/message window for details."
else:
text = "Something went wrong. Make sure you chose a valid file downloaded from the MagIC database and try again."
        except Exception as err:
            # re-bind so the exception survives the except block (Python 3 unbinds 'ex' at its end)
            ex = err
            text = "Something went wrong. Make sure you chose a valid file downloaded from the MagIC database and try again."
del wait
dlg = wx.MessageDialog(self, caption="Saved", message=text, style=wx.OK)
result = dlg.ShowModal()
if result == wx.ID_OK:
dlg.Destroy()
if ex:
raise(ex)
self.contribution = cb.Contribution(self.WD) | python | def on_btn_unpack(self, event):
"""
Create dialog to choose a file to unpack
with download magic.
Then run download_magic and create self.contribution.
"""
dlg = wx.FileDialog(
None, message = "choose txt file to unpack",
defaultDir=self.WD,
defaultFile="",
style=wx.FD_OPEN #| wx.FD_CHANGE_DIR
)
if dlg.ShowModal() == wx.ID_OK:
FILE = dlg.GetPath()
input_dir, f = os.path.split(FILE)
else:
return False
outstring="download_magic.py -f {} -WD {} -ID {} -DM {}".format(f, self.WD, input_dir, self.data_model_num)
# run as module:
print("-I- running python script:\n %s"%(outstring))
wait = wx.BusyInfo("Please wait, working...")
wx.SafeYield()
ex = None
try:
if ipmag.download_magic(f, self.WD, input_dir, overwrite=True, data_model=self.data_model):
text = "Successfully ran download_magic.py program.\nMagIC files were saved in your working directory.\nSee Terminal/message window for details."
else:
text = "Something went wrong. Make sure you chose a valid file downloaded from the MagIC database and try again."
        except Exception as err:
            # re-bind so the exception survives the except block (Python 3 unbinds 'ex' at its end)
            ex = err
            text = "Something went wrong. Make sure you chose a valid file downloaded from the MagIC database and try again."
del wait
dlg = wx.MessageDialog(self, caption="Saved", message=text, style=wx.OK)
result = dlg.ShowModal()
if result == wx.ID_OK:
dlg.Destroy()
if ex:
raise(ex)
self.contribution = cb.Contribution(self.WD) | Create dialog to choose a file to unpack
with download magic.
Then run download_magic and create self.contribution. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/pmag_gui.py#L641-L681 |
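For reference, the unpacking step without the wx dialog; module paths assume the usual pmagpy layout, the file name and directory are placeholders, and data_model is left to download_magic's default rather than being passed explicitly as in the handler.

import pmagpy.ipmag as ipmag
import pmagpy.contribution_builder as cb

wd = "."                                   # working directory to unpack into (placeholder)
txt_file = "magic_contribution_12345.txt"  # hypothetical MagIC download sitting in wd
if ipmag.download_magic(txt_file, wd, wd, overwrite=True):
    contribution = cb.Contribution(wd)     # rebuild the contribution from the unpacked tables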
PmagPy/PmagPy | programs/pmag_gui.py | MagMainFrame.on_btn_upload | def on_btn_upload(self, event):
"""
Try to run upload_magic.
Open validation mode if the upload file has problems.
"""
if not self.check_for_uncombined_files():
return
outstring="upload_magic.py"
print("-I- running python script:\n %s"%(outstring))
wait = wx.BusyInfo("Please wait, working...")
wx.SafeYield()
self.contribution.tables['measurements'].add_measurement_names()
if self.data_model_num == 3:
res, error_message, has_problems, all_failing_items = ipmag.upload_magic(concat=False, dir_path=self.WD,
vocab=self.contribution.vocab,
contribution=self.contribution)
if self.data_model_num == 2:
res, error_message, errors = ipmag.upload_magic2(dir_path=self.WD, data_model=self.er_magic.data_model)
del wait
if res:
text = "You are ready to upload!\n{} was generated in {}".format(os.path.split(res)[1], os.path.split(res)[0])
dlg = pw.ChooseOne(self, "Go to MagIC for uploading", "Not ready yet", text, "Saved")
#dlg = wx.MessageDialog(self, caption="Saved", message=text, style=wx.OK)
else:
text = "There were some problems with the creation of your upload file.\nError message: {}\nSee Terminal/message window for details".format(error_message)
dlg = wx.MessageDialog(self, caption="Error", message=text, style=wx.OK)
dlg.Centre()
result = dlg.ShowModal()
if result == wx.ID_OK:
dlg.Destroy()
if result == wx.ID_YES:
pw.on_database_upload(None)
if self.data_model_num == 3:
if not res:
from programs import magic_gui
self.Disable()
self.Hide()
self.magic_gui_frame = magic_gui.MainFrame(self.WD,
dmodel=self.data_model,
title="Validations",
contribution=self.contribution)
self.magic_gui_frame.validation_mode = ['specimens']
self.magic_gui_frame.failing_items = all_failing_items
self.magic_gui_frame.change_dir_button.Disable()
self.magic_gui_frame.Centre()
self.magic_gui_frame.Show()
self.magic_gui_frame.highlight_problems(has_problems)
#
# change name of upload button to 'exit validation mode'
self.magic_gui_frame.bSizer2.GetStaticBox().SetLabel('return to main GUI')
self.magic_gui_frame.btn_upload.SetLabel("exit validation mode")
# bind that button to quitting magic gui and re-enabling Pmag GUI
self.magic_gui_frame.Bind(wx.EVT_BUTTON, self.on_end_validation, self.magic_gui_frame.btn_upload)
# do binding so that closing/quitting re-opens the main frame
self.magic_gui_frame.Bind(wx.EVT_CLOSE, self.on_end_validation)
# this makes it work with only the validation window open
self.magic_gui_frame.Bind(wx.EVT_MENU,
lambda event: self.menubar.on_quit(event, self.magic_gui_frame),
self.magic_gui_frame.menubar.file_quit)
# this makes it work if an additional grid is open
self.Bind(wx.EVT_MENU,
lambda event: self.menubar.on_quit(event, self.magic_gui_frame),
self.magic_gui_frame.menubar.file_quit) | python | def on_btn_upload(self, event):
"""
Try to run upload_magic.
Open validation mode if the upload file has problems.
"""
if not self.check_for_uncombined_files():
return
outstring="upload_magic.py"
print("-I- running python script:\n %s"%(outstring))
wait = wx.BusyInfo("Please wait, working...")
wx.SafeYield()
self.contribution.tables['measurements'].add_measurement_names()
if self.data_model_num == 3:
res, error_message, has_problems, all_failing_items = ipmag.upload_magic(concat=False, dir_path=self.WD,
vocab=self.contribution.vocab,
contribution=self.contribution)
if self.data_model_num == 2:
res, error_message, errors = ipmag.upload_magic2(dir_path=self.WD, data_model=self.er_magic.data_model)
del wait
if res:
text = "You are ready to upload!\n{} was generated in {}".format(os.path.split(res)[1], os.path.split(res)[0])
dlg = pw.ChooseOne(self, "Go to MagIC for uploading", "Not ready yet", text, "Saved")
#dlg = wx.MessageDialog(self, caption="Saved", message=text, style=wx.OK)
else:
text = "There were some problems with the creation of your upload file.\nError message: {}\nSee Terminal/message window for details".format(error_message)
dlg = wx.MessageDialog(self, caption="Error", message=text, style=wx.OK)
dlg.Centre()
result = dlg.ShowModal()
if result == wx.ID_OK:
dlg.Destroy()
if result == wx.ID_YES:
pw.on_database_upload(None)
if self.data_model_num == 3:
if not res:
from programs import magic_gui
self.Disable()
self.Hide()
self.magic_gui_frame = magic_gui.MainFrame(self.WD,
dmodel=self.data_model,
title="Validations",
contribution=self.contribution)
self.magic_gui_frame.validation_mode = ['specimens']
self.magic_gui_frame.failing_items = all_failing_items
self.magic_gui_frame.change_dir_button.Disable()
self.magic_gui_frame.Centre()
self.magic_gui_frame.Show()
self.magic_gui_frame.highlight_problems(has_problems)
#
# change name of upload button to 'exit validation mode'
self.magic_gui_frame.bSizer2.GetStaticBox().SetLabel('return to main GUI')
self.magic_gui_frame.btn_upload.SetLabel("exit validation mode")
# bind that button to quitting magic gui and re-enabling Pmag GUI
self.magic_gui_frame.Bind(wx.EVT_BUTTON, self.on_end_validation, self.magic_gui_frame.btn_upload)
# do binding so that closing/quitting re-opens the main frame
self.magic_gui_frame.Bind(wx.EVT_CLOSE, self.on_end_validation)
# this makes it work with only the validation window open
self.magic_gui_frame.Bind(wx.EVT_MENU,
lambda event: self.menubar.on_quit(event, self.magic_gui_frame),
self.magic_gui_frame.menubar.file_quit)
# this makes it work if an additional grid is open
self.Bind(wx.EVT_MENU,
lambda event: self.menubar.on_quit(event, self.magic_gui_frame),
self.magic_gui_frame.menubar.file_quit) | Try to run upload_magic.
Open validation mode if the upload file has problems. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/pmag_gui.py#L684-L751 |
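A rough non-GUI equivalent of the upload step; only concat and dir_path are kept from the handler's call, on the assumption that upload_magic can rebuild the contribution and vocabularies itself when they are not supplied.

import pmagpy.ipmag as ipmag

wd = "."   # directory holding the combined 3.0 files (placeholder)
res, error_message, has_problems, all_failing_items = ipmag.upload_magic(concat=False, dir_path=wd)
if res:
    print("upload file written:", res)
else:
    print("not ready to upload:", error_message)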
PmagPy/PmagPy | programs/pmag_gui.py | MagMainFrame.on_end_validation | def on_end_validation(self, event):
"""
Switch back from validation mode to main Pmag GUI mode.
Hide validation frame and show main frame.
"""
self.Enable()
self.Show()
self.magic_gui_frame.Destroy() | python | def on_end_validation(self, event):
"""
Switch back from validation mode to main Pmag GUI mode.
Hide validation frame and show main frame.
"""
self.Enable()
self.Show()
self.magic_gui_frame.Destroy() | Switch back from validation mode to main Pmag GUI mode.
Hide validation frame and show main frame. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/pmag_gui.py#L755-L762 |
PmagPy/PmagPy | programs/pmag_gui.py | MagMainFrame.on_menu_exit | def on_menu_exit(self, event):
"""
Exit the GUI
"""
# also delete appropriate copy file
try:
self.help_window.Destroy()
except:
pass
if '-i' in sys.argv:
self.Destroy()
try:
sys.exit() # can raise TypeError if wx inspector was used
except Exception as ex:
if isinstance(ex, TypeError):
pass
else:
raise ex | python | def on_menu_exit(self, event):
"""
Exit the GUI
"""
# also delete appropriate copy file
try:
self.help_window.Destroy()
except:
pass
if '-i' in sys.argv:
self.Destroy()
try:
sys.exit() # can raise TypeError if wx inspector was used
except Exception as ex:
if isinstance(ex, TypeError):
pass
else:
raise ex | Exit the GUI | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/pmag_gui.py#L765-L782 |
PmagPy/PmagPy | programs/pmag_gui.py | MagMainFrame.check_for_uncombined_files | def check_for_uncombined_files(self):
"""
Go through working directory and check for uncombined files.
(I.e., location1_specimens.txt and location2_specimens.txt but no specimens.txt.)
Show a warning if uncombined files are found.
Return True if no uncombined files are found OR user elects
to continue anyway.
"""
wd_files = os.listdir(self.WD)
if self.data_model_num == 2:
ftypes = ['er_specimens.txt', 'er_samples.txt', 'er_sites.txt', 'er_locations.txt', 'pmag_specimens.txt', 'pmag_samples.txt', 'pmag_sites.txt', 'rmag_specimens.txt', 'rmag_results.txt', 'rmag_anisotropy.txt']
else:
ftypes = ['specimens.txt', 'samples.txt', 'sites.txt', 'locations.txt']
uncombined = set()
for ftype in ftypes:
if ftype not in wd_files:
for f in wd_files:
if f.endswith('_' + ftype):
uncombined.add(ftype)
if uncombined:
msg = 'It looks like you may have uncombined files of type(s) {} in your working directory.\nYou may want to go back to Step 1 and finish combining all files.\nIf you continue, the program will try to extract as much information as possible from your measurement file.'.format(", ".join(list(uncombined)))
dlg = pw.ChooseOne(self, 'Continue anyway', 'Go back', msg, title="Warning!")
res = dlg.ShowModal()
if res == wx.ID_NO:
return
return True | python | def check_for_uncombined_files(self):
"""
Go through working directory and check for uncombined files.
(I.e., location1_specimens.txt and location2_specimens.txt but no specimens.txt.)
Show a warning if uncombined files are found.
Return True if no uncombined files are found OR user elects
to continue anyway.
"""
wd_files = os.listdir(self.WD)
if self.data_model_num == 2:
ftypes = ['er_specimens.txt', 'er_samples.txt', 'er_sites.txt', 'er_locations.txt', 'pmag_specimens.txt', 'pmag_samples.txt', 'pmag_sites.txt', 'rmag_specimens.txt', 'rmag_results.txt', 'rmag_anisotropy.txt']
else:
ftypes = ['specimens.txt', 'samples.txt', 'sites.txt', 'locations.txt']
uncombined = set()
for ftype in ftypes:
if ftype not in wd_files:
for f in wd_files:
if f.endswith('_' + ftype):
uncombined.add(ftype)
if uncombined:
msg = 'It looks like you may have uncombined files of type(s) {} in your working directory.\nYou may want to go back to Step 1 and finish combining all files.\nIf you continue, the program will try to extract as much information as possible from your measurement file.'.format(", ".join(list(uncombined)))
dlg = pw.ChooseOne(self, 'Continue anyway', 'Go back', msg, title="Warning!")
res = dlg.ShowModal()
if res == wx.ID_NO:
return
return True | Go through working directory and check for uncombined files.
(I.e., location1_specimens.txt and location2_specimens.txt but no specimens.txt.)
Show a warning if uncombined files are found.
Return True if no uncombined files are found OR user elects
to continue anyway. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/pmag_gui.py#L784-L809 |
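The detection logic above is plain filename matching; a standalone sketch of the same check for 3.0 file names, with no wx dependency:

import os

def find_uncombined(wd):
    """Return the set of 3.0 file types present only as per-location pieces."""
    wd_files = os.listdir(wd)
    uncombined = set()
    for ftype in ['specimens.txt', 'samples.txt', 'sites.txt', 'locations.txt']:
        if ftype not in wd_files:
            # e.g. location1_specimens.txt exists but no combined specimens.txt
            if any(f.endswith('_' + ftype) for f in wd_files):
                uncombined.add(ftype)
    return uncombined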
PmagPy/PmagPy | programs/pmag_gui.py | MagMainFrame.check_for_meas_file | def check_for_meas_file(self):
"""
Check the working directory for a measurement file.
If not found, show a warning and return False.
Otherwise return True.
"""
if self.data_model_num == 2:
meas_file_name = "magic_measurements.txt"
dm = "2.5"
else:
meas_file_name = "measurements.txt"
dm = "3.0"
if not os.path.isfile(os.path.join(self.WD, meas_file_name)):
            pw.simple_warning("Your working directory must have a {} format {} file to run this step. Make sure you have fully completed step 1 (import magnetometer file) and ALSO converted to 3.0, if necessary, then try again.\n\nIf you are trying to look at data downloaded from MagIC, you must unpack the txt file first. Some contributions do not contain measurement data, in which case you won't be able to use this function.".format(dm, meas_file_name))
return False
return True | python | def check_for_meas_file(self):
"""
Check the working directory for a measurement file.
If not found, show a warning and return False.
Otherwise return True.
"""
if self.data_model_num == 2:
meas_file_name = "magic_measurements.txt"
dm = "2.5"
else:
meas_file_name = "measurements.txt"
dm = "3.0"
if not os.path.isfile(os.path.join(self.WD, meas_file_name)):
            pw.simple_warning("Your working directory must have a {} format {} file to run this step. Make sure you have fully completed step 1 (import magnetometer file) and ALSO converted to 3.0, if necessary, then try again.\n\nIf you are trying to look at data downloaded from MagIC, you must unpack the txt file first. Some contributions do not contain measurement data, in which case you won't be able to use this function.".format(dm, meas_file_name))
return False
return True | Check the working directory for a measurement file.
If not found, show a warning and return False.
Otherwise return True. | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/pmag_gui.py#L811-L826 |
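The same check reduces to a single os.path test; a minimal standalone version:

import os

def has_meas_file(wd, data_model_num=3):
    name = "measurements.txt" if data_model_num == 3 else "magic_measurements.txt"
    return os.path.isfile(os.path.join(wd, name))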
PmagPy/PmagPy | programs/dayplot_magic.py | main | def main():
"""
NAME
dayplot_magic.py
DESCRIPTION
makes 'day plots' (Day et al. 1977) and squareness/coercivity,
plots 'linear mixing' curve from Dunlop and Carter-Stiglitz (2006).
squareness coercivity of remanence (Neel, 1955) plots after
Tauxe et al. (2002)
SYNTAX
dayplot_magic.py [command line options]
OPTIONS
-h prints help message and quits
-f: specify input hysteresis file, default is specimens.txt
-fmt [svg,png,jpg] format for output plots, default svg
-sav saves plots and quits quietly
"""
args = sys.argv
if "-h" in args:
print(main.__doc__)
sys.exit()
dir_path = pmag.get_named_arg('-WD', '.')
fmt = pmag.get_named_arg('-fmt', 'svg')
save_plots = False
interactive = True
if '-sav' in sys.argv:
save_plots = True
interactive = False
infile = pmag.get_named_arg("-f", "specimens.txt")
ipmag.dayplot_magic(dir_path, infile, save=save_plots,
fmt=fmt, interactive=interactive) | python | def main():
"""
NAME
dayplot_magic.py
DESCRIPTION
makes 'day plots' (Day et al. 1977) and squareness/coercivity,
plots 'linear mixing' curve from Dunlop and Carter-Stiglitz (2006).
squareness coercivity of remanence (Neel, 1955) plots after
Tauxe et al. (2002)
SYNTAX
dayplot_magic.py [command line options]
OPTIONS
-h prints help message and quits
-f: specify input hysteresis file, default is specimens.txt
-fmt [svg,png,jpg] format for output plots, default svg
-sav saves plots and quits quietly
"""
args = sys.argv
if "-h" in args:
print(main.__doc__)
sys.exit()
dir_path = pmag.get_named_arg('-WD', '.')
fmt = pmag.get_named_arg('-fmt', 'svg')
save_plots = False
interactive = True
if '-sav' in sys.argv:
save_plots = True
interactive = False
infile = pmag.get_named_arg("-f", "specimens.txt")
ipmag.dayplot_magic(dir_path, infile, save=save_plots,
fmt=fmt, interactive=interactive) | NAME
dayplot_magic.py
DESCRIPTION
makes 'day plots' (Day et al. 1977) and squareness/coercivity,
plots 'linear mixing' curve from Dunlop and Carter-Stiglitz (2006).
squareness coercivity of remanence (Neel, 1955) plots after
Tauxe et al. (2002)
SYNTAX
dayplot_magic.py [command line options]
OPTIONS
-h prints help message and quits
-f: specify input hysteresis file, default is specimens.txt
-fmt [svg,png,jpg] format for output plots, default svg
-sav saves plots and quits quietly | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/dayplot_magic.py#L12-L45 |
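Typical use mirrors the option parsing in main(); the equivalent library call, with a placeholder directory and the default file name, is:

# shell: dayplot_magic.py -f specimens.txt -fmt png -sav
import pmagpy.ipmag as ipmag

ipmag.dayplot_magic(".", "specimens.txt",   # working directory and hysteresis file
                    save=True, fmt="png",   # write png files without prompting
                    interactive=False)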
PmagPy/PmagPy | dialogs/pmag_gui_menu2.py | MagICMenu.on_import1 | def on_import1(self, event):
"""
initialize window to import an arbitrary file into the working directory
"""
pmag_menu_dialogs.MoveFileIntoWD(self.parent, self.parent.WD) | python | def on_import1(self, event):
"""
initialize window to import an arbitrary file into the working directory
"""
pmag_menu_dialogs.MoveFileIntoWD(self.parent, self.parent.WD) | initialize window to import an arbitrary file into the working directory | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/dialogs/pmag_gui_menu2.py#L168-L172 |
PmagPy/PmagPy | dialogs/pmag_gui_menu2.py | MagICMenu.orient_import2 | def orient_import2(self, event):
"""
initialize window to import an AzDip format file into the working directory
"""
pmag_menu_dialogs.ImportAzDipFile(self.parent, self.parent.WD) | python | def orient_import2(self, event):
"""
initialize window to import an AzDip format file into the working directory
"""
pmag_menu_dialogs.ImportAzDipFile(self.parent, self.parent.WD) | initialize window to import an AzDip format file into the working directory | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/dialogs/pmag_gui_menu2.py#L177-L181 |
PmagPy/PmagPy | programs/plot_map_pts.py | main | def main():
"""
NAME
plot_map_pts.py
DESCRIPTION
plots points on map
SYNTAX
plot_map_pts.py [command line options]
OPTIONS
-h prints help and quits
-sym [ro, bs, g^, r., b-, etc.] [1,5,10] symbol and size for points
colors are r=red,b=blue,g=green, etc.
symbols are '.' for points, ^, for triangle, s for square, etc.
-, for lines, -- for dotted lines, see matplotlib online documentation for plot()
-eye ELAT ELON [specify eyeball location]
-etp put on topography
-cmap color map [default is jet]
-f FILE, specify input file
-o color ocean blue/land green (default is not)
-res [c,l,i,h] specify resolution (crude, low, intermediate, high]
-fmt [pdf,eps, png] specify output format (default is pdf)
-R don't plot details of rivers
-B don't plot national/state boundaries, etc.
-pad [LAT LON] pad bounding box by LAT/LON (default is not)
-grd SPACE specify grid spacing
-sav save plot and quit
-prj PROJ, specify one of the supported projections:
pc = Plate Carree
aea = Albers Equal Area
aeqd = Azimuthal Equidistant
lcc = Lambert Conformal
lcyl = Lambert Cylindrical
merc = Mercator
mill = Miller Cylindrical
moll = Mollweide [default]
ortho = Orthographic
robin = Robinson
sinu = Sinusoidal
stere = Stereographic
tmerc = Transverse Mercator
utm = UTM
laea = Lambert Azimuthal Equal Area
geos = Geostationary
npstere = North-Polar Stereographic
spstere = South-Polar Stereographic
Special codes for MagIC formatted input files:
-n
-l
INPUTS
space or tab delimited LON LAT data
OR:
standard MagIC formatted er_sites or pmag_results table
DEFAULTS
res: c
prj: mollweide; lcc for MagIC format files
ELAT,ELON = 0,0
pad LAT,LON=0,0
NB: high resolution or lines can be very slow
"""
dir_path='.'
plot=0
ocean=0
res='c'
proj='moll'
Lats,Lons=[],[]
fmt='pdf'
sym='ro'
symsize=5
fancy=0
rivers,boundaries,ocean=1,1,0
latmin,latmax,lonmin,lonmax,lat_0,lon_0=-90,90,0.,360.,0.,0.
padlat,padlon,gridspace=0,0,30
lat_0,lon_0="",""
basemap=1
prn_name,prn_loc,names,locs=0,0,[],[]
if '-WD' in sys.argv:
ind = sys.argv.index('-WD')
dir_path=sys.argv[ind+1]
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-fmt' in sys.argv:
ind = sys.argv.index('-fmt')
fmt=sys.argv[ind+1]
if '-res' in sys.argv:
ind = sys.argv.index('-res')
res=sys.argv[ind+1]
if res!= 'c' and res!='l':
print('this resolution will take a while - be patient')
if '-etp' in sys.argv:
fancy=1
print ('-W- plotting will require patience!')
if '-ctp' in sys.argv: basemap=0
if '-sav' in sys.argv: plot=1
if '-R' in sys.argv:rivers=0
if '-B' in sys.argv:boundaries=0
if '-o' in sys.argv:ocean=1
if '-cmap' in sys.argv:
ind = sys.argv.index('-cmap')
        cmap=sys.argv[ind+1]
else:
cmap='jet'
if '-grd' in sys.argv:
ind = sys.argv.index('-grd')
gridspace=float(sys.argv[ind+1])
if '-eye' in sys.argv:
ind = sys.argv.index('-eye')
lat_0=float(sys.argv[ind+1])
lon_0=float(sys.argv[ind+2])
if '-sym' in sys.argv:
ind = sys.argv.index('-sym')
sym=sys.argv[ind+1]
symsize=int(sys.argv[ind+2])
if '-pad' in sys.argv:
ind = sys.argv.index('-pad')
padlat=float(sys.argv[ind+1])
padlon=float(sys.argv[ind+2])
if '-f' in sys.argv:
ind = sys.argv.index('-f')
file=dir_path+'/'+sys.argv[ind+1]
header=open(file,'r').readlines()[0].split('\t')
if 'tab' in header[0]:
proj='lcc'
if 'sites' in header[1]:
latkey='lat'
lonkey='lon'
namekey='site'
lockey=''
else:
print('file type not supported')
print(main.__doc__)
sys.exit()
Sites,file_type=pmag.magic_read(file)
Lats=pmag.get_dictkey(Sites,latkey,'f')
Lons=pmag.get_dictkey(Sites,lonkey,'f')
if prn_name==1:names=pmag.get_dictkey(Sites,namekey,'')
if prn_loc==1:names=pmag.get_dictkey(Sites,lockey,'')
else:
ptdata=numpy.loadtxt(file)
Lons=ptdata.transpose()[0]
Lats=ptdata.transpose()[1]
latmin=numpy.min(Lats)-padlat
lonmin=numpy.min(Lons)-padlon
latmax=numpy.max(Lats)+padlat
lonmax=numpy.max(Lons)+padlon
if lon_0=="":
lon_0=0.5*(lonmin+lonmax)
lat_0=0.5*(latmin+latmax)
else:
print("input file must be specified")
sys.exit()
if '-prj' in sys.argv:
ind = sys.argv.index('-prj')
proj=sys.argv[ind+1]
FIG={'map':1}
pmagplotlib.plot_init(FIG['map'],6,6)
cnt=0
Opts={'latmin':latmin,'latmax':latmax,'lonmin':lonmin,'lonmax':lonmax,'lat_0':lat_0,'lon_0':lon_0,'proj':proj,'sym':sym,'symsize':3,'pltgrid':1,'res':res,'boundinglat':0.,'padlon':padlon,'padlat':padlat,'gridspace':gridspace,'cmap':cmap}
Opts['details']={}
Opts['details']['coasts']=1
Opts['details']['rivers']=rivers
Opts['details']['states']=boundaries
Opts['details']['countries']=boundaries
Opts['details']['ocean']=ocean
Opts['details']['fancy']=fancy
if len(names)>0:Opts['names']=names
if len(locs)>0:Opts['loc_name']=locs
if proj=='merc':
Opts['latmin']=-70
Opts['latmax']=70
Opts['lonmin']=-180
Opts['lonmax']=180
print('please wait to draw points')
Opts['sym']=sym
Opts['symsize']=symsize
if basemap:
pmagplotlib.plot_map(FIG['map'],Lats,Lons,Opts)
else:
pmagplotlib.plot_map(FIG['map'],Lats,Lons,Opts)
files={}
titles={}
titles['map']='PT Map'
for key in list(FIG.keys()):
files[key]='map_pts'+'.'+fmt
if pmagplotlib.isServer:
black = '#000000'
purple = '#800080'
FIG = pmagplotlib.add_borders(FIG,titles,black,purple)
pmagplotlib.save_plots(FIG,files)
if plot==1:
pmagplotlib.save_plots(FIG,files)
else:
pmagplotlib.draw_figs(FIG)
ans=input(" S[a]ve to save plot, Return to quit: ")
if ans=="a": pmagplotlib.save_plots(FIG,files) | python | def main():
"""
NAME
plot_map_pts.py
DESCRIPTION
plots points on map
SYNTAX
plot_map_pts.py [command line options]
OPTIONS
-h prints help and quits
-sym [ro, bs, g^, r., b-, etc.] [1,5,10] symbol and size for points
colors are r=red,b=blue,g=green, etc.
symbols are '.' for points, ^, for triangle, s for square, etc.
-, for lines, -- for dotted lines, see matplotlib online documentation for plot()
-eye ELAT ELON [specify eyeball location]
-etp put on topography
-cmap color map [default is jet]
-f FILE, specify input file
-o color ocean blue/land green (default is not)
    -res [c,l,i,h] specify resolution (crude, low, intermediate, high)
-fmt [pdf,eps, png] specify output format (default is pdf)
-R don't plot details of rivers
-B don't plot national/state boundaries, etc.
-pad [LAT LON] pad bounding box by LAT/LON (default is not)
-grd SPACE specify grid spacing
-sav save plot and quit
-prj PROJ, specify one of the supported projections:
pc = Plate Carree
aea = Albers Equal Area
aeqd = Azimuthal Equidistant
lcc = Lambert Conformal
lcyl = Lambert Cylindrical
merc = Mercator
mill = Miller Cylindrical
moll = Mollweide [default]
ortho = Orthographic
robin = Robinson
sinu = Sinusoidal
stere = Stereographic
tmerc = Transverse Mercator
utm = UTM
laea = Lambert Azimuthal Equal Area
geos = Geostationary
npstere = North-Polar Stereographic
spstere = South-Polar Stereographic
Special codes for MagIC formatted input files:
-n
-l
INPUTS
space or tab delimited LON LAT data
OR:
standard MagIC formatted er_sites or pmag_results table
DEFAULTS
res: c
prj: mollweide; lcc for MagIC format files
ELAT,ELON = 0,0
pad LAT,LON=0,0
NB: high resolution or lines can be very slow
"""
dir_path='.'
plot=0
ocean=0
res='c'
proj='moll'
Lats,Lons=[],[]
fmt='pdf'
sym='ro'
symsize=5
fancy=0
rivers,boundaries,ocean=1,1,0
latmin,latmax,lonmin,lonmax,lat_0,lon_0=-90,90,0.,360.,0.,0.
padlat,padlon,gridspace=0,0,30
lat_0,lon_0="",""
basemap=1
prn_name,prn_loc,names,locs=0,0,[],[]
if '-WD' in sys.argv:
ind = sys.argv.index('-WD')
dir_path=sys.argv[ind+1]
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-fmt' in sys.argv:
ind = sys.argv.index('-fmt')
fmt=sys.argv[ind+1]
if '-res' in sys.argv:
ind = sys.argv.index('-res')
res=sys.argv[ind+1]
if res!= 'c' and res!='l':
print('this resolution will take a while - be patient')
if '-etp' in sys.argv:
fancy=1
print ('-W- plotting will require patience!')
if '-ctp' in sys.argv: basemap=0
if '-sav' in sys.argv: plot=1
if '-R' in sys.argv:rivers=0
if '-B' in sys.argv:boundaries=0
if '-o' in sys.argv:ocean=1
if '-cmap' in sys.argv:
ind = sys.argv.index('-cmap')
        cmap=sys.argv[ind+1]
else:
cmap='jet'
if '-grd' in sys.argv:
ind = sys.argv.index('-grd')
gridspace=float(sys.argv[ind+1])
if '-eye' in sys.argv:
ind = sys.argv.index('-eye')
lat_0=float(sys.argv[ind+1])
lon_0=float(sys.argv[ind+2])
if '-sym' in sys.argv:
ind = sys.argv.index('-sym')
sym=sys.argv[ind+1]
symsize=int(sys.argv[ind+2])
if '-pad' in sys.argv:
ind = sys.argv.index('-pad')
padlat=float(sys.argv[ind+1])
padlon=float(sys.argv[ind+2])
if '-f' in sys.argv:
ind = sys.argv.index('-f')
file=dir_path+'/'+sys.argv[ind+1]
header=open(file,'r').readlines()[0].split('\t')
if 'tab' in header[0]:
proj='lcc'
if 'sites' in header[1]:
latkey='lat'
lonkey='lon'
namekey='site'
lockey=''
else:
print('file type not supported')
print(main.__doc__)
sys.exit()
Sites,file_type=pmag.magic_read(file)
Lats=pmag.get_dictkey(Sites,latkey,'f')
Lons=pmag.get_dictkey(Sites,lonkey,'f')
if prn_name==1:names=pmag.get_dictkey(Sites,namekey,'')
if prn_loc==1:names=pmag.get_dictkey(Sites,lockey,'')
else:
ptdata=numpy.loadtxt(file)
Lons=ptdata.transpose()[0]
Lats=ptdata.transpose()[1]
latmin=numpy.min(Lats)-padlat
lonmin=numpy.min(Lons)-padlon
latmax=numpy.max(Lats)+padlat
lonmax=numpy.max(Lons)+padlon
if lon_0=="":
lon_0=0.5*(lonmin+lonmax)
lat_0=0.5*(latmin+latmax)
else:
print("input file must be specified")
sys.exit()
if '-prj' in sys.argv:
ind = sys.argv.index('-prj')
proj=sys.argv[ind+1]
FIG={'map':1}
pmagplotlib.plot_init(FIG['map'],6,6)
cnt=0
Opts={'latmin':latmin,'latmax':latmax,'lonmin':lonmin,'lonmax':lonmax,'lat_0':lat_0,'lon_0':lon_0,'proj':proj,'sym':sym,'symsize':3,'pltgrid':1,'res':res,'boundinglat':0.,'padlon':padlon,'padlat':padlat,'gridspace':gridspace,'cmap':cmap}
Opts['details']={}
Opts['details']['coasts']=1
Opts['details']['rivers']=rivers
Opts['details']['states']=boundaries
Opts['details']['countries']=boundaries
Opts['details']['ocean']=ocean
Opts['details']['fancy']=fancy
if len(names)>0:Opts['names']=names
if len(locs)>0:Opts['loc_name']=locs
if proj=='merc':
Opts['latmin']=-70
Opts['latmax']=70
Opts['lonmin']=-180
Opts['lonmax']=180
print('please wait to draw points')
Opts['sym']=sym
Opts['symsize']=symsize
if basemap:
pmagplotlib.plot_map(FIG['map'],Lats,Lons,Opts)
else:
pmagplotlib.plot_map(FIG['map'],Lats,Lons,Opts)
files={}
titles={}
titles['map']='PT Map'
for key in list(FIG.keys()):
files[key]='map_pts'+'.'+fmt
if pmagplotlib.isServer:
black = '#000000'
purple = '#800080'
FIG = pmagplotlib.add_borders(FIG,titles,black,purple)
pmagplotlib.save_plots(FIG,files)
if plot==1:
pmagplotlib.save_plots(FIG,files)
else:
pmagplotlib.draw_figs(FIG)
ans=input(" S[a]ve to save plot, Return to quit: ")
if ans=="a": pmagplotlib.save_plots(FIG,files) | NAME
plot_map_pts.py
DESCRIPTION
plots points on map
SYNTAX
plot_map_pts.py [command line options]
OPTIONS
-h prints help and quits
-sym [ro, bs, g^, r., b-, etc.] [1,5,10] symbol and size for points
colors are r=red,b=blue,g=green, etc.
symbols are '.' for points, ^, for triangle, s for square, etc.
-, for lines, -- for dotted lines, see matplotlib online documentation for plot()
-eye ELAT ELON [specify eyeball location]
-etp put on topography
-cmap color map [default is jet]
-f FILE, specify input file
-o color ocean blue/land green (default is not)
    -res [c,l,i,h] specify resolution (crude, low, intermediate, high)
-fmt [pdf,eps, png] specify output format (default is pdf)
-R don't plot details of rivers
-B don't plot national/state boundaries, etc.
-pad [LAT LON] pad bounding box by LAT/LON (default is not)
-grd SPACE specify grid spacing
-sav save plot and quit
-prj PROJ, specify one of the supported projections:
pc = Plate Carree
aea = Albers Equal Area
aeqd = Azimuthal Equidistant
lcc = Lambert Conformal
lcyl = Lambert Cylindrical
merc = Mercator
mill = Miller Cylindrical
moll = Mollweide [default]
ortho = Orthographic
robin = Robinson
sinu = Sinusoidal
stere = Stereographic
tmerc = Transverse Mercator
utm = UTM
laea = Lambert Azimuthal Equal Area
geos = Geostationary
npstere = North-Polar Stereographic
spstere = South-Polar Stereographic
Special codes for MagIC formatted input files:
-n
-l
INPUTS
space or tab delimited LON LAT data
OR:
standard MagIC formatted er_sites or pmag_results table
DEFAULTS
res: c
prj: mollweide; lcc for MagIC format files
ELAT,ELON = 0,0
pad LAT,LON=0,0
NB: high resolution or lines can be very slow | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/plot_map_pts.py#L14-L213 |
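A condensed sketch of the core plotting path in main(), using a tiny inline dataset instead of an input file; the Opts keys are copied from the code above and the coordinate values are illustrative only.

import numpy
import pmagpy.pmagplotlib as pmagplotlib

lons = numpy.array([0., 30., 60.])
lats = numpy.array([10., 20., 30.])
Opts = {'latmin': -90, 'latmax': 90, 'lonmin': 0., 'lonmax': 360.,
        'lat_0': 20., 'lon_0': 30., 'proj': 'moll', 'sym': 'ro', 'symsize': 5,
        'pltgrid': 1, 'res': 'c', 'boundinglat': 0., 'padlon': 0, 'padlat': 0,
        'gridspace': 30, 'cmap': 'jet',
        'details': {'coasts': 1, 'rivers': 0, 'states': 0, 'countries': 0,
                    'ocean': 1, 'fancy': 0}}
pmagplotlib.plot_init(1, 6, 6)              # figure number 1, 6 x 6 inches
pmagplotlib.plot_map(1, lats, lons, Opts)   # draw the points
pmagplotlib.save_plots({'map': 1}, {'map': 'map_pts.pdf'})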
PmagPy/PmagPy | programs/s_hext.py | main | def main():
"""
NAME
s_hext.py
DESCRIPTION
calculates Hext statistics for tensor data
SYNTAX
    s_hext.py [-h][-l NMEAS][-f file] [<filename]
OPTIONS
-h prints help message and quits
-f file specifies filename on command line
-l NMEAS do line by line instead of whole file, use number of measurements NMEAS for degrees of freedom
< filename, reads from standard input (Unix like operating systems only)
INPUT
x11,x22,x33,x12,x23,x13,sigma [sigma only if line by line]
OUTPUT
F F12 F23 sigma
and three sets of:
tau dec inc Eij dec inc Eik dec inc
DEFAULT
average whole file
"""
ave=1
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-l' in sys.argv:
ind=sys.argv.index('-l')
npts=int(sys.argv[ind+1])
ave=0
if '-f' in sys.argv:
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
f=open(file,'r')
data=f.readlines()
f.close()
else:
data=sys.stdin.readlines()
Ss=[]
for line in data:
s=[]
rec=line.split()
for i in range(6):
s.append(float(rec[i]))
if ave==0:
sig=float(rec[6])
hpars=pmag.dohext(npts-6,sig,s)
print('%s %4.2f %s %4.2f %s %4.2f'%('F = ',hpars['F'],'F12 = ',hpars['F12'],'F23 = ',hpars['F23']))
print('%s %i %s %14.12f'%('Nmeas = ',npts,' sigma = ',sig))
print('%7.5f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f'%(hpars["t1"],hpars["v1_dec"],hpars["v1_inc"],hpars["e12"],hpars["v2_dec"],hpars["v2_inc"],hpars["e13"],hpars["v3_dec"],hpars["v3_inc"] ))
print('%7.5f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f'%(hpars["t2"],hpars["v2_dec"],hpars["v2_inc"],hpars["e23"],hpars["v3_dec"],hpars["v3_inc"],hpars["e12"],hpars["v1_dec"],hpars["v1_inc"] ))
print('%7.5f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f'%(hpars["t3"],hpars["v3_dec"],hpars["v3_inc"],hpars["e13"],hpars["v1_dec"],hpars["v1_inc"],hpars["e23"],hpars["v2_dec"],hpars["v2_inc"] ))
else:
Ss.append(s)
if ave==1:
npts=len(Ss)
nf,sigma,avs=pmag.sbar(Ss)
hpars=pmag.dohext(nf,sigma,avs)
print('%s %4.2f %s %4.2f %s %4.2f'%('F = ',hpars['F'],'F12 = ',hpars['F12'],'F23 = ',hpars['F23']))
print('%s %i %s %14.12f'%('N = ',npts,' sigma = ',sigma))
print('%7.5f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f'%(hpars["t1"],hpars["v1_dec"],hpars["v1_inc"],hpars["e12"],hpars["v2_dec"],hpars["v2_inc"],hpars["e13"],hpars["v3_dec"],hpars["v3_inc"] ))
print('%7.5f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f'%(hpars["t2"],hpars["v2_dec"],hpars["v2_inc"],hpars["e23"],hpars["v3_dec"],hpars["v3_inc"],hpars["e12"],hpars["v1_dec"],hpars["v1_inc"] ))
print('%7.5f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f'%(hpars["t3"],hpars["v3_dec"],hpars["v3_inc"],hpars["e13"],hpars["v1_dec"],hpars["v1_inc"],hpars["e23"],hpars["v2_dec"],hpars["v2_inc"] )) | python | def main():
"""
NAME
s_hext.py
DESCRIPTION
calculates Hext statistics for tensor data
SYNTAX
    s_hext.py [-h][-l NMEAS][-f file] [<filename]
OPTIONS
-h prints help message and quits
-f file specifies filename on command line
-l NMEAS do line by line instead of whole file, use number of measurements NMEAS for degrees of freedom
< filename, reads from standard input (Unix like operating systems only)
INPUT
x11,x22,x33,x12,x23,x13,sigma [sigma only if line by line]
OUTPUT
F F12 F23 sigma
and three sets of:
tau dec inc Eij dec inc Eik dec inc
DEFAULT
average whole file
"""
ave=1
if '-h' in sys.argv:
print(main.__doc__)
sys.exit()
if '-l' in sys.argv:
ind=sys.argv.index('-l')
npts=int(sys.argv[ind+1])
ave=0
if '-f' in sys.argv:
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
f=open(file,'r')
data=f.readlines()
f.close()
else:
data=sys.stdin.readlines()
Ss=[]
for line in data:
s=[]
rec=line.split()
for i in range(6):
s.append(float(rec[i]))
if ave==0:
sig=float(rec[6])
hpars=pmag.dohext(npts-6,sig,s)
print('%s %4.2f %s %4.2f %s %4.2f'%('F = ',hpars['F'],'F12 = ',hpars['F12'],'F23 = ',hpars['F23']))
print('%s %i %s %14.12f'%('Nmeas = ',npts,' sigma = ',sig))
print('%7.5f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f'%(hpars["t1"],hpars["v1_dec"],hpars["v1_inc"],hpars["e12"],hpars["v2_dec"],hpars["v2_inc"],hpars["e13"],hpars["v3_dec"],hpars["v3_inc"] ))
print('%7.5f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f'%(hpars["t2"],hpars["v2_dec"],hpars["v2_inc"],hpars["e23"],hpars["v3_dec"],hpars["v3_inc"],hpars["e12"],hpars["v1_dec"],hpars["v1_inc"] ))
print('%7.5f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f'%(hpars["t3"],hpars["v3_dec"],hpars["v3_inc"],hpars["e13"],hpars["v1_dec"],hpars["v1_inc"],hpars["e23"],hpars["v2_dec"],hpars["v2_inc"] ))
else:
Ss.append(s)
if ave==1:
npts=len(Ss)
nf,sigma,avs=pmag.sbar(Ss)
hpars=pmag.dohext(nf,sigma,avs)
print('%s %4.2f %s %4.2f %s %4.2f'%('F = ',hpars['F'],'F12 = ',hpars['F12'],'F23 = ',hpars['F23']))
print('%s %i %s %14.12f'%('N = ',npts,' sigma = ',sigma))
print('%7.5f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f'%(hpars["t1"],hpars["v1_dec"],hpars["v1_inc"],hpars["e12"],hpars["v2_dec"],hpars["v2_inc"],hpars["e13"],hpars["v3_dec"],hpars["v3_inc"] ))
print('%7.5f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f'%(hpars["t2"],hpars["v2_dec"],hpars["v2_inc"],hpars["e23"],hpars["v3_dec"],hpars["v3_inc"],hpars["e12"],hpars["v1_dec"],hpars["v1_inc"] ))
print('%7.5f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f %7.1f'%(hpars["t3"],hpars["v3_dec"],hpars["v3_inc"],hpars["e13"],hpars["v1_dec"],hpars["v1_inc"],hpars["e23"],hpars["v2_dec"],hpars["v2_inc"] )) | NAME
s_hext.py
DESCRIPTION
calculates Hext statistics for tensor data
SYNTAX
    s_hext.py [-h][-l NMEAS][-f file] [<filename]
OPTIONS
-h prints help message and quits
-f file specifies filename on command line
-l NMEAS do line by line instead of whole file, use number of measurements NMEAS for degrees of freedom
< filename, reads from standard input (Unix like operating systems only)
INPUT
x11,x22,x33,x12,x23,x13,sigma [sigma only if line by line]
OUTPUT
F F12 F23 sigma
and three sets of:
tau dec inc Eij dec inc Eik dec inc
DEFAULT
average whole file | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/s_hext.py#L8-L76 |
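The whole-file branch of main() reduces to three pmag calls; a minimal sketch with two made-up tensor lines (placeholder values, in the x11 x22 x33 x12 x23 x13 order expected above):

import pmagpy.pmag as pmag

lines = ["0.3412 0.3308 0.3280  0.0005 -0.0002  0.0003",
         "0.3405 0.3315 0.3280  0.0004 -0.0001  0.0002"]
Ss = [[float(v) for v in line.split()[:6]] for line in lines]
nf, sigma, avs = pmag.sbar(Ss)          # nf = degrees of freedom, sigma = residual, avs = averaged tensor
hpars = pmag.dohext(nf, sigma, avs)     # Hext statistics on the averaged tensor
print(hpars['F'], hpars['F12'], hpars['F23'])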
PmagPy/PmagPy | data_files/LearningPython/UTM.py | _UTMLetterDesignator | def _UTMLetterDesignator(Lat):
"""
This routine determines the correct UTM letter designator for the given latitude
returns 'Z' if latitude is outside the UTM limits of 84N to 80S
Written by Chuck Gantz- [email protected]
"""
if 84 >= Lat >= 72: return 'X'
elif 72 > Lat >= 64: return 'W'
elif 64 > Lat >= 56: return 'V'
elif 56 > Lat >= 48: return 'U'
elif 48 > Lat >= 40: return 'T'
elif 40 > Lat >= 32: return 'S'
elif 32 > Lat >= 24: return 'R'
elif 24 > Lat >= 16: return 'Q'
elif 16 > Lat >= 8: return 'P'
elif 8 > Lat >= 0: return 'N'
elif 0 > Lat >=-8: return 'M'
elif -8 > Lat >=-16: return 'L'
elif -16 > Lat >=-24: return 'K'
elif -24 > Lat >=-32: return 'J'
elif -32 > Lat >=-40: return 'H'
elif -40 > Lat >=-48: return 'G'
elif -48 > Lat >=-56: return 'F'
elif -56 > Lat >=-64: return 'E'
elif -64 > Lat >=-72: return 'D'
elif -72 > Lat >=-80: return 'C'
else: return 'Z' | python | def _UTMLetterDesignator(Lat):
"""
This routine determines the correct UTM letter designator for the given latitude
returns 'Z' if latitude is outside the UTM limits of 84N to 80S
Written by Chuck Gantz- [email protected]
"""
if 84 >= Lat >= 72: return 'X'
elif 72 > Lat >= 64: return 'W'
elif 64 > Lat >= 56: return 'V'
elif 56 > Lat >= 48: return 'U'
elif 48 > Lat >= 40: return 'T'
elif 40 > Lat >= 32: return 'S'
elif 32 > Lat >= 24: return 'R'
elif 24 > Lat >= 16: return 'Q'
elif 16 > Lat >= 8: return 'P'
elif 8 > Lat >= 0: return 'N'
elif 0 > Lat >=-8: return 'M'
elif -8 > Lat >=-16: return 'L'
elif -16 > Lat >=-24: return 'K'
elif -24 > Lat >=-32: return 'J'
elif -32 > Lat >=-40: return 'H'
elif -40 > Lat >=-48: return 'G'
elif -48 > Lat >=-56: return 'F'
elif -56 > Lat >=-64: return 'E'
elif -64 > Lat >=-72: return 'D'
elif -72 > Lat >=-80: return 'C'
else: return 'Z' | This routine determines the correct UTM letter designator for the given latitude
returns 'Z' if latitude is outside the UTM limits of 84N to 80S
Written by Chuck Gantz- [email protected] | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/data_files/LearningPython/UTM.py#L123-L150 |
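A quick check of the letter bands defined above, run with the function in scope (same module); the expected letters follow directly from the elif ranges:

for lat in (83.0, 33.0, -0.5, -79.9, -85.0):
    print(lat, _UTMLetterDesignator(lat))
# expected letters: X, S, M, C, Z (the last because -85 is outside the UTM limits)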
PmagPy/PmagPy | data_files/LearningPython/UTM.py | UTMtoLL | def UTMtoLL(ReferenceEllipsoid, easting, northing, zone):
"""
converts UTM coords to lat/long. Equations from USGS Bulletin 1532
East Longitudes are positive, West longitudes are negative.
North latitudes are positive, South latitudes are negative
Lat and Long are in decimal degrees.
Written by Chuck Gantz- [email protected]
Converted to Python by Russ Nelson <[email protected]>
"""
k0 = 0.9996
a = _ellipsoid[ReferenceEllipsoid][_EquatorialRadius]
eccSquared = _ellipsoid[ReferenceEllipsoid][_eccentricitySquared]
e1 = old_div((1-sqrt(1-eccSquared)),(1+sqrt(1-eccSquared)))
    #NorthernHemisphere; //1 for northern hemisphere, 0 for southern
x = easting-500000.0 #remove 500,000 meter offset for longitude
y = northing
ZoneLetter = zone[-1]
if ZoneLetter == 'Z':
raise Exception("Latitude is outside the UTM limits")
ZoneNumber = int(zone[:-1])
if ZoneLetter >= 'N':
NorthernHemisphere = 1 # point is in northern hemisphere
else:
NorthernHemisphere = 0 # point is in southern hemisphere
y-= 10000000.0 # remove 10,000,000 meter offset used for southern hemisphere
LongOrigin = (ZoneNumber-1)*6-180+3 # +3 puts origin in middle of zone
eccPrimeSquared = old_div((eccSquared),(1-eccSquared))
M = old_div(y, k0)
mu = old_div(M,(a*(1-old_div(eccSquared,4)-3*eccSquared*eccSquared/64-5*eccSquared*eccSquared*eccSquared/256)))
phi1Rad = (mu+(3*e1/2-27*e1*e1*e1/32)*sin(2*mu)
+(21*e1*e1/16-55*e1*e1*e1*e1/32)*sin(4*mu)
+(151*e1*e1*e1/96)*sin(6*mu))
phi1 = degrees(phi1Rad);
N1 = old_div(a,sqrt(1-eccSquared*sin(phi1Rad)*sin(phi1Rad)))
T1 = tan(phi1Rad)*tan(phi1Rad)
C1 = eccPrimeSquared*cos(phi1Rad)*cos(phi1Rad)
R1 = a*(1-eccSquared)/pow(1-eccSquared*sin(phi1Rad)*sin(phi1Rad), 1.5)
D = old_div(x,(N1*k0))
Lat = phi1Rad-(N1*tan(phi1Rad)/R1)*(D*D/2-(5+3*T1+10*C1-4*C1*C1-9*eccPrimeSquared)*D*D*D*D/24
+(61+90*T1+298*C1+45*T1*T1-252*eccPrimeSquared-3*C1*C1)*D*D*D*D*D*D/720)
Lat = degrees(Lat)
Long = old_div((D-(1+2*T1+C1)*D*D*D/6+(5-2*C1+28*T1-3*C1*C1+8*eccPrimeSquared+24*T1*T1)*D*D*D*D*D/120),cos(phi1Rad))
Long = LongOrigin+degrees(Long)
return (Long, Lat) | python | def UTMtoLL(ReferenceEllipsoid, easting, northing, zone):
"""
converts UTM coords to lat/long. Equations from USGS Bulletin 1532
East Longitudes are positive, West longitudes are negative.
North latitudes are positive, South latitudes are negative
Lat and Long are in decimal degrees.
Written by Chuck Gantz- [email protected]
Converted to Python by Russ Nelson <[email protected]>
"""
k0 = 0.9996
a = _ellipsoid[ReferenceEllipsoid][_EquatorialRadius]
eccSquared = _ellipsoid[ReferenceEllipsoid][_eccentricitySquared]
e1 = old_div((1-sqrt(1-eccSquared)),(1+sqrt(1-eccSquared)))
    #NorthernHemisphere; //1 for northern hemisphere, 0 for southern
x = easting-500000.0 #remove 500,000 meter offset for longitude
y = northing
ZoneLetter = zone[-1]
if ZoneLetter == 'Z':
raise Exception("Latitude is outside the UTM limits")
ZoneNumber = int(zone[:-1])
if ZoneLetter >= 'N':
NorthernHemisphere = 1 # point is in northern hemisphere
else:
NorthernHemisphere = 0 # point is in southern hemisphere
y-= 10000000.0 # remove 10,000,000 meter offset used for southern hemisphere
LongOrigin = (ZoneNumber-1)*6-180+3 # +3 puts origin in middle of zone
eccPrimeSquared = old_div((eccSquared),(1-eccSquared))
M = old_div(y, k0)
mu = old_div(M,(a*(1-old_div(eccSquared,4)-3*eccSquared*eccSquared/64-5*eccSquared*eccSquared*eccSquared/256)))
phi1Rad = (mu+(3*e1/2-27*e1*e1*e1/32)*sin(2*mu)
+(21*e1*e1/16-55*e1*e1*e1*e1/32)*sin(4*mu)
+(151*e1*e1*e1/96)*sin(6*mu))
phi1 = degrees(phi1Rad);
N1 = old_div(a,sqrt(1-eccSquared*sin(phi1Rad)*sin(phi1Rad)))
T1 = tan(phi1Rad)*tan(phi1Rad)
C1 = eccPrimeSquared*cos(phi1Rad)*cos(phi1Rad)
R1 = a*(1-eccSquared)/pow(1-eccSquared*sin(phi1Rad)*sin(phi1Rad), 1.5)
D = old_div(x,(N1*k0))
Lat = phi1Rad-(N1*tan(phi1Rad)/R1)*(D*D/2-(5+3*T1+10*C1-4*C1*C1-9*eccPrimeSquared)*D*D*D*D/24
+(61+90*T1+298*C1+45*T1*T1-252*eccPrimeSquared-3*C1*C1)*D*D*D*D*D*D/720)
Lat = degrees(Lat)
Long = old_div((D-(1+2*T1+C1)*D*D*D/6+(5-2*C1+28*T1-3*C1*C1+8*eccPrimeSquared+24*T1*T1)*D*D*D*D*D/120),cos(phi1Rad))
Long = LongOrigin+degrees(Long)
return (Long, Lat) | converts UTM coords to lat/long. Equations from USGS Bulletin 1532
East Longitudes are positive, West longitudes are negative.
North latitudes are positive, South latitudes are negative
Lat and Long are in decimal degrees.
Written by Chuck Gantz- [email protected]
Converted to Python by Russ Nelson <[email protected]> | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/data_files/LearningPython/UTM.py#L152-L206 |
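Example use of the inverse conversion, assuming UTMtoLL and its _ellipsoid table are in scope; the ellipsoid index 23 is an assumption (WGS-84 in the commonly used Gantz ellipsoid list), so check the table in UTM.py before relying on it.

lon, lat = UTMtoLL(23, 500000.0, 4649776.0, '33T')  # easting, northing, zone string
print(lon, lat)   # roughly 15.0, 41.9: easting 500000 sits on zone 33's central meridian (15E)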
PmagPy/PmagPy | programs/magic_gui.py | MainFrame.InitUI | def InitUI(self):
"""
Make main user interface
"""
bSizer0 = wx.StaticBoxSizer(
wx.StaticBox(self.panel, wx.ID_ANY, "Choose MagIC project directory", name='bSizer0'), wx.HORIZONTAL
)
self.dir_path = wx.TextCtrl(self.panel, id=-1, size=(600, 25), style=wx.TE_READONLY)
self.dir_path.SetValue(self.WD)
self.change_dir_button = buttons.GenButton(
self.panel, id=-1, label="change directory", size=(-1, -1), name='change_dir_btn'
)
self.change_dir_button.SetBackgroundColour("#F8F8FF")
self.change_dir_button.InitColours()
self.Bind(wx.EVT_BUTTON, self.on_change_dir_button, self.change_dir_button)
bSizer0.Add(self.change_dir_button, wx.ALIGN_LEFT)
bSizer0.AddSpacer(40)
bSizer0.Add(self.dir_path, wx.ALIGN_CENTER_VERTICAL)
self.bSizer_msg = wx.StaticBoxSizer(wx.StaticBox(
self.panel, wx.ID_ANY, "Message", name='bsizer_msg'),
wx.HORIZONTAL)
self.message = wx.StaticText(self.panel, -1,
label="Some text will be here",
name='messages')
self.bSizer_msg.Add(self.message)
#---sizer 1 ----
bSizer1 = wx.StaticBoxSizer(wx.StaticBox(
self.panel, wx.ID_ANY, "Add information to the data model", name='bSizer1'),
wx.HORIZONTAL)
text = "1. add location data"
self.btn1 = buttons.GenButton(self.panel, id=-1, label=text,
size=(300, 50), name='locations_btn')
self.btn1.SetBackgroundColour("#FDC68A")
self.btn1.InitColours()
self.Bind(wx.EVT_BUTTON, self.make_grid_frame, self.btn1)
text = "2. add site data"
self.btn2 = buttons.GenButton(self.panel, id=-1, label=text,
size=(300, 50), name='sites_btn')
self.btn2.SetBackgroundColour("#6ECFF6")
self.btn2.InitColours()
self.Bind(wx.EVT_BUTTON, self.make_grid_frame, self.btn2)
text = "3. add sample data"
self.btn3 = buttons.GenButton(self.panel, id=-1, label=text,
size=(300, 50), name='samples_btn')
self.btn3.SetBackgroundColour("#C4DF9B")
self.btn3.InitColours()
self.Bind(wx.EVT_BUTTON, self.make_grid_frame, self.btn3)
text = "4. add specimen data"
self.btn4 = buttons.GenButton(self.panel, id=-1,
label=text, size=(300, 50), name='specimens_btn')
self.btn4.SetBackgroundColour("#FDC68A")
self.btn4.InitColours()
self.Bind(wx.EVT_BUTTON, self.make_grid_frame, self.btn4)
text = "5. add age data"
self.btn5 = buttons.GenButton(self.panel, id=-1, label=text,
size=(300, 50), name='ages_btn')
self.btn5.SetBackgroundColour("#6ECFF6")
self.btn5.InitColours()
self.Bind(wx.EVT_BUTTON, self.make_grid_frame, self.btn5)
text = "6. add measurements data"
self.btn6 = buttons.GenButton(self.panel, id=-1, label=text,
size=(300, 50), name='measurements_btn')
self.btn6.SetBackgroundColour("#C4DF9B")
self.btn6.InitColours()
self.Bind(wx.EVT_BUTTON, self.make_grid_frame, self.btn6)
bsizer1a = wx.BoxSizer(wx.VERTICAL)
bsizer1a.AddSpacer(20)
bsizer1a.Add(self.btn1, wx.ALIGN_TOP)
bsizer1a.AddSpacer(20)
bsizer1a.Add(self.btn2, wx.ALIGN_TOP)
bsizer1a.AddSpacer(20)
bsizer1a.Add(self.btn3, wx.ALIGN_TOP)
bsizer1a.AddSpacer(20)
bSizer1.Add(bsizer1a, wx.ALIGN_CENTER, wx.EXPAND)
bSizer1.AddSpacer(20)
#bSizer1.Add(OR, 0, wx.ALIGN_CENTER, 0)
bSizer1.AddSpacer(20)
bsizer1b = wx.BoxSizer(wx.VERTICAL)
#__init__(self, parent, id, label, pos, size, style, validator, name
bsizer1b.Add(self.btn4, flag=wx.ALIGN_CENTER|wx.BOTTOM, border=20)
bsizer1b.Add(self.btn5, 0, flag=wx.ALIGN_CENTER|wx.BOTTOM, border=20)
bsizer1b.Add(self.btn6, 0, wx.ALIGN_CENTER, 0)
bSizer1.Add(bsizer1b, 0, wx.ALIGN_CENTER, 0)
bSizer1.AddSpacer(20)
#---sizer 2 ----
self.bSizer2 = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "Create file for upload to MagIC database", name='bSizer2'), wx.HORIZONTAL)
text = "prepare upload txt file"
self.btn_upload = buttons.GenButton(self.panel, id=-1, label=text,
size=(300, 50), name='upload_btn')
self.btn_upload.SetBackgroundColour("#C4DF9B")
self.btn_upload.InitColours()
self.Bind(wx.EVT_BUTTON, self.on_upload_file, self.btn_upload)
self.bSizer2.AddSpacer(20)
self.bSizer2.Add(self.btn_upload, 0, wx.ALIGN_CENTER, 0)
self.bSizer2.AddSpacer(20)
#self.Bind(wx.EVT_BUTTON, self.on_btn_upload, self.btn_upload)
#---arrange sizers ----
self.hbox = wx.BoxSizer(wx.HORIZONTAL)
vbox = wx.BoxSizer(wx.VERTICAL)
vbox.AddSpacer(5)
#vbox.Add(self.logo,0,wx.ALIGN_CENTER,0)
vbox.AddSpacer(5)
vbox.Add(bSizer0, 0, wx.ALIGN_CENTER, 0)
vbox.AddSpacer(10)
#vbox.Add(bSizer0_1, 0, wx.ALIGN_CENTER, 0)
#vbox.AddSpacer(10)
vbox.Add(self.bSizer_msg, 0, wx.ALIGN_CENTER, 0)
self.bSizer_msg.ShowItems(False)
vbox.Add(bSizer1, 0, wx.ALIGN_CENTER, 0)
vbox.AddSpacer(10)
vbox.AddSpacer(10)
self.hbox.AddSpacer(10)
vbox.Add(self.bSizer2, 0, wx.ALIGN_CENTER, 0)
vbox.AddSpacer(10)
self.hbox.Add(vbox, 0, wx.ALIGN_CENTER, 0)
self.hbox.AddSpacer(5)
self.panel.SetSizer(self.hbox)
self.hbox.Fit(self)
# do menu
print("-I- Initializing menu")
menubar = MagICMenu(self)
self.SetMenuBar(menubar)
self.menubar = menubar | python | def InitUI(self):
"""
Make main user interface
"""
bSizer0 = wx.StaticBoxSizer(
wx.StaticBox(self.panel, wx.ID_ANY, "Choose MagIC project directory", name='bSizer0'), wx.HORIZONTAL
)
self.dir_path = wx.TextCtrl(self.panel, id=-1, size=(600, 25), style=wx.TE_READONLY)
self.dir_path.SetValue(self.WD)
self.change_dir_button = buttons.GenButton(
self.panel, id=-1, label="change directory", size=(-1, -1), name='change_dir_btn'
)
self.change_dir_button.SetBackgroundColour("#F8F8FF")
self.change_dir_button.InitColours()
self.Bind(wx.EVT_BUTTON, self.on_change_dir_button, self.change_dir_button)
bSizer0.Add(self.change_dir_button, wx.ALIGN_LEFT)
bSizer0.AddSpacer(40)
bSizer0.Add(self.dir_path, wx.ALIGN_CENTER_VERTICAL)
self.bSizer_msg = wx.StaticBoxSizer(wx.StaticBox(
self.panel, wx.ID_ANY, "Message", name='bsizer_msg'),
wx.HORIZONTAL)
self.message = wx.StaticText(self.panel, -1,
label="Some text will be here",
name='messages')
self.bSizer_msg.Add(self.message)
#---sizer 1 ----
bSizer1 = wx.StaticBoxSizer(wx.StaticBox(
self.panel, wx.ID_ANY, "Add information to the data model", name='bSizer1'),
wx.HORIZONTAL)
text = "1. add location data"
self.btn1 = buttons.GenButton(self.panel, id=-1, label=text,
size=(300, 50), name='locations_btn')
self.btn1.SetBackgroundColour("#FDC68A")
self.btn1.InitColours()
self.Bind(wx.EVT_BUTTON, self.make_grid_frame, self.btn1)
text = "2. add site data"
self.btn2 = buttons.GenButton(self.panel, id=-1, label=text,
size=(300, 50), name='sites_btn')
self.btn2.SetBackgroundColour("#6ECFF6")
self.btn2.InitColours()
self.Bind(wx.EVT_BUTTON, self.make_grid_frame, self.btn2)
text = "3. add sample data"
self.btn3 = buttons.GenButton(self.panel, id=-1, label=text,
size=(300, 50), name='samples_btn')
self.btn3.SetBackgroundColour("#C4DF9B")
self.btn3.InitColours()
self.Bind(wx.EVT_BUTTON, self.make_grid_frame, self.btn3)
text = "4. add specimen data"
self.btn4 = buttons.GenButton(self.panel, id=-1,
label=text, size=(300, 50), name='specimens_btn')
self.btn4.SetBackgroundColour("#FDC68A")
self.btn4.InitColours()
self.Bind(wx.EVT_BUTTON, self.make_grid_frame, self.btn4)
text = "5. add age data"
self.btn5 = buttons.GenButton(self.panel, id=-1, label=text,
size=(300, 50), name='ages_btn')
self.btn5.SetBackgroundColour("#6ECFF6")
self.btn5.InitColours()
self.Bind(wx.EVT_BUTTON, self.make_grid_frame, self.btn5)
text = "6. add measurements data"
self.btn6 = buttons.GenButton(self.panel, id=-1, label=text,
size=(300, 50), name='measurements_btn')
self.btn6.SetBackgroundColour("#C4DF9B")
self.btn6.InitColours()
self.Bind(wx.EVT_BUTTON, self.make_grid_frame, self.btn6)
bsizer1a = wx.BoxSizer(wx.VERTICAL)
bsizer1a.AddSpacer(20)
bsizer1a.Add(self.btn1, wx.ALIGN_TOP)
bsizer1a.AddSpacer(20)
bsizer1a.Add(self.btn2, wx.ALIGN_TOP)
bsizer1a.AddSpacer(20)
bsizer1a.Add(self.btn3, wx.ALIGN_TOP)
bsizer1a.AddSpacer(20)
bSizer1.Add(bsizer1a, wx.ALIGN_CENTER, wx.EXPAND)
bSizer1.AddSpacer(20)
#bSizer1.Add(OR, 0, wx.ALIGN_CENTER, 0)
bSizer1.AddSpacer(20)
bsizer1b = wx.BoxSizer(wx.VERTICAL)
        #__init__(self, parent, id, label, pos, size, style, validator, name)
bsizer1b.Add(self.btn4, flag=wx.ALIGN_CENTER|wx.BOTTOM, border=20)
bsizer1b.Add(self.btn5, 0, flag=wx.ALIGN_CENTER|wx.BOTTOM, border=20)
bsizer1b.Add(self.btn6, 0, wx.ALIGN_CENTER, 0)
bSizer1.Add(bsizer1b, 0, wx.ALIGN_CENTER, 0)
bSizer1.AddSpacer(20)
#---sizer 2 ----
self.bSizer2 = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "Create file for upload to MagIC database", name='bSizer2'), wx.HORIZONTAL)
text = "prepare upload txt file"
self.btn_upload = buttons.GenButton(self.panel, id=-1, label=text,
size=(300, 50), name='upload_btn')
self.btn_upload.SetBackgroundColour("#C4DF9B")
self.btn_upload.InitColours()
self.Bind(wx.EVT_BUTTON, self.on_upload_file, self.btn_upload)
self.bSizer2.AddSpacer(20)
self.bSizer2.Add(self.btn_upload, 0, wx.ALIGN_CENTER, 0)
self.bSizer2.AddSpacer(20)
#self.Bind(wx.EVT_BUTTON, self.on_btn_upload, self.btn_upload)
#---arrange sizers ----
self.hbox = wx.BoxSizer(wx.HORIZONTAL)
vbox = wx.BoxSizer(wx.VERTICAL)
vbox.AddSpacer(5)
#vbox.Add(self.logo,0,wx.ALIGN_CENTER,0)
vbox.AddSpacer(5)
vbox.Add(bSizer0, 0, wx.ALIGN_CENTER, 0)
vbox.AddSpacer(10)
#vbox.Add(bSizer0_1, 0, wx.ALIGN_CENTER, 0)
#vbox.AddSpacer(10)
vbox.Add(self.bSizer_msg, 0, wx.ALIGN_CENTER, 0)
self.bSizer_msg.ShowItems(False)
vbox.Add(bSizer1, 0, wx.ALIGN_CENTER, 0)
vbox.AddSpacer(10)
vbox.AddSpacer(10)
self.hbox.AddSpacer(10)
vbox.Add(self.bSizer2, 0, wx.ALIGN_CENTER, 0)
vbox.AddSpacer(10)
self.hbox.Add(vbox, 0, wx.ALIGN_CENTER, 0)
self.hbox.AddSpacer(5)
self.panel.SetSizer(self.hbox)
self.hbox.Fit(self)
# do menu
print("-I- Initializing menu")
menubar = MagICMenu(self)
self.SetMenuBar(menubar)
self.menubar = menubar | Make main user interface | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/magic_gui.py#L89-L235 |
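Note on the pattern above: InitUI wires every data-type button to the same make_grid_frame handler and relies on each GenButton's wx name (e.g. 'locations_btn') to decide later which grid to open. The standalone sketch below is not part of PmagPy; the window title, labels, and print call are illustrative. It shows the same pattern at a smaller scale: GenButtons inside a StaticBoxSizer, one shared handler, dispatch by button name.

import wx
import wx.lib.buttons as buttons

class DemoFrame(wx.Frame):
    """Minimal sketch of the InitUI layout pattern (assumed names/labels)."""
    def __init__(self):
        super().__init__(None, title="sizer/button demo")
        panel = wx.Panel(self)
        box = wx.StaticBoxSizer(
            wx.StaticBox(panel, wx.ID_ANY, "Add information to the data model"),
            wx.VERTICAL)
        for name, label in [("locations_btn", "1. add location data"),
                            ("sites_btn", "2. add site data")]:
            btn = buttons.GenButton(panel, id=-1, label=label,
                                    size=(300, 50), name=name)
            btn.SetBackgroundColour("#FDC68A")
            btn.InitColours()
            # one handler for all buttons; the wx name carries the grid type
            self.Bind(wx.EVT_BUTTON, self.on_button, btn)
            box.Add(btn, 0, wx.ALL, 10)
        panel.SetSizer(box)
        box.Fit(self)

    def on_button(self, event):
        grid_type = event.GetEventObject().GetName()[:-4]  # strip '_btn'
        print("would open grid:", grid_type)

if __name__ == "__main__":
    app = wx.App(False)
    DemoFrame().Show()
    app.MainLoop()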
PmagPy/PmagPy | programs/magic_gui.py | MainFrame.on_change_dir_button | def on_change_dir_button(self, event=None):
"""
create change directory frame
"""
currentDirectory = self.WD #os.getcwd()
change_dir_dialog = wx.DirDialog(self.panel,
"Choose your working directory to create or edit a MagIC contribution:",
defaultPath=currentDirectory,
style=wx.DD_DEFAULT_STYLE | wx.DD_NEW_DIR_BUTTON | wx.DD_CHANGE_DIR)
result = change_dir_dialog.ShowModal()
if result == wx.ID_CANCEL:
return
if result == wx.ID_OK:
self.WD = change_dir_dialog.GetPath()
self.dir_path.SetValue(self.WD)
change_dir_dialog.Destroy()
self.get_wd_data() | python | def on_change_dir_button(self, event=None):
"""
create change directory frame
"""
currentDirectory = self.WD #os.getcwd()
change_dir_dialog = wx.DirDialog(self.panel,
"Choose your working directory to create or edit a MagIC contribution:",
defaultPath=currentDirectory,
style=wx.DD_DEFAULT_STYLE | wx.DD_NEW_DIR_BUTTON | wx.DD_CHANGE_DIR)
result = change_dir_dialog.ShowModal()
if result == wx.ID_CANCEL:
return
if result == wx.ID_OK:
self.WD = change_dir_dialog.GetPath()
self.dir_path.SetValue(self.WD)
change_dir_dialog.Destroy()
self.get_wd_data() | create change directory frame | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/magic_gui.py#L238-L254 |
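Note on the pattern above: on_change_dir_button is a standard modal wx.DirDialog round trip — show the dialog, update self.WD and the read-only path control only when the user hits OK, destroy the dialog, then reload the working-directory data. The early return on wx.ID_CANCEL skips the Destroy() call; wrapping the modal call in try/finally avoids that. A hedged sketch of the pattern follows (the function name and arguments are illustrative, not PmagPy API).

import wx

def choose_working_directory(parent, current_dir):
    # Modal directory picker: returns the chosen path, or current_dir on cancel.
    dlg = wx.DirDialog(parent,
                       "Choose your working directory:",
                       defaultPath=current_dir,
                       style=wx.DD_DEFAULT_STYLE | wx.DD_NEW_DIR_BUTTON)
    try:
        if dlg.ShowModal() == wx.ID_OK:
            return dlg.GetPath()
        return current_dir
    finally:
        dlg.Destroy()  # runs on both the OK and the cancel path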
PmagPy/PmagPy | programs/magic_gui.py | MainFrame.make_grid_frame | def make_grid_frame(self, event):
"""
Create a GridFrame for data type of the button that was clicked
"""
if self.grid_frame:
print('-I- You already have a grid frame open')
pw.simple_warning("You already have a grid open")
return
try:
grid_type = event.GetButtonObj().Name[:-4] # remove '_btn'
except AttributeError:
grid_type = self.FindWindowById(event.Id).Name[:-4] # remove ('_btn')
wait = wx.BusyInfo('Making {} grid, please wait...'.format(grid_type))
wx.SafeYield()
# propagate site lat/lon info into locations if necessary
if grid_type == 'locations' and 'sites' in self.contribution.tables:
self.contribution.get_min_max_lat_lon()
self.contribution.propagate_cols_up(['lithologies',
'geologic_classes'],
'locations', 'sites')
# propagate lithologies/type/class information from sites to samples/specimens
if grid_type in ['specimens', 'samples']:
self.contribution.propagate_lithology_cols()
# propagate average lat/lon info from samples table if
# available in samples and missing in sites
if grid_type == 'sites':
self.contribution.propagate_average_up(cols=['lat', 'lon', 'height'],
target_df_name='sites',
source_df_name='samples')
self.contribution.propagate_lithology_cols()
# hide mainframe
self.on_open_grid_frame()
# choose appropriate size for grid
if grid_type == 'measurements':
huge = True
else:
huge = False
# make grid frame
self.grid_frame = grid_frame.GridFrame(self.contribution, self.WD,
grid_type, grid_type,
self.panel, huge=huge)
row_string = ""
# paint validations if appropriate
if self.validation_mode:
if grid_type in self.validation_mode:
if grid_type == 'measurements':
skip_cell_render = True
else:
skip_cell_render = False
self.grid_frame.toggle_help(None, "open")
row_problems = self.failing_items[grid_type]["rows"]
missing_columns = self.failing_items[grid_type]["missing_columns"]
missing_groups = self.failing_items[grid_type]["missing_groups"]
#all_cols = row_problems.columns
#col_nums = range(len(all_cols))
#col_pos = dict(zip(all_cols, col_nums))
if len(row_problems):
row_string = "Columns and rows with problem data have been highlighted in blue.\n"
if not skip_cell_render:
row_string += "Cells with problem data are highlighted according to the type of problem.\nRed: incorrect data\n"
row_string += "For full error messages, see {}.".format(grid_type + "_errors.txt")
# reset codes button to show error file instead
self.grid_frame.toggle_codes_btn.SetLabel("Show errors")
self.grid_frame.Bind(wx.EVT_BUTTON, self.grid_frame.show_errors,
self.grid_frame.toggle_codes_btn)
# paint cells
for row in row_problems['num']:
self.grid_frame.grid.paint_invalid_row(row)
mask = row_problems["num"] == row
items = row_problems[mask]
cols = items.dropna(how="all", axis=1).drop(["num", "issues"], axis=1)
for col in cols:
pre, col_name = val_up3.extract_col_name(col)
col_ind = self.grid_frame.grid.col_labels.index(col_name)
self.grid_frame.grid.paint_invalid_cell(row, col_ind,
skip_cell=skip_cell_render)
current_label = self.grid_frame.msg_text.GetLabel()
if len(missing_columns):
col_string = "You are missing the following required columns: {}\n\n".format(", ".join(missing_columns))
else:
col_string = ""
if len(missing_groups):
group_string = "You must have at least one column from each of the following groups: {}\n\n".format(", ".join(missing_groups))
else:
group_string = ""
#
add_text = """{}{}{}""".format(col_string, group_string, row_string)
self.grid_frame.msg_text.SetLabel(add_text)
#self.on_finish_change_dir(self.change_dir_dialog)
self.grid_frame.do_fit(None)
del wait | python | def make_grid_frame(self, event):
"""
Create a GridFrame for data type of the button that was clicked
"""
if self.grid_frame:
print('-I- You already have a grid frame open')
pw.simple_warning("You already have a grid open")
return
try:
grid_type = event.GetButtonObj().Name[:-4] # remove '_btn'
except AttributeError:
grid_type = self.FindWindowById(event.Id).Name[:-4] # remove ('_btn')
wait = wx.BusyInfo('Making {} grid, please wait...'.format(grid_type))
wx.SafeYield()
# propagate site lat/lon info into locations if necessary
if grid_type == 'locations' and 'sites' in self.contribution.tables:
self.contribution.get_min_max_lat_lon()
self.contribution.propagate_cols_up(['lithologies',
'geologic_classes'],
'locations', 'sites')
# propagate lithologies/type/class information from sites to samples/specimens
if grid_type in ['specimens', 'samples']:
self.contribution.propagate_lithology_cols()
# propagate average lat/lon info from samples table if
# available in samples and missing in sites
if grid_type == 'sites':
self.contribution.propagate_average_up(cols=['lat', 'lon', 'height'],
target_df_name='sites',
source_df_name='samples')
self.contribution.propagate_lithology_cols()
# hide mainframe
self.on_open_grid_frame()
# choose appropriate size for grid
if grid_type == 'measurements':
huge = True
else:
huge = False
# make grid frame
self.grid_frame = grid_frame.GridFrame(self.contribution, self.WD,
grid_type, grid_type,
self.panel, huge=huge)
row_string = ""
# paint validations if appropriate
if self.validation_mode:
if grid_type in self.validation_mode:
if grid_type == 'measurements':
skip_cell_render = True
else:
skip_cell_render = False
self.grid_frame.toggle_help(None, "open")
row_problems = self.failing_items[grid_type]["rows"]
missing_columns = self.failing_items[grid_type]["missing_columns"]
missing_groups = self.failing_items[grid_type]["missing_groups"]
#all_cols = row_problems.columns
#col_nums = range(len(all_cols))
#col_pos = dict(zip(all_cols, col_nums))
if len(row_problems):
row_string = "Columns and rows with problem data have been highlighted in blue.\n"
if not skip_cell_render:
row_string += "Cells with problem data are highlighted according to the type of problem.\nRed: incorrect data\n"
row_string += "For full error messages, see {}.".format(grid_type + "_errors.txt")
# reset codes button to show error file instead
self.grid_frame.toggle_codes_btn.SetLabel("Show errors")
self.grid_frame.Bind(wx.EVT_BUTTON, self.grid_frame.show_errors,
self.grid_frame.toggle_codes_btn)
# paint cells
for row in row_problems['num']:
self.grid_frame.grid.paint_invalid_row(row)
mask = row_problems["num"] == row
items = row_problems[mask]
cols = items.dropna(how="all", axis=1).drop(["num", "issues"], axis=1)
for col in cols:
pre, col_name = val_up3.extract_col_name(col)
col_ind = self.grid_frame.grid.col_labels.index(col_name)
self.grid_frame.grid.paint_invalid_cell(row, col_ind,
skip_cell=skip_cell_render)
current_label = self.grid_frame.msg_text.GetLabel()
if len(missing_columns):
col_string = "You are missing the following required columns: {}\n\n".format(", ".join(missing_columns))
else:
col_string = ""
if len(missing_groups):
group_string = "You must have at least one column from each of the following groups: {}\n\n".format(", ".join(missing_groups))
else:
group_string = ""
#
add_text = """{}{}{}""".format(col_string, group_string, row_string)
self.grid_frame.msg_text.SetLabel(add_text)
#self.on_finish_change_dir(self.change_dir_dialog)
self.grid_frame.do_fit(None)
del wait | Create a GridFrame for data type of the button that was clicked | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/magic_gui.py#L287-L378 |
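Note on the validation-painting logic above: make_grid_frame works off a pandas DataFrame of failing rows — for each failing row number it selects that row, drops the columns that are entirely NaN (i.e. did not fail), removes the bookkeeping 'num' and 'issues' columns, and highlights whatever columns remain. The sketch below reproduces only that selection logic; the DataFrame contents and column names are made up, standing in for self.failing_items[grid_type]["rows"].

import pandas as pd

# hypothetical validation output: two failing rows with per-column problems
row_problems = pd.DataFrame({
    "num": [2, 5],
    "issues": ["lat out of range", "lon out of range"],
    "lat": ["95", None],
    "lon": [None, "370"],
})

for row in row_problems["num"]:
    items = row_problems[row_problems["num"] == row]
    # keep only the columns that actually failed for this row
    cols = items.dropna(how="all", axis=1).drop(["num", "issues"], axis=1)
    for col in cols:
        print("would highlight row {}, column {}".format(row, col))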
PmagPy/PmagPy | programs/magic_gui.py | MainFrame.highlight_problems | def highlight_problems(self, has_problems):
"""
Outline grid buttons in red if they have validation errors
"""
if has_problems:
self.validation_mode = set(has_problems)
# highlighting doesn't work with Windows
            if sys.platform in ['win32', 'win64']:
self.message.SetLabel('The following grid(s) have incorrect or incomplete data:\n{}'.format(', '.join(self.validation_mode)))
# highlighting does work with OSX
else:
for dtype in ["specimens", "samples", "sites", "locations", "ages", "measurements"]:
wind = self.FindWindowByName(dtype + '_btn')
if dtype not in has_problems:
wind.Unbind(wx.EVT_PAINT, handler=self.highlight_button)
else:
wind.Bind(wx.EVT_PAINT, self.highlight_button)
self.Refresh()
self.message.SetLabel('Highlighted grids have incorrect or incomplete data')
self.bSizer_msg.ShowItems(True)
# manually fire a paint event to make sure all buttons
# are highlighted/unhighlighted appropriately
paintEvent = wx.CommandEvent(wx.wxEVT_PAINT,
self.GetId())
self.GetEventHandler().ProcessEvent(paintEvent)
else:
self.message.SetLabel("Validated!")
self.bSizer_msg.ShowItems(True)
self.hbox.Fit(self) | python | def highlight_problems(self, has_problems):
"""
Outline grid buttons in red if they have validation errors
"""
if has_problems:
self.validation_mode = set(has_problems)
# highlighting doesn't work with Windows
            if sys.platform in ['win32', 'win64']:
self.message.SetLabel('The following grid(s) have incorrect or incomplete data:\n{}'.format(', '.join(self.validation_mode)))
# highlighting does work with OSX
else:
for dtype in ["specimens", "samples", "sites", "locations", "ages", "measurements"]:
wind = self.FindWindowByName(dtype + '_btn')
if dtype not in has_problems:
wind.Unbind(wx.EVT_PAINT, handler=self.highlight_button)
else:
wind.Bind(wx.EVT_PAINT, self.highlight_button)
self.Refresh()
self.message.SetLabel('Highlighted grids have incorrect or incomplete data')
self.bSizer_msg.ShowItems(True)
# manually fire a paint event to make sure all buttons
# are highlighted/unhighlighted appropriately
paintEvent = wx.CommandEvent(wx.wxEVT_PAINT,
self.GetId())
self.GetEventHandler().ProcessEvent(paintEvent)
else:
self.message.SetLabel("Validated!")
self.bSizer_msg.ShowItems(True)
self.hbox.Fit(self) | Outline grid buttons in red if they have validation errors | https://github.com/PmagPy/PmagPy/blob/c7984f8809bf40fe112e53dcc311a33293b62d0b/programs/magic_gui.py#L402-L431 |
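Note on the pattern above: highlight_problems toggles a paint handler per button instead of restyling the buttons directly — on platforms where this works, each button whose data type is in has_problems gets highlight_button bound to wx.EVT_PAINT, the others get it unbound, and a synthetic paint event forces everything to redraw. The helper below is a hedged sketch of that bind/unbind idea; highlight_button itself is defined elsewhere in magic_gui.py, so the drawing body shown here (a red outline via wx.PaintDC) is an assumption rather than the actual PmagPy implementation.

import wx

def highlight_button(event):
    # Assumed outline-drawing body; the real handler lives in magic_gui.py.
    wind = event.GetEventObject()
    dc = wx.PaintDC(wind)
    dc.SetPen(wx.Pen(wx.RED, 5))
    dc.SetBrush(wx.TRANSPARENT_BRUSH)
    w, h = wind.GetSize()
    dc.DrawRectangle(0, 0, w, h)
    event.Skip()  # let the button draw its normal contents as well

def update_highlighting(frame, dtypes, has_problems):
    # Bind the paint handler only to the buttons that need an outline.
    for dtype in dtypes:
        btn = frame.FindWindowByName(dtype + "_btn")
        if dtype in has_problems:
            btn.Bind(wx.EVT_PAINT, highlight_button)
        else:
            btn.Unbind(wx.EVT_PAINT, handler=highlight_button)
    frame.Refresh()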