repo_name (stringlengths 7-94) | repo_path (stringlengths 4-237) | repo_head_hexsha (stringlengths 40) | content (stringlengths 10-680k) | apis (stringlengths 2-680k) |
---|---|---|---|---|
azhou-determined/kineto | tb_plugin/torch_tb_profiler/profiler/trace.py | 46ed0ce917c1515db29c39cd87b0c5430f5be94e | # -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# --------------------------------------------------------------------------
from enum import IntEnum
from .. import utils
__all__ = ["EventTypes", "create_event"]
logger = utils.get_logger()
DeviceType = IntEnum('DeviceType', ['CPU', 'CUDA'], start=0)
class EventTypes(object):
TRACE = "Trace"
OPERATOR = "Operator"
PROFILER_STEP = "ProfilerStep"
RUNTIME = "Runtime"
KERNEL = "Kernel"
MEMCPY = "Memcpy"
MEMSET = "Memset"
PYTHON = "Python"
MEMORY = "Memory"
Supported_EventTypes = [v for k, v in vars(EventTypes).items() if not k.startswith("_") and v != EventTypes.PROFILER_STEP]
class BaseEvent(object):
def __init__(self, type, data):
self.type = type
self.name = data.get("name")
self.ts = data.get("ts")
self.pid = data.get("pid")
self.tid = data.get("tid")
self.args = data.get("args", {})
class TraceEvent(BaseEvent):
def __init__(self, type, data):
super().__init__(type, data)
self.category = data.get("cat", "")
self.duration = data.get("dur")
@property
def external_id(self):
extern_id = self.args.get("external id")
if extern_id is None:
extern_id = self.args.get("External id")
return extern_id
@property
def callstack(self):
return self.args.get("Call stack", "")
@property
def input_shape(self):
shape = self.args.get("Input Dims")
if shape is None:
shape = self.args.get("Input dims")
return shape
@property
def input_type(self):
return self.args.get("Input type")
class ProfilerStepEvent(TraceEvent):
def __init__(self, data):
super().__init__(EventTypes.PROFILER_STEP, data)
# torch.profiler.profile.step will invoke record_function with name like "ProfilerStep#5"
self.step = int(self.name.split("#")[1])
class MemoryEvent(BaseEvent):
def __init__(self, type, data):
super().__init__(type, data)
self.scope = data.get("s", "")
@property
def device_type(self):
dtype = self.args.get("Device Type")
if dtype is None:
return None
try:
return DeviceType(dtype)
except ValueError:
return None
@property
def device_id(self):
return self.args.get("Device Id")
@property
def bytes(self):
return self.args.get("Bytes", 0)
def create_event(event):
try:
type = event.get("ph")
if type == "X":
return create_trace_event(event)
elif type == "i" and event.get('s') == 't':
return MemoryEvent(EventTypes.MEMORY, event)
else:
return None
except Exception as ex:
logger.warning("Failed to parse profile event. Exception=%s. Event=%s", ex, event, exc_info=True)
raise
def create_trace_event(event):
category = event.get("cat")
if category == "Operator":
name = event.get("name")
if name and name.startswith("ProfilerStep#"):
return ProfilerStepEvent(event)
if category in Supported_EventTypes:
return TraceEvent(category, event)
else:
return None
| [((345, 392), 'enum.IntEnum', 'IntEnum', (['"""DeviceType"""', "['CPU', 'CUDA']"], {'start': '(0)'}), "('DeviceType', ['CPU', 'CUDA'], start=0)\n", (352, 392), False, 'from enum import IntEnum\n')] |
eval1749/elang | base/base.gyp | 5208b386ba3a3e866a5c0f0271280f79f9aac8c4 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
},
'includes': [
'../build/win_precompile.gypi',
'base.gypi',
],
'targets': [
{
'target_name': 'base',
'type': '<(component)',
'toolsets': ['host', 'target'],
'variables': {
'base_target': 1,
'enable_wexit_time_destructors': 1,
'optimize': 'max',
},
'dependencies': [
'base_static',
'allocator/allocator.gyp:allocator_extension_thunks',
'../testing/gtest.gyp:gtest_prod',
'../third_party/modp_b64/modp_b64.gyp:modp_b64',
'third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
],
# TODO(gregoryd): direct_dependent_settings should be shared with the
# 64-bit target, but it doesn't work due to a bug in gyp
'direct_dependent_settings': {
'include_dirs': [
'..',
],
},
'conditions': [
['desktop_linux == 1 or chromeos == 1', {
'conditions': [
['chromeos==1', {
'sources/': [ ['include', '_chromeos\\.cc$'] ]
}],
],
'dependencies': [
'symbolize',
'xdg_mime',
],
'defines': [
'USE_SYMBOLIZE',
],
}, { # desktop_linux == 0 and chromeos == 0
'sources/': [
['exclude', '/xdg_user_dirs/'],
['exclude', '_nss\\.cc$'],
],
}],
['use_glib==1', {
'dependencies': [
'../build/linux/system.gyp:glib',
],
'export_dependent_settings': [
'../build/linux/system.gyp:glib',
],
}],
['OS == "android" and _toolset == "host"', {
# Always build base as a static_library for host toolset, even if
# we're doing a component build. Specifically, we only care about the
# target toolset using components since that's what developers are
# focusing on. In theory we should do this more generally for all
# targets when building for host, but getting the gyp magic
# per-toolset for the "component" variable is hard, and we really only
# need base on host.
'type': 'static_library',
# Base for host support is the minimum required to run the
# ssl false start blacklist tool. It requires further changes
# to generically support host builds (and tests).
# Note: when building for host, gyp has OS == "android",
# hence the *_android.cc files are included but the actual code
# doesn't have OS_ANDROID / ANDROID defined.
'conditions': [
['host_os == "mac"', {
'sources/': [
['exclude', '^native_library_linux\\.cc$'],
['exclude', '^process_util_linux\\.cc$'],
['exclude', '^sys_info_linux\\.cc$'],
['exclude', '^sys_string_conversions_linux\\.cc$'],
['exclude', '^worker_pool_linux\\.cc$'],
],
}],
],
}],
['OS == "android" and _toolset == "target"', {
'dependencies': [
'base_java',
'base_jni_headers',
'../build/android/ndk.gyp:cpu_features',
'../third_party/ashmem/ashmem.gyp:ashmem',
],
'link_settings': {
'libraries': [
'-llog',
],
},
'sources!': [
'debug/stack_trace_posix.cc',
],
}],
['os_bsd==1', {
'include_dirs': [
'/usr/local/include',
],
'link_settings': {
'libraries': [
'-L/usr/local/lib -lexecinfo',
],
},
}],
['OS == "linux"', {
'link_settings': {
'libraries': [
# We need rt for clock_gettime().
'-lrt',
# For 'native_library_linux.cc'
'-ldl',
],
},
'conditions': [
['use_allocator!="tcmalloc"', {
'defines': [
'NO_TCMALLOC',
],
'direct_dependent_settings': {
'defines': [
'NO_TCMALLOC',
],
},
}],
],
}],
['OS == "win"', {
# Specify delayload for base.dll.
'msvs_settings': {
'VCLinkerTool': {
'DelayLoadDLLs': [
'cfgmgr32.dll',
'powrprof.dll',
'setupapi.dll',
],
'AdditionalDependencies': [
'cfgmgr32.lib',
'powrprof.lib',
'setupapi.lib',
],
},
},
# Specify delayload for components that link with base.lib.
'all_dependent_settings': {
'msvs_settings': {
'VCLinkerTool': {
'DelayLoadDLLs': [
'cfgmgr32.dll',
'powrprof.dll',
'setupapi.dll',
],
'AdditionalDependencies': [
'cfgmgr32.lib',
'powrprof.lib',
'setupapi.lib',
],
},
},
},
'copies': [
{
'destination': '<(PRODUCT_DIR)/',
'files': [
'../build/win/dbghelp_xp/dbghelp.dll',
],
},
],
'dependencies': [
'trace_event/etw_manifest/etw_manifest.gyp:etw_manifest',
],
}],
['OS == "mac" or (OS == "ios" and _toolset == "host")', {
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/AppKit.framework',
'$(SDKROOT)/System/Library/Frameworks/ApplicationServices.framework',
'$(SDKROOT)/System/Library/Frameworks/Carbon.framework',
'$(SDKROOT)/System/Library/Frameworks/CoreFoundation.framework',
'$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
'$(SDKROOT)/System/Library/Frameworks/IOKit.framework',
'$(SDKROOT)/System/Library/Frameworks/Security.framework',
],
},
}],
['OS == "ios" and _toolset != "host"', {
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/CoreFoundation.framework',
'$(SDKROOT)/System/Library/Frameworks/CoreGraphics.framework',
'$(SDKROOT)/System/Library/Frameworks/CoreText.framework',
'$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
'$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
],
},
}],
['OS != "win" and (OS != "ios" or _toolset == "host")', {
'dependencies': ['../third_party/libevent/libevent.gyp:libevent'],
},],
['component=="shared_library"', {
'conditions': [
['OS=="win"', {
'sources!': [
'debug/debug_on_start_win.cc',
],
}],
],
}],
['OS=="ios"', {
'sources!': [
'sync_socket.h',
'sync_socket_posix.cc',
]
}],
],
'sources': [
'auto_reset.h',
'linux_util.cc',
'linux_util.h',
'message_loop/message_pump_android.cc',
'message_loop/message_pump_android.h',
'message_loop/message_pump_glib.cc',
'message_loop/message_pump_glib.h',
'message_loop/message_pump_io_ios.cc',
'message_loop/message_pump_io_ios.h',
'message_loop/message_pump_libevent.cc',
'message_loop/message_pump_libevent.h',
'message_loop/message_pump_mac.h',
'message_loop/message_pump_mac.mm',
'metrics/field_trial.cc',
'metrics/field_trial.h',
'posix/file_descriptor_shuffle.cc',
'posix/file_descriptor_shuffle.h',
'sync_socket.h',
'sync_socket_posix.cc',
'sync_socket_win.cc',
'third_party/xdg_user_dirs/xdg_user_dir_lookup.cc',
'third_party/xdg_user_dirs/xdg_user_dir_lookup.h',
],
'includes': [
'../build/android/increase_size_for_speed.gypi',
],
},
{
'target_name': 'base_i18n',
'type': '<(component)',
'variables': {
'enable_wexit_time_destructors': 1,
'optimize': 'max',
'base_i18n_target': 1,
},
'dependencies': [
'base',
'third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
'../third_party/icu/icu.gyp:icui18n',
'../third_party/icu/icu.gyp:icuuc',
],
'conditions': [
['OS == "win"', {
# TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
'msvs_disabled_warnings': [
4267,
],
}],
['icu_use_data_file_flag==1', {
'defines': ['ICU_UTIL_DATA_IMPL=ICU_UTIL_DATA_FILE'],
}, { # else icu_use_data_file_flag !=1
'conditions': [
['OS=="win"', {
'defines': ['ICU_UTIL_DATA_IMPL=ICU_UTIL_DATA_SHARED'],
}, {
'defines': ['ICU_UTIL_DATA_IMPL=ICU_UTIL_DATA_STATIC'],
}],
],
}],
['OS == "ios"', {
'toolsets': ['host', 'target'],
}],
],
'export_dependent_settings': [
'base',
'../third_party/icu/icu.gyp:icuuc',
'../third_party/icu/icu.gyp:icui18n',
],
'includes': [
'../build/android/increase_size_for_speed.gypi',
],
},
{
'target_name': 'base_message_loop_tests',
'type': 'static_library',
'dependencies': [
'base',
'../testing/gtest.gyp:gtest',
],
'sources': [
'message_loop/message_loop_test.cc',
'message_loop/message_loop_test.h',
],
},
{
'target_name': 'base_prefs',
'type': '<(component)',
'variables': {
'enable_wexit_time_destructors': 1,
'optimize': 'max',
},
'dependencies': [
'base',
],
'export_dependent_settings': [
'base',
],
'defines': [
'BASE_PREFS_IMPLEMENTATION',
],
'sources': [
'prefs/base_prefs_export.h',
'prefs/default_pref_store.cc',
'prefs/default_pref_store.h',
'prefs/json_pref_store.cc',
'prefs/json_pref_store.h',
'prefs/overlay_user_pref_store.cc',
'prefs/overlay_user_pref_store.h',
'prefs/persistent_pref_store.h',
'prefs/pref_change_registrar.cc',
'prefs/pref_change_registrar.h',
'prefs/pref_filter.h',
'prefs/pref_member.cc',
'prefs/pref_member.h',
'prefs/pref_notifier.h',
'prefs/pref_notifier_impl.cc',
'prefs/pref_notifier_impl.h',
'prefs/pref_observer.h',
'prefs/pref_registry.cc',
'prefs/pref_registry.h',
'prefs/pref_registry_simple.cc',
'prefs/pref_registry_simple.h',
'prefs/pref_service.cc',
'prefs/pref_service.h',
'prefs/pref_service_factory.cc',
'prefs/pref_service_factory.h',
'prefs/pref_store.cc',
'prefs/pref_store.h',
'prefs/pref_value_map.cc',
'prefs/pref_value_map.h',
'prefs/pref_value_store.cc',
'prefs/pref_value_store.h',
'prefs/scoped_user_pref_update.cc',
'prefs/scoped_user_pref_update.h',
'prefs/value_map_pref_store.cc',
'prefs/value_map_pref_store.h',
'prefs/writeable_pref_store.h',
],
'includes': [
'../build/android/increase_size_for_speed.gypi',
],
},
{
'target_name': 'base_prefs_test_support',
'type': 'static_library',
'dependencies': [
'base',
'base_prefs',
'../testing/gmock.gyp:gmock',
],
'sources': [
'prefs/mock_pref_change_callback.cc',
'prefs/pref_store_observer_mock.cc',
'prefs/pref_store_observer_mock.h',
'prefs/testing_pref_service.cc',
'prefs/testing_pref_service.h',
'prefs/testing_pref_store.cc',
'prefs/testing_pref_store.h',
],
},
{
# This is the subset of files from base that should not be used with a
# dynamic library. Note that this library cannot depend on base because
# base depends on base_static.
'target_name': 'base_static',
'type': 'static_library',
'variables': {
'enable_wexit_time_destructors': 1,
'optimize': 'max',
},
'toolsets': ['host', 'target'],
'sources': [
'base_switches.cc',
'base_switches.h',
'win/pe_image.cc',
'win/pe_image.h',
],
'include_dirs': [
'..',
],
'includes': [
'../build/android/increase_size_for_speed.gypi',
],
},
# Include this target for a main() function that simply instantiates
# and runs a base::TestSuite.
{
'target_name': 'run_all_unittests',
'type': 'static_library',
'dependencies': [
'test_support_base',
],
'sources': [
'test/run_all_unittests.cc',
],
},
{
'target_name': 'base_unittests',
'type': '<(gtest_target_type)',
'sources': [
'android/application_status_listener_unittest.cc',
'android/content_uri_utils_unittest.cc',
'android/jni_android_unittest.cc',
'android/jni_array_unittest.cc',
'android/jni_string_unittest.cc',
'android/library_loader/library_prefetcher_unittest.cc',
'android/path_utils_unittest.cc',
'android/scoped_java_ref_unittest.cc',
'android/sys_utils_unittest.cc',
'at_exit_unittest.cc',
'atomicops_unittest.cc',
'barrier_closure_unittest.cc',
'base64_unittest.cc',
'base64url_unittest.cc',
'big_endian_unittest.cc',
'bind_unittest.cc',
'bind_unittest.nc',
'bits_unittest.cc',
'build_time_unittest.cc',
'callback_helpers_unittest.cc',
'callback_list_unittest.cc',
'callback_list_unittest.nc',
'callback_unittest.cc',
'callback_unittest.nc',
'cancelable_callback_unittest.cc',
'command_line_unittest.cc',
'containers/adapters_unittest.cc',
'containers/hash_tables_unittest.cc',
'containers/linked_list_unittest.cc',
'containers/mru_cache_unittest.cc',
'containers/scoped_ptr_hash_map_unittest.cc',
'containers/small_map_unittest.cc',
'containers/stack_container_unittest.cc',
'cpu_unittest.cc',
'debug/crash_logging_unittest.cc',
'debug/debugger_unittest.cc',
'debug/leak_tracker_unittest.cc',
'debug/proc_maps_linux_unittest.cc',
'debug/stack_trace_unittest.cc',
'debug/task_annotator_unittest.cc',
'deferred_sequenced_task_runner_unittest.cc',
'environment_unittest.cc',
'feature_list_unittest.cc',
'file_version_info_unittest.cc',
'files/dir_reader_posix_unittest.cc',
'files/file_path_unittest.cc',
'files/file_path_watcher_unittest.cc',
'files/file_proxy_unittest.cc',
'files/file_unittest.cc',
'files/file_util_proxy_unittest.cc',
'files/file_util_unittest.cc',
'files/important_file_writer_unittest.cc',
'files/memory_mapped_file_unittest.cc',
'files/scoped_temp_dir_unittest.cc',
'gmock_unittest.cc',
'guid_unittest.cc',
'hash_unittest.cc',
'i18n/break_iterator_unittest.cc',
'i18n/case_conversion_unittest.cc',
'i18n/char_iterator_unittest.cc',
'i18n/file_util_icu_unittest.cc',
'i18n/icu_string_conversions_unittest.cc',
'i18n/message_formatter_unittest.cc',
'i18n/number_formatting_unittest.cc',
'i18n/rtl_unittest.cc',
'i18n/streaming_utf8_validator_unittest.cc',
'i18n/string_search_unittest.cc',
'i18n/time_formatting_unittest.cc',
'i18n/timezone_unittest.cc',
'id_map_unittest.cc',
'ios/crb_protocol_observers_unittest.mm',
'ios/device_util_unittest.mm',
'ios/weak_nsobject_unittest.mm',
'json/json_parser_unittest.cc',
'json/json_reader_unittest.cc',
'json/json_value_converter_unittest.cc',
'json/json_value_serializer_unittest.cc',
'json/json_writer_unittest.cc',
'json/string_escape_unittest.cc',
'lazy_instance_unittest.cc',
'logging_unittest.cc',
'mac/bind_objc_block_unittest.mm',
'mac/call_with_eh_frame_unittest.mm',
'mac/dispatch_source_mach_unittest.cc',
'mac/foundation_util_unittest.mm',
'mac/libdispatch_task_runner_unittest.cc',
'mac/mac_util_unittest.mm',
'mac/objc_property_releaser_unittest.mm',
'mac/scoped_nsobject_unittest.mm',
'mac/scoped_objc_class_swizzler_unittest.mm',
'mac/scoped_sending_event_unittest.mm',
'md5_unittest.cc',
'memory/aligned_memory_unittest.cc',
'memory/discardable_shared_memory_unittest.cc',
'memory/linked_ptr_unittest.cc',
'memory/memory_pressure_listener_unittest.cc',
'memory/memory_pressure_monitor_chromeos_unittest.cc',
'memory/memory_pressure_monitor_mac_unittest.cc',
'memory/memory_pressure_monitor_win_unittest.cc',
'memory/ref_counted_memory_unittest.cc',
'memory/ref_counted_unittest.cc',
'memory/scoped_ptr_unittest.cc',
'memory/scoped_ptr_unittest.nc',
'memory/scoped_vector_unittest.cc',
'memory/shared_memory_unittest.cc',
'memory/shared_memory_mac_unittest.cc',
'memory/singleton_unittest.cc',
'memory/weak_ptr_unittest.cc',
'memory/weak_ptr_unittest.nc',
'message_loop/message_loop_task_runner_unittest.cc',
'message_loop/message_loop_unittest.cc',
'message_loop/message_pump_glib_unittest.cc',
'message_loop/message_pump_io_ios_unittest.cc',
'message_loop/message_pump_libevent_unittest.cc',
'metrics/bucket_ranges_unittest.cc',
'metrics/field_trial_unittest.cc',
'metrics/histogram_base_unittest.cc',
'metrics/histogram_delta_serialization_unittest.cc',
'metrics/histogram_macros_unittest.cc',
'metrics/histogram_snapshot_manager_unittest.cc',
'metrics/histogram_unittest.cc',
'metrics/metrics_hashes_unittest.cc',
'metrics/sample_map_unittest.cc',
'metrics/sample_vector_unittest.cc',
'metrics/sparse_histogram_unittest.cc',
'metrics/statistics_recorder_unittest.cc',
'native_library_unittest.cc',
'numerics/safe_numerics_unittest.cc',
'observer_list_unittest.cc',
'os_compat_android_unittest.cc',
'path_service_unittest.cc',
'pickle_unittest.cc',
'posix/file_descriptor_shuffle_unittest.cc',
'posix/unix_domain_socket_linux_unittest.cc',
'power_monitor/power_monitor_unittest.cc',
'prefs/default_pref_store_unittest.cc',
'prefs/json_pref_store_unittest.cc',
'prefs/mock_pref_change_callback.h',
'prefs/overlay_user_pref_store_unittest.cc',
'prefs/pref_change_registrar_unittest.cc',
'prefs/pref_member_unittest.cc',
'prefs/pref_notifier_impl_unittest.cc',
'prefs/pref_service_unittest.cc',
'prefs/pref_value_map_unittest.cc',
'prefs/pref_value_store_unittest.cc',
'prefs/scoped_user_pref_update_unittest.cc',
'process/memory_unittest.cc',
'process/memory_unittest_mac.h',
'process/memory_unittest_mac.mm',
'process/process_metrics_unittest.cc',
'process/process_metrics_unittest_ios.cc',
'process/process_unittest.cc',
'process/process_util_unittest.cc',
'profiler/stack_sampling_profiler_unittest.cc',
'profiler/tracked_time_unittest.cc',
'rand_util_unittest.cc',
'scoped_clear_errno_unittest.cc',
'scoped_generic_unittest.cc',
'scoped_native_library_unittest.cc',
'security_unittest.cc',
'sequence_checker_unittest.cc',
'sha1_unittest.cc',
'stl_util_unittest.cc',
'strings/nullable_string16_unittest.cc',
'strings/pattern_unittest.cc',
'strings/safe_sprintf_unittest.cc',
'strings/string16_unittest.cc',
'strings/string_number_conversions_unittest.cc',
'strings/string_piece_unittest.cc',
'strings/string_split_unittest.cc',
'strings/string_tokenizer_unittest.cc',
'strings/string_util_unittest.cc',
'strings/stringize_macros_unittest.cc',
'strings/stringprintf_unittest.cc',
'strings/sys_string_conversions_mac_unittest.mm',
'strings/sys_string_conversions_unittest.cc',
'strings/utf_offset_string_conversions_unittest.cc',
'strings/utf_string_conversions_unittest.cc',
'supports_user_data_unittest.cc',
'sync_socket_unittest.cc',
'synchronization/cancellation_flag_unittest.cc',
'synchronization/condition_variable_unittest.cc',
'synchronization/lock_unittest.cc',
'synchronization/waitable_event_unittest.cc',
'synchronization/waitable_event_watcher_unittest.cc',
'sys_info_unittest.cc',
'system_monitor/system_monitor_unittest.cc',
'task/cancelable_task_tracker_unittest.cc',
'task_runner_util_unittest.cc',
'template_util_unittest.cc',
'test/histogram_tester_unittest.cc',
'test/test_pending_task_unittest.cc',
'test/test_reg_util_win_unittest.cc',
'test/trace_event_analyzer_unittest.cc',
'test/user_action_tester_unittest.cc',
'threading/non_thread_safe_unittest.cc',
'threading/platform_thread_unittest.cc',
'threading/sequenced_worker_pool_unittest.cc',
'threading/sequenced_task_runner_handle_unittest.cc',
'threading/simple_thread_unittest.cc',
'threading/thread_checker_unittest.cc',
'threading/thread_collision_warner_unittest.cc',
'threading/thread_id_name_manager_unittest.cc',
'threading/thread_local_storage_unittest.cc',
'threading/thread_local_unittest.cc',
'threading/thread_unittest.cc',
'threading/watchdog_unittest.cc',
'threading/worker_pool_posix_unittest.cc',
'threading/worker_pool_unittest.cc',
'time/pr_time_unittest.cc',
'time/time_unittest.cc',
'time/time_win_unittest.cc',
'timer/hi_res_timer_manager_unittest.cc',
'timer/mock_timer_unittest.cc',
'timer/timer_unittest.cc',
'tools_sanity_unittest.cc',
'tracked_objects_unittest.cc',
'tuple_unittest.cc',
'values_unittest.cc',
'version_unittest.cc',
'vlog_unittest.cc',
'win/dllmain.cc',
'win/enum_variant_unittest.cc',
'win/event_trace_consumer_unittest.cc',
'win/event_trace_controller_unittest.cc',
'win/event_trace_provider_unittest.cc',
'win/i18n_unittest.cc',
'win/iunknown_impl_unittest.cc',
'win/message_window_unittest.cc',
'win/object_watcher_unittest.cc',
'win/pe_image_unittest.cc',
'win/registry_unittest.cc',
'win/scoped_bstr_unittest.cc',
'win/scoped_comptr_unittest.cc',
'win/scoped_handle_unittest.cc',
'win/scoped_process_information_unittest.cc',
'win/scoped_variant_unittest.cc',
'win/shortcut_unittest.cc',
'win/startup_information_unittest.cc',
'win/win_util_unittest.cc',
'win/wrapped_window_proc_unittest.cc',
'<@(trace_event_test_sources)',
],
'dependencies': [
'base',
'base_i18n',
'base_message_loop_tests',
'base_prefs',
'base_prefs_test_support',
'base_static',
'run_all_unittests',
'test_support_base',
'third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
'../testing/gmock.gyp:gmock',
'../testing/gtest.gyp:gtest',
'../third_party/icu/icu.gyp:icui18n',
'../third_party/icu/icu.gyp:icuuc',
],
'includes': ['../build/nocompile.gypi'],
'variables': {
# TODO(ajwong): Is there a way to autodetect this?
'module_dir': 'base'
},
'conditions': [
['OS == "android"', {
'dependencies': [
'android/jni_generator/jni_generator.gyp:jni_generator_tests',
'../testing/android/native_test.gyp:native_test_native_code',
],
}],
['OS == "ios" and _toolset != "host"', {
'sources/': [
# iOS does not support FilePathWatcher.
['exclude', '^files/file_path_watcher_unittest\\.cc$'],
# Only test the iOS-meaningful portion of memory and process_utils.
['exclude', '^memory/discardable_shared_memory_unittest\\.cc$'],
['exclude', '^memory/shared_memory_unittest\\.cc$'],
['exclude', '^process/memory_unittest'],
['exclude', '^process/process_unittest\\.cc$'],
['exclude', '^process/process_util_unittest\\.cc$'],
['include', '^process/process_util_unittest_ios\\.cc$'],
# iOS does not use message_pump_libevent.
['exclude', '^message_loop/message_pump_libevent_unittest\\.cc$'],
],
'actions': [
{
'action_name': 'copy_test_data',
'variables': {
'test_data_files': [
'test/data',
],
'test_data_prefix': 'base',
},
'includes': [ '../build/copy_test_data_ios.gypi' ],
},
],
}],
['desktop_linux == 1 or chromeos == 1', {
'defines': [
'USE_SYMBOLIZE',
],
'sources!': [
'file_version_info_unittest.cc',
],
'conditions': [
[ 'desktop_linux==1', {
'sources': [
'nix/xdg_util_unittest.cc',
],
}],
],
}],
['use_glib == 1', {
'dependencies': [
'../build/linux/system.gyp:glib',
],
}, { # use_glib == 0
'sources!': [
'message_loop/message_pump_glib_unittest.cc',
]
}],
['use_ozone == 1', {
'sources!': [
'message_loop/message_pump_glib_unittest.cc',
]
}],
['OS == "linux"', {
'dependencies': [
'malloc_wrapper',
],
'conditions': [
['use_allocator!="none"', {
'dependencies': [
'allocator/allocator.gyp:allocator',
],
}],
]},
],
[ 'OS == "win" and target_arch == "x64"', {
'sources': [
'profiler/win32_stack_frame_unwinder_unittest.cc',
],
'dependencies': [
'base_profiler_test_support_library',
],
}],
['OS == "win"', {
'sources!': [
'file_descriptor_shuffle_unittest.cc',
'files/dir_reader_posix_unittest.cc',
'message_loop/message_pump_libevent_unittest.cc',
'threading/worker_pool_posix_unittest.cc',
],
# TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
'msvs_disabled_warnings': [
4267,
],
'conditions': [
# This is needed so base_unittests uses the allocator shim, as
# SecurityTest.MemoryAllocationRestriction* tests are dependent
# on tcmalloc.
# TODO(wfh): crbug.com/246278 Move tcmalloc specific tests into
# their own test suite.
['win_use_allocator_shim==1', {
'dependencies': [
'allocator/allocator.gyp:allocator',
],
}],
['icu_use_data_file_flag==0', {
# This is needed to trigger the dll copy step on windows.
# TODO(mark): This should not be necessary.
'dependencies': [
'../third_party/icu/icu.gyp:icudata',
],
}],
],
}, { # OS != "win"
'dependencies': [
'../third_party/libevent/libevent.gyp:libevent'
],
}],
], # conditions
'target_conditions': [
['OS == "ios" and _toolset != "host"', {
'sources/': [
# Pull in specific Mac files for iOS (which have been filtered out
# by file name rules).
['include', '^mac/bind_objc_block_unittest\\.mm$'],
['include', '^mac/foundation_util_unittest\\.mm$',],
['include', '^mac/objc_property_releaser_unittest\\.mm$'],
['include', '^mac/scoped_nsobject_unittest\\.mm$'],
['include', '^sys_string_conversions_mac_unittest\\.mm$'],
],
}],
['OS == "android"', {
'sources/': [
['include', '^debug/proc_maps_linux_unittest\\.cc$'],
],
}],
# Enable more direct string conversions on platforms with native utf8
# strings
['OS=="mac" or OS=="ios" or <(chromeos)==1 or <(chromecast)==1', {
'defines': ['SYSTEM_NATIVE_UTF8'],
}],
# SyncSocket isn't used on iOS
['OS=="ios"', {
'sources!': [
'sync_socket_unittest.cc',
],
}],
], # target_conditions
},
{
# GN: //base:base_perftests
'target_name': 'base_perftests',
'type': '<(gtest_target_type)',
'dependencies': [
'base',
'test_support_base',
'../testing/gtest.gyp:gtest',
],
'sources': [
'message_loop/message_pump_perftest.cc',
'test/run_all_unittests.cc',
'threading/thread_perftest.cc',
'../testing/perf/perf_test.cc'
],
'conditions': [
['OS == "android"', {
'dependencies': [
'../testing/android/native_test.gyp:native_test_native_code',
],
}],
],
},
{
# GN: //base:base_i18n_perftests
'target_name': 'base_i18n_perftests',
'type': '<(gtest_target_type)',
'dependencies': [
'test_support_base',
'test_support_perf',
'../testing/gtest.gyp:gtest',
'base_i18n',
'base',
],
'sources': [
'i18n/streaming_utf8_validator_perftest.cc',
],
},
{
# GN: //base/test:test_support
'target_name': 'test_support_base',
'type': 'static_library',
'dependencies': [
'base',
'base_static',
'base_i18n',
'../testing/gmock.gyp:gmock',
'../testing/gtest.gyp:gtest',
'../third_party/icu/icu.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
],
'export_dependent_settings': [
'base',
],
'conditions': [
['os_posix==0', {
'sources!': [
'test/scoped_locale.cc',
'test/scoped_locale.h',
],
}],
['os_bsd==1', {
'sources!': [
'test/test_file_util_linux.cc',
],
}],
['OS == "android"', {
'dependencies': [
'base_unittests_jni_headers',
'base_java_unittest_support',
],
}],
['OS == "ios"', {
'toolsets': ['host', 'target'],
}],
],
'sources': [
'test/gtest_util.cc',
'test/gtest_util.h',
'test/gtest_xml_unittest_result_printer.cc',
'test/gtest_xml_unittest_result_printer.h',
'test/gtest_xml_util.cc',
'test/gtest_xml_util.h',
'test/histogram_tester.cc',
'test/histogram_tester.h',
'test/icu_test_util.cc',
'test/icu_test_util.h',
'test/ios/wait_util.h',
'test/ios/wait_util.mm',
'test/launcher/test_launcher.cc',
'test/launcher/test_launcher.h',
'test/launcher/test_result.cc',
'test/launcher/test_result.h',
'test/launcher/test_results_tracker.cc',
'test/launcher/test_results_tracker.h',
'test/launcher/unit_test_launcher.cc',
'test/launcher/unit_test_launcher.h',
'test/launcher/unit_test_launcher_ios.cc',
'test/mock_chrome_application_mac.h',
'test/mock_chrome_application_mac.mm',
'test/mock_devices_changed_observer.cc',
'test/mock_devices_changed_observer.h',
'test/mock_entropy_provider.cc',
'test/mock_entropy_provider.h',
'test/mock_log.cc',
'test/mock_log.h',
'test/multiprocess_test.cc',
'test/multiprocess_test.h',
'test/multiprocess_test_android.cc',
'test/null_task_runner.cc',
'test/null_task_runner.h',
'test/opaque_ref_counted.cc',
'test/opaque_ref_counted.h',
'test/perf_log.cc',
'test/perf_log.h',
'test/perf_test_suite.cc',
'test/perf_test_suite.h',
'test/perf_time_logger.cc',
'test/perf_time_logger.h',
'test/power_monitor_test_base.cc',
'test/power_monitor_test_base.h',
'test/scoped_locale.cc',
'test/scoped_locale.h',
'test/scoped_path_override.cc',
'test/scoped_path_override.h',
'test/sequenced_task_runner_test_template.cc',
'test/sequenced_task_runner_test_template.h',
'test/sequenced_worker_pool_owner.cc',
'test/sequenced_worker_pool_owner.h',
'test/simple_test_clock.cc',
'test/simple_test_clock.h',
'test/simple_test_tick_clock.cc',
'test/simple_test_tick_clock.h',
'test/task_runner_test_template.cc',
'test/task_runner_test_template.h',
'test/test_discardable_memory_allocator.cc',
'test/test_discardable_memory_allocator.h',
'test/test_file_util.cc',
'test/test_file_util.h',
'test/test_file_util_android.cc',
'test/test_file_util_linux.cc',
'test/test_file_util_mac.cc',
'test/test_file_util_posix.cc',
'test/test_file_util_win.cc',
'test/test_io_thread.cc',
'test/test_io_thread.h',
'test/test_listener_ios.h',
'test/test_listener_ios.mm',
'test/test_mock_time_task_runner.cc',
'test/test_mock_time_task_runner.h',
'test/test_pending_task.cc',
'test/test_pending_task.h',
'test/test_reg_util_win.cc',
'test/test_reg_util_win.h',
'test/test_shortcut_win.cc',
'test/test_shortcut_win.h',
'test/test_simple_task_runner.cc',
'test/test_simple_task_runner.h',
'test/test_suite.cc',
'test/test_suite.h',
'test/test_support_android.cc',
'test/test_support_android.h',
'test/test_support_ios.h',
'test/test_support_ios.mm',
'test/test_switches.cc',
'test/test_switches.h',
'test/test_timeouts.cc',
'test/test_timeouts.h',
'test/test_ui_thread_android.cc',
'test/test_ui_thread_android.h',
'test/thread_test_helper.cc',
'test/thread_test_helper.h',
'test/trace_event_analyzer.cc',
'test/trace_event_analyzer.h',
'test/trace_to_file.cc',
'test/trace_to_file.h',
'test/user_action_tester.cc',
'test/user_action_tester.h',
'test/values_test_util.cc',
'test/values_test_util.h',
],
'target_conditions': [
['OS == "ios"', {
'sources/': [
# Pull in specific Mac files for iOS (which have been filtered out
# by file name rules).
['include', '^test/test_file_util_mac\\.cc$'],
],
}],
['OS == "ios" and _toolset == "target"', {
'sources!': [
# iOS uses its own unit test launcher.
'test/launcher/unit_test_launcher.cc',
],
}],
['OS == "ios" and _toolset == "host"', {
'sources!': [
'test/launcher/unit_test_launcher_ios.cc',
'test/test_support_ios.h',
'test/test_support_ios.mm',
],
}],
], # target_conditions
},
{
'target_name': 'test_support_perf',
'type': 'static_library',
'dependencies': [
'base',
'test_support_base',
'../testing/gtest.gyp:gtest',
],
'sources': [
'test/run_all_perftests.cc',
],
'direct_dependent_settings': {
'defines': [
'PERF_TEST',
],
},
},
{
'target_name': 'test_launcher_nacl_nonsfi',
'conditions': [
['disable_nacl==0 and disable_nacl_untrusted==0 and enable_nacl_nonsfi_test==1', {
'type': 'static_library',
'sources': [
'test/launcher/test_launcher_nacl_nonsfi.cc',
],
'dependencies': [
'test_support_base',
],
}, {
'type': 'none',
}],
],
},
],
'conditions': [
['OS=="ios" and "<(GENERATOR)"=="ninja"', {
'targets': [
{
'target_name': 'test_launcher',
'toolsets': ['host'],
'type': 'executable',
'dependencies': [
'test_support_base',
],
'sources': [
'test/launcher/test_launcher_ios.cc',
],
},
],
}],
['OS!="ios"', {
'targets': [
{
# GN: //base:check_example
'target_name': 'check_example',
'type': 'executable',
'sources': [
'check_example.cc',
],
'dependencies': [
'base',
],
},
{
'target_name': 'build_utf8_validator_tables',
'type': 'executable',
'toolsets': ['host'],
'dependencies': [
'base',
'../third_party/icu/icu.gyp:icuuc',
],
'sources': [
'i18n/build_utf8_validator_tables.cc'
],
},
],
}],
['OS == "win" and target_arch=="ia32"', {
'targets': [
# The base_win64 target here allows us to use base for Win64 targets
# (the normal build is 32 bits).
{
'target_name': 'base_win64',
'type': '<(component)',
'variables': {
'base_target': 1,
},
'dependencies': [
'base_static_win64',
'allocator/allocator.gyp:allocator_extension_thunks_win64',
'../third_party/modp_b64/modp_b64.gyp:modp_b64_win64',
'third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations_win64',
'trace_event/etw_manifest/etw_manifest.gyp:etw_manifest',
],
# TODO(gregoryd): direct_dependent_settings should be shared with the
# 32-bit target, but it doesn't work due to a bug in gyp
'direct_dependent_settings': {
'include_dirs': [
'..',
],
},
'defines': [
'BASE_WIN64',
'<@(nacl_win64_defines)',
],
'configurations': {
'Common_Base': {
'msvs_target_platform': 'x64',
},
},
'conditions': [
['component == "shared_library"', {
'sources!': [
'debug/debug_on_start_win.cc',
],
}],
],
# Specify delayload for base_win64.dll.
'msvs_settings': {
'VCLinkerTool': {
'DelayLoadDLLs': [
'cfgmgr32.dll',
'powrprof.dll',
'setupapi.dll',
],
'AdditionalDependencies': [
'cfgmgr32.lib',
'powrprof.lib',
'setupapi.lib',
],
},
},
# Specify delayload for components that link with base_win64.lib.
'all_dependent_settings': {
'msvs_settings': {
'VCLinkerTool': {
'DelayLoadDLLs': [
'cfgmgr32.dll',
'powrprof.dll',
'setupapi.dll',
],
'AdditionalDependencies': [
'cfgmgr32.lib',
'powrprof.lib',
'setupapi.lib',
],
},
},
},
# TODO(rvargas): Bug 78117. Remove this.
'msvs_disabled_warnings': [
4244,
4996,
4267,
],
'sources': [
'auto_reset.h',
'linux_util.cc',
'linux_util.h',
'md5.cc',
'md5.h',
'message_loop/message_pump_libevent.cc',
'message_loop/message_pump_libevent.h',
'metrics/field_trial.cc',
'metrics/field_trial.h',
'posix/file_descriptor_shuffle.cc',
'posix/file_descriptor_shuffle.h',
'sync_socket.h',
'sync_socket_posix.cc',
'sync_socket_win.cc',
'third_party/xdg_user_dirs/xdg_user_dir_lookup.cc',
'third_party/xdg_user_dirs/xdg_user_dir_lookup.h',
],
},
{
'target_name': 'base_i18n_nacl_win64',
'type': '<(component)',
# TODO(gregoryd): direct_dependent_settings should be shared with the
# 32-bit target, but it doesn't work due to a bug in gyp
'direct_dependent_settings': {
'include_dirs': [
'..',
],
},
'defines': [
'<@(nacl_win64_defines)',
'BASE_I18N_IMPLEMENTATION',
],
'include_dirs': [
'..',
],
'sources': [
'i18n/icu_util_nacl_win64.cc',
],
'configurations': {
'Common_Base': {
'msvs_target_platform': 'x64',
},
},
},
{
# TODO(rvargas): Remove this when gyp finally supports a clean model.
# See bug 36232.
'target_name': 'base_static_win64',
'type': 'static_library',
'sources': [
'base_switches.cc',
'base_switches.h',
'win/pe_image.cc',
'win/pe_image.h',
],
'sources!': [
# base64.cc depends on modp_b64.
'base64.cc',
],
'include_dirs': [
'..',
],
'configurations': {
'Common_Base': {
'msvs_target_platform': 'x64',
},
},
'defines': [
'<@(nacl_win64_defines)',
],
# TODO(rvargas): Bug 78117. Remove this.
'msvs_disabled_warnings': [
4244,
],
},
],
}],
['OS == "win" and target_arch=="x64"', {
'targets': [
{
'target_name': 'base_profiler_test_support_library',
# Must be a shared library so that it can be unloaded during testing.
'type': 'shared_library',
'include_dirs': [
'..',
],
'sources': [
'profiler/test_support_library.cc',
],
},
]
}],
['os_posix==1 and OS!="mac" and OS!="ios"', {
'targets': [
{
'target_name': 'symbolize',
'type': 'static_library',
'toolsets': ['host', 'target'],
'variables': {
'chromium_code': 0,
},
'conditions': [
['OS == "solaris"', {
'include_dirs': [
'/usr/gnu/include',
'/usr/gnu/include/libelf',
],
},],
],
'cflags': [
'-Wno-sign-compare',
],
'cflags!': [
'-Wextra',
],
'defines': [
'GLOG_BUILD_CONFIG_INCLUDE="build/build_config.h"',
],
'sources': [
'third_party/symbolize/config.h',
'third_party/symbolize/demangle.cc',
'third_party/symbolize/demangle.h',
'third_party/symbolize/glog/logging.h',
'third_party/symbolize/glog/raw_logging.h',
'third_party/symbolize/symbolize.cc',
'third_party/symbolize/symbolize.h',
'third_party/symbolize/utilities.h',
],
'include_dirs': [
'..',
],
'includes': [
'../build/android/increase_size_for_speed.gypi',
],
},
{
'target_name': 'xdg_mime',
'type': 'static_library',
'toolsets': ['host', 'target'],
'variables': {
'chromium_code': 0,
},
'cflags!': [
'-Wextra',
],
'sources': [
'third_party/xdg_mime/xdgmime.c',
'third_party/xdg_mime/xdgmime.h',
'third_party/xdg_mime/xdgmimealias.c',
'third_party/xdg_mime/xdgmimealias.h',
'third_party/xdg_mime/xdgmimecache.c',
'third_party/xdg_mime/xdgmimecache.h',
'third_party/xdg_mime/xdgmimeglob.c',
'third_party/xdg_mime/xdgmimeglob.h',
'third_party/xdg_mime/xdgmimeicon.c',
'third_party/xdg_mime/xdgmimeicon.h',
'third_party/xdg_mime/xdgmimeint.c',
'third_party/xdg_mime/xdgmimeint.h',
'third_party/xdg_mime/xdgmimemagic.c',
'third_party/xdg_mime/xdgmimemagic.h',
'third_party/xdg_mime/xdgmimeparent.c',
'third_party/xdg_mime/xdgmimeparent.h',
],
'includes': [
'../build/android/increase_size_for_speed.gypi',
],
},
],
}],
['OS == "linux"', {
'targets': [
{
'target_name': 'malloc_wrapper',
'type': 'shared_library',
'dependencies': [
'base',
],
'sources': [
'test/malloc_wrapper.cc',
],
}
],
}],
['OS == "android"', {
'targets': [
{
# GN: //base:base_jni_headers
'target_name': 'base_jni_headers',
'type': 'none',
'sources': [
'android/java/src/org/chromium/base/ApkAssets.java',
'android/java/src/org/chromium/base/ApplicationStatus.java',
'android/java/src/org/chromium/base/AnimationFrameTimeHistogram.java',
'android/java/src/org/chromium/base/BuildInfo.java',
'android/java/src/org/chromium/base/CommandLine.java',
'android/java/src/org/chromium/base/ContentUriUtils.java',
'android/java/src/org/chromium/base/ContextUtils.java',
'android/java/src/org/chromium/base/CpuFeatures.java',
'android/java/src/org/chromium/base/EventLog.java',
'android/java/src/org/chromium/base/FieldTrialList.java',
'android/java/src/org/chromium/base/ImportantFileWriterAndroid.java',
'android/java/src/org/chromium/base/JNIUtils.java',
'android/java/src/org/chromium/base/JavaHandlerThread.java',
'android/java/src/org/chromium/base/LocaleUtils.java',
'android/java/src/org/chromium/base/MemoryPressureListener.java',
'android/java/src/org/chromium/base/PathService.java',
'android/java/src/org/chromium/base/PathUtils.java',
'android/java/src/org/chromium/base/PowerMonitor.java',
'android/java/src/org/chromium/base/SysUtils.java',
'android/java/src/org/chromium/base/SystemMessageHandler.java',
'android/java/src/org/chromium/base/ThreadUtils.java',
'android/java/src/org/chromium/base/TraceEvent.java',
'android/java/src/org/chromium/base/library_loader/LibraryLoader.java',
'android/java/src/org/chromium/base/metrics/RecordHistogram.java',
'android/java/src/org/chromium/base/metrics/RecordUserAction.java',
],
'variables': {
'jni_gen_package': 'base',
},
'dependencies': [
'android_runtime_jni_headers',
],
'includes': [ '../build/jni_generator.gypi' ],
},
{
# GN: //base:android_runtime_jni_headers
'target_name': 'android_runtime_jni_headers',
'type': 'none',
'variables': {
'jni_gen_package': 'base',
'input_java_class': 'java/lang/Runtime.class',
},
'includes': [ '../build/jar_file_jni_generator.gypi' ],
},
{
# GN: //base:base_unittests_jni_headers
'target_name': 'base_unittests_jni_headers',
'type': 'none',
'sources': [
'test/android/java/src/org/chromium/base/ContentUriTestUtils.java',
'test/android/java/src/org/chromium/base/TestUiThread.java',
],
'variables': {
'jni_gen_package': 'base',
},
'includes': [ '../build/jni_generator.gypi' ],
},
{
# GN: //base:base_native_libraries_gen
'target_name': 'base_native_libraries_gen',
'type': 'none',
'sources': [
'android/java/templates/NativeLibraries.template',
],
'variables': {
'package_name': 'org/chromium/base/library_loader',
'template_deps': [],
},
'includes': [ '../build/android/java_cpp_template.gypi' ],
},
{
# GN: //base:base_multidex_gen
'target_name': 'base_multidex_gen',
'type': 'none',
'sources': [
'android/java/templates/ChromiumMultiDex.template',
],
'variables': {
'package_name': 'org/chromium/base/multidex',
'template_deps': [],
'additional_gcc_preprocess_options': [
'--defines', 'MULTIDEX_CONFIGURATION_<(CONFIGURATION_NAME)',
],
},
'includes': ['../build/android/java_cpp_template.gypi'],
},
{
# GN: //base:base_android_java_enums_srcjar
'target_name': 'base_java_library_process_type',
'type': 'none',
'variables': {
'source_file': 'android/library_loader/library_loader_hooks.h',
},
'includes': [ '../build/android/java_cpp_enum.gypi' ],
},
{
# GN: //base:base_java
'target_name': 'base_java',
'type': 'none',
'variables': {
'java_in_dir': 'android/java',
'jar_excluded_classes': [ '*/NativeLibraries.class' ],
},
'dependencies': [
'base_java_application_state',
'base_java_library_load_from_apk_status_codes',
'base_java_library_process_type',
'base_java_memory_pressure_level',
'base_multidex_gen',
'base_native_libraries_gen',
'../third_party/android_tools/android_tools.gyp:android_support_multidex_javalib',
'../third_party/jsr-305/jsr-305.gyp:jsr_305_javalib',
],
'includes': [ '../build/java.gypi' ],
},
{
# GN: //base:base_java_unittest_support
'target_name': 'base_java_unittest_support',
'type': 'none',
'dependencies': [
'base_java',
],
'variables': {
'java_in_dir': '../base/test/android/java',
},
'includes': [ '../build/java.gypi' ],
},
{
# GN: //base:base_android_java_enums_srcjar
'target_name': 'base_java_application_state',
'type': 'none',
'variables': {
'source_file': 'android/application_status_listener.h',
},
'includes': [ '../build/android/java_cpp_enum.gypi' ],
},
{
# GN: //base:base_android_java_enums_srcjar
'target_name': 'base_java_library_load_from_apk_status_codes',
'type': 'none',
'variables': {
'source_file': 'android/library_loader/library_load_from_apk_status_codes.h'
},
'includes': [ '../build/android/java_cpp_enum.gypi' ],
},
{
# GN: //base:base_android_java_enums_srcjar
'target_name': 'base_java_memory_pressure_level',
'type': 'none',
'variables': {
'source_file': 'memory/memory_pressure_listener.h',
},
'includes': [ '../build/android/java_cpp_enum.gypi' ],
},
{
# GN: //base:base_java_test_support
'target_name': 'base_java_test_support',
'type': 'none',
'dependencies': [
'base_java',
'../testing/android/on_device_instrumentation.gyp:reporter_java',
],
'variables': {
'java_in_dir': '../base/test/android/javatests',
},
'includes': [ '../build/java.gypi' ],
},
{
# TODO(jbudorick): Remove this once we roll to robolectric 3.0 and pull
# in the multidex shadow library. crbug.com/522043
# GN: //base:base_junit_test_support
'target_name': 'base_junit_test_support',
'type': 'none',
'dependencies': [
'../testing/android/junit/junit_test.gyp:junit_test_support',
'../third_party/android_tools/android_tools.gyp:android_support_multidex_javalib',
],
'variables': {
'src_paths': [
'../base/test/android/junit/',
],
},
'includes': [ '../build/host_jar.gypi' ]
},
{
# GN: //base:base_junit_tests
'target_name': 'base_junit_tests',
'type': 'none',
'dependencies': [
'base_java',
'base_java_test_support',
'base_junit_test_support',
'../testing/android/junit/junit_test.gyp:junit_test_support',
],
'variables': {
'main_class': 'org.chromium.testing.local.JunitTestMain',
'src_paths': [
'../base/android/junit/',
],
},
'includes': [ '../build/host_jar.gypi' ],
},
{
# GN: //base:base_javatests
'target_name': 'base_javatests',
'type': 'none',
'dependencies': [
'base_java',
'base_java_test_support',
],
'variables': {
'java_in_dir': '../base/android/javatests',
},
'includes': [ '../build/java.gypi' ],
},
{
# GN: //base/android/linker:chromium_android_linker
'target_name': 'chromium_android_linker',
'type': 'shared_library',
'sources': [
'android/linker/android_dlext.h',
'android/linker/legacy_linker_jni.cc',
'android/linker/legacy_linker_jni.h',
'android/linker/linker_jni.cc',
'android/linker/linker_jni.h',
'android/linker/modern_linker_jni.cc',
'android/linker/modern_linker_jni.h',
],
# The crazy linker is never instrumented.
'cflags!': [
'-finstrument-functions',
],
'dependencies': [
# The NDK contains the crazy_linker here:
# '<(android_ndk_root)/crazy_linker.gyp:crazy_linker'
# However, we use our own fork. See bug 384700.
'../third_party/android_crazy_linker/crazy_linker.gyp:crazy_linker',
],
},
{
# GN: //base:base_perftests_apk
'target_name': 'base_perftests_apk',
'type': 'none',
'dependencies': [
'base_perftests',
],
'variables': {
'test_suite_name': 'base_perftests',
},
'includes': [ '../build/apk_test.gypi' ],
},
{
# GN: //base:base_unittests_apk
'target_name': 'base_unittests_apk',
'type': 'none',
'dependencies': [
'base_java',
'base_unittests',
],
'variables': {
'test_suite_name': 'base_unittests',
'isolate_file': 'base_unittests.isolate',
},
'includes': [ '../build/apk_test.gypi' ],
},
],
'conditions': [
['test_isolation_mode != "noop"',
{
'targets': [
{
'target_name': 'base_unittests_apk_run',
'type': 'none',
'dependencies': [
'base_unittests_apk',
],
'includes': [
'../build/isolate.gypi',
],
'sources': [
'base_unittests_apk.isolate',
],
},
]
}
],
],
}],
['OS == "win"', {
'targets': [
{
# Target to manually rebuild pe_image_test.dll which is checked into
# base/test/data/pe_image.
'target_name': 'pe_image_test',
'type': 'shared_library',
'sources': [
'win/pe_image_test.cc',
],
'msvs_settings': {
'VCLinkerTool': {
'SubSystem': '2', # Set /SUBSYSTEM:WINDOWS
'DelayLoadDLLs': [
'cfgmgr32.dll',
'shell32.dll',
],
'AdditionalDependencies': [
'cfgmgr32.lib',
'shell32.lib',
],
},
},
},
],
}],
['test_isolation_mode != "noop"', {
'targets': [
{
'target_name': 'base_unittests_run',
'type': 'none',
'dependencies': [
'base_unittests',
],
'includes': [
'../build/isolate.gypi',
],
'sources': [
'base_unittests.isolate',
],
},
],
}],
],
}
| [] |
KerimovEmil/ProjectEuler | solutions/PE4.py | bc9cb682181c1ac7889ee57c36d32beae7b441a8 | """
PROBLEM
A palindromic number reads the same both ways. The largest palindrome made from the product of two 2-digit numbers
is 9009 = 91 × 99.
Find the largest palindrome made from the product of two 3-digit numbers.
ANSWER:
906609
Solve time ~ 0.760 seconds
"""
from itertools import product
import unittest
from util.utils import timeit
class Problem4:
def __init__(self, num_digits):
self.lower = 10 ** (num_digits - 1) - 1
self.upper = 10 ** num_digits - 1
@staticmethod
def is_palindrome(num):
return str(num) == str(num)[::-1]
@timeit
def solve(self):
pds = []
for i, j in product(range(self.lower, self.upper), repeat=2):
if self.is_palindrome(i * j):
pds.append(i * j)
return max(pds)
class Solution4(unittest.TestCase):
def setUp(self):
self.problem = Problem4(3)
def test_solution(self):
self.assertEqual(906609, self.problem.solve())
if __name__ == '__main__':
unittest.main()
| [((1013, 1028), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1026, 1028), False, 'import unittest\n')] |
uc-cdis/indexclient | indexclient/parsers/info.py | 5d61bdb2cb9c0104f173d7bba43d92449a093c6d | import sys
import json
import logging
import argparse
import warnings
import requests
from indexclient import errors
# DEPRECATED 11/2019 -- interacts with old `/alias/` endpoint.
# For creating aliases for indexd records, prefer using
# the `add_alias` function, which interacts with the new
# `/index/{GUID}/aliases` endpoint.
def info(host, port, name, **kwargs):
"""
Retrieve info by name.
"""
warnings.warn(
(
"This function is deprecated. For creating aliases for indexd "
"records, prefer using the `add_alias_for_did` function, which "
"interacts with the new `/index/{GUID}/aliases` endpoint."
),
DeprecationWarning,
)
resource = "http://{host}:{port}/alias/{name}".format(
host=host, port=port, name=name
)
res = requests.get(resource)
try:
res.raise_for_status()
except Exception as err:
raise errors.BaseIndexError(res.status_code, res.text)
try:
doc = res.json()
except ValueError as err:
reason = json.dumps({"error": "invalid json payload returned"})
raise errors.BaseIndexError(res.status_code, reason)
sys.stdout.write(json.dumps(doc))
def config(parser):
"""
Configure the info command.
"""
parser.set_defaults(func=info)
parser.add_argument("name", help="name of information to retrieve")
| [((418, 644), 'warnings.warn', 'warnings.warn', (['"""This function is deprecated. For creating aliases for indexd records, prefer using the `add_alias_for_did` function, which interacts with the new `/index/{GUID}/aliases` endpoint."""', 'DeprecationWarning'], {}), "(\n 'This function is deprecated. For creating aliases for indexd records, prefer using the `add_alias_for_did` function, which interacts with the new `/index/{GUID}/aliases` endpoint.'\n , DeprecationWarning)\n", (431, 644), False, 'import warnings\n'), ((828, 850), 'requests.get', 'requests.get', (['resource'], {}), '(resource)\n', (840, 850), False, 'import requests\n'), ((1204, 1219), 'json.dumps', 'json.dumps', (['doc'], {}), '(doc)\n', (1214, 1219), False, 'import json\n'), ((935, 983), 'indexclient.errors.BaseIndexError', 'errors.BaseIndexError', (['res.status_code', 'res.text'], {}), '(res.status_code, res.text)\n', (956, 983), False, 'from indexclient import errors\n'), ((1066, 1120), 'json.dumps', 'json.dumps', (["{'error': 'invalid json payload returned'}"], {}), "({'error': 'invalid json payload returned'})\n", (1076, 1120), False, 'import json\n'), ((1135, 1181), 'indexclient.errors.BaseIndexError', 'errors.BaseIndexError', (['res.status_code', 'reason'], {}), '(res.status_code, reason)\n', (1156, 1181), False, 'from indexclient import errors\n')] |
guilhermebc/docker-playground | email-worker-compose/app/sender.py | e614c314ed2f5ab54835a8c45b4b3eec1ac4c57b | import psycopg2
import redis
import json
from bottle import Bottle, request
class Sender(Bottle):
def __init__(self):
super().__init__()
self.route('/', method='POST', callback=self.send)
self.fila = redis.StrictRedis(host='queue', port=6379, db=0)
DSN = 'dbname=email_sender user=postgress host=db'
self.conn = psycopg2.connect(DSN)
def register_message(self, assunto, mensagem):
SQL = 'INSERT INTO emails (assunto, mensagem) VALUES (%s, %s)'
cur = self.conn.cursor()
cur.execute(SQL, (assunto, mensagem))
self.conn.commit()
cur.close()
msg = {'assunto': assunto, 'mensagem': mensagem}
self.fila.rpush('sender', json.dumps(msg))
print('Message registered!')
def send(self):
assunto = request.forms.get('assunto')
mensagem = request.forms.get('mensagem')
self.register_message(assunto, mensagem)
return 'Message queued! Assunto: {} Mensage: {}'.format(
assunto, mensagem
)
if __name__ == '__main__':
sender = Sender()
sender.run(host='0.0.0.0', port=8080, debug=True) | [((230, 278), 'redis.StrictRedis', 'redis.StrictRedis', ([], {'host': '"""queue"""', 'port': '(6379)', 'db': '(0)'}), "(host='queue', port=6379, db=0)\n", (247, 278), False, 'import redis\n'), ((358, 379), 'psycopg2.connect', 'psycopg2.connect', (['DSN'], {}), '(DSN)\n', (374, 379), False, 'import psycopg2\n'), ((815, 843), 'bottle.request.forms.get', 'request.forms.get', (['"""assunto"""'], {}), "('assunto')\n", (832, 843), False, 'from bottle import Bottle, request\n'), ((863, 892), 'bottle.request.forms.get', 'request.forms.get', (['"""mensagem"""'], {}), "('mensagem')\n", (880, 892), False, 'from bottle import Bottle, request\n'), ((721, 736), 'json.dumps', 'json.dumps', (['msg'], {}), '(msg)\n', (731, 736), False, 'import json\n')] |
zsimic/sandbox | tests/ximpl.py | 3d1571ca723d1a5e80ddecae0ad912160334fee9 | import click
import poyo
import ruamel.yaml
import runez
import strictyaml
import yaml as pyyaml
from zyaml import load_path, load_string, tokens_from_path, tokens_from_string
from zyaml.marshal import decode, default_marshal, represented_scalar
from . import TestSettings
class ImplementationCollection(object):
def __init__(self, names, default="zyaml,ruamel"):
av = [ZyamlImplementation, RuamelImplementation, PyyamlBaseImplementation, PoyoImplementation, StrictImplementation]
self.available = dict((m.name, m()) for m in av)
self.unknown = []
self.selected = []
if names.startswith("+"):
names = "%s,%s" % (names[1:], default)
names = [s.strip() for s in names.split(",")]
names = [s for s in names if s]
seen = {}
for name in names:
found = 0
for i in self.available.values():
if name == "all" or name in i.name:
if i.name not in seen:
seen[i.name] = True
self.selected.append(i)
found += 1
if found == 0:
self.unknown.append(name)
self.combinations = None
def track_result_combination(self, impl, data):
if isinstance(data, Exception):
value = runez.stringified(data)
else:
value = runez.represented_json(data, stringify=decode, keep_none=True, none_key="-null-")
name = impl.name
if self.combinations is None:
self.combinations = {}
for i1 in self.selected:
for i2 in self.selected:
if i1.name < i2.name:
self.combinations[(i1.name, i2.name)] = set()
for names, values in self.combinations.items():
if name in names:
values.add(value)
def __repr__(self):
return ",".join(str(i) for i in self.selected)
def __len__(self):
return len(self.selected)
def __iter__(self):
for i in self.selected:
yield i
class Implementation(object):
"""Implementation of loading a yml file"""
name = None # type: str
def __repr__(self):
return self.name
@classmethod
def option(cls, default="zyaml,ruamel", count=None, **kwargs):
"""
Args:
default (str | None): Default implementation(s) to use
count (int | None): Optional: exact number of implementations that have to specified
**kwargs: Passed-through to click
"""
kwargs["default"] = default
def _callback(_ctx, _param, value):
if not value:
return None
impls = ImplementationCollection(value, default=default)
if impls.unknown:
raise click.BadParameter("Unknown implementation(s): %s" % ", ".join(impls.unknown))
if count and len(impls) != count:
if count == 1:
raise click.BadParameter("Need exactly 1 implementation")
raise click.BadParameter("Need exactly %s" % runez.plural(count, "implementation"))
if count == 1:
return impls.selected[0]
return impls
metavar = "I1,..."
hlp = "Implementation(s)"
if count:
hlp = runez.plural(count, "implementation")
metavar = ",".join("I%s" % (i + 1) for i in range(count))
kwargs.setdefault("help", "%s to use" % hlp)
kwargs.setdefault("show_default", True)
kwargs.setdefault("metavar", metavar)
name = "implementation" if count == 1 else "implementations"
return click.option(name, "-i", callback=_callback, **kwargs)
def show_result(self, data, tokens=False):
rtype = "tokens" if tokens else data.__class__.__name__ if data is not None else "None"
rep = data
if not tokens or isinstance(data, Exception):
rep = TestSettings.represented(data)
message = "---- %s: %s" % (runez.bold(self.name), runez.dim(rtype))
if isinstance(data, NotImplementedError):
print("%s - %s" % (message, rep))
return
print(message)
print(rep)
def get_outcome(self, content, tokens=False):
if tokens:
data = self.tokens(content)
if isinstance(data, list):
data = "\n".join(self.represented_token(t) for t in data)
return data
return self.deserialized(content)
def deserialized(self, source):
value = TestSettings.protected_call(self._deserialized, source)
return self._simplified(value)
def tokens(self, source):
return TestSettings.protected_call(self._tokenize, source)
def represented_token(self, token):
return str(token)
def _deserialized(self, source):
if hasattr(source, "path"):
return self._deserialized_from_path(source.path)
return self._deserialized_from_string(source)
def _deserialized_from_path(self, path):
with open(path) as fh:
return self._deserialized_from_string(fh.read())
def _deserialized_from_string(self, source):
raise NotImplementedError()
def _tokenize(self, source):
if hasattr(source, "path"):
return self._tokens_from_path(source.path)
return self._tokens_from_string(source)
def _tokens_from_path(self, path):
with open(path) as fh:
return TestSettings.unwrapped(self._tokens_from_string(fh.read()))
def _tokens_from_string(self, source):
raise NotImplementedError()
def _simplified(self, value):
if isinstance(value, list) and len(value) == 1:
return value[0]
return value
class ZyamlImplementation(Implementation):
name = "zyaml"
def _deserialized_from_path(self, path):
return load_path(path)
def _deserialized_from_string(self, source):
return load_string(source)
def _tokens_from_path(self, path):
return tokens_from_path(path)
def _tokens_from_string(self, source):
return tokens_from_string(source)
def _simplified(self, value):
return value
def ruamel_passthrough_tags(loader, tag, node):
name = node.__class__.__name__
if "Seq" in name:
result = []
for v in node.value:
result.append(ruamel_passthrough_tags(loader, tag, v))
return result
if "Map" in name:
result = {}
for k, v in node.value:
k = ruamel_passthrough_tags(loader, tag, k)
v = ruamel_passthrough_tags(loader, tag, v)
result[k] = v
return result
return default_marshal(node.value)
class RuamelImplementation(Implementation):
name = "ruamel"
def _deserialized_from_string(self, source):
y = ruamel.yaml.YAML(typ="safe")
ruamel.yaml.add_multi_constructor("", ruamel_passthrough_tags, Loader=ruamel.yaml.SafeLoader)
return y.load_all(source)
def _tokens_from_string(self, source):
return ruamel.yaml.main.scan(source)
class PyyamlBaseImplementation(Implementation):
name = "pyyaml"
def _deserialized_from_string(self, source):
return pyyaml.load_all(source, Loader=pyyaml.BaseLoader)
def _tokens_from_string(self, source):
yaml_loader = pyyaml.BaseLoader(source)
curr = yaml_loader.get_token()
while curr is not None:
yield curr
curr = yaml_loader.get_token()
def represented_token(self, token):
linenum = token.start_mark.line + 1
column = token.start_mark.column + 1
result = "%s[%s,%s]" % (token.__class__.__name__, linenum, column)
value = getattr(token, "value", None)
if value is not None:
if token.id == "<scalar>":
value = represented_scalar(token.style, value)
elif token.id == "<anchor>":
value = "&%s" % value
elif token.id == "<alias>":
value = "*%s" % value
elif token.id == "<tag>":
assert isinstance(value, tuple)
value = " ".join(str(s) for s in runez.flattened(value))
elif token.id == "<directive>":
result += " %s" % token.name
value = " ".join(str(s) for s in runez.flattened(value))
else:
assert False
result = "%s %s" % (result, value)
return result
class PoyoImplementation(Implementation):
name = "poyo"
def _deserialized_from_string(self, source):
return [poyo.parse_string(source)]
class StrictImplementation(Implementation):
name = "strict"
def _deserialized_from_string(self, source):
obj = strictyaml.dirty_load(source, allow_flow_style=True)
return obj.data
| [((6779, 6806), 'zyaml.marshal.default_marshal', 'default_marshal', (['node.value'], {}), '(node.value)\n', (6794, 6806), False, 'from zyaml.marshal import decode, default_marshal, represented_scalar\n'), ((3720, 3774), 'click.option', 'click.option', (['name', '"""-i"""'], {'callback': '_callback'}), "(name, '-i', callback=_callback, **kwargs)\n", (3732, 3774), False, 'import click\n'), ((5964, 5979), 'zyaml.load_path', 'load_path', (['path'], {}), '(path)\n', (5973, 5979), False, 'from zyaml import load_path, load_string, tokens_from_path, tokens_from_string\n'), ((6045, 6064), 'zyaml.load_string', 'load_string', (['source'], {}), '(source)\n', (6056, 6064), False, 'from zyaml import load_path, load_string, tokens_from_path, tokens_from_string\n'), ((6120, 6142), 'zyaml.tokens_from_path', 'tokens_from_path', (['path'], {}), '(path)\n', (6136, 6142), False, 'from zyaml import load_path, load_string, tokens_from_path, tokens_from_string\n'), ((6202, 6228), 'zyaml.tokens_from_string', 'tokens_from_string', (['source'], {}), '(source)\n', (6220, 6228), False, 'from zyaml import load_path, load_string, tokens_from_path, tokens_from_string\n'), ((7324, 7373), 'yaml.load_all', 'pyyaml.load_all', (['source'], {'Loader': 'pyyaml.BaseLoader'}), '(source, Loader=pyyaml.BaseLoader)\n', (7339, 7373), True, 'import yaml as pyyaml\n'), ((7440, 7465), 'yaml.BaseLoader', 'pyyaml.BaseLoader', (['source'], {}), '(source)\n', (7457, 7465), True, 'import yaml as pyyaml\n'), ((8872, 8924), 'strictyaml.dirty_load', 'strictyaml.dirty_load', (['source'], {'allow_flow_style': '(True)'}), '(source, allow_flow_style=True)\n', (8893, 8924), False, 'import strictyaml\n'), ((1336, 1359), 'runez.stringified', 'runez.stringified', (['data'], {}), '(data)\n', (1353, 1359), False, 'import runez\n'), ((1395, 1481), 'runez.represented_json', 'runez.represented_json', (['data'], {'stringify': 'decode', 'keep_none': '(True)', 'none_key': '"""-null-"""'}), "(data, stringify=decode, keep_none=True, none_key=\n '-null-')\n", (1417, 1481), False, 'import runez\n'), ((3380, 3417), 'runez.plural', 'runez.plural', (['count', '"""implementation"""'], {}), "(count, 'implementation')\n", (3392, 3417), False, 'import runez\n'), ((8715, 8740), 'poyo.parse_string', 'poyo.parse_string', (['source'], {}), '(source)\n', (8732, 8740), False, 'import poyo\n'), ((4077, 4098), 'runez.bold', 'runez.bold', (['self.name'], {}), '(self.name)\n', (4087, 4098), False, 'import runez\n'), ((4100, 4116), 'runez.dim', 'runez.dim', (['rtype'], {}), '(rtype)\n', (4109, 4116), False, 'import runez\n'), ((7947, 7985), 'zyaml.marshal.represented_scalar', 'represented_scalar', (['token.style', 'value'], {}), '(token.style, value)\n', (7965, 7985), False, 'from zyaml.marshal import decode, default_marshal, represented_scalar\n'), ((3034, 3085), 'click.BadParameter', 'click.BadParameter', (['"""Need exactly 1 implementation"""'], {}), "('Need exactly 1 implementation')\n", (3052, 3085), False, 'import click\n'), ((3148, 3185), 'runez.plural', 'runez.plural', (['count', '"""implementation"""'], {}), "(count, 'implementation')\n", (3160, 3185), False, 'import runez\n'), ((8281, 8303), 'runez.flattened', 'runez.flattened', (['value'], {}), '(value)\n', (8296, 8303), False, 'import runez\n'), ((8444, 8466), 'runez.flattened', 'runez.flattened', (['value'], {}), '(value)\n', (8459, 8466), False, 'import runez\n')] |
kwangilkimkenny/chatbot_seq2seq_flask | ai_analysis.py | f2f3bda9311c5f2930aebc8ae4a6497597b190e1 |
import pandas as pd
import numpy as np
import re
import pickle
# plotting
import seaborn as sns
import matplotlib.pyplot as plt
# Tune learning_rate
from numpy import loadtxt
from xgboost import XGBClassifier
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import StratifiedKFold
# First XGBoost model for MBTI dataset
from numpy import loadtxt
from xgboost import XGBClassifier
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
##### Compute list of subject with Type | list of comments
from nltk.stem import PorterStemmer, WordNetLemmatizer
from nltk.corpus import stopwords
from nltk import word_tokenize
import nltk
nltk.download('wordnet')
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.manifold import TSNE
# Convert the MBTI type string into binary indicator values
def get_types(row):
t=row['type']
I = 0; N = 0
T = 0; J = 0
if t[0] == 'I': I = 1
elif t[0] == 'E': I = 0
else: print('I-E incorrect')
if t[1] == 'N': N = 1
elif t[1] == 'S': N = 0
else: print('N-S incorrect')
if t[2] == 'T': T = 1
elif t[2] == 'F': T = 0
else: print('T-F incorrect')
if t[3] == 'J': J = 1
elif t[3] == 'P': J = 0
else: print('J-P incorrect')
return pd.Series( {'IE':I, 'NS':N , 'TF': T, 'JP': J })
# Dictionary mapping each MBTI letter to its binary value
b_Pers = {'I':0, 'E':1, 'N':0, 'S':1, 'F':0, 'T':1, 'J':0, 'P':1}
# List of per-axis dictionaries mapping binary values back to MBTI letters
b_Pers_list = [{0:'I', 1:'E'}, {0:'N', 1:'S'}, {0:'F', 1:'T'}, {0:'J', 1:'P'}]
def translate_personality(personality):
# transform mbti to binary vector
return [b_Pers[l] for l in personality]
def translate_back(personality):
# transform binary vector to mbti personality
s = ""
for i, l in enumerate(personality):
s += b_Pers_list[i][l]
return s
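# Example, based on the b_Pers / b_Pers_list mappings above:
#   translate_personality('INFJ') -> [0, 0, 0, 0]
#   translate_back([1, 0, 1, 1])  -> 'ENTP'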
# We want to remove these from the posts
unique_type_list = ['INFJ', 'ENTP', 'INTP', 'INTJ', 'ENTJ', 'ENFJ', 'INFP', 'ENFP',
'ISFP', 'ISTP', 'ISFJ', 'ISTJ', 'ESTP', 'ESFP', 'ESTJ', 'ESFJ']
unique_type_list = [x.lower() for x in unique_type_list]
# Lemmatize
stemmer = PorterStemmer()
lemmatiser = WordNetLemmatizer()
# Cache the stop words for speed
cachedStopWords = stopwords.words("english")
def pre_process_data(data, remove_stop_words=True, remove_mbti_profiles=True):
list_personality = []
list_posts = []
len_data = len(data)
i=0
for row in data.iterrows():
i+=1
if (i % 500 == 0 or i == 1 or i == len_data):
print("%s of %s rows" % (i, len_data))
##### Remove and clean comments
posts = row[1].posts
temp = re.sub('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', ' ', posts)
temp = re.sub("[^a-zA-Z]", " ", temp)
temp = re.sub(' +', ' ', temp).lower()
if remove_stop_words:
temp = " ".join([lemmatiser.lemmatize(w) for w in temp.split(' ') if w not in cachedStopWords])
else:
temp = " ".join([lemmatiser.lemmatize(w) for w in temp.split(' ')])
if remove_mbti_profiles:
for t in unique_type_list:
temp = temp.replace(t,"")
type_labelized = translate_personality(row[1].type)
list_personality.append(type_labelized)
list_posts.append(temp)
list_posts = np.array(list_posts)
list_personality = np.array(list_personality)
return list_posts, list_personality
# read data
# data = pd.read_csv('/Users/jongphilkim/Desktop/Django_WEB/essayfitaiproject_2020_12_09/essayai/mbti_1.csv')
data = pd.read_csv('./mbti/mbti_1.csv')
# Apply the get_types function
data = data.join(data.apply (lambda row: get_types (row),axis=1))
# load
with open('./mbti/list_posts.pickle', 'rb') as f:
list_posts = pickle.load(f)
# load
with open('./mbti/list_personality.pickle', 'rb') as f:
list_personality = pickle.load(f)
# # Posts to a matrix of token counts
cntizer = CountVectorizer(analyzer="word",
max_features=1500,
tokenizer=None,
preprocessor=None,
stop_words=None,
max_df=0.7,
min_df=0.1)
# Learn the vocabulary dictionary and return term-document matrix
print("CountVectorizer...")
X_cnt = cntizer.fit_transform(list_posts)
#################################################
#save/load the X_cnt matrix
import pickle
# save
# with open('./essayai/ai_character/mbti/data_X_cnt.pickle', 'wb') as f:
# pickle.dump(X_cnt, f, pickle.HIGHEST_PROTOCOL)
# load
with open('./mbti/data_X_cnt.pickle', 'rb') as f:
X_cnt = pickle.load(f)
#################################################
# Transform the count matrix to a normalized tf or tf-idf representation
tfizer = TfidfTransformer()
print("Tf-idf...")
# Learn the idf vector (fit) and transform a count matrix to a tf-idf representation
X_tfidf = tfizer.fit_transform(X_cnt).toarray()
# load
with open('./mbti/data.pickle', 'rb') as f:
X_tfidf = pickle.load(f)
def mbti_classify(text):
type_indicators = [ "IE: Introversion (I) / Extroversion (E)", "NS: Intuition (N) – Sensing (S)",
"FT: Feeling (F) - Thinking (T)", "JP: Judging (J) – Perceiving (P)" ]
# Posts in tf-idf representation
X = X_tfidf
my_posts = str(text)
    # The type is just a dummy so that the data prep function can be reused
mydata = pd.DataFrame(data={'type': ['INFJ'], 'posts': [my_posts]})
my_posts, dummy = pre_process_data(mydata, remove_stop_words=True)
my_X_cnt = cntizer.transform(my_posts)
my_X_tfidf = tfizer.transform(my_X_cnt).toarray()
# setup parameters for xgboost
param = {}
param['n_estimators'] = 200
param['max_depth'] = 2
param['nthread'] = 8
param['learning_rate'] = 0.2
result = []
# Let's train type indicator individually
for l in range(len(type_indicators)):
print("%s ..." % (type_indicators[l]))
Y = list_personality[:,l]
# split data into train and test sets
seed = 7
test_size = 0.33
X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=test_size, random_state=seed)
# fit model on training data
model = XGBClassifier(**param)
model.fit(X_train, y_train)
# make predictions for my data
y_pred = model.predict(my_X_tfidf)
result.append(y_pred[0])
# print("* %s prediction: %s" % (type_indicators[l], y_pred))
print("The result is: ", translate_back(result))
    # Collect the predicted letters in a list
Result_list = list(translate_back(result))
    # Print the explanation for each letter of the MBTI result
# read data
# data = pd.read_csv('/Users/jongphilkim/Desktop/Django_WEB/essayfitaiproject/essayai/mbti_exp.csv')
data = pd.read_csv('./mbti/mbti_exp.csv')
    # Create a new dataframe to hold the computed values
df2 = pd.DataFrame(index=range(0,4),columns=['Type', 'Explain'])
    # Look up each letter from the result list and fill in its explanation
for i in range(0, len(Result_list)):
type = Result_list[i]
for j in range(0, len(data)):
if type == data.iloc[j,0]:
break
is_mbti = data.iloc[j,2]
df2.iloc[i, [0,1]] = [type, is_mbti]
print(df2)
return df2
# my_posts = """Describe a place or environment where you are perfectly content. What do you do or experience there, and why is it meaningful to you? 644 words out of 650 Gettysburg, a small town in the middle of Pennsylvania, was the sight of the largest, bloodiest battle in the Civil War. Something about these hallowed grounds draws me back every year for a three day camping trip with my family over Labor Day weekend. Every year, once school starts, I count the days until I take that three and half hour drive from Pittsburgh to Gettysburg. Each year, we leave after school ends on Friday and arrive in Gettysburg with just enough daylight to pitch the tents and cook up a quick dinner on the campfire. As more of the extended family arrives, we circle around the campfire and find out what is new with everyone. The following morning, everyone is up by nine and helping to make breakfast which is our best meal of the day while camping. Breakfast will fuel us for the day as we hike the vast battlefields. My Uncle Mark, my twin brother, Andrew, and I like to take charge of the family tour since we have the most passion and knowledge about the battle. I have learned so much from the stories Mark tells us while walking on the tours. Through my own research during these last couple of trips, I did some of the explaining about the events that occurred during the battle 150 years ago. My fondest experience during one trip was when we decided to go off of the main path to find a carving in a rock from a soldier during the battle. Mark had read about the carving in one of his books about Gettysburg, and we were determined to locate it. After almost an hour of scanning rocks in the area, we finally found it with just enough daylight to read what it said. After a long day of exploring the battlefield, we went back to the campsite for some 'civil war' stew. There is nothing special about the stew, just meat, vegetables and gravy, but for whatever reason, it is some of the best stew I have ever eaten. For the rest of the night, we enjoy the company of our extended family. My cousins, my brother and I listen to the stories from Mark and his friends experiences' in the military. After the parents have gone to bed, we stay up talking with each other, inching closer and closer to the fire as it gets colder. Finally, we creep back into our tents, trying to be as quiet as possible to not wake our parents. The next morning we awake red-eyed from the lack of sleep and cook up another fantastic breakfast. Unfortunately, after breakfast we have to pack up and head back to Pittsburgh. It will be another year until I visit Gettysburg again. There is something about that time I spend in Gettysburg that keeps me coming back to visit. For one, it is just a fun, relaxing time I get to spend with my family. This trip also fulfills my love for the outdoors. From sitting by the campfire and falling asleep to the chirp of the crickets, that is my definition of a perfect weekend. Gettysburg is also an interesting place to go for Civil War buffs like me. While walking down the Union line or walking Pickett's Charge, I imagine how the battle would have been played out around me. Every year when I visit Gettysburg, I learn more facts and stories about the battle, soldiers and generally about the Civil War. While I am in Gettysburg, I am perfectly content, passionate about the history and just enjoying the great outdoors with my family. 
# This drive to learn goes beyond just my passion for history but applies to all of the math, science and business classes I have taken and clubs I am involved in at school. Every day, I am genuinely excited to learn.
# """
# test = mbti_classify(my_posts)
# print ('check')
# test
# print ('check2') | [((732, 756), 'nltk.download', 'nltk.download', (['"""wordnet"""'], {}), "('wordnet')\n", (745, 756), False, 'import nltk\n'), ((2277, 2292), 'nltk.stem.PorterStemmer', 'PorterStemmer', ([], {}), '()\n', (2290, 2292), False, 'from nltk.stem import PorterStemmer, WordNetLemmatizer\n'), ((2307, 2326), 'nltk.stem.WordNetLemmatizer', 'WordNetLemmatizer', ([], {}), '()\n', (2324, 2326), False, 'from nltk.stem import PorterStemmer, WordNetLemmatizer\n'), ((2383, 2409), 'nltk.corpus.stopwords.words', 'stopwords.words', (['"""english"""'], {}), "('english')\n", (2398, 2409), False, 'from nltk.corpus import stopwords\n'), ((3794, 3826), 'pandas.read_csv', 'pd.read_csv', (['"""./mbti/mbti_1.csv"""'], {}), "('./mbti/mbti_1.csv')\n", (3805, 3826), True, 'import pandas as pd\n'), ((4170, 4301), 'sklearn.feature_extraction.text.CountVectorizer', 'CountVectorizer', ([], {'analyzer': '"""word"""', 'max_features': '(1500)', 'tokenizer': 'None', 'preprocessor': 'None', 'stop_words': 'None', 'max_df': '(0.7)', 'min_df': '(0.1)'}), "(analyzer='word', max_features=1500, tokenizer=None,\n preprocessor=None, stop_words=None, max_df=0.7, min_df=0.1)\n", (4185, 4301), False, 'from sklearn.feature_extraction.text import CountVectorizer\n'), ((5081, 5099), 'sklearn.feature_extraction.text.TfidfTransformer', 'TfidfTransformer', ([], {}), '()\n', (5097, 5099), False, 'from sklearn.feature_extraction.text import TfidfTransformer\n'), ((1422, 1469), 'pandas.Series', 'pd.Series', (["{'IE': I, 'NS': N, 'TF': T, 'JP': J}"], {}), "({'IE': I, 'NS': N, 'TF': T, 'JP': J})\n", (1431, 1469), True, 'import pandas as pd\n'), ((3544, 3564), 'numpy.array', 'np.array', (['list_posts'], {}), '(list_posts)\n', (3552, 3564), True, 'import numpy as np\n'), ((3589, 3615), 'numpy.array', 'np.array', (['list_personality'], {}), '(list_personality)\n', (3597, 3615), True, 'import numpy as np\n'), ((3995, 4009), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (4006, 4009), False, 'import pickle\n'), ((4101, 4115), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (4112, 4115), False, 'import pickle\n'), ((4927, 4941), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (4938, 4941), False, 'import pickle\n'), ((5328, 5342), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (5339, 5342), False, 'import pickle\n'), ((5760, 5818), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': "{'type': ['INFJ'], 'posts': [my_posts]}"}), "(data={'type': ['INFJ'], 'posts': [my_posts]})\n", (5772, 5818), True, 'import pandas as pd\n'), ((7183, 7217), 'pandas.read_csv', 'pd.read_csv', (['"""./mbti/mbti_exp.csv"""'], {}), "('./mbti/mbti_exp.csv')\n", (7194, 7217), True, 'import pandas as pd\n'), ((2826, 2920), 're.sub', 're.sub', (['"""http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|(?:%[0-9a-fA-F][0-9a-fA-F]))+"""', '""" """', 'posts'], {}), "('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|(?:%[0-9a-fA-F][0-9a-fA-F]))+',\n ' ', posts)\n", (2832, 2920), False, 'import re\n'), ((2933, 2963), 're.sub', 're.sub', (['"""[^a-zA-Z]"""', '""" """', 'temp'], {}), "('[^a-zA-Z]', ' ', temp)\n", (2939, 2963), False, 'import re\n'), ((6502, 6564), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X', 'Y'], {'test_size': 'test_size', 'random_state': 'seed'}), '(X, Y, test_size=test_size, random_state=seed)\n', (6518, 6564), False, 'from sklearn.model_selection import train_test_split\n'), ((6622, 6644), 'xgboost.XGBClassifier', 'XGBClassifier', ([], {}), '(**param)\n', (6635, 6644), False, 'from xgboost import XGBClassifier\n'), ((2980, 3003), 
're.sub', 're.sub', (['""" +"""', '""" """', 'temp'], {}), "(' +', ' ', temp)\n", (2986, 3003), False, 'import re\n')] |
mummi-framework/mummi-ras | mummi_ras/online/aa/aa_get_tiltrot_z_state.py | 7f4522aad36661e4530e39c830ab8c2a6f134060 | ###############################################################################
# @todo add Pilot2-splash-app disclaimer
###############################################################################
""" Get's KRAS states """
import MDAnalysis as mda
from MDAnalysis.analysis import align
from MDAnalysis.lib.mdamath import make_whole
import os
import numpy as np
import math
############## Below section needs to be uncommented ############
import mummi_core
import mummi_ras
from mummi_core.utils import Naming
# # Logger has to be initialized the first thing in the script
from logging import getLogger
LOGGER = getLogger(__name__)
# # Initialize MuMMI if it has not been done before
# MUMMI_ROOT = mummi.init(True)
# This is needed so the Naming works below
#@TODO fix this so we don't have these on import; make them part of an init
mummi_core.init()
dirKRASStates = Naming.dir_res('states')
dirKRASStructures = Naming.dir_res('structures')
# #RAS_ONLY_macrostate = np.loadtxt(os.path.join(dirKRASStates, "RAS-ONLY.microstates.txt"))
RAS_ONLY_macrostate = np.loadtxt(os.path.join(dirKRASStates, "ras-states.txt"),comments='#')
# #RAS_RAF_macrostate = np.loadtxt(os.path.join(dirKRASStates, "RAS-RAF.microstates.txt"))
RAS_RAF_macrostate = np.loadtxt(os.path.join(dirKRASStates, "ras-raf-states.txt"),comments='#') # Note: different number of columns, so the index changes below
# TODO: CS, my edits to test
# RAS_ONLY_macrostate = np.loadtxt('ras-states.txt')
# RAS_RAF_macrostate = np.loadtxt('ras-raf-states.txt')
############## above section needs to be uncommented ############
# TODO: CS, my edits to test
# TODO: TSC, The reference structure has to currently be set as the 'RAS-ONLY-reference-structure.gro'
# TODO: TSC, path to the reference structure is: mummi_resources/structures/
kras_ref_universe = mda.Universe(os.path.join(dirKRASStructures, "RAS-ONLY-reference-structure.gro"))
# kras_ref_universe = mda.Universe("RAS-ONLY-reference-structure.gro")
# kras_ref_universe = mda.Universe('AA_pfpatch_000000004641_RAS_RAF2_411.gro')
# TODO: CS, not using these for x4 proteins; instead using protein_systems below to set num_res
######### Below hard codes the number of residues within RAS-only and RAS-RAF ##########
RAS_only_num_res = 184
RAS_RAF_num_res = 320
######### Above hard codes the number of residues within RAS-only and RAS-RAF ##########
####### This can be removed
# def get_kras(syst, kras_start):
# """Gets all atoms for a KRAS protein starting at 'kras_start'."""
# return syst.atoms[kras_start:kras_start+428]
####### This can be removed
def get_segids(u):
"""Identifies the list of segments within the system. Only needs to be called x1 time"""
segs = u.segments
segs = segs.segids
ras_segids = []
rasraf_segids = []
for i in range(len(segs)):
# print(segs[i])
if segs[i][-3:] == 'RAS':
ras_segids.append(segs[i])
if segs[i][-3:] == 'RAF':
rasraf_segids.append(segs[i])
return ras_segids, rasraf_segids
def get_protein_info(u,tag):
"""Uses the segments identified in get_segids to make a list of all proteins in the systems.\
Outputs a list of the first residue number of the protein, and whether it is 'RAS-ONLY', or 'RAS-RAF'.\
The 'tag' input defines what is used to identify the first residue of the protein. i.e. 'resname ACE1 and name BB'.\
Only needs to be called x1 time"""
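    # Returns a sorted list of (first_resid, protein_type) tuples, e.g.
    # [(1, 'RAS-ONLY'), (190, 'RAS-RAF')] (residue numbers here are illustrative only).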
ras_segids, rasraf_segids = get_segids(u)
if len(ras_segids) > 0:
RAS = u.select_atoms('segid '+ras_segids[0]+' and '+str(tag))
else:
RAS = []
if len(rasraf_segids) > 0:
RAF = u.select_atoms('segid '+rasraf_segids[0]+' and '+str(tag))
else:
RAF = []
protein_info = []#np.empty([len(RAS)+len(RAF),2])
for i in range(len(RAS)):
protein_info.append((RAS[i].resid,'RAS-ONLY'))
for i in range(len(RAF)):
protein_info.append((RAF[i].resid,'RAS-RAF'))
######## sort protein info
protein_info = sorted(protein_info)
######## sort protein info
return protein_info
def get_ref_kras():
"""Gets the reference KRAS struct. Only called x1 time when class is loaded"""
start_of_g_ref = kras_ref_universe.residues[0].resid
ref_selection = 'resid '+str(start_of_g_ref)+':'+str(start_of_g_ref+24)+' ' +\
str(start_of_g_ref+38)+':'+str(start_of_g_ref+54)+' ' +\
str(start_of_g_ref+67)+':'+str(start_of_g_ref+164)+' ' +\
'and (name CA or name BB)'
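    # The selection covers three backbone stretches (offsets +0..+24, +38..+54 and +67..+164
    # from the first residue), i.e. the "r2_26r40_56r69_166" region referred to below.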
r2_26r40_56r69_166_ref = kras_ref_universe.select_atoms(str(ref_selection))
return kras_ref_universe.select_atoms(str(ref_selection)).positions - kras_ref_universe.select_atoms(str(ref_selection)).center_of_mass()
# Load inital ref frames (only need to do this once)
ref0 = get_ref_kras()
def getKRASstates(u,kras_indices):
"""Gets states for all KRAS proteins in path."""
# res_shift = 8
# all_glycine = u.select_atoms("resname GLY")
# kras_indices = []
# for i in range(0, len(all_glycine), 26):
# kras_indices.append(all_glycine[i].index)
########## Below is taken out of the function so it is only done once #########
# kras_indices = get_protein_info(u,'resname ACE1 and name BB')
########## Above is taken out of the function so it is only done once #########
# CS, for x4 cases:
# [{protein_x4: (protein_type, num_res)}]
protein_systems = [{'ras4a': ('RAS-ONLY', 185),
'ras4araf': ('RAS-RAF', 321),
'ras': ('RAS-ONLY', 184),
'rasraf': ('RAS-RAF', 320)}]
ALLOUT = []
for k in range(len(kras_indices)):
start_of_g = kras_indices[k][0]
protein_x4 = str(kras_indices[k][1])
try:
protein_type = [item[protein_x4] for item in protein_systems][0][0] # 'RAS-ONLY' OR 'RAS-RAF'
num_res = [item[protein_x4] for item in protein_systems][0][1]
except:
LOGGER.error('Check KRas naming between modules')
raise Exception('Error: unknown KRas name')
# TODO: CS, replacing this comment section with the above, to handle x4 protein types
# ---------------------------------------
# ALLOUT = []
# for k in range(len(kras_indices)):
# start_of_g = kras_indices[k][0]
# protein_type = str(kras_indices[k][1])
# ########## BELOW SECTION TO DETERMINE WHICH RESIDUES ARE PART OF THE PROTEIN GROUP - NEEDED FOR PBC REMOVAL ##############
# ########## POTENTIALLY REDO WITH A 'HARD-CODED' NUMBER OF RESIDUES PER PROTEIN GROUP (WHETHER RAS-ONLY OR RAS-RAF) #######
# ########## HAS BEEN REDONE WITH A 'HARD-CODED' NUMBER OF RESIDUES PER PROTEIN GROUP (WHETHER RAS-ONLY OR RAS-RAF) ########
# # if len(kras_indices) == 1:
# # krases0_BB = u.select_atoms('resid '+str(start_of_g)+':'+str(len(u.residues))+' and name BB') ####### HAS TO BE FIXED FOR BACKBONE ATOMS FOR SPECIFIC PROTEIN
# # elif len(kras_indices) > 1:
# # if k == len(kras_indices)-1:
# # krases0_BB = u.select_atoms('resid '+str(start_of_g)+':'+str(len(u.residues))+' and name BB')
# # else:
# # krases0_BB = u.select_atoms('resid '+str(start_of_g)+':'+str(kras_indices[k+1][0])+' and name BB')
# ########## ABOVE SECTION TO DETERMINE WHICH RESIDUES ARE PART OF THE PROTEIN GROUP - NEEDED FOR PBC REMOVAL ##############
#
# ########## Below hard codes the number of residues/beads in the RAS-ONLY and RAS-RAF simulations #########################
# if protein_type == 'RAS-ONLY':
# num_res = RAS_only_num_res
# elif protein_type == 'RAS-RAF':
# num_res = RAS_RAF_num_res
# ########## Above hard codes the number of residues/beads in the RAS-ONLY and RAS-RAF simulations #########################
# ---------------------------------------
# TODO: TSC, I changed the selection below, which can be used for the make_whole...
# krases0_BB = u.select_atoms('resid '+str(start_of_g)+':'+str(start_of_g+num_res)+' and (name CA or name BB)')
krases0_BB = u.select_atoms('resid '+str(start_of_g)+':'+str(start_of_g+num_res))
krases0_BB.guess_bonds()
r2_26r40_56r69_166 = u.select_atoms('resid '+str(start_of_g)+':'+str(start_of_g+24)+' ' +\
str(start_of_g+38)+':'+str(start_of_g+54)+' ' +\
str(start_of_g+67)+':'+str(start_of_g+164)+\
' and (name CA or name BB)')
u_selection = \
'resid '+str(start_of_g)+':'+str(start_of_g+24)+' '+str(start_of_g+38)+':'+str(start_of_g+54)+' ' +\
str(start_of_g+67)+':'+str(start_of_g+164)+' and (name CA or name BB)'
mobile0 = u.select_atoms(str(u_selection)).positions - u.select_atoms(str(u_selection)).center_of_mass()
# TODO: CS, something wrong with ref0 from get_kras_ref()
# just making ref0 = mobile0 to test for now
# ref0 = mobile0
# TSC removed this
R, RMSD_junk = align.rotation_matrix(mobile0, ref0)
######## TODO: TSC, Adjusted for AA lipid names ########
# lipids = u.select_atoms('resname POPX POPC PAPC POPE DIPE DPSM PAPS PAP6 CHOL')
lipids = u.select_atoms('resname POPC PAPC POPE DIPE SSM PAPS SAPI CHL1')
coords = ref0
RotMat = []
OS = []
r152_165 = krases0_BB.select_atoms('resid '+str(start_of_g+150)+':'+str(start_of_g+163)+' and (name CA or name BB)')
r65_74 = krases0_BB.select_atoms('resid '+str(start_of_g+63)+':'+str(start_of_g+72)+' and (name CA or name BB)')
timeframes = []
# TODO: CS, for AA need bonds to run make_whole()
# krases0_BB.guess_bonds()
# TODO: CS, turn off for now to test beyond this point
''' *** for AA, need to bring that back on once all else runs ***
'''
# @Tim and Chris S. this was commented out - please check.
#make_whole(krases0_BB)
j, rmsd_junk = mda.analysis.align.rotation_matrix((r2_26r40_56r69_166.positions-r2_26r40_56r69_166.center_of_mass()), coords)
RotMat.append(j)
OS.append(r65_74.center_of_mass()-r152_165.center_of_mass())
timeframes.append(u.trajectory.time)
if protein_type == 'RAS-RAF':
z_pos = []
############### NEED TO CONFIRM THE SELECTION OF THE RAF LOOP RESIDUES BELOW ####################
############### TODO: TSC, zshifting is set to -1 (instead of -2), as there are ACE caps that are separate residues in AA
#zshifting=-1
if protein_x4 == 'rasraf':
zshifting = -1
elif protein_x4 == 'ras4araf':
zshifting = 0
else:
zshifting = 0
LOGGER.error('Found unsupported protein_x4 type')
raf_loops_selection = u.select_atoms('resid '+str(start_of_g+zshifting+291)+':'+str(start_of_g+zshifting+294)+' ' +\
str(start_of_g+zshifting+278)+':'+str(start_of_g+zshifting+281)+' ' +\
' and (name CA or name BB)')
############### NEED TO CONFIRM THE SELECTION OF THE RAF LOOP RESIDUES ABOVE ####################
diff = (lipids.center_of_mass()[2]-raf_loops_selection.center_of_mass(unwrap=True)[2])/10
if diff < 0:
diff = diff+(u.dimensions[2]/10)
z_pos.append(diff)
z_pos = np.array(z_pos)
RotMatNP = np.array(RotMat)
OS = np.array(OS)
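        # The block below turns the alignment rotation matrices (RotMatNP) and the
        # helix-to-helix vectors (OS) into two angles; interpretation inferred from
        # how the values are used for the state assignment further down:
        #   OWAS - tilt of the protein axis relative to the membrane normal (degrees)
        #   OROT - rotation about that axis, with its sign fixed by the OFORSIGN test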
OA = RotMatNP[:, 2, :]/(((RotMatNP[:, 2, 0]**2)+(RotMatNP[:, 2, 1]**2)+(RotMatNP[:, 2, 2]**2))**0.5)[:, None]
OWAS = np.arccos(RotMatNP[:, 2, 2])*180/math.pi
OC_temp = np.concatenate((OA, OS), axis=1)
t = ((OC_temp[:, 0]*OC_temp[:, 3])+(OC_temp[:, 1]*OC_temp[:, 4]) +
(OC_temp[:, 2]*OC_temp[:, 5]))/((OC_temp[:, 0]**2)+(OC_temp[:, 1]**2)+(OC_temp[:, 2]**2))
OC = OA*t[:, None]
ORS_tp = np.concatenate((OC, OS), axis=1)
ORS_norm = (((ORS_tp[:, 3]-ORS_tp[:, 0])**2)+((ORS_tp[:, 4]-ORS_tp[:, 1])**2)+((ORS_tp[:, 5]-ORS_tp[:, 2])**2))**0.5
ORS = (OS - OC)/ORS_norm[:, None]
OACRS = np.cross(OA, ORS)
OZCA = OA * OA[:, 2][:, None]
Z_unit = np.full([len(OZCA), 3], 1)
Z_adjust = np.array([0, 0, 1])
Z_unit = Z_unit*Z_adjust
Z_OZCA = Z_unit-OZCA
OZPACB = Z_OZCA/((Z_OZCA[:, 0]**2+Z_OZCA[:, 1]**2+Z_OZCA[:, 2]**2)**0.5)[:, None]
OROTNOTSIGNED = np.zeros([len(ORS)])
for i in range(len(ORS)):
OROTNOTSIGNED[i] = np.arccos(np.dot(OZPACB[i, :], ORS[i, :]) /
(np.sqrt(np.dot(OZPACB[i, :], OZPACB[i, :]))) *
(np.sqrt(np.dot(ORS[i, :], ORS[i, :]))))*180/math.pi
OZPACBCRS_cross = np.cross(OZPACB, ORS)
OZPACBCRS = OZPACBCRS_cross/((OZPACBCRS_cross[:, 0]**2+OZPACBCRS_cross[:, 1]**2+OZPACBCRS_cross[:, 2]**2)**0.5)[:, None]
OFORSIGN_temp = (OA - OZPACBCRS)**2
OFORSIGN = OFORSIGN_temp[:, 0]+OFORSIGN_temp[:, 1]+OFORSIGN_temp[:, 2]
OROT = OROTNOTSIGNED
for i in range(len(OROT)):
if OROT[i] < 0:
OROT[i] = -(OROT[i])
for i in range(len(OROT)):
if OFORSIGN[i] < 0.25:
OROT[i] = -(OROT[i])
###### Below introduces new shift to account for upper vs. lower leaflet #####
for i in range(len(OWAS)):
OWAS[i] = abs(-(OWAS[i])+180) # made this an absolute value so that the tilt remains positive
for i in range(len(OROT)):
if OROT[i] < 0:
OROT[i] = OROT[i]+180
elif OROT[i] > 0:
OROT[i] = OROT[i]-180
###### Above introduces new shift to account for upper vs. lower leaflet #####
###### Below might have to be updated to take into account the periodic nature of the rotation ######
if protein_type == 'RAS-ONLY':
states = np.zeros(len(OROT))
for j in range(len(OROT)):
diff0 = []
for i in range(len(RAS_ONLY_macrostate)):
#diff0.append([((RAS_ONLY_macrostate[i,0]-OWAS[j])**2+(RAS_ONLY_macrostate[i,1]-OROT[j])**2)**0.5, RAS_ONLY_macrostate[i,6]])
diff0.append([((RAS_ONLY_macrostate[i,1]-OWAS[j])**2+(RAS_ONLY_macrostate[i,0]-OROT[j])**2)**0.5, RAS_ONLY_macrostate[i,5]])
diff0.sort()
states[j] = diff0[0][1]
elif protein_type == 'RAS-RAF':
states = np.zeros(len(OROT))
for j in range(len(OROT)):
### below: adding in the requirements for the 'high-z' state ###
if (OROT[j] < -45 or OROT[j] > 140) and z_pos[j] > 4.8:
states[j] = 3
else:
### above: adding in the requirements for the 'high-z' state ###
diff0 = []
for i in range(len(RAS_RAF_macrostate)):
#diff0.append([((RAS_RAF_macrostate[i,0]-OWAS[j])**2+(RAS_RAF_macrostate[i,1]-OROT[j])**2)**0.5, RAS_RAF_macrostate[i,6]])
diff0.append([((RAS_RAF_macrostate[i,1]-OWAS[j])**2+(RAS_RAF_macrostate[i,0]-OROT[j])**2)**0.5, RAS_RAF_macrostate[i,4]])
diff0.sort()
states[j] = diff0[0][1]
###### Above might have to be updated to take into account the periodic nature of the rotation ######
###### Assume we want to remove this? Where is the code that reads this information? i.e. will there be knock-on effects? ######
###### If feedback code needs index 5 (two_states) from the output, deleting this four_states will shift that to index 4 #######
# four_states = np.zeros(len(OROT))
# for j in range(len(OROT)):
# diff0 = []
# for i in range(len(macrostate4)):
# diff0.append([((macrostate4[i,0]-OWAS[j])**2+(macrostate4[i,1]-OROT[j])**2)**0.5, macrostate4[i,6]])
# diff0.sort()
# four_states[j] = diff0[0][1]+1
###### below: old output details.... ######################################
###### Updated - RAS-only to NOT HAVE the Z-distance ######################
###### Updated - Added in the protein 'tag', i.e. RAS-ONLY or RAS-RAF #####
# OUTPUT = np.zeros([len(OROT), 6])
# for i in range(len(OROT)):
# OUTPUT[i] = timeframes[i], OWAS[i], OROT[i], z_pos[i], four_states[i], two_states[i]
###### above: old output details.... ######################################
###### below: NEW output details.... ######################################
if protein_type == 'RAS-ONLY':
OUTPUT = np.zeros([len(OROT), 6]).astype(object)
for i in range(len(OROT)):
OUTPUT[i] = str(protein_type), timeframes[i], OWAS[i], OROT[i], 'n/a', int(states[i])
elif protein_type == 'RAS-RAF':
OUTPUT = np.zeros([len(OROT), 6]).astype(object)
for i in range(len(OROT)):
OUTPUT[i] = str(protein_type), timeframes[i], OWAS[i], OROT[i], z_pos[i], int(states[i])
ALLOUT.append(OUTPUT)
return np.asarray(ALLOUT)
#np.savetxt(str(tpr)+"_tilt_rot_z_state.KRAS_"+str(k+1)+".txt", OUTPUT, fmt=['%i','%10.3f','%10.3f','%10.3f','%i','%i'], delimiter=' ')
| [((621, 640), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (630, 640), False, 'from logging import getLogger\n'), ((840, 857), 'mummi_core.init', 'mummi_core.init', ([], {}), '()\n', (855, 857), False, 'import mummi_core\n'), ((875, 899), 'mummi_core.utils.Naming.dir_res', 'Naming.dir_res', (['"""states"""'], {}), "('states')\n", (889, 899), False, 'from mummi_core.utils import Naming\n'), ((920, 948), 'mummi_core.utils.Naming.dir_res', 'Naming.dir_res', (['"""structures"""'], {}), "('structures')\n", (934, 948), False, 'from mummi_core.utils import Naming\n'), ((1075, 1120), 'os.path.join', 'os.path.join', (['dirKRASStates', '"""ras-states.txt"""'], {}), "(dirKRASStates, 'ras-states.txt')\n", (1087, 1120), False, 'import os\n'), ((1259, 1308), 'os.path.join', 'os.path.join', (['dirKRASStates', '"""ras-raf-states.txt"""'], {}), "(dirKRASStates, 'ras-raf-states.txt')\n", (1271, 1308), False, 'import os\n'), ((1830, 1897), 'os.path.join', 'os.path.join', (['dirKRASStructures', '"""RAS-ONLY-reference-structure.gro"""'], {}), "(dirKRASStructures, 'RAS-ONLY-reference-structure.gro')\n", (1842, 1897), False, 'import os\n'), ((17305, 17323), 'numpy.asarray', 'np.asarray', (['ALLOUT'], {}), '(ALLOUT)\n', (17315, 17323), True, 'import numpy as np\n'), ((9165, 9201), 'MDAnalysis.analysis.align.rotation_matrix', 'align.rotation_matrix', (['mobile0', 'ref0'], {}), '(mobile0, ref0)\n', (9186, 9201), False, 'from MDAnalysis.analysis import align\n'), ((11648, 11664), 'numpy.array', 'np.array', (['RotMat'], {}), '(RotMat)\n', (11656, 11664), True, 'import numpy as np\n'), ((11679, 11691), 'numpy.array', 'np.array', (['OS'], {}), '(OS)\n', (11687, 11691), True, 'import numpy as np\n'), ((11884, 11916), 'numpy.concatenate', 'np.concatenate', (['(OA, OS)'], {'axis': '(1)'}), '((OA, OS), axis=1)\n', (11898, 11916), True, 'import numpy as np\n'), ((12139, 12171), 'numpy.concatenate', 'np.concatenate', (['(OC, OS)'], {'axis': '(1)'}), '((OC, OS), axis=1)\n', (12153, 12171), True, 'import numpy as np\n'), ((12355, 12372), 'numpy.cross', 'np.cross', (['OA', 'ORS'], {}), '(OA, ORS)\n', (12363, 12372), True, 'import numpy as np\n'), ((12474, 12493), 'numpy.array', 'np.array', (['[0, 0, 1]'], {}), '([0, 0, 1])\n', (12482, 12493), True, 'import numpy as np\n'), ((13010, 13031), 'numpy.cross', 'np.cross', (['OZPACB', 'ORS'], {}), '(OZPACB, ORS)\n', (13018, 13031), True, 'import numpy as np\n'), ((11612, 11627), 'numpy.array', 'np.array', (['z_pos'], {}), '(z_pos)\n', (11620, 11627), True, 'import numpy as np\n'), ((11825, 11857), 'numpy.arccos', 'np.arccos', (['RotMatNP[:, (2), (2)]'], {}), '(RotMatNP[:, (2), (2)])\n', (11834, 11857), True, 'import numpy as np\n'), ((12766, 12801), 'numpy.dot', 'np.dot', (['OZPACB[(i), :]', 'ORS[(i), :]'], {}), '(OZPACB[(i), :], ORS[(i), :])\n', (12772, 12801), True, 'import numpy as np\n'), ((12939, 12971), 'numpy.dot', 'np.dot', (['ORS[(i), :]', 'ORS[(i), :]'], {}), '(ORS[(i), :], ORS[(i), :])\n', (12945, 12971), True, 'import numpy as np\n'), ((12850, 12888), 'numpy.dot', 'np.dot', (['OZPACB[(i), :]', 'OZPACB[(i), :]'], {}), '(OZPACB[(i), :], OZPACB[(i), :])\n', (12856, 12888), True, 'import numpy as np\n')] |
maddox/home-assistant | homeassistant/components/switch/hikvisioncam.py | 6624cfefd6ea81b559085779173b91a3dc6bd349 | """
homeassistant.components.switch.hikvision
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Support turning on/off motion detection on Hikvision cameras.
Note: Currently works using default https port only.
CGI API Guide: http://bit.ly/1RuyUuF
Configuration:
To use the Hikvision motion detection switch you will need to add something
like the following to your config/configuration.yaml
switch:
platform: hikvisioncam
name: Hikvision Cam 1 Motion Detection
host: 192.168.1.32
username: YOUR_USERNAME
password: YOUR_PASSWORD
Variables:
host
*Required
This is the IP address of your Hikvision camera. Example: 192.168.1.32
username
*Required
Your Hikvision camera username.
password
*Required
Your Hikvision camera password.
name
*Optional
The name to use when displaying this switch instance.
"""
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.const import STATE_ON, STATE_OFF
from homeassistant.const import CONF_HOST, CONF_USERNAME, CONF_PASSWORD
import logging
try:
import hikvision.api
from hikvision.error import HikvisionError, MissingParamError
except ImportError:
hikvision.api = None
_LOGGING = logging.getLogger(__name__)
REQUIREMENTS = ['hikvision==0.4']
# pylint: disable=too-many-arguments
# pylint: disable=too-many-instance-attributes
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
""" Setup Hikvision Camera config. """
host = config.get(CONF_HOST, None)
port = config.get('port', "80")
name = config.get('name', "Hikvision Camera Motion Detection")
username = config.get(CONF_USERNAME, "admin")
password = config.get(CONF_PASSWORD, "12345")
if hikvision.api is None:
_LOGGING.error((
"Failed to import hikvision. Did you maybe not install the "
"'hikvision' dependency?"))
return False
try:
hikvision_cam = hikvision.api.CreateDevice(
host, port=port, username=username,
password=password, is_https=False)
except MissingParamError as param_err:
_LOGGING.error("Missing required param: %s", param_err)
return False
except HikvisionError as conn_err:
_LOGGING.error("Unable to connect: %s", conn_err)
return False
add_devices_callback([
HikvisionMotionSwitch(name, hikvision_cam)
])
class HikvisionMotionSwitch(ToggleEntity):
""" Provides a switch to toggle on/off motion detection. """
def __init__(self, name, hikvision_cam):
self._name = name
self._hikvision_cam = hikvision_cam
self._state = STATE_OFF
@property
def should_poll(self):
""" Poll for status regularly. """
return True
@property
def name(self):
""" Returns the name of the device if any. """
return self._name
@property
def state(self):
""" Returns the state of the device if any. """
return self._state
@property
def is_on(self):
""" True if device is on. """
return self._state == STATE_ON
def turn_on(self, **kwargs):
""" Turn the device on. """
_LOGGING.info("Turning on Motion Detection ")
self._hikvision_cam.enable_motion_detection()
def turn_off(self, **kwargs):
""" Turn the device off. """
_LOGGING.info("Turning off Motion Detection ")
self._hikvision_cam.disable_motion_detection()
def update(self):
""" Update Motion Detection state """
enabled = self._hikvision_cam.is_motion_detection_enabled()
_LOGGING.info('enabled: %s', enabled)
self._state = STATE_ON if enabled else STATE_OFF
| [((1170, 1197), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1187, 1197), False, 'import logging\n')] |
leduong/richie | src/richie/apps/search/filter_definitions/mixins.py | bf7ed379b7e2528cd790dadcec10ac2656efd189 | """Define mixins to easily compose custom FilterDefinition classes."""
class TermsQueryMixin:
"""A mixin for filter definitions that need to apply term queries."""
def get_query_fragment(self, data):
"""Build the query fragments as term queries for each selected value."""
value_list = data.get(self.name)
# For terms filters, as the name implies, it's a simple terms fragment
return (
[{"key": self.name, "fragment": [{"terms": {self.term: value_list}}]}]
if value_list
else []
)
class ChoicesQueryMixin:
"""A mixin for filter definitions that need to apply predefined queries."""
def get_query_fragment(self, data):
"""Pick the hardcoded query fragment for each selected value."""
fragment_map = self.get_fragment_map()
return [
{"key": self.name, "fragment": fragment_map[value]}
for value in data.get(self.name, [])
]
class ChoicesAggsMixin:
"""A mixin for filter definitions that need to apply aggregations for predefined choices."""
# pylint: disable=unused-argument
def get_aggs_fragment(self, queries, *args, **kwargs):
"""
Build the aggregations as a set of filters, one for each possible value of the field.
"""
return {
# Create a custom aggregation for each possible choice for this filter
# eg `availability@coming_soon` & `availability@current` & `availability@open`
"{:s}@{:s}".format(self.name, choice_key): {
"filter": {
"bool": {
# Use all the query fragments from the queries *but* the one(s) that
# filter on the current filter: we manually add back the only one that
# is relevant to the current choice.
"must": choice_fragment
+ [
clause
for kf_pair in queries
for clause in kf_pair["fragment"]
if kf_pair["key"] is not self.name
]
}
}
}
for choice_key, choice_fragment in self.get_fragment_map().items()
}
class NestedChoicesAggsMixin:
"""
A mixin for filter definitions that are related to a nested field. The aggregation filter can
only be recomputed at the level of the parent because it should group all queries of fields
nested below the parent.
"""
# pylint: disable=unused-argument
def get_aggs_fragment(self, queries, data, parent, *args, **kwargs):
"""
Computing aggregations for a nested field is DIFFICULT because query fragments related to
nested fields are grouped under their common path. For example combined filters on
availability and languages would lead to a query like:
{
"query": {
"nested": {
"path": "course_runs",
"query": {
"bool": {
"must": [
{"range": {"course_runs.end": {"lte": "01-01-2019"}}},
{"terms": {"course_runs.languages": ["de", "en", fr"]}},
]
}
},
}
}
}
        In this example, computing the facet count for the French filter is done with the
        following filter (excluding the filters on English and German so we only count French):
{
"query": {
"nested": {
"path": "course_runs",
"query": {
"bool": {
"must": [
{"range": {"course_runs.end": {"lte": "01-01-2019"}}},
{"terms": {"course_runs.languages": ["fr"]}},
]
}
},
}
}
}
This can only be built by calling the parent NestingWrapper with customized filter data.
"""
return {
# Create a custom aggregation for each possible choice for this filter
# eg `availability@coming_soon` & `availability@current` & `availability@open`
"{:s}@{:s}".format(self.name, choice_key): {
"filter": {
"bool": {
                        # Use all the query fragments from the queries (the nesting parent is
                        # responsible for excluding the queries related to nested fields), so we
                        # have to manually add them back, making sure to apply only the current
                        # choice to the current field.
"must": [
clause
for kf_pair in (
queries
+ parent.get_query_fragment(
# override data with only the current choice
{**data, self.name: [choice_key]}
)
)
for clause in kf_pair["fragment"]
]
}
}
}
for choice_key, choice_fragment in self.get_fragment_map().items()
}
| [] |
AustEcon/electrumsv | electrumsv/gui/qt/receive_view.py | db924efc69f091f39e7d02e7f2d7a71350f4e6af | from typing import List, Optional, TYPE_CHECKING
import weakref
from PyQt5.QtCore import QEvent, Qt
from PyQt5.QtWidgets import (QComboBox, QGridLayout, QGroupBox, QHBoxLayout, QLabel, QLineEdit,
QVBoxLayout, QWidget)
from electrumsv.app_state import app_state
from electrumsv.bitcoin import script_template_to_string
from electrumsv.constants import PaymentFlag, RECEIVING_SUBPATH
from electrumsv.i18n import _
from electrumsv.logs import logs
from electrumsv.wallet_database.tables import KeyInstanceRow
from electrumsv import web
from .amountedit import AmountEdit, BTCAmountEdit
from .constants import expiration_values
if TYPE_CHECKING:
from .main_window import ElectrumWindow
from .qrcodewidget import QRCodeWidget
from .qrwindow import QR_Window
from .request_list import RequestList
from .table_widgets import TableTopButtonLayout
from .util import ButtonsLineEdit, EnterButton, HelpLabel
class ReceiveView(QWidget):
_qr_window: Optional[QR_Window] = None
def __init__(self, main_window: 'ElectrumWindow', account_id: int) -> None:
super().__init__(main_window)
self._main_window = weakref.proxy(main_window)
self._account_id = account_id
self._account = main_window._wallet.get_account(account_id)
self._logger = logs.get_logger(f"receive-view[{self._account_id}]")
self._receive_key_id: Optional[int] = None
self._request_list_toolbar_layout = TableTopButtonLayout()
self._request_list_toolbar_layout.refresh_signal.connect(
self._main_window.refresh_wallet_display)
self._request_list_toolbar_layout.filter_signal.connect(self._filter_request_list)
form_layout = self.create_form_layout()
self._request_list = RequestList(self, main_window)
request_container = self.create_request_list_container()
vbox = QVBoxLayout(self)
vbox.addLayout(form_layout)
vbox.addSpacing(20)
vbox.addWidget(request_container, 1)
self.setLayout(vbox)
def clean_up(self) -> None:
# If there are no accounts there won't be a receive QR code object created yet.
if self._receive_qr is not None:
self._receive_qr.clean_up()
if self._qr_window is not None:
self._qr_window.close()
def create_form_layout(self) -> QHBoxLayout:
# A 4-column grid layout. All the stretch is in the last column.
# The exchange rate plugin adds a fiat widget in column 2
grid = QGridLayout()
grid.setSpacing(8)
grid.setColumnStretch(3, 1)
self._receive_destination_e = ButtonsLineEdit()
self._receive_destination_e.addCopyButton(app_state.app)
self._receive_destination_e.setReadOnly(True)
msg = _('Bitcoin SV payment destination where the payment should be received. '
'Note that each payment request uses a different Bitcoin SV payment destination.')
receive_address_label = HelpLabel(_('Receiving destination'), msg)
self._receive_destination_e.textChanged.connect(self._update_receive_qr)
self._receive_destination_e.setFocusPolicy(Qt.NoFocus)
grid.addWidget(receive_address_label, 0, 0)
grid.addWidget(self._receive_destination_e, 0, 1, 1, -1)
self._receive_message_e = QLineEdit()
grid.addWidget(QLabel(_('Description')), 1, 0)
grid.addWidget(self._receive_message_e, 1, 1, 1, -1)
self._receive_message_e.textChanged.connect(self._update_receive_qr)
self._receive_amount_e = BTCAmountEdit()
grid.addWidget(QLabel(_('Requested amount')), 2, 0)
grid.addWidget(self._receive_amount_e, 2, 1)
self._receive_amount_e.textChanged.connect(self._update_receive_qr)
self._fiat_receive_e = AmountEdit(app_state.fx.get_currency if app_state.fx else '')
if not app_state.fx or not app_state.fx.is_enabled():
self._fiat_receive_e.setVisible(False)
grid.addWidget(self._fiat_receive_e, 2, 2, Qt.AlignLeft)
self._main_window.connect_fields(self._receive_amount_e, self._fiat_receive_e)
self._expires_combo = QComboBox()
self._expires_combo.addItems([i[0] for i in expiration_values])
self._expires_combo.setCurrentIndex(3)
self._expires_combo.setFixedWidth(self._receive_amount_e.width())
msg = ' '.join([
_('Expiration date of your request.'),
_('This information is seen by the recipient if you send them '
'a signed payment request.'),
_('Expired requests have to be deleted manually from your list, '
'in order to free the corresponding Bitcoin SV addresses.'),
_('The Bitcoin SV address never expires and will always be part '
'of this ElectrumSV wallet.'),
])
grid.addWidget(HelpLabel(_('Request expires'), msg), 3, 0)
grid.addWidget(self._expires_combo, 3, 1)
self._expires_label = QLineEdit('')
self._expires_label.setReadOnly(1)
self._expires_label.setFocusPolicy(Qt.NoFocus)
self._expires_label.hide()
grid.addWidget(self._expires_label, 3, 1)
self._save_request_button = EnterButton(_('Save request'), self._save_form_as_request)
self._new_request_button = EnterButton(_('New'), self._new_payment_request)
self._receive_qr = QRCodeWidget(fixedSize=200)
self._receive_qr.link_to_window(self._toggle_qr_window)
buttons = QHBoxLayout()
buttons.addStretch(1)
buttons.addWidget(self._save_request_button)
buttons.addWidget(self._new_request_button)
grid.addLayout(buttons, 4, 1, 1, 2)
vbox_g = QVBoxLayout()
vbox_g.addLayout(grid)
vbox_g.addStretch()
hbox = QHBoxLayout()
hbox.addLayout(vbox_g)
hbox.addWidget(self._receive_qr)
return hbox
def create_request_list_container(self) -> QGroupBox:
layout = QVBoxLayout()
layout.setSpacing(0)
layout.setContentsMargins(6, 0, 6, 6)
layout.addLayout(self._request_list_toolbar_layout)
layout.addWidget(self._request_list)
request_box = QGroupBox()
request_box.setTitle(_('Requests'))
request_box.setAlignment(Qt.AlignCenter)
request_box.setContentsMargins(0, 0, 0, 0)
request_box.setLayout(layout)
return request_box
def update_widgets(self) -> None:
self._request_list.update()
def update_destination(self) -> None:
text = ""
if self._receive_key_id is not None:
script_template = self._account.get_script_template_for_id(self._receive_key_id)
if script_template is not None:
text = script_template_to_string(script_template)
self._receive_destination_e.setText(text)
def update_contents(self) -> None:
self._expires_label.hide()
self._expires_combo.show()
if self._account.is_deterministic():
fresh_key = self._account.get_fresh_keys(RECEIVING_SUBPATH, 1)[0]
self.set_receive_key(fresh_key)
def update_for_fx_quotes(self) -> None:
if self._account_id is not None:
edit = (self._fiat_receive_e
if self._fiat_receive_e.is_last_edited else self._receive_amount_e)
edit.textEdited.emit(edit.text())
# Bound to text fields in `_create_receive_form_layout`.
def _update_receive_qr(self) -> None:
if self._receive_key_id is None:
return
amount = self._receive_amount_e.get_amount()
message = self._receive_message_e.text()
self._save_request_button.setEnabled((amount is not None) or (message != ""))
script_template = self._account.get_script_template_for_id(self._receive_key_id)
address_text = script_template_to_string(script_template)
uri = web.create_URI(address_text, amount, message)
self._receive_qr.setData(uri)
if self._qr_window and self._qr_window.isVisible():
self._qr_window.set_content(self._receive_destination_e.text(), amount,
message, uri)
def _toggle_qr_window(self, event: QEvent) -> None:
if self._receive_key_id is None:
            self._main_window.show_message(_("No available receiving destination."))
return
if not self._qr_window:
self._qr_window = QR_Window(self)
self._qr_window.setVisible(True)
self._qr_window_geometry = self._qr_window.geometry()
else:
if not self._qr_window.isVisible():
self._qr_window.setVisible(True)
self._qr_window.setGeometry(self._qr_window_geometry)
else:
self._qr_window_geometry = self._qr_window.geometry()
self._qr_window.setVisible(False)
self._update_receive_qr()
def set_fiat_ccy_enabled(self, flag: bool) -> None:
self._fiat_receive_e.setVisible(flag)
def get_bsv_edits(self) -> List[BTCAmountEdit]:
return [ self._receive_amount_e ]
def _save_form_as_request(self) -> None:
if not self._receive_key_id:
self._main_window.show_error(_('No receiving payment destination'))
return
amount = self._receive_amount_e.get_amount()
message = self._receive_message_e.text()
if not message and not amount:
self._main_window.show_error(_('No message or amount'))
return
def callback(exc_value: Optional[Exception]=None) -> None:
if exc_value is not None:
raise exc_value # pylint: disable=raising-bad-type
self._request_list.update_signal.emit()
i = self._expires_combo.currentIndex()
expiration = [x[1] for x in expiration_values][i]
row = self._account.requests.get_request_for_key_id(self._receive_key_id)
if row is None:
row = self._account.requests.create_request(self._receive_key_id,
PaymentFlag.UNPAID, amount, expiration, message, callback)
else:
# Expiration is just a label, so we don't use the value.
self._account.requests.update_request(row.paymentrequest_id, row.state, amount,
row.expiration, message, callback)
self._save_request_button.setEnabled(False)
def _new_payment_request(self) -> None:
keyinstances: List[KeyInstanceRow] = []
if self._account.is_deterministic():
keyinstances = self._account.get_fresh_keys(RECEIVING_SUBPATH, 1)
if not len(keyinstances):
if not self._account.is_deterministic():
msg = [
_('No more payment destinations in your wallet.'),
_('You are using a non-deterministic account, which '
'cannot create new payment destinations.'),
_('If you want to create new payment destinations, '
'use a deterministic account instead.')
]
self._main_window.show_message(' '.join(msg))
return
self._main_window.show_message(
_('Your wallet is broken and could not allocate a new payment destination.'))
self.update_contents()
self._new_request_button.setEnabled(False)
self._receive_message_e.setFocus(1)
def get_receive_key_id(self) -> Optional[int]:
return self._receive_key_id
# Only called from key list menu.
def receive_at_id(self, key_id: int) -> None:
self._receive_key_id = key_id
self._new_request_button.setEnabled(True)
self.update_destination()
self._main_window.show_receive_tab()
def set_receive_key_id(self, key_id: int) -> None:
self._receive_key_id = key_id
def set_receive_key(self, keyinstance: KeyInstanceRow) -> None:
self._receive_key_id = keyinstance.keyinstance_id
self._receive_message_e.setText("")
self._receive_amount_e.setAmount(None)
self.update_destination()
def set_form_contents(self, address_text: str, value: int, description: Optional[str]=None,
expires_description: str="") -> None:
self._receive_destination_e.setText(address_text)
self._receive_message_e.setText(description or "")
self._receive_amount_e.setAmount(value)
self._expires_combo.hide()
self._expires_label.show()
self._expires_label.setText(expires_description)
self._new_request_button.setEnabled(True)
def set_new_button_enabled(self, flag: bool) -> None:
self._new_request_button.setEnabled(flag)
def _filter_request_list(self, text: str) -> None:
self._request_list.filter(text)
| [((1129, 1155), 'weakref.proxy', 'weakref.proxy', (['main_window'], {}), '(main_window)\n', (1142, 1155), False, 'import weakref\n'), ((1285, 1337), 'electrumsv.logs.logs.get_logger', 'logs.get_logger', (['f"""receive-view[{self._account_id}]"""'], {}), "(f'receive-view[{self._account_id}]')\n", (1300, 1337), False, 'from electrumsv.logs import logs\n'), ((1859, 1876), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', (['self'], {}), '(self)\n', (1870, 1876), False, 'from PyQt5.QtWidgets import QComboBox, QGridLayout, QGroupBox, QHBoxLayout, QLabel, QLineEdit, QVBoxLayout, QWidget\n'), ((2498, 2511), 'PyQt5.QtWidgets.QGridLayout', 'QGridLayout', ([], {}), '()\n', (2509, 2511), False, 'from PyQt5.QtWidgets import QComboBox, QGridLayout, QGroupBox, QHBoxLayout, QLabel, QLineEdit, QVBoxLayout, QWidget\n'), ((2765, 2923), 'electrumsv.i18n._', '_', (['"""Bitcoin SV payment destination where the payment should be received. Note that each payment request uses a different Bitcoin SV payment destination."""'], {}), "('Bitcoin SV payment destination where the payment should be received. Note that each payment request uses a different Bitcoin SV payment destination.'\n )\n", (2766, 2923), False, 'from electrumsv.i18n import _\n'), ((3309, 3320), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (3318, 3320), False, 'from PyQt5.QtWidgets import QComboBox, QGridLayout, QGroupBox, QHBoxLayout, QLabel, QLineEdit, QVBoxLayout, QWidget\n'), ((4143, 4154), 'PyQt5.QtWidgets.QComboBox', 'QComboBox', ([], {}), '()\n', (4152, 4154), False, 'from PyQt5.QtWidgets import QComboBox, QGridLayout, QGroupBox, QHBoxLayout, QLabel, QLineEdit, QVBoxLayout, QWidget\n'), ((4978, 4991), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', (['""""""'], {}), "('')\n", (4987, 4991), False, 'from PyQt5.QtWidgets import QComboBox, QGridLayout, QGroupBox, QHBoxLayout, QLabel, QLineEdit, QVBoxLayout, QWidget\n'), ((5494, 5507), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (5505, 5507), False, 'from PyQt5.QtWidgets import QComboBox, QGridLayout, QGroupBox, QHBoxLayout, QLabel, QLineEdit, QVBoxLayout, QWidget\n'), ((5705, 5718), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (5716, 5718), False, 'from PyQt5.QtWidgets import QComboBox, QGridLayout, QGroupBox, QHBoxLayout, QLabel, QLineEdit, QVBoxLayout, QWidget\n'), ((5794, 5807), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (5805, 5807), False, 'from PyQt5.QtWidgets import QComboBox, QGridLayout, QGroupBox, QHBoxLayout, QLabel, QLineEdit, QVBoxLayout, QWidget\n'), ((5977, 5990), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (5988, 5990), False, 'from PyQt5.QtWidgets import QComboBox, QGridLayout, QGroupBox, QHBoxLayout, QLabel, QLineEdit, QVBoxLayout, QWidget\n'), ((6194, 6205), 'PyQt5.QtWidgets.QGroupBox', 'QGroupBox', ([], {}), '()\n', (6203, 6205), False, 'from PyQt5.QtWidgets import QComboBox, QGridLayout, QGroupBox, QHBoxLayout, QLabel, QLineEdit, QVBoxLayout, QWidget\n'), ((7849, 7891), 'electrumsv.bitcoin.script_template_to_string', 'script_template_to_string', (['script_template'], {}), '(script_template)\n', (7874, 7891), False, 'from electrumsv.bitcoin import script_template_to_string\n'), ((7907, 7952), 'electrumsv.web.create_URI', 'web.create_URI', (['address_text', 'amount', 'message'], {}), '(address_text, amount, message)\n', (7921, 7952), False, 'from electrumsv import web\n'), ((2980, 3006), 'electrumsv.i18n._', '_', (['"""Receiving destination"""'], {}), "('Receiving destination')\n", (2981, 
3006), False, 'from electrumsv.i18n import _\n'), ((5224, 5241), 'electrumsv.i18n._', '_', (['"""Save request"""'], {}), "('Save request')\n", (5225, 5241), False, 'from electrumsv.i18n import _\n'), ((5318, 5326), 'electrumsv.i18n._', '_', (['"""New"""'], {}), "('New')\n", (5319, 5326), False, 'from electrumsv.i18n import _\n'), ((6235, 6248), 'electrumsv.i18n._', '_', (['"""Requests"""'], {}), "('Requests')\n", (6236, 6248), False, 'from electrumsv.i18n import _\n'), ((3351, 3367), 'electrumsv.i18n._', '_', (['"""Description"""'], {}), "('Description')\n", (3352, 3367), False, 'from electrumsv.i18n import _\n'), ((3594, 3615), 'electrumsv.i18n._', '_', (['"""Requested amount"""'], {}), "('Requested amount')\n", (3595, 3615), False, 'from electrumsv.i18n import _\n'), ((3882, 3907), 'electrumsv.app_state.app_state.fx.is_enabled', 'app_state.fx.is_enabled', ([], {}), '()\n', (3905, 3907), False, 'from electrumsv.app_state import app_state\n'), ((4385, 4422), 'electrumsv.i18n._', '_', (['"""Expiration date of your request."""'], {}), "('Expiration date of your request.')\n", (4386, 4422), False, 'from electrumsv.i18n import _\n'), ((4436, 4530), 'electrumsv.i18n._', '_', (['"""This information is seen by the recipient if you send them a signed payment request."""'], {}), "('This information is seen by the recipient if you send them a signed payment request.'\n )\n", (4437, 4530), False, 'from electrumsv.i18n import _\n'), ((4556, 4683), 'electrumsv.i18n._', '_', (['"""Expired requests have to be deleted manually from your list, in order to free the corresponding Bitcoin SV addresses."""'], {}), "('Expired requests have to be deleted manually from your list, in order to free the corresponding Bitcoin SV addresses.'\n )\n", (4557, 4683), False, 'from electrumsv.i18n import _\n'), ((4709, 4806), 'electrumsv.i18n._', '_', (['"""The Bitcoin SV address never expires and will always be part of this ElectrumSV wallet."""'], {}), "('The Bitcoin SV address never expires and will always be part of this ElectrumSV wallet.'\n )\n", (4710, 4806), False, 'from electrumsv.i18n import _\n'), ((4864, 4884), 'electrumsv.i18n._', '_', (['"""Request expires"""'], {}), "('Request expires')\n", (4865, 4884), False, 'from electrumsv.i18n import _\n'), ((6756, 6798), 'electrumsv.bitcoin.script_template_to_string', 'script_template_to_string', (['script_template'], {}), '(script_template)\n', (6781, 6798), False, 'from electrumsv.bitcoin import script_template_to_string\n'), ((8316, 8356), 'electrumsv.i18n._', '_', (['"""No available receiving destination."""'], {}), "('No available receiving destination.')\n", (8317, 8356), False, 'from electrumsv.i18n import _\n'), ((9243, 9280), 'electrumsv.i18n._', '_', (['"""No receiving payment destination"""'], {}), "('No receiving payment destination')\n", (9244, 9280), False, 'from electrumsv.i18n import _\n'), ((9484, 9509), 'electrumsv.i18n._', '_', (['"""No message or amount"""'], {}), "('No message or amount')\n", (9485, 9509), False, 'from electrumsv.i18n import _\n'), ((11239, 11315), 'electrumsv.i18n._', '_', (['"""Your wallet is broken and could not allocate a new payment destination."""'], {}), "('Your wallet is broken and could not allocate a new payment destination.')\n", (11240, 11315), False, 'from electrumsv.i18n import _\n'), ((10745, 10794), 'electrumsv.i18n._', '_', (['"""No more payment destinations in your wallet."""'], {}), "('No more payment destinations in your wallet.')\n", (10746, 10794), False, 'from electrumsv.i18n import _\n'), ((10816, 10914), 
'electrumsv.i18n._', '_', (['"""You are using a non-deterministic account, which cannot create new payment destinations."""'], {}), "('You are using a non-deterministic account, which cannot create new payment destinations.'\n )\n", (10817, 10914), False, 'from electrumsv.i18n import _\n'), ((10956, 11050), 'electrumsv.i18n._', '_', (['"""If you want to create new payment destinations, use a deterministic account instead."""'], {}), "('If you want to create new payment destinations, use a deterministic account instead.'\n )\n", (10957, 11050), False, 'from electrumsv.i18n import _\n')] |
BluechipSystems/thrift | test/crossrunner/compat.py | c595aa18cba0032e074f9585aa2d6ca548f07197 | import os
import sys
if sys.version_info[0] == 2:
_ENCODE = sys.getfilesystemencoding()
def path_join(*args):
bin_args = map(lambda a: a.decode(_ENCODE), args)
return os.path.join(*bin_args).encode(_ENCODE)
def str_join(s, l):
bin_args = map(lambda a: a.decode(_ENCODE), l)
b = s.decode(_ENCODE)
return b.join(bin_args).encode(_ENCODE)
logfile_open = open
else:
path_join = os.path.join
str_join = str.join
def logfile_open(*args):
return open(*args, errors='replace')
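# Illustrative behaviour of the helpers above (paths/commands are assumed examples):
#   path_join('gen-py', 'TestServer.py') -> 'gen-py/TestServer.py' (bytes on Python 2)
#   str_join(' ', ['python', 'server.py']) -> 'python server.py'
#   logfile_open('client.log', 'w') replaces undecodable bytes on Python 3 so noisy
#   subprocess output cannot crash the cross-test log writer.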
| [((63, 90), 'sys.getfilesystemencoding', 'sys.getfilesystemencoding', ([], {}), '()\n', (88, 90), False, 'import sys\n'), ((181, 204), 'os.path.join', 'os.path.join', (['*bin_args'], {}), '(*bin_args)\n', (193, 204), False, 'import os\n')] |
usamaahmadkhan/vpp | test/test_vom.py | cece3e682f6dba68ba86b66b295f99a33496d9ee | #!/usr/bin/env python
""" VAPI test """
import unittest
import os
import signal
from framework import VppTestCase, running_extended_tests, \
VppTestRunner, Worker
@unittest.skipUnless(running_extended_tests(), "part of extended tests")
class VOMTestCase(VppTestCase):
""" VPP Object Model Test """
def test_vom_cpp(self):
""" run C++ VOM tests """
var = "TEST_DIR"
built_root = os.getenv(var, None)
self.assertIsNotNone(built_root,
"Environment variable `%s' not set" % var)
executable = "%s/build/vom_test/vom_test" % built_root
worker = Worker(
[executable, "vpp object model", self.shm_prefix], self.logger)
worker.start()
timeout = 120
worker.join(timeout)
self.logger.info("Worker result is `%s'" % worker.result)
error = False
if worker.result is None:
try:
error = True
self.logger.error(
"Timeout! Worker did not finish in %ss" % timeout)
os.killpg(os.getpgid(worker.process.pid), signal.SIGTERM)
worker.join()
            except Exception:
raise Exception("Couldn't kill worker-spawned process")
if error:
raise Exception(
"Timeout! Worker did not finish in %ss" % timeout)
self.assert_equal(worker.result, 0, "Binary test return code")
if __name__ == '__main__':
unittest.main(testRunner=VppTestRunner)
| [((191, 215), 'framework.running_extended_tests', 'running_extended_tests', ([], {}), '()\n', (213, 215), False, 'from framework import VppTestCase, running_extended_tests, VppTestRunner, Worker\n'), ((1478, 1517), 'unittest.main', 'unittest.main', ([], {'testRunner': 'VppTestRunner'}), '(testRunner=VppTestRunner)\n', (1491, 1517), False, 'import unittest\n'), ((418, 438), 'os.getenv', 'os.getenv', (['var', 'None'], {}), '(var, None)\n', (427, 438), False, 'import os\n'), ((632, 702), 'framework.Worker', 'Worker', (["[executable, 'vpp object model', self.shm_prefix]", 'self.logger'], {}), "([executable, 'vpp object model', self.shm_prefix], self.logger)\n", (638, 702), False, 'from framework import VppTestCase, running_extended_tests, VppTestRunner, Worker\n'), ((1090, 1120), 'os.getpgid', 'os.getpgid', (['worker.process.pid'], {}), '(worker.process.pid)\n', (1100, 1120), False, 'import os\n')] |
bealbrown/allhours | locations/spiders/tesco.py | f750ee7644246a97bd16879f14115d7845f76b89 | import json
import re
import scrapy
from locations.hourstudy import inputoutput
DAYS = {
'mo': 'Mo',
'tu': 'Tu',
'we': 'We',
'fr': 'Fr',
'th': 'Th',
'sa': 'Sa',
'su': 'Su',
}
class TescoSpider(scrapy.Spider):
name = "tesco"
allowed_domains = ["tescolocation.api.tesco.com"]
def store_hours(self, store_hours):
        clean_time = ''
        for key, value in store_hours.items():
            if 'isOpen' in value and 'open' in value and 'close' in value:
                if value['isOpen'] == 'true':
                    clean_time += '{} {}:{}-{}:{};'.format(
                        DAYS[key],
                        value['open'][0:2], value['open'][2:],
                        value['close'][0:2], value['close'][2:])
                else:
                    clean_time += DAYS[key] + ' Closed;'
return clean_time
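    # Illustrative example of store_hours (input values are assumed, not from a live response):
    #   {'mo': {'isOpen': 'true', 'open': '0800', 'close': '2200'},
    #    'su': {'isOpen': 'false', 'open': '', 'close': ''}}
    #   -> 'Mo 08:00-22:00;Su Closed;', the compact opening_hours string attached to each item.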
def start_requests(self):
url = 'https://tescolocation.api.tesco.com/v3/locations/search?offset=0&limit=1000000&sort=near:%2251.499207299999995,-0.08800609999999999%22&filter=category:Store%20AND%20isoCountryCode:x-uk&fields=name,geo,openingHours,altIds.branchNumber,contact'
headers = {
'Accept-Language': 'en-US,en;q=0.9',
'Origin': 'https://www.tesco.com',
'Accept-Encoding': 'gzip, deflate, br',
'Accept': 'application/json, text/javascript, */*; q=0.01',
'Referer': 'https://www.kfc.com/store-locator?query=90210',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
'X-Requested-With': 'XMLHttpRequest',
'x-appkey':'store-locator-web-cde'
}
yield scrapy.http.FormRequest(
url=url, method='GET',
headers=headers, callback=self.parse
)
def parse(self, response):
data = json.loads(response.body_as_unicode())
stores = data['results']
for store in stores:
addr_full=''
for add in store['location']['contact']['address']['lines']:
addr_full=addr_full+' '+add['text']
properties = {
'ref': store['location']['id'],
'name': store['location']['name'],
'addr_full': addr_full,
'city': store['location']['contact']['address']['town'],
'state': '',
'country':'United Kingdom',
'postcode': store['location']['contact']['address']['postcode'],
'lat': store['location']['geo']['coordinates']['latitude'],
'lon': store['location']['geo']['coordinates']['longitude'],
'phone': store['location']['contact']['phoneNumbers'][0]['number'],
}
opening_hours = self.store_hours(store['location']['openingHours'][0]['standardOpeningHours'])
if opening_hours:
properties['opening_hours'] = opening_hours
raw = store['location']['openingHours'][0]['standardOpeningHours']
formatted = opening_hours
yield inputoutput(raw,formatted)
# yield inputoutput(**properties)
| [((1597, 1686), 'scrapy.http.FormRequest', 'scrapy.http.FormRequest', ([], {'url': 'url', 'method': '"""GET"""', 'headers': 'headers', 'callback': 'self.parse'}), "(url=url, method='GET', headers=headers, callback=\n self.parse)\n", (1620, 1686), False, 'import scrapy\n'), ((3018, 3045), 'locations.hourstudy.inputoutput', 'inputoutput', (['raw', 'formatted'], {}), '(raw, formatted)\n', (3029, 3045), False, 'from locations.hourstudy import inputoutput\n')] |
tacaswell/astropy | astropy/table/serialize.py | 75046e61916da36dffe87ddf59a7c6bfb00de81c | from importlib import import_module
import re
from copy import deepcopy
from collections import OrderedDict
from astropy.utils.data_info import MixinInfo
from .column import Column
from .table import Table, QTable, has_info_class
from astropy.units.quantity import QuantityInfo
__construct_mixin_classes = ('astropy.time.core.Time',
'astropy.time.core.TimeDelta',
'astropy.units.quantity.Quantity',
'astropy.coordinates.angles.Latitude',
'astropy.coordinates.angles.Longitude',
'astropy.coordinates.angles.Angle',
'astropy.coordinates.distances.Distance',
'astropy.coordinates.earth.EarthLocation',
'astropy.coordinates.sky_coordinate.SkyCoord',
'astropy.table.table.NdarrayMixin',
'astropy.table.column.MaskedColumn')
class SerializedColumn(dict):
"""
    Subclass of dict that is used in the representation to contain the name
    (and possibly other info) for a mixin attribute (either primary data or an
array-like attribute) that is serialized as a column in the table.
Normally contains the single key ``name`` with the name of the column in the
table.
"""
pass
def _represent_mixin_as_column(col, name, new_cols, mixin_cols,
exclude_classes=()):
"""Carry out processing needed to serialize ``col`` in an output table
consisting purely of plain ``Column`` or ``MaskedColumn`` columns. This
    relies on the object to determine if any transformation is required and may
depend on the ``serialize_method`` and ``serialize_context`` context
variables. For instance a ``MaskedColumn`` may be stored directly to
FITS, but can also be serialized as separate data and mask columns.
This function builds up a list of plain columns in the ``new_cols`` arg (which
is passed as a persistent list). This includes both plain columns from the
original table and plain columns that represent data from serialized columns
(e.g. ``jd1`` and ``jd2`` arrays from a ``Time`` column).
For serialized columns the ``mixin_cols`` dict is updated with required
attributes and information to subsequently reconstruct the table.
Table mixin columns are always serialized and get represented by one
or more data columns. In earlier versions of the code *only* mixin
columns were serialized, hence the use within this code of "mixin"
to imply serialization. Starting with version 3.1, the non-mixin
``MaskedColumn`` can also be serialized.
"""
obj_attrs = col.info._represent_as_dict()
ordered_keys = col.info._represent_as_dict_attrs
# If serialization is not required (see function docstring above)
# or explicitly specified as excluded, then treat as a normal column.
if not obj_attrs or col.__class__ in exclude_classes:
new_cols.append(col)
return
# Subtlety here is handling mixin info attributes. The basic list of such
# attributes is: 'name', 'unit', 'dtype', 'format', 'description', 'meta'.
# - name: handled directly [DON'T store]
# - unit: DON'T store if this is a parent attribute
# - dtype: captured in plain Column if relevant [DON'T store]
# - format: possibly irrelevant but settable post-object creation [DO store]
# - description: DO store
# - meta: DO store
info = {}
for attr, nontrivial, xform in (('unit', lambda x: x is not None and x != '', str),
('format', lambda x: x is not None, None),
('description', lambda x: x is not None, None),
('meta', lambda x: x, None)):
col_attr = getattr(col.info, attr)
if nontrivial(col_attr):
info[attr] = xform(col_attr) if xform else col_attr
data_attrs = [key for key in ordered_keys if key in obj_attrs and
getattr(obj_attrs[key], 'shape', ())[:1] == col.shape[:1]]
for data_attr in data_attrs:
data = obj_attrs[data_attr]
# New column name combines the old name and attribute
        # (e.g. skycoord.ra, skycoord.dec), unless it is the primary data
# attribute for the column (e.g. value for Quantity or data
# for MaskedColumn)
if data_attr == col.info._represent_as_dict_primary_data:
new_name = name
else:
new_name = name + '.' + data_attr
if not has_info_class(data, MixinInfo):
new_cols.append(Column(data, name=new_name, **info))
obj_attrs[data_attr] = SerializedColumn({'name': new_name})
else:
# recurse. This will define obj_attrs[new_name].
_represent_mixin_as_column(data, new_name, new_cols, obj_attrs)
obj_attrs[data_attr] = SerializedColumn(obj_attrs.pop(new_name))
# Strip out from info any attributes defined by the parent
for attr in col.info.attrs_from_parent:
if attr in info:
del info[attr]
if info:
obj_attrs['__info__'] = info
# Store the fully qualified class name
obj_attrs['__class__'] = col.__module__ + '.' + col.__class__.__name__
mixin_cols[name] = obj_attrs
def represent_mixins_as_columns(tbl, exclude_classes=()):
"""Represent input Table ``tbl`` using only `~astropy.table.Column`
or `~astropy.table.MaskedColumn` objects.
This function represents any mixin columns like `~astropy.time.Time` in
``tbl`` to one or more plain ``~astropy.table.Column`` objects and returns
a new Table. A single mixin column may be split into multiple column
components as needed for fully representing the column. This includes the
possibility of recursive splitting, as shown in the example below. The
new column names are formed as ``<column_name>.<component>``, e.g.
``sc.ra`` for a `~astropy.coordinates.SkyCoord` column named ``sc``.
In addition to splitting columns, this function updates the table ``meta``
dictionary to include a dict named ``__serialized_columns__`` which provides
additional information needed to construct the original mixin columns from
the split columns.
This function is used by astropy I/O when writing tables to ECSV, FITS,
HDF5 formats.
Note that if the table does not include any mixin columns then the original
table is returned with no update to ``meta``.
Parameters
----------
tbl : `~astropy.table.Table` or subclass
Table to represent mixins as Columns
exclude_classes : tuple of classes
        Exclude any mixin columns which are instances of any of the classes in the tuple
Returns
-------
tbl : `~astropy.table.Table`
New Table with updated columns, or else the original input ``tbl``
Examples
--------
>>> from astropy.table import Table, represent_mixins_as_columns
>>> from astropy.time import Time
>>> from astropy.coordinates import SkyCoord
>>> x = [100.0, 200.0]
>>> obstime = Time([1999.0, 2000.0], format='jyear')
>>> sc = SkyCoord([1, 2], [3, 4], unit='deg', obstime=obstime)
>>> tbl = Table([sc, x], names=['sc', 'x'])
>>> represent_mixins_as_columns(tbl)
<Table length=2>
sc.ra sc.dec sc.obstime.jd1 sc.obstime.jd2 x
deg deg
float64 float64 float64 float64 float64
------- ------- -------------- -------------- -------
1.0 3.0 2451180.0 -0.25 100.0
2.0 4.0 2451545.0 0.0 200.0
"""
# Dict of metadata for serializing each column, keyed by column name.
# Gets filled in place by _represent_mixin_as_column().
mixin_cols = {}
# List of columns for the output table. For plain Column objects
# this will just be the original column object.
new_cols = []
# Go through table columns and represent each column as one or more
# plain Column objects (in new_cols) + metadata (in mixin_cols).
for col in tbl.itercols():
_represent_mixin_as_column(col, col.info.name, new_cols, mixin_cols,
exclude_classes=exclude_classes)
# If no metadata was created then just return the original table.
if not mixin_cols:
return tbl
meta = deepcopy(tbl.meta)
meta['__serialized_columns__'] = mixin_cols
out = Table(new_cols, meta=meta, copy=False)
return out
def _construct_mixin_from_obj_attrs_and_info(obj_attrs, info):
cls_full_name = obj_attrs.pop('__class__')
# If this is a supported class then import the class and run
# the _construct_from_col method. Prevent accidentally running
# untrusted code by only importing known astropy classes.
if cls_full_name not in __construct_mixin_classes:
raise ValueError('unsupported class for construct {}'.format(cls_full_name))
mod_name, cls_name = re.match(r'(.+)\.(\w+)', cls_full_name).groups()
module = import_module(mod_name)
cls = getattr(module, cls_name)
for attr, value in info.items():
if attr in cls.info.attrs_from_parent:
obj_attrs[attr] = value
mixin = cls.info._construct_from_dict(obj_attrs)
for attr, value in info.items():
if attr not in obj_attrs:
setattr(mixin.info, attr, value)
return mixin
class _TableLite(OrderedDict):
"""
Minimal table-like object for _construct_mixin_from_columns. This allows
manipulating the object like a Table but without the actual overhead
for a full Table.
    More pressingly, there is an issue with constructing MaskedColumn, where the
    encoded Column components (data, mask) are turned into a MaskedColumn.
    When this happens in a real table, all other columns are immediately
    masked and a warning is issued. This is not desirable.
"""
def add_column(self, col, index=0):
colnames = self.colnames
self[col.info.name] = col
for ii, name in enumerate(colnames):
if ii >= index:
self.move_to_end(name)
@property
def colnames(self):
return list(self.keys())
def itercols(self):
return self.values()
def _construct_mixin_from_columns(new_name, obj_attrs, out):
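    # Sketch of what happens here (added commentary): 'obj_attrs' maps attribute names to
    # SerializedColumn entries (nested mixins are handled by recursing first). The matching
    # plain columns are popped out of 'out' and the reconstructed mixin named 'new_name'
    # is inserted back at the position of the first of those columns.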
data_attrs_map = {}
for name, val in obj_attrs.items():
if isinstance(val, SerializedColumn):
if 'name' in val:
data_attrs_map[val['name']] = name
else:
_construct_mixin_from_columns(name, val, out)
data_attrs_map[name] = name
for name in data_attrs_map.values():
del obj_attrs[name]
# Get the index where to add new column
idx = min(out.colnames.index(name) for name in data_attrs_map)
# Name is the column name in the table (e.g. "coord.ra") and
# data_attr is the object attribute name (e.g. "ra"). A different
# example would be a formatted time object that would have (e.g.)
# "time_col" and "value", respectively.
for name, data_attr in data_attrs_map.items():
col = out[name]
obj_attrs[data_attr] = col
del out[name]
info = obj_attrs.pop('__info__', {})
if len(data_attrs_map) == 1:
# col is the first and only serialized column; in that case, use info
# stored on the column.
for attr, nontrivial in (('unit', lambda x: x not in (None, '')),
('format', lambda x: x is not None),
('description', lambda x: x is not None),
('meta', lambda x: x)):
col_attr = getattr(col.info, attr)
if nontrivial(col_attr):
info[attr] = col_attr
info['name'] = new_name
col = _construct_mixin_from_obj_attrs_and_info(obj_attrs, info)
out.add_column(col, index=idx)
def _construct_mixins_from_columns(tbl):
if '__serialized_columns__' not in tbl.meta:
return tbl
meta = tbl.meta.copy()
mixin_cols = meta.pop('__serialized_columns__')
out = _TableLite(tbl.columns)
for new_name, obj_attrs in mixin_cols.items():
_construct_mixin_from_columns(new_name, obj_attrs, out)
# If no quantity subclasses are in the output then output as Table.
# For instance ascii.read(file, format='ecsv') doesn't specify an
# output class and should return the minimal table class that
# represents the table file.
has_quantities = any(isinstance(col.info, QuantityInfo)
for col in out.itercols())
out_cls = QTable if has_quantities else Table
return out_cls(list(out.values()), names=out.colnames, copy=False, meta=meta)
| [((8503, 8521), 'copy.deepcopy', 'deepcopy', (['tbl.meta'], {}), '(tbl.meta)\n', (8511, 8521), False, 'from copy import deepcopy\n'), ((9171, 9194), 'importlib.import_module', 'import_module', (['mod_name'], {}), '(mod_name)\n', (9184, 9194), False, 'from importlib import import_module\n'), ((9109, 9149), 're.match', 're.match', (['"""(.+)\\\\.(\\\\w+)"""', 'cls_full_name'], {}), "('(.+)\\\\.(\\\\w+)', cls_full_name)\n", (9117, 9149), False, 'import re\n')] |
tadvi/uva | UVa 573 - The Snail/sample/main.py | 0ac0cbdf593879b4fb02a3efc09adbb031cb47d5 | '''
Created on Jun 18, 2013
@author: Yubin Bai
All rights reserved.
'''
import time
from multiprocessing.pool import Pool
parallelSolve = False
infinity = 1 << 30
def solve(par):
H, U, D, F = par
day = 0
amountRise = U
currH = 0
while True:
amountRise = U * (1 - 0.01 * F * day)
currH += amountRise
if currH > H:
return 'success on day %d' % (day + 1)
currH -= D
if currH < 0:
return 'failure on day %d' % (day + 1)
day += 1
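# Worked example (input values assumed for illustration):
#   solve((6, 3, 1, 10)): day 0 climbs 3.0 to height 3, slips to 2; day 1 climbs 2.7 to 4.7,
#   slips to 3.7; day 2 climbs 2.4 and reaches 6.1 > 6, so it returns 'success on day 3'.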
class Solver:
def getInput(self):
self.input = []
self.numOfTests = 0
while True:
H, U, D, F = map(int, self.fIn.readline().strip().split())
if H == 0:
break
self.numOfTests += 1
self.input.append((H, U, D, F))
def __init__(self):
self.fIn = open('input.txt')
self.fOut = open('output.txt', 'w')
self.results = []
def parallel(self):
self.getInput()
p = Pool(4)
millis1 = int(round(time.time() * 1000))
self.results = p.map(solve, self.input)
millis2 = int(round(time.time() * 1000))
print("Time in milliseconds: %d " % (millis2 - millis1))
self.makeOutput()
def sequential(self):
self.getInput()
millis1 = int(round(time.time() * 1000))
for i in self.input:
self.results.append(solve(i))
millis2 = int(round(time.time() * 1000))
print("Time in milliseconds: %d " % (millis2 - millis1))
self.makeOutput()
def makeOutput(self):
for test in range(self.numOfTests):
self.fOut.write("Case #%d: %s\n" % (test + 1, self.results[test]))
self.fIn.close()
self.fOut.close()
if __name__ == '__main__':
solver = Solver()
if parallelSolve:
solver.parallel()
else:
solver.sequential()
| [((1021, 1028), 'multiprocessing.pool.Pool', 'Pool', (['(4)'], {}), '(4)\n', (1025, 1028), False, 'from multiprocessing.pool import Pool\n'), ((1057, 1068), 'time.time', 'time.time', ([], {}), '()\n', (1066, 1068), False, 'import time\n'), ((1154, 1165), 'time.time', 'time.time', ([], {}), '()\n', (1163, 1165), False, 'import time\n'), ((1345, 1356), 'time.time', 'time.time', ([], {}), '()\n', (1354, 1356), False, 'import time\n'), ((1465, 1476), 'time.time', 'time.time', ([], {}), '()\n', (1474, 1476), False, 'import time\n')] |
tomhoper/scibert | scibert/models/text_classifier.py | 3cc65f433808f7879c973dc4fc41bd25e465dc15 | from typing import Dict, Optional, List, Any
import torch
import torch.nn.functional as F
from allennlp.data import Vocabulary
from allennlp.models.model import Model
from allennlp.modules import FeedForward, TextFieldEmbedder, Seq2SeqEncoder
from allennlp.nn import InitializerApplicator, RegularizerApplicator
from allennlp.nn import util
from allennlp.training.metrics import CategoricalAccuracy, F1Measure
from overrides import overrides
@Model.register("text_classifier")
class TextClassifier(Model):
"""
Implements a basic text classifier:
1) Embed tokens using `text_field_embedder`
2) Seq2SeqEncoder, e.g. BiLSTM
3) Append the first and last encoder states
4) Final feedforward layer
Optimized with CrossEntropyLoss. Evaluated with CategoricalAccuracy & F1.
"""
def __init__(self, vocab: Vocabulary,
text_field_embedder: TextFieldEmbedder,
text_encoder: Seq2SeqEncoder,
classifier_feedforward: FeedForward,
                 verbose_metrics: bool = False,
initializer: InitializerApplicator = InitializerApplicator(),
regularizer: Optional[RegularizerApplicator] = None,
) -> None:
super(TextClassifier, self).__init__(vocab, regularizer)
self.text_field_embedder = text_field_embedder
self.num_classes = self.vocab.get_vocab_size("labels")
self.text_encoder = text_encoder
self.classifier_feedforward = classifier_feedforward
self.prediction_layer = torch.nn.Linear(self.classifier_feedforward.get_output_dim() , self.num_classes)
self.label_accuracy = CategoricalAccuracy()
self.label_f1_metrics = {}
self.verbose_metrics = verbose_metrics
for i in range(self.num_classes):
self.label_f1_metrics[vocab.get_token_from_index(index=i, namespace="labels")] = F1Measure(positive_label=i)
self.loss = torch.nn.CrossEntropyLoss()
self.pool = lambda text, mask: util.get_final_encoder_states(text, mask, bidirectional=True)
initializer(self)
@overrides
def forward(self,
text: Dict[str, torch.LongTensor],
label: torch.IntTensor = None,
metadata: List[Dict[str, Any]] = None) -> Dict[str, torch.Tensor]:
"""
Parameters
----------
text : Dict[str, torch.LongTensor]
From a ``TextField``
label : torch.IntTensor, optional (default = None)
From a ``LabelField``
metadata : ``List[Dict[str, Any]]``, optional, (default = None)
            Optional metadata passed through by the dataset reader (e.g. the original
            tokenization of the text); it is not used by this model's forward pass.
Returns
-------
An output dictionary consisting of:
label_logits : torch.FloatTensor
A tensor of shape ``(batch_size, num_labels)`` representing unnormalised log probabilities of the label.
label_probs : torch.FloatTensor
A tensor of shape ``(batch_size, num_labels)`` representing probabilities of the label.
loss : torch.FloatTensor, optional
A scalar loss to be optimised.
"""
embedded_text = self.text_field_embedder(text)
mask = util.get_text_field_mask(text)
encoded_text = self.text_encoder(embedded_text, mask)
pooled = self.pool(encoded_text, mask)
ff_hidden = self.classifier_feedforward(pooled)
logits = self.prediction_layer(ff_hidden)
class_probs = F.softmax(logits, dim=1)
output_dict = {"logits": logits}
if label is not None:
loss = self.loss(logits, label)
output_dict["loss"] = loss
# compute F1 per label
for i in range(self.num_classes):
metric = self.label_f1_metrics[self.vocab.get_token_from_index(index=i, namespace="labels")]
metric(class_probs, label)
self.label_accuracy(logits, label)
return output_dict
@overrides
def decode(self, output_dict: Dict[str, torch.Tensor]) -> Dict[str, torch.Tensor]:
class_probabilities = F.softmax(output_dict['logits'], dim=-1)
output_dict['class_probs'] = class_probabilities
return output_dict
def get_metrics(self, reset: bool = False) -> Dict[str, float]:
metric_dict = {}
sum_f1 = 0.0
for name, metric in self.label_f1_metrics.items():
metric_val = metric.get_metric(reset)
if self.verbose_metrics:
metric_dict[name + '_P'] = metric_val[0]
metric_dict[name + '_R'] = metric_val[1]
metric_dict[name + '_F1'] = metric_val[2]
sum_f1 += metric_val[2]
names = list(self.label_f1_metrics.keys())
total_len = len(names)
average_f1 = sum_f1 / total_len
metric_dict['average_F1'] = average_f1
metric_dict['accuracy'] = self.label_accuracy.get_metric(reset)
return metric_dict
| [((446, 479), 'allennlp.models.model.Model.register', 'Model.register', (['"""text_classifier"""'], {}), "('text_classifier')\n", (460, 479), False, 'from allennlp.models.model import Model\n'), ((1102, 1125), 'allennlp.nn.InitializerApplicator', 'InitializerApplicator', ([], {}), '()\n', (1123, 1125), False, 'from allennlp.nn import InitializerApplicator, RegularizerApplicator\n'), ((1656, 1677), 'allennlp.training.metrics.CategoricalAccuracy', 'CategoricalAccuracy', ([], {}), '()\n', (1675, 1677), False, 'from allennlp.training.metrics import CategoricalAccuracy, F1Measure\n'), ((1945, 1972), 'torch.nn.CrossEntropyLoss', 'torch.nn.CrossEntropyLoss', ([], {}), '()\n', (1970, 1972), False, 'import torch\n'), ((3321, 3351), 'allennlp.nn.util.get_text_field_mask', 'util.get_text_field_mask', (['text'], {}), '(text)\n', (3345, 3351), False, 'from allennlp.nn import util\n'), ((3589, 3613), 'torch.nn.functional.softmax', 'F.softmax', (['logits'], {'dim': '(1)'}), '(logits, dim=1)\n', (3598, 3613), True, 'import torch.nn.functional as F\n'), ((4210, 4250), 'torch.nn.functional.softmax', 'F.softmax', (["output_dict['logits']"], {'dim': '(-1)'}), "(output_dict['logits'], dim=-1)\n", (4219, 4250), True, 'import torch.nn.functional as F\n'), ((1897, 1924), 'allennlp.training.metrics.F1Measure', 'F1Measure', ([], {'positive_label': 'i'}), '(positive_label=i)\n', (1906, 1924), False, 'from allennlp.training.metrics import CategoricalAccuracy, F1Measure\n'), ((2013, 2074), 'allennlp.nn.util.get_final_encoder_states', 'util.get_final_encoder_states', (['text', 'mask'], {'bidirectional': '(True)'}), '(text, mask, bidirectional=True)\n', (2042, 2074), False, 'from allennlp.nn import util\n')] |
crotwell/cmd2 | plugins/template/tasks.py | 5ce3a64e41258b6a694ad45bb1c604be53a1e974 | #
# -*- coding: utf-8 -*-
"""Development related tasks to be run with 'invoke'"""
import os
import pathlib
import shutil
import invoke
TASK_ROOT = pathlib.Path(__file__).resolve().parent
TASK_ROOT_STR = str(TASK_ROOT)
# shared function
def rmrf(items, verbose=True):
"""Silently remove a list of directories or files"""
if isinstance(items, str):
items = [items]
for item in items:
if verbose:
print("Removing {}".format(item))
shutil.rmtree(item, ignore_errors=True)
# rmtree doesn't remove bare files
try:
os.remove(item)
except FileNotFoundError:
pass
# create namespaces
namespace = invoke.Collection()
namespace_clean = invoke.Collection('clean')
namespace.add_collection(namespace_clean, 'clean')
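# Example invocations from this plugin's root directory (illustrative; assumes 'invoke' is installed):
#   invoke --list          # show every task defined below
#   invoke pytest --junit  # run the test suite and write junit/test-results.xml
#   invoke clean           # run the whole 'clean' collection (clean_all is its default task)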
#####
#
# pytest, pylint, and codecov
#
#####
@invoke.task
def pytest(context, junit=False, pty=True, append_cov=False):
"""Run tests and code coverage using pytest"""
ROOT_PATH = TASK_ROOT.parent.parent
with context.cd(str(ROOT_PATH)):
command_str = 'pytest --cov=cmd2_myplugin --cov-report=term --cov-report=html'
if append_cov:
command_str += ' --cov-append'
if junit:
command_str += ' --junitxml=junit/test-results.xml'
command_str += ' ' + str((TASK_ROOT / 'tests').relative_to(ROOT_PATH))
context.run(command_str, pty=pty)
namespace.add_task(pytest)
@invoke.task
def pytest_clean(context):
"""Remove pytest cache and code coverage files and directories"""
# pylint: disable=unused-argument
with context.cd(TASK_ROOT_STR):
dirs = ['.pytest_cache', '.cache', '.coverage']
rmrf(dirs)
namespace_clean.add_task(pytest_clean, 'pytest')
@invoke.task
def pylint(context):
"""Check code quality using pylint"""
context.run('pylint --rcfile=cmd2_myplugin/pylintrc cmd2_myplugin')
namespace.add_task(pylint)
@invoke.task
def pylint_tests(context):
"""Check code quality of test suite using pylint"""
context.run('pylint --rcfile=tests/pylintrc tests')
namespace.add_task(pylint_tests)
#####
#
# build and distribute
#
#####
BUILDDIR = 'build'
DISTDIR = 'dist'
@invoke.task
def build_clean(context):
"""Remove the build directory"""
# pylint: disable=unused-argument
rmrf(BUILDDIR)
namespace_clean.add_task(build_clean, 'build')
@invoke.task
def dist_clean(context):
"""Remove the dist directory"""
# pylint: disable=unused-argument
rmrf(DISTDIR)
namespace_clean.add_task(dist_clean, 'dist')
@invoke.task
def eggs_clean(context):
"""Remove egg directories"""
# pylint: disable=unused-argument
dirs = set()
dirs.add('.eggs')
for name in os.listdir(os.curdir):
if name.endswith('.egg-info'):
dirs.add(name)
if name.endswith('.egg'):
dirs.add(name)
rmrf(dirs)
namespace_clean.add_task(eggs_clean, 'eggs')
@invoke.task
def bytecode_clean(context):
"""Remove __pycache__ directories and *.pyc files"""
# pylint: disable=unused-argument
dirs = set()
for root, dirnames, files in os.walk(os.curdir):
if '__pycache__' in dirnames:
dirs.add(os.path.join(root, '__pycache__'))
for file in files:
if file.endswith(".pyc"):
dirs.add(os.path.join(root, file))
print("Removing __pycache__ directories and .pyc files")
rmrf(dirs, verbose=False)
namespace_clean.add_task(bytecode_clean, 'bytecode')
#
# make a dummy clean task which runs all the tasks in the clean namespace
clean_tasks = list(namespace_clean.tasks.values())
@invoke.task(pre=list(namespace_clean.tasks.values()), default=True)
def clean_all(context):
"""Run all clean tasks"""
# pylint: disable=unused-argument
pass
namespace_clean.add_task(clean_all, 'all')
@invoke.task(pre=[clean_all])
def sdist(context):
"""Create a source distribution"""
context.run('python setup.py sdist')
namespace.add_task(sdist)
@invoke.task(pre=[clean_all])
def wheel(context):
"""Build a wheel distribution"""
context.run('python setup.py bdist_wheel')
namespace.add_task(wheel)
#
# these two tasks are commented out so you don't
# accidentally run them and upload this template to pypi
#
# @invoke.task(pre=[sdist, wheel])
# def pypi(context):
# """Build and upload a distribution to pypi"""
# context.run('twine upload dist/*')
# namespace.add_task(pypi)
# @invoke.task(pre=[sdist, wheel])
# def pypi_test(context):
# """Build and upload a distribution to https://test.pypi.org"""
# context.run('twine upload --repository-url https://test.pypi.org/legacy/ dist/*')
# namespace.add_task(pypi_test)
| [((691, 710), 'invoke.Collection', 'invoke.Collection', ([], {}), '()\n', (708, 710), False, 'import invoke\n'), ((729, 755), 'invoke.Collection', 'invoke.Collection', (['"""clean"""'], {}), "('clean')\n", (746, 755), False, 'import invoke\n'), ((3856, 3884), 'invoke.task', 'invoke.task', ([], {'pre': '[clean_all]'}), '(pre=[clean_all])\n', (3867, 3884), False, 'import invoke\n'), ((4016, 4044), 'invoke.task', 'invoke.task', ([], {'pre': '[clean_all]'}), '(pre=[clean_all])\n', (4027, 4044), False, 'import invoke\n'), ((2731, 2752), 'os.listdir', 'os.listdir', (['os.curdir'], {}), '(os.curdir)\n', (2741, 2752), False, 'import os\n'), ((3132, 3150), 'os.walk', 'os.walk', (['os.curdir'], {}), '(os.curdir)\n', (3139, 3150), False, 'import os\n'), ((482, 521), 'shutil.rmtree', 'shutil.rmtree', (['item'], {'ignore_errors': '(True)'}), '(item, ignore_errors=True)\n', (495, 521), False, 'import shutil\n'), ((150, 172), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (162, 172), False, 'import pathlib\n'), ((590, 605), 'os.remove', 'os.remove', (['item'], {}), '(item)\n', (599, 605), False, 'import os\n'), ((3211, 3244), 'os.path.join', 'os.path.join', (['root', '"""__pycache__"""'], {}), "(root, '__pycache__')\n", (3223, 3244), False, 'import os\n'), ((3336, 3360), 'os.path.join', 'os.path.join', (['root', 'file'], {}), '(root, file)\n', (3348, 3360), False, 'import os\n')] |
timgates42/trex-core | scripts/automation/trex_control_plane/interactive/trex/examples/stl/ndr_plugin.py | efe94752fcb2d0734c83d4877afe92a3dbf8eccd | import stl_path
class MyNDRPlugin():
def __init__(self):
pass
def pre_iteration(self, finding_max_rate, run_results=None, **kwargs):
""" Function ran before each iteration.
:parameters:
finding_max_rate: boolean
                Indicates whether we are running for the first time, trying to find the max rate. If this is the case, run_results will be None.
run_results: dict
A dictionary that contains the following keys:
queue_full_percentage: Percentage of packets that are queued.
drop_rate_percentage: Percentage of packets that were dropped.
rate_tx_bps: TX rate in bps.
rate_rx_bps: RX rate in bps.
tx_util: TX utilization percentage.
latency: Latency groups.
cpu_util: CPU utilization percentage.
tx_pps: TX in pps.
rx_pps: RX in pps.
tx_bps: TX in bps.
rx_bps: RX in bps.
bw_per_core: Bandwidth per core.
rate_p: Running rate in percentage out of max.
total_tx_L1: Total TX L1.
total_rx_L1: Total RX L1.
iteration: Description of iteration (not necessarily a number)
            Note: the rate refers to the upcoming iteration; all other values refer to the previous iteration.
kwargs: dict
List of tunables passed as parameters.
"""
# Pre iteration function. This function will run before TRex transmits to the DUT.
# Could use this to better prepare the DUT, for example define shapers, policers, increase buffers and queues.
# You can receive tunables in the command line, through the kwargs argument.
pass
def post_iteration(self, finding_max_rate, run_results, **kwargs):
""" Function ran after each iteration.
:parameters:
finding_max_rate: boolean
Indicates whether we are running for the first time, trying to find the max rate. If this is the case, some values of run_results (like iteration for example) are not relevant.
run_results: dict
A dictionary that contains the following keys:
queue_full_percentage: Percentage of packets that are queued.
drop_rate_percentage: Percentage of packets that were dropped.
rate_tx_bps: TX rate in bps.
rate_rx_bps: RX rate in bps.
tx_util: TX utilization percentage.
latency: Latency groups.
cpu_util: CPU utilization percentage.
tx_pps: TX in pps.
rx_pps: RX in pps.
tx_bps: TX in bps.
rx_bps: RX in bps.
bw_per_core: Bandwidth per core.
rate_p: Running rate in percentage out of max.
total_tx_L1: Total TX L1.
total_rx_L1: Total RX L1.
iteration: Description of iteration (not necessarily a number)
kwargs: dict
List of tunables passed as parameters.
:returns:
bool: should stop the benchmarking or not.
"""
# Post iteration function. This function will run after TRex transmits to the DUT.
# Could use this to decide if to continue the benchmark after querying the DUT post run. The DUT might be overheated or any other thing that might make you want to stop the run.
# You can receive tunables in the command line, through the kwargs argument.
should_stop = False
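        # For example (the threshold below is an assumed value, not part of the plugin API):
        #   should_stop = run_results['drop_rate_percentage'] > 10
        # would end the benchmark early after an iteration that saw heavy drops.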
return should_stop
# dynamic load of python module
def register():
return MyNDRPlugin() | [] |
maexono/home-assistant | homeassistant/components/epsonworkforce/sensor.py | c174b83f5408124fc7834e8282969a1e8f9cca16 | """Support for Epson Workforce Printer."""
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_HOST, CONF_MONITORED_CONDITIONS
from homeassistant.exceptions import PlatformNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
REQUIREMENTS = ['epsonprinter==0.0.8']
_LOGGER = logging.getLogger(__name__)
MONITORED_CONDITIONS = {
'black': ['Inklevel Black', '%', 'mdi:water'],
'magenta': ['Inklevel Magenta', '%', 'mdi:water'],
'cyan': ['Inklevel Cyan', '%', 'mdi:water'],
'yellow': ['Inklevel Yellow', '%', 'mdi:water'],
'clean': ['Inklevel Cleaning', '%', 'mdi:water'],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_MONITORED_CONDITIONS):
vol.All(cv.ensure_list, [vol.In(MONITORED_CONDITIONS)]),
})
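# Example configuration.yaml entry matching the schema above (host value is illustrative):
#   sensor:
#     - platform: epsonworkforce
#       host: 192.168.1.100
#       monitored_conditions:
#         - black
#         - cyan
#         - magenta
#         - yellow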
SCAN_INTERVAL = timedelta(minutes=60)
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up the cartridge sensor."""
host = config.get(CONF_HOST)
from epsonprinter_pkg.epsonprinterapi import EpsonPrinterAPI
api = EpsonPrinterAPI(host)
if not api.available:
raise PlatformNotReady()
sensors = [EpsonPrinterCartridge(api, condition)
for condition in config[CONF_MONITORED_CONDITIONS]]
add_devices(sensors, True)
class EpsonPrinterCartridge(Entity):
"""Representation of a cartridge sensor."""
def __init__(self, api, cartridgeidx):
"""Initialize a cartridge sensor."""
self._api = api
self._id = cartridgeidx
self._name = MONITORED_CONDITIONS[self._id][0]
self._unit = MONITORED_CONDITIONS[self._id][1]
self._icon = MONITORED_CONDITIONS[self._id][2]
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return self._icon
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit
@property
def state(self):
"""Return the state of the device."""
return self._api.getSensorValue(self._id)
@property
def available(self):
"""Could the device be accessed during the last update call."""
return self._api.available
def update(self):
"""Get the latest data from the Epson printer."""
self._api.update()
| [((451, 478), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (468, 478), False, 'import logging\n'), ((980, 1001), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(60)'}), '(minutes=60)\n', (989, 1001), False, 'from datetime import timedelta\n'), ((1220, 1241), 'epsonprinter_pkg.epsonprinterapi.EpsonPrinterAPI', 'EpsonPrinterAPI', (['host'], {}), '(host)\n', (1235, 1241), False, 'from epsonprinter_pkg.epsonprinterapi import EpsonPrinterAPI\n'), ((815, 838), 'voluptuous.Required', 'vol.Required', (['CONF_HOST'], {}), '(CONF_HOST)\n', (827, 838), True, 'import voluptuous as vol\n'), ((855, 894), 'voluptuous.Required', 'vol.Required', (['CONF_MONITORED_CONDITIONS'], {}), '(CONF_MONITORED_CONDITIONS)\n', (867, 894), True, 'import voluptuous as vol\n'), ((1282, 1300), 'homeassistant.exceptions.PlatformNotReady', 'PlatformNotReady', ([], {}), '()\n', (1298, 1300), False, 'from homeassistant.exceptions import PlatformNotReady\n'), ((929, 957), 'voluptuous.In', 'vol.In', (['MONITORED_CONDITIONS'], {}), '(MONITORED_CONDITIONS)\n', (935, 957), True, 'import voluptuous as vol\n')] |
bast0006/bot | bot/exts/help_channels/_cog.py | dec9a9dba77aa4322f9dc37b6493a8410e7482ec | import asyncio
import logging
import random
import typing as t
from datetime import datetime, timezone
from operator import attrgetter
import discord
import discord.abc
from discord.ext import commands
from bot import constants
from bot.bot import Bot
from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats
from bot.utils import channel as channel_utils, lock, scheduling
log = logging.getLogger(__name__)
NAMESPACE = "help"
HELP_CHANNEL_TOPIC = """
This is a Python help channel. You can claim your own help channel in the Python Help: Available category.
"""
class HelpChannels(commands.Cog):
"""
Manage the help channel system of the guild.
The system is based on a 3-category system:
Available Category
* Contains channels which are ready to be occupied by someone who needs help
* Will always contain `constants.HelpChannels.max_available` channels; refilled automatically
from the pool of dormant channels
* Prioritise using the channels which have been dormant for the longest amount of time
* If there are no more dormant channels, the bot will automatically create a new one
* If there are no dormant channels to move, helpers will be notified (see `notify()`)
* When a channel becomes available, the dormant embed will be edited to show `AVAILABLE_MSG`
* User can only claim a channel at an interval `constants.HelpChannels.claim_minutes`
* To keep track of cooldowns, user which claimed a channel will have a temporary role
In Use Category
* Contains all channels which are occupied by someone needing help
* Channel moves to dormant category after `constants.HelpChannels.idle_minutes` of being idle
* Command can prematurely mark a channel as dormant
* Channel claimant is allowed to use the command
* Allowed roles for the command are configurable with `constants.HelpChannels.cmd_whitelist`
* When a channel becomes dormant, an embed with `DORMANT_MSG` will be sent
Dormant Category
* Contains channels which aren't in use
* Channels are used to refill the Available category
Help channels are named after the chemical elements in `bot/resources/elements.json`.
"""
def __init__(self, bot: Bot):
self.bot = bot
self.scheduler = scheduling.Scheduler(self.__class__.__name__)
# Categories
self.available_category: discord.CategoryChannel = None
self.in_use_category: discord.CategoryChannel = None
self.dormant_category: discord.CategoryChannel = None
# Queues
self.channel_queue: asyncio.Queue[discord.TextChannel] = None
self.name_queue: t.Deque[str] = None
self.last_notification: t.Optional[datetime] = None
# Asyncio stuff
self.queue_tasks: t.List[asyncio.Task] = []
self.init_task = self.bot.loop.create_task(self.init_cog())
def cog_unload(self) -> None:
"""Cancel the init task and scheduled tasks when the cog unloads."""
log.trace("Cog unload: cancelling the init_cog task")
self.init_task.cancel()
log.trace("Cog unload: cancelling the channel queue tasks")
for task in self.queue_tasks:
task.cancel()
self.scheduler.cancel_all()
@lock.lock_arg(NAMESPACE, "message", attrgetter("channel.id"))
@lock.lock_arg(NAMESPACE, "message", attrgetter("author.id"))
@lock.lock_arg(f"{NAMESPACE}.unclaim", "message", attrgetter("author.id"), wait=True)
async def claim_channel(self, message: discord.Message) -> None:
"""
Claim the channel in which the question `message` was sent.
Move the channel to the In Use category and pin the `message`. Add a cooldown to the
claimant to prevent them from asking another question. Lastly, make a new channel available.
"""
log.info(f"Channel #{message.channel} was claimed by `{message.author.id}`.")
await self.move_to_in_use(message.channel)
await _cooldown.revoke_send_permissions(message.author, self.scheduler)
await _message.pin(message)
try:
await _message.dm_on_open(message)
except Exception as e:
log.warning("Error occurred while sending DM:", exc_info=e)
# Add user with channel for dormant check.
await _caches.claimants.set(message.channel.id, message.author.id)
self.bot.stats.incr("help.claimed")
# Must use a timezone-aware datetime to ensure a correct POSIX timestamp.
timestamp = datetime.now(timezone.utc).timestamp()
await _caches.claim_times.set(message.channel.id, timestamp)
await _caches.unanswered.set(message.channel.id, True)
# Not awaited because it may indefinitely hold the lock while waiting for a channel.
scheduling.create_task(self.move_to_available(), name=f"help_claim_{message.id}")
def create_channel_queue(self) -> asyncio.Queue:
"""
Return a queue of dormant channels to use for getting the next available channel.
The channels are added to the queue in a random order.
"""
log.trace("Creating the channel queue.")
channels = list(_channel.get_category_channels(self.dormant_category))
random.shuffle(channels)
log.trace("Populating the channel queue with channels.")
queue = asyncio.Queue()
for channel in channels:
queue.put_nowait(channel)
return queue
async def create_dormant(self) -> t.Optional[discord.TextChannel]:
"""
Create and return a new channel in the Dormant category.
The new channel will sync its permission overwrites with the category.
Return None if no more channel names are available.
"""
log.trace("Getting a name for a new dormant channel.")
try:
name = self.name_queue.popleft()
except IndexError:
log.debug("No more names available for new dormant channels.")
return None
log.debug(f"Creating a new dormant channel named {name}.")
return await self.dormant_category.create_text_channel(name, topic=HELP_CHANNEL_TOPIC)
async def close_check(self, ctx: commands.Context) -> bool:
"""Return True if the channel is in use and the user is the claimant or has a whitelisted role."""
if ctx.channel.category != self.in_use_category:
log.debug(f"{ctx.author} invoked command 'close' outside an in-use help channel")
return False
if await _caches.claimants.get(ctx.channel.id) == ctx.author.id:
log.trace(f"{ctx.author} is the help channel claimant, passing the check for dormant.")
self.bot.stats.incr("help.dormant_invoke.claimant")
return True
log.trace(f"{ctx.author} is not the help channel claimant, checking roles.")
has_role = await commands.has_any_role(*constants.HelpChannels.cmd_whitelist).predicate(ctx)
if has_role:
self.bot.stats.incr("help.dormant_invoke.staff")
return has_role
@commands.command(name="close", aliases=["dormant", "solved"], enabled=False)
async def close_command(self, ctx: commands.Context) -> None:
"""
Make the current in-use help channel dormant.
May only be invoked by the channel's claimant or by staff.
"""
# Don't use a discord.py check because the check needs to fail silently.
if await self.close_check(ctx):
log.info(f"Close command invoked by {ctx.author} in #{ctx.channel}.")
await self.unclaim_channel(ctx.channel, is_auto=False)
async def get_available_candidate(self) -> discord.TextChannel:
"""
Return a dormant channel to turn into an available channel.
If no channel is available, wait indefinitely until one becomes available.
"""
log.trace("Getting an available channel candidate.")
try:
channel = self.channel_queue.get_nowait()
except asyncio.QueueEmpty:
log.info("No candidate channels in the queue; creating a new channel.")
channel = await self.create_dormant()
if not channel:
log.info("Couldn't create a candidate channel; waiting to get one from the queue.")
notify_channel = self.bot.get_channel(constants.HelpChannels.notify_channel)
last_notification = await _message.notify(notify_channel, self.last_notification)
if last_notification:
self.last_notification = last_notification
self.bot.stats.incr("help.out_of_channel_alerts")
channel = await self.wait_for_dormant_channel()
return channel
async def init_available(self) -> None:
"""Initialise the Available category with channels."""
log.trace("Initialising the Available category with channels.")
channels = list(_channel.get_category_channels(self.available_category))
missing = constants.HelpChannels.max_available - len(channels)
# If we've got less than `max_available` channel available, we should add some.
if missing > 0:
log.trace(f"Moving {missing} missing channels to the Available category.")
for _ in range(missing):
await self.move_to_available()
# If for some reason we have more than `max_available` channels available,
# we should move the superfluous ones over to dormant.
elif missing < 0:
log.trace(f"Moving {abs(missing)} superfluous available channels over to the Dormant category.")
for channel in channels[:abs(missing)]:
await self.unclaim_channel(channel)
async def init_categories(self) -> None:
"""Get the help category objects. Remove the cog if retrieval fails."""
log.trace("Getting the CategoryChannel objects for the help categories.")
try:
self.available_category = await channel_utils.try_get_channel(
constants.Categories.help_available
)
self.in_use_category = await channel_utils.try_get_channel(
constants.Categories.help_in_use
)
self.dormant_category = await channel_utils.try_get_channel(
constants.Categories.help_dormant
)
except discord.HTTPException:
log.exception("Failed to get a category; cog will be removed")
self.bot.remove_cog(self.qualified_name)
async def init_cog(self) -> None:
"""Initialise the help channel system."""
log.trace("Waiting for the guild to be available before initialisation.")
await self.bot.wait_until_guild_available()
log.trace("Initialising the cog.")
await self.init_categories()
await _cooldown.check_cooldowns(self.scheduler)
self.channel_queue = self.create_channel_queue()
self.name_queue = _name.create_name_queue(
self.available_category,
self.in_use_category,
self.dormant_category,
)
log.trace("Moving or rescheduling in-use channels.")
for channel in _channel.get_category_channels(self.in_use_category):
await self.move_idle_channel(channel, has_task=False)
# Prevent the command from being used until ready.
# The ready event wasn't used because channels could change categories between the time
# the command is invoked and the cog is ready (e.g. if move_idle_channel wasn't called yet).
# This may confuse users. So would potentially long delays for the cog to become ready.
self.close_command.enabled = True
await self.init_available()
_stats.report_counts()
log.info("Cog is ready!")
async def move_idle_channel(self, channel: discord.TextChannel, has_task: bool = True) -> None:
"""
Make the `channel` dormant if idle or schedule the move if still active.
If `has_task` is True and rescheduling is required, the extant task to make the channel
dormant will first be cancelled.
"""
log.trace(f"Handling in-use channel #{channel} ({channel.id}).")
if not await _message.is_empty(channel):
idle_seconds = constants.HelpChannels.idle_minutes * 60
else:
idle_seconds = constants.HelpChannels.deleted_idle_minutes * 60
time_elapsed = await _channel.get_idle_time(channel)
if time_elapsed is None or time_elapsed >= idle_seconds:
log.info(
f"#{channel} ({channel.id}) is idle longer than {idle_seconds} seconds "
f"and will be made dormant."
)
await self.unclaim_channel(channel)
else:
# Cancel the existing task, if any.
if has_task:
self.scheduler.cancel(channel.id)
delay = idle_seconds - time_elapsed
log.info(
f"#{channel} ({channel.id}) is still active; "
f"scheduling it to be moved after {delay} seconds."
)
self.scheduler.schedule_later(delay, channel.id, self.move_idle_channel(channel))
async def move_to_available(self) -> None:
"""Make a channel available."""
log.trace("Making a channel available.")
channel = await self.get_available_candidate()
log.info(f"Making #{channel} ({channel.id}) available.")
await _message.send_available_message(channel)
log.trace(f"Moving #{channel} ({channel.id}) to the Available category.")
await _channel.move_to_bottom(
channel=channel,
category_id=constants.Categories.help_available,
)
_stats.report_counts()
async def move_to_dormant(self, channel: discord.TextChannel) -> None:
"""Make the `channel` dormant."""
log.info(f"Moving #{channel} ({channel.id}) to the Dormant category.")
await _channel.move_to_bottom(
channel=channel,
category_id=constants.Categories.help_dormant,
)
log.trace(f"Sending dormant message for #{channel} ({channel.id}).")
embed = discord.Embed(description=_message.DORMANT_MSG)
await channel.send(embed=embed)
log.trace(f"Pushing #{channel} ({channel.id}) into the channel queue.")
self.channel_queue.put_nowait(channel)
_stats.report_counts()
@lock.lock_arg(f"{NAMESPACE}.unclaim", "channel")
async def unclaim_channel(self, channel: discord.TextChannel, *, is_auto: bool = True) -> None:
"""
Unclaim an in-use help `channel` to make it dormant.
Unpin the claimant's question message and move the channel to the Dormant category.
Remove the cooldown role from the channel claimant if they have no other channels claimed.
Cancel the scheduled cooldown role removal task.
Set `is_auto` to True if the channel was automatically closed or False if manually closed.
"""
claimant_id = await _caches.claimants.get(channel.id)
_unclaim_channel = self._unclaim_channel
# It could be possible that there is no claimant cached. In such case, it'd be useless and
# possibly incorrect to lock on None. Therefore, the lock is applied conditionally.
if claimant_id is not None:
decorator = lock.lock_arg(f"{NAMESPACE}.unclaim", "claimant_id", wait=True)
_unclaim_channel = decorator(_unclaim_channel)
return await _unclaim_channel(channel, claimant_id, is_auto)
async def _unclaim_channel(self, channel: discord.TextChannel, claimant_id: int, is_auto: bool) -> None:
"""Actual implementation of `unclaim_channel`. See that for full documentation."""
await _caches.claimants.delete(channel.id)
# Ignore missing tasks because a channel may still be dormant after the cooldown expires.
if claimant_id in self.scheduler:
self.scheduler.cancel(claimant_id)
claimant = self.bot.get_guild(constants.Guild.id).get_member(claimant_id)
if claimant is None:
log.info(f"{claimant_id} left the guild during their help session; the cooldown role won't be removed")
elif not any(claimant.id == user_id for _, user_id in await _caches.claimants.items()):
# Remove the cooldown role if the claimant has no other channels left
await _cooldown.remove_cooldown_role(claimant)
await _message.unpin(channel)
await _stats.report_complete_session(channel.id, is_auto)
await self.move_to_dormant(channel)
# Cancel the task that makes the channel dormant only if called by the close command.
# In other cases, the task is either already done or not-existent.
if not is_auto:
self.scheduler.cancel(channel.id)
async def move_to_in_use(self, channel: discord.TextChannel) -> None:
"""Make a channel in-use and schedule it to be made dormant."""
log.info(f"Moving #{channel} ({channel.id}) to the In Use category.")
await _channel.move_to_bottom(
channel=channel,
category_id=constants.Categories.help_in_use,
)
timeout = constants.HelpChannels.idle_minutes * 60
log.trace(f"Scheduling #{channel} ({channel.id}) to become dormant in {timeout} sec.")
self.scheduler.schedule_later(timeout, channel.id, self.move_idle_channel(channel))
_stats.report_counts()
@commands.Cog.listener()
async def on_message(self, message: discord.Message) -> None:
"""Move an available channel to the In Use category and replace it with a dormant one."""
if message.author.bot:
return # Ignore messages sent by bots.
await self.init_task
if channel_utils.is_in_category(message.channel, constants.Categories.help_available):
if not _channel.is_excluded_channel(message.channel):
await self.claim_channel(message)
else:
await _message.check_for_answer(message)
@commands.Cog.listener()
async def on_message_delete(self, msg: discord.Message) -> None:
"""
Reschedule an in-use channel to become dormant sooner if the channel is empty.
The new time for the dormant task is configured with `HelpChannels.deleted_idle_minutes`.
"""
await self.init_task
if not channel_utils.is_in_category(msg.channel, constants.Categories.help_in_use):
return
if not await _message.is_empty(msg.channel):
return
log.info(f"Claimant of #{msg.channel} ({msg.author}) deleted message, channel is empty now. Rescheduling task.")
# Cancel existing dormant task before scheduling new.
self.scheduler.cancel(msg.channel.id)
delay = constants.HelpChannels.deleted_idle_minutes * 60
self.scheduler.schedule_later(delay, msg.channel.id, self.move_idle_channel(msg.channel))
async def wait_for_dormant_channel(self) -> discord.TextChannel:
"""Wait for a dormant channel to become available in the queue and return it."""
log.trace("Waiting for a dormant channel.")
task = asyncio.create_task(self.channel_queue.get())
self.queue_tasks.append(task)
channel = await task
log.trace(f"Channel #{channel} ({channel.id}) finally retrieved from the queue.")
self.queue_tasks.remove(task)
return channel
| [((415, 442), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (432, 442), False, 'import logging\n'), ((7145, 7221), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""close"""', 'aliases': "['dormant', 'solved']", 'enabled': '(False)'}), "(name='close', aliases=['dormant', 'solved'], enabled=False)\n", (7161, 7221), False, 'from discord.ext import commands\n'), ((14590, 14638), 'bot.utils.lock.lock_arg', 'lock.lock_arg', (['f"""{NAMESPACE}.unclaim"""', '"""channel"""'], {}), "(f'{NAMESPACE}.unclaim', 'channel')\n", (14603, 14638), False, 'from bot.utils import channel as channel_utils, lock, scheduling\n'), ((17670, 17693), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (17691, 17693), False, 'from discord.ext import commands\n'), ((18256, 18279), 'discord.ext.commands.Cog.listener', 'commands.Cog.listener', ([], {}), '()\n', (18277, 18279), False, 'from discord.ext import commands\n'), ((2338, 2383), 'bot.utils.scheduling.Scheduler', 'scheduling.Scheduler', (['self.__class__.__name__'], {}), '(self.__class__.__name__)\n', (2358, 2383), False, 'from bot.utils import channel as channel_utils, lock, scheduling\n'), ((3350, 3374), 'operator.attrgetter', 'attrgetter', (['"""channel.id"""'], {}), "('channel.id')\n", (3360, 3374), False, 'from operator import attrgetter\n'), ((3417, 3440), 'operator.attrgetter', 'attrgetter', (['"""author.id"""'], {}), "('author.id')\n", (3427, 3440), False, 'from operator import attrgetter\n'), ((3496, 3519), 'operator.attrgetter', 'attrgetter', (['"""author.id"""'], {}), "('author.id')\n", (3506, 3519), False, 'from operator import attrgetter\n'), ((5305, 5329), 'random.shuffle', 'random.shuffle', (['channels'], {}), '(channels)\n', (5319, 5329), False, 'import random\n'), ((5412, 5427), 'asyncio.Queue', 'asyncio.Queue', ([], {}), '()\n', (5425, 5427), False, 'import asyncio\n'), ((11075, 11173), 'bot.exts.help_channels._name.create_name_queue', '_name.create_name_queue', (['self.available_category', 'self.in_use_category', 'self.dormant_category'], {}), '(self.available_category, self.in_use_category, self\n .dormant_category)\n', (11098, 11173), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((11301, 11353), 'bot.exts.help_channels._channel.get_category_channels', '_channel.get_category_channels', (['self.in_use_category'], {}), '(self.in_use_category)\n', (11331, 11353), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((11861, 11883), 'bot.exts.help_channels._stats.report_counts', '_stats.report_counts', ([], {}), '()\n', (11881, 11883), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((13885, 13907), 'bot.exts.help_channels._stats.report_counts', '_stats.report_counts', ([], {}), '()\n', (13905, 13907), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((14336, 14383), 'discord.Embed', 'discord.Embed', ([], {'description': '_message.DORMANT_MSG'}), '(description=_message.DORMANT_MSG)\n', (14349, 14383), False, 'import discord\n'), ((14561, 14583), 'bot.exts.help_channels._stats.report_counts', '_stats.report_counts', ([], {}), '()\n', (14581, 14583), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((17641, 17663), 'bot.exts.help_channels._stats.report_counts', '_stats.report_counts', ([], {}), '()\n', (17661, 
17663), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((17983, 18070), 'bot.utils.channel.is_in_category', 'channel_utils.is_in_category', (['message.channel', 'constants.Categories.help_available'], {}), '(message.channel, constants.Categories.\n help_available)\n', (18011, 18070), True, 'from bot.utils import channel as channel_utils, lock, scheduling\n'), ((4039, 4104), 'bot.exts.help_channels._cooldown.revoke_send_permissions', '_cooldown.revoke_send_permissions', (['message.author', 'self.scheduler'], {}), '(message.author, self.scheduler)\n', (4072, 4104), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((4120, 4141), 'bot.exts.help_channels._message.pin', '_message.pin', (['message'], {}), '(message)\n', (4132, 4141), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((4371, 4431), 'bot.exts.help_channels._caches.claimants.set', '_caches.claimants.set', (['message.channel.id', 'message.author.id'], {}), '(message.channel.id, message.author.id)\n', (4392, 4431), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((4633, 4687), 'bot.exts.help_channels._caches.claim_times.set', '_caches.claim_times.set', (['message.channel.id', 'timestamp'], {}), '(message.channel.id, timestamp)\n', (4656, 4687), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((4703, 4751), 'bot.exts.help_channels._caches.unanswered.set', '_caches.unanswered.set', (['message.channel.id', '(True)'], {}), '(message.channel.id, True)\n', (4725, 4751), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((5242, 5295), 'bot.exts.help_channels._channel.get_category_channels', '_channel.get_category_channels', (['self.dormant_category'], {}), '(self.dormant_category)\n', (5272, 5295), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((9032, 9087), 'bot.exts.help_channels._channel.get_category_channels', '_channel.get_category_channels', (['self.available_category'], {}), '(self.available_category)\n', (9062, 9087), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((10949, 10990), 'bot.exts.help_channels._cooldown.check_cooldowns', '_cooldown.check_cooldowns', (['self.scheduler'], {}), '(self.scheduler)\n', (10974, 10990), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((12574, 12605), 'bot.exts.help_channels._channel.get_idle_time', '_channel.get_idle_time', (['channel'], {}), '(channel)\n', (12596, 12605), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((13612, 13652), 'bot.exts.help_channels._message.send_available_message', '_message.send_available_message', (['channel'], {}), '(channel)\n', (13643, 13652), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((13751, 13845), 'bot.exts.help_channels._channel.move_to_bottom', '_channel.move_to_bottom', ([], {'channel': 'channel', 'category_id': 'constants.Categories.help_available'}), '(channel=channel, category_id=constants.Categories.\n help_available)\n', (13774, 13845), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((14119, 14211), 
'bot.exts.help_channels._channel.move_to_bottom', '_channel.move_to_bottom', ([], {'channel': 'channel', 'category_id': 'constants.Categories.help_dormant'}), '(channel=channel, category_id=constants.Categories.\n help_dormant)\n', (14142, 14211), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((15201, 15234), 'bot.exts.help_channels._caches.claimants.get', '_caches.claimants.get', (['channel.id'], {}), '(channel.id)\n', (15222, 15234), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((15536, 15599), 'bot.utils.lock.lock_arg', 'lock.lock_arg', (['f"""{NAMESPACE}.unclaim"""', '"""claimant_id"""'], {'wait': '(True)'}), "(f'{NAMESPACE}.unclaim', 'claimant_id', wait=True)\n", (15549, 15599), False, 'from bot.utils import channel as channel_utils, lock, scheduling\n'), ((15944, 15980), 'bot.exts.help_channels._caches.claimants.delete', '_caches.claimants.delete', (['channel.id'], {}), '(channel.id)\n', (15968, 15980), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((16649, 16672), 'bot.exts.help_channels._message.unpin', '_message.unpin', (['channel'], {}), '(channel)\n', (16663, 16672), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((16687, 16738), 'bot.exts.help_channels._stats.report_complete_session', '_stats.report_complete_session', (['channel.id', 'is_auto'], {}), '(channel.id, is_auto)\n', (16717, 16738), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((17263, 17354), 'bot.exts.help_channels._channel.move_to_bottom', '_channel.move_to_bottom', ([], {'channel': 'channel', 'category_id': 'constants.Categories.help_in_use'}), '(channel=channel, category_id=constants.Categories.\n help_in_use)\n', (17286, 17354), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((18604, 18679), 'bot.utils.channel.is_in_category', 'channel_utils.is_in_category', (['msg.channel', 'constants.Categories.help_in_use'], {}), '(msg.channel, constants.Categories.help_in_use)\n', (18632, 18679), True, 'from bot.utils import channel as channel_utils, lock, scheduling\n'), ((4173, 4201), 'bot.exts.help_channels._message.dm_on_open', '_message.dm_on_open', (['message'], {}), '(message)\n', (4192, 4201), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((4580, 4606), 'datetime.datetime.now', 'datetime.now', (['timezone.utc'], {}), '(timezone.utc)\n', (4592, 4606), False, 'from datetime import datetime, timezone\n'), ((6600, 6637), 'bot.exts.help_channels._caches.claimants.get', '_caches.claimants.get', (['ctx.channel.id'], {}), '(ctx.channel.id)\n', (6621, 6637), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((10096, 10162), 'bot.utils.channel.try_get_channel', 'channel_utils.try_get_channel', (['constants.Categories.help_available'], {}), '(constants.Categories.help_available)\n', (10125, 10162), True, 'from bot.utils import channel as channel_utils, lock, scheduling\n'), ((10234, 10297), 'bot.utils.channel.try_get_channel', 'channel_utils.try_get_channel', (['constants.Categories.help_in_use'], {}), '(constants.Categories.help_in_use)\n', (10263, 10297), True, 'from bot.utils import channel as channel_utils, lock, scheduling\n'), ((10370, 10434), 'bot.utils.channel.try_get_channel', 
'channel_utils.try_get_channel', (['constants.Categories.help_dormant'], {}), '(constants.Categories.help_dormant)\n', (10399, 10434), True, 'from bot.utils import channel as channel_utils, lock, scheduling\n'), ((12358, 12384), 'bot.exts.help_channels._message.is_empty', '_message.is_empty', (['channel'], {}), '(channel)\n', (12375, 12384), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((18086, 18131), 'bot.exts.help_channels._channel.is_excluded_channel', '_channel.is_excluded_channel', (['message.channel'], {}), '(message.channel)\n', (18114, 18131), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((18215, 18249), 'bot.exts.help_channels._message.check_for_answer', '_message.check_for_answer', (['message'], {}), '(message)\n', (18240, 18249), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((18722, 18752), 'bot.exts.help_channels._message.is_empty', '_message.is_empty', (['msg.channel'], {}), '(msg.channel)\n', (18739, 18752), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((6955, 7015), 'discord.ext.commands.has_any_role', 'commands.has_any_role', (['*constants.HelpChannels.cmd_whitelist'], {}), '(*constants.HelpChannels.cmd_whitelist)\n', (6976, 7015), False, 'from discord.ext import commands\n'), ((16593, 16633), 'bot.exts.help_channels._cooldown.remove_cooldown_role', '_cooldown.remove_cooldown_role', (['claimant'], {}), '(claimant)\n', (16623, 16633), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((8511, 8566), 'bot.exts.help_channels._message.notify', '_message.notify', (['notify_channel', 'self.last_notification'], {}), '(notify_channel, self.last_notification)\n', (8526, 8566), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n'), ((16465, 16490), 'bot.exts.help_channels._caches.claimants.items', '_caches.claimants.items', ([], {}), '()\n', (16488, 16490), False, 'from bot.exts.help_channels import _caches, _channel, _cooldown, _message, _name, _stats\n')] |
LordZagreus/LodeRunner | code/menu/screens/shopmenu.py | 68aab36be47cabe31e52f3ee43520bdafcdf3c95 | import os
import math
import random
import time
from code.menu.menu import Menu
from code.tools.eventqueue import EventQueue
from code.tools.xml import XMLParser
from code.utils.common import coalesce, intersect, offset_rect, log, log2, xml_encode, xml_decode, translate_rgb_to_string
from code.constants.common import SCREEN_WIDTH, SCREEN_HEIGHT, PAUSE_MENU_X, PAUSE_MENU_Y, PAUSE_MENU_WIDTH, PAUSE_MENU_HEIGHT, MODE_GAME, TILE_WIDTH, TILE_HEIGHT, DIR_UP, DIR_RIGHT, DIR_DOWN, DIR_LEFT, SPLASH_MODE_GREYSCALE_ANIMATED
from code.constants.states import STATUS_ACTIVE, STATUS_INACTIVE, GAME_STATE_ACTIVE, GAME_STATE_NOT_READY
from code.constants.newsfeeder import *
class ShopMenu(Menu):
def __init__(self):
Menu.__init__(self)
# Assume all shop menus come from already-lightboxed dialogues.
self.lightbox_controller.set_interval( self.lightbox_controller.get_target() )
# We're going to keep a handle to the seller so that we can
# remove items from their inventory after a purchase...
self.vendor = None#seller
# Shop title (e.g. "Bob's Fine Items")
self.title = "Shoppe"
# Salutation (e.g. "Look at these great items")
self.message = "Take a look at my inventory."
# Before we begin populating the shop menu, we'll first
# make sure the NPC seller stocks any specified "required" items...
self.required_item_names = []
# Track item quality threshholds (low and high)
self.min_item_quality = 0
self.max_item_quality = 0
# Items in stock at any given time
self.max_items_stocked = 1
# Number of times the vendor can restock
self.max_item_reloads = 1
# Track whether this is the first build or a refresh
self.first_build = True
# Fire build event
self.fire_event("build")
def handle_event(self, event, control_center, universe):#params, user_input, network_controller, universe, active_map, session, widget_dispatcher, text_renderer, save_controller, refresh = False):
# Events that result from event handling
results = EventQueue()
# Convenience
(action, params) = (
event.get_action(),
event.get_params()
)
# Build root menu
if ( action == "build" ):
results.append(
self.handle_build_event(event, control_center, universe)
)
# Select an item, get confirmation...
elif ( action == "show:confirm-purchase" ):
results.append(
self.handle_show_confirm_purchase_event(event, control_center, universe)
)
# Commit an item purchase
elif ( action == "game:buy-item" ):
results.append(
self.handle_shop_buy_item_event(event, control_center, universe)
)
# Go to the previous page (e.g. close buy item confirm dialog)
elif ( action == "back" ):
results.append(
self.handle_back_event(event, control_center, universe)
)
# Finalize a "back" call
elif ( action == "previous-page" ):
# Let's just go back one page
self.page_back(1)
# Leave shop, resume game
elif ( action == "resume-game" ):
results.append(
self.handle_resume_game_event(event, control_center, universe)
)
# Restore the universe to active game state, set this very menu to inactive
elif ( action == "kill" ):
results.append(
self.handle_kill_event(event, control_center, universe)
)
# Return events
return results
# Configure the shop menu (more options than your typical menu, we need to define many parameters)
def configure(self, options):
# Common menu configuration
self.__std_configure__(options)
if ( "vendor" in options ):
self.vendor = options["vendor"]
if ( "title" in options ):
self.title = options["title"]
if ( "message" in options ):
self.message = options["message"]
if ( "required-item-names" in options ):
self.required_item_names.extend( options["required-item-names"] )#.split(";") )
if ( "min-quality" in options ):
self.min_item_quality = int( options["min-quality"] )
if ( "max-quality" in options ):
self.max_item_quality = int( options["max-quality"] )
if ( "max-items" in options ):
self.max_items_stocked = int( options["max-items"] )
if ( "max-reloads" in options ):
self.max_item_reloads = int( options["max-reloads"] )
# For chaining
return self
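    # Illustrative sketch (hypothetical vendor object and option values): a dialogue
    # script would typically build and configure a shop menu roughly like this.
    # The option keys below are exactly the ones configure() reads; the npc handle
    # and the concrete values are assumptions for the example.
    #
    #   shop = ShopMenu().configure({
    #       "vendor": npc,                          # NPC selling the items
    #       "title": "Bob's Fine Items",
    #       "message": "Take a look at my inventory.",
    #       "required-item-names": ["gold-key"],
    #       "min-quality": 1,
    #       "max-quality": 3,
    #       "max-items": 4,
    #       "max-reloads": 2
    #   })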
# Build the shop menu
def handle_build_event(self, event, control_center, universe):
# Events that result from handling this event (on-birth events, etc.)
results = EventQueue()
# Convenience
params = event.get_params()
# Fetch the widget dispatcher
widget_dispatcher = control_center.get_widget_dispatcher()
# Pause the game so that we can shop, if this is the first build...
if (self.first_build):
# Pause
universe.pause()
# Call in the pause splash
control_center.get_splash_controller().set_mode(SPLASH_MODE_GREYSCALE_ANIMATED)
# Before populating the vendor's inventory (or re-populating),
# clear it of any items the player has acquired since last shopping with this vendor...
self.vendor.remove_erstwhile_acquired_items_from_inventory(universe)
# Populate inventory for this shoppe's vendor...
self.vendor.populate_vendor_inventory(
min_quality = self.min_item_quality,#int( node.get_attribute("min-quality") ),
max_quality = self.max_item_quality,#int( node.get_attribute("min-quality") ),
required_item_names = self.required_item_names,
max_items = self.max_items_stocked,#int( node.get_attribute("max-items") ),
max_reloads = self.max_item_reloads,#int( node.get_attribute("max-reloads") ),
universe = universe
)
# Scope
root = None
# Does the vendor have anything in stock? Use this data
# to determine which template we load...
if ( self.vendor.get_vendor_inventory_count() == 0 ):
# Fetch the "nothing in stock" template
template = self.fetch_xml_template( "shop.directory", version = "out-of-items" ).add_parameters({
"@x": xml_encode( "%d" % (SCREEN_WIDTH - (int( (SCREEN_WIDTH - PAUSE_MENU_WIDTH) / 2 ))) ),
"@y": xml_encode( "%d" % PAUSE_MENU_Y ),
"@width": xml_encode( "%d" % int(PAUSE_MENU_WIDTH / 2) ),
"@height": xml_encode( "%d" % PAUSE_MENU_HEIGHT ),
"@shop-title": xml_encode( self.title )
})
# Compile template
root = template.compile_node_by_id("menu")
# We have items to sell...
else:
# Fetch the "shopping directory" template
template = self.fetch_xml_template( "shop.directory", version = "default" ).add_parameters({
"@x": xml_encode( "%d" % (SCREEN_WIDTH - (int( (SCREEN_WIDTH - PAUSE_MENU_WIDTH) / 2 ))) ),
"@y": xml_encode( "%d" % PAUSE_MENU_Y ),
"@width": xml_encode( "%d" % int(PAUSE_MENU_WIDTH / 2) ),
"@height": xml_encode( "%d" % PAUSE_MENU_HEIGHT ),
"@shop-title": xml_encode( self.title ),
"@salutation": xml_encode( self.message )
})
# Compile template
root = template.compile_node_by_id("menu")
# Now we'll add an entry for each available item...
for item_name in self.vendor.get_vendor_inventory_item_names():
# Grab handle
item = universe.get_item_by_name(item_name)
# Validate
if (item):
# How much money do we currently have?
money = int( universe.get_session_variable("core.gold.wallet").get_value() )
# Template version for this item depends on whether we can afford it...
template_version = ( "affordable" if (money >= item.cost) else "unaffordable" )
# Fetch the appropriate template for an individual item
template = self.fetch_xml_template( "shop.directory.insert", version = template_version ).add_parameters({
"@item-name": xml_encode( item.name ),
"@item-title": xml_encode( item.title ),
"@item-cost": xml_encode( "%d" % item.cost ),
"@item-advertisement": xml_encode( item.description )
})
# Compile
node = template.compile_node_by_id("insert")
# Inject into inventory area...
root.find_node_by_id("ext.inventory").add_node(node)
# Create widget
widget = widget_dispatcher.convert_node_to_widget(root, control_center, universe)
widget.set_id("root")
# We have definitely completed the first build now
self.first_build = False
# Add the new page
self.add_widget_via_event(widget, event)
# Return events
return results
# Show the "are you sure you wanna buy this?" page
def handle_show_confirm_purchase_event(self, event, control_center, universe):
# Events that result from handling this event (on-birth events, etc.)
results = EventQueue()
# Convenience
params = event.get_params()
# Fetch the widget dispatcher
widget_dispatcher = control_center.get_widget_dispatcher()
# Get a handle to the actual item...
item = universe.get_item_by_name( params["item-name"] )
# Validate
if (item):
# Fetch confirm purchase template
template = self.fetch_xml_template("shop.buy.confirm").add_parameters({
"@width": xml_encode( "%d" % int(PAUSE_MENU_WIDTH / 2) ),
"@height": xml_encode( "%d" % SCREEN_HEIGHT ),
"@item-name": xml_encode( item.get_name() ),
"@item-title": xml_encode( item.get_title() ),
"@item-cost": xml_encode( "%d" % item.get_cost() )
})
# Compile template
root = template.compile_node_by_id("menu")
# Create widget
widget = widget_dispatcher.convert_node_to_widget(root, control_center, universe)
widget.set_id("confirm-shop-purchase")
# Add the new page
self.add_widget_via_event(widget, event, exclusive = False)
# Return events
return results
# Commit an item purchase
def handle_shop_buy_item_event(self, event, control_center, universe):
# Events that result from handling this event (on-birth events, etc.)
results = EventQueue()
# Convenience
params = event.get_params()
# Get a reference to the item (for cost info, etc.)
item = universe.get_item_by_name( params["item-name"] )
# Acquire the item by its name
universe.acquire_item_by_name( item.get_name() )
# Post a newsfeeder notice
control_center.get_window_controller().get_newsfeeder().post({
"type": NEWS_ITEM_NEW,
"title": control_center.get_localization_controller().get_label("new-item-purchased:header"),
"content": item.get_title()
})
# Add a historical record
universe.add_historical_record(
"purchases",
control_center.get_localization_controller().get_label(
"purchased-m-from-n-for-g:message",
{
"@m": item.get_title(),
"@n": self.vendor.nick,
"@g": item.get_cost()
}
)
#"Bought [color=special]%s[/color] for [color=special]%s[/color] gold." % ( item.get_title(), item.get_cost() )
)
# Remove from seller's inventory
self.vendor.remove_item_from_vendor_inventory( item.get_name() )
# Increase sales count for vendor
self.vendor.increase_sales_count(1)
# Reduce player's wallet amount by the cost...
universe.increment_session_variable(
"core.gold.wallet",
-1 * item.get_cost()
)
# Count as gold spent
universe.increment_session_variable(
"stats.gold-spent",
item.get_cost()
)
# Execute the "wallet-changed" achievement hook
universe.execute_achievement_hook( "wallet-changed", control_center )
# Increase universe stats for items bought
universe.get_session_variable("stats.items-bought").increment_value(1)
# Execute the "bought-item" achievement hook
universe.execute_achievement_hook( "bought-item", control_center )
# Get the active map
m = universe.get_active_map()
# Check for a generic "onpurchase" script for the vendor
m.run_script(
"%s.onpurchase" % self.vendor.get_name(),
control_center,
universe,
execute_all = True # Try to loop entire script (?)
)
# Check for an onpurchase script (perhaps the game reacts in some way to an item you might have bought)
m.run_script(
name = "%s.onpurchase" % item.get_name(),
control_center = control_center,
universe = universe,
execute_all = True
)
# Refresh UI
self.refresh_pages(control_center, universe, curtailed_count = 1)
# After rebuilding the UI, we will have restocked the NPC's inventory.
# Thus, if the NPC has no inventory available, we have just bought their last item...
if ( self.vendor.get_vendor_inventory_count() == 0 ):
# Execute the "bought-all-items" achievement hook
universe.execute_achievement_hook( "bought-all-items", control_center )
# I'm going to set the cursor at "home" position for the shop
self.get_widget_by_id("root").set_cursor_at_beginning()#finalize = True)
# Return events
return results
# Go back a page (animated)
def handle_back_event(self, event, control_center, universe):
# Events that result from handling this event (on-birth events, etc.)
results = EventQueue()
# Convenience
params = event.get_params()
# Get the active page
page = self.get_active_page()
# Validate
if (page):
# Dismiss the page
page.hide(
on_complete = "previous-page"
)
# Return events
return results
# Leave the shop and resume play
def handle_resume_game_event(self, event, control_center, universe):
# Events that result from handling this event (on-birth events, etc.)
results = EventQueue()
# Convenience
params = event.get_params()
# Dismiss lightbox effect
self.lightbox_controller.set_target(0)
# Dismiss the splash controller, calling to resume game action once done...
control_center.get_splash_controller().dismiss(
on_complete = "game:unpause"
)
#hmenu.slide(DIR_LEFT, percent = 1.0)
#row_menu.slide(DIR_RIGHT, percent = 1.0)
# Resume game, killing shop menu when widget disappears
self.get_widget_by_id("root").hide(
on_complete = "kill"
)
# Return events
return results
# Kill event. Set game status back to active when shopping is done.
def handle_kill_event(self, event, control_center, universe):
# Events that result from handling this event (on-birth events, etc.)
results = EventQueue()
# Convenience
params = event.get_params()
# Done with the shop menu widget; trash it.
self.set_status(STATUS_INACTIVE)
# Return events
return results
| [((731, 750), 'code.menu.menu.Menu.__init__', 'Menu.__init__', (['self'], {}), '(self)\n', (744, 750), False, 'from code.menu.menu import Menu\n'), ((2166, 2178), 'code.tools.eventqueue.EventQueue', 'EventQueue', ([], {}), '()\n', (2176, 2178), False, 'from code.tools.eventqueue import EventQueue\n'), ((5047, 5059), 'code.tools.eventqueue.EventQueue', 'EventQueue', ([], {}), '()\n', (5057, 5059), False, 'from code.tools.eventqueue import EventQueue\n'), ((9868, 9880), 'code.tools.eventqueue.EventQueue', 'EventQueue', ([], {}), '()\n', (9878, 9880), False, 'from code.tools.eventqueue import EventQueue\n'), ((11289, 11301), 'code.tools.eventqueue.EventQueue', 'EventQueue', ([], {}), '()\n', (11299, 11301), False, 'from code.tools.eventqueue import EventQueue\n'), ((14864, 14876), 'code.tools.eventqueue.EventQueue', 'EventQueue', ([], {}), '()\n', (14874, 14876), False, 'from code.tools.eventqueue import EventQueue\n'), ((15417, 15429), 'code.tools.eventqueue.EventQueue', 'EventQueue', ([], {}), '()\n', (15427, 15429), False, 'from code.tools.eventqueue import EventQueue\n'), ((16300, 16312), 'code.tools.eventqueue.EventQueue', 'EventQueue', ([], {}), '()\n', (16310, 16312), False, 'from code.tools.eventqueue import EventQueue\n'), ((6833, 6864), 'code.utils.common.xml_encode', 'xml_encode', (["('%d' % PAUSE_MENU_Y)"], {}), "('%d' % PAUSE_MENU_Y)\n", (6843, 6864), False, 'from code.utils.common import coalesce, intersect, offset_rect, log, log2, xml_encode, xml_decode, translate_rgb_to_string\n'), ((6969, 7005), 'code.utils.common.xml_encode', 'xml_encode', (["('%d' % PAUSE_MENU_HEIGHT)"], {}), "('%d' % PAUSE_MENU_HEIGHT)\n", (6979, 7005), False, 'from code.utils.common import coalesce, intersect, offset_rect, log, log2, xml_encode, xml_decode, translate_rgb_to_string\n'), ((7040, 7062), 'code.utils.common.xml_encode', 'xml_encode', (['self.title'], {}), '(self.title)\n', (7050, 7062), False, 'from code.utils.common import coalesce, intersect, offset_rect, log, log2, xml_encode, xml_decode, translate_rgb_to_string\n'), ((7508, 7539), 'code.utils.common.xml_encode', 'xml_encode', (["('%d' % PAUSE_MENU_Y)"], {}), "('%d' % PAUSE_MENU_Y)\n", (7518, 7539), False, 'from code.utils.common import coalesce, intersect, offset_rect, log, log2, xml_encode, xml_decode, translate_rgb_to_string\n'), ((7644, 7680), 'code.utils.common.xml_encode', 'xml_encode', (["('%d' % PAUSE_MENU_HEIGHT)"], {}), "('%d' % PAUSE_MENU_HEIGHT)\n", (7654, 7680), False, 'from code.utils.common import coalesce, intersect, offset_rect, log, log2, xml_encode, xml_decode, translate_rgb_to_string\n'), ((7715, 7737), 'code.utils.common.xml_encode', 'xml_encode', (['self.title'], {}), '(self.title)\n', (7725, 7737), False, 'from code.utils.common import coalesce, intersect, offset_rect, log, log2, xml_encode, xml_decode, translate_rgb_to_string\n'), ((7772, 7796), 'code.utils.common.xml_encode', 'xml_encode', (['self.message'], {}), '(self.message)\n', (7782, 7796), False, 'from code.utils.common import coalesce, intersect, offset_rect, log, log2, xml_encode, xml_decode, translate_rgb_to_string\n'), ((10429, 10461), 'code.utils.common.xml_encode', 'xml_encode', (["('%d' % SCREEN_HEIGHT)"], {}), "('%d' % SCREEN_HEIGHT)\n", (10439, 10461), False, 'from code.utils.common import coalesce, intersect, offset_rect, log, log2, xml_encode, xml_decode, translate_rgb_to_string\n'), ((8781, 8802), 'code.utils.common.xml_encode', 'xml_encode', (['item.name'], {}), '(item.name)\n', (8791, 8802), False, 'from code.utils.common import coalesce, 
intersect, offset_rect, log, log2, xml_encode, xml_decode, translate_rgb_to_string\n'), ((8845, 8867), 'code.utils.common.xml_encode', 'xml_encode', (['item.title'], {}), '(item.title)\n', (8855, 8867), False, 'from code.utils.common import coalesce, intersect, offset_rect, log, log2, xml_encode, xml_decode, translate_rgb_to_string\n'), ((8909, 8937), 'code.utils.common.xml_encode', 'xml_encode', (["('%d' % item.cost)"], {}), "('%d' % item.cost)\n", (8919, 8937), False, 'from code.utils.common import coalesce, intersect, offset_rect, log, log2, xml_encode, xml_decode, translate_rgb_to_string\n'), ((8988, 9016), 'code.utils.common.xml_encode', 'xml_encode', (['item.description'], {}), '(item.description)\n', (8998, 9016), False, 'from code.utils.common import coalesce, intersect, offset_rect, log, log2, xml_encode, xml_decode, translate_rgb_to_string\n')] |
liudonghua123/bridgedb | lib/bridgedb/runner.py | 94dd10673f9e6650e8a00e162f348e64f7a1ecab | # -*- coding: utf-8 ; test-case-name: bridgedb.test.test_runner -*-
#
# This file is part of BridgeDB, a Tor bridge distribution system.
#
# :authors: Isis Lovecruft 0xA3ADB67A2CDB8B35 <[email protected]>
# please also see AUTHORS file
# :copyright: (c) 2007-2015, The Tor Project, Inc.
# (c) 2007-2015, all entities within the AUTHORS file
# (c) 2012-2015, Isis Lovecruft
# :license: 3-clause BSD, see included LICENSE for information
"""Classes for running components and servers, as well as daemonisation.
** Module Overview: **
"""
from __future__ import print_function
import logging
import sys
import os
from twisted.python import procutils
def find(filename):
"""Find the executable ``filename``.
:param string filename: The executable to search for. Must be in the
effective user ID's $PATH.
:rtype: string
:returns: The location of the executable, if found. Otherwise, returns
None.
"""
executable = None
logging.debug("Searching for installed '%s'..." % filename)
which = procutils.which(filename, os.X_OK)
if len(which) > 0:
for that in which:
if os.stat(that).st_uid == os.geteuid():
executable = that
break
if not executable:
return None
logging.debug("Found installed script at '%s'" % executable)
return executable
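# Illustrative usage (hypothetical install path; find() only consults the
# effective user's $PATH and returns None when nothing suitable is found):
#
#   >>> find("leekspin")
#   '/usr/local/bin/leekspin'
#   >>> find("no-such-program") is None
#   True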
def generateDescriptors(count=None, rundir=None):
"""Run a script which creates fake bridge descriptors for testing purposes.
This will run Leekspin_ to create bridge server descriptors, bridge
extra-info descriptors, and networkstatus document.
.. warning: This function can take a very long time to run, especially in
headless environments where entropy sources are minimal, because it
creates the keys for each mocked OR, which are embedded in the server
descriptors, used to calculate the OR fingerprints, and sign the
descriptors, among other things.
.. _Leekspin: https://gitweb.torproject.org/user/isis/leekspin.git
:param integer count: Number of mocked bridges to generate descriptor
for. (default: 3)
:type rundir: string or None
:param rundir: If given, use this directory as the current working
directory for the bridge descriptor generator script to run in. The
directory MUST already exist, and the descriptor files will be created
        in it. If None, use whatever directory we are currently in.
"""
import subprocess
import os.path
proc = None
statuscode = 0
script = 'leekspin'
    rundir = rundir if (rundir and os.path.isdir(rundir)) else None
count = count if count else 3
try:
proc = subprocess.Popen([script, '-n', str(count)],
close_fds=True, cwd=rundir)
finally:
if proc is not None:
proc.wait()
if proc.returncode:
print("There was an error generating bridge descriptors.",
"(Returncode: %d)" % proc.returncode)
statuscode = proc.returncode
else:
print("Sucessfully generated %s descriptors." % str(count))
del subprocess
return statuscode
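# Illustrative usage (hypothetical directory; it must already exist, and key
# generation for the mocked ORs can make this slow on entropy-starved machines):
#
#   status = generateDescriptors(count=5, rundir="/tmp/mock-descriptors")
#   if status != 0:
#       print("Descriptor generation failed (returncode %d)" % status)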
def doDumpBridges(config):
"""Dump bridges by assignment to a file.
This function handles the commandline '--dump-bridges' option.
:type config: :class:`bridgedb.Main.Conf`
:param config: The current configuration.
"""
import bridgedb.Bucket as bucket
bucketManager = bucket.BucketManager(config)
bucketManager.assignBridgesToBuckets()
bucketManager.dumpBridges()
| [((1004, 1063), 'logging.debug', 'logging.debug', (['("Searching for installed \'%s\'..." % filename)'], {}), '("Searching for installed \'%s\'..." % filename)\n', (1017, 1063), False, 'import logging\n'), ((1076, 1110), 'twisted.python.procutils.which', 'procutils.which', (['filename', 'os.X_OK'], {}), '(filename, os.X_OK)\n', (1091, 1110), False, 'from twisted.python import procutils\n'), ((1319, 1379), 'logging.debug', 'logging.debug', (['("Found installed script at \'%s\'" % executable)'], {}), '("Found installed script at \'%s\'" % executable)\n', (1332, 1379), False, 'import logging\n'), ((3561, 3589), 'bridgedb.Bucket.BucketManager', 'bucket.BucketManager', (['config'], {}), '(config)\n', (3581, 3589), True, 'import bridgedb.Bucket as bucket\n'), ((2645, 2666), 'os.path.isdir', 'os.path.isdir', (['rundir'], {}), '(rundir)\n', (2658, 2666), False, 'import os\n'), ((1201, 1213), 'os.geteuid', 'os.geteuid', ([], {}), '()\n', (1211, 1213), False, 'import os\n'), ((1177, 1190), 'os.stat', 'os.stat', (['that'], {}), '(that)\n', (1184, 1190), False, 'import os\n')] |
pfrouleau/wce-triage-v2 | wce_triage/ops/create_image_runner.py | 25610cda55f5cb2170e13e121ae1cbaa92ef7626 | #!/usr/bin/env python3
#
# Create disk image
#
import re, sys, traceback
from .tasks import task_fetch_partitions, task_refresh_partitions, task_mount, task_remove_persistent_rules, task_remove_logs, task_fsck, task_shrink_partition, task_expand_partition, task_unmount
from .partclone_tasks import task_create_disk_image
from .ops_ui import console_ui
from ..components.disk import create_storage_instance
from .runner import Runner
from ..lib.disk_images import make_disk_image_name
from .json_ui import json_ui
from ..lib.util import init_triage_logger, is_block_device
# "Waiting", "Prepare", "Preflight", "Running", "Success", "Failed"]
my_messages = { "Waiting": "Saving disk is waiting.",
"Prepare": "Savign disk is preparing.",
"Preflight": "Saving disk is preparing.",
"Running": "{step} of {steps}: Running {task}",
"Success": "Saving disk completed successfully.",
"Failed": "Saving disk failed." }
#
class ImageDiskRunner(Runner):
  '''Runner for creating a disk image. Runs fsck, shrinks the partition, creates
  the disk image, and then resizes the file system back to its maximum size.
  For now, this only deals with the EXT4 Linux partition.
  '''
# FIXME: If I want to make this to a generic clone app, I need to deal with all of partitions on the disk.
# One step at a time.
def __init__(self, ui, runner_id, disk, destdir, suggestedname=None, partition_id='Linux'):
super().__init__(ui, runner_id)
self.time_estimate = 600
self.disk = disk
self.partition_id = partition_id
self.destdir = destdir
self.imagename = make_disk_image_name(destdir, suggestedname)
pass
def prepare(self):
super().prepare()
# self.tasks.append(task_mount_nfs_destination(self, "Mount the destination volume"))
self.tasks.append(task_fetch_partitions("Fetch partitions", self.disk))
self.tasks.append(task_refresh_partitions("Refresh partition information", self.disk))
self.tasks.append(task_mount("Mount the target disk", disk=self.disk, partition_id=self.partition_id))
self.tasks.append(task_remove_persistent_rules("Remove persistent rules", disk=self.disk, partition_id=self.partition_id))
self.tasks.append(task_remove_logs("Remove/Clean Logs", disk=self.disk, partition_id=self.partition_id))
task = task_unmount("Unmount target", disk=self.disk, partition_id=self.partition_id)
task.set_teardown_task()
self.tasks.append(task)
self.tasks.append(task_fsck("fsck partition", disk=self.disk, partition_id=self.partition_id))
self.tasks.append(task_shrink_partition("Shrink partition to smallest", disk=self.disk, partition_id=self.partition_id))
self.tasks.append(task_create_disk_image("Create disk image", disk=self.disk, partition_id=self.partition_id, imagename=self.imagename))
task = task_expand_partition("Expand the partion back", disk=self.disk, partition_id=self.partition_id)
task.set_teardown_task()
self.tasks.append(task)
pass
pass
if __name__ == "__main__":
tlog = init_triage_logger()
if len(sys.argv) == 1:
print( 'Unloader: devicename part destdir')
sys.exit(0)
# NOTREACHED
pass
devname = sys.argv[1]
if not is_block_device(devname):
print( '%s is not a block device.' % devname)
sys.exit(1)
# NOTREACHED
pass
part = sys.argv[2] # This is a partition id
destdir = sys.argv[3] # Destination directory
disk = create_storage_instance(devname)
# Preflight is for me to see the tasks. http server runs this with json_ui.
do_it = True
if destdir == "preflight":
ui = console_ui()
do_it = False
pass
elif destdir == "testflight":
ui = console_ui()
do_it = True
pass
else:
ui = json_ui(wock_event="saveimage", message_catalog=my_messages)
pass
  if re.match(r'\d+', part):
part = int(part)
pass
runner_id = disk.device_name
runner = ImageDiskRunner(ui, runner_id, disk, destdir, partition_id=part)
try:
runner.prepare()
runner.preflight()
runner.explain()
runner.run()
sys.exit(0)
# NOTREACHED
except Exception as exc:
sys.stderr.write(traceback.format_exc(exc) + "\n")
sys.exit(1)
# NOTREACHED
pass
pass
| [((3844, 3866), 're.match', 're.match', (['part', '"""\\\\d+"""'], {}), "(part, '\\\\d+')\n", (3852, 3866), False, 'import re, sys, traceback\n'), ((3172, 3183), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (3180, 3183), False, 'import re, sys, traceback\n'), ((3324, 3335), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (3332, 3335), False, 'import re, sys, traceback\n'), ((4098, 4109), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (4106, 4109), False, 'import re, sys, traceback\n'), ((4213, 4224), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (4221, 4224), False, 'import re, sys, traceback\n'), ((4175, 4200), 'traceback.format_exc', 'traceback.format_exc', (['exc'], {}), '(exc)\n', (4195, 4200), False, 'import re, sys, traceback\n')] |
KeeplerIO/meetup-hands-on-gcp-2019 | batch_processing_dataflow/play_store_flow.py | 3674922d89d2be8984eb5719f0faaae127823ab4 | import argparse
import logging
import apache_beam as beam
from apache_beam.io import WriteToBigQuery
from apache_beam.io import ReadFromText, WriteToText
from apache_beam.options.pipeline_options import PipelineOptions
class ProcessCSV(beam.DoFn):
def process(self, element, *args, **kwargs):
import csv
        formatted_element = [element.encode('utf8')]
        processed_csv = csv.DictReader(formatted_element, fieldnames=['App', 'Category', 'Rating', 'Reviews', 'Size',
'Installs', 'Type', 'Price', 'Content_Rating',
'Genres', 'Last_Updated', 'Current_Ver',
'Android_Ver'], delimiter=',')
processed_fields = processed_csv.next()
if processed_fields.get('Category').replace('.','').isdigit():
return None
return [processed_fields]
class ParseRecord(beam.DoFn):
def process(self, element, *args, **kwargs):
from datetime import datetime
import math
def string_to_megabyte(raw_string):
if raw_string.upper().endswith('K'):
multiplier = 1000
elif raw_string.upper().endswith('M'):
multiplier = 1000 * 1000
else:
return None
return (float(raw_string[:-1]) * multiplier) / 1000000
new_element = {}
rating = float(element['Rating'])
new_element['Rating'] = rating if not math.isnan(rating) else None
new_element['Size'] = string_to_megabyte(element['Size'])
new_element['Price'] = float(element['Price'].replace("$",""))
new_element['Installs'] = int(element['Installs'].replace("+", "").replace(",",""))
new_element['Last_Updated'] = datetime.strptime(element['Last_Updated'], '%B %d, %Y').strftime('%Y-%m-%d')
new_element['Category'] = element['Category']
new_element['Genres'] = element['Genres']
new_element['App'] = element['App']
new_element['Content_Rating'] = element['Content_Rating']
new_element['Reviews'] = element['Reviews']
new_element['Android_Ver'] = element['Android_Ver']
new_element['Type'] = element['Type']
new_element['Current_Ver'] = element['Current_Ver']
logging.info(new_element)
return [new_element]
def run(argv=None):
"""Main entry point. It defines and runs the pipeline."""
parser = argparse.ArgumentParser()
parser.add_argument('--input',
dest='input',
default='gs://meetup-batch-processing/input/googleplaystore.csv',
help='Input file to process.')
parser.add_argument('--output',
dest='output',
default='gs://meetup-batch-processing/output/googleplaystore.csv',
help='Output file to process.')
parser.add_argument('--table-output',
dest='table_output',
default='meetup-hands-on-gcp-2019:googleplaystore_batch_dataflow.play_store',
help='Bigquery table name for output.')
known_args, pipeline_args = parser.parse_known_args(argv)
pipeline_options = PipelineOptions(pipeline_args)
with beam.Pipeline(options=pipeline_options) as pipeline:
raw_lines = pipeline | 'ReadFromCsv' >> ReadFromText(known_args.input, skip_header_lines=1)
lines = raw_lines | 'processCsv' >> beam.ParDo(ProcessCSV())
output = lines | 'parseRecord' >> beam.ParDo(ParseRecord())
output | 'writeBigQuery' >> WriteToBigQuery(known_args.table_output,
write_disposition=beam.io.BigQueryDisposition.WRITE_TRUNCATE,
create_disposition=beam.io.BigQueryDisposition.CREATE_NEVER)
logging.info('Finished.')
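# Example invocation (hypothetical project, bucket and table names; --input and
# --table-output are defined above, while --runner, --project and --temp_location
# are standard Beam/Dataflow pipeline options consumed by PipelineOptions):
#
#   python play_store_flow.py \
#       --input gs://my-bucket/input/googleplaystore.csv \
#       --table-output my-project:my_dataset.play_store \
#       --runner DataflowRunner \
#       --project my-project \
#       --temp_location gs://my-bucket/tmp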
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
run()
| [((2553, 2578), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2576, 2578), False, 'import argparse\n'), ((3358, 3388), 'apache_beam.options.pipeline_options.PipelineOptions', 'PipelineOptions', (['pipeline_args'], {}), '(pipeline_args)\n', (3373, 3388), False, 'from apache_beam.options.pipeline_options import PipelineOptions\n'), ((396, 615), 'csv.DictReader', 'csv.DictReader', (['formated_element'], {'fieldnames': "['App', 'Category', 'Rating', 'Reviews', 'Size', 'Installs', 'Type',\n 'Price', 'Content_Rating', 'Genres', 'Last_Updated', 'Current_Ver',\n 'Android_Ver']", 'delimiter': '""","""'}), "(formated_element, fieldnames=['App', 'Category', 'Rating',\n 'Reviews', 'Size', 'Installs', 'Type', 'Price', 'Content_Rating',\n 'Genres', 'Last_Updated', 'Current_Ver', 'Android_Ver'], delimiter=',')\n", (410, 615), False, 'import csv\n'), ((2400, 2425), 'logging.info', 'logging.info', (['new_element'], {}), '(new_element)\n', (2412, 2425), False, 'import logging\n'), ((3398, 3437), 'apache_beam.Pipeline', 'beam.Pipeline', ([], {'options': 'pipeline_options'}), '(options=pipeline_options)\n', (3411, 3437), True, 'import apache_beam as beam\n'), ((3940, 3965), 'logging.info', 'logging.info', (['"""Finished."""'], {}), "('Finished.')\n", (3952, 3965), False, 'import logging\n'), ((3999, 4018), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (4016, 4018), False, 'import logging\n'), ((1578, 1596), 'math.isnan', 'math.isnan', (['rating'], {}), '(rating)\n', (1588, 1596), False, 'import math\n'), ((1874, 1929), 'datetime.datetime.strptime', 'datetime.strptime', (["element['Last_Updated']", '"""%B %d, %Y"""'], {}), "(element['Last_Updated'], '%B %d, %Y')\n", (1891, 1929), False, 'from datetime import datetime\n'), ((3499, 3550), 'apache_beam.io.ReadFromText', 'ReadFromText', (['known_args.input'], {'skip_header_lines': '(1)'}), '(known_args.input, skip_header_lines=1)\n', (3511, 3550), False, 'from apache_beam.io import ReadFromText, WriteToText\n'), ((3735, 3908), 'apache_beam.io.WriteToBigQuery', 'WriteToBigQuery', (['known_args.table_output'], {'write_disposition': 'beam.io.BigQueryDisposition.WRITE_TRUNCATE', 'create_disposition': 'beam.io.BigQueryDisposition.CREATE_NEVER'}), '(known_args.table_output, write_disposition=beam.io.\n BigQueryDisposition.WRITE_TRUNCATE, create_disposition=beam.io.\n BigQueryDisposition.CREATE_NEVER)\n', (3750, 3908), False, 'from apache_beam.io import WriteToBigQuery\n')] |
Manotomo-Alliance-Support-Squad/WWS | backend/main/server/resources/Message.py | 3df21a3f715eeb3b57314bf08c38f2239b2ba399 | from flask import request
from flask_jwt import jwt_required
from flask_restful import Resource
from main.server import app, cache, db
from main.server.models import Message, MessageSchema
messages_schema = MessageSchema(many=True)
message_schema = MessageSchema()
@app.after_request
def add_header(response):
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Credentials'] = 'true'
response.headers['Access-Control-Allow-Methods'] = 'GET, POST'
response.headers[
'Access-Control-Allow-Headers'] = 'Access-Control-Allow-Headers, Origin,Accept, X-Requested-With, Content-Type, Access-Control-Request-Method, Access-Control-Request-Headers'
return response
class MessageCount(Resource):
@cache.cached(timeout=100)
def get(self):
"""Gets the number of messages available on the server"""
return {'status': 'success', 'count': Message.query.count()}, 200
class MessageListRangeResource(Resource):
@cache.cached(timeout=100)
def get(self, lower, upper):
"""Gets a range of messages on the server"""
if int(lower) < 1:
return {'status': 'fail', 'messages': 'Invalid index: ' + str(lower)}, 400
if int(lower) > int(upper):
return {'status': 'fail',
'messages': 'Upper range cannot be less than lower range: ' + str(lower) + '>' + str(upper)}, 400
messages = Message.query.filter(Message.messageID >= int(lower)).filter(Message.messageID <= int(upper))
if not messages:
return {'status': 'fail',
'messages': 'Out of range: ' + str(lower) + ' - ' + str(upper) + ' does not exist'}, 404
messages = messages_schema.dump(messages)
if not Message.query.filter_by(messageID=upper).first(): # the last item in the range
return {'status': 'success', 'messages': messages}, 206 # Partial Content Served
return {'status': 'success', 'messages': messages}, 200
class MessageListResource(Resource):
@cache.cached(timeout=100)
def get(self):
"""Gets all messages on the server"""
messages = Message.query.all()
messages = messages_schema.dump(messages)
if not messages:
return {'status': 'success', 'messages': messages}, 206 # Partial Content Served
return {'status': 'success', 'messages': messages}, 200
@jwt_required()
def post(self):
"""Add message"""
json_data = request.get_json(force=True)
if not json_data:
return {'status': 'fail', 'message': 'No input data'}, 400
errors = message_schema.validate(json_data)
if errors:
return {'status': 'fail', 'message': 'Error handling request'}, 422
data = message_schema.load(json_data)
message = Message.query.filter_by(orig_msg=data.get('orig_msg')).first()
if message:
return {'status': 'fail', 'message': 'Message already exists'}, 400
message = Message(orig_msg=data.get('orig_msg'),
tl_msg=data.get('tl_msg'),
country=data.get('country'),
username=data.get('username'))
db.session.add(message)
db.session.commit()
return {'status': 'success', 'message': 'Message successfully created'}, 201
class MessageResource(Resource):
@cache.cached(timeout=100)
def get(self, messageID):
""""Get a message by message ID"""
message = Message.query.filter_by(messageID=messageID)
if not message.first():
return {'status': 'fail', 'message': 'No message with ID ' + str(messageID) + ' exists'}, 404
message = messages_schema.dump(message)
return {'status': 'success', 'messages': message}, 200
@jwt_required()
def delete(self, messageID):
"""delete a message by ID"""
message = Message.query.filter_by(messageID=messageID)
if not message.first():
return {'status': 'fail', 'message': 'No message with ID ' + str(messageID) + ' exists'}, 404
message.delete()
db.session.commit()
        return {'status': 'success', 'message': 'Message Deleted'}, 200
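# Illustrative registration sketch (hypothetical URL rules; the real route wiring
# lives elsewhere in main.server, and flask_restful's Api.add_resource is the
# standard way to expose these Resource classes):
#
#   from flask_restful import Api
#   api = Api(app)
#   api.add_resource(MessageCount, '/api/message/count')
#   api.add_resource(MessageListResource, '/api/message')
#   api.add_resource(MessageListRangeResource, '/api/message/<lower>/<upper>')
#   api.add_resource(MessageResource, '/api/message/<messageID>')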
| [((210, 234), 'main.server.models.MessageSchema', 'MessageSchema', ([], {'many': '(True)'}), '(many=True)\n', (223, 234), False, 'from main.server.models import Message, MessageSchema\n'), ((252, 267), 'main.server.models.MessageSchema', 'MessageSchema', ([], {}), '()\n', (265, 267), False, 'from main.server.models import Message, MessageSchema\n'), ((768, 793), 'main.server.cache.cached', 'cache.cached', ([], {'timeout': '(100)'}), '(timeout=100)\n', (780, 793), False, 'from main.server import app, cache, db\n'), ((1002, 1027), 'main.server.cache.cached', 'cache.cached', ([], {'timeout': '(100)'}), '(timeout=100)\n', (1014, 1027), False, 'from main.server import app, cache, db\n'), ((2055, 2080), 'main.server.cache.cached', 'cache.cached', ([], {'timeout': '(100)'}), '(timeout=100)\n', (2067, 2080), False, 'from main.server import app, cache, db\n'), ((2426, 2440), 'flask_jwt.jwt_required', 'jwt_required', ([], {}), '()\n', (2438, 2440), False, 'from flask_jwt import jwt_required\n'), ((3427, 3452), 'main.server.cache.cached', 'cache.cached', ([], {'timeout': '(100)'}), '(timeout=100)\n', (3439, 3452), False, 'from main.server import app, cache, db\n'), ((3846, 3860), 'flask_jwt.jwt_required', 'jwt_required', ([], {}), '()\n', (3858, 3860), False, 'from flask_jwt import jwt_required\n'), ((2165, 2184), 'main.server.models.Message.query.all', 'Message.query.all', ([], {}), '()\n', (2182, 2184), False, 'from main.server.models import Message, MessageSchema\n'), ((2507, 2535), 'flask.request.get_json', 'request.get_json', ([], {'force': '(True)'}), '(force=True)\n', (2523, 2535), False, 'from flask import request\n'), ((3249, 3272), 'main.server.db.session.add', 'db.session.add', (['message'], {}), '(message)\n', (3263, 3272), False, 'from main.server import app, cache, db\n'), ((3281, 3300), 'main.server.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3298, 3300), False, 'from main.server import app, cache, db\n'), ((3544, 3588), 'main.server.models.Message.query.filter_by', 'Message.query.filter_by', ([], {'messageID': 'messageID'}), '(messageID=messageID)\n', (3567, 3588), False, 'from main.server.models import Message, MessageSchema\n'), ((3950, 3994), 'main.server.models.Message.query.filter_by', 'Message.query.filter_by', ([], {'messageID': 'messageID'}), '(messageID=messageID)\n', (3973, 3994), False, 'from main.server.models import Message, MessageSchema\n'), ((4168, 4187), 'main.server.db.session.commit', 'db.session.commit', ([], {}), '()\n', (4185, 4187), False, 'from main.server import app, cache, db\n'), ((925, 946), 'main.server.models.Message.query.count', 'Message.query.count', ([], {}), '()\n', (944, 946), False, 'from main.server.models import Message, MessageSchema\n'), ((1773, 1813), 'main.server.models.Message.query.filter_by', 'Message.query.filter_by', ([], {'messageID': 'upper'}), '(messageID=upper)\n', (1796, 1813), False, 'from main.server.models import Message, MessageSchema\n')] |
icaros-usc/wecook | demos/chicken_pasta/chicken_pasta.py | 27bbb6b78a48e04765a87d33cc8a5d3748d2d4cc | #!/usr/bin/env python3
import rospy
from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg
def talker():
pub = rospy.Publisher('WeCookDispatch', TaskMsg, queue_size=10)
rospy.init_node('wecook_chicken_pasta', anonymous=True)
scene_msg = SceneMsg([ObjectMsg('wall0',
'package://wecook_assets/data/furniture/wall.urdf',
[0.75, 0.05, 0., 0., 0., 0., 1.]),
ObjectMsg('wall1',
'package://wecook_assets/data/furniture/wall.urdf',
[-0.85, 1.45, 0., 0., 0., 0.707, 0.707]),
ObjectMsg('counter0',
'package://wecook_assets/data/furniture/kitchen_counter.urdf',
[0.3, 0., 0., 0., 0., 0., 1.]),
ObjectMsg('counter1',
'package://wecook_assets/data/furniture/kitchen_counter.urdf',
[0., 1.0, 0., 0., 0., 0.707, 0.707]),
ObjectMsg('sink0',
'package://wecook_assets/data/furniture/sink_counter.urdf',
[-1.3, 1.05, 0., 0., 0., 0.707, 0.707]),
ObjectMsg('shelf0',
'package://wecook_assets/data/furniture/bookcase.urdf',
[0.3, -1.05, 0., 0., 0., 0., 1.]),
ObjectMsg('stove0',
'package://wecook_assets/data/objects/stove.urdf',
[-0.35, 0.95, 0.75, 0., 0., 0., 1.]),
ObjectMsg('pot0',
'package://wecook_assets/data/objects/cooking_pot.urdf',
[0.35, 1.1, 0.75, 0., 0., 0., 1.]),
ObjectMsg('skillet0',
'package://wecook_assets/data/objects/skillet.urdf',
[0.3, 0.7, 0.75, 0., 0., -0.707, .707]),
ObjectMsg('cutting_board0',
'package://wecook_assets/data/objects/cutting_board.urdf',
[0.3, -0.3, 0.75, 0., 0., 0., 1.]),
ObjectMsg('knife0',
'package://wecook_assets/data/objects/knife_big.urdf',
[0.215, -0.55, 0.775, 0., 0., 0., 1.]),
ObjectMsg('plate0',
'package://wecook_assets/data/objects/plate.urdf',
[0.3, 0.075, 0.75, 0., 0., 0., 1.]),
ObjectMsg('bowl0',
'package://wecook_assets/data/objects/bowl_green.urdf',
[0.45, 0.375, 0.75, 0., 0., 0., 1.]),
ObjectMsg('bowl1',
'package://wecook_assets/data/objects/bowl_green.urdf',
[0.15, 0.375, 0.75, 0., 0., 0., 1.]),
ObjectMsg('oil0',
'package://wecook_assets/data/objects/olive_oil.urdf',
[0., 1.15, 0.75, 0., 0., 0.707, 0.707]),
ObjectMsg('salt0',
'package://wecook_assets/data/objects/salt.urdf',
[0., 1.0, 0.75, 0., 0., 0.707, 0.707]),
ObjectMsg('pepper0',
'package://wecook_assets/data/objects/black_pepper.urdf',
[0., 0.9, 0.75, 0., 0., 0.707, 0.707]),
ObjectMsg('chicken0',
'package://wecook_assets/data/food/chicken.urdf',
[0.3, 0.075, 0.757, 0., 0., 0., 1.]),
ObjectMsg('lime0',
'package://wecook_assets/data/food/lime.urdf',
[0.3, -0.3, 0.757, 0., 0., 0., 1.]),
ObjectMsg('pasta0',
'package://wecook_assets/data/food/pasta.urdf',
[0.45, 0.375, 0.757, 0., 0., 0., 1.])],
[ContainingMsg(['plate0', 'chicken0']),
ContainingMsg(['bowl0', 'pasta0'])])
task_msg = TaskMsg(scene_msg,
[ActionMsg(['p1'], 'cut', ['plate0'], 'knife0', ['lime0'])],
[AgentMsg('p1', 'r', [0., 0., 0.75, 0., 0., 0., 0.])],
"",
"",
"follow",
"RRTConnect",
False)
    # sleep briefly (1 second) so the publisher can establish connections before publishing
rospy.sleep(1)
pub.publish(task_msg)
if __name__ == '__main__':
try:
talker()
except rospy.ROSInterruptException:
pass
| [((151, 208), 'rospy.Publisher', 'rospy.Publisher', (['"""WeCookDispatch"""', 'TaskMsg'], {'queue_size': '(10)'}), "('WeCookDispatch', TaskMsg, queue_size=10)\n", (166, 208), False, 'import rospy\n'), ((213, 268), 'rospy.init_node', 'rospy.init_node', (['"""wecook_chicken_pasta"""'], {'anonymous': '(True)'}), "('wecook_chicken_pasta', anonymous=True)\n", (228, 268), False, 'import rospy\n'), ((5021, 5035), 'rospy.sleep', 'rospy.sleep', (['(1)'], {}), '(1)\n', (5032, 5035), False, 'import rospy\n'), ((296, 410), 'wecook.msg.ObjectMsg', 'ObjectMsg', (['"""wall0"""', '"""package://wecook_assets/data/furniture/wall.urdf"""', '[0.75, 0.05, 0.0, 0.0, 0.0, 0.0, 1.0]'], {}), "('wall0', 'package://wecook_assets/data/furniture/wall.urdf', [\n 0.75, 0.05, 0.0, 0.0, 0.0, 0.0, 1.0])\n", (305, 410), False, 'from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg\n'), ((500, 619), 'wecook.msg.ObjectMsg', 'ObjectMsg', (['"""wall1"""', '"""package://wecook_assets/data/furniture/wall.urdf"""', '[-0.85, 1.45, 0.0, 0.0, 0.0, 0.707, 0.707]'], {}), "('wall1', 'package://wecook_assets/data/furniture/wall.urdf', [-\n 0.85, 1.45, 0.0, 0.0, 0.0, 0.707, 0.707])\n", (509, 619), False, 'from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg\n'), ((711, 841), 'wecook.msg.ObjectMsg', 'ObjectMsg', (['"""counter0"""', '"""package://wecook_assets/data/furniture/kitchen_counter.urdf"""', '[0.3, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0]'], {}), "('counter0',\n 'package://wecook_assets/data/furniture/kitchen_counter.urdf', [0.3, \n 0.0, 0.0, 0.0, 0.0, 0.0, 1.0])\n", (720, 841), False, 'from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg\n'), ((926, 1060), 'wecook.msg.ObjectMsg', 'ObjectMsg', (['"""counter1"""', '"""package://wecook_assets/data/furniture/kitchen_counter.urdf"""', '[0.0, 1.0, 0.0, 0.0, 0.0, 0.707, 0.707]'], {}), "('counter1',\n 'package://wecook_assets/data/furniture/kitchen_counter.urdf', [0.0, \n 1.0, 0.0, 0.0, 0.0, 0.707, 0.707])\n", (935, 1060), False, 'from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg\n'), ((1147, 1276), 'wecook.msg.ObjectMsg', 'ObjectMsg', (['"""sink0"""', '"""package://wecook_assets/data/furniture/sink_counter.urdf"""', '[-1.3, 1.05, 0.0, 0.0, 0.0, 0.707, 0.707]'], {}), "('sink0',\n 'package://wecook_assets/data/furniture/sink_counter.urdf', [-1.3, 1.05,\n 0.0, 0.0, 0.0, 0.707, 0.707])\n", (1156, 1276), False, 'from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg\n'), ((1365, 1483), 'wecook.msg.ObjectMsg', 'ObjectMsg', (['"""shelf0"""', '"""package://wecook_assets/data/furniture/bookcase.urdf"""', '[0.3, -1.05, 0.0, 0.0, 0.0, 0.0, 1.0]'], {}), "('shelf0', 'package://wecook_assets/data/furniture/bookcase.urdf',\n [0.3, -1.05, 0.0, 0.0, 0.0, 0.0, 1.0])\n", (1374, 1483), False, 'from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg\n'), ((1574, 1690), 'wecook.msg.ObjectMsg', 'ObjectMsg', (['"""stove0"""', '"""package://wecook_assets/data/objects/stove.urdf"""', '[-0.35, 0.95, 0.75, 0.0, 0.0, 0.0, 1.0]'], {}), "('stove0', 'package://wecook_assets/data/objects/stove.urdf', [-\n 0.35, 0.95, 0.75, 0.0, 0.0, 0.0, 1.0])\n", (1583, 1690), False, 'from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg\n'), ((1781, 1898), 'wecook.msg.ObjectMsg', 'ObjectMsg', (['"""pot0"""', '"""package://wecook_assets/data/objects/cooking_pot.urdf"""', '[0.35, 1.1, 0.75, 0.0, 0.0, 0.0, 
1.0]'], {}), "('pot0', 'package://wecook_assets/data/objects/cooking_pot.urdf',\n [0.35, 1.1, 0.75, 0.0, 0.0, 0.0, 1.0])\n", (1790, 1898), False, 'from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg\n'), ((1990, 2111), 'wecook.msg.ObjectMsg', 'ObjectMsg', (['"""skillet0"""', '"""package://wecook_assets/data/objects/skillet.urdf"""', '[0.3, 0.7, 0.75, 0.0, 0.0, -0.707, 0.707]'], {}), "('skillet0', 'package://wecook_assets/data/objects/skillet.urdf',\n [0.3, 0.7, 0.75, 0.0, 0.0, -0.707, 0.707])\n", (1999, 2111), False, 'from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg\n'), ((2204, 2338), 'wecook.msg.ObjectMsg', 'ObjectMsg', (['"""cutting_board0"""', '"""package://wecook_assets/data/objects/cutting_board.urdf"""', '[0.3, -0.3, 0.75, 0.0, 0.0, 0.0, 1.0]'], {}), "('cutting_board0',\n 'package://wecook_assets/data/objects/cutting_board.urdf', [0.3, -0.3, \n 0.75, 0.0, 0.0, 0.0, 1.0])\n", (2213, 2338), False, 'from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg\n'), ((2434, 2555), 'wecook.msg.ObjectMsg', 'ObjectMsg', (['"""knife0"""', '"""package://wecook_assets/data/objects/knife_big.urdf"""', '[0.215, -0.55, 0.775, 0.0, 0.0, 0.0, 1.0]'], {}), "('knife0', 'package://wecook_assets/data/objects/knife_big.urdf',\n [0.215, -0.55, 0.775, 0.0, 0.0, 0.0, 1.0])\n", (2443, 2555), False, 'from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg\n'), ((2647, 2761), 'wecook.msg.ObjectMsg', 'ObjectMsg', (['"""plate0"""', '"""package://wecook_assets/data/objects/plate.urdf"""', '[0.3, 0.075, 0.75, 0.0, 0.0, 0.0, 1.0]'], {}), "('plate0', 'package://wecook_assets/data/objects/plate.urdf', [0.3,\n 0.075, 0.75, 0.0, 0.0, 0.0, 1.0])\n", (2656, 2761), False, 'from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg\n'), ((2853, 2972), 'wecook.msg.ObjectMsg', 'ObjectMsg', (['"""bowl0"""', '"""package://wecook_assets/data/objects/bowl_green.urdf"""', '[0.45, 0.375, 0.75, 0.0, 0.0, 0.0, 1.0]'], {}), "('bowl0', 'package://wecook_assets/data/objects/bowl_green.urdf',\n [0.45, 0.375, 0.75, 0.0, 0.0, 0.0, 1.0])\n", (2862, 2972), False, 'from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg\n'), ((3064, 3183), 'wecook.msg.ObjectMsg', 'ObjectMsg', (['"""bowl1"""', '"""package://wecook_assets/data/objects/bowl_green.urdf"""', '[0.15, 0.375, 0.75, 0.0, 0.0, 0.0, 1.0]'], {}), "('bowl1', 'package://wecook_assets/data/objects/bowl_green.urdf',\n [0.15, 0.375, 0.75, 0.0, 0.0, 0.0, 1.0])\n", (3073, 3183), False, 'from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg\n'), ((3275, 3395), 'wecook.msg.ObjectMsg', 'ObjectMsg', (['"""oil0"""', '"""package://wecook_assets/data/objects/olive_oil.urdf"""', '[0.0, 1.15, 0.75, 0.0, 0.0, 0.707, 0.707]'], {}), "('oil0', 'package://wecook_assets/data/objects/olive_oil.urdf', [\n 0.0, 1.15, 0.75, 0.0, 0.0, 0.707, 0.707])\n", (3284, 3395), False, 'from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg\n'), ((3487, 3602), 'wecook.msg.ObjectMsg', 'ObjectMsg', (['"""salt0"""', '"""package://wecook_assets/data/objects/salt.urdf"""', '[0.0, 1.0, 0.75, 0.0, 0.0, 0.707, 0.707]'], {}), "('salt0', 'package://wecook_assets/data/objects/salt.urdf', [0.0, \n 1.0, 0.75, 0.0, 0.0, 0.707, 0.707])\n", (3496, 3602), False, 'from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg\n'), ((3694, 3823), 
'wecook.msg.ObjectMsg', 'ObjectMsg', (['"""pepper0"""', '"""package://wecook_assets/data/objects/black_pepper.urdf"""', '[0.0, 0.9, 0.75, 0.0, 0.0, 0.707, 0.707]'], {}), "('pepper0',\n 'package://wecook_assets/data/objects/black_pepper.urdf', [0.0, 0.9, \n 0.75, 0.0, 0.0, 0.707, 0.707])\n", (3703, 3823), False, 'from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg\n'), ((3911, 4028), 'wecook.msg.ObjectMsg', 'ObjectMsg', (['"""chicken0"""', '"""package://wecook_assets/data/food/chicken.urdf"""', '[0.3, 0.075, 0.757, 0.0, 0.0, 0.0, 1.0]'], {}), "('chicken0', 'package://wecook_assets/data/food/chicken.urdf', [\n 0.3, 0.075, 0.757, 0.0, 0.0, 0.0, 1.0])\n", (3920, 4028), False, 'from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg\n'), ((4119, 4229), 'wecook.msg.ObjectMsg', 'ObjectMsg', (['"""lime0"""', '"""package://wecook_assets/data/food/lime.urdf"""', '[0.3, -0.3, 0.757, 0.0, 0.0, 0.0, 1.0]'], {}), "('lime0', 'package://wecook_assets/data/food/lime.urdf', [0.3, -\n 0.3, 0.757, 0.0, 0.0, 0.0, 1.0])\n", (4128, 4229), False, 'from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg\n'), ((4320, 4434), 'wecook.msg.ObjectMsg', 'ObjectMsg', (['"""pasta0"""', '"""package://wecook_assets/data/food/pasta.urdf"""', '[0.45, 0.375, 0.757, 0.0, 0.0, 0.0, 1.0]'], {}), "('pasta0', 'package://wecook_assets/data/food/pasta.urdf', [0.45, \n 0.375, 0.757, 0.0, 0.0, 0.0, 1.0])\n", (4329, 4434), False, 'from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg\n'), ((4526, 4563), 'wecook.msg.ContainingMsg', 'ContainingMsg', (["['plate0', 'chicken0']"], {}), "(['plate0', 'chicken0'])\n", (4539, 4563), False, 'from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg\n'), ((4591, 4625), 'wecook.msg.ContainingMsg', 'ContainingMsg', (["['bowl0', 'pasta0']"], {}), "(['bowl0', 'pasta0'])\n", (4604, 4625), False, 'from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg\n'), ((4687, 4744), 'wecook.msg.ActionMsg', 'ActionMsg', (["['p1']", '"""cut"""', "['plate0']", '"""knife0"""', "['lime0']"], {}), "(['p1'], 'cut', ['plate0'], 'knife0', ['lime0'])\n", (4696, 4744), False, 'from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg\n'), ((4771, 4828), 'wecook.msg.AgentMsg', 'AgentMsg', (['"""p1"""', '"""r"""', '[0.0, 0.0, 0.75, 0.0, 0.0, 0.0, 0.0]'], {}), "('p1', 'r', [0.0, 0.0, 0.75, 0.0, 0.0, 0.0, 0.0])\n", (4779, 4828), False, 'from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg\n')] |
rmay-intwine/volttron | volttron/platform/vip/agent/subsystems/heartbeat.py | a449f70e32f73ff0136a838d0feddb928ede6298 | # -*- coding: utf-8 -*- {{{
# vim: set fenc=utf-8 ft=python sw=4 ts=4 sts=4 et:
#
# Copyright 2017, Battelle Memorial Institute.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This material was prepared as an account of work sponsored by an agency of
# the United States Government. Neither the United States Government nor the
# United States Department of Energy, nor Battelle, nor any of their
# employees, nor any jurisdiction or organization that has cooperated in the
# development of these materials, makes any warranty, express or
# implied, or assumes any legal liability or responsibility for the accuracy,
# completeness, or usefulness or any information, apparatus, product,
# software, or process disclosed, or represents that its use would not infringe
# privately owned rights. Reference herein to any specific commercial product,
# process, or service by trade name, trademark, manufacturer, or otherwise
# does not necessarily constitute or imply its endorsement, recommendation, or
# favoring by the United States Government or any agency thereof, or
# Battelle Memorial Institute. The views and opinions of authors expressed
# herein do not necessarily state or reflect those of the
# United States Government or any agency thereof.
#
# PACIFIC NORTHWEST NATIONAL LABORATORY operated by
# BATTELLE for the UNITED STATES DEPARTMENT OF ENERGY
# under Contract DE-AC05-76RL01830
# }}}
import os
import weakref
from datetime import datetime
from .base import SubsystemBase
from volttron.platform.messaging.headers import TIMESTAMP
from volttron.platform.agent.utils import (get_aware_utc_now,
format_timestamp)
from volttron.platform.scheduling import periodic
from ..errors import Unreachable, VIPError
"""The heartbeat subsystem adds an optional periodic publish to all agents.
Heartbeats can be started with agents and toggled on and off at runtime.
"""
__docformat__ = 'reStructuredText'
__version__ = '1.0'
class Heartbeat(SubsystemBase):
def __init__(self, owner, core, rpc, pubsub, heartbeat_autostart,
heartbeat_period):
self.owner = owner
self.core = weakref.ref(core)
self.pubsub = weakref.ref(pubsub)
self.autostart = heartbeat_autostart
self.period = heartbeat_period
self.enabled = False
self.connect_error = False
def onsetup(sender, **kwargs):
rpc.export(self.start, 'heartbeat.start')
rpc.export(self.start_with_period, 'heartbeat.start_with_period')
rpc.export(self.stop, 'heartbeat.stop')
rpc.export(self.restart, 'heartbeat.restart')
rpc.export(self.set_period, 'heartbeat.set_period')
def onstart(sender, **kwargs):
if self.autostart:
self.start()
core.onsetup.connect(onsetup, self)
core.onstart.connect(onstart, self)
core.onconnected.connect(self.reconnect)
def start(self):
"""RPC method
Starts an agent's heartbeat.
"""
if not self.enabled:
self.scheduled = self.core().schedule(periodic(self.period), self.publish)
self.enabled = True
def start_with_period(self, period):
"""RPC method
Set period and start heartbeat.
:param period: Time in seconds between publishes.
"""
self.set_period(period)
self.start()
def reconnect(self, sender, **kwargs):
if self.connect_error:
self.restart()
self.connect_error = False
def stop(self):
"""RPC method
Stop an agent's heartbeat.
"""
if self.enabled:
# Trap the fact that scheduled may not have been
# set yet if the start hasn't been called.
try:
self.scheduled.cancel()
except AttributeError:
pass
self.enabled = False
def restart(self):
"""RPC method
        Restart the heartbeat with the current period. A heartbeat is
        published to the message bus immediately after the restart.
"""
self.stop()
self.start()
def set_period(self, period):
"""RPC method
Set heartbeat period.
:param period: Time in seconds between publishes.
"""
if self.enabled:
self.stop()
self.period = period
self.start()
else:
self.period = period
def publish(self):
topic = 'heartbeat/' + self.core().identity
headers = {TIMESTAMP: format_timestamp(get_aware_utc_now())}
message = self.owner.vip.health.get_status_value()
try:
self.pubsub().publish('pubsub', topic, headers, message)
except Unreachable as exc:
self.connect_error = True
self.stop()
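# --- Illustrative usage sketch (not part of the original subsystem) ---
# From inside a running agent, the subsystem is normally reached through the agent's
# ``vip`` attribute; that attribute path is an assumption here, while the method names
# below are the RPC-exported ones defined above:
#
#   self.vip.heartbeat.start_with_period(10)   # publish a status heartbeat every 10 s
#   self.vip.heartbeat.set_period(30)           # widen the interval at runtime
#   self.vip.heartbeat.stop()                   # stop publishing altogether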
| [((2660, 2677), 'weakref.ref', 'weakref.ref', (['core'], {}), '(core)\n', (2671, 2677), False, 'import weakref\n'), ((2700, 2719), 'weakref.ref', 'weakref.ref', (['pubsub'], {}), '(pubsub)\n', (2711, 2719), False, 'import weakref\n'), ((3626, 3647), 'volttron.platform.scheduling.periodic', 'periodic', (['self.period'], {}), '(self.period)\n', (3634, 3647), False, 'from volttron.platform.scheduling import periodic\n'), ((5119, 5138), 'volttron.platform.agent.utils.get_aware_utc_now', 'get_aware_utc_now', ([], {}), '()\n', (5136, 5138), False, 'from volttron.platform.agent.utils import get_aware_utc_now, format_timestamp\n')] |
kyawlin/smlb | datasets/experimental/ni_superalloys/Ni_superalloy.py | 79c757d7fc040fb30ad44410be158b3ce3bdf30d | """Ni-Superalloy dataset.
Scientific Machine Learning Benchmark
A benchmark of regression models in chem- and materials informatics.
2019, Brendan Folie, Citrine Informatics.
See class NiSuperalloyDataset for details.
"""
import os
import json
import zipfile
from typing import List, Optional, Tuple, Union
import numpy as np
from smlb.exceptions import InvalidParameterError
from smlb.parameters import params
from smlb.tabular_data import TabularData
class NiSuperalloyDataset(TabularData):
"""
Ni-Superalloy dataset.
Based on:
Bryce D. Conduit, Nicholas G. Jones, Howard J. Stone, Gareth John Conduit:
Design of a nickel-base superalloy using a neural network, Materials & Design 131: 358-365,
Elsevier, 2017. DOI 10.1016/j.matdes.2017.06.007
The dataset was downloaded from the Citrination platform (https://citrination.com),
dataset identifier #153493, Version 10.
There are 2800 rows.
The data have columns for composition (25 elements are present in at least one row),
whether the alloy was powder processed (0 or 1), whether it was pressure treated (0 or 1),
    heat treatment time (hours) and temperature (degrees Celsius) for up to 4 heat treatment steps,
    the total time spent in heat treatment (hours), the maximum heat treatment temperature
    (degrees Celsius), and the area under the time-temperature curve (degrees Celsius * hours).
A value of 0 generally implies that the heat treatment step was not done, but there
are some missing values. The total time and max temperature are generally more reliable
than the individual heating steps. The total compositions do not always add up to 100%,
but with about a dozen exceptions they always add up to somewhere between 95% and 105%.
There are also three columns for a pressure treatment step (temperature, time, pressure),
but since only 51 rows have non-zero entries, this information is not used.
There are 5 labels: ultimate tensile strength (MPa), elongation (unitless), stress rupture
stress (MPa), stress rupture time (hours), and yield strength (MPa). Tensile strength and
elongation occur together in 898 rows, stress rupture stress and time occur together in
856 rows, and yield strength occurs in 1046 rows. 898+856+1046=2800, so every row has exactly
one output set. The other values are denoted as NaN.
"""
DEFAULT_PATH = os.path.split(os.path.realpath(__file__))[0] + "/ni_superalloys_3.json.zip"
POWDER_PROCESSED_NO = 0
POWDER_PROCESSED_YES = 1
def __init__(
self, labels_to_load: Optional[Union[str, List[str]]] = None, ignore_dubious: bool = False
):
"""Initialize Ni-superalloy dataset with specified labels.
Parameters:
labels_to_load (str or List[str]): which labels to load. Options are
'Yield Strength', 'Ultimate Tensile Strength', 'Stress Rupture Time',
'Stress Rupture Stress', and 'Elongation'.
If None, then all labels are loaded.
ignore_dubious: whether or not to ignore samples that have something
questionable about them
"""
labels_to_load = params.optional_(
labels_to_load,
lambda arg: params.any_(
arg, params.string, lambda arg: params.sequence(arg, type_=str),
),
)
ignore_dubious = params.boolean(ignore_dubious)
filepath = self.DEFAULT_PATH
data, labels = self._load_data_and_labels(filepath, labels_to_load, ignore_dubious)
super().__init__(data=data, labels=labels)
def _load_data_and_labels(
self,
filepath: str,
labels_to_load: Optional[List[str]] = None,
ignore_dubious: bool = False,
):
"""Load data and labels from .json file."""
raw = self._unzip_json_file(filepath)
if ignore_dubious:
raw = [e for e in raw if self._filter_dubious(e)]
# dtype=object is necessary because this is a mixed-type array (float and string)
data = np.array([self._parse_json_data(e) for e in raw], dtype=object)
labels = np.array([self._parse_json_labels(e, labels_to_load) for e in raw], dtype=float)
return data, labels
@staticmethod
def _unzip_json_file(filepath: str):
"""Open and read zipped json file."""
filename = os.path.basename(filepath)
assert (
filename[-4:] == ".zip"
), f"File path must point to a .zip file, instead got '{filepath}'"
with zipfile.ZipFile(filepath) as zf:
unzipped_filename = filename[:-4]
with zf.open(unzipped_filename) as fp:
raw = json.load(fp)
return raw
@staticmethod
def _extract_raw_composition(entry: dict) -> List[dict]:
"""Get composition in its raw form."""
raw_composition = entry.get("composition")
if raw_composition is None or not isinstance(raw_composition, list):
raise InvalidParameterError(
expected="Chemical composition as a list", got=raw_composition
)
return raw_composition
@staticmethod
def _filter_dubious(entry: dict) -> bool:
"""
Determine whether or not a json entry has something questionable about it.
Currently, the only thing filtered on is if the composition has an asterisk in it,
which occurs for 6 samples.
Parameters:
entry (dict): A json entry corresponding to a row in the dataset.
Returns: bool
            True if the entry should be kept, i.e. the composition does not contain an asterisk.
"""
raw_composition = NiSuperalloyDataset._extract_raw_composition(entry)
composition_dict = NiSuperalloyDataset._parse_composition_as_dict(raw_composition)
composition_dict_float, exception_caught = NiSuperalloyDataset._dict_values_to_float(
composition_dict
)
return not exception_caught
def _parse_json_data(self, entry: dict):
"""
Helper function to parse data in a single row from the raw json.
Parameters:
entry (dict): A json entry corresponding to a row in the dataset.
Returns: array
Array of data in this row.
"""
assert entry["category"] == "system.chemical"
raw_composition = NiSuperalloyDataset._extract_raw_composition(entry)
composition: str = self._parse_composition(raw_composition)
properties = entry.get("properties")
if properties is None or not isinstance(properties, list):
raise InvalidParameterError(
expected="A list of dictionaries, one for each property", got=properties
)
heat_treatment_1_time = self._get_scalar_property(
properties, "Heat treatment 1 Time", units="hours", default_value=0
)
heat_treatment_1_temp = self._get_scalar_property(
properties, "Heat treatment 1 Temperature", units="$^{\\circ}$C", default_value=0
)
heat_treatment_2_time = self._get_scalar_property(
properties, "Heat treatment 2 Time", units="hours", default_value=0
)
heat_treatment_2_temp = self._get_scalar_property(
properties, "Heat treatment 2 Temperature", units="$^{\\circ}$C", default_value=0
)
heat_treatment_3_time = self._get_scalar_property(
properties, "Heat treatment 3 Time", units="hours", default_value=0
)
heat_treatment_3_temp = self._get_scalar_property(
properties, "Heat treatment 3 Temperature", units="$^{\\circ}$C", default_value=0
)
heat_treatment_4_time = self._get_scalar_property(
properties, "Heat treatment 4 Time", units="hours", default_value=0
)
heat_treatment_4_temp = self._get_scalar_property(
properties, "Heat treatment 4 Temperature", units="$^{\\circ}$C", default_value=0
)
total_heat_treatment_time = self._get_scalar_property(
properties, "Total heat treatment time", units="hours"
)
max_heat_treatment_temp = self._get_scalar_property(
properties, "Max Heat Treatment Temperature", units="$^{\\circ}$C"
)
area_under_heat_treatment_curve = self._get_scalar_property(
properties, "Area under heat treatment curve", units="$^{\\circ}$C * hours"
)
powder_processed_dict = {"No": self.POWDER_PROCESSED_NO, "Yes": self.POWDER_PROCESSED_YES}
powder_processed = self._get_categorical_property(
properties, "Powder processed", categories_dict=powder_processed_dict
)
data_array = [
composition,
heat_treatment_1_time,
heat_treatment_1_temp,
heat_treatment_2_time,
heat_treatment_2_temp,
heat_treatment_3_time,
heat_treatment_3_temp,
heat_treatment_4_time,
heat_treatment_4_temp,
total_heat_treatment_time,
max_heat_treatment_temp,
area_under_heat_treatment_curve,
powder_processed,
]
return data_array
def _parse_json_labels(self, entry: dict, labels_to_load: Optional[List[str]] = None):
"""
Helper function to parse labels in a single row from the raw json.
Parameters:
entry (dict): A json entry corresponding to a row in the dataset.
labels_to_load (List[str]): Optional list of labels to load.
Returns: array
Array of labels in this row that we are interested in.
"""
if labels_to_load is None:
labels_to_load = [
"Yield Strength",
"Ultimate Tensile Strength",
"Stress Rupture Time",
"Stress Rupture Stress",
"Elongation",
]
properties = entry.get("properties")
if properties is None or not isinstance(properties, list):
raise InvalidParameterError(
expected="A list of dictionaries, one for each property", got=properties
)
labels_array = []
for label in labels_to_load:
labels_array.append(self._get_scalar_property(properties, label, default_value=None))
return labels_array
@staticmethod
def _parse_composition(raw_composition: List[dict]) -> str:
"""
Helper function to parse composition as a string.
Parameters:
raw_composition (List[dict]): A list, each entry of which corresponds to an element.
An entry is a dict with an 'element' key and an 'idealWeightPercent' key.
The element is a string (e.g., 'Cu') and the weight percent is another dict
with a single key, 'value', pointing to a floating point number.
The values are in percentage points, and add up to ~100.
Returns: str
Chemical composition as string, e.g. 'Al5.5Ni94.0W0.5'
"""
composition_dict = NiSuperalloyDataset._parse_composition_as_dict(raw_composition)
composition_dict_float, _ = NiSuperalloyDataset._dict_values_to_float(composition_dict)
composition_str: str = ""
for element_name, element_amount in composition_dict_float.items():
if element_amount > 0:
composition_str += element_name + str(element_amount)
return composition_str
@staticmethod
def _parse_composition_as_dict(raw_composition: List[dict]) -> dict:
"""
Helper function to parse composition as a dictionary.
Parameters:
raw_composition (List[dict]): A list, each entry of which corresponds to an element.
An entry is a dict with an 'element' key and an 'idealWeightPercent' key.
The element is a string (e.g., 'Cu') and the weight percent is another dict
with a single key, 'value', pointing to a floating point number.
The values are in percentage points, and add up to ~100 (but not exactly).
Returns: dict
Chemical composition as a dictionary with the elements as keys
and their raw amounts as values
"""
composition_dict = dict()
for entry in raw_composition:
try:
element_name = entry["element"]
element_amount = entry["idealWeightPercent"]["value"]
except KeyError:
raise InvalidParameterError(
expected="Element amount as a dictionary of the form\n"
"{'element': <element name>,"
"'idealWeightPercent': "
"{'value': <element amount>}}",
got=entry,
)
composition_dict[element_name] = element_amount
return composition_dict
@staticmethod
def _dict_values_to_float(d: dict) -> Tuple[dict, bool]:
"""
Convert a dictionary's values to their floating point representations, if possible.
Parameters:
d: a dictionary
Returns: dict, bool
A modified version of `d`, and a boolean flag indicating whether or not
an Exception was caught
"""
d_copy = dict()
exception_caught = False
for key, value in d.items():
try:
value_float = float(value)
except ValueError:
exception_caught = True
value_float = NiSuperalloyDataset._parse_peculiar_amount(value)
d_copy[key] = value_float
return d_copy, exception_caught
@staticmethod
def _parse_peculiar_amount(x: str) -> float:
"""
Deals with dataset-specific-peculiarities in composition amounts.
Some composition amounts have a trailing asterisk, e.g., '2*'. The meaning is unclear.
Perhaps it denotes that the amount is imprecise. In any case, they only occur in 6
samples. The trailing asterisk will be ignored.
"""
if x[-1] == "*":
x = x[:-1]
try:
return float(x)
except ValueError:
raise InvalidParameterError("Amount as a float", x)
def _get_scalar_property(
self,
properties: List[dict],
property_name: str,
units: Optional[str] = None,
default_value: Optional[float] = None,
) -> float:
"""
A helper function to get a single scalar property.
This calls _get_single_property and then checks that the result can be
turned into a float.
Parameters:
properties: A list of dicts, each of which is a single property.
property_name: The name of the property to get the value of.
units: Optional expected units string.
default_value: Value to return if `property_name` is not present.
Raises:
InvalidParameterError: if the value cannot be expressed as a float
Returns: float
The value of the desired property.
"""
try:
val = self._get_single_property(properties, property_name, units, default_value)
if val is None:
return None
return float(val)
except ValueError:
raise InvalidParameterError(
expected=f"Property {property_name} should have a value "
f"that can be expressed as a float",
got=properties,
)
def _get_categorical_property(
self, properties: List[dict], property_name: str, categories_dict: dict
) -> int:
"""
Helper function to get a single categorical property as an int.
Parameters:
properties: A list of dicts, each of which is a single property.
property_name: The name of the property to get the value of.
categories_dict: Dict from the categorical property (string) to a unique integer value.
Raises:
InvalidParameterError: if the value is not in the expected list of possible categories
as given by the keys in `categories_dict`
Returns: int
An integer that corresponds to the value of the desired property.
"""
category = self._get_single_property(properties, property_name)
try:
return categories_dict[category]
except KeyError:
raise InvalidParameterError(
f"A value in the array: {categories_dict.keys()}", category
)
@staticmethod
def _get_single_property(
properties: List[dict], property_name: str, units: Optional[str] = None, default_value=None
):
"""
Helper function to get a single property.
Parameters:
properties: A list of dicts, each of which is a single property. Each entry is expected
to have a 'name' field that corresponds to the property name and a `scalars` field
that is a list with one entry, a dict of the form {'value': <property value>}.
It may also have a 'units' field.
property_name: The name of the property to get the value of. `properties` is expected
to have exactly one entry with the 'name' field equal to `property_name`.
units: Optional expected value of 'units' field. If specified, then there must be a
'units' field and its value must correspond to `units`.
default_value: Value to return if `property_name` is not present.
Raises:
InvalidParameterError: if `properties` does not conform to the expected structure
Returns:
The value of the property `property_name`
"""
matching_props = [prop for prop in properties if prop.get("name") == property_name]
if len(matching_props) == 0:
return default_value
elif len(matching_props) > 1:
raise InvalidParameterError(
expected=f"Only one entry in properties should have name" f" '{property_name}'",
got=properties,
)
matching_prop = matching_props[0]
try:
scalars = matching_prop["scalars"]
assert len(scalars) == 1
val = scalars[0]["value"]
if units is not None:
assert matching_prop["units"] == units
except (KeyError, AssertionError):
units_str = "" if units is None else f", 'units': {units}"
raise InvalidParameterError(
expected="Property as a dictionary of the form\n"
"{'name': <property name>, 'scalars': "
"[{'value': <property value>}]" + units_str + "}",
got=matching_prop,
)
return val
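# --- Illustrative usage sketch (not part of the original module) ---
# Loading only a subset of the available labels; how the parsed samples are then
# accessed depends on smlb's TabularData interface and is not shown here:
#
#   dataset = NiSuperalloyDataset(labels_to_load=["Yield Strength", "Elongation"],
#                                 ignore_dubious=True)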
| [((3413, 3443), 'smlb.parameters.params.boolean', 'params.boolean', (['ignore_dubious'], {}), '(ignore_dubious)\n', (3427, 3443), False, 'from smlb.parameters import params\n'), ((4398, 4424), 'os.path.basename', 'os.path.basename', (['filepath'], {}), '(filepath)\n', (4414, 4424), False, 'import os\n'), ((4568, 4593), 'zipfile.ZipFile', 'zipfile.ZipFile', (['filepath'], {}), '(filepath)\n', (4583, 4593), False, 'import zipfile\n'), ((5026, 5116), 'smlb.exceptions.InvalidParameterError', 'InvalidParameterError', ([], {'expected': '"""Chemical composition as a list"""', 'got': 'raw_composition'}), "(expected='Chemical composition as a list', got=\n raw_composition)\n", (5047, 5116), False, 'from smlb.exceptions import InvalidParameterError\n'), ((6628, 6728), 'smlb.exceptions.InvalidParameterError', 'InvalidParameterError', ([], {'expected': '"""A list of dictionaries, one for each property"""', 'got': 'properties'}), "(expected=\n 'A list of dictionaries, one for each property', got=properties)\n", (6649, 6728), False, 'from smlb.exceptions import InvalidParameterError\n'), ((10081, 10181), 'smlb.exceptions.InvalidParameterError', 'InvalidParameterError', ([], {'expected': '"""A list of dictionaries, one for each property"""', 'got': 'properties'}), "(expected=\n 'A list of dictionaries, one for each property', got=properties)\n", (10102, 10181), False, 'from smlb.exceptions import InvalidParameterError\n'), ((2427, 2453), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (2443, 2453), False, 'import os\n'), ((4720, 4733), 'json.load', 'json.load', (['fp'], {}), '(fp)\n', (4729, 4733), False, 'import json\n'), ((14310, 14355), 'smlb.exceptions.InvalidParameterError', 'InvalidParameterError', (['"""Amount as a float"""', 'x'], {}), "('Amount as a float', x)\n", (14331, 14355), False, 'from smlb.exceptions import InvalidParameterError\n'), ((15457, 15595), 'smlb.exceptions.InvalidParameterError', 'InvalidParameterError', ([], {'expected': 'f"""Property {property_name} should have a value that can be expressed as a float"""', 'got': 'properties'}), "(expected=\n f'Property {property_name} should have a value that can be expressed as a float'\n , got=properties)\n", (15478, 15595), False, 'from smlb.exceptions import InvalidParameterError\n'), ((18139, 18263), 'smlb.exceptions.InvalidParameterError', 'InvalidParameterError', ([], {'expected': 'f"""Only one entry in properties should have name \'{property_name}\'"""', 'got': 'properties'}), '(expected=\n f"Only one entry in properties should have name \'{property_name}\'", got\n =properties)\n', (18160, 18263), False, 'from smlb.exceptions import InvalidParameterError\n'), ((18704, 18892), 'smlb.exceptions.InvalidParameterError', 'InvalidParameterError', ([], {'expected': '(\n """Property as a dictionary of the form\n{\'name\': <property name>, \'scalars\': [{\'value\': <property value>}]"""\n + units_str + \'}\')', 'got': 'matching_prop'}), '(expected=\n """Property as a dictionary of the form\n{\'name\': <property name>, \'scalars\': [{\'value\': <property value>}]"""\n + units_str + \'}\', got=matching_prop)\n', (18725, 18892), False, 'from smlb.exceptions import InvalidParameterError\n'), ((12594, 12773), 'smlb.exceptions.InvalidParameterError', 'InvalidParameterError', ([], {'expected': '"""Element amount as a dictionary of the form\n{\'element\': <element name>,\'idealWeightPercent\': {\'value\': <element amount>}}"""', 'got': 'entry'}), '(expected=\n """Element amount as a dictionary of the form\n{\'element\': 
<element name>,\'idealWeightPercent\': {\'value\': <element amount>}}"""\n , got=entry)\n', (12615, 12773), False, 'from smlb.exceptions import InvalidParameterError\n'), ((3330, 3361), 'smlb.parameters.params.sequence', 'params.sequence', (['arg'], {'type_': 'str'}), '(arg, type_=str)\n', (3345, 3361), False, 'from smlb.parameters import params\n')] |
SHolzhauer/beats | metricbeat/module/postgresql/test_postgresql.py | 39679a536a22e8a0d7534a2475504488909d19fd | import metricbeat
import os
import pytest
import sys
import unittest
class Test(metricbeat.BaseTest):
COMPOSE_SERVICES = ['postgresql']
def common_checks(self, output):
# Ensure no errors or warnings exist in the log.
self.assert_no_logged_warnings()
for evt in output:
top_level_fields = metricbeat.COMMON_FIELDS + ["postgresql"]
self.assertCountEqual(self.de_dot(top_level_fields), evt.keys())
self.assert_fields_are_documented(evt)
def get_hosts(self):
username = "postgres"
host = self.compose_host()
dsn = "postgres://{}?sslmode=disable".format(host)
return (
[dsn],
username,
os.getenv("POSTGRESQL_PASSWORD"),
)
@unittest.skipUnless(metricbeat.INTEGRATION_TESTS, "integration test")
@pytest.mark.tag('integration')
def test_activity(self):
"""
PostgreSQL module outputs an event.
"""
hosts, username, password = self.get_hosts()
self.render_config_template(modules=[{
"name": "postgresql",
"metricsets": ["activity"],
"hosts": hosts,
"username": username,
"password": password,
"period": "5s"
}])
proc = self.start_beat()
self.wait_until(lambda: self.output_lines() > 0)
proc.check_kill_and_wait()
output = self.read_output_json()
self.common_checks(output)
for evt in output:
assert "name" in evt["postgresql"]["activity"]["database"]
assert "oid" in evt["postgresql"]["activity"]["database"]
assert "state" in evt["postgresql"]["activity"]
@unittest.skipUnless(metricbeat.INTEGRATION_TESTS, "integration test")
@pytest.mark.tag('integration')
def test_database(self):
"""
PostgreSQL module outputs an event.
"""
hosts, username, password = self.get_hosts()
self.render_config_template(modules=[{
"name": "postgresql",
"metricsets": ["database"],
"hosts": hosts,
"username": username,
"password": password,
"period": "5s"
}])
proc = self.start_beat()
self.wait_until(lambda: self.output_lines() > 0)
proc.check_kill_and_wait()
output = self.read_output_json()
self.common_checks(output)
for evt in output:
assert "name" in evt["postgresql"]["database"]
assert "oid" in evt["postgresql"]["database"]
assert "blocks" in evt["postgresql"]["database"]
assert "rows" in evt["postgresql"]["database"]
assert "conflicts" in evt["postgresql"]["database"]
assert "deadlocks" in evt["postgresql"]["database"]
@unittest.skipUnless(metricbeat.INTEGRATION_TESTS, "integration test")
@pytest.mark.tag('integration')
def test_bgwriter(self):
"""
PostgreSQL module outputs an event.
"""
hosts, username, password = self.get_hosts()
self.render_config_template(modules=[{
"name": "postgresql",
"metricsets": ["bgwriter"],
"hosts": hosts,
"username": username,
"password": password,
"period": "5s"
}])
proc = self.start_beat()
self.wait_until(lambda: self.output_lines() > 0)
proc.check_kill_and_wait()
output = self.read_output_json()
self.common_checks(output)
for evt in output:
assert "checkpoints" in evt["postgresql"]["bgwriter"]
assert "buffers" in evt["postgresql"]["bgwriter"]
assert "stats_reset" in evt["postgresql"]["bgwriter"]
| [((779, 848), 'unittest.skipUnless', 'unittest.skipUnless', (['metricbeat.INTEGRATION_TESTS', '"""integration test"""'], {}), "(metricbeat.INTEGRATION_TESTS, 'integration test')\n", (798, 848), False, 'import unittest\n'), ((854, 884), 'pytest.mark.tag', 'pytest.mark.tag', (['"""integration"""'], {}), "('integration')\n", (869, 884), False, 'import pytest\n'), ((1728, 1797), 'unittest.skipUnless', 'unittest.skipUnless', (['metricbeat.INTEGRATION_TESTS', '"""integration test"""'], {}), "(metricbeat.INTEGRATION_TESTS, 'integration test')\n", (1747, 1797), False, 'import unittest\n'), ((1803, 1833), 'pytest.mark.tag', 'pytest.mark.tag', (['"""integration"""'], {}), "('integration')\n", (1818, 1833), False, 'import pytest\n'), ((2841, 2910), 'unittest.skipUnless', 'unittest.skipUnless', (['metricbeat.INTEGRATION_TESTS', '"""integration test"""'], {}), "(metricbeat.INTEGRATION_TESTS, 'integration test')\n", (2860, 2910), False, 'import unittest\n'), ((2916, 2946), 'pytest.mark.tag', 'pytest.mark.tag', (['"""integration"""'], {}), "('integration')\n", (2931, 2946), False, 'import pytest\n'), ((729, 761), 'os.getenv', 'os.getenv', (['"""POSTGRESQL_PASSWORD"""'], {}), "('POSTGRESQL_PASSWORD')\n", (738, 761), False, 'import os\n')] |
ozen/pytorch-lightning | pytorch_lightning/accelerators/cpu_backend.py | 3b0b402d30fa19e0fef7d150c30ff4bb14a64230 | # Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
from pytorch_lightning.accelerators.base_backend import Accelerator
from pytorch_lightning.utilities import AMPType, rank_zero_warn
from pytorch_lightning.utilities.exceptions import MisconfigurationException
class CPUBackend(Accelerator):
def __init__(self, trainer, cluster_environment=None):
super().__init__(trainer, cluster_environment)
def setup(self, model):
# run through amp wrapper
if self.trainer.amp_backend:
raise MisconfigurationException('amp + cpu is not supported. Please use a GPU option')
# call setup after the ddp process has connected
self.trainer.call_setup_hook(model)
# CHOOSE OPTIMIZER
# allow for lr schedulers as well
self.setup_optimizers(model)
self.trainer.model = model
def train(self):
model = self.trainer.model
# set up training routine
self.trainer.train_loop.setup_training(model)
# train or test
results = self.train_or_test()
return results
def training_step(self, args):
if self.trainer.amp_backend == AMPType.NATIVE:
with torch.cuda.amp.autocast():
output = self.trainer.model.training_step(*args)
else:
output = self.trainer.model.training_step(*args)
return output
def validation_step(self, args):
if self.trainer.amp_backend == AMPType.NATIVE:
with torch.cuda.amp.autocast():
output = self.trainer.model.validation_step(*args)
else:
output = self.trainer.model.validation_step(*args)
return output
def test_step(self, args):
if self.trainer.amp_backend == AMPType.NATIVE:
with torch.cuda.amp.autocast():
output = self.trainer.model.test_step(*args)
else:
output = self.trainer.model.test_step(*args)
return output
| [((1075, 1161), 'pytorch_lightning.utilities.exceptions.MisconfigurationException', 'MisconfigurationException', (['"""amp + cpu is not supported. Please use a GPU option"""'], {}), "(\n 'amp + cpu is not supported. Please use a GPU option')\n", (1100, 1161), False, 'from pytorch_lightning.utilities.exceptions import MisconfigurationException\n'), ((1743, 1768), 'torch.cuda.amp.autocast', 'torch.cuda.amp.autocast', ([], {}), '()\n', (1766, 1768), False, 'import torch\n'), ((2042, 2067), 'torch.cuda.amp.autocast', 'torch.cuda.amp.autocast', ([], {}), '()\n', (2065, 2067), False, 'import torch\n'), ((2339, 2364), 'torch.cuda.amp.autocast', 'torch.cuda.amp.autocast', ([], {}), '()\n', (2362, 2364), False, 'import torch\n')] |
aurphillus/Django-Library-Completed | books/admin.py | f46e45f85c888e7694323e22f6e966c291a4a0be | from django.contrib import admin
from books.models import Genre, Author, Book, TBR
# Register your models here.
admin.site.register(Genre)
admin.site.register(Author)
admin.site.register(Book)
admin.site.register(TBR) | [((114, 140), 'django.contrib.admin.site.register', 'admin.site.register', (['Genre'], {}), '(Genre)\n', (133, 140), False, 'from django.contrib import admin\n'), ((141, 168), 'django.contrib.admin.site.register', 'admin.site.register', (['Author'], {}), '(Author)\n', (160, 168), False, 'from django.contrib import admin\n'), ((169, 194), 'django.contrib.admin.site.register', 'admin.site.register', (['Book'], {}), '(Book)\n', (188, 194), False, 'from django.contrib import admin\n'), ((195, 219), 'django.contrib.admin.site.register', 'admin.site.register', (['TBR'], {}), '(TBR)\n', (214, 219), False, 'from django.contrib import admin\n')] |
jfmyers9/integrations-core | rabbitmq/tests/common.py | 8793c784f1d5b2c9541b2dd4214dd91584793ced | # (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import os
from packaging import version
from datadog_checks.base.utils.common import get_docker_hostname
HERE = os.path.dirname(os.path.abspath(__file__))
ROOT = os.path.dirname(os.path.dirname(HERE))
RABBITMQ_VERSION_RAW = os.environ['RABBITMQ_VERSION']
RABBITMQ_VERSION = version.parse(RABBITMQ_VERSION_RAW)
CHECK_NAME = 'rabbitmq'
HOST = get_docker_hostname()
PORT = 15672
URL = 'http://{}:{}/api/'.format(HOST, PORT)
CONFIG = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': 'guest',
'queues': ['test1'],
'tags': ["tag1:1", "tag2"],
'exchanges': ['test1'],
}
CONFIG_NO_NODES = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': 'guest',
'queues': ['test1'],
'tags': ["tag1:1", "tag2"],
'exchanges': ['test1'],
'collect_node_metrics': False,
}
CONFIG_REGEX = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': 'guest',
'queues_regexes': [r'test\d+'],
'exchanges_regexes': [r'test\d+'],
}
CONFIG_VHOSTS = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': 'guest',
'vhosts': ['/', 'myvhost'],
}
CONFIG_WITH_FAMILY = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': 'guest',
'tag_families': True,
'queues_regexes': [r'(test)\d+'],
'exchanges_regexes': [r'(test)\d+'],
}
CONFIG_DEFAULT_VHOSTS = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': 'guest',
'vhosts': ['/', 'test'],
}
CONFIG_TEST_VHOSTS = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': 'guest',
'vhosts': ['test', 'test2'],
}
EXCHANGE_MESSAGE_STATS = {
'ack': 1.0,
'ack_details': {'rate': 1.0},
'confirm': 1.0,
'confirm_details': {'rate': 1.0},
'deliver_get': 1.0,
'deliver_get_details': {'rate': 1.0},
'publish': 1.0,
'publish_details': {'rate': 1.0},
'publish_in': 1.0,
'publish_in_details': {'rate': 1.0},
'publish_out': 1.0,
'publish_out_details': {'rate': 1.0},
'return_unroutable': 1.0,
'return_unroutable_details': {'rate': 1.0},
'redeliver': 1.0,
'redeliver_details': {'rate': 1.0},
}
| [((394, 429), 'packaging.version.parse', 'version.parse', (['RABBITMQ_VERSION_RAW'], {}), '(RABBITMQ_VERSION_RAW)\n', (407, 429), False, 'from packaging import version\n'), ((463, 484), 'datadog_checks.base.utils.common.get_docker_hostname', 'get_docker_hostname', ([], {}), '()\n', (482, 484), False, 'from datadog_checks.base.utils.common import get_docker_hostname\n'), ((247, 272), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (262, 272), False, 'import os\n'), ((297, 318), 'os.path.dirname', 'os.path.dirname', (['HERE'], {}), '(HERE)\n', (312, 318), False, 'import os\n')] |
timgates42/st2 | st2common/st2common/util/pack.py | 0e8ae756f30ffe2e017c64bff67830abdee7f7c9 | # Copyright 2020 The StackStorm Authors.
# Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import os
import re
import collections.abc
import six
from st2common.util import schema as util_schema
from st2common.constants.pack import MANIFEST_FILE_NAME
from st2common.constants.pack import PACK_REF_WHITELIST_REGEX
from st2common.content.loader import MetaLoader
from st2common.persistence.pack import Pack
from st2common.exceptions.apivalidation import ValueValidationException
from st2common.util import jinja as jinja_utils
__all__ = [
'get_pack_ref_from_metadata',
'get_pack_metadata',
'get_pack_warnings',
'get_pack_common_libs_path_for_pack_ref',
'get_pack_common_libs_path_for_pack_db',
'validate_config_against_schema',
'normalize_pack_version'
]
# Common format for python 2.7 warning
if six.PY2:
PACK_PYTHON2_WARNING = "DEPRECATION WARNING: Pack %s only supports Python 2.x. " \
"Python 2 support will be dropped in future releases. " \
"Please consider updating your packs to work with Python 3.x"
else:
PACK_PYTHON2_WARNING = "DEPRECATION WARNING: Pack %s only supports Python 2.x. " \
"Python 2 support has been removed since st2 v3.4.0. " \
"Please update your packs to work with Python 3.x"
def get_pack_ref_from_metadata(metadata, pack_directory_name=None):
"""
Utility function which retrieves pack "ref" attribute from the pack metadata file.
If this attribute is not provided, an attempt is made to infer "ref" from the "name" attribute.
:rtype: ``str``
"""
pack_ref = None
# The rules for the pack ref are as follows:
    # 1. If the ref attribute is available, we use that
    # 2. If pack_directory_name is available we use that (this only applies to packs
    #    which are in sub-directories)
    # 3. If neither is available, but the pack name is and it meets the valid name
    #    criteria, we use that
if metadata.get('ref', None):
pack_ref = metadata['ref']
elif pack_directory_name and re.match(PACK_REF_WHITELIST_REGEX, pack_directory_name):
pack_ref = pack_directory_name
else:
if re.match(PACK_REF_WHITELIST_REGEX, metadata['name']):
pack_ref = metadata['name']
else:
msg = ('Pack name "%s" contains invalid characters and "ref" attribute is not '
'available. You either need to add "ref" attribute which contains only word '
                   'characters to the pack metadata file or update name attribute to contain only '
'word characters.')
raise ValueError(msg % (metadata['name']))
return pack_ref
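# Illustrative behaviour of the rules above (metadata values invented for the example):
#   get_pack_ref_from_metadata({'ref': 'my_pack'})                                   -> 'my_pack'
#   get_pack_ref_from_metadata({'name': 'my pack!'}, pack_directory_name='my_pack')  -> 'my_pack'
#   get_pack_ref_from_metadata({'name': 'my_pack'})                                  -> 'my_pack'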
def get_pack_metadata(pack_dir):
"""
Return parsed metadata for a particular pack directory.
:rtype: ``dict``
"""
manifest_path = os.path.join(pack_dir, MANIFEST_FILE_NAME)
if not os.path.isfile(manifest_path):
raise ValueError('Pack "%s" is missing %s file' % (pack_dir, MANIFEST_FILE_NAME))
meta_loader = MetaLoader()
content = meta_loader.load(manifest_path)
if not content:
raise ValueError('Pack "%s" metadata file is empty' % (pack_dir))
return content
def get_pack_warnings(pack_metadata):
"""
Return warning string if pack metadata indicates only python 2 is supported
:rtype: ``str``
"""
warning = None
versions = pack_metadata.get('python_versions', None)
pack_name = pack_metadata.get('name', None)
if versions and set(versions) == set(['2']):
warning = PACK_PYTHON2_WARNING % pack_name
return warning
def validate_config_against_schema(config_schema, config_object, config_path,
pack_name=None):
"""
Validate provided config dictionary against the provided config schema
dictionary.
"""
    # NOTE: Lazy import to avoid the performance overhead of importing this module when it's not used
import jsonschema
pack_name = pack_name or 'unknown'
schema = util_schema.get_schema_for_resource_parameters(parameters_schema=config_schema,
allow_additional_properties=True)
instance = config_object
try:
cleaned = util_schema.validate(instance=instance, schema=schema,
cls=util_schema.CustomValidator, use_default=True,
allow_default_none=True)
for key in cleaned:
if (jinja_utils.is_jinja_expression(value=cleaned.get(key)) and
"decrypt_kv" in cleaned.get(key) and config_schema.get(key).get('secret')):
raise ValueValidationException('Values specified as "secret: True" in config '
'schema are automatically decrypted by default. Use '
'of "decrypt_kv" jinja filter is not allowed for '
'such values. Please check the specified values in '
'the config or the default values in the schema.')
except jsonschema.ValidationError as e:
attribute = getattr(e, 'path', [])
        if isinstance(attribute, (tuple, list, collections.abc.Iterable)):
attribute = [str(item) for item in attribute]
attribute = '.'.join(attribute)
else:
attribute = str(attribute)
msg = ('Failed validating attribute "%s" in config for pack "%s" (%s): %s' %
(attribute, pack_name, config_path, six.text_type(e)))
raise jsonschema.ValidationError(msg)
return cleaned
def get_pack_common_libs_path_for_pack_ref(pack_ref):
pack_db = Pack.get_by_ref(pack_ref)
pack_common_libs_path = get_pack_common_libs_path_for_pack_db(pack_db=pack_db)
return pack_common_libs_path
def get_pack_common_libs_path_for_pack_db(pack_db):
"""
Return the pack's common lib path. This is the path where common code for sensors
and actions are placed.
For example, if the pack is at /opt/stackstorm/packs/my_pack, you can place
common library code for actions and sensors in /opt/stackstorm/packs/my_pack/lib/.
This common library code is only available for python sensors and actions. The lib
structure also needs to follow a python convention with a __init__.py file.
:param pack_db: Pack DB model
:type pack_db: :class:`PackDB`
:rtype: ``str``
"""
pack_dir = getattr(pack_db, 'path', None)
if not pack_dir:
return None
libs_path = os.path.join(pack_dir, 'lib')
return libs_path
def normalize_pack_version(version):
"""
    Normalize an old, pre-StackStorm v2.1, non-semver version string (e.g. 0.2) to a valid
semver version string (0.2.0).
:rtype: ``str``
"""
version = str(version)
    version_separator_count = version.count('.')
    if version_separator_count == 1:
version = version + '.0'
return version
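# Illustrative examples:
#   normalize_pack_version('0.2')   -> '0.2.0'
#   normalize_pack_version('1.0.0') -> '1.0.0' (already valid, returned unchanged)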
| [((3469, 3511), 'os.path.join', 'os.path.join', (['pack_dir', 'MANIFEST_FILE_NAME'], {}), '(pack_dir, MANIFEST_FILE_NAME)\n', (3481, 3511), False, 'import os\n'), ((3664, 3676), 'st2common.content.loader.MetaLoader', 'MetaLoader', ([], {}), '()\n', (3674, 3676), False, 'from st2common.content.loader import MetaLoader\n'), ((4650, 4768), 'st2common.util.schema.get_schema_for_resource_parameters', 'util_schema.get_schema_for_resource_parameters', ([], {'parameters_schema': 'config_schema', 'allow_additional_properties': '(True)'}), '(parameters_schema=\n config_schema, allow_additional_properties=True)\n', (4696, 4768), True, 'from st2common.util import schema as util_schema\n'), ((6388, 6413), 'st2common.persistence.pack.Pack.get_by_ref', 'Pack.get_by_ref', (['pack_ref'], {}), '(pack_ref)\n', (6403, 6413), False, 'from st2common.persistence.pack import Pack\n'), ((7245, 7274), 'os.path.join', 'os.path.join', (['pack_dir', '"""lib"""'], {}), "(pack_dir, 'lib')\n", (7257, 7274), False, 'import os\n'), ((3524, 3553), 'os.path.isfile', 'os.path.isfile', (['manifest_path'], {}), '(manifest_path)\n', (3538, 3553), False, 'import os\n'), ((4881, 5016), 'st2common.util.schema.validate', 'util_schema.validate', ([], {'instance': 'instance', 'schema': 'schema', 'cls': 'util_schema.CustomValidator', 'use_default': '(True)', 'allow_default_none': '(True)'}), '(instance=instance, schema=schema, cls=util_schema.\n CustomValidator, use_default=True, allow_default_none=True)\n', (4901, 5016), True, 'from st2common.util import schema as util_schema\n'), ((2688, 2743), 're.match', 're.match', (['PACK_REF_WHITELIST_REGEX', 'pack_directory_name'], {}), '(PACK_REF_WHITELIST_REGEX, pack_directory_name)\n', (2696, 2743), False, 'import re\n'), ((2805, 2857), 're.match', 're.match', (['PACK_REF_WHITELIST_REGEX', "metadata['name']"], {}), "(PACK_REF_WHITELIST_REGEX, metadata['name'])\n", (2813, 2857), False, 'import re\n'), ((6266, 6297), 'jsonschema.ValidationError', 'jsonschema.ValidationError', (['msg'], {}), '(msg)\n', (6292, 6297), False, 'import jsonschema\n'), ((5312, 5591), 'st2common.exceptions.apivalidation.ValueValidationException', 'ValueValidationException', (['"""Values specified as "secret: True" in config schema are automatically decrypted by default. Use of "decrypt_kv" jinja filter is not allowed for such values. Please check the specified values in the config or the default values in the schema."""'], {}), '(\n \'Values specified as "secret: True" in config schema are automatically decrypted by default. Use of "decrypt_kv" jinja filter is not allowed for such values. Please check the specified values in the config or the default values in the schema.\'\n )\n', (5336, 5591), False, 'from st2common.exceptions.apivalidation import ValueValidationException\n'), ((6233, 6249), 'six.text_type', 'six.text_type', (['e'], {}), '(e)\n', (6246, 6249), False, 'import six\n')] |
jrslagle/DS-Unit-3-Sprint-2-SQL-and-Databases | module1-introduction-to-sql/query.py | 8a6b3fd14b6a6833ee3a14b2d8a7db3bee494a14 |
# Look at the charactercreator_character table
# GET_CHARACTERS = """
# SELECT *
# FROM charactercreator_character;
# """
# How many total Characters are there? (302)
TOTAL_CHARACTERS = """
SELECT COUNT(*) as number_of_characters
FROM charactercreator_character;
"""
# How many of each specific subclass?
# TOTAL_SUBCLASS = """
# SELECT
# (SELECT COUNT(*) FROM charactercreator_necromancer) AS necros,
# (SELECT COUNT(*) FROM charactercreator_mage) AS mages,
# (SELECT COUNT(*) FROM charactercreator_thief) AS thiefs,
# (SELECT COUNT(*) FROM charactercreator_cleric) AS clerics,
# (SELECT COUNT(*) FROM charactercreator_fighter) AS fighters;
# """
CLASS = "SELECT COUNT(*) FROM charactercreator_"
# How many total Items? (174)
TOTAL_ITEMS = """
SELECT COUNT(item_id) as items
FROM armory_item;
"""
# How many of the Items are weapons? (37)
WEAPONS = """
SELECT COUNT(item_ptr_id)
FROM armory_weapon;
"""
# How many of the items are not weapons? (137)
NON_WEAPONS = """
SELECT COUNT(items.name)
FROM armory_item as items
WHERE items.item_id NOT IN(
SELECT armory_weapon.item_ptr_id
FROM armory_weapon);
"""
# How many Items does each character have? (Return first 20 rows)
CHARACTER_ITEMS = """
SELECT character.name as "character_name", COUNT(inventory.id) as "#_of_items"
FROM charactercreator_character AS character, charactercreator_character_inventory AS inventory
WHERE character.character_id = inventory.character_id
GROUP BY character.name
ORDER BY character.name
LIMIT 20;
"""
# How many Weapons does each character have? (Return first 20 rows)
CHARACTER_WEAPONS = """
SELECT character.name as "character_name", COUNT(weapon.item_ptr_id) as "#_of_weapons"
FROM charactercreator_character AS character, charactercreator_character_inventory AS inventory, armory_weapon as weapon
WHERE character.character_id = inventory.character_id AND inventory.item_id = weapon.item_ptr_id
GROUP BY character.name
ORDER BY character.name
LIMIT 20;
"""
# On average, how many Items does each Character have? (3.02)
AVG_CHARACTER_ITEMS = """
SELECT
AVG("#_of_items") as "avg_#_of_items"
FROM
(
SELECT
COUNT(inventory.id) AS "#_of_items"
FROM
charactercreator_character AS character,
charactercreator_character_inventory AS inventory
WHERE
character.character_id = inventory.character_id
GROUP BY character.name
);
"""
# On average, how many Weapons does each character have? (0.67)
AVG_CHARACTER_WEAPONS = """
SELECT
AVG(weapon_count) as avg_weapons_per_char
FROM (
SELECT
character.character_id,
COUNT(DISTINCT weapon.item_ptr_id) as weapon_count
FROM
charactercreator_character AS character
LEFT JOIN charactercreator_character_inventory inventory -- characters may have zero items
ON character.character_id = inventory.character_id
LEFT JOIN armory_weapon weapon -- many items are not weapons, so only retain weapons
ON inventory.item_id = weapon.item_ptr_id
GROUP BY character.character_id
) subq;
"""
| [] |
MickaelRigault/pixelproject | pixelproject/grid.py | d98db99a8e69eafa7a979c02a099e4c07f5fd568 | #! /usr/bin/env python
#
import warnings
import numpy as np
UNIT_SQUARE = np.asarray([[0,0],[0,1],[1,1],[1,0]])-0.5
from propobject import BaseObject
from shapely import geometry
import pandas
import geopandas
# ======================= #
# #
# Functions #
# #
# ======================= #
def get_simple_grid(xbounds, ybounds, shift_origin=None):
""" """
xbounds = np.atleast_1d(xbounds)
if len(xbounds)==1:
xmin,xmax = 0,xbounds[0]
else:
xmin,xmax = xbounds
ybounds = np.atleast_1d(ybounds)
if len(ybounds)==1:
ymin,ymax = 0,ybounds[0]
else:
ymin,ymax = ybounds
pixels = np.mgrid[xmin:xmax,ymin:ymax]
pixels2_flat = np.concatenate(pixels.T, axis=0)
if shift_origin is not None:
# not += because conflict between int and float array
pixels2_flat = pixels2_flat+ shift_origin
return Grid(pixels2_flat, UNIT_SQUARE)
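# Illustrative calls (both build the same 3x2 grid of unit-square pixels):
#   grid = get_simple_grid(3, 2)            # bounds given as sizes: x in [0, 3), y in [0, 2)
#   grid = get_simple_grid([0, 3], [0, 2])  # same grid with explicit (min, max) bounds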
# ======================= #
# #
# Classes #
# #
# ======================= #
class GridProjector( BaseObject ):
""" """
PROPERTIES = ["gridin", "gridout"]
DERIVED_PROPERTIES = ["gridinterest"]
def __init__(self, grid_in=None, grid_out=None):
""" """
if grid_in is not None:
self.set_grid(grid_in, "in")
if grid_out is not None:
self.set_grid(grid_out, "out")
# =================== #
# Methods #
# =================== #
# --------- #
# SETTER #
# --------- #
def set_grid(self, grid, which="in"):
""" """
if which not in ["in","out"]:
raise ValueError("Which should either be 'in' our 'out'")
self._properties["grid%s"%which] = grid
self._derived_properties["gridinterest"] = None
def _measure_gridinterest_(self):
""" """
# -- internal -- #
def localdef_get_area(l):
return l.geometry.area/self.gridin.geodataframe.iloc[l.id_1].geometry.area
# -------------- #
if self.gridin is not None and self.gridout is not None:
#
# Most likely there is a faster method if is_shape_unique
#
self._derived_properties["gridinterest"] = geopandas.overlay(self.gridin.geodataframe,
self.gridout.geodataframe,
how='intersection')
self.gridinterest["area"] = self.gridinterest.apply(localdef_get_area, axis=1)
else:
warnings.warn("Cannot measure gridinterest, because gridin and/or gridout is/are None")
# -------------- #
# Measurement #
# -------------- #
def project_data(self, data, as_serie=True, use="sum"):
""" Use gridinteresect
Parameters
----------
data: [ndarray or string or pandas.Serie]
data associated to gridin that should be projected in gridout.
could be:
- ndarray: must have the same length as gridin
- string: name of a gridin column (pandas)
- pandas.Serie: serie that will be matched with gridin
"""
# Calcul itself
projected_data = self._project_data_(self._parse_data_(data), use=use)
if as_serie:
return projected_data
projected_data_array = np.zeros( len(self.gridout.geodataframe) )
projected_data_array[projected_data.index.values] = projected_data.values
return projected_data_array
def _project_data_(self, data, use="sum"):
""" """
self.gridinterest["_tmp"] = data[ self.gridin.geodataframe.loc[ self.gridinterest["id_1"]].index
] * self.gridinterest["area"]
return getattr(self.gridinterest.groupby("id_2")["_tmp"],use)()
def _parse_data_(self,data):
"""
Parameters
----------
data: [ndarray or string or pandas.Serie]
data associated to gridin that should be projected in gridout.
could be:
- ndarray: must have the same length as gridin
- string: name of a gridin column (pandas)
- pandas.Serie: serie that will be matched with gridin
Returns
-------
ndarray
"""
if type(data) == str:
if data not in self.gridin.geodataframe.columns:
raise ValueError("Unknown gridin column '%s'"%data)
return self.gridin.geodataframe[data].values
elif type(data) == pandas.Series:
return data.values
elif len(data) != len(self.gridin.geodataframe):
raise ValueError("data given as ndarray but lengthes do not match")
return data
# =================== #
# Properties #
# =================== #
@property
def gridin(self):
""" """
return self._properties["gridin"]
@property
def gridout(self):
""" """
return self._properties["gridout"]
@property
def gridinterest(self):
""" """
if self._derived_properties["gridinterest"] is None:
self._measure_gridinterest_()
return self._derived_properties["gridinterest"]
class Grid( BaseObject ):
PROPERTIES = ["pixels", "shape"]
SIDE_PROPERTIES = ["indexes"]
DERIVED_PROPERTIES = ["vertices","geodataframe", "triangulation"]
def __init__(self, pixels=None, shape=UNIT_SQUARE, indexes=None):
""" """
if pixels is not None:
self.set_pixels(pixels,shape=shape)
if indexes is not None:
self.set_indexes(indexes)
# =================== #
# Methods #
# =================== #
@classmethod
def from_stamps(cls, stamp, origin=[0,0]):
""" stamps are 2d array, something you could to ax.imshow(stamps)
data will be stored as 'data' in the grid's dataframe
"""
this = get_simple_grid(*np.shape(stamp), shift_origin=origin)
this.add_data(np.ravel(stamp), "data")
return this
@classmethod
def from_vertices(cls, vertices, indexes=None):
""" directly provide the vertices
Parameters:
-----------
vertices: [list of array or dictionary]
The vertices of all the grid entries.
Could have two format:
- list of array: [[vert_1],[vert_2],....], then you may want to provide indexes
- dictionary: {id_1:vert_1,id_2: vert_2, ...}
if a dictionary is provided, the indexes will be set by the vertices.
indexes: [list or None] -optional-
(Ignored if vertices is a dict)
If you provide vertices as a list of vertices, you can provide the
indexes of each of the vertices.
-> if None, then indexes = np.arange(len(vertices))
Returns
-------
Grid
"""
this = cls()
if type(vertices) is dict:
indexes, vertices = list(vertices.keys()), list(vertices.values())
this.set_vertices(vertices)
if indexes is not None:
this.set_indexes(indexes)
return this
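    # Illustrative sketch (hypothetical vertices) of the two accepted input forms:
    #
    #   >>> verts = UNIT_SQUARE[None, :, :] + np.asarray([[0, 0], [1, 0]])[:, None, :]
    #   >>> g1 = Grid.from_vertices(list(verts))                  # list form, indexes default to 0..N-1
    #   >>> g2 = Grid.from_vertices({10: verts[0], 11: verts[1]}) # dict form, keys become the indexes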
@classmethod
def set_from(cls, datainput):
""" Creates a new Grid objects from the given input data:
Parameters
----------
datainput: [geopandas.geodataframe.GeoDataFrame or ndarray]
this could either be a:
- geodataframe (and this calls self.set_geodataframe)
- geoSeries
- ndarray: if 3-shaped, this calls set_vertices ;
if 2-shaped, this calls set_pixels.
Returns
-------
Grid
"""
this = cls()
if type(datainput) == geopandas.geodataframe.GeoDataFrame:
this.set_geodataframe(datainput)
return this
if type(datainput) == np.ndarray:
if len(np.shape( datainput) ) == 3: # vertices
this.set_vertices(datainput)
            elif len(np.shape( datainput) ) == 2: # pixels
this.set_pixels(datainput)
else:
raise TypeError("cannot parse the shape of the given datainput")
return this
raise TypeError("cannot parse the format of the given input")
# --------- #
# SETTER #
# --------- #
def set_indexes(self, indexes, update=True):
""" provide the indexes associated to each pixels
Parameters
----------
indexes: [ndarray]
indexes associated to the pixels.
            This should have a length equal to the number of pixels (if any).
update: [bool] -optional-
should the geodataframe be updated ?
[use True if you are not sure]
Returns
-------
Void
"""
if self.pixels is not None and len(indexes) != self.npixels:
raise AssertionError("not the same number of indexes as the number of pixels")
self._side_properties["indexes"] = indexes
if update:
self._update_geodataframe_()
def set_pixels(self, pixels, shape=None, update=True):
""" provide the pixels.
        Pixels define the positions upon which the geometries are defined.
NB: vertices = pixels+shape
"""
# Setting the pixels
if np.shape(pixels)[-1] != 2:
raise ValueError("pixels must be [N,2] arrays")
self._properties["pixels"] = np.asarray(pixels)
if shape is not None:
self.set_pixelshapes(shape, update=False)
if update:
self._update_geodataframe_()
def set_pixelshapes(self, shape, update=True):
""" """
# Setting the pixel shape.s
if len(np.shape(shape))==2:
self._properties["shape"] = np.asarray(shape)
elif len(np.shape(shape))==3:
if self.pixels is not None and np.shape(shape)[0] != self.npixels:
                raise AssertionError("`shape` must be unique or have the same length as pixels")
self._properties["shape"] = np.asarray(shape)
else:
raise ValueError("Cannot parse the given shape, must be [M,2] or [N,M,2] when N is the number of pixel and M the number of vertices")
if update:
self._update_geodataframe_()
def set_vertices(self, vertices, overwrite=False, **kwargs):
""" """
if not overwrite and (self.pixels is not None and self.shape is not None):
raise ValueError("Pixels and shape already defined. set the overwrite option to true, to update vertices")
try:
pixels = np.mean(vertices, axis=1)
except:
# Means vertices have different size.
self._derived_properties["vertices"] = vertices
pixels = np.asarray([np.mean(v_, axis=0) for v_ in vertices])
self.set_pixels(pixels, None, **kwargs)
return
self._derived_properties["vertices"] = np.asarray(vertices)
shape = self.vertices - pixels[:,None]
shape_unique = np.unique(shape, axis=0)
if len(shape_unique)==1:
shape = shape_unique[0]
self.set_pixels(pixels, shape, **kwargs)
def set_geodataframe(self, geodataframe, overwrite=False):
""" """
if not overwrite and (self.pixels is not None and self.shape is not None):
raise ValueError("Pixels and shape already defined. set the overwrite option to true, to update geodataframe")
if "geometry" not in geodataframe.columns:
raise TypeError("The given geodataframe does not have 'geometry' column. It is required")
self._derived_properties["geodataframe"] = geodataframe
if "id" not in geodataframe.columns:
self.geodataframe["id"] = self.indexes if self.pixels is not None else np.arange( len(geodataframe) )
# - get the vertices:
def get_verts(poly_):
return np.stack(poly_.exterior.xy).T[:-1]
vertices = geodataframe["geometry"].apply(get_verts).values
self.set_vertices(vertices, update=False) # don't update the geodataframe
# --------- #
# UPDATE #
# --------- #
def _update_geodataframe_(self):
""" """
dataseries = self.get_geoseries()
x,y = self.pixels.T
self._derived_properties["geodataframe"] = \
geopandas.GeoDataFrame({'geometry': dataseries,
'id':self.indexes,
'x':x,'y':y})
def add_data(self, data, name, indexes=None, inplace=True):
""" """
if indexes is None:
indexes = self.indexes
s_ = pandas.Series(data, name=name, index=indexes)
if not inplace:
return self.geodataframe.join(s_)
self._derived_properties["geodataframe"] = self.geodataframe.join(s_)
# --------- #
# GETTER #
# --------- #
def get_geoseries(self):
""" build a new geodataframe and returns it. """
import geopandas
return geopandas.GeoSeries([geometry.Polygon(v) for v in self.vertices])
def get_triangulation_grid(self):
""" Returns a grid of triangulation. """
return Grid.set_from( np.concatenate(self.triangulation, axis=0) )
def get_pixels_in(self, polygon, invert=False):
""" checks if the centroid of the pixel is in or out the given shapely polygon.
Parameters
----------
polygon: [shapely.geometry]
reference polygon
invert: [bool] -optional-
            Get the pixels inside the polygon [invert=False] or outside it [invert=True]
Returns
-------
list of pixels and boolean mask
"""
from shapely import vectorized
flagin = vectorized.contains(polygon, *self.pixels.T)
if invert:
flagin = ~flagin
return self.pixels[flagin], flagin
# --------- #
# Project #
# --------- #
def project_to(self, othergrid, column="*", asgrid=True, use="sum"):
""" project data in the given grid
Parameters
----------
othergrid: [Grid]
New grid where data should be projected to
column: [str/None/list of] -optional-
Which data should be projected ?
If None or '*' all the non-structural columns will be
(structural columns are 'geometry', 'id', 'x', 'y')
asgrid: [bool] -optional-
Should this return a new Grid (actually same object as othergrid)
or a dict [asgrid=False]?
Returns
-------
Grid or dict (see asgrid)
"""
gproj = GridProjector(self, othergrid)
if column is None or column in ["*","all"]:
column = [k for k in self.geodataframe if k not in ['geometry', 'id', 'x', 'y']]
datas = {k:gproj.project_data(k, use=use) for k in column}
if not asgrid:
return datas
# building and setting the new grid
gout = othergrid.__class__.set_from(othergrid.geodataframe)
for k in column:
gout.add_data(datas[k],k)
return gout
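    # Illustrative sketch (hypothetical data column) of projecting between two grids:
    #
    #   >>> g_in = get_simple_grid(4, 4)
    #   >>> g_in.add_data(np.arange(16), "data")
    #   >>> g_out = get_simple_grid(4, 4, shift_origin=[0.25, 0.25])
    #   >>> g_proj = g_in.project_to(g_out)   # returns a Grid whose 'data' column is the projected sum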
def project_to_wcs(self, wcs_, asgrid=True, **kwargs):
""" provide an astropy.wcs.WCS and this will project
the current grid into it (assuming grid's vertices coordinates are in pixels)
Parameters
----------
wcs_: [astropy.wcs.WCS]
The world coordinate solution
asgrid: [bool] -optional-
            Should this return a loaded Grid object or an array of vertices (in degrees)?
**kwargs goes to wcs_.all_pix2world
Returns
-------
Grid or array (see asgrid)
"""
verts = self.vertices
verts_shape = np.shape(verts)
flatten_verts = np.concatenate(verts, axis=0)
#
flatten_verts_wcs = np.asarray(wcs_.all_pix2world(flatten_verts[:,0],
flatten_verts[:,1], 0,
**kwargs)).T
#
verts_wcs = flatten_verts_wcs.reshape(verts_shape)
if not asgrid:
return verts_wcs
g_wcs = Grid.set_from(verts_wcs)
g_wcs.geodataframe["x_pix"],g_wcs.geodataframe["y_pix"] = self.pixels.T
return g_wcs
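    # Illustrative sketch (``grid`` and ``wcs`` are hypothetical: a Grid instance and an
    # astropy.wcs.WCS solution whose pixel frame matches the grid's vertices):
    #
    #   >>> g_sky = grid.project_to_wcs(wcs)                    # new Grid, vertices in degrees
    #   >>> verts_deg = grid.project_to_wcs(wcs, asgrid=False)  # raw vertex array instead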
def evaluate(self, func, vectorized=True):
""" Evaluate the given function throughout the grid.
This evulation is using polynome triangulation to integrate the
given function inside the polyname using triangle integration.
-> dependency: the integration is made using quadpy.
Examples:
# Remark the np.stack(x, axis=-1).
# This is mandatory since integration is going to send
# x = [ [[....],[...]], [[....],[...]], ... ] for triangles
```python
def get_2dgauss(x, mu=[4,4], cov=[[1,0],[0,2]]):
""" """
return stats.multivariate_normal.pdf(np.stack(x, axis=-1), mean=mu, cov=cov)
```
"""
try:
import quadpy
except ImportError:
raise ImportError("Integration is made using quadpy. pip install quadpy")
# Is Triangulation made ?
if self._derived_properties["triangulation"] is None:
warnings.warn("triangles not defined: deriving triangulation.")
self.derive_triangulation()
# Let's get the triangles
trs = np.stack(self.triangulation)
shape_trs = np.shape(trs)
if len(shape_trs)==4 and vectorized: # All Polygon have the same topology (same amount of vertices)
tr_flat = np.stack(np.concatenate(trs, axis=0), axis=-2)
val = quadpy.triangle.strang_fix_cowper_09().integrate(func,tr_flat).reshape(shape_trs[:2])
else:
val = np.asarray([quadpy.triangle.strang_fix_cowper_09().integrate(func,np.stack(t_, axis=-2)) for t_ in trs])
return np.sum(val, axis=1)
def derive_triangulation(self, fast_unique=True):
""" """
def triangulate(geom):
""" Return triangulate format that quadpy likes """
from shapely import ops
triangles = ops.triangulate(geom)
return np.stack([np.asarray(t.exterior.coords.xy).T[:-1] for t in triangles])
if not self.is_shape_unique or not fast_unique:
self._derived_properties["triangulation"] = self.geodataframe["geometry"].apply(triangulate)
else:
self._derived_properties["triangulation"] = self.pixels[:,None,None] + triangulate(geometry.Polygon(self.shape))
# --------- #
# PLOTTER #
# --------- #
def show(self, column=None, ax=None, edgecolor="0.7", facecolor="None", **kwargs):
""" """
if column is not None:
facecolor=None
return self.geodataframe.plot(column, ax=ax,facecolor=facecolor,
edgecolor=edgecolor, **kwargs)
# =================== #
# Properties #
# =================== #
@property
def pixels(self):
""" """
return self._properties["pixels"]
@property
def npixels(self):
""" """
return len(self.pixels)
@property
def shape(self):
""" """
if self._properties["shape"] is None:
self._properties["shape"] = UNIT_SQUARE
return self._properties["shape"]
# -- Side
@property
def indexes(self):
""" """
if self._side_properties["indexes"] is None:
self._side_properties["indexes"] = np.arange(self.npixels)
return self._side_properties["indexes"]
# -- Derived
@property
def vertices(self):
""" """
if self._derived_properties["vertices"] is None and (self.pixels is not None and self.shape is not None):
self._derived_properties["vertices"] = self.pixels[:,None]+self.shape
return self._derived_properties["vertices"]
@property
def is_shape_unique(self):
""" """
return len(np.shape(self.shape))==2
@property
def geodataframe(self):
""" """
if self._derived_properties["geodataframe"] is None:
self._update_geodataframe_()
return self._derived_properties["geodataframe"]
@property
def triangulation(self):
""" Triangulation of the vertices. Based on Delaunay tesselation, see shapely.ops.triangulate """
if self._derived_properties["triangulation"] is None:
self.derive_triangulation()
return self._derived_properties["triangulation"]
| [((75, 119), 'numpy.asarray', 'np.asarray', (['[[0, 0], [0, 1], [1, 1], [1, 0]]'], {}), '([[0, 0], [0, 1], [1, 1], [1, 0]])\n', (85, 119), True, 'import numpy as np\n'), ((438, 460), 'numpy.atleast_1d', 'np.atleast_1d', (['xbounds'], {}), '(xbounds)\n', (451, 460), True, 'import numpy as np\n'), ((579, 601), 'numpy.atleast_1d', 'np.atleast_1d', (['ybounds'], {}), '(ybounds)\n', (592, 601), True, 'import numpy as np\n'), ((761, 793), 'numpy.concatenate', 'np.concatenate', (['pixels.T'], {'axis': '(0)'}), '(pixels.T, axis=0)\n', (775, 793), True, 'import numpy as np\n'), ((9878, 9896), 'numpy.asarray', 'np.asarray', (['pixels'], {}), '(pixels)\n', (9888, 9896), True, 'import numpy as np\n'), ((11428, 11448), 'numpy.asarray', 'np.asarray', (['vertices'], {}), '(vertices)\n', (11438, 11448), True, 'import numpy as np\n'), ((11519, 11543), 'numpy.unique', 'np.unique', (['shape'], {'axis': '(0)'}), '(shape, axis=0)\n', (11528, 11543), True, 'import numpy as np\n'), ((12885, 12973), 'geopandas.GeoDataFrame', 'geopandas.GeoDataFrame', (["{'geometry': dataseries, 'id': self.indexes, 'x': x, 'y': y}"], {}), "({'geometry': dataseries, 'id': self.indexes, 'x': x,\n 'y': y})\n", (12907, 12973), False, 'import geopandas\n'), ((13205, 13250), 'pandas.Series', 'pandas.Series', (['data'], {'name': 'name', 'index': 'indexes'}), '(data, name=name, index=indexes)\n', (13218, 13250), False, 'import pandas\n'), ((14333, 14377), 'shapely.vectorized.contains', 'vectorized.contains', (['polygon', '*self.pixels.T'], {}), '(polygon, *self.pixels.T)\n', (14352, 14377), False, 'from shapely import vectorized\n'), ((16395, 16410), 'numpy.shape', 'np.shape', (['verts'], {}), '(verts)\n', (16403, 16410), True, 'import numpy as np\n'), ((16439, 16468), 'numpy.concatenate', 'np.concatenate', (['verts'], {'axis': '(0)'}), '(verts, axis=0)\n', (16453, 16468), True, 'import numpy as np\n'), ((18139, 18167), 'numpy.stack', 'np.stack', (['self.triangulation'], {}), '(self.triangulation)\n', (18147, 18167), True, 'import numpy as np\n'), ((18188, 18201), 'numpy.shape', 'np.shape', (['trs'], {}), '(trs)\n', (18196, 18201), True, 'import numpy as np\n'), ((18645, 18664), 'numpy.sum', 'np.sum', (['val'], {'axis': '(1)'}), '(val, axis=1)\n', (18651, 18664), True, 'import numpy as np\n'), ((2350, 2445), 'geopandas.overlay', 'geopandas.overlay', (['self.gridin.geodataframe', 'self.gridout.geodataframe'], {'how': '"""intersection"""'}), "(self.gridin.geodataframe, self.gridout.geodataframe, how=\n 'intersection')\n", (2367, 2445), False, 'import geopandas\n'), ((2704, 2796), 'warnings.warn', 'warnings.warn', (['"""Cannot measure gridinterest, because gridin and/or gridout is/are None"""'], {}), "(\n 'Cannot measure gridinterest, because gridin and/or gridout is/are None')\n", (2717, 2796), False, 'import warnings\n'), ((6322, 6337), 'numpy.ravel', 'np.ravel', (['stamp'], {}), '(stamp)\n', (6330, 6337), True, 'import numpy as np\n'), ((10255, 10272), 'numpy.asarray', 'np.asarray', (['shape'], {}), '(shape)\n', (10265, 10272), True, 'import numpy as np\n'), ((11083, 11108), 'numpy.mean', 'np.mean', (['vertices'], {'axis': '(1)'}), '(vertices, axis=1)\n', (11090, 11108), True, 'import numpy as np\n'), ((13772, 13814), 'numpy.concatenate', 'np.concatenate', (['self.triangulation'], {'axis': '(0)'}), '(self.triangulation, axis=0)\n', (13786, 13814), True, 'import numpy as np\n'), ((17974, 18037), 'warnings.warn', 'warnings.warn', (['"""triangles not defined: deriving triangulation."""'], {}), "('triangles not defined: deriving 
triangulation.')\n", (17987, 18037), False, 'import warnings\n'), ((18908, 18929), 'shapely.ops.triangulate', 'ops.triangulate', (['geom'], {}), '(geom)\n', (18923, 18929), False, 'from shapely import ops\n'), ((20319, 20342), 'numpy.arange', 'np.arange', (['self.npixels'], {}), '(self.npixels)\n', (20328, 20342), True, 'import numpy as np\n'), ((6262, 6277), 'numpy.shape', 'np.shape', (['stamp'], {}), '(stamp)\n', (6270, 6277), True, 'import numpy as np\n'), ((9754, 9770), 'numpy.shape', 'np.shape', (['pixels'], {}), '(pixels)\n', (9762, 9770), True, 'import numpy as np\n'), ((10194, 10209), 'numpy.shape', 'np.shape', (['shape'], {}), '(shape)\n', (10202, 10209), True, 'import numpy as np\n'), ((10526, 10543), 'numpy.asarray', 'np.asarray', (['shape'], {}), '(shape)\n', (10536, 10543), True, 'import numpy as np\n'), ((13609, 13628), 'shapely.geometry.Polygon', 'geometry.Polygon', (['v'], {}), '(v)\n', (13625, 13628), False, 'from shapely import geometry\n'), ((18350, 18377), 'numpy.concatenate', 'np.concatenate', (['trs'], {'axis': '(0)'}), '(trs, axis=0)\n', (18364, 18377), True, 'import numpy as np\n'), ((20809, 20829), 'numpy.shape', 'np.shape', (['self.shape'], {}), '(self.shape)\n', (20817, 20829), True, 'import numpy as np\n'), ((8290, 8309), 'numpy.shape', 'np.shape', (['datainput'], {}), '(datainput)\n', (8298, 8309), True, 'import numpy as np\n'), ((10290, 10305), 'numpy.shape', 'np.shape', (['shape'], {}), '(shape)\n', (10298, 10305), True, 'import numpy as np\n'), ((12423, 12450), 'numpy.stack', 'np.stack', (['poly_.exterior.xy'], {}), '(poly_.exterior.xy)\n', (12431, 12450), True, 'import numpy as np\n'), ((19291, 19319), 'shapely.geometry.Polygon', 'geometry.Polygon', (['self.shape'], {}), '(self.shape)\n', (19307, 19319), False, 'from shapely import geometry\n'), ((8396, 8415), 'numpy.shape', 'np.shape', (['datainput'], {}), '(datainput)\n', (8404, 8415), True, 'import numpy as np\n'), ((11268, 11287), 'numpy.mean', 'np.mean', (['v_'], {'axis': '(0)'}), '(v_, axis=0)\n', (11275, 11287), True, 'import numpy as np\n'), ((18590, 18611), 'numpy.stack', 'np.stack', (['t_'], {'axis': '(-2)'}), '(t_, axis=-2)\n', (18598, 18611), True, 'import numpy as np\n'), ((10354, 10369), 'numpy.shape', 'np.shape', (['shape'], {}), '(shape)\n', (10362, 10369), True, 'import numpy as np\n'), ((18406, 18444), 'quadpy.triangle.strang_fix_cowper_09', 'quadpy.triangle.strang_fix_cowper_09', ([], {}), '()\n', (18442, 18444), False, 'import quadpy\n'), ((18536, 18574), 'quadpy.triangle.strang_fix_cowper_09', 'quadpy.triangle.strang_fix_cowper_09', ([], {}), '()\n', (18572, 18574), False, 'import quadpy\n'), ((18959, 18991), 'numpy.asarray', 'np.asarray', (['t.exterior.coords.xy'], {}), '(t.exterior.coords.xy)\n', (18969, 18991), True, 'import numpy as np\n')] |
AnirudhDagar/numpy | numpy/lib/format.py | 77bc3225e6f4badf83190ec300a0e10e56949644 | """
Binary serialization
NPY format
==========
A simple format for saving numpy arrays to disk with the full
information about them.
The ``.npy`` format is the standard binary file format in NumPy for
persisting a *single* arbitrary NumPy array on disk. The format stores all
of the shape and dtype information necessary to reconstruct the array
correctly even on another machine with a different architecture.
The format is designed to be as simple as possible while achieving
its limited goals.
The ``.npz`` format is the standard format for persisting *multiple* NumPy
arrays on disk. A ``.npz`` file is a zip file containing multiple ``.npy``
files, one for each array.
Capabilities
------------
- Can represent all NumPy arrays including nested record arrays and
object arrays.
- Represents the data in its native binary form.
- Supports Fortran-contiguous arrays directly.
- Stores all of the necessary information to reconstruct the array
including shape and dtype on a machine of a different
architecture. Both little-endian and big-endian arrays are
supported, and a file with little-endian numbers will yield
a little-endian array on any machine reading the file. The
types are described in terms of their actual sizes. For example,
if a machine with a 64-bit C "long int" writes out an array with
"long ints", a reading machine with 32-bit C "long ints" will yield
an array with 64-bit integers.
- Is straightforward to reverse engineer. Datasets often live longer than
the programs that created them. A competent developer should be
able to create a solution in their preferred programming language to
read most ``.npy`` files that they have been given without much
documentation.
- Allows memory-mapping of the data. See `open_memmap`.
- Can be read from a filelike stream object instead of an actual file.
- Stores object arrays, i.e. arrays containing elements that are arbitrary
Python objects. Files with object arrays are not to be mmapable, but
can be read and written to disk.
Limitations
-----------
- Arbitrary subclasses of numpy.ndarray are not completely preserved.
Subclasses will be accepted for writing, but only the array data will
be written out. A regular numpy.ndarray object will be created
upon reading the file.
.. warning::
Due to limitations in the interpretation of structured dtypes, dtypes
with fields with empty names will have the names replaced by 'f0', 'f1',
etc. Such arrays will not round-trip through the format entirely
accurately. The data is intact; only the field names will differ. We are
working on a fix for this. This fix will not require a change in the
file format. The arrays with such structures can still be saved and
restored, and the correct dtype may be restored by using the
``loadedarray.view(correct_dtype)`` method.
File extensions
---------------
We recommend using the ``.npy`` and ``.npz`` extensions for files saved
in this format. This is by no means a requirement; applications may wish
to use these file formats but use an extension specific to the
application. In the absence of an obvious alternative, however,
we suggest using ``.npy`` and ``.npz``.
Version numbering
-----------------
The version numbering of these formats is independent of NumPy version
numbering. If the format is upgraded, the code in `numpy.io` will still
be able to read and write Version 1.0 files.
Format Version 1.0
------------------
The first 6 bytes are a magic string: exactly ``\\x93NUMPY``.
The next 1 byte is an unsigned byte: the major version number of the file
format, e.g. ``\\x01``.
The next 1 byte is an unsigned byte: the minor version number of the file
format, e.g. ``\\x00``. Note: the version of the file format is not tied
to the version of the numpy package.
The next 2 bytes form a little-endian unsigned short int: the length of
the header data HEADER_LEN.
The next HEADER_LEN bytes form the header data describing the array's
format. It is an ASCII string which contains a Python literal expression
of a dictionary. It is terminated by a newline (``\\n``) and padded with
spaces (``\\x20``) to make the total of
``len(magic string) + 2 + len(length) + HEADER_LEN`` be evenly divisible
by 64 for alignment purposes.
The dictionary contains three keys:
"descr" : dtype.descr
An object that can be passed as an argument to the `numpy.dtype`
constructor to create the array's dtype.
"fortran_order" : bool
Whether the array data is Fortran-contiguous or not. Since
Fortran-contiguous arrays are a common form of non-C-contiguity,
we allow them to be written directly to disk for efficiency.
"shape" : tuple of int
The shape of the array.
For repeatability and readability, the dictionary keys are sorted in
alphabetic order. This is for convenience only. A writer SHOULD implement
this if possible. A reader MUST NOT depend on this.
Following the header comes the array data. If the dtype contains Python
objects (i.e. ``dtype.hasobject is True``), then the data is a Python
pickle of the array. Otherwise the data is the contiguous (either C-
or Fortran-, depending on ``fortran_order``) bytes of the array.
Consumers can figure out the number of bytes by multiplying the number
of elements given by the shape (noting that ``shape=()`` means there is
1 element) by ``dtype.itemsize``.
Format Version 2.0
------------------
The version 1.0 format only allowed the array header to have a total size of
65535 bytes. This can be exceeded by structured arrays with a large number of
columns. The version 2.0 format extends the header size to 4 GiB.
`numpy.save` will automatically save in 2.0 format if the data requires it,
else it will always use the more compatible 1.0 format.
The description of the fourth element of the header therefore has become:
"The next 4 bytes form a little-endian unsigned int: the length of the header
data HEADER_LEN."
Format Version 3.0
------------------
This version replaces the ASCII string (which in practice was latin1) with
a utf8-encoded string, so supports structured types with any unicode field
names.
Notes
-----
The ``.npy`` format, including motivation for creating it and a comparison of
alternatives, is described in the
:doc:`"npy-format" NEP <neps:nep-0001-npy-format>`, however details have
evolved with time and this document is more current.
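For illustration, a minimal sketch of decoding a version 1.0 header by hand
(assuming ``example.npy`` holds a simple dtype written by ``numpy.save``)::
    import ast, struct
    with open('example.npy', 'rb') as f:
        assert f.read(6) == b'\\x93NUMPY'
        major, minor = f.read(2)                      # format version, e.g. (1, 0)
        header_len, = struct.unpack('<H', f.read(2))  # little-endian unsigned short
        header = ast.literal_eval(f.read(header_len).decode('latin1'))
        # header is a dict with the 'descr', 'fortran_order' and 'shape' keys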
"""
import numpy
import io
import warnings
from numpy.lib.utils import safe_eval
from numpy.compat import (
isfileobj, os_fspath, pickle
)
__all__ = []
EXPECTED_KEYS = {'descr', 'fortran_order', 'shape'}
MAGIC_PREFIX = b'\x93NUMPY'
MAGIC_LEN = len(MAGIC_PREFIX) + 2
ARRAY_ALIGN = 64 # plausible values are powers of 2 between 16 and 4096
BUFFER_SIZE = 2**18 # size of buffer for reading npz files in bytes
# difference between version 1.0 and 2.0 is a 4 byte (I) header length
# instead of 2 bytes (H) allowing storage of large structured arrays
_header_size_info = {
(1, 0): ('<H', 'latin1'),
(2, 0): ('<I', 'latin1'),
(3, 0): ('<I', 'utf8'),
}
def _check_version(version):
if version not in [(1, 0), (2, 0), (3, 0), None]:
msg = "we only support format version (1,0), (2,0), and (3,0), not %s"
raise ValueError(msg % (version,))
def magic(major, minor):
""" Return the magic string for the given file format version.
Parameters
----------
major : int in [0, 255]
minor : int in [0, 255]
Returns
-------
magic : str
Raises
------
ValueError if the version cannot be formatted.
"""
if major < 0 or major > 255:
raise ValueError("major version must be 0 <= major < 256")
if minor < 0 or minor > 255:
raise ValueError("minor version must be 0 <= minor < 256")
return MAGIC_PREFIX + bytes([major, minor])
def read_magic(fp):
""" Read the magic string to get the version of the file format.
Parameters
----------
fp : filelike object
Returns
-------
major : int
minor : int
"""
magic_str = _read_bytes(fp, MAGIC_LEN, "magic string")
if magic_str[:-2] != MAGIC_PREFIX:
msg = "the magic string is not correct; expected %r, got %r"
raise ValueError(msg % (MAGIC_PREFIX, magic_str[:-2]))
major, minor = magic_str[-2:]
return major, minor
def _has_metadata(dt):
if dt.metadata is not None:
return True
elif dt.names is not None:
return any(_has_metadata(dt[k]) for k in dt.names)
elif dt.subdtype is not None:
return _has_metadata(dt.base)
else:
return False
def dtype_to_descr(dtype):
"""
Get a serializable descriptor from the dtype.
The .descr attribute of a dtype object cannot be round-tripped through
the dtype() constructor. Simple types, like dtype('float32'), have
a descr which looks like a record array with one field with '' as
a name. The dtype() constructor interprets this as a request to give
a default name. Instead, we construct descriptor that can be passed to
dtype().
Parameters
----------
dtype : dtype
The dtype of the array that will be written to disk.
Returns
-------
descr : object
An object that can be passed to `numpy.dtype()` in order to
replicate the input dtype.
"""
if _has_metadata(dtype):
warnings.warn("metadata on a dtype may be saved or ignored, but will "
"raise if saved when read. Use another form of storage.",
UserWarning, stacklevel=2)
if dtype.names is not None:
# This is a record array. The .descr is fine. XXX: parts of the
# record array with an empty name, like padding bytes, still get
# fiddled with. This needs to be fixed in the C implementation of
# dtype().
return dtype.descr
else:
return dtype.str
def descr_to_dtype(descr):
"""
Returns a dtype based off the given description.
    This is essentially the reverse of `dtype_to_descr()`. It will remove
    the valueless padding fields (i.e. fields with blank names) and then
    convert the description to its corresponding dtype.
Parameters
----------
descr : object
        The object retrieved by dtype.descr. Can be passed to
`numpy.dtype()` in order to replicate the input dtype.
Returns
-------
dtype : dtype
The dtype constructed by the description.
"""
if isinstance(descr, str):
# No padding removal needed
return numpy.dtype(descr)
elif isinstance(descr, tuple):
# subtype, will always have a shape descr[1]
dt = descr_to_dtype(descr[0])
return numpy.dtype((dt, descr[1]))
titles = []
names = []
formats = []
offsets = []
offset = 0
for field in descr:
if len(field) == 2:
name, descr_str = field
dt = descr_to_dtype(descr_str)
else:
name, descr_str, shape = field
dt = numpy.dtype((descr_to_dtype(descr_str), shape))
# Ignore padding bytes, which will be void bytes with '' as name
# Once support for blank names is removed, only "if name == ''" needed)
is_pad = (name == '' and dt.type is numpy.void and dt.names is None)
if not is_pad:
title, name = name if isinstance(name, tuple) else (None, name)
titles.append(title)
names.append(name)
formats.append(dt)
offsets.append(offset)
offset += dt.itemsize
return numpy.dtype({'names': names, 'formats': formats, 'titles': titles,
'offsets': offsets, 'itemsize': offset})
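# Round-trip sketch (illustrative only): dtype_to_descr and descr_to_dtype are
# inverses for simple and structured dtypes alike.
#
#   >>> descr_to_dtype(dtype_to_descr(numpy.dtype('<f4'))) == numpy.dtype('<f4')
#   True
#   >>> dt = numpy.dtype([('x', '<i8'), ('y', '<f8')])
#   >>> descr_to_dtype(dtype_to_descr(dt)) == dt
#   True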
def header_data_from_array_1_0(array):
""" Get the dictionary of header metadata from a numpy.ndarray.
Parameters
----------
array : numpy.ndarray
Returns
-------
d : dict
This has the appropriate entries for writing its string representation
to the header of the file.
"""
d = {'shape': array.shape}
if array.flags.c_contiguous:
d['fortran_order'] = False
elif array.flags.f_contiguous:
d['fortran_order'] = True
else:
# Totally non-contiguous data. We will have to make it C-contiguous
# before writing. Note that we need to test for C_CONTIGUOUS first
# because a 1-D array is both C_CONTIGUOUS and F_CONTIGUOUS.
d['fortran_order'] = False
d['descr'] = dtype_to_descr(array.dtype)
return d
def _wrap_header(header, version):
"""
Takes a stringified header, and attaches the prefix and padding to it
"""
import struct
assert version is not None
fmt, encoding = _header_size_info[version]
if not isinstance(header, bytes): # always true on python 3
header = header.encode(encoding)
hlen = len(header) + 1
padlen = ARRAY_ALIGN - ((MAGIC_LEN + struct.calcsize(fmt) + hlen) % ARRAY_ALIGN)
try:
header_prefix = magic(*version) + struct.pack(fmt, hlen + padlen)
except struct.error:
msg = "Header length {} too big for version={}".format(hlen, version)
raise ValueError(msg) from None
# Pad the header with spaces and a final newline such that the magic
# string, the header-length short and the header are aligned on a
# ARRAY_ALIGN byte boundary. This supports memory mapping of dtypes
# aligned up to ARRAY_ALIGN on systems like Linux where mmap()
# offset must be page-aligned (i.e. the beginning of the file).
return header_prefix + header + b' '*padlen + b'\n'
def _wrap_header_guess_version(header):
"""
Like `_wrap_header`, but chooses an appropriate version given the contents
"""
try:
return _wrap_header(header, (1, 0))
except ValueError:
pass
try:
ret = _wrap_header(header, (2, 0))
except UnicodeEncodeError:
pass
else:
warnings.warn("Stored array in format 2.0. It can only be"
"read by NumPy >= 1.9", UserWarning, stacklevel=2)
return ret
header = _wrap_header(header, (3, 0))
warnings.warn("Stored array in format 3.0. It can only be "
"read by NumPy >= 1.17", UserWarning, stacklevel=2)
return header
def _write_array_header(fp, d, version=None):
""" Write the header for an array and returns the version used
Parameters
----------
fp : filelike object
d : dict
This has the appropriate entries for writing its string representation
to the header of the file.
version: tuple or None
None means use oldest that works
explicit version will raise a ValueError if the format does not
allow saving this data. Default: None
"""
header = ["{"]
for key, value in sorted(d.items()):
# Need to use repr here, since we eval these when reading
header.append("'%s': %s, " % (key, repr(value)))
header.append("}")
header = "".join(header)
if version is None:
header = _wrap_header_guess_version(header)
else:
header = _wrap_header(header, version)
fp.write(header)
def write_array_header_1_0(fp, d):
""" Write the header for an array using the 1.0 format.
Parameters
----------
fp : filelike object
d : dict
This has the appropriate entries for writing its string
representation to the header of the file.
"""
_write_array_header(fp, d, (1, 0))
def write_array_header_2_0(fp, d):
""" Write the header for an array using the 2.0 format.
The 2.0 format allows storing very large structured arrays.
.. versionadded:: 1.9.0
Parameters
----------
fp : filelike object
d : dict
This has the appropriate entries for writing its string
representation to the header of the file.
"""
_write_array_header(fp, d, (2, 0))
def read_array_header_1_0(fp):
"""
Read an array header from a filelike object using the 1.0 file format
version.
This will leave the file object located just after the header.
Parameters
----------
fp : filelike object
A file object or something with a `.read()` method like a file.
Returns
-------
shape : tuple of int
The shape of the array.
fortran_order : bool
The array data will be written out directly if it is either
C-contiguous or Fortran-contiguous. Otherwise, it will be made
contiguous before writing it out.
dtype : dtype
The dtype of the file's data.
Raises
------
ValueError
If the data is invalid.
"""
return _read_array_header(fp, version=(1, 0))
def read_array_header_2_0(fp):
"""
Read an array header from a filelike object using the 2.0 file format
version.
This will leave the file object located just after the header.
.. versionadded:: 1.9.0
Parameters
----------
fp : filelike object
A file object or something with a `.read()` method like a file.
Returns
-------
shape : tuple of int
The shape of the array.
fortran_order : bool
The array data will be written out directly if it is either
C-contiguous or Fortran-contiguous. Otherwise, it will be made
contiguous before writing it out.
dtype : dtype
The dtype of the file's data.
Raises
------
ValueError
If the data is invalid.
"""
return _read_array_header(fp, version=(2, 0))
def _filter_header(s):
"""Clean up 'L' in npz header ints.
Cleans up the 'L' in strings representing integers. Needed to allow npz
headers produced in Python2 to be read in Python3.
Parameters
----------
s : string
Npy file header.
Returns
-------
header : str
Cleaned up header.
"""
import tokenize
from io import StringIO
tokens = []
last_token_was_number = False
for token in tokenize.generate_tokens(StringIO(s).readline):
token_type = token[0]
token_string = token[1]
if (last_token_was_number and
token_type == tokenize.NAME and
token_string == "L"):
continue
else:
tokens.append(token)
last_token_was_number = (token_type == tokenize.NUMBER)
return tokenize.untokenize(tokens)
def _read_array_header(fp, version):
"""
see read_array_header_1_0
"""
# Read an unsigned, little-endian short int which has the length of the
# header.
import struct
hinfo = _header_size_info.get(version)
if hinfo is None:
raise ValueError("Invalid version {!r}".format(version))
hlength_type, encoding = hinfo
hlength_str = _read_bytes(fp, struct.calcsize(hlength_type), "array header length")
header_length = struct.unpack(hlength_type, hlength_str)[0]
header = _read_bytes(fp, header_length, "array header")
header = header.decode(encoding)
# The header is a pretty-printed string representation of a literal
# Python dictionary with trailing newlines padded to a ARRAY_ALIGN byte
# boundary. The keys are strings.
# "shape" : tuple of int
# "fortran_order" : bool
# "descr" : dtype.descr
# Versions (2, 0) and (1, 0) could have been created by a Python 2
# implementation before header filtering was implemented.
if version <= (2, 0):
header = _filter_header(header)
try:
d = safe_eval(header)
except SyntaxError as e:
msg = "Cannot parse header: {!r}"
raise ValueError(msg.format(header)) from e
if not isinstance(d, dict):
msg = "Header is not a dictionary: {!r}"
raise ValueError(msg.format(d))
if EXPECTED_KEYS != d.keys():
keys = sorted(d.keys())
msg = "Header does not contain the correct keys: {!r}"
raise ValueError(msg.format(keys))
# Sanity-check the values.
if (not isinstance(d['shape'], tuple) or
not all(isinstance(x, int) for x in d['shape'])):
msg = "shape is not valid: {!r}"
raise ValueError(msg.format(d['shape']))
if not isinstance(d['fortran_order'], bool):
msg = "fortran_order is not a valid bool: {!r}"
raise ValueError(msg.format(d['fortran_order']))
try:
dtype = descr_to_dtype(d['descr'])
except TypeError as e:
msg = "descr is not a valid dtype descriptor: {!r}"
raise ValueError(msg.format(d['descr'])) from e
return d['shape'], d['fortran_order'], dtype
def write_array(fp, array, version=None, allow_pickle=True, pickle_kwargs=None):
"""
Write an array to an NPY file, including a header.
If the array is neither C-contiguous nor Fortran-contiguous AND the
file_like object is not a real file object, this function will have to
copy data in memory.
Parameters
----------
fp : file_like object
An open, writable file object, or similar object with a
``.write()`` method.
array : ndarray
The array to write to disk.
version : (int, int) or None, optional
The version number of the format. None means use the oldest
supported version that is able to store the data. Default: None
allow_pickle : bool, optional
Whether to allow writing pickled data. Default: True
pickle_kwargs : dict, optional
Additional keyword arguments to pass to pickle.dump, excluding
'protocol'. These are only useful when pickling objects in object
arrays on Python 3 to Python 2 compatible format.
Raises
------
ValueError
If the array cannot be persisted. This includes the case of
allow_pickle=False and array being an object array.
Various other errors
If the array contains Python objects as part of its dtype, the
process of pickling them may raise various errors if the objects
are not picklable.
"""
_check_version(version)
_write_array_header(fp, header_data_from_array_1_0(array), version)
if array.itemsize == 0:
buffersize = 0
else:
# Set buffer size to 16 MiB to hide the Python loop overhead.
buffersize = max(16 * 1024 ** 2 // array.itemsize, 1)
if array.dtype.hasobject:
# We contain Python objects so we cannot write out the data
# directly. Instead, we will pickle it out
if not allow_pickle:
raise ValueError("Object arrays cannot be saved when "
"allow_pickle=False")
if pickle_kwargs is None:
pickle_kwargs = {}
pickle.dump(array, fp, protocol=3, **pickle_kwargs)
elif array.flags.f_contiguous and not array.flags.c_contiguous:
if isfileobj(fp):
array.T.tofile(fp)
else:
for chunk in numpy.nditer(
array, flags=['external_loop', 'buffered', 'zerosize_ok'],
buffersize=buffersize, order='F'):
fp.write(chunk.tobytes('C'))
else:
if isfileobj(fp):
array.tofile(fp)
else:
for chunk in numpy.nditer(
array, flags=['external_loop', 'buffered', 'zerosize_ok'],
buffersize=buffersize, order='C'):
fp.write(chunk.tobytes('C'))
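# Round-trip sketch (illustrative only): writing to and reading back from an
# in-memory buffer instead of a real file.
#
#   >>> buf = io.BytesIO()
#   >>> a = numpy.arange(6).reshape(2, 3)
#   >>> write_array(buf, a)
#   >>> _ = buf.seek(0)
#   >>> numpy.array_equal(read_array(buf), a)
#   True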
def read_array(fp, allow_pickle=False, pickle_kwargs=None):
"""
Read an array from an NPY file.
Parameters
----------
fp : file_like object
If this is not a real file object, then this may take extra memory
and time.
allow_pickle : bool, optional
Whether to allow writing pickled data. Default: False
.. versionchanged:: 1.16.3
Made default False in response to CVE-2019-6446.
pickle_kwargs : dict
Additional keyword arguments to pass to pickle.load. These are only
useful when loading object arrays saved on Python 2 when using
Python 3.
Returns
-------
array : ndarray
The array from the data on disk.
Raises
------
ValueError
If the data is invalid, or allow_pickle=False and the file contains
an object array.
"""
version = read_magic(fp)
_check_version(version)
shape, fortran_order, dtype = _read_array_header(fp, version)
if len(shape) == 0:
count = 1
else:
count = numpy.multiply.reduce(shape, dtype=numpy.int64)
# Now read the actual data.
if dtype.hasobject:
# The array contained Python objects. We need to unpickle the data.
if not allow_pickle:
raise ValueError("Object arrays cannot be loaded when "
"allow_pickle=False")
if pickle_kwargs is None:
pickle_kwargs = {}
try:
array = pickle.load(fp, **pickle_kwargs)
except UnicodeError as err:
# Friendlier error message
raise UnicodeError("Unpickling a python object failed: %r\n"
"You may need to pass the encoding= option "
"to numpy.load" % (err,)) from err
else:
if isfileobj(fp):
# We can use the fast fromfile() function.
array = numpy.fromfile(fp, dtype=dtype, count=count)
else:
# This is not a real file. We have to read it the
# memory-intensive way.
# crc32 module fails on reads greater than 2 ** 32 bytes,
# breaking large reads from gzip streams. Chunk reads to
# BUFFER_SIZE bytes to avoid issue and reduce memory overhead
# of the read. In non-chunked case count < max_read_count, so
# only one read is performed.
# Use np.ndarray instead of np.empty since the latter does
# not correctly instantiate zero-width string dtypes; see
# https://github.com/numpy/numpy/pull/6430
array = numpy.ndarray(count, dtype=dtype)
if dtype.itemsize > 0:
# If dtype.itemsize == 0 then there's nothing more to read
max_read_count = BUFFER_SIZE // min(BUFFER_SIZE, dtype.itemsize)
for i in range(0, count, max_read_count):
read_count = min(max_read_count, count - i)
read_size = int(read_count * dtype.itemsize)
data = _read_bytes(fp, read_size, "array data")
array[i:i+read_count] = numpy.frombuffer(data, dtype=dtype,
count=read_count)
if fortran_order:
array.shape = shape[::-1]
array = array.transpose()
else:
array.shape = shape
return array
def open_memmap(filename, mode='r+', dtype=None, shape=None,
fortran_order=False, version=None):
"""
Open a .npy file as a memory-mapped array.
This may be used to read an existing file or create a new one.
Parameters
----------
filename : str or path-like
The name of the file on disk. This may *not* be a file-like
object.
mode : str, optional
The mode in which to open the file; the default is 'r+'. In
addition to the standard file modes, 'c' is also accepted to mean
"copy on write." See `memmap` for the available mode strings.
dtype : data-type, optional
The data type of the array if we are creating a new file in "write"
mode, if not, `dtype` is ignored. The default value is None, which
results in a data-type of `float64`.
shape : tuple of int
The shape of the array if we are creating a new file in "write"
mode, in which case this parameter is required. Otherwise, this
parameter is ignored and is thus optional.
fortran_order : bool, optional
Whether the array should be Fortran-contiguous (True) or
C-contiguous (False, the default) if we are creating a new file in
"write" mode.
version : tuple of int (major, minor) or None
If the mode is a "write" mode, then this is the version of the file
format used to create the file. None means use the oldest
supported version that is able to store the data. Default: None
Returns
-------
marray : memmap
The memory-mapped array.
Raises
------
ValueError
If the data or the mode is invalid.
IOError
If the file is not found or cannot be opened correctly.
See Also
--------
numpy.memmap
"""
if isfileobj(filename):
raise ValueError("Filename must be a string or a path-like object."
" Memmap cannot use existing file handles.")
if 'w' in mode:
# We are creating the file, not reading it.
# Check if we ought to create the file.
_check_version(version)
# Ensure that the given dtype is an authentic dtype object rather
# than just something that can be interpreted as a dtype object.
dtype = numpy.dtype(dtype)
if dtype.hasobject:
msg = "Array can't be memory-mapped: Python objects in dtype."
raise ValueError(msg)
d = dict(
descr=dtype_to_descr(dtype),
fortran_order=fortran_order,
shape=shape,
)
# If we got here, then it should be safe to create the file.
with open(os_fspath(filename), mode+'b') as fp:
_write_array_header(fp, d, version)
offset = fp.tell()
else:
# Read the header of the file first.
with open(os_fspath(filename), 'rb') as fp:
version = read_magic(fp)
_check_version(version)
shape, fortran_order, dtype = _read_array_header(fp, version)
if dtype.hasobject:
msg = "Array can't be memory-mapped: Python objects in dtype."
raise ValueError(msg)
offset = fp.tell()
if fortran_order:
order = 'F'
else:
order = 'C'
# We need to change a write-only mode to a read-write mode since we've
# already written data to the file.
if mode == 'w+':
mode = 'r+'
marray = numpy.memmap(filename, dtype=dtype, shape=shape, order=order,
mode=mode, offset=offset)
return marray
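# Usage sketch (illustrative only; 'newfile.npy' is a hypothetical path):
#
#   >>> m = open_memmap('newfile.npy', mode='w+', dtype='float32', shape=(100, 100))
#   >>> m[0, 0] = 1.0
#   >>> m.flush()
#   >>> float(open_memmap('newfile.npy', mode='r')[0, 0])
#   1.0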
def _read_bytes(fp, size, error_template="ran out of data"):
"""
Read from file-like object until size bytes are read.
    Raises ValueError if EOF is encountered before size bytes are read.
Non-blocking objects only supported if they derive from io objects.
Required as e.g. ZipExtFile in python 2.6 can return less data than
requested.
"""
data = bytes()
while True:
# io files (default in python3) return None or raise on
# would-block, python2 file will truncate, probably nothing can be
# done about that. note that regular files can't be non-blocking
try:
r = fp.read(size - len(data))
data += r
if len(r) == 0 or len(data) == size:
break
except io.BlockingIOError:
pass
if len(data) != size:
msg = "EOF: reading %s, expected %d bytes got %d"
raise ValueError(msg % (error_template, size, len(data)))
else:
return data
| [((11608, 11719), 'numpy.dtype', 'numpy.dtype', (["{'names': names, 'formats': formats, 'titles': titles, 'offsets': offsets,\n 'itemsize': offset}"], {}), "({'names': names, 'formats': formats, 'titles': titles,\n 'offsets': offsets, 'itemsize': offset})\n", (11619, 11719), False, 'import numpy\n'), ((14171, 14288), 'warnings.warn', 'warnings.warn', (['"""Stored array in format 3.0. It can only be read by NumPy >= 1.17"""', 'UserWarning'], {'stacklevel': '(2)'}), "(\n 'Stored array in format 3.0. It can only be read by NumPy >= 1.17',\n UserWarning, stacklevel=2)\n", (14184, 14288), False, 'import warnings\n'), ((18411, 18438), 'tokenize.untokenize', 'tokenize.untokenize', (['tokens'], {}), '(tokens)\n', (18430, 18438), False, 'import tokenize\n'), ((28671, 28690), 'numpy.compat.isfileobj', 'isfileobj', (['filename'], {}), '(filename)\n', (28680, 28690), False, 'from numpy.compat import isfileobj, os_fspath, pickle\n'), ((30329, 30420), 'numpy.memmap', 'numpy.memmap', (['filename'], {'dtype': 'dtype', 'shape': 'shape', 'order': 'order', 'mode': 'mode', 'offset': 'offset'}), '(filename, dtype=dtype, shape=shape, order=order, mode=mode,\n offset=offset)\n', (30341, 30420), False, 'import numpy\n'), ((9372, 9534), 'warnings.warn', 'warnings.warn', (['"""metadata on a dtype may be saved or ignored, but will raise if saved when read. Use another form of storage."""', 'UserWarning'], {'stacklevel': '(2)'}), "(\n 'metadata on a dtype may be saved or ignored, but will raise if saved when read. Use another form of storage.'\n , UserWarning, stacklevel=2)\n", (9385, 9534), False, 'import warnings\n'), ((10584, 10602), 'numpy.dtype', 'numpy.dtype', (['descr'], {}), '(descr)\n', (10595, 10602), False, 'import numpy\n'), ((13973, 14083), 'warnings.warn', 'warnings.warn', (['"""Stored array in format 2.0. It can only beread by NumPy >= 1.9"""', 'UserWarning'], {'stacklevel': '(2)'}), "('Stored array in format 2.0. 
It can only beread by NumPy >= 1.9',\n UserWarning, stacklevel=2)\n", (13986, 14083), False, 'import warnings\n'), ((18832, 18861), 'struct.calcsize', 'struct.calcsize', (['hlength_type'], {}), '(hlength_type)\n', (18847, 18861), False, 'import struct\n'), ((18906, 18946), 'struct.unpack', 'struct.unpack', (['hlength_type', 'hlength_str'], {}), '(hlength_type, hlength_str)\n', (18919, 18946), False, 'import struct\n'), ((19546, 19563), 'numpy.lib.utils.safe_eval', 'safe_eval', (['header'], {}), '(header)\n', (19555, 19563), False, 'from numpy.lib.utils import safe_eval\n'), ((22694, 22745), 'numpy.compat.pickle.dump', 'pickle.dump', (['array', 'fp'], {'protocol': '(3)'}), '(array, fp, protocol=3, **pickle_kwargs)\n', (22705, 22745), False, 'from numpy.compat import isfileobj, os_fspath, pickle\n'), ((24465, 24512), 'numpy.multiply.reduce', 'numpy.multiply.reduce', (['shape'], {'dtype': 'numpy.int64'}), '(shape, dtype=numpy.int64)\n', (24486, 24512), False, 'import numpy\n'), ((25236, 25249), 'numpy.compat.isfileobj', 'isfileobj', (['fp'], {}), '(fp)\n', (25245, 25249), False, 'from numpy.compat import isfileobj, os_fspath, pickle\n'), ((29155, 29173), 'numpy.dtype', 'numpy.dtype', (['dtype'], {}), '(dtype)\n', (29166, 29173), False, 'import numpy\n'), ((10744, 10771), 'numpy.dtype', 'numpy.dtype', (['(dt, descr[1])'], {}), '((dt, descr[1]))\n', (10755, 10771), False, 'import numpy\n'), ((13049, 13080), 'struct.pack', 'struct.pack', (['fmt', '(hlen + padlen)'], {}), '(fmt, hlen + padlen)\n', (13060, 13080), False, 'import struct\n'), ((18059, 18070), 'io.StringIO', 'StringIO', (['s'], {}), '(s)\n', (18067, 18070), False, 'from io import StringIO\n'), ((22825, 22838), 'numpy.compat.isfileobj', 'isfileobj', (['fp'], {}), '(fp)\n', (22834, 22838), False, 'from numpy.compat import isfileobj, os_fspath, pickle\n'), ((23124, 23137), 'numpy.compat.isfileobj', 'isfileobj', (['fp'], {}), '(fp)\n', (23133, 23137), False, 'from numpy.compat import isfileobj, os_fspath, pickle\n'), ((24892, 24924), 'numpy.compat.pickle.load', 'pickle.load', (['fp'], {}), '(fp, **pickle_kwargs)\n', (24903, 24924), False, 'from numpy.compat import isfileobj, os_fspath, pickle\n'), ((25326, 25370), 'numpy.fromfile', 'numpy.fromfile', (['fp'], {'dtype': 'dtype', 'count': 'count'}), '(fp, dtype=dtype, count=count)\n', (25340, 25370), False, 'import numpy\n'), ((26029, 26062), 'numpy.ndarray', 'numpy.ndarray', (['count'], {'dtype': 'dtype'}), '(count, dtype=dtype)\n', (26042, 26062), False, 'import numpy\n'), ((22910, 23019), 'numpy.nditer', 'numpy.nditer', (['array'], {'flags': "['external_loop', 'buffered', 'zerosize_ok']", 'buffersize': 'buffersize', 'order': '"""F"""'}), "(array, flags=['external_loop', 'buffered', 'zerosize_ok'],\n buffersize=buffersize, order='F')\n", (22922, 23019), False, 'import numpy\n'), ((23207, 23316), 'numpy.nditer', 'numpy.nditer', (['array'], {'flags': "['external_loop', 'buffered', 'zerosize_ok']", 'buffersize': 'buffersize', 'order': '"""C"""'}), "(array, flags=['external_loop', 'buffered', 'zerosize_ok'],\n buffersize=buffersize, order='C')\n", (23219, 23316), False, 'import numpy\n'), ((29533, 29552), 'numpy.compat.os_fspath', 'os_fspath', (['filename'], {}), '(filename)\n', (29542, 29552), False, 'from numpy.compat import isfileobj, os_fspath, pickle\n'), ((29723, 29742), 'numpy.compat.os_fspath', 'os_fspath', (['filename'], {}), '(filename)\n', (29732, 29742), False, 'from numpy.compat import isfileobj, os_fspath, pickle\n'), ((12954, 12974), 'struct.calcsize', 'struct.calcsize', 
(['fmt'], {}), '(fmt)\n', (12969, 12974), False, 'import struct\n'), ((26555, 26608), 'numpy.frombuffer', 'numpy.frombuffer', (['data'], {'dtype': 'dtype', 'count': 'read_count'}), '(data, dtype=dtype, count=read_count)\n', (26571, 26608), False, 'import numpy\n')] |
gva-jjoyce/gva_data | gva/data/validator/is_valid_enum.py | cda990d0abb4b175025aaf16e75192bd9cc213af | """
Enumerator Test
"""
from typing import Any
class is_valid_enum():
"""
Test if a variable is on a list of valid values
"""
    __slots__ = ('symbols',)
def __init__(self, **kwargs):
"""
-> "type": "enum", "symbols": ["up", "down"]
symbols: list of allowed values (case sensitive)
"""
self.symbols = kwargs.get('symbols', ())
def __call__(self, value: Any) -> bool:
return value and value in self.symbols
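    # Usage sketch (illustrative symbols):
    #   >>> is_up_down = is_valid_enum(symbols=["up", "down"])
    #   >>> is_up_down("up"), is_up_down("sideways")
    #   (True, False)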
def __str__(self):
return f'enum {self.symbols}' | [] |
los-verdes/lv-event-pagenerator | events_page/app.py | 88416b626ff2dca6e2d71fa60bff4823954b3131 | #!/usr/bin/env python
from zoneinfo import ZoneInfo
import flask
from dateutil.parser import parse
from flask_assets import Bundle, Environment
from logzero import logger, setup_logger
from webassets.filter import get_filter
from config import cfg
from apis import calendar as gcal
setup_logger(name=__name__)
app = flask.Flask(__name__)
libsass = get_filter(
"libsass",
as_output=True,
style="compressed",
)
assets = Environment(app) # create an Environment instance
bundles = { # define nested Bundle
"style": Bundle(
"scss/*.scss",
filters=(libsass),
output="style.css",
)
}
assets.register(bundles)
@app.route("/")
def events():
return flask.render_template(
"index.html",
calendar=gcal.load_calendar(
service=gcal.build_service(),
calendar_id=cfg.calendar_id,
),
)
@app.template_filter()
def parse_tz_datetime(datetime_str):
return parse(datetime_str).replace(tzinfo=ZoneInfo(app.config["display_timezone"]))
@app.template_filter()
def replace_tz(datetime_obj):
return datetime_obj.replace(tzinfo=ZoneInfo(app.config["display_timezone"]))
@app.template_filter()
def hex2rgb(hex, alpha=None):
"""Convert a string to all caps."""
if not hex.startswith("#"):
return hex
h = hex.lstrip("#")
try:
rgb = tuple(int(h[i : i + 2], 16) for i in (0, 2, 4)) # noqa
except Exception as err:
logger.exception(f"unable to convert {hex=} to rgb: {err}")
return h
if alpha is None:
return f"rgb({rgb[0]}, {rgb[1]}, {rgb[2]})"
else:
return f"rgba({rgb[0]}, {rgb[1]}, {rgb[2]}, {alpha})"
def get_base_url():
if prefix := cfg.gcs_bucket_prefix:
return f"https://{cfg.hostname}/{prefix}"
return f"https://{cfg.hostname}"
def create_app():
cfg.load()
# TODO: do this default settings thing better?
default_app_config = dict(
display_timezone=cfg.display_timezone,
FREEZER_BASE_URL=get_base_url(),
FREEZER_STATIC_IGNORE=["*.scss", ".webassets-cache/*", ".DS_Store"],
FREEZER_RELATIVE_URLS=False,
FREEZER_REMOVE_EXTRA_FILES=True,
)
logger.info(f"create_app() => {default_app_config=}")
app.config.update(default_app_config)
return app
if __name__ == "__main__":
app = create_app()
app.run(
host="0.0.0.0",
debug=True,
)
| [((286, 313), 'logzero.setup_logger', 'setup_logger', ([], {'name': '__name__'}), '(name=__name__)\n', (298, 313), False, 'from logzero import logger, setup_logger\n'), ((321, 342), 'flask.Flask', 'flask.Flask', (['__name__'], {}), '(__name__)\n', (332, 342), False, 'import flask\n'), ((353, 410), 'webassets.filter.get_filter', 'get_filter', (['"""libsass"""'], {'as_output': '(True)', 'style': '"""compressed"""'}), "('libsass', as_output=True, style='compressed')\n", (363, 410), False, 'from webassets.filter import get_filter\n'), ((435, 451), 'flask_assets.Environment', 'Environment', (['app'], {}), '(app)\n', (446, 451), False, 'from flask_assets import Bundle, Environment\n'), ((535, 593), 'flask_assets.Bundle', 'Bundle', (['"""scss/*.scss"""'], {'filters': 'libsass', 'output': '"""style.css"""'}), "('scss/*.scss', filters=libsass, output='style.css')\n", (541, 593), False, 'from flask_assets import Bundle, Environment\n'), ((1847, 1857), 'config.cfg.load', 'cfg.load', ([], {}), '()\n', (1855, 1857), False, 'from config import cfg\n'), ((2194, 2267), 'logzero.logger.info', 'logger.info', (['f"""create_app() => default_app_config={default_app_config!r}"""'], {}), "(f'create_app() => default_app_config={default_app_config!r}')\n", (2205, 2267), False, 'from logzero import logger, setup_logger\n'), ((952, 971), 'dateutil.parser.parse', 'parse', (['datetime_str'], {}), '(datetime_str)\n', (957, 971), False, 'from dateutil.parser import parse\n'), ((987, 1027), 'zoneinfo.ZoneInfo', 'ZoneInfo', (["app.config['display_timezone']"], {}), "(app.config['display_timezone'])\n", (995, 1027), False, 'from zoneinfo import ZoneInfo\n'), ((1123, 1163), 'zoneinfo.ZoneInfo', 'ZoneInfo', (["app.config['display_timezone']"], {}), "(app.config['display_timezone'])\n", (1131, 1163), False, 'from zoneinfo import ZoneInfo\n'), ((1451, 1515), 'logzero.logger.exception', 'logger.exception', (['f"""unable to convert hex={hex!r} to rgb: {err}"""'], {}), "(f'unable to convert hex={hex!r} to rgb: {err}')\n", (1467, 1515), False, 'from logzero import logger, setup_logger\n'), ((799, 819), 'apis.calendar.build_service', 'gcal.build_service', ([], {}), '()\n', (817, 819), True, 'from apis import calendar as gcal\n')] |
desihub/desicmx | bin/focus_scan.py | 6f7c9a3cff25c970af57de20e3a12001382deb23 | #!/usr/bin/env python
import astropy.io.fits as fits
import numpy as np
import os
import matplotlib.pyplot as plt
import argparse
def _fname(expid, night,
basedir='/n/home/datasystems/users/ameisner/reduced/focus',
ccds=False):
fname = basedir + '/' + night + '/' + str(expid).zfill(8) + '/gfa-' + str(expid).zfill(8) + '_psfs.fits'
if ccds:
fname = fname.replace('_psfs.fits', '_ccds.fits')
return fname
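# Illustrative example of the path built by _fname above (expid/night values are
# hypothetical):
#   _fname(45446, '20200131') ->
#   /n/home/datasystems/users/ameisner/reduced/focus/20200131/00045446/gfa-00045446_psfs.fits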
def _actual_expid_list(expids, night, basedir='/n/home/datasystems/users/ameisner/reduced/focus'):
keep = []
for i, expid in enumerate(expids):
fname = _fname(expid, night, basedir=basedir, ccds=True)
if not os.path.exists(fname):
continue
tab = fits.getdata(fname)
# try to handle case where observer accidentally lists the 'setup focus scan'
# 1 second exposure as the start of the focus scan
if (i == 0) & (tab[0]['EXPTIME'] < 1.1):
print('SKIPPING DUMMY SETUP EXPOSURE')
continue
program = tab[0]['PROGRAM'].strip()
if program != 'focus scan':
break
keep.append(expid)
return keep
def focus_plots(night, expids,
basedir='/n/home/datasystems/users/ameisner/reduced/focus',
outdir='/n/home/desiobserver/focus_scan_pngs', no_popups=False):
expids = _actual_expid_list(expids, night, basedir=basedir)
if len(expids) == 0:
print('NO FOCUS SCAN EXPOSURES TO ANALYZE ??')
assert(False)
plt.figure(1, figsize=(12.0*(len(expids)/7.0), 9))
extnames = ['GUIDE0', 'GUIDE2', 'GUIDE3', 'GUIDE5', 'GUIDE7', 'GUIDE8']
focus_z = []
fwhm_pix = []
# PSF stamps plot
plt.subplots_adjust(hspace=0.01, wspace=0.01)
for i, expid in enumerate(expids):
fname = _fname(expid, night, basedir=basedir)
print(fname)
fname_ccds = _fname(expid, night, basedir=basedir, ccds=True)
if not os.path.exists(fname):
continue
ccds = fits.getdata(fname_ccds)
if np.sum(np.isfinite(ccds['PSF_FWHM_PIX'])) != 0:
fwhm_pix.append(np.median(ccds['PSF_FWHM_PIX'][np.isfinite(ccds['PSF_FWHM_PIX'])]))
focus_z.append(float(ccds[0]['FOCUS'].split(',')[2]))
hdul = fits.open(fname)
extnames_present = [hdu.header['EXTNAME'] for hdu in hdul]
for j, extname in enumerate(extnames):
if extname not in extnames_present:
continue
print(i, j)
plt.subplot(6, len(expids), len(expids)*j + i + 1)
plt.xticks([])
plt.yticks([])
im = fits.getdata(fname, extname=extname)
plt.imshow(im, interpolation='nearest', origin='lower', cmap='gray_r', vmin=0.01)
plt.text(5, 44, str(expid) + '; ' + extname, color='r', fontsize=9)
plt.text(10, 3.5, 'z = ' + str(int(float(ccds[0]['FOCUS'].split(',')[2]))), color='r')
if np.isfinite(ccds[j]['XCENTROID_PSF']) and np.isfinite(ccds[j]['YCENTROID_PSF']):
plt.scatter([ccds[j]['XCENTROID_PSF']], [ccds[j]['YCENTROID_PSF']], marker='.', c='r')
expid_min = int(np.min(expids))
print(focus_z)
print(fwhm_pix)
plt.savefig(os.path.join(outdir, 'stamps_focus_scan-' + str(expid_min).zfill(8)+'.png'), bbox_inches='tight')
#plt.cla()
plt.figure(200)
asec_per_pix = 0.205
focus_z = np.array(focus_z)
fwhm_asec = np.array(fwhm_pix)*asec_per_pix
plt.scatter(focus_z, fwhm_asec)
plt.xlabel('focus z (micron)')
plt.ylabel('FWHM (asec)')
coeff = np.polyfit(focus_z, fwhm_asec, 2)
xsamp = np.arange(np.min(focus_z), np.max(focus_z))
ysamp = coeff[0]*(np.power(xsamp, 2)) + coeff[1]*xsamp + coeff[2]
plt.title('focus scan starting with EXPID = ' + str(expid_min))
plt.plot(xsamp, ysamp)
zmin = -coeff[1]/(2*coeff[0])
min_fwhm_fit_asec = coeff[0]*(zmin**2) + coeff[1]*zmin + coeff[2]
yrange = [np.min(fwhm_asec), np.max(fwhm_asec)]
plt.text(focus_z[2], yrange[0] + 0.8*(yrange[1]-yrange[0]), 'best FWHM (meas) : ' + '{:.2f}'.format(np.min(fwhm_asec)))
plt.text(focus_z[2], yrange[0] + 0.7*(yrange[1]-yrange[0]), 'best FWHM (fit) : ' + '{:.2f}'.format(min_fwhm_fit_asec))
plt.text(focus_z[2], yrange[0] + 0.9*(yrange[1]-yrange[0]), 'best focus : ' + str(int(np.round(zmin))))
plt.savefig(os.path.join(outdir, 'fit_focus_scan-' + str(expid_min).zfill(8) + '.png'), bbox_inches='tight')
if not no_popups:
plt.show()
def _test():
night = '20200131'
expids = 45446 + np.arange(7)
focus_plots(night, expids, basedir='/project/projectdirs/desi/users/ameisner/GFA/run/psf_flux_weighted_centroid', outdir='.')
def _test_missing_cam():
night = '20200131'
expids = 45485 + np.arange(7)
focus_plots(night, expids, basedir='/project/projectdirs/desi/users/ameisner/GFA/run/psf_flux_weighted_centroid')
if __name__ == "__main__":
descr = 'GFA focus sequence plots/analysis'
parser = argparse.ArgumentParser(description=descr)
parser.add_argument('first_expid', type=int, nargs=1)
parser.add_argument('night', type=str, nargs=1)
parser.add_argument('--basedir', default='/n/home/datasystems/users/ameisner/reduced/focus',
type=str, help='base directory for GFA reductions')
parser.add_argument('--outdir', default='/n/home/desiobserver/focus_scan_pngs',
type=str, help='output directory for plot PNGs')
parser.add_argument('--no_popups', default=False, action='store_true',
help='write PNGs without popping up plot windows')
args = parser.parse_args()
expids = args.first_expid + np.arange(16, dtype=int)
print(expids)
print(args.night[0])
print(args.basedir)
outdir = args.outdir if os.path.exists(args.outdir) else '.'
focus_plots(args.night[0], expids, basedir=args.basedir, outdir=outdir, no_popups=args.no_popups)
| [((1758, 1803), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'hspace': '(0.01)', 'wspace': '(0.01)'}), '(hspace=0.01, wspace=0.01)\n', (1777, 1803), True, 'import matplotlib.pyplot as plt\n'), ((3439, 3454), 'matplotlib.pyplot.figure', 'plt.figure', (['(200)'], {}), '(200)\n', (3449, 3454), True, 'import matplotlib.pyplot as plt\n'), ((3500, 3517), 'numpy.array', 'np.array', (['focus_z'], {}), '(focus_z)\n', (3508, 3517), True, 'import numpy as np\n'), ((3570, 3601), 'matplotlib.pyplot.scatter', 'plt.scatter', (['focus_z', 'fwhm_asec'], {}), '(focus_z, fwhm_asec)\n', (3581, 3601), True, 'import matplotlib.pyplot as plt\n'), ((3606, 3636), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""focus z (micron)"""'], {}), "('focus z (micron)')\n", (3616, 3636), True, 'import matplotlib.pyplot as plt\n'), ((3641, 3666), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""FWHM (asec)"""'], {}), "('FWHM (asec)')\n", (3651, 3666), True, 'import matplotlib.pyplot as plt\n'), ((3680, 3713), 'numpy.polyfit', 'np.polyfit', (['focus_z', 'fwhm_asec', '(2)'], {}), '(focus_z, fwhm_asec, 2)\n', (3690, 3713), True, 'import numpy as np\n'), ((3920, 3942), 'matplotlib.pyplot.plot', 'plt.plot', (['xsamp', 'ysamp'], {}), '(xsamp, ysamp)\n', (3928, 3942), True, 'import matplotlib.pyplot as plt\n'), ((5117, 5159), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': 'descr'}), '(description=descr)\n', (5140, 5159), False, 'import argparse\n'), ((777, 796), 'astropy.io.fits.getdata', 'fits.getdata', (['fname'], {}), '(fname)\n', (789, 796), True, 'import astropy.io.fits as fits\n'), ((2062, 2086), 'astropy.io.fits.getdata', 'fits.getdata', (['fname_ccds'], {}), '(fname_ccds)\n', (2074, 2086), True, 'import astropy.io.fits as fits\n'), ((2337, 2353), 'astropy.io.fits.open', 'fits.open', (['fname'], {}), '(fname)\n', (2346, 2353), True, 'import astropy.io.fits as fits\n'), ((3248, 3262), 'numpy.min', 'np.min', (['expids'], {}), '(expids)\n', (3254, 3262), True, 'import numpy as np\n'), ((3534, 3552), 'numpy.array', 'np.array', (['fwhm_pix'], {}), '(fwhm_pix)\n', (3542, 3552), True, 'import numpy as np\n'), ((3737, 3752), 'numpy.min', 'np.min', (['focus_z'], {}), '(focus_z)\n', (3743, 3752), True, 'import numpy as np\n'), ((3754, 3769), 'numpy.max', 'np.max', (['focus_z'], {}), '(focus_z)\n', (3760, 3769), True, 'import numpy as np\n'), ((4068, 4085), 'numpy.min', 'np.min', (['fwhm_asec'], {}), '(fwhm_asec)\n', (4074, 4085), True, 'import numpy as np\n'), ((4087, 4104), 'numpy.max', 'np.max', (['fwhm_asec'], {}), '(fwhm_asec)\n', (4093, 4104), True, 'import numpy as np\n'), ((4609, 4619), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4617, 4619), True, 'import matplotlib.pyplot as plt\n'), ((4682, 4694), 'numpy.arange', 'np.arange', (['(7)'], {}), '(7)\n', (4691, 4694), True, 'import numpy as np\n'), ((4896, 4908), 'numpy.arange', 'np.arange', (['(7)'], {}), '(7)\n', (4905, 4908), True, 'import numpy as np\n'), ((5825, 5849), 'numpy.arange', 'np.arange', (['(16)'], {'dtype': 'int'}), '(16, dtype=int)\n', (5834, 5849), True, 'import numpy as np\n'), ((5951, 5978), 'os.path.exists', 'os.path.exists', (['args.outdir'], {}), '(args.outdir)\n', (5965, 5978), False, 'import os\n'), ((719, 740), 'os.path.exists', 'os.path.exists', (['fname'], {}), '(fname)\n', (733, 740), False, 'import os\n'), ((2003, 2024), 'os.path.exists', 'os.path.exists', (['fname'], {}), '(fname)\n', (2017, 2024), False, 'import os\n'), ((2641, 2655), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[]'], 
{}), '([])\n', (2651, 2655), True, 'import matplotlib.pyplot as plt\n'), ((2668, 2682), 'matplotlib.pyplot.yticks', 'plt.yticks', (['[]'], {}), '([])\n', (2678, 2682), True, 'import matplotlib.pyplot as plt\n'), ((2700, 2736), 'astropy.io.fits.getdata', 'fits.getdata', (['fname'], {'extname': 'extname'}), '(fname, extname=extname)\n', (2712, 2736), True, 'import astropy.io.fits as fits\n'), ((2749, 2835), 'matplotlib.pyplot.imshow', 'plt.imshow', (['im'], {'interpolation': '"""nearest"""', 'origin': '"""lower"""', 'cmap': '"""gray_r"""', 'vmin': '(0.01)'}), "(im, interpolation='nearest', origin='lower', cmap='gray_r', vmin\n =0.01)\n", (2759, 2835), True, 'import matplotlib.pyplot as plt\n'), ((2106, 2139), 'numpy.isfinite', 'np.isfinite', (["ccds['PSF_FWHM_PIX']"], {}), "(ccds['PSF_FWHM_PIX'])\n", (2117, 2139), True, 'import numpy as np\n'), ((3038, 3075), 'numpy.isfinite', 'np.isfinite', (["ccds[j]['XCENTROID_PSF']"], {}), "(ccds[j]['XCENTROID_PSF'])\n", (3049, 3075), True, 'import numpy as np\n'), ((3080, 3117), 'numpy.isfinite', 'np.isfinite', (["ccds[j]['YCENTROID_PSF']"], {}), "(ccds[j]['YCENTROID_PSF'])\n", (3091, 3117), True, 'import numpy as np\n'), ((3135, 3226), 'matplotlib.pyplot.scatter', 'plt.scatter', (["[ccds[j]['XCENTROID_PSF']]", "[ccds[j]['YCENTROID_PSF']]"], {'marker': '"""."""', 'c': '"""r"""'}), "([ccds[j]['XCENTROID_PSF']], [ccds[j]['YCENTROID_PSF']], marker=\n '.', c='r')\n", (3146, 3226), True, 'import matplotlib.pyplot as plt\n'), ((3793, 3811), 'numpy.power', 'np.power', (['xsamp', '(2)'], {}), '(xsamp, 2)\n', (3801, 3811), True, 'import numpy as np\n'), ((4210, 4227), 'numpy.min', 'np.min', (['fwhm_asec'], {}), '(fwhm_asec)\n', (4216, 4227), True, 'import numpy as np\n'), ((4443, 4457), 'numpy.round', 'np.round', (['zmin'], {}), '(zmin)\n', (4451, 4457), True, 'import numpy as np\n'), ((2206, 2239), 'numpy.isfinite', 'np.isfinite', (["ccds['PSF_FWHM_PIX']"], {}), "(ccds['PSF_FWHM_PIX'])\n", (2217, 2239), True, 'import numpy as np\n')] |
jadnohra/connect | proto_3/ddq/topics/logics/topic.py | 8eb21e6f122898094447bc3d5edb3053d5a2adf2 | from typing import List
from ddq.taxonomy.reference import Reference
from ddq.topics.topic import Topic
class Logic(Topic):
def references(self) -> List[Reference]:
return [
Reference("Classical and Nonclassical Logics",
[("Eric", "Schechter")])
]
| [((200, 271), 'ddq.taxonomy.reference.Reference', 'Reference', (['"""Classical and Nonclassical Logics"""', "[('Eric', 'Schechter')]"], {}), "('Classical and Nonclassical Logics', [('Eric', 'Schechter')])\n", (209, 271), False, 'from ddq.taxonomy.reference import Reference\n')] |
abhiskk/pythia | pythia/utils/logger.py | c33fb45d74353c25b6269b44551bcafefecb5c7e | # Copyright (c) Facebook, Inc. and its affiliates.
import base64
import logging
import os
import sys
from tensorboardX import SummaryWriter
from pythia.utils.distributed_utils import is_main_process
from pythia.utils.general import (ckpt_name_from_core_args,
foldername_from_config_override)
from pythia.utils.timer import Timer
class Logger:
def __init__(self, config):
self.logger = None
self.summary_writer = None
if not is_main_process():
return
self.timer = Timer()
self.config = config
self.save_dir = config.training_parameters.save_dir
self.log_folder = ckpt_name_from_core_args(config)
self.log_folder += foldername_from_config_override(config)
time_format = "%Y-%m-%dT%H:%M:%S"
self.log_filename = ckpt_name_from_core_args(config) + "_"
self.log_filename += self.timer.get_time_hhmmss(None, format=time_format)
self.log_filename += ".log"
self.log_folder = os.path.join(self.save_dir, self.log_folder, "logs")
arg_log_dir = self.config.get("log_dir", None)
if arg_log_dir:
self.log_folder = arg_log_dir
if not os.path.exists(self.log_folder):
os.makedirs(self.log_folder)
tensorboard_folder = os.path.join(self.log_folder, "tensorboard")
self.summary_writer = SummaryWriter(tensorboard_folder)
self.log_filename = os.path.join(self.log_folder, self.log_filename)
print("Logging to:", self.log_filename)
logging.captureWarnings(True)
self.logger = logging.getLogger(__name__)
self._file_only_logger = logging.getLogger(__name__)
warnings_logger = logging.getLogger("py.warnings")
# Set level
level = config["training_parameters"].get("logger_level", "info")
self.logger.setLevel(getattr(logging, level.upper()))
self._file_only_logger.setLevel(getattr(logging, level.upper()))
formatter = logging.Formatter(
"%(asctime)s %(levelname)s: %(message)s", datefmt="%Y-%m-%dT%H:%M:%S"
)
# Add handler to file
channel = logging.FileHandler(filename=self.log_filename, mode="a")
channel.setFormatter(formatter)
self.logger.addHandler(channel)
self._file_only_logger.addHandler(channel)
warnings_logger.addHandler(channel)
# Add handler to stdout
channel = logging.StreamHandler(sys.stdout)
channel.setFormatter(formatter)
self.logger.addHandler(channel)
warnings_logger.addHandler(channel)
should_not_log = self.config["training_parameters"]["should_not_log"]
self.should_log = not should_not_log
# Single log wrapper map
self._single_log_map = set()
def __del__(self):
if getattr(self, "summary_writer", None) is not None:
self.summary_writer.close()
def write(self, x, level="info", donot_print=False):
if self.logger is None:
return
# if it should not log then just print it
if self.should_log:
if hasattr(self.logger, level):
if donot_print:
getattr(self._file_only_logger, level)(str(x))
else:
getattr(self.logger, level)(str(x))
else:
self.logger.error("Unknown log level type: %s" % level)
else:
print(str(x) + "\n")
def single_write(self, x, level="info"):
if x + "_" + level in self._single_log_map:
return
else:
self.write(x, level)
def add_scalar(self, key, value, iteration):
if self.summary_writer is None:
return
self.summary_writer.add_scalar(key, value, iteration)
def add_scalars(self, scalar_dict, iteration):
if self.summary_writer is None:
return
for key, val in scalar_dict.items():
self.summary_writer.add_scalar(key, val, iteration)
def add_histogram_for_model(self, model, iteration):
if self.summary_writer is None:
return
for name, param in model.named_parameters():
np_param = param.clone().cpu().data.numpy()
self.summary_writer.add_histogram(name, np_param, iteration)
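# Hedged usage sketch (the `config` object is assumed to be a pythia
# configuration exposing `training_parameters`; values are illustrative):
#
#     logger = Logger(config)
#     logger.write("starting training")
#     logger.add_scalar("train/loss", 0.42, 100)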
| [((551, 558), 'pythia.utils.timer.Timer', 'Timer', ([], {}), '()\n', (556, 558), False, 'from pythia.utils.timer import Timer\n'), ((674, 706), 'pythia.utils.general.ckpt_name_from_core_args', 'ckpt_name_from_core_args', (['config'], {}), '(config)\n', (698, 706), False, 'from pythia.utils.general import ckpt_name_from_core_args, foldername_from_config_override\n'), ((734, 773), 'pythia.utils.general.foldername_from_config_override', 'foldername_from_config_override', (['config'], {}), '(config)\n', (765, 773), False, 'from pythia.utils.general import ckpt_name_from_core_args, foldername_from_config_override\n'), ((1028, 1080), 'os.path.join', 'os.path.join', (['self.save_dir', 'self.log_folder', '"""logs"""'], {}), "(self.save_dir, self.log_folder, 'logs')\n", (1040, 1080), False, 'import os\n'), ((1323, 1367), 'os.path.join', 'os.path.join', (['self.log_folder', '"""tensorboard"""'], {}), "(self.log_folder, 'tensorboard')\n", (1335, 1367), False, 'import os\n'), ((1398, 1431), 'tensorboardX.SummaryWriter', 'SummaryWriter', (['tensorboard_folder'], {}), '(tensorboard_folder)\n', (1411, 1431), False, 'from tensorboardX import SummaryWriter\n'), ((1461, 1509), 'os.path.join', 'os.path.join', (['self.log_folder', 'self.log_filename'], {}), '(self.log_folder, self.log_filename)\n', (1473, 1509), False, 'import os\n'), ((1568, 1597), 'logging.captureWarnings', 'logging.captureWarnings', (['(True)'], {}), '(True)\n', (1591, 1597), False, 'import logging\n'), ((1621, 1648), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1638, 1648), False, 'import logging\n'), ((1682, 1709), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1699, 1709), False, 'import logging\n'), ((1736, 1768), 'logging.getLogger', 'logging.getLogger', (['"""py.warnings"""'], {}), "('py.warnings')\n", (1753, 1768), False, 'import logging\n'), ((2020, 2113), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s %(levelname)s: %(message)s"""'], {'datefmt': '"""%Y-%m-%dT%H:%M:%S"""'}), "('%(asctime)s %(levelname)s: %(message)s', datefmt=\n '%Y-%m-%dT%H:%M:%S')\n", (2037, 2113), False, 'import logging\n'), ((2180, 2237), 'logging.FileHandler', 'logging.FileHandler', ([], {'filename': 'self.log_filename', 'mode': '"""a"""'}), "(filename=self.log_filename, mode='a')\n", (2199, 2237), False, 'import logging\n'), ((2465, 2498), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stdout'], {}), '(sys.stdout)\n', (2486, 2498), False, 'import logging\n'), ((491, 508), 'pythia.utils.distributed_utils.is_main_process', 'is_main_process', ([], {}), '()\n', (506, 508), False, 'from pythia.utils.distributed_utils import is_main_process\n'), ((844, 876), 'pythia.utils.general.ckpt_name_from_core_args', 'ckpt_name_from_core_args', (['config'], {}), '(config)\n', (868, 876), False, 'from pythia.utils.general import ckpt_name_from_core_args, foldername_from_config_override\n'), ((1219, 1250), 'os.path.exists', 'os.path.exists', (['self.log_folder'], {}), '(self.log_folder)\n', (1233, 1250), False, 'import os\n'), ((1264, 1292), 'os.makedirs', 'os.makedirs', (['self.log_folder'], {}), '(self.log_folder)\n', (1275, 1292), False, 'import os\n')] |
AaronFriel/pulumi-google-native | sdk/python/pulumi_google_native/healthcare/v1beta1/user_data_mapping.py | 75d1cda425e33d4610348972cd70bddf35f1770d | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._inputs import *
__all__ = ['UserDataMappingArgs', 'UserDataMapping']
@pulumi.input_type
class UserDataMappingArgs:
def __init__(__self__, *,
consent_store_id: pulumi.Input[str],
data_id: pulumi.Input[str],
dataset_id: pulumi.Input[str],
user_id: pulumi.Input[str],
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
resource_attributes: Optional[pulumi.Input[Sequence[pulumi.Input['AttributeArgs']]]] = None):
"""
The set of arguments for constructing a UserDataMapping resource.
:param pulumi.Input[str] data_id: A unique identifier for the mapped resource.
:param pulumi.Input[str] user_id: User's UUID provided by the client.
:param pulumi.Input[str] name: Resource name of the User data mapping, of the form `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/consentStores/{consent_store_id}/userDataMappings/{user_data_mapping_id}`.
:param pulumi.Input[Sequence[pulumi.Input['AttributeArgs']]] resource_attributes: Attributes of the resource. Only explicitly set attributes are displayed here. Attribute definitions with defaults set implicitly apply to these User data mappings. Attributes listed here must be single valued, that is, exactly one value is specified for the field "values" in each Attribute.
"""
pulumi.set(__self__, "consent_store_id", consent_store_id)
pulumi.set(__self__, "data_id", data_id)
pulumi.set(__self__, "dataset_id", dataset_id)
pulumi.set(__self__, "user_id", user_id)
if location is not None:
pulumi.set(__self__, "location", location)
if name is not None:
pulumi.set(__self__, "name", name)
if project is not None:
pulumi.set(__self__, "project", project)
if resource_attributes is not None:
pulumi.set(__self__, "resource_attributes", resource_attributes)
@property
@pulumi.getter(name="consentStoreId")
def consent_store_id(self) -> pulumi.Input[str]:
return pulumi.get(self, "consent_store_id")
@consent_store_id.setter
def consent_store_id(self, value: pulumi.Input[str]):
pulumi.set(self, "consent_store_id", value)
@property
@pulumi.getter(name="dataId")
def data_id(self) -> pulumi.Input[str]:
"""
A unique identifier for the mapped resource.
"""
return pulumi.get(self, "data_id")
@data_id.setter
def data_id(self, value: pulumi.Input[str]):
pulumi.set(self, "data_id", value)
@property
@pulumi.getter(name="datasetId")
def dataset_id(self) -> pulumi.Input[str]:
return pulumi.get(self, "dataset_id")
@dataset_id.setter
def dataset_id(self, value: pulumi.Input[str]):
pulumi.set(self, "dataset_id", value)
@property
@pulumi.getter(name="userId")
def user_id(self) -> pulumi.Input[str]:
"""
User's UUID provided by the client.
"""
return pulumi.get(self, "user_id")
@user_id.setter
def user_id(self, value: pulumi.Input[str]):
pulumi.set(self, "user_id", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Resource name of the User data mapping, of the form `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/consentStores/{consent_store_id}/userDataMappings/{user_data_mapping_id}`.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def project(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "project")
@project.setter
def project(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project", value)
@property
@pulumi.getter(name="resourceAttributes")
def resource_attributes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AttributeArgs']]]]:
"""
Attributes of the resource. Only explicitly set attributes are displayed here. Attribute definitions with defaults set implicitly apply to these User data mappings. Attributes listed here must be single valued, that is, exactly one value is specified for the field "values" in each Attribute.
"""
return pulumi.get(self, "resource_attributes")
@resource_attributes.setter
def resource_attributes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AttributeArgs']]]]):
pulumi.set(self, "resource_attributes", value)
class UserDataMapping(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
consent_store_id: Optional[pulumi.Input[str]] = None,
data_id: Optional[pulumi.Input[str]] = None,
dataset_id: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
resource_attributes: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AttributeArgs']]]]] = None,
user_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Creates a new User data mapping in the parent consent store.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] data_id: A unique identifier for the mapped resource.
:param pulumi.Input[str] name: Resource name of the User data mapping, of the form `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/consentStores/{consent_store_id}/userDataMappings/{user_data_mapping_id}`.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AttributeArgs']]]] resource_attributes: Attributes of the resource. Only explicitly set attributes are displayed here. Attribute definitions with defaults set implicitly apply to these User data mappings. Attributes listed here must be single valued, that is, exactly one value is specified for the field "values" in each Attribute.
:param pulumi.Input[str] user_id: User's UUID provided by the client.
"""
...
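    # Hedged usage sketch (resource name and property values are hypothetical):
    #
    #     mapping = UserDataMapping("my-mapping",
    #         consent_store_id="my-consent-store",
    #         data_id="resource-123",
    #         dataset_id="my-dataset",
    #         user_id="user-uuid")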
@overload
def __init__(__self__,
resource_name: str,
args: UserDataMappingArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Creates a new User data mapping in the parent consent store.
:param str resource_name: The name of the resource.
:param UserDataMappingArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(UserDataMappingArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
consent_store_id: Optional[pulumi.Input[str]] = None,
data_id: Optional[pulumi.Input[str]] = None,
dataset_id: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
resource_attributes: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AttributeArgs']]]]] = None,
user_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = UserDataMappingArgs.__new__(UserDataMappingArgs)
if consent_store_id is None and not opts.urn:
raise TypeError("Missing required property 'consent_store_id'")
__props__.__dict__["consent_store_id"] = consent_store_id
if data_id is None and not opts.urn:
raise TypeError("Missing required property 'data_id'")
__props__.__dict__["data_id"] = data_id
if dataset_id is None and not opts.urn:
raise TypeError("Missing required property 'dataset_id'")
__props__.__dict__["dataset_id"] = dataset_id
__props__.__dict__["location"] = location
__props__.__dict__["name"] = name
__props__.__dict__["project"] = project
__props__.__dict__["resource_attributes"] = resource_attributes
if user_id is None and not opts.urn:
raise TypeError("Missing required property 'user_id'")
__props__.__dict__["user_id"] = user_id
__props__.__dict__["archive_time"] = None
__props__.__dict__["archived"] = None
super(UserDataMapping, __self__).__init__(
'google-native:healthcare/v1beta1:UserDataMapping',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'UserDataMapping':
"""
Get an existing UserDataMapping resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = UserDataMappingArgs.__new__(UserDataMappingArgs)
__props__.__dict__["archive_time"] = None
__props__.__dict__["archived"] = None
__props__.__dict__["data_id"] = None
__props__.__dict__["name"] = None
__props__.__dict__["resource_attributes"] = None
__props__.__dict__["user_id"] = None
return UserDataMapping(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="archiveTime")
def archive_time(self) -> pulumi.Output[str]:
"""
Indicates the time when this mapping was archived.
"""
return pulumi.get(self, "archive_time")
@property
@pulumi.getter
def archived(self) -> pulumi.Output[bool]:
"""
Indicates whether this mapping is archived.
"""
return pulumi.get(self, "archived")
@property
@pulumi.getter(name="dataId")
def data_id(self) -> pulumi.Output[str]:
"""
A unique identifier for the mapped resource.
"""
return pulumi.get(self, "data_id")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name of the User data mapping, of the form `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/consentStores/{consent_store_id}/userDataMappings/{user_data_mapping_id}`.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="resourceAttributes")
def resource_attributes(self) -> pulumi.Output[Sequence['outputs.AttributeResponse']]:
"""
Attributes of the resource. Only explicitly set attributes are displayed here. Attribute definitions with defaults set implicitly apply to these User data mappings. Attributes listed here must be single valued, that is, exactly one value is specified for the field "values" in each Attribute.
"""
return pulumi.get(self, "resource_attributes")
@property
@pulumi.getter(name="userId")
def user_id(self) -> pulumi.Output[str]:
"""
User's UUID provided by the client.
"""
return pulumi.get(self, "user_id")
| [((2471, 2507), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""consentStoreId"""'}), "(name='consentStoreId')\n", (2484, 2507), False, 'import pulumi\n'), ((2773, 2801), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""dataId"""'}), "(name='dataId')\n", (2786, 2801), False, 'import pulumi\n'), ((3099, 3130), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""datasetId"""'}), "(name='datasetId')\n", (3112, 3130), False, 'import pulumi\n'), ((3366, 3394), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""userId"""'}), "(name='userId')\n", (3379, 3394), False, 'import pulumi\n'), ((4663, 4703), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""resourceAttributes"""'}), "(name='resourceAttributes')\n", (4676, 4703), False, 'import pulumi\n'), ((11810, 11843), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""archiveTime"""'}), "(name='archiveTime')\n", (11823, 11843), False, 'import pulumi\n'), ((12246, 12274), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""dataId"""'}), "(name='dataId')\n", (12259, 12274), False, 'import pulumi\n'), ((12804, 12844), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""resourceAttributes"""'}), "(name='resourceAttributes')\n", (12817, 12844), False, 'import pulumi\n'), ((13336, 13364), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""userId"""'}), "(name='userId')\n", (13349, 13364), False, 'import pulumi\n'), ((1869, 1927), 'pulumi.set', 'pulumi.set', (['__self__', '"""consent_store_id"""', 'consent_store_id'], {}), "(__self__, 'consent_store_id', consent_store_id)\n", (1879, 1927), False, 'import pulumi\n'), ((1936, 1976), 'pulumi.set', 'pulumi.set', (['__self__', '"""data_id"""', 'data_id'], {}), "(__self__, 'data_id', data_id)\n", (1946, 1976), False, 'import pulumi\n'), ((1985, 2031), 'pulumi.set', 'pulumi.set', (['__self__', '"""dataset_id"""', 'dataset_id'], {}), "(__self__, 'dataset_id', dataset_id)\n", (1995, 2031), False, 'import pulumi\n'), ((2040, 2080), 'pulumi.set', 'pulumi.set', (['__self__', '"""user_id"""', 'user_id'], {}), "(__self__, 'user_id', user_id)\n", (2050, 2080), False, 'import pulumi\n'), ((2576, 2612), 'pulumi.get', 'pulumi.get', (['self', '"""consent_store_id"""'], {}), "(self, 'consent_store_id')\n", (2586, 2612), False, 'import pulumi\n'), ((2709, 2752), 'pulumi.set', 'pulumi.set', (['self', '"""consent_store_id"""', 'value'], {}), "(self, 'consent_store_id', value)\n", (2719, 2752), False, 'import pulumi\n'), ((2938, 2965), 'pulumi.get', 'pulumi.get', (['self', '"""data_id"""'], {}), "(self, 'data_id')\n", (2948, 2965), False, 'import pulumi\n'), ((3044, 3078), 'pulumi.set', 'pulumi.set', (['self', '"""data_id"""', 'value'], {}), "(self, 'data_id', value)\n", (3054, 3078), False, 'import pulumi\n'), ((3193, 3223), 'pulumi.get', 'pulumi.get', (['self', '"""dataset_id"""'], {}), "(self, 'dataset_id')\n", (3203, 3223), False, 'import pulumi\n'), ((3308, 3345), 'pulumi.set', 'pulumi.set', (['self', '"""dataset_id"""', 'value'], {}), "(self, 'dataset_id', value)\n", (3318, 3345), False, 'import pulumi\n'), ((3522, 3549), 'pulumi.get', 'pulumi.get', (['self', '"""user_id"""'], {}), "(self, 'user_id')\n", (3532, 3549), False, 'import pulumi\n'), ((3628, 3662), 'pulumi.set', 'pulumi.set', (['self', '"""user_id"""', 'value'], {}), "(self, 'user_id', value)\n", (3638, 3662), False, 'import pulumi\n'), ((3767, 3795), 'pulumi.get', 'pulumi.get', (['self', '"""location"""'], {}), "(self, 'location')\n", (3777, 3795), False, 'import pulumi\n'), ((3886, 3921), 'pulumi.set', 'pulumi.set', (['self', '"""location"""', 
'value'], {}), "(self, 'location', value)\n", (3896, 3921), False, 'import pulumi\n'), ((4250, 4274), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (4260, 4274), False, 'import pulumi\n'), ((4357, 4388), 'pulumi.set', 'pulumi.set', (['self', '"""name"""', 'value'], {}), "(self, 'name', value)\n", (4367, 4388), False, 'import pulumi\n'), ((4492, 4519), 'pulumi.get', 'pulumi.get', (['self', '"""project"""'], {}), "(self, 'project')\n", (4502, 4519), False, 'import pulumi\n'), ((4608, 4642), 'pulumi.set', 'pulumi.set', (['self', '"""project"""', 'value'], {}), "(self, 'project', value)\n", (4618, 4642), False, 'import pulumi\n'), ((5146, 5185), 'pulumi.get', 'pulumi.get', (['self', '"""resource_attributes"""'], {}), "(self, 'resource_attributes')\n", (5156, 5185), False, 'import pulumi\n'), ((5334, 5380), 'pulumi.set', 'pulumi.set', (['self', '"""resource_attributes"""', 'value'], {}), "(self, 'resource_attributes', value)\n", (5344, 5380), False, 'import pulumi\n'), ((11992, 12024), 'pulumi.get', 'pulumi.get', (['self', '"""archive_time"""'], {}), "(self, 'archive_time')\n", (12002, 12024), False, 'import pulumi\n'), ((12197, 12225), 'pulumi.get', 'pulumi.get', (['self', '"""archived"""'], {}), "(self, 'archived')\n", (12207, 12225), False, 'import pulumi\n'), ((12412, 12439), 'pulumi.get', 'pulumi.get', (['self', '"""data_id"""'], {}), "(self, 'data_id')\n", (12422, 12439), False, 'import pulumi\n'), ((12759, 12783), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (12769, 12783), False, 'import pulumi\n'), ((13276, 13315), 'pulumi.get', 'pulumi.get', (['self', '"""resource_attributes"""'], {}), "(self, 'resource_attributes')\n", (13286, 13315), False, 'import pulumi\n'), ((13493, 13520), 'pulumi.get', 'pulumi.get', (['self', '"""user_id"""'], {}), "(self, 'user_id')\n", (13503, 13520), False, 'import pulumi\n'), ((2126, 2168), 'pulumi.set', 'pulumi.set', (['__self__', '"""location"""', 'location'], {}), "(__self__, 'location', location)\n", (2136, 2168), False, 'import pulumi\n'), ((2210, 2244), 'pulumi.set', 'pulumi.set', (['__self__', '"""name"""', 'name'], {}), "(__self__, 'name', name)\n", (2220, 2244), False, 'import pulumi\n'), ((2289, 2329), 'pulumi.set', 'pulumi.set', (['__self__', '"""project"""', 'project'], {}), "(__self__, 'project', project)\n", (2299, 2329), False, 'import pulumi\n'), ((2386, 2450), 'pulumi.set', 'pulumi.set', (['__self__', '"""resource_attributes"""', 'resource_attributes'], {}), "(__self__, 'resource_attributes', resource_attributes)\n", (2396, 2450), False, 'import pulumi\n'), ((8918, 8942), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {}), '()\n', (8940, 8942), False, 'import pulumi\n'), ((11325, 11354), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {'id': 'id'}), '(id=id)\n', (11347, 11354), False, 'import pulumi\n')] |
Jerome-maker/ensae_teaching_cs | _doc/sphinxdoc/source/conf.py | 43ea044361ee60c00c85aea354a7b25c21c0fd07 | import sys
import os
import sphinx_rtd_theme
source_path = os.path.normpath(
os.path.join(
os.path.abspath(
os.path.split(__file__)[0])))
try:
from conf_base import *
except ImportError:
sys.path.append(source_path)
from conf_base import *
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
templates_path = [os.path.join(source_path, 'phdoc_static')]
html_static_path = [os.path.join(source_path, 'phdoc_static')]
if not os.path.exists(templates_path[0]):
raise FileNotFoundError(templates_path[0])
blog_root = "http://www.xavierdupre.fr/app/ensae_teaching_cs/helpsphinx3/"
| [((331, 369), 'sphinx_rtd_theme.get_html_theme_path', 'sphinx_rtd_theme.get_html_theme_path', ([], {}), '()\n', (367, 369), False, 'import sphinx_rtd_theme\n'), ((389, 430), 'os.path.join', 'os.path.join', (['source_path', '"""phdoc_static"""'], {}), "(source_path, 'phdoc_static')\n", (401, 430), False, 'import os\n'), ((452, 493), 'os.path.join', 'os.path.join', (['source_path', '"""phdoc_static"""'], {}), "(source_path, 'phdoc_static')\n", (464, 493), False, 'import os\n'), ((503, 536), 'os.path.exists', 'os.path.exists', (['templates_path[0]'], {}), '(templates_path[0])\n', (517, 536), False, 'import os\n'), ((222, 250), 'sys.path.append', 'sys.path.append', (['source_path'], {}), '(source_path)\n', (237, 250), False, 'import sys\n'), ((134, 157), 'os.path.split', 'os.path.split', (['__file__'], {}), '(__file__)\n', (147, 157), False, 'import os\n')] |
lkilcher/dolfyn-light | dolfyn/adv/api.py | 416bf6aa8a3455cebf973f416c9e4ba89a801a71 | """
This module contains routines for reading and working with adv
data. It contains:
+-----------------------------------+-----------------------------------------+
| Name | Description |
+===================================+=========================================+
| :func:`~dolfyn.adv.base.load` | A function for loading ADV data in |
| | DOLfYN format. |
+-----------------------------------+-----------------------------------------+
| :func:`~dolfyn.adv.base.mmload` | A function for loading ADV data in |
| | DOLfYN format (as memory mapped arrays).|
+-----------------------------------+-----------------------------------------+
| :func:`~dolfyn.io.nortek.\ | A function for reading Nortek Vector |
| read_nortek` | files. |
+-----------------------------------+-----------------------------------------+
| :mod:`rotate <dolfyn.adv.rotate>` | A module containing classes and |
| | functions for rotating adv data between |
| | different coordinate systems |
+-----------------------------------+-----------------------------------------+
| :mod:`motion <dolfyn.adv.motion>` | A module containing classes and         |
| | functions for performing motion |
| | correction. |
+-----------------------------------+-----------------------------------------+
| :class:`~dolfyn.\ | A class for breaking ADV data into |
| adv.turbulence.TurbBinner` | 'bins', averaging it and estimating |
| | various turbulence statistics. |
+-----------------------------------+-----------------------------------------+
Examples
--------
.. literalinclude:: ../examples/adv_example01.py
"""
from .base import load, mmload
from .turbulence import TurbBinner
from . import clean
from ..io.nortek import read_nortek
from . import rotate
from . import motion
| [] |
ShahriarDhruvo/WebTech_Assignment2 | server/api/migrations/0002_auto_20201011_1053.py | 845d198a91b1dcc8ed149362499754167fca419d | # Generated by Django 3.1.2 on 2020-10-11 10:53
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='task',
name='author',
field=models.CharField(default='Anonymous', max_length=100),
),
migrations.AlterField(
model_name='task',
name='deadline',
field=models.DateTimeField(default='2020-10-11 10:53'),
),
]
| [((319, 372), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""Anonymous"""', 'max_length': '(100)'}), "(default='Anonymous', max_length=100)\n", (335, 372), False, 'from django.db import migrations, models\n'), ((494, 542), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': '"""2020-10-11 10:53"""'}), "(default='2020-10-11 10:53')\n", (514, 542), False, 'from django.db import migrations, models\n')] |
lisy09/flink-ai-extended | flink-ai-flow/lib/notification_service/notification_service/mongo_event_storage.py | 011a5a332f7641f66086653e715d0596eab2e107 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import time
import socket
from collections.abc import Iterable
from typing import Union, Tuple
from mongoengine import connect
from notification_service.event_storage import BaseEventStorage
from notification_service.base_notification import BaseEvent
from notification_service.mongo_notification import MongoEvent
class MongoEventStorage(BaseEventStorage):
def __init__(self, *args, **kwargs):
self.db_conn = self.setup_connection(**kwargs)
self.server_ip = socket.gethostbyname(socket.gethostname())
def setup_connection(self, **kwargs):
db_conf = {
"host": kwargs.get("host"),
"port": kwargs.get("port"),
"db": kwargs.get("db"),
}
username = kwargs.get("username", None)
password = kwargs.get("password", None)
authentication_source = kwargs.get("authentication_source", "admin")
if (username or password) and not (username and password):
raise Exception("Please provide valid username and password")
if username and password:
db_conf.update({
"username": username,
"password": password,
"authentication_source": authentication_source
})
return connect(**db_conf)
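    # Hedged usage sketch (connection values are hypothetical; the keyword
    # names are the ones read by setup_connection above):
    #
    #     storage = MongoEventStorage(host="127.0.0.1", port=27017, db="notification",
    #                                 username="user", password="pass")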
def get_latest_version(self, key: str, namespace: str = None):
mongo_events = MongoEvent.get_by_key(key, 0, 1, "-version")
if not mongo_events:
return 0
return mongo_events[0].version
def add_event(self, event: BaseEvent, uuid: str):
kwargs = {
"server_ip": self.server_ip,
"create_time": int(time.time() * 1000),
"event_type": event.event_type,
"key": event.key,
"value": event.value,
"context": event.context,
"namespace": event.namespace,
"sender": event.sender,
"uuid": uuid
}
mongo_event = MongoEvent(**kwargs)
mongo_event.save()
mongo_event.reload()
event.create_time = mongo_event.create_time
event.version = mongo_event.version
return event
def list_events(self,
key: Union[str, Tuple[str]],
version: int = None,
event_type: str = None,
start_time: int = None,
namespace: str = None,
sender: str = None):
key = None if key == "" else key
version = None if version == 0 else version
event_type = None if event_type == "" else event_type
namespace = None if namespace == "" else namespace
sender = None if sender == "" else sender
if isinstance(key, str):
key = (key,)
elif isinstance(key, Iterable):
key = tuple(key)
res = MongoEvent.get_base_events(key, version, event_type, start_time, namespace, sender)
return res
def list_all_events(self, start_time: int):
res = MongoEvent.get_base_events_by_time(start_time)
return res
def list_all_events_from_version(self, start_version: int, end_version: int = None):
res = MongoEvent.get_base_events_by_version(start_version, end_version)
return res
def clean_up(self):
MongoEvent.delete_by_client(self.server_ip)
| [((2045, 2063), 'mongoengine.connect', 'connect', ([], {}), '(**db_conf)\n', (2052, 2063), False, 'from mongoengine import connect\n'), ((2155, 2199), 'notification_service.mongo_notification.MongoEvent.get_by_key', 'MongoEvent.get_by_key', (['key', '(0)', '(1)', '"""-version"""'], {}), "(key, 0, 1, '-version')\n", (2176, 2199), False, 'from notification_service.mongo_notification import MongoEvent\n'), ((2737, 2757), 'notification_service.mongo_notification.MongoEvent', 'MongoEvent', ([], {}), '(**kwargs)\n', (2747, 2757), False, 'from notification_service.mongo_notification import MongoEvent\n'), ((3625, 3712), 'notification_service.mongo_notification.MongoEvent.get_base_events', 'MongoEvent.get_base_events', (['key', 'version', 'event_type', 'start_time', 'namespace', 'sender'], {}), '(key, version, event_type, start_time, namespace,\n sender)\n', (3651, 3712), False, 'from notification_service.mongo_notification import MongoEvent\n'), ((3791, 3837), 'notification_service.mongo_notification.MongoEvent.get_base_events_by_time', 'MongoEvent.get_base_events_by_time', (['start_time'], {}), '(start_time)\n', (3825, 3837), False, 'from notification_service.mongo_notification import MongoEvent\n'), ((3961, 4026), 'notification_service.mongo_notification.MongoEvent.get_base_events_by_version', 'MongoEvent.get_base_events_by_version', (['start_version', 'end_version'], {}), '(start_version, end_version)\n', (3998, 4026), False, 'from notification_service.mongo_notification import MongoEvent\n'), ((4079, 4122), 'notification_service.mongo_notification.MongoEvent.delete_by_client', 'MongoEvent.delete_by_client', (['self.server_ip'], {}), '(self.server_ip)\n', (4106, 4122), False, 'from notification_service.mongo_notification import MongoEvent\n'), ((1288, 1308), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (1306, 1308), False, 'import socket\n'), ((2435, 2446), 'time.time', 'time.time', ([], {}), '()\n', (2444, 2446), False, 'import time\n')] |
tahigash/unicon.plugins | src/unicon/plugins/confd/csp/__init__.py | 1b43a5a61244ea9312387fd855442ace37c65db9 | __author__ = "Dave Wapstra <[email protected]>"
from unicon.plugins.confd import ConfdServiceList, ConfdConnection, ConfdConnectionProvider
from .statemachine import CspStateMachine
from .settings import CspSettings
from . import service_implementation as csp_svc
class CspServiceList(ConfdServiceList):
def __init__(self):
super().__init__()
delattr(self, 'cli_style')
self.reload = csp_svc.Reload
class CspSingleRPConnection(ConfdConnection):
os = 'confd'
series = 'csp'
chassis_type = 'single_rp'
state_machine_class = CspStateMachine
connection_provider_class = ConfdConnectionProvider
subcommand_list = CspServiceList
settings = CspSettings()
| [] |
rrwen/search_google | setup.py | e647868ba5da2803e787a3c06b32e09452068736 | # -*- coding: utf-8 -*-
from setuptools import setup
import search_google as package
def readme():
with open('README.rst') as f:
return ''.join(f.readlines()[11:])
setup(
name=package.__name__,
version=package.__version__,
description=package.__description__,
long_description=readme(),
author=package.__author__,
author_email=package.__email__,
license=package.__license__,
url=package.__url__,
download_url=package.__download_url__,
  keywords=package.__keywords__,
entry_points=package.__entry_points__,
packages=package.__packages__,
package_data=package.__package_data__,
install_requires=package.__install_requires__
)
| [] |
chearon/macpack | setup.py | 1cf6ce453dd33a811343e4bb6ee5575bc9fe919d | import setuptools
import os
try:
import pypandoc
description = pypandoc.convert('README.md', 'rst') if os.path.exists('README.md') else ''
except ImportError:
description = ''
setuptools.setup(
name = 'macpack',
packages = setuptools.find_packages(),
version = '1.0.3',
description = 'Makes a macOS binary redistributable by searching the dependency tree and copying/patching non-system libraries.',
long_description = description,
author = 'Caleb Hearon',
author_email = '[email protected]',
url = 'https://github.com/chearon/macpack',
download_url = 'https://github.com/chearon/macpack/tarball/v1.0.3',
keywords = ['macos', 'bundle', 'package', 'redistribute', 'redistributable', 'install_name_tool', 'otool', 'mach'],
classifiers = [],
entry_points = {
'console_scripts': ['macpack=macpack.patcher:main'],
}
)
| [((108, 135), 'os.path.exists', 'os.path.exists', (['"""README.md"""'], {}), "('README.md')\n", (122, 135), False, 'import os\n'), ((68, 104), 'pypandoc.convert', 'pypandoc.convert', (['"""README.md"""', '"""rst"""'], {}), "('README.md', 'rst')\n", (84, 104), False, 'import pypandoc\n'), ((235, 261), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (259, 261), False, 'import setuptools\n')] |
tizon9804/SS2017 | WEEK2/day5/scripts/06_NB_Challenges_Isolines.py | 7cb374ad21cdfeeef223ac4a65cbbf40dab22e06 | import vtk
# Read the file (to test that it was written correctly)
reader = vtk.vtkXMLImageDataReader()
reader.SetFileName("../data/wind_image.vti")
reader.Update()
print(reader.GetOutput())
# Convert the image to a polydata
imageDataGeometryFilter = vtk.vtkImageDataGeometryFilter()
imageDataGeometryFilter.SetInputConnection(reader.GetOutputPort())
imageDataGeometryFilter.Update()
scalarRange = reader.GetOutput().GetPointData().GetScalars().GetRange(-1)
contoursFilter = vtk.vtkContourFilter()
contoursFilter.SetInputConnection(imageDataGeometryFilter.GetOutputPort())
contoursFilter.GenerateValues(60, scalarRange)
contoursMapper = vtk.vtkPolyDataMapper()
contoursMapper.SetInputConnection(contoursFilter.GetOutputPort())
contoursMapper.SetColorModeToMapScalars()
contoursMapper.ScalarVisibilityOn()
contoursMapper.SelectColorArray("JPEGImage")
contoursMapper.SetScalarRange(scalarRange)
contoursActor = vtk.vtkActor()
contoursActor.SetMapper(contoursMapper)
actor = vtk.vtkActor()
actor.SetMapper(contoursMapper)
# Setup rendering
renderer = vtk.vtkRenderer()
renderer.AddActor(actor)
renderer.SetBackground(1,1,1)
renderer.ResetCamera()
renderWindow = vtk.vtkRenderWindow()
renderWindow.AddRenderer(renderer)
renderWindowInteractor = vtk.vtkRenderWindowInteractor()
renderWindowInteractor.SetRenderWindow(renderWindow)
renderWindowInteractor.Start()
| [((77, 104), 'vtk.vtkXMLImageDataReader', 'vtk.vtkXMLImageDataReader', ([], {}), '()\n', (102, 104), False, 'import vtk\n'), ((252, 284), 'vtk.vtkImageDataGeometryFilter', 'vtk.vtkImageDataGeometryFilter', ([], {}), '()\n', (282, 284), False, 'import vtk\n'), ((477, 499), 'vtk.vtkContourFilter', 'vtk.vtkContourFilter', ([], {}), '()\n', (497, 499), False, 'import vtk\n'), ((640, 663), 'vtk.vtkPolyDataMapper', 'vtk.vtkPolyDataMapper', ([], {}), '()\n', (661, 663), False, 'import vtk\n'), ((913, 927), 'vtk.vtkActor', 'vtk.vtkActor', ([], {}), '()\n', (925, 927), False, 'import vtk\n'), ((977, 991), 'vtk.vtkActor', 'vtk.vtkActor', ([], {}), '()\n', (989, 991), False, 'import vtk\n'), ((1055, 1072), 'vtk.vtkRenderer', 'vtk.vtkRenderer', ([], {}), '()\n', (1070, 1072), False, 'import vtk\n'), ((1168, 1189), 'vtk.vtkRenderWindow', 'vtk.vtkRenderWindow', ([], {}), '()\n', (1187, 1189), False, 'import vtk\n'), ((1252, 1283), 'vtk.vtkRenderWindowInteractor', 'vtk.vtkRenderWindowInteractor', ([], {}), '()\n', (1281, 1283), False, 'import vtk\n')] |
hbutsuak95/iv_rl | mbbl_envs/mbbl/env/gym_env/invertedPendulum.py | 0f72a8f077a238237027ea96b7d1160c35ac9959 | """
# -----------------------------------------------------------------------------
# @brief:
# Tingwu: reset the reward function so that it's more similar to the one
# defined in GYM
# -----------------------------------------------------------------------------
"""
import numpy as np
from mbbl.config import init_path
from mbbl.env import base_env_wrapper as bew
from mbbl.env import env_register
from mbbl.env import env_util
from mbbl.util.common import logger
class env(bew.base_env):
# acrobot has applied sin/cos obs
PENDULUM = ['gym_invertedPendulum']
def __init__(self, env_name, rand_seed, misc_info):
super(env, self).__init__(env_name, rand_seed, misc_info)
self._base_path = init_path.get_abs_base_dir()
self._len_qpos, self._len_qvel = \
env_util.get_gym_q_info(self._env, self._current_version)
# return the reset as the gym?
if 'reset_type' in misc_info and misc_info['reset_type'] == 'gym':
self._reset_return_obs_only = True
self.observation_space, self.action_space = \
self._env.observation_space, self._env.action_space
# it's possible some environments have different obs
self.observation_space = \
env_util.box(self._env_info['ob_size'], -1, 1)
else:
self._reset_return_obs_only = False
def step(self, action):
_, _, _, info = self._env.step(action)
ob = self._get_observation()
# get the reward
reward = self.reward(
{'end_state': ob, 'start_state': self._old_ob, 'action': action}
)
# from mbbl.util.common.fpdb import fpdb; fpdb().set_trace()
# get the end signal
self._current_step += 1
info['current_step'] = self._current_step
if self._current_step > self._env_info['max_length']:
done = True
else:
done = False # will raise warnings -> set logger flag to ignore
self._old_ob = np.array(ob)
return ob, reward, done, info
def reset(self, control_info={}):
self._current_step = 0
self._env.reset()
# the following is a hack, there is some precision issue in mujoco_py
self._old_ob = self._get_observation()
self._env.reset()
self.set_state({'start_state': self._old_ob.copy()})
self._old_ob = self._get_observation()
if self._reset_return_obs_only:
return self._old_ob.copy()
else:
return self._old_ob.copy(), 0.0, False, {}
def _get_observation(self):
if self._current_version in ['0.7.4', '0.9.4']:
qpos = self._env.env.data.qpos
qvel = self._env.env.data.qvel
else:
qpos = self._env.env.sim.data.qpos
qvel = self._env.env.sim.data.qvel
"""
if self._env_name == 'gym_doublePendulum':
if self._current_version in ['0.7.4', '0.9.4']:
site_xpos = self._env.env.data.site_xpos[:, [0, 2]]
else:
site_xpos = self._env.env.sim.data.site_xpos[:, [0, 2]]
site_xpos = np.transpose(site_xpos)
return np.concatenate([qpos, qvel, site_xpos]).ravel()
else:
"""
assert self._env_name == 'gym_invertedPendulum'
return np.concatenate([qpos, qvel]).ravel()
def _build_env(self):
import gym
self._current_version = gym.__version__
if self._current_version in ['0.7.4', '0.9.4']:
_env_name = {
'gym_invertedPendulum': 'InvertedPendulum-v1',
}
elif self._current_version == NotImplementedError:
# TODO: other gym versions here
_env_name = {
'gym_invertedPendulum': 'InvertedPendulum-v2',
}
else:
raise ValueError("Invalid gym-{}".format(self._current_version))
# make the environments
self._env_info = env_register.get_env_info(self._env_name)
self._env_name = self._env_name.split('-')[0]
self._env = gym.make(_env_name[self._env_name])
def _set_groundtruth_api(self):
""" @brief:
In this function, we could provide the ground-truth dynamics
and rewards APIs for the agent to call.
For the new environments, if we don't set their ground-truth
apis, then we cannot test the algorithm using ground-truth
dynamics or reward
"""
self._set_reward_api()
self._set_dynamics_api()
def _set_dynamics_api(self):
def set_state(data_dict):
qpos = np.zeros([self._len_qpos])
qvel = np.zeros([self._len_qvel])
qpos[:] = data_dict['start_state'][:self._len_qpos]
qvel[:] = data_dict['start_state'][
self._len_qpos: self._len_qpos + self._len_qvel
]
# reset the state
if self._current_version in ['0.7.4', '0.9.4']:
self._env.env.data.qpos = qpos.reshape([-1, 1])
self._env.env.data.qvel = qvel.reshape([-1, 1])
else:
self._env.env.sim.data.qpos = qpos.reshape([-1])
                self._env.env.sim.data.qvel = qvel.reshape([-1])
self._env.env.model._compute_subtree() # pylint: disable=W0212
self._env.env.model.forward()
self._old_ob = self._get_observation()
self.set_state = set_state
def fdynamics(data_dict):
# make sure reset is called before using self.fynamics()
self.set_state(data_dict)
return self.step(data_dict['action'])[0]
self.fdynamics = fdynamics
def _set_reward_api(self):
"""
def _step(self, a):
reward = 1.0
self.do_simulation(a, self.frame_skip)
ob = self._get_obs()
notdone = np.isfinite(ob).all() and (np.abs(ob[1]) <= .2)
done = not notdone
self.do_simulation(action, self.frame_skip)
ob = self._get_obs()
x, _, y = self.model.data.site_xpos[0]
dist_penalty = 0.01 * x ** 2 + (y - 2) ** 2
v1, v2 = self.model.data.qvel[1:3]
vel_penalty = 1e-3 * v1**2 + 5e-3 * v2**2
alive_bonus = 10
r = (alive_bonus - dist_penalty - vel_penalty)[0]
done = bool(y <= 1)
return ob, r, done, {}
reward:
@xpos_penalty: x ** 2
@ypos_penalty: (y - 2) ** 2
pendulum: (slide, hinge)
qpos: 2 (0, 1)
qvel: 2 (2, 3)
double_pendulum: (slide, hinge, hinge)
qpos: 3 (0, 1, 2)
qvel: 3 (3, 4, 5)
site_pose: 2 (6, 7)
"""
# step 1, set the zero-order reward function
assert self._env_name in self.PENDULUM
"""
xpos_ob_pos = \
{'gym_pendulum': 0, 'gym_doublePendulum': 6}[self._env_name]
ypos_ob_pos = \
{'gym_pendulum': 1, 'gym_doublePendulum': 7}[self._env_name]
ypos_target = \
{'gym_pendulum': 0.0, 'gym_doublePendulum': 2}[self._env_name]
xpos_coeff = \
{'gym_pendulum': 0.0, 'gym_doublePendulum': 0.01}[self._env_name]
"""
xpos_ob_pos = 0
ypos_ob_pos = 1
ypos_target = 0.0
xpos_coeff = 0.0
def reward(data_dict):
# xpos penalty
xpos = data_dict['start_state'][xpos_ob_pos]
xpos_reward = -(xpos ** 2) * xpos_coeff
# ypos penalty
ypos = data_dict['start_state'][ypos_ob_pos]
ypos_reward = -(ypos - ypos_target) ** 2
return xpos_reward + ypos_reward
self.reward = reward
def reward_derivative(data_dict, target):
num_data = len(data_dict['start_state'])
if target == 'state':
derivative_data = np.zeros(
[num_data, self._env_info['ob_size']], dtype=np.float
)
# the xpos reward part
derivative_data[:, xpos_ob_pos] += - 2.0 * xpos_coeff * \
(data_dict['start_state'][:, xpos_ob_pos])
# the ypos reward part
derivative_data[:, ypos_ob_pos] += - 2.0 * \
(data_dict['start_state'][:, ypos_ob_pos] - ypos_target)
elif target == 'action':
derivative_data = np.zeros(
[num_data, self._env_info['action_size']], dtype=np.float
)
elif target == 'state-state':
derivative_data = np.zeros(
[num_data,
self._env_info['ob_size'], self._env_info['ob_size']],
dtype=np.float
)
# the xpos reward
derivative_data[:, xpos_ob_pos, xpos_ob_pos] += \
- 2.0 * xpos_coeff
# the ypos reward
derivative_data[:, ypos_ob_pos, ypos_ob_pos] += \
- 2.0
elif target == 'action-state':
derivative_data = np.zeros(
[num_data, self._env_info['action_size'],
self._env_info['ob_size']],
dtype=np.float
)
elif target == 'state-action':
derivative_data = np.zeros(
[num_data, self._env_info['ob_size'],
self._env_info['action_size']],
dtype=np.float
)
elif target == 'action-action':
derivative_data = np.zeros(
[num_data, self._env_info['action_size'],
self._env_info['action_size']],
dtype=np.float
)
else:
assert False, logger.error('Invalid target {}'.format(target))
return derivative_data
self.reward_derivative = reward_derivative
def render(self, *args, **kwargs):
return
if __name__ == '__main__':
# test_env_name = ['gym_doublePendulum']
test_env_name = ['gym_invertedPendulum']
for env_name in test_env_name:
test_env = env(env_name, 1234, {})
api_env = env(env_name, 1234, {})
api_env.reset()
ob, reward, _, _ = test_env.reset()
for _ in range(100):
action = np.random.uniform(-1, 1, test_env._env.action_space.shape)
new_ob, reward, _, _ = test_env.step(action)
# test the reward api
reward_from_api = \
api_env.reward({'start_state': ob, 'action': action})
reward_error = np.sum(np.abs(reward_from_api - reward))
# test the dynamics api
newob_from_api = \
api_env.fdynamics({'start_state': ob, 'action': action})
ob_error = np.sum(np.abs(newob_from_api - new_ob))
ob = new_ob
print('reward error: {}, dynamics error: {}'.format(
reward_error, ob_error)
)
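        # Optional extra check of the analytic gradient exposed by
        # _set_reward_api(): compare reward_derivative() against a central
        # finite-difference estimate of reward(). A minimal sketch, assuming
        # the observation length equals env_info['ob_size'] (true for
        # gym_invertedPendulum, whose observation is [qpos, qvel]).
        eps = 1e-4
        analytic = api_env.reward_derivative(
            {'start_state': ob[None, :], 'action': action[None, :]},
            'state'
        )[0]
        numeric = np.zeros_like(analytic)
        for i in range(len(analytic)):
            ob_hi, ob_lo = ob.copy(), ob.copy()
            ob_hi[i] += eps
            ob_lo[i] -= eps
            numeric[i] = (
                api_env.reward({'start_state': ob_hi, 'action': action}) -
                api_env.reward({'start_state': ob_lo, 'action': action})
            ) / (2.0 * eps)
        print('reward derivative error: {}'.format(
            np.max(np.abs(analytic - numeric)))
        )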
| [((735, 763), 'mbbl.config.init_path.get_abs_base_dir', 'init_path.get_abs_base_dir', ([], {}), '()\n', (761, 763), False, 'from mbbl.config import init_path\n'), ((820, 877), 'mbbl.env.env_util.get_gym_q_info', 'env_util.get_gym_q_info', (['self._env', 'self._current_version'], {}), '(self._env, self._current_version)\n', (843, 877), False, 'from mbbl.env import env_util\n'), ((2032, 2044), 'numpy.array', 'np.array', (['ob'], {}), '(ob)\n', (2040, 2044), True, 'import numpy as np\n'), ((4011, 4052), 'mbbl.env.env_register.get_env_info', 'env_register.get_env_info', (['self._env_name'], {}), '(self._env_name)\n', (4036, 4052), False, 'from mbbl.env import env_register\n'), ((4127, 4162), 'gym.make', 'gym.make', (['_env_name[self._env_name]'], {}), '(_env_name[self._env_name])\n', (4135, 4162), False, 'import gym\n'), ((1286, 1332), 'mbbl.env.env_util.box', 'env_util.box', (["self._env_info['ob_size']", '(-1)', '(1)'], {}), "(self._env_info['ob_size'], -1, 1)\n", (1298, 1332), False, 'from mbbl.env import env_util\n'), ((4704, 4730), 'numpy.zeros', 'np.zeros', (['[self._len_qpos]'], {}), '([self._len_qpos])\n', (4712, 4730), True, 'import numpy as np\n'), ((4750, 4776), 'numpy.zeros', 'np.zeros', (['[self._len_qvel]'], {}), '([self._len_qvel])\n', (4758, 4776), True, 'import numpy as np\n'), ((10633, 10691), 'numpy.random.uniform', 'np.random.uniform', (['(-1)', '(1)', 'test_env._env.action_space.shape'], {}), '(-1, 1, test_env._env.action_space.shape)\n', (10650, 10691), True, 'import numpy as np\n'), ((3365, 3393), 'numpy.concatenate', 'np.concatenate', (['[qpos, qvel]'], {}), '([qpos, qvel])\n', (3379, 3393), True, 'import numpy as np\n'), ((8120, 8183), 'numpy.zeros', 'np.zeros', (["[num_data, self._env_info['ob_size']]"], {'dtype': 'np.float'}), "([num_data, self._env_info['ob_size']], dtype=np.float)\n", (8128, 8183), True, 'import numpy as np\n'), ((10920, 10952), 'numpy.abs', 'np.abs', (['(reward_from_api - reward)'], {}), '(reward_from_api - reward)\n', (10926, 10952), True, 'import numpy as np\n'), ((11125, 11156), 'numpy.abs', 'np.abs', (['(newob_from_api - new_ob)'], {}), '(newob_from_api - new_ob)\n', (11131, 11156), True, 'import numpy as np\n'), ((8649, 8716), 'numpy.zeros', 'np.zeros', (["[num_data, self._env_info['action_size']]"], {'dtype': 'np.float'}), "([num_data, self._env_info['action_size']], dtype=np.float)\n", (8657, 8716), True, 'import numpy as np\n'), ((8832, 8926), 'numpy.zeros', 'np.zeros', (["[num_data, self._env_info['ob_size'], self._env_info['ob_size']]"], {'dtype': 'np.float'}), "([num_data, self._env_info['ob_size'], self._env_info['ob_size']],\n dtype=np.float)\n", (8840, 8926), True, 'import numpy as np\n'), ((9347, 9446), 'numpy.zeros', 'np.zeros', (["[num_data, self._env_info['action_size'], self._env_info['ob_size']]"], {'dtype': 'np.float'}), "([num_data, self._env_info['action_size'], self._env_info['ob_size'\n ]], dtype=np.float)\n", (9355, 9446), True, 'import numpy as np\n'), ((9598, 9697), 'numpy.zeros', 'np.zeros', (["[num_data, self._env_info['ob_size'], self._env_info['action_size']]"], {'dtype': 'np.float'}), "([num_data, self._env_info['ob_size'], self._env_info['action_size'\n ]], dtype=np.float)\n", (9606, 9697), True, 'import numpy as np\n'), ((9851, 9954), 'numpy.zeros', 'np.zeros', (["[num_data, self._env_info['action_size'], self._env_info['action_size']]"], {'dtype': 'np.float'}), "([num_data, self._env_info['action_size'], self._env_info[\n 'action_size']], dtype=np.float)\n", (9859, 9954), True, 'import numpy as np\n')] |
mauriziokovacic/ACME | ACME/visdom/__init__.py | 2615b66dd4addfd5c03d9d91a24c7da414294308 | from .bar import *
from .create_session import *
from .image import *
from .line import *
from .mesh import *
from .pie import *
from .text import *
from .VisdomFigure import *
from .VisdomScene import *
| [] |
andrewdyersmith/pingpongpi | web/pingpongpiweb.py | 63e969468da24b2d00e86033dfcb22de75f264bc | # Ping Pong Pi web UI running on flask.
# Uses zmq to speak to daemon controlling screen.
from flask import Flask, render_template, appcontext_tearing_down, request
from multiprocessing import Process, Queue
from multiprocessing.connection import Client
import atexit
import time
import zmq
app = Flask(__name__)
@app.route('/')
def index():
return render_template('index.html')
MODE="mode"
@app.route('/mode/<name>', methods=['POST'])
def mode(name):
text = request.args.get("val", default="", type=str)
message_queue.put([MODE,name,text])
return "\"OK\""
message_queue = Queue()
message_process = None
def message_loop(message_queue):
print("Starting message loop")
context = zmq.Context()
while True:
try:
socket = context.socket(zmq.REQ)
socket.connect("tcp://localhost:5555")
print("Connected to daemon")
while True:
msg = message_queue.get()
print("Sending ", msg)
socket.send_json(msg)
socket.recv()
except Exception as ex:
print(ex)
time.sleep(5)
def stop_message_loop():
print("Terminating")
if message_process:
message_process.terminate()
atexit.register(stop_message_loop)
@app.before_first_request
def setup_ipc():
global message_process
message_process = Process(target=message_loop, args=(message_queue,))
message_process.start()
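def example_daemon_loop():
  """Illustrative sketch of the other end of the zmq link used above.
  The real screen daemon is a separate program outside this file; this
  placeholder only mirrors what message_loop() implies: a REP socket bound
  on port 5555 that answers each JSON [kind, name, text] message so the
  web UI's send/recv cycle completes."""
  context = zmq.Context()
  socket = context.socket(zmq.REP)
  socket.bind("tcp://*:5555")
  while True:
    kind, name, text = socket.recv_json()
    print("daemon received:", kind, name, text)
    socket.send(b"OK")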
if __name__ == '__main__':
app.run(debug=True, host='0.0.0.0')
| [((299, 314), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (304, 314), False, 'from flask import Flask, render_template, appcontext_tearing_down, request\n'), ((588, 595), 'multiprocessing.Queue', 'Queue', ([], {}), '()\n', (593, 595), False, 'from multiprocessing import Process, Queue\n'), ((1156, 1190), 'atexit.register', 'atexit.register', (['stop_message_loop'], {}), '(stop_message_loop)\n', (1171, 1190), False, 'import atexit\n'), ((355, 384), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (370, 384), False, 'from flask import Flask, render_template, appcontext_tearing_down, request\n'), ((469, 514), 'flask.request.args.get', 'request.args.get', (['"""val"""'], {'default': '""""""', 'type': 'str'}), "('val', default='', type=str)\n", (485, 514), False, 'from flask import Flask, render_template, appcontext_tearing_down, request\n'), ((698, 711), 'zmq.Context', 'zmq.Context', ([], {}), '()\n', (709, 711), False, 'import zmq\n'), ((1281, 1332), 'multiprocessing.Process', 'Process', ([], {'target': 'message_loop', 'args': '(message_queue,)'}), '(target=message_loop, args=(message_queue,))\n', (1288, 1332), False, 'from multiprocessing import Process, Queue\n'), ((1037, 1050), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (1047, 1050), False, 'import time\n')] |
ajaytikoo/watcher | watcher/tests/decision_engine/strategy/strategies/test_base.py | 6dbac1f6ae7f3e10dfdcef5721fa4af7af54e159 | # -*- encoding: utf-8 -*-
# Copyright (c) 2019 European Organization for Nuclear Research (CERN)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import mock
from watcher.common import exception
from watcher.decision_engine.datasources import manager
from watcher.decision_engine.model import model_root
from watcher.decision_engine.strategy import strategies
from watcher.tests import base
from watcher.tests.decision_engine.model import faker_cluster_state
class TestBaseStrategy(base.TestCase):
def setUp(self):
super(TestBaseStrategy, self).setUp()
# fake cluster
self.fake_c_cluster = faker_cluster_state.FakerModelCollector()
p_c_model = mock.patch.object(
strategies.BaseStrategy, "compute_model",
new_callable=mock.PropertyMock)
self.m_c_model = p_c_model.start()
self.addCleanup(p_c_model.stop)
p_audit_scope = mock.patch.object(
strategies.BaseStrategy, "audit_scope",
new_callable=mock.PropertyMock)
self.m_audit_scope = p_audit_scope.start()
self.addCleanup(p_audit_scope.stop)
self.m_audit_scope.return_value = mock.Mock()
self.m_c_model.return_value = model_root.ModelRoot()
self.strategy = strategies.DummyStrategy(config=mock.Mock())
class TestBaseStrategyDatasource(TestBaseStrategy):
def setUp(self):
super(TestBaseStrategyDatasource, self).setUp()
self.strategy = strategies.DummyStrategy(
config=mock.Mock(datasources=None))
@mock.patch.object(strategies.BaseStrategy, 'osc', None)
@mock.patch.object(manager, 'DataSourceManager')
@mock.patch.object(strategies.base, 'CONF')
def test_global_preference(self, m_conf, m_manager):
"""Test if the global preference is used"""
m_conf.watcher_datasources.datasources = \
['gnocchi', 'monasca', 'ceilometer']
# Make sure we access the property and not the underlying function.
m_manager.return_value.get_backend.return_value = \
mock.NonCallableMock()
# Access the property so that the configuration is read in order to
# get the correct datasource
self.strategy.datasource_backend
m_manager.assert_called_once_with(
config=m_conf.watcher_datasources, osc=None)
@mock.patch.object(strategies.BaseStrategy, 'osc', None)
@mock.patch.object(manager, 'DataSourceManager')
@mock.patch.object(strategies.base, 'CONF')
def test_global_preference_reverse(self, m_conf, m_manager):
"""Test if the global preference is used with another order"""
m_conf.watcher_datasources.datasources = \
['ceilometer', 'monasca', 'gnocchi']
# Make sure we access the property and not the underlying function.
m_manager.return_value.get_backend.return_value = \
mock.NonCallableMock()
# Access the property so that the configuration is read in order to
# get the correct datasource
self.strategy.datasource_backend
m_manager.assert_called_once_with(
config=m_conf.watcher_datasources, osc=None)
@mock.patch.object(strategies.BaseStrategy, 'osc', None)
@mock.patch.object(manager, 'DataSourceManager')
@mock.patch.object(strategies.base, 'CONF')
def test_strategy_preference_override(self, m_conf, m_manager):
"""Test if the global preference can be overridden"""
datasources = mock.Mock(datasources=['ceilometer'])
self.strategy = strategies.DummyStrategy(
config=datasources)
m_conf.watcher_datasources.datasources = \
['ceilometer', 'monasca', 'gnocchi']
# Access the property so that the configuration is read in order to
# get the correct datasource
self.strategy.datasource_backend
m_manager.assert_called_once_with(
config=datasources, osc=None)
class TestBaseStrategyException(TestBaseStrategy):
def setUp(self):
super(TestBaseStrategyException, self).setUp()
def test_exception_model(self):
self.m_c_model.return_value = None
self.assertRaises(
exception.ClusterStateNotDefined, self.strategy.execute)
def test_exception_stale_cdm(self):
self.fake_c_cluster.set_cluster_data_model_as_stale()
self.m_c_model.return_value = self.fake_c_cluster.cluster_data_model
self.assertRaises(
# TODO(Dantali0n) This should return ClusterStale,
# improve set_cluster_data_model_as_stale().
exception.ClusterStateNotDefined,
self.strategy.execute)
| [((2053, 2108), 'unittest.mock.patch.object', 'mock.patch.object', (['strategies.BaseStrategy', '"""osc"""', 'None'], {}), "(strategies.BaseStrategy, 'osc', None)\n", (2070, 2108), False, 'from unittest import mock\n'), ((2114, 2161), 'unittest.mock.patch.object', 'mock.patch.object', (['manager', '"""DataSourceManager"""'], {}), "(manager, 'DataSourceManager')\n", (2131, 2161), False, 'from unittest import mock\n'), ((2167, 2209), 'unittest.mock.patch.object', 'mock.patch.object', (['strategies.base', '"""CONF"""'], {}), "(strategies.base, 'CONF')\n", (2184, 2209), False, 'from unittest import mock\n'), ((2854, 2909), 'unittest.mock.patch.object', 'mock.patch.object', (['strategies.BaseStrategy', '"""osc"""', 'None'], {}), "(strategies.BaseStrategy, 'osc', None)\n", (2871, 2909), False, 'from unittest import mock\n'), ((2915, 2962), 'unittest.mock.patch.object', 'mock.patch.object', (['manager', '"""DataSourceManager"""'], {}), "(manager, 'DataSourceManager')\n", (2932, 2962), False, 'from unittest import mock\n'), ((2968, 3010), 'unittest.mock.patch.object', 'mock.patch.object', (['strategies.base', '"""CONF"""'], {}), "(strategies.base, 'CONF')\n", (2985, 3010), False, 'from unittest import mock\n'), ((3682, 3737), 'unittest.mock.patch.object', 'mock.patch.object', (['strategies.BaseStrategy', '"""osc"""', 'None'], {}), "(strategies.BaseStrategy, 'osc', None)\n", (3699, 3737), False, 'from unittest import mock\n'), ((3743, 3790), 'unittest.mock.patch.object', 'mock.patch.object', (['manager', '"""DataSourceManager"""'], {}), "(manager, 'DataSourceManager')\n", (3760, 3790), False, 'from unittest import mock\n'), ((3796, 3838), 'unittest.mock.patch.object', 'mock.patch.object', (['strategies.base', '"""CONF"""'], {}), "(strategies.base, 'CONF')\n", (3813, 3838), False, 'from unittest import mock\n'), ((1133, 1174), 'watcher.tests.decision_engine.model.faker_cluster_state.FakerModelCollector', 'faker_cluster_state.FakerModelCollector', ([], {}), '()\n', (1172, 1174), False, 'from watcher.tests.decision_engine.model import faker_cluster_state\n'), ((1196, 1292), 'unittest.mock.patch.object', 'mock.patch.object', (['strategies.BaseStrategy', '"""compute_model"""'], {'new_callable': 'mock.PropertyMock'}), "(strategies.BaseStrategy, 'compute_model', new_callable=\n mock.PropertyMock)\n", (1213, 1292), False, 'from unittest import mock\n'), ((1421, 1515), 'unittest.mock.patch.object', 'mock.patch.object', (['strategies.BaseStrategy', '"""audit_scope"""'], {'new_callable': 'mock.PropertyMock'}), "(strategies.BaseStrategy, 'audit_scope', new_callable=mock\n .PropertyMock)\n", (1438, 1515), False, 'from unittest import mock\n'), ((1674, 1685), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (1683, 1685), False, 'from unittest import mock\n'), ((1725, 1747), 'watcher.decision_engine.model.model_root.ModelRoot', 'model_root.ModelRoot', ([], {}), '()\n', (1745, 1747), False, 'from watcher.decision_engine.model import model_root\n'), ((2569, 2591), 'unittest.mock.NonCallableMock', 'mock.NonCallableMock', ([], {}), '()\n', (2589, 2591), False, 'from unittest import mock\n'), ((3397, 3419), 'unittest.mock.NonCallableMock', 'mock.NonCallableMock', ([], {}), '()\n', (3417, 3419), False, 'from unittest import mock\n'), ((3992, 4029), 'unittest.mock.Mock', 'mock.Mock', ([], {'datasources': "['ceilometer']"}), "(datasources=['ceilometer'])\n", (4001, 4029), False, 'from unittest import mock\n'), ((4055, 4099), 'watcher.decision_engine.strategy.strategies.DummyStrategy', 'strategies.DummyStrategy', 
([], {'config': 'datasources'}), '(config=datasources)\n', (4079, 4099), False, 'from watcher.decision_engine.strategy import strategies\n'), ((1804, 1815), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (1813, 1815), False, 'from unittest import mock\n'), ((2018, 2045), 'unittest.mock.Mock', 'mock.Mock', ([], {'datasources': 'None'}), '(datasources=None)\n', (2027, 2045), False, 'from unittest import mock\n')] |
BenG49/sudoku | main.py | e4b14655e23d04c161feb16ceb1338537f519bdb | from sudoku import Sudoku
def main():
s = Sudoku.parse(
'''
-------------
| |2 | |
| | 6 |4 3|
| | 5| 7 |
-------------
| 7 | 2|8 |
|51 | 4|9 |
| 9| 3| |
-------------
| | 9| |
| 2| | 98|
| 83|1 |2 |
-------------
'''
)
print(s)
print(s.solve())
if __name__ == '__main__':
main()
| [((44, 285), 'sudoku.Sudoku.parse', 'Sudoku.parse', (['"""\n\t\t-------------\n\t\t| |2 | |\n\t\t| | 6 |4 3|\n\t\t| | 5| 7 |\n\t\t-------------\n\t\t| 7 | 2|8 |\n\t\t|51 | 4|9 |\n\t\t| 9| 3| |\n\t\t-------------\n\t\t| | 9| |\n\t\t| 2| | 98|\n\t\t| 83|1 |2 |\n\t\t-------------\n\t\t"""'], {}), '(\n """\n\t\t-------------\n\t\t| |2 | |\n\t\t| | 6 |4 3|\n\t\t| | 5| 7 |\n\t\t-------------\n\t\t| 7 | 2|8 |\n\t\t|51 | 4|9 |\n\t\t| 9| 3| |\n\t\t-------------\n\t\t| | 9| |\n\t\t| 2| | 98|\n\t\t| 83|1 |2 |\n\t\t-------------\n\t\t"""\n )\n', (56, 285), False, 'from sudoku import Sudoku\n')] |
pfnet-research/autogbt-alt | test/test_sampler.py | 57f7ae1bce2923d11f73c3631e34be49c7dd25da | import numpy as np
import pandas as pd
from autogbt.sampler import MajorityUnderSampler
def _test_sample(y):
sampler = MajorityUnderSampler()
idx = sampler.sample(y, 40000, 3.0)
assert len(idx) == 40000
assert y[idx].sum() == 10000
def test_sample_with_series():
y = pd.Series(np.concatenate([np.ones((10000)), np.zeros((100000))]))
y = y.sample(frac=1.0)
_test_sample(y)
def test_sample_with_ndarray():
y = np.concatenate([np.ones((10000)), np.zeros((100000))])
_test_sample(y)
def test_sample_for_regression():
y = np.concatenate([
2*np.ones((10000)),
1*np.ones((10000)),
0*np.ones((10000)),
])
sampler = MajorityUnderSampler()
idx = sampler.sample(y, 0.1, 3.0)
assert len(idx) == 3000
| [((125, 147), 'autogbt.sampler.MajorityUnderSampler', 'MajorityUnderSampler', ([], {}), '()\n', (145, 147), False, 'from autogbt.sampler import MajorityUnderSampler\n'), ((687, 709), 'autogbt.sampler.MajorityUnderSampler', 'MajorityUnderSampler', ([], {}), '()\n', (707, 709), False, 'from autogbt.sampler import MajorityUnderSampler\n'), ((462, 476), 'numpy.ones', 'np.ones', (['(10000)'], {}), '(10000)\n', (469, 476), True, 'import numpy as np\n'), ((480, 496), 'numpy.zeros', 'np.zeros', (['(100000)'], {}), '(100000)\n', (488, 496), True, 'import numpy as np\n'), ((317, 331), 'numpy.ones', 'np.ones', (['(10000)'], {}), '(10000)\n', (324, 331), True, 'import numpy as np\n'), ((335, 351), 'numpy.zeros', 'np.zeros', (['(100000)'], {}), '(100000)\n', (343, 351), True, 'import numpy as np\n'), ((592, 606), 'numpy.ones', 'np.ones', (['(10000)'], {}), '(10000)\n', (599, 606), True, 'import numpy as np\n'), ((620, 634), 'numpy.ones', 'np.ones', (['(10000)'], {}), '(10000)\n', (627, 634), True, 'import numpy as np\n'), ((648, 662), 'numpy.ones', 'np.ones', (['(10000)'], {}), '(10000)\n', (655, 662), True, 'import numpy as np\n')] |
leandro-santiago/bloomwisard | experiment/diabetes/accuracy_info.py | 4c02610c4ef2d2cf8424797c8a815da182ca2383 | import numpy as np
import sys
from timeit import default_timer as timer
sys.path.append("../../")
from core import wnn
from encoding import thermometer
from encoding import util
#Load Diabetes data
base_path = "../../dataset/diabetes/"
#2/3 Test
bits_encoding = 20
train_data, train_label, test_data, test_label, data_min, data_max = util.load_3data(base_path)
ths = []
for i in range(len(data_max)):
ths.append(thermometer.Thermometer(data_min[i], data_max[i], bits_encoding))
train_bin = []
test_bin = []
i = 0
for data in train_data:
train_bin.append(np.array([], dtype=bool))
t = 0
for v in data:
binarr = ths[t].binarize(v)
train_bin[i] = np.append(train_bin[i], binarr)
t += 1
i += 1
i = 0
for data in test_data:
test_bin.append(np.array([], dtype=bool))
t = 0
for v in data:
binarr = ths[t].binarize(v)
test_bin[i] = np.append(test_bin[i], binarr)
t += 1
i += 1
#print test_label
#Wisard
num_classes = 2
tuple_list = [2, 4, 8, 14, 16, 18, 20, 22, 24, 26, 28, 30]
acc_list = []
test_length = len(test_label)
entry_size = len(train_bin[0])
#print entry_size
for t in tuple_list:
wisard = wnn.Wisard(entry_size, t, num_classes)
wisard.train(train_bin, train_label)
rank_result = wisard.rank(test_bin)
num_hits = 0
for i in range(test_length):
if rank_result[i] == test_label[i]:
num_hits += 1
acc_list.append(float(num_hits)/float(test_length))
#Bloom Wisard
btuple_list = [2, 4, 8, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 40, 56]
bacc_list = []
#capacity = len(train_bin)
capacity = 10
print(capacity)
for t in btuple_list:
bwisard = wnn.BloomWisard(entry_size, t, num_classes, capacity)
bwisard.train(train_bin, train_label)
rank_result = bwisard.rank(test_bin)
num_hits = 0
for i in range(test_length):
if rank_result[i] == test_label[i]:
num_hits += 1
bacc_list.append(float(num_hits)/float(test_length))
print("Tuples=", tuple_list)
print("Wisard Accuracy=", acc_list)
print("Tuples=", btuple_list)
print("BloomWisard Accuracy=", bacc_list)
| [] |
hoppfull/Legacy-Python | PyGame/pygame1/tutorial1/startercode.py | 43f465bfdb76c91f2ac16aabb0783fdf5f459adb | from pygamehelper import *
from pygame import *
from pygame.locals import *
from vec2d import *
from random import uniform
import numpy as np
class Starter(PygameHelper):
def __init__(self):
self.w, self.h = 800, 600
PygameHelper.__init__(self, size=(self.w, self.h), fill=((0,0,0)))
def update(self):
pass
def keyUp(self, key):
pass
def mouseUp(self, button, pos):
pass
def mouseMotion(self, buttons, pos, rel):
pass
def draw(self):
self.screen.fill((np.random.random()*255, np.random.random()*255, np.random.random()*255))
s = Starter()
s.mainLoop(40)
| [((579, 597), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (595, 597), True, 'import numpy as np\n'), ((603, 621), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (619, 621), True, 'import numpy as np\n'), ((627, 645), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (643, 645), True, 'import numpy as np\n')] |
thewhiteninja/twitch-recorder | main.py | 815b571e22917daa906d054a8ab2fe794e99bb8a | import glob
import os
import sys
import utils
from recorder import StreamRec
OUTDIR = ""
def parse_args(a):
global OUTDIR
i = 1
while i < len(a):
if a[i] in ["-h", "--help", "/?"]:
usage()
if a[i] in ["-d", "--dir"]:
OUTDIR = a[i + 1]
i += 1
i += 1
def usage():
print("Record your favorite Twitch streams!")
print("Check an example of .stream file in data/ to see how to add a stream to record")
print()
print("Usage: %s [Options]" % (os.path.basename(sys.argv[0])))
print()
print("Options :")
print(" -d, --dir : Output directory")
print(" -h, --help : Help")
sys.exit(1)
def load_streams():
all_inst = []
stream_files = glob.glob('data/**/*.stream', recursive=True)
for stream_file in stream_files:
inst = StreamRec(stream_file, OUTDIR)
all_inst.append(inst)
for inst in all_inst:
inst.start()
for inst in all_inst:
inst.join()
def main():
utils.welcome()
parse_args(sys.argv)
utils.make_directory(OUTDIR)
load_streams()
if __name__ == '__main__':
main()
| [((681, 692), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (689, 692), False, 'import sys\n'), ((752, 797), 'glob.glob', 'glob.glob', (['"""data/**/*.stream"""'], {'recursive': '(True)'}), "('data/**/*.stream', recursive=True)\n", (761, 797), False, 'import glob\n'), ((1022, 1037), 'utils.welcome', 'utils.welcome', ([], {}), '()\n', (1035, 1037), False, 'import utils\n'), ((1067, 1095), 'utils.make_directory', 'utils.make_directory', (['OUTDIR'], {}), '(OUTDIR)\n', (1087, 1095), False, 'import utils\n'), ((850, 880), 'recorder.StreamRec', 'StreamRec', (['stream_file', 'OUTDIR'], {}), '(stream_file, OUTDIR)\n', (859, 880), False, 'from recorder import StreamRec\n'), ((529, 558), 'os.path.basename', 'os.path.basename', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (545, 558), False, 'import os\n')] |
scottwedge/OpenStack-Stein | karbor-1.3.0/karbor/policies/protectables.py | 7077d1f602031dace92916f14e36b124f474de15 | # Copyright (c) 2017 Huawei Technologies Co., Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from karbor.policies import base
GET_POLICY = 'protectable:get'
GET_ALL_POLICY = 'protectable:get_all'
INSTANCES_GET_POLICY = 'protectable:instance_get'
INSTANCES_GET_ALL_POLICY = 'protectable:instance_get_all'
protectables_policies = [
policy.DocumentedRuleDefault(
name=GET_POLICY,
check_str=base.RULE_ADMIN_OR_OWNER,
description='Show a protectable type.',
operations=[
{
'method': 'GET',
'path': '/protectables/{protectable_type}'
}
]),
policy.DocumentedRuleDefault(
name=GET_ALL_POLICY,
check_str=base.RULE_ADMIN_OR_OWNER,
description='List protectable types.',
operations=[
{
'method': 'GET',
'path': '/protectables'
}
]),
policy.DocumentedRuleDefault(
name=INSTANCES_GET_POLICY,
check_str=base.RULE_ADMIN_OR_OWNER,
description='Show a protectable instance.',
operations=[
{
'method': 'GET',
'path': '/protectables/{protectable_type}/'
'instances/{resource_id}'
}
]),
policy.DocumentedRuleDefault(
name=INSTANCES_GET_ALL_POLICY,
check_str=base.RULE_ADMIN_OR_OWNER,
description='List protectable instances.',
operations=[
{
'method': 'GET',
'path': '/protectables/{protectable_type}/instances'
}
]),
]
def list_rules():
return protectables_policies
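# Illustrative sketch (not taken from karbor itself) of how a rule module like
# this one is typically wired into an oslo.policy enforcer; the enforcer,
# target and credential values below are placeholders:
#
# from oslo_config import cfg
#
# enforcer = policy.Enforcer(cfg.CONF)
# enforcer.register_defaults(list_rules())
# enforcer.authorize(GET_POLICY,
#                    target={'project_id': context.project_id},
#                    creds=context.to_policy_values())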
| [((925, 1132), 'oslo_policy.policy.DocumentedRuleDefault', 'policy.DocumentedRuleDefault', ([], {'name': 'GET_POLICY', 'check_str': 'base.RULE_ADMIN_OR_OWNER', 'description': '"""Show a protectable type."""', 'operations': "[{'method': 'GET', 'path': '/protectables/{protectable_type}'}]"}), "(name=GET_POLICY, check_str=base.\n RULE_ADMIN_OR_OWNER, description='Show a protectable type.', operations\n =[{'method': 'GET', 'path': '/protectables/{protectable_type}'}])\n", (953, 1132), False, 'from oslo_policy import policy\n'), ((1229, 1420), 'oslo_policy.policy.DocumentedRuleDefault', 'policy.DocumentedRuleDefault', ([], {'name': 'GET_ALL_POLICY', 'check_str': 'base.RULE_ADMIN_OR_OWNER', 'description': '"""List protectable types."""', 'operations': "[{'method': 'GET', 'path': '/protectables'}]"}), "(name=GET_ALL_POLICY, check_str=base.\n RULE_ADMIN_OR_OWNER, description='List protectable types.', operations=\n [{'method': 'GET', 'path': '/protectables'}])\n", (1257, 1420), False, 'from oslo_policy import policy\n'), ((1517, 1765), 'oslo_policy.policy.DocumentedRuleDefault', 'policy.DocumentedRuleDefault', ([], {'name': 'INSTANCES_GET_POLICY', 'check_str': 'base.RULE_ADMIN_OR_OWNER', 'description': '"""Show a protectable instance."""', 'operations': "[{'method': 'GET', 'path':\n '/protectables/{protectable_type}/instances/{resource_id}'}]"}), "(name=INSTANCES_GET_POLICY, check_str=base.\n RULE_ADMIN_OR_OWNER, description='Show a protectable instance.',\n operations=[{'method': 'GET', 'path':\n '/protectables/{protectable_type}/instances/{resource_id}'}])\n", (1545, 1765), False, 'from oslo_policy import policy\n'), ((1886, 2123), 'oslo_policy.policy.DocumentedRuleDefault', 'policy.DocumentedRuleDefault', ([], {'name': 'INSTANCES_GET_ALL_POLICY', 'check_str': 'base.RULE_ADMIN_OR_OWNER', 'description': '"""List protectable instances."""', 'operations': "[{'method': 'GET', 'path': '/protectables/{protectable_type}/instances'}]"}), "(name=INSTANCES_GET_ALL_POLICY, check_str=base.\n RULE_ADMIN_OR_OWNER, description='List protectable instances.',\n operations=[{'method': 'GET', 'path':\n '/protectables/{protectable_type}/instances'}])\n", (1914, 2123), False, 'from oslo_policy import policy\n')] |
unyo/uhpackage | router.example.py | 07d0263c586e5daa0012c3ff82754be381850911 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# routers are dictionaries of URL routing parameters.
#
# For each request, the effective router is:
# the built-in default base router (shown below),
# updated by the BASE router in routes.py routers,
# updated by the app-specific router in routes.py routers (if any),
#     updated by the app-specific router from applications/app/routes.py routers (if any)
#
#
# Router members:
#
# default_application: default application name
# applications: list of all recognized applications, or 'ALL' to use all currently installed applications
#     Names in applications are always treated as application names when they appear first in an incoming URL.
# Set applications to None to disable the removal of application names from outgoing URLs.
# domains: optional dict mapping domain names to application names
# The domain name can include a port number: domain.com:8080
# The application name can include a controller: appx/ctlrx
# path_prefix: a path fragment that is prefixed to all outgoing URLs and stripped from all incoming URLs
#
# Note: default_application, applications, domains & path_prefix are permitted only in the BASE router,
# and domain makes sense only in an application-specific router.
# The remaining members can appear in the BASE router (as defaults for all applications)
# or in application-specific routers.
#
# default_controller: name of default controller
# default_function: name of default function (all controllers)
# controllers: list of valid controllers in selected app
# or "DEFAULT" to use all controllers in the selected app plus 'static'
# or None to disable controller-name removal.
# Names in controllers are always treated as controller names when they appear in an incoming URL after
# the (optional) application and language names.
# languages: list of all supported languages
#     Names in languages are always treated as language names when they appear in an incoming URL after
# the (optional) application name.
# default_language
# The language code (for example: en, it-it) optionally appears in the URL following
# the application (which may be omitted). For incoming URLs, the code is copied to
# request.language; for outgoing URLs it is taken from request.language.
# If languages=None, language support is disabled.
# The default_language, if any, is omitted from the URL.
# root_static: list of static files accessed from root
# (mapped to the current application's static/ directory)
# Each application has its own root-static files.
# domain: the domain that maps to this application (alternative to using domains in the BASE router)
# map_hyphen: If True (default), hyphens in incoming /a/c/f fields are converted to underscores,
# and back to hyphens in outgoing URLs. Language, args and the query string are not affected.
# map_static: By default, the default application is not stripped from static URLs. Set map_static=True
# to override this policy.
# acfe_match: regex for valid application, controller, function, extension /a/c/f.e
# file_match: regex for valid file (used for static file names)
# args_match: regex for valid args
# This validation provides a measure of security.
#     If it is changed, the application must perform its own validation.
#
#
# The built-in default router supplies default values (undefined members are None):
#
# default_router = dict(
# default_application = 'init',
# applications = 'ALL',
# default_controller = 'default',
# controllers = 'DEFAULT',
# default_function = 'index',
# default_language = None,
# languages = None,
# root_static = ['favicon.ico', 'robots.txt'],
# domains = None,
# map_hyphen = True,
# acfe_match = r'\w+$', # legal app/ctlr/fcn/ext
# file_match = r'(\w+[-=./]?)+$', # legal file (path) name
# args_match = r'([\w@ -]+[=.]?)+$', # legal arg in args
# )
#
# See rewrite.map_url_in() and rewrite.map_url_out() for implementation details.
# This simple router set overrides only the default application name,
# but provides full rewrite functionality.
routers = dict(
# base router
BASE = dict(
default_application = 'welcome',
),
# 'admin' application router
admin = dict(
controllers = [], # don't remove controller names from admin URLs
map_hyphen = False, # don't map hyphens to underscores
),
)
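# A second, commented-out example combining more of the members documented
# above (the domain, application and language names are placeholders only,
# not values required by web2py):
#
# routers = dict(
#     BASE = dict(
#         default_application = 'app1',
#         domains = {
#             'domain1.com': 'app1',
#             'admin.domain1.com': 'admin',
#         },
#         path_prefix = 'myprefix',
#     ),
#     app1 = dict(
#         default_controller = 'default',
#         default_function = 'index',
#         languages = ['en', 'it', 'it-it'],
#         default_language = 'en',
#         map_hyphen = False,
#     ),
# )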
# Error-handling redirects all HTTP errors (status codes >= 400) to a specified
# path. If you wish to use error-handling redirects, uncomment the tuple
# below. You can customize responses by adding a tuple entry with the first
# value in 'appName/HTTPstatusCode' format. ( Only HTTP codes >= 400 are
# routed. ) and the value as a path to redirect the user to. You may also use
# '*' as a wildcard.
#
# The error handling page is also passed the error code and ticket as
# variables. Traceback information will be stored in the ticket.
#
# routes_onerror = [
# (r'init/400', r'/init/default/login')
# ,(r'init/*', r'/init/static/fail.html')
# ,(r'*/404', r'/init/static/cantfind.html')
# ,(r'*/*', r'/init/error/index')
# ]
# specify action in charge of error handling
#
# error_handler = dict(application='error',
# controller='default',
# function='index')
# In the event that the error-handling page itself returns an error, web2py will
# fall back to its old static responses. You can customize them here.
# ErrorMessageTicket takes a string format dictionary containing (only) the
# "ticket" key.
# error_message = '<html><body><h1>Invalid request</h1></body></html>'
# error_message_ticket = '<html><body><h1>Internal error</h1>Ticket issued: <a href="/admin/default/ticket/%(ticket)s" target="_blank">%(ticket)s</a></body></html>'
def __routes_doctest():
'''
Dummy function for doctesting routes.py.
Use filter_url() to test incoming or outgoing routes;
filter_err() for error redirection.
filter_url() accepts overrides for method and remote host:
filter_url(url, method='get', remote='0.0.0.0', out=False)
filter_err() accepts overrides for application and ticket:
filter_err(status, application='app', ticket='tkt')
>>> import os
>>> import gluon.main
>>> from gluon.rewrite import load, filter_url, filter_err, get_effective_router
>>> load(routes=os.path.basename(__file__))
>>> filter_url('http://domain.com/abc', app=True)
'welcome'
>>> filter_url('http://domain.com/welcome', app=True)
'welcome'
>>> os.path.relpath(filter_url('http://domain.com/favicon.ico'))
'applications/welcome/static/favicon.ico'
>>> filter_url('http://domain.com/abc')
'/welcome/default/abc'
>>> filter_url('http://domain.com/index/abc')
"/welcome/default/index ['abc']"
>>> filter_url('http://domain.com/default/abc.css')
'/welcome/default/abc.css'
>>> filter_url('http://domain.com/default/index/abc')
"/welcome/default/index ['abc']"
>>> filter_url('http://domain.com/default/index/a bc')
"/welcome/default/index ['a bc']"
>>> filter_url('http://domain.com/admin/bad!ctl')
Traceback (most recent call last):
...
HTTP: 400 BAD REQUEST [invalid controller]
>>> filter_url('http://domain.com/admin/ctl/bad!fcn')
Traceback (most recent call last):
...
HTTP: 400 BAD REQUEST [invalid function]
>>> filter_url('http://domain.com/admin/ctl/fcn.bad!ext')
Traceback (most recent call last):
...
HTTP: 400 BAD REQUEST [invalid extension]
>>> filter_url('http://domain.com/admin/ctl/fcn/bad!arg')
Traceback (most recent call last):
...
HTTP: 400 BAD REQUEST [invalid arg <bad!arg>]
>>> filter_url('https://domain.com/app/ctr/fcn', out=True)
'/app/ctr/fcn'
>>> filter_url('https://domain.com/welcome/ctr/fcn', out=True)
'/ctr/fcn'
>>> filter_url('https://domain.com/welcome/default/fcn', out=True)
'/fcn'
>>> filter_url('https://domain.com/welcome/default/index', out=True)
'/'
>>> filter_url('https://domain.com/welcome/appadmin/index', out=True)
'/appadmin'
>>> filter_url('http://domain.com/welcome/default/fcn?query', out=True)
'/fcn?query'
>>> filter_url('http://domain.com/welcome/default/fcn#anchor', out=True)
'/fcn#anchor'
>>> filter_url('http://domain.com/welcome/default/fcn?query#anchor', out=True)
'/fcn?query#anchor'
>>> filter_url('http://domain.com/appadmin/fcn-1')
'/welcome/appadmin/fcn_1'
>>> filter_url('http://domain.com/welcome/appadmin/fcn_1', out=True)
'/appadmin/fcn-1'
>>> filter_url('http://domain.com/examples/appadmin/fcn-1')
'/examples/appadmin/fcn_1'
>>> filter_url('http://domain.com/examples/appadmin/fcn_1', out=True)
'/examples/appadmin/fcn-1'
>>> filter_url('http://domain.com/app/static/filename-with_underscore', out=True)
'/app/static/filename-with_underscore'
>>> os.path.relpath(filter_url('http://domain.com/admin/static/filename-with_underscore'))
'applications/admin/static/filename-with_underscore'
>>> filter_err(200)
200
>>> filter_err(399)
399
>>> filter_err(400)
400
'''
pass
if __name__ == '__main__':
import doctest
doctest.testmod()
| [((9489, 9506), 'doctest.testmod', 'doctest.testmod', ([], {}), '()\n', (9504, 9506), False, 'import doctest\n')] |
costrouc/uarray | notebooks/2018.11.09 Meeting.py | c3c42147181a88265942ad5f9cf439467f746782 | #%%
from uarray.core import *
#%%
s = Scalar(Int(10))
#%%
@operation
def Always(a: T) -> CCallableUnary[T, CContent]:
...
#%%
register(Call(Always(w("a")), w("idx")), lambda a, idx: a)
#%%
a_ten = Always(s)
#%%
s = Sequence(Int(10), a_ten)
| [] |
dwstreetNNL/spack | var/spack/repos/builtin/packages/py-black/package.py | 8f929707147c49606d00386a10161529dad4ec56 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyBlack(PythonPackage):
"""Black is the uncompromising Python code formatter. By using it, you agree to
cede control over minutiae of hand-formatting. In return, Black gives you
speed, determinism, and freedom from pycodestyle nagging about formatting.
"""
homepage = "https://github.com/psf/black"
url = "https://pypi.io/packages/source/b/black/black-20.8b1.tar.gz"
version('20.8b1', sha256='1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea')
version('19.3b0', sha256='68950ffd4d9169716bcb8719a56c07a2f4485354fec061cdd5910aa07369731c')
version('18.9b0', sha256='e030a9a28f542debc08acceb273f228ac422798e5215ba2a791a6ddeaaca22a5')
variant('d', default=False, description='enable blackd HTTP server')
depends_on('[email protected]:')
# Needs setuptools at runtime so that `import pkg_resources` succeeds
# See #8843 and #8689 for examples of setuptools added as a runtime dep
depends_on('py-setuptools', type=('build', 'run'))
# Translated from black's setup.py:
# https://github.com/ambv/black/blob/master/setup.py
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', when='@20.8b1:', type=('build', 'run'))
depends_on('[email protected]:', when='@:20.8b0', type=('build', 'run'))
depends_on('py-appdirs', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', when='@20.8b1:', type=('build', 'run'))
depends_on('[email protected]:', when='@19.10b0:', type=('build', 'run'))
depends_on('[email protected]:', when='@20.8b0:', type=('build', 'run'))
depends_on('[email protected]:0.999', when='@19.10b0:', type=('build', 'run'))
depends_on('[email protected]:', when='@20.8b0:^python@:3.6', type=('build', 'run'))
depends_on('[email protected]:', when='@20.8b0:', type=('build', 'run'))
depends_on('[email protected]:', when='@20.8b0:', type=('build', 'run'))
depends_on('[email protected]:', when='+d', type=('build', 'run'))
depends_on('py-aiohttp-cors', when='+d', type=('build', 'run'))
@property
def import_modules(self):
modules = ['blib2to3', 'blib2to3.pgen2', 'black']
if '+d' in self.spec:
modules.append('blackd')
return modules
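# Typical command-line usage of this recipe (illustrative only):
#
#   spack install py-black         # plain formatter
#   spack install py-black +d      # also build the blackd HTTP server variant
#   spack spec py-black@20.8b1     # inspect the concretized spec for a version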
| [] |
vallemrv/my_store_test | store/adminshop/templatetags/admin_extras.py | 2da624fd02c5f1784464f15b751b488f3dd2bae6 | # -*- coding: utf-8 -*-
# @Author: Manuel Rodriguez <valle>
# @Date: 27-Aug-2017
# @Email: [email protected]
# @Filename: admin_extras.py
# @Last modified by: valle
# @Last modified time: 02-Feb-2018
# @License: Apache license vesion 2.0
from django import template
from django.db.models import Q
try:
from django.core.urlresolvers import reverse
except ImportError:
from django.urls import reverse
from adminshop.models import Testeo, Compras, Presupuesto
import json
import sys
register = template.Library()
@register.filter(name='get_nombre_cliente')
def get_nombre_cliente(f):
return f.get_nombre_cliente()
@register.filter(name='enviado')
def enviado(f):
return "No" if not f.enviado else "Si"
@register.filter(name='get_user')
def get_user(f):
return f.get_user()
@register.filter(name='get_ns_imei')
def get_ns_imei(f):
return f.get_ns_imei()
@register.filter(name='get_producto_pk')
def get_producto_pk(f):
return f.get_producto_pk()
@register.filter(name='addcss')
def addcss(field, css):
return field.as_widget(attrs={"class":css})
@register.filter(name='reparacion')
def reparacion(p):
try:
pres = Presupuesto.objects.filter(producto__pk=p.id)[0]
return pres.notas_cliente
except:
return ""
@register.filter(name='num_pres')
def num_pres(p):
try:
pres = Presupuesto.objects.filter(producto__pk=p.id)[0]
return pres.pk
except:
return -1
@register.filter(name='precio_venta')
def precio_venta(p):
precio = 0 if p.precio_venta == None else p.precio_venta
return "{0:.2f} €".format(precio)
@register.filter(name='precio_usado')
def precio_usado(p):
return "{0:.2f} €".format(p.modelo.precio_usado * p.tipo.incremento)
@register.filter(name='document_show')
def document_show(p):
compras = Compras.objects.filter(producto__id=p.pk)
if len(compras) > 0:
compra = compras[0]
else:
compra = Compras()
return p.estado in ["ST", "VD", "OL", "VT"]
@register.filter(name='document_href')
def document_href(p):
if p.estado in ["ST", "VT", "OL"]:
return reverse("get_document_by_id", args=[p.pk])
elif p.estado in ["RP", "OK", "PD"]:
return reverse("get_presupuesto_pdf", args=[p.pk])
elif p.estado == "VD":
return reverse("get_all_document", args=[p.pk])
else:
return "#"
@register.filter(name='have_sign')
def have_sign(p):
compras = Compras.objects.filter(producto__id=p.pk)
compra = Compras()
if len(compras) > 0:
compra = compras[0]
return p.estado in ["ST", "VD", "OL", "VT"] and compra.firma == ""
@register.filter(name='editable')
def editable(p):
return p.estado in ["ST", "OL", "VT"]
@register.simple_tag(name='get_estado_value')
def get_estado_value(test_id, p_id, estado):
testeos = Testeo.objects.filter(Q(descripcion__pk=test_id) &
Q(producto__pk=p_id))
send = ""
if len(testeos) > 0 and testeos[0].estado == estado:
send = "selected"
return send
@register.filter(name='addattrs')
def addattrs(field, args):
attr = {}
try:
args_parse = args.replace("'", '"')
attr = json.loads(args_parse)
except Exception as error:
print(error)
return field.as_widget(attrs=attr)
@register.filter('klass')
def klass(ob):
return ob.field.widget.__class__.__name__
@register.filter('display')
def display(form, value):
return dict(form.field.choices)[value]
@register.filter('modelo')
def modelo(p):
if p.modelo != None:
return str(p.modelo)
else:
return p.detalle
@register.filter('marca')
def marca(p):
if p.modelo != None:
return str(p.modelo.marca)
else:
return ""
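# Example of how these helpers are referenced from a Django template once the
# library is loaded; the variable and field names below ("producto", "form",
# "test") are illustrative placeholders rather than names taken from the
# project's views:
#
# {% load admin_extras %}
# {{ producto|precio_venta }}
# {{ producto|modelo }} {{ producto|marca }}
# {{ form.precio|addcss:"form-control" }}
# <option value="OK" {% get_estado_value test.id producto.id "OK" %}>OK</option>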
| [((509, 527), 'django.template.Library', 'template.Library', ([], {}), '()\n', (525, 527), False, 'from django import template\n'), ((1833, 1874), 'adminshop.models.Compras.objects.filter', 'Compras.objects.filter', ([], {'producto__id': 'p.pk'}), '(producto__id=p.pk)\n', (1855, 1874), False, 'from adminshop.models import Testeo, Compras, Presupuesto\n'), ((2452, 2493), 'adminshop.models.Compras.objects.filter', 'Compras.objects.filter', ([], {'producto__id': 'p.pk'}), '(producto__id=p.pk)\n', (2474, 2493), False, 'from adminshop.models import Testeo, Compras, Presupuesto\n'), ((2507, 2516), 'adminshop.models.Compras', 'Compras', ([], {}), '()\n', (2514, 2516), False, 'from adminshop.models import Testeo, Compras, Presupuesto\n'), ((1955, 1964), 'adminshop.models.Compras', 'Compras', ([], {}), '()\n', (1962, 1964), False, 'from adminshop.models import Testeo, Compras, Presupuesto\n'), ((2129, 2171), 'django.urls.reverse', 'reverse', (['"""get_document_by_id"""'], {'args': '[p.pk]'}), "('get_document_by_id', args=[p.pk])\n", (2136, 2171), False, 'from django.urls import reverse\n'), ((3209, 3231), 'json.loads', 'json.loads', (['args_parse'], {}), '(args_parse)\n', (3219, 3231), False, 'import json\n'), ((1173, 1218), 'adminshop.models.Presupuesto.objects.filter', 'Presupuesto.objects.filter', ([], {'producto__pk': 'p.id'}), '(producto__pk=p.id)\n', (1199, 1218), False, 'from adminshop.models import Testeo, Compras, Presupuesto\n'), ((1362, 1407), 'adminshop.models.Presupuesto.objects.filter', 'Presupuesto.objects.filter', ([], {'producto__pk': 'p.id'}), '(producto__pk=p.id)\n', (1388, 1407), False, 'from adminshop.models import Testeo, Compras, Presupuesto\n'), ((2228, 2271), 'django.urls.reverse', 'reverse', (['"""get_presupuesto_pdf"""'], {'args': '[p.pk]'}), "('get_presupuesto_pdf', args=[p.pk])\n", (2235, 2271), False, 'from django.urls import reverse\n'), ((2864, 2890), 'django.db.models.Q', 'Q', ([], {'descripcion__pk': 'test_id'}), '(descripcion__pk=test_id)\n', (2865, 2890), False, 'from django.db.models import Q\n'), ((2929, 2949), 'django.db.models.Q', 'Q', ([], {'producto__pk': 'p_id'}), '(producto__pk=p_id)\n', (2930, 2949), False, 'from django.db.models import Q\n'), ((2314, 2354), 'django.urls.reverse', 'reverse', (['"""get_all_document"""'], {'args': '[p.pk]'}), "('get_all_document', args=[p.pk])\n", (2321, 2354), False, 'from django.urls import reverse\n')] |
abdalazizrashid/Theano-PyMC | doc/tutorial/using_gpu_solution_1.py | 90fa750461e91fb6281d494ae86404e2153fd7eb | #!/usr/bin/env python
# Aesara tutorial
# Solution to Exercise in section 'Using the GPU'
# 1. Raw results
import numpy as np
import aesara
import aesara.tensor as tt
aesara.config.floatX = "float32"
rng = np.random
N = 400
feats = 784
D = (
rng.randn(N, feats).astype(aesara.config.floatX),
rng.randint(size=N, low=0, high=2).astype(aesara.config.floatX),
)
training_steps = 10000
# Declare Aesara symbolic variables
x = aesara.shared(D[0], name="x")
y = aesara.shared(D[1], name="y")
w = aesara.shared(rng.randn(feats).astype(aesara.config.floatX), name="w")
b = aesara.shared(np.asarray(0.0, dtype=aesara.config.floatX), name="b")
x.tag.test_value = D[0]
y.tag.test_value = D[1]
# print "Initial model:"
# print w.get_value(), b.get_value()
# Construct Aesara expression graph
p_1 = 1 / (1 + tt.exp(-tt.dot(x, w) - b)) # Probability of having a one
prediction = p_1 > 0.5 # The prediction that is done: 0 or 1
xent = -y * tt.log(p_1) - (1 - y) * tt.log(1 - p_1) # Cross-entropy
cost = tt.cast(xent.mean(), "float32") + 0.01 * (w ** 2).sum() # The cost to optimize
gw, gb = tt.grad(cost, [w, b])
# Compile expressions to functions
train = aesara.function(
inputs=[],
outputs=[prediction, xent],
updates=[(w, w - 0.01 * gw), (b, b - 0.01 * gb)],
name="train",
)
predict = aesara.function(inputs=[], outputs=prediction, name="predict")
if any(
[
n.op.__class__.__name__ in ["Gemv", "CGemv", "Gemm", "CGemm"]
for n in train.maker.fgraph.toposort()
]
):
print("Used the cpu")
elif any(
[
n.op.__class__.__name__ in ["GpuGemm", "GpuGemv"]
for n in train.maker.fgraph.toposort()
]
):
print("Used the gpu")
else:
print("ERROR, not able to tell if aesara used the cpu or the gpu")
print(train.maker.fgraph.toposort())
for i in range(training_steps):
pred, err = train()
# print "Final model:"
# print w.get_value(), b.get_value()
print("target values for D")
print(D[1])
print("prediction on D")
print(predict())
"""
# 2. Profiling
# 2.1 Profiling for CPU computations
# In your terminal, type:
$ THEANO_FLAGS=profile=True,device=cpu python using_gpu_solution_1.py
# You'll see first the output of the script:
Used the cpu
target values for D
prediction on D
# Followed by the output of profiling. You'll see profiling results for each function
# in the script, followed by a summary for all functions.
# We'll show here only the summary:
Results were produced using an Intel(R) Core(TM) i7-5930K CPU @ 3.50GHz
Function profiling
==================
Message: Sum of all(2) printed profiles at exit excluding Scan op profile.
Time in 10001 calls to Function.__call__: 1.300452e+00s
Time in Function.fn.__call__: 1.215823e+00s (93.492%)
Time in thunks: 1.157602e+00s (89.015%)
Total compile time: 8.922548e-01s
Number of Apply nodes: 17
Aesara Optimizer time: 6.270301e-01s
Aesara validate time: 5.993605e-03s
Aesara Linker time (includes C, CUDA code generation/compiling): 2.949309e-02s
Import time 3.543139e-03s
Time in all call to aesara.grad() 1.848292e-02s
Time since aesara import 2.864s
Class
---
<% time> <sum %> <apply time> <time per call> <type> <#call> <#apply> <Class name>
64.5% 64.5% 0.747s 3.73e-05s C 20001 3 aesara.tensor.blas_c.CGemv
33.1% 97.7% 0.384s 4.79e-06s C 80001 9 aesara.tensor.elemwise.Elemwise
1.0% 98.6% 0.011s 1.14e-06s C 10000 1 aesara.tensor.elemwise.Sum
0.7% 99.4% 0.009s 2.85e-07s C 30001 4 aesara.tensor.elemwise.DimShuffle
0.3% 99.7% 0.004s 3.64e-07s C 10001 2 aesara.tensor.basic.AllocEmpty
0.3% 100.0% 0.004s 1.78e-07s C 20001 3 aesara.compile.ops.Shape_i
... (remaining 0 Classes account for 0.00%(0.00s) of the runtime)
Ops
---
<% time> <sum %> <apply time> <time per call> <type> <#call> <#apply> <Op name>
64.5% 64.5% 0.747s 3.73e-05s C 20001 3 CGemv{inplace}
18.7% 83.2% 0.217s 2.17e-05s C 10000 1 Elemwise{Composite{((i0 * scalar_softplus(i1)) - (i2 * i3 * scalar_softplus(i4)))}}[(0, 4)]
8.9% 92.1% 0.103s 1.03e-05s C 10000 1 Elemwise{Composite{(((scalar_sigmoid(i0) * i1 * i2) / i3) - ((scalar_sigmoid((-i0)) * i1 * i4) / i3))}}[(0, 0)]
4.3% 96.4% 0.050s 4.98e-06s C 10000 1 Elemwise{Composite{GT(scalar_sigmoid(i0), i1)}}
1.0% 97.4% 0.011s 1.14e-06s C 10000 1 Sum{acc_dtype=float64}
0.5% 97.9% 0.006s 2.83e-07s C 20001 3 InplaceDimShuffle{x}
0.4% 98.3% 0.004s 4.22e-07s C 10000 1 Elemwise{sub,no_inplace}
0.3% 98.6% 0.004s 3.70e-07s C 10000 1 Elemwise{neg,no_inplace}
0.3% 98.9% 0.004s 3.64e-07s C 10001 2 AllocEmpty{dtype='float32'}
0.3% 99.2% 0.004s 1.78e-07s C 20001 3 Shape_i{0}
0.2% 99.5% 0.003s 2.88e-07s C 10000 1 InplaceDimShuffle{1,0}
0.2% 99.7% 0.003s 2.65e-07s C 10000 1 Elemwise{Composite{((-i0) - i1)}}[(0, 0)]
0.2% 99.9% 0.002s 1.98e-07s C 10000 1 Elemwise{Cast{float32}}
0.1% 100.0% 0.002s 1.54e-07s C 10000 1 Elemwise{Composite{(i0 - (i1 * i2))}}[(0, 0)]
0.0% 100.0% 0.000s 4.77e-06s C 1 1 Elemwise{Composite{GT(scalar_sigmoid((-((-i0) - i1))), i2)}}
... (remaining 0 Ops account for 0.00%(0.00s) of the runtime)
Apply
------
<% time> <sum %> <apply time> <time per call> <#call> <id> <Apply name>
34.0% 34.0% 0.394s 3.94e-05s 10000 7 CGemv{inplace}(AllocEmpty{dtype='float32'}.0, TensorConstant{1.0}, x, w, TensorConstant{0.0})
30.5% 64.5% 0.353s 3.53e-05s 10000 15 CGemv{inplace}(w, TensorConstant{-0.00999999977648}, x.T, Elemwise{Composite{(((scalar_sigmoid(i0) * i1 * i2) / i3) - ((scalar_sigmoid((-i0)) * i1 * i4) / i3))}}[(0, 0)].0, TensorConstant{0.999800026417})
18.7% 83.2% 0.217s 2.17e-05s 10000 12 Elemwise{Composite{((i0 * scalar_softplus(i1)) - (i2 * i3 * scalar_softplus(i4)))}}[(0, 4)](y, Elemwise{Composite{((-i0) - i1)}}[(0, 0)].0, TensorConstant{(1,) of -1.0}, Elemwise{sub,no_inplace}.0, Elemwise{neg,no_inplace}.0)
8.9% 92.1% 0.103s 1.03e-05s 10000 13 Elemwise{Composite{(((scalar_sigmoid(i0) * i1 * i2) / i3) - ((scalar_sigmoid((-i0)) * i1 * i4) / i3))}}[(0, 0)](Elemwise{Composite{((-i0) - i1)}}[(0, 0)].0, TensorConstant{(1,) of -1.0}, y, Elemwise{Cast{float32}}.0, Elemwise{sub,no_inplace}.0)
4.3% 96.4% 0.050s 4.98e-06s 10000 11 Elemwise{Composite{GT(scalar_sigmoid(i0), i1)}}(Elemwise{neg,no_inplace}.0, TensorConstant{(1,) of 0.5})
1.0% 97.4% 0.011s 1.14e-06s 10000 14 Sum{acc_dtype=float64}(Elemwise{Composite{(((scalar_sigmoid(i0) * i1 * i2) / i3) - ((scalar_sigmoid((-i0)) * i1 * i4) / i3))}}[(0, 0)].0)
0.4% 97.8% 0.004s 4.22e-07s 10000 4 Elemwise{sub,no_inplace}(TensorConstant{(1,) of 1.0}, y)
0.3% 98.1% 0.004s 3.76e-07s 10000 0 InplaceDimShuffle{x}(b)
0.3% 98.4% 0.004s 3.70e-07s 10000 10 Elemwise{neg,no_inplace}(Elemwise{Composite{((-i0) - i1)}}[(0, 0)].0)
0.3% 98.7% 0.004s 3.64e-07s 10000 5 AllocEmpty{dtype='float32'}(Shape_i{0}.0)
0.2% 99.0% 0.003s 2.88e-07s 10000 2 InplaceDimShuffle{1,0}(x)
0.2% 99.2% 0.003s 2.65e-07s 10000 9 Elemwise{Composite{((-i0) - i1)}}[(0, 0)](CGemv{inplace}.0, InplaceDimShuffle{x}.0)
0.2% 99.4% 0.002s 2.21e-07s 10000 1 Shape_i{0}(x)
0.2% 99.6% 0.002s 1.98e-07s 10000 8 Elemwise{Cast{float32}}(InplaceDimShuffle{x}.0)
0.2% 99.7% 0.002s 1.90e-07s 10000 6 InplaceDimShuffle{x}(Shape_i{0}.0)
0.1% 99.9% 0.002s 1.54e-07s 10000 16 Elemwise{Composite{(i0 - (i1 * i2))}}[(0, 0)](b, TensorConstant{0.00999999977648}, Sum{acc_dtype=float64}.0)
0.1% 100.0% 0.001s 1.34e-07s 10000 3 Shape_i{0}(y)
0.0% 100.0% 0.000s 3.89e-05s 1 3 CGemv{inplace}(AllocEmpty{dtype='float32'}.0, TensorConstant{1.0}, x, w, TensorConstant{0.0})
0.0% 100.0% 0.000s 4.77e-06s 1 4 Elemwise{Composite{GT(scalar_sigmoid((-((-i0) - i1))), i2)}}(CGemv{inplace}.0, InplaceDimShuffle{x}.0, TensorConstant{(1,) of 0.5})
0.0% 100.0% 0.000s 1.19e-06s 1 0 InplaceDimShuffle{x}(b)
... (remaining 2 Apply instances account for 0.00%(0.00s) of the runtime)
# 2.2 Profiling for GPU computations
# In your terminal, type:
$ CUDA_LAUNCH_BLOCKING=1 THEANO_FLAGS=profile=True,device=cuda python using_gpu_solution_1.py
# You'll see first the output of the script:
Used the gpu
target values for D
prediction on D
Results were produced using a GeForce GTX TITAN X
# Profiling summary for all functions:
Function profiling
==================
Message: Sum of all(2) printed profiles at exit excluding Scan op profile.
Time in 10001 calls to Function.__call__: 4.181247e+00s
Time in Function.fn.__call__: 4.081113e+00s (97.605%)
Time in thunks: 3.915566e+00s (93.646%)
Total compile time: 9.256095e+00s
Number of Apply nodes: 21
Aesara Optimizer time: 9.996419e-01s
Aesara validate time: 6.523132e-03s
Aesara Linker time (includes C, CUDA code generation/compiling): 8.239602e+00s
Import time 4.228115e-03s
Time in all call to aesara.grad() 3.286195e-02s
Time since aesara import 15.415s
Class
---
<% time> <sum %> <apply time> <time per call> <type> <#call> <#apply> <Class name>
59.5% 59.5% 2.329s 1.16e-04s C 20001 3 aesara.sandbox.gpuarray.blas.GpuGemv
29.8% 89.3% 1.166s 1.30e-05s C 90001 10 aesara.sandbox.gpuarray.elemwise.GpuElemwise
4.1% 93.4% 0.162s 8.10e-06s C 20001 3 aesara.sandbox.gpuarray.basic_ops.HostFromGpu
3.3% 96.7% 0.131s 1.31e-05s C 10000 1 aesara.sandbox.gpuarray.elemwise.GpuCAReduceCuda
1.6% 98.3% 0.061s 6.10e-06s C 10000 1 aesara.sandbox.gpuarray.basic_ops.GpuFromHost
0.8% 99.1% 0.033s 1.09e-06s C 30001 4 aesara.sandbox.gpuarray.elemwise.GpuDimShuffle
0.7% 99.8% 0.026s 2.59e-06s C 10001 2 aesara.sandbox.gpuarray.basic_ops.GpuAllocEmpty
0.2% 100.0% 0.008s 3.95e-07s C 20001 3 aesara.compile.ops.Shape_i
... (remaining 0 Classes account for 0.00%(0.00s) of the runtime)
Ops
---
<% time> <sum %> <apply time> <time per call> <type> <#call> <#apply> <Op name>
59.5% 59.5% 2.329s 1.16e-04s C 20001 3 GpuGemv{inplace=True}
4.1% 63.6% 0.162s 8.10e-06s C 20001 3 HostFromGpu(gpuarray)
4.0% 67.6% 0.157s 1.57e-05s C 10000 1 GpuElemwise{Composite{((i0 * scalar_softplus(i1)) - (i2 * i3 * scalar_softplus(i4)))}}[]<gpuarray>
3.8% 71.4% 0.149s 1.49e-05s C 10000 1 GpuElemwise{Composite{(((scalar_sigmoid(i0) * i1 * i2) / i3) - ((i4 * i1 * i5) / i3))}}[(0, 0)]<gpuarray>
3.7% 75.1% 0.144s 1.44e-05s C 10000 1 GpuElemwise{sub,no_inplace}
3.6% 78.7% 0.141s 1.41e-05s C 10000 1 GpuElemwise{gt,no_inplace}
3.4% 82.1% 0.133s 1.33e-05s C 10000 1 GpuElemwise{Cast{float32}}[]<gpuarray>
3.4% 85.5% 0.133s 1.33e-05s C 10000 1 GpuElemwise{Composite{((-i0) - i1)}}[(0, 0)]<gpuarray>
3.3% 88.8% 0.131s 1.31e-05s C 10000 1 GpuCAReduceCuda{add}
2.9% 91.7% 0.112s 1.12e-05s C 10000 1 GpuElemwise{neg,no_inplace}
2.6% 94.3% 0.102s 1.02e-05s C 10000 1 GpuElemwise{Composite{(i0 - (i1 * i2))}}[(0, 0)]<gpuarray>
2.5% 96.7% 0.096s 9.63e-06s C 10000 1 GpuElemwise{ScalarSigmoid}[(0, 0)]<gpuarray>
1.6% 98.3% 0.061s 6.10e-06s C 10000 1 GpuFromHost<None>
0.7% 99.0% 0.026s 2.59e-06s C 10001 2 GpuAllocEmpty{dtype='float32', context_name=None}
0.5% 99.5% 0.021s 1.06e-06s C 20001 3 InplaceGpuDimShuffle{x}
0.3% 99.8% 0.011s 1.14e-06s C 10000 1 InplaceGpuDimShuffle{1,0}
0.2% 100.0% 0.008s 3.95e-07s C 20001 3 Shape_i{0}
0.0% 100.0% 0.000s 2.00e-05s C 1 1 GpuElemwise{Composite{GT(scalar_sigmoid((-((-i0) - i1))), i2)}}[]<gpuarray>
... (remaining 0 Ops account for 0.00%(0.00s) of the runtime)
Apply
------
<% time> <sum %> <apply time> <time per call> <#call> <id> <Apply name>
55.0% 55.0% 2.154s 2.15e-04s 10000 7 GpuGemv{inplace=True}(GpuAllocEmpty{dtype='float32', context_name=None}.0, TensorConstant{1.0}, x, w, TensorConstant{0.0})
4.5% 59.5% 0.176s 1.76e-05s 10000 18 GpuGemv{inplace=True}(w, TensorConstant{-0.00999999977648}, InplaceGpuDimShuffle{1,0}.0, GpuElemwise{Composite{(((scalar_sigmoid(i0) * i1 * i2) / i3) - ((i4 * i1 * i5) / i3))}}[(0, 0)]<gpuarray>.0, TensorConstant{0.999800026417})
4.0% 63.5% 0.157s 1.57e-05s 10000 12 GpuElemwise{Composite{((i0 * scalar_softplus(i1)) - (i2 * i3 * scalar_softplus(i4)))}}[]<gpuarray>(y, GpuElemwise{Composite{((-i0) - i1)}}[(0, 0)]<gpuarray>.0, GpuArrayConstant{[-1.]}, GpuElemwise{sub,no_inplace}.0, GpuElemwise{neg,no_inplace}.0)
3.8% 67.3% 0.149s 1.49e-05s 10000 15 GpuElemwise{Composite{(((scalar_sigmoid(i0) * i1 * i2) / i3) - ((i4 * i1 * i5) / i3))}}[(0, 0)]<gpuarray>(GpuElemwise{Composite{((-i0) - i1)}}[(0, 0)]<gpuarray>.0, GpuArrayConstant{[-1.]}, y, GpuElemwise{Cast{float32}}[]<gpuarray>.0, GpuElemwise{ScalarSigmoid}[(0, 0)]<gpuarray>.0, GpuElemwise{sub,no_inplace}.0)
3.7% 71.0% 0.144s 1.44e-05s 10000 4 GpuElemwise{sub,no_inplace}(GpuArrayConstant{[ 1.]}, y)
3.6% 74.6% 0.141s 1.41e-05s 10000 16 GpuElemwise{gt,no_inplace}(GpuElemwise{ScalarSigmoid}[(0, 0)]<gpuarray>.0, GpuArrayConstant{[ 0.5]})
3.4% 78.0% 0.133s 1.33e-05s 10000 10 GpuElemwise{Cast{float32}}[]<gpuarray>(InplaceGpuDimShuffle{x}.0)
3.4% 81.4% 0.133s 1.33e-05s 10000 9 GpuElemwise{Composite{((-i0) - i1)}}[(0, 0)]<gpuarray>(GpuGemv{inplace=True}.0, InplaceGpuDimShuffle{x}.0)
3.3% 84.7% 0.131s 1.31e-05s 10000 17 GpuCAReduceCuda{add}(GpuElemwise{Composite{(((scalar_sigmoid(i0) * i1 * i2) / i3) - ((i4 * i1 * i5) / i3))}}[(0, 0)]<gpuarray>.0)
2.9% 87.5% 0.112s 1.12e-05s 10000 11 GpuElemwise{neg,no_inplace}(GpuElemwise{Composite{((-i0) - i1)}}[(0, 0)]<gpuarray>.0)
2.6% 90.1% 0.102s 1.02e-05s 10000 20 GpuElemwise{Composite{(i0 - (i1 * i2))}}[(0, 0)]<gpuarray>(b, GpuArrayConstant{0.00999999977648}, GpuCAReduceCuda{add}.0)
2.5% 92.6% 0.096s 9.63e-06s 10000 13 GpuElemwise{ScalarSigmoid}[(0, 0)]<gpuarray>(GpuElemwise{neg,no_inplace}.0)
2.3% 94.9% 0.090s 9.04e-06s 10000 19 HostFromGpu(gpuarray)(GpuElemwise{gt,no_inplace}.0)
1.8% 96.7% 0.072s 7.16e-06s 10000 14 HostFromGpu(gpuarray)(GpuElemwise{Composite{((i0 * scalar_softplus(i1)) - (i2 * i3 * scalar_softplus(i4)))}}[]<gpuarray>.0)
1.6% 98.3% 0.061s 6.10e-06s 10000 6 GpuFromHost<None>(Shape_i{0}.0)
0.7% 99.0% 0.026s 2.59e-06s 10000 5 GpuAllocEmpty{dtype='float32', context_name=None}(Shape_i{0}.0)
0.3% 99.3% 0.013s 1.33e-06s 10000 0 InplaceGpuDimShuffle{x}(b)
0.3% 99.6% 0.011s 1.14e-06s 10000 2 InplaceGpuDimShuffle{1,0}(x)
0.2% 99.8% 0.008s 7.94e-07s 10000 8 InplaceGpuDimShuffle{x}(GpuFromHost<None>.0)
0.1% 99.9% 0.005s 5.27e-07s 10000 1 Shape_i{0}(x)
... (remaining 7 Apply instances account for 0.07%(0.00s) of the runtime)
# 3. Conclusions
Examine and compare the 'Ops' summaries for the CPU and GPU runs. The GPU ops 'GpuFromHost' and 'HostFromGpu' by themselves
consume a large amount of extra time, but by making as few data transfers between GPU and CPU as possible, you can minimize their overhead.
Notice that each of the GPU ops consumes more time than its CPU counterpart. This is because the ops operate on small inputs;
if you increase the input data size (e.g. set N = 4000), you will see a gain from using the GPU.
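
As a minimal sketch (variable names follow the script above; the data sizes are illustrative and
not taken from the profiled run), the transfer-minimising pattern looks like this: the dataset and
the parameters live in shared variables, so each compiled function only copies its small output
back to the host.

    import numpy as np
    import aesara
    import aesara.tensor as tt

    N, feats = 4000, 784
    D = (np.random.randn(N, feats).astype(aesara.config.floatX),
         np.random.randint(size=N, low=0, high=2).astype(aesara.config.floatX))
    x = aesara.shared(D[0], name='x')    # data is moved to the device once, up front
    y = aesara.shared(D[1], name='y')    # the targets stay on the device as well
    w = aesara.shared(np.random.randn(feats).astype(aesara.config.floatX), name='w')
    b = aesara.shared(np.asarray(0.0, dtype=aesara.config.floatX), name='b')
    p_1 = 1 / (1 + tt.exp(-tt.dot(x, w) - b))   # the whole graph stays on the device
    prediction = p_1 > 0.5
    predict = aesara.function(inputs=[], outputs=prediction, name='predict')
    # each call to predict() only transfers the small boolean output back to the host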
"""
| [((440, 469), 'aesara.shared', 'aesara.shared', (['D[0]'], {'name': '"""x"""'}), "(D[0], name='x')\n", (453, 469), False, 'import aesara\n'), ((474, 503), 'aesara.shared', 'aesara.shared', (['D[1]'], {'name': '"""y"""'}), "(D[1], name='y')\n", (487, 503), False, 'import aesara\n'), ((1099, 1120), 'aesara.tensor.grad', 'tt.grad', (['cost', '[w, b]'], {}), '(cost, [w, b])\n', (1106, 1120), True, 'import aesara.tensor as tt\n'), ((1165, 1288), 'aesara.function', 'aesara.function', ([], {'inputs': '[]', 'outputs': '[prediction, xent]', 'updates': '[(w, w - 0.01 * gw), (b, b - 0.01 * gb)]', 'name': '"""train"""'}), "(inputs=[], outputs=[prediction, xent], updates=[(w, w - \n 0.01 * gw), (b, b - 0.01 * gb)], name='train')\n", (1180, 1288), False, 'import aesara\n'), ((1313, 1375), 'aesara.function', 'aesara.function', ([], {'inputs': '[]', 'outputs': 'prediction', 'name': '"""predict"""'}), "(inputs=[], outputs=prediction, name='predict')\n", (1328, 1375), False, 'import aesara\n'), ((597, 640), 'numpy.asarray', 'np.asarray', (['(0.0)'], {'dtype': 'aesara.config.floatX'}), '(0.0, dtype=aesara.config.floatX)\n', (607, 640), True, 'import numpy as np\n'), ((946, 957), 'aesara.tensor.log', 'tt.log', (['p_1'], {}), '(p_1)\n', (952, 957), True, 'import aesara.tensor as tt\n'), ((970, 985), 'aesara.tensor.log', 'tt.log', (['(1 - p_1)'], {}), '(1 - p_1)\n', (976, 985), True, 'import aesara.tensor as tt\n'), ((822, 834), 'aesara.tensor.dot', 'tt.dot', (['x', 'w'], {}), '(x, w)\n', (828, 834), True, 'import aesara.tensor as tt\n')] |
souravsingh/chainercv | chainercv/transforms/bbox/translate_bbox.py | 8f76510472bc95018c183e72f37bc6c34a89969c | def translate_bbox(bbox, y_offset=0, x_offset=0):
"""Translate bounding boxes.
This method is mainly used together with image transforms, such as padding
and cropping, which translates the left top point of the image from
coordinate :math:`(0, 0)` to coordinate
:math:`(y, x) = (y_{offset}, x_{offset})`.
The bounding boxes are expected to be packed into a two dimensional
tensor of shape :math:`(R, 4)`, where :math:`R` is the number of
bounding boxes in the image. The second axis represents attributes of
the bounding box. They are :math:`(y_{min}, x_{min}, y_{max}, x_{max})`,
where the four attributes are coordinates of the top left and the
bottom right vertices.
Args:
bbox (~numpy.ndarray): Bounding boxes to be transformed. The shape is
:math:`(R, 4)`. :math:`R` is the number of bounding boxes.
y_offset (int or float): The offset along y axis.
x_offset (int or float): The offset along x axis.
Returns:
~numpy.ndarray:
Bounding boxes translated according to the given offsets.
"""
out_bbox = bbox.copy()
out_bbox[:, :2] += (y_offset, x_offset)
out_bbox[:, 2:] += (y_offset, x_offset)
return out_bbox
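
# Illustrative usage (not part of the original module): shift two boxes
# five pixels down and three pixels to the right.
if __name__ == '__main__':
    import numpy as np

    bbox = np.array([[0., 0., 10., 10.],
                     [2., 3., 6., 8.]])
    print(translate_bbox(bbox, y_offset=5, x_offset=3))
    # [[ 5.  3. 15. 13.]
    #  [ 7.  6. 11. 11.]]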
| [] |
ministryofjustice/cla-end-to-end-tests | behave/features/environment.py | 3d7e525c17f38403a91087c2b1af460ca1109a9b | import os
from configparser import ConfigParser
from helper.helper_web import get_browser
def before_all(context):
config = ConfigParser()
    print(os.path.join(os.getcwd(), 'setup.cfg'))
    my_file = os.path.join(os.getcwd(), 'setup.cfg')
config.read(my_file)
# Reading the browser type from the configuration file
helper_func = get_browser(config.get('Environment', 'Browser'))
context.helperfunc = helper_func
def after_all(context):
context.helperfunc.close() | [((130, 144), 'configparser.ConfigParser', 'ConfigParser', ([], {}), '()\n', (142, 144), False, 'from configparser import ConfigParser\n'), ((225, 236), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (234, 236), False, 'import os\n'), ((169, 180), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (178, 180), False, 'import os\n')] |
j-sommer/recipe-organizer | recipe_organizer/gui/recipe_list/recipe_source.py | 91d39e12c453ecf3d3254645b565bbceacaecde9 | from pathlib import Path
from tkinter import Frame, Label
from recipe_organizer.events.event import Event, EventType
from recipe_organizer.events.event_observer import EventObserver
from recipe_organizer.events.event_publisher import EventPublisher
from recipe_organizer.gui.interfaces.widget_container import WidgetContainer
from recipe_organizer.gui.recipe_summary.recipe_summary import RecipeSummary
from recipe_organizer.recipe.recipe import Recipe
class RecipeSource(Frame, WidgetContainer, EventObserver):
_MAX_COLUMN_COUNT = 6
_label_source_directory: Label
_recipe_summaries: [RecipeSummary] = []
_row_index = 0
def __init__(self, parent):
Frame.__init__(self, parent)
self.define_widgets()
self.define_layout()
EventPublisher.add(self)
def define_widgets(self) -> None:
self._label_source_directory = Label(self, text="-")
def define_layout(self) -> None:
self._label_source_directory.grid(row=self.__get_row_index())
def notify(self, event: Event) -> None:
if event.event_type == EventType.SOURCE_SET:
self._label_source_directory.configure(text=event.payload.name)
self.__load_recipes(event.payload)
def __get_row_index(self) -> int:
current_index = self._row_index
self._row_index += 1
return current_index
def __load_recipes(self, directory: Path):
recipes: [Recipe] = []
file_paths = directory.glob("**/*.json")
for file_path in file_paths:
with open(file_path, "r", encoding="utf-8") as file:
json_data = file.read()
try:
recipe = Recipe.from_json(json_data)
except KeyError:
pass
else:
recipes.append(recipe)
self.__create_list(recipes)
def __create_list(self, recipes: [Recipe]):
current_row_index = self.__get_row_index()
for index, recipe in enumerate(recipes):
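            # start a new grid row once _MAX_COLUMN_COUNT recipe summaries have been placed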
if index % self._MAX_COLUMN_COUNT == 0:
current_row_index = self.__get_row_index()
recipe_summary = RecipeSummary(self, recipe)
recipe_summary.grid(row=current_row_index, column=index % self._MAX_COLUMN_COUNT, padx=16, pady=10)
self.columnconfigure(index, minsize=200)
self._recipe_summaries.append(recipe_summary)
| [((682, 710), 'tkinter.Frame.__init__', 'Frame.__init__', (['self', 'parent'], {}), '(self, parent)\n', (696, 710), False, 'from tkinter import Frame, Label\n'), ((780, 804), 'recipe_organizer.events.event_publisher.EventPublisher.add', 'EventPublisher.add', (['self'], {}), '(self)\n', (798, 804), False, 'from recipe_organizer.events.event_publisher import EventPublisher\n'), ((883, 904), 'tkinter.Label', 'Label', (['self'], {'text': '"""-"""'}), "(self, text='-')\n", (888, 904), False, 'from tkinter import Frame, Label\n'), ((2170, 2197), 'recipe_organizer.gui.recipe_summary.recipe_summary.RecipeSummary', 'RecipeSummary', (['self', 'recipe'], {}), '(self, recipe)\n', (2183, 2197), False, 'from recipe_organizer.gui.recipe_summary.recipe_summary import RecipeSummary\n'), ((1691, 1718), 'recipe_organizer.recipe.recipe.Recipe.from_json', 'Recipe.from_json', (['json_data'], {}), '(json_data)\n', (1707, 1718), False, 'from recipe_organizer.recipe.recipe import Recipe\n')] |
xiki-tempula/spack | var/spack/repos/builtin/packages/spot/package.py | 9d66c05e93ab8a933fc59915040c0e0c86a4aac4 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Spot(AutotoolsPackage):
"""Spot is a C++11 library for omega-automata manipulation and model
checking."""
homepage = "https://spot.lrde.epita.fr/"
url = "http://www.lrde.epita.fr/dload/spot/spot-1.99.3.tar.gz"
version('1.99.3', sha256='86964af559994af4451a8dca663a9e1db6e869ed60e747ab60ce72dddc31b61b')
version('1.2.6', sha256='360678c75f6741f697e8e56cdbc9937f104eb723a839c3629f0dc5dc6de11bfc')
variant('python', default=True, description='Enable python API')
depends_on("[email protected]:", when='@1.99.5: +python')
depends_on("[email protected]:", when='@1.99: +python')
depends_on("python@2:", when='+python')
depends_on('boost', when='@:1.2.6')
| [] |
itspuneet/itspuneet | 0.py | d44f78afcff275aa56f03bba738ac3e4f2c30843 | k=0
while k != 1:
    print(k)
    k += 1
| [] |
Cole-vJ/AdvEMDpy | jss_figures_replication_script.py | 160cd44b371a2c8aa66961f23062c1d7305dd728 |
# ________
# /
# \ /
# \ /
# \/
import random
import textwrap
import emd_mean
import AdvEMDpy
import emd_basis
import emd_utils
import numpy as np
import pandas as pd
import cvxpy as cvx
import seaborn as sns
import matplotlib.pyplot as plt
from scipy.integrate import odeint
from scipy.ndimage import gaussian_filter
from emd_utils import time_extension, Utility
from scipy.interpolate import CubicSpline
from emd_hilbert import Hilbert, hilbert_spectrum
from emd_preprocess import Preprocess
from emd_mean import Fluctuation
from AdvEMDpy import EMD
# alternate packages
from PyEMD import EMD as pyemd0215
import emd as emd040
sns.set(style='darkgrid')
pseudo_alg_time = np.linspace(0, 2 * np.pi, 1001)
pseudo_alg_time_series = np.sin(pseudo_alg_time) + np.sin(5 * pseudo_alg_time)
pseudo_utils = Utility(time=pseudo_alg_time, time_series=pseudo_alg_time_series)
# plot 0 - addition
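# one sifting iteration: the signal h_(1,0)(t), its extrema, and the idealised upper, lower and
# mean 'envelopes' sin(t) + 1, sin(t) - 1 and sin(t)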
fig = plt.figure(figsize=(9, 4))
ax = plt.subplot(111)
plt.gcf().subplots_adjust(bottom=0.10)
plt.title('First Iteration of Sifting Algorithm')
plt.plot(pseudo_alg_time, pseudo_alg_time_series, label=r'$h_{(1,0)}(t)$', zorder=1)
plt.scatter(pseudo_alg_time[pseudo_utils.max_bool_func_1st_order_fd()],
pseudo_alg_time_series[pseudo_utils.max_bool_func_1st_order_fd()],
c='r', label=r'$M(t_i)$', zorder=2)
plt.plot(pseudo_alg_time, np.sin(pseudo_alg_time) + 1, '--', c='r', label=r'$\tilde{h}_{(1,0)}^M(t)$', zorder=4)
plt.scatter(pseudo_alg_time[pseudo_utils.min_bool_func_1st_order_fd()],
pseudo_alg_time_series[pseudo_utils.min_bool_func_1st_order_fd()],
c='c', label=r'$m(t_j)$', zorder=3)
plt.plot(pseudo_alg_time, np.sin(pseudo_alg_time) - 1, '--', c='c', label=r'$\tilde{h}_{(1,0)}^m(t)$', zorder=5)
plt.plot(pseudo_alg_time, np.sin(pseudo_alg_time), '--', c='purple', label=r'$\tilde{h}_{(1,0)}^{\mu}(t)$', zorder=5)
plt.yticks(ticks=[-2, -1, 0, 1, 2])
plt.xticks(ticks=[0, np.pi, 2 * np.pi],
labels=[r'0', r'$\pi$', r'$2\pi$'])
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.95, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/pseudo_algorithm.png')
plt.show()
knots = np.arange(12)
time = np.linspace(0, 11, 1101)
basis = emd_basis.Basis(time=time, time_series=time)
b_spline_basis = basis.cubic_b_spline(knots)
chsi_basis = basis.chsi_basis(knots)
# plot 1
plt.title('Non-Natural Cubic B-Spline Bases at Boundary')
plt.plot(time[500:], b_spline_basis[2, 500:].T, '--', label=r'$ B_{-3,4}(t) $')
plt.plot(time[500:], b_spline_basis[3, 500:].T, '--', label=r'$ B_{-2,4}(t) $')
plt.plot(time[500:], b_spline_basis[4, 500:].T, '--', label=r'$ B_{-1,4}(t) $')
plt.plot(time[500:], b_spline_basis[5, 500:].T, '--', label=r'$ B_{0,4}(t) $')
plt.plot(time[500:], b_spline_basis[6, 500:].T, '--', label=r'$ B_{1,4}(t) $')
plt.xticks([5, 6], [r'$ \tau_0 $', r'$ \tau_1 $'])
plt.xlim(4.4, 6.6)
plt.plot(5 * np.ones(100), np.linspace(-0.2, 1.2, 100), 'k-')
plt.plot(6 * np.ones(100), np.linspace(-0.2, 1.2, 100), 'k-')
plt.legend(loc='upper left')
plt.savefig('jss_figures/boundary_bases.png')
plt.show()
# plot 1a - addition
knot_demonstrate_time = np.linspace(0, 2 * np.pi, 1001)
knot_demonstrate_time_series = np.sin(knot_demonstrate_time) + np.sin(5 * knot_demonstrate_time)
knots_uniform = np.linspace(0, 2 * np.pi, 51)
emd = EMD(time=knot_demonstrate_time, time_series=knot_demonstrate_time_series)
imfs = emd.empirical_mode_decomposition(knots=knots_uniform, edge_effect='anti-symmetric', verbose=False)[0]
fig, axs = plt.subplots(3, 1)
fig.subplots_adjust(hspace=0.6)
plt.gcf().subplots_adjust(bottom=0.10)
axs[0].set_title('Time Series and Uniform Knots')
axs[0].plot(knot_demonstrate_time, knot_demonstrate_time_series, linewidth=2, zorder=100)
axs[0].set_yticks(ticks=[-2, 0, 2])
axs[0].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[0].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[1].set_title('IMF 1 and Uniform Knots')
axs[1].plot(knot_demonstrate_time, imfs[1, :], linewidth=2, zorder=100)
axs[1].set_yticks(ticks=[-2, 0, 2])
axs[1].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[1].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[2].set_title('IMF 2 and Uniform Knots')
axs[2].plot(knot_demonstrate_time, imfs[2, :], linewidth=2, zorder=100)
axs[2].set_yticks(ticks=[-2, 0, 2])
axs[2].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[2].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[0].plot(knots_uniform[0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
axs[0].legend(loc='lower left')
axs[1].plot(knots_uniform[0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
axs[2].plot(knots_uniform[0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
for i in range(3):
for j in range(1, len(knots_uniform)):
axs[i].plot(knots_uniform[j] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey')
plt.savefig('jss_figures/knot_uniform.png')
plt.show()
# plot 1b - addition
knot_demonstrate_time = np.linspace(0, 2 * np.pi, 1001)
knot_demonstrate_time_series = np.sin(knot_demonstrate_time) + np.sin(5 * knot_demonstrate_time)
emd = EMD(time=knot_demonstrate_time, time_series=knot_demonstrate_time_series)
imfs, _, _, _, knots, _, _ = emd.empirical_mode_decomposition(edge_effect='anti-symmetric',
optimise_knots=1, verbose=False)
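# optimise_knots=1 fits a single, statically optimised knot sequence that is reused for every IMF;
# optimise_knots=2 (plot 1c below) re-optimises the knot placement separately for each IMF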
fig, axs = plt.subplots(3, 1)
fig.subplots_adjust(hspace=0.6)
plt.gcf().subplots_adjust(bottom=0.10)
axs[0].set_title('Time Series and Statically Optimised Knots')
axs[0].plot(knot_demonstrate_time, knot_demonstrate_time_series, linewidth=2, zorder=100)
axs[0].set_yticks(ticks=[-2, 0, 2])
axs[0].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[0].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[1].set_title('IMF 1 and Statically Optimised Knots')
axs[1].plot(knot_demonstrate_time, imfs[1, :], linewidth=2, zorder=100)
axs[1].set_yticks(ticks=[-2, 0, 2])
axs[1].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[1].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[2].set_title('IMF 2 and Statically Optimised Knots')
axs[2].plot(knot_demonstrate_time, imfs[2, :], linewidth=2, zorder=100)
axs[2].set_yticks(ticks=[-2, 0, 2])
axs[2].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[2].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[0].plot(knots[0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
axs[0].legend(loc='lower left')
axs[1].plot(knots[0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
axs[2].plot(knots[0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
for i in range(3):
for j in range(1, len(knots)):
axs[i].plot(knots[j] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey')
plt.savefig('jss_figures/knot_1.png')
plt.show()
# plot 1c - addition
knot_demonstrate_time = np.linspace(0, 2 * np.pi, 1001)
knot_demonstrate_time_series = np.sin(knot_demonstrate_time) + np.sin(5 * knot_demonstrate_time)
emd = EMD(time=knot_demonstrate_time, time_series=knot_demonstrate_time_series)
imfs, _, _, _, knots, _, _ = emd.empirical_mode_decomposition(edge_effect='anti-symmetric',
optimise_knots=2, verbose=False)
fig, axs = plt.subplots(3, 1)
fig.subplots_adjust(hspace=0.6)
plt.gcf().subplots_adjust(bottom=0.10)
axs[0].set_title('Time Series and Dynamically Optimised Knots')
axs[0].plot(knot_demonstrate_time, knot_demonstrate_time_series, linewidth=2, zorder=100)
axs[0].set_yticks(ticks=[-2, 0, 2])
axs[0].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[0].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[1].set_title('IMF 1 and Dynamically Knots')
axs[1].plot(knot_demonstrate_time, imfs[1, :], linewidth=2, zorder=100)
axs[1].set_yticks(ticks=[-2, 0, 2])
axs[1].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[1].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[2].set_title('IMF 2 and Dynamically Knots')
axs[2].plot(knot_demonstrate_time, imfs[2, :], linewidth=2, zorder=100)
axs[2].set_yticks(ticks=[-2, 0, 2])
axs[2].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[2].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[0].plot(knots[0][0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
axs[0].legend(loc='lower left')
axs[1].plot(knots[1][0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
axs[2].plot(knots[2][0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
for i in range(3):
for j in range(1, len(knots[i])):
axs[i].plot(knots[i][j] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey')
plt.savefig('jss_figures/knot_2.png')
plt.show()
# plot 1d - addition
window = 81
fig, axs = plt.subplots(2, 1)
fig.subplots_adjust(hspace=0.4)
figure_size = plt.gcf().get_size_inches()
factor = 0.8
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.gcf().subplots_adjust(bottom=0.10)
axs[0].set_title('Preprocess Filtering Demonstration')
axs[1].set_title('Zoomed Region')
preprocess_time = pseudo_alg_time.copy()
np.random.seed(1)
random.seed(1)
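# corrupt the clean signal with N(0, 0.1) noise everywhere and additional N(0, 1) noise at 500 random samples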
preprocess_time_series = pseudo_alg_time_series + np.random.normal(0, 0.1, len(preprocess_time))
for i in random.sample(range(1000), 500):
preprocess_time_series[i] += np.random.normal(0, 1)
preprocess = Preprocess(time=preprocess_time, time_series=preprocess_time_series)
axs[0].plot(preprocess_time, preprocess_time_series, label='x(t)')
axs[0].plot(pseudo_alg_time, pseudo_alg_time_series, '--', c='purple',
label=textwrap.fill('Noiseless time series', 12))
axs[0].plot(preprocess_time, preprocess.mean_filter(window_width=window)[1], label=textwrap.fill('Mean filter', 12))
axs[0].plot(preprocess_time, preprocess.median_filter(window_width=window)[1], label=textwrap.fill('Median filter', 13))
axs[0].plot(preprocess_time, preprocess.winsorize(window_width=window, a=0.8)[1], label=textwrap.fill('Winsorize filter', 12))
axs[0].plot(preprocess_time, preprocess.winsorize_interpolate(window_width=window, a=0.8)[1],
            label=textwrap.fill('Winsorize interpolation filter', 14))
axs[0].plot(preprocess_time, preprocess.quantile_filter(window_width=window, q=0.90)[1], c='grey',
label=textwrap.fill('Quantile window', 12))
axs[0].plot(preprocess_time, preprocess.quantile_filter(window_width=window, q=0.10)[1], c='grey')
axs[0].plot(np.linspace(0.85 * np.pi, 1.15 * np.pi, 101), -3 * np.ones(101), '--', c='black',
label=textwrap.fill('Zoomed region', 10))
axs[0].plot(np.linspace(0.85 * np.pi, 1.15 * np.pi, 101), 3 * np.ones(101), '--', c='black')
axs[0].plot(0.85 * np.pi * np.ones(101), np.linspace(-3, 3, 101), '--', c='black')
axs[0].plot(1.15 * np.pi * np.ones(101), np.linspace(-3, 3, 101), '--', c='black')
axs[0].set_yticks(ticks=[-2, 0, 2])
axs[0].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[0].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[1].plot(preprocess_time, preprocess_time_series, label='x(t)')
axs[1].plot(pseudo_alg_time, pseudo_alg_time_series, '--', c='purple', label=textwrap.fill('Noiseless time series', 12))
axs[1].plot(preprocess_time, preprocess.mean_filter(window_width=window)[1], label=textwrap.fill('Mean filter', 12))
axs[1].plot(preprocess_time, preprocess.median_filter(window_width=window)[1], label=textwrap.fill('Median filter', 13))
axs[1].plot(preprocess_time, preprocess.winsorize(window_width=window, a=0.8)[1], label=textwrap.fill('Winsorize filter', 12))
axs[1].plot(preprocess_time, preprocess.winsorize_interpolate(window_width=window, a=0.8)[1],
            label=textwrap.fill('Winsorize interpolation filter', 14))
axs[1].plot(preprocess_time, preprocess.quantile_filter(window_width=window, q=0.90)[1], c='grey',
label=textwrap.fill('Quantile window', 12))
axs[1].plot(preprocess_time, preprocess.quantile_filter(window_width=window, q=0.10)[1], c='grey')
axs[1].set_xlim(0.85 * np.pi, 1.15 * np.pi)
axs[1].set_ylim(-3, 3)
axs[1].set_yticks(ticks=[-2, 0, 2])
axs[1].set_xticks(ticks=[np.pi])
axs[1].set_xticklabels(labels=[r'$\pi$'])
box_0 = axs[0].get_position()
axs[0].set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.85, box_0.height])
axs[0].legend(loc='center left', bbox_to_anchor=(1, -0.15))
box_1 = axs[1].get_position()
axs[1].set_position([box_1.x0 - 0.05, box_1.y0, box_1.width * 0.85, box_1.height])
plt.savefig('jss_figures/preprocess_filter.png')
plt.show()
# plot 1e - addition
fig, axs = plt.subplots(2, 1)
fig.subplots_adjust(hspace=0.4)
figure_size = plt.gcf().get_size_inches()
factor = 0.8
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.gcf().subplots_adjust(bottom=0.10)
axs[0].set_title('Preprocess Smoothing Demonstration')
axs[1].set_title('Zoomed Region')
axs[0].plot(preprocess_time, preprocess_time_series, label='x(t)')
axs[0].plot(pseudo_alg_time, pseudo_alg_time_series, '--', c='purple',
label=textwrap.fill('Noiseless time series', 12))
axs[0].plot(preprocess_time, preprocess.hp()[1],
label=textwrap.fill('Hodrick-Prescott smoothing', 12))
axs[0].plot(preprocess_time, preprocess.hw(order=51)[1],
label=textwrap.fill('Henderson-Whittaker smoothing', 13))
downsampled_and_decimated = preprocess.downsample()
axs[0].plot(downsampled_and_decimated[0], downsampled_and_decimated[1],
label=textwrap.fill('Downsampled & decimated', 11))
downsampled = preprocess.downsample(decimate=False)
axs[0].plot(downsampled[0], downsampled[1],
label=textwrap.fill('Downsampled', 13))
axs[0].plot(np.linspace(0.85 * np.pi, 1.15 * np.pi, 101), -3 * np.ones(101), '--', c='black',
label=textwrap.fill('Zoomed region', 10))
axs[0].plot(np.linspace(0.85 * np.pi, 1.15 * np.pi, 101), 3 * np.ones(101), '--', c='black')
axs[0].plot(0.85 * np.pi * np.ones(101), np.linspace(-3, 3, 101), '--', c='black')
axs[0].plot(1.15 * np.pi * np.ones(101), np.linspace(-3, 3, 101), '--', c='black')
axs[0].set_yticks(ticks=[-2, 0, 2])
axs[0].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[0].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[1].plot(preprocess_time, preprocess_time_series, label='x(t)')
axs[1].plot(pseudo_alg_time, pseudo_alg_time_series, '--', c='purple',
label=textwrap.fill('Noiseless time series', 12))
axs[1].plot(preprocess_time, preprocess.hp()[1],
label=textwrap.fill('Hodrick-Prescott smoothing', 12))
axs[1].plot(preprocess_time, preprocess.hw(order=51)[1],
label=textwrap.fill('Henderson-Whittaker smoothing', 13))
axs[1].plot(downsampled_and_decimated[0], downsampled_and_decimated[1],
label=textwrap.fill('Downsampled & decimated', 13))
axs[1].plot(downsampled[0], downsampled[1],
label=textwrap.fill('Downsampled', 13))
axs[1].set_xlim(0.85 * np.pi, 1.15 * np.pi)
axs[1].set_ylim(-3, 3)
axs[1].set_yticks(ticks=[-2, 0, 2])
axs[1].set_xticks(ticks=[np.pi])
axs[1].set_xticklabels(labels=[r'$\pi$'])
box_0 = axs[0].get_position()
axs[0].set_position([box_0.x0 - 0.06, box_0.y0, box_0.width * 0.85, box_0.height])
axs[0].legend(loc='center left', bbox_to_anchor=(1, -0.15))
box_1 = axs[1].get_position()
axs[1].set_position([box_1.x0 - 0.06, box_1.y0, box_1.width * 0.85, box_1.height])
plt.savefig('jss_figures/preprocess_smooth.png')
plt.show()
# plot 2
fig, axs = plt.subplots(1, 2, sharey=True)
axs[0].set_title('Cubic B-Spline Bases')
axs[0].plot(time, b_spline_basis[2, :].T, '--', label='Basis 1')
axs[0].plot(time, b_spline_basis[3, :].T, '--', label='Basis 2')
axs[0].plot(time, b_spline_basis[4, :].T, '--', label='Basis 3')
axs[0].plot(time, b_spline_basis[5, :].T, '--', label='Basis 4')
axs[0].legend(loc='upper left')
axs[0].plot(5 * np.ones(100), np.linspace(-0.2, 0.8, 100), 'k-')
axs[0].plot(6 * np.ones(100), np.linspace(-0.2, 0.8, 100), 'k-')
axs[0].set_xticks([5, 6])
axs[0].set_xticklabels([r'$ \tau_k $', r'$ \tau_{k+1} $'])
axs[0].set_xlim(4.5, 6.5)
axs[1].set_title('Cubic Hermite Spline Bases')
axs[1].plot(time, chsi_basis[10, :].T, '--')
axs[1].plot(time, chsi_basis[11, :].T, '--')
axs[1].plot(time, chsi_basis[12, :].T, '--')
axs[1].plot(time, chsi_basis[13, :].T, '--')
axs[1].plot(5 * np.ones(100), np.linspace(-0.2, 1.2, 100), 'k-')
axs[1].plot(6 * np.ones(100), np.linspace(-0.2, 1.2, 100), 'k-')
axs[1].set_xticks([5, 6])
axs[1].set_xticklabels([r'$ \tau_k $', r'$ \tau_{k+1} $'])
axs[1].set_xlim(4.5, 6.5)
plt.savefig('jss_figures/comparing_bases.png')
plt.show()
# plot 3
a = 0.25
width = 0.2
time = np.linspace(0, (5 - a) * np.pi, 1001)
time_series = np.cos(time) + np.cos(5 * time)
utils = emd_utils.Utility(time=time, time_series=time_series)
max_bool = utils.max_bool_func_1st_order_fd()
maxima_x = time[max_bool]
maxima_y = time_series[max_bool]
min_bool = utils.min_bool_func_1st_order_fd()
minima_x = time[min_bool]
minima_y = time_series[min_bool]
max_dash_time = np.linspace(maxima_x[-1] - width, maxima_x[-1] + width, 101)
max_dash = maxima_y[-1] * np.ones_like(max_dash_time)
min_dash_time = np.linspace(minima_x[-1] - width, minima_x[-1] + width, 101)
min_dash = minima_y[-1] * np.ones_like(min_dash_time)
dash_1_time = np.linspace(maxima_x[-1], minima_x[-1], 101)
dash_1 = np.linspace(maxima_y[-1], minima_y[-1], 101)
max_discard = maxima_y[-1]
max_discard_time = minima_x[-1] - maxima_x[-1] + minima_x[-1]
max_discard_dash_time = np.linspace(max_discard_time - width, max_discard_time + width, 101)
max_discard_dash = max_discard * np.ones_like(max_discard_dash_time)
dash_2_time = np.linspace(minima_x[-1], max_discard_time, 101)
dash_2 = np.linspace(minima_y[-1], max_discard, 101)
end_point_time = time[-1]
end_point = time_series[-1]
time_reflect = np.linspace((5 - a) * np.pi, (5 + a) * np.pi, 101)
time_series_reflect = np.flip(np.cos(np.linspace((5 - 2.6 * a) * np.pi,
(5 - a) * np.pi, 101)) + np.cos(5 * np.linspace((5 - 2.6 * a) * np.pi,
(5 - a) * np.pi, 101)))
time_series_anti_reflect = time_series_reflect[0] - time_series_reflect
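# time_series_reflect mirrors the signal about the right boundary (symmetric extension), while
# time_series_anti_reflect negates that mirrored segment's deviation from the boundary value
# (anti-symmetric extension); both are plotted beyond (5 - a) * pi for comparison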
utils = emd_utils.Utility(time=time, time_series=time_series_anti_reflect)
anti_max_bool = utils.max_bool_func_1st_order_fd()
anti_max_point_time = time_reflect[anti_max_bool]
anti_max_point = time_series_anti_reflect[anti_max_bool]
utils = emd_utils.Utility(time=time, time_series=time_series_reflect)
no_anchor_max_time = time_reflect[utils.max_bool_func_1st_order_fd()]
no_anchor_max = time_series_reflect[utils.max_bool_func_1st_order_fd()]
point_1 = 5.4
length_distance = np.linspace(maxima_y[-1], minima_y[-1], 101)
length_distance_time = point_1 * np.pi * np.ones_like(length_distance)
length_time = np.linspace(point_1 * np.pi - width, point_1 * np.pi + width, 101)
length_top = maxima_y[-1] * np.ones_like(length_time)
length_bottom = minima_y[-1] * np.ones_like(length_time)
point_2 = 5.2
length_distance_2 = np.linspace(time_series[-1], minima_y[-1], 101)
length_distance_time_2 = point_2 * np.pi * np.ones_like(length_distance_2)
length_time_2 = np.linspace(point_2 * np.pi - width, point_2 * np.pi + width, 101)
length_top_2 = time_series[-1] * np.ones_like(length_time_2)
length_bottom_2 = minima_y[-1] * np.ones_like(length_time_2)
symmetry_axis_1_time = minima_x[-1] * np.ones(101)
symmetry_axis_2_time = time[-1] * np.ones(101)
symmetry_axis = np.linspace(-2, 2, 101)
end_time = np.linspace(time[-1] - width, time[-1] + width, 101)
end_signal = time_series[-1] * np.ones_like(end_time)
anti_symmetric_time = np.linspace(time[-1] - 0.5, time[-1] + 0.5, 101)
anti_symmetric_signal = time_series[-1] * np.ones_like(anti_symmetric_time)
ax = plt.subplot(111)
plt.gcf().subplots_adjust(bottom=0.10)
plt.plot(time, time_series, linewidth=2, label='Signal')
plt.title('Symmetry Edge Effects Example')
plt.plot(time_reflect, time_series_reflect, 'g--', LineWidth=2, label=textwrap.fill('Symmetric signal', 10))
plt.plot(time_reflect[:51], time_series_anti_reflect[:51], '--', c='purple', LineWidth=2,
label=textwrap.fill('Anti-symmetric signal', 10))
plt.plot(max_dash_time, max_dash, 'k-')
plt.plot(min_dash_time, min_dash, 'k-')
plt.plot(dash_1_time, dash_1, 'k--')
plt.plot(dash_2_time, dash_2, 'k--')
plt.plot(length_distance_time, length_distance, 'k--')
plt.plot(length_distance_time_2, length_distance_2, 'k--')
plt.plot(length_time, length_top, 'k-')
plt.plot(length_time, length_bottom, 'k-')
plt.plot(length_time_2, length_top_2, 'k-')
plt.plot(length_time_2, length_bottom_2, 'k-')
plt.plot(end_time, end_signal, 'k-')
plt.plot(symmetry_axis_1_time, symmetry_axis, 'r--', zorder=1)
plt.plot(anti_symmetric_time, anti_symmetric_signal, 'r--', zorder=1)
plt.plot(symmetry_axis_2_time, symmetry_axis, 'r--', label=textwrap.fill('Axes of symmetry', 10), zorder=1)
plt.text(5.1 * np.pi, -0.7, r'$\beta$L')
plt.text(5.34 * np.pi, -0.05, 'L')
plt.scatter(maxima_x, maxima_y, c='r', zorder=4, label='Maxima')
plt.scatter(minima_x, minima_y, c='b', zorder=4, label='Minima')
plt.scatter(max_discard_time, max_discard, c='purple', zorder=4, label=textwrap.fill('Symmetric Discard maxima', 10))
plt.scatter(end_point_time, end_point, c='orange', zorder=4, label=textwrap.fill('Symmetric Anchor maxima', 10))
plt.scatter(anti_max_point_time, anti_max_point, c='green', zorder=4, label=textwrap.fill('Anti-Symmetric maxima', 10))
plt.scatter(no_anchor_max_time, no_anchor_max, c='gray', zorder=4, label=textwrap.fill('Symmetric maxima', 10))
plt.xlim(3.9 * np.pi, 5.5 * np.pi)
plt.xticks((4 * np.pi, 5 * np.pi), (r'4$\pi$', r'5$\pi$'))
plt.yticks((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.85, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/edge_effects_symmetry_anti.png')
plt.show()
# plot 4
a = 0.21
width = 0.2
time = np.linspace(0, (5 - a) * np.pi, 1001)
time_series = np.cos(time) + np.cos(5 * time)
utils = emd_utils.Utility(time=time, time_series=time_series)
max_bool = utils.max_bool_func_1st_order_fd()
maxima_x = time[max_bool]
maxima_y = time_series[max_bool]
min_bool = utils.min_bool_func_1st_order_fd()
minima_x = time[min_bool]
minima_y = time_series[min_bool]
max_dash_1 = np.linspace(maxima_y[-1] - width, maxima_y[-1] + width, 101)
max_dash_2 = np.linspace(maxima_y[-2] - width, maxima_y[-2] + width, 101)
max_dash_time_1 = maxima_x[-1] * np.ones_like(max_dash_1)
max_dash_time_2 = maxima_x[-2] * np.ones_like(max_dash_1)
min_dash_1 = np.linspace(minima_y[-1] - width, minima_y[-1] + width, 101)
min_dash_2 = np.linspace(minima_y[-2] - width, minima_y[-2] + width, 101)
min_dash_time_1 = minima_x[-1] * np.ones_like(min_dash_1)
min_dash_time_2 = minima_x[-2] * np.ones_like(min_dash_1)
dash_1_time = np.linspace(maxima_x[-1], minima_x[-1], 101)
dash_1 = np.linspace(maxima_y[-1], minima_y[-1], 101)
dash_2_time = np.linspace(maxima_x[-1], minima_x[-2], 101)
dash_2 = np.linspace(maxima_y[-1], minima_y[-2], 101)
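# slope-based edge effect: the first maximum (minimum) beyond the boundary is placed one
# inter-extrema spacing outside the signal, with its height extrapolated along a line of
# slope s1 (s2) measured between the last observed extrema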
s1 = (minima_y[-2] - maxima_y[-1]) / (minima_x[-2] - maxima_x[-1])
slope_based_maximum_time = maxima_x[-1] + (maxima_x[-1] - maxima_x[-2])
slope_based_maximum = minima_y[-1] + (slope_based_maximum_time - minima_x[-1]) * s1
max_dash_time_3 = slope_based_maximum_time * np.ones_like(max_dash_1)
max_dash_3 = np.linspace(slope_based_maximum - width, slope_based_maximum + width, 101)
dash_3_time = np.linspace(minima_x[-1], slope_based_maximum_time, 101)
dash_3 = np.linspace(minima_y[-1], slope_based_maximum, 101)
s2 = (minima_y[-1] - maxima_y[-1]) / (minima_x[-1] - maxima_x[-1])
slope_based_minimum_time = minima_x[-1] + (minima_x[-1] - minima_x[-2])
slope_based_minimum = slope_based_maximum - (slope_based_maximum_time - slope_based_minimum_time) * s2
min_dash_time_3 = slope_based_minimum_time * np.ones_like(min_dash_1)
min_dash_3 = np.linspace(slope_based_minimum - width, slope_based_minimum + width, 101)
dash_4_time = np.linspace(slope_based_maximum_time, slope_based_minimum_time)
dash_4 = np.linspace(slope_based_maximum, slope_based_minimum)
maxima_dash = np.linspace(2.5 - width, 2.5 + width, 101)
maxima_dash_time_1 = maxima_x[-2] * np.ones_like(maxima_dash)
maxima_dash_time_2 = maxima_x[-1] * np.ones_like(maxima_dash)
maxima_dash_time_3 = slope_based_maximum_time * np.ones_like(maxima_dash)
maxima_line_dash_time = np.linspace(maxima_x[-2], slope_based_maximum_time, 101)
maxima_line_dash = 2.5 * np.ones_like(maxima_line_dash_time)
minima_dash = np.linspace(-3.4 - width, -3.4 + width, 101)
minima_dash_time_1 = minima_x[-2] * np.ones_like(minima_dash)
minima_dash_time_2 = minima_x[-1] * np.ones_like(minima_dash)
minima_dash_time_3 = slope_based_minimum_time * np.ones_like(minima_dash)
minima_line_dash_time = np.linspace(minima_x[-2], slope_based_minimum_time, 101)
minima_line_dash = -3.4 * np.ones_like(minima_line_dash_time)
# slightly edit signal to make difference between slope-based method and improved slope-based method more clear
time_series[time >= minima_x[-1]] = 1.5 * (time_series[time >= minima_x[-1]] - time_series[time == minima_x[-1]]) + \
time_series[time == minima_x[-1]]
improved_slope_based_maximum_time = time[-1]
improved_slope_based_maximum = time_series[-1]
improved_slope_based_minimum_time = slope_based_minimum_time
improved_slope_based_minimum = improved_slope_based_maximum + s2 * (improved_slope_based_minimum_time -
improved_slope_based_maximum_time)
min_dash_4 = np.linspace(improved_slope_based_minimum - width, improved_slope_based_minimum + width, 101)
min_dash_time_4 = improved_slope_based_minimum_time * np.ones_like(min_dash_4)
dash_final_time = np.linspace(improved_slope_based_maximum_time, improved_slope_based_minimum_time, 101)
dash_final = np.linspace(improved_slope_based_maximum, improved_slope_based_minimum, 101)
ax = plt.subplot(111)
figure_size = plt.gcf().get_size_inches()
factor = 0.9
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.gcf().subplots_adjust(bottom=0.10)
plt.plot(time, time_series, linewidth=2, label='Signal')
plt.title('Slope-Based Edge Effects Example')
plt.plot(max_dash_time_1, max_dash_1, 'k-')
plt.plot(max_dash_time_2, max_dash_2, 'k-')
plt.plot(max_dash_time_3, max_dash_3, 'k-')
plt.plot(min_dash_time_1, min_dash_1, 'k-')
plt.plot(min_dash_time_2, min_dash_2, 'k-')
plt.plot(min_dash_time_3, min_dash_3, 'k-')
plt.plot(min_dash_time_4, min_dash_4, 'k-')
plt.plot(maxima_dash_time_1, maxima_dash, 'k-')
plt.plot(maxima_dash_time_2, maxima_dash, 'k-')
plt.plot(maxima_dash_time_3, maxima_dash, 'k-')
plt.plot(minima_dash_time_1, minima_dash, 'k-')
plt.plot(minima_dash_time_2, minima_dash, 'k-')
plt.plot(minima_dash_time_3, minima_dash, 'k-')
plt.text(4.34 * np.pi, -3.2, r'$\Delta{t^{min}_{m}}$')
plt.text(4.74 * np.pi, -3.2, r'$\Delta{t^{min}_{m}}$')
plt.text(4.12 * np.pi, 2, r'$\Delta{t^{max}_{M}}$')
plt.text(4.50 * np.pi, 2, r'$\Delta{t^{max}_{M}}$')
plt.text(4.30 * np.pi, 0.35, r'$s_1$')
plt.text(4.43 * np.pi, -0.20, r'$s_2$')
plt.text(4.30 * np.pi + (minima_x[-1] - minima_x[-2]), 0.35 + (minima_y[-1] - minima_y[-2]), r'$s_1$')
plt.text(4.43 * np.pi + (slope_based_minimum_time - minima_x[-1]),
-0.20 + (slope_based_minimum - minima_y[-1]), r'$s_2$')
plt.text(4.50 * np.pi + (slope_based_minimum_time - minima_x[-1]),
1.20 + (slope_based_minimum - minima_y[-1]), r'$s_2$')
plt.plot(minima_line_dash_time, minima_line_dash, 'k--')
plt.plot(maxima_line_dash_time, maxima_line_dash, 'k--')
plt.plot(dash_1_time, dash_1, 'k--')
plt.plot(dash_2_time, dash_2, 'k--')
plt.plot(dash_3_time, dash_3, 'k--')
plt.plot(dash_4_time, dash_4, 'k--')
plt.plot(dash_final_time, dash_final, 'k--')
plt.scatter(maxima_x, maxima_y, c='r', zorder=4, label='Maxima')
plt.scatter(minima_x, minima_y, c='b', zorder=4, label='Minima')
plt.scatter(slope_based_maximum_time, slope_based_maximum, c='orange', zorder=4,
label=textwrap.fill('Slope-based maximum', 11))
plt.scatter(slope_based_minimum_time, slope_based_minimum, c='purple', zorder=4,
label=textwrap.fill('Slope-based minimum', 11))
plt.scatter(improved_slope_based_maximum_time, improved_slope_based_maximum, c='deeppink', zorder=4,
label=textwrap.fill('Improved slope-based maximum', 11))
plt.scatter(improved_slope_based_minimum_time, improved_slope_based_minimum, c='dodgerblue', zorder=4,
label=textwrap.fill('Improved slope-based minimum', 11))
plt.xlim(3.9 * np.pi, 5.5 * np.pi)
plt.xticks((4 * np.pi, 5 * np.pi), (r'4$\pi$', r'5$\pi$'))
plt.yticks((-3, -2, -1, 0, 1, 2), ('-3', '-2', '-1', '0', '1', '2'))
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.85, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/edge_effects_slope_based.png')
plt.show()
# plot 5
a = 0.25
width = 0.2
time = np.linspace(0, (5 - a) * np.pi, 1001)
time_series = np.cos(time) + np.cos(5 * time)
utils = emd_utils.Utility(time=time, time_series=time_series)
max_bool = utils.max_bool_func_1st_order_fd()
maxima_x = time[max_bool]
maxima_y = time_series[max_bool]
min_bool = utils.min_bool_func_1st_order_fd()
minima_x = time[min_bool]
minima_y = time_series[min_bool]
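# characteristic-wave parameters: a_1 and a_2 are half the final peak-to-trough amplitudes,
# p_1 and p_2 are twice the final peak-to-trough time separations; these define the Huang and
# Coughlin characteristic waves extended beyond the right edge, alongside simple averaged extrema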
A2 = np.abs(maxima_y[-2] - minima_y[-2]) / 2
A1 = np.abs(maxima_y[-1] - minima_y[-1]) / 2
P2 = 2 * np.abs(maxima_x[-2] - minima_x[-2])
P1 = 2 * np.abs(maxima_x[-1] - minima_x[-1])
Huang_time = (P1 / P2) * (time[time >= maxima_x[-2]] - time[time == maxima_x[-2]]) + maxima_x[-1]
Huang_wave = (A1 / A2) * (time_series[time >= maxima_x[-2]] - time_series[time == maxima_x[-2]]) + maxima_y[-1]
Coughlin_time = Huang_time
Coughlin_wave = A1 * np.cos(2 * np.pi * (1 / P1) * (Coughlin_time - Coughlin_time[0]))
Average_max_time = maxima_x[-1] + (maxima_x[-1] - maxima_x[-2])
Average_max = (maxima_y[-2] + maxima_y[-1]) / 2
Average_min_time = minima_x[-1] + (minima_x[-1] - minima_x[-2])
Average_min = (minima_y[-2] + minima_y[-1]) / 2
utils_Huang = emd_utils.Utility(time=time, time_series=Huang_wave)
Huang_max_bool = utils_Huang.max_bool_func_1st_order_fd()
Huang_min_bool = utils_Huang.min_bool_func_1st_order_fd()
utils_Coughlin = emd_utils.Utility(time=time, time_series=Coughlin_wave)
Coughlin_max_bool = utils_Coughlin.max_bool_func_1st_order_fd()
Coughlin_min_bool = utils_Coughlin.min_bool_func_1st_order_fd()
Huang_max_time = Huang_time[Huang_max_bool]
Huang_max = Huang_wave[Huang_max_bool]
Huang_min_time = Huang_time[Huang_min_bool]
Huang_min = Huang_wave[Huang_min_bool]
Coughlin_max_time = Coughlin_time[Coughlin_max_bool]
Coughlin_max = Coughlin_wave[Coughlin_max_bool]
Coughlin_min_time = Coughlin_time[Coughlin_min_bool]
Coughlin_min = Coughlin_wave[Coughlin_min_bool]
max_2_x_time = np.linspace(maxima_x[-2] - width, maxima_x[-2] + width, 101)
max_2_x_time_side = np.linspace(5.3 * np.pi - width, 5.3 * np.pi + width, 101)
max_2_x = maxima_y[-2] * np.ones_like(max_2_x_time)
min_2_x_time = np.linspace(minima_x[-2] - width, minima_x[-2] + width, 101)
min_2_x_time_side = np.linspace(5.3 * np.pi - width, 5.3 * np.pi + width, 101)
min_2_x = minima_y[-2] * np.ones_like(min_2_x_time)
dash_max_min_2_x = np.linspace(minima_y[-2], maxima_y[-2], 101)
dash_max_min_2_x_time = 5.3 * np.pi * np.ones_like(dash_max_min_2_x)
max_2_y = np.linspace(maxima_y[-2] - width, maxima_y[-2] + width, 101)
max_2_y_side = np.linspace(-1.8 - width, -1.8 + width, 101)
max_2_y_time = maxima_x[-2] * np.ones_like(max_2_y)
min_2_y = np.linspace(minima_y[-2] - width, minima_y[-2] + width, 101)
min_2_y_side = np.linspace(-1.8 - width, -1.8 + width, 101)
min_2_y_time = minima_x[-2] * np.ones_like(min_2_y)
dash_max_min_2_y_time = np.linspace(minima_x[-2], maxima_x[-2], 101)
dash_max_min_2_y = -1.8 * np.ones_like(dash_max_min_2_y_time)
max_1_x_time = np.linspace(maxima_x[-1] - width, maxima_x[-1] + width, 101)
max_1_x_time_side = np.linspace(5.4 * np.pi - width, 5.4 * np.pi + width, 101)
max_1_x = maxima_y[-1] * np.ones_like(max_1_x_time)
min_1_x_time = np.linspace(minima_x[-1] - width, minima_x[-1] + width, 101)
min_1_x_time_side = np.linspace(5.4 * np.pi - width, 5.4 * np.pi + width, 101)
min_1_x = minima_y[-1] * np.ones_like(min_1_x_time)
dash_max_min_1_x = np.linspace(minima_y[-1], maxima_y[-1], 101)
dash_max_min_1_x_time = 5.4 * np.pi * np.ones_like(dash_max_min_1_x)
max_1_y = np.linspace(maxima_y[-1] - width, maxima_y[-1] + width, 101)
max_1_y_side = np.linspace(-2.1 - width, -2.1 + width, 101)
max_1_y_time = maxima_x[-1] * np.ones_like(max_1_y)
min_1_y = np.linspace(minima_y[-1] - width, minima_y[-1] + width, 101)
min_1_y_side = np.linspace(-2.1 - width, -2.1 + width, 101)
min_1_y_time = minima_x[-1] * np.ones_like(min_1_y)
dash_max_min_1_y_time = np.linspace(minima_x[-1], maxima_x[-1], 101)
dash_max_min_1_y = -2.1 * np.ones_like(dash_max_min_1_y_time)
ax = plt.subplot(111)
plt.gcf().subplots_adjust(bottom=0.10)
plt.title('Characteristic Wave Effects Example')
plt.plot(time, time_series, linewidth=2, label='Signal')
plt.scatter(Huang_max_time, Huang_max, c='magenta', zorder=4, label=textwrap.fill('Huang maximum', 10))
plt.scatter(Huang_min_time, Huang_min, c='lime', zorder=4, label=textwrap.fill('Huang minimum', 10))
plt.scatter(Coughlin_max_time, Coughlin_max, c='darkorange', zorder=4,
label=textwrap.fill('Coughlin maximum', 14))
plt.scatter(Coughlin_min_time, Coughlin_min, c='dodgerblue', zorder=4,
label=textwrap.fill('Coughlin minimum', 14))
plt.scatter(Average_max_time, Average_max, c='orangered', zorder=4,
label=textwrap.fill('Average maximum', 14))
plt.scatter(Average_min_time, Average_min, c='cyan', zorder=4,
label=textwrap.fill('Average minimum', 14))
plt.scatter(maxima_x, maxima_y, c='r', zorder=4, label='Maxima')
plt.scatter(minima_x, minima_y, c='b', zorder=4, label='Minima')
plt.plot(Huang_time, Huang_wave, '--', c='darkviolet', label=textwrap.fill('Huang Characteristic Wave', 14))
plt.plot(Coughlin_time, Coughlin_wave, '--', c='darkgreen', label=textwrap.fill('Coughlin Characteristic Wave', 14))
plt.plot(max_2_x_time, max_2_x, 'k-')
plt.plot(max_2_x_time_side, max_2_x, 'k-')
plt.plot(min_2_x_time, min_2_x, 'k-')
plt.plot(min_2_x_time_side, min_2_x, 'k-')
plt.plot(dash_max_min_2_x_time, dash_max_min_2_x, 'k--')
plt.text(5.16 * np.pi, 0.85, r'$2a_2$')
plt.plot(max_2_y_time, max_2_y, 'k-')
plt.plot(max_2_y_time, max_2_y_side, 'k-')
plt.plot(min_2_y_time, min_2_y, 'k-')
plt.plot(min_2_y_time, min_2_y_side, 'k-')
plt.plot(dash_max_min_2_y_time, dash_max_min_2_y, 'k--')
plt.text(4.08 * np.pi, -2.2, r'$\frac{p_2}{2}$')
plt.plot(max_1_x_time, max_1_x, 'k-')
plt.plot(max_1_x_time_side, max_1_x, 'k-')
plt.plot(min_1_x_time, min_1_x, 'k-')
plt.plot(min_1_x_time_side, min_1_x, 'k-')
plt.plot(dash_max_min_1_x_time, dash_max_min_1_x, 'k--')
plt.text(5.42 * np.pi, -0.1, r'$2a_1$')
plt.plot(max_1_y_time, max_1_y, 'k-')
plt.plot(max_1_y_time, max_1_y_side, 'k-')
plt.plot(min_1_y_time, min_1_y, 'k-')
plt.plot(min_1_y_time, min_1_y_side, 'k-')
plt.plot(dash_max_min_1_y_time, dash_max_min_1_y, 'k--')
plt.text(4.48 * np.pi, -2.5, r'$\frac{p_1}{2}$')
plt.xlim(3.9 * np.pi, 5.6 * np.pi)
plt.xticks((4 * np.pi, 5 * np.pi), (r'4$\pi$', r'5$\pi$'))
plt.yticks((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.84, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/edge_effects_characteristic_wave.png')
plt.show()
# plot 6
t = np.linspace(5, 95, 100)
signal_orig = np.cos(2 * np.pi * t / 50) + 0.6 * np.cos(2 * np.pi * t / 25) + 0.5 * np.sin(2 * np.pi * t / 200)
util_nn = emd_utils.Utility(time=t, time_series=signal_orig)
maxima = signal_orig[util_nn.max_bool_func_1st_order_fd()]
minima = signal_orig[util_nn.min_bool_func_1st_order_fd()]
cs_max = CubicSpline(t[util_nn.max_bool_func_1st_order_fd()], maxima)
cs_min = CubicSpline(t[util_nn.min_bool_func_1st_order_fd()], minima)
time = np.linspace(0, 5 * np.pi, 1001)
lsq_signal = np.cos(time) + np.cos(5 * time)
knots = np.linspace(0, 5 * np.pi, 101)
time_extended = time_extension(time)
time_series_extended = np.full_like(time_extended, np.nan)
time_series_extended[int(len(lsq_signal) - 1):int(2 * (len(lsq_signal) - 1) + 1)] = lsq_signal
neural_network_m = 200
neural_network_k = 100
# forward ->
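# each column of P holds a length-k window of the signal plus a bias row of ones;
# the targets t are the m samples that immediately follow each window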
P = np.zeros((int(neural_network_k + 1), neural_network_m))
for col in range(neural_network_m):
P[:-1, col] = lsq_signal[(-(neural_network_m + neural_network_k - col)):(-(neural_network_m - col))]
P[-1, col] = 1 # for additive constant
t = lsq_signal[-neural_network_m:]
# train the forward-extrapolation weights of the single neuron by gradient descent
seed_weights = np.ones(neural_network_k) / neural_network_k
weights = 0 * seed_weights.copy()
train_input = P[:-1, :]
lr = 0.01
for iterations in range(1000):
output = np.matmul(weights, train_input)
error = (t - output)
gradients = error * (- train_input)
    # average the gradients over the m training windows
average_gradients = np.mean(gradients, axis=1)
# steepest descent
max_gradient_vector = average_gradients * (np.abs(average_gradients) == max(np.abs(average_gradients)))
adjustment = - lr * average_gradients
# adjustment = - lr * max_gradient_vector
weights += adjustment
# end of forward weight training
weights_right = np.hstack((weights, 0))
max_count_right = 0
min_count_right = 0
i_right = 0
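# extend the signal forward one predicted sample at a time until the extension contains at least
# one maximum and one minimum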
while ((max_count_right < 1) or (min_count_right < 1)) and (i_right < len(lsq_signal) - 1):
time_series_extended[int(2 * (len(lsq_signal) - 1) + 1 + i_right)] = \
sum(weights_right * np.hstack((time_series_extended[
int(2 * (len(lsq_signal) - 1) + 1 - neural_network_k + i_right):
int(2 * (len(lsq_signal) - 1) + 1 + i_right)], 1)))
i_right += 1
if i_right > 1:
emd_utils_max = \
emd_utils.Utility(time=time_extended[int(2 * (len(lsq_signal) - 1) + 1):
int(2 * (len(lsq_signal) - 1) + 1 + i_right + 1)],
time_series=time_series_extended[int(2 * (len(lsq_signal) - 1) + 1):
int(2 * (len(lsq_signal) - 1) + 1 + i_right + 1)])
if sum(emd_utils_max.max_bool_func_1st_order_fd()) > 0:
max_count_right += 1
emd_utils_min = \
emd_utils.Utility(time=time_extended[int(2 * (len(lsq_signal) - 1) + 1):
int(2 * (len(lsq_signal) - 1) + 1 + i_right + 1)],
time_series=time_series_extended[int(2 * (len(lsq_signal) - 1) + 1):
int(2 * (len(lsq_signal) - 1) + 1 + i_right + 1)])
if sum(emd_utils_min.min_bool_func_1st_order_fd()) > 0:
min_count_right += 1
# backward <-
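# for the backward extension each window looks forward in time, and the weights are found by a
# convex least-squares fit (cvxpy with the ECOS solver) rather than by gradient descent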
P = np.zeros((int(neural_network_k + 1), neural_network_m))
for col in range(neural_network_m):
P[:-1, col] = lsq_signal[int(col + 1):int(col + neural_network_k + 1)]
P[-1, col] = 1 # for additive constant
t = lsq_signal[:neural_network_m]
vx = cvx.Variable(int(neural_network_k + 1))
objective = cvx.Minimize(cvx.norm((2 * (vx * P) + 1 - t), 2)) # linear activation function is arbitrary
prob = cvx.Problem(objective)
result = prob.solve(verbose=True, solver=cvx.ECOS)
weights_left = np.array(vx.value)
max_count_left = 0
min_count_left = 0
i_left = 0
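# extend the signal backwards one predicted sample at a time until the extension contains at
# least one maximum and one minimum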
while ((max_count_left < 1) or (min_count_left < 1)) and (i_left < len(lsq_signal) - 1):
time_series_extended[int(len(lsq_signal) - 2 - i_left)] = \
2 * sum(weights_left * np.hstack((time_series_extended[int(len(lsq_signal) - 1 - i_left):
int(len(lsq_signal) - 1 - i_left + neural_network_k)],
1))) + 1
i_left += 1
if i_left > 1:
emd_utils_max = \
emd_utils.Utility(time=time_extended[int(len(lsq_signal) - 1 - i_left):int(len(lsq_signal))],
time_series=time_series_extended[int(len(lsq_signal) - 1 - i_left):int(len(lsq_signal))])
if sum(emd_utils_max.max_bool_func_1st_order_fd()) > 0:
max_count_left += 1
emd_utils_min = \
emd_utils.Utility(time=time_extended[int(len(lsq_signal) - 1 - i_left):int(len(lsq_signal))],
time_series=time_series_extended[int(len(lsq_signal) - 1 - i_left):int(len(lsq_signal))])
if sum(emd_utils_min.min_bool_func_1st_order_fd()) > 0:
min_count_left += 1
lsq_utils = emd_utils.Utility(time=time, time_series=lsq_signal)
utils_extended = emd_utils.Utility(time=time_extended, time_series=time_series_extended)
maxima = lsq_signal[lsq_utils.max_bool_func_1st_order_fd()]
maxima_time = time[lsq_utils.max_bool_func_1st_order_fd()]
maxima_extrapolate = time_series_extended[utils_extended.max_bool_func_1st_order_fd()][-1]
maxima_extrapolate_time = time_extended[utils_extended.max_bool_func_1st_order_fd()][-1]
minima = lsq_signal[lsq_utils.min_bool_func_1st_order_fd()]
minima_time = time[lsq_utils.min_bool_func_1st_order_fd()]
minima_extrapolate = time_series_extended[utils_extended.min_bool_func_1st_order_fd()][-2:]
minima_extrapolate_time = time_extended[utils_extended.min_bool_func_1st_order_fd()][-2:]
ax = plt.subplot(111)
plt.gcf().subplots_adjust(bottom=0.10)
plt.title('Single Neuron Neural Network Example')
plt.plot(time, lsq_signal, zorder=2, label='Signal')
plt.plot(time_extended, time_series_extended, c='g', zorder=1, label=textwrap.fill('Extrapolated signal', 12))
plt.scatter(maxima_time, maxima, c='r', zorder=3, label='Maxima')
plt.scatter(minima_time, minima, c='b', zorder=3, label='Minima')
plt.scatter(maxima_extrapolate_time, maxima_extrapolate, c='magenta', zorder=3,
label=textwrap.fill('Extrapolated maxima', 12))
plt.scatter(minima_extrapolate_time, minima_extrapolate, c='cyan', zorder=4,
label=textwrap.fill('Extrapolated minima', 12))
plt.plot(((time[-302] + time[-301]) / 2) * np.ones(100), np.linspace(-2.75, 2.75, 100), c='k',
label=textwrap.fill('Neural network inputs', 13))
plt.plot(np.linspace(((time[-302] + time[-301]) / 2), ((time[-302] + time[-301]) / 2) + 0.1, 100),
-2.75 * np.ones(100), c='k')
plt.plot(np.linspace(((time[-302] + time[-301]) / 2), ((time[-302] + time[-301]) / 2) + 0.1, 100),
2.75 * np.ones(100), c='k')
plt.plot(np.linspace(((time_extended[-1001] + time_extended[-1002]) / 2),
((time_extended[-1001] + time_extended[-1002]) / 2) - 0.1, 100), -2.75 * np.ones(100), c='k')
plt.plot(np.linspace(((time_extended[-1001] + time_extended[-1002]) / 2),
((time_extended[-1001] + time_extended[-1002]) / 2) - 0.1, 100), 2.75 * np.ones(100), c='k')
plt.plot(((time_extended[-1001] + time_extended[-1002]) / 2) * np.ones(100), np.linspace(-2.75, 2.75, 100), c='k')
plt.plot(((time[-202] + time[-201]) / 2) * np.ones(100), np.linspace(-2.75, 2.75, 100), c='gray', linestyle='dashed',
label=textwrap.fill('Neural network targets', 13))
plt.plot(np.linspace(((time[-202] + time[-201]) / 2), ((time[-202] + time[-201]) / 2) + 0.1, 100),
-2.75 * np.ones(100), c='gray')
plt.plot(np.linspace(((time[-202] + time[-201]) / 2), ((time[-202] + time[-201]) / 2) + 0.1, 100),
2.75 * np.ones(100), c='gray')
plt.plot(np.linspace(((time_extended[-1001] + time_extended[-1000]) / 2),
((time_extended[-1001] + time_extended[-1000]) / 2) - 0.1, 100), -2.75 * np.ones(100), c='gray')
plt.plot(np.linspace(((time_extended[-1001] + time_extended[-1000]) / 2),
((time_extended[-1001] + time_extended[-1000]) / 2) - 0.1, 100), 2.75 * np.ones(100), c='gray')
plt.plot(((time_extended[-1001] + time_extended[-1000]) / 2) * np.ones(100), np.linspace(-2.75, 2.75, 100), c='gray',
linestyle='dashed')
plt.xlim(3.4 * np.pi, 5.6 * np.pi)
plt.xticks((4 * np.pi, 5 * np.pi), (r'4$\pi$', r'5$\pi$'))
plt.yticks((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.84, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/neural_network.png')
plt.show()
# plot 6a
np.random.seed(0)
time = np.linspace(0, 5 * np.pi, 1001)
knots_51 = np.linspace(0, 5 * np.pi, 51)
time_series = np.cos(2 * time) + np.cos(4 * time) + np.cos(8 * time)
noise = np.random.normal(0, 1, len(time_series))
time_series += noise
advemdpy = EMD(time=time, time_series=time_series)
imfs_51, hts_51, ifs_51 = advemdpy.empirical_mode_decomposition(knots=knots_51, max_imfs=3,
edge_effect='symmetric_anchor', verbose=False)[:3]
knots_31 = np.linspace(0, 5 * np.pi, 31)
imfs_31, hts_31, ifs_31 = advemdpy.empirical_mode_decomposition(knots=knots_31, max_imfs=2,
edge_effect='symmetric_anchor', verbose=False)[:3]
knots_11 = np.linspace(0, 5 * np.pi, 11)
imfs_11, hts_11, ifs_11 = advemdpy.empirical_mode_decomposition(knots=knots_11, max_imfs=1,
edge_effect='symmetric_anchor', verbose=False)[:3]
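# coarser knot sequences give smoother spline fits, so fewer IMFs are needed to capture the same
# underlying trend (the sum of IMFs 1-3 with 51 knots, IMFs 1-2 with 31 knots, a single IMF with
# 11 knots); the printed residual variances act as a DFA-style fluctuation check on each trend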
fig, axs = plt.subplots(3, 1)
plt.suptitle(textwrap.fill('Comparison of Trends Extracted with Different Knot Sequences', 40))
plt.subplots_adjust(hspace=0.1)
axs[0].plot(time, time_series, label='Time series')
axs[0].plot(time, imfs_51[1, :] + imfs_51[2, :] + imfs_51[3, :], label=textwrap.fill('Sum of IMF 1, IMF 2, & IMF 3 with 51 knots', 21))
print(f'DFA fluctuation with 51 knots: {np.round(np.var(time_series - (imfs_51[1, :] + imfs_51[2, :] + imfs_51[3, :])), 3)}')
for knot in knots_51:
axs[0].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1)
axs[0].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1, label='Knots')
axs[0].set_xticks([0, np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi])
axs[0].set_xticklabels(['', '', '', '', '', ''])
axs[0].plot(np.linspace(0.95 * np.pi, 1.55 * np.pi, 101), 5.5 * np.ones(101), 'k--')
axs[0].plot(np.linspace(0.95 * np.pi, 1.55 * np.pi, 101), -5.5 * np.ones(101), 'k--')
axs[0].plot(0.95 * np.pi * np.ones(101), np.linspace(-5.5, 5.5, 101), 'k--')
axs[0].plot(1.55 * np.pi * np.ones(101), np.linspace(-5.5, 5.5, 101), 'k--', label='Zoomed region')
box_0 = axs[0].get_position()
axs[0].set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.85, box_0.height])
axs[0].legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
axs[1].plot(time, time_series, label='Time series')
axs[1].plot(time, imfs_31[1, :] + imfs_31[2, :], label=textwrap.fill('Sum of IMF 1 and IMF 2 with 31 knots', 19))
axs[1].plot(time, imfs_51[2, :] + imfs_51[3, :], label=textwrap.fill('Sum of IMF 2 and IMF 3 with 51 knots', 19))
print(f'DFA fluctuation with 31 knots: {np.round(np.var(time_series - (imfs_31[1, :] + imfs_31[2, :])), 3)}')
for knot in knots_31:
axs[1].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1)
axs[1].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1, label='Knots')
axs[1].set_xticks([0, np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi])
axs[1].set_xticklabels(['', '', '', '', '', ''])
box_1 = axs[1].get_position()
axs[1].set_position([box_1.x0 - 0.05, box_1.y0, box_1.width * 0.85, box_1.height])
axs[1].legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
axs[1].plot(np.linspace(0.95 * np.pi, 1.55 * np.pi, 101), 5.5 * np.ones(101), 'k--')
axs[1].plot(np.linspace(0.95 * np.pi, 1.55 * np.pi, 101), -5.5 * np.ones(101), 'k--')
axs[1].plot(0.95 * np.pi * np.ones(101), np.linspace(-5.5, 5.5, 101), 'k--')
axs[1].plot(1.55 * np.pi * np.ones(101), np.linspace(-5.5, 5.5, 101), 'k--', label='Zoomed region')
axs[2].plot(time, time_series, label='Time series')
axs[2].plot(time, imfs_11[1, :], label='IMF 1 with 11 knots')
axs[2].plot(time, imfs_31[2, :], label='IMF 2 with 31 knots')
axs[2].plot(time, imfs_51[3, :], label='IMF 3 with 51 knots')
print(f'DFA fluctuation with 11 knots: {np.round(np.var(time_series - imfs_11[1, :]), 3)}')
for knot in knots_11:
axs[2].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1)
axs[2].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1, label='Knots')
axs[2].set_xticks([0, np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi])
axs[2].set_xticklabels(['$0$', r'$\pi$', r'$2\pi$', r'$3\pi$', r'$4\pi$', r'$5\pi$'])
box_2 = axs[2].get_position()
axs[2].set_position([box_2.x0 - 0.05, box_2.y0, box_2.width * 0.85, box_2.height])
axs[2].legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
axs[2].plot(np.linspace(0.95 * np.pi, 1.55 * np.pi, 101), 5.5 * np.ones(101), 'k--')
axs[2].plot(np.linspace(0.95 * np.pi, 1.55 * np.pi, 101), -5.5 * np.ones(101), 'k--')
axs[2].plot(0.95 * np.pi * np.ones(101), np.linspace(-5.5, 5.5, 101), 'k--')
axs[2].plot(1.55 * np.pi * np.ones(101), np.linspace(-5.5, 5.5, 101), 'k--', label='Zoomed region')
plt.savefig('jss_figures/DFA_different_trends.png')
plt.show()
# plot 6b
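# Same three trend estimates as plot 6a, re-plotted over the zoomed region [0.95*pi, 1.55*pi]
# marked by the dashed box above, so their behaviour between knots can be compared directly.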
fig, axs = plt.subplots(3, 1)
plt.suptitle(textwrap.fill('Comparison of Trends Extracted with Different Knot Sequences Zoomed Region', 40))
plt.subplots_adjust(hspace=0.1)
axs[0].plot(time, time_series, label='Time series')
axs[0].plot(time, imfs_51[1, :] + imfs_51[2, :] + imfs_51[3, :], label=textwrap.fill('Sum of IMF 1, IMF 2, & IMF 3 with 51 knots', 21))
for knot in knots_51:
axs[0].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1)
axs[0].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1, label='Knots')
axs[0].set_xticks([0, np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi])
axs[0].set_xticklabels(['', '', '', '', '', ''])
box_0 = axs[0].get_position()
axs[0].set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.85, box_0.height])
axs[0].legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
axs[0].set_ylim(-5.5, 5.5)
axs[0].set_xlim(0.95 * np.pi, 1.55 * np.pi)
axs[1].plot(time, time_series, label='Time series')
axs[1].plot(time, imfs_31[1, :] + imfs_31[2, :], label=textwrap.fill('Sum of IMF 1 and IMF 2 with 31 knots', 19))
axs[1].plot(time, imfs_51[2, :] + imfs_51[3, :], label=textwrap.fill('Sum of IMF 2 and IMF 3 with 51 knots', 19))
for knot in knots_31:
axs[1].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1)
axs[1].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1, label='Knots')
axs[1].set_xticks([0, np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi])
axs[1].set_xticklabels(['', '', '', '', '', ''])
box_1 = axs[1].get_position()
axs[1].set_position([box_1.x0 - 0.05, box_1.y0, box_1.width * 0.85, box_1.height])
axs[1].legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
axs[1].set_ylim(-5.5, 5.5)
axs[1].set_xlim(0.95 * np.pi, 1.55 * np.pi)
axs[2].plot(time, time_series, label='Time series')
axs[2].plot(time, imfs_11[1, :], label='IMF 1 with 11 knots')
axs[2].plot(time, imfs_31[2, :], label='IMF 2 with 31 knots')
axs[2].plot(time, imfs_51[3, :], label='IMF 3 with 51 knots')
for knot in knots_11:
axs[2].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1)
axs[2].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1, label='Knots')
axs[2].set_xticks([np.pi, (3 / 2) * np.pi])
axs[2].set_xticklabels([r'$\pi$', r'$\frac{3}{2}\pi$'])
box_2 = axs[2].get_position()
axs[2].set_position([box_2.x0 - 0.05, box_2.y0, box_2.width * 0.85, box_2.height])
axs[2].legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
axs[2].set_ylim(-5.5, 5.5)
axs[2].set_xlim(0.95 * np.pi, 1.55 * np.pi)
plt.savefig('jss_figures/DFA_different_trends_zoomed.png')
plt.show()
hs_ouputs = hilbert_spectrum(time, imfs_51, hts_51, ifs_51, max_frequency=12, plot=False)
# plot 6c
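# Gaussian filtered Hilbert spectrum of the 51-knot decomposition; the dashed reference lines mark
# the angular frequencies of the three cosine components (8, 4 and 2 rad/s).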
ax = plt.subplot(111)
figure_size = plt.gcf().get_size_inches()
factor = 0.9
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.title(textwrap.fill('Gaussian Filtered Hilbert Spectrum of Simple Sinusoidal Time Series with Added Noise', 50))
x_hs, y, z = hs_ouputs
z_min, z_max = 0, np.abs(z).max()
ax.pcolormesh(x_hs, y, np.abs(z), cmap='gist_rainbow', vmin=z_min, vmax=z_max)
ax.plot(x_hs[0, :], 8 * np.ones_like(x_hs[0, :]), '--', label=r'$\omega = 8$', linewidth=3)
ax.plot(x_hs[0, :], 4 * np.ones_like(x_hs[0, :]), '--', label=r'$\omega = 4$', linewidth=3)
ax.plot(x_hs[0, :], 2 * np.ones_like(x_hs[0, :]), '--', label=r'$\omega = 2$', linewidth=3)
ax.set_xticks([0, np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi])
ax.set_xticklabels(['$0$', r'$\pi$', r'$2\pi$', r'$3\pi$', r'$4\pi$'])
plt.ylabel(r'Frequency (rad.s$^{-1}$)')
plt.xlabel('Time (s)')
box_0 = ax.get_position()
ax.set_position([box_0.x0, box_0.y0 + 0.05, box_0.width * 0.85, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/DFA_hilbert_spectrum.png')
plt.show()
# plot 6c
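# Demonstration of unsmoothed versus smoothed extrema envelopes for cos(t) + cos(5t) fitted over a
# 51-knot sequence, a case in which (per the figure title) the Schoenberg-Whitney conditions are
# not satisfied, so the unsmoothed envelopes are contrasted with their smoothed counterparts.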
time = np.linspace(0, 5 * np.pi, 1001)
time_series = np.cos(time) + np.cos(5 * time)
knots = np.linspace(0, 5 * np.pi, 51)
fluc = Fluctuation(time=time, time_series=time_series)
max_unsmoothed = fluc.envelope_basis_function_approximation(knots_for_envelope=knots, extrema_type='maxima', smooth=False)
max_smoothed = fluc.envelope_basis_function_approximation(knots_for_envelope=knots, extrema_type='maxima', smooth=True)
min_unsmoothed = fluc.envelope_basis_function_approximation(knots_for_envelope=knots, extrema_type='minima', smooth=False)
min_smoothed = fluc.envelope_basis_function_approximation(knots_for_envelope=knots, extrema_type='minima', smooth=True)
util = Utility(time=time, time_series=time_series)
maxima = util.max_bool_func_1st_order_fd()
minima = util.min_bool_func_1st_order_fd()
ax = plt.subplot(111)
plt.gcf().subplots_adjust(bottom=0.10)
plt.title(textwrap.fill('Plot Demonstrating Unsmoothed Extrema Envelopes if Schoenberg–Whitney Conditions are Not Satisfied', 50))
plt.plot(time, time_series, label='Time series', zorder=2, linewidth=2)
plt.scatter(time[maxima], time_series[maxima], c='r', label='Maxima', zorder=10)
plt.scatter(time[minima], time_series[minima], c='b', label='Minima', zorder=10)
plt.plot(time, max_unsmoothed[0], label=textwrap.fill('Unsmoothed maxima envelope', 10), c='darkorange')
plt.plot(time, max_smoothed[0], label=textwrap.fill('Smoothed maxima envelope', 10), c='red')
plt.plot(time, min_unsmoothed[0], label=textwrap.fill('Unsmoothed minima envelope', 10), c='cyan')
plt.plot(time, min_smoothed[0], label=textwrap.fill('Smoothed minima envelope', 10), c='blue')
for knot in knots[:-1]:
plt.plot(knot * np.ones(101), np.linspace(-3.0, -2.0, 101), '--', c='grey', zorder=1)
plt.plot(knots[-1] * np.ones(101), np.linspace(-3.0, -2.0, 101), '--', c='grey', label='Knots', zorder=1)
plt.xticks((0, 1 * np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi),
(r'$0$', r'$\pi$', r'2$\pi$', r'3$\pi$', r'4$\pi$', r'5$\pi$'))
plt.yticks((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))
plt.xlim(-0.25 * np.pi, 5.25 * np.pi)
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.84, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/Schoenberg_Whitney_Conditions.png')
plt.show()
# plot 7
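# Detrended fluctuation analysis example on cos(t) + cos(5t) over [0.25*pi, 4.75*pi]: standard EMD
# extrema envelopes, smoothed (SEMD) envelopes, envelopes pinned to the optimal extrema located via
# IMF 1 of the differentiated series (labelled EEMD), an inflection-point envelope and a
# binomial-average envelope are all plotted and compared against the true mean cos(t).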
a = 0.25
width = 0.2
time = np.linspace((0 + a) * np.pi, (5 - a) * np.pi, 1001)
knots = np.linspace((0 + a) * np.pi, (5 - a) * np.pi, 11)
time_series = np.cos(time) + np.cos(5 * time)
utils = emd_utils.Utility(time=time, time_series=time_series)
max_bool = utils.max_bool_func_1st_order_fd()
maxima_x = time[max_bool]
maxima_y = time_series[max_bool]
min_bool = utils.min_bool_func_1st_order_fd()
minima_x = time[min_bool]
minima_y = time_series[min_bool]
inflection_bool = utils.inflection_point()
inflection_x = time[inflection_bool]
inflection_y = time_series[inflection_bool]
fluctuation = emd_mean.Fluctuation(time=time, time_series=time_series)
maxima_envelope = fluctuation.envelope_basis_function_approximation(knots, 'maxima', smooth=False,
smoothing_penalty=0.2, edge_effect='none',
spline_method='b_spline')[0]
maxima_envelope_smooth = fluctuation.envelope_basis_function_approximation(knots, 'maxima', smooth=True,
smoothing_penalty=0.2, edge_effect='none',
spline_method='b_spline')[0]
minima_envelope = fluctuation.envelope_basis_function_approximation(knots, 'minima', smooth=False,
smoothing_penalty=0.2, edge_effect='none',
spline_method='b_spline')[0]
minima_envelope_smooth = fluctuation.envelope_basis_function_approximation(knots, 'minima', smooth=True,
smoothing_penalty=0.2, edge_effect='none',
spline_method='b_spline')[0]
inflection_points_envelope = fluctuation.direct_detrended_fluctuation_estimation(knots,
smooth=True,
smoothing_penalty=0.2,
technique='inflection_points')[0]
binomial_points_envelope = fluctuation.direct_detrended_fluctuation_estimation(knots,
smooth=True,
smoothing_penalty=0.2,
technique='binomial_average', order=21,
increment=20)[0]
derivative_of_lsq = utils.derivative_forward_diff()
derivative_time = time[:-1]
derivative_knots = np.linspace(knots[0], knots[-1], 31)
# change (1) detrended_fluctuation_technique and (2) max_internal_iter and (3) debug (confusing with external debugging)
emd = AdvEMDpy.EMD(time=derivative_time, time_series=derivative_of_lsq)
imf_1_of_derivative = emd.empirical_mode_decomposition(knots=derivative_knots,
knot_time=derivative_time, text=False, verbose=False)[0][1, :]
utils = emd_utils.Utility(time=time[:-1], time_series=imf_1_of_derivative)
optimal_maxima = np.r_[False, utils.derivative_forward_diff() < 0, False] & \
np.r_[utils.zero_crossing() == 1, False]
optimal_minima = np.r_[False, utils.derivative_forward_diff() > 0, False] & \
np.r_[utils.zero_crossing() == 1, False]
EEMD_maxima_envelope = fluctuation.envelope_basis_function_approximation_fixed_points(knots, 'maxima',
optimal_maxima,
optimal_minima,
smooth=False,
smoothing_penalty=0.2,
edge_effect='none')[0]
EEMD_minima_envelope = fluctuation.envelope_basis_function_approximation_fixed_points(knots, 'minima',
optimal_maxima,
optimal_minima,
smooth=False,
smoothing_penalty=0.2,
edge_effect='none')[0]
ax = plt.subplot(111)
plt.gcf().subplots_adjust(bottom=0.10)
plt.title('Detrended Fluctuation Analysis Examples')
plt.plot(time, time_series, linewidth=2, label='Time series')
plt.scatter(maxima_x, maxima_y, c='r', zorder=4, label='Maxima')
plt.scatter(minima_x, minima_y, c='b', zorder=4, label='Minima')
plt.scatter(time[optimal_maxima], time_series[optimal_maxima], c='darkred', zorder=4,
label=textwrap.fill('Optimal maxima', 10))
plt.scatter(time[optimal_minima], time_series[optimal_minima], c='darkblue', zorder=4,
label=textwrap.fill('Optimal minima', 10))
plt.scatter(inflection_x, inflection_y, c='magenta', zorder=4, label=textwrap.fill('Inflection points', 10))
plt.plot(time, maxima_envelope, c='darkblue', label=textwrap.fill('EMD envelope', 10))
plt.plot(time, minima_envelope, c='darkblue')
plt.plot(time, (maxima_envelope + minima_envelope) / 2, c='darkblue')
plt.plot(time, maxima_envelope_smooth, c='darkred', label=textwrap.fill('SEMD envelope', 10))
plt.plot(time, minima_envelope_smooth, c='darkred')
plt.plot(time, (maxima_envelope_smooth + minima_envelope_smooth) / 2, c='darkred')
plt.plot(time, EEMD_maxima_envelope, c='darkgreen', label=textwrap.fill('EEMD envelope', 10))
plt.plot(time, EEMD_minima_envelope, c='darkgreen')
plt.plot(time, (EEMD_maxima_envelope + EEMD_minima_envelope) / 2, c='darkgreen')
plt.plot(time, inflection_points_envelope, c='darkorange', label=textwrap.fill('Inflection point envelope', 10))
plt.plot(time, binomial_points_envelope, c='deeppink', label=textwrap.fill('Binomial average envelope', 10))
plt.plot(time, np.cos(time), c='black', label='True mean')
plt.xticks((0, 1 * np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi), (r'$0$', r'$\pi$', r'2$\pi$', r'3$\pi$',
r'4$\pi$', r'5$\pi$'))
plt.yticks((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))
plt.xlim(-0.25 * np.pi, 5.25 * np.pi)
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.84, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/detrended_fluctuation_analysis.png')
plt.show()
# Duffing Equation Example
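# The Duffing oscillator
#     x''(t) = x(t) - epsilon * x(t)**3 + gamma * cos(omega * t),
# with gamma = 0.1, epsilon = 1 and forcing frequency omega = 2 * pi / 25, is written as the
# first-order system [x, dx/dt] and integrated with odeint over t in [0, 150]; the displacement
# x(t) is the series decomposed in the package comparisons below.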
def duffing_equation(xy, ts):
gamma = 0.1
epsilon = 1
omega = ((2 * np.pi) / 25)
return [xy[1], xy[0] - epsilon * xy[0] ** 3 + gamma * np.cos(omega * ts)]
t = np.linspace(0, 150, 1501)
XY0 = [1, 1]
solution = odeint(duffing_equation, XY0, t)
x = solution[:, 0]
dxdt = solution[:, 1]
x_points = [0, 50, 100, 150]
x_names = [0, 50, 100, 150]
y_points_1 = [-2, 0, 2]
y_points_2 = [-1, 0, 1]
fig, axs = plt.subplots(2, 1)
plt.subplots_adjust(hspace=0.2)
axs[0].plot(t, x)
axs[0].set_title('Duffing Equation Displacement')
axs[0].set_ylim([-2, 2])
axs[0].set_xlim([0, 150])
axs[1].plot(t, dxdt)
axs[1].set_title('Duffing Equation Velocity')
axs[1].set_ylim([-1.5, 1.5])
axs[1].set_xlim([0, 150])
axis = 0
for ax in axs.flat:
ax.label_outer()
if axis == 0:
ax.set_ylabel('x(t)')
ax.set_yticks(y_points_1)
if axis == 1:
ax.set_ylabel(r'$ \dfrac{dx(t)}{dt} $')
ax.set(xlabel='t')
ax.set_yticks(y_points_2)
ax.set_xticks(x_points)
ax.set_xticklabels(x_names)
axis += 1
plt.savefig('jss_figures/Duffing_equation.png')
plt.show()
# compare other packages Duffing - top
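# Decompose the Duffing displacement with PyEMD (instantiated via the pyemd0215 alias) and build a
# Hilbert-Huang spectrum with the emd package's spectra utilities (emd040 alias) at a 10 Hz sampling
# rate (dt = 0.1 s), smoothed with a Gaussian filter. Dashed reference lines mark the 0.124 Hz
# Hamiltonian frequency approximation and the 0.04 Hz driving frequency.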
pyemd = pyemd0215()
py_emd = pyemd(x)
IP, IF, IA = emd040.spectra.frequency_transform(py_emd.T, 10, 'hilbert')
freq_edges, freq_bins = emd040.spectra.define_hist_bins(0, 0.2, 100)
hht = emd040.spectra.hilberthuang(IF, IA, freq_edges)
hht = gaussian_filter(hht, sigma=1)
ax = plt.subplot(111)
figure_size = plt.gcf().get_size_inches()
factor = 1.0
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.title(textwrap.fill('Gaussian Filtered Hilbert Spectrum of Duffing Equation using PyEMD 0.2.10', 40))
plt.pcolormesh(t, freq_bins, hht, cmap='gist_rainbow', vmin=0, vmax=np.max(np.max(np.abs(hht))))
plt.plot(t[:-1], 0.124 * np.ones_like(t[:-1]), '--', label=textwrap.fill('Hamiltonian frequency approximation', 15))
plt.plot(t[:-1], 0.04 * np.ones_like(t[:-1]), 'g--', label=textwrap.fill('Driving function frequency', 15))
plt.xticks([0, 50, 100, 150])
plt.yticks([0, 0.1, 0.2])
plt.ylabel('Frequency (Hz)')
plt.xlabel('Time (s)')
box_0 = ax.get_position()
ax.set_position([box_0.x0, box_0.y0 + 0.05, box_0.width * 0.75, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/Duffing_equation_ht_pyemd.png')
plt.show()
emd_sift = emd040.sift.sift(x)
IP, IF, IA = emd040.spectra.frequency_transform(emd_sift, 10, 'hilbert')
freq_edges, freq_bins = emd040.spectra.define_hist_bins(0, 0.2, 100)
hht = emd040.spectra.hilberthuang(IF, IA, freq_edges)
hht = gaussian_filter(hht, sigma=1)
ax = plt.subplot(111)
figure_size = plt.gcf().get_size_inches()
factor = 1.0
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.title(textwrap.fill('Gaussian Filtered Hilbert Spectrum of Duffing Equation using emd 0.3.3', 40))
plt.pcolormesh(t, freq_bins, hht, cmap='gist_rainbow', vmin=0, vmax=np.max(np.max(np.abs(hht))))
plt.plot(t[:-1], 0.124 * np.ones_like(t[:-1]), '--', label=textwrap.fill('Hamiltonian frequency approximation', 15))
plt.plot(t[:-1], 0.04 * np.ones_like(t[:-1]), 'g--', label=textwrap.fill('Driving function frequency', 15))
plt.xticks([0, 50, 100, 150])
plt.yticks([0, 0.1, 0.2])
plt.ylabel('Frequency (Hz)')
plt.xlabel('Time (s)')
box_0 = ax.get_position()
ax.set_position([box_0.x0, box_0.y0 + 0.05, box_0.width * 0.75, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/Duffing_equation_ht_emd.png')
plt.show()
# compare other packages Duffing - bottom
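# Decompose the same displacement with AdvEMDpy and compare IMF 1 and IMF 2 across the three
# packages; each package's second IMF should recover the driving component 0.1*cos(0.08*pi*t),
# and the printed values are the summed absolute errors against that reference.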
emd_duffing = AdvEMDpy.EMD(time=t, time_series=x)
emd_duff, emd_ht_duff, emd_if_duff, _, _, _, _ = emd_duffing.empirical_mode_decomposition(verbose=False)
fig, axs = plt.subplots(2, 1)
plt.subplots_adjust(hspace=0.3)
figure_size = plt.gcf().get_size_inches()
factor = 0.8
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
axs[0].plot(t, emd_duff[1, :], label='AdvEMDpy')
axs[0].plot(t, py_emd[0, :], '--', label='PyEMD 0.2.10')
axs[0].plot(t, emd_sift[:, 0], '--', label='emd 0.3.3')
axs[0].set_title('IMF 1')
axs[0].set_ylim([-2, 2])
axs[0].set_xlim([0, 150])
axs[1].plot(t, emd_duff[2, :], label='AdvEMDpy')
print(f'AdvEMDpy driving function error: {np.round(sum(abs(0.1 * np.cos(0.04 * 2 * np.pi * t) - emd_duff[2, :])), 3)}')
axs[1].plot(t, py_emd[1, :], '--', label='PyEMD 0.2.10')
print(f'PyEMD driving function error: {np.round(sum(abs(0.1 * np.cos(0.04 * 2 * np.pi * t) - py_emd[1, :])), 3)}')
axs[1].plot(t, emd_sift[:, 1], '--', label='emd 0.3.3')
print(f'emd driving function error: {np.round(sum(abs(0.1 * np.cos(0.04 * 2 * np.pi * t) - emd_sift[:, 1])), 3)}')
axs[1].plot(t, 0.1 * np.cos(0.04 * 2 * np.pi * t), '--', label=r'$0.1$cos$(0.08{\pi}t)$')
axs[1].set_title('IMF 2')
axs[1].set_ylim([-0.2, 0.4])
axs[1].set_xlim([0, 150])
axis = 0
for ax in axs.flat:
ax.label_outer()
if axis == 0:
ax.set_ylabel(r'$\gamma_1(t)$')
ax.set_yticks([-2, 0, 2])
if axis == 1:
ax.set_ylabel(r'$\gamma_2(t)$')
ax.set_yticks([-0.2, 0, 0.2])
box_0 = ax.get_position()
ax.set_position([box_0.x0, box_0.y0, box_0.width * 0.85, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
ax.set_xticks(x_points)
ax.set_xticklabels(x_names)
axis += 1
plt.savefig('jss_figures/Duffing_equation_imfs.png')
plt.show()
hs_ouputs = hilbert_spectrum(t, emd_duff, emd_ht_duff, emd_if_duff, max_frequency=1.3, plot=False)
ax = plt.subplot(111)
plt.title(textwrap.fill('Gaussian Filtered Hilbert Spectrum of Duffing Equation using AdvEMDpy', 40))
x, y, z = hs_ouputs
y = y / (2 * np.pi)
z_min, z_max = 0, np.abs(z).max()
figure_size = plt.gcf().get_size_inches()
factor = 1.0
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
ax.pcolormesh(x, y, np.abs(z), cmap='gist_rainbow', vmin=z_min, vmax=z_max)
plt.plot(t[:-1], 0.124 * np.ones_like(t[:-1]), '--', label=textwrap.fill('Hamiltonian frequency approximation', 15))
plt.plot(t[:-1], 0.04 * np.ones_like(t[:-1]), 'g--', label=textwrap.fill('Driving function frequency', 15))
plt.xticks([0, 50, 100, 150])
plt.yticks([0, 0.1, 0.2])
plt.ylabel('Frequency (Hz)')
plt.xlabel('Time (s)')
box_0 = ax.get_position()
ax.set_position([box_0.x0, box_0.y0 + 0.05, box_0.width * 0.75, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/Duffing_equation_ht.png')
plt.show()
# Carbon Dioxide Concentration Example
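# Monthly mean atmospheric CO2 concentrations (Data/co2_mm_mlo.csv, the Mauna Loa record) are
# loaded and plotted, then decomposed with each package to separate the annual cycle from the
# long-term growth trend.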
CO2_data = pd.read_csv('Data/co2_mm_mlo.csv', header=51)
plt.plot(CO2_data['month'], CO2_data['decimal date'])
plt.title(textwrap.fill('Mean Monthly Concentration of Carbon Dioxide in the Atmosphere', 35))
plt.ylabel('Parts per million')
plt.xlabel('Time (years)')
plt.savefig('jss_figures/CO2_concentration.png')
plt.show()
signal = CO2_data['decimal date']
signal = np.asarray(signal)
time = CO2_data['month']
time = np.asarray(time)
# compare other packages Carbon Dioxide - top
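# PyEMD and emd decompositions of the CO2 series: Hilbert-Huang spectra are computed at 12 samples
# per year, and the printed values are each package's summed absolute deviation of the first IMF's
# instantaneous frequency from the expected one cycle per year.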
pyemd = pyemd0215()
py_emd = pyemd(signal)
IP, IF, IA = emd040.spectra.frequency_transform(py_emd[:2, :].T, 12, 'hilbert')
print(f'PyEMD annual frequency error: {np.round(sum(np.abs(IF[:, 0] - np.ones_like(IF[:, 0]))), 3)}')
freq_edges, freq_bins = emd040.spectra.define_hist_bins(0, 2, 100)
hht = emd040.spectra.hilberthuang(IF, IA, freq_edges)
hht = gaussian_filter(hht, sigma=1)
fig, ax = plt.subplots()
figure_size = plt.gcf().get_size_inches()
factor = 0.8
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.title(textwrap.fill('Gaussian Filtered Hilbert Spectrum of CO$_{2}$ Concentration using PyEMD 0.2.10', 45))
plt.ylabel('Frequency (year$^{-1}$)')
plt.xlabel('Time (years)')
plt.pcolormesh(time, freq_bins, hht, cmap='gist_rainbow', vmin=0, vmax=np.max(np.max(np.abs(hht))))
plt.plot(time, np.ones_like(time), 'k--', label=textwrap.fill('Annual cycle', 10))
box_0 = ax.get_position()
ax.set_position([box_0.x0 + 0.0125, box_0.y0 + 0.075, box_0.width * 0.8, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/CO2_Hilbert_pyemd.png')
plt.show()
emd_sift = emd040.sift.sift(signal)
IP, IF, IA = emd040.spectra.frequency_transform(emd_sift[:, :1], 12, 'hilbert')
print(f'emd annual frequency error: {np.round(sum(np.abs(IF - np.ones_like(IF)))[0], 3)}')
freq_edges, freq_bins = emd040.spectra.define_hist_bins(0, 2, 100)
hht = emd040.spectra.hilberthuang(IF, IA, freq_edges)
hht = gaussian_filter(hht, sigma=1)
fig, ax = plt.subplots()
figure_size = plt.gcf().get_size_inches()
factor = 0.8
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.title(textwrap.fill('Gaussian Filtered Hilbert Spectrum of CO$_{2}$ Concentration using emd 0.3.3', 45))
plt.ylabel('Frequency (year$^{-1}$)')
plt.xlabel('Time (years)')
plt.pcolormesh(time, freq_bins, hht, cmap='gist_rainbow', vmin=0, vmax=np.max(np.max(np.abs(hht))))
plt.plot(time, np.ones_like(time), 'k--', label=textwrap.fill('Annual cycle', 10))
box_0 = ax.get_position()
ax.set_position([box_0.x0 + 0.0125, box_0.y0 + 0.075, box_0.width * 0.8, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/CO2_Hilbert_emd.png')
plt.show()
# compare other packages Carbon Dioxide - bottom
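# AdvEMDpy decomposition of the CO2 series over 200 knots: the 2x2 panel shows the raw and smoothed
# series, the annual-cycle IMF and the residual trend, and the final figure is the Gaussian filtered
# Hilbert spectrum of the annual IMF against the one cycle per year reference line.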
knots = np.linspace(time[0], time[-1], 200)
emd_example = AdvEMDpy.EMD(time=time, time_series=signal)
imfs, hts, ifs, _, _, _, _ = \
emd_example.empirical_mode_decomposition(knots=knots, knot_time=time, verbose=False)
print(f'AdvEMDpy annual frequency error: {np.round(sum(np.abs(ifs[1, :] / (2 * np.pi) - np.ones_like(ifs[1, :]))), 3)}')
fig, axs = plt.subplots(2, 2)
plt.subplots_adjust(hspace=0.5)
axs[0, 0].plot(time, signal)
axs[0, 1].plot(time, signal)
axs[0, 1].plot(time, imfs[0, :], label='Smoothed')
axs[0, 1].legend(loc='lower right')
axs[1, 0].plot(time, imfs[1, :])
axs[1, 1].plot(time, imfs[2, :])
axis = 0
for ax in axs.flat:
if axis == 0:
        ax.set(ylabel=r'CO$_2$ concentration')
if axis == 1:
pass
if axis == 2:
        ax.set(ylabel=r'CO$_2$ concentration')
ax.set(xlabel='Time (years)')
if axis == 3:
ax.set(xlabel='Time (years)')
axis += 1
plt.gcf().subplots_adjust(bottom=0.15)
axs[0, 0].set_title(r'Original CO$_2$ Concentration')
axs[0, 1].set_title('Smoothed CO$_2$ Concentration')
axs[1, 0].set_title('IMF 1')
axs[1, 1].set_title('Residual')
plt.gcf().subplots_adjust(bottom=0.15)
plt.savefig('jss_figures/CO2_EMD.png')
plt.show()
hs_ouputs = hilbert_spectrum(time, imfs, hts, ifs, max_frequency=10, which_imfs=[1], plot=False)
x_hs, y, z = hs_ouputs
y = y / (2 * np.pi)
z_min, z_max = 0, np.abs(z).max()
fig, ax = plt.subplots()
figure_size = plt.gcf().get_size_inches()
factor = 0.7
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
ax.pcolormesh(x_hs, y, np.abs(z), cmap='gist_rainbow', vmin=z_min, vmax=z_max)
ax.set_title(textwrap.fill(r'Gaussian Filtered Hilbert Spectrum of CO$_{2}$ Concentration using AdvEMDpy', 40))
plt.ylabel('Frequency (year$^{-1}$)')
plt.xlabel('Time (years)')
plt.plot(x_hs[0, :], np.ones_like(x_hs[0, :]), 'k--', label=textwrap.fill('Annual cycle', 10))
ax.axis([x_hs.min(), x_hs.max(), y.min(), y.max()])
box_0 = ax.get_position()
ax.set_position([box_0.x0 + 0.0125, box_0.y0 + 0.075, box_0.width * 0.8, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/CO2_Hilbert.png')
plt.show()
'plt.plot', (['max_2_x_time', 'max_2_x', '"""k-"""'], {}), "(max_2_x_time, max_2_x, 'k-')\n", (34189, 34218), True, 'import matplotlib.pyplot as plt\n'), ((34219, 34261), 'matplotlib.pyplot.plot', 'plt.plot', (['max_2_x_time_side', 'max_2_x', '"""k-"""'], {}), "(max_2_x_time_side, max_2_x, 'k-')\n", (34227, 34261), True, 'import matplotlib.pyplot as plt\n'), ((34262, 34299), 'matplotlib.pyplot.plot', 'plt.plot', (['min_2_x_time', 'min_2_x', '"""k-"""'], {}), "(min_2_x_time, min_2_x, 'k-')\n", (34270, 34299), True, 'import matplotlib.pyplot as plt\n'), ((34300, 34342), 'matplotlib.pyplot.plot', 'plt.plot', (['min_2_x_time_side', 'min_2_x', '"""k-"""'], {}), "(min_2_x_time_side, min_2_x, 'k-')\n", (34308, 34342), True, 'import matplotlib.pyplot as plt\n'), ((34343, 34399), 'matplotlib.pyplot.plot', 'plt.plot', (['dash_max_min_2_x_time', 'dash_max_min_2_x', '"""k--"""'], {}), "(dash_max_min_2_x_time, dash_max_min_2_x, 'k--')\n", (34351, 34399), True, 'import matplotlib.pyplot as plt\n'), ((34400, 34438), 'matplotlib.pyplot.text', 'plt.text', (['(5.16 * np.pi)', '(0.85)', '"""$2a_2$"""'], {}), "(5.16 * np.pi, 0.85, '$2a_2$')\n", (34408, 34438), True, 'import matplotlib.pyplot as plt\n'), ((34441, 34478), 'matplotlib.pyplot.plot', 'plt.plot', (['max_2_y_time', 'max_2_y', '"""k-"""'], {}), "(max_2_y_time, max_2_y, 'k-')\n", (34449, 34478), True, 'import matplotlib.pyplot as plt\n'), ((34479, 34521), 'matplotlib.pyplot.plot', 'plt.plot', (['max_2_y_time', 'max_2_y_side', '"""k-"""'], {}), "(max_2_y_time, max_2_y_side, 'k-')\n", (34487, 34521), True, 'import matplotlib.pyplot as plt\n'), ((34522, 34559), 'matplotlib.pyplot.plot', 'plt.plot', (['min_2_y_time', 'min_2_y', '"""k-"""'], {}), "(min_2_y_time, min_2_y, 'k-')\n", (34530, 34559), True, 'import matplotlib.pyplot as plt\n'), ((34560, 34602), 'matplotlib.pyplot.plot', 'plt.plot', (['min_2_y_time', 'min_2_y_side', '"""k-"""'], {}), "(min_2_y_time, min_2_y_side, 'k-')\n", (34568, 34602), True, 'import matplotlib.pyplot as plt\n'), ((34603, 34659), 'matplotlib.pyplot.plot', 'plt.plot', (['dash_max_min_2_y_time', 'dash_max_min_2_y', '"""k--"""'], {}), "(dash_max_min_2_y_time, dash_max_min_2_y, 'k--')\n", (34611, 34659), True, 'import matplotlib.pyplot as plt\n'), ((34660, 34708), 'matplotlib.pyplot.text', 'plt.text', (['(4.08 * np.pi)', '(-2.2)', '"""$\\\\frac{p_2}{2}$"""'], {}), "(4.08 * np.pi, -2.2, '$\\\\frac{p_2}{2}$')\n", (34668, 34708), True, 'import matplotlib.pyplot as plt\n'), ((34710, 34747), 'matplotlib.pyplot.plot', 'plt.plot', (['max_1_x_time', 'max_1_x', '"""k-"""'], {}), "(max_1_x_time, max_1_x, 'k-')\n", (34718, 34747), True, 'import matplotlib.pyplot as plt\n'), ((34748, 34790), 'matplotlib.pyplot.plot', 'plt.plot', (['max_1_x_time_side', 'max_1_x', '"""k-"""'], {}), "(max_1_x_time_side, max_1_x, 'k-')\n", (34756, 34790), True, 'import matplotlib.pyplot as plt\n'), ((34791, 34828), 'matplotlib.pyplot.plot', 'plt.plot', (['min_1_x_time', 'min_1_x', '"""k-"""'], {}), "(min_1_x_time, min_1_x, 'k-')\n", (34799, 34828), True, 'import matplotlib.pyplot as plt\n'), ((34829, 34871), 'matplotlib.pyplot.plot', 'plt.plot', (['min_1_x_time_side', 'min_1_x', '"""k-"""'], {}), "(min_1_x_time_side, min_1_x, 'k-')\n", (34837, 34871), True, 'import matplotlib.pyplot as plt\n'), ((34872, 34928), 'matplotlib.pyplot.plot', 'plt.plot', (['dash_max_min_1_x_time', 'dash_max_min_1_x', '"""k--"""'], {}), "(dash_max_min_1_x_time, dash_max_min_1_x, 'k--')\n", (34880, 34928), True, 'import matplotlib.pyplot as plt\n'), ((34929, 34967), 
'matplotlib.pyplot.text', 'plt.text', (['(5.42 * np.pi)', '(-0.1)', '"""$2a_1$"""'], {}), "(5.42 * np.pi, -0.1, '$2a_1$')\n", (34937, 34967), True, 'import matplotlib.pyplot as plt\n'), ((34970, 35007), 'matplotlib.pyplot.plot', 'plt.plot', (['max_1_y_time', 'max_1_y', '"""k-"""'], {}), "(max_1_y_time, max_1_y, 'k-')\n", (34978, 35007), True, 'import matplotlib.pyplot as plt\n'), ((35008, 35050), 'matplotlib.pyplot.plot', 'plt.plot', (['max_1_y_time', 'max_1_y_side', '"""k-"""'], {}), "(max_1_y_time, max_1_y_side, 'k-')\n", (35016, 35050), True, 'import matplotlib.pyplot as plt\n'), ((35051, 35088), 'matplotlib.pyplot.plot', 'plt.plot', (['min_1_y_time', 'min_1_y', '"""k-"""'], {}), "(min_1_y_time, min_1_y, 'k-')\n", (35059, 35088), True, 'import matplotlib.pyplot as plt\n'), ((35089, 35131), 'matplotlib.pyplot.plot', 'plt.plot', (['min_1_y_time', 'min_1_y_side', '"""k-"""'], {}), "(min_1_y_time, min_1_y_side, 'k-')\n", (35097, 35131), True, 'import matplotlib.pyplot as plt\n'), ((35132, 35188), 'matplotlib.pyplot.plot', 'plt.plot', (['dash_max_min_1_y_time', 'dash_max_min_1_y', '"""k--"""'], {}), "(dash_max_min_1_y_time, dash_max_min_1_y, 'k--')\n", (35140, 35188), True, 'import matplotlib.pyplot as plt\n'), ((35189, 35237), 'matplotlib.pyplot.text', 'plt.text', (['(4.48 * np.pi)', '(-2.5)', '"""$\\\\frac{p_1}{2}$"""'], {}), "(4.48 * np.pi, -2.5, '$\\\\frac{p_1}{2}$')\n", (35197, 35237), True, 'import matplotlib.pyplot as plt\n'), ((35239, 35273), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(3.9 * np.pi)', '(5.6 * np.pi)'], {}), '(3.9 * np.pi, 5.6 * np.pi)\n', (35247, 35273), True, 'import matplotlib.pyplot as plt\n'), ((35274, 35332), 'matplotlib.pyplot.xticks', 'plt.xticks', (['(4 * np.pi, 5 * np.pi)', "('4$\\\\pi$', '5$\\\\pi$')"], {}), "((4 * np.pi, 5 * np.pi), ('4$\\\\pi$', '5$\\\\pi$'))\n", (35284, 35332), True, 'import matplotlib.pyplot as plt\n'), ((35333, 35391), 'matplotlib.pyplot.yticks', 'plt.yticks', (['(-2, -1, 0, 1, 2)', "('-2', '-1', '0', '1', '2')"], {}), "((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))\n", (35343, 35391), True, 'import matplotlib.pyplot as plt\n'), ((35552, 35615), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/edge_effects_characteristic_wave.png"""'], {}), "('jss_figures/edge_effects_characteristic_wave.png')\n", (35563, 35615), True, 'import matplotlib.pyplot as plt\n'), ((35616, 35626), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (35624, 35626), True, 'import matplotlib.pyplot as plt\n'), ((35641, 35664), 'numpy.linspace', 'np.linspace', (['(5)', '(95)', '(100)'], {}), '(5, 95, 100)\n', (35652, 35664), True, 'import numpy as np\n'), ((35788, 35838), 'emd_utils.Utility', 'emd_utils.Utility', ([], {'time': 't', 'time_series': 'signal_orig'}), '(time=t, time_series=signal_orig)\n', (35805, 35838), False, 'import emd_utils\n'), ((36106, 36137), 'numpy.linspace', 'np.linspace', (['(0)', '(5 * np.pi)', '(1001)'], {}), '(0, 5 * np.pi, 1001)\n', (36117, 36137), True, 'import numpy as np\n'), ((36191, 36221), 'numpy.linspace', 'np.linspace', (['(0)', '(5 * np.pi)', '(101)'], {}), '(0, 5 * np.pi, 101)\n', (36202, 36221), True, 'import numpy as np\n'), ((36239, 36259), 'emd_utils.time_extension', 'time_extension', (['time'], {}), '(time)\n', (36253, 36259), False, 'from emd_utils import time_extension, Utility\n'), ((37406, 37429), 'numpy.hstack', 'np.hstack', (['(weights, 0)'], {}), '((weights, 0))\n', (37415, 37429), True, 'import numpy as np\n'), ((39418, 39440), 'cvxpy.Problem', 'cvx.Problem', (['objective'], {}), '(objective)\n', 
(39429, 39440), True, 'import cvxpy as cvx\n'), ((39507, 39525), 'numpy.array', 'np.array', (['vx.value'], {}), '(vx.value)\n', (39515, 39525), True, 'import numpy as np\n'), ((40751, 40803), 'emd_utils.Utility', 'emd_utils.Utility', ([], {'time': 'time', 'time_series': 'lsq_signal'}), '(time=time, time_series=lsq_signal)\n', (40768, 40803), False, 'import emd_utils\n'), ((40821, 40892), 'emd_utils.Utility', 'emd_utils.Utility', ([], {'time': 'time_extended', 'time_series': 'time_series_extended'}), '(time=time_extended, time_series=time_series_extended)\n', (40838, 40892), False, 'import emd_utils\n'), ((41503, 41519), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (41514, 41519), True, 'import matplotlib.pyplot as plt\n'), ((41559, 41608), 'matplotlib.pyplot.title', 'plt.title', (['"""Single Neuron Neural Network Example"""'], {}), "('Single Neuron Neural Network Example')\n", (41568, 41608), True, 'import matplotlib.pyplot as plt\n'), ((41609, 41661), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'lsq_signal'], {'zorder': '(2)', 'label': '"""Signal"""'}), "(time, lsq_signal, zorder=2, label='Signal')\n", (41617, 41661), True, 'import matplotlib.pyplot as plt\n'), ((41773, 41838), 'matplotlib.pyplot.scatter', 'plt.scatter', (['maxima_time', 'maxima'], {'c': '"""r"""', 'zorder': '(3)', 'label': '"""Maxima"""'}), "(maxima_time, maxima, c='r', zorder=3, label='Maxima')\n", (41784, 41838), True, 'import matplotlib.pyplot as plt\n'), ((41839, 41904), 'matplotlib.pyplot.scatter', 'plt.scatter', (['minima_time', 'minima'], {'c': '"""b"""', 'zorder': '(3)', 'label': '"""Minima"""'}), "(minima_time, minima, c='b', zorder=3, label='Minima')\n", (41850, 41904), True, 'import matplotlib.pyplot as plt\n'), ((44091, 44125), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(3.4 * np.pi)', '(5.6 * np.pi)'], {}), '(3.4 * np.pi, 5.6 * np.pi)\n', (44099, 44125), True, 'import matplotlib.pyplot as plt\n'), ((44126, 44184), 'matplotlib.pyplot.xticks', 'plt.xticks', (['(4 * np.pi, 5 * np.pi)', "('4$\\\\pi$', '5$\\\\pi$')"], {}), "((4 * np.pi, 5 * np.pi), ('4$\\\\pi$', '5$\\\\pi$'))\n", (44136, 44184), True, 'import matplotlib.pyplot as plt\n'), ((44185, 44243), 'matplotlib.pyplot.yticks', 'plt.yticks', (['(-2, -1, 0, 1, 2)', "('-2', '-1', '0', '1', '2')"], {}), "((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))\n", (44195, 44243), True, 'import matplotlib.pyplot as plt\n'), ((44404, 44449), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/neural_network.png"""'], {}), "('jss_figures/neural_network.png')\n", (44415, 44449), True, 'import matplotlib.pyplot as plt\n'), ((44450, 44460), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (44458, 44460), True, 'import matplotlib.pyplot as plt\n'), ((44472, 44489), 'numpy.random.seed', 'np.random.seed', (['(0)'], {}), '(0)\n', (44486, 44489), True, 'import numpy as np\n'), ((44498, 44529), 'numpy.linspace', 'np.linspace', (['(0)', '(5 * np.pi)', '(1001)'], {}), '(0, 5 * np.pi, 1001)\n', (44509, 44529), True, 'import numpy as np\n'), ((44541, 44570), 'numpy.linspace', 'np.linspace', (['(0)', '(5 * np.pi)', '(51)'], {}), '(0, 5 * np.pi, 51)\n', (44552, 44570), True, 'import numpy as np\n'), ((44722, 44761), 'AdvEMDpy.EMD', 'EMD', ([], {'time': 'time', 'time_series': 'time_series'}), '(time=time, time_series=time_series)\n', (44725, 44761), False, 'from AdvEMDpy import EMD\n'), ((44980, 45009), 'numpy.linspace', 'np.linspace', (['(0)', '(5 * np.pi)', '(31)'], {}), '(0, 5 * np.pi, 31)\n', (44991, 45009), True, 'import numpy as np\n'), 
((45228, 45257), 'numpy.linspace', 'np.linspace', (['(0)', '(5 * np.pi)', '(11)'], {}), '(0, 5 * np.pi, 11)\n', (45239, 45257), True, 'import numpy as np\n'), ((45477, 45495), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(3)', '(1)'], {}), '(3, 1)\n', (45489, 45495), True, 'import matplotlib.pyplot as plt\n'), ((45592, 45623), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'hspace': '(0.1)'}), '(hspace=0.1)\n', (45611, 45623), True, 'import matplotlib.pyplot as plt\n'), ((49286, 49337), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/DFA_different_trends.png"""'], {}), "('jss_figures/DFA_different_trends.png')\n", (49297, 49337), True, 'import matplotlib.pyplot as plt\n'), ((49338, 49348), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (49346, 49348), True, 'import matplotlib.pyplot as plt\n'), ((49371, 49389), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(3)', '(1)'], {}), '(3, 1)\n', (49383, 49389), True, 'import matplotlib.pyplot as plt\n'), ((49500, 49531), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'hspace': '(0.1)'}), '(hspace=0.1)\n', (49519, 49531), True, 'import matplotlib.pyplot as plt\n'), ((51975, 52033), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/DFA_different_trends_zoomed.png"""'], {}), "('jss_figures/DFA_different_trends_zoomed.png')\n", (51986, 52033), True, 'import matplotlib.pyplot as plt\n'), ((52034, 52044), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (52042, 52044), True, 'import matplotlib.pyplot as plt\n'), ((52058, 52135), 'emd_hilbert.hilbert_spectrum', 'hilbert_spectrum', (['time', 'imfs_51', 'hts_51', 'ifs_51'], {'max_frequency': '(12)', 'plot': '(False)'}), '(time, imfs_51, hts_51, ifs_51, max_frequency=12, plot=False)\n', (52074, 52135), False, 'from emd_hilbert import Hilbert, hilbert_spectrum\n'), ((52152, 52168), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (52163, 52168), True, 'import matplotlib.pyplot as plt\n'), ((52951, 52989), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Frequency (rad.s$^{-1}$)"""'], {}), "('Frequency (rad.s$^{-1}$)')\n", (52961, 52989), True, 'import matplotlib.pyplot as plt\n'), ((52991, 53013), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (s)"""'], {}), "('Time (s)')\n", (53001, 53013), True, 'import matplotlib.pyplot as plt\n'), ((53180, 53231), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/DFA_hilbert_spectrum.png"""'], {}), "('jss_figures/DFA_hilbert_spectrum.png')\n", (53191, 53231), True, 'import matplotlib.pyplot as plt\n'), ((53232, 53242), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (53240, 53242), True, 'import matplotlib.pyplot as plt\n'), ((53261, 53292), 'numpy.linspace', 'np.linspace', (['(0)', '(5 * np.pi)', '(1001)'], {}), '(0, 5 * np.pi, 1001)\n', (53272, 53292), True, 'import numpy as np\n'), ((53347, 53376), 'numpy.linspace', 'np.linspace', (['(0)', '(5 * np.pi)', '(51)'], {}), '(0, 5 * np.pi, 51)\n', (53358, 53376), True, 'import numpy as np\n'), ((53385, 53432), 'emd_mean.Fluctuation', 'Fluctuation', ([], {'time': 'time', 'time_series': 'time_series'}), '(time=time, time_series=time_series)\n', (53396, 53432), False, 'from emd_mean import Fluctuation\n'), ((53926, 53969), 'emd_utils.Utility', 'Utility', ([], {'time': 'time', 'time_series': 'time_series'}), '(time=time, time_series=time_series)\n', (53933, 53969), False, 'from emd_utils import time_extension, Utility\n'), ((54062, 54078), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], 
{}), '(111)\n', (54073, 54078), True, 'import matplotlib.pyplot as plt\n'), ((54249, 54320), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'time_series'], {'label': '"""Time series"""', 'zorder': '(2)', 'LineWidth': '(2)'}), "(time, time_series, label='Time series', zorder=2, LineWidth=2)\n", (54257, 54320), True, 'import matplotlib.pyplot as plt\n'), ((54321, 54406), 'matplotlib.pyplot.scatter', 'plt.scatter', (['time[maxima]', 'time_series[maxima]'], {'c': '"""r"""', 'label': '"""Maxima"""', 'zorder': '(10)'}), "(time[maxima], time_series[maxima], c='r', label='Maxima', zorder=10\n )\n", (54332, 54406), True, 'import matplotlib.pyplot as plt\n'), ((54402, 54487), 'matplotlib.pyplot.scatter', 'plt.scatter', (['time[minima]', 'time_series[minima]'], {'c': '"""b"""', 'label': '"""Minima"""', 'zorder': '(10)'}), "(time[minima], time_series[minima], c='b', label='Minima', zorder=10\n )\n", (54413, 54487), True, 'import matplotlib.pyplot as plt\n'), ((55096, 55234), 'matplotlib.pyplot.xticks', 'plt.xticks', (['(0, 1 * np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi)', "('$0$', '$\\\\pi$', '2$\\\\pi$', '3$\\\\pi$', '4$\\\\pi$', '5$\\\\pi$')"], {}), "((0, 1 * np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi), (\n '$0$', '$\\\\pi$', '2$\\\\pi$', '3$\\\\pi$', '4$\\\\pi$', '5$\\\\pi$'))\n", (55106, 55234), True, 'import matplotlib.pyplot as plt\n'), ((55242, 55300), 'matplotlib.pyplot.yticks', 'plt.yticks', (['(-2, -1, 0, 1, 2)', "('-2', '-1', '0', '1', '2')"], {}), "((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))\n", (55252, 55300), True, 'import matplotlib.pyplot as plt\n'), ((55301, 55338), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(-0.25 * np.pi)', '(5.25 * np.pi)'], {}), '(-0.25 * np.pi, 5.25 * np.pi)\n', (55309, 55338), True, 'import matplotlib.pyplot as plt\n'), ((55499, 55559), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/Schoenberg_Whitney_Conditions.png"""'], {}), "('jss_figures/Schoenberg_Whitney_Conditions.png')\n", (55510, 55559), True, 'import matplotlib.pyplot as plt\n'), ((55560, 55570), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (55568, 55570), True, 'import matplotlib.pyplot as plt\n'), ((55609, 55660), 'numpy.linspace', 'np.linspace', (['((0 + a) * np.pi)', '((5 - a) * np.pi)', '(1001)'], {}), '((0 + a) * np.pi, (5 - a) * np.pi, 1001)\n', (55620, 55660), True, 'import numpy as np\n'), ((55669, 55718), 'numpy.linspace', 'np.linspace', (['((0 + a) * np.pi)', '((5 - a) * np.pi)', '(11)'], {}), '((0 + a) * np.pi, (5 - a) * np.pi, 11)\n', (55680, 55718), True, 'import numpy as np\n'), ((55773, 55826), 'emd_utils.Utility', 'emd_utils.Utility', ([], {'time': 'time', 'time_series': 'time_series'}), '(time=time, time_series=time_series)\n', (55790, 55826), False, 'import emd_utils\n'), ((56176, 56232), 'emd_mean.Fluctuation', 'emd_mean.Fluctuation', ([], {'time': 'time', 'time_series': 'time_series'}), '(time=time, time_series=time_series)\n', (56196, 56232), False, 'import emd_mean\n'), ((58501, 58537), 'numpy.linspace', 'np.linspace', (['knots[0]', 'knots[-1]', '(31)'], {}), '(knots[0], knots[-1], 31)\n', (58512, 58537), True, 'import numpy as np\n'), ((58666, 58731), 'AdvEMDpy.EMD', 'AdvEMDpy.EMD', ([], {'time': 'derivative_time', 'time_series': 'derivative_of_lsq'}), '(time=derivative_time, time_series=derivative_of_lsq)\n', (58678, 58731), False, 'import AdvEMDpy\n'), ((58938, 59004), 'emd_utils.Utility', 'emd_utils.Utility', ([], {'time': 'time[:-1]', 'time_series': 'imf_1_of_derivative'}), '(time=time[:-1], time_series=imf_1_of_derivative)\n', 
(58955, 59004), False, 'import emd_utils\n'), ((60536, 60552), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (60547, 60552), True, 'import matplotlib.pyplot as plt\n'), ((60592, 60644), 'matplotlib.pyplot.title', 'plt.title', (['"""Detrended Fluctuation Analysis Examples"""'], {}), "('Detrended Fluctuation Analysis Examples')\n", (60601, 60644), True, 'import matplotlib.pyplot as plt\n'), ((60645, 60706), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'time_series'], {'LineWidth': '(2)', 'label': '"""Time series"""'}), "(time, time_series, LineWidth=2, label='Time series')\n", (60653, 60706), True, 'import matplotlib.pyplot as plt\n'), ((60707, 60771), 'matplotlib.pyplot.scatter', 'plt.scatter', (['maxima_x', 'maxima_y'], {'c': '"""r"""', 'zorder': '(4)', 'label': '"""Maxima"""'}), "(maxima_x, maxima_y, c='r', zorder=4, label='Maxima')\n", (60718, 60771), True, 'import matplotlib.pyplot as plt\n'), ((60772, 60836), 'matplotlib.pyplot.scatter', 'plt.scatter', (['minima_x', 'minima_y'], {'c': '"""b"""', 'zorder': '(4)', 'label': '"""Minima"""'}), "(minima_x, minima_y, c='b', zorder=4, label='Minima')\n", (60783, 60836), True, 'import matplotlib.pyplot as plt\n'), ((61316, 61361), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'minima_envelope'], {'c': '"""darkblue"""'}), "(time, minima_envelope, c='darkblue')\n", (61324, 61361), True, 'import matplotlib.pyplot as plt\n'), ((61362, 61431), 'matplotlib.pyplot.plot', 'plt.plot', (['time', '((maxima_envelope + minima_envelope) / 2)'], {'c': '"""darkblue"""'}), "(time, (maxima_envelope + minima_envelope) / 2, c='darkblue')\n", (61370, 61431), True, 'import matplotlib.pyplot as plt\n'), ((61526, 61577), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'minima_envelope_smooth'], {'c': '"""darkred"""'}), "(time, minima_envelope_smooth, c='darkred')\n", (61534, 61577), True, 'import matplotlib.pyplot as plt\n'), ((61578, 61665), 'matplotlib.pyplot.plot', 'plt.plot', (['time', '((maxima_envelope_smooth + minima_envelope_smooth) / 2)'], {'c': '"""darkred"""'}), "(time, (maxima_envelope_smooth + minima_envelope_smooth) / 2, c=\n 'darkred')\n", (61586, 61665), True, 'import matplotlib.pyplot as plt\n'), ((61755, 61806), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'EEMD_minima_envelope'], {'c': '"""darkgreen"""'}), "(time, EEMD_minima_envelope, c='darkgreen')\n", (61763, 61806), True, 'import matplotlib.pyplot as plt\n'), ((61807, 61892), 'matplotlib.pyplot.plot', 'plt.plot', (['time', '((EEMD_maxima_envelope + EEMD_minima_envelope) / 2)'], {'c': '"""darkgreen"""'}), "(time, (EEMD_maxima_envelope + EEMD_minima_envelope) / 2, c='darkgreen'\n )\n", (61815, 61892), True, 'import matplotlib.pyplot as plt\n'), ((62170, 62308), 'matplotlib.pyplot.xticks', 'plt.xticks', (['(0, 1 * np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi)', "('$0$', '$\\\\pi$', '2$\\\\pi$', '3$\\\\pi$', '4$\\\\pi$', '5$\\\\pi$')"], {}), "((0, 1 * np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi), (\n '$0$', '$\\\\pi$', '2$\\\\pi$', '3$\\\\pi$', '4$\\\\pi$', '5$\\\\pi$'))\n", (62180, 62308), True, 'import matplotlib.pyplot as plt\n'), ((62377, 62435), 'matplotlib.pyplot.yticks', 'plt.yticks', (['(-2, -1, 0, 1, 2)', "('-2', '-1', '0', '1', '2')"], {}), "((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))\n", (62387, 62435), True, 'import matplotlib.pyplot as plt\n'), ((62436, 62473), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(-0.25 * np.pi)', '(5.25 * np.pi)'], {}), '(-0.25 * np.pi, 5.25 * np.pi)\n', (62444, 62473), True, 'import matplotlib.pyplot as plt\n'), 
((62634, 62695), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/detrended_fluctuation_analysis.png"""'], {}), "('jss_figures/detrended_fluctuation_analysis.png')\n", (62645, 62695), True, 'import matplotlib.pyplot as plt\n'), ((62696, 62706), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (62704, 62706), True, 'import matplotlib.pyplot as plt\n'), ((62914, 62939), 'numpy.linspace', 'np.linspace', (['(0)', '(150)', '(1501)'], {}), '(0, 150, 1501)\n', (62925, 62939), True, 'import numpy as np\n'), ((62964, 62996), 'scipy.integrate.odeint', 'odeint', (['duffing_equation', 'XY0', 't'], {}), '(duffing_equation, XY0, t)\n', (62970, 62996), False, 'from scipy.integrate import odeint\n'), ((63156, 63174), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)', '(1)'], {}), '(2, 1)\n', (63168, 63174), True, 'import matplotlib.pyplot as plt\n'), ((63175, 63206), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'hspace': '(0.2)'}), '(hspace=0.2)\n', (63194, 63206), True, 'import matplotlib.pyplot as plt\n'), ((63783, 63830), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/Duffing_equation.png"""'], {}), "('jss_figures/Duffing_equation.png')\n", (63794, 63830), True, 'import matplotlib.pyplot as plt\n'), ((63831, 63841), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (63839, 63841), True, 'import matplotlib.pyplot as plt\n'), ((63891, 63902), 'PyEMD.EMD', 'pyemd0215', ([], {}), '()\n', (63900, 63902), True, 'from PyEMD import EMD as pyemd0215\n'), ((63934, 63993), 'emd.spectra.frequency_transform', 'emd040.spectra.frequency_transform', (['py_emd.T', '(10)', '"""hilbert"""'], {}), "(py_emd.T, 10, 'hilbert')\n", (63968, 63993), True, 'import emd as emd040\n'), ((64018, 64062), 'emd.spectra.define_hist_bins', 'emd040.spectra.define_hist_bins', (['(0)', '(0.2)', '(100)'], {}), '(0, 0.2, 100)\n', (64049, 64062), True, 'import emd as emd040\n'), ((64069, 64116), 'emd.spectra.hilberthuang', 'emd040.spectra.hilberthuang', (['IF', 'IA', 'freq_edges'], {}), '(IF, IA, freq_edges)\n', (64096, 64116), True, 'import emd as emd040\n'), ((64123, 64152), 'scipy.ndimage.gaussian_filter', 'gaussian_filter', (['hht'], {'sigma': '(1)'}), '(hht, sigma=1)\n', (64138, 64152), False, 'from scipy.ndimage import gaussian_filter\n'), ((64158, 64174), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (64169, 64174), True, 'import matplotlib.pyplot as plt\n'), ((64727, 64756), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[0, 50, 100, 150]'], {}), '([0, 50, 100, 150])\n', (64737, 64756), True, 'import matplotlib.pyplot as plt\n'), ((64757, 64782), 'matplotlib.pyplot.yticks', 'plt.yticks', (['[0, 0.1, 0.2]'], {}), '([0, 0.1, 0.2])\n', (64767, 64782), True, 'import matplotlib.pyplot as plt\n'), ((64783, 64811), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Frequency (Hz)"""'], {}), "('Frequency (Hz)')\n", (64793, 64811), True, 'import matplotlib.pyplot as plt\n'), ((64812, 64834), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (s)"""'], {}), "('Time (s)')\n", (64822, 64834), True, 'import matplotlib.pyplot as plt\n'), ((65001, 65057), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/Duffing_equation_ht_pyemd.png"""'], {}), "('jss_figures/Duffing_equation_ht_pyemd.png')\n", (65012, 65057), True, 'import matplotlib.pyplot as plt\n'), ((65058, 65068), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (65066, 65068), True, 'import matplotlib.pyplot as plt\n'), ((65070, 65080), 'matplotlib.pyplot.show', 'plt.show', ([], {}), 
'()\n', (65078, 65080), True, 'import matplotlib.pyplot as plt\n'), ((65093, 65112), 'emd.sift.sift', 'emd040.sift.sift', (['x'], {}), '(x)\n', (65109, 65112), True, 'import emd as emd040\n'), ((65126, 65185), 'emd.spectra.frequency_transform', 'emd040.spectra.frequency_transform', (['emd_sift', '(10)', '"""hilbert"""'], {}), "(emd_sift, 10, 'hilbert')\n", (65160, 65185), True, 'import emd as emd040\n'), ((65210, 65254), 'emd.spectra.define_hist_bins', 'emd040.spectra.define_hist_bins', (['(0)', '(0.2)', '(100)'], {}), '(0, 0.2, 100)\n', (65241, 65254), True, 'import emd as emd040\n'), ((65261, 65308), 'emd.spectra.hilberthuang', 'emd040.spectra.hilberthuang', (['IF', 'IA', 'freq_edges'], {}), '(IF, IA, freq_edges)\n', (65288, 65308), True, 'import emd as emd040\n'), ((65315, 65344), 'scipy.ndimage.gaussian_filter', 'gaussian_filter', (['hht'], {'sigma': '(1)'}), '(hht, sigma=1)\n', (65330, 65344), False, 'from scipy.ndimage import gaussian_filter\n'), ((65350, 65366), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (65361, 65366), True, 'import matplotlib.pyplot as plt\n'), ((65916, 65945), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[0, 50, 100, 150]'], {}), '([0, 50, 100, 150])\n', (65926, 65945), True, 'import matplotlib.pyplot as plt\n'), ((65946, 65971), 'matplotlib.pyplot.yticks', 'plt.yticks', (['[0, 0.1, 0.2]'], {}), '([0, 0.1, 0.2])\n', (65956, 65971), True, 'import matplotlib.pyplot as plt\n'), ((65972, 66000), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Frequency (Hz)"""'], {}), "('Frequency (Hz)')\n", (65982, 66000), True, 'import matplotlib.pyplot as plt\n'), ((66001, 66023), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (s)"""'], {}), "('Time (s)')\n", (66011, 66023), True, 'import matplotlib.pyplot as plt\n'), ((66190, 66244), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/Duffing_equation_ht_emd.png"""'], {}), "('jss_figures/Duffing_equation_ht_emd.png')\n", (66201, 66244), True, 'import matplotlib.pyplot as plt\n'), ((66245, 66255), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (66253, 66255), True, 'import matplotlib.pyplot as plt\n'), ((66314, 66349), 'AdvEMDpy.EMD', 'AdvEMDpy.EMD', ([], {'time': 't', 'time_series': 'x'}), '(time=t, time_series=x)\n', (66326, 66349), False, 'import AdvEMDpy\n'), ((66467, 66485), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)', '(1)'], {}), '(2, 1)\n', (66479, 66485), True, 'import matplotlib.pyplot as plt\n'), ((66486, 66517), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'hspace': '(0.3)'}), '(hspace=0.3)\n', (66505, 66517), True, 'import matplotlib.pyplot as plt\n'), ((68054, 68106), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/Duffing_equation_imfs.png"""'], {}), "('jss_figures/Duffing_equation_imfs.png')\n", (68065, 68106), True, 'import matplotlib.pyplot as plt\n'), ((68107, 68117), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (68115, 68117), True, 'import matplotlib.pyplot as plt\n'), ((68131, 68221), 'emd_hilbert.hilbert_spectrum', 'hilbert_spectrum', (['t', 'emd_duff', 'emd_ht_duff', 'emd_if_duff'], {'max_frequency': '(1.3)', 'plot': '(False)'}), '(t, emd_duff, emd_ht_duff, emd_if_duff, max_frequency=1.3,\n plot=False)\n', (68147, 68221), False, 'from emd_hilbert import Hilbert, hilbert_spectrum\n'), ((68224, 68240), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (68235, 68240), True, 'import matplotlib.pyplot as plt\n'), ((68842, 68871), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[0, 50, 
100, 150]'], {}), '([0, 50, 100, 150])\n', (68852, 68871), True, 'import matplotlib.pyplot as plt\n'), ((68872, 68897), 'matplotlib.pyplot.yticks', 'plt.yticks', (['[0, 0.1, 0.2]'], {}), '([0, 0.1, 0.2])\n', (68882, 68897), True, 'import matplotlib.pyplot as plt\n'), ((68898, 68926), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Frequency (Hz)"""'], {}), "('Frequency (Hz)')\n", (68908, 68926), True, 'import matplotlib.pyplot as plt\n'), ((68927, 68949), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (s)"""'], {}), "('Time (s)')\n", (68937, 68949), True, 'import matplotlib.pyplot as plt\n'), ((69116, 69166), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/Duffing_equation_ht.png"""'], {}), "('jss_figures/Duffing_equation_ht.png')\n", (69127, 69166), True, 'import matplotlib.pyplot as plt\n'), ((69167, 69177), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (69175, 69177), True, 'import matplotlib.pyplot as plt\n'), ((69230, 69275), 'pandas.read_csv', 'pd.read_csv', (['"""Data/co2_mm_mlo.csv"""'], {'header': '(51)'}), "('Data/co2_mm_mlo.csv', header=51)\n", (69241, 69275), True, 'import pandas as pd\n'), ((69277, 69330), 'matplotlib.pyplot.plot', 'plt.plot', (["CO2_data['month']", "CO2_data['decimal date']"], {}), "(CO2_data['month'], CO2_data['decimal date'])\n", (69285, 69330), True, 'import matplotlib.pyplot as plt\n'), ((69426, 69457), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Parts per million"""'], {}), "('Parts per million')\n", (69436, 69457), True, 'import matplotlib.pyplot as plt\n'), ((69458, 69484), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (years)"""'], {}), "('Time (years)')\n", (69468, 69484), True, 'import matplotlib.pyplot as plt\n'), ((69485, 69533), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/CO2_concentration.png"""'], {}), "('jss_figures/CO2_concentration.png')\n", (69496, 69533), True, 'import matplotlib.pyplot as plt\n'), ((69534, 69544), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (69542, 69544), True, 'import matplotlib.pyplot as plt\n'), ((69589, 69607), 'numpy.asarray', 'np.asarray', (['signal'], {}), '(signal)\n', (69599, 69607), True, 'import numpy as np\n'), ((69640, 69656), 'numpy.asarray', 'np.asarray', (['time'], {}), '(time)\n', (69650, 69656), True, 'import numpy as np\n'), ((69713, 69724), 'PyEMD.EMD', 'pyemd0215', ([], {}), '()\n', (69722, 69724), True, 'from PyEMD import EMD as pyemd0215\n'), ((69761, 69827), 'emd.spectra.frequency_transform', 'emd040.spectra.frequency_transform', (['py_emd[:2, :].T', '(12)', '"""hilbert"""'], {}), "(py_emd[:2, :].T, 12, 'hilbert')\n", (69795, 69827), True, 'import emd as emd040\n'), ((69954, 69996), 'emd.spectra.define_hist_bins', 'emd040.spectra.define_hist_bins', (['(0)', '(2)', '(100)'], {}), '(0, 2, 100)\n', (69985, 69996), True, 'import emd as emd040\n'), ((70003, 70050), 'emd.spectra.hilberthuang', 'emd040.spectra.hilberthuang', (['IF', 'IA', 'freq_edges'], {}), '(IF, IA, freq_edges)\n', (70030, 70050), True, 'import emd as emd040\n'), ((70057, 70086), 'scipy.ndimage.gaussian_filter', 'gaussian_filter', (['hht'], {'sigma': '(1)'}), '(hht, sigma=1)\n', (70072, 70086), False, 'from scipy.ndimage import gaussian_filter\n'), ((70097, 70111), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (70109, 70111), True, 'import matplotlib.pyplot as plt\n'), ((70348, 70385), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Frequency (year$^{-1}$)"""'], {}), "('Frequency (year$^{-1}$)')\n", (70358, 70385), True, 'import matplotlib.pyplot 
as plt\n'), ((70386, 70412), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (years)"""'], {}), "('Time (years)')\n", (70396, 70412), True, 'import matplotlib.pyplot as plt\n'), ((70770, 70818), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/CO2_Hilbert_pyemd.png"""'], {}), "('jss_figures/CO2_Hilbert_pyemd.png')\n", (70781, 70818), True, 'import matplotlib.pyplot as plt\n'), ((70819, 70829), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (70827, 70829), True, 'import matplotlib.pyplot as plt\n'), ((70842, 70866), 'emd.sift.sift', 'emd040.sift.sift', (['signal'], {}), '(signal)\n', (70858, 70866), True, 'import emd as emd040\n'), ((70880, 70946), 'emd.spectra.frequency_transform', 'emd040.spectra.frequency_transform', (['emd_sift[:, :1]', '(12)', '"""hilbert"""'], {}), "(emd_sift[:, :1], 12, 'hilbert')\n", (70914, 70946), True, 'import emd as emd040\n'), ((71062, 71104), 'emd.spectra.define_hist_bins', 'emd040.spectra.define_hist_bins', (['(0)', '(2)', '(100)'], {}), '(0, 2, 100)\n', (71093, 71104), True, 'import emd as emd040\n'), ((71111, 71158), 'emd.spectra.hilberthuang', 'emd040.spectra.hilberthuang', (['IF', 'IA', 'freq_edges'], {}), '(IF, IA, freq_edges)\n', (71138, 71158), True, 'import emd as emd040\n'), ((71165, 71194), 'scipy.ndimage.gaussian_filter', 'gaussian_filter', (['hht'], {'sigma': '(1)'}), '(hht, sigma=1)\n', (71180, 71194), False, 'from scipy.ndimage import gaussian_filter\n'), ((71205, 71219), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (71217, 71219), True, 'import matplotlib.pyplot as plt\n'), ((71453, 71490), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Frequency (year$^{-1}$)"""'], {}), "('Frequency (year$^{-1}$)')\n", (71463, 71490), True, 'import matplotlib.pyplot as plt\n'), ((71491, 71517), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (years)"""'], {}), "('Time (years)')\n", (71501, 71517), True, 'import matplotlib.pyplot as plt\n'), ((71875, 71921), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/CO2_Hilbert_emd.png"""'], {}), "('jss_figures/CO2_Hilbert_emd.png')\n", (71886, 71921), True, 'import matplotlib.pyplot as plt\n'), ((71922, 71932), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (71930, 71932), True, 'import matplotlib.pyplot as plt\n'), ((71992, 72027), 'numpy.linspace', 'np.linspace', (['time[0]', 'time[-1]', '(200)'], {}), '(time[0], time[-1], 200)\n', (72003, 72027), True, 'import numpy as np\n'), ((72043, 72086), 'AdvEMDpy.EMD', 'AdvEMDpy.EMD', ([], {'time': 'time', 'time_series': 'signal'}), '(time=time, time_series=signal)\n', (72055, 72086), False, 'import AdvEMDpy\n'), ((72341, 72359), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)', '(2)'], {}), '(2, 2)\n', (72353, 72359), True, 'import matplotlib.pyplot as plt\n'), ((72360, 72391), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'hspace': '(0.5)'}), '(hspace=0.5)\n', (72379, 72391), True, 'import matplotlib.pyplot as plt\n'), ((73151, 73189), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/CO2_EMD.png"""'], {}), "('jss_figures/CO2_EMD.png')\n", (73162, 73189), True, 'import matplotlib.pyplot as plt\n'), ((73190, 73200), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (73198, 73200), True, 'import matplotlib.pyplot as plt\n'), ((73214, 73302), 'emd_hilbert.hilbert_spectrum', 'hilbert_spectrum', (['time', 'imfs', 'hts', 'ifs'], {'max_frequency': '(10)', 'which_imfs': '[1]', 'plot': '(False)'}), '(time, imfs, hts, ifs, max_frequency=10, which_imfs=[1],\n 
plot=False)\n', (73230, 73302), False, 'from emd_hilbert import Hilbert, hilbert_spectrum\n'), ((73388, 73402), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (73400, 73402), True, 'import matplotlib.pyplot as plt\n'), ((73718, 73755), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Frequency (year$^{-1}$)"""'], {}), "('Frequency (year$^{-1}$)')\n", (73728, 73755), True, 'import matplotlib.pyplot as plt\n'), ((73756, 73782), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (years)"""'], {}), "('Time (years)')\n", (73766, 73782), True, 'import matplotlib.pyplot as plt\n'), ((74105, 74147), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/CO2_Hilbert.png"""'], {}), "('jss_figures/CO2_Hilbert.png')\n", (74116, 74147), True, 'import matplotlib.pyplot as plt\n'), ((74148, 74158), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (74156, 74158), True, 'import matplotlib.pyplot as plt\n'), ((772, 795), 'numpy.sin', 'np.sin', (['pseudo_alg_time'], {}), '(pseudo_alg_time)\n', (778, 795), True, 'import numpy as np\n'), ((798, 825), 'numpy.sin', 'np.sin', (['(5 * pseudo_alg_time)'], {}), '(5 * pseudo_alg_time)\n', (804, 825), True, 'import numpy as np\n'), ((1807, 1830), 'numpy.sin', 'np.sin', (['pseudo_alg_time'], {}), '(pseudo_alg_time)\n', (1813, 1830), True, 'import numpy as np\n'), ((2994, 3021), 'numpy.linspace', 'np.linspace', (['(-0.2)', '(1.2)', '(100)'], {}), '(-0.2, 1.2, 100)\n', (3005, 3021), True, 'import numpy as np\n'), ((3056, 3083), 'numpy.linspace', 'np.linspace', (['(-0.2)', '(1.2)', '(100)'], {}), '(-0.2, 1.2, 100)\n', (3067, 3083), True, 'import numpy as np\n'), ((3286, 3315), 'numpy.sin', 'np.sin', (['knot_demonstrate_time'], {}), '(knot_demonstrate_time)\n', (3292, 3315), True, 'import numpy as np\n'), ((3318, 3351), 'numpy.sin', 'np.sin', (['(5 * knot_demonstrate_time)'], {}), '(5 * knot_demonstrate_time)\n', (3324, 3351), True, 'import numpy as np\n'), ((4529, 4552), 'numpy.linspace', 'np.linspace', (['(-2)', '(2)', '(101)'], {}), '(-2, 2, 101)\n', (4540, 4552), True, 'import numpy as np\n'), ((4662, 4685), 'numpy.linspace', 'np.linspace', (['(-2)', '(2)', '(101)'], {}), '(-2, 2, 101)\n', (4673, 4685), True, 'import numpy as np\n'), ((4763, 4786), 'numpy.linspace', 'np.linspace', (['(-2)', '(2)', '(101)'], {}), '(-2, 2, 101)\n', (4774, 4786), True, 'import numpy as np\n'), ((5139, 5168), 'numpy.sin', 'np.sin', (['knot_demonstrate_time'], {}), '(knot_demonstrate_time)\n', (5145, 5168), True, 'import numpy as np\n'), ((5171, 5204), 'numpy.sin', 'np.sin', (['(5 * knot_demonstrate_time)'], {}), '(5 * knot_demonstrate_time)\n', (5177, 5204), True, 'import numpy as np\n'), ((6445, 6468), 'numpy.linspace', 'np.linspace', (['(-2)', '(2)', '(101)'], {}), '(-2, 2, 101)\n', (6456, 6468), True, 'import numpy as np\n'), ((6570, 6593), 'numpy.linspace', 'np.linspace', (['(-2)', '(2)', '(101)'], {}), '(-2, 2, 101)\n', (6581, 6593), True, 'import numpy as np\n'), ((6663, 6686), 'numpy.linspace', 'np.linspace', (['(-2)', '(2)', '(101)'], {}), '(-2, 2, 101)\n', (6674, 6686), True, 'import numpy as np\n'), ((7017, 7046), 'numpy.sin', 'np.sin', (['knot_demonstrate_time'], {}), '(knot_demonstrate_time)\n', (7023, 7046), True, 'import numpy as np\n'), ((7049, 7082), 'numpy.sin', 'np.sin', (['(5 * knot_demonstrate_time)'], {}), '(5 * knot_demonstrate_time)\n', (7055, 7082), True, 'import numpy as np\n'), ((8309, 8332), 'numpy.linspace', 'np.linspace', (['(-2)', '(2)', '(101)'], {}), '(-2, 2, 101)\n', (8320, 8332), True, 'import numpy as np\n'), ((8437, 8460), 
'numpy.linspace', 'np.linspace', (['(-2)', '(2)', '(101)'], {}), '(-2, 2, 101)\n', (8448, 8460), True, 'import numpy as np\n'), ((8533, 8556), 'numpy.linspace', 'np.linspace', (['(-2)', '(2)', '(101)'], {}), '(-2, 2, 101)\n', (8544, 8556), True, 'import numpy as np\n'), ((9378, 9400), 'numpy.random.normal', 'np.random.normal', (['(0)', '(1)'], {}), '(0, 1)\n', (9394, 9400), True, 'import numpy as np\n'), ((10482, 10526), 'numpy.linspace', 'np.linspace', (['(0.85 * np.pi)', '(1.15 * np.pi)', '(101)'], {}), '(0.85 * np.pi, 1.15 * np.pi, 101)\n', (10493, 10526), True, 'import numpy as np\n'), ((10630, 10674), 'numpy.linspace', 'np.linspace', (['(0.85 * np.pi)', '(1.15 * np.pi)', '(101)'], {}), '(0.85 * np.pi, 1.15 * np.pi, 101)\n', (10641, 10674), True, 'import numpy as np\n'), ((10752, 10775), 'numpy.linspace', 'np.linspace', (['(-3)', '(3)', '(101)'], {}), '(-3, 3, 101)\n', (10763, 10775), True, 'import numpy as np\n'), ((10835, 10858), 'numpy.linspace', 'np.linspace', (['(-3)', '(3)', '(101)'], {}), '(-3, 3, 101)\n', (10846, 10858), True, 'import numpy as np\n'), ((13644, 13688), 'numpy.linspace', 'np.linspace', (['(0.85 * np.pi)', '(1.15 * np.pi)', '(101)'], {}), '(0.85 * np.pi, 1.15 * np.pi, 101)\n', (13655, 13688), True, 'import numpy as np\n'), ((13792, 13836), 'numpy.linspace', 'np.linspace', (['(0.85 * np.pi)', '(1.15 * np.pi)', '(101)'], {}), '(0.85 * np.pi, 1.15 * np.pi, 101)\n', (13803, 13836), True, 'import numpy as np\n'), ((13914, 13937), 'numpy.linspace', 'np.linspace', (['(-3)', '(3)', '(101)'], {}), '(-3, 3, 101)\n', (13925, 13937), True, 'import numpy as np\n'), ((13997, 14020), 'numpy.linspace', 'np.linspace', (['(-3)', '(3)', '(101)'], {}), '(-3, 3, 101)\n', (14008, 14020), True, 'import numpy as np\n'), ((15795, 15822), 'numpy.linspace', 'np.linspace', (['(-0.2)', '(0.8)', '(100)'], {}), '(-0.2, 0.8, 100)\n', (15806, 15822), True, 'import numpy as np\n'), ((15860, 15887), 'numpy.linspace', 'np.linspace', (['(-0.2)', '(0.8)', '(100)'], {}), '(-0.2, 0.8, 100)\n', (15871, 15887), True, 'import numpy as np\n'), ((16263, 16290), 'numpy.linspace', 'np.linspace', (['(-0.2)', '(1.2)', '(100)'], {}), '(-0.2, 1.2, 100)\n', (16274, 16290), True, 'import numpy as np\n'), ((16328, 16355), 'numpy.linspace', 'np.linspace', (['(-0.2)', '(1.2)', '(100)'], {}), '(-0.2, 1.2, 100)\n', (16339, 16355), True, 'import numpy as np\n'), ((16622, 16634), 'numpy.cos', 'np.cos', (['time'], {}), '(time)\n', (16628, 16634), True, 'import numpy as np\n'), ((16637, 16653), 'numpy.cos', 'np.cos', (['(5 * time)'], {}), '(5 * time)\n', (16643, 16653), True, 'import numpy as np\n'), ((17029, 17056), 'numpy.ones_like', 'np.ones_like', (['max_dash_time'], {}), '(max_dash_time)\n', (17041, 17056), True, 'import numpy as np\n'), ((17160, 17187), 'numpy.ones_like', 'np.ones_like', (['min_dash_time'], {}), '(min_dash_time)\n', (17172, 17187), True, 'import numpy as np\n'), ((17517, 17552), 'numpy.ones_like', 'np.ones_like', (['max_discard_dash_time'], {}), '(max_discard_dash_time)\n', (17529, 17552), True, 'import numpy as np\n'), ((18741, 18770), 'numpy.ones_like', 'np.ones_like', (['length_distance'], {}), '(length_distance)\n', (18753, 18770), True, 'import numpy as np\n'), ((18880, 18905), 'numpy.ones_like', 'np.ones_like', (['length_time'], {}), '(length_time)\n', (18892, 18905), True, 'import numpy as np\n'), ((18937, 18962), 'numpy.ones_like', 'np.ones_like', (['length_time'], {}), '(length_time)\n', (18949, 18962), True, 'import numpy as np\n'), ((19089, 19120), 'numpy.ones_like', 'np.ones_like', 
(['length_distance_2'], {}), '(length_distance_2)\n', (19101, 19120), True, 'import numpy as np\n'), ((19237, 19264), 'numpy.ones_like', 'np.ones_like', (['length_time_2'], {}), '(length_time_2)\n', (19249, 19264), True, 'import numpy as np\n'), ((19298, 19325), 'numpy.ones_like', 'np.ones_like', (['length_time_2'], {}), '(length_time_2)\n', (19310, 19325), True, 'import numpy as np\n'), ((19365, 19377), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (19372, 19377), True, 'import numpy as np\n'), ((19412, 19424), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (19419, 19424), True, 'import numpy as np\n'), ((19561, 19583), 'numpy.ones_like', 'np.ones_like', (['end_time'], {}), '(end_time)\n', (19573, 19583), True, 'import numpy as np\n'), ((19698, 19731), 'numpy.ones_like', 'np.ones_like', (['anti_symmetric_time'], {}), '(anti_symmetric_time)\n', (19710, 19731), True, 'import numpy as np\n'), ((22012, 22024), 'numpy.cos', 'np.cos', (['time'], {}), '(time)\n', (22018, 22024), True, 'import numpy as np\n'), ((22027, 22043), 'numpy.cos', 'np.cos', (['(5 * time)'], {}), '(5 * time)\n', (22033, 22043), True, 'import numpy as np\n'), ((22497, 22521), 'numpy.ones_like', 'np.ones_like', (['max_dash_1'], {}), '(max_dash_1)\n', (22509, 22521), True, 'import numpy as np\n'), ((22555, 22579), 'numpy.ones_like', 'np.ones_like', (['max_dash_1'], {}), '(max_dash_1)\n', (22567, 22579), True, 'import numpy as np\n'), ((22761, 22785), 'numpy.ones_like', 'np.ones_like', (['min_dash_1'], {}), '(min_dash_1)\n', (22773, 22785), True, 'import numpy as np\n'), ((22819, 22843), 'numpy.ones_like', 'np.ones_like', (['min_dash_1'], {}), '(min_dash_1)\n', (22831, 22843), True, 'import numpy as np\n'), ((23342, 23366), 'numpy.ones_like', 'np.ones_like', (['max_dash_1'], {}), '(max_dash_1)\n', (23354, 23366), True, 'import numpy as np\n'), ((23878, 23902), 'numpy.ones_like', 'np.ones_like', (['min_dash_1'], {}), '(min_dash_1)\n', (23890, 23902), True, 'import numpy as np\n'), ((24227, 24252), 'numpy.ones_like', 'np.ones_like', (['maxima_dash'], {}), '(maxima_dash)\n', (24239, 24252), True, 'import numpy as np\n'), ((24289, 24314), 'numpy.ones_like', 'np.ones_like', (['maxima_dash'], {}), '(maxima_dash)\n', (24301, 24314), True, 'import numpy as np\n'), ((24363, 24388), 'numpy.ones_like', 'np.ones_like', (['maxima_dash'], {}), '(maxima_dash)\n', (24375, 24388), True, 'import numpy as np\n'), ((24495, 24530), 'numpy.ones_like', 'np.ones_like', (['maxima_line_dash_time'], {}), '(maxima_line_dash_time)\n', (24507, 24530), True, 'import numpy as np\n'), ((24627, 24652), 'numpy.ones_like', 'np.ones_like', (['minima_dash'], {}), '(minima_dash)\n', (24639, 24652), True, 'import numpy as np\n'), ((24689, 24714), 'numpy.ones_like', 'np.ones_like', (['minima_dash'], {}), '(minima_dash)\n', (24701, 24714), True, 'import numpy as np\n'), ((24763, 24788), 'numpy.ones_like', 'np.ones_like', (['minima_dash'], {}), '(minima_dash)\n', (24775, 24788), True, 'import numpy as np\n'), ((24896, 24931), 'numpy.ones_like', 'np.ones_like', (['minima_line_dash_time'], {}), '(minima_line_dash_time)\n', (24908, 24931), True, 'import numpy as np\n'), ((25757, 25781), 'numpy.ones_like', 'np.ones_like', (['min_dash_4'], {}), '(min_dash_4)\n', (25769, 25781), True, 'import numpy as np\n'), ((29062, 29074), 'numpy.cos', 'np.cos', (['time'], {}), '(time)\n', (29068, 29074), True, 'import numpy as np\n'), ((29077, 29093), 'numpy.cos', 'np.cos', (['(5 * time)'], {}), '(5 * time)\n', (29083, 29093), True, 'import numpy as np\n'), ((29372, 29407), 
'numpy.abs', 'np.abs', (['(maxima_y[-2] - minima_y[-2])'], {}), '(maxima_y[-2] - minima_y[-2])\n', (29378, 29407), True, 'import numpy as np\n'), ((29417, 29452), 'numpy.abs', 'np.abs', (['(maxima_y[-1] - minima_y[-1])'], {}), '(maxima_y[-1] - minima_y[-1])\n', (29423, 29452), True, 'import numpy as np\n'), ((29466, 29501), 'numpy.abs', 'np.abs', (['(maxima_x[-2] - minima_x[-2])'], {}), '(maxima_x[-2] - minima_x[-2])\n', (29472, 29501), True, 'import numpy as np\n'), ((29511, 29546), 'numpy.abs', 'np.abs', (['(maxima_x[-1] - minima_x[-1])'], {}), '(maxima_x[-1] - minima_x[-1])\n', (29517, 29546), True, 'import numpy as np\n'), ((29806, 29871), 'numpy.cos', 'np.cos', (['(2 * np.pi * (1 / P1) * (Coughlin_time - Coughlin_time[0]))'], {}), '(2 * np.pi * (1 / P1) * (Coughlin_time - Coughlin_time[0]))\n', (29812, 29871), True, 'import numpy as np\n'), ((31033, 31059), 'numpy.ones_like', 'np.ones_like', (['max_2_x_time'], {}), '(max_2_x_time)\n', (31045, 31059), True, 'import numpy as np\n'), ((31241, 31267), 'numpy.ones_like', 'np.ones_like', (['min_2_x_time'], {}), '(min_2_x_time)\n', (31253, 31267), True, 'import numpy as np\n'), ((31371, 31401), 'numpy.ones_like', 'np.ones_like', (['dash_max_min_2_x'], {}), '(dash_max_min_2_x)\n', (31383, 31401), True, 'import numpy as np\n'), ((31564, 31585), 'numpy.ones_like', 'np.ones_like', (['max_2_y'], {}), '(max_2_y)\n', (31576, 31585), True, 'import numpy as np\n'), ((31748, 31769), 'numpy.ones_like', 'np.ones_like', (['min_2_y'], {}), '(min_2_y)\n', (31760, 31769), True, 'import numpy as np\n'), ((31866, 31901), 'numpy.ones_like', 'np.ones_like', (['dash_max_min_2_y_time'], {}), '(dash_max_min_2_y_time)\n', (31878, 31901), True, 'import numpy as np\n'), ((32083, 32109), 'numpy.ones_like', 'np.ones_like', (['max_1_x_time'], {}), '(max_1_x_time)\n', (32095, 32109), True, 'import numpy as np\n'), ((32291, 32317), 'numpy.ones_like', 'np.ones_like', (['min_1_x_time'], {}), '(min_1_x_time)\n', (32303, 32317), True, 'import numpy as np\n'), ((32421, 32451), 'numpy.ones_like', 'np.ones_like', (['dash_max_min_1_x'], {}), '(dash_max_min_1_x)\n', (32433, 32451), True, 'import numpy as np\n'), ((32614, 32635), 'numpy.ones_like', 'np.ones_like', (['max_1_y'], {}), '(max_1_y)\n', (32626, 32635), True, 'import numpy as np\n'), ((32798, 32819), 'numpy.ones_like', 'np.ones_like', (['min_1_y'], {}), '(min_1_y)\n', (32810, 32819), True, 'import numpy as np\n'), ((32916, 32951), 'numpy.ones_like', 'np.ones_like', (['dash_max_min_1_y_time'], {}), '(dash_max_min_1_y_time)\n', (32928, 32951), True, 'import numpy as np\n'), ((36151, 36163), 'numpy.cos', 'np.cos', (['time'], {}), '(time)\n', (36157, 36163), True, 'import numpy as np\n'), ((36166, 36182), 'numpy.cos', 'np.cos', (['(5 * time)'], {}), '(5 * time)\n', (36172, 36182), True, 'import numpy as np\n'), ((36283, 36311), 'numpy.zeros_like', 'np.zeros_like', (['time_extended'], {}), '(time_extended)\n', (36296, 36311), True, 'import numpy as np\n'), ((36783, 36808), 'numpy.ones', 'np.ones', (['neural_network_k'], {}), '(neural_network_k)\n', (36790, 36808), True, 'import numpy as np\n'), ((36943, 36974), 'numpy.matmul', 'np.matmul', (['weights', 'train_input'], {}), '(weights, train_input)\n', (36952, 36974), True, 'import numpy as np\n'), ((37097, 37123), 'numpy.mean', 'np.mean', (['gradients'], {'axis': '(1)'}), '(gradients, axis=1)\n', (37104, 37123), True, 'import numpy as np\n'), ((39331, 39364), 'cvxpy.norm', 'cvx.norm', (['(2 * (vx * P) + 1 - t)', '(2)'], {}), '(2 * (vx * P) + 1 - t, 2)\n', (39339, 39364), True, 
'import cvxpy as cvx\n'), ((42240, 42269), 'numpy.linspace', 'np.linspace', (['(-2.75)', '(2.75)', '(100)'], {}), '(-2.75, 2.75, 100)\n', (42251, 42269), True, 'import numpy as np\n'), ((42346, 42435), 'numpy.linspace', 'np.linspace', (['((time[-302] + time[-301]) / 2)', '((time[-302] + time[-301]) / 2 + 0.1)', '(100)'], {}), '((time[-302] + time[-301]) / 2, (time[-302] + time[-301]) / 2 + \n 0.1, 100)\n', (42357, 42435), True, 'import numpy as np\n'), ((42483, 42572), 'numpy.linspace', 'np.linspace', (['((time[-302] + time[-301]) / 2)', '((time[-302] + time[-301]) / 2 + 0.1)', '(100)'], {}), '((time[-302] + time[-301]) / 2, (time[-302] + time[-301]) / 2 + \n 0.1, 100)\n', (42494, 42572), True, 'import numpy as np\n'), ((42619, 42748), 'numpy.linspace', 'np.linspace', (['((time_extended[-1001] + time_extended[-1002]) / 2)', '((time_extended[-1001] + time_extended[-1002]) / 2 - 0.1)', '(100)'], {}), '((time_extended[-1001] + time_extended[-1002]) / 2, (\n time_extended[-1001] + time_extended[-1002]) / 2 - 0.1, 100)\n', (42630, 42748), True, 'import numpy as np\n'), ((42808, 42937), 'numpy.linspace', 'np.linspace', (['((time_extended[-1001] + time_extended[-1002]) / 2)', '((time_extended[-1001] + time_extended[-1002]) / 2 - 0.1)', '(100)'], {}), '((time_extended[-1001] + time_extended[-1002]) / 2, (\n time_extended[-1001] + time_extended[-1002]) / 2 - 0.1, 100)\n', (42819, 42937), True, 'import numpy as np\n'), ((43064, 43093), 'numpy.linspace', 'np.linspace', (['(-2.75)', '(2.75)', '(100)'], {}), '(-2.75, 2.75, 100)\n', (43075, 43093), True, 'import numpy as np\n'), ((43160, 43189), 'numpy.linspace', 'np.linspace', (['(-2.75)', '(2.75)', '(100)'], {}), '(-2.75, 2.75, 100)\n', (43171, 43189), True, 'import numpy as np\n'), ((43290, 43379), 'numpy.linspace', 'np.linspace', (['((time[-202] + time[-201]) / 2)', '((time[-202] + time[-201]) / 2 + 0.1)', '(100)'], {}), '((time[-202] + time[-201]) / 2, (time[-202] + time[-201]) / 2 + \n 0.1, 100)\n', (43301, 43379), True, 'import numpy as np\n'), ((43430, 43519), 'numpy.linspace', 'np.linspace', (['((time[-202] + time[-201]) / 2)', '((time[-202] + time[-201]) / 2 + 0.1)', '(100)'], {}), '((time[-202] + time[-201]) / 2, (time[-202] + time[-201]) / 2 + \n 0.1, 100)\n', (43441, 43519), True, 'import numpy as np\n'), ((43569, 43698), 'numpy.linspace', 'np.linspace', (['((time_extended[-1001] + time_extended[-1000]) / 2)', '((time_extended[-1001] + time_extended[-1000]) / 2 - 0.1)', '(100)'], {}), '((time_extended[-1001] + time_extended[-1000]) / 2, (\n time_extended[-1001] + time_extended[-1000]) / 2 - 0.1, 100)\n', (43580, 43698), True, 'import numpy as np\n'), ((43761, 43890), 'numpy.linspace', 'np.linspace', (['((time_extended[-1001] + time_extended[-1000]) / 2)', '((time_extended[-1001] + time_extended[-1000]) / 2 - 0.1)', '(100)'], {}), '((time_extended[-1001] + time_extended[-1000]) / 2, (\n time_extended[-1001] + time_extended[-1000]) / 2 - 0.1, 100)\n', (43772, 43890), True, 'import numpy as np\n'), ((44020, 44049), 'numpy.linspace', 'np.linspace', (['(-2.75)', '(2.75)', '(100)'], {}), '(-2.75, 2.75, 100)\n', (44031, 44049), True, 'import numpy as np\n'), ((44623, 44639), 'numpy.cos', 'np.cos', (['(8 * time)'], {}), '(8 * time)\n', (44629, 44639), True, 'import numpy as np\n'), ((45509, 45594), 'textwrap.fill', 'textwrap.fill', (['"""Comparison of Trends Extracted with Different Knot Sequences"""', '(40)'], {}), "('Comparison of Trends Extracted with Different Knot Sequences',\n 40)\n", (45522, 45594), False, 'import textwrap\n'), ((46081, 
46104), 'numpy.linspace', 'np.linspace', (['(-5)', '(5)', '(101)'], {}), '(-5, 5, 101)\n', (46092, 46104), True, 'import numpy as np\n'), ((46282, 46326), 'numpy.linspace', 'np.linspace', (['(0.95 * np.pi)', '(1.55 * np.pi)', '(101)'], {}), '(0.95 * np.pi, 1.55 * np.pi, 101)\n', (46293, 46326), True, 'import numpy as np\n'), ((46367, 46411), 'numpy.linspace', 'np.linspace', (['(0.95 * np.pi)', '(1.55 * np.pi)', '(101)'], {}), '(0.95 * np.pi, 1.55 * np.pi, 101)\n', (46378, 46411), True, 'import numpy as np\n'), ((46482, 46509), 'numpy.linspace', 'np.linspace', (['(-5.5)', '(5.5)', '(101)'], {}), '(-5.5, 5.5, 101)\n', (46493, 46509), True, 'import numpy as np\n'), ((46559, 46586), 'numpy.linspace', 'np.linspace', (['(-5.5)', '(5.5)', '(101)'], {}), '(-5.5, 5.5, 101)\n', (46570, 46586), True, 'import numpy as np\n'), ((47335, 47358), 'numpy.linspace', 'np.linspace', (['(-5)', '(5)', '(101)'], {}), '(-5, 5, 101)\n', (47346, 47358), True, 'import numpy as np\n'), ((47719, 47763), 'numpy.linspace', 'np.linspace', (['(0.95 * np.pi)', '(1.55 * np.pi)', '(101)'], {}), '(0.95 * np.pi, 1.55 * np.pi, 101)\n', (47730, 47763), True, 'import numpy as np\n'), ((47804, 47848), 'numpy.linspace', 'np.linspace', (['(0.95 * np.pi)', '(1.55 * np.pi)', '(101)'], {}), '(0.95 * np.pi, 1.55 * np.pi, 101)\n', (47815, 47848), True, 'import numpy as np\n'), ((47919, 47946), 'numpy.linspace', 'np.linspace', (['(-5.5)', '(5.5)', '(101)'], {}), '(-5.5, 5.5, 101)\n', (47930, 47946), True, 'import numpy as np\n'), ((47996, 48023), 'numpy.linspace', 'np.linspace', (['(-5.5)', '(5.5)', '(101)'], {}), '(-5.5, 5.5, 101)\n', (48007, 48023), True, 'import numpy as np\n'), ((48529, 48552), 'numpy.linspace', 'np.linspace', (['(-5)', '(5)', '(101)'], {}), '(-5, 5, 101)\n', (48540, 48552), True, 'import numpy as np\n'), ((48950, 48994), 'numpy.linspace', 'np.linspace', (['(0.95 * np.pi)', '(1.55 * np.pi)', '(101)'], {}), '(0.95 * np.pi, 1.55 * np.pi, 101)\n', (48961, 48994), True, 'import numpy as np\n'), ((49035, 49079), 'numpy.linspace', 'np.linspace', (['(0.95 * np.pi)', '(1.55 * np.pi)', '(101)'], {}), '(0.95 * np.pi, 1.55 * np.pi, 101)\n', (49046, 49079), True, 'import numpy as np\n'), ((49150, 49177), 'numpy.linspace', 'np.linspace', (['(-5.5)', '(5.5)', '(101)'], {}), '(-5.5, 5.5, 101)\n', (49161, 49177), True, 'import numpy as np\n'), ((49227, 49254), 'numpy.linspace', 'np.linspace', (['(-5.5)', '(5.5)', '(101)'], {}), '(-5.5, 5.5, 101)\n', (49238, 49254), True, 'import numpy as np\n'), ((49403, 49508), 'textwrap.fill', 'textwrap.fill', (['"""Comparison of Trends Extracted with Different Knot Sequences Zoomed Region"""', '(40)'], {}), "(\n 'Comparison of Trends Extracted with Different Knot Sequences Zoomed Region'\n , 40)\n", (49416, 49508), False, 'import textwrap\n'), ((49863, 49886), 'numpy.linspace', 'np.linspace', (['(-5)', '(5)', '(101)'], {}), '(-5, 5, 101)\n', (49874, 49886), True, 'import numpy as np\n'), ((50730, 50753), 'numpy.linspace', 'np.linspace', (['(-5)', '(5)', '(101)'], {}), '(-5, 5, 101)\n', (50741, 50753), True, 'import numpy as np\n'), ((51555, 51578), 'numpy.linspace', 'np.linspace', (['(-5)', '(5)', '(101)'], {}), '(-5, 5, 101)\n', (51566, 51578), True, 'import numpy as np\n'), ((52303, 52417), 'textwrap.fill', 'textwrap.fill', (['"""Gaussian Filtered Hilbert Spectrum of Simple Sinusoidal Time Seres with Added Noise"""', '(50)'], {}), "(\n 'Gaussian Filtered Hilbert Spectrum of Simple Sinusoidal Time Seres with Added Noise'\n , 50)\n", (52316, 52417), False, 'import textwrap\n'), ((52489, 52498), 
'numpy.abs', 'np.abs', (['z'], {}), '(z)\n', (52495, 52498), True, 'import numpy as np\n'), ((53307, 53319), 'numpy.cos', 'np.cos', (['time'], {}), '(time)\n', (53313, 53319), True, 'import numpy as np\n'), ((53322, 53338), 'numpy.cos', 'np.cos', (['(5 * time)'], {}), '(5 * time)\n', (53328, 53338), True, 'import numpy as np\n'), ((54128, 54257), 'textwrap.fill', 'textwrap.fill', (['"""Plot Demonstrating Unsmoothed Extrema Envelopes if Schoenberg–Whitney Conditions are Not Satisfied"""', '(50)'], {}), "(\n 'Plot Demonstrating Unsmoothed Extrema Envelopes if Schoenberg–Whitney Conditions are Not Satisfied'\n , 50)\n", (54141, 54257), False, 'import textwrap\n'), ((55025, 55053), 'numpy.linspace', 'np.linspace', (['(-3.0)', '(-2.0)', '(101)'], {}), '(-3.0, -2.0, 101)\n', (55036, 55053), True, 'import numpy as np\n'), ((55733, 55745), 'numpy.cos', 'np.cos', (['time'], {}), '(time)\n', (55739, 55745), True, 'import numpy as np\n'), ((55748, 55764), 'numpy.cos', 'np.cos', (['(5 * time)'], {}), '(5 * time)\n', (55754, 55764), True, 'import numpy as np\n'), ((62125, 62137), 'numpy.cos', 'np.cos', (['time'], {}), '(time)\n', (62131, 62137), True, 'import numpy as np\n'), ((64309, 64413), 'textwrap.fill', 'textwrap.fill', (['"""Gaussian Filtered Hilbert Spectrum of Duffing Equation using PyEMD 0.2.10"""', '(40)'], {}), "(\n 'Gaussian Filtered Hilbert Spectrum of Duffing Equation using PyEMD 0.2.10'\n , 40)\n", (64322, 64413), False, 'import textwrap\n'), ((65501, 65601), 'textwrap.fill', 'textwrap.fill', (['"""Gaussian Filtered Hilbert Spectrum of Duffing Equation using emd 0.3.3"""', '(40)'], {}), "(\n 'Gaussian Filtered Hilbert Spectrum of Duffing Equation using emd 0.3.3',\n 40)\n", (65514, 65601), False, 'import textwrap\n'), ((68251, 68351), 'textwrap.fill', 'textwrap.fill', (['"""Gaussian Filtered Hilbert Spectrum of Duffing Equation using AdvEMDpy"""', '(40)'], {}), "(\n 'Gaussian Filtered Hilbert Spectrum of Duffing Equation using AdvEMDpy', 40\n )\n", (68264, 68351), False, 'import textwrap\n'), ((68561, 68570), 'numpy.abs', 'np.abs', (['z'], {}), '(z)\n', (68567, 68570), True, 'import numpy as np\n'), ((69341, 69428), 'textwrap.fill', 'textwrap.fill', (['"""Mean Monthly Concentration of Carbon Dioxide in the Atmosphere"""', '(35)'], {}), "('Mean Monthly Concentration of Carbon Dioxide in the Atmosphere',\n 35)\n", (69354, 69428), False, 'import textwrap\n'), ((70246, 70356), 'textwrap.fill', 'textwrap.fill', (['"""Gaussian Filtered Hilbert Spectrum of CO$_{2}$ Concentration using PyEMD 0.2.10"""', '(45)'], {}), "(\n 'Gaussian Filtered Hilbert Spectrum of CO$_{2}$ Concentration using PyEMD 0.2.10'\n , 45)\n", (70259, 70356), False, 'import textwrap\n'), ((70528, 70546), 'numpy.ones_like', 'np.ones_like', (['time'], {}), '(time)\n', (70540, 70546), True, 'import numpy as np\n'), ((71354, 71461), 'textwrap.fill', 'textwrap.fill', (['"""Gaussian Filtered Hilbert Spectrum of CO$_{2}$ Concentration using emd 0.3.3"""', '(45)'], {}), "(\n 'Gaussian Filtered Hilbert Spectrum of CO$_{2}$ Concentration using emd 0.3.3'\n , 45)\n", (71367, 71461), False, 'import textwrap\n'), ((71633, 71651), 'numpy.ones_like', 'np.ones_like', (['time'], {}), '(time)\n', (71645, 71651), True, 'import numpy as np\n'), ((73550, 73559), 'numpy.abs', 'np.abs', (['z'], {}), '(z)\n', (73556, 73559), True, 'import numpy as np\n'), ((73619, 73725), 'textwrap.fill', 'textwrap.fill', (['"""Gaussian Filtered Hilbert Spectrum of CO$_{2}$ Concentration using AdvEMDpy"""', '(40)'], {}), "(\n 'Gaussian Filtered Hilbert Spectrum of 
CO$_{2}$ Concentration using AdvEMDpy'\n , 40)\n", (73632, 73725), False, 'import textwrap\n'), ((73804, 73830), 'numpy.ones_like', 'np.ones_like', (['x_hs[(0), :]'], {}), '(x_hs[(0), :])\n', (73816, 73830), True, 'import numpy as np\n'), ((983, 992), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (990, 992), True, 'import matplotlib.pyplot as plt\n'), ((1382, 1405), 'numpy.sin', 'np.sin', (['pseudo_alg_time'], {}), '(pseudo_alg_time)\n', (1388, 1405), True, 'import numpy as np\n'), ((1694, 1717), 'numpy.sin', 'np.sin', (['pseudo_alg_time'], {}), '(pseudo_alg_time)\n', (1700, 1717), True, 'import numpy as np\n'), ((2980, 2992), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (2987, 2992), True, 'import numpy as np\n'), ((3042, 3054), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (3049, 3054), True, 'import numpy as np\n'), ((3650, 3659), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (3657, 3659), True, 'import matplotlib.pyplot as plt\n'), ((4515, 4527), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (4522, 4527), True, 'import numpy as np\n'), ((4648, 4660), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (4655, 4660), True, 'import numpy as np\n'), ((4749, 4761), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (4756, 4761), True, 'import numpy as np\n'), ((5535, 5544), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (5542, 5544), True, 'import matplotlib.pyplot as plt\n'), ((6431, 6443), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (6438, 6443), True, 'import numpy as np\n'), ((6556, 6568), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (6563, 6568), True, 'import numpy as np\n'), ((6649, 6661), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (6656, 6661), True, 'import numpy as np\n'), ((7413, 7422), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (7420, 7422), True, 'import matplotlib.pyplot as plt\n'), ((8295, 8307), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (8302, 8307), True, 'import numpy as np\n'), ((8423, 8435), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (8430, 8435), True, 'import numpy as np\n'), ((8519, 8531), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (8526, 8531), True, 'import numpy as np\n'), ((8894, 8903), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (8901, 8903), True, 'import matplotlib.pyplot as plt\n'), ((8935, 8944), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (8942, 8944), True, 'import matplotlib.pyplot as plt\n'), ((9004, 9013), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (9011, 9013), True, 'import matplotlib.pyplot as plt\n'), ((9639, 9681), 'textwrap.fill', 'textwrap.fill', (['"""Noiseless time series"""', '(12)'], {}), "('Noiseless time series', 12)\n", (9652, 9681), False, 'import textwrap\n'), ((9766, 9798), 'textwrap.fill', 'textwrap.fill', (['"""Mean filter"""', '(12)'], {}), "('Mean filter', 12)\n", (9779, 9798), False, 'import textwrap\n'), ((9885, 9919), 'textwrap.fill', 'textwrap.fill', (['"""Median filter"""', '(13)'], {}), "('Median filter', 13)\n", (9898, 9919), False, 'import textwrap\n'), ((10009, 10047), 'textwrap.fill', 'textwrap.fill', (['"""Windsorize filter"""', '(12)'], {}), "('Windsorize filter', 12)\n", (10022, 10047), False, 'import textwrap\n'), ((10161, 10213), 'textwrap.fill', 'textwrap.fill', (['"""Windsorize interpolation filter"""', '(14)'], {}), "('Windsorize interpolation filter', 14)\n", (10174, 10213), False, 'import textwrap\n'), ((10332, 10368), 'textwrap.fill', 'textwrap.fill', (['"""Quantile 
window"""', '(12)'], {}), "('Quantile window', 12)\n", (10345, 10368), False, 'import textwrap\n'), ((10533, 10545), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (10540, 10545), True, 'import numpy as np\n'), ((10582, 10616), 'textwrap.fill', 'textwrap.fill', (['"""Zoomed region"""', '(10)'], {}), "('Zoomed region', 10)\n", (10595, 10616), False, 'import textwrap\n'), ((10680, 10692), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (10687, 10692), True, 'import numpy as np\n'), ((10738, 10750), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (10745, 10750), True, 'import numpy as np\n'), ((10821, 10833), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (10828, 10833), True, 'import numpy as np\n'), ((11163, 11205), 'textwrap.fill', 'textwrap.fill', (['"""Noiseless time series"""', '(12)'], {}), "('Noiseless time series', 12)\n", (11176, 11205), False, 'import textwrap\n'), ((11290, 11322), 'textwrap.fill', 'textwrap.fill', (['"""Mean filter"""', '(12)'], {}), "('Mean filter', 12)\n", (11303, 11322), False, 'import textwrap\n'), ((11409, 11443), 'textwrap.fill', 'textwrap.fill', (['"""Median filter"""', '(13)'], {}), "('Median filter', 13)\n", (11422, 11443), False, 'import textwrap\n'), ((11533, 11571), 'textwrap.fill', 'textwrap.fill', (['"""Windsorize filter"""', '(12)'], {}), "('Windsorize filter', 12)\n", (11546, 11571), False, 'import textwrap\n'), ((11685, 11737), 'textwrap.fill', 'textwrap.fill', (['"""Windsorize interpolation filter"""', '(14)'], {}), "('Windsorize interpolation filter', 14)\n", (11698, 11737), False, 'import textwrap\n'), ((11856, 11892), 'textwrap.fill', 'textwrap.fill', (['"""Quantile window"""', '(12)'], {}), "('Quantile window', 12)\n", (11869, 11892), False, 'import textwrap\n'), ((12615, 12624), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (12622, 12624), True, 'import matplotlib.pyplot as plt\n'), ((12656, 12665), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (12663, 12665), True, 'import matplotlib.pyplot as plt\n'), ((12725, 12734), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (12732, 12734), True, 'import matplotlib.pyplot as plt\n'), ((13009, 13051), 'textwrap.fill', 'textwrap.fill', (['"""Noiseless time series"""', '(12)'], {}), "('Noiseless time series', 12)\n", (13022, 13051), False, 'import textwrap\n'), ((13120, 13167), 'textwrap.fill', 'textwrap.fill', (['"""Hodrick-Prescott smoothing"""', '(12)'], {}), "('Hodrick-Prescott smoothing', 12)\n", (13133, 13167), False, 'import textwrap\n'), ((13244, 13294), 'textwrap.fill', 'textwrap.fill', (['"""Henderson-Whittaker smoothing"""', '(13)'], {}), "('Henderson-Whittaker smoothing', 13)\n", (13257, 13294), False, 'import textwrap\n'), ((13438, 13482), 'textwrap.fill', 'textwrap.fill', (['"""Downsampled & decimated"""', '(11)'], {}), "('Downsampled & decimated', 11)\n", (13451, 13482), False, 'import textwrap\n'), ((13598, 13630), 'textwrap.fill', 'textwrap.fill', (['"""Downsampled"""', '(13)'], {}), "('Downsampled', 13)\n", (13611, 13630), False, 'import textwrap\n'), ((13695, 13707), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (13702, 13707), True, 'import numpy as np\n'), ((13744, 13778), 'textwrap.fill', 'textwrap.fill', (['"""Zoomed region"""', '(10)'], {}), "('Zoomed region', 10)\n", (13757, 13778), False, 'import textwrap\n'), ((13842, 13854), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (13849, 13854), True, 'import numpy as np\n'), ((13900, 13912), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (13907, 13912), True, 'import 
numpy as np\n'), ((13983, 13995), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (13990, 13995), True, 'import numpy as np\n'), ((14336, 14378), 'textwrap.fill', 'textwrap.fill', (['"""Noiseless time series"""', '(12)'], {}), "('Noiseless time series', 12)\n", (14349, 14378), False, 'import textwrap\n'), ((14447, 14494), 'textwrap.fill', 'textwrap.fill', (['"""Hodrick-Prescott smoothing"""', '(12)'], {}), "('Hodrick-Prescott smoothing', 12)\n", (14460, 14494), False, 'import textwrap\n'), ((14571, 14621), 'textwrap.fill', 'textwrap.fill', (['"""Henderson-Whittaker smoothing"""', '(13)'], {}), "('Henderson-Whittaker smoothing', 13)\n", (14584, 14621), False, 'import textwrap\n'), ((14713, 14757), 'textwrap.fill', 'textwrap.fill', (['"""Downsampled & decimated"""', '(13)'], {}), "('Downsampled & decimated', 13)\n", (14726, 14757), False, 'import textwrap\n'), ((14821, 14853), 'textwrap.fill', 'textwrap.fill', (['"""Downsampled"""', '(13)'], {}), "('Downsampled', 13)\n", (14834, 14853), False, 'import textwrap\n'), ((15781, 15793), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (15788, 15793), True, 'import numpy as np\n'), ((15846, 15858), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (15853, 15858), True, 'import numpy as np\n'), ((16249, 16261), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (16256, 16261), True, 'import numpy as np\n'), ((16314, 16326), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (16321, 16326), True, 'import numpy as np\n'), ((19755, 19764), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (19762, 19764), True, 'import matplotlib.pyplot as plt\n'), ((19964, 20001), 'textwrap.fill', 'textwrap.fill', (['"""Symmetric signal"""', '(10)'], {}), "('Symmetric signal', 10)\n", (19977, 20001), False, 'import textwrap\n'), ((20108, 20150), 'textwrap.fill', 'textwrap.fill', (['"""Anti-symmetric signal"""', '(10)'], {}), "('Anti-symmetric signal', 10)\n", (20121, 20150), False, 'import textwrap\n'), ((20823, 20860), 'textwrap.fill', 'textwrap.fill', (['"""Axes of symmetry"""', '(10)'], {}), "('Axes of symmetry', 10)\n", (20836, 20860), False, 'import textwrap\n'), ((21149, 21194), 'textwrap.fill', 'textwrap.fill', (['"""Symmetric Discard maxima"""', '(10)'], {}), "('Symmetric Discard maxima', 10)\n", (21162, 21194), False, 'import textwrap\n'), ((21263, 21307), 'textwrap.fill', 'textwrap.fill', (['"""Symmetric Anchor maxima"""', '(10)'], {}), "('Symmetric Anchor maxima', 10)\n", (21276, 21307), False, 'import textwrap\n'), ((21385, 21427), 'textwrap.fill', 'textwrap.fill', (['"""Anti-Symmetric maxima"""', '(10)'], {}), "('Anti-Symmetric maxima', 10)\n", (21398, 21427), False, 'import textwrap\n'), ((21502, 21539), 'textwrap.fill', 'textwrap.fill', (['"""Symmetric maxima"""', '(10)'], {}), "('Symmetric maxima', 10)\n", (21515, 21539), False, 'import textwrap\n'), ((26015, 26024), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (26022, 26024), True, 'import matplotlib.pyplot as plt\n'), ((26056, 26065), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (26063, 26065), True, 'import matplotlib.pyplot as plt\n'), ((26125, 26134), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (26132, 26134), True, 'import matplotlib.pyplot as plt\n'), ((28058, 28098), 'textwrap.fill', 'textwrap.fill', (['"""Slope-based maximum"""', '(11)'], {}), "('Slope-based maximum', 11)\n", (28071, 28098), False, 'import textwrap\n'), ((28199, 28239), 'textwrap.fill', 'textwrap.fill', (['"""Slope-based minimum"""', '(11)'], {}), "('Slope-based minimum', 11)\n", 
(28212, 28239), False, 'import textwrap\n'), ((28360, 28409), 'textwrap.fill', 'textwrap.fill', (['"""Improved slope-based maximum"""', '(11)'], {}), "('Improved slope-based maximum', 11)\n", (28373, 28409), False, 'import textwrap\n'), ((28532, 28581), 'textwrap.fill', 'textwrap.fill', (['"""Improved slope-based minimum"""', '(11)'], {}), "('Improved slope-based minimum', 11)\n", (28545, 28581), False, 'import textwrap\n'), ((32975, 32984), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (32982, 32984), True, 'import matplotlib.pyplot as plt\n'), ((33188, 33222), 'textwrap.fill', 'textwrap.fill', (['"""Huang maximum"""', '(10)'], {}), "('Huang maximum', 10)\n", (33201, 33222), False, 'import textwrap\n'), ((33289, 33323), 'textwrap.fill', 'textwrap.fill', (['"""Huang minimum"""', '(10)'], {}), "('Huang minimum', 10)\n", (33302, 33323), False, 'import textwrap\n'), ((33414, 33451), 'textwrap.fill', 'textwrap.fill', (['"""Coughlin maximum"""', '(14)'], {}), "('Coughlin maximum', 14)\n", (33427, 33451), False, 'import textwrap\n'), ((33542, 33579), 'textwrap.fill', 'textwrap.fill', (['"""Coughlin minimum"""', '(14)'], {}), "('Coughlin minimum', 14)\n", (33555, 33579), False, 'import textwrap\n'), ((33667, 33703), 'textwrap.fill', 'textwrap.fill', (['"""Average maximum"""', '(14)'], {}), "('Average maximum', 14)\n", (33680, 33703), False, 'import textwrap\n'), ((33786, 33822), 'textwrap.fill', 'textwrap.fill', (['"""Average minimum"""', '(14)'], {}), "('Average minimum', 14)\n", (33799, 33822), False, 'import textwrap\n'), ((34015, 34061), 'textwrap.fill', 'textwrap.fill', (['"""Huang Characteristic Wave"""', '(14)'], {}), "('Huang Characteristic Wave', 14)\n", (34028, 34061), False, 'import textwrap\n'), ((34129, 34178), 'textwrap.fill', 'textwrap.fill', (['"""Coughlin Characteristic Wave"""', '(14)'], {}), "('Coughlin Characteristic Wave', 14)\n", (34142, 34178), False, 'import textwrap\n'), ((35679, 35705), 'numpy.cos', 'np.cos', (['(2 * np.pi * t / 50)'], {}), '(2 * np.pi * t / 50)\n', (35685, 35705), True, 'import numpy as np\n'), ((35749, 35776), 'numpy.sin', 'np.sin', (['(2 * np.pi * t / 200)'], {}), '(2 * np.pi * t / 200)\n', (35755, 35776), True, 'import numpy as np\n'), ((41520, 41529), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (41527, 41529), True, 'import matplotlib.pyplot as plt\n'), ((41731, 41771), 'textwrap.fill', 'textwrap.fill', (['"""Extrapolated signal"""', '(12)'], {}), "('Extrapolated signal', 12)\n", (41744, 41771), False, 'import textwrap\n'), ((42003, 42043), 'textwrap.fill', 'textwrap.fill', (['"""Extrapolated maxima"""', '(12)'], {}), "('Extrapolated maxima', 12)\n", (42016, 42043), False, 'import textwrap\n'), ((42140, 42180), 'textwrap.fill', 'textwrap.fill', (['"""Extrapolated minima"""', '(12)'], {}), "('Extrapolated minima', 12)\n", (42153, 42180), False, 'import textwrap\n'), ((42226, 42238), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (42233, 42238), True, 'import numpy as np\n'), ((42293, 42335), 'textwrap.fill', 'textwrap.fill', (['"""Neural network inputs"""', '(13)'], {}), "('Neural network inputs', 13)\n", (42306, 42335), False, 'import textwrap\n'), ((42453, 42465), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (42460, 42465), True, 'import numpy as np\n'), ((42589, 42601), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (42596, 42601), True, 'import numpy as np\n'), ((42778, 42790), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (42785, 42790), True, 'import numpy as np\n'), ((42966, 42978), 'numpy.ones', 
'np.ones', (['(100)'], {}), '(100)\n', (42973, 42978), True, 'import numpy as np\n'), ((43050, 43062), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (43057, 43062), True, 'import numpy as np\n'), ((43146, 43158), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (43153, 43158), True, 'import numpy as np\n'), ((43236, 43279), 'textwrap.fill', 'textwrap.fill', (['"""Neural network targets"""', '(13)'], {}), "('Neural network targets', 13)\n", (43249, 43279), False, 'import textwrap\n'), ((43397, 43409), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (43404, 43409), True, 'import numpy as np\n'), ((43536, 43548), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (43543, 43548), True, 'import numpy as np\n'), ((43728, 43740), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (43735, 43740), True, 'import numpy as np\n'), ((43919, 43931), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (43926, 43931), True, 'import numpy as np\n'), ((44006, 44018), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (44013, 44018), True, 'import numpy as np\n'), ((44585, 44601), 'numpy.cos', 'np.cos', (['(2 * time)'], {}), '(2 * time)\n', (44591, 44601), True, 'import numpy as np\n'), ((44604, 44620), 'numpy.cos', 'np.cos', (['(4 * time)'], {}), '(4 * time)\n', (44610, 44620), True, 'import numpy as np\n'), ((45747, 45810), 'textwrap.fill', 'textwrap.fill', (['"""Sum of IMF 1, IMF 2, & IMF 3 with 51 knots"""', '(21)'], {}), "('Sum of IMF 1, IMF 2, & IMF 3 with 51 knots', 21)\n", (45760, 45810), False, 'import textwrap\n'), ((45997, 46020), 'numpy.linspace', 'np.linspace', (['(-5)', '(5)', '(101)'], {}), '(-5, 5, 101)\n', (46008, 46020), True, 'import numpy as np\n'), ((46067, 46079), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (46074, 46079), True, 'import numpy as np\n'), ((46334, 46346), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (46341, 46346), True, 'import numpy as np\n'), ((46420, 46432), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (46427, 46432), True, 'import numpy as np\n'), ((46468, 46480), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (46475, 46480), True, 'import numpy as np\n'), ((46545, 46557), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (46552, 46557), True, 'import numpy as np\n'), ((46909, 46966), 'textwrap.fill', 'textwrap.fill', (['"""Sum of IMF 1 and IMF 2 with 31 knots"""', '(19)'], {}), "('Sum of IMF 1 and IMF 2 with 31 knots', 19)\n", (46922, 46966), False, 'import textwrap\n'), ((47023, 47080), 'textwrap.fill', 'textwrap.fill', (['"""Sum of IMF 2 and IMF 3 with 51 knots"""', '(19)'], {}), "('Sum of IMF 2 and IMF 3 with 51 knots', 19)\n", (47036, 47080), False, 'import textwrap\n'), ((47251, 47274), 'numpy.linspace', 'np.linspace', (['(-5)', '(5)', '(101)'], {}), '(-5, 5, 101)\n', (47262, 47274), True, 'import numpy as np\n'), ((47321, 47333), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (47328, 47333), True, 'import numpy as np\n'), ((47771, 47783), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (47778, 47783), True, 'import numpy as np\n'), ((47857, 47869), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (47864, 47869), True, 'import numpy as np\n'), ((47905, 47917), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (47912, 47917), True, 'import numpy as np\n'), ((47982, 47994), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (47989, 47994), True, 'import numpy as np\n'), ((48445, 48468), 'numpy.linspace', 'np.linspace', (['(-5)', '(5)', '(101)'], {}), '(-5, 5, 101)\n', (48456, 48468), 
True, 'import numpy as np\n'), ((48515, 48527), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (48522, 48527), True, 'import numpy as np\n'), ((49002, 49014), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (49009, 49014), True, 'import numpy as np\n'), ((49088, 49100), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (49095, 49100), True, 'import numpy as np\n'), ((49136, 49148), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (49143, 49148), True, 'import numpy as np\n'), ((49213, 49225), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (49220, 49225), True, 'import numpy as np\n'), ((49655, 49718), 'textwrap.fill', 'textwrap.fill', (['"""Sum of IMF 1, IMF 2, & IMF 3 with 51 knots"""', '(21)'], {}), "('Sum of IMF 1, IMF 2, & IMF 3 with 51 knots', 21)\n", (49668, 49718), False, 'import textwrap\n'), ((49779, 49802), 'numpy.linspace', 'np.linspace', (['(-5)', '(5)', '(101)'], {}), '(-5, 5, 101)\n', (49790, 49802), True, 'import numpy as np\n'), ((49849, 49861), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (49856, 49861), True, 'import numpy as np\n'), ((50414, 50471), 'textwrap.fill', 'textwrap.fill', (['"""Sum of IMF 1 and IMF 2 with 31 knots"""', '(19)'], {}), "('Sum of IMF 1 and IMF 2 with 31 knots', 19)\n", (50427, 50471), False, 'import textwrap\n'), ((50528, 50585), 'textwrap.fill', 'textwrap.fill', (['"""Sum of IMF 2 and IMF 3 with 51 knots"""', '(19)'], {}), "('Sum of IMF 2 and IMF 3 with 51 knots', 19)\n", (50541, 50585), False, 'import textwrap\n'), ((50646, 50669), 'numpy.linspace', 'np.linspace', (['(-5)', '(5)', '(101)'], {}), '(-5, 5, 101)\n', (50657, 50669), True, 'import numpy as np\n'), ((50716, 50728), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (50723, 50728), True, 'import numpy as np\n'), ((51471, 51494), 'numpy.linspace', 'np.linspace', (['(-5)', '(5)', '(101)'], {}), '(-5, 5, 101)\n', (51482, 51494), True, 'import numpy as np\n'), ((51541, 51553), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (51548, 51553), True, 'import numpy as np\n'), ((52183, 52192), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (52190, 52192), True, 'import matplotlib.pyplot as plt\n'), ((52224, 52233), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (52231, 52233), True, 'import matplotlib.pyplot as plt\n'), ((52569, 52595), 'numpy.ones_like', 'np.ones_like', (['x_hs[(0), :]'], {}), '(x_hs[(0), :])\n', (52581, 52595), True, 'import numpy as np\n'), ((52661, 52687), 'numpy.ones_like', 'np.ones_like', (['x_hs[(0), :]'], {}), '(x_hs[(0), :])\n', (52673, 52687), True, 'import numpy as np\n'), ((52753, 52779), 'numpy.ones_like', 'np.ones_like', (['x_hs[(0), :]'], {}), '(x_hs[(0), :])\n', (52765, 52779), True, 'import numpy as np\n'), ((54079, 54088), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (54086, 54088), True, 'import matplotlib.pyplot as plt\n'), ((54523, 54570), 'textwrap.fill', 'textwrap.fill', (['"""Unsmoothed maxima envelope"""', '(10)'], {}), "('Unsmoothed maxima envelope', 10)\n", (54536, 54570), False, 'import textwrap\n'), ((54626, 54671), 'textwrap.fill', 'textwrap.fill', (['"""Smoothed maxima envelope"""', '(10)'], {}), "('Smoothed maxima envelope', 10)\n", (54639, 54671), False, 'import textwrap\n'), ((54722, 54769), 'textwrap.fill', 'textwrap.fill', (['"""Unsmoothed minima envelope"""', '(10)'], {}), "('Unsmoothed minima envelope', 10)\n", (54735, 54769), False, 'import textwrap\n'), ((54819, 54864), 'textwrap.fill', 'textwrap.fill', (['"""Smoothed minima envelope"""', '(10)'], {}), "('Smoothed minima 
envelope', 10)\n", (54832, 54864), False, 'import textwrap\n'), ((54934, 54962), 'numpy.linspace', 'np.linspace', (['(-3.0)', '(-2.0)', '(101)'], {}), '(-3.0, -2.0, 101)\n', (54945, 54962), True, 'import numpy as np\n'), ((55011, 55023), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (55018, 55023), True, 'import numpy as np\n'), ((60553, 60562), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (60560, 60562), True, 'import matplotlib.pyplot as plt\n'), ((60941, 60976), 'textwrap.fill', 'textwrap.fill', (['"""Optimal maxima"""', '(10)'], {}), "('Optimal maxima', 10)\n", (60954, 60976), False, 'import textwrap\n'), ((61083, 61118), 'textwrap.fill', 'textwrap.fill', (['"""Optimal minima"""', '(10)'], {}), "('Optimal minima', 10)\n", (61096, 61118), False, 'import textwrap\n'), ((61189, 61227), 'textwrap.fill', 'textwrap.fill', (['"""Inflection points"""', '(10)'], {}), "('Inflection points', 10)\n", (61202, 61227), False, 'import textwrap\n'), ((61281, 61314), 'textwrap.fill', 'textwrap.fill', (['"""EMD envelope"""', '(10)'], {}), "('EMD envelope', 10)\n", (61294, 61314), False, 'import textwrap\n'), ((61490, 61524), 'textwrap.fill', 'textwrap.fill', (['"""SEMD envelope"""', '(10)'], {}), "('SEMD envelope', 10)\n", (61503, 61524), False, 'import textwrap\n'), ((61719, 61753), 'textwrap.fill', 'textwrap.fill', (['"""EEMD envelope"""', '(10)'], {}), "('EEMD envelope', 10)\n", (61732, 61753), False, 'import textwrap\n'), ((61953, 61999), 'textwrap.fill', 'textwrap.fill', (['"""Inflection point envelope"""', '(10)'], {}), "('Inflection point envelope', 10)\n", (61966, 61999), False, 'import textwrap\n'), ((62062, 62108), 'textwrap.fill', 'textwrap.fill', (['"""Binomial average envelope"""', '(10)'], {}), "('Binomial average envelope', 10)\n", (62075, 62108), False, 'import textwrap\n'), ((64189, 64198), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (64196, 64198), True, 'import matplotlib.pyplot as plt\n'), ((64230, 64239), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (64237, 64239), True, 'import matplotlib.pyplot as plt\n'), ((64527, 64547), 'numpy.ones_like', 'np.ones_like', (['t[:-1]'], {}), '(t[:-1])\n', (64539, 64547), True, 'import numpy as np\n'), ((64561, 64617), 'textwrap.fill', 'textwrap.fill', (['"""Hamiltonian frequency approximation"""', '(15)'], {}), "('Hamiltonian frequency approximation', 15)\n", (64574, 64617), False, 'import textwrap\n'), ((64643, 64663), 'numpy.ones_like', 'np.ones_like', (['t[:-1]'], {}), '(t[:-1])\n', (64655, 64663), True, 'import numpy as np\n'), ((64678, 64725), 'textwrap.fill', 'textwrap.fill', (['"""Driving function frequency"""', '(15)'], {}), "('Driving function frequency', 15)\n", (64691, 64725), False, 'import textwrap\n'), ((65381, 65390), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (65388, 65390), True, 'import matplotlib.pyplot as plt\n'), ((65422, 65431), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (65429, 65431), True, 'import matplotlib.pyplot as plt\n'), ((65716, 65736), 'numpy.ones_like', 'np.ones_like', (['t[:-1]'], {}), '(t[:-1])\n', (65728, 65736), True, 'import numpy as np\n'), ((65750, 65806), 'textwrap.fill', 'textwrap.fill', (['"""Hamiltonian frequency approximation"""', '(15)'], {}), "('Hamiltonian frequency approximation', 15)\n", (65763, 65806), False, 'import textwrap\n'), ((65832, 65852), 'numpy.ones_like', 'np.ones_like', (['t[:-1]'], {}), '(t[:-1])\n', (65844, 65852), True, 'import numpy as np\n'), ((65867, 65914), 'textwrap.fill', 'textwrap.fill', (['"""Driving function 
frequency"""', '(15)'], {}), "('Driving function frequency', 15)\n", (65880, 65914), False, 'import textwrap\n'), ((66532, 66541), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (66539, 66541), True, 'import matplotlib.pyplot as plt\n'), ((66573, 66582), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (66580, 66582), True, 'import matplotlib.pyplot as plt\n'), ((67415, 67443), 'numpy.cos', 'np.cos', (['(0.04 * 2 * np.pi * t)'], {}), '(0.04 * 2 * np.pi * t)\n', (67421, 67443), True, 'import numpy as np\n'), ((68431, 68440), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (68438, 68440), True, 'import matplotlib.pyplot as plt\n'), ((68472, 68481), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (68479, 68481), True, 'import matplotlib.pyplot as plt\n'), ((68642, 68662), 'numpy.ones_like', 'np.ones_like', (['t[:-1]'], {}), '(t[:-1])\n', (68654, 68662), True, 'import numpy as np\n'), ((68676, 68732), 'textwrap.fill', 'textwrap.fill', (['"""Hamiltonian frequency approximation"""', '(15)'], {}), "('Hamiltonian frequency approximation', 15)\n", (68689, 68732), False, 'import textwrap\n'), ((68758, 68778), 'numpy.ones_like', 'np.ones_like', (['t[:-1]'], {}), '(t[:-1])\n', (68770, 68778), True, 'import numpy as np\n'), ((68793, 68840), 'textwrap.fill', 'textwrap.fill', (['"""Driving function frequency"""', '(15)'], {}), "('Driving function frequency', 15)\n", (68806, 68840), False, 'import textwrap\n'), ((70126, 70135), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (70133, 70135), True, 'import matplotlib.pyplot as plt\n'), ((70167, 70176), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (70174, 70176), True, 'import matplotlib.pyplot as plt\n'), ((70561, 70594), 'textwrap.fill', 'textwrap.fill', (['"""Annual cycle"""', '(10)'], {}), "('Annual cycle', 10)\n", (70574, 70594), False, 'import textwrap\n'), ((71234, 71243), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (71241, 71243), True, 'import matplotlib.pyplot as plt\n'), ((71275, 71284), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (71282, 71284), True, 'import matplotlib.pyplot as plt\n'), ((71666, 71699), 'textwrap.fill', 'textwrap.fill', (['"""Annual cycle"""', '(10)'], {}), "('Annual cycle', 10)\n", (71679, 71699), False, 'import textwrap\n'), ((72903, 72912), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (72910, 72912), True, 'import matplotlib.pyplot as plt\n'), ((73112, 73121), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (73119, 73121), True, 'import matplotlib.pyplot as plt\n'), ((73417, 73426), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (73424, 73426), True, 'import matplotlib.pyplot as plt\n'), ((73458, 73467), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (73465, 73467), True, 'import matplotlib.pyplot as plt\n'), ((73843, 73876), 'textwrap.fill', 'textwrap.fill', (['"""Annual cycle"""', '(10)'], {}), "('Annual cycle', 10)\n", (73856, 73876), False, 'import textwrap\n'), ((4934, 4957), 'numpy.linspace', 'np.linspace', (['(-2)', '(2)', '(101)'], {}), '(-2, 2, 101)\n', (4945, 4957), True, 'import numpy as np\n'), ((6818, 6841), 'numpy.linspace', 'np.linspace', (['(-2)', '(2)', '(101)'], {}), '(-2, 2, 101)\n', (6829, 6841), True, 'import numpy as np\n'), ((8694, 8717), 'numpy.linspace', 'np.linspace', (['(-2)', '(2)', '(101)'], {}), '(-2, 2, 101)\n', (8705, 8717), True, 'import numpy as np\n'), ((17828, 17884), 'numpy.linspace', 'np.linspace', (['((5 - 2.6 * a) * np.pi)', '((5 - a) * np.pi)', '(101)'], {}), '((5 - 2.6 * a) * np.pi, (5 - a) * 
np.pi, 101)\n', (17839, 17884), True, 'import numpy as np\n'), ((35714, 35740), 'numpy.cos', 'np.cos', (['(2 * np.pi * t / 25)'], {}), '(2 * np.pi * t / 25)\n', (35720, 35740), True, 'import numpy as np\n'), ((37195, 37220), 'numpy.abs', 'np.abs', (['average_gradients'], {}), '(average_gradients)\n', (37201, 37220), True, 'import numpy as np\n'), ((45983, 45995), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (45990, 45995), True, 'import numpy as np\n'), ((47237, 47249), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (47244, 47249), True, 'import numpy as np\n'), ((48431, 48443), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (48438, 48443), True, 'import numpy as np\n'), ((49765, 49777), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (49772, 49777), True, 'import numpy as np\n'), ((50632, 50644), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (50639, 50644), True, 'import numpy as np\n'), ((51457, 51469), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (51464, 51469), True, 'import numpy as np\n'), ((52450, 52459), 'numpy.abs', 'np.abs', (['z'], {}), '(z)\n', (52456, 52459), True, 'import numpy as np\n'), ((54920, 54932), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (54927, 54932), True, 'import numpy as np\n'), ((68401, 68410), 'numpy.abs', 'np.abs', (['z'], {}), '(z)\n', (68407, 68410), True, 'import numpy as np\n'), ((73362, 73371), 'numpy.abs', 'np.abs', (['z'], {}), '(z)\n', (73368, 73371), True, 'import numpy as np\n'), ((4920, 4932), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (4927, 4932), True, 'import numpy as np\n'), ((6804, 6816), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (6811, 6816), True, 'import numpy as np\n'), ((8680, 8692), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (8687, 8692), True, 'import numpy as np\n'), ((17948, 18004), 'numpy.linspace', 'np.linspace', (['((5 - 2.6 * a) * np.pi)', '((5 - a) * np.pi)', '(101)'], {}), '((5 - 2.6 * a) * np.pi, (5 - a) * np.pi, 101)\n', (17959, 18004), True, 'import numpy as np\n'), ((37228, 37253), 'numpy.abs', 'np.abs', (['average_gradients'], {}), '(average_gradients)\n', (37234, 37253), True, 'import numpy as np\n'), ((45861, 45936), 'numpy.var', 'np.var', (['(time_series - (imfs_51[(1), :] + imfs_51[(2), :] + imfs_51[(3), :]))'], {}), '(time_series - (imfs_51[(1), :] + imfs_51[(2), :] + imfs_51[(3), :]))\n', (45867, 45936), True, 'import numpy as np\n'), ((47131, 47188), 'numpy.var', 'np.var', (['(time_series - (imfs_31[(1), :] + imfs_31[(2), :]))'], {}), '(time_series - (imfs_31[(1), :] + imfs_31[(2), :]))\n', (47137, 47188), True, 'import numpy as np\n'), ((48343, 48380), 'numpy.var', 'np.var', (['(time_series - imfs_51[(3), :])'], {}), '(time_series - imfs_51[(3), :])\n', (48349, 48380), True, 'import numpy as np\n'), ((62888, 62906), 'numpy.cos', 'np.cos', (['(omega * ts)'], {}), '(omega * ts)\n', (62894, 62906), True, 'import numpy as np\n'), ((64487, 64498), 'numpy.abs', 'np.abs', (['hht'], {}), '(hht)\n', (64493, 64498), True, 'import numpy as np\n'), ((65676, 65687), 'numpy.abs', 'np.abs', (['hht'], {}), '(hht)\n', (65682, 65687), True, 'import numpy as np\n'), ((70498, 70509), 'numpy.abs', 'np.abs', (['hht'], {}), '(hht)\n', (70504, 70509), True, 'import numpy as np\n'), ((71603, 71614), 'numpy.abs', 'np.abs', (['hht'], {}), '(hht)\n', (71609, 71614), True, 'import numpy as np\n'), ((69898, 69922), 'numpy.ones_like', 'np.ones_like', (['IF[:, (0)]'], {}), '(IF[:, (0)])\n', (69910, 69922), True, 'import numpy as np\n'), ((72296, 72321), 
'numpy.ones_like', 'np.ones_like', (['ifs[(1), :]'], {}), '(ifs[(1), :])\n', (72308, 72321), True, 'import numpy as np\n'), ((66996, 67024), 'numpy.cos', 'np.cos', (['(0.04 * 2 * np.pi * t)'], {}), '(0.04 * 2 * np.pi * t)\n', (67002, 67024), True, 'import numpy as np\n'), ((67170, 67198), 'numpy.cos', 'np.cos', (['(0.04 * 2 * np.pi * t)'], {}), '(0.04 * 2 * np.pi * t)\n', (67176, 67198), True, 'import numpy as np\n'), ((67339, 67367), 'numpy.cos', 'np.cos', (['(0.04 * 2 * np.pi * t)'], {}), '(0.04 * 2 * np.pi * t)\n', (67345, 67367), True, 'import numpy as np\n'), ((71009, 71025), 'numpy.ones_like', 'np.ones_like', (['IF'], {}), '(IF)\n', (71021, 71025), True, 'import numpy as np\n')] |
wenhuchen/LogicNLG | GPT-distributed.py | e986516e5b6d310219215510b3fe1603d03215cd | import argparse
import logging
import torch
import torch.nn.functional as F
import numpy as np
from torch import nn
from torch.autograd import Variable
from transformers import GPT2Config
from transformers import GPT2LMHeadModel, GPT2Tokenizer, BertTokenizer
from DataLoader import *
from Model import BERTGen
from utils import sample_sequence
import torch.optim as optim
import math
import sys
import pandas
import os
import numpy
import nltk
from torch.utils.tensorboard import SummaryWriter
import warnings
from tqdm import tqdm, trange
from torch.utils.data import RandomSampler, SequentialSampler
from torch.utils.data import DataLoader as DL
import torch
from torch.utils.data.distributed import DistributedSampler
warnings.filterwarnings("ignore", category=UserWarning)
device = torch.device('cuda')
def set_seed(args):
np.random.seed(args.seed)
torch.manual_seed(args.seed)
if args.n_gpu > 0:
torch.cuda.manual_seed_all(args.seed)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("--model", default='gpt2', type=str)
parser.add_argument("--top_k", type=int, default=0)
parser.add_argument("--top_p", type=float, default=0.9)
parser.add_argument('--seed', type=int, default=42, help="random seed for initialization")
parser.add_argument('--do_train', default=False, action="store_true", help="whether to train or test the model")
    parser.add_argument('--do_rl', default=False, action="store_true", help="whether to fine-tune the model with reinforcement learning")
    parser.add_argument('--do_val', default=False, action="store_true", help="whether to run evaluation on the validation split")
parser.add_argument('--do_test', default=False, action="store_true", help="whether to compute the BLEU scores on test split")
parser.add_argument('--do_test_challenge', default=False, action="store_true", help="whether to compute the BLEU scores on challenge split")
parser.add_argument('--do_ppl', default=False, action="store_true", help="whether to compute perplexity of the model")
parser.add_argument('--do_verify', default=False, action="store_true", help="whether compute the adv-acc score on test split")
parser.add_argument('--do_verify_challenge', default=False, action="store_true", help="whether compute the adv-acc score on challenge split")
    parser.add_argument('--epoch', default=10, type=int, help="number of training epochs")
    parser.add_argument('--batch_size', default=6, type=int, help="training batch size per process")
    parser.add_argument('--local_rank', default=-1, type=int, help="local rank for distributed training (-1 disables distributed mode)")
    parser.add_argument('--learning_rate', default=2e-6, type=float, help="learning rate of the Adam optimizer")
    parser.add_argument('--dataset', default='table', type=str, help="which dataset to use")
    parser.add_argument('--every', default=50, type=int, help="log metrics and decode samples every N training steps")
    parser.add_argument('--load_from', default='', type=str, help="path of a checkpoint to load before training or testing")
    parser.add_argument('--id', default='models', type=str, help="specify the id of the experiment")
    parser.add_argument('--max_len', default=800, type=int, help="maximum input sequence length")
    parser.add_argument('--dim', default=768, type=int, help="hidden dimension of the model")
    parser.add_argument('--layers', default=3, type=int, help="number of model layers")
    parser.add_argument('--head', default=4, type=int, help="number of attention heads")
    parser.add_argument("--modelpath", type=str, default="bert-base-uncased",
                        help="name or path of the pretrained tokenizer/model")
parser.add_argument('--gradient_accumulation_steps', type=int, default=5, help="accumulation steps for gradient")
parser.add_argument('--decode_first_K', type=int, default=10000, help="For debugging purpose")
args = parser.parse_args()
if args.local_rank == -1:
device = torch.device("cuda")
args.n_gpu = 1
else:
torch.cuda.set_device(args.local_rank)
device = torch.device('cuda', args.local_rank)
torch.distributed.init_process_group(backend='nccl')
args.n_gpu = 1
args.device = device
if args.local_rank not in [-1, 0]:
torch.distributed.barrier()
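    # Barrier pattern: non-master processes wait here so that only rank 0 downloads
    # the pretrained GPT-2 weights and vocabulary; once rank 0 reaches the second
    # barrier below, the remaining ranks proceed and load the files from the local cache.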
tokenizer = GPT2Tokenizer.from_pretrained(args.model)
model = GPT2LMHeadModel.from_pretrained(args.model)
#model = nn.DataParallel(model)
model.to(args.device)
if args.local_rank == 0:
torch.distributed.barrier()
criterion = nn.CrossEntropyLoss(reduction='none', ignore_index=-1)
if args.do_train:
if args.local_rank in [-1, 0]:
if not os.path.exists(args.id):
os.mkdir(args.id)
tb_writer = SummaryWriter(log_dir='tensorboard/GPT2-{}'.format(args.model))
dataset = GPTTableDataset2('data/train_lm_preprocessed.json', tokenizer, args.max_len)
if args.local_rank == -1:
sampler = RandomSampler(dataset)
else:
sampler = DistributedSampler(dataset)
train_dataloader = DL(dataset, sampler=sampler, batch_size=args.batch_size, num_workers=0)
model.train()
optimizer = optim.Adam(model.parameters(), args.learning_rate)
avg_loss = 0
global_step = 0
if args.local_rank != -1:
model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.local_rank],
output_device=args.local_rank, find_unused_parameters=True)
else:
model = torch.nn.DataParallel(model)
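        # When a valid local_rank is given, each process wraps its own replica in
        # DistributedDataParallel pinned to a single GPU; otherwise one process uses
        # DataParallel to split every batch across all visible GPUs.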
for epoch_idx in trange(0, args.epoch, desc='Epoch', disable=args.local_rank not in [-1, 0]):
#for idx in range(0, dataset.train_len()):
for idx, batch in enumerate(tqdm(train_dataloader, desc="Iteration", disable=args.local_rank not in [-1, 0])):
batch = tuple(Variable(t).to(device) for t in batch)
trg_inp, trg_out, mask, caption = batch
inputs = torch.cat([caption, trg_inp], 1)
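                # The caption/table tokens are prepended to the target input tokens so that
                # GPT-2 conditions on the table; the loss below is computed only over the
                # target positions (logits sliced to the last trg_out.shape[1] steps), and
                # padded tokens are zeroed out via the mask before averaging.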
model.zero_grad()
optimizer.zero_grad()
logits = model(inputs)[0]
logits = logits[:, -trg_out.shape[1]:, :].contiguous()
loss = criterion(logits.view(-1, logits.shape[-1]), trg_out.view(-1))
loss = loss * mask.view(-1)
loss = loss.sum() / mask.sum()
avg_loss += loss.item()
loss.backward()
optimizer.step()
global_step += 1
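                # Every `--every` steps the master process logs the training perplexity
                # (exp of the mean token loss over the window) to TensorBoard and decodes a
                # few sampled continuations next to their ground-truth references as a quick
                # qualitative check.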
if args.local_rank in [-1, 0] and idx % args.every == 0 and idx > 0:
tb_writer.add_scalar("perplexity", math.exp(avg_loss / args.every), global_step)
fake_inputs = caption
gt_inputs = trg_out.cpu().data.numpy()
#samples = model.sample(fake_inputs, tabfeat, caption, highlight_idx, bert)
samples = sample_sequence(model, 30, fake_inputs, [])
samples = samples[:, caption.shape[1]:]
samples = samples.cpu().data.numpy()
for s, gt in zip(samples, gt_inputs):
text = tokenizer.decode(s, clean_up_tokenization_spaces=True)
text = text[: text.find(tokenizer.eos_token)]
print("PREDICTION |||||| ", text)
text = tokenizer.decode(gt, clean_up_tokenization_spaces=True)
text = text[: text.find(tokenizer.eos_token)]
print("GROUNDTRUH |||||| ",text)
break
avg_loss = 0
if args.local_rank in [-1, 0]:
if args.model == 'gpt2':
torch.save(model.state_dict(), '{}/GPT_ep{}.pt'.format(args.id, epoch_idx))
else:
torch.save(model.state_dict(), '{}/GPT_medium_ep{}.pt'.format(args.id, epoch_idx))
if args.local_rank in [-1, 0]:
tb_writer.close() | [((722, 777), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {'category': 'UserWarning'}), "('ignore', category=UserWarning)\n", (745, 777), False, 'import warnings\n'), ((788, 808), 'torch.device', 'torch.device', (['"""cuda"""'], {}), "('cuda')\n", (800, 808), False, 'import torch\n'), ((834, 859), 'numpy.random.seed', 'np.random.seed', (['args.seed'], {}), '(args.seed)\n', (848, 859), True, 'import numpy as np\n'), ((864, 892), 'torch.manual_seed', 'torch.manual_seed', (['args.seed'], {}), '(args.seed)\n', (881, 892), False, 'import torch\n'), ((1004, 1029), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1027, 1029), False, 'import argparse\n'), ((4357, 4398), 'transformers.GPT2Tokenizer.from_pretrained', 'GPT2Tokenizer.from_pretrained', (['args.model'], {}), '(args.model)\n', (4386, 4398), False, 'from transformers import GPT2LMHeadModel, GPT2Tokenizer, BertTokenizer\n'), ((4411, 4454), 'transformers.GPT2LMHeadModel.from_pretrained', 'GPT2LMHeadModel.from_pretrained', (['args.model'], {}), '(args.model)\n', (4442, 4454), False, 'from transformers import GPT2LMHeadModel, GPT2Tokenizer, BertTokenizer\n'), ((4600, 4654), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {'reduction': '"""none"""', 'ignore_index': '(-1)'}), "(reduction='none', ignore_index=-1)\n", (4619, 4654), False, 'from torch import nn\n'), ((924, 961), 'torch.cuda.manual_seed_all', 'torch.cuda.manual_seed_all', (['args.seed'], {}), '(args.seed)\n', (950, 961), False, 'import torch\n'), ((3999, 4019), 'torch.device', 'torch.device', (['"""cuda"""'], {}), "('cuda')\n", (4011, 4019), False, 'import torch\n'), ((4061, 4099), 'torch.cuda.set_device', 'torch.cuda.set_device', (['args.local_rank'], {}), '(args.local_rank)\n', (4082, 4099), False, 'import torch\n'), ((4117, 4154), 'torch.device', 'torch.device', (['"""cuda"""', 'args.local_rank'], {}), "('cuda', args.local_rank)\n", (4129, 4154), False, 'import torch\n'), ((4163, 4215), 'torch.distributed.init_process_group', 'torch.distributed.init_process_group', ([], {'backend': '"""nccl"""'}), "(backend='nccl')\n", (4199, 4215), False, 'import torch\n'), ((4312, 4339), 'torch.distributed.barrier', 'torch.distributed.barrier', ([], {}), '()\n', (4337, 4339), False, 'import torch\n'), ((4555, 4582), 'torch.distributed.barrier', 'torch.distributed.barrier', ([], {}), '()\n', (4580, 4582), False, 'import torch\n'), ((5174, 5245), 'torch.utils.data.DataLoader', 'DL', (['dataset'], {'sampler': 'sampler', 'batch_size': 'args.batch_size', 'num_workers': '(0)'}), '(dataset, sampler=sampler, batch_size=args.batch_size, num_workers=0)\n', (5176, 5245), True, 'from torch.utils.data import DataLoader as DL\n'), ((5710, 5785), 'tqdm.trange', 'trange', (['(0)', 'args.epoch'], {'desc': '"""Epoch"""', 'disable': '(args.local_rank not in [-1, 0])'}), "(0, args.epoch, desc='Epoch', disable=args.local_rank not in [-1, 0])\n", (5716, 5785), False, 'from tqdm import tqdm, trange\n'), ((5051, 5073), 'torch.utils.data.RandomSampler', 'RandomSampler', (['dataset'], {}), '(dataset)\n', (5064, 5073), False, 'from torch.utils.data import RandomSampler, SequentialSampler\n'), ((5110, 5137), 'torch.utils.data.distributed.DistributedSampler', 'DistributedSampler', (['dataset'], {}), '(dataset)\n', (5128, 5137), False, 'from torch.utils.data.distributed import DistributedSampler\n'), ((5441, 5584), 'torch.nn.parallel.DistributedDataParallel', 'torch.nn.parallel.DistributedDataParallel', (['model'], {'device_ids': 
'[args.local_rank]', 'output_device': 'args.local_rank', 'find_unused_parameters': '(True)'}), '(model, device_ids=[args.\n local_rank], output_device=args.local_rank, find_unused_parameters=True)\n', (5482, 5584), False, 'import torch\n'), ((5655, 5683), 'torch.nn.DataParallel', 'torch.nn.DataParallel', (['model'], {}), '(model)\n', (5676, 5683), False, 'import torch\n'), ((4735, 4758), 'os.path.exists', 'os.path.exists', (['args.id'], {}), '(args.id)\n', (4749, 4758), False, 'import os\n'), ((4776, 4793), 'os.mkdir', 'os.mkdir', (['args.id'], {}), '(args.id)\n', (4784, 4793), False, 'import os\n'), ((5882, 5967), 'tqdm.tqdm', 'tqdm', (['train_dataloader'], {'desc': '"""Iteration"""', 'disable': '(args.local_rank not in [-1, 0])'}), "(train_dataloader, desc='Iteration', disable=args.local_rank not in [-1, 0]\n )\n", (5886, 5967), False, 'from tqdm import tqdm, trange\n'), ((6115, 6147), 'torch.cat', 'torch.cat', (['[caption, trg_inp]', '(1)'], {}), '([caption, trg_inp], 1)\n', (6124, 6147), False, 'import torch\n'), ((7070, 7113), 'utils.sample_sequence', 'sample_sequence', (['model', '(30)', 'fake_inputs', '[]'], {}), '(model, 30, fake_inputs, [])\n', (7085, 7113), False, 'from utils import sample_sequence\n'), ((6795, 6826), 'math.exp', 'math.exp', (['(avg_loss / args.every)'], {}), '(avg_loss / args.every)\n', (6803, 6826), False, 'import math\n'), ((5995, 6006), 'torch.autograd.Variable', 'Variable', (['t'], {}), '(t)\n', (6003, 6006), False, 'from torch.autograd import Variable\n')] |
niits/BentoML | bentoml/saved_bundle/loader.py | 3954f36762e10f5df15af7e0ae6dd71f5f214261 | # Copyright 2019 Atalaya Tech, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import os
import sys
import tarfile
import logging
import tempfile
import shutil
from functools import wraps
from contextlib import contextmanager
from urllib.parse import urlparse
from typing import TYPE_CHECKING
from pathlib import PureWindowsPath, PurePosixPath
from bentoml.utils.s3 import is_s3_url
from bentoml.utils.gcs import is_gcs_url
from bentoml.exceptions import BentoMLException
from bentoml.saved_bundle.config import SavedBundleConfig
from bentoml.saved_bundle.pip_pkg import ZIPIMPORT_DIR
if TYPE_CHECKING:
from bentoml.yatai.proto.repository_pb2 import BentoServiceMetadata
logger = logging.getLogger(__name__)
def _is_http_url(bundle_path) -> bool:
try:
return urlparse(bundle_path).scheme in ["http", "https"]
except ValueError:
return False
def _is_remote_path(bundle_path) -> bool:
return isinstance(bundle_path, str) and (
is_s3_url(bundle_path) or is_gcs_url(bundle_path) or _is_http_url(bundle_path)
)
@contextmanager
def _resolve_remote_bundle_path(bundle_path):
if is_s3_url(bundle_path):
import boto3
parsed_url = urlparse(bundle_path)
bucket_name = parsed_url.netloc
object_name = parsed_url.path.lstrip('/')
s3 = boto3.client('s3')
fileobj = io.BytesIO()
s3.download_fileobj(bucket_name, object_name, fileobj)
fileobj.seek(0, 0)
elif is_gcs_url(bundle_path):
try:
from google.cloud import storage
except ImportError:
raise BentoMLException(
'"google-cloud-storage" package is required. You can install it with '
'pip: "pip install google-cloud-storage"'
)
gcs = storage.Client()
fileobj = io.BytesIO()
gcs.download_blob_to_file(bundle_path, fileobj)
fileobj.seek(0, 0)
elif _is_http_url(bundle_path):
import requests
response = requests.get(bundle_path)
if response.status_code != 200:
raise BentoMLException(
f"Error retrieving BentoService bundle. "
f"{response.status_code}: {response.text}"
)
fileobj = io.BytesIO()
fileobj.write(response.content)
fileobj.seek(0, 0)
else:
raise BentoMLException(f"Saved bundle path: '{bundle_path}' is not supported")
with tarfile.open(mode="r:gz", fileobj=fileobj) as tar:
with tempfile.TemporaryDirectory() as tmpdir:
filename = tar.getmembers()[0].name
tar.extractall(path=tmpdir)
yield os.path.join(tmpdir, filename)
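# _resolve_remote_bundle_path downloads the bundle tarball into memory (from S3, GCS,
# or plain HTTP(S)), extracts it into a temporary directory and yields the extracted
# bundle path; the temporary directory is removed automatically when the context exits.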
def resolve_remote_bundle(func):
"""Decorate a function to handle remote bundles."""
@wraps(func)
def wrapper(bundle_path, *args):
if _is_remote_path(bundle_path):
with _resolve_remote_bundle_path(bundle_path) as local_bundle_path:
return func(local_bundle_path, *args)
return func(bundle_path, *args)
return wrapper
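# Illustrative usage only (the bucket and object names below are hypothetical):
# any function decorated with @resolve_remote_bundle, e.g.
#   load_from_dir("s3://my-bucket/my_service.tar.gz")
# transparently receives the local path of the downloaded and extracted bundle.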
@resolve_remote_bundle
def load_saved_bundle_config(bundle_path) -> "SavedBundleConfig":
try:
return SavedBundleConfig.load(os.path.join(bundle_path, "bentoml.yml"))
except FileNotFoundError:
raise BentoMLException(
"BentoML can't locate config file 'bentoml.yml'"
" in saved bundle in path: {}".format(bundle_path)
)
def load_bento_service_metadata(bundle_path: str) -> "BentoServiceMetadata":
return load_saved_bundle_config(bundle_path).get_bento_service_metadata_pb()
def _find_module_file(bundle_path, service_name, module_file):
# Simply join full path when module_file is just a file name,
# e.g. module_file=="iris_classifier.py"
module_file_path = os.path.join(bundle_path, service_name, module_file)
if not os.path.isfile(module_file_path):
# Try loading without service_name prefix, for loading from a installed PyPi
module_file_path = os.path.join(bundle_path, module_file)
# When module_file is located in sub directory
# e.g. module_file=="foo/bar/iris_classifier.py"
# This needs to handle the path differences between posix and windows platform:
if not os.path.isfile(module_file_path):
if sys.platform == "win32":
# Try load a saved bundle created from posix platform on windows
module_file_path = os.path.join(
bundle_path, service_name, str(PurePosixPath(module_file))
)
if not os.path.isfile(module_file_path):
module_file_path = os.path.join(
bundle_path, str(PurePosixPath(module_file))
)
else:
# Try load a saved bundle created from windows platform on posix
module_file_path = os.path.join(
bundle_path, service_name, PureWindowsPath(module_file).as_posix()
)
if not os.path.isfile(module_file_path):
module_file_path = os.path.join(
bundle_path, PureWindowsPath(module_file).as_posix()
)
if not os.path.isfile(module_file_path):
raise BentoMLException(
"Can not locate module_file {} in saved bundle {}".format(
module_file, bundle_path
)
)
return module_file_path
@resolve_remote_bundle
def load_bento_service_class(bundle_path):
"""
Load a BentoService class from saved bundle in given path
:param bundle_path: A path to the Bento files generated by BentoService#save or
#save_to_dir, or the path to a pip-installed BentoService directory
:return: BentoService class
"""
config = load_saved_bundle_config(bundle_path)
metadata = config["metadata"]
# Find and load target module containing BentoService class from given path
module_file_path = _find_module_file(
bundle_path, metadata["service_name"], metadata["module_file"]
)
# Prepend bundle_path to sys.path for loading extra python dependencies
sys.path.insert(0, bundle_path)
sys.path.insert(0, os.path.join(bundle_path, metadata["service_name"]))
# Include zipimport modules
zipimport_dir = os.path.join(bundle_path, metadata["service_name"], ZIPIMPORT_DIR)
if os.path.exists(zipimport_dir):
for p in os.listdir(zipimport_dir):
logger.debug('adding %s to sys.path', p)
sys.path.insert(0, os.path.join(zipimport_dir, p))
module_name = metadata["module_name"]
if module_name in sys.modules:
logger.warning(
"Module `%s` already loaded, using existing imported module.", module_name
)
module = sys.modules[module_name]
elif sys.version_info >= (3, 5):
import importlib.util
spec = importlib.util.spec_from_file_location(module_name, module_file_path)
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
elif sys.version_info >= (3, 3):
from importlib.machinery import SourceFileLoader
# pylint:disable=deprecated-method
module = SourceFileLoader(module_name, module_file_path).load_module(
module_name
)
# pylint:enable=deprecated-method
else:
raise BentoMLException("BentoML requires Python 3.4 and above")
# Remove bundle_path from sys.path to avoid import naming conflicts
sys.path.remove(bundle_path)
model_service_class = module.__getattribute__(metadata["service_name"])
# Set _bento_service_bundle_path, where BentoService will load its artifacts
model_service_class._bento_service_bundle_path = bundle_path
# Set the bundle version on the class; service instances can access it via svc.version
model_service_class._bento_service_bundle_version = metadata["service_version"]
if (
model_service_class._env
and model_service_class._env._requirements_txt_file is not None
):
# Load `requirements.txt` from the bundle directory instead of the user-provided
# file path, which may only be available during the bundle save process
model_service_class._env._requirements_txt_file = os.path.join(
bundle_path, "requirements.txt"
)
return model_service_class
@resolve_remote_bundle
def safe_retrieve(bundle_path: str, target_dir: str):
"""Safely retrieve bento service to local path
Args:
bundle_path (:obj:`str`):
The path that contains saved BentoService bundle, supporting
both local file path and s3 path
target_dir (:obj:`str`):
Where the service contents should end up.
Returns:
:obj:`str`: location of safe local path
"""
shutil.copytree(bundle_path, target_dir)
@resolve_remote_bundle
def load_from_dir(bundle_path):
"""Load bento service from local file path or s3 path
Args:
bundle_path (str): The path that contains the saved BentoService bundle,
supporting both local file paths and s3 paths
Returns:
bentoml.service.BentoService: a loaded BentoService instance
"""
svc_cls = load_bento_service_class(bundle_path)
return svc_cls()
@resolve_remote_bundle
def load_bento_service_api(bundle_path, api_name=None):
bento_service = load_from_dir(bundle_path)
return bento_service.get_inference_api(api_name)
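# Illustrative sketch (hypothetical bundle path and API name) of how the loaders
# compose, so a remote bundle can be served without unpacking it by hand:
#   svc = load_from_dir("s3://my-bucket/IrisClassifier.tar.gz")
#   predict = load_bento_service_api("s3://my-bucket/IrisClassifier.tar.gz", "predict")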
| [((1194, 1221), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1211, 1221), False, 'import logging\n'), ((1635, 1657), 'bentoml.utils.s3.is_s3_url', 'is_s3_url', (['bundle_path'], {}), '(bundle_path)\n', (1644, 1657), False, 'from bentoml.utils.s3 import is_s3_url\n'), ((3286, 3297), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (3291, 3297), False, 'from functools import wraps\n'), ((4306, 4358), 'os.path.join', 'os.path.join', (['bundle_path', 'service_name', 'module_file'], {}), '(bundle_path, service_name, module_file)\n', (4318, 4358), False, 'import os\n'), ((6589, 6620), 'sys.path.insert', 'sys.path.insert', (['(0)', 'bundle_path'], {}), '(0, bundle_path)\n', (6604, 6620), False, 'import sys\n'), ((6749, 6815), 'os.path.join', 'os.path.join', (['bundle_path', "metadata['service_name']", 'ZIPIMPORT_DIR'], {}), "(bundle_path, metadata['service_name'], ZIPIMPORT_DIR)\n", (6761, 6815), False, 'import os\n'), ((6823, 6852), 'os.path.exists', 'os.path.exists', (['zipimport_dir'], {}), '(zipimport_dir)\n', (6837, 6852), False, 'import os\n'), ((7954, 7982), 'sys.path.remove', 'sys.path.remove', (['bundle_path'], {}), '(bundle_path)\n', (7969, 7982), False, 'import sys\n'), ((9256, 9296), 'shutil.copytree', 'shutil.copytree', (['bundle_path', 'target_dir'], {}), '(bundle_path, target_dir)\n', (9271, 9296), False, 'import shutil\n'), ((1702, 1723), 'urllib.parse.urlparse', 'urlparse', (['bundle_path'], {}), '(bundle_path)\n', (1710, 1723), False, 'from urllib.parse import urlparse\n'), ((1828, 1846), 'boto3.client', 'boto3.client', (['"""s3"""'], {}), "('s3')\n", (1840, 1846), False, 'import boto3\n'), ((1865, 1877), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (1875, 1877), False, 'import io\n'), ((1977, 2000), 'bentoml.utils.gcs.is_gcs_url', 'is_gcs_url', (['bundle_path'], {}), '(bundle_path)\n', (1987, 2000), False, 'from bentoml.utils.gcs import is_gcs_url\n'), ((2947, 2989), 'tarfile.open', 'tarfile.open', ([], {'mode': '"""r:gz"""', 'fileobj': 'fileobj'}), "(mode='r:gz', fileobj=fileobj)\n", (2959, 2989), False, 'import tarfile\n'), ((4370, 4402), 'os.path.isfile', 'os.path.isfile', (['module_file_path'], {}), '(module_file_path)\n', (4384, 4402), False, 'import os\n'), ((4516, 4554), 'os.path.join', 'os.path.join', (['bundle_path', 'module_file'], {}), '(bundle_path, module_file)\n', (4528, 4554), False, 'import os\n'), ((4755, 4787), 'os.path.isfile', 'os.path.isfile', (['module_file_path'], {}), '(module_file_path)\n', (4769, 4787), False, 'import os\n'), ((5659, 5691), 'os.path.isfile', 'os.path.isfile', (['module_file_path'], {}), '(module_file_path)\n', (5673, 5691), False, 'import os\n'), ((6644, 6695), 'os.path.join', 'os.path.join', (['bundle_path', "metadata['service_name']"], {}), "(bundle_path, metadata['service_name'])\n", (6656, 6695), False, 'import os\n'), ((6871, 6896), 'os.listdir', 'os.listdir', (['zipimport_dir'], {}), '(zipimport_dir)\n', (6881, 6896), False, 'import os\n'), ((8702, 8747), 'os.path.join', 'os.path.join', (['bundle_path', '"""requirements.txt"""'], {}), "(bundle_path, 'requirements.txt')\n", (8714, 8747), False, 'import os\n'), ((1479, 1501), 'bentoml.utils.s3.is_s3_url', 'is_s3_url', (['bundle_path'], {}), '(bundle_path)\n', (1488, 1501), False, 'from bentoml.utils.s3 import is_s3_url\n'), ((1505, 1528), 'bentoml.utils.gcs.is_gcs_url', 'is_gcs_url', (['bundle_path'], {}), '(bundle_path)\n', (1515, 1528), False, 'from bentoml.utils.gcs import is_gcs_url\n'), ((2298, 2314), 'google.cloud.storage.Client', 
'storage.Client', ([], {}), '()\n', (2312, 2314), False, 'from google.cloud import storage\n'), ((2333, 2345), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (2343, 2345), False, 'import io\n'), ((3011, 3040), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (3038, 3040), False, 'import tempfile\n'), ((3709, 3749), 'os.path.join', 'os.path.join', (['bundle_path', '"""bentoml.yml"""'], {}), "(bundle_path, 'bentoml.yml')\n", (3721, 3749), False, 'import os\n'), ((1287, 1308), 'urllib.parse.urlparse', 'urlparse', (['bundle_path'], {}), '(bundle_path)\n', (1295, 1308), False, 'from urllib.parse import urlparse\n'), ((2509, 2534), 'requests.get', 'requests.get', (['bundle_path'], {}), '(bundle_path)\n', (2521, 2534), False, 'import requests\n'), ((2760, 2772), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (2770, 2772), False, 'import io\n'), ((2864, 2936), 'bentoml.exceptions.BentoMLException', 'BentoMLException', (['f"""Saved bundle path: \'{bundle_path}\' is not supported"""'], {}), '(f"Saved bundle path: \'{bundle_path}\' is not supported")\n', (2880, 2936), False, 'from bentoml.exceptions import BentoMLException\n'), ((3158, 3188), 'os.path.join', 'os.path.join', (['tmpdir', 'filename'], {}), '(tmpdir, filename)\n', (3170, 3188), False, 'import os\n'), ((5055, 5087), 'os.path.isfile', 'os.path.isfile', (['module_file_path'], {}), '(module_file_path)\n', (5069, 5087), False, 'import os\n'), ((5473, 5505), 'os.path.isfile', 'os.path.isfile', (['module_file_path'], {}), '(module_file_path)\n', (5487, 5505), False, 'import os\n'), ((6982, 7012), 'os.path.join', 'os.path.join', (['zipimport_dir', 'p'], {}), '(zipimport_dir, p)\n', (6994, 7012), False, 'import os\n'), ((7819, 7876), 'bentoml.exceptions.BentoMLException', 'BentoMLException', (['"""BentoML requires Python 3.4 and above"""'], {}), "('BentoML requires Python 3.4 and above')\n", (7835, 7876), False, 'from bentoml.exceptions import BentoMLException\n'), ((2106, 2243), 'bentoml.exceptions.BentoMLException', 'BentoMLException', (['""""google-cloud-storage" package is required. You can install it with pip: "pip install google-cloud-storage\\""""'], {}), '(\n \'"google-cloud-storage" package is required. You can install it with pip: "pip install google-cloud-storage"\'\n )\n', (2122, 2243), False, 'from bentoml.exceptions import BentoMLException\n'), ((2593, 2701), 'bentoml.exceptions.BentoMLException', 'BentoMLException', (['f"""Error retrieving BentoService bundle. {response.status_code}: {response.text}"""'], {}), "(\n f'Error retrieving BentoService bundle. 
{response.status_code}: {response.text}'\n )\n", (2609, 2701), False, 'from bentoml.exceptions import BentoMLException\n'), ((4994, 5020), 'pathlib.PurePosixPath', 'PurePosixPath', (['module_file'], {}), '(module_file)\n', (5007, 5020), False, 'from pathlib import PureWindowsPath, PurePosixPath\n'), ((5175, 5201), 'pathlib.PurePosixPath', 'PurePosixPath', (['module_file'], {}), '(module_file)\n', (5188, 5201), False, 'from pathlib import PureWindowsPath, PurePosixPath\n'), ((5400, 5428), 'pathlib.PureWindowsPath', 'PureWindowsPath', (['module_file'], {}), '(module_file)\n', (5415, 5428), False, 'from pathlib import PureWindowsPath, PurePosixPath\n'), ((7658, 7705), 'importlib.machinery.SourceFileLoader', 'SourceFileLoader', (['module_name', 'module_file_path'], {}), '(module_name, module_file_path)\n', (7674, 7705), False, 'from importlib.machinery import SourceFileLoader\n'), ((5589, 5617), 'pathlib.PureWindowsPath', 'PureWindowsPath', (['module_file'], {}), '(module_file)\n', (5604, 5617), False, 'from pathlib import PureWindowsPath, PurePosixPath\n')] |
cds-snc/notifier-api | migrations/versions/0158_remove_rate_limit_default.py | 90b385ec49efbaee7e607516fc7d9f08991af813 | """
Revision ID: 0158_remove_rate_limit_default
Revises: 0157_add_rate_limit_to_service
Create Date: 2018-01-09 14:33:08.313893
"""
import sqlalchemy as sa
from alembic import op
revision = "0158_remove_rate_limit_default"
down_revision = "0157_add_rate_limit_to_service"
def upgrade():
op.execute("ALTER TABLE services ALTER rate_limit DROP DEFAULT")
op.execute("ALTER TABLE services_history ALTER rate_limit DROP DEFAULT")
def downgrade():
op.execute("ALTER TABLE services ALTER rate_limit SET DEFAULT '3000'")
op.execute("ALTER TABLE services_history ALTER rate_limit SET DEFAULT '3000'")
| [((296, 360), 'alembic.op.execute', 'op.execute', (['"""ALTER TABLE services ALTER rate_limit DROP DEFAULT"""'], {}), "('ALTER TABLE services ALTER rate_limit DROP DEFAULT')\n", (306, 360), False, 'from alembic import op\n'), ((365, 437), 'alembic.op.execute', 'op.execute', (['"""ALTER TABLE services_history ALTER rate_limit DROP DEFAULT"""'], {}), "('ALTER TABLE services_history ALTER rate_limit DROP DEFAULT')\n", (375, 437), False, 'from alembic import op\n'), ((461, 531), 'alembic.op.execute', 'op.execute', (['"""ALTER TABLE services ALTER rate_limit SET DEFAULT \'3000\'"""'], {}), '("ALTER TABLE services ALTER rate_limit SET DEFAULT \'3000\'")\n', (471, 531), False, 'from alembic import op\n'), ((536, 614), 'alembic.op.execute', 'op.execute', (['"""ALTER TABLE services_history ALTER rate_limit SET DEFAULT \'3000\'"""'], {}), '("ALTER TABLE services_history ALTER rate_limit SET DEFAULT \'3000\'")\n', (546, 614), False, 'from alembic import op\n')] |
younghk/younghk.netlify.com | gen-post.py | 605ab089252127c0b768d31afb027e8896ae33b4 | import os
import errno
from datetime import datetime
print("Generating A New Post\n")
post_name = input('Input Post Name: ')
date_time = datetime.now()
date_time_dir = date_time.strftime("%Y-%m-%d")
date_time_post = date_time.strftime("%Y-%m-%d %H:%M:%S")
p_name = post_name.replace(" ","-")
p_name = p_name.replace("[","")
p_name = p_name.replace("]","")
p_name = p_name.lower()
f_name = date_time_dir+"---"+p_name
dir = "./src/pages/articles/"+f_name+"/"
f_dir = dir+f_name+".md"
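# Worked example (hypothetical title and date): entering "[ML] My New Post" on
# 2019-10-04 gives p_name "ml-my-new-post", so the post is generated at
# ./src/pages/articles/2019-10-04---ml-my-new-post/2019-10-04---ml-my-new-post.md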
try:
if not(os.path.isdir(dir)):
os.makedirs(os.path.join(dir))
except OSError as e:
if e.errno != errno.EEXIST:
print("Failed to create directory!!!!!")
raise
print("Generating post : ",f_dir)
with open(f_dir, 'w') as f:
f.write('---')
f.write('\n')
f.write('draft: true')
f.write('\n')
f.write('title: \"'+post_name+'\"')
f.write('\n')
f.write('date: \"'+date_time_post+'\"')
f.write('\n')
f.write('layout: post')
f.write('\n')
f.write('path: \"/posts/'+p_name+'/\"')
f.write('\n')
f.write('category: \"\"')
f.write('\n')
f.write('tags: ')
f.write('\n')
f.write('description: ""')
f.write('\n')
f.write('---')
f.write('\n')
print("Done :)") | [((140, 154), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (152, 154), False, 'from datetime import datetime\n'), ((504, 522), 'os.path.isdir', 'os.path.isdir', (['dir'], {}), '(dir)\n', (517, 522), False, 'import os\n'), ((545, 562), 'os.path.join', 'os.path.join', (['dir'], {}), '(dir)\n', (557, 562), False, 'import os\n')] |
Lyle-zhang/RMG-Py | rmgpy/reactionTest.py | 273eb51fa3c175562056c85d7d61814d5fa2986d | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This module contains unit tests of the rmgpy.reaction module.
"""
import numpy
import unittest
from external.wip import work_in_progress
from rmgpy.species import Species, TransitionState
from rmgpy.reaction import Reaction
from rmgpy.statmech.translation import Translation, IdealGasTranslation
from rmgpy.statmech.rotation import Rotation, LinearRotor, NonlinearRotor, KRotor, SphericalTopRotor
from rmgpy.statmech.vibration import Vibration, HarmonicOscillator
from rmgpy.statmech.torsion import Torsion, HinderedRotor
from rmgpy.statmech.conformer import Conformer
from rmgpy.kinetics import Arrhenius
from rmgpy.thermo import Wilhoit
import rmgpy.constants as constants
################################################################################
class PseudoSpecies:
"""
Can be used in place of a :class:`rmg.species.Species` for isomorphism checks.
PseudoSpecies('a') is isomorphic with PseudoSpecies('A')
but nothing else.
"""
def __init__(self, label):
self.label = label
def __repr__(self):
return "PseudoSpecies('{0}')".format(self.label)
def __str__(self):
return self.label
def isIsomorphic(self, other):
return self.label.lower() == other.label.lower()
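# Illustrative behaviour of the case-insensitive matching described in the docstring:
#   PseudoSpecies('a').isIsomorphic(PseudoSpecies('A'))   # True
#   PseudoSpecies('a').isIsomorphic(PseudoSpecies('b'))   # False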
class TestReactionIsomorphism(unittest.TestCase):
"""
Contains unit tests of the isomorphism testing of the Reaction class.
"""
def makeReaction(self,reaction_string):
""""
Make a Reaction (containing PseudoSpecies) of from a string like 'Ab=CD'
"""
reactants, products = reaction_string.split('=')
reactants = [PseudoSpecies(i) for i in reactants]
products = [PseudoSpecies(i) for i in products]
return Reaction(reactants=reactants, products=products)
def test1to1(self):
r1 = self.makeReaction('A=B')
self.assertTrue(r1.isIsomorphic(self.makeReaction('a=B')))
self.assertTrue(r1.isIsomorphic(self.makeReaction('b=A')))
self.assertFalse(r1.isIsomorphic(self.makeReaction('B=a'),eitherDirection=False))
self.assertFalse(r1.isIsomorphic(self.makeReaction('A=C')))
self.assertFalse(r1.isIsomorphic(self.makeReaction('A=BB')))
def test1to2(self):
r1 = self.makeReaction('A=BC')
self.assertTrue(r1.isIsomorphic(self.makeReaction('a=Bc')))
self.assertTrue(r1.isIsomorphic(self.makeReaction('cb=a')))
self.assertTrue(r1.isIsomorphic(self.makeReaction('a=cb'),eitherDirection=False))
self.assertFalse(r1.isIsomorphic(self.makeReaction('bc=a'),eitherDirection=False))
self.assertFalse(r1.isIsomorphic(self.makeReaction('a=c')))
self.assertFalse(r1.isIsomorphic(self.makeReaction('ab=c')))
def test2to2(self):
r1 = self.makeReaction('AB=CD')
self.assertTrue(r1.isIsomorphic(self.makeReaction('ab=cd')))
self.assertTrue(r1.isIsomorphic(self.makeReaction('ab=dc'),eitherDirection=False))
self.assertTrue(r1.isIsomorphic(self.makeReaction('dc=ba')))
self.assertFalse(r1.isIsomorphic(self.makeReaction('cd=ab'),eitherDirection=False))
self.assertFalse(r1.isIsomorphic(self.makeReaction('ab=ab')))
self.assertFalse(r1.isIsomorphic(self.makeReaction('ab=cde')))
def test2to3(self):
r1 = self.makeReaction('AB=CDE')
self.assertTrue(r1.isIsomorphic(self.makeReaction('ab=cde')))
self.assertTrue(r1.isIsomorphic(self.makeReaction('ba=edc'),eitherDirection=False))
self.assertTrue(r1.isIsomorphic(self.makeReaction('dec=ba')))
self.assertFalse(r1.isIsomorphic(self.makeReaction('cde=ab'),eitherDirection=False))
self.assertFalse(r1.isIsomorphic(self.makeReaction('ab=abc')))
self.assertFalse(r1.isIsomorphic(self.makeReaction('abe=cde')))
class TestReaction(unittest.TestCase):
"""
Contains unit tests of the Reaction class.
"""
def setUp(self):
"""
A method that is called prior to each unit test in this class.
"""
ethylene = Species(
label = 'C2H4',
conformer = Conformer(
E0 = (44.7127, 'kJ/mol'),
modes = [
IdealGasTranslation(
mass = (28.0313, 'amu'),
),
NonlinearRotor(
inertia = (
[3.41526, 16.6498, 20.065],
'amu*angstrom^2',
),
symmetry = 4,
),
HarmonicOscillator(
frequencies = (
[828.397, 970.652, 977.223, 1052.93, 1233.55, 1367.56, 1465.09, 1672.25, 3098.46, 3111.7, 3165.79, 3193.54],
'cm^-1',
),
),
],
spinMultiplicity = 1,
opticalIsomers = 1,
),
)
hydrogen = Species(
label = 'H',
conformer = Conformer(
E0 = (211.794, 'kJ/mol'),
modes = [
IdealGasTranslation(
mass = (1.00783, 'amu'),
),
],
spinMultiplicity = 2,
opticalIsomers = 1,
),
)
ethyl = Species(
label = 'C2H5',
conformer = Conformer(
E0 = (111.603, 'kJ/mol'),
modes = [
IdealGasTranslation(
mass = (29.0391, 'amu'),
),
NonlinearRotor(
inertia = (
[4.8709, 22.2353, 23.9925],
'amu*angstrom^2',
),
symmetry = 1,
),
HarmonicOscillator(
frequencies = (
[482.224, 791.876, 974.355, 1051.48, 1183.21, 1361.36, 1448.65, 1455.07, 1465.48, 2688.22, 2954.51, 3033.39, 3101.54, 3204.73],
'cm^-1',
),
),
HinderedRotor(
inertia = (1.11481, 'amu*angstrom^2'),
symmetry = 6,
barrier = (0.244029, 'kJ/mol'),
semiclassical = None,
),
],
spinMultiplicity = 2,
opticalIsomers = 1,
),
)
TS = TransitionState(
label = 'TS',
conformer = Conformer(
E0 = (266.694, 'kJ/mol'),
modes = [
IdealGasTranslation(
mass = (29.0391, 'amu'),
),
NonlinearRotor(
inertia = (
[6.78512, 22.1437, 22.2114],
'amu*angstrom^2',
),
symmetry = 1,
),
HarmonicOscillator(
frequencies = (
[412.75, 415.206, 821.495, 924.44, 982.714, 1024.16, 1224.21, 1326.36, 1455.06, 1600.35, 3101.46, 3110.55, 3175.34, 3201.88],
'cm^-1',
),
),
],
spinMultiplicity = 2,
opticalIsomers = 1,
),
frequency = (-750.232, 'cm^-1'),
)
self.reaction = Reaction(
reactants = [hydrogen, ethylene],
products = [ethyl],
kinetics = Arrhenius(
A = (501366000.0, 'cm^3/(mol*s)'),
n = 1.637,
Ea = (4.32508, 'kJ/mol'),
T0 = (1, 'K'),
Tmin = (300, 'K'),
Tmax = (2500, 'K'),
),
transitionState = TS,
)
# CC(=O)O[O]
acetylperoxy = Species(
label='acetylperoxy',
thermo=Wilhoit(Cp0=(4.0*constants.R,"J/(mol*K)"), CpInf=(21.0*constants.R,"J/(mol*K)"), a0=-3.95, a1=9.26, a2=-15.6, a3=8.55, B=(500.0,"K"), H0=(-6.151e+04,"J/mol"), S0=(-790.2,"J/(mol*K)")),
)
# C[C]=O
acetyl = Species(
label='acetyl',
thermo=Wilhoit(Cp0=(4.0*constants.R,"J/(mol*K)"), CpInf=(15.5*constants.R,"J/(mol*K)"), a0=0.2541, a1=-0.4712, a2=-4.434, a3=2.25, B=(500.0,"K"), H0=(-1.439e+05,"J/mol"), S0=(-524.6,"J/(mol*K)")),
)
# [O][O]
oxygen = Species(
label='oxygen',
thermo=Wilhoit(Cp0=(3.5*constants.R,"J/(mol*K)"), CpInf=(4.5*constants.R,"J/(mol*K)"), a0=-0.9324, a1=26.18, a2=-70.47, a3=44.12, B=(500.0,"K"), H0=(1.453e+04,"J/mol"), S0=(-12.19,"J/(mol*K)")),
)
self.reaction2 = Reaction(
reactants=[acetyl, oxygen],
products=[acetylperoxy],
kinetics = Arrhenius(
A = (2.65e12, 'cm^3/(mol*s)'),
n = 0.0,
Ea = (0.0, 'kJ/mol'),
T0 = (1, 'K'),
Tmin = (300, 'K'),
Tmax = (2000, 'K'),
),
)
def testIsIsomerization(self):
"""
Test the Reaction.isIsomerization() method.
"""
isomerization = Reaction(reactants=[Species()], products=[Species()])
association = Reaction(reactants=[Species(),Species()], products=[Species()])
dissociation = Reaction(reactants=[Species()], products=[Species(),Species()])
bimolecular = Reaction(reactants=[Species(),Species()], products=[Species(),Species()])
self.assertTrue(isomerization.isIsomerization())
self.assertFalse(association.isIsomerization())
self.assertFalse(dissociation.isIsomerization())
self.assertFalse(bimolecular.isIsomerization())
def testIsAssociation(self):
"""
Test the Reaction.isAssociation() method.
"""
isomerization = Reaction(reactants=[Species()], products=[Species()])
association = Reaction(reactants=[Species(),Species()], products=[Species()])
dissociation = Reaction(reactants=[Species()], products=[Species(),Species()])
bimolecular = Reaction(reactants=[Species(),Species()], products=[Species(),Species()])
self.assertFalse(isomerization.isAssociation())
self.assertTrue(association.isAssociation())
self.assertFalse(dissociation.isAssociation())
self.assertFalse(bimolecular.isAssociation())
def testIsDissociation(self):
"""
Test the Reaction.isDissociation() method.
"""
isomerization = Reaction(reactants=[Species()], products=[Species()])
association = Reaction(reactants=[Species(),Species()], products=[Species()])
dissociation = Reaction(reactants=[Species()], products=[Species(),Species()])
bimolecular = Reaction(reactants=[Species(),Species()], products=[Species(),Species()])
self.assertFalse(isomerization.isDissociation())
self.assertFalse(association.isDissociation())
self.assertTrue(dissociation.isDissociation())
self.assertFalse(bimolecular.isDissociation())
def testHasTemplate(self):
"""
Test the Reaction.hasTemplate() method.
"""
reactants = self.reaction.reactants[:]
products = self.reaction.products[:]
self.assertTrue(self.reaction.hasTemplate(reactants, products))
self.assertTrue(self.reaction.hasTemplate(products, reactants))
self.assertFalse(self.reaction2.hasTemplate(reactants, products))
self.assertFalse(self.reaction2.hasTemplate(products, reactants))
reactants.reverse()
products.reverse()
self.assertTrue(self.reaction.hasTemplate(reactants, products))
self.assertTrue(self.reaction.hasTemplate(products, reactants))
self.assertFalse(self.reaction2.hasTemplate(reactants, products))
self.assertFalse(self.reaction2.hasTemplate(products, reactants))
reactants = self.reaction2.reactants[:]
products = self.reaction2.products[:]
self.assertFalse(self.reaction.hasTemplate(reactants, products))
self.assertFalse(self.reaction.hasTemplate(products, reactants))
self.assertTrue(self.reaction2.hasTemplate(reactants, products))
self.assertTrue(self.reaction2.hasTemplate(products, reactants))
reactants.reverse()
products.reverse()
self.assertFalse(self.reaction.hasTemplate(reactants, products))
self.assertFalse(self.reaction.hasTemplate(products, reactants))
self.assertTrue(self.reaction2.hasTemplate(reactants, products))
self.assertTrue(self.reaction2.hasTemplate(products, reactants))
def testEnthalpyOfReaction(self):
"""
Test the Reaction.getEnthalpyOfReaction() method.
"""
Tlist = numpy.arange(200.0, 2001.0, 200.0, numpy.float64)
Hlist0 = [float(v) for v in ['-146007', '-145886', '-144195', '-141973', '-139633', '-137341', '-135155', '-133093', '-131150', '-129316']]
Hlist = self.reaction2.getEnthalpiesOfReaction(Tlist)
for i in range(len(Tlist)):
self.assertAlmostEqual(Hlist[i] / 1000., Hlist0[i] / 1000., 2)
def testEntropyOfReaction(self):
"""
Test the Reaction.getEntropyOfReaction() method.
"""
Tlist = numpy.arange(200.0, 2001.0, 200.0, numpy.float64)
Slist0 = [float(v) for v in ['-156.793', '-156.872', '-153.504', '-150.317', '-147.707', '-145.616', '-143.93', '-142.552', '-141.407', '-140.441']]
Slist = self.reaction2.getEntropiesOfReaction(Tlist)
for i in range(len(Tlist)):
self.assertAlmostEqual(Slist[i], Slist0[i], 2)
def testFreeEnergyOfReaction(self):
"""
Test the Reaction.getFreeEnergyOfReaction() method.
"""
Tlist = numpy.arange(200.0, 2001.0, 200.0, numpy.float64)
Glist0 = [float(v) for v in ['-114648', '-83137.2', '-52092.4', '-21719.3', '8073.53', '37398.1', '66346.8', '94990.6', '123383', '151565']]
Glist = self.reaction2.getFreeEnergiesOfReaction(Tlist)
for i in range(len(Tlist)):
self.assertAlmostEqual(Glist[i] / 1000., Glist0[i] / 1000., 2)
def testEquilibriumConstantKa(self):
"""
Test the Reaction.getEquilibriumConstant() method.
"""
Tlist = numpy.arange(200.0, 2001.0, 200.0, numpy.float64)
Kalist0 = [float(v) for v in ['8.75951e+29', '7.1843e+10', '34272.7', '26.1877', '0.378696', '0.0235579', '0.00334673', '0.000792389', '0.000262777', '0.000110053']]
Kalist = self.reaction2.getEquilibriumConstants(Tlist, type='Ka')
for i in range(len(Tlist)):
self.assertAlmostEqual(Kalist[i] / Kalist0[i], 1.0, 4)
def testEquilibriumConstantKc(self):
"""
Test the Reaction.getEquilibriumConstant() method.
"""
Tlist = numpy.arange(200.0, 2001.0, 200.0, numpy.float64)
Kclist0 = [float(v) for v in ['1.45661e+28', '2.38935e+09', '1709.76', '1.74189', '0.0314866', '0.00235045', '0.000389568', '0.000105413', '3.93273e-05', '1.83006e-05']]
Kclist = self.reaction2.getEquilibriumConstants(Tlist, type='Kc')
for i in range(len(Tlist)):
self.assertAlmostEqual(Kclist[i] / Kclist0[i], 1.0, 4)
def testEquilibriumConstantKp(self):
"""
Test the Reaction.getEquilibriumConstant() method.
"""
Tlist = numpy.arange(200.0, 2001.0, 200.0, numpy.float64)
Kplist0 = [float(v) for v in ['8.75951e+24', '718430', '0.342727', '0.000261877', '3.78696e-06', '2.35579e-07', '3.34673e-08', '7.92389e-09', '2.62777e-09', '1.10053e-09']]
Kplist = self.reaction2.getEquilibriumConstants(Tlist, type='Kp')
for i in range(len(Tlist)):
self.assertAlmostEqual(Kplist[i] / Kplist0[i], 1.0, 4)
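# Note on the reference values in the three tests above: this association reaction
# has dn = -1, so with standard pressure P0 = 1e5 Pa the forms are related (in SI
# units) by
#   Kp = Ka * P0**dn    and    Kc = Ka * (P0 / (constants.R * T))**dn
# which is why Kp is a factor of 1e5 smaller than Ka at every temperature.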
def testStoichiometricCoefficient(self):
"""
Test the Reaction.getStoichiometricCoefficient() method.
"""
for reactant in self.reaction.reactants:
self.assertEqual(self.reaction.getStoichiometricCoefficient(reactant), -1)
for product in self.reaction.products:
self.assertEqual(self.reaction.getStoichiometricCoefficient(product), 1)
for reactant in self.reaction2.reactants:
self.assertEqual(self.reaction.getStoichiometricCoefficient(reactant), 0)
for product in self.reaction2.products:
self.assertEqual(self.reaction.getStoichiometricCoefficient(product), 0)
def testRateCoefficient(self):
"""
Test the Reaction.getRateCoefficient() method.
"""
Tlist = numpy.arange(200.0, 2001.0, 200.0, numpy.float64)
P = 1e5
for T in Tlist:
self.assertAlmostEqual(self.reaction.getRateCoefficient(T, P) / self.reaction.kinetics.getRateCoefficient(T), 1.0, 6)
def testGenerateReverseRateCoefficient(self):
"""
Test the Reaction.generateReverseRateCoefficient() method.
"""
Tlist = numpy.arange(200.0, 2001.0, 200.0, numpy.float64)
P = 1e5
reverseKinetics = self.reaction2.generateReverseRateCoefficient()
for T in Tlist:
kr0 = self.reaction2.getRateCoefficient(T, P) / self.reaction2.getEquilibriumConstant(T)
kr = reverseKinetics.getRateCoefficient(T)
self.assertAlmostEqual(kr0 / kr, 1.0, 0)
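# The reference value kr0 above follows from detailed balance: the forward and
# reverse rate coefficients are related by kf(T) / kr(T) = Keq(T), so dividing the
# forward rate by the equilibrium constant gives the expected reverse rate.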
def testGenerateReverseRateCoefficientArrhenius(self):
"""
Test the Reaction.generateReverseRateCoefficient() method works for the Arrhenius format.
"""
original_kinetics = Arrhenius(
A = (2.65e12, 'cm^3/(mol*s)'),
n = 0.0,
Ea = (0.0, 'kJ/mol'),
T0 = (1, 'K'),
Tmin = (300, 'K'),
Tmax = (2000, 'K'),
)
self.reaction2.kinetics = original_kinetics
reverseKinetics = self.reaction2.generateReverseRateCoefficient()
self.reaction2.kinetics = reverseKinetics
# reverse reactants, products to ensure Keq is correctly computed
self.reaction2.reactants, self.reaction2.products = self.reaction2.products, self.reaction2.reactants
reversereverseKinetics = self.reaction2.generateReverseRateCoefficient()
# check that reverting the reverse yields the original
Tlist = numpy.arange(original_kinetics.Tmin.value_si, original_kinetics.Tmax.value_si, 200.0, numpy.float64)
P = 1e5
for T in Tlist:
korig = original_kinetics.getRateCoefficient(T, P)
krevrev = reversereverseKinetics.getRateCoefficient(T, P)
self.assertAlmostEqual(korig / krevrev, 1.0, 0)
@work_in_progress
def testGenerateReverseRateCoefficientArrheniusEP(self):
"""
Test the Reaction.generateReverseRateCoefficient() method works for the ArrheniusEP format.
"""
from rmgpy.kinetics import ArrheniusEP
original_kinetics = ArrheniusEP(
A = (2.65e12, 'cm^3/(mol*s)'),
n = 0.0,
alpha = 0.5,
E0 = (41.84, 'kJ/mol'),
Tmin = (300, 'K'),
Tmax = (2000, 'K'),
)
self.reaction2.kinetics = original_kinetics
reverseKinetics = self.reaction2.generateReverseRateCoefficient()
self.reaction2.kinetics = reverseKinetics
# reverse reactants, products to ensure Keq is correctly computed
self.reaction2.reactants, self.reaction2.products = self.reaction2.products, self.reaction2.reactants
reversereverseKinetics = self.reaction2.generateReverseRateCoefficient()
# check that reverting the reverse yields the original
Tlist = numpy.arange(original_kinetics.Tmin, original_kinetics.Tmax, 200.0, numpy.float64)
P = 1e5
for T in Tlist:
korig = original_kinetics.getRateCoefficient(T, P)
krevrev = reversereverseKinetics.getRateCoefficient(T, P)
self.assertAlmostEqual(korig / krevrev, 1.0, 0)
def testGenerateReverseRateCoefficientPDepArrhenius(self):
"""
Test the Reaction.generateReverseRateCoefficient() method works for the PDepArrhenius format.
"""
from rmgpy.kinetics import PDepArrhenius
arrhenius0 = Arrhenius(
A = (1.0e6,"s^-1"),
n = 1.0,
Ea = (10.0,"kJ/mol"),
T0 = (300.0,"K"),
Tmin = (300.0,"K"),
Tmax = (2000.0,"K"),
comment = """This data is completely made up""",
)
arrhenius1 = Arrhenius(
A = (1.0e12,"s^-1"),
n = 1.0,
Ea = (20.0,"kJ/mol"),
T0 = (300.0,"K"),
Tmin = (300.0,"K"),
Tmax = (2000.0,"K"),
comment = """This data is completely made up""",
)
pressures = numpy.array([0.1, 10.0])
arrhenius = [arrhenius0, arrhenius1]
Tmin = 300.0
Tmax = 2000.0
Pmin = 0.1
Pmax = 10.0
comment = """This data is completely made up"""
original_kinetics = PDepArrhenius(
pressures = (pressures,"bar"),
arrhenius = arrhenius,
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
Pmin = (Pmin,"bar"),
Pmax = (Pmax,"bar"),
comment = comment,
)
self.reaction2.kinetics = original_kinetics
reverseKinetics = self.reaction2.generateReverseRateCoefficient()
self.reaction2.kinetics = reverseKinetics
# reverse reactants, products to ensure Keq is correctly computed
self.reaction2.reactants, self.reaction2.products = self.reaction2.products, self.reaction2.reactants
reversereverseKinetics = self.reaction2.generateReverseRateCoefficient()
# check that reverting the reverse yields the original
Tlist = numpy.arange(Tmin, Tmax, 200.0, numpy.float64)
P = 1e5
for T in Tlist:
korig = original_kinetics.getRateCoefficient(T, P)
krevrev = reversereverseKinetics.getRateCoefficient(T, P)
self.assertAlmostEqual(korig / krevrev, 1.0, 0)
def testGenerateReverseRateCoefficientMultiArrhenius(self):
"""
Test the Reaction.generateReverseRateCoefficient() method works for the MultiArrhenius format.
"""
from rmgpy.kinetics import MultiArrhenius
pressures = numpy.array([0.1, 10.0])
Tmin = 300.0
Tmax = 2000.0
Pmin = 0.1
Pmax = 10.0
comment = """This data is completely made up"""
arrhenius = [
Arrhenius(
A = (9.3e-14,"cm^3/(molecule*s)"),
n = 0.0,
Ea = (4740*constants.R*0.001,"kJ/mol"),
T0 = (1,"K"),
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
comment = comment,
),
Arrhenius(
A = (1.4e-9,"cm^3/(molecule*s)"),
n = 0.0,
Ea = (11200*constants.R*0.001,"kJ/mol"),
T0 = (1,"K"),
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
comment = comment,
),
]
original_kinetics = MultiArrhenius(
arrhenius = arrhenius,
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
comment = comment,
)
self.reaction2.kinetics = original_kinetics
reverseKinetics = self.reaction2.generateReverseRateCoefficient()
self.reaction2.kinetics = reverseKinetics
# reverse reactants, products to ensure Keq is correctly computed
self.reaction2.reactants, self.reaction2.products = self.reaction2.products, self.reaction2.reactants
reversereverseKinetics = self.reaction2.generateReverseRateCoefficient()
# check that reverting the reverse yields the original
Tlist = numpy.arange(Tmin, Tmax, 200.0, numpy.float64)
P = 1e5
for T in Tlist:
korig = original_kinetics.getRateCoefficient(T, P)
krevrev = reversereverseKinetics.getRateCoefficient(T, P)
self.assertAlmostEqual(korig / krevrev, 1.0, 0)
def testGenerateReverseRateCoefficientMultiPDepArrhenius(self):
"""
Test the Reaction.generateReverseRateCoefficient() method works for the MultiPDepArrhenius format.
"""
from rmgpy.kinetics import PDepArrhenius, MultiPDepArrhenius
Tmin = 350.
Tmax = 1500.
Pmin = 1e-1
Pmax = 1e1
pressures = numpy.array([1e-1,1e1])
comment = 'CH3 + C2H6 <=> CH4 + C2H5 (Baulch 2005)'
arrhenius = [
PDepArrhenius(
pressures = (pressures,"bar"),
arrhenius = [
Arrhenius(
A = (9.3e-16,"cm^3/(molecule*s)"),
n = 0.0,
Ea = (4740*constants.R*0.001,"kJ/mol"),
T0 = (1,"K"),
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
comment = comment,
),
Arrhenius(
A = (9.3e-14,"cm^3/(molecule*s)"),
n = 0.0,
Ea = (4740*constants.R*0.001,"kJ/mol"),
T0 = (1,"K"),
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
comment = comment,
),
],
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
Pmin = (Pmin,"bar"),
Pmax = (Pmax,"bar"),
comment = comment,
),
PDepArrhenius(
pressures = (pressures,"bar"),
arrhenius = [
Arrhenius(
A = (1.4e-11,"cm^3/(molecule*s)"),
n = 0.0,
Ea = (11200*constants.R*0.001,"kJ/mol"),
T0 = (1,"K"),
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
comment = comment,
),
Arrhenius(
A = (1.4e-9,"cm^3/(molecule*s)"),
n = 0.0,
Ea = (11200*constants.R*0.001,"kJ/mol"),
T0 = (1,"K"),
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
comment = comment,
),
],
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
Pmin = (Pmin,"bar"),
Pmax = (Pmax,"bar"),
comment = comment,
),
]
original_kinetics = MultiPDepArrhenius(
arrhenius = arrhenius,
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
Pmin = (Pmin,"bar"),
Pmax = (Pmax,"bar"),
comment = comment,
)
self.reaction2.kinetics = original_kinetics
reverseKinetics = self.reaction2.generateReverseRateCoefficient()
self.reaction2.kinetics = reverseKinetics
# reverse reactants, products to ensure Keq is correctly computed
self.reaction2.reactants, self.reaction2.products = self.reaction2.products, self.reaction2.reactants
reversereverseKinetics = self.reaction2.generateReverseRateCoefficient()
# check that reverting the reverse yields the original
Tlist = numpy.arange(Tmin, Tmax, 200.0, numpy.float64)
P = 1e5
for T in Tlist:
korig = original_kinetics.getRateCoefficient(T, P)
krevrev = reversereverseKinetics.getRateCoefficient(T, P)
self.assertAlmostEqual(korig / krevrev, 1.0, 0)
def testGenerateReverseRateCoefficientThirdBody(self):
"""
Test the Reaction.generateReverseRateCoefficient() method works for the ThirdBody format.
"""
from rmgpy.kinetics import ThirdBody
arrheniusLow = Arrhenius(
A = (2.62e+33,"cm^6/(mol^2*s)"),
n = -4.76,
Ea = (10.21,"kJ/mol"),
T0 = (1,"K"),
)
efficiencies = {"C": 3, "C(=O)=O": 2, "CC": 3, "O": 6, "[Ar]": 0.7, "[C]=O": 1.5, "[H][H]": 2}
Tmin = 300.
Tmax = 2000.
Pmin = 0.01
Pmax = 100.
comment = """H + CH3 -> CH4"""
thirdBody = ThirdBody(
arrheniusLow = arrheniusLow,
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
Pmin = (Pmin,"bar"),
Pmax = (Pmax,"bar"),
efficiencies = efficiencies,
comment = comment,
)
original_kinetics = thirdBody
self.reaction2.kinetics = original_kinetics
reverseKinetics = self.reaction2.generateReverseRateCoefficient()
self.reaction2.kinetics = reverseKinetics
# reverse reactants, products to ensure Keq is correctly computed
self.reaction2.reactants, self.reaction2.products = self.reaction2.products, self.reaction2.reactants
reversereverseKinetics = self.reaction2.generateReverseRateCoefficient()
# check that reverting the reverse yields the original
Tlist = numpy.arange(Tmin, Tmax, 200.0, numpy.float64)
P = 1e5
for T in Tlist:
korig = original_kinetics.getRateCoefficient(T, P)
krevrev = reversereverseKinetics.getRateCoefficient(T, P)
self.assertAlmostEqual(korig / krevrev, 1.0, 0)
def testGenerateReverseRateCoefficientLindemann(self):
"""
Test the Reaction.generateReverseRateCoefficient() method works for the Lindemann format.
"""
from rmgpy.kinetics import Lindemann
arrheniusHigh = Arrhenius(
A = (1.39e+16,"cm^3/(mol*s)"),
n = -0.534,
Ea = (2.243,"kJ/mol"),
T0 = (1,"K"),
)
arrheniusLow = Arrhenius(
A = (2.62e+33,"cm^6/(mol^2*s)"),
n = -4.76,
Ea = (10.21,"kJ/mol"),
T0 = (1,"K"),
)
efficiencies = {"C": 3, "C(=O)=O": 2, "CC": 3, "O": 6, "[Ar]": 0.7, "[C]=O": 1.5, "[H][H]": 2}
Tmin = 300.
Tmax = 2000.
Pmin = 0.01
Pmax = 100.
comment = """H + CH3 -> CH4"""
lindemann = Lindemann(
arrheniusHigh = arrheniusHigh,
arrheniusLow = arrheniusLow,
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
Pmin = (Pmin,"bar"),
Pmax = (Pmax,"bar"),
efficiencies = efficiencies,
comment = comment,
)
original_kinetics = lindemann
self.reaction2.kinetics = original_kinetics
reverseKinetics = self.reaction2.generateReverseRateCoefficient()
self.reaction2.kinetics = reverseKinetics
# reverse reactants, products to ensure Keq is correctly computed
self.reaction2.reactants, self.reaction2.products = self.reaction2.products, self.reaction2.reactants
reversereverseKinetics = self.reaction2.generateReverseRateCoefficient()
# check that reverting the reverse yields the original
Tlist = numpy.arange(Tmin, Tmax, 200.0, numpy.float64)
P = 1e5
for T in Tlist:
korig = original_kinetics.getRateCoefficient(T, P)
krevrev = reversereverseKinetics.getRateCoefficient(T, P)
self.assertAlmostEqual(korig / krevrev, 1.0, 0)
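# For reference, the Lindemann fall-off expression exercised here is
#   k(T, P) = k_inf(T) * Pr / (1 + Pr),   with Pr = k0(T) * [M] / k_inf(T)
# (the Troe form tested below multiplies this by a broadening factor F); the test
# simply checks that reversing the reaction twice recovers the original k(T, P).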
def testGenerateReverseRateCoefficientTroe(self):
"""
Test the Reaction.generateReverseRateCoefficient() method works for the Troe format.
"""
from rmgpy.kinetics import Troe
arrheniusHigh = Arrhenius(
A = (1.39e+16,"cm^3/(mol*s)"),
n = -0.534,
Ea = (2.243,"kJ/mol"),
T0 = (1,"K"),
)
arrheniusLow = Arrhenius(
A = (2.62e+33,"cm^6/(mol^2*s)"),
n = -4.76,
Ea = (10.21,"kJ/mol"),
T0 = (1,"K"),
)
alpha = 0.783
T3 = 74
T1 = 2941
T2 = 6964
efficiencies = {"C": 3, "C(=O)=O": 2, "CC": 3, "O": 6, "[Ar]": 0.7, "[C]=O": 1.5, "[H][H]": 2}
Tmin = 300.
Tmax = 2000.
Pmin = 0.01
Pmax = 100.
comment = """H + CH3 -> CH4"""
troe = Troe(
arrheniusHigh = arrheniusHigh,
arrheniusLow = arrheniusLow,
alpha = alpha,
T3 = (T3,"K"),
T1 = (T1,"K"),
T2 = (T2,"K"),
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
Pmin = (Pmin,"bar"),
Pmax = (Pmax,"bar"),
efficiencies = efficiencies,
comment = comment,
)
original_kinetics = troe
self.reaction2.kinetics = original_kinetics
reverseKinetics = self.reaction2.generateReverseRateCoefficient()
self.reaction2.kinetics = reverseKinetics
# reverse reactants, products to ensure Keq is correctly computed
self.reaction2.reactants, self.reaction2.products = self.reaction2.products, self.reaction2.reactants
reversereverseKinetics = self.reaction2.generateReverseRateCoefficient()
# check that reverting the reverse yields the original
Tlist = numpy.arange(Tmin, Tmax, 200.0, numpy.float64)
P = 1e5
for T in Tlist:
korig = original_kinetics.getRateCoefficient(T, P)
krevrev = reversereverseKinetics.getRateCoefficient(T, P)
self.assertAlmostEqual(korig / krevrev, 1.0, 0)
def testTSTCalculation(self):
"""
A test of the transition state theory k(T) calculation function,
using the reaction H + C2H4 -> C2H5.
"""
Tlist = 1000.0/numpy.arange(0.4, 3.35, 0.01)
klist = numpy.array([self.reaction.calculateTSTRateCoefficient(T) for T in Tlist])
arrhenius = Arrhenius().fitToData(Tlist, klist, kunits='m^3/(mol*s)')
klist2 = numpy.array([arrhenius.getRateCoefficient(T) for T in Tlist])
# Check that the correct Arrhenius parameters are returned
self.assertAlmostEqual(arrhenius.A.value_si, 2265.2488, delta=1e-2)
self.assertAlmostEqual(arrhenius.n.value_si, 1.45419, delta=1e-4)
self.assertAlmostEqual(arrhenius.Ea.value_si, 6645.24, delta=1e-2)
# Check that the fit is satisfactory (defined here as always within 5%)
for i in range(len(Tlist)):
self.assertAlmostEqual(klist[i], klist2[i], delta=5e-2 * klist[i])
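# The k(T) values fitted above come from canonical transition state theory,
#   k(T) = kappa(T) * (kB * T / h) * (Q_TS / (Q_H * Q_C2H4)) * exp(-E0 / (kB * T))
# where the Q's are the partition functions (per unit volume for this bimolecular
# case) of the conformers defined in setUp, E0 is the barrier height, and kappa(T)
# is a tunnelling correction (effectively 1 here, since no tunnelling model is
# attached to the transition state).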
def testPickle(self):
"""
Test that a Reaction object can be successfully pickled and
unpickled with no loss of information.
"""
import cPickle
reaction = cPickle.loads(cPickle.dumps(self.reaction,-1))
self.assertEqual(len(self.reaction.reactants), len(reaction.reactants))
self.assertEqual(len(self.reaction.products), len(reaction.products))
for reactant0, reactant in zip(self.reaction.reactants, reaction.reactants):
self.assertAlmostEqual(reactant0.conformer.E0.value_si / 1e6, reactant.conformer.E0.value_si / 1e6, 2)
self.assertEqual(reactant0.conformer.E0.units, reactant.conformer.E0.units)
for product0, product in zip(self.reaction.products, reaction.products):
self.assertAlmostEqual(product0.conformer.E0.value_si / 1e6, product.conformer.E0.value_si / 1e6, 2)
self.assertEqual(product0.conformer.E0.units, product.conformer.E0.units)
self.assertAlmostEqual(self.reaction.transitionState.conformer.E0.value_si / 1e6, reaction.transitionState.conformer.E0.value_si / 1e6, 2)
self.assertEqual(self.reaction.transitionState.conformer.E0.units, reaction.transitionState.conformer.E0.units)
self.assertAlmostEqual(self.reaction.transitionState.frequency.value_si, reaction.transitionState.frequency.value_si, 2)
self.assertEqual(self.reaction.transitionState.frequency.units, reaction.transitionState.frequency.units)
self.assertAlmostEqual(self.reaction.kinetics.A.value_si, reaction.kinetics.A.value_si, delta=1e-6)
self.assertAlmostEqual(self.reaction.kinetics.n.value_si, reaction.kinetics.n.value_si, delta=1e-6)
self.assertAlmostEqual(self.reaction.kinetics.T0.value_si, reaction.kinetics.T0.value_si, delta=1e-6)
self.assertAlmostEqual(self.reaction.kinetics.Ea.value_si, reaction.kinetics.Ea.value_si, delta=1e-6)
self.assertEqual(self.reaction.kinetics.comment, reaction.kinetics.comment)
self.assertEqual(self.reaction.duplicate, reaction.duplicate)
self.assertEqual(self.reaction.degeneracy, reaction.degeneracy)
def testOutput(self):
"""
Test that a Reaction object can be successfully reconstructed
from its repr() output with no loss of information.
"""
exec('reaction = %r' % (self.reaction))
self.assertEqual(len(self.reaction.reactants), len(reaction.reactants))
self.assertEqual(len(self.reaction.products), len(reaction.products))
for reactant0, reactant in zip(self.reaction.reactants, reaction.reactants):
self.assertAlmostEqual(reactant0.conformer.E0.value_si / 1e6, reactant.conformer.E0.value_si / 1e6, 2)
self.assertEqual(reactant0.conformer.E0.units, reactant.conformer.E0.units)
for product0, product in zip(self.reaction.products, reaction.products):
self.assertAlmostEqual(product0.conformer.E0.value_si / 1e6, product.conformer.E0.value_si / 1e6, 2)
self.assertEqual(product0.conformer.E0.units, product.conformer.E0.units)
self.assertAlmostEqual(self.reaction.transitionState.conformer.E0.value_si / 1e6, reaction.transitionState.conformer.E0.value_si / 1e6, 2)
self.assertEqual(self.reaction.transitionState.conformer.E0.units, reaction.transitionState.conformer.E0.units)
self.assertAlmostEqual(self.reaction.transitionState.frequency.value_si, reaction.transitionState.frequency.value_si, 2)
self.assertEqual(self.reaction.transitionState.frequency.units, reaction.transitionState.frequency.units)
self.assertAlmostEqual(self.reaction.kinetics.A.value_si, reaction.kinetics.A.value_si, delta=1e-6)
self.assertAlmostEqual(self.reaction.kinetics.n.value_si, reaction.kinetics.n.value_si, delta=1e-6)
self.assertAlmostEqual(self.reaction.kinetics.T0.value_si, reaction.kinetics.T0.value_si, delta=1e-6)
self.assertAlmostEqual(self.reaction.kinetics.Ea.value_si, reaction.kinetics.Ea.value_si, delta=1e-6)
self.assertEqual(self.reaction.kinetics.comment, reaction.kinetics.comment)
self.assertEqual(self.reaction.duplicate, reaction.duplicate)
self.assertEqual(self.reaction.degeneracy, reaction.degeneracy)
################################################################################
if __name__ == '__main__':
unittest.main(testRunner=unittest.TextTestRunner(verbosity=2))
| [((1776, 1824), 'rmgpy.reaction.Reaction', 'Reaction', ([], {'reactants': 'reactants', 'products': 'products'}), '(reactants=reactants, products=products)\n', (1784, 1824), False, 'from rmgpy.reaction import Reaction\n'), ((13183, 13232), 'numpy.arange', 'numpy.arange', (['(200.0)', '(2001.0)', '(200.0)', 'numpy.float64'], {}), '(200.0, 2001.0, 200.0, numpy.float64)\n', (13195, 13232), False, 'import numpy\n'), ((13689, 13738), 'numpy.arange', 'numpy.arange', (['(200.0)', '(2001.0)', '(200.0)', 'numpy.float64'], {}), '(200.0, 2001.0, 200.0, numpy.float64)\n', (13701, 13738), False, 'import numpy\n'), ((14193, 14242), 'numpy.arange', 'numpy.arange', (['(200.0)', '(2001.0)', '(200.0)', 'numpy.float64'], {}), '(200.0, 2001.0, 200.0, numpy.float64)\n', (14205, 14242), False, 'import numpy\n'), ((14708, 14757), 'numpy.arange', 'numpy.arange', (['(200.0)', '(2001.0)', '(200.0)', 'numpy.float64'], {}), '(200.0, 2001.0, 200.0, numpy.float64)\n', (14720, 14757), False, 'import numpy\n'), ((15250, 15299), 'numpy.arange', 'numpy.arange', (['(200.0)', '(2001.0)', '(200.0)', 'numpy.float64'], {}), '(200.0, 2001.0, 200.0, numpy.float64)\n', (15262, 15299), False, 'import numpy\n'), ((15796, 15845), 'numpy.arange', 'numpy.arange', (['(200.0)', '(2001.0)', '(200.0)', 'numpy.float64'], {}), '(200.0, 2001.0, 200.0, numpy.float64)\n', (15808, 15845), False, 'import numpy\n'), ((17007, 17056), 'numpy.arange', 'numpy.arange', (['(200.0)', '(2001.0)', '(200.0)', 'numpy.float64'], {}), '(200.0, 2001.0, 200.0, numpy.float64)\n', (17019, 17056), False, 'import numpy\n'), ((17389, 17438), 'numpy.arange', 'numpy.arange', (['(200.0)', '(2001.0)', '(200.0)', 'numpy.float64'], {}), '(200.0, 2001.0, 200.0, numpy.float64)\n', (17401, 17438), False, 'import numpy\n'), ((17972, 18097), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(2650000000000.0, 'cm^3/(mol*s)')", 'n': '(0.0)', 'Ea': "(0.0, 'kJ/mol')", 'T0': "(1, 'K')", 'Tmin': "(300, 'K')", 'Tmax': "(2000, 'K')"}), "(A=(2650000000000.0, 'cm^3/(mol*s)'), n=0.0, Ea=(0.0, 'kJ/mol'),\n T0=(1, 'K'), Tmin=(300, 'K'), Tmax=(2000, 'K'))\n", (17981, 18097), False, 'from rmgpy.kinetics import Arrhenius\n'), ((18760, 18865), 'numpy.arange', 'numpy.arange', (['original_kinetics.Tmin.value_si', 'original_kinetics.Tmax.value_si', '(200.0)', 'numpy.float64'], {}), '(original_kinetics.Tmin.value_si, original_kinetics.Tmax.\n value_si, 200.0, numpy.float64)\n', (18772, 18865), False, 'import numpy\n'), ((19378, 19506), 'rmgpy.kinetics.ArrheniusEP', 'ArrheniusEP', ([], {'A': "(2650000000000.0, 'cm^3/(mol*s)')", 'n': '(0.0)', 'alpha': '(0.5)', 'E0': "(41.84, 'kJ/mol')", 'Tmin': "(300, 'K')", 'Tmax': "(2000, 'K')"}), "(A=(2650000000000.0, 'cm^3/(mol*s)'), n=0.0, alpha=0.5, E0=(\n 41.84, 'kJ/mol'), Tmin=(300, 'K'), Tmax=(2000, 'K'))\n", (19389, 19506), False, 'from rmgpy.kinetics import ArrheniusEP\n'), ((20168, 20255), 'numpy.arange', 'numpy.arange', (['original_kinetics.Tmin', 'original_kinetics.Tmax', '(200.0)', 'numpy.float64'], {}), '(original_kinetics.Tmin, original_kinetics.Tmax, 200.0, numpy.\n float64)\n', (20180, 20255), False, 'import numpy\n'), ((20745, 20914), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(1000000.0, 's^-1')", 'n': '(1.0)', 'Ea': "(10.0, 'kJ/mol')", 'T0': "(300.0, 'K')", 'Tmin': "(300.0, 'K')", 'Tmax': "(2000.0, 'K')", 'comment': '"""This data is completely made up"""'}), "(A=(1000000.0, 's^-1'), n=1.0, Ea=(10.0, 'kJ/mol'), T0=(300.0, 'K'\n ), Tmin=(300.0, 'K'), Tmax=(2000.0, 'K'), comment=\n 'This data is completely made up')\n", (20754, 
20914), False, 'from rmgpy.kinetics import Arrhenius\n'), ((21036, 21211), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(1000000000000.0, 's^-1')", 'n': '(1.0)', 'Ea': "(20.0, 'kJ/mol')", 'T0': "(300.0, 'K')", 'Tmin': "(300.0, 'K')", 'Tmax': "(2000.0, 'K')", 'comment': '"""This data is completely made up"""'}), "(A=(1000000000000.0, 's^-1'), n=1.0, Ea=(20.0, 'kJ/mol'), T0=(\n 300.0, 'K'), Tmin=(300.0, 'K'), Tmax=(2000.0, 'K'), comment=\n 'This data is completely made up')\n", (21045, 21211), False, 'from rmgpy.kinetics import Arrhenius\n'), ((21328, 21352), 'numpy.array', 'numpy.array', (['[0.1, 10.0]'], {}), '([0.1, 10.0])\n', (21339, 21352), False, 'import numpy\n'), ((21565, 21731), 'rmgpy.kinetics.PDepArrhenius', 'PDepArrhenius', ([], {'pressures': "(pressures, 'bar')", 'arrhenius': 'arrhenius', 'Tmin': "(Tmin, 'K')", 'Tmax': "(Tmax, 'K')", 'Pmin': "(Pmin, 'bar')", 'Pmax': "(Pmax, 'bar')", 'comment': 'comment'}), "(pressures=(pressures, 'bar'), arrhenius=arrhenius, Tmin=(Tmin,\n 'K'), Tmax=(Tmax, 'K'), Pmin=(Pmin, 'bar'), Pmax=(Pmax, 'bar'), comment\n =comment)\n", (21578, 21731), False, 'from rmgpy.kinetics import PDepArrhenius, MultiPDepArrhenius\n'), ((22354, 22400), 'numpy.arange', 'numpy.arange', (['Tmin', 'Tmax', '(200.0)', 'numpy.float64'], {}), '(Tmin, Tmax, 200.0, numpy.float64)\n', (22366, 22400), False, 'import numpy\n'), ((22898, 22922), 'numpy.array', 'numpy.array', (['[0.1, 10.0]'], {}), '([0.1, 10.0])\n', (22909, 22922), False, 'import numpy\n'), ((23733, 23825), 'rmgpy.kinetics.MultiArrhenius', 'MultiArrhenius', ([], {'arrhenius': 'arrhenius', 'Tmin': "(Tmin, 'K')", 'Tmax': "(Tmax, 'K')", 'comment': 'comment'}), "(arrhenius=arrhenius, Tmin=(Tmin, 'K'), Tmax=(Tmax, 'K'),\n comment=comment)\n", (23747, 23825), False, 'from rmgpy.kinetics import MultiArrhenius\n'), ((24411, 24457), 'numpy.arange', 'numpy.arange', (['Tmin', 'Tmax', '(200.0)', 'numpy.float64'], {}), '(Tmin, Tmax, 200.0, numpy.float64)\n', (24423, 24457), False, 'import numpy\n'), ((25061, 25085), 'numpy.array', 'numpy.array', (['[0.1, 10.0]'], {}), '([0.1, 10.0])\n', (25072, 25085), False, 'import numpy\n'), ((27357, 27493), 'rmgpy.kinetics.MultiPDepArrhenius', 'MultiPDepArrhenius', ([], {'arrhenius': 'arrhenius', 'Tmin': "(Tmin, 'K')", 'Tmax': "(Tmax, 'K')", 'Pmin': "(Pmin, 'bar')", 'Pmax': "(Pmax, 'bar')", 'comment': 'comment'}), "(arrhenius=arrhenius, Tmin=(Tmin, 'K'), Tmax=(Tmax, 'K'),\n Pmin=(Pmin, 'bar'), Pmax=(Pmax, 'bar'), comment=comment)\n", (27375, 27493), False, 'from rmgpy.kinetics import PDepArrhenius, MultiPDepArrhenius\n'), ((28105, 28151), 'numpy.arange', 'numpy.arange', (['Tmin', 'Tmax', '(200.0)', 'numpy.float64'], {}), '(Tmin, Tmax, 200.0, numpy.float64)\n', (28117, 28151), False, 'import numpy\n'), ((28637, 28727), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(2.62e+33, 'cm^6/(mol^2*s)')", 'n': '(-4.76)', 'Ea': "(10.21, 'kJ/mol')", 'T0': "(1, 'K')"}), "(A=(2.62e+33, 'cm^6/(mol^2*s)'), n=-4.76, Ea=(10.21, 'kJ/mol'), T0\n =(1, 'K'))\n", (28646, 28727), False, 'from rmgpy.kinetics import Arrhenius\n'), ((29033, 29197), 'rmgpy.kinetics.ThirdBody', 'ThirdBody', ([], {'arrheniusLow': 'arrheniusLow', 'Tmin': "(Tmin, 'K')", 'Tmax': "(Tmax, 'K')", 'Pmin': "(Pmin, 'bar')", 'Pmax': "(Pmax, 'bar')", 'efficiencies': 'efficiencies', 'comment': 'comment'}), "(arrheniusLow=arrheniusLow, Tmin=(Tmin, 'K'), Tmax=(Tmax, 'K'),\n Pmin=(Pmin, 'bar'), Pmax=(Pmax, 'bar'), efficiencies=efficiencies,\n comment=comment)\n", (29042, 29197), False, 'from rmgpy.kinetics import ThirdBody\n'), ((29867, 
29913), 'numpy.arange', 'numpy.arange', (['Tmin', 'Tmax', '(200.0)', 'numpy.float64'], {}), '(Tmin, Tmax, 200.0, numpy.float64)\n', (29879, 29913), False, 'import numpy\n'), ((30400, 30489), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(1.39e+16, 'cm^3/(mol*s)')", 'n': '(-0.534)', 'Ea': "(2.243, 'kJ/mol')", 'T0': "(1, 'K')"}), "(A=(1.39e+16, 'cm^3/(mol*s)'), n=-0.534, Ea=(2.243, 'kJ/mol'), T0=\n (1, 'K'))\n", (30409, 30489), False, 'from rmgpy.kinetics import Arrhenius\n'), ((30575, 30665), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(2.62e+33, 'cm^6/(mol^2*s)')", 'n': '(-4.76)', 'Ea': "(10.21, 'kJ/mol')", 'T0': "(1, 'K')"}), "(A=(2.62e+33, 'cm^6/(mol^2*s)'), n=-4.76, Ea=(10.21, 'kJ/mol'), T0\n =(1, 'K'))\n", (30584, 30665), False, 'from rmgpy.kinetics import Arrhenius\n'), ((30971, 31165), 'rmgpy.kinetics.Lindemann', 'Lindemann', ([], {'arrheniusHigh': 'arrheniusHigh', 'arrheniusLow': 'arrheniusLow', 'Tmin': "(Tmin, 'K')", 'Tmax': "(Tmax, 'K')", 'Pmin': "(Pmin, 'bar')", 'Pmax': "(Pmax, 'bar')", 'efficiencies': 'efficiencies', 'comment': 'comment'}), "(arrheniusHigh=arrheniusHigh, arrheniusLow=arrheniusLow, Tmin=(\n Tmin, 'K'), Tmax=(Tmax, 'K'), Pmin=(Pmin, 'bar'), Pmax=(Pmax, 'bar'),\n efficiencies=efficiencies, comment=comment)\n", (30980, 31165), False, 'from rmgpy.kinetics import Lindemann\n'), ((31856, 31902), 'numpy.arange', 'numpy.arange', (['Tmin', 'Tmax', '(200.0)', 'numpy.float64'], {}), '(Tmin, Tmax, 200.0, numpy.float64)\n', (31868, 31902), False, 'import numpy\n'), ((32375, 32464), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(1.39e+16, 'cm^3/(mol*s)')", 'n': '(-0.534)', 'Ea': "(2.243, 'kJ/mol')", 'T0': "(1, 'K')"}), "(A=(1.39e+16, 'cm^3/(mol*s)'), n=-0.534, Ea=(2.243, 'kJ/mol'), T0=\n (1, 'K'))\n", (32384, 32464), False, 'from rmgpy.kinetics import Arrhenius\n'), ((32550, 32640), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(2.62e+33, 'cm^6/(mol^2*s)')", 'n': '(-4.76)', 'Ea': "(10.21, 'kJ/mol')", 'T0': "(1, 'K')"}), "(A=(2.62e+33, 'cm^6/(mol^2*s)'), n=-4.76, Ea=(10.21, 'kJ/mol'), T0\n =(1, 'K'))\n", (32559, 32640), False, 'from rmgpy.kinetics import Arrhenius\n'), ((33015, 33262), 'rmgpy.kinetics.Troe', 'Troe', ([], {'arrheniusHigh': 'arrheniusHigh', 'arrheniusLow': 'arrheniusLow', 'alpha': 'alpha', 'T3': "(T3, 'K')", 'T1': "(T1, 'K')", 'T2': "(T2, 'K')", 'Tmin': "(Tmin, 'K')", 'Tmax': "(Tmax, 'K')", 'Pmin': "(Pmin, 'bar')", 'Pmax': "(Pmax, 'bar')", 'efficiencies': 'efficiencies', 'comment': 'comment'}), "(arrheniusHigh=arrheniusHigh, arrheniusLow=arrheniusLow, alpha=alpha,\n T3=(T3, 'K'), T1=(T1, 'K'), T2=(T2, 'K'), Tmin=(Tmin, 'K'), Tmax=(Tmax,\n 'K'), Pmin=(Pmin, 'bar'), Pmax=(Pmax, 'bar'), efficiencies=efficiencies,\n comment=comment)\n", (33019, 33262), False, 'from rmgpy.kinetics import Troe\n'), ((33998, 34044), 'numpy.arange', 'numpy.arange', (['Tmin', 'Tmax', '(200.0)', 'numpy.float64'], {}), '(Tmin, Tmax, 200.0, numpy.float64)\n', (34010, 34044), False, 'import numpy\n'), ((23096, 23263), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(9.3e-14, 'cm^3/(molecule*s)')", 'n': '(0.0)', 'Ea': "(4740 * constants.R * 0.001, 'kJ/mol')", 'T0': "(1, 'K')", 'Tmin': "(Tmin, 'K')", 'Tmax': "(Tmax, 'K')", 'comment': 'comment'}), "(A=(9.3e-14, 'cm^3/(molecule*s)'), n=0.0, Ea=(4740 * constants.R *\n 0.001, 'kJ/mol'), T0=(1, 'K'), Tmin=(Tmin, 'K'), Tmax=(Tmax, 'K'),\n comment=comment)\n", (23105, 23263), False, 'from rmgpy.kinetics import Arrhenius\n'), ((23401, 23569), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(1.4e-09, 
'cm^3/(molecule*s)')", 'n': '(0.0)', 'Ea': "(11200 * constants.R * 0.001, 'kJ/mol')", 'T0': "(1, 'K')", 'Tmin': "(Tmin, 'K')", 'Tmax': "(Tmax, 'K')", 'comment': 'comment'}), "(A=(1.4e-09, 'cm^3/(molecule*s)'), n=0.0, Ea=(11200 * constants.R *\n 0.001, 'kJ/mol'), T0=(1, 'K'), Tmin=(Tmin, 'K'), Tmax=(Tmax, 'K'),\n comment=comment)\n", (23410, 23569), False, 'from rmgpy.kinetics import Arrhenius\n'), ((34478, 34507), 'numpy.arange', 'numpy.arange', (['(0.4)', '(3.35)', '(0.01)'], {}), '(0.4, 3.35, 0.01)\n', (34490, 34507), False, 'import numpy\n'), ((35482, 35514), 'cPickle.dumps', 'cPickle.dumps', (['self.reaction', '(-1)'], {}), '(self.reaction, -1)\n', (35495, 35514), False, 'import cPickle\n'), ((39720, 39756), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {'verbosity': '(2)'}), '(verbosity=2)\n', (39743, 39756), False, 'import unittest\n'), ((7826, 7953), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(501366000.0, 'cm^3/(mol*s)')", 'n': '(1.637)', 'Ea': "(4.32508, 'kJ/mol')", 'T0': "(1, 'K')", 'Tmin': "(300, 'K')", 'Tmax': "(2500, 'K')"}), "(A=(501366000.0, 'cm^3/(mol*s)'), n=1.637, Ea=(4.32508, 'kJ/mol'),\n T0=(1, 'K'), Tmin=(300, 'K'), Tmax=(2500, 'K'))\n", (7835, 7953), False, 'from rmgpy.kinetics import Arrhenius\n'), ((8229, 8428), 'rmgpy.thermo.Wilhoit', 'Wilhoit', ([], {'Cp0': "(4.0 * constants.R, 'J/(mol*K)')", 'CpInf': "(21.0 * constants.R, 'J/(mol*K)')", 'a0': '(-3.95)', 'a1': '(9.26)', 'a2': '(-15.6)', 'a3': '(8.55)', 'B': "(500.0, 'K')", 'H0': "(-61510.0, 'J/mol')", 'S0': "(-790.2, 'J/(mol*K)')"}), "(Cp0=(4.0 * constants.R, 'J/(mol*K)'), CpInf=(21.0 * constants.R,\n 'J/(mol*K)'), a0=-3.95, a1=9.26, a2=-15.6, a3=8.55, B=(500.0, 'K'), H0=\n (-61510.0, 'J/mol'), S0=(-790.2, 'J/(mol*K)'))\n", (8236, 8428), False, 'from rmgpy.thermo import Wilhoit\n'), ((8515, 8719), 'rmgpy.thermo.Wilhoit', 'Wilhoit', ([], {'Cp0': "(4.0 * constants.R, 'J/(mol*K)')", 'CpInf': "(15.5 * constants.R, 'J/(mol*K)')", 'a0': '(0.2541)', 'a1': '(-0.4712)', 'a2': '(-4.434)', 'a3': '(2.25)', 'B': "(500.0, 'K')", 'H0': "(-143900.0, 'J/mol')", 'S0': "(-524.6, 'J/(mol*K)')"}), "(Cp0=(4.0 * constants.R, 'J/(mol*K)'), CpInf=(15.5 * constants.R,\n 'J/(mol*K)'), a0=0.2541, a1=-0.4712, a2=-4.434, a3=2.25, B=(500.0, 'K'),\n H0=(-143900.0, 'J/mol'), S0=(-524.6, 'J/(mol*K)'))\n", (8522, 8719), False, 'from rmgpy.thermo import Wilhoit\n'), ((8806, 9007), 'rmgpy.thermo.Wilhoit', 'Wilhoit', ([], {'Cp0': "(3.5 * constants.R, 'J/(mol*K)')", 'CpInf': "(4.5 * constants.R, 'J/(mol*K)')", 'a0': '(-0.9324)', 'a1': '(26.18)', 'a2': '(-70.47)', 'a3': '(44.12)', 'B': "(500.0, 'K')", 'H0': "(14530.0, 'J/mol')", 'S0': "(-12.19, 'J/(mol*K)')"}), "(Cp0=(3.5 * constants.R, 'J/(mol*K)'), CpInf=(4.5 * constants.R,\n 'J/(mol*K)'), a0=-0.9324, a1=26.18, a2=-70.47, a3=44.12, B=(500.0, 'K'),\n H0=(14530.0, 'J/mol'), S0=(-12.19, 'J/(mol*K)'))\n", (8813, 9007), False, 'from rmgpy.thermo import Wilhoit\n'), ((9150, 9275), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(2650000000000.0, 'cm^3/(mol*s)')", 'n': '(0.0)', 'Ea': "(0.0, 'kJ/mol')", 'T0': "(1, 'K')", 'Tmin': "(300, 'K')", 'Tmax': "(2000, 'K')"}), "(A=(2650000000000.0, 'cm^3/(mol*s)'), n=0.0, Ea=(0.0, 'kJ/mol'),\n T0=(1, 'K'), Tmin=(300, 'K'), Tmax=(2000, 'K'))\n", (9159, 9275), False, 'from rmgpy.kinetics import Arrhenius\n'), ((34619, 34630), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {}), '()\n', (34628, 34630), False, 'from rmgpy.kinetics import Arrhenius\n'), ((9562, 9571), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (9569, 9571), False, 'from 
rmgpy.species import Species, TransitionState\n'), ((9584, 9593), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (9591, 9593), False, 'from rmgpy.species import Species, TransitionState\n'), ((9638, 9647), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (9645, 9647), False, 'from rmgpy.species import Species, TransitionState\n'), ((9648, 9657), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (9655, 9657), False, 'from rmgpy.species import Species, TransitionState\n'), ((9670, 9679), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (9677, 9679), False, 'from rmgpy.species import Species, TransitionState\n'), ((9725, 9734), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (9732, 9734), False, 'from rmgpy.species import Species, TransitionState\n'), ((9747, 9756), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (9754, 9756), False, 'from rmgpy.species import Species, TransitionState\n'), ((9757, 9766), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (9764, 9766), False, 'from rmgpy.species import Species, TransitionState\n'), ((9811, 9820), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (9818, 9820), False, 'from rmgpy.species import Species, TransitionState\n'), ((9821, 9830), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (9828, 9830), False, 'from rmgpy.species import Species, TransitionState\n'), ((9843, 9852), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (9850, 9852), False, 'from rmgpy.species import Species, TransitionState\n'), ((9853, 9862), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (9860, 9862), False, 'from rmgpy.species import Species, TransitionState\n'), ((10251, 10260), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (10258, 10260), False, 'from rmgpy.species import Species, TransitionState\n'), ((10273, 10282), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (10280, 10282), False, 'from rmgpy.species import Species, TransitionState\n'), ((10327, 10336), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (10334, 10336), False, 'from rmgpy.species import Species, TransitionState\n'), ((10337, 10346), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (10344, 10346), False, 'from rmgpy.species import Species, TransitionState\n'), ((10359, 10368), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (10366, 10368), False, 'from rmgpy.species import Species, TransitionState\n'), ((10414, 10423), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (10421, 10423), False, 'from rmgpy.species import Species, TransitionState\n'), ((10436, 10445), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (10443, 10445), False, 'from rmgpy.species import Species, TransitionState\n'), ((10446, 10455), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (10453, 10455), False, 'from rmgpy.species import Species, TransitionState\n'), ((10500, 10509), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (10507, 10509), False, 'from rmgpy.species import Species, TransitionState\n'), ((10510, 10519), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (10517, 10519), False, 'from rmgpy.species import Species, TransitionState\n'), ((10532, 10541), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (10539, 10541), False, 'from rmgpy.species import Species, TransitionState\n'), ((10542, 10551), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (10549, 10551), False, 'from rmgpy.species import Species, TransitionState\n'), ((10926, 10935), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', 
(10933, 10935), False, 'from rmgpy.species import Species, TransitionState\n'), ((10948, 10957), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (10955, 10957), False, 'from rmgpy.species import Species, TransitionState\n'), ((11002, 11011), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (11009, 11011), False, 'from rmgpy.species import Species, TransitionState\n'), ((11012, 11021), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (11019, 11021), False, 'from rmgpy.species import Species, TransitionState\n'), ((11034, 11043), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (11041, 11043), False, 'from rmgpy.species import Species, TransitionState\n'), ((11089, 11098), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (11096, 11098), False, 'from rmgpy.species import Species, TransitionState\n'), ((11111, 11120), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (11118, 11120), False, 'from rmgpy.species import Species, TransitionState\n'), ((11121, 11130), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (11128, 11130), False, 'from rmgpy.species import Species, TransitionState\n'), ((11175, 11184), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (11182, 11184), False, 'from rmgpy.species import Species, TransitionState\n'), ((11185, 11194), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (11192, 11194), False, 'from rmgpy.species import Species, TransitionState\n'), ((11207, 11216), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (11214, 11216), False, 'from rmgpy.species import Species, TransitionState\n'), ((11217, 11226), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (11224, 11226), False, 'from rmgpy.species import Species, TransitionState\n'), ((25291, 25458), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(9.3e-16, 'cm^3/(molecule*s)')", 'n': '(0.0)', 'Ea': "(4740 * constants.R * 0.001, 'kJ/mol')", 'T0': "(1, 'K')", 'Tmin': "(Tmin, 'K')", 'Tmax': "(Tmax, 'K')", 'comment': 'comment'}), "(A=(9.3e-16, 'cm^3/(molecule*s)'), n=0.0, Ea=(4740 * constants.R *\n 0.001, 'kJ/mol'), T0=(1, 'K'), Tmin=(Tmin, 'K'), Tmax=(Tmax, 'K'),\n comment=comment)\n", (25300, 25458), False, 'from rmgpy.kinetics import Arrhenius\n'), ((25668, 25835), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(9.3e-14, 'cm^3/(molecule*s)')", 'n': '(0.0)', 'Ea': "(4740 * constants.R * 0.001, 'kJ/mol')", 'T0': "(1, 'K')", 'Tmin': "(Tmin, 'K')", 'Tmax': "(Tmax, 'K')", 'comment': 'comment'}), "(A=(9.3e-14, 'cm^3/(molecule*s)'), n=0.0, Ea=(4740 * constants.R *\n 0.001, 'kJ/mol'), T0=(1, 'K'), Tmin=(Tmin, 'K'), Tmax=(Tmax, 'K'),\n comment=comment)\n", (25677, 25835), False, 'from rmgpy.kinetics import Arrhenius\n'), ((26365, 26533), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(1.4e-11, 'cm^3/(molecule*s)')", 'n': '(0.0)', 'Ea': "(11200 * constants.R * 0.001, 'kJ/mol')", 'T0': "(1, 'K')", 'Tmin': "(Tmin, 'K')", 'Tmax': "(Tmax, 'K')", 'comment': 'comment'}), "(A=(1.4e-11, 'cm^3/(molecule*s)'), n=0.0, Ea=(11200 * constants.R *\n 0.001, 'kJ/mol'), T0=(1, 'K'), Tmin=(Tmin, 'K'), Tmax=(Tmax, 'K'),\n comment=comment)\n", (26374, 26533), False, 'from rmgpy.kinetics import Arrhenius\n'), ((26743, 26911), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(1.4e-09, 'cm^3/(molecule*s)')", 'n': '(0.0)', 'Ea': "(11200 * constants.R * 0.001, 'kJ/mol')", 'T0': "(1, 'K')", 'Tmin': "(Tmin, 'K')", 'Tmax': "(Tmax, 'K')", 'comment': 'comment'}), "(A=(1.4e-09, 'cm^3/(molecule*s)'), n=0.0, Ea=(11200 * constants.R *\n 0.001, 'kJ/mol'), T0=(1, 'K'), Tmin=(Tmin, 'K'), 
Tmax=(Tmax, 'K'),\n comment=comment)\n", (26752, 26911), False, 'from rmgpy.kinetics import Arrhenius\n'), ((4228, 4270), 'rmgpy.statmech.translation.IdealGasTranslation', 'IdealGasTranslation', ([], {'mass': "(28.0313, 'amu')"}), "(mass=(28.0313, 'amu'))\n", (4247, 4270), False, 'from rmgpy.statmech.translation import Translation, IdealGasTranslation\n'), ((4341, 4427), 'rmgpy.statmech.rotation.NonlinearRotor', 'NonlinearRotor', ([], {'inertia': "([3.41526, 16.6498, 20.065], 'amu*angstrom^2')", 'symmetry': '(4)'}), "(inertia=([3.41526, 16.6498, 20.065], 'amu*angstrom^2'),\n symmetry=4)\n", (4355, 4427), False, 'from rmgpy.statmech.rotation import Rotation, LinearRotor, NonlinearRotor, KRotor, SphericalTopRotor\n'), ((4603, 4762), 'rmgpy.statmech.vibration.HarmonicOscillator', 'HarmonicOscillator', ([], {'frequencies': "([828.397, 970.652, 977.223, 1052.93, 1233.55, 1367.56, 1465.09, 1672.25, \n 3098.46, 3111.7, 3165.79, 3193.54], 'cm^-1')"}), "(frequencies=([828.397, 970.652, 977.223, 1052.93, \n 1233.55, 1367.56, 1465.09, 1672.25, 3098.46, 3111.7, 3165.79, 3193.54],\n 'cm^-1'))\n", (4621, 4762), False, 'from rmgpy.statmech.vibration import Vibration, HarmonicOscillator\n'), ((5200, 5242), 'rmgpy.statmech.translation.IdealGasTranslation', 'IdealGasTranslation', ([], {'mass': "(1.00783, 'amu')"}), "(mass=(1.00783, 'amu'))\n", (5219, 5242), False, 'from rmgpy.statmech.translation import Translation, IdealGasTranslation\n'), ((5596, 5638), 'rmgpy.statmech.translation.IdealGasTranslation', 'IdealGasTranslation', ([], {'mass': "(29.0391, 'amu')"}), "(mass=(29.0391, 'amu'))\n", (5615, 5638), False, 'from rmgpy.statmech.translation import Translation, IdealGasTranslation\n'), ((5709, 5795), 'rmgpy.statmech.rotation.NonlinearRotor', 'NonlinearRotor', ([], {'inertia': "([4.8709, 22.2353, 23.9925], 'amu*angstrom^2')", 'symmetry': '(1)'}), "(inertia=([4.8709, 22.2353, 23.9925], 'amu*angstrom^2'),\n symmetry=1)\n", (5723, 5795), False, 'from rmgpy.statmech.rotation import Rotation, LinearRotor, NonlinearRotor, KRotor, SphericalTopRotor\n'), ((5971, 6149), 'rmgpy.statmech.vibration.HarmonicOscillator', 'HarmonicOscillator', ([], {'frequencies': "([482.224, 791.876, 974.355, 1051.48, 1183.21, 1361.36, 1448.65, 1455.07, \n 1465.48, 2688.22, 2954.51, 3033.39, 3101.54, 3204.73], 'cm^-1')"}), "(frequencies=([482.224, 791.876, 974.355, 1051.48, \n 1183.21, 1361.36, 1448.65, 1455.07, 1465.48, 2688.22, 2954.51, 3033.39,\n 3101.54, 3204.73], 'cm^-1'))\n", (5989, 6149), False, 'from rmgpy.statmech.vibration import Vibration, HarmonicOscillator\n'), ((6294, 6411), 'rmgpy.statmech.torsion.HinderedRotor', 'HinderedRotor', ([], {'inertia': "(1.11481, 'amu*angstrom^2')", 'symmetry': '(6)', 'barrier': "(0.244029, 'kJ/mol')", 'semiclassical': 'None'}), "(inertia=(1.11481, 'amu*angstrom^2'), symmetry=6, barrier=(\n 0.244029, 'kJ/mol'), semiclassical=None)\n", (6307, 6411), False, 'from rmgpy.statmech.torsion import Torsion, HinderedRotor\n'), ((6841, 6883), 'rmgpy.statmech.translation.IdealGasTranslation', 'IdealGasTranslation', ([], {'mass': "(29.0391, 'amu')"}), "(mass=(29.0391, 'amu'))\n", (6860, 6883), False, 'from rmgpy.statmech.translation import Translation, IdealGasTranslation\n'), ((6954, 7041), 'rmgpy.statmech.rotation.NonlinearRotor', 'NonlinearRotor', ([], {'inertia': "([6.78512, 22.1437, 22.2114], 'amu*angstrom^2')", 'symmetry': '(1)'}), "(inertia=([6.78512, 22.1437, 22.2114], 'amu*angstrom^2'),\n symmetry=1)\n", (6968, 7041), False, 'from rmgpy.statmech.rotation import Rotation, LinearRotor, 
NonlinearRotor, KRotor, SphericalTopRotor\n'), ((7217, 7392), 'rmgpy.statmech.vibration.HarmonicOscillator', 'HarmonicOscillator', ([], {'frequencies': "([412.75, 415.206, 821.495, 924.44, 982.714, 1024.16, 1224.21, 1326.36, \n 1455.06, 1600.35, 3101.46, 3110.55, 3175.34, 3201.88], 'cm^-1')"}), "(frequencies=([412.75, 415.206, 821.495, 924.44, 982.714,\n 1024.16, 1224.21, 1326.36, 1455.06, 1600.35, 3101.46, 3110.55, 3175.34,\n 3201.88], 'cm^-1'))\n", (7235, 7392), False, 'from rmgpy.statmech.vibration import Vibration, HarmonicOscillator\n')] |
albeiks/omaralbeik.com | backend/core/api_urls.py | 8d096130393919612863aac6280dffaf6e00961d | from django.conf.urls import url, include
from core.routers import OptionalTrailingSlashRouter
from blog import views as blogViews
from snippets import views as snippetsViews
from projects import views as projectsViews
from tags import views as tagsViews
from contents import views as contentsViews
from contact import views as contactViews
router = OptionalTrailingSlashRouter()
router.register(r"blog", blogViews.PostViewSet)
router.register(r"snippets", snippetsViews.SnippetViewSet)
router.register(r"languages", snippetsViews.ProgrammingLanguageViewSet)
router.register(r"projects", projectsViews.ProjectViewSet)
router.register(r"tags", tagsViews.TagViewSet)
router.register(r"contents", contentsViews.ContentViewSet)
router.register(r"contact", contactViews.MessageViewSet)
# List of url patterns for the api subdomain
urlpatterns = [
url(r"^v2/", include(router.urls)),
]
| [((353, 382), 'core.routers.OptionalTrailingSlashRouter', 'OptionalTrailingSlashRouter', ([], {}), '()\n', (380, 382), False, 'from core.routers import OptionalTrailingSlashRouter\n'), ((863, 883), 'django.conf.urls.include', 'include', (['router.urls'], {}), '(router.urls)\n', (870, 883), False, 'from django.conf.urls import url, include\n')] |
alexberndt/mobile-AGV-optimization | python/video_ADG.py | 76b97fd5aa3898fd6cb6f74f8d87140555c92af5 | """
closed-loop MILP solved to determine optimal ordering defined by ADG
"""
import sys
import yaml
import time
import matplotlib.colors as mcolors
import matplotlib
import matplotlib.pyplot as plt
import random
import logging
import networkx as nx
import csv
import statistics as stat
import os
from mip import Model, ProgressLog, xsum, maximize, minimize, BINARY, CONTINUOUS, Constr, ConstrList
sys.path.insert(1, "functions/")
from planners import *
from visualizers import *
from milp_formulation import *
from robot import *
from adg import *
from adg_node import *
from process_results import *
logger = logging.getLogger(__name__)
logging.basicConfig(format='%(name)s - %(levelname)s :: %(message)s', level=logging.INFO)
def main():
""" --------------------------- INPUTS --------------------------------- """
show_visual = False
show_ADG = True #not show_visual
run_MILP = True #False #True
save_file = False
sim_timeout = 500
# define prediction and control horizons: H_prediction >= H_control
H_prediction = np.NaN # integer value for forward node lookup
H_control = 5
random_seed = 0
mu = 0.5
robust_param = 0.0
delay_amount = 5
delayed_robot_cnt = 2
w = 1.4 # sub-optimality bound: w = 1.0 -> CBS, else ECBS!
fldr = "nuernberg_small" # auto_gen_01_nuernberg | auto_gen_00_large | auto_gen_02_simple | manual_03_maxplus
random.seed(random_seed)
np.random.seed(random_seed)
""" -------------------------------------------------------------------- """
    # initial setup: resolve data paths and run the (E)CBS planner
pwd = os.path.dirname(os.path.abspath(__file__))
logger.info(pwd)
map_file = pwd + "/data/" + fldr + "/csv_map_yaml.yaml"
robot_file = pwd + "/data/" + fldr + "/csv_robots_yaml.yaml"
robot_file_tmp = pwd + "/data/tmp/robots.yaml"
start_time = time.time()
plans = run_CBS(map_file, robot_file, w=w) # if w > 1.0, run_CBS uses ECBS!
logger.info(" with sub-optimality w={}".format(w))
logger.info(" plan statistics: {} \n".format(plans["statistics"]))
logger.debug(plans["schedule"])
# show factory map
# show_factory_map(map_file, robot_file, True)
# plt.show()
map_gen_robot_count = 10
map_gen_seedval = "NaN"
try:
map_gen_robot_count = int(sys.argv[1])
map_gen_seedval = int(sys.argv[2])
H_control = int(sys.argv[3])
robust_param = int(sys.argv[4])
random.seed(map_gen_seedval) # map_gen_seedval
np.random.seed(map_gen_seedval) # map_gen_seedval
except:
print(" no valid inputs given, ignoring ...")
# determine ADG, reverse ADG and dependency groups
ADG, robot_plan, goal_positions = determine_ADG(plans, show_graph=False)
nodes_all, edges_type_1, dependency_groups = analyze_ADG(ADG, plans, show_graph=False)
ADG_reverse = ADG.reverse(copy=False)
# initialize simulation
robots = []
solve_time = []
robots_done = []
time_to_goal = {}
colors = plt.cm.rainbow( np.arange(len(robot_plan))/len(robot_plan) )
for robot_id in robot_plan:
plan = robot_plan[robot_id]
logger.debug("Robot {} - plan: {} \t \t positions: {}".format(robot_id, plan["nodes"], plan["positions"]))
new_robot = Robot(robot_id, plan, colors[robot_id], goal_positions[robot_id])
robots.append(new_robot)
robots_done.append(False)
time_to_goal[robot_id] = 0
if show_visual:
visualizer = Visualizer(map_file, robots)
# initialize optimization MIP object m_opt
m_opt = Model('MILP_sequence', solver='CBC')
# print(m_opt.max_nodes)
pl_opt = ProgressLog()
# pl_opt.settings = "objective_value"
# print("pl_opt.settings: {}".format(pl_opt.settings))
# print("pl_opt.log: {}".format(pl_opt.log))
# pl_opt.instance = m_opt.name
# print("pl_opt.instance: {}".format(pl_opt.instance))
ADG_fig = plt.figure(figsize=(12,8))
plt.subplots_adjust(left=0, bottom=0, right=1, top=1, wspace=0, hspace=0)
metadata = dict(title='Movie Test', artist='Matplotlib',
comment='Movie support!')
writer = FFMpegWriter(fps=2, metadata=metadata)
with writer.saving(ADG_fig, "ADG_video.mp4", 500):
# run a simulation in time
k = 0
robot_IDs_to_delay = []
while (not all(robots_done)) and (k < sim_timeout):
print("pl_opt.log: {}".format(pl_opt.log))
m_opt.clear()
# show current robot status
logger.info("-------------------- @ time step k = {} --------------------".format(k))
for robot in robots:
node_info = ADG.node[robot.current_node]["data"]
logger.debug(" - Robot {} # {} @ {} => status: {}".format(robot.robot_ID, node_info.ID, node_info.s_loc, robot.status))
# solve MILP for the advanced ADG to potentially adjust ordering
res, solve_t = solve_MILP(robots, dependency_groups, ADG, ADG_reverse, H_control, H_prediction, m_opt, pl_opt, run=run_MILP, uncertainty_bound=robust_param)
solve_time.append(solve_t)
if not (res is None or res == "OptimizationStatus.OPTIMAL"):
ValueError("Optimization NOT optimal")
# ADG after MILP
if show_ADG:
#
                draw_ADG(ADG, robots, "ADG after MILP | k = {}".format(k), writer=writer)
# plt.show()
# check for cycles
try:
nx.find_cycle(ADG, orientation="original")
logger.warning("Cycle detected!!")
raise Exception("ADG has a cycle => deadlock! something is wrong with optimization")
except nx.NetworkXNoCycle:
logger.debug("no cycle detected in ADG => no deadlock. good!")
pass
if (k % delay_amount) == 0:
robot_IDs = np.arange(map_gen_robot_count)
robot_IDs_to_delay = np.random.choice(map_gen_robot_count, size=delayed_robot_cnt, replace=False)
logger.info("delaying robots (ID): {}".format(robot_IDs_to_delay))
# Advance robots if possible (dependencies have been met)
for robot in robots:
# check if all dependencies have been met, to advance to next node
node_info = ADG.node[robot.current_node]["data"]
node_dependencies_list = list(ADG_reverse.neighbors(robot.current_node))
all_dependencies_completed = True
for dependency in node_dependencies_list:
if (ADG.node[dependency]["data"].status != Status.FINISHED):
all_dependencies_completed = False
# if all dependencies are completed, the robot can advance!
# delay_amount = np.random.poisson(mu) # same sample every time
if all_dependencies_completed and k > 0: # (robot.robot_ID == 2 or k > 5)
if (not (robot.robot_ID in robot_IDs_to_delay)): # or (k < 10 or k > 20)): # or (robot.robot_ID == 3 or k > 8):
ADG.node[robot.current_node]["data"].status = Status.FINISHED
robot.advance()
if not robot.is_done():
time_to_goal[robot.robot_ID] += 1
else:
robots_done[robot.robot_ID] = True
if show_visual:
visualizer.redraw(robots, pause_length=0.1)
# return 0
k += 1
# end of while loop
total_time = 0
for idx, t in time_to_goal.items():
total_time += t
logger.info("Total time to complete missions: {}".format(total_time))
logger.info("horizon = {}".format(H_control))
logger.info("")
logger.info("Computation time:")
logger.info(" - max: {}".format(max(solve_time)))
logger.info(" - avg: {}".format(stat.mean(solve_time)))
# create data to save to YAML file
simulation_results = {}
simulation_results["parameters"] = {}
simulation_results["parameters"]["H_control"] = H_control
simulation_results["parameters"]["random seed"] = random_seed
simulation_results["parameters"]["ECBS w"] = w
simulation_results["parameters"]["mu"] = mu
simulation_results["parameters"]["robust param"] = robust_param
simulation_results["parameters"]["delay amount"] = delay_amount
simulation_results["map details"] = {}
simulation_results["map details"]["robot_count"] = map_gen_robot_count
simulation_results["map details"]["seed val"] = map_gen_seedval
simulation_results["results"] = {}
simulation_results["results"]["comp time"] = {}
simulation_results["results"]["comp time"]["solve_time"] = [solve_time]
simulation_results["results"]["comp time"]["max"] = max(solve_time)
simulation_results["results"]["comp time"]["avg"] = stat.mean(solve_time)
simulation_results["results"]["total time"] = total_time
logger.info(simulation_results)
file_name = pwd + "/results/robust_" +str(delayed_robot_cnt) + "x" + str(delay_amount) + "/res_robots_" + str(map_gen_robot_count) + "_horizon_" + str(H_control) + "_mapseed_" + str(map_gen_seedval) + "_robustparam_" + str(robust_param) + ".yaml"
if save_file:
save_to_yaml(simulation_results, file_name)
if __name__ == "__main__":
main()
| [((426, 458), 'sys.path.insert', 'sys.path.insert', (['(1)', '"""functions/"""'], {}), "(1, 'functions/')\n", (441, 458), False, 'import sys\n'), ((640, 667), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (657, 667), False, 'import logging\n'), ((668, 762), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(name)s - %(levelname)s :: %(message)s"""', 'level': 'logging.INFO'}), "(format='%(name)s - %(levelname)s :: %(message)s', level\n =logging.INFO)\n", (687, 762), False, 'import logging\n'), ((1450, 1474), 'random.seed', 'random.seed', (['random_seed'], {}), '(random_seed)\n', (1461, 1474), False, 'import random\n'), ((1876, 1887), 'time.time', 'time.time', ([], {}), '()\n', (1885, 1887), False, 'import time\n'), ((3585, 3621), 'mip.Model', 'Model', (['"""MILP_sequence"""'], {'solver': '"""CBC"""'}), "('MILP_sequence', solver='CBC')\n", (3590, 3621), False, 'from mip import Model, ProgressLog, xsum, maximize, minimize, BINARY, CONTINUOUS, Constr, ConstrList\n'), ((3664, 3677), 'mip.ProgressLog', 'ProgressLog', ([], {}), '()\n', (3675, 3677), False, 'from mip import Model, ProgressLog, xsum, maximize, minimize, BINARY, CONTINUOUS, Constr, ConstrList\n'), ((3937, 3964), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 8)'}), '(figsize=(12, 8))\n', (3947, 3964), True, 'import matplotlib.pyplot as plt\n'), ((3968, 4041), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'left': '(0)', 'bottom': '(0)', 'right': '(1)', 'top': '(1)', 'wspace': '(0)', 'hspace': '(0)'}), '(left=0, bottom=0, right=1, top=1, wspace=0, hspace=0)\n', (3987, 4041), True, 'import matplotlib.pyplot as plt\n'), ((8921, 8942), 'statistics.mean', 'stat.mean', (['solve_time'], {}), '(solve_time)\n', (8930, 8942), True, 'import statistics as stat\n'), ((1635, 1660), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1650, 1660), False, 'import os\n'), ((2464, 2492), 'random.seed', 'random.seed', (['map_gen_seedval'], {}), '(map_gen_seedval)\n', (2475, 2492), False, 'import random\n'), ((7943, 7964), 'statistics.mean', 'stat.mean', (['solve_time'], {}), '(solve_time)\n', (7952, 7964), True, 'import statistics as stat\n'), ((5522, 5564), 'networkx.find_cycle', 'nx.find_cycle', (['ADG'], {'orientation': '"""original"""'}), "(ADG, orientation='original')\n", (5535, 5564), True, 'import networkx as nx\n')] |
bcurnow/rfid-security-svc | tests/model/test_guest.py | d3806cb74d3d0cc2623ea425230dc8781ba4d8b4 | import pytest
from unittest.mock import patch
import rfidsecuritysvc.model.guest as model
from rfidsecuritysvc.model.color import Color
from rfidsecuritysvc.model.guest import Guest
from rfidsecuritysvc.model.sound import Sound
from rfidsecuritysvc.exception import SoundNotFoundError
def test_Guest(assert_model, default_sound, default_color):
assert_model(_model(1, 'first', 'last', default_sound, default_color),
Guest(1, 'first', 'last', default_sound, default_color))
@patch('rfidsecuritysvc.model.guest.table')
def test_get(table):
table.get.return_value = _default().test_to_row()
assert model.get(1) == _default()
table.get.assert_called_once_with(1)
@patch('rfidsecuritysvc.model.guest.table')
def test_get_notfound(table):
table.get.return_value = None
assert model.get(1) is None
table.get.assert_called_once_with(1)
@patch('rfidsecuritysvc.model.guest.table')
def test_list(table):
table.list.return_value = [
_default().test_to_row(),
_default(2).test_to_row(),
]
models = model.list()
table.list.assert_called_once()
assert models == [_default(), _default(2)]
@patch('rfidsecuritysvc.model.guest.table')
def test_list_noresults(table):
table.list.return_value = []
models = model.list()
table.list.assert_called_once()
assert models == []
@patch('rfidsecuritysvc.model.guest.sound_model')
@patch('rfidsecuritysvc.model.guest.table')
def test_create(table, sound, default_sound):
sound.get.return_value = default_sound
table.create.return_value = None
assert model.create('first', 'last', default_sound.id, 0xABCDEF) is None
sound.get.assert_called_once_with(default_sound.id)
table.create.assert_called_once_with('first', 'last', default_sound.id, 0xABCDEF)
@patch('rfidsecuritysvc.model.guest.sound_model')
@patch('rfidsecuritysvc.model.guest.table')
def test_create_SoundNotFoundError(table, sound, default_sound):
sound.get.return_value = None
with pytest.raises(SoundNotFoundError):
model.create('first', 'last', default_sound.id, 0xABCDEF)
sound.get.assert_called_once_with(default_sound.id)
table.create.assert_not_called()
@patch('rfidsecuritysvc.model.guest.sound_model')
@patch('rfidsecuritysvc.model.guest.table')
def test_create_no_prefs(table, sound, default_sound):
table.create.return_value = None
assert model.create('first', 'last', None, None) is None
sound.get.assert_not_called()
table.create.assert_called_once_with('first', 'last', None, None)
@patch('rfidsecuritysvc.model.guest.table')
def test_delete(table):
table.delete.return_value = 1
assert model.delete(1) == 1
table.delete.assert_called_with(1)
@patch('rfidsecuritysvc.model.guest.sound_model')
@patch('rfidsecuritysvc.model.guest.table')
def test_update(table, sound, default_sound):
sound.get.return_value = default_sound
table.update.return_value = 1
assert model.update(1, 'first', 'last', default_sound.id, 0xABCDEF) == 1
sound.get.assert_called_once_with(default_sound.id)
table.update.assert_called_once_with(1, 'first', 'last', default_sound.id, 0xABCDEF)
@patch('rfidsecuritysvc.model.guest.sound_model')
@patch('rfidsecuritysvc.model.guest.table')
def test_update_no_prefs(table, sound, default_sound):
table.update.return_value = 1
assert model.update(1, 'first', 'last', None, None) == 1
sound.get.assert_not_called()
table.update.assert_called_once_with(1, 'first', 'last', None, None)
@patch('rfidsecuritysvc.model.guest.sound_model')
@patch('rfidsecuritysvc.model.guest.table')
def test_update_SoundNotFoundError(table, sound, default_sound):
table.update.return_value = 1
sound.get.return_value = None
with pytest.raises(SoundNotFoundError):
model.update(1, 'first', 'last', default_sound.id, 0xABCDEF)
sound.get.assert_called_once_with(default_sound.id)
table.update.assert_not_called()
def test__model_no_color(creatable_guest):
row = creatable_guest.test_to_row()
row['color'] = None
g = model.__model(row)
assert g.color is None
def test__model_no_sound(creatable_guest):
row = creatable_guest.test_to_row()
row['sound'] = None
g = model.__model(row)
assert g.sound is None
def _default(index=1):
return _model(index, f'first {index}', f'last {index}', Sound(index, f'sound_name {index}', '2021-09-25 23:13:25'), Color(0xABCDEF))
def _model(id, first_name, last_name, sound, color):
return Guest(id, first_name, last_name, sound, color)
| [((500, 542), 'unittest.mock.patch', 'patch', (['"""rfidsecuritysvc.model.guest.table"""'], {}), "('rfidsecuritysvc.model.guest.table')\n", (505, 542), False, 'from unittest.mock import patch\n'), ((700, 742), 'unittest.mock.patch', 'patch', (['"""rfidsecuritysvc.model.guest.table"""'], {}), "('rfidsecuritysvc.model.guest.table')\n", (705, 742), False, 'from unittest.mock import patch\n'), ((883, 925), 'unittest.mock.patch', 'patch', (['"""rfidsecuritysvc.model.guest.table"""'], {}), "('rfidsecuritysvc.model.guest.table')\n", (888, 925), False, 'from unittest.mock import patch\n'), ((1167, 1209), 'unittest.mock.patch', 'patch', (['"""rfidsecuritysvc.model.guest.table"""'], {}), "('rfidsecuritysvc.model.guest.table')\n", (1172, 1209), False, 'from unittest.mock import patch\n'), ((1364, 1412), 'unittest.mock.patch', 'patch', (['"""rfidsecuritysvc.model.guest.sound_model"""'], {}), "('rfidsecuritysvc.model.guest.sound_model')\n", (1369, 1412), False, 'from unittest.mock import patch\n'), ((1414, 1456), 'unittest.mock.patch', 'patch', (['"""rfidsecuritysvc.model.guest.table"""'], {}), "('rfidsecuritysvc.model.guest.table')\n", (1419, 1456), False, 'from unittest.mock import patch\n'), ((1805, 1853), 'unittest.mock.patch', 'patch', (['"""rfidsecuritysvc.model.guest.sound_model"""'], {}), "('rfidsecuritysvc.model.guest.sound_model')\n", (1810, 1853), False, 'from unittest.mock import patch\n'), ((1855, 1897), 'unittest.mock.patch', 'patch', (['"""rfidsecuritysvc.model.guest.table"""'], {}), "('rfidsecuritysvc.model.guest.table')\n", (1860, 1897), False, 'from unittest.mock import patch\n'), ((2203, 2251), 'unittest.mock.patch', 'patch', (['"""rfidsecuritysvc.model.guest.sound_model"""'], {}), "('rfidsecuritysvc.model.guest.sound_model')\n", (2208, 2251), False, 'from unittest.mock import patch\n'), ((2253, 2295), 'unittest.mock.patch', 'patch', (['"""rfidsecuritysvc.model.guest.table"""'], {}), "('rfidsecuritysvc.model.guest.table')\n", (2258, 2295), False, 'from unittest.mock import patch\n'), ((2556, 2598), 'unittest.mock.patch', 'patch', (['"""rfidsecuritysvc.model.guest.table"""'], {}), "('rfidsecuritysvc.model.guest.table')\n", (2561, 2598), False, 'from unittest.mock import patch\n'), ((2731, 2779), 'unittest.mock.patch', 'patch', (['"""rfidsecuritysvc.model.guest.sound_model"""'], {}), "('rfidsecuritysvc.model.guest.sound_model')\n", (2736, 2779), False, 'from unittest.mock import patch\n'), ((2781, 2823), 'unittest.mock.patch', 'patch', (['"""rfidsecuritysvc.model.guest.table"""'], {}), "('rfidsecuritysvc.model.guest.table')\n", (2786, 2823), False, 'from unittest.mock import patch\n'), ((3172, 3220), 'unittest.mock.patch', 'patch', (['"""rfidsecuritysvc.model.guest.sound_model"""'], {}), "('rfidsecuritysvc.model.guest.sound_model')\n", (3177, 3220), False, 'from unittest.mock import patch\n'), ((3222, 3264), 'unittest.mock.patch', 'patch', (['"""rfidsecuritysvc.model.guest.table"""'], {}), "('rfidsecuritysvc.model.guest.table')\n", (3227, 3264), False, 'from unittest.mock import patch\n'), ((3525, 3573), 'unittest.mock.patch', 'patch', (['"""rfidsecuritysvc.model.guest.sound_model"""'], {}), "('rfidsecuritysvc.model.guest.sound_model')\n", (3530, 3573), False, 'from unittest.mock import patch\n'), ((3575, 3617), 'unittest.mock.patch', 'patch', (['"""rfidsecuritysvc.model.guest.table"""'], {}), "('rfidsecuritysvc.model.guest.table')\n", (3580, 3617), False, 'from unittest.mock import patch\n'), ((1068, 1080), 'rfidsecuritysvc.model.guest.list', 'model.list', ([], {}), '()\n', (1078, 
1080), True, 'import rfidsecuritysvc.model.guest as model\n'), ((1288, 1300), 'rfidsecuritysvc.model.guest.list', 'model.list', ([], {}), '()\n', (1298, 1300), True, 'import rfidsecuritysvc.model.guest as model\n'), ((4074, 4092), 'rfidsecuritysvc.model.guest.__model', 'model.__model', (['row'], {}), '(row)\n', (4087, 4092), True, 'import rfidsecuritysvc.model.guest as model\n'), ((4237, 4255), 'rfidsecuritysvc.model.guest.__model', 'model.__model', (['row'], {}), '(row)\n', (4250, 4255), True, 'import rfidsecuritysvc.model.guest as model\n'), ((4511, 4557), 'rfidsecuritysvc.model.guest.Guest', 'Guest', (['id', 'first_name', 'last_name', 'sound', 'color'], {}), '(id, first_name, last_name, sound, color)\n', (4516, 4557), False, 'from rfidsecuritysvc.model.guest import Guest\n'), ((440, 495), 'rfidsecuritysvc.model.guest.Guest', 'Guest', (['(1)', '"""first"""', '"""last"""', 'default_sound', 'default_color'], {}), "(1, 'first', 'last', default_sound, default_color)\n", (445, 495), False, 'from rfidsecuritysvc.model.guest import Guest\n'), ((629, 641), 'rfidsecuritysvc.model.guest.get', 'model.get', (['(1)'], {}), '(1)\n', (638, 641), True, 'import rfidsecuritysvc.model.guest as model\n'), ((818, 830), 'rfidsecuritysvc.model.guest.get', 'model.get', (['(1)'], {}), '(1)\n', (827, 830), True, 'import rfidsecuritysvc.model.guest as model\n'), ((1594, 1651), 'rfidsecuritysvc.model.guest.create', 'model.create', (['"""first"""', '"""last"""', 'default_sound.id', '(11259375)'], {}), "('first', 'last', default_sound.id, 11259375)\n", (1606, 1651), True, 'import rfidsecuritysvc.model.guest as model\n'), ((2006, 2039), 'pytest.raises', 'pytest.raises', (['SoundNotFoundError'], {}), '(SoundNotFoundError)\n', (2019, 2039), False, 'import pytest\n'), ((2049, 2106), 'rfidsecuritysvc.model.guest.create', 'model.create', (['"""first"""', '"""last"""', 'default_sound.id', '(11259375)'], {}), "('first', 'last', default_sound.id, 11259375)\n", (2061, 2106), True, 'import rfidsecuritysvc.model.guest as model\n'), ((2399, 2440), 'rfidsecuritysvc.model.guest.create', 'model.create', (['"""first"""', '"""last"""', 'None', 'None'], {}), "('first', 'last', None, None)\n", (2411, 2440), True, 'import rfidsecuritysvc.model.guest as model\n'), ((2668, 2683), 'rfidsecuritysvc.model.guest.delete', 'model.delete', (['(1)'], {}), '(1)\n', (2680, 2683), True, 'import rfidsecuritysvc.model.guest as model\n'), ((2958, 3018), 'rfidsecuritysvc.model.guest.update', 'model.update', (['(1)', '"""first"""', '"""last"""', 'default_sound.id', '(11259375)'], {}), "(1, 'first', 'last', default_sound.id, 11259375)\n", (2970, 3018), True, 'import rfidsecuritysvc.model.guest as model\n'), ((3365, 3409), 'rfidsecuritysvc.model.guest.update', 'model.update', (['(1)', '"""first"""', '"""last"""', 'None', 'None'], {}), "(1, 'first', 'last', None, None)\n", (3377, 3409), True, 'import rfidsecuritysvc.model.guest as model\n'), ((3760, 3793), 'pytest.raises', 'pytest.raises', (['SoundNotFoundError'], {}), '(SoundNotFoundError)\n', (3773, 3793), False, 'import pytest\n'), ((3803, 3863), 'rfidsecuritysvc.model.guest.update', 'model.update', (['(1)', '"""first"""', '"""last"""', 'default_sound.id', '(11259375)'], {}), "(1, 'first', 'last', default_sound.id, 11259375)\n", (3815, 3863), True, 'import rfidsecuritysvc.model.guest as model\n'), ((4368, 4426), 'rfidsecuritysvc.model.sound.Sound', 'Sound', (['index', 'f"""sound_name {index}"""', '"""2021-09-25 23:13:25"""'], {}), "(index, f'sound_name {index}', '2021-09-25 23:13:25')\n", (4373, 4426), 
False, 'from rfidsecuritysvc.model.sound import Sound\n'), ((4428, 4443), 'rfidsecuritysvc.model.color.Color', 'Color', (['(11259375)'], {}), '(11259375)\n', (4433, 4443), False, 'from rfidsecuritysvc.model.color import Color\n')] |
reavessm/Ice | ice/consoles.py | e78d046abfd6006b1a81d1cbdb516b7c3e141ac9 | # encoding: utf-8
import os
import roms
def console_roms_directory(configuration, console):
"""
If the user has specified a custom ROMs directory in consoles.txt then
return that.
Otherwise, append the shortname of the console to the default ROMs
directory given by config.txt.
"""
if console.custom_roms_directory:
return console.custom_roms_directory
return os.path.join(roms.roms_directory(configuration), console.shortname)
def path_is_rom(console, path):
"""
This function determines if a given path is actually a valid ROM file.
If a list of extensions is supplied for this console, we check if the path has a valid extension
If no extensions are defined for this console, we just accept any file
"""
if console.extensions == "":
return True
# Normalize the extension based on the things we validly ignore.
# Aka capitalization, whitespace, and leading dots
normalize = lambda ext: ext.lower().strip().lstrip('.')
(name, ext) = os.path.splitext(path)
valid_extensions = console.extensions.split(',')
return normalize(ext) in map(normalize, valid_extensions)
| [((985, 1007), 'os.path.splitext', 'os.path.splitext', (['path'], {}), '(path)\n', (1001, 1007), False, 'import os\n'), ((397, 431), 'roms.roms_directory', 'roms.roms_directory', (['configuration'], {}), '(configuration)\n', (416, 431), False, 'import roms\n')] |
dumpmemory/roformer-v2 | clue/c3.py | 95b71ae03b8bb910998285e194d7752b1e4104c0 | #! -*- coding:utf-8 -*-
# CLUE benchmark
# C3: multiple-choice reading comprehension
# Approach: each choice is concatenated with the question and passage, then the choices are scored and ranked
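# A minimal sketch of the input layout this script builds, for orientation only
# (the names below are illustrative and do not appear in the original file):
#   for each of the num_classes choices choice_i of a question:
#       token_ids_i = encode(passage) + encode(question)[1:] + encode(choice_i)[1:]
#   each sequence is mapped to a single logit; the num_classes logits of one
#   question are compared and the argmax is taken as the predicted answer.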
import json
import numpy as np
from snippets import *
from bert4keras.backend import keras
from bert4keras.snippets import sequence_padding, DataGenerator
from bert4keras.snippets import open
from bert4keras.snippets import truncate_sequences
from tqdm import tqdm
# Basic parameters
num_classes = 4
maxlen = 512
batch_size = 4
epochs = 10
def load_data(filename):
"""加载数据
格式:[(篇章, 问题, 选项, 答案id)]
"""
D = []
with open(filename) as f:
data = json.load(f)
for d in data:
p = u'||'.join(d[0])
for qa in d[1]:
q = qa['question']
while len(qa['choice']) < num_classes:
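                    # pad missing options with a placeholder choice (u'无效答案' means "invalid answer")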
qa['choice'].append(u'无效答案')
c = qa['choice'][:num_classes]
if 'answer' in qa:
a = qa['choice'].index(qa['answer'])
else:
a = 0
D.append((p, q, c, a))
return D
# Load the datasets
train_data = load_data(data_path + 'c3/m-train.json')
train_data += load_data(data_path + 'c3/d-train.json')
valid_data = load_data(data_path + 'c3/m-dev.json')
valid_data += load_data(data_path + 'c3/d-dev.json')
class data_generator(DataGenerator):
"""数据生成器
"""
def __iter__(self, random=False):
batch_token_ids, batch_segment_ids, batch_labels = [], [], []
for is_end, (p, q, cs, a) in self.sample(random):
for c in cs:
p_ids = tokenizer.encode(p)[0]
q_ids = tokenizer.encode(q)[0][1:]
c_ids = tokenizer.encode(c)[0][1:]
truncate_sequences(maxlen, -2, c_ids, q_ids, p_ids)
token_ids = p_ids + q_ids + c_ids
batch_token_ids.append(token_ids)
batch_segment_ids.append([0] * len(token_ids))
batch_labels.append([a])
if len(batch_token_ids) == self.batch_size * num_classes or is_end:
batch_token_ids = sequence_padding(batch_token_ids)
batch_segment_ids = sequence_padding(batch_segment_ids)
batch_labels = sequence_padding(batch_labels)
yield [batch_token_ids, batch_segment_ids], batch_labels
batch_token_ids, batch_segment_ids, batch_labels = [], [], []
# Wrap the datasets in data generators
train_generator = data_generator(train_data, batch_size)
valid_generator = data_generator(valid_data, batch_size)
def multichoice_crossentropy(y_true, y_pred):
"""多项选择的交叉熵
"""
y_true = K.cast(y_true, 'int32')[::num_classes]
y_pred = K.reshape(y_pred, (-1, num_classes))
return K.mean(
K.sparse_categorical_crossentropy(y_true, y_pred, from_logits=True)
)
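# Worked example of the stride/reshape above (added for clarity, not part of the
# original file): with num_classes = 4, rows 0-3 of y_pred are the four choices
# of question 0, rows 4-7 belong to question 1, and so on. K.reshape(..., (-1, 4))
# gives one row of four logits per question, and y_true[::4] keeps one label per
# question because the data generator appends the same label once per choice.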
def multichoice_accuracy(y_true, y_pred):
"""多项选择的准确率
"""
y_true = K.cast(y_true, 'int32')[::num_classes, 0]
y_pred = K.reshape(y_pred, (-1, num_classes))
y_pred = K.cast(K.argmax(y_pred, axis=1), 'int32')
return K.mean(K.cast(K.equal(y_true, y_pred), K.floatx()))
# Build the model
output = base.model.output
output = keras.layers.Lambda(lambda x: x[:, 0])(output)
output = keras.layers.Dense(units=1,
kernel_initializer=base.initializer)(output)
model = keras.models.Model(base.model.input, output)
model.summary()
model.compile(
loss=multichoice_crossentropy,
optimizer=optimizer4,
metrics=[multichoice_accuracy]
)
class Evaluator(keras.callbacks.Callback):
"""保存验证集acc最好的模型
"""
def __init__(self):
self.best_val_acc = 0.
def on_epoch_end(self, epoch, logs=None):
val_acc = self.evaluate(valid_generator)
if val_acc > self.best_val_acc:
self.best_val_acc = val_acc
model.save_weights('weights/c3.weights')
print(
u'val_acc: %.5f, best_val_acc: %.5f\n' %
(val_acc, self.best_val_acc)
)
def evaluate(self, data):
total, right = 0., 0.
for x_true, y_true in data:
y_pred = model.predict(x_true).reshape((-1, num_classes))
y_pred = y_pred.argmax(axis=1)
y_true = y_true[::num_classes, 0]
total += len(y_true)
right += (y_true == y_pred).sum()
return right / total
def test_predict(in_file, out_file):
"""输出测试结果到文件
结果文件可以提交到 https://www.cluebenchmarks.com 评测。
"""
test_data = load_data(in_file)
test_generator = data_generator(test_data, batch_size)
results = []
for x_true, _ in tqdm(test_generator, ncols=0):
y_pred = model.predict(x_true).reshape((-1, num_classes))
y_pred = y_pred.argmax(axis=1)
results.extend(y_pred)
fw = open(out_file, 'w')
with open(in_file) as fr:
data = json.load(fr)
i = 0
for d in data:
for qa in d[1]:
l = json.dumps({'id': str(qa['id']), 'label': str(results[i])})
fw.write(l + '\n')
i += 1
fw.close()
if __name__ == '__main__':
evaluator = Evaluator()
model.fit_generator(
train_generator.forfit(),
steps_per_epoch=len(train_generator),
epochs=epochs,
callbacks=[evaluator]
)
model.load_weights('weights/c3.weights')
test_predict(
in_file=data_path + 'c3/test1.0.json',
out_file='results/c310_predict.json'
)
test_predict(
in_file=data_path + 'c3/test1.1.json',
out_file='results/c311_predict.json'
)
else:
model.load_weights('weights/c3.weights')
| [((3237, 3281), 'bert4keras.backend.keras.models.Model', 'keras.models.Model', (['base.model.input', 'output'], {}), '(base.model.input, output)\n', (3255, 3281), False, 'from bert4keras.backend import keras\n'), ((3071, 3111), 'bert4keras.backend.keras.layers.Lambda', 'keras.layers.Lambda', (['(lambda x: x[:, (0)])'], {}), '(lambda x: x[:, (0)])\n', (3090, 3111), False, 'from bert4keras.backend import keras\n'), ((3127, 3191), 'bert4keras.backend.keras.layers.Dense', 'keras.layers.Dense', ([], {'units': '(1)', 'kernel_initializer': 'base.initializer'}), '(units=1, kernel_initializer=base.initializer)\n', (3145, 3191), False, 'from bert4keras.backend import keras\n'), ((4499, 4528), 'tqdm.tqdm', 'tqdm', (['test_generator'], {'ncols': '(0)'}), '(test_generator, ncols=0)\n', (4503, 4528), False, 'from tqdm import tqdm\n'), ((4676, 4695), 'bert4keras.snippets.open', 'open', (['out_file', '"""w"""'], {}), "(out_file, 'w')\n", (4680, 4695), False, 'from bert4keras.snippets import open\n'), ((496, 510), 'bert4keras.snippets.open', 'open', (['filename'], {}), '(filename)\n', (500, 510), False, 'from bert4keras.snippets import open\n'), ((532, 544), 'json.load', 'json.load', (['f'], {}), '(f)\n', (541, 544), False, 'import json\n'), ((4705, 4718), 'bert4keras.snippets.open', 'open', (['in_file'], {}), '(in_file)\n', (4709, 4718), False, 'from bert4keras.snippets import open\n'), ((4741, 4754), 'json.load', 'json.load', (['fr'], {}), '(fr)\n', (4750, 4754), False, 'import json\n'), ((1647, 1698), 'bert4keras.snippets.truncate_sequences', 'truncate_sequences', (['maxlen', '(-2)', 'c_ids', 'q_ids', 'p_ids'], {}), '(maxlen, -2, c_ids, q_ids, p_ids)\n', (1665, 1698), False, 'from bert4keras.snippets import truncate_sequences\n'), ((2017, 2050), 'bert4keras.snippets.sequence_padding', 'sequence_padding', (['batch_token_ids'], {}), '(batch_token_ids)\n', (2033, 2050), False, 'from bert4keras.snippets import sequence_padding, DataGenerator\n'), ((2087, 2122), 'bert4keras.snippets.sequence_padding', 'sequence_padding', (['batch_segment_ids'], {}), '(batch_segment_ids)\n', (2103, 2122), False, 'from bert4keras.snippets import sequence_padding, DataGenerator\n'), ((2154, 2184), 'bert4keras.snippets.sequence_padding', 'sequence_padding', (['batch_labels'], {}), '(batch_labels)\n', (2170, 2184), False, 'from bert4keras.snippets import sequence_padding, DataGenerator\n')] |
zipated/src | tools/perf/contrib/oop_raster/oop_raster.py | 2b8388091c71e442910a21ada3d97ae8bc1845d3 | # Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from benchmarks import smoothness,thread_times
import page_sets
from telemetry import benchmark
# pylint: disable=protected-access
def CustomizeBrowserOptionsForOopRasterization(options):
"""Enables flags needed for out of process rasterization."""
options.AppendExtraBrowserArgs('--force-gpu-rasterization')
options.AppendExtraBrowserArgs('--enable-oop-rasterization')
@benchmark.Owner(emails=['[email protected]'])
class SmoothnessOopRasterizationTop25(smoothness._Smoothness):
"""Measures rendering statistics for the top 25 with oop rasterization.
"""
tag = 'oop_rasterization'
page_set = page_sets.Top25SmoothPageSet
def SetExtraBrowserOptions(self, options):
CustomizeBrowserOptionsForOopRasterization(options)
@classmethod
def Name(cls):
return 'smoothness.oop_rasterization.top_25_smooth'
@benchmark.Owner(emails=['[email protected]'])
class ThreadTimesOopRasterKeyMobile(thread_times._ThreadTimes):
"""Measure timeline metrics for key mobile pages while using out of process
raster."""
tag = 'oop_rasterization'
page_set = page_sets.KeyMobileSitesSmoothPageSet
options = {'story_tag_filter': 'fastpath'}
def SetExtraBrowserOptions(self, options):
super(ThreadTimesOopRasterKeyMobile, self).SetExtraBrowserOptions(options)
CustomizeBrowserOptionsForOopRasterization(options)
@classmethod
def Name(cls):
return 'thread_times.oop_rasterization.key_mobile'
| [((543, 588), 'telemetry.benchmark.Owner', 'benchmark.Owner', ([], {'emails': "['[email protected]']"}), "(emails=['[email protected]'])\n", (558, 588), False, 'from telemetry import benchmark\n'), ((996, 1041), 'telemetry.benchmark.Owner', 'benchmark.Owner', ([], {'emails': "['[email protected]']"}), "(emails=['[email protected]'])\n", (1011, 1041), False, 'from telemetry import benchmark\n')] |
mrocklin/logpy | logpy/util.py | 7e32f4da10a0ab5b86fb23947cfce9a4d49c6b3f | import itertools as it
from toolz.compatibility import range, map, iteritems
def hashable(x):
try:
hash(x)
return True
except TypeError:
return False
def transitive_get(key, d):
""" Transitive dict.get
>>> from logpy.util import transitive_get
>>> d = {1: 2, 2: 3, 3: 4}
>>> d.get(1)
2
>>> transitive_get(1, d)
4
"""
while hashable(key) and key in d:
key = d[key]
return key
def deep_transitive_get(key, d):
""" Transitive get that propagates within tuples
>>> from logpy.util import transitive_get, deep_transitive_get
>>> d = {1: (2, 3), 2: 12, 3: 13}
>>> transitive_get(1, d)
(2, 3)
>>> deep_transitive_get(1, d)
(12, 13)
"""
key = transitive_get(key, d)
if isinstance(key, tuple):
return tuple(map(lambda k: deep_transitive_get(k, d), key))
else:
return key
def dicthash(d):
return hash(frozenset(d.items()))
def multihash(x):
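    # Hash values that may contain unhashable containers by recursively hashing
    # their elements (lists, tuples, sets, dicts); plain hash() is tried first.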
try:
return hash(x)
except TypeError:
if isinstance(x, (list, tuple, set, frozenset)):
return hash(tuple(map(multihash, x)))
if type(x) is dict:
return hash(frozenset(map(multihash, x.items())))
if type(x) is slice:
return hash((x.start, x.stop, x.step))
raise TypeError('Hashing not covered for ' + str(x))
def unique(seq, key=lambda x: x):
seen = set()
for item in seq:
try:
if key(item) not in seen:
seen.add(key(item))
yield item
except TypeError: # item probably isn't hashable
yield item # Just return it and hope for the best
def interleave(seqs, pass_exceptions=()):
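    # Yield one item from each sequence in turn, dropping iterators as they are
    # exhausted, e.g. interleave(['ab', 'cd']) yields 'a', 'c', 'b', 'd'.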
iters = map(iter, seqs)
while iters:
newiters = []
for itr in iters:
try:
yield next(itr)
newiters.append(itr)
except (StopIteration,) + tuple(pass_exceptions):
pass
iters = newiters
def take(n, seq):
if n is None:
return seq
if n == 0:
return tuple(seq)
return tuple(it.islice(seq, 0, n))
def evalt(t):
""" Evaluate tuple if unevaluated
>>> from logpy.util import evalt
>>> add = lambda x, y: x + y
>>> evalt((add, 2, 3))
5
>>> evalt(add(2, 3))
5
"""
if isinstance(t, tuple) and len(t) >= 1 and callable(t[0]):
return t[0](*t[1:])
else:
return t
def intersection(*seqs):
return (item for item in seqs[0]
if all(item in seq for seq in seqs[1:]))
def groupsizes(total, len):
""" Groups of length len that add up to total
>>> from logpy.util import groupsizes
>>> tuple(groupsizes(4, 2))
((1, 3), (2, 2), (3, 1))
"""
if len == 1:
yield (total,)
else:
for i in range(1, total - len + 1 + 1):
for perm in groupsizes(total - i, len - 1):
yield (i,) + perm
def raises(err, lamda):
try:
lamda()
raise Exception("Did not raise %s"%err)
except err:
pass
def pprint(g):
""" Pretty print a tree of goals """
if callable(g) and hasattr(g, '__name__'):
return g.__name__
if isinstance(g, type):
return g.__name__
if isinstance(g, tuple):
return "(" + ', '.join(map(pprint, g)) + ")"
return str(g)
def index(tup, ind):
""" Fancy indexing with tuples """
return tuple(tup[i] for i in ind)
| [((1743, 1758), 'toolz.compatibility.map', 'map', (['iter', 'seqs'], {}), '(iter, seqs)\n', (1746, 1758), False, 'from toolz.compatibility import range, map, iteritems\n'), ((2132, 2152), 'itertools.islice', 'it.islice', (['seq', '(0)', 'n'], {}), '(seq, 0, n)\n', (2141, 2152), True, 'import itertools as it\n'), ((2850, 2879), 'toolz.compatibility.range', 'range', (['(1)', '(total - len + 1 + 1)'], {}), '(1, total - len + 1 + 1)\n', (2855, 2879), False, 'from toolz.compatibility import range, map, iteritems\n'), ((3342, 3356), 'toolz.compatibility.map', 'map', (['pprint', 'g'], {}), '(pprint, g)\n', (3345, 3356), False, 'from toolz.compatibility import range, map, iteritems\n'), ((1124, 1141), 'toolz.compatibility.map', 'map', (['multihash', 'x'], {}), '(multihash, x)\n', (1127, 1141), False, 'from toolz.compatibility import range, map, iteritems\n')] |
rinocloud/rinobot-plugin-shift | index.py | 4f7f16a5e610b91b64377733d24b6ab4b63daa67 | import rinobot_plugin as bot
import numpy as np
def main():
# lets get our parameters and data
filepath = bot.filepath()
data = bot.loadfile(filepath)
# now comes the custom plugin logic
shift = bot.get_arg('shift', type=float, required=True)
index = bot.index_from_args(data)
data[index] = data[index] + shift
outname = bot.no_extension() + '-shift-%s.txt' % shift
# then we set up the output
outpath = bot.output_filepath(outname)
np.savetxt(outpath, data)
if __name__ == "__main__":
main()
| [((115, 129), 'rinobot_plugin.filepath', 'bot.filepath', ([], {}), '()\n', (127, 129), True, 'import rinobot_plugin as bot\n'), ((141, 163), 'rinobot_plugin.loadfile', 'bot.loadfile', (['filepath'], {}), '(filepath)\n', (153, 163), True, 'import rinobot_plugin as bot\n'), ((217, 264), 'rinobot_plugin.get_arg', 'bot.get_arg', (['"""shift"""'], {'type': 'float', 'required': '(True)'}), "('shift', type=float, required=True)\n", (228, 264), True, 'import rinobot_plugin as bot\n'), ((277, 302), 'rinobot_plugin.index_from_args', 'bot.index_from_args', (['data'], {}), '(data)\n', (296, 302), True, 'import rinobot_plugin as bot\n'), ((447, 475), 'rinobot_plugin.output_filepath', 'bot.output_filepath', (['outname'], {}), '(outname)\n', (466, 475), True, 'import rinobot_plugin as bot\n'), ((480, 505), 'numpy.savetxt', 'np.savetxt', (['outpath', 'data'], {}), '(outpath, data)\n', (490, 505), True, 'import numpy as np\n'), ((356, 374), 'rinobot_plugin.no_extension', 'bot.no_extension', ([], {}), '()\n', (372, 374), True, 'import rinobot_plugin as bot\n')] |
arsfeld/fog-web2py | gluon/contrib/memcache/__init__.py | 32263a03d4183dcaf7537c87edcb4e574d4bec6e | from gluon.contrib.memcache.memcache import Client
import time
"""
example of usage:
cache.memcache = MemcacheClient(request, ['127.0.0.1:11211'], debug=True)
"""
import cPickle as pickle
import thread
locker = thread.allocate_lock()
def MemcacheClient(*a, **b):
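    # Lazily create a single shared _MemcacheClient per process; the lock keeps
    # two threads from constructing the instance at the same time.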
locker.acquire()
if not hasattr(MemcacheClient, '__mc_instance'):
MemcacheClient.__mc_instance = _MemcacheClient(*a, **b)
locker.release()
return MemcacheClient.__mc_instance
class _MemcacheClient(Client):
def __init__(self, request, servers, debug=0, pickleProtocol=0,
pickler=pickle.Pickler, unpickler=pickle.Unpickler,
pload=None, pid=None):
self.request=request
Client.__init__(self,servers,debug,pickleProtocol,
pickler,unpickler,pload,pid)
def __call__(self,key,f,time_expire=300):
#key=self.__keyFormat__(key)
value=None
obj=self.get(key)
if obj:
value=obj
elif f is None:
if obj: self.delete(key)
else:
value=f()
self.set(key,value,time_expire)
return value
def increment(self,key,value=1,time_expire=300):
newKey=self.__keyFormat__(key)
obj=self.get(newKey)
if obj:
return Client.incr(self,newKey,value)
else:
self.set(newKey,value,time_expire)
return value
def set(self,key,value,time_expire=300):
newKey = self.__keyFormat__(key)
return Client.set(self,newKey,value,time_expire)
def get(self,key):
newKey = self.__keyFormat__(key)
return Client.get(self,newKey)
def delete(self,key):
newKey = self.__keyFormat__(key)
return Client.delete(self,newKey)
def __keyFormat__(self,key):
return '%s/%s' % (self.request.application,key.replace(' ','_'))
| [((208, 230), 'thread.allocate_lock', 'thread.allocate_lock', ([], {}), '()\n', (228, 230), False, 'import thread\n'), ((707, 796), 'gluon.contrib.memcache.memcache.Client.__init__', 'Client.__init__', (['self', 'servers', 'debug', 'pickleProtocol', 'pickler', 'unpickler', 'pload', 'pid'], {}), '(self, servers, debug, pickleProtocol, pickler, unpickler,\n pload, pid)\n', (722, 796), False, 'from gluon.contrib.memcache.memcache import Client\n'), ((1528, 1572), 'gluon.contrib.memcache.memcache.Client.set', 'Client.set', (['self', 'newKey', 'value', 'time_expire'], {}), '(self, newKey, value, time_expire)\n', (1538, 1572), False, 'from gluon.contrib.memcache.memcache import Client\n'), ((1654, 1678), 'gluon.contrib.memcache.memcache.Client.get', 'Client.get', (['self', 'newKey'], {}), '(self, newKey)\n', (1664, 1678), False, 'from gluon.contrib.memcache.memcache import Client\n'), ((1765, 1792), 'gluon.contrib.memcache.memcache.Client.delete', 'Client.delete', (['self', 'newKey'], {}), '(self, newKey)\n', (1778, 1792), False, 'from gluon.contrib.memcache.memcache import Client\n'), ((1305, 1337), 'gluon.contrib.memcache.memcache.Client.incr', 'Client.incr', (['self', 'newKey', 'value'], {}), '(self, newKey, value)\n', (1316, 1337), False, 'from gluon.contrib.memcache.memcache import Client\n')] |
aas-core-works/aas-core-csharp-codegen | test_data/parse/unexpected/symbol_table/inheritance_from_non_class/meta_model.py | 731f706e2d12bf80722ac55d920fcf5402fb26ef | class Some_enum(Enum):
some_literal = "some_literal"
class Something(Some_enum):
pass
class Reference:
pass
__book_url__ = "dummy"
__book_version__ = "dummy"
associate_ref_with(Reference)
| [] |
wannaphong/deepcut | deepcut/deepcut.py | e4f7779caa087c5ffbad3bc4e88f919e300d020c | #!/usr/bin/env python
# encoding: utf-8
import numbers
import os
import re
import sys
from itertools import chain
import numpy as np
import scipy.sparse as sp
import six
import pickle
from .model import get_convo_nn2
from .stop_words import THAI_STOP_WORDS
from .utils import CHAR_TYPES_MAP, CHARS_MAP, create_feature_array
MODULE_PATH = os.path.dirname(__file__)
WEIGHT_PATH = os.path.join(MODULE_PATH, 'weight', 'cnn_without_ne_ab.h5')
TOKENIZER = None
def tokenize(text, custom_dict=None):
"""
Tokenize given Thai text string
Input
=====
text: str, Thai text string
custom_dict: str (or list), path to customized dictionary file
        It keeps the tokenizer from splitting words that appear in the dictionary.
        The file should contain custom words separated by line.
        Alternatively, you can provide a list of custom words too.
Output
======
tokens: list, list of tokenized words
Example
=======
>> deepcut.tokenize('ตัดคำได้ดีมาก')
>> ['ตัดคำ','ได้','ดี','มาก']
"""
global TOKENIZER
if not TOKENIZER:
TOKENIZER = DeepcutTokenizer()
return TOKENIZER.tokenize(text, custom_dict=custom_dict)
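# Illustrative calls using the custom_dict argument described in the docstring
# above (the file name and word list are placeholders, not from the project):
#   tokenize(text, custom_dict='custom_words.txt')   # a file with one word per line
#   tokenize(text, custom_dict=['word1', 'word2'])   # or a list of custom words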
def _custom_dict(word, text, word_end):
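    # Mark where each occurrence of `word` ends inside `text`: positions inside
    # the word are set to 0 and the final character to 1 in `word_end`, so the
    # custom word is kept as a single token when boundaries are decided later.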
word_length = len(word)
initial_loc = 0
while True:
try:
start_char = re.search(word, text).start()
first_char = start_char + initial_loc
last_char = first_char + word_length - 1
initial_loc += start_char + word_length
text = text[start_char + word_length:]
word_end[first_char:last_char] = (word_length - 1) * [0]
word_end[last_char] = 1
except:
break
return word_end
def _document_frequency(X):
"""
Count the number of non-zero values for each feature in sparse X.
"""
if sp.isspmatrix_csr(X):
return np.bincount(X.indices, minlength=X.shape[1])
return np.diff(sp.csc_matrix(X, copy=False).indptr)
def _check_stop_list(stop):
"""
Check stop words list
ref: https://github.com/scikit-learn/scikit-learn/blob/master/sklearn/feature_extraction/text.py#L87-L95
"""
if stop == "thai":
return THAI_STOP_WORDS
elif isinstance(stop, six.string_types):
raise ValueError("not a built-in stop list: %s" % stop)
elif stop is None:
return None
# assume it's a collection
return frozenset(stop)
def load_model(file_path):
"""
Load saved pickle file of DeepcutTokenizer
Parameters
==========
file_path: str, path to saved model from ``save_model`` method in DeepcutTokenizer
"""
tokenizer = pickle.load(open(file_path, 'rb'))
tokenizer.model = get_convo_nn2()
    tokenizer.model.load_weights(WEIGHT_PATH)
return tokenizer
class DeepcutTokenizer(object):
"""
Class for tokenizing given Thai text documents using deepcut library
Parameters
==========
ngram_range : tuple, tuple for ngram range for vocabulary, (1, 1) for unigram
and (1, 2) for bigram
stop_words : list or set, list or set of stop words to be removed
if None, max_df can be set to value [0.7, 1.0) to automatically remove
vocabulary. If using "thai", this will use list of pre-populated stop words
max_features : int or None, if provided, only consider number of vocabulary
ordered by term frequencies
max_df : float in range [0.0, 1.0] or int, default=1.0
ignore terms that have a document frequency higher than the given threshold
min_df : float in range [0.0, 1.0] or int, default=1
ignore terms that have a document frequency lower than the given threshold
dtype : type, optional
Example
=======
raw_documents = ['ฉันอยากกินข้าวของฉัน',
'ฉันอยากกินไก่',
'อยากนอนอย่างสงบ']
tokenizer = DeepcutTokenizer(ngram_range=(1, 1))
X = tokenizer.fit_tranform(raw_documents) # document-term matrix in sparse CSR format
>> X.todense()
>> [[0, 0, 1, 0, 1, 0, 2, 1],
[0, 1, 1, 0, 1, 0, 1, 0],
[1, 0, 0, 1, 1, 1, 0, 0]]
>> tokenizer.vocabulary_
>> {'นอน': 0, 'ไก่': 1, 'กิน': 2, 'อย่าง': 3, 'อยาก': 4, 'สงบ': 5, 'ฉัน': 6, 'ข้าว': 7}
"""
def __init__(self, ngram_range=(1, 1), stop_words=None,
max_df=1.0, min_df=1, max_features=None, dtype=np.dtype('float64')):
self.model = get_convo_nn2()
self.model.load_weights(WEIGHT_PATH)
self.vocabulary_ = {}
self.ngram_range = ngram_range
self.dtype = dtype
self.max_df = max_df
self.min_df = min_df
if max_df < 0 or min_df < 0:
raise ValueError("negative value for max_df or min_df")
self.max_features = max_features
self.stop_words = _check_stop_list(stop_words)
def _word_ngrams(self, tokens):
"""
Turn tokens into a tokens of n-grams
ref: https://github.com/scikit-learn/scikit-learn/blob/ef5cb84a/sklearn/feature_extraction/text.py#L124-L153
"""
# handle stop words
if self.stop_words is not None:
tokens = [w for w in tokens if w not in self.stop_words]
# handle token n-grams
min_n, max_n = self.ngram_range
if max_n != 1:
original_tokens = tokens
if min_n == 1:
# no need to do any slicing for unigrams
# just iterate through the original tokens
tokens = list(original_tokens)
min_n += 1
else:
tokens = []
n_original_tokens = len(original_tokens)
# bind method outside of loop to reduce overhead
tokens_append = tokens.append
space_join = " ".join
for n in range(min_n,
min(max_n + 1, n_original_tokens + 1)):
for i in range(n_original_tokens - n + 1):
tokens_append(space_join(original_tokens[i: i + n]))
return tokens
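    # Sketch of the behaviour (example tokens, not from the library's tests):
    # with ngram_range=(1, 2) and tokens ['a', 'b', 'c'] this returns
    # ['a', 'b', 'c', 'a b', 'b c']; with ngram_range=(2, 2) it returns
    # ['a b', 'b c'] only.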
def _limit_features(self, X, vocabulary,
high=None, low=None, limit=None):
"""Remove too rare or too common features.
ref: https://github.com/scikit-learn/scikit-learn/blob/ef5cb84a/sklearn/feature_extraction/text.py#L734-L773
"""
if high is None and low is None and limit is None:
return X, set()
# Calculate a mask based on document frequencies
dfs = _document_frequency(X)
mask = np.ones(len(dfs), dtype=bool)
if high is not None:
mask &= dfs <= high
if low is not None:
mask &= dfs >= low
if limit is not None and mask.sum() > limit:
tfs = np.asarray(X.sum(axis=0)).ravel()
mask_inds = (-tfs[mask]).argsort()[:limit]
new_mask = np.zeros(len(dfs), dtype=bool)
new_mask[np.where(mask)[0][mask_inds]] = True
mask = new_mask
new_indices = np.cumsum(mask) - 1 # maps old indices to new
removed_terms = set()
for term, old_index in list(vocabulary.items()):
if mask[old_index]:
vocabulary[term] = new_indices[old_index]
else:
del vocabulary[term]
removed_terms.add(term)
kept_indices = np.where(mask)[0]
if len(kept_indices) == 0:
raise ValueError("After pruning, no terms remain. Try a lower"
" min_df or a higher max_df.")
return X[:, kept_indices], removed_terms
def transform(self, raw_documents, new_document=False):
"""
raw_documents: list, list of new documents to be transformed
        new_document: bool, if True, treat the input as new documents and build a new self.vocabulary_,
if False, use the previous self.vocabulary_
"""
n_doc = len(raw_documents)
tokenized_documents = []
for doc in raw_documents:
tokens = tokenize(doc) # method in this file
tokens = self._word_ngrams(tokens)
tokenized_documents.append(tokens)
if new_document:
self.vocabulary_ = {v: k for k, v in enumerate(set(chain.from_iterable(tokenized_documents)))}
values, row_indices, col_indices = [], [], []
for r, tokens in enumerate(tokenized_documents):
tokens = self._word_ngrams(tokens)
feature = {}
for token in tokens:
word_index = self.vocabulary_.get(token)
if word_index is not None:
if word_index not in feature.keys():
feature[word_index] = 1
else:
feature[word_index] += 1
for c, v in feature.items():
values.append(v)
row_indices.append(r)
col_indices.append(c)
# document-term matrix in CSR format
X = sp.csr_matrix((values, (row_indices, col_indices)),
shape=(n_doc, len(self.vocabulary_)),
dtype=self.dtype)
# truncate vocabulary by max_df and min_df
if new_document:
max_df = self.max_df
min_df = self.min_df
max_doc_count = (max_df
if isinstance(max_df, numbers.Integral)
else max_df * n_doc)
min_doc_count = (min_df
if isinstance(min_df, numbers.Integral)
else min_df * n_doc)
if max_doc_count < min_doc_count:
raise ValueError(
"max_df corresponds to < documents than min_df")
X, _ = self._limit_features(X, self.vocabulary_,
max_doc_count,
min_doc_count,
self.max_features)
return X
def fit_tranform(self, raw_documents):
"""
Transform given list of raw_documents to document-term matrix in
sparse CSR format (see scipy)
"""
X = self.transform(raw_documents, new_document=True)
return X
def tokenize(self, text, custom_dict=None):
n_pad = 21
if not text:
return [''] # case of empty string
if isinstance(text, str) and sys.version_info.major == 2:
text = text.decode('utf-8')
x_char, x_type = create_feature_array(text, n_pad=n_pad)
word_end = []
# Fix thread-related issue in Keras + TensorFlow + Flask async environment
# ref: https://github.com/keras-team/keras/issues/2397
y_predict = self.model.predict([x_char, x_type])
c = [i[0] for i in y_predict.tolist()]
return list(zip(list(text),c))
def save_model(self, file_path):
"""
Save tokenizer to pickle format
"""
self.model = None # set model to None to successfully save the model
with open(file_path, 'wb') as f:
pickle.dump(self, f) | [((341, 366), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (356, 366), False, 'import os\n'), ((381, 440), 'os.path.join', 'os.path.join', (['MODULE_PATH', '"""weight"""', '"""cnn_without_ne_ab.h5"""'], {}), "(MODULE_PATH, 'weight', 'cnn_without_ne_ab.h5')\n", (393, 440), False, 'import os\n'), ((1845, 1865), 'scipy.sparse.isspmatrix_csr', 'sp.isspmatrix_csr', (['X'], {}), '(X)\n', (1862, 1865), True, 'import scipy.sparse as sp\n'), ((1882, 1926), 'numpy.bincount', 'np.bincount', (['X.indices'], {'minlength': 'X.shape[1]'}), '(X.indices, minlength=X.shape[1])\n', (1893, 1926), True, 'import numpy as np\n'), ((4399, 4418), 'numpy.dtype', 'np.dtype', (['"""float64"""'], {}), "('float64')\n", (4407, 4418), True, 'import numpy as np\n'), ((1946, 1974), 'scipy.sparse.csc_matrix', 'sp.csc_matrix', (['X'], {'copy': '(False)'}), '(X, copy=False)\n', (1959, 1974), True, 'import scipy.sparse as sp\n'), ((7020, 7035), 'numpy.cumsum', 'np.cumsum', (['mask'], {}), '(mask)\n', (7029, 7035), True, 'import numpy as np\n'), ((7362, 7376), 'numpy.where', 'np.where', (['mask'], {}), '(mask)\n', (7370, 7376), True, 'import numpy as np\n'), ((11115, 11135), 'pickle.dump', 'pickle.dump', (['self', 'f'], {}), '(self, f)\n', (11126, 11135), False, 'import pickle\n'), ((1326, 1347), 're.search', 're.search', (['word', 'text'], {}), '(word, text)\n', (1335, 1347), False, 'import re\n'), ((6932, 6946), 'numpy.where', 'np.where', (['mask'], {}), '(mask)\n', (6940, 6946), True, 'import numpy as np\n'), ((8247, 8287), 'itertools.chain.from_iterable', 'chain.from_iterable', (['tokenized_documents'], {}), '(tokenized_documents)\n', (8266, 8287), False, 'from itertools import chain\n')] |
djiajunustc/spconv | spconv/utils/__init__.py | 647927ce6b64dc51fbec4eb50c7194f8ca5007e5 | # Copyright 2021 Yan Yan
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
from cumm import tensorview as tv
from contextlib import AbstractContextManager
from spconv.cppconstants import CPU_ONLY_BUILD
from spconv.core_cc.csrc.utils.boxops import BoxOps
from spconv.core_cc.csrc.sparse.all.ops_cpu1d import Point2VoxelCPU as Point2VoxelCPU1d
from spconv.core_cc.csrc.sparse.all.ops_cpu2d import Point2VoxelCPU as Point2VoxelCPU2d
from spconv.core_cc.csrc.sparse.all.ops_cpu3d import Point2VoxelCPU as Point2VoxelCPU3d
from spconv.core_cc.csrc.sparse.all.ops_cpu4d import Point2VoxelCPU as Point2VoxelCPU4d
if not CPU_ONLY_BUILD:
from spconv.core_cc.csrc.sparse.all.ops1d import Point2Voxel as Point2VoxelGPU1d
from spconv.core_cc.csrc.sparse.all.ops2d import Point2Voxel as Point2VoxelGPU2d
from spconv.core_cc.csrc.sparse.all.ops3d import Point2Voxel as Point2VoxelGPU3d
from spconv.core_cc.csrc.sparse.all.ops4d import Point2Voxel as Point2VoxelGPU4d
class nullcontext(AbstractContextManager):
"""Context manager that does no additional processing.
Used as a stand-in for a normal context manager, when a particular
block of code is only sometimes used with a normal context manager:
cm = optional_cm if condition else nullcontext()
with cm:
# Perform operation, using optional_cm if condition is True
"""
def __init__(self, enter_result=None):
self.enter_result = enter_result
def __enter__(self):
return self.enter_result
def __exit__(self, *excinfo):
pass
def rbbox_iou(box_corners: np.ndarray, qbox_corners: np.ndarray,
standup_iou: np.ndarray, standup_thresh: float):
if not BoxOps.has_boost():
raise NotImplementedError(
"this op require spconv built with boost, download boost, export BOOST_ROOT and rebuild."
)
N = box_corners.shape[0]
K = qbox_corners.shape[0]
overlap = np.zeros((N, K), dtype=box_corners.dtype)
BoxOps.rbbox_iou(tv.from_numpy(box_corners), tv.from_numpy(qbox_corners),
tv.from_numpy(standup_iou), tv.from_numpy(overlap),
standup_thresh, False)
return overlap
def rbbox_intersection(box_corners: np.ndarray, qbox_corners: np.ndarray,
standup_iou: np.ndarray, standup_thresh: float):
if not BoxOps.has_boost():
raise NotImplementedError(
"this op require spconv built with boost, download boost, export BOOST_ROOT and rebuild."
)
N = box_corners.shape[0]
K = qbox_corners.shape[0]
overlap = np.zeros((N, K), dtype=box_corners.dtype)
BoxOps.rbbox_iou(tv.from_numpy(box_corners), tv.from_numpy(qbox_corners),
tv.from_numpy(standup_iou), tv.from_numpy(overlap),
standup_thresh, True)
return overlap
def rbbox_iou_loss(box_corners: np.ndarray, qbox_corners: np.ndarray):
if not BoxOps.has_boost():
raise NotImplementedError(
"this op require spconv built with boost, download boost, export BOOST_ROOT and rebuild."
)
N = box_corners.shape[0]
overlap = np.zeros((N, ), dtype=box_corners.dtype)
BoxOps.rbbox_iou_aligned(tv.from_numpy(box_corners),
tv.from_numpy(qbox_corners),
tv.from_numpy(overlap), False)
return overlap
def non_max_suppression_cpu(boxes: np.ndarray,
order: np.ndarray,
thresh: float,
eps: float = 0.0):
return BoxOps.non_max_suppression_cpu(tv.from_numpy(boxes),
tv.from_numpy(order), thresh, eps)
def rotate_non_max_suppression_cpu(boxes: np.ndarray, order: np.ndarray,
standup_iou: np.ndarray, thresh: float):
if not BoxOps.has_boost():
raise NotImplementedError(
"this op require spconv built with boost, download boost, export BOOST_ROOT and rebuild."
)
return BoxOps.rotate_non_max_suppression_cpu(tv.from_numpy(boxes),
tv.from_numpy(order),
tv.from_numpy(standup_iou),
thresh)
| [((2450, 2491), 'numpy.zeros', 'np.zeros', (['(N, K)'], {'dtype': 'box_corners.dtype'}), '((N, K), dtype=box_corners.dtype)\n', (2458, 2491), True, 'import numpy as np\n'), ((3106, 3147), 'numpy.zeros', 'np.zeros', (['(N, K)'], {'dtype': 'box_corners.dtype'}), '((N, K), dtype=box_corners.dtype)\n', (3114, 3147), True, 'import numpy as np\n'), ((3656, 3695), 'numpy.zeros', 'np.zeros', (['(N,)'], {'dtype': 'box_corners.dtype'}), '((N,), dtype=box_corners.dtype)\n', (3664, 3695), True, 'import numpy as np\n'), ((2210, 2228), 'spconv.core_cc.csrc.utils.boxops.BoxOps.has_boost', 'BoxOps.has_boost', ([], {}), '()\n', (2226, 2228), False, 'from spconv.core_cc.csrc.utils.boxops import BoxOps\n'), ((2514, 2540), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['box_corners'], {}), '(box_corners)\n', (2527, 2540), True, 'from cumm import tensorview as tv\n'), ((2542, 2569), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['qbox_corners'], {}), '(qbox_corners)\n', (2555, 2569), True, 'from cumm import tensorview as tv\n'), ((2592, 2618), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['standup_iou'], {}), '(standup_iou)\n', (2605, 2618), True, 'from cumm import tensorview as tv\n'), ((2620, 2642), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['overlap'], {}), '(overlap)\n', (2633, 2642), True, 'from cumm import tensorview as tv\n'), ((2866, 2884), 'spconv.core_cc.csrc.utils.boxops.BoxOps.has_boost', 'BoxOps.has_boost', ([], {}), '()\n', (2882, 2884), False, 'from spconv.core_cc.csrc.utils.boxops import BoxOps\n'), ((3170, 3196), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['box_corners'], {}), '(box_corners)\n', (3183, 3196), True, 'from cumm import tensorview as tv\n'), ((3198, 3225), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['qbox_corners'], {}), '(qbox_corners)\n', (3211, 3225), True, 'from cumm import tensorview as tv\n'), ((3248, 3274), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['standup_iou'], {}), '(standup_iou)\n', (3261, 3274), True, 'from cumm import tensorview as tv\n'), ((3276, 3298), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['overlap'], {}), '(overlap)\n', (3289, 3298), True, 'from cumm import tensorview as tv\n'), ((3446, 3464), 'spconv.core_cc.csrc.utils.boxops.BoxOps.has_boost', 'BoxOps.has_boost', ([], {}), '()\n', (3462, 3464), False, 'from spconv.core_cc.csrc.utils.boxops import BoxOps\n'), ((3727, 3753), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['box_corners'], {}), '(box_corners)\n', (3740, 3753), True, 'from cumm import tensorview as tv\n'), ((3784, 3811), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['qbox_corners'], {}), '(qbox_corners)\n', (3797, 3811), True, 'from cumm import tensorview as tv\n'), ((3842, 3864), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['overlap'], {}), '(overlap)\n', (3855, 3864), True, 'from cumm import tensorview as tv\n'), ((4120, 4140), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['boxes'], {}), '(boxes)\n', (4133, 4140), True, 'from cumm import tensorview as tv\n'), ((4184, 4204), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['order'], {}), '(order)\n', (4197, 4204), True, 'from cumm import tensorview as tv\n'), ((4381, 4399), 'spconv.core_cc.csrc.utils.boxops.BoxOps.has_boost', 'BoxOps.has_boost', ([], {}), '()\n', (4397, 4399), False, 'from spconv.core_cc.csrc.utils.boxops import BoxOps\n'), ((4597, 4617), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['boxes'], {}), '(boxes)\n', (4610, 4617), True, 'from cumm import tensorview as tv\n'), ((4668, 4688), 'cumm.tensorview.from_numpy', 
'tv.from_numpy', (['order'], {}), '(order)\n', (4681, 4688), True, 'from cumm import tensorview as tv\n'), ((4739, 4765), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['standup_iou'], {}), '(standup_iou)\n', (4752, 4765), True, 'from cumm import tensorview as tv\n')] |
justremotephone/android_external_chromium_org | build/android/gyp/lint.py | 246856e61da7acf5494076c74198f2aea894a721 | #!/usr/bin/env python
#
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Runs Android's lint tool."""
import optparse
import os
import sys
from xml.dom import minidom
from util import build_utils
_SRC_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__),
'..', '..', '..'))
def _RunLint(lint_path, config_path, processed_config_path, manifest_path,
result_path, product_dir, src_dirs, classes_dir):
def _RelativizePath(path):
"""Returns relative path to top-level src dir.
Args:
path: A path relative to cwd.
"""
return os.path.relpath(os.path.abspath(path), _SRC_ROOT)
def _ProcessConfigFile():
if not build_utils.IsTimeStale(processed_config_path, [config_path]):
return
with open(config_path, 'rb') as f:
content = f.read().replace(
'PRODUCT_DIR', _RelativizePath(product_dir))
with open(processed_config_path, 'wb') as f:
f.write(content)
def _ProcessResultFile():
with open(result_path, 'rb') as f:
content = f.read().replace(
_RelativizePath(product_dir), 'PRODUCT_DIR')
with open(result_path, 'wb') as f:
f.write(content)
def _ParseAndShowResultFile():
dom = minidom.parse(result_path)
issues = dom.getElementsByTagName('issue')
print >> sys.stderr
for issue in issues:
issue_id = issue.attributes['id'].value
severity = issue.attributes['severity'].value
message = issue.attributes['message'].value
location_elem = issue.getElementsByTagName('location')[0]
path = location_elem.attributes['file'].value
line = location_elem.getAttribute('line')
if line:
error = '%s:%s %s: %s [%s]' % (path, line, severity, message,
issue_id)
else:
# Issues in class files don't have a line number.
error = '%s %s: %s [%s]' % (path, severity, message, issue_id)
print >> sys.stderr, error
for attr in ['errorLine1', 'errorLine2']:
error_line = issue.getAttribute(attr)
if error_line:
print >> sys.stderr, error_line
return len(issues)
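  # Sketch of the result-file shape the parser above expects (attribute names
  # are taken from the reads above; the values are purely illustrative):
  #   <issues>
  #     <issue id="NewApi" severity="Error" message="Call requires API level 16">
  #       <location file="src/org/example/Foo.java" line="42"/>
  #     </issue>
  #   </issues>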
_ProcessConfigFile()
cmd = [
lint_path, '-Werror', '--exitcode', '--showall',
'--config', _RelativizePath(processed_config_path),
'--classpath', _RelativizePath(classes_dir),
'--xml', _RelativizePath(result_path),
]
for src in src_dirs:
cmd.extend(['--sources', _RelativizePath(src)])
cmd.append(_RelativizePath(os.path.join(manifest_path, os.pardir)))
if os.path.exists(result_path):
os.remove(result_path)
try:
build_utils.CheckOutput(cmd, cwd=_SRC_ROOT)
except build_utils.CalledProcessError:
# There is a problem with lint usage
if not os.path.exists(result_path):
raise
# There are actual lint issues
else:
num_issues = _ParseAndShowResultFile()
_ProcessResultFile()
msg = ('\nLint found %d new issues.\n'
' - For full explanation refer to %s\n'
' - Wanna suppress these issues?\n'
' 1. Read comment in %s\n'
' 2. Run "python %s %s"\n' %
(num_issues,
_RelativizePath(result_path),
_RelativizePath(config_path),
_RelativizePath(os.path.join(_SRC_ROOT, 'build', 'android',
'lint', 'suppress.py')),
_RelativizePath(result_path)))
print >> sys.stderr, msg
# Lint errors do not fail the build.
return 0
return 0
def main():
parser = optparse.OptionParser()
parser.add_option('--lint-path', help='Path to lint executable.')
parser.add_option('--config-path', help='Path to lint suppressions file.')
parser.add_option('--processed-config-path',
help='Path to processed lint suppressions file.')
parser.add_option('--manifest-path', help='Path to AndroidManifest.xml')
parser.add_option('--result-path', help='Path to XML lint result file.')
parser.add_option('--product-dir', help='Path to product dir.')
parser.add_option('--src-dirs', help='Directories containing java files.')
parser.add_option('--classes-dir', help='Directory containing class files.')
parser.add_option('--stamp', help='Path to touch on success.')
parser.add_option('--enable', action='store_true',
help='Run lint instead of just touching stamp.')
options, _ = parser.parse_args()
build_utils.CheckOptions(
options, parser, required=['lint_path', 'config_path',
'processed_config_path', 'manifest_path',
'result_path', 'product_dir', 'src_dirs',
'classes_dir'])
src_dirs = build_utils.ParseGypList(options.src_dirs)
rc = 0
if options.enable:
rc = _RunLint(options.lint_path, options.config_path,
options.processed_config_path,
options.manifest_path, options.result_path,
options.product_dir, src_dirs, options.classes_dir)
if options.stamp and not rc:
build_utils.Touch(options.stamp)
return rc
if __name__ == '__main__':
sys.exit(main())
| [((2693, 2720), 'os.path.exists', 'os.path.exists', (['result_path'], {}), '(result_path)\n', (2707, 2720), False, 'import os\n'), ((3718, 3741), 'optparse.OptionParser', 'optparse.OptionParser', ([], {}), '()\n', (3739, 3741), False, 'import optparse\n'), ((4602, 4789), 'util.build_utils.CheckOptions', 'build_utils.CheckOptions', (['options', 'parser'], {'required': "['lint_path', 'config_path', 'processed_config_path', 'manifest_path',\n 'result_path', 'product_dir', 'src_dirs', 'classes_dir']"}), "(options, parser, required=['lint_path',\n 'config_path', 'processed_config_path', 'manifest_path', 'result_path',\n 'product_dir', 'src_dirs', 'classes_dir'])\n", (4626, 4789), False, 'from util import build_utils\n'), ((4902, 4944), 'util.build_utils.ParseGypList', 'build_utils.ParseGypList', (['options.src_dirs'], {}), '(options.src_dirs)\n', (4926, 4944), False, 'from util import build_utils\n'), ((363, 388), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (378, 388), False, 'import os\n'), ((1369, 1395), 'xml.dom.minidom.parse', 'minidom.parse', (['result_path'], {}), '(result_path)\n', (1382, 1395), False, 'from xml.dom import minidom\n'), ((2726, 2748), 'os.remove', 'os.remove', (['result_path'], {}), '(result_path)\n', (2735, 2748), False, 'import os\n'), ((2761, 2804), 'util.build_utils.CheckOutput', 'build_utils.CheckOutput', (['cmd'], {'cwd': '_SRC_ROOT'}), '(cmd, cwd=_SRC_ROOT)\n', (2784, 2804), False, 'from util import build_utils\n'), ((5252, 5284), 'util.build_utils.Touch', 'build_utils.Touch', (['options.stamp'], {}), '(options.stamp)\n', (5269, 5284), False, 'from util import build_utils\n'), ((753, 774), 'os.path.abspath', 'os.path.abspath', (['path'], {}), '(path)\n', (768, 774), False, 'import os\n'), ((827, 888), 'util.build_utils.IsTimeStale', 'build_utils.IsTimeStale', (['processed_config_path', '[config_path]'], {}), '(processed_config_path, [config_path])\n', (850, 888), False, 'from util import build_utils\n'), ((2646, 2684), 'os.path.join', 'os.path.join', (['manifest_path', 'os.pardir'], {}), '(manifest_path, os.pardir)\n', (2658, 2684), False, 'import os\n'), ((2898, 2925), 'os.path.exists', 'os.path.exists', (['result_path'], {}), '(result_path)\n', (2912, 2925), False, 'import os\n'), ((3435, 3501), 'os.path.join', 'os.path.join', (['_SRC_ROOT', '"""build"""', '"""android"""', '"""lint"""', '"""suppress.py"""'], {}), "(_SRC_ROOT, 'build', 'android', 'lint', 'suppress.py')\n", (3447, 3501), False, 'import os\n')] |
snu5mumr1k/clif | clif/pybind11/generator.py | 3a907dd7b0986f2b3306c88503d414f4d4f963ae | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generates pybind11 bindings code."""
from typing import Dict, Generator, List, Text, Set
from clif.protos import ast_pb2
from clif.pybind11 import classes
from clif.pybind11 import enums
from clif.pybind11 import function
from clif.pybind11 import function_lib
from clif.pybind11 import type_casters
from clif.pybind11 import utils
I = utils.I
class ModuleGenerator(object):
"""A class that generates pybind11 bindings code from CLIF ast."""
def __init__(self, ast: ast_pb2.AST, module_name: str, header_path: str,
include_paths: List[str]):
self._ast = ast
self._module_name = module_name
self._header_path = header_path
self._include_paths = include_paths
self._unique_classes = {}
def generate_header(self,
ast: ast_pb2.AST) -> Generator[str, None, None]:
"""Generates pybind11 bindings code from CLIF ast."""
includes = set()
for decl in ast.decls:
includes.add(decl.cpp_file)
self._collect_class_cpp_names(decl)
yield '#include "third_party/pybind11/include/pybind11/smart_holder.h"'
for include in includes:
yield f'#include "{include}"'
yield '\n'
for cpp_name in self._unique_classes:
yield f'PYBIND11_SMART_HOLDER_TYPE_CASTERS({cpp_name})'
yield '\n'
for cpp_name, py_name in self._unique_classes.items():
yield f'// CLIF use `{cpp_name}` as {py_name}'
def generate_from(self, ast: ast_pb2.AST):
"""Generates pybind11 bindings code from CLIF ast.
Args:
ast: CLIF ast protobuf.
Yields:
Generated pybind11 bindings code.
"""
yield from self._generate_headlines()
# Find and keep track of virtual functions.
python_override_class_names = {}
for decl in ast.decls:
yield from self._generate_python_override_class_names(
python_override_class_names, decl)
self._collect_class_cpp_names(decl)
yield from type_casters.generate_from(ast, self._include_paths)
yield f'PYBIND11_MODULE({self._module_name}, m) {{'
yield from self._generate_import_modules(ast)
yield I+('m.doc() = "CLIF-generated pybind11-based module for '
f'{ast.source}";')
yield I + 'py::google::ImportStatusModule();'
for decl in ast.decls:
if decl.decltype == ast_pb2.Decl.Type.FUNC:
for s in function.generate_from('m', decl.func, None):
yield I + s
elif decl.decltype == ast_pb2.Decl.Type.CONST:
yield from self._generate_const_variables(decl.const)
elif decl.decltype == ast_pb2.Decl.Type.CLASS:
yield from classes.generate_from(
decl.class_, 'm',
python_override_class_names.get(decl.class_.name.cpp_name, ''))
elif decl.decltype == ast_pb2.Decl.Type.ENUM:
yield from enums.generate_from('m', decl.enum)
yield ''
yield '}'
def _generate_import_modules(self,
ast: ast_pb2.AST) -> Generator[str, None, None]:
for include in ast.pybind11_includes:
# Converts `full/project/path/cheader_pybind11_clif.h` to
# `full.project.path.cheader_pybind11`
names = include.split('/')
names.insert(0, 'google3')
names[-1] = names[-1][:-len('_clif.h')]
module = '.'.join(names)
yield f'py::module_::import("{module}");'
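  # For example (hypothetical path), an include of
  # 'some/project/cheader_pybind11_clif.h' yields
  # 'py::module_::import("google3.some.project.cheader_pybind11");'.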
def _generate_headlines(self):
"""Generates #includes and headers."""
includes = set()
for decl in self._ast.decls:
includes.add(decl.cpp_file)
if decl.decltype == ast_pb2.Decl.Type.CONST:
self._generate_const_variables_headers(decl.const, includes)
for include in self._ast.pybind11_includes:
includes.add(include)
for include in self._ast.usertype_includes:
includes.add(include)
yield '#include "third_party/pybind11/include/pybind11/complex.h"'
yield '#include "third_party/pybind11/include/pybind11/functional.h"'
yield '#include "third_party/pybind11/include/pybind11/operators.h"'
yield '#include "third_party/pybind11/include/pybind11/smart_holder.h"'
yield '// potential future optimization: generate this line only as needed.'
yield '#include "third_party/pybind11/include/pybind11/stl.h"'
yield ''
yield '#include "clif/pybind11/runtime.h"'
yield '#include "clif/pybind11/type_casters.h"'
yield ''
for include in includes:
yield f'#include "{include}"'
yield f'#include "{self._header_path}"'
yield ''
yield 'namespace py = pybind11;'
yield ''
def _generate_const_variables_headers(self, const_decl: ast_pb2.ConstDecl,
includes: Set[str]):
if const_decl.type.lang_type == 'complex':
includes.add('third_party/pybind11/include/pybind11/complex.h')
if (const_decl.type.lang_type.startswith('list<') or
const_decl.type.lang_type.startswith('dict<') or
const_decl.type.lang_type.startswith('set<')):
includes.add('third_party/pybind11/include/pybind11/stl.h')
def _generate_const_variables(self, const_decl: ast_pb2.ConstDecl):
"""Generates variables."""
lang_type = const_decl.type.lang_type
if (lang_type in {'int', 'float', 'double', 'bool', 'str'} or
lang_type.startswith('tuple<')):
const_def = I + (f'm.attr("{const_decl.name.native}") = '
f'{const_decl.name.cpp_name};')
else:
const_def = I + (f'm.attr("{const_decl.name.native}") = '
f'py::cast({const_decl.name.cpp_name});')
yield const_def
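  # For instance (hypothetical declaration): a const named MAX_SIZE with
  # lang_type 'int' and cpp_name '::lib::kMaxSize' is emitted as
  #   m.attr("MAX_SIZE") = ::lib::kMaxSize;
  # while other types are wrapped in py::cast(...).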
def _generate_python_override_class_names(
self, python_override_class_names: Dict[Text, Text], decl: ast_pb2.Decl,
trampoline_name_suffix: str = '_trampoline',
self_life_support: str = 'py::trampoline_self_life_support'):
"""Generates Python overrides classes dictionary for virtual functions."""
if decl.decltype == ast_pb2.Decl.Type.CLASS:
virtual_members = []
for member in decl.class_.members:
if member.decltype == ast_pb2.Decl.Type.FUNC and member.func.virtual:
virtual_members.append(member)
if not virtual_members:
return
python_override_class_name = (
f'{decl.class_.name.native}_{trampoline_name_suffix}')
assert decl.class_.name.cpp_name not in python_override_class_names
python_override_class_names[
decl.class_.name.cpp_name] = python_override_class_name
yield (f'struct {python_override_class_name} : '
f'{decl.class_.name.cpp_name}, {self_life_support} {{')
yield I + (
f'using {decl.class_.name.cpp_name}::{decl.class_.name.native};')
for member in virtual_members:
yield from self._generate_virtual_function(
decl.class_.name.native, member.func)
if python_override_class_name:
yield '};'
def _generate_virtual_function(self,
class_name: str, func_decl: ast_pb2.FuncDecl):
"""Generates virtual function overrides calling Python methods."""
return_type = ''
if func_decl.cpp_void_return:
return_type = 'void'
elif func_decl.returns:
for v in func_decl.returns:
if v.HasField('cpp_exact_type'):
return_type = v.cpp_exact_type
params = ', '.join([f'{p.name.cpp_name}' for p in func_decl.params])
params_list_with_types = []
for p in func_decl.params:
params_list_with_types.append(
f'{function_lib.generate_param_type(p)} {p.name.cpp_name}')
params_str_with_types = ', '.join(params_list_with_types)
cpp_const = ''
if func_decl.cpp_const_method:
cpp_const = ' const'
yield I + (f'{return_type} '
f'{func_decl.name.native}({params_str_with_types}) '
f'{cpp_const} override {{')
if func_decl.is_pure_virtual:
pybind11_override = 'PYBIND11_OVERRIDE_PURE'
else:
pybind11_override = 'PYBIND11_OVERRIDE'
yield I + I + f'{pybind11_override}('
yield I + I + I + f'{return_type},'
yield I + I + I + f'{class_name},'
yield I + I + I + f'{func_decl.name.native},'
yield I + I + I + f'{params}'
yield I + I + ');'
yield I + '}'
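  # Rough shape of the emitted override for a pure-virtual `double Area(double s)`
  # on a class `Shape` (hypothetical names; exact whitespace comes from I above):
  #   double Area(double s) override {
  #     PYBIND11_OVERRIDE_PURE(
  #         double,
  #         Shape,
  #         Area,
  #         s
  #     );
  #   }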
def _collect_class_cpp_names(self, decl: ast_pb2.Decl,
parent_name: str = '') -> None:
"""Adds every class name to a set. Only to be used in this context."""
if decl.decltype == ast_pb2.Decl.Type.CLASS:
full_native_name = decl.class_.name.native
if parent_name:
full_native_name = '.'.join([parent_name, decl.class_.name.native])
self._unique_classes[decl.class_.name.cpp_name] = full_native_name
for member in decl.class_.members:
self._collect_class_cpp_names(member, full_native_name)
def write_to(channel, lines):
"""Writes the generated code to files."""
for s in lines:
channel.write(s)
channel.write('\n')
| [((2500, 2552), 'clif.pybind11.type_casters.generate_from', 'type_casters.generate_from', (['ast', 'self._include_paths'], {}), '(ast, self._include_paths)\n', (2526, 2552), False, 'from clif.pybind11 import type_casters\n'), ((2904, 2948), 'clif.pybind11.function.generate_from', 'function.generate_from', (['"""m"""', 'decl.func', 'None'], {}), "('m', decl.func, None)\n", (2926, 2948), False, 'from clif.pybind11 import function\n'), ((7980, 8015), 'clif.pybind11.function_lib.generate_param_type', 'function_lib.generate_param_type', (['p'], {}), '(p)\n', (8012, 8015), False, 'from clif.pybind11 import function_lib\n'), ((3359, 3394), 'clif.pybind11.enums.generate_from', 'enums.generate_from', (['"""m"""', 'decl.enum'], {}), "('m', decl.enum)\n", (3378, 3394), False, 'from clif.pybind11 import enums\n')] |
ChristopheGraveline064/ADVENT | advent/model/discriminator.py | fc0ecd099862ed68979b2197423f1bb34df09c74 | from torch import nn
def get_fc_discriminator(num_classes, ndf=64):
return nn.Sequential(
nn.Conv2d(num_classes, ndf, kernel_size=4, stride=2, padding=1),
nn.LeakyReLU(negative_slope=0.2, inplace=True),
nn.Conv2d(ndf, ndf * 2, kernel_size=4, stride=2, padding=1),
nn.LeakyReLU(negative_slope=0.2, inplace=True),
nn.Conv2d(ndf * 2, ndf * 4, kernel_size=4, stride=2, padding=1),
nn.LeakyReLU(negative_slope=0.2, inplace=True),
nn.Conv2d(ndf * 4, ndf * 8, kernel_size=4, stride=2, padding=1),
nn.LeakyReLU(negative_slope=0.2, inplace=True),
nn.Conv2d(ndf * 8, 1, kernel_size=4, stride=2, padding=1),
)
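# Minimal usage sketch (num_classes and shapes are illustrative, not taken from
# the ADVENT training code):
#   import torch
#   d_main = get_fc_discriminator(num_classes=19)
#   scores = d_main(torch.randn(4, 19, 64, 128))  # five stride-2 convs -> (4, 1, 2, 4)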
# def get_fe_discriminator(num_classes, ndf=64): # 256-128-64-32-16
# return nn.Sequential(
# nn.Conv2d(num_classes, ndf * 4, kernel_size=4, stride=2, padding=1),
# nn.LeakyReLU(negative_slope=0.2, inplace=True),
# nn.Conv2d(ndf * 4, ndf * 2, kernel_size=4, stride=2, padding=1),
# nn.LeakyReLU(negative_slope=0.2, inplace=True),
# nn.Conv2d(ndf * 2, ndf, kernel_size=2, stride=2, padding=0),
# nn.LeakyReLU(negative_slope=0.2, inplace=True),
# # nn.Conv2d(ndf * 4, ndf * 8, kernel_size=4, stride=2, padding=1),
# # nn.LeakyReLU(negative_slope=0.2, inplace=True),
# nn.Conv2d(ndf, 1, kernel_size=2, stride=2, padding=0),
# )
# def get_fe_discriminator(num_classes, ndf=64):
# return nn.Sequential(
# nn.Conv2d(num_classes, ndf, kernel_size=4, stride=2, padding=1),
# nn.LeakyReLU(negative_slope=0.2, inplace=True),
# nn.Conv2d(ndf, ndf * 2, kernel_size=4, stride=2, padding=1),
# nn.LeakyReLU(negative_slope=0.2, inplace=True),
# nn.Conv2d(ndf * 2, ndf * 4, kernel_size=4, stride=2, padding=1),
# nn.LeakyReLU(negative_slope=0.2, inplace=True),
# # nn.Conv2d(ndf * 4, ndf * 8, kernel_size=4, stride=2, padding=1),
# # nn.LeakyReLU(negative_slope=0.2, inplace=True),
# nn.Conv2d(ndf * 4, 1, kernel_size=1, stride=1, padding=0),
# )
def get_fe_discriminator(num_classes, ndf=64): # H/8,H/8,(1024 -> 256 -> 128 -> 64 -> 1)
return nn.Sequential(
nn.Conv2d(num_classes, ndf * 4, kernel_size=1, stride=1, padding=0),
# x=self.dropout(x)
nn.LeakyReLU(negative_slope=0.2, inplace=True),
nn.Conv2d(ndf * 4, ndf * 2, kernel_size=1, stride=1, padding=0),
# x=self.dropout(x)
nn.LeakyReLU(negative_slope=0.2, inplace=True),
nn.Conv2d(ndf * 2, ndf, kernel_size=1, stride=1, padding=0),
# x=self.dropout(x)
nn.LeakyReLU(negative_slope=0.2, inplace=True),
# nn.Conv2d(ndf * 4, ndf * 8, kernel_size=4, stride=2, padding=1),
# nn.LeakyReLU(negative_slope=0.2, inplace=True),
nn.Conv2d(ndf, 1, kernel_size=1, stride=1, padding=0),
) | [((104, 167), 'torch.nn.Conv2d', 'nn.Conv2d', (['num_classes', 'ndf'], {'kernel_size': '(4)', 'stride': '(2)', 'padding': '(1)'}), '(num_classes, ndf, kernel_size=4, stride=2, padding=1)\n', (113, 167), False, 'from torch import nn\n'), ((177, 223), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {'negative_slope': '(0.2)', 'inplace': '(True)'}), '(negative_slope=0.2, inplace=True)\n', (189, 223), False, 'from torch import nn\n'), ((233, 292), 'torch.nn.Conv2d', 'nn.Conv2d', (['ndf', '(ndf * 2)'], {'kernel_size': '(4)', 'stride': '(2)', 'padding': '(1)'}), '(ndf, ndf * 2, kernel_size=4, stride=2, padding=1)\n', (242, 292), False, 'from torch import nn\n'), ((302, 348), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {'negative_slope': '(0.2)', 'inplace': '(True)'}), '(negative_slope=0.2, inplace=True)\n', (314, 348), False, 'from torch import nn\n'), ((358, 421), 'torch.nn.Conv2d', 'nn.Conv2d', (['(ndf * 2)', '(ndf * 4)'], {'kernel_size': '(4)', 'stride': '(2)', 'padding': '(1)'}), '(ndf * 2, ndf * 4, kernel_size=4, stride=2, padding=1)\n', (367, 421), False, 'from torch import nn\n'), ((431, 477), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {'negative_slope': '(0.2)', 'inplace': '(True)'}), '(negative_slope=0.2, inplace=True)\n', (443, 477), False, 'from torch import nn\n'), ((487, 550), 'torch.nn.Conv2d', 'nn.Conv2d', (['(ndf * 4)', '(ndf * 8)'], {'kernel_size': '(4)', 'stride': '(2)', 'padding': '(1)'}), '(ndf * 4, ndf * 8, kernel_size=4, stride=2, padding=1)\n', (496, 550), False, 'from torch import nn\n'), ((560, 606), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {'negative_slope': '(0.2)', 'inplace': '(True)'}), '(negative_slope=0.2, inplace=True)\n', (572, 606), False, 'from torch import nn\n'), ((616, 673), 'torch.nn.Conv2d', 'nn.Conv2d', (['(ndf * 8)', '(1)'], {'kernel_size': '(4)', 'stride': '(2)', 'padding': '(1)'}), '(ndf * 8, 1, kernel_size=4, stride=2, padding=1)\n', (625, 673), False, 'from torch import nn\n'), ((2201, 2268), 'torch.nn.Conv2d', 'nn.Conv2d', (['num_classes', '(ndf * 4)'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)'}), '(num_classes, ndf * 4, kernel_size=1, stride=1, padding=0)\n', (2210, 2268), False, 'from torch import nn\n'), ((2306, 2352), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {'negative_slope': '(0.2)', 'inplace': '(True)'}), '(negative_slope=0.2, inplace=True)\n', (2318, 2352), False, 'from torch import nn\n'), ((2362, 2425), 'torch.nn.Conv2d', 'nn.Conv2d', (['(ndf * 4)', '(ndf * 2)'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)'}), '(ndf * 4, ndf * 2, kernel_size=1, stride=1, padding=0)\n', (2371, 2425), False, 'from torch import nn\n'), ((2463, 2509), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {'negative_slope': '(0.2)', 'inplace': '(True)'}), '(negative_slope=0.2, inplace=True)\n', (2475, 2509), False, 'from torch import nn\n'), ((2519, 2578), 'torch.nn.Conv2d', 'nn.Conv2d', (['(ndf * 2)', 'ndf'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)'}), '(ndf * 2, ndf, kernel_size=1, stride=1, padding=0)\n', (2528, 2578), False, 'from torch import nn\n'), ((2616, 2662), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {'negative_slope': '(0.2)', 'inplace': '(True)'}), '(negative_slope=0.2, inplace=True)\n', (2628, 2662), False, 'from torch import nn\n'), ((2805, 2858), 'torch.nn.Conv2d', 'nn.Conv2d', (['ndf', '(1)'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)'}), '(ndf, 1, kernel_size=1, stride=1, padding=0)\n', (2814, 2858), False, 'from torch import nn\n')] |
aglotero/spyder | spyder/dependencies.py | 075d32fa359b728416de36cb0e744715fa5e3943 | # -*- coding: utf-8 -*-
#
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see spyder/__init__.py for details)
"""Module checking Spyder runtime dependencies"""
import os
# Local imports
from spyder.utils import programs
class Dependency(object):
"""Spyder's dependency
    version may start with =, >=, > or < to specify the exact requirement;
multiple conditions may be separated by ';' (e.g. '>=0.13;<1.0')"""
OK = 'OK'
NOK = 'NOK'
def __init__(self, modname, features, required_version,
installed_version=None, optional=False):
self.modname = modname
self.features = features
self.required_version = required_version
self.optional = optional
if installed_version is None:
try:
self.installed_version = programs.get_module_version(modname)
except:
# NOTE: Don't add any exception type here!
# Modules can fail to import in several ways besides
# ImportError
self.installed_version = None
else:
self.installed_version = installed_version
def check(self):
"""Check if dependency is installed"""
return programs.is_module_installed(self.modname,
self.required_version,
self.installed_version)
def get_installed_version(self):
"""Return dependency status (string)"""
if self.check():
return '%s (%s)' % (self.installed_version, self.OK)
else:
return '%s (%s)' % (self.installed_version, self.NOK)
def get_status(self):
"""Return dependency status (string)"""
if self.check():
return self.OK
else:
return self.NOK
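# Minimal usage sketch (module and version values are only examples):
#   dep = Dependency("jedi", "Code completion and linting", ">=0.9.0")
#   dep.check()                  # True if the installed jedi satisfies >=0.9.0
#   dep.get_installed_version()  # e.g. '0.10.2 (OK)' or '0.8.1 (NOK)'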
DEPENDENCIES = []
def add(modname, features, required_version, installed_version=None,
optional=False):
"""Add Spyder dependency"""
global DEPENDENCIES
for dependency in DEPENDENCIES:
if dependency.modname == modname:
raise ValueError("Dependency has already been registered: %s"\
% modname)
DEPENDENCIES += [Dependency(modname, features, required_version,
installed_version, optional)]
def check(modname):
"""Check if required dependency is installed"""
for dependency in DEPENDENCIES:
if dependency.modname == modname:
return dependency.check()
else:
raise RuntimeError("Unkwown dependency %s" % modname)
def status(deps=DEPENDENCIES, linesep=os.linesep):
"""Return a status of dependencies"""
maxwidth = 0
col1 = []
col2 = []
for dependency in deps:
title1 = dependency.modname
title1 += ' ' + dependency.required_version
col1.append(title1)
maxwidth = max([maxwidth, len(title1)])
col2.append(dependency.get_installed_version())
text = ""
for index in range(len(deps)):
text += col1[index].ljust(maxwidth) + ': ' + col2[index] + linesep
return text[:-1]
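# The returned text is a column-aligned report, e.g. (illustrative values):
#   jedi >=0.9.0: 0.10.2 (OK)
#   rope >=0.9.4: None (NOK)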
def missing_dependencies():
"""Return the status of missing dependencies (if any)"""
missing_deps = []
for dependency in DEPENDENCIES:
if not dependency.check() and not dependency.optional:
missing_deps.append(dependency)
if missing_deps:
return status(deps=missing_deps, linesep='<br>')
else:
return ""
| [((1321, 1415), 'spyder.utils.programs.is_module_installed', 'programs.is_module_installed', (['self.modname', 'self.required_version', 'self.installed_version'], {}), '(self.modname, self.required_version, self.\n installed_version)\n', (1349, 1415), False, 'from spyder.utils import programs\n'), ((896, 932), 'spyder.utils.programs.get_module_version', 'programs.get_module_version', (['modname'], {}), '(modname)\n', (923, 932), False, 'from spyder.utils import programs\n')] |
jasperhyp/Chemprop4SE | setup.py | c02b604b63b6766464db829fea0b306c67302e82 | import os
from setuptools import find_packages, setup
# Load version number
__version__ = None
src_dir = os.path.abspath(os.path.dirname(__file__))
version_file = os.path.join(src_dir, 'chemprop', '_version.py')
with open(version_file, encoding='utf-8') as fd:
exec(fd.read())
# Load README
with open('README.md', encoding='utf-8') as f:
long_description = f.read()
setup(
name='chemprop',
version=__version__,
author='Kyle Swanson, Kevin Yang, Wengong Jin, Lior Hirschfeld, Allison Tam',
author_email='[email protected]',
description='Molecular Property Prediction with Message Passing Neural Networks',
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/chemprop/chemprop',
download_url=f'https://github.com/chemprop/chemprop/v_{__version__}.tar.gz',
project_urls={
'Documentation': 'https://chemprop.readthedocs.io/en/latest/',
'Source': 'https://github.com/chemprop/chemprop',
'PyPi': 'https://pypi.org/project/chemprop/',
'Demo': 'http://chemprop.csail.mit.edu/',
},
license='MIT',
packages=find_packages(),
package_data={'chemprop': ['py.typed']},
entry_points={
'console_scripts': [
'chemprop_train=chemprop.train:chemprop_train',
'chemprop_predict=chemprop.train:chemprop_predict',
'chemprop_fingerprint=chemprop.train:chemprop_fingerprint',
'chemprop_hyperopt=chemprop.hyperparameter_optimization:chemprop_hyperopt',
'chemprop_interpret=chemprop.interpret:chemprop_interpret',
'chemprop_web=chemprop.web.run:chemprop_web',
'sklearn_train=chemprop.sklearn_train:sklearn_train',
'sklearn_predict=chemprop.sklearn_predict:sklearn_predict',
]
},
install_requires=[
'flask>=1.1.2',
'hyperopt>=0.2.3',
'matplotlib>=3.1.3',
'numpy>=1.18.1',
'pandas>=1.0.3',
'pandas-flavor>=0.2.0',
'scikit-learn>=0.22.2.post1',
'scipy>=1.4.1',
'sphinx>=3.1.2',
'tensorboardX>=2.0',
'torch>=1.5.1',
'tqdm>=4.45.0',
'typed-argument-parser>=1.6.1'
],
extras_require={
'test': [
'pytest>=6.2.2',
'parameterized>=0.8.1'
]
},
python_requires='>=3.6',
classifiers=[
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent'
],
keywords=[
'chemistry',
'machine learning',
'property prediction',
'message passing neural network',
'graph neural network'
]
)
| [((172, 220), 'os.path.join', 'os.path.join', (['src_dir', '"""chemprop"""', '"""_version.py"""'], {}), "(src_dir, 'chemprop', '_version.py')\n", (184, 220), False, 'import os\n'), ((129, 154), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (144, 154), False, 'import os\n'), ((1182, 1197), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (1195, 1197), False, 'from setuptools import find_packages, setup\n')] |
HarshCasper/mars | mars/tensor/indexing/slice.py | 4c12c968414d666c7a10f497bc22de90376b1932 | # Copyright 1999-2020 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ... import opcodes as OperandDef
from ...serialize import KeyField, ListField
from ..operands import TensorHasInput, TensorOperandMixin
from ..array_utils import get_array_module
from ..core import TensorOrder
class TensorSlice(TensorHasInput, TensorOperandMixin):
_op_type_ = OperandDef.SLICE
_input = KeyField('input')
_slices = ListField('slices')
def __init__(self, slices=None, dtype=None, sparse=False, **kw):
super().__init__(_slices=slices, _dtype=dtype, _sparse=sparse, **kw)
@property
def slices(self):
return self._slices
def _set_inputs(self, inputs):
super()._set_inputs(inputs)
self._input = self._inputs[0]
def _get_order(self, kw, i):
order = kw.pop('order', None)
if order is None:
inp = self.input
if inp is None or inp.order == TensorOrder.C_ORDER:
return TensorOrder.C_ORDER
for shape, slc in zip(inp.shape, self._slices):
if slc is None:
continue
s = slc.indices(shape)
if s[0] == 0 and s[1] == shape and s[2] == 1:
continue
else:
return TensorOrder.C_ORDER
return inp.order
return order[i] if isinstance(order, (list, tuple)) else order
@classmethod
def execute(cls, ctx, op):
inp = ctx[op.inputs[0].key]
if op.input.ndim == 0 and not hasattr(inp, 'shape'):
# scalar, but organize it into an array
inp = get_array_module(inp).array(inp)
x = inp[tuple(op.slices)]
out = op.outputs[0]
ctx[out.key] = x.astype(x.dtype, order=out.order.value, copy=False)
| [] |
leewckk/vim.configuration | ftplugin/python/python/pyflakes/pyflakes/checker.py | db3faa4343714dd3eb3b7ab19f8cd0b64a52ee57 | """
Main module.
Implement the central Checker class.
Also, it models the Bindings and Scopes.
"""
import __future__
import doctest
import os
import sys
PY2 = sys.version_info < (3, 0)
PY32 = sys.version_info < (3, 3) # Python 2.5 to 3.2
PY33 = sys.version_info < (3, 4) # Python 2.5 to 3.3
PY34 = sys.version_info < (3, 5) # Python 2.5 to 3.4
try:
sys.pypy_version_info
PYPY = True
except AttributeError:
PYPY = False
builtin_vars = dir(__import__('__builtin__' if PY2 else 'builtins'))
try:
import ast
except ImportError: # Python 2.5
import _ast as ast
if 'decorator_list' not in ast.ClassDef._fields:
# Patch the missing attribute 'decorator_list'
ast.ClassDef.decorator_list = ()
ast.FunctionDef.decorator_list = property(lambda s: s.decorators)
from pyflakes import messages
if PY2:
def getNodeType(node_class):
# workaround str.upper() which is locale-dependent
return str(unicode(node_class.__name__).upper())
else:
def getNodeType(node_class):
return node_class.__name__.upper()
# Python >= 3.3 uses ast.Try instead of (ast.TryExcept + ast.TryFinally)
if PY32:
def getAlternatives(n):
if isinstance(n, (ast.If, ast.TryFinally)):
return [n.body]
if isinstance(n, ast.TryExcept):
return [n.body + n.orelse] + [[hdl] for hdl in n.handlers]
else:
def getAlternatives(n):
if isinstance(n, ast.If):
return [n.body]
if isinstance(n, ast.Try):
return [n.body + n.orelse] + [[hdl] for hdl in n.handlers]
if PY34:
LOOP_TYPES = (ast.While, ast.For)
else:
LOOP_TYPES = (ast.While, ast.For, ast.AsyncFor)
class _FieldsOrder(dict):
"""Fix order of AST node fields."""
def _get_fields(self, node_class):
# handle iter before target, and generators before element
fields = node_class._fields
if 'iter' in fields:
key_first = 'iter'.find
elif 'generators' in fields:
key_first = 'generators'.find
else:
key_first = 'value'.find
return tuple(sorted(fields, key=key_first, reverse=True))
def __missing__(self, node_class):
self[node_class] = fields = self._get_fields(node_class)
return fields
def counter(items):
"""
Simplest required implementation of collections.Counter. Required as 2.6
does not have Counter in collections.
"""
results = {}
for item in items:
results[item] = results.get(item, 0) + 1
return results
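# e.g. counter(['a', 'b', 'a']) == {'a': 2, 'b': 1}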
def iter_child_nodes(node, omit=None, _fields_order=_FieldsOrder()):
"""
Yield all direct child nodes of *node*, that is, all fields that
are nodes and all items of fields that are lists of nodes.
"""
for name in _fields_order[node.__class__]:
if name == omit:
continue
field = getattr(node, name, None)
if isinstance(field, ast.AST):
yield field
elif isinstance(field, list):
for item in field:
yield item
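# For example, for the ast.For node of `for x in xs: pass` this yields the
# `xs` Name node (the 'iter' field) before the `x` Name node (the 'target'
# field), because _FieldsOrder sorts 'iter' to the front.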
def convert_to_value(item):
if isinstance(item, ast.Str):
return item.s
elif hasattr(ast, 'Bytes') and isinstance(item, ast.Bytes):
return item.s
elif isinstance(item, ast.Tuple):
return tuple(convert_to_value(i) for i in item.elts)
elif isinstance(item, ast.Num):
return item.n
elif isinstance(item, ast.Name):
result = VariableKey(item=item)
constants_lookup = {
'True': True,
'False': False,
'None': None,
}
return constants_lookup.get(
result.name,
result,
)
elif (not PY33) and isinstance(item, ast.NameConstant):
# None, True, False are nameconstants in python3, but names in 2
return item.value
else:
return UnhandledKeyType()
class Binding(object):
"""
Represents the binding of a value to a name.
The checker uses this to keep track of which names have been bound and
which names have not. See L{Assignment} for a special type of binding that
is checked with stricter rules.
@ivar used: pair of (L{Scope}, node) indicating the scope and
the node that this binding was last used.
"""
def __init__(self, name, source):
self.name = name
self.source = source
self.used = False
def __str__(self):
return self.name
def __repr__(self):
return '<%s object %r from line %r at 0x%x>' % (self.__class__.__name__,
self.name,
self.source.lineno,
id(self))
def redefines(self, other):
return isinstance(other, Definition) and self.name == other.name
class Definition(Binding):
"""
A binding that defines a function or a class.
"""
class UnhandledKeyType(object):
"""
A dictionary key of a type that we cannot or do not check for duplicates.
"""
class VariableKey(object):
"""
A dictionary key which is a variable.
@ivar item: The variable AST object.
"""
def __init__(self, item):
self.name = item.id
def __eq__(self, compare):
return (
compare.__class__ == self.__class__
and compare.name == self.name
)
def __hash__(self):
return hash(self.name)
class Importation(Definition):
"""
A binding created by an import statement.
@ivar fullName: The complete name given to the import statement,
possibly including multiple dotted components.
@type fullName: C{str}
"""
def __init__(self, name, source, full_name=None):
self.fullName = full_name or name
self.redefined = []
super(Importation, self).__init__(name, source)
def redefines(self, other):
if isinstance(other, SubmoduleImportation):
# See note in SubmoduleImportation about RedefinedWhileUnused
return self.fullName == other.fullName
return isinstance(other, Definition) and self.name == other.name
def _has_alias(self):
"""Return whether importation needs an as clause."""
return not self.fullName.split('.')[-1] == self.name
@property
def source_statement(self):
"""Generate a source statement equivalent to the import."""
if self._has_alias():
return 'import %s as %s' % (self.fullName, self.name)
else:
return 'import %s' % self.fullName
def __str__(self):
"""Return import full name with alias."""
if self._has_alias():
return self.fullName + ' as ' + self.name
else:
return self.fullName
class SubmoduleImportation(Importation):
"""
A binding created by a submodule import statement.
A submodule import is a special case where the root module is implicitly
imported, without an 'as' clause, and the submodule is also imported.
Python does not restrict which attributes of the root module may be used.
This class is only used when the submodule import is without an 'as' clause.
pyflakes handles this case by registering the root module name in the scope,
allowing any attribute of the root module to be accessed.
RedefinedWhileUnused is suppressed in `redefines` unless the submodule
name is also the same, to avoid false positives.
"""
def __init__(self, name, source):
# A dot should only appear in the name when it is a submodule import
assert '.' in name and (not source or isinstance(source, ast.Import))
package_name = name.split('.')[0]
super(SubmoduleImportation, self).__init__(package_name, source)
self.fullName = name
def redefines(self, other):
if isinstance(other, Importation):
return self.fullName == other.fullName
return super(SubmoduleImportation, self).redefines(other)
def __str__(self):
return self.fullName
@property
def source_statement(self):
return 'import ' + self.fullName
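# For example, `import os.path` produces a SubmoduleImportation bound under the
# name 'os' (the root package), while str() and source_statement still report
# the full 'os.path' form.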
class ImportationFrom(Importation):
def __init__(self, name, source, module, real_name=None):
self.module = module
self.real_name = real_name or name
if module.endswith('.'):
full_name = module + self.real_name
else:
full_name = module + '.' + self.real_name
super(ImportationFrom, self).__init__(name, source, full_name)
def __str__(self):
"""Return import full name with alias."""
if self.real_name != self.name:
return self.fullName + ' as ' + self.name
else:
return self.fullName
@property
def source_statement(self):
if self.real_name != self.name:
return 'from %s import %s as %s' % (self.module,
self.real_name,
self.name)
else:
return 'from %s import %s' % (self.module, self.name)
class StarImportation(Importation):
"""A binding created by a 'from x import *' statement."""
def __init__(self, name, source):
super(StarImportation, self).__init__('*', source)
# Each star importation needs a unique name, and
# may not be the module name otherwise it will be deemed imported
self.name = name + '.*'
self.fullName = name
@property
def source_statement(self):
return 'from ' + self.fullName + ' import *'
def __str__(self):
# When the module ends with a ., avoid the ambiguous '..*'
if self.fullName.endswith('.'):
return self.source_statement
else:
return self.name
class FutureImportation(ImportationFrom):
"""
A binding created by a from `__future__` import statement.
`__future__` imports are implicitly used.
"""
def __init__(self, name, source, scope):
super(FutureImportation, self).__init__(name, source, '__future__')
self.used = (scope, source)
class Argument(Binding):
"""
Represents binding a name as an argument.
"""
class Assignment(Binding):
"""
Represents binding a name with an explicit assignment.
The checker will raise warnings for any Assignment that isn't used. Also,
the checker does not consider assignments in tuple/list unpacking to be
Assignments, rather it treats them as simple Bindings.
"""
class FunctionDefinition(Definition):
pass
class ClassDefinition(Definition):
pass
class ExportBinding(Binding):
"""
A binding created by an C{__all__} assignment. If the names in the list
can be determined statically, they will be treated as names for export and
additional checking applied to them.
The only C{__all__} assignment that can be recognized is one which takes
the value of a literal list containing literal strings. For example::
__all__ = ["foo", "bar"]
Names which are imported and not otherwise used but appear in the value of
C{__all__} will not have an unused import warning reported for them.
"""
def __init__(self, name, source, scope):
if '__all__' in scope and isinstance(source, ast.AugAssign):
self.names = list(scope['__all__'].names)
else:
self.names = []
if isinstance(source.value, (ast.List, ast.Tuple)):
for node in source.value.elts:
if isinstance(node, ast.Str):
self.names.append(node.s)
super(ExportBinding, self).__init__(name, source)
class Scope(dict):
importStarred = False # set to True when import * is found
def __repr__(self):
scope_cls = self.__class__.__name__
return '<%s at 0x%x %s>' % (scope_cls, id(self), dict.__repr__(self))
class ClassScope(Scope):
pass
class FunctionScope(Scope):
"""
I represent a name scope for a function.
@ivar globals: Names declared 'global' in this function.
"""
usesLocals = False
alwaysUsed = set(['__tracebackhide__',
'__traceback_info__', '__traceback_supplement__'])
def __init__(self):
super(FunctionScope, self).__init__()
# Simplify: manage the special locals as globals
self.globals = self.alwaysUsed.copy()
self.returnValue = None # First non-empty return
self.isGenerator = False # Detect a generator
def unusedAssignments(self):
"""
Return a generator for the assignments which have not been used.
"""
for name, binding in self.items():
if (not binding.used and name not in self.globals
and not self.usesLocals
and isinstance(binding, Assignment)):
yield name, binding
class GeneratorScope(Scope):
pass
class ModuleScope(Scope):
"""Scope for a module."""
_futures_allowed = True
class DoctestScope(ModuleScope):
"""Scope for a doctest."""
# Globally defined names which are not attributes of the builtins module, or
# are only present on some platforms.
_MAGIC_GLOBALS = ['__file__', '__builtins__', 'WindowsError']
def getNodeName(node):
# Returns node.id, or node.name, or None
if hasattr(node, 'id'): # One of the many nodes with an id
return node.id
if hasattr(node, 'name'): # an ExceptHandler node
return node.name
class Checker(object):
"""
I check the cleanliness and sanity of Python code.
@ivar _deferredFunctions: Tracking list used by L{deferFunction}. Elements
of the list are two-tuples. The first element is the callable passed
to L{deferFunction}. The second element is a copy of the scope stack
at the time L{deferFunction} was called.
@ivar _deferredAssignments: Similar to C{_deferredFunctions}, but for
callables which are deferred assignment checks.
"""
nodeDepth = 0
offset = None
traceTree = False
builtIns = set(builtin_vars).union(_MAGIC_GLOBALS)
_customBuiltIns = os.environ.get('PYFLAKES_BUILTINS')
if _customBuiltIns:
builtIns.update(_customBuiltIns.split(','))
del _customBuiltIns
def __init__(self, tree, filename='(none)', builtins=None,
withDoctest='PYFLAKES_DOCTEST' in os.environ):
self._nodeHandlers = {}
self._deferredFunctions = []
self._deferredAssignments = []
self.deadScopes = []
self.messages = []
self.filename = filename
if builtins:
self.builtIns = self.builtIns.union(builtins)
self.withDoctest = withDoctest
self.scopeStack = [ModuleScope()]
self.exceptHandlers = [()]
self.root = tree
self.handleChildren(tree)
self.runDeferred(self._deferredFunctions)
# Set _deferredFunctions to None so that deferFunction will fail
# noisily if called after we've run through the deferred functions.
self._deferredFunctions = None
self.runDeferred(self._deferredAssignments)
# Set _deferredAssignments to None so that deferAssignment will fail
# noisily if called after we've run through the deferred assignments.
self._deferredAssignments = None
del self.scopeStack[1:]
self.popScope()
self.checkDeadScopes()
def deferFunction(self, callable):
"""
Schedule a function handler to be called just before completion.
This is used for handling function bodies, which must be deferred
because code later in the file might modify the global scope. When
`callable` is called, the scope at the time this is called will be
restored, however it will contain any new bindings added to it.
"""
self._deferredFunctions.append((callable, self.scopeStack[:], self.offset))
def deferAssignment(self, callable):
"""
Schedule an assignment handler to be called just after deferred
function handlers.
"""
self._deferredAssignments.append((callable, self.scopeStack[:], self.offset))
def runDeferred(self, deferred):
"""
Run the callables in C{deferred} using their associated scope stack.
"""
for handler, scope, offset in deferred:
self.scopeStack = scope
self.offset = offset
handler()
def _in_doctest(self):
return (len(self.scopeStack) >= 2 and
isinstance(self.scopeStack[1], DoctestScope))
@property
def futuresAllowed(self):
if not all(isinstance(scope, ModuleScope)
for scope in self.scopeStack):
return False
return self.scope._futures_allowed
@futuresAllowed.setter
def futuresAllowed(self, value):
assert value is False
if isinstance(self.scope, ModuleScope):
self.scope._futures_allowed = False
@property
def scope(self):
return self.scopeStack[-1]
def popScope(self):
self.deadScopes.append(self.scopeStack.pop())
def checkDeadScopes(self):
"""
Look at scopes which have been fully examined and report names in them
which were imported but unused.
"""
for scope in self.deadScopes:
# imports in classes are public members
if isinstance(scope, ClassScope):
continue
all_binding = scope.get('__all__')
if all_binding and not isinstance(all_binding, ExportBinding):
all_binding = None
if all_binding:
all_names = set(all_binding.names)
undefined = all_names.difference(scope)
else:
all_names = undefined = []
if undefined:
if not scope.importStarred and \
os.path.basename(self.filename) != '__init__.py':
# Look for possible mistakes in the export list
for name in undefined:
self.report(messages.UndefinedExport,
scope['__all__'].source, name)
# mark all import '*' as used by the undefined in __all__
if scope.importStarred:
for binding in scope.values():
if isinstance(binding, StarImportation):
binding.used = all_binding
# Look for imported names that aren't used.
for value in scope.values():
if isinstance(value, Importation):
used = value.used or value.name in all_names
if not used:
messg = messages.UnusedImport
self.report(messg, value.source, str(value))
for node in value.redefined:
if isinstance(self.getParent(node), ast.For):
messg = messages.ImportShadowedByLoopVar
elif used:
continue
else:
messg = messages.RedefinedWhileUnused
self.report(messg, node, value.name, value.source)
def pushScope(self, scopeClass=FunctionScope):
self.scopeStack.append(scopeClass())
def report(self, messageClass, *args, **kwargs):
self.messages.append(messageClass(self.filename, *args, **kwargs))
def getParent(self, node):
# Lookup the first parent which is not Tuple, List or Starred
while True:
node = node.parent
if not hasattr(node, 'elts') and not hasattr(node, 'ctx'):
return node
def getCommonAncestor(self, lnode, rnode, stop):
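        # Walk up the parent chain of both nodes until a shared ancestor is
        # found; returns None when `stop` is reached or a node has no parent.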
if stop in (lnode, rnode) or not (hasattr(lnode, 'parent') and
hasattr(rnode, 'parent')):
return None
if lnode is rnode:
return lnode
if (lnode.depth > rnode.depth):
return self.getCommonAncestor(lnode.parent, rnode, stop)
if (lnode.depth < rnode.depth):
return self.getCommonAncestor(lnode, rnode.parent, stop)
return self.getCommonAncestor(lnode.parent, rnode.parent, stop)
def descendantOf(self, node, ancestors, stop):
for a in ancestors:
if self.getCommonAncestor(node, a, stop):
return True
return False
def differentForks(self, lnode, rnode):
"""True, if lnode and rnode are located on different forks of IF/TRY"""
ancestor = self.getCommonAncestor(lnode, rnode, self.root)
parts = getAlternatives(ancestor)
if parts:
for items in parts:
if self.descendantOf(lnode, items, ancestor) ^ \
self.descendantOf(rnode, items, ancestor):
return True
return False
def addBinding(self, node, value):
"""
Called when a binding is altered.
- `node` is the statement responsible for the change
- `value` is the new value, a Binding instance
"""
# assert value.source in (node, node.parent):
for scope in self.scopeStack[::-1]:
if value.name in scope:
break
existing = scope.get(value.name)
if existing and not self.differentForks(node, existing.source):
parent_stmt = self.getParent(value.source)
if isinstance(existing, Importation) and isinstance(parent_stmt, ast.For):
self.report(messages.ImportShadowedByLoopVar,
node, value.name, existing.source)
elif scope is self.scope:
if (isinstance(parent_stmt, ast.comprehension) and
not isinstance(self.getParent(existing.source),
(ast.For, ast.comprehension))):
self.report(messages.RedefinedInListComp,
node, value.name, existing.source)
elif not existing.used and value.redefines(existing):
self.report(messages.RedefinedWhileUnused,
node, value.name, existing.source)
elif isinstance(existing, Importation) and value.redefines(existing):
existing.redefined.append(node)
if value.name in self.scope:
# then assume the rebound name is used as a global or within a loop
value.used = self.scope[value.name].used
self.scope[value.name] = value
def getNodeHandler(self, node_class):
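        # Look up (and memoize) the visitor method for this AST node class,
        # e.g. ast.Name -> self.NAME.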
try:
return self._nodeHandlers[node_class]
except KeyError:
nodeType = getNodeType(node_class)
self._nodeHandlers[node_class] = handler = getattr(self, nodeType)
return handler
def handleNodeLoad(self, node):
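        # A name is being read: mark the binding that provides it as used,
        # or report UndefinedName when no binding (or builtin) is found.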
name = getNodeName(node)
if not name:
return
in_generators = None
importStarred = None
# try enclosing function scopes and global scope
for scope in self.scopeStack[-1::-1]:
# only generators used in a class scope can access the names
# of the class. this is skipped during the first iteration
if in_generators is False and isinstance(scope, ClassScope):
continue
try:
scope[name].used = (self.scope, node)
except KeyError:
pass
else:
return
importStarred = importStarred or scope.importStarred
if in_generators is not False:
in_generators = isinstance(scope, GeneratorScope)
# look in the built-ins
if name in self.builtIns:
return
if importStarred:
from_list = []
for scope in self.scopeStack[-1::-1]:
for binding in scope.values():
if isinstance(binding, StarImportation):
# mark '*' imports as used for each scope
binding.used = (self.scope, node)
from_list.append(binding.fullName)
# report * usage, with a list of possible sources
from_list = ', '.join(sorted(from_list))
self.report(messages.ImportStarUsage, node, name, from_list)
return
if name == '__path__' and os.path.basename(self.filename) == '__init__.py':
# the special name __path__ is valid only in packages
return
# protected with a NameError handler?
if 'NameError' not in self.exceptHandlers[-1]:
self.report(messages.UndefinedName, node, name)
def handleNodeStore(self, node):
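        # A name is being assigned: warn when it was already read from an
        # enclosing scope (a likely mistake), then create the proper binding.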
name = getNodeName(node)
if not name:
return
# if the name hasn't already been defined in the current scope
if isinstance(self.scope, FunctionScope) and name not in self.scope:
# for each function or module scope above us
for scope in self.scopeStack[:-1]:
if not isinstance(scope, (FunctionScope, ModuleScope)):
continue
# if the name was defined in that scope, and the name has
# been accessed already in the current scope, and hasn't
# been declared global
used = name in scope and scope[name].used
if used and used[0] is self.scope and name not in self.scope.globals:
# then it's probably a mistake
self.report(messages.UndefinedLocal,
scope[name].used[1], name, scope[name].source)
break
parent_stmt = self.getParent(node)
if isinstance(parent_stmt, (ast.For, ast.comprehension)) or (
parent_stmt != node.parent and
not self.isLiteralTupleUnpacking(parent_stmt)):
binding = Binding(name, node)
elif name == '__all__' and isinstance(self.scope, ModuleScope):
binding = ExportBinding(name, node.parent, self.scope)
else:
binding = Assignment(name, node)
self.addBinding(node, binding)
def handleNodeDelete(self, node):
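        # A name is being deleted: unbind it from the current scope unless the
        # `del` sits on a conditional branch that may never execute.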
def on_conditional_branch():
"""
Return `True` if node is part of a conditional body.
"""
current = getattr(node, 'parent', None)
while current:
if isinstance(current, (ast.If, ast.While, ast.IfExp)):
return True
current = getattr(current, 'parent', None)
return False
name = getNodeName(node)
if not name:
return
if on_conditional_branch():
# We cannot predict if this conditional branch is going to
# be executed.
return
if isinstance(self.scope, FunctionScope) and name in self.scope.globals:
self.scope.globals.remove(name)
else:
try:
del self.scope[name]
except KeyError:
self.report(messages.UndefinedName, node, name)
def handleChildren(self, tree, omit=None):
for node in iter_child_nodes(tree, omit=omit):
self.handleNode(node, tree)
def isLiteralTupleUnpacking(self, node):
if isinstance(node, ast.Assign):
for child in node.targets + [node.value]:
if not hasattr(child, 'elts'):
return False
return True
def isDocstring(self, node):
"""
Determine if the given node is a docstring, as long as it is at the
correct place in the node tree.
"""
return isinstance(node, ast.Str) or (isinstance(node, ast.Expr) and
isinstance(node.value, ast.Str))
def getDocstring(self, node):
if isinstance(node, ast.Expr):
node = node.value
if not isinstance(node, ast.Str):
return (None, None)
if PYPY:
doctest_lineno = node.lineno - 1
else:
# Computed incorrectly if the docstring has backslash
doctest_lineno = node.lineno - node.s.count('\n') - 1
return (node.s, doctest_lineno)
def handleNode(self, node, parent):
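        # Central dispatch: adjust for any doctest offset, record the node's
        # depth and parent, then call the visitor method for its class.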
if node is None:
return
if self.offset and getattr(node, 'lineno', None) is not None:
node.lineno += self.offset[0]
node.col_offset += self.offset[1]
if self.traceTree:
print(' ' * self.nodeDepth + node.__class__.__name__)
if self.futuresAllowed and not (isinstance(node, ast.ImportFrom) or
self.isDocstring(node)):
self.futuresAllowed = False
self.nodeDepth += 1
node.depth = self.nodeDepth
node.parent = parent
try:
handler = self.getNodeHandler(node.__class__)
handler(node)
finally:
self.nodeDepth -= 1
if self.traceTree:
print(' ' * self.nodeDepth + 'end ' + node.__class__.__name__)
_getDoctestExamples = doctest.DocTestParser().get_examples
def handleDoctests(self, node):
try:
if hasattr(node, 'docstring'):
docstring = node.docstring
# This is just a reasonable guess. In Python 3.7, docstrings no
# longer have line numbers associated with them. This will be
# incorrect if there are empty lines between the beginning
# of the function and the docstring.
node_lineno = node.lineno
if hasattr(node, 'args'):
node_lineno = max([node_lineno] +
[arg.lineno for arg in node.args.args])
else:
(docstring, node_lineno) = self.getDocstring(node.body[0])
examples = docstring and self._getDoctestExamples(docstring)
except (ValueError, IndexError):
# e.g. line 6 of the docstring for <string> has inconsistent
# leading whitespace: ...
return
if not examples:
return
# Place doctest in module scope
saved_stack = self.scopeStack
self.scopeStack = [self.scopeStack[0]]
node_offset = self.offset or (0, 0)
self.pushScope(DoctestScope)
underscore_in_builtins = '_' in self.builtIns
if not underscore_in_builtins:
self.builtIns.add('_')
for example in examples:
try:
tree = compile(example.source, "<doctest>", "exec", ast.PyCF_ONLY_AST)
except SyntaxError:
e = sys.exc_info()[1]
if PYPY:
e.offset += 1
position = (node_lineno + example.lineno + e.lineno,
example.indent + 4 + (e.offset or 0))
self.report(messages.DoctestSyntaxError, node, position)
else:
self.offset = (node_offset[0] + node_lineno + example.lineno,
node_offset[1] + example.indent + 4)
self.handleChildren(tree)
self.offset = node_offset
if not underscore_in_builtins:
self.builtIns.remove('_')
self.popScope()
self.scopeStack = saved_stack
def ignore(self, node):
pass
# "stmt" type nodes
DELETE = PRINT = FOR = ASYNCFOR = WHILE = IF = WITH = WITHITEM = \
ASYNCWITH = ASYNCWITHITEM = RAISE = TRYFINALLY = EXEC = \
EXPR = ASSIGN = handleChildren
PASS = ignore
# "expr" type nodes
BOOLOP = BINOP = UNARYOP = IFEXP = SET = \
COMPARE = CALL = REPR = ATTRIBUTE = SUBSCRIPT = \
STARRED = NAMECONSTANT = handleChildren
NUM = STR = BYTES = ELLIPSIS = ignore
# "slice" type nodes
SLICE = EXTSLICE = INDEX = handleChildren
# expression contexts are node instances too, though being constants
LOAD = STORE = DEL = AUGLOAD = AUGSTORE = PARAM = ignore
# same for operators
AND = OR = ADD = SUB = MULT = DIV = MOD = POW = LSHIFT = RSHIFT = \
BITOR = BITXOR = BITAND = FLOORDIV = INVERT = NOT = UADD = USUB = \
EQ = NOTEQ = LT = LTE = GT = GTE = IS = ISNOT = IN = NOTIN = \
MATMULT = ignore
# additional node types
COMPREHENSION = KEYWORD = FORMATTEDVALUE = JOINEDSTR = handleChildren
def DICT(self, node):
# Complain if there are duplicate keys with different values
# If they have the same value it's not going to cause potentially
# unexpected behaviour so we'll not complain.
keys = [
convert_to_value(key) for key in node.keys
]
key_counts = counter(keys)
duplicate_keys = [
key for key, count in key_counts.items()
if count > 1
]
for key in duplicate_keys:
key_indices = [i for i, i_key in enumerate(keys) if i_key == key]
values = counter(
convert_to_value(node.values[index])
for index in key_indices
)
if any(count == 1 for value, count in values.items()):
for key_index in key_indices:
key_node = node.keys[key_index]
if isinstance(key, VariableKey):
self.report(messages.MultiValueRepeatedKeyVariable,
key_node,
key.name)
else:
self.report(
messages.MultiValueRepeatedKeyLiteral,
key_node,
key,
)
self.handleChildren(node)
def ASSERT(self, node):
if isinstance(node.test, ast.Tuple) and node.test.elts != []:
self.report(messages.AssertTuple, node)
self.handleChildren(node)
def GLOBAL(self, node):
"""
Keep track of globals declarations.
"""
global_scope_index = 1 if self._in_doctest() else 0
global_scope = self.scopeStack[global_scope_index]
# Ignore 'global' statement in global scope.
if self.scope is not global_scope:
# One 'global' statement can bind multiple (comma-delimited) names.
for node_name in node.names:
node_value = Assignment(node_name, node)
# Remove UndefinedName messages already reported for this name.
# TODO: if the global is not used in this scope, it does not
# become a globally defined name. See test_unused_global.
self.messages = [
m for m in self.messages if not
isinstance(m, messages.UndefinedName) or
m.message_args[0] != node_name]
# Bind name to global scope if it doesn't exist already.
global_scope.setdefault(node_name, node_value)
# Bind name to non-global scopes, but as already "used".
node_value.used = (global_scope, node)
for scope in self.scopeStack[global_scope_index + 1:]:
scope[node_name] = node_value
NONLOCAL = GLOBAL
def GENERATOREXP(self, node):
self.pushScope(GeneratorScope)
self.handleChildren(node)
self.popScope()
LISTCOMP = handleChildren if PY2 else GENERATOREXP
DICTCOMP = SETCOMP = GENERATOREXP
def NAME(self, node):
"""
Handle occurrence of Name (which can be a load/store/delete access.)
"""
# Locate the name in locals / function / globals scopes.
if isinstance(node.ctx, (ast.Load, ast.AugLoad)):
self.handleNodeLoad(node)
if (node.id == 'locals' and isinstance(self.scope, FunctionScope)
and isinstance(node.parent, ast.Call)):
# we are doing locals() call in current scope
self.scope.usesLocals = True
elif isinstance(node.ctx, (ast.Store, ast.AugStore)):
self.handleNodeStore(node)
elif isinstance(node.ctx, ast.Del):
self.handleNodeDelete(node)
else:
# must be a Param context -- this only happens for names in function
# arguments, but these aren't dispatched through here
raise RuntimeError("Got impossible expression context: %r" % (node.ctx,))
def CONTINUE(self, node):
# Walk the tree up until we see a loop (OK), a function or class
# definition (not OK), for 'continue', a finally block (not OK), or
# the top module scope (not OK)
n = node
while hasattr(n, 'parent'):
n, n_child = n.parent, n
if isinstance(n, LOOP_TYPES):
# Doesn't apply unless it's in the loop itself
if n_child not in n.orelse:
return
if isinstance(n, (ast.FunctionDef, ast.ClassDef)):
break
# Handle Try/TryFinally difference in Python < and >= 3.3
if hasattr(n, 'finalbody') and isinstance(node, ast.Continue):
if n_child in n.finalbody:
self.report(messages.ContinueInFinally, node)
return
if isinstance(node, ast.Continue):
self.report(messages.ContinueOutsideLoop, node)
else: # ast.Break
self.report(messages.BreakOutsideLoop, node)
BREAK = CONTINUE
def RETURN(self, node):
if isinstance(self.scope, (ClassScope, ModuleScope)):
self.report(messages.ReturnOutsideFunction, node)
return
if (
node.value and
hasattr(self.scope, 'returnValue') and
not self.scope.returnValue
):
self.scope.returnValue = node.value
self.handleNode(node.value, node)
def YIELD(self, node):
if isinstance(self.scope, (ClassScope, ModuleScope)):
self.report(messages.YieldOutsideFunction, node)
return
self.scope.isGenerator = True
self.handleNode(node.value, node)
AWAIT = YIELDFROM = YIELD
def FUNCTIONDEF(self, node):
for deco in node.decorator_list:
self.handleNode(deco, node)
self.LAMBDA(node)
self.addBinding(node, FunctionDefinition(node.name, node))
# doctest does not process doctest within a doctest,
# or in nested functions.
if (self.withDoctest and
not self._in_doctest() and
not isinstance(self.scope, FunctionScope)):
self.deferFunction(lambda: self.handleDoctests(node))
ASYNCFUNCTIONDEF = FUNCTIONDEF
def LAMBDA(self, node):
args = []
annotations = []
if PY2:
def addArgs(arglist):
for arg in arglist:
if isinstance(arg, ast.Tuple):
addArgs(arg.elts)
else:
args.append(arg.id)
addArgs(node.args.args)
defaults = node.args.defaults
else:
for arg in node.args.args + node.args.kwonlyargs:
args.append(arg.arg)
annotations.append(arg.annotation)
defaults = node.args.defaults + node.args.kw_defaults
# Only for Python3 FunctionDefs
is_py3_func = hasattr(node, 'returns')
for arg_name in ('vararg', 'kwarg'):
wildcard = getattr(node.args, arg_name)
if not wildcard:
continue
args.append(wildcard if PY33 else wildcard.arg)
if is_py3_func:
if PY33: # Python 2.5 to 3.3
argannotation = arg_name + 'annotation'
annotations.append(getattr(node.args, argannotation))
else: # Python >= 3.4
annotations.append(wildcard.annotation)
if is_py3_func:
annotations.append(node.returns)
if len(set(args)) < len(args):
for (idx, arg) in enumerate(args):
if arg in args[:idx]:
self.report(messages.DuplicateArgument, node, arg)
for child in annotations + defaults:
if child:
self.handleNode(child, node)
def runFunction():
self.pushScope()
for name in args:
self.addBinding(node, Argument(name, node))
if isinstance(node.body, list):
# case for FunctionDefs
for stmt in node.body:
self.handleNode(stmt, node)
else:
# case for Lambdas
self.handleNode(node.body, node)
def checkUnusedAssignments():
"""
Check to see if any assignments have not been used.
"""
for name, binding in self.scope.unusedAssignments():
self.report(messages.UnusedVariable, binding.source, name)
self.deferAssignment(checkUnusedAssignments)
if PY32:
def checkReturnWithArgumentInsideGenerator():
"""
Check to see if there is any return statement with
arguments but the function is a generator.
"""
if self.scope.isGenerator and self.scope.returnValue:
self.report(messages.ReturnWithArgsInsideGenerator,
self.scope.returnValue)
self.deferAssignment(checkReturnWithArgumentInsideGenerator)
self.popScope()
self.deferFunction(runFunction)
def CLASSDEF(self, node):
"""
Check names used in a class definition, including its decorators, base
classes, and the body of its definition. Additionally, add its name to
the current scope.
"""
for deco in node.decorator_list:
self.handleNode(deco, node)
for baseNode in node.bases:
self.handleNode(baseNode, node)
if not PY2:
for keywordNode in node.keywords:
self.handleNode(keywordNode, node)
self.pushScope(ClassScope)
        # doctest does not process doctests within a doctest;
        # classes within classes are processed.
if (self.withDoctest and
not self._in_doctest() and
not isinstance(self.scope, FunctionScope)):
self.deferFunction(lambda: self.handleDoctests(node))
for stmt in node.body:
self.handleNode(stmt, node)
self.popScope()
self.addBinding(node, ClassDefinition(node.name, node))
def AUGASSIGN(self, node):
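        # Augmented assignment (e.g. `x += 1`) both reads and rebinds the
        # target, so handle the load before the store.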
self.handleNodeLoad(node.target)
self.handleNode(node.value, node)
self.handleNode(node.target, node)
def TUPLE(self, node):
if not PY2 and isinstance(node.ctx, ast.Store):
# Python 3 advanced tuple unpacking: a, *b, c = d.
# Only one starred expression is allowed, and no more than 1<<8
            # assignments are allowed before a starred expression. There is
# also a limit of 1<<24 expressions after the starred expression,
# which is impossible to test due to memory restrictions, but we
# add it here anyway
has_starred = False
star_loc = -1
for i, n in enumerate(node.elts):
if isinstance(n, ast.Starred):
if has_starred:
self.report(messages.TwoStarredExpressions, node)
# The SyntaxError doesn't distinguish two from more
# than two.
break
has_starred = True
star_loc = i
if star_loc >= 1 << 8 or len(node.elts) - star_loc - 1 >= 1 << 24:
self.report(messages.TooManyExpressionsInStarredAssignment, node)
self.handleChildren(node)
LIST = TUPLE
def IMPORT(self, node):
for alias in node.names:
if '.' in alias.name and not alias.asname:
importation = SubmoduleImportation(alias.name, node)
else:
name = alias.asname or alias.name
importation = Importation(name, node, alias.name)
self.addBinding(node, importation)
def IMPORTFROM(self, node):
if node.module == '__future__':
if not self.futuresAllowed:
self.report(messages.LateFutureImport,
node, [n.name for n in node.names])
else:
self.futuresAllowed = False
module = ('.' * node.level) + (node.module or '')
for alias in node.names:
name = alias.asname or alias.name
if node.module == '__future__':
importation = FutureImportation(name, node, self.scope)
if alias.name not in __future__.all_feature_names:
self.report(messages.FutureFeatureNotDefined,
node, alias.name)
elif alias.name == '*':
                # In Python 2 only, a local 'import *' is merely a SyntaxWarning
if not PY2 and not isinstance(self.scope, ModuleScope):
self.report(messages.ImportStarNotPermitted,
node, module)
continue
self.scope.importStarred = True
self.report(messages.ImportStarUsed, node, module)
importation = StarImportation(module, node)
else:
importation = ImportationFrom(name, node,
module, alias.name)
self.addBinding(node, importation)
def TRY(self, node):
handler_names = []
# List the exception handlers
for i, handler in enumerate(node.handlers):
if isinstance(handler.type, ast.Tuple):
for exc_type in handler.type.elts:
handler_names.append(getNodeName(exc_type))
elif handler.type:
handler_names.append(getNodeName(handler.type))
if handler.type is None and i < len(node.handlers) - 1:
self.report(messages.DefaultExceptNotLast, handler)
# Memorize the except handlers and process the body
self.exceptHandlers.append(handler_names)
for child in node.body:
self.handleNode(child, node)
self.exceptHandlers.pop()
# Process the other nodes: "except:", "else:", "finally:"
self.handleChildren(node, omit='body')
TRYEXCEPT = TRY
def EXCEPTHANDLER(self, node):
if PY2 or node.name is None:
self.handleChildren(node)
return
# 3.x: the name of the exception, which is not a Name node, but
# a simple string, creates a local that is only bound within the scope
# of the except: block.
for scope in self.scopeStack[::-1]:
if node.name in scope:
is_name_previously_defined = True
break
else:
is_name_previously_defined = False
self.handleNodeStore(node)
self.handleChildren(node)
if not is_name_previously_defined:
# See discussion on https://github.com/PyCQA/pyflakes/pull/59
# We're removing the local name since it's being unbound
# after leaving the except: block and it's always unbound
# if the except: block is never entered. This will cause an
# "undefined name" error raised if the checked code tries to
# use the name afterwards.
#
# Unless it's been removed already. Then do nothing.
try:
del self.scope[node.name]
except KeyError:
pass
def ANNASSIGN(self, node):
if node.value:
# Only bind the *targets* if the assignment has a value.
# Otherwise it's not really ast.Store and shouldn't silence
# UndefinedLocal warnings.
self.handleNode(node.target, node)
self.handleNode(node.annotation, node)
if node.value:
# If the assignment has value, handle the *value* now.
self.handleNode(node.value, node)
| [((14727, 14762), 'os.environ.get', 'os.environ.get', (['"""PYFLAKES_BUILTINS"""'], {}), "('PYFLAKES_BUILTINS')\n", (14741, 14762), False, 'import os\n'), ((30327, 30350), 'doctest.DocTestParser', 'doctest.DocTestParser', ([], {}), '()\n', (30348, 30350), False, 'import doctest\n'), ((25408, 25439), 'os.path.basename', 'os.path.basename', (['self.filename'], {}), '(self.filename)\n', (25424, 25439), False, 'import os\n'), ((18645, 18676), 'os.path.basename', 'os.path.basename', (['self.filename'], {}), '(self.filename)\n', (18661, 18676), False, 'import os\n'), ((31945, 31959), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (31957, 31959), False, 'import sys\n')] |
infinyte7/Auto-Screenshot | AutoScreenShot.py | 5d8e39af61f3361f372ffb48add53171b7cea672 | # Project Name: Auto Screenshot
# Description: Take a screenshot of the screen when any change takes place.
# Author: Mani (Infinyte7)
# Date: 26-10-2020
# License: MIT
from pyscreenshot import grab
from PIL import ImageChops
import os
import time
import subprocess, sys
from datetime import datetime
import tkinter as tk
from tkinter import *
from tkinter import font
class AutoScreenshot:
def __init__(self, master):
self.root = root
root.title('Auto Screenshot')
root.config(bg="white")
fontRoboto = font.Font(family='Roboto', size=16, weight='bold')
# project name label
projectTitleLabel = Label(root, text="Auto Screenshot v1.0.0")
projectTitleLabel.config(font=fontRoboto, bg="white", fg="#5599ff")
projectTitleLabel.pack(padx="10")
# start button
btn_start = Button(root, text="Start", command=self.start)
btn_start.config(highlightthickness=0, bd=0, fg="white", bg="#5fd38d",
activebackground="#5fd38d", activeforeground="white", font=fontRoboto)
btn_start.pack(padx="10", fill=BOTH)
# close button
btn_start = Button(root, text="Close", command=self.close)
btn_start.config(highlightthickness=0, bd=0, fg="white", bg="#f44336",
activebackground="#ff7043", activeforeground="white", font=fontRoboto)
btn_start.pack(padx="10", pady="10", fill=BOTH)
def start(self):
# Create folder to store images
directory = "Screenshots"
self.new_folder = directory + "/" + datetime.now().strftime("%Y_%m_%d-%I_%M_%p")
# all images to one folder
if not os.path.exists(directory):
os.makedirs(directory)
# new folder for storing images for current session
if not os.path.exists(self.new_folder):
os.makedirs(self.new_folder)
        # Run GetScreenCoordinates.py and get the selection coordinates
cords_point = subprocess.check_output([sys.executable, "GetScreenCoordinates.py", "-l"])
cord_tuple = tuple(cords_point.decode("utf-8").rstrip().split(","))
        # coordinates used for taking and comparing screenshots
self.cords = (int(cord_tuple[0]), int(cord_tuple[1]), int(cord_tuple[2]), int(cord_tuple[3]))
# save first image
img1 = grab(bbox=self.cords)
now = datetime.now().strftime("%Y_%m_%d-%I_%M_%S_%p")
fname = self.new_folder + "/ScreenShots" + now + ".png"
img1.save(fname)
print("First Screenshot taken")
# start taking screenshot of next images
self.take_screenshots()
def take_screenshots(self):
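        # Grab the selected region twice, one second apart; save the newer
        # frame only when the region changed, then reschedule via Tk's event loop.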
        # grab the first and second images
img1 = grab(bbox=self.cords)
time.sleep(1)
img2 = grab(bbox=self.cords)
# check difference between images
diff = ImageChops.difference(img1, img2)
bbox = diff.getbbox()
if bbox is not None:
now = datetime.now().strftime("%Y_%m_%d-%I_%M_%S_%p")
fname = self.new_folder + "/ScreenShots" + now + ".png"
img2.save(fname)
print("Screenshot taken")
root.after(5, self.take_screenshots)
def close(self):
quit()
if __name__ == "__main__":
root = Tk()
gui = AutoScreenshot(root)
root.mainloop()
| [((547, 597), 'tkinter.font.Font', 'font.Font', ([], {'family': '"""Roboto"""', 'size': '(16)', 'weight': '"""bold"""'}), "(family='Roboto', size=16, weight='bold')\n", (556, 597), False, 'from tkinter import font\n'), ((1980, 2054), 'subprocess.check_output', 'subprocess.check_output', (["[sys.executable, 'GetScreenCoordinates.py', '-l']"], {}), "([sys.executable, 'GetScreenCoordinates.py', '-l'])\n", (2003, 2054), False, 'import subprocess, sys\n'), ((2326, 2347), 'pyscreenshot.grab', 'grab', ([], {'bbox': 'self.cords'}), '(bbox=self.cords)\n', (2330, 2347), False, 'from pyscreenshot import grab\n'), ((2714, 2735), 'pyscreenshot.grab', 'grab', ([], {'bbox': 'self.cords'}), '(bbox=self.cords)\n', (2718, 2735), False, 'from pyscreenshot import grab\n'), ((2744, 2757), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (2754, 2757), False, 'import time\n'), ((2773, 2794), 'pyscreenshot.grab', 'grab', ([], {'bbox': 'self.cords'}), '(bbox=self.cords)\n', (2777, 2794), False, 'from pyscreenshot import grab\n'), ((2853, 2886), 'PIL.ImageChops.difference', 'ImageChops.difference', (['img1', 'img2'], {}), '(img1, img2)\n', (2874, 2886), False, 'from PIL import ImageChops\n'), ((1697, 1722), 'os.path.exists', 'os.path.exists', (['directory'], {}), '(directory)\n', (1711, 1722), False, 'import os\n'), ((1736, 1758), 'os.makedirs', 'os.makedirs', (['directory'], {}), '(directory)\n', (1747, 1758), False, 'import os\n'), ((1835, 1866), 'os.path.exists', 'os.path.exists', (['self.new_folder'], {}), '(self.new_folder)\n', (1849, 1866), False, 'import os\n'), ((1880, 1908), 'os.makedirs', 'os.makedirs', (['self.new_folder'], {}), '(self.new_folder)\n', (1891, 1908), False, 'import os\n'), ((2362, 2376), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2374, 2376), False, 'from datetime import datetime\n'), ((1601, 1615), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1613, 1615), False, 'from datetime import datetime\n'), ((2973, 2987), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2985, 2987), False, 'from datetime import datetime\n')] |
HaidongHe/rqalpha | rqalpha/utils/logger.py | bb824178425909e051c456f6062a6c5bdc816421 | # -*- coding: utf-8 -*-
# Copyright 2019 Shenzhen Ricequant Technology Co., Ltd. ("Ricequant")
#
# This software may not be used except in compliance with the current license.
#
# * Non-commercial use (i.e. use by individuals for non-commercial purposes, or by non-profit institutions such as universities and research institutes for purposes such as education and research):
#     Follow the Apache License 2.0 (the "Apache 2.0 License"); you may obtain a copy of the Apache 2.0 License at: http://www.apache.org/licenses/LICENSE-2.0.
#     Unless required by law or agreed to in writing, this software must be distributed under the current license "as is", without any additional conditions.
#
# * Commercial use (i.e. use by individuals for any commercial purpose, or use by legal persons or other organizations for any purpose):
#     Without authorization from Ricequant, no individual may use this software for any commercial purpose (including but not limited to providing, selling, renting, lending or transferring this software, its derivatives, or products or services that reference or borrow its functionality or source code to third parties), and no legal person or other organization may use this software for any purpose; otherwise Ricequant is entitled to pursue liability for the corresponding intellectual-property infringement.
#     On this premise, use of this software must also comply with the Apache 2.0 License; where the Apache 2.0 License conflicts with this license, this license prevails.
#     For the detailed authorization process, please contact [email protected].
from datetime import datetime
import logbook
from logbook import Logger, StderrHandler
from rqalpha.utils.py2 import to_utf8
logbook.set_datetime_format("local")
# patch warn
logbook.base._level_names[logbook.base.WARNING] = 'WARN'
__all__ = [
"user_log",
"system_log",
"user_system_log",
]
DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S.%f"
def user_std_handler_log_formatter(record, handler):
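    # Prefer the strategy's calendar datetime so log lines match backtest time;
    # fall back to wall-clock time when no Environment instance is available.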
from rqalpha.environment import Environment
try:
dt = Environment.get_instance().calendar_dt.strftime(DATETIME_FORMAT)
except Exception:
dt = datetime.now().strftime(DATETIME_FORMAT)
log = "{dt} {level} {msg}".format(
dt=dt,
level=record.level_name,
msg=to_utf8(record.message),
)
return log
user_std_handler = StderrHandler(bubble=True)
user_std_handler.formatter = user_std_handler_log_formatter
def formatter_builder(tag):
def formatter(record, handler):
log = "[{formatter_tag}] [{time}] {level}: {msg}".format(
formatter_tag=tag,
level=record.level_name,
msg=to_utf8(record.message),
time=record.time,
)
if record.formatted_exception:
log += "\n" + record.formatted_exception
return log
return formatter
# loggers
# logger for user (strategy) code
user_log = Logger("user_log")
# system log shown to the user
user_system_log = Logger("user_system_log")
# detailed log used for printing user exceptions
user_detail_log = Logger("user_detail_log")
# user_detail_log.handlers.append(StderrHandler(bubble=True))
# system log
system_log = Logger("system_log")
basic_system_log = Logger("basic_system_log")
# standard output log
std_log = Logger("std_log")
def init_logger():
system_log.handlers = [StderrHandler(bubble=True)]
basic_system_log.handlers = [StderrHandler(bubble=True)]
std_log.handlers = [StderrHandler(bubble=True)]
user_log.handlers = []
user_system_log.handlers = []
def user_print(*args, **kwargs):
sep = kwargs.get("sep", " ")
end = kwargs.get("end", "")
message = sep.join(map(str, args)) + end
user_log.info(message)
init_logger()
| [((787, 823), 'logbook.set_datetime_format', 'logbook.set_datetime_format', (['"""local"""'], {}), "('local')\n", (814, 823), False, 'import logbook\n'), ((1445, 1471), 'logbook.StderrHandler', 'StderrHandler', ([], {'bubble': '(True)'}), '(bubble=True)\n', (1458, 1471), False, 'from logbook import Logger, StderrHandler\n'), ((1985, 2003), 'logbook.Logger', 'Logger', (['"""user_log"""'], {}), "('user_log')\n", (1991, 2003), False, 'from logbook import Logger, StderrHandler\n'), ((2034, 2059), 'logbook.Logger', 'Logger', (['"""user_system_log"""'], {}), "('user_system_log')\n", (2040, 2059), False, 'from logbook import Logger, StderrHandler\n'), ((2095, 2120), 'logbook.Logger', 'Logger', (['"""user_detail_log"""'], {}), "('user_detail_log')\n", (2101, 2120), False, 'from logbook import Logger, StderrHandler\n'), ((2204, 2224), 'logbook.Logger', 'Logger', (['"""system_log"""'], {}), "('system_log')\n", (2210, 2224), False, 'from logbook import Logger, StderrHandler\n'), ((2244, 2270), 'logbook.Logger', 'Logger', (['"""basic_system_log"""'], {}), "('basic_system_log')\n", (2250, 2270), False, 'from logbook import Logger, StderrHandler\n'), ((2291, 2308), 'logbook.Logger', 'Logger', (['"""std_log"""'], {}), "('std_log')\n", (2297, 2308), False, 'from logbook import Logger, StderrHandler\n'), ((2357, 2383), 'logbook.StderrHandler', 'StderrHandler', ([], {'bubble': '(True)'}), '(bubble=True)\n', (2370, 2383), False, 'from logbook import Logger, StderrHandler\n'), ((2418, 2444), 'logbook.StderrHandler', 'StderrHandler', ([], {'bubble': '(True)'}), '(bubble=True)\n', (2431, 2444), False, 'from logbook import Logger, StderrHandler\n'), ((2470, 2496), 'logbook.StderrHandler', 'StderrHandler', ([], {'bubble': '(True)'}), '(bubble=True)\n', (2483, 2496), False, 'from logbook import Logger, StderrHandler\n'), ((1378, 1401), 'rqalpha.utils.py2.to_utf8', 'to_utf8', (['record.message'], {}), '(record.message)\n', (1385, 1401), False, 'from rqalpha.utils.py2 import to_utf8\n'), ((1749, 1772), 'rqalpha.utils.py2.to_utf8', 'to_utf8', (['record.message'], {}), '(record.message)\n', (1756, 1772), False, 'from rqalpha.utils.py2 import to_utf8\n'), ((1137, 1163), 'rqalpha.environment.Environment.get_instance', 'Environment.get_instance', ([], {}), '()\n', (1161, 1163), False, 'from rqalpha.environment import Environment\n'), ((1237, 1251), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1249, 1251), False, 'from datetime import datetime\n')] |
wikimedia/operations-debs-salt | salt/modules/oracle.py | be6342abc7401ff92f67ed59f7834f1359f35314 | # -*- coding: utf-8 -*-
'''
Oracle DataBase connection module
:maintainer: Vladimir Bormotov <[email protected]>
:maturity: new
:depends: cx_Oracle
:platform: all
:configuration: module provides connections for multiple Oracle DB instances.
**OS Environment**
.. code-block:: text
ORACLE_HOME: path to oracle product
        PATH: Oracle Client libraries need to be on the PATH
**pillar**
.. code-block:: text
        oracle.dbs: list of known databases
oracle.dbs.<db>.uri: connection credentials in format:
user/password@host[:port]/sid[ as {sysdba|sysoper}]
'''
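# Example pillar (hypothetical values), matching the format above:
#
#     oracle:
#       dbs:
#         my_db:
#           uri: scott/tiger@oracle-host:1521/XE as sysdba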
import os
import logging
from salt.utils.decorators import depends
log = logging.getLogger(__name__)
try:
import cx_Oracle
MODE = {
'sysdba': cx_Oracle.SYSDBA,
'sysoper': cx_Oracle.SYSOPER
}
HAS_CX_ORACLE = True
except ImportError:
MODE = {'sysdba': 2, 'sysoper': 4}
HAS_CX_ORACLE = False
__virtualname__ = 'oracle'
def __virtual__():
'''
Load module only if cx_Oracle installed
'''
return __virtualname__ if HAS_CX_ORACLE else False
def _cx_oracle_req():
'''
Fallback function stub
'''
    return 'Need "cx_Oracle" and the Oracle Client installed to use this function'
def _unicode_output(cursor, name, default_type, size, precision, scale):
'''
Return strings values as python unicode string
http://www.oracle.com/technetwork/articles/dsl/tuininga-cx-oracle-084866.html
'''
if default_type in (cx_Oracle.STRING, cx_Oracle.LONG_STRING,
cx_Oracle.FIXED_CHAR, cx_Oracle.CLOB):
return cursor.var(unicode, size, cursor.arraysize)
def _connect(uri):
'''
uri = user/password@host[:port]/sid[ as {sysdba|sysoper}]
Return cx_Oracle.Connection instance
'''
    # cx_Oracle.Connection() does not support the 'as sysdba' syntax
uri_l = uri.rsplit(' as ', 1)
if len(uri_l) == 2:
credentials, mode = uri_l
mode = MODE[mode]
else:
credentials = uri_l[0]
mode = 0
userpass, hostportsid = credentials.split('@')
user, password = userpass.split('/')
hostport, sid = hostportsid.split('/')
hostport_l = hostport.split(':')
if len(hostport_l) == 2:
host, port = hostport_l
else:
host = hostport_l[0]
port = 1521
log.debug('connect: {0}'.format((user, password, host, port, sid, mode)))
# force UTF-8 client encoding
os.environ['NLS_LANG'] = '.AL32UTF8'
conn = cx_Oracle.connect(user, password,
cx_Oracle.makedsn(host, port, sid),
mode)
conn.outputtypehandler = _unicode_output
return conn
@depends('cx_Oracle', fallback_function=_cx_oracle_req)
def run_query(db, query):
'''
Run SQL query and return result
CLI example:
.. code-block:: bash
salt '*' oracle.run_query my_db "select * from my_table"
'''
log.debug('run query on {0}: {1}'.format(db, query))
conn = _connect(show_dbs(db)[db]['uri'])
return conn.cursor().execute(query).fetchall()
def show_dbs(*dbs):
'''
    Show database configurations from pillar, filtered by args
.. code-block:: bash
salt '*' oracle.show_dbs
salt '*' oracle.show_dbs my_db
'''
if dbs:
log.debug('get dbs from pillar: {0}'.format(dbs))
result = {}
for db in dbs:
result[db] = __salt__['pillar.get']('oracle:dbs:' + db)
return result
else:
pillar_dbs = __salt__['pillar.get']('oracle:dbs')
log.debug('get all ({0}) dbs from pillar'.format(len(pillar_dbs)))
return pillar_dbs
@depends('cx_Oracle', fallback_function=_cx_oracle_req)
def version(*dbs):
'''
Server Version (select banner from v$version)
CLI Example:
.. code-block:: bash
salt '*' oracle.version
salt '*' oracle.version my_db
'''
pillar_dbs = __salt__['pillar.get']('oracle:dbs')
get_version = lambda x: [
r[0] for r in run_query(x, "select banner from v$version order by banner")
]
result = {}
if dbs:
log.debug('get db versions for: {0}'.format(dbs))
for db in dbs:
if db in pillar_dbs:
result[db] = get_version(db)
else:
        log.debug('get all({0}) dbs versions'.format(len(pillar_dbs)))
        for db in pillar_dbs:
            result[db] = get_version(db)
return result
@depends('cx_Oracle', fallback_function=_cx_oracle_req)
def client_version():
'''
Oracle Client Version
CLI Example:
.. code-block:: bash
salt '*' oracle.client_version
'''
return '.'.join((str(x) for x in cx_Oracle.clientversion()))
def show_pillar(item=None):
'''
Show Pillar segment oracle.* and subitem with notation "item:subitem"
CLI Example:
.. code-block:: bash
salt '*' oracle.show_pillar
salt '*' oracle.show_pillar dbs:my_db
'''
if item:
return __salt__['pillar.get']('oracle:' + item)
else:
return __salt__['pillar.get']('oracle')
def show_env():
'''
Show Environment used by Oracle Client
CLI Example:
.. code-block:: bash
salt '*' oracle.show_env
.. note::
        on the first _connect(), ``NLS_LANG`` will be forced to '.AL32UTF8'
'''
envs = ['PATH', 'ORACLE_HOME', 'TNS_ADMIN', 'NLS_LANG']
result = {}
for env in envs:
if env in os.environ:
result[env] = os.environ[env]
return result
| [((690, 717), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (707, 717), False, 'import logging\n'), ((2702, 2756), 'salt.utils.decorators.depends', 'depends', (['"""cx_Oracle"""'], {'fallback_function': '_cx_oracle_req'}), "('cx_Oracle', fallback_function=_cx_oracle_req)\n", (2709, 2756), False, 'from salt.utils.decorators import depends\n'), ((3671, 3725), 'salt.utils.decorators.depends', 'depends', (['"""cx_Oracle"""'], {'fallback_function': '_cx_oracle_req'}), "('cx_Oracle', fallback_function=_cx_oracle_req)\n", (3678, 3725), False, 'from salt.utils.decorators import depends\n'), ((4450, 4504), 'salt.utils.decorators.depends', 'depends', (['"""cx_Oracle"""'], {'fallback_function': '_cx_oracle_req'}), "('cx_Oracle', fallback_function=_cx_oracle_req)\n", (4457, 4504), False, 'from salt.utils.decorators import depends\n'), ((2567, 2601), 'cx_Oracle.makedsn', 'cx_Oracle.makedsn', (['host', 'port', 'sid'], {}), '(host, port, sid)\n', (2584, 2601), False, 'import cx_Oracle\n'), ((4690, 4715), 'cx_Oracle.clientversion', 'cx_Oracle.clientversion', ([], {}), '()\n', (4713, 4715), False, 'import cx_Oracle\n')] |
ashwini-balnaves/python-consul | tests/test_std.py | 4ddec9b57eb5284b58967ce1a9b2422519f88cc2 | import base64
import operator
import struct
import time
import pytest
import six
import consul
import consul.std
Check = consul.Check
class TestHTTPClient(object):
def test_uri(self):
http = consul.std.HTTPClient()
assert http.uri('/v1/kv') == 'http://127.0.0.1:8500/v1/kv'
assert http.uri('/v1/kv', params={'index': 1}) == \
'http://127.0.0.1:8500/v1/kv?index=1'
class TestConsul(object):
def test_kv(self, consul_port):
c = consul.Consul(port=consul_port)
index, data = c.kv.get('foo')
assert data is None
assert c.kv.put('foo', 'bar') is True
index, data = c.kv.get('foo')
assert data['Value'] == six.b('bar')
def test_kv_wait(self, consul_port):
c = consul.Consul(port=consul_port)
assert c.kv.put('foo', 'bar') is True
index, data = c.kv.get('foo')
check, data = c.kv.get('foo', index=index, wait='20ms')
assert index == check
def test_kv_encoding(self, consul_port):
c = consul.Consul(port=consul_port)
# test binary
c.kv.put('foo', struct.pack('i', 1000))
index, data = c.kv.get('foo')
assert struct.unpack('i', data['Value']) == (1000,)
# test unicode
c.kv.put('foo', u'bar')
index, data = c.kv.get('foo')
assert data['Value'] == six.b('bar')
# test empty-string comes back as `None`
c.kv.put('foo', '')
index, data = c.kv.get('foo')
assert data['Value'] is None
# test None
c.kv.put('foo', None)
index, data = c.kv.get('foo')
assert data['Value'] is None
        # check that non-string values raise an AssertionError
pytest.raises(AssertionError, c.kv.put, 'foo', {1: 2})
def test_kv_put_cas(self, consul_port):
c = consul.Consul(port=consul_port)
assert c.kv.put('foo', 'bar', cas=50) is False
assert c.kv.put('foo', 'bar', cas=0) is True
index, data = c.kv.get('foo')
assert c.kv.put('foo', 'bar2', cas=data['ModifyIndex']-1) is False
assert c.kv.put('foo', 'bar2', cas=data['ModifyIndex']) is True
index, data = c.kv.get('foo')
assert data['Value'] == six.b('bar2')
def test_kv_put_flags(self, consul_port):
c = consul.Consul(port=consul_port)
c.kv.put('foo', 'bar')
index, data = c.kv.get('foo')
assert data['Flags'] == 0
assert c.kv.put('foo', 'bar', flags=50) is True
index, data = c.kv.get('foo')
assert data['Flags'] == 50
def test_kv_recurse(self, consul_port):
c = consul.Consul(port=consul_port)
index, data = c.kv.get('foo/', recurse=True)
assert data is None
c.kv.put('foo/', None)
index, data = c.kv.get('foo/', recurse=True)
assert len(data) == 1
c.kv.put('foo/bar1', '1')
c.kv.put('foo/bar2', '2')
c.kv.put('foo/bar3', '3')
index, data = c.kv.get('foo/', recurse=True)
assert [x['Key'] for x in data] == [
'foo/', 'foo/bar1', 'foo/bar2', 'foo/bar3']
assert [x['Value'] for x in data] == [
None, six.b('1'), six.b('2'), six.b('3')]
def test_kv_delete(self, consul_port):
c = consul.Consul(port=consul_port)
c.kv.put('foo1', '1')
c.kv.put('foo2', '2')
c.kv.put('foo3', '3')
index, data = c.kv.get('foo', recurse=True)
assert [x['Key'] for x in data] == ['foo1', 'foo2', 'foo3']
assert c.kv.delete('foo2') is True
index, data = c.kv.get('foo', recurse=True)
assert [x['Key'] for x in data] == ['foo1', 'foo3']
assert c.kv.delete('foo', recurse=True) is True
index, data = c.kv.get('foo', recurse=True)
assert data is None
def test_kv_delete_cas(self, consul_port):
c = consul.Consul(port=consul_port)
c.kv.put('foo', 'bar')
index, data = c.kv.get('foo')
assert c.kv.delete('foo', cas=data['ModifyIndex']-1) is False
assert c.kv.get('foo') == (index, data)
assert c.kv.delete('foo', cas=data['ModifyIndex']) is True
index, data = c.kv.get('foo')
assert data is None
def test_kv_acquire_release(self, consul_port):
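        # keys can only be acquired with a valid session id, and a lock held
        # by one session cannot be taken or released by another session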
c = consul.Consul(port=consul_port)
pytest.raises(
consul.ConsulException, c.kv.put, 'foo', 'bar', acquire='foo')
s1 = c.session.create()
s2 = c.session.create()
assert c.kv.put('foo', '1', acquire=s1) is True
assert c.kv.put('foo', '2', acquire=s2) is False
assert c.kv.put('foo', '1', acquire=s1) is True
assert c.kv.put('foo', '1', release='foo') is False
assert c.kv.put('foo', '2', release=s2) is False
assert c.kv.put('foo', '2', release=s1) is True
c.session.destroy(s1)
c.session.destroy(s2)
def test_kv_keys_only(self, consul_port):
c = consul.Consul(port=consul_port)
assert c.kv.put('bar', '4') is True
assert c.kv.put('base/foo', '1') is True
assert c.kv.put('base/base/foo', '5') is True
index, data = c.kv.get('base/', keys=True, separator='/')
assert data == ['base/base/', 'base/foo']
def test_transaction(self, consul_port):
c = consul.Consul(port=consul_port)
value = base64.b64encode(b"1").decode("utf8")
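        # the txn endpoint expects KV values to be base64 encoded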
d = {"KV": {"Verb": "set", "Key": "asdf", "Value": value}}
r = c.txn.put([d])
assert r["Errors"] is None
d = {"KV": {"Verb": "get", "Key": "asdf"}}
r = c.txn.put([d])
assert r["Results"][0]["KV"]["Value"] == value
def test_event(self, consul_port):
c = consul.Consul(port=consul_port)
assert c.event.fire("fooname", "foobody")
index, events = c.event.list()
assert [x['Name'] == 'fooname' for x in events]
assert [x['Payload'] == 'foobody' for x in events]
def test_event_targeted(self, consul_port):
c = consul.Consul(port=consul_port)
assert c.event.fire("fooname", "foobody")
index, events = c.event.list(name="othername")
assert events == []
index, events = c.event.list(name="fooname")
assert [x['Name'] == 'fooname' for x in events]
assert [x['Payload'] == 'foobody' for x in events]
def test_agent_checks(self, consul_port):
c = consul.Consul(port=consul_port)
def verify_and_dereg_check(check_id):
assert set(c.agent.checks().keys()) == set([check_id])
assert c.agent.check.deregister(check_id) is True
assert set(c.agent.checks().keys()) == set([])
def verify_check_status(check_id, status, notes=None):
checks = c.agent.checks()
assert checks[check_id]['Status'] == status
if notes:
assert checks[check_id]['Output'] == notes
# test setting notes on a check
c.agent.check.register('check', Check.ttl('1s'), notes='foo')
assert c.agent.checks()['check']['Notes'] == 'foo'
c.agent.check.deregister('check')
assert set(c.agent.checks().keys()) == set([])
assert c.agent.check.register(
'script_check', Check.script('/bin/true', 10)) is True
verify_and_dereg_check('script_check')
assert c.agent.check.register(
'check name',
Check.script('/bin/true', 10),
check_id='check_id') is True
verify_and_dereg_check('check_id')
http_addr = "http://127.0.0.1:{0}".format(consul_port)
assert c.agent.check.register(
'http_check', Check.http(http_addr, '10ms')) is True
time.sleep(1)
verify_check_status('http_check', 'passing')
verify_and_dereg_check('http_check')
assert c.agent.check.register(
'http_timeout_check',
Check.http(http_addr, '100ms', timeout='2s')) is True
verify_and_dereg_check('http_timeout_check')
assert c.agent.check.register('ttl_check', Check.ttl('100ms')) is True
assert c.agent.check.ttl_warn('ttl_check') is True
verify_check_status('ttl_check', 'warning')
assert c.agent.check.ttl_warn(
'ttl_check', notes='its not quite right') is True
verify_check_status('ttl_check', 'warning', 'its not quite right')
assert c.agent.check.ttl_fail('ttl_check') is True
verify_check_status('ttl_check', 'critical')
assert c.agent.check.ttl_fail(
'ttl_check', notes='something went boink!') is True
verify_check_status(
'ttl_check', 'critical', notes='something went boink!')
assert c.agent.check.ttl_pass('ttl_check') is True
verify_check_status('ttl_check', 'passing')
assert c.agent.check.ttl_pass(
'ttl_check', notes='all hunky dory!') is True
verify_check_status('ttl_check', 'passing', notes='all hunky dory!')
# wait for ttl to expire
time.sleep(120/1000.0)
verify_check_status('ttl_check', 'critical')
verify_and_dereg_check('ttl_check')
def test_service_dereg_issue_156(self, consul_port):
# https://github.com/cablehead/python-consul/issues/156
service_name = 'app#127.0.0.1#3000'
c = consul.Consul(port=consul_port)
c.agent.service.register(service_name)
time.sleep(80/1000.0)
index, nodes = c.health.service(service_name)
assert [node['Service']['ID'] for node in nodes] == [service_name]
# Clean up tasks
assert c.agent.service.deregister(service_name) is True
time.sleep(40/1000.0)
index, nodes = c.health.service(service_name)
assert [node['Service']['ID'] for node in nodes] == []
def test_agent_checks_service_id(self, consul_port):
c = consul.Consul(port=consul_port)
c.agent.service.register('foo1')
time.sleep(40/1000.0)
index, nodes = c.health.service('foo1')
assert [node['Service']['ID'] for node in nodes] == ['foo1']
c.agent.check.register('foo', Check.ttl('100ms'), service_id='foo1')
time.sleep(40/1000.0)
index, nodes = c.health.service('foo1')
assert set([
check['ServiceID'] for node in nodes
for check in node['Checks']]) == set(['foo1', ''])
assert set([
check['CheckID'] for node in nodes
for check in node['Checks']]) == set(['foo', 'serfHealth'])
# Clean up tasks
assert c.agent.check.deregister('foo') is True
time.sleep(40/1000.0)
assert c.agent.service.deregister('foo1') is True
time.sleep(40/1000.0)
def test_agent_register_check_no_service_id(self, consul_port):
c = consul.Consul(port=consul_port)
index, nodes = c.health.service("foo1")
assert nodes == []
pytest.raises(consul.std.base.ConsulException,
c.agent.check.register,
'foo', Check.ttl('100ms'),
service_id='foo1')
time.sleep(40/1000.0)
assert c.agent.checks() == {}
# Cleanup tasks
c.agent.check.deregister('foo')
time.sleep(40/1000.0)
def test_agent_register_enable_tag_override(self, consul_port):
c = consul.Consul(port=consul_port)
index, nodes = c.health.service("foo1")
assert nodes == []
c.agent.service.register('foo', enable_tag_override=True)
assert c.agent.services()['foo']['EnableTagOverride']
# Cleanup tasks
c.agent.check.deregister('foo')
def test_agent_service_maintenance(self, consul_port):
c = consul.Consul(port=consul_port)
c.agent.service.register('foo', check=Check.ttl('100ms'))
time.sleep(40/1000.0)
c.agent.service.maintenance('foo', 'true', "test")
time.sleep(40/1000.0)
checks_pre = c.agent.checks()
assert '_service_maintenance:foo' in checks_pre.keys()
assert 'test' == checks_pre['_service_maintenance:foo']['Notes']
c.agent.service.maintenance('foo', 'false')
time.sleep(40/1000.0)
checks_post = c.agent.checks()
assert '_service_maintenance:foo' not in checks_post.keys()
# Cleanup
c.agent.service.deregister('foo')
time.sleep(40/1000.0)
def test_agent_node_maintenance(self, consul_port):
c = consul.Consul(port=consul_port)
c.agent.maintenance('true', "test")
time.sleep(40/1000.0)
checks_pre = c.agent.checks()
assert '_node_maintenance' in checks_pre.keys()
assert 'test' == checks_pre['_node_maintenance']['Notes']
c.agent.maintenance('false')
time.sleep(40/1000.0)
checks_post = c.agent.checks()
assert '_node_maintenance' not in checks_post.keys()
def test_agent_members(self, consul_port):
c = consul.Consul(port=consul_port)
members = c.agent.members()
for x in members:
assert x['Status'] == 1
assert not x['Name'] is None
assert not x['Tags'] is None
assert c.agent.self()['Member'] in members
wan_members = c.agent.members(wan=True)
for x in wan_members:
assert 'dc1' in x['Name']
def test_agent_self(self, consul_port):
c = consul.Consul(port=consul_port)
assert set(c.agent.self().keys()) == set(['Member', 'Stats', 'Config',
'Coord', 'DebugConfig',
'Meta'])
def test_agent_services(self, consul_port):
c = consul.Consul(port=consul_port)
assert c.agent.service.register('foo') is True
assert set(c.agent.services().keys()) == set(['foo'])
assert c.agent.service.deregister('foo') is True
assert set(c.agent.services().keys()) == set()
# test address param
assert c.agent.service.register('foo', address='10.10.10.1') is True
assert [
v['Address'] for k, v in c.agent.services().items()
if k == 'foo'][0] == '10.10.10.1'
assert c.agent.service.deregister('foo') is True
def test_catalog(self, consul_port):
c = consul.Consul(port=consul_port)
# grab the node our server created, so we can ignore it
_, nodes = c.catalog.nodes()
assert len(nodes) == 1
current = nodes[0]
# test catalog.datacenters
assert c.catalog.datacenters() == ['dc1']
# test catalog.register
pytest.raises(
consul.ConsulException,
c.catalog.register, 'foo', '10.1.10.11', dc='dc2')
assert c.catalog.register(
'n1',
'10.1.10.11',
service={'service': 's1'},
check={'name': 'c1'}) is True
assert c.catalog.register(
'n1', '10.1.10.11', service={'service': 's2'}) is True
assert c.catalog.register(
'n2', '10.1.10.12',
service={'service': 's1', 'tags': ['master']}) is True
# test catalog.nodes
pytest.raises(consul.ConsulException, c.catalog.nodes, dc='dc2')
_, nodes = c.catalog.nodes()
nodes.remove(current)
assert [x['Node'] for x in nodes] == ['n1', 'n2']
# test catalog.services
pytest.raises(consul.ConsulException, c.catalog.services, dc='dc2')
_, services = c.catalog.services()
assert services == {'s1': [u'master'], 's2': [], 'consul': []}
# test catalog.node
pytest.raises(consul.ConsulException, c.catalog.node, 'n1', dc='dc2')
_, node = c.catalog.node('n1')
assert set(node['Services'].keys()) == set(['s1', 's2'])
_, node = c.catalog.node('n3')
assert node is None
# test catalog.service
pytest.raises(
consul.ConsulException, c.catalog.service, 's1', dc='dc2')
_, nodes = c.catalog.service('s1')
assert set([x['Node'] for x in nodes]) == set(['n1', 'n2'])
_, nodes = c.catalog.service('s1', tag='master')
assert set([x['Node'] for x in nodes]) == set(['n2'])
# test catalog.deregister
pytest.raises(
consul.ConsulException, c.catalog.deregister, 'n2', dc='dc2')
assert c.catalog.deregister('n1', check_id='c1') is True
assert c.catalog.deregister('n2', service_id='s1') is True
# check the nodes weren't removed
_, nodes = c.catalog.nodes()
nodes.remove(current)
assert [x['Node'] for x in nodes] == ['n1', 'n2']
# check n2's s1 service was removed though
_, nodes = c.catalog.service('s1')
assert set([x['Node'] for x in nodes]) == set(['n1'])
# cleanup
assert c.catalog.deregister('n1') is True
assert c.catalog.deregister('n2') is True
_, nodes = c.catalog.nodes()
nodes.remove(current)
assert [x['Node'] for x in nodes] == []
def test_health_service(self, consul_port):
c = consul.Consul(port=consul_port)
# check there are no nodes for the service 'foo'
index, nodes = c.health.service('foo')
assert nodes == []
# register two nodes, one with a long ttl, the other shorter
c.agent.service.register(
'foo',
service_id='foo:1',
check=Check.ttl('10s'),
tags=['tag:foo:1'])
c.agent.service.register(
'foo', service_id='foo:2', check=Check.ttl('100ms'))
time.sleep(40/1000.0)
# check the nodes show for the /health/service endpoint
index, nodes = c.health.service('foo')
assert [node['Service']['ID'] for node in nodes] == ['foo:1', 'foo:2']
# but that they aren't passing their health check
index, nodes = c.health.service('foo', passing=True)
assert nodes == []
        # ping the two nodes' health checks
c.agent.check.ttl_pass('service:foo:1')
c.agent.check.ttl_pass('service:foo:2')
time.sleep(40/1000.0)
# both nodes are now available
index, nodes = c.health.service('foo', passing=True)
assert [node['Service']['ID'] for node in nodes] == ['foo:1', 'foo:2']
# wait until the short ttl node fails
time.sleep(120/1000.0)
# only one node available
index, nodes = c.health.service('foo', passing=True)
assert [node['Service']['ID'] for node in nodes] == ['foo:1']
# ping the failed node's health check
c.agent.check.ttl_pass('service:foo:2')
time.sleep(40/1000.0)
# check both nodes are available
index, nodes = c.health.service('foo', passing=True)
assert [node['Service']['ID'] for node in nodes] == ['foo:1', 'foo:2']
# check that tag works
index, nodes = c.health.service('foo', tag='tag:foo:1')
assert [node['Service']['ID'] for node in nodes] == ['foo:1']
# deregister the nodes
c.agent.service.deregister('foo:1')
c.agent.service.deregister('foo:2')
time.sleep(40/1000.0)
index, nodes = c.health.service('foo')
assert nodes == []
def test_health_state(self, consul_port):
c = consul.Consul(port=consul_port)
# The empty string is for the Serf Health Status check, which has an
# empty ServiceID
index, nodes = c.health.state('any')
assert [node['ServiceID'] for node in nodes] == ['']
# register two nodes, one with a long ttl, the other shorter
c.agent.service.register(
'foo', service_id='foo:1', check=Check.ttl('10s'))
c.agent.service.register(
'foo', service_id='foo:2', check=Check.ttl('100ms'))
time.sleep(40/1000.0)
# check the nodes show for the /health/state/any endpoint
index, nodes = c.health.state('any')
assert set([node['ServiceID'] for node in nodes]) == set(
['', 'foo:1', 'foo:2'])
# but that they aren't passing their health check
index, nodes = c.health.state('passing')
        assert [node['ServiceID'] for node in nodes] == ['']
        # ping the two nodes' health checks
c.agent.check.ttl_pass('service:foo:1')
c.agent.check.ttl_pass('service:foo:2')
time.sleep(40/1000.0)
# both nodes are now available
index, nodes = c.health.state('passing')
assert set([node['ServiceID'] for node in nodes]) == set(
['', 'foo:1', 'foo:2'])
# wait until the short ttl node fails
time.sleep(2200/1000.0)
# only one node available
index, nodes = c.health.state('passing')
assert set([node['ServiceID'] for node in nodes]) == set(
['', 'foo:1'])
# ping the failed node's health check
c.agent.check.ttl_pass('service:foo:2')
time.sleep(40/1000.0)
# check both nodes are available
index, nodes = c.health.state('passing')
assert set([node['ServiceID'] for node in nodes]) == set(
['', 'foo:1', 'foo:2'])
# deregister the nodes
c.agent.service.deregister('foo:1')
c.agent.service.deregister('foo:2')
time.sleep(40/1000.0)
index, nodes = c.health.state('any')
assert [node['ServiceID'] for node in nodes] == ['']
def test_health_node(self, consul_port):
c = consul.Consul(port=consul_port)
# grab local node name
node = c.agent.self()['Config']['NodeName']
index, checks = c.health.node(node)
assert node in [check["Node"] for check in checks]
def test_health_checks(self, consul_port):
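        # checks for a registered service should be queryable by service name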
c = consul.Consul(port=consul_port)
c.agent.service.register(
'foobar', service_id='foobar', check=Check.ttl('10s'))
        time.sleep(40/1000.0)
index, checks = c.health.checks('foobar')
assert [check['ServiceID'] for check in checks] == ['foobar']
assert [check['CheckID'] for check in checks] == ['service:foobar']
c.agent.service.deregister('foobar')
time.sleep(40/1000.0)
index, checks = c.health.checks('foobar')
assert len(checks) == 0
def test_session(self, consul_port):
c = consul.Consul(port=consul_port)
# session.create
pytest.raises(consul.ConsulException, c.session.create, node='n2')
pytest.raises(consul.ConsulException, c.session.create, dc='dc2')
session_id = c.session.create('my-session')
# session.list
pytest.raises(consul.ConsulException, c.session.list, dc='dc2')
_, sessions = c.session.list()
assert [x['Name'] for x in sessions] == ['my-session']
# session.info
pytest.raises(
consul.ConsulException, c.session.info, session_id, dc='dc2')
index, session = c.session.info('1'*36)
assert session is None
index, session = c.session.info(session_id)
assert session['Name'] == 'my-session'
# session.node
node = session['Node']
pytest.raises(
consul.ConsulException, c.session.node, node, dc='dc2')
_, sessions = c.session.node(node)
assert [x['Name'] for x in sessions] == ['my-session']
# session.destroy
pytest.raises(
consul.ConsulException, c.session.destroy, session_id, dc='dc2')
assert c.session.destroy(session_id) is True
_, sessions = c.session.list()
assert sessions == []
def test_session_delete_ttl_renew(self, consul_port):
c = consul.Consul(port=consul_port)
s = c.session.create(behavior='delete', ttl=20)
# attempt to renew an unknown session
pytest.raises(consul.NotFound, c.session.renew, '1'*36)
session = c.session.renew(s)
assert session['Behavior'] == 'delete'
assert session['TTL'] == '20s'
# trying out the behavior
assert c.kv.put('foo', '1', acquire=s) is True
index, data = c.kv.get('foo')
assert data['Value'] == six.b('1')
c.session.destroy(s)
index, data = c.kv.get('foo')
assert data is None
def test_acl_disabled(self, consul_port):
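        # with ACLs disabled, every ACL endpoint should raise ACLDisabled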
c = consul.Consul(port=consul_port)
pytest.raises(consul.ACLDisabled, c.acl.list)
pytest.raises(consul.ACLDisabled, c.acl.info, '1'*36)
pytest.raises(consul.ACLDisabled, c.acl.create)
pytest.raises(consul.ACLDisabled, c.acl.update, 'foo')
pytest.raises(consul.ACLDisabled, c.acl.clone, 'foo')
pytest.raises(consul.ACLDisabled, c.acl.destroy, 'foo')
def test_acl_permission_denied(self, acl_consul):
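        # without a management token, ACL management calls should be denied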
c = consul.Consul(port=acl_consul.port)
pytest.raises(consul.ACLPermissionDenied, c.acl.list)
pytest.raises(consul.ACLPermissionDenied, c.acl.create)
pytest.raises(consul.ACLPermissionDenied, c.acl.update, 'anonymous')
pytest.raises(consul.ACLPermissionDenied, c.acl.clone, 'anonymous')
pytest.raises(consul.ACLPermissionDenied, c.acl.destroy, 'anonymous')
    def test_acl_explicit_token_use(self, acl_consul):
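        # pass the master token explicitly on each call instead of configuring it on the client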
c = consul.Consul(port=acl_consul.port)
master_token = acl_consul.token
acls = c.acl.list(token=master_token)
assert set([x['ID'] for x in acls]) == \
set(['anonymous', master_token])
assert c.acl.info('1'*36) is None
compare = [c.acl.info(master_token), c.acl.info('anonymous')]
compare.sort(key=operator.itemgetter('ID'))
assert acls == compare
rules = """
key "" {
policy = "read"
}
key "private/" {
policy = "deny"
}
service "foo-" {
policy = "write"
}
service "bar-" {
policy = "read"
}
"""
token = c.acl.create(rules=rules, token=master_token)
assert c.acl.info(token)['Rules'] == rules
token2 = c.acl.clone(token, token=master_token)
assert c.acl.info(token2)['Rules'] == rules
assert c.acl.update(token2, name='Foo', token=master_token) == token2
assert c.acl.info(token2)['Name'] == 'Foo'
assert c.acl.destroy(token2, token=master_token) is True
assert c.acl.info(token2) is None
c.kv.put('foo', 'bar')
c.kv.put('private/foo', 'bar')
assert c.kv.get('foo', token=token)[1]['Value'] == six.b('bar')
pytest.raises(
consul.ACLPermissionDenied, c.kv.put, 'foo', 'bar2', token=token)
pytest.raises(
consul.ACLPermissionDenied, c.kv.delete, 'foo', token=token)
assert c.kv.get('private/foo')[1]['Value'] == six.b('bar')
pytest.raises(
consul.ACLPermissionDenied,
c.kv.get, 'private/foo', token=token)
pytest.raises(
consul.ACLPermissionDenied,
c.kv.put, 'private/foo', 'bar2', token=token)
pytest.raises(
consul.ACLPermissionDenied,
c.kv.delete, 'private/foo', token=token)
# test token pass through for service registration
pytest.raises(
consul.ACLPermissionDenied,
c.agent.service.register, "bar-1", token=token)
c.agent.service.register("foo-1", token=token)
index, data = c.health.service('foo-1', token=token)
assert data[0]['Service']['ID'] == "foo-1"
index, data = c.health.checks('foo-1', token=token)
assert data == []
index, data = c.health.service('bar-1', token=token)
assert not data
# clean up
assert c.agent.service.deregister('foo-1') is True
c.acl.destroy(token, token=master_token)
acls = c.acl.list(token=master_token)
assert set([x['ID'] for x in acls]) == \
set(['anonymous', master_token])
def test_acl_implicit_token_use(self, acl_consul):
# configure client to use the master token by default
c = consul.Consul(port=acl_consul.port, token=acl_consul.token)
master_token = acl_consul.token
acls = c.acl.list()
assert set([x['ID'] for x in acls]) == \
set(['anonymous', master_token])
assert c.acl.info('foo') is None
compare = [c.acl.info(master_token), c.acl.info('anonymous')]
compare.sort(key=operator.itemgetter('ID'))
assert acls == compare
rules = """
key "" {
policy = "read"
}
key "private/" {
policy = "deny"
}
"""
token = c.acl.create(rules=rules)
assert c.acl.info(token)['Rules'] == rules
token2 = c.acl.clone(token)
assert c.acl.info(token2)['Rules'] == rules
assert c.acl.update(token2, name='Foo') == token2
assert c.acl.info(token2)['Name'] == 'Foo'
assert c.acl.destroy(token2) is True
assert c.acl.info(token2) is None
c.kv.put('foo', 'bar')
c.kv.put('private/foo', 'bar')
c_limited = consul.Consul(port=acl_consul.port, token=token)
assert c_limited.kv.get('foo')[1]['Value'] == six.b('bar')
pytest.raises(
consul.ACLPermissionDenied, c_limited.kv.put, 'foo', 'bar2')
pytest.raises(
consul.ACLPermissionDenied, c_limited.kv.delete, 'foo')
assert c.kv.get('private/foo')[1]['Value'] == six.b('bar')
pytest.raises(
consul.ACLPermissionDenied,
c_limited.kv.get, 'private/foo')
pytest.raises(
consul.ACLPermissionDenied,
c_limited.kv.put, 'private/foo', 'bar2')
pytest.raises(
consul.ACLPermissionDenied,
c_limited.kv.delete, 'private/foo')
# check we can override the client's default token
pytest.raises(
consul.ACLPermissionDenied,
c.kv.get, 'private/foo', token=token
)
pytest.raises(
consul.ACLPermissionDenied,
c.kv.put, 'private/foo', 'bar2', token=token)
pytest.raises(
consul.ACLPermissionDenied,
c.kv.delete, 'private/foo', token=token)
# clean up
c.acl.destroy(token)
acls = c.acl.list()
assert set([x['ID'] for x in acls]) == \
set(['anonymous', master_token])
def test_status_leader(self, consul_port):
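        # the reported leader should match the leader address in the agent's stats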
c = consul.Consul(port=consul_port)
agent_self = c.agent.self()
leader = c.status.leader()
addr_port = agent_self['Stats']['consul']['leader_addr']
assert leader == addr_port, \
"Leader value was {0}, expected value " \
"was {1}".format(leader, addr_port)
def test_status_peers(self, consul_port):
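        # the leader address should be listed among the raft peers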
c = consul.Consul(port=consul_port)
agent_self = c.agent.self()
addr_port = agent_self['Stats']['consul']['leader_addr']
peers = c.status.peers()
assert addr_port in peers, \
"Expected value '{0}' " \
"in peer list but it was not present".format(addr_port)
def test_query(self, consul_port):
c = consul.Consul(port=consul_port)
# check that query list is empty
queries = c.query.list()
assert queries == []
# create a new named query
query_service = 'foo'
query_name = 'fooquery'
query = c.query.create(query_service, query_name)
# assert response contains query ID
assert 'ID' in query \
and query['ID'] is not None \
and str(query['ID']) != ''
# retrieve query using id and name
queries = c.query.get(query['ID'])
assert queries != [] \
and len(queries) == 1
assert queries[0]['Name'] == query_name \
and queries[0]['ID'] == query['ID']
# explain query
assert c.query.explain(query_name)['Query']
# delete query
assert c.query.delete(query['ID'])
def test_coordinate(self, consul_port):
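        # coordinate endpoints should be reachable and datacenters should expose the expected keys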
c = consul.Consul(port=consul_port)
c.coordinate.nodes()
c.coordinate.datacenters()
assert set(c.coordinate.datacenters()[0].keys()) == \
set(['Datacenter', 'Coordinates', 'AreaID'])
def test_operator(self, consul_port):
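        # the raft configuration should contain at least one leader and at least one voter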
c = consul.Consul(port=consul_port)
config = c.operator.raft_config()
assert config["Index"] == 1
leader = False
voter = False
for server in config["Servers"]:
if server["Leader"]:
leader = True
if server["Voter"]:
voter = True
assert leader
assert voter
| [((209, 232), 'consul.std.HTTPClient', 'consul.std.HTTPClient', ([], {}), '()\n', (230, 232), False, 'import consul\n'), ((486, 517), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (499, 517), False, 'import consul\n'), ((767, 798), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (780, 798), False, 'import consul\n'), ((1035, 1066), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (1048, 1066), False, 'import consul\n'), ((1710, 1766), 'pytest.raises', 'pytest.raises', (['AssertionError', 'c.kv.put', '"""foo"""', '{(1): 2}'], {}), "(AssertionError, c.kv.put, 'foo', {(1): 2})\n", (1723, 1766), False, 'import pytest\n'), ((1822, 1853), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (1835, 1853), False, 'import consul\n'), ((2291, 2322), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (2304, 2322), False, 'import consul\n'), ((2613, 2644), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (2626, 2644), False, 'import consul\n'), ((3255, 3286), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (3268, 3286), False, 'import consul\n'), ((3849, 3880), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (3862, 3880), False, 'import consul\n'), ((4269, 4300), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (4282, 4300), False, 'import consul\n'), ((4310, 4386), 'pytest.raises', 'pytest.raises', (['consul.ConsulException', 'c.kv.put', '"""foo"""', '"""bar"""'], {'acquire': '"""foo"""'}), "(consul.ConsulException, c.kv.put, 'foo', 'bar', acquire='foo')\n", (4323, 4386), False, 'import pytest\n'), ((4928, 4959), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (4941, 4959), False, 'import consul\n'), ((5283, 5314), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (5296, 5314), False, 'import consul\n'), ((5684, 5715), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (5697, 5715), False, 'import consul\n'), ((5982, 6013), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (5995, 6013), False, 'import consul\n'), ((6376, 6407), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (6389, 6407), False, 'import consul\n'), ((7672, 7685), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (7682, 7685), False, 'import time\n'), ((8985, 9009), 'time.sleep', 'time.sleep', (['(120 / 1000.0)'], {}), '(120 / 1000.0)\n', (8995, 9009), False, 'import time\n'), ((9283, 9314), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (9296, 9314), False, 'import consul\n'), ((9371, 9394), 'time.sleep', 'time.sleep', (['(80 / 1000.0)'], {}), '(80 / 1000.0)\n', (9381, 9394), False, 'import time\n'), ((9622, 9645), 'time.sleep', 'time.sleep', (['(40 / 1000.0)'], {}), '(40 / 1000.0)\n', (9632, 9645), False, 'import time\n'), ((9832, 9863), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (9845, 9863), False, 'import consul\n'), ((9914, 9937), 'time.sleep', 'time.sleep', (['(40 / 1000.0)'], {}), '(40 / 1000.0)\n', (9924, 9937), False, 'import time\n'), ((10141, 10164), 'time.sleep', 'time.sleep', (['(40 
/ 1000.0)'], {}), '(40 / 1000.0)\n', (10151, 10164), False, 'import time\n'), ((10575, 10598), 'time.sleep', 'time.sleep', (['(40 / 1000.0)'], {}), '(40 / 1000.0)\n', (10585, 10598), False, 'import time\n'), ((10665, 10688), 'time.sleep', 'time.sleep', (['(40 / 1000.0)'], {}), '(40 / 1000.0)\n', (10675, 10688), False, 'import time\n'), ((10768, 10799), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (10781, 10799), False, 'import consul\n'), ((11076, 11099), 'time.sleep', 'time.sleep', (['(40 / 1000.0)'], {}), '(40 / 1000.0)\n', (11086, 11099), False, 'import time\n'), ((11211, 11234), 'time.sleep', 'time.sleep', (['(40 / 1000.0)'], {}), '(40 / 1000.0)\n', (11221, 11234), False, 'import time\n'), ((11314, 11345), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (11327, 11345), False, 'import consul\n'), ((11687, 11718), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (11700, 11718), False, 'import consul\n'), ((11795, 11818), 'time.sleep', 'time.sleep', (['(40 / 1000.0)'], {}), '(40 / 1000.0)\n', (11805, 11818), False, 'import time\n'), ((11886, 11909), 'time.sleep', 'time.sleep', (['(40 / 1000.0)'], {}), '(40 / 1000.0)\n', (11896, 11909), False, 'import time\n'), ((12145, 12168), 'time.sleep', 'time.sleep', (['(40 / 1000.0)'], {}), '(40 / 1000.0)\n', (12155, 12168), False, 'import time\n'), ((12345, 12368), 'time.sleep', 'time.sleep', (['(40 / 1000.0)'], {}), '(40 / 1000.0)\n', (12355, 12368), False, 'import time\n'), ((12436, 12467), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (12449, 12467), False, 'import consul\n'), ((12522, 12545), 'time.sleep', 'time.sleep', (['(40 / 1000.0)'], {}), '(40 / 1000.0)\n', (12532, 12545), False, 'import time\n'), ((12752, 12775), 'time.sleep', 'time.sleep', (['(40 / 1000.0)'], {}), '(40 / 1000.0)\n', (12762, 12775), False, 'import time\n'), ((12935, 12966), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (12948, 12966), False, 'import consul\n'), ((13372, 13403), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (13385, 13403), False, 'import consul\n'), ((13677, 13708), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (13690, 13708), False, 'import consul\n'), ((14283, 14314), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (14296, 14314), False, 'import consul\n'), ((14602, 14694), 'pytest.raises', 'pytest.raises', (['consul.ConsulException', 'c.catalog.register', '"""foo"""', '"""10.1.10.11"""'], {'dc': '"""dc2"""'}), "(consul.ConsulException, c.catalog.register, 'foo',\n '10.1.10.11', dc='dc2')\n", (14615, 14694), False, 'import pytest\n'), ((15151, 15215), 'pytest.raises', 'pytest.raises', (['consul.ConsulException', 'c.catalog.nodes'], {'dc': '"""dc2"""'}), "(consul.ConsulException, c.catalog.nodes, dc='dc2')\n", (15164, 15215), False, 'import pytest\n'), ((15382, 15449), 'pytest.raises', 'pytest.raises', (['consul.ConsulException', 'c.catalog.services'], {'dc': '"""dc2"""'}), "(consul.ConsulException, c.catalog.services, dc='dc2')\n", (15395, 15449), False, 'import pytest\n'), ((15601, 15670), 'pytest.raises', 'pytest.raises', (['consul.ConsulException', 'c.catalog.node', '"""n1"""'], {'dc': '"""dc2"""'}), "(consul.ConsulException, c.catalog.node, 'n1', dc='dc2')\n", (15614, 15670), False, 'import pytest\n'), ((15882, 15954), 
'pytest.raises', 'pytest.raises', (['consul.ConsulException', 'c.catalog.service', '"""s1"""'], {'dc': '"""dc2"""'}), "(consul.ConsulException, c.catalog.service, 's1', dc='dc2')\n", (15895, 15954), False, 'import pytest\n'), ((16241, 16316), 'pytest.raises', 'pytest.raises', (['consul.ConsulException', 'c.catalog.deregister', '"""n2"""'], {'dc': '"""dc2"""'}), "(consul.ConsulException, c.catalog.deregister, 'n2', dc='dc2')\n", (16254, 16316), False, 'import pytest\n'), ((17080, 17111), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (17093, 17111), False, 'import consul\n'), ((17575, 17598), 'time.sleep', 'time.sleep', (['(40 / 1000.0)'], {}), '(40 / 1000.0)\n', (17585, 17598), False, 'import time\n'), ((18084, 18107), 'time.sleep', 'time.sleep', (['(40 / 1000.0)'], {}), '(40 / 1000.0)\n', (18094, 18107), False, 'import time\n'), ((18341, 18365), 'time.sleep', 'time.sleep', (['(120 / 1000.0)'], {}), '(120 / 1000.0)\n', (18351, 18365), False, 'import time\n'), ((18634, 18657), 'time.sleep', 'time.sleep', (['(40 / 1000.0)'], {}), '(40 / 1000.0)\n', (18644, 18657), False, 'import time\n'), ((19133, 19156), 'time.sleep', 'time.sleep', (['(40 / 1000.0)'], {}), '(40 / 1000.0)\n', (19143, 19156), False, 'import time\n'), ((19289, 19320), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (19302, 19320), False, 'import consul\n'), ((19806, 19829), 'time.sleep', 'time.sleep', (['(40 / 1000.0)'], {}), '(40 / 1000.0)\n', (19816, 19829), False, 'import time\n'), ((20361, 20384), 'time.sleep', 'time.sleep', (['(40 / 1000.0)'], {}), '(40 / 1000.0)\n', (20371, 20384), False, 'import time\n'), ((20629, 20654), 'time.sleep', 'time.sleep', (['(2200 / 1000.0)'], {}), '(2200 / 1000.0)\n', (20639, 20654), False, 'import time\n'), ((20934, 20957), 'time.sleep', 'time.sleep', (['(40 / 1000.0)'], {}), '(40 / 1000.0)\n', (20944, 20957), False, 'import time\n'), ((21278, 21301), 'time.sleep', 'time.sleep', (['(40 / 1000.0)'], {}), '(40 / 1000.0)\n', (21288, 21301), False, 'import time\n'), ((21465, 21496), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (21478, 21496), False, 'import consul\n'), ((21743, 21774), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (21756, 21774), False, 'import consul\n'), ((21886, 21909), 'time.sleep', 'time.sleep', (['(40 / 1000.0)'], {}), '(40 / 1000.0)\n', (21896, 21909), False, 'import time\n'), ((22162, 22185), 'time.sleep', 'time.sleep', (['(40 / 1000.0)'], {}), '(40 / 1000.0)\n', (22172, 22185), False, 'import time\n'), ((22321, 22352), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (22334, 22352), False, 'import consul\n'), ((22387, 22453), 'pytest.raises', 'pytest.raises', (['consul.ConsulException', 'c.session.create'], {'node': '"""n2"""'}), "(consul.ConsulException, c.session.create, node='n2')\n", (22400, 22453), False, 'import pytest\n'), ((22462, 22527), 'pytest.raises', 'pytest.raises', (['consul.ConsulException', 'c.session.create'], {'dc': '"""dc2"""'}), "(consul.ConsulException, c.session.create, dc='dc2')\n", (22475, 22527), False, 'import pytest\n'), ((22612, 22675), 'pytest.raises', 'pytest.raises', (['consul.ConsulException', 'c.session.list'], {'dc': '"""dc2"""'}), "(consul.ConsulException, c.session.list, dc='dc2')\n", (22625, 22675), False, 'import pytest\n'), ((22810, 22885), 'pytest.raises', 'pytest.raises', (['consul.ConsulException', 'c.session.info', 
'session_id'], {'dc': '"""dc2"""'}), "(consul.ConsulException, c.session.info, session_id, dc='dc2')\n", (22823, 22885), False, 'import pytest\n'), ((23140, 23209), 'pytest.raises', 'pytest.raises', (['consul.ConsulException', 'c.session.node', 'node'], {'dc': '"""dc2"""'}), "(consul.ConsulException, c.session.node, node, dc='dc2')\n", (23153, 23209), False, 'import pytest\n'), ((23364, 23442), 'pytest.raises', 'pytest.raises', (['consul.ConsulException', 'c.session.destroy', 'session_id'], {'dc': '"""dc2"""'}), "(consul.ConsulException, c.session.destroy, session_id, dc='dc2')\n", (23377, 23442), False, 'import pytest\n'), ((23649, 23680), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (23662, 23680), False, 'import consul\n'), ((23793, 23850), 'pytest.raises', 'pytest.raises', (['consul.NotFound', 'c.session.renew', "('1' * 36)"], {}), "(consul.NotFound, c.session.renew, '1' * 36)\n", (23806, 23850), False, 'import pytest\n'), ((24299, 24330), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (24312, 24330), False, 'import consul\n'), ((24339, 24384), 'pytest.raises', 'pytest.raises', (['consul.ACLDisabled', 'c.acl.list'], {}), '(consul.ACLDisabled, c.acl.list)\n', (24352, 24384), False, 'import pytest\n'), ((24393, 24448), 'pytest.raises', 'pytest.raises', (['consul.ACLDisabled', 'c.acl.info', "('1' * 36)"], {}), "(consul.ACLDisabled, c.acl.info, '1' * 36)\n", (24406, 24448), False, 'import pytest\n'), ((24455, 24502), 'pytest.raises', 'pytest.raises', (['consul.ACLDisabled', 'c.acl.create'], {}), '(consul.ACLDisabled, c.acl.create)\n', (24468, 24502), False, 'import pytest\n'), ((24511, 24565), 'pytest.raises', 'pytest.raises', (['consul.ACLDisabled', 'c.acl.update', '"""foo"""'], {}), "(consul.ACLDisabled, c.acl.update, 'foo')\n", (24524, 24565), False, 'import pytest\n'), ((24574, 24627), 'pytest.raises', 'pytest.raises', (['consul.ACLDisabled', 'c.acl.clone', '"""foo"""'], {}), "(consul.ACLDisabled, c.acl.clone, 'foo')\n", (24587, 24627), False, 'import pytest\n'), ((24636, 24691), 'pytest.raises', 'pytest.raises', (['consul.ACLDisabled', 'c.acl.destroy', '"""foo"""'], {}), "(consul.ACLDisabled, c.acl.destroy, 'foo')\n", (24649, 24691), False, 'import pytest\n'), ((24759, 24794), 'consul.Consul', 'consul.Consul', ([], {'port': 'acl_consul.port'}), '(port=acl_consul.port)\n', (24772, 24794), False, 'import consul\n'), ((24803, 24856), 'pytest.raises', 'pytest.raises', (['consul.ACLPermissionDenied', 'c.acl.list'], {}), '(consul.ACLPermissionDenied, c.acl.list)\n', (24816, 24856), False, 'import pytest\n'), ((24865, 24920), 'pytest.raises', 'pytest.raises', (['consul.ACLPermissionDenied', 'c.acl.create'], {}), '(consul.ACLPermissionDenied, c.acl.create)\n', (24878, 24920), False, 'import pytest\n'), ((24929, 24997), 'pytest.raises', 'pytest.raises', (['consul.ACLPermissionDenied', 'c.acl.update', '"""anonymous"""'], {}), "(consul.ACLPermissionDenied, c.acl.update, 'anonymous')\n", (24942, 24997), False, 'import pytest\n'), ((25006, 25073), 'pytest.raises', 'pytest.raises', (['consul.ACLPermissionDenied', 'c.acl.clone', '"""anonymous"""'], {}), "(consul.ACLPermissionDenied, c.acl.clone, 'anonymous')\n", (25019, 25073), False, 'import pytest\n'), ((25082, 25151), 'pytest.raises', 'pytest.raises', (['consul.ACLPermissionDenied', 'c.acl.destroy', '"""anonymous"""'], {}), "(consul.ACLPermissionDenied, c.acl.destroy, 'anonymous')\n", (25095, 25151), False, 'import pytest\n'), ((25219, 25254), 'consul.Consul', 
'consul.Consul', ([], {'port': 'acl_consul.port'}), '(port=acl_consul.port)\n', (25232, 25254), False, 'import consul\n'), ((26571, 26650), 'pytest.raises', 'pytest.raises', (['consul.ACLPermissionDenied', 'c.kv.put', '"""foo"""', '"""bar2"""'], {'token': 'token'}), "(consul.ACLPermissionDenied, c.kv.put, 'foo', 'bar2', token=token)\n", (26584, 26650), False, 'import pytest\n'), ((26672, 26746), 'pytest.raises', 'pytest.raises', (['consul.ACLPermissionDenied', 'c.kv.delete', '"""foo"""'], {'token': 'token'}), "(consul.ACLPermissionDenied, c.kv.delete, 'foo', token=token)\n", (26685, 26746), False, 'import pytest\n'), ((26836, 26915), 'pytest.raises', 'pytest.raises', (['consul.ACLPermissionDenied', 'c.kv.get', '"""private/foo"""'], {'token': 'token'}), "(consul.ACLPermissionDenied, c.kv.get, 'private/foo', token=token)\n", (26849, 26915), False, 'import pytest\n'), ((26949, 27040), 'pytest.raises', 'pytest.raises', (['consul.ACLPermissionDenied', 'c.kv.put', '"""private/foo"""', '"""bar2"""'], {'token': 'token'}), "(consul.ACLPermissionDenied, c.kv.put, 'private/foo', 'bar2',\n token=token)\n", (26962, 27040), False, 'import pytest\n'), ((27070, 27157), 'pytest.raises', 'pytest.raises', (['consul.ACLPermissionDenied', 'c.kv.delete', '"""private/foo"""'], {'token': 'token'}), "(consul.ACLPermissionDenied, c.kv.delete, 'private/foo', token\n =token)\n", (27083, 27157), False, 'import pytest\n'), ((27246, 27339), 'pytest.raises', 'pytest.raises', (['consul.ACLPermissionDenied', 'c.agent.service.register', '"""bar-1"""'], {'token': 'token'}), "(consul.ACLPermissionDenied, c.agent.service.register, 'bar-1',\n token=token)\n", (27259, 27339), False, 'import pytest\n'), ((28097, 28156), 'consul.Consul', 'consul.Consul', ([], {'port': 'acl_consul.port', 'token': 'acl_consul.token'}), '(port=acl_consul.port, token=acl_consul.token)\n', (28110, 28156), False, 'import consul\n'), ((29162, 29210), 'consul.Consul', 'consul.Consul', ([], {'port': 'acl_consul.port', 'token': 'token'}), '(port=acl_consul.port, token=token)\n', (29175, 29210), False, 'import consul\n'), ((29286, 29360), 'pytest.raises', 'pytest.raises', (['consul.ACLPermissionDenied', 'c_limited.kv.put', '"""foo"""', '"""bar2"""'], {}), "(consul.ACLPermissionDenied, c_limited.kv.put, 'foo', 'bar2')\n", (29299, 29360), False, 'import pytest\n'), ((29382, 29451), 'pytest.raises', 'pytest.raises', (['consul.ACLPermissionDenied', 'c_limited.kv.delete', '"""foo"""'], {}), "(consul.ACLPermissionDenied, c_limited.kv.delete, 'foo')\n", (29395, 29451), False, 'import pytest\n'), ((29541, 29615), 'pytest.raises', 'pytest.raises', (['consul.ACLPermissionDenied', 'c_limited.kv.get', '"""private/foo"""'], {}), "(consul.ACLPermissionDenied, c_limited.kv.get, 'private/foo')\n", (29554, 29615), False, 'import pytest\n'), ((29649, 29735), 'pytest.raises', 'pytest.raises', (['consul.ACLPermissionDenied', 'c_limited.kv.put', '"""private/foo"""', '"""bar2"""'], {}), "(consul.ACLPermissionDenied, c_limited.kv.put, 'private/foo',\n 'bar2')\n", (29662, 29735), False, 'import pytest\n'), ((29765, 29842), 'pytest.raises', 'pytest.raises', (['consul.ACLPermissionDenied', 'c_limited.kv.delete', '"""private/foo"""'], {}), "(consul.ACLPermissionDenied, c_limited.kv.delete, 'private/foo')\n", (29778, 29842), False, 'import pytest\n'), ((29936, 30015), 'pytest.raises', 'pytest.raises', (['consul.ACLPermissionDenied', 'c.kv.get', '"""private/foo"""'], {'token': 'token'}), "(consul.ACLPermissionDenied, c.kv.get, 'private/foo', token=token)\n", (29949, 30015), False, 
'import pytest\n'), ((30058, 30149), 'pytest.raises', 'pytest.raises', (['consul.ACLPermissionDenied', 'c.kv.put', '"""private/foo"""', '"""bar2"""'], {'token': 'token'}), "(consul.ACLPermissionDenied, c.kv.put, 'private/foo', 'bar2',\n token=token)\n", (30071, 30149), False, 'import pytest\n'), ((30179, 30266), 'pytest.raises', 'pytest.raises', (['consul.ACLPermissionDenied', 'c.kv.delete', '"""private/foo"""'], {'token': 'token'}), "(consul.ACLPermissionDenied, c.kv.delete, 'private/foo', token\n =token)\n", (30192, 30266), False, 'import pytest\n'), ((30518, 30549), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (30531, 30549), False, 'import consul\n'), ((30888, 30919), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (30901, 30919), False, 'import consul\n'), ((31252, 31283), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (31265, 31283), False, 'import consul\n'), ((32152, 32183), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (32165, 32183), False, 'import consul\n'), ((32422, 32453), 'consul.Consul', 'consul.Consul', ([], {'port': 'consul_port'}), '(port=consul_port)\n', (32435, 32453), False, 'import consul\n'), ((700, 712), 'six.b', 'six.b', (['"""bar"""'], {}), "('bar')\n", (705, 712), False, 'import six\n'), ((1114, 1136), 'struct.pack', 'struct.pack', (['"""i"""', '(1000)'], {}), "('i', 1000)\n", (1125, 1136), False, 'import struct\n'), ((1191, 1224), 'struct.unpack', 'struct.unpack', (['"""i"""', "data['Value']"], {}), "('i', data['Value'])\n", (1204, 1224), False, 'import struct\n'), ((1362, 1374), 'six.b', 'six.b', (['"""bar"""'], {}), "('bar')\n", (1367, 1374), False, 'import six\n'), ((2218, 2231), 'six.b', 'six.b', (['"""bar2"""'], {}), "('bar2')\n", (2223, 2231), False, 'import six\n'), ((24133, 24143), 'six.b', 'six.b', (['"""1"""'], {}), "('1')\n", (24138, 24143), False, 'import six\n'), ((26550, 26562), 'six.b', 'six.b', (['"""bar"""'], {}), "('bar')\n", (26555, 26562), False, 'import six\n'), ((26815, 26827), 'six.b', 'six.b', (['"""bar"""'], {}), "('bar')\n", (26820, 26827), False, 'import six\n'), ((29265, 29277), 'six.b', 'six.b', (['"""bar"""'], {}), "('bar')\n", (29270, 29277), False, 'import six\n'), ((29520, 29532), 'six.b', 'six.b', (['"""bar"""'], {}), "('bar')\n", (29525, 29532), False, 'import six\n'), ((3163, 3173), 'six.b', 'six.b', (['"""1"""'], {}), "('1')\n", (3168, 3173), False, 'import six\n'), ((3175, 3185), 'six.b', 'six.b', (['"""2"""'], {}), "('2')\n", (3180, 3185), False, 'import six\n'), ((3187, 3197), 'six.b', 'six.b', (['"""3"""'], {}), "('3')\n", (3192, 3197), False, 'import six\n'), ((5331, 5353), 'base64.b64encode', 'base64.b64encode', (["b'1'"], {}), "(b'1')\n", (5347, 5353), False, 'import base64\n'), ((25574, 25599), 'operator.itemgetter', 'operator.itemgetter', (['"""ID"""'], {}), "('ID')\n", (25593, 25599), False, 'import operator\n'), ((28457, 28482), 'operator.itemgetter', 'operator.itemgetter', (['"""ID"""'], {}), "('ID')\n", (28476, 28482), False, 'import operator\n')] |