query (string, 9–9.05k chars) | document (string, 10–222k chars) | metadata (dict) | negatives (sequence, length 30) | negative_scores (sequence, length 30) | document_score (string, 4–10 chars) | document_rank (2 classes) |
---|---|---|---|---|---|---|
Import a function from a full module path | def import_from(full_name):
    module_name, function_name = full_name.rsplit('.', 1)
    mod = import_module(module_name)
    return getattr(mod, function_name) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def import_module(self, location, name):",
"def load_function(path):\r\n module_path, _, name = path.rpartition('.')\r\n return getattr(import_module(module_path), name)",
"def load_function(path):\r\n module_path, _, name = path.rpartition('.')\r\n return getattr(import_module(module_path), name)",
"def load_function(path):\r\n module_path, _, name = path.rpartition('.')\r\n return getattr(import_module(module_path), name)",
"def import_function(name: str):\n module_name, function_name = name.rsplit(\".\", 1)\n module = importlib.import_module(module_name)\n return getattr(module, function_name)",
"def import_from_path(module: str, path: str, name: str):\n\n spec = importlib.util.spec_from_file_location(module, path)\n foo = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(foo)\n return getattr(foo, name)",
"def import_from_cwd(module, imp=..., package=...):\n ...",
"def importFromPath(filename):\n try:\n path, name = os.path.split(filename)\n name, ext = os.path.splitext(name)\n file, filename, data = imp.find_module(name, [path])\n importedModule = imp.load_module(name, file, filename, data)\n except Exception as ae:\n raise Exception('Importing module '+ filename + ' at ' + path + os.sep + name + ' failed with error '+ str(ae))\n return importedModule",
"def relative_import(path):\n caller_path = os.path.abspath(inspect.getfile(inspect.currentframe().f_back))\n\n script_path = os.path.abspath(os.path.join(os.path.dirname(caller_path), path))\n script_name = os.path.splitext(os.path.basename(script_path))[0]\n\n sys.path.append(os.path.dirname(script_path))\n try:\n module = importlib.import_module(script_name)\n importlib.reload(module)\n return module\n finally:\n del sys.path[-1]",
"def import_module(module):\n return importlib.import_module(module)",
"def import_module(name, path):\n spec = importlib.util.spec_from_file_location(name, path)\n module = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(module)\n return module",
"def import_by_source(path: str):\n\n module = splitext(basename(path))[0]\n\n sys.path.append(dirname(path))\n\n spec = importlib.util.spec_from_file_location(module, path)\n module = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(module)\n\n sys.path.pop()\n\n return module",
"def import_module_from(mod_path):\n if '.' in mod_path:\n bits = mod_path.split('.')\n mod_name = bits.pop()\n mod_path = '.'.join(bits)\n return import_module(mod_path, mod_name)\n else:\n return import_module(mod_path)",
"def import_module(module, from_where):\n from_module = __import__(from_where, globals(), locals(), [module])\n return getattr(from_module, module)",
"def import_module_from_module_path(path):\n return SourceFileLoader('', path).load_module()",
"def importModule(filename):\n\tfrom os.path import abspath, split, splitext\n\tfrom sys import path\n\tif isPython2():\n\t\tfrom imp import reload\n\telse:\n\t\tfrom importlib import reload\n\t\n\tfilename = adaptPath(filename)\n\tmodulePath = abspath(split(filename)[0])\n\tmoduleName = splitext(split(filename)[1])[0]\n\t\n\tif not modulePath in path:\n\t\tpath.append (modulePath)\n\tmodule = __import__(moduleName)\n\treload (module)\n\treturn module",
"def _import_module(name):\r\n __import__(name)\r\n return sys.modules[name]",
"def _import_module(name):\r\n __import__(name)\r\n return sys.modules[name]",
"def _import_module(name):\r\n __import__(name)\r\n return sys.modules[name]",
"def _import_module(name):\n __import__(name)\n return sys.modules[name]",
"def _import_module(name):\n __import__(name)\n return sys.modules[name]",
"def _import_from(mod, path, mod_dir=None):\n\n if mod in sys.modules:\n return sys.modules[mod]\n\n if mod_dir is None:\n full_mod = mod\n else:\n full_mod = mod_dir.replace(os.sep, '.')\n\n if mod_dir is None:\n mod_dir = mod.replace('.', os.sep)\n\n if not os.path.exists(path):\n return None\n\n source_path = os.path.join(path, mod_dir, '__init__.py')\n if not os.path.exists(source_path):\n source_path = os.path.join(path, mod_dir + '.py')\n\n if not os.path.exists(source_path):\n return None\n\n if os.sep in mod_dir:\n append, mod_dir = mod_dir.rsplit(os.sep, 1)\n path = os.path.join(path, append)\n\n try:\n if sys.version_info < (3, 5):\n mod_info = imp.find_module(mod_dir, [path])\n return imp.load_module(mod, *mod_info)\n\n else:\n package = mod.split('.', 1)[0]\n package_dir = full_mod.split('.', 1)[0]\n package_path = os.path.join(path, package_dir)\n CUSTOM_FINDER.add_module(package, package_path)\n\n return importlib.import_module(mod)\n\n except ImportError:\n return None",
"def _load_module(modulepath):\n\n mod = __import__(modulepath)\n path = []\n for token in modulepath.split(\".\")[1:]:\n path.append(token)\n mod = getattr(mod, token)\n return mod",
"def _import_from(mod, path, mod_dir=None):\n\n if mod_dir is None:\n mod_dir = mod\n\n if not os.path.exists(path):\n return None\n\n if not os.path.exists(os.path.join(path, mod_dir)):\n return None\n\n try:\n mod_info = imp.find_module(mod_dir, [path])\n return imp.load_module(mod, *mod_info)\n except ImportError:\n return None",
"def import_from(module: str, name: str):\n\n module = __import__(module, fromlist=[name])\n return getattr(module, name)",
"def import_function(s):\n a = s.split('.')\n j = lambda x: '.'.join(x)\n return getattr(import_module(j(a[:-1])), a[-1])",
"def import_file(name: Text, file_path: Text):\n\n spec = spec_from_file_location(f\"luh3417.{name}\", file_path)\n module = module_from_spec(spec)\n spec.loader.exec_module(module)\n\n return module",
"def import_module_from_file(f_path, verbose=True):\n # get absolute path\n f_path = os.path.abspath(f_path)\n assert os.path.isfile(f_path)\n\n f_dir = os.path.dirname(f_path)\n f_name = os.path.basename(f_path)\n f_id = os.path.splitext(f_name)[0]\n\n try:\n # add f_dir to system path for later import\n sys.path.insert(0, f_dir)\n # import module by name\n module = importlib.import_module(f_id)\n return module\n except ImportError:\n err_str = \"ERROR: Could not import module '{}' from '{}'.\\n\"\n err_str = err_str.format(f_name, f_dir)\n raise ImportError(err_str)",
"def call_function_from_import_path(import_path: str) -> Any:\n try:\n callback_func = import_attr(import_path)\n except Exception as e:\n raise ValueError(f\"The import path {import_path} cannot be imported: {e}\")\n\n if not callable(callback_func):\n raise TypeError(f\"The import path {import_path} is not callable.\")\n\n try:\n return callback_func()\n except Exception as e:\n raise RuntimeError(f\"The function {import_path} raised an exception: {e}\")",
"def load_function(engine_path):\r\n module_path, _, name = engine_path.rpartition('.')\r\n return getattr(importlib.import_module(module_path), name)"
] | [
"0.7085891",
"0.698463",
"0.698463",
"0.698463",
"0.69600534",
"0.69119257",
"0.6854556",
"0.6701928",
"0.6670122",
"0.6668526",
"0.6647451",
"0.66429377",
"0.66402143",
"0.6558814",
"0.6548901",
"0.652248",
"0.6475477",
"0.6475477",
"0.6475477",
"0.6468391",
"0.6468391",
"0.646328",
"0.64550793",
"0.64433074",
"0.6435672",
"0.64285463",
"0.6390404",
"0.63809305",
"0.63442934",
"0.6311481"
] | 0.73028684 | 0 |
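
The `import_from` document above resolves a dotted path in two steps: split on the last dot, import the module half, then fetch the attribute with `getattr`. A minimal self-contained sketch of the same pattern, runnable with only the standard library (the `os.path.join` target is just an illustrative choice):

from importlib import import_module

def import_from(full_name):
    # Split "package.module.attribute" on the last dot only.
    module_name, function_name = full_name.rsplit('.', 1)
    # Import the module part, then pull the attribute off it.
    mod = import_module(module_name)
    return getattr(mod, function_name)

join = import_from('os.path.join')
print(join('a', 'b'))  # 'a/b' on POSIX, 'a\\b' on Windows
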
Creates a new Bloom Filter ``key`` with desired probability of false positives ``errorRate`` and expected entries to be inserted as ``capacity``. Default expansion value is 2. By default, the filter is auto-scaling. | def bfCreate(self, key, errorRate, capacity, expansion=None, noScale=None):
    params = [key, errorRate, capacity]
    self.appendExpansion(params, expansion)
    self.appendNoScale(params, noScale)
    return self.execute_command(self.BF_RESERVE, *params) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cfCreate(self, key, capacity, expansion=None, bucket_size=None, max_iterations=None):\n params = [key, capacity]\n self.appendExpansion(params, expansion)\n self.appendBucketSize(params, bucket_size)\n self.appendMaxIterations(params, max_iterations)\n\n return self.execute_command(self.CF_RESERVE, *params)",
"def __init__(self, server, bfkeypreffix, capacity, error_rate=0.001):\n if not (0 < error_rate < 1):\n raise ValueError(\"Error_Rate must be between 0 and 1.\")\n if not capacity > 0:\n raise ValueError(\"Capacity must be > 0\")\n # given M = num_bits, k = num_slices, P = error_rate, n = capacity\n # k = log2(1/P)\n # solving for m = bits_per_slice\n # n ~= M * ((ln(2) ** 2) / abs(ln(P)))\n # n ~= (k * m) * ((ln(2) ** 2) / abs(ln(P)))\n # m ~= n * abs(ln(P)) / (k * (ln(2) ** 2))\n num_slices = int(math.ceil(math.log(1.0 / error_rate, 2)))\n bits_per_slice = int(math.ceil(\n (capacity * abs(math.log(error_rate))) /\n (num_slices * (math.log(2) ** 2))))\n if bits_per_slice > MAX_PER_SLICE_SIZE:\n raise ValueError(\"Capacity and error_rate make per slice size extended, MAX_PER_SLICE_SIZE is %s\" % (MAX_PER_SLICE_SIZE))\n self._setup(error_rate, num_slices, bits_per_slice, capacity, 0, server, bfkeypreffix)",
"def bfInsert(self, key, items, capacity=None, error=None, noCreate=None, expansion=None, noScale=None):\n params = [key]\n self.appendCapacity(params, capacity)\n self.appendError(params, error)\n self.appendExpansion(params, expansion)\n self.appendNoCreate(params, noCreate)\n self.appendNoScale(params, noScale)\n self.appendItems(params, items)\n\n return self.execute_command(self.BF_INSERT, *params)",
"def __init__(self, capacity):\n self.capacity = capacity # Number of buckets in the hash table\n self.storage = [None] * capacity\n self.key_count = 0",
"def __init__(self, capacity=100):\n \n self.capacity = capacity\n self.size = 0\n self._keys = []\n self._entry = [[] for _ in range(capacity)]",
"def __init__(\n self, capacity: int, operation: Any, neutral_element: Optional[Any] = None\n ):\n\n assert (\n capacity > 0 and capacity & (capacity - 1) == 0\n ), \"Capacity must be positive and a power of 2!\"\n self.capacity = capacity\n if neutral_element is None:\n neutral_element = (\n 0.0\n if operation is operator.add\n else float(\"-inf\")\n if operation is max\n else float(\"inf\")\n )\n self.neutral_element = neutral_element\n self.value = [self.neutral_element for _ in range(2 * capacity)]\n self.operation = operation",
"def __init__(self, capacity, operation, neutral_element):\n assert (\n capacity > 0 and capacity & (capacity - 1) == 0\n ), \"capacity must be positive and a power of 2.\"\n self._capacity = capacity\n self._value = [neutral_element for _ in range(2 * capacity)]\n self._operation = operation\n self.neutral_element = neutral_element",
"def __init__(self, capacity, operation, neutral_element):\n assert capacity > 0 and capacity & (capacity - 1) == 0, \"capacity must be positive and a power of 2.\"\n self._capacity = capacity\n self._value = [neutral_element for _ in range(2 * capacity)]\n self._operation = operation",
"def __init__(self, capacity, operation, neutral_element):\n assert capacity > 0 and capacity & (capacity - 1) == 0, \"capacity must be positive and a power of 2.\"\n self._capacity = capacity\n self._value = [neutral_element for _ in range(2 * capacity)]\n self._operation = operation",
"def __init__(self, capacity, operation, neutral_element):\n assert capacity > 0 and capacity & (capacity - 1) == 0, \"capacity must be positive and a power of 2.\"\n self._capacity = capacity\n self._value = [neutral_element for _ in range(2 * capacity)]\n self._operation = operation",
"def __init__(self, capacity, alpha, beta_i, beta_f, beta_anneal,\n weight_offset):\n self.weight_offset = weight_offset\n self.alpha = alpha\n\n assert beta_i < beta_f, \"Beta update assumes beta_i < beta_f\"\n self.beta = beta_i\n self.beta_f = beta_f\n self.beta_update = (beta_f - beta_i) / beta_anneal\n\n self.experiences = WeightedRingBuf(capacity)\n # ids of experiences that haven't been used for training yet.\n self.unplayed_experiences = deque(maxlen=capacity)",
"def bf_counter(file_name, k, n, capacity, error_rate, verbose=False):\n if verbose:\n start = time.time()\n print('BFCounter started.')\n\n heap = []\n for i in range(n):\n heap.append((0, ''))\n\n bf = BloomFilter(capacity, error_rate, 'kmer_bf')\n\n kmer_counter = defaultdict(lambda: 1)\n\n # Assign functions to local variables for performance improvement\n add_to_bf = bf.add\n heap_pushpop = heapq.heappushpop\n\n with open(file_name, 'r') as f:\n line_num = 0\n for line in f:\n if line_num % 4 == 1: # dna sequence\n kmer_count = len(line) - k\n for i in range(kmer_count):\n kmer = line[i:i + k]\n if kmer not in bf: # not in Bloom Filter\n add_to_bf(kmer)\n else: # in Bloom Filter\n kmer_counter[kmer] += 1\n line_num += 1\n if verbose:\n end_hash = time.time()\n hash_table_size = sys.getsizeof(kmer_counter) / (1024 ** 2)\n print('Hash table is created in {:.2f} seconds.'.format(\n end_hash - start))\n print('Hash table size: {:.2f} MB.'.format(hash_table_size))\n start_populate = time.time()\n print('Populating the heap...')\n\n for count, kmer in kmer_counter.items():\n # insert to the heap if count is bigger than minimum\n if count > heap[0][0]:\n heap_pushpop(heap, (count, kmer))\n\n if verbose:\n end_populate = time.time()\n print('Heap is populated in {:.2f} seconds.'.format(\n end_populate - start_populate\n ))\n\n os.remove('kmer_bf')\n if verbose:\n end = time.time()\n print('BFCounter is completed in {:.2f} seconds.'.format(end - start))\n\n return heap",
"def __init__(self, capacity, initial):\n\t\tself.capacity = capacity\n\t\tself.amount = initial",
"def __init__(self, capacity, fillValue=None):\r\n self._items = list()\r\n for count in range(capacity):\r\n self._items.append(fillValue)",
"def __init__(self, capacity: int, function) -> None:\n self.buckets = DynamicArray()\n for _ in range(capacity):\n self.buckets.append(LinkedList())\n self.capacity = capacity\n self.hash_function = function\n self.size = 0",
"def __init__(self, capacity, fillValue = None):\n \n self._items = list() \n self._fillValue = fillValue\n self._DEFAULT_CAPACITY = capacity\n self._logicalSize = 0 #as required by exercise 1\n \n \n for count in xrange(capacity):\n self._items.append(self._fillValue)",
"def knapsack(items, capacity):\r\n pass",
"def __init__(self, capacity, fillValue=None):\r\n self._items = list()\r\n self._logicalSize = 0\r\n # Track the capacity and fill value for adjustments later\r\n self._capacity = capacity\r\n self._fillValue = fillValue\r\n for count in range(capacity):\r\n self._items.append(fillValue)",
"def create_capacity_limiter(total_tokens: float) -> abc.CapacityLimiter:\n return get_asynclib().CapacityLimiter(total_tokens)",
"def cfInsert(self, key, items, capacity=None, nocreate=None):\n params = [key]\n self.appendCapacity(params, capacity)\n self.appendNoCreate(params, nocreate)\n self.appendItems(params, items)\n\n return self.execute_command(self.CF_INSERT, *params)",
"def expandable_capacity(self, expandable_capacity):\n\n self._expandable_capacity = expandable_capacity",
"def _generate_table(self):\n for i in xrange(32):\n self._table.append(\n BloomFilter(\n capacity=self.__capacity,\n error_rate=self.__error_rate\n )\n )",
"def __init__(self, capacity, units=0):\n self.capacity = capacity\n self.units = units",
"def FixedWidthBucketer(width, num_finite_buckets=100):\n return Bucketer(width=width, growth_factor=0.0,\n num_finite_buckets=num_finite_buckets)",
"def __init__(self, width, growth_factor, num_finite_buckets):\n\n if num_finite_buckets < 0:\n raise ValueError('num_finite_buckets must be >= 0 (was %d)' %\n num_finite_buckets)\n\n self.width = width\n self.growth_factor = growth_factor\n self.num_finite_buckets = num_finite_buckets\n self.total_buckets = num_finite_buckets + 2\n self.underflow_bucket = 0\n self.overflow_bucket = self.total_buckets - 1\n\n self._lower_bounds = list(self._generate_lower_bounds())",
"def __init__(self, bucket_size, bucket_fill_rate, current_time=None):\n self.__bucket_contents = bucket_size\n self.__bucket_size = bucket_size\n self.__bucket_fill_rate = bucket_fill_rate\n\n if current_time is None:\n current_time = time.time()\n\n self.__last_bucket_fill_time = current_time",
"def capacity_factor(self, value: float) -> None:\n # State S, I, E, SE, or EE\n self._capacity_factor = value",
"def __init__(self, capacity=4):\n self.capacity = capacity\n self.size = 0\n self.table = [None] * capacity",
"def new_capacity_rule(mod, prj, prd):\n return 0",
"def __init__(__self__, *,\n capacity: Optional[int] = None,\n name: Optional[str] = None):\n if capacity is not None:\n pulumi.set(__self__, \"capacity\", capacity)\n if name is not None:\n pulumi.set(__self__, \"name\", name)"
] | [
"0.6714165",
"0.6105351",
"0.6015295",
"0.573277",
"0.5619034",
"0.55716175",
"0.55599296",
"0.5556067",
"0.5556067",
"0.5556067",
"0.5469874",
"0.5467277",
"0.54228985",
"0.5409745",
"0.54063326",
"0.5370435",
"0.53439593",
"0.52427536",
"0.5227065",
"0.52156657",
"0.51400864",
"0.50792253",
"0.50605166",
"0.5004958",
"0.49932584",
"0.497809",
"0.49667922",
"0.49578026",
"0.49486753",
"0.49344996"
] | 0.7681637 | 0 |
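
`bfCreate` only forwards `errorRate` and `capacity` to the server's BF.RESERVE; the sizing arithmetic happens inside RedisBloom. The pure-Python negatives above (e.g. the `RedisLocalBloomFilter` constructor) show the standard formulas, sketched here as a worked example; the 1% / 10,000-item inputs are illustrative only:

import math

def bloom_dimensions(capacity, error_rate):
    # k = log2(1/P): number of hash slices needed for error rate P.
    num_slices = int(math.ceil(math.log(1.0 / error_rate, 2)))
    # m ~= n * |ln P| / (k * ln(2)^2): bits needed per slice for n items.
    bits_per_slice = int(math.ceil(
        (capacity * abs(math.log(error_rate))) /
        (num_slices * (math.log(2) ** 2))))
    return num_slices, bits_per_slice

print(bloom_dimensions(10000, 0.01))  # (7, 13693): 7 hashes, ~96k bits total
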
Adds to a Bloom Filter ``key`` an ``item``. | def bfAdd(self, key, item):
    params = [key, item]
    return self.execute_command(self.BF_ADD, *params) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cfAdd(self, key, item):\n params = [key, item]\n \n return self.execute_command(self.CF_ADD, *params)",
"def add(self, key):\n\t\t#super(CountingBloomFilter, self).add(key)\n\t\t#super(CountingBloomFilter, self).generateStats()\n\t\tfor i in self.getBitArrayIndices(key):\n\t\t\tself.ba[i] += 1\n\t\tself.n += 1",
"def add(self, item):\n self.num_item += 1\n indexs = self.__get_indexs(item)\n for index in indexs:\n self.filter_bitarray[index] = True",
"def add_item(self, key, item):\n self[key].add(item)\n try:\n self._reverse_store[item].add(key)\n except KeyError:\n self._reverse_store[item] = set([key])",
"def append(self, item):\n # FIXME: this is only append if the key isn't already present\n key, value = item\n self._main[key] = value",
"def add(self, item):\n self._dict[item] = item",
"def add(self, key):\n if key in self:\n return True\n if not self.filters:\n filter = RedisLocalBloomFilter(\n server=self.server, \n bfkeypreffix = self.FILTER_KEY_FMT % (self.bfkeypreffix, self.filter_count),\n capacity=self.initial_capacity,\n error_rate=self.error_rate * (1.0 - self.ratio))\n self.filter_count += 1\n self.filters.append(filter)\n else:\n filter = self.filters[-1]\n if filter.count >= filter.capacity:\n capacity = filter.capacity * self.scale\n if capacity > MAX_PER_SLICE_SIZE:\n capacity = MAX_PER_SLICE_SIZE\n filter = RedisLocalBloomFilter(\n server=self.server,\n bfkeypreffix = self.FILTER_KEY_FMT % (self.bfkeypreffix, self.filter_count),\n capacity=capacity,\n error_rate=self.error_rate * (1.0 - self.ratio))\n self.filter_count += 1\n self.filters.append(filter)\n if self.max_filters > 0 and len(self.filters) >= self.max_filters:\n f = self.filters[0]\n f.clear()\n del self.filters[0]\n filter.add(key, skip_check=True)\n return False",
"def add(self, key, value):",
"def put(self, key, item):\n # Do nothing if key is NoneType or item is NoneType\n if (key is None or item is None):\n return\n\n # If Key exists, update Cache Item\n if key in self.cache_data:\n for ci in self.LFU:\n if ci.key is key:\n ci.updateItem(item)\n else:\n # Length will be longer than max capacity, make room\n if len(self.cache_data) == self.MAX_ITEMS:\n\n # Determine discarded cache item\n discard = self.LFU[0]\n for x in self.LFU:\n if x.freq < discard.freq or \\\n x.freq == discard.freq and x.age > discard.age:\n discard = x\n\n # Discard Cache Item\n print(\"DISCARD: {}\".format(discard.key))\n del self.cache_data[discard.key]\n self.LFU.remove(discard)\n\n # Add new Cache Item\n data = LFUCacheItem(key, item, 0, 0)\n self.LFU.append(data)\n\n # increase age of all items\n for x in self.LFU:\n x.age += 1\n\n self.cache_data[key] = item",
"def add(self, key, value):\n\t\tself.__add_key_to_bt(key)[3] = self.__add_key_value_to_ll(key, value)",
"def add_item (self, item):\n new_item = CacheItem (item)\n cached = self.cache.get(hash(item))\n if cached is None:\n self.evict_or_add (new_item)\n cached.hits += 1",
"def add_item(self, key, data):\n hash_key = self.count_hash(key, len(self.slots))\n\n if self.slots[hash_key] is None:\n self.slots[hash_key] = key\n self.data[hash_key] = data\n else:\n if self.slots[hash_key] == key:\n self.data[hash_key] = data\n elif isinstance(self.slots[hash_key], int):\n self.slots[hash_key] = (self.slots[hash_key], key,)\n self.data[hash_key] = (self.data[hash_key], data,)\n elif len(self.slots[hash_key]) > 1:\n list_slot = list(self.slots[hash_key])\n list_data = list(self.data[hash_key])\n list_slot.append(key)\n list_data.append(data)\n self.slots[hash_key] = tuple(list_slot)\n self.data[hash_key] = tuple(list_data)",
"def put(self, key, item):\n if key and item:\n self.cache_data[key] = item",
"def put(self, key, item):\n if key and item:\n self.cache_data[key] = item",
"def add(self, item):",
"def add_to_bag(self, item):\n self._bag.append(item)",
"def put(self, key, item):\n if key or item:\n self.cache_data[key] = item\n if len(self.cache_data) > BaseCaching.MAX_ITEMS:\n print(\"DISCARD: {}\".format(self.last))\n del self.cache_data[self.last]\n self.last = key",
"def __setitem__(self, key, item):\n self.set_field(key, item)",
"def insert(self, item):\n for h_num in xrange(self.k):\n val = self.hash_value(item, h_num)\n self.arr[val] = True",
"def add(self, key, value):\n self._data.add_last(self._Item(key, value))",
"def put(self, key, item):\n if key and item:\n if key in self.cache_data.keys():\n self.cache_data[key] = item\n self.stack.append(key)\n else:\n if len(self.cache_data.keys()) < self.MAX_ITEMS:\n self.cache_data[key] = item\n self.stack.append(key)\n elif len(self.cache_data.keys()) == self.MAX_ITEMS:\n k = self.stack.pop(-1)\n self.cache_data.pop(k)\n print(\"DISCARD: {}\".format(k))\n self.cache_data[key] = item\n self.stack.append(key)",
"def add(self, key, value):\n self._data.append(self._Item(key, value))\n self._upheap(len(self._data) - 1) # upheap newly added position",
"def add(self, key, value):\n self._data.append(self._Item(key, value))\n self._upheap(len(self._data) - 1) # upheap newly added position",
"def add(self, key, value):\n self.data.append((key, value))",
"def add(self, key, skip_check=False):\n bits_per_slice = self.bits_per_slice\n hashes = self.make_hashes(key)\n found_all_bits = True\n if self.count > self.capacity:\n raise IndexError(\"RedisLocalBloomFilter is at capacity\")\n pipe = self.server.pipeline(transaction=False) \n sliceIdx = 0\n for k in hashes:\n sliceKey = self.SLICE_KEY_FMT % (self.bfkeypreffix, sliceIdx)\n pipe.setbit(sliceKey, k, 1)\n sliceIdx += 1\n pipeResults = pipe.execute()\n if not skip_check:\n for pipeResult in pipeResults:\n if not pipeResult:\n found_all_bits = False\n break\n if skip_check:\n self.count += 1\n return False\n elif not found_all_bits:\n self.count += 1\n return False\n else:\n return True",
"def _add_item_by_item(self, item):\n self.item_list[item.call_number] = item",
"def add(self, item: Mapping[Hashable, Any], **kwargs: Any) -> None:\n self.contents.update(item, **kwargs)\n return",
"def add(self, key, value):\n newest = self._Item(key, value) # make new item instance\n walk = self._data.last() # walk backward looking for smaller key\n while walk is not None and newest < walk.element():\n walk = self._data.before(walk)\n if walk is None:\n self._data.add_first(newest) # new key is smallest\n else:\n self._data.add_after(walk, newest) # newest goes after walk",
"def _single_setitem(self, key, item):\n self._dict[key] = item",
"def put(self, key, item):\n raise NotImplementedError(\"put must be implemented in your cache class\")"
] | [
"0.7277136",
"0.7030493",
"0.6990854",
"0.6836308",
"0.66995275",
"0.66974354",
"0.66621774",
"0.66100615",
"0.65638524",
"0.6553088",
"0.6477446",
"0.6454946",
"0.640302",
"0.640302",
"0.63999856",
"0.63677585",
"0.63595116",
"0.6356604",
"0.63035226",
"0.6302042",
"0.6256967",
"0.62406826",
"0.6236075",
"0.6231482",
"0.62213266",
"0.6210293",
"0.62102187",
"0.61980146",
"0.61943066",
"0.6194135"
] | 0.8320729 | 0 |
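
A minimal usage sketch for the `bfAdd` wrapper above, assuming the redisbloom-py style `Client` these wrappers appear to belong to and a reachable Redis server with the RedisBloom module loaded (the key name and connection details are placeholder assumptions):

from redisbloom.client import Client

rb = Client(host='localhost', port=6379)

# BF.ADD creates the filter with default parameters if it does not exist yet.
print(rb.bfAdd('seen:urls', 'https://example.com/a'))  # truthy: newly added
print(rb.bfAdd('seen:urls', 'https://example.com/a'))  # falsy: probably already present
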
Adds multiple ``items`` to a Bloom Filter ``key``. If ``noCreate`` remains ``None`` and ``key`` does not exist, a new Bloom Filter ``key`` will be created with desired probability of false positives ``error`` and expected entries to be inserted as ``capacity``. | def bfInsert(self, key, items, capacity=None, error=None, noCreate=None, expansion=None, noScale=None):
    params = [key]
    self.appendCapacity(params, capacity)
    self.appendError(params, error)
    self.appendExpansion(params, expansion)
    self.appendNoCreate(params, noCreate)
    self.appendNoScale(params, noScale)
    self.appendItems(params, items)
    return self.execute_command(self.BF_INSERT, *params) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def bfAdd(self, key, item):\n params = [key, item]\n \n return self.execute_command(self.BF_ADD, *params)",
"def add(self, key):\n\t\t#super(CountingBloomFilter, self).add(key)\n\t\t#super(CountingBloomFilter, self).generateStats()\n\t\tfor i in self.getBitArrayIndices(key):\n\t\t\tself.ba[i] += 1\n\t\tself.n += 1",
"def add(self, key):\n if key in self:\n return True\n if not self.filters:\n filter = RedisLocalBloomFilter(\n server=self.server, \n bfkeypreffix = self.FILTER_KEY_FMT % (self.bfkeypreffix, self.filter_count),\n capacity=self.initial_capacity,\n error_rate=self.error_rate * (1.0 - self.ratio))\n self.filter_count += 1\n self.filters.append(filter)\n else:\n filter = self.filters[-1]\n if filter.count >= filter.capacity:\n capacity = filter.capacity * self.scale\n if capacity > MAX_PER_SLICE_SIZE:\n capacity = MAX_PER_SLICE_SIZE\n filter = RedisLocalBloomFilter(\n server=self.server,\n bfkeypreffix = self.FILTER_KEY_FMT % (self.bfkeypreffix, self.filter_count),\n capacity=capacity,\n error_rate=self.error_rate * (1.0 - self.ratio))\n self.filter_count += 1\n self.filters.append(filter)\n if self.max_filters > 0 and len(self.filters) >= self.max_filters:\n f = self.filters[0]\n f.clear()\n del self.filters[0]\n filter.add(key, skip_check=True)\n return False",
"def cfInsert(self, key, items, capacity=None, nocreate=None):\n params = [key]\n self.appendCapacity(params, capacity)\n self.appendNoCreate(params, nocreate)\n self.appendItems(params, items)\n\n return self.execute_command(self.CF_INSERT, *params)",
"def add_new(self, item, key):\n if key in self._items:\n raise DuplicateListHeapItemException(key)\n if len(self._items) >= self._max_limit:\n raise MaxItemLimitReachedException()\n self._items[key] = item\n self._listbox.insert(END, key)",
"def cfAdd(self, key, item):\n params = [key, item]\n \n return self.execute_command(self.CF_ADD, *params)",
"def add(self, key, skip_check=False):\n bits_per_slice = self.bits_per_slice\n hashes = self.make_hashes(key)\n found_all_bits = True\n if self.count > self.capacity:\n raise IndexError(\"RedisLocalBloomFilter is at capacity\")\n pipe = self.server.pipeline(transaction=False) \n sliceIdx = 0\n for k in hashes:\n sliceKey = self.SLICE_KEY_FMT % (self.bfkeypreffix, sliceIdx)\n pipe.setbit(sliceKey, k, 1)\n sliceIdx += 1\n pipeResults = pipe.execute()\n if not skip_check:\n for pipeResult in pipeResults:\n if not pipeResult:\n found_all_bits = False\n break\n if skip_check:\n self.count += 1\n return False\n elif not found_all_bits:\n self.count += 1\n return False\n else:\n return True",
"def add(self, key, value):\n new = self._Item(key, value)\n\n if self.is_empty():\n self._data.append(new)\n else:\n for i, item in enumerate(self._data):\n if new <= item:\n self._data.insert(i, new)\n break\n if i == len(self) - 1:\n self._data.append(new)\n break",
"def topkAdd(self, key, *items):\n params = [key]\n params += items\n \n return self.execute_command(self.TOPK_ADD, *params)",
"def add_to_items(items, name, size, price):\n index = items_contains_name(items, name)\n if index == 0:\n temp = {'name': name, 'size': size, 'count': 1, 'price': price}\n items.append(temp)\n else:\n items[index]['count'] = items[index]['count'] + 1\n return items",
"def Add(self, key, *args):\n temp_error = Errors()\n if ErrMsg.isValidKey(key, ErrMsg._MsgKey__class, temp_error):\n if key.argcount != len(args):\n if not self._keychainExists(key):\n self._keychainExists(key, True)\n exception = self._validateException(key.exception)\n if exception:\n self.Raise(exception, key, args)\n else:\n self._add(key, args)\n else:\n self.Add(ErrMsg.Error.Add.Invalid_Msgformat, key.message, args)\n\n elif ErrMsg.isValidKey(key, None, temp_error):\n # Assume GENERIC status\n\n key = ErrMsg._defaultKeyChain(key, temp_error)\n if temp_error:\n pass\n else:\n self.Add(key, args)\n else:\n self.Add(ErrMsg.Error.Add.Invalid_Errorkey, key.message, args)",
"def bfCreate(self, key, errorRate, capacity, expansion=None, noScale=None):\n params = [key, errorRate, capacity]\n self.appendExpansion(params, expansion)\n self.appendNoScale(params, noScale)\n\n return self.execute_command(self.BF_RESERVE, *params)",
"def add(self, key, value):\n newest = self._Item(key, value) # make new item instance\n walk = self._data.last() # walk backward looking for smaller key\n while walk is not None and newest < walk.element():\n walk = self._data.before(walk)\n if walk is None:\n self._data.add_first(newest) # new key is smallest\n else:\n self._data.add_after(walk, newest) # newest goes after walk",
"def test_sample_container_add_exceeds_limit(self):\n self.assertEqual(self.container._data, defaultdict(list))\n\n retval = self.container.add(\"key1\", [\"1\", \"2\", \"3\", ], 2)\n\n self.assertEqual(retval, [\"1\", \"2\", \"3\", ])\n self.assertEqual([], self.container._data[\"key1\"])",
"def add_item(self, item):\n self.items.append(item)\n self.item_count += 1\n self.max_length = max(self.max_length, len(item.samples))",
"def addItem(self, key):\n if key in self.dictionary:\n raise Exception(\"Key already exist in dictionary\")\n self.dictionary[key] = WordInformation(self.MAX_RATING)",
"def add_item(self, item, index):\n if index in self.d_buffer.keys():\n return True\n elif len(self) < self._size:\n self.d_buffer.update({index: item})\n return True\n else:\n return False",
"def add(self, key, value):\n self._data.append(self._Item(key, value))\n self._upheap(len(self._data) - 1) # upheap newly added position",
"def cfInsertNX(self, key, items, capacity=None, nocreate=None):\n params = [key]\n self.appendCapacity(params, capacity)\n self.appendNoCreate(params, nocreate)\n self.appendItems(params, items)\n\n return self.execute_command(self.CF_INSERTNX, *params)",
"def add(self, key, value):\n self._data.append(self._Item(key, value))\n self._upheap(len(self._data) - 1) # upheap newly added position",
"def append ( self , item ) :\n self.cond.acquire()\n try:\n if self.closed :\n raise Exception( \"Trying to append to a closed queue\" )\n else :\n self.weight += int( item['size'] )\n self.push( item )\n self.cond.notify()\n finally:\n self.cond.release()",
"def appenddictitemsize(self, key, numents):\n self._dentsvertsdata[key].appendsize(numents * self._multFactor)",
"def add(self, key, value):\r\n newest = Item(key, value) # make new item instance\r\n walk = self.data.last() # walk backward looking for smaller key\r\n while walk is not None and newest < walk.element():\r\n walk = self.data.before(walk)\r\n if walk is None:\r\n self.data.add_first(newest) # new key is smallest\r\n else:\r\n self.data.add_after(walk, newest) # newest goes after walk\r",
"def add(self, item):\n self.num_item += 1\n indexs = self.__get_indexs(item)\n for index in indexs:\n self.filter_bitarray[index] = True",
"def add(self, key, value):\n self._data.append(self._Item(key, value))\n self._up_heap(len(self) - 1)",
"def cfAddNX(self, key, item):\n params = [key, item]\n \n return self.execute_command(self.CF_ADDNX, *params)",
"def insert(self, key, value):\n # Resize array here if necessary.\n if key < 0: key = 0\n elif key > len(self): key = len(self)\n if key < len(self):\n for j in range(len(self), key, -1):\n self._items[j] = self._items[j - 1]\n self._items[key] = value\n self._size += 1\n self.incModCount()",
"def add_item(self):\n item = LibGen.create_item()\n if not self.item_exists(item.call_number):\n self.item_list[item.call_number] = item\n print(f\"Item({item.call_number}) bas been added.\")\n else:\n print(\"This item already exists.\")",
"def _additems(self, w,h):\n for idx in range(len(self.data['items'])):\n default={\n 'color': self.data['itemscolor'],\n 'textscale': self.data['itemsscale'],\n 'textfont': self.data['textfont'],\n 'width': w-(self.data['margin'][0]*2.),\n }\n self.data['items'][idx].update(default)\n self.addItem(idx, **self.data['items'][idx])",
"def add_item(self, item):\r\n bag_res = consts.BAG_PUT_FAILED\r\n for i in range(len(self._items)):\r\n res = self.put_item_at(i, item, allow_switch=False)\r\n if res == consts.PUT_FORBIDDEN:\r\n return consts.BAG_PUT_FAILED\r\n if res == consts.PUT_SWITCH or \\\r\n res == consts.PUT_INTO_EMPTY or \\\r\n res == consts.PUT_MERGE_TOTALLY:\r\n return consts.BAG_PUT_TOTALLY\r\n if res == consts.PUT_MERGE_PARTIALLY:\r\n bag_res = consts.BAG_PUT_PARTIALLY\r\n continue\r\n if res == consts.PUT_MERGE_FAILED or \\\r\n res == consts.PUT_SWITCH_FORBIDDEN:\r\n continue\r\n return bag_res"
] | [
"0.6203647",
"0.6000729",
"0.5843988",
"0.57451206",
"0.5711649",
"0.56672007",
"0.5634117",
"0.5580528",
"0.54817045",
"0.541424",
"0.5405912",
"0.53877556",
"0.53802043",
"0.5330853",
"0.53062",
"0.52977407",
"0.5295973",
"0.5289593",
"0.52855736",
"0.5276881",
"0.5263222",
"0.5257719",
"0.52493656",
"0.52452296",
"0.5184029",
"0.5172518",
"0.51582766",
"0.511162",
"0.50999177",
"0.5088622"
] | 0.6028828 | 1 |
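
`bfInsert` batches several items into one BF.INSERT call and, unlike `bfAdd`, lets the filter be created on the fly with explicit parameters. A usage sketch under the same redisbloom-py client assumption as above (key name and values are illustrative):

from redisbloom.client import Client

rb = Client(host='localhost', port=6379)

# CAPACITY/ERROR only matter if 'seen:ips' is created by this call;
# passing noCreate=True instead would make a missing key an error.
flags = rb.bfInsert('seen:ips', ['10.0.0.1', '10.0.0.2', '10.0.0.1'],
                    capacity=50000, error=0.001)
print(flags)  # e.g. [1, 1, 0]: one flag per item, 0 when probably seen before
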
Checks whether an ``item`` exists in Bloom Filter ``key``. | def bfExists(self, key, item):
    params = [key, item]
    return self.execute_command(self.BF_EXISTS, *params) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __contains__(self, item, key):\n ndx = self._findPostion(key)\n return ndx is not None",
"def item_has_key(self, item, key):\n if key in self._reverse_store[item]:\n return True\n else:\n return False",
"def contains(self, item):\n for h_num in xrange(self.k):\n val = self.hash_value(item, h_num)\n if not self.arr[val]:\n return False\n else:\n return True",
"def contains(self, item):\n return self._dict.has_key(item)\n\n self.__contains__ = contains",
"def item_exists(item_id):\n return item_id in all_items",
"def has_item(self, item):\n return item in self.set",
"def has_item(self, item):\n return item in self.set",
"def cfExists(self, key, item):\n params = [key, item]\n \n return self.execute_command(self.CF_EXISTS, *params)",
"def has_item(self, item):\n return item in self.cache",
"def exista(self, item):\n if item not in self._items:\n return False\n for x in self._items:\n if x == item:\n return True",
"def __contains__(self, item):\n try:\n self[item]\n return True\n except KeyError:\n return False",
"def has_item(self, usage_key):\r\n try:\r\n self._find_one(usage_key)\r\n return True\r\n except ItemNotFoundError:\r\n return False",
"def contains(self, key):\n # TODO: Check if the given key exists in a bucket\n hash_key = self._bucket_index(key) # Gets the index of the key\n if self.buckets[hash_key].is_empty() is False: # If the hask_key exists\n for key_value_pair in self.buckets[hash_key]: # Iteratre through the value pair\n if key_value_pair[0] is key: # If the key matches\n return True\n return False",
"def contains(self, key: int) -> bool:\n _hash = self.get_hash(key)\n return self.bucket_array[_hash].exist(key)",
"def __contains__(self, item):\n try:\n hdu = self[item] # noqa\n return True\n except Exception:\n return False",
"def __contains__(self, item: object) -> bool:\n val = conv_kv(item) # type: ignore\n for fixup in self._mapping._fixup.values():\n if fixup.value == val:\n return True\n return False",
"def array_key_exists(name, item):\n return item.has_key(name);",
"def has(cls, item):\n return item in cls.values()",
"def __contains__(self, item):\n\n if self[item]:\n return True\n return False",
"def bfMExists(self, key, *items):\n params = [key]\n params += items\n\n return self.execute_command(self.BF_MEXISTS, *params)",
"def __contains__(self, item):\n return item in self._data",
"def has(self, item):\n return item in self.mut",
"def contains(self, key: int) -> bool:\n return self._find_key(key, find_empty=False) >= 0",
"def _has(self, key):\n path = self._get_key_path(key)\n return exists(path)",
"def item_exists(self, call_number):\n return call_number in self.item_list.keys()",
"def contains(self, key: int) -> bool:\n lv1, lv2 = self.hashing(key)\n \n for item in self.cont[lv1][lv2]:\n if item==key:\n return True\n \n return False",
"def __contains__(self, key):\n return self.keys[self._linear_probe(key, \"contains\")] is not None",
"def has_item(self, item_name):\n if item_name in self.item_list:\n return True\n return False",
"def __contains__(self, item):\n\t\treturn item in self.__dict__.values()",
"def has(self, key):"
] | [
"0.77040607",
"0.76314753",
"0.75677925",
"0.7561435",
"0.75477517",
"0.7416592",
"0.7416592",
"0.7373087",
"0.73644096",
"0.7320269",
"0.7269573",
"0.71763974",
"0.7118146",
"0.70699257",
"0.7025244",
"0.6994611",
"0.6993277",
"0.6964525",
"0.6963223",
"0.6961354",
"0.6954093",
"0.6912245",
"0.6889395",
"0.68624604",
"0.68619245",
"0.6860283",
"0.6854506",
"0.68280953",
"0.68246704",
"0.6814358"
] | 0.83642733 | 0 |
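
`bfExists` maps to BF.EXISTS, whose answer is "definitely absent" or "probably present". The `contains` helper in the negatives above shows why: an item only tests positive when every one of its k hashed bit positions is set. A minimal standard-library sketch of that check (sizes and key strings are illustrative):

import hashlib

class TinyBloom:
    def __init__(self, num_bits=1024, num_hashes=4):
        self.num_bits, self.num_hashes = num_bits, num_hashes
        self.bits = bytearray(num_bits)  # one byte per bit, for clarity

    def _positions(self, item):
        # Derive k positions by salting the item with the hash index.
        for i in range(self.num_hashes):
            digest = hashlib.sha256(('%d:%s' % (i, item)).encode()).hexdigest()
            yield int(digest, 16) % self.num_bits

    def add(self, item):
        for pos in self._positions(item):
            self.bits[pos] = 1

    def __contains__(self, item):
        # Any unset position means the item was never added.
        return all(self.bits[pos] for pos in self._positions(item))

bf = TinyBloom()
bf.add('alice@example.com')
print('alice@example.com' in bf)  # True
print('bob@example.com' in bf)    # False (with high probability)
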
Checks whether ``items`` exist in Bloom Filter ``key``. | def bfMExists(self, key, *items):
    params = [key]
    params += items
    return self.execute_command(self.BF_MEXISTS, *params)
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def bfExists(self, key, item):\n params = [key, item]\n \n return self.execute_command(self.BF_EXISTS, *params)",
"def item_exists(item_id):\n return item_id in all_items",
"def __contains__(self, items):\n if type(items) != list:\n raise PJFInvalidType(items, list)\n ret = 0\n for item in items:\n for key in self.__dict__:\n if isinstance(self.__dict__[key], JsonFactory):\n ret += item in self.__dict__[key]\n elif item == key:\n ret += 1\n return len(items) == ret",
"def contains(self, item):\n for h_num in xrange(self.k):\n val = self.hash_value(item, h_num)\n if not self.arr[val]:\n return False\n else:\n return True",
"def __contains__(self, item, key):\n ndx = self._findPostion(key)\n return ndx is not None",
"def item_has_key(self, item, key):\n if key in self._reverse_store[item]:\n return True\n else:\n return False",
"def has_item(self, item):\n return item in self.set",
"def has_item(self, item):\n return item in self.set",
"def has_items(self):\r\n return self.orderitem_set.exists() # pylint: disable=E1101\r",
"def contains(self, item):\n return self._dict.has_key(item)\n\n self.__contains__ = contains",
"def array_key_exists(name, item):\n return item.has_key(name);",
"def item_exists(self, call_number):\n return call_number in self.item_list.keys()",
"def exista(self, item):\n if item not in self._items:\n return False\n for x in self._items:\n if x == item:\n return True",
"def has_item(self, usage_key):\r\n try:\r\n self._find_one(usage_key)\r\n return True\r\n except ItemNotFoundError:\r\n return False",
"def has(cls, item):\n return item in cls.values()",
"def contains(self, key):\n\t\tfor i in self.getBitArrayIndices(key):\n\t\t\tif self.ba[i] <= 0:\n\t\t\t\treturn False\n\t\treturn True",
"def contains(self, key: int) -> bool:\n _hash = self.get_hash(key)\n return self.bucket_array[_hash].exist(key)",
"def __contains__(self, item):\n return item in self.__keys or item in self.__vals",
"def contains(self, key):\n if key in self.key_list:\n return True\n return False",
"def has_item(self, usage_key):\r\n store = self._get_modulestore_for_courseid(usage_key.course_key)\r\n return store.has_item(usage_key)",
"def has_item(self, item_name):\n if item_name in self.item_list:\n return True\n return False",
"def contains(self, key: int) -> bool:\n return self._find_key(key, find_empty=False) >= 0",
"def check_item_in(self, url):\n item_hash = tools.url_hash(url)\n if item_hash not in self.__items:\n self.__item_lock.acquire()\n self.__items.add(item_hash)\n self.__item_lock.release()\n return False\n else:\n return True",
"def has_item(self, item):\n return item in self.cache",
"def contains(self, item):\n if isinstance(item, dict):\n return _(item).all(lambda key: self._.get(key) == item[key])\n return item in self",
"def __contains__(self, item):\n\t\treturn item in self.__dict__.values()",
"def has_item(self, usage_key):\r\n return usage_key in self.modules[usage_key.course_key]",
"def __contains__(self, key):\n bits_per_slice = self.bits_per_slice\n hashes = self.make_hashes(key)\n pipe = self.server.pipeline(transaction=False) \n sliceIdx = 0\n for k in hashes:\n sliceKey = self.SLICE_KEY_FMT % (self.bfkeypreffix, sliceIdx)\n pipe.getbit(sliceKey, k)\n sliceIdx += 1\n getbits = pipe.execute() \n for bit in getbits:\n if not bit:\n return False\n return True",
"def __contains__(self, key):\n\n return key in self.keys_set",
"def cfExists(self, key, item):\n params = [key, item]\n \n return self.execute_command(self.CF_EXISTS, *params)"
] | [
"0.73868114",
"0.73612565",
"0.7006057",
"0.6978514",
"0.6682587",
"0.6636079",
"0.6593363",
"0.6593363",
"0.6583643",
"0.65689707",
"0.6546252",
"0.6544462",
"0.6530537",
"0.6525098",
"0.6517376",
"0.65107256",
"0.64853024",
"0.6446855",
"0.6445891",
"0.64283794",
"0.64225173",
"0.6404468",
"0.64029586",
"0.6397683",
"0.6396188",
"0.63960636",
"0.637843",
"0.63750595",
"0.63697594",
"0.6350113"
] | 0.77390194 | 0 |
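
`bfMExists` passes every item as a trailing argument to BF.MEXISTS and gets back one membership flag per item, which is cheaper than issuing one `bfExists` call per item. A usage sketch under the same redisbloom-py client assumption (key and values are illustrative):

from redisbloom.client import Client

rb = Client(host='localhost', port=6379)

rb.bfAdd('seen:emails', 'alice@example.com')
flags = rb.bfMExists('seen:emails', 'alice@example.com', 'bob@example.com')
print(flags)  # e.g. [1, 0]: alice probably present, bob definitely not
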
Begins an incremental save of the bloom filter ``key``. This is useful for large bloom filters which cannot fit into the normal SAVE and RESTORE model. The first time this command is called, the value of ``iter`` should be 0. This command will return successive (iter, data) pairs until (0, NULL) to indicate completion. | def bfScandump(self, key, iter):
    params = [key, iter]
    return self.execute_command(self.BF_SCANDUMP, *params) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cfScandump(self, key, iter):\n params = [key, iter]\n \n return self.execute_command(self.CF_SCANDUMP, *params)",
"def save(self) -> None:\n self._bin_iter.save()",
"def save(self) -> dict:\n for pair in self._buffer:\n yield pair.save()",
"def _iter(self, key, count, increment=1):\n key %= self.size\n while count > 0:\n try:\n yield self.db[key]\n except KeyError:\n # This shouldn't happen, but there's really nothing we can do if it does.\n # Skip over the damaged part of our database, ignoring the missing item.\n pass\n key = (key + increment) % self.size\n count -= 1",
"def bfLoadChunk(self, key, iter, data):\n params = [key, iter, data]\n \n return self.execute_command(self.BF_LOADCHUNK, *params)",
"def __iter__(self):\n try:\n i = self.db[self._headKey]\n while True:\n yield i\n i = self.db[self._getNextKey(i)]\n except KeyError:\n pass",
"def __iter__(self):\r\n for item in self._data:\r\n yield item # yield the KEY\r",
"def add(self, key):\n\t\t#super(CountingBloomFilter, self).add(key)\n\t\t#super(CountingBloomFilter, self).generateStats()\n\t\tfor i in self.getBitArrayIndices(key):\n\t\t\tself.ba[i] += 1\n\t\tself.n += 1",
"def save(self, key=None):\n\n # we can override our key by passing one in explicitly\n if key: self._key = key\n\n # now save in the db\n if self._key:\n self._dbag[self._key] = self.to_python()\n else:\n self._key = self._dbag.add(self.to_python())\n return self._key",
"def __iter__(self):\n if not self.loading:\n self.reset_loading()\n self.current_batch_index = 0\n return self",
"def keys_fetch(self):\n with self.env.begin(write=False) as txn:\n cursor = txn.cursor()\n tot = txn.stat()['entries']\n i = 0\n\n path = self.db_path\n base_name = self.base_path\n cache_file_path = os.path.join(path, '_cache_' + base_name + '.pkl')\n print('cache_file_path = ', cache_file_path) # DEBUG\n\n if os.path.isfile(cache_file_path):\n self.keys = pickle.load(open(cache_file_path, 'rb'))\n self._num_examples = tot\n else:\n keys = []\n for key, _ in cursor:\n i += 1\n if i % 1000 == 0 or i == tot:\n print('Fetching {:>8d} /{:>8d} keys'.format(i, tot),\n end='\\r')\n keys.append(key)\n print('\\nDone.')\n self._num_examples = tot\n self.keys = np.asarray(keys)\n pickle.dump(self.keys, open(cache_file_path, 'wb'))",
"def __iter__(self):\n # This could be as simple as \"return self._getKeyList().__iter__()\"\n # but this performs some extra consistency checking to make sure the\n # key we iterate to actually exists, to keep us from crashing if\n # our db is a little out of sync with itself.\n\n # This is a nasty hack because our db seems prone to circular links\n nItems = 0\n for item in self._getKeyList():\n if item in self:\n yield item\n nItems += 1\n # NASTY HACK!\n if nItems > 1000:\n self.reindex()\n raise Exception(\"Circular link corrected, try again\")\n else:\n self._delKey(item)",
"def add(self, key, skip_check=False):\n bits_per_slice = self.bits_per_slice\n hashes = self.make_hashes(key)\n found_all_bits = True\n if self.count > self.capacity:\n raise IndexError(\"RedisLocalBloomFilter is at capacity\")\n pipe = self.server.pipeline(transaction=False) \n sliceIdx = 0\n for k in hashes:\n sliceKey = self.SLICE_KEY_FMT % (self.bfkeypreffix, sliceIdx)\n pipe.setbit(sliceKey, k, 1)\n sliceIdx += 1\n pipeResults = pipe.execute()\n if not skip_check:\n for pipeResult in pipeResults:\n if not pipeResult:\n found_all_bits = False\n break\n if skip_check:\n self.count += 1\n return False\n elif not found_all_bits:\n self.count += 1\n return False\n else:\n return True",
"def add(self, key):\n if key in self:\n return True\n if not self.filters:\n filter = RedisLocalBloomFilter(\n server=self.server, \n bfkeypreffix = self.FILTER_KEY_FMT % (self.bfkeypreffix, self.filter_count),\n capacity=self.initial_capacity,\n error_rate=self.error_rate * (1.0 - self.ratio))\n self.filter_count += 1\n self.filters.append(filter)\n else:\n filter = self.filters[-1]\n if filter.count >= filter.capacity:\n capacity = filter.capacity * self.scale\n if capacity > MAX_PER_SLICE_SIZE:\n capacity = MAX_PER_SLICE_SIZE\n filter = RedisLocalBloomFilter(\n server=self.server,\n bfkeypreffix = self.FILTER_KEY_FMT % (self.bfkeypreffix, self.filter_count),\n capacity=capacity,\n error_rate=self.error_rate * (1.0 - self.ratio))\n self.filter_count += 1\n self.filters.append(filter)\n if self.max_filters > 0 and len(self.filters) >= self.max_filters:\n f = self.filters[0]\n f.clear()\n del self.filters[0]\n filter.add(key, skip_check=True)\n return False",
"def inc(self, key: str) -> None:\n if key not in self.bucket_of_keys:\n self.bucket_of_keys[key] = self.buckets.insert(self.buckets.begin(), Node(0, {key}))\n bucket, next_bucket = self.bucket_of_keys[key], self.bucket_of_keys[key].next\n if next_bucket is self.buckets.end() or next_bucket.value > bucket.value + 1:\n next_bucket = self.buckets.insert(next_bucket, Node(bucket.value + 1, set()))\n next_bucket.keys.add(key)\n self.bucket_of_keys[key] = next_bucket\n\n bucket.keys.remove(key)\n if not bucket.keys:\n self.buckets.erase(bucket)",
"def _save(self, itr):\n # using keep_checkpoint_every_n_hours as proxy for iterations between saves\n if self.saver and (itr + 1) % self.saver._keep_checkpoint_every_n_hours == 0:\n\n # collect params (or stuff to keep in general)\n params = dict()\n params['critic'] = self.critic.network.get_param_values()\n\n # if the environment is wrapped in a normalizing env, save those stats\n normalized_env = hgail.misc.utils.extract_normalizing_env(self.env)\n if normalized_env is not None:\n params['normalzing'] = dict(\n obs_mean=normalized_env._obs_mean,\n obs_var=normalized_env._obs_var\n )\n\n # save hierarchy\n for i, level in enumerate(self.hierarchy):\n params[i] = dict()\n params[i]['policy'] = level.algo.policy.get_param_values()\n \n # save params \n save_dir = os.path.split(self.saver_filepath)[0]\n hgail.misc.utils.save_params(save_dir, params, itr+1, max_to_keep=50)",
"def inc(self, key: str) -> None:\n if key not in self.mapping:\n cur_block = self.head\n else:\n cur_block = self.mapping[key]\n cur_block.keys.remove(key)\n\n if cur_block.val + 1 != cur_block.next.val:\n new_block = Block(cur_block.val + 1)\n cur_block.insert_after(new_block)\n else:\n new_block = cur_block.next\n new_block.keys.add(key)\n self.mapping[key] = new_block\n\n if not cur_block.keys and cur_block.val != 0:\n cur_block.remove()",
"def inc(self, key):\n if key in self.keyCountMap:\n self._updateCount(key, 1)\n else:\n self.keyCountMap[key] = 1\n if self.head.next.count != 1:\n self._addBucketAfter(Bucket(1), self.head)\n self.head.next.keySet.add(key)\n self.countBucketMap[1] = self.head.next",
"def fisher_iterate(\n self,\n cbl,\n map_tag=None,\n iter_max=200,\n converge_criteria=0.005,\n qb_start=None,\n transfer_run=False,\n save_iters=False,\n null_first_cmb=False,\n delta_beta_prior=None,\n cond_noise=None,\n cond_criteria=None,\n like_profiles=False,\n like_profile_sigma=3.0,\n like_profile_points=100,\n file_tag=None,\n ):\n\n save_name = \"transfer\" if transfer_run else \"bandpowers\"\n\n if transfer_run:\n null_first_cmb = False\n\n # previous fqb iterations to monitor convergence and adjust conditioning\n prev_fqb = []\n cond_adjusted = False\n\n if qb_start is None:\n qb = OrderedDict()\n for k, v in self.bin_def.items():\n if transfer_run:\n if \"cmb\" not in k or \"eb\" in k or \"tb\" in k:\n continue\n if k == \"delta_beta\":\n # qb_delta beta is a coefficient on the change from beta,\n # so expect that it should be small if beta_ref is close\n # (zeroes cause singular matrix problems)\n qb[k] = [self.delta_beta_fix]\n elif k.startswith(\"res_\") or k.startswith(\"fg_\"):\n # res qb=0 means noise model is 100% accurate.\n qb[k] = 1e-5 * np.ones(len(v))\n else:\n # start by assuming model is 100% accurate\n qb[k] = np.ones(len(v))\n else:\n qb = qb_start\n\n obs, nell, debias = self.get_data_spectra(\n map_tag=map_tag, transfer_run=transfer_run\n )\n\n bin_index = pt.dict_to_index(self.bin_def)\n\n success = False\n for iter_idx in range(iter_max):\n self.log(\n \"Doing Fisher step {}/{}...\".format(iter_idx + 1, iter_max), \"info\"\n )\n\n qb_new, inv_fish = self.fisher_calc(\n qb,\n cbl,\n obs,\n cls_noise=nell,\n cls_debias=debias,\n cond_noise=cond_noise,\n delta_beta_prior=delta_beta_prior,\n cond_criteria=cond_criteria,\n null_first_cmb=null_first_cmb,\n )\n\n qb_arr = pt.dict_to_arr(qb, flatten=True)\n qb_new_arr = pt.dict_to_arr(qb_new, flatten=True)\n dqb = qb_new_arr - qb_arr\n fqb = dqb / qb_arr\n max_fqb = np.nanmax(np.abs(fqb))\n\n prev_fqb.append(max_fqb)\n\n fnan = np.isnan(fqb)\n if fnan.any():\n (nanidx,) = np.where(fnan)\n self.log(\n \"Iter {}: Ignoring {} bins with fqb=nan: bins={}, qb_new={}, \"\n \"qb={}\".format(\n iter_idx,\n len(nanidx),\n nanidx,\n qb_new_arr[nanidx],\n qb_arr[nanidx],\n ),\n \"warning\",\n )\n\n self.log(\"Max fractional change in qb: {}\".format(max_fqb), \"info\")\n\n # put qb_new in original dict\n qb = copy.deepcopy(qb_new)\n cls_model = self.get_model_spectra(\n qb, cbl, delta=True, cls_noise=nell, cond_noise=None\n )\n\n if \"delta_beta\" in qb:\n # get beta fit and beta error\n beta_fit = qb[\"delta_beta\"][0] + self.beta_ref\n db_idx = slice(*bin_index[\"delta_beta\"])\n beta_err = np.sqrt(np.diag(inv_fish[db_idx, db_idx]))[0]\n else:\n beta_fit = None\n beta_err = None\n\n if save_iters:\n # save only the quantities that change with each iteration\n out = dict(\n map_tag=map_tag,\n map_tags=self.map_tags,\n iter_index=iter_idx,\n bin_def=self.bin_def,\n bin_weights=self.bin_weights,\n cls_shape=self.cls_shape,\n cls_obs=obs,\n qb=qb,\n fqb=fqb,\n inv_fish=inv_fish,\n cls_model=cls_model,\n cbl=cbl,\n map_freqs=self.map_freqs,\n cls_signal=self.cls_signal,\n cls_noise=self.cls_noise,\n Dmat_obs=self.Dmat_obs,\n gmat_ell=self.gmat_ell,\n extra_tag=file_tag,\n )\n\n if \"fg_tt\" in self.bin_def:\n out.update(\n beta_fit=beta_fit,\n beta_err=beta_err,\n ref_freq=self.ref_freq,\n beta_ref=self.beta_ref,\n )\n\n self.save_data(save_name, bp_opts=not transfer_run, **out)\n\n (nans,) = np.where(np.isnan(qb_new_arr))\n if len(nans):\n msg = \"Found NaN values in qb bins {} at iter {}\".format(nans, iter_idx)\n break\n\n if 
fnan.all():\n msg = (\n \"All bins have fqb=NaN at iter {}, \"\n \"something has gone horribly wrong.\".format(iter_idx)\n )\n break\n\n negs = np.where(np.diag(inv_fish) < 0)[0]\n if len(negs):\n self.log(\n \"Iter {}: Found negatives in inv_fish diagonal at locations \"\n \"{}\".format(iter_idx, negs),\n \"warning\",\n )\n\n if np.nanmax(np.abs(fqb)) < converge_criteria:\n if not transfer_run:\n # Calculate final fisher matrix without conditioning\n self.log(\"Calculating final Fisher matrix.\", \"info\")\n _, inv_fish = self.fisher_calc(\n qb,\n cbl,\n obs,\n cls_noise=nell,\n cls_debias=debias,\n cond_noise=None,\n delta_beta_prior=delta_beta_prior,\n null_first_cmb=null_first_cmb,\n )\n\n # If any diagonals of inv_fisher are negative, something went wrong\n negs = np.where(np.diag(inv_fish) < 0)[0]\n if len(negs):\n self.log(\n \"Found negatives in inv_fish diagonal at locations \"\n \"{}\".format(negs),\n \"warning\",\n )\n\n success = True\n break\n\n else:\n msg = \"{} {} did not converge in {} iterations\".format(\n \"Multi-map\" if map_tag is None else \"Map {}\".format(map_tag),\n \"transfer function\" if transfer_run else \"spectrum\",\n iter_max,\n )\n # Check the slope of the last ten fqb_maxpoints.\n # If there's not a downward trend, adjust conditioning\n # criteria to help convergence.\n if len(prev_fqb) <= 10 or transfer_run:\n continue\n m, b = np.polyfit(np.arange(10), prev_fqb[-10:], 1)\n if m > 0: # Not converging\n # First, start from very little conditioning\n if not cond_adjusted:\n cond_criteria = 5e3\n cond_adjusted = True\n self.log(\n \"Iter {}: Not converging. Setting cond_criteria={}\".format(\n iter_idx, cond_criteria\n ),\n \"warning\",\n )\n\n elif cond_criteria > 100:\n cond_criteria /= 2.0\n self.log(\n \"Iter {}: Tightening condition criteria to help convergence. \"\n \"cond_criteria={}\".format(iter_idx, cond_criteria),\n \"warning\",\n )\n else:\n self.log(\n \"Iter {}: Can't reduce cond_criteria any more.\".format(\n iter_idx\n ),\n \"warning\",\n )\n # give it ten tries to start converging\n prev_fqb = []\n\n # save and return\n out = dict(\n qb=qb,\n inv_fish=inv_fish,\n fqb=fqb,\n bin_def=self.bin_def,\n bin_weights=self.bin_weights,\n iters=iter_idx,\n success=success,\n map_tags=self.map_tags,\n map_freqs=self.map_freqs,\n converge_criteria=converge_criteria,\n cond_noise=cond_noise,\n cond_criteria=cond_criteria,\n null_first_cmb=null_first_cmb,\n apply_gcorr=self.apply_gcorr,\n weighted_bins=self.weighted_bins,\n )\n\n if \"fg_tt\" in self.bin_def:\n out.update(\n delta_beta_prior=delta_beta_prior,\n beta_fit=beta_fit,\n beta_err=beta_err,\n ref_freq=self.ref_freq,\n beta_ref=self.beta_ref,\n )\n\n if self.debug:\n out.update(\n cbl=cbl,\n cls_obs=obs,\n cls_signal=self.cls_signal,\n cls_noise=self.cls_noise,\n cls_model=cls_model,\n cls_shape=self.cls_shape,\n cond_noise=cond_noise,\n Dmat_obs=self.Dmat_obs,\n )\n\n if not transfer_run:\n out.update(qb_transfer=self.qb_transfer)\n if self.template_cleaned:\n out.update(template_alpha=self.template_alpha)\n\n if success and not transfer_run:\n # do one more fisher calc that doesn't include sample variance\n # set qb=very close to 0. 
0 causes singular matrix problems.\n # don't do this for noise residual bins\n self.log(\"Calculating final Fisher matrix without sample variance.\", \"info\")\n qb_zeroed = copy.deepcopy(qb)\n qb_new_ns = copy.deepcopy(qb)\n for comp in [\"cmb\", \"fg\"]:\n for spec in self.specs:\n stag = \"{}_{}\".format(comp, spec)\n if stag not in qb_zeroed:\n continue\n qb_zeroed[stag][:] = 1e-20\n qb_new_ns[stag][:] = 1.0\n if \"delta_beta\" in qb:\n qb_zeroed[\"delta_beta\"][:] = 1e-20\n qb_new_ns[\"delta_beta\"][:] = 0\n\n _, inv_fish_ns = self.fisher_calc(\n qb_zeroed,\n cbl,\n obs,\n cls_noise=nell,\n cls_debias=debias,\n cond_noise=None,\n delta_beta_prior=None,\n null_first_cmb=null_first_cmb,\n )\n\n out.update(\n invfish_nosampvar=inv_fish_ns,\n )\n\n # compute window functions for CMB bins\n self.log(\"Calculating window functions for CMB bins\", \"info\")\n wbl_qb = self.fisher_calc(\n qb,\n cbl,\n obs,\n cls_noise=nell,\n cls_debias=None,\n cond_noise=None,\n delta_beta_prior=delta_beta_prior,\n null_first_cmb=null_first_cmb,\n windows=True,\n inv_fish=inv_fish,\n )\n out.update(wbl_qb=wbl_qb)\n\n # compute bandpowers and covariances\n cb, dcb, ellb, cov, qb2cb, wbl_cb = self.do_qb2cb(qb, inv_fish, wbl_qb)\n _, dcb_ns, _, cov_ns, _, _ = self.do_qb2cb(qb, inv_fish_ns, wbl_qb)\n\n out.update(\n cb=cb,\n dcb=dcb,\n ellb=ellb,\n cov=cov,\n qb2cb=qb2cb,\n wbl_cb=wbl_cb,\n dcb_nosampvar=dcb_ns,\n cov_nosampvar=cov_ns,\n )\n\n if like_profiles:\n # compute bandpower likelihoods\n self.log(\"Calculating bandpower profile likelihoods\", \"info\")\n max_like = self.fisher_calc(\n qb,\n cbl,\n obs,\n cls_noise=nell,\n cond_noise=None,\n delta_beta_prior=delta_beta_prior,\n null_first_cmb=null_first_cmb,\n likelihood=True,\n )\n\n dqb = pt.arr_to_dict(np.sqrt(np.abs(np.diag(inv_fish))), qb)\n qb_like = OrderedDict()\n\n for stag, qbs in qb.items():\n qb_like[stag] = np.zeros(\n (len(qbs), 2, like_profile_points), dtype=float\n )\n\n for ibin, q in enumerate(qbs):\n qb1 = copy.deepcopy(qb)\n dq = dqb[stag][ibin] * like_profile_sigma\n q_arr = np.linspace(q - dq, q + dq, like_profile_points)\n like_arr = np.zeros_like(q_arr)\n\n for iq, q1 in enumerate(q_arr):\n qb1[stag][ibin] = q1\n try:\n like = self.fisher_calc(\n qb1,\n cbl,\n obs,\n cls_noise=nell,\n cond_noise=None,\n delta_beta_prior=delta_beta_prior,\n null_first_cmb=null_first_cmb,\n likelihood=True,\n )\n except np.linalg.LinAlgError:\n like = np.nan\n\n like_arr[iq] = like\n\n self.log(\n \"{} bin {} delta qb {} delta like: {}\".format(\n stag, ibin, q1 - q, like - max_like\n ),\n \"debug\",\n )\n\n qb_like[stag][ibin] = np.vstack([q_arr, like_arr])\n\n out.update(max_like=max_like, qb_like=qb_like)\n\n if not success:\n save_name = \"ERROR_{}\".format(save_name)\n self.log(msg, \"error\")\n self.warn(msg)\n\n return self.save_data(\n save_name, map_tag=map_tag, bp_opts=True, extra_tag=file_tag, **out\n )",
"def cfLoadChunk(self, key, iter, data):\n params = [key, iter, data]\n \n return self.execute_command(self.CF_LOADCHUNK, *params)",
"def store(self,key,start,end,data):\n\n pass",
"def knapsack_iterate_back(save):\n pass",
"def __iter__(self):\n cursor=0\n while cursor<len(self):\n yield self._item[cursor].key\n cursor+=1",
"def train_bloom_filter(self, train_data):\n for val in train_data:\n if self.debug:\n print('val: ', val)\n for i in range(0, self.hash_size):\n k = self.hashes[i](val[0])\n if self.debug:\n print('k: ', k)\n self.bitarray[k] = 1\n if self.debug:\n print('___end training____')",
"def train_callback(self, model, iteration):\n if (self.rewind_it == iteration and self.rewind_state_dict is None):\n # Save the current model weights\n self.rewind_state_dict = copy.deepcopy(model.state_dict())",
"def put(self, key, processed_query):\n data = json.dumps(processed_query.to_cache())\n\n def commit_to_db(connection):\n cursor = connection.cursor()\n cursor.execute(\"\"\"\n INSERT OR IGNORE into queries values (?, ?, ?, ?, ?);\n \"\"\", (key,\n data,\n processed_query.query.text,\n processed_query.domain,\n processed_query.intent,\n ))\n connection.commit()\n\n if self.memory_connection:\n commit_to_db(self.memory_connection)\n rowid = self.key_to_row_id(key)\n self.batch_writes.append(str(rowid))\n if len(self.batch_writes) == self.batch_write_size:\n self.flush_to_disk()\n else:\n commit_to_db(self.disk_connection)\n\n return self.key_to_row_id(key)",
"def save (self):\n if self.newobj:\n using_sequence = self.sequence ()\n self.keyvals['id'] = using_sequence\n self.seq = using_sequence\n else:\n using_sequence = self.seq\n for key, val in self.keyvals.items ():\n r_key = self.prepare_key (key, using_sequence)\n r.set (r_key, val)\n self.keyvals = {}\n self.newobj = False",
"def _set(self, cmd, key, val, expiry_time, min_compress_len = 0):\n\t\tcheck_key(key)\n\t\tserver, key = yield self._get_server_for(key)\n\t\tif not server:\n\t\t\traise StopIteration(False)\n\n\t\tstored_info = self._value_to_stored(val, min_compress_len)\n\t\tif stored_info is None:\n\t\t\t# If it's not storable due to length, just return.\n\t\t\traise StopIteration(True)\n\t\tflags, stored = stored_info\n\t\t\n\n\t\tfull_cmd = \"%s %s %d %d %d\\r\\n%s\\r\\n\" % (cmd, key, flags, expiry_time, len(stored), stored)\n\n\t\ttry:\n\t\t\tyield server.sendall(full_cmd)\n\t\t\tres = yield server.read_line()\n\t\t\traise StopIteration(res == \"STORED\")\n\n\t\texcept tcp.ConnectionClosedException:\n\t\t\tserver.mark_dead()\n\n\t\traise StopIteration(False)",
"def inc(self, key):\n if key in self.cache:\n curr_freq = self.cache[key]\n self.freq[curr_freq].remove(key)\n\n if len(self.freq[curr_freq]) == 0:\n del self.freq[curr_freq]\n\n curr_freq += 1\n self.freq[curr_freq].add(key)\n self.cache[key] = curr_freq\n\n else:\n self.cache[key] = 1\n self.freq[1].add(key)",
"def __iter__(self):\n while True:\n if self.stop:\n return\n for item in self.get_next_batch():\n yield item"
] | [
"0.5560349",
"0.5399405",
"0.5302348",
"0.5262011",
"0.5024422",
"0.50054467",
"0.49560073",
"0.49460107",
"0.47173524",
"0.4702185",
"0.4692565",
"0.46744853",
"0.46606264",
"0.4658389",
"0.46374193",
"0.45671514",
"0.45634228",
"0.45548865",
"0.45259008",
"0.45051858",
"0.447539",
"0.4470084",
"0.44618055",
"0.44412106",
"0.44157273",
"0.44156754",
"0.4412035",
"0.43980506",
"0.43676764",
"0.43339434"
] | 0.6080377 | 0 |
Creates a new Cuckoo Filter ``key`` with an initial ``capacity`` of items. | def cfCreate(self, key, capacity, expansion=None, bucket_size=None, max_iterations=None):
params = [key, capacity]
self.appendExpansion(params, expansion)
self.appendBucketSize(params, bucket_size)
self.appendMaxIterations(params, max_iterations)
return self.execute_command(self.CF_RESERVE, *params) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __init__(self, capacity=100):\n \n self.capacity = capacity\n self.size = 0\n self._keys = []\n self._entry = [[] for _ in range(capacity)]",
"def __init__(self, capacity):\n self.capacity = capacity # Number of buckets in the hash table\n self.storage = [None] * capacity\n self.key_count = 0",
"def bfCreate(self, key, errorRate, capacity, expansion=None, noScale=None):\n params = [key, errorRate, capacity]\n self.appendExpansion(params, expansion)\n self.appendNoScale(params, noScale)\n\n return self.execute_command(self.BF_RESERVE, *params)",
"def __init__(self, capacity, fillValue=None):\r\n self._items = list()\r\n for count in range(capacity):\r\n self._items.append(fillValue)",
"def __init__(self, capacity, fillValue = None):\n \n self._items = list() \n self._fillValue = fillValue\n self._DEFAULT_CAPACITY = capacity\n self._logicalSize = 0 #as required by exercise 1\n \n \n for count in xrange(capacity):\n self._items.append(self._fillValue)",
"def __init__(self, capacity, initial):\n\t\tself.capacity = capacity\n\t\tself.amount = initial",
"def __init__(self, capacity, fillValue=None):\r\n self._items = list()\r\n self._logicalSize = 0\r\n # Track the capacity and fill value for adjustments later\r\n self._capacity = capacity\r\n self._fillValue = fillValue\r\n for count in range(capacity):\r\n self._items.append(fillValue)",
"def __init__(self, key):\n self.key = key\n self.BLOCK_SIZE = 16",
"def __init__(self, key):\n\n def keys(key, num_rounds):\n \"\"\"Yields the permuted key bitstring for i = 1..num_rounds\"\"\"\n C, D = key[:28], key[28:]\n # Rounds are 1-indexed, so shift array over by one\n left_shifts = [None, 1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1]\n for i in range(1, num_rounds + 1):\n # Negate each rotation to rotate left.\n C, D = rotate(C, -left_shifts[i]), rotate(D, -left_shifts[i])\n yield self.permute(C + D, self._CD_permutation)\n\n self.key = list(bits_of(key, 64))\n # Permute the key. The permutation discards the parity bits...\n self.key = self.permute(self.key, self._key_permutation)\n self.number_of_rounds = 16\n # A list of the 16 keys K1 .. K16, shifted over by one to allow 1-indexing.\n self.keys = [None] + list(keys(self.key, self.number_of_rounds))",
"def __init__(self, capacity, units=0):\n self.capacity = capacity\n self.units = units",
"def __init__(self, key=None):\n self.key = key",
"def __init__(self, k: int):\r\n self.capacity = k\r\n self.frontIndex = 0\r\n self.lastIndex = 1\r\n self.deque = [0] * self.capacity\r\n self.size = 0 # current size\r",
"def __init__(self, key):\n self.key = key",
"def new_key(self, key_name=None):\r\n return self.key_class(self, key_name)",
"def __init__(self, key: bytearray):\n self.__key = key\n self.__KSA(bytearray([i for i in range(256)]))",
"def __init__(self, key, default=NOT_GIVEN):\n self.key = adapt(key,IComponentKey)\n self.default = default",
"def __init__(self, knapsack_size, items):\n self.knapsack_size = knapsack_size\n self.items = items\n self._cache = dict()\n # fill-in the cache with base cases' (subproblems') solutions\n for size in range(knapsack_size + 1):\n # if there are no items, the max value is 0\n self._cache[(0, size)] = 0\n for end in range(len(items) + 1):\n # if the knapsack's size is 0 no items fit, the max value is 0\n self._cache[(end, 0)] = 0",
"def __init__(self, capacity, operation, neutral_element):\n assert capacity > 0 and capacity & (capacity - 1) == 0, \"capacity must be positive and a power of 2.\"\n self._capacity = capacity\n self._value = [neutral_element for _ in range(2 * capacity)]\n self._operation = operation",
"def __init__(self, capacity, operation, neutral_element):\n assert capacity > 0 and capacity & (capacity - 1) == 0, \"capacity must be positive and a power of 2.\"\n self._capacity = capacity\n self._value = [neutral_element for _ in range(2 * capacity)]\n self._operation = operation",
"def __init__(self, capacity, operation, neutral_element):\n assert capacity > 0 and capacity & (capacity - 1) == 0, \"capacity must be positive and a power of 2.\"\n self._capacity = capacity\n self._value = [neutral_element for _ in range(2 * capacity)]\n self._operation = operation",
"def __init__(self, k: int):\n self.capacity = k\n self.head = None\n self.tail = None\n self.count = 0",
"def __init__(self, capacity, operation, neutral_element):\n assert (\n capacity > 0 and capacity & (capacity - 1) == 0\n ), \"capacity must be positive and a power of 2.\"\n self._capacity = capacity\n self._value = [neutral_element for _ in range(2 * capacity)]\n self._operation = operation\n self.neutral_element = neutral_element",
"def __init__(self, server, bfkeypreffix, capacity, error_rate=0.001):\n if not (0 < error_rate < 1):\n raise ValueError(\"Error_Rate must be between 0 and 1.\")\n if not capacity > 0:\n raise ValueError(\"Capacity must be > 0\")\n # given M = num_bits, k = num_slices, P = error_rate, n = capacity\n # k = log2(1/P)\n # solving for m = bits_per_slice\n # n ~= M * ((ln(2) ** 2) / abs(ln(P)))\n # n ~= (k * m) * ((ln(2) ** 2) / abs(ln(P)))\n # m ~= n * abs(ln(P)) / (k * (ln(2) ** 2))\n num_slices = int(math.ceil(math.log(1.0 / error_rate, 2)))\n bits_per_slice = int(math.ceil(\n (capacity * abs(math.log(error_rate))) /\n (num_slices * (math.log(2) ** 2))))\n if bits_per_slice > MAX_PER_SLICE_SIZE:\n raise ValueError(\"Capacity and error_rate make per slice size extended, MAX_PER_SLICE_SIZE is %s\" % (MAX_PER_SLICE_SIZE))\n self._setup(error_rate, num_slices, bits_per_slice, capacity, 0, server, bfkeypreffix)",
"def __init__(self, capacity=4):\n self.capacity = capacity\n self.size = 0\n self.table = [None] * capacity",
"def knapsack(items, capacity):\r\n pass",
"def __init__(self, capacity=2):\r\n self._capacity = capacity\r\n self._data = [0] * self._capacity\r\n self._size = 0",
"def __init__(__self__, *,\n capacity: Optional[int] = None,\n name: Optional[str] = None):\n if capacity is not None:\n pulumi.set(__self__, \"capacity\", capacity)\n if name is not None:\n pulumi.set(__self__, \"name\", name)",
"def __init__(self, key):\n self.bs = 16\n self.key = hashlib.sha256(key.encode()).digest()",
"def __init__(self, k, num_buckets, fp_size, bucket_size, max_iter):\n self.children: List[Node] = []\n self.parent: Optional[Node] = None\n self.filter = CuckooFilterBit(num_buckets, fp_size, bucket_size, max_iter)\n\n self.dataset_id: Optional[str] = None\n self.k = k",
"def __init__(self, key):\n self.key = [int_mapping(k) for k in key]"
] | [
"0.63611585",
"0.6257026",
"0.6124389",
"0.6099812",
"0.5895444",
"0.5895253",
"0.58910316",
"0.58668506",
"0.58037305",
"0.5686704",
"0.5662281",
"0.5616573",
"0.5591175",
"0.5572409",
"0.5540789",
"0.5507857",
"0.54945195",
"0.5490829",
"0.5490829",
"0.5490829",
"0.5427748",
"0.54269576",
"0.5423259",
"0.54201984",
"0.54197884",
"0.5410173",
"0.54084855",
"0.5400501",
"0.5374076",
"0.5364273"
] | 0.675632 | 0 |
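Illustrative usage for the ``cfCreate`` wrapper above (it issues CF.RESERVE) — a minimal sketch, assuming a connected client object that exposes this method; the key names and sizes are made up:

    # Reserve a Cuckoo filter sized for roughly 10k items
    client.cfCreate('cf:users', 10000)
    # Optional arguments map to the EXPANSION / BUCKETSIZE / MAXITERATIONS modifiers
    client.cfCreate('cf:events', 50000, expansion=2, bucket_size=4, max_iterations=50)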
Adds an ``item`` to a Cuckoo Filter ``key``. | def cfAdd(self, key, item):
params = [key, item]
return self.execute_command(self.CF_ADD, *params) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def bfAdd(self, key, item):\n params = [key, item]\n \n return self.execute_command(self.BF_ADD, *params)",
"def add_item(self, key, item):\n self[key].add(item)\n try:\n self._reverse_store[item].add(key)\n except KeyError:\n self._reverse_store[item] = set([key])",
"def add(self, item):\n self._dict[item] = item",
"def append(self, item):\n # FIXME: this is only append if the key isn't already present\n key, value = item\n self._main[key] = value",
"def add(self, key, value):",
"def filter_matches_add(self, key, value):\n\t\tif key in self.filter_matches.keys():\n\t\t\tself.filter_matches[key].append(value)\n\t\telse:\n\t\t\tself.filter_matches[key]=[value]",
"def add(self, item):\n self.num_item += 1\n indexs = self.__get_indexs(item)\n for index in indexs:\n self.filter_bitarray[index] = True",
"def _add_item_by_item(self, item):\n self.item_list[item.call_number] = item",
"def cfAddNX(self, key, item):\n params = [key, item]\n \n return self.execute_command(self.CF_ADDNX, *params)",
"def add(self, item):",
"def add_item (self, item):\n new_item = CacheItem (item)\n cached = self.cache.get(hash(item))\n if cached is None:\n self.evict_or_add (new_item)\n cached.hits += 1",
"def put(self, key, item):\n if key and item:\n self.cache_data[key] = item",
"def put(self, key, item):\n if key and item:\n self.cache_data[key] = item",
"def add_filter(self, filter):\n self._filters.append(filter.as_dict())",
"def add_item(self, item):\n self.items.append(item)",
"def add(self, key, value):\n self.data.append((key, value))",
"def add(self, key):\n if key in self:\n return True\n if not self.filters:\n filter = RedisLocalBloomFilter(\n server=self.server, \n bfkeypreffix = self.FILTER_KEY_FMT % (self.bfkeypreffix, self.filter_count),\n capacity=self.initial_capacity,\n error_rate=self.error_rate * (1.0 - self.ratio))\n self.filter_count += 1\n self.filters.append(filter)\n else:\n filter = self.filters[-1]\n if filter.count >= filter.capacity:\n capacity = filter.capacity * self.scale\n if capacity > MAX_PER_SLICE_SIZE:\n capacity = MAX_PER_SLICE_SIZE\n filter = RedisLocalBloomFilter(\n server=self.server,\n bfkeypreffix = self.FILTER_KEY_FMT % (self.bfkeypreffix, self.filter_count),\n capacity=capacity,\n error_rate=self.error_rate * (1.0 - self.ratio))\n self.filter_count += 1\n self.filters.append(filter)\n if self.max_filters > 0 and len(self.filters) >= self.max_filters:\n f = self.filters[0]\n f.clear()\n del self.filters[0]\n filter.add(key, skip_check=True)\n return False",
"def __setitem__(self, key, item):\n self.set_field(key, item)",
"def add_item(dct,item):\r\n if item not in dct[0]:\r\n print \"itemNotFound \",str(item)\r\n return False\r\n\r\n num=len(item)\r\n if num in dct:\r\n if item in dct[num]:\r\n return False\r\n else:\r\n dct[num].append(item)\r\n return True\r\n else:\r\n dct[num]=[item]\r\n return True",
"def add_to_inventory(self, item, quantity):\n\t\tincreaseQuantity = None\n\t\taddToDict = True\n\t\tfor key in self.inventoryDictionary:\n\t\t\tif key.name == item.name:\n\t\t\t\taddToDict = False\n\t\t\t\tincreaseQuantity = key\n\t\t\t\tbreak\n\t\t\t\t\n\n\t\t\telse:\n\t\t\t\taddToDict = True\n\t\t\t\t\n\n\t\tif addToDict:\n\t\t\tself.inventoryDictionary[item] = quantity\n\t\telse:\n\t\t\tself.inventoryDictionary[increaseQuantity] += quantity",
"def add(self, key, value):\n self._data.add_last(self._Item(key, value))",
"def add_item(self, item):\n self.items_with_price.update(item)",
"def put(self, key, item):\n raise NotImplementedError(\"put must be implemented in your cache class\")",
"def add(self, item: Mapping[Hashable, Any], **kwargs: Any) -> None:\n self.contents.update(item, **kwargs)\n return",
"def add(self, key, val):\n self.obtain(key).append(val)",
"def add(self, item, issue):\n if self.has_key(item):\n self[item].append(issue)\n else:\n self[item] = [issue]\n return 1",
"def _single_setitem(self, key, item):\n self._dict[key] = item",
"def add_item(self, item_to_append):\n self.items.append(item_to_append)",
"def addItem(*args):",
"def addItem(*args):"
] | [
"0.7453558",
"0.6998339",
"0.6921159",
"0.69202787",
"0.6884508",
"0.6801688",
"0.6676187",
"0.655837",
"0.65446305",
"0.65341115",
"0.64497375",
"0.63943213",
"0.63943213",
"0.63741636",
"0.63548404",
"0.63431346",
"0.6286365",
"0.6278622",
"0.62722087",
"0.6255055",
"0.62299794",
"0.62193507",
"0.61958456",
"0.6178707",
"0.61615276",
"0.61432326",
"0.61413556",
"0.61286783",
"0.6123936",
"0.6123936"
] | 0.7630527 | 0 |
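A hedged usage sketch for ``cfAdd`` above (wraps CF.ADD), assuming the same hypothetical client object:

    # Add a single item; CF.ADD allows duplicates, so repeated calls insert
    # additional copies of the item's fingerprint (cfAddNX adds only if absent)
    client.cfAdd('cf:users', 'user:42')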
Adds multiple ``items`` to a Cuckoo Filter ``key``, allowing the filter to be created with a custom ``capacity`` if it does not yet exist. ``items`` must be provided as a list. | def cfInsert(self, key, items, capacity=None, nocreate=None):
params = [key]
self.appendCapacity(params, capacity)
self.appendNoCreate(params, nocreate)
self.appendItems(params, items)
return self.execute_command(self.CF_INSERT, *params) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cfAdd(self, key, item):\n params = [key, item]\n \n return self.execute_command(self.CF_ADD, *params)",
"def bfAdd(self, key, item):\n params = [key, item]\n \n return self.execute_command(self.BF_ADD, *params)",
"def add_items(self, items):\n for item in items:\n self.add(item)",
"def addItems(c, items):\n\t\tcontainer.containersToSave[c['id_item_container']] = item.inventory.addItems(\n\t\t\titem.inventory.fromStr(c['items']),\n\t\t\titems\n\t\t)",
"def bfInsert(self, key, items, capacity=None, error=None, noCreate=None, expansion=None, noScale=None):\n params = [key]\n self.appendCapacity(params, capacity)\n self.appendError(params, error)\n self.appendExpansion(params, expansion)\n self.appendNoCreate(params, noCreate)\n self.appendNoScale(params, noScale)\n self.appendItems(params, items)\n\n return self.execute_command(self.BF_INSERT, *params)",
"def addItems(*args):",
"def add(self, *items):\n for item in items:\n self.unsorted.append(item)\n key = item[0]\n self.index[key] = item\n return self",
"def append(self, items):\n self.__add__(items)",
"def topkAdd(self, key, *items):\n params = [key]\n params += items\n \n return self.execute_command(self.TOPK_ADD, *params)",
"def add_items(self, items):\n for item in items:\n self.addItem(item)\n # end for item in items",
"def append(self, *items: BOSminer) -> None:\n for item in items:\n self.miners[item.ip] = item",
"def add(self, *items):",
"def update(self, *items):\n for item in items:\n self.add(item)",
"def add_items(self, items: typing.Iterable[str]) -> None:\n for item in items:\n self.add_item(item)",
"def add(self, item):\n self.num_item += 1\n indexs = self.__get_indexs(item)\n for index in indexs:\n self.filter_bitarray[index] = True",
"def extend(self, items):\n\t\tfor item in items:\n\t\t\tself.append(item)",
"def add(self, items):\n if isinstance(items, list):\n self.items.extend(items)\n else:\n self.items.append(items)",
"def update(self, items: Mapping[Any, Any]) -> None:\n self.extend(list(items.values()))\n return",
"def addToWatchlist(self, items):\n if not isinstance(items, list):\n items = [items]\n\n for item in items:\n if self.onWatchlist(item):\n raise BadRequest(f'\"{item.title}\" is already on the watchlist')\n ratingKey = item.guid.rsplit('/', 1)[-1]\n self.query(f'{self.METADATA}/actions/addToWatchlist?ratingKey={ratingKey}', method=self._session.put)\n return self",
"def add_items(self, items: Iterable[_T]) -> None:\n for item in items:\n self.add_item(item)",
"def knapsack(items, capacity):\r\n pass",
"def add_toolbar_items(self, *toolbar_items):\n self.items += [self._map_item(item) for item in toolbar_items]",
"def add(self, key):\n if key in self:\n return True\n if not self.filters:\n filter = RedisLocalBloomFilter(\n server=self.server, \n bfkeypreffix = self.FILTER_KEY_FMT % (self.bfkeypreffix, self.filter_count),\n capacity=self.initial_capacity,\n error_rate=self.error_rate * (1.0 - self.ratio))\n self.filter_count += 1\n self.filters.append(filter)\n else:\n filter = self.filters[-1]\n if filter.count >= filter.capacity:\n capacity = filter.capacity * self.scale\n if capacity > MAX_PER_SLICE_SIZE:\n capacity = MAX_PER_SLICE_SIZE\n filter = RedisLocalBloomFilter(\n server=self.server,\n bfkeypreffix = self.FILTER_KEY_FMT % (self.bfkeypreffix, self.filter_count),\n capacity=capacity,\n error_rate=self.error_rate * (1.0 - self.ratio))\n self.filter_count += 1\n self.filters.append(filter)\n if self.max_filters > 0 and len(self.filters) >= self.max_filters:\n f = self.filters[0]\n f.clear()\n del self.filters[0]\n filter.add(key, skip_check=True)\n return False",
"def cfInsertNX(self, key, items, capacity=None, nocreate=None):\n params = [key]\n self.appendCapacity(params, capacity)\n self.appendNoCreate(params, nocreate)\n self.appendItems(params, items)\n\n return self.execute_command(self.CF_INSERTNX, *params)",
"def add_items(items, cities, img_path, mask_path, mask_postfix):\n\n for c in cities:\n c_items = [name.split('_leftImg8bit.png')[0] for name in\n os.listdir(os.path.join(img_path, c))]\n for it in c_items:\n item = (os.path.join(img_path, c, it + '_leftImg8bit.png'),\n os.path.join(mask_path, c, it + mask_postfix))\n items.append(item)",
"def _additems(self, w,h):\n for idx in range(len(self.data['items'])):\n default={\n 'color': self.data['itemscolor'],\n 'textscale': self.data['itemsscale'],\n 'textfont': self.data['textfont'],\n 'width': w-(self.data['margin'][0]*2.),\n }\n self.data['items'][idx].update(default)\n self.addItem(idx, **self.data['items'][idx])",
"def filter_matches_add(self, key, value):\n\t\tif key in self.filter_matches.keys():\n\t\t\tself.filter_matches[key].append(value)\n\t\telse:\n\t\t\tself.filter_matches[key]=[value]",
"def apply_filters(filters, items):\n return scom.apply_filters(filters, items)",
"def cache_db_items(self, key, items, item_key='id'):\n db_items = self._extension_data['db_items'].setdefault(key, {})\n for item in items:\n db_items[item[item_key]] = item",
"def add_items(self, items):\n\n self.model.add_items(items)\n self.refreshed.emit()"
] | [
"0.60938436",
"0.6008798",
"0.5990414",
"0.58841145",
"0.58353513",
"0.5737808",
"0.5727663",
"0.57137096",
"0.565862",
"0.56526506",
"0.5644674",
"0.56005716",
"0.5593892",
"0.5568903",
"0.55669373",
"0.55430824",
"0.55396664",
"0.55076975",
"0.5490069",
"0.54595166",
"0.5393595",
"0.53759456",
"0.53673685",
"0.5350819",
"0.5266138",
"0.5203471",
"0.5194724",
"0.51828015",
"0.51543474",
"0.5128592"
] | 0.6239115 | 0 |
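A possible call pattern for ``cfInsert`` above (wraps CF.INSERT), again assuming a hypothetical client instance:

    # Insert several items at once; the filter is created with the given
    # capacity if it does not yet exist
    client.cfInsert('cf:users', ['u1', 'u2', 'u3'], capacity=10000)
    # With nocreate set, the command fails instead of creating a missing filter
    client.cfInsert('cf:users', ['u4'], nocreate=True)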
Checks whether an ``item`` exists in Cuckoo Filter ``key``. | def cfExists(self, key, item):
params = [key, item]
return self.execute_command(self.CF_EXISTS, *params) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __contains__(self, item, key):\n ndx = self._findPostion(key)\n return ndx is not None",
"def bfExists(self, key, item):\n params = [key, item]\n \n return self.execute_command(self.BF_EXISTS, *params)",
"def item_has_key(self, item, key):\n if key in self._reverse_store[item]:\n return True\n else:\n return False",
"def contains(self, item):\n return self._dict.has_key(item)\n\n self.__contains__ = contains",
"def __contains__(self, item):\n try:\n self[item]\n return True\n except KeyError:\n return False",
"def has_item(self, item):\n return item in self.cache",
"def has_item(self, item):\n return item in self.set",
"def has_item(self, item):\n return item in self.set",
"def item_exists(item_id):\n return item_id in all_items",
"def has_item(self, usage_key):\r\n try:\r\n self._find_one(usage_key)\r\n return True\r\n except ItemNotFoundError:\r\n return False",
"def __contains__(self, item):\n\n if self[item]:\n return True\n return False",
"def exista(self, item):\n if item not in self._items:\n return False\n for x in self._items:\n if x == item:\n return True",
"def __contains__(self, item):\n try:\n hdu = self[item] # noqa\n return True\n except Exception:\n return False",
"def __contains__(self, item: object) -> bool:\n val = conv_kv(item) # type: ignore\n for fixup in self._mapping._fixup.values():\n if fixup.value == val:\n return True\n return False",
"def contains(self, item):\n if isinstance(item, dict):\n return _(item).all(lambda key: self._.get(key) == item[key])\n return item in self",
"def __contains__(self, key):\n return self.keys[self._linear_probe(key, \"contains\")] is not None",
"def __contains__(self, item):\n return item in self._data",
"def has(self, key):",
"def has(cls, item):\n return item in cls.values()",
"def __contains__(self, key):\n found = True\n try:\n self.__getitem__(key)\n except:\n found = False\n return found",
"def __contains__(self, item):\n\t\treturn item in self.__dict__.values()",
"def array_key_exists(name, item):\n return item.has_key(name);",
"def has_key(self, key):\n return self.contains(key)",
"def has_item(self, usage_key):\r\n return usage_key in self.modules[usage_key.course_key]",
"def __contains__(self, key):\n return self._lookup(key).value is not None",
"def _has(self, key):\n path = self._get_key_path(key)\n return exists(path)",
"def contains(self, key: int) -> bool:\n lv1, lv2 = self.hashing(key)\n \n for item in self.cont[lv1][lv2]:\n if item==key:\n return True\n \n return False",
"def __contains__(self, item):\n return item in self.__keys or item in self.__vals",
"def has_key(self, key):\n return key in self",
"def __contains__(self, key):\n for f in reversed(self.filters):\n if key in f:\n return True\n return False"
] | [
"0.77499324",
"0.746354",
"0.74272346",
"0.73767465",
"0.7316772",
"0.7260363",
"0.72131723",
"0.72131723",
"0.72055244",
"0.7191276",
"0.70799226",
"0.7073482",
"0.70224476",
"0.701648",
"0.6999903",
"0.69994754",
"0.6997449",
"0.6991005",
"0.6959211",
"0.69314",
"0.69124043",
"0.690909",
"0.6902491",
"0.68843997",
"0.688126",
"0.6871489",
"0.6857937",
"0.6829655",
"0.68289304",
"0.6809889"
] | 0.76099366 | 1 |
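Usage sketch for ``cfExists`` above (wraps CF.EXISTS); the membership semantics follow the usual Cuckoo-filter guarantees:

    # 0 means definitely absent; 1 means possibly present (false positives can occur)
    present = client.cfExists('cf:users', 'user:42')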
Deletes ``item`` from ``key``. | def cfDel(self, key, item):
params = [key, item]
return self.execute_command(self.CF_DEL, *params) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def remove_item(self, key, item):\n self[key].remove(item)\n self._remove_reverse_mapping(item, key)",
"def __delitem__(self, key):\n self.delete(key)",
"def __delitem__(self, key):\n self.delete(key)",
"def __delitem__(self, key):\n self.f_remove(key)",
"def delete_item(self, key):\n deleted_slot = self.count_hash(key, len(self.slots))\n\n if self.slots[deleted_slot] == key:\n self.slots[deleted_slot] = None\n self.data[deleted_slot] = None\n elif isinstance(self.slots[deleted_slot], tuple):\n index_tuple = (self.slots[deleted_slot].index(key))\n list_slot = list(self.slots[deleted_slot])\n list_data = list(self.data[deleted_slot])\n list_slot.pop(index_tuple)\n list_data.pop(index_tuple)\n self.slots[deleted_slot] = tuple(list_slot)\n self.data[deleted_slot] = tuple(list_data)",
"def __delitem__(self, key: T) -> None:\n self.delete(key)",
"def __delitem__(self, key):\n with self.__lock:\n log.debug(\"__delitem__: {}\".format(key))\n del self._d[key]",
"def __delitem__(self, key):\n del self.list[key]",
"def __delitem__(self, key: tuple):\n s, a = key\n del self.store[s][a]",
"def __delitem__(self, key):\r\n self.client.delete(id=key, ignore=[404], **self.kwargs)",
"def __delitem__(self, key, *args, **kwargs):\n self._del(key, *args, **kwargs)",
"def remove(self, item):\n del self._dict[item]",
"def __delitem__(self, key):\n try:\n kvp = self.keyvaluepair_set.get(key=key)\n except KeyValuePair.DoesNotExist:\n raise KeyError\n else:\n kvp.delete()",
"def __delitem__(self, key):\r\n key = self.key(key)\r\n if key in self.data_with_same_key:\r\n if len(self.data_with_same_key[key]) == 1:\r\n self.data[key] = self.data_with_same_key.pop(key)[0]\r\n else:\r\n self.data[key] = self.data_with_same_key[key].pop(-1)\r\n else:\r\n del self.data[key]",
"def __delitem__(self, key):\n del self._data[key]",
"def del_item(self, item):\n index = self.board[item.pos[0]][item.pos[1]].index(item)\n del self.board[item.pos[0]][item.pos[1]][index]",
"def __delitem__(self, key):\n self.deleteAttributes([key])",
"def remove_item(self, item):\r\n\r\n for key in self._inner_dict:\r\n if item in self._inner_dict[key]:\r\n idx = self._inner_dict[key].index(item)\r\n del self._inner_dict[key][idx]",
"def __delitem__(self, key):\n pass",
"def __delitem__(self, key):\n pass",
"def __delitem__(self, key):\n del self._get_storage()[key]",
"def __delitem__(self, key):\n\t\tdel self.__dStore[key]",
"def __delitem__(self, key):\n if self._size > 1:\n node_to_delete = self._getItemHelper(key, self._root)\n if node_to_delete:\n self._delItemHelper(node_to_delete)\n self._size -= 1\n else:\n raise KeyError('Key is not in the tree.')\n elif self._size == 1 and self._root.key == key:\n self._root = None\n self._size -= 1\n else:\n raise KeyError('Key is not in the tree.')",
"def __delitem__(self, key):\n del self.elements[key]",
"def __delitem__(self, key):\n i, kv_pair = self._lookup(key, self._backing)\n if kv_pair and not kv_pair.value is Hashmap.absent:\n self._backing[i] = KeyValue(key, Hashmap.absent)\n self._deleted += 1\n\n size = len(self._backing)\n utilization = (self._used - self._deleted)/size \n if utilization < 0.16:\n self._resize(self._decr_size(size))\n else:\n raise KeyError('no such item!')",
"def delete(self, item):\r\n self.fetch()\r\n t = self.make_item_tuple(item)\r\n changed = False\r\n while t in self.data:\r\n self.data.remove(t)\r\n changed = True\r\n \r\n if changed:\r\n query_cache.set(self.iden, self.data)",
"def _delete(self, item):\n self.cv.delete(item)",
"def __delitem__(self, key: Hashable) -> None:\n del self.contents[key]\n return",
"def _map___delitem__(self, key):\n if not isinstance(key, self.keytype):\n raise KeyError('type of `key` should be ' + repr(self.keytype) + ' but got ' + repr(type(key)))\n if key not in self:\n raise KeyError('key not found')\n self.erase(self.find(key))\n return",
"def __delitem__(self, key):\n\n bucket_key = self.key_for_bucket(key)\n del self.buckets[bucket_key][key]\n\n if not self.buckets[bucket_key]:\n del self.buckets[bucket_key]"
] | [
"0.78592277",
"0.7817014",
"0.7817014",
"0.7764789",
"0.77646744",
"0.7654799",
"0.76423573",
"0.75042117",
"0.7504201",
"0.74854916",
"0.7483856",
"0.7433788",
"0.74314106",
"0.73982894",
"0.73960876",
"0.7380565",
"0.73796886",
"0.7366733",
"0.73262364",
"0.73262364",
"0.73161906",
"0.7287357",
"0.72806454",
"0.720359",
"0.719625",
"0.71946526",
"0.71944094",
"0.71775186",
"0.71710634",
"0.7161939"
] | 0.7879571 | 0 |
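Usage sketch for ``cfDel`` above (wraps CF.DEL), with the standard Cuckoo-filter caveat noted as a comment:

    # Remove one occurrence of the item's fingerprint; deleting an item that was
    # never inserted may remove a colliding fingerprint, so only delete items
    # known to be present
    client.cfDel('cf:users', 'user:42')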
Begins an incremental save of the Cuckoo filter ``key``. This is useful for large Cuckoo filters which cannot fit into the normal SAVE and RESTORE model. The first time this command is called, the value of ``iter`` should be 0. This command will return successive (iter, data) pairs until (0, NULL) to indicate completion. | def cfScandump(self, key, iter):
params = [key, iter]
return self.execute_command(self.CF_SCANDUMP, *params) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def bfScandump(self, key, iter):\n params = [key, iter]\n \n return self.execute_command(self.BF_SCANDUMP, *params)",
"def save(self) -> dict:\n for pair in self._buffer:\n yield pair.save()",
"def __iter__(self):\r\n for item in self._data:\r\n yield item # yield the KEY\r",
"def save(self) -> None:\n self._bin_iter.save()",
"def _iter(self, key, count, increment=1):\n key %= self.size\n while count > 0:\n try:\n yield self.db[key]\n except KeyError:\n # This shouldn't happen, but there's really nothing we can do if it does.\n # Skip over the damaged part of our database, ignoring the missing item.\n pass\n key = (key + increment) % self.size\n count -= 1",
"def __iter__(self):\n if not self.loading:\n self.reset_loading()\n self.current_batch_index = 0\n return self",
"def __iter__(self):\n try:\n i = self.db[self._headKey]\n while True:\n yield i\n i = self.db[self._getNextKey(i)]\n except KeyError:\n pass",
"def knapsack_iterate_back(save):\n pass",
"def add(self, key):\n\t\t#super(CountingBloomFilter, self).add(key)\n\t\t#super(CountingBloomFilter, self).generateStats()\n\t\tfor i in self.getBitArrayIndices(key):\n\t\t\tself.ba[i] += 1\n\t\tself.n += 1",
"def train_callback(self, model, iteration):\n if (self.rewind_it == iteration and self.rewind_state_dict is None):\n # Save the current model weights\n self.rewind_state_dict = copy.deepcopy(model.state_dict())",
"def bfLoadChunk(self, key, iter, data):\n params = [key, iter, data]\n \n return self.execute_command(self.BF_LOADCHUNK, *params)",
"def _save(self, itr):\n # using keep_checkpoint_every_n_hours as proxy for iterations between saves\n if self.saver and (itr + 1) % self.saver._keep_checkpoint_every_n_hours == 0:\n\n # collect params (or stuff to keep in general)\n params = dict()\n params['critic'] = self.critic.network.get_param_values()\n\n # if the environment is wrapped in a normalizing env, save those stats\n normalized_env = hgail.misc.utils.extract_normalizing_env(self.env)\n if normalized_env is not None:\n params['normalzing'] = dict(\n obs_mean=normalized_env._obs_mean,\n obs_var=normalized_env._obs_var\n )\n\n # save hierarchy\n for i, level in enumerate(self.hierarchy):\n params[i] = dict()\n params[i]['policy'] = level.algo.policy.get_param_values()\n \n # save params \n save_dir = os.path.split(self.saver_filepath)[0]\n hgail.misc.utils.save_params(save_dir, params, itr+1, max_to_keep=50)",
"def cfLoadChunk(self, key, iter, data):\n params = [key, iter, data]\n \n return self.execute_command(self.CF_LOADCHUNK, *params)",
"def __iter__(self):\n while True:\n if self.stop:\n return\n for item in self.get_next_batch():\n yield item",
"def keys_fetch(self):\n with self.env.begin(write=False) as txn:\n cursor = txn.cursor()\n tot = txn.stat()['entries']\n i = 0\n\n path = self.db_path\n base_name = self.base_path\n cache_file_path = os.path.join(path, '_cache_' + base_name + '.pkl')\n print('cache_file_path = ', cache_file_path) # DEBUG\n\n if os.path.isfile(cache_file_path):\n self.keys = pickle.load(open(cache_file_path, 'rb'))\n self._num_examples = tot\n else:\n keys = []\n for key, _ in cursor:\n i += 1\n if i % 1000 == 0 or i == tot:\n print('Fetching {:>8d} /{:>8d} keys'.format(i, tot),\n end='\\r')\n keys.append(key)\n print('\\nDone.')\n self._num_examples = tot\n self.keys = np.asarray(keys)\n pickle.dump(self.keys, open(cache_file_path, 'wb'))",
"def fisher_iterate(\n self,\n cbl,\n map_tag=None,\n iter_max=200,\n converge_criteria=0.005,\n qb_start=None,\n transfer_run=False,\n save_iters=False,\n null_first_cmb=False,\n delta_beta_prior=None,\n cond_noise=None,\n cond_criteria=None,\n like_profiles=False,\n like_profile_sigma=3.0,\n like_profile_points=100,\n file_tag=None,\n ):\n\n save_name = \"transfer\" if transfer_run else \"bandpowers\"\n\n if transfer_run:\n null_first_cmb = False\n\n # previous fqb iterations to monitor convergence and adjust conditioning\n prev_fqb = []\n cond_adjusted = False\n\n if qb_start is None:\n qb = OrderedDict()\n for k, v in self.bin_def.items():\n if transfer_run:\n if \"cmb\" not in k or \"eb\" in k or \"tb\" in k:\n continue\n if k == \"delta_beta\":\n # qb_delta beta is a coefficient on the change from beta,\n # so expect that it should be small if beta_ref is close\n # (zeroes cause singular matrix problems)\n qb[k] = [self.delta_beta_fix]\n elif k.startswith(\"res_\") or k.startswith(\"fg_\"):\n # res qb=0 means noise model is 100% accurate.\n qb[k] = 1e-5 * np.ones(len(v))\n else:\n # start by assuming model is 100% accurate\n qb[k] = np.ones(len(v))\n else:\n qb = qb_start\n\n obs, nell, debias = self.get_data_spectra(\n map_tag=map_tag, transfer_run=transfer_run\n )\n\n bin_index = pt.dict_to_index(self.bin_def)\n\n success = False\n for iter_idx in range(iter_max):\n self.log(\n \"Doing Fisher step {}/{}...\".format(iter_idx + 1, iter_max), \"info\"\n )\n\n qb_new, inv_fish = self.fisher_calc(\n qb,\n cbl,\n obs,\n cls_noise=nell,\n cls_debias=debias,\n cond_noise=cond_noise,\n delta_beta_prior=delta_beta_prior,\n cond_criteria=cond_criteria,\n null_first_cmb=null_first_cmb,\n )\n\n qb_arr = pt.dict_to_arr(qb, flatten=True)\n qb_new_arr = pt.dict_to_arr(qb_new, flatten=True)\n dqb = qb_new_arr - qb_arr\n fqb = dqb / qb_arr\n max_fqb = np.nanmax(np.abs(fqb))\n\n prev_fqb.append(max_fqb)\n\n fnan = np.isnan(fqb)\n if fnan.any():\n (nanidx,) = np.where(fnan)\n self.log(\n \"Iter {}: Ignoring {} bins with fqb=nan: bins={}, qb_new={}, \"\n \"qb={}\".format(\n iter_idx,\n len(nanidx),\n nanidx,\n qb_new_arr[nanidx],\n qb_arr[nanidx],\n ),\n \"warning\",\n )\n\n self.log(\"Max fractional change in qb: {}\".format(max_fqb), \"info\")\n\n # put qb_new in original dict\n qb = copy.deepcopy(qb_new)\n cls_model = self.get_model_spectra(\n qb, cbl, delta=True, cls_noise=nell, cond_noise=None\n )\n\n if \"delta_beta\" in qb:\n # get beta fit and beta error\n beta_fit = qb[\"delta_beta\"][0] + self.beta_ref\n db_idx = slice(*bin_index[\"delta_beta\"])\n beta_err = np.sqrt(np.diag(inv_fish[db_idx, db_idx]))[0]\n else:\n beta_fit = None\n beta_err = None\n\n if save_iters:\n # save only the quantities that change with each iteration\n out = dict(\n map_tag=map_tag,\n map_tags=self.map_tags,\n iter_index=iter_idx,\n bin_def=self.bin_def,\n bin_weights=self.bin_weights,\n cls_shape=self.cls_shape,\n cls_obs=obs,\n qb=qb,\n fqb=fqb,\n inv_fish=inv_fish,\n cls_model=cls_model,\n cbl=cbl,\n map_freqs=self.map_freqs,\n cls_signal=self.cls_signal,\n cls_noise=self.cls_noise,\n Dmat_obs=self.Dmat_obs,\n gmat_ell=self.gmat_ell,\n extra_tag=file_tag,\n )\n\n if \"fg_tt\" in self.bin_def:\n out.update(\n beta_fit=beta_fit,\n beta_err=beta_err,\n ref_freq=self.ref_freq,\n beta_ref=self.beta_ref,\n )\n\n self.save_data(save_name, bp_opts=not transfer_run, **out)\n\n (nans,) = np.where(np.isnan(qb_new_arr))\n if len(nans):\n msg = \"Found NaN values in qb bins {} at iter {}\".format(nans, iter_idx)\n break\n\n if 
fnan.all():\n msg = (\n \"All bins have fqb=NaN at iter {}, \"\n \"something has gone horribly wrong.\".format(iter_idx)\n )\n break\n\n negs = np.where(np.diag(inv_fish) < 0)[0]\n if len(negs):\n self.log(\n \"Iter {}: Found negatives in inv_fish diagonal at locations \"\n \"{}\".format(iter_idx, negs),\n \"warning\",\n )\n\n if np.nanmax(np.abs(fqb)) < converge_criteria:\n if not transfer_run:\n # Calculate final fisher matrix without conditioning\n self.log(\"Calculating final Fisher matrix.\", \"info\")\n _, inv_fish = self.fisher_calc(\n qb,\n cbl,\n obs,\n cls_noise=nell,\n cls_debias=debias,\n cond_noise=None,\n delta_beta_prior=delta_beta_prior,\n null_first_cmb=null_first_cmb,\n )\n\n # If any diagonals of inv_fisher are negative, something went wrong\n negs = np.where(np.diag(inv_fish) < 0)[0]\n if len(negs):\n self.log(\n \"Found negatives in inv_fish diagonal at locations \"\n \"{}\".format(negs),\n \"warning\",\n )\n\n success = True\n break\n\n else:\n msg = \"{} {} did not converge in {} iterations\".format(\n \"Multi-map\" if map_tag is None else \"Map {}\".format(map_tag),\n \"transfer function\" if transfer_run else \"spectrum\",\n iter_max,\n )\n # Check the slope of the last ten fqb_maxpoints.\n # If there's not a downward trend, adjust conditioning\n # criteria to help convergence.\n if len(prev_fqb) <= 10 or transfer_run:\n continue\n m, b = np.polyfit(np.arange(10), prev_fqb[-10:], 1)\n if m > 0: # Not converging\n # First, start from very little conditioning\n if not cond_adjusted:\n cond_criteria = 5e3\n cond_adjusted = True\n self.log(\n \"Iter {}: Not converging. Setting cond_criteria={}\".format(\n iter_idx, cond_criteria\n ),\n \"warning\",\n )\n\n elif cond_criteria > 100:\n cond_criteria /= 2.0\n self.log(\n \"Iter {}: Tightening condition criteria to help convergence. \"\n \"cond_criteria={}\".format(iter_idx, cond_criteria),\n \"warning\",\n )\n else:\n self.log(\n \"Iter {}: Can't reduce cond_criteria any more.\".format(\n iter_idx\n ),\n \"warning\",\n )\n # give it ten tries to start converging\n prev_fqb = []\n\n # save and return\n out = dict(\n qb=qb,\n inv_fish=inv_fish,\n fqb=fqb,\n bin_def=self.bin_def,\n bin_weights=self.bin_weights,\n iters=iter_idx,\n success=success,\n map_tags=self.map_tags,\n map_freqs=self.map_freqs,\n converge_criteria=converge_criteria,\n cond_noise=cond_noise,\n cond_criteria=cond_criteria,\n null_first_cmb=null_first_cmb,\n apply_gcorr=self.apply_gcorr,\n weighted_bins=self.weighted_bins,\n )\n\n if \"fg_tt\" in self.bin_def:\n out.update(\n delta_beta_prior=delta_beta_prior,\n beta_fit=beta_fit,\n beta_err=beta_err,\n ref_freq=self.ref_freq,\n beta_ref=self.beta_ref,\n )\n\n if self.debug:\n out.update(\n cbl=cbl,\n cls_obs=obs,\n cls_signal=self.cls_signal,\n cls_noise=self.cls_noise,\n cls_model=cls_model,\n cls_shape=self.cls_shape,\n cond_noise=cond_noise,\n Dmat_obs=self.Dmat_obs,\n )\n\n if not transfer_run:\n out.update(qb_transfer=self.qb_transfer)\n if self.template_cleaned:\n out.update(template_alpha=self.template_alpha)\n\n if success and not transfer_run:\n # do one more fisher calc that doesn't include sample variance\n # set qb=very close to 0. 
0 causes singular matrix problems.\n # don't do this for noise residual bins\n self.log(\"Calculating final Fisher matrix without sample variance.\", \"info\")\n qb_zeroed = copy.deepcopy(qb)\n qb_new_ns = copy.deepcopy(qb)\n for comp in [\"cmb\", \"fg\"]:\n for spec in self.specs:\n stag = \"{}_{}\".format(comp, spec)\n if stag not in qb_zeroed:\n continue\n qb_zeroed[stag][:] = 1e-20\n qb_new_ns[stag][:] = 1.0\n if \"delta_beta\" in qb:\n qb_zeroed[\"delta_beta\"][:] = 1e-20\n qb_new_ns[\"delta_beta\"][:] = 0\n\n _, inv_fish_ns = self.fisher_calc(\n qb_zeroed,\n cbl,\n obs,\n cls_noise=nell,\n cls_debias=debias,\n cond_noise=None,\n delta_beta_prior=None,\n null_first_cmb=null_first_cmb,\n )\n\n out.update(\n invfish_nosampvar=inv_fish_ns,\n )\n\n # compute window functions for CMB bins\n self.log(\"Calculating window functions for CMB bins\", \"info\")\n wbl_qb = self.fisher_calc(\n qb,\n cbl,\n obs,\n cls_noise=nell,\n cls_debias=None,\n cond_noise=None,\n delta_beta_prior=delta_beta_prior,\n null_first_cmb=null_first_cmb,\n windows=True,\n inv_fish=inv_fish,\n )\n out.update(wbl_qb=wbl_qb)\n\n # compute bandpowers and covariances\n cb, dcb, ellb, cov, qb2cb, wbl_cb = self.do_qb2cb(qb, inv_fish, wbl_qb)\n _, dcb_ns, _, cov_ns, _, _ = self.do_qb2cb(qb, inv_fish_ns, wbl_qb)\n\n out.update(\n cb=cb,\n dcb=dcb,\n ellb=ellb,\n cov=cov,\n qb2cb=qb2cb,\n wbl_cb=wbl_cb,\n dcb_nosampvar=dcb_ns,\n cov_nosampvar=cov_ns,\n )\n\n if like_profiles:\n # compute bandpower likelihoods\n self.log(\"Calculating bandpower profile likelihoods\", \"info\")\n max_like = self.fisher_calc(\n qb,\n cbl,\n obs,\n cls_noise=nell,\n cond_noise=None,\n delta_beta_prior=delta_beta_prior,\n null_first_cmb=null_first_cmb,\n likelihood=True,\n )\n\n dqb = pt.arr_to_dict(np.sqrt(np.abs(np.diag(inv_fish))), qb)\n qb_like = OrderedDict()\n\n for stag, qbs in qb.items():\n qb_like[stag] = np.zeros(\n (len(qbs), 2, like_profile_points), dtype=float\n )\n\n for ibin, q in enumerate(qbs):\n qb1 = copy.deepcopy(qb)\n dq = dqb[stag][ibin] * like_profile_sigma\n q_arr = np.linspace(q - dq, q + dq, like_profile_points)\n like_arr = np.zeros_like(q_arr)\n\n for iq, q1 in enumerate(q_arr):\n qb1[stag][ibin] = q1\n try:\n like = self.fisher_calc(\n qb1,\n cbl,\n obs,\n cls_noise=nell,\n cond_noise=None,\n delta_beta_prior=delta_beta_prior,\n null_first_cmb=null_first_cmb,\n likelihood=True,\n )\n except np.linalg.LinAlgError:\n like = np.nan\n\n like_arr[iq] = like\n\n self.log(\n \"{} bin {} delta qb {} delta like: {}\".format(\n stag, ibin, q1 - q, like - max_like\n ),\n \"debug\",\n )\n\n qb_like[stag][ibin] = np.vstack([q_arr, like_arr])\n\n out.update(max_like=max_like, qb_like=qb_like)\n\n if not success:\n save_name = \"ERROR_{}\".format(save_name)\n self.log(msg, \"error\")\n self.warn(msg)\n\n return self.save_data(\n save_name, map_tag=map_tag, bp_opts=True, extra_tag=file_tag, **out\n )",
"def __iter__(self):\n cursor=0\n while cursor<len(self):\n yield self._item[cursor].key\n cursor+=1",
"def cb(xk):\n self.iteration += 1\n t_current = time.time()\n t_elapsed = t_current - self.t_store\n self.t_store = t_current\n \n self.of_list.append(self.of_last)\n self.params = xk\n self._disp(t_elapsed)\n\n # Call the custom callback function if any\n if callback is not None:\n callback(self)",
"def iterator(self):\n return self.KeyIterator()",
"def add(self, key):\n if key in self:\n return True\n if not self.filters:\n filter = RedisLocalBloomFilter(\n server=self.server, \n bfkeypreffix = self.FILTER_KEY_FMT % (self.bfkeypreffix, self.filter_count),\n capacity=self.initial_capacity,\n error_rate=self.error_rate * (1.0 - self.ratio))\n self.filter_count += 1\n self.filters.append(filter)\n else:\n filter = self.filters[-1]\n if filter.count >= filter.capacity:\n capacity = filter.capacity * self.scale\n if capacity > MAX_PER_SLICE_SIZE:\n capacity = MAX_PER_SLICE_SIZE\n filter = RedisLocalBloomFilter(\n server=self.server,\n bfkeypreffix = self.FILTER_KEY_FMT % (self.bfkeypreffix, self.filter_count),\n capacity=capacity,\n error_rate=self.error_rate * (1.0 - self.ratio))\n self.filter_count += 1\n self.filters.append(filter)\n if self.max_filters > 0 and len(self.filters) >= self.max_filters:\n f = self.filters[0]\n f.clear()\n del self.filters[0]\n filter.add(key, skip_check=True)\n return False",
"def __iter__(self):\n # This could be as simple as \"return self._getKeyList().__iter__()\"\n # but this performs some extra consistency checking to make sure the\n # key we iterate to actually exists, to keep us from crashing if\n # our db is a little out of sync with itself.\n\n # This is a nasty hack because our db seems prone to circular links\n nItems = 0\n for item in self._getKeyList():\n if item in self:\n yield item\n nItems += 1\n # NASTY HACK!\n if nItems > 1000:\n self.reindex()\n raise Exception(\"Circular link corrected, try again\")\n else:\n self._delKey(item)",
"def next_window(self) -> Iterator[Optional[np.ndarray]]:\n while self._count >= self._window_width:\n # Preserve what we want to return by copying it.\n p1 = np.copy(self._data_store[:self._window_width, :])\n\n # Remove the data we don't need any more from the front of the buffer.\n frames_to_keep = self._count - self._window_step\n self._data_store[:frames_to_keep,\n :] = self._data_store[self._window_step:self._count, :]\n self._count -= self._window_step\n yield p1",
"def store(self,key,start,end,data):\n\n pass",
"def filter_keys(self):\n filters = self.args.keyfilter.split('.')\n self.logger.info(u'Filtering with:{f}'.format(f=filters))\n data = self.inputdata\n newdata = {}\n for key, value in data.items():\n self.logger.info(u'\\nProcessing Key:{k}'.format(k=key))\n returned_data = dict_key_filter(key, value, filters, self.logger)\n if bool(returned_data):\n newdata[key] = returned_data\n self.logger.info(u'Data After filter:{d}'.format(d=newdata))\n self.outputdata = newdata",
"def inc(self, key):\n if key in self.cache:\n curr_freq = self.cache[key]\n self.freq[curr_freq].remove(key)\n\n if len(self.freq[curr_freq]) == 0:\n del self.freq[curr_freq]\n\n curr_freq += 1\n self.freq[curr_freq].add(key)\n self.cache[key] = curr_freq\n\n else:\n self.cache[key] = 1\n self.freq[1].add(key)",
"def post_prepared_commit(self, key, prepared):\n docs = self.__splitprepared(prepared)\n docs[0][\"key\"] = key\n return self.client.post_commit(docs[0], docs[1])",
"def filter(self, key):\n with suppress(KeyError):\n yield from self.data[key]",
"def run(self, iter: int = -1):\n try:\n while iter != 0:\n self.iteration_count += 1\n iso = self._iso_observe()\n self._propagate(iso)\n iter -= 1\n except _FinishedObserving:\n return True\n except _Contradiction:\n return False",
"def add(self, key, skip_check=False):\n bits_per_slice = self.bits_per_slice\n hashes = self.make_hashes(key)\n found_all_bits = True\n if self.count > self.capacity:\n raise IndexError(\"RedisLocalBloomFilter is at capacity\")\n pipe = self.server.pipeline(transaction=False) \n sliceIdx = 0\n for k in hashes:\n sliceKey = self.SLICE_KEY_FMT % (self.bfkeypreffix, sliceIdx)\n pipe.setbit(sliceKey, k, 1)\n sliceIdx += 1\n pipeResults = pipe.execute()\n if not skip_check:\n for pipeResult in pipeResults:\n if not pipeResult:\n found_all_bits = False\n break\n if skip_check:\n self.count += 1\n return False\n elif not found_all_bits:\n self.count += 1\n return False\n else:\n return True",
"def __init__(self):\n super(KeyIterator, self).__init__()\n self.iterator = self.ValueIterator()"
] | [
"0.557857",
"0.5078635",
"0.49877542",
"0.4933926",
"0.4932952",
"0.48787275",
"0.47174305",
"0.46994108",
"0.4630205",
"0.46070197",
"0.45404267",
"0.45206273",
"0.4477999",
"0.44704136",
"0.4470278",
"0.44687983",
"0.44679555",
"0.4457995",
"0.44183904",
"0.44140702",
"0.43917415",
"0.43813822",
"0.43755758",
"0.43646127",
"0.4361089",
"0.4354573",
"0.43527192",
"0.43489787",
"0.4333391",
"0.4332018"
] | 0.5583304 | 0 |
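A sketch of the incremental dump/restore loop implied by the ``cfScandump`` description above, assuming the client returns the (iter, data) pairs as documented and that the companion ``cfLoadChunk`` method is available:

    # Dump the filter in chunks until the sentinel (0, NULL) pair is returned
    it, chunks = 0, []
    while True:
        it, data = client.cfScandump('cf:users', it)
        if it == 0:
            break
        chunks.append((it, data))
    # Replay the chunks into a fresh key (CF.LOADCHUNK)
    for it, data in chunks:
        client.cfLoadChunk('cf:restored', it, data)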
Initializes a CountMin Sketch ``key`` to dimensions (``width``, ``depth``) specified by the user. | def cmsInitByDim(self, key, width, depth):
params = [key, width, depth]
return self.execute_command(self.CMS_INITBYDIM, *params) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __init__(self, k:int, **kwargs):\n self.k = k",
"def __init__(self, width = 40):\n self.width = width\n self.state = 0\n self.total = 0",
"def __init__(self, key):\n self.key = key\n self.BLOCK_SIZE = 16",
"def __init__(self, k_d, k_s=0., p=20., k_m=0., k_a=None):\n # TODO A5 (Step2) implement this function\n # Check if each property is an array of shape (h, w, 3)\n # If so, then apply the property using the uv coordinates supplied by the geometry.\n self.k_d = k_d\n self.k_s = k_s\n self.p = p\n self.k_m = k_m\n self.k_a = k_a if k_a is not None else k_d",
"def __init__(self, top_k: int = 1) -> None:\n self.top_k = top_k",
"def __init__(self, d=1):\r\n self.depth = d",
"def __init__(self, poss_keys, poss_vals):\n self.Poss_Tree = {x: list(POSS_DIGITS) for x in poss_keys}\n self.place = len(str(poss_keys[0]))",
"def __init__(self, k=1):\n self.k = k\n self.x = None\n self.y = None\n self.classes_ = None",
"def __init__(self, pad_size, input_size, pre_pad=False):\n self.pre_pad = pre_pad\n self.pad_size = pad_size\n self.input_size = input_size\n\n self.build()",
"def __init__(self, key: bytearray):\n self.__key = key\n self.__KSA(bytearray([i for i in range(256)]))",
"def __init__(self, ksize, stride=None):\n \n self._ksize = (ksize, ksize) if isinstance(ksize, int) else ksize\n self._pad = (0, 0, 0, 0)\n self._stride = stride\n \n if stride is None:\n self._stride = tuple(self._ksize)\n elif isinstance(stride, int):\n self._stride = (stride, stride)\n \n self._X_shape = None\n self._cols = None\n self._max_idx = None",
"def __init__(self, min_player_count):\n self.min_player_count = min_player_count",
"def __init__(self, k):\n self.__start = 0\n self.__size = 0\n self.__buffer = [0] * k",
"def __init__(self, dict = {}):\r\n if dict == {}:\r\n self.zero_val()\r\n else:\r\n self.piDD = dict\r\n self.top_node = utilities.max_length_in_list(self.return_keys())\r\n if self.piDD[self.top_node] == None:\r\n self.dim = 0\r\n else:\r\n self.dim = self.piDD[self.top_node][0][0]",
"def __init__(self, ksize: torch.Tensor = 7, sigma: torch.Tensor = 5):\r\n super().__init__()\r\n self.ksize = ksize\r\n self.sigma = sigma\r\n\r\n self.conv2d_guass = get_gaussian_kernel(self.ksize, self.sigma)",
"def __init__(self):\n self._root = None\n self._size = 0\n self._curr_idx = 0\n self._depths, self._heights = None, None",
"def __init__(self, width=10, height=10, density=0.25):\n\t\tself.width = width\n\t\tself.height = height\n\t\t# create marks and mine field\n\t\tself.marks = [[CLOSED for _ in range(height)] for _ in range(width)]\n\t\tself.mines = [[random.random() < density for _ in range(height)] \n\t\t for _ in range(width)]",
"def __init__(self, env, key, factor, dim):\n gym.ObservationWrapper.__init__(self, env)\n\n self.key = key\n self.factor = factor\n self.dim = dim\n\n space = self.observation_space.spaces[self.key]\n shape = list(space.shape)\n\n for d in dim:\n shape[d] *= self.factor\n\n shape = np.asarray(shape, dtype=np.int)\n\n self.observation_space.spaces[self.key] = gym.spaces.Box(0, 255, shape, dtype=np.float32)",
"def __init__(self, height, width):\n # number of keypoint kind\n self.kpn = 4\n # max output object in one image\n self.maxDet = 20\n # object detect threshold, confidence\n self.obj_thr = 0.5\n # peak detect threshold, unit pixel\n self.peak_thr = 0.5\n # see threshold\n self.see_thr = 0.8\n # peak close threshold, unit pixel\n self.close_thr = 1.0\n self.height = height\n self.width = width\n # assit array\n self.x_array = np.tile(np.arange(self.width), (self.height, 1))\n self.y_array = np.tile(np.arange(self.height).reshape(-1, 1),\n (1, self.width))",
"def __init__(self, width: int, height: int, food: List[List[int]]):\n self.n = height\n self.m = width\n self.dirs = {'L': [0, -1], 'U': [-1, 0], 'R': [0, 1], 'D': [1, 0]}\n self.food = collections.deque(food)\n self.snake_set = {(0, 0)}\n self.snake = collections.deque([(0, 0)])",
"def DEFAULT_MIN_DEPTH(self): # real signature unknown; restored from __doc__\n pass",
"def _init(self, key, name):\n\n self.key = key\n self.name = name\n\n self._state = Node.State.INVALID\n self._value = None\n\n # Keyword and positional arguments to compute_value.\n self._args = ObservableList()\n self._kwargs = ObservableDict()\n\n self.args.listeners.add(self._on_args_changed)\n self.kwargs.listeners.add(self._on_kwargs_changed)\n\n # Map Nodes to the number of times they appear in this Node's\n # arguments.\n self._arg_refcount = {}\n\n # Nodes whose values depend on this Node.\n self._dependents = set()\n\n if NodeCallStack.stack:\n self._created_by = NodeCallStack.stack[-1]\n\n NodeCreateEvent(self)\n\n NodeCallStack._push(self)",
"def __init__(self, k: int, training_set: np.ndarray):\n self._k = k\n self._training_set = training_set",
"def __init__(self,\n size: int,\n counter_num: int,\n time_window: float,\n update_sample_size: int=5):\n super().__init__(size)\n\n self.__counters = []\n for i in range(counter_num):\n sketch = FullCounter()\n self.__counters.append(sketch)\n\n self.__time_window = time_window\n self.__processed_windows = 0\n self.__from_window_start = 0.0\n\n self.__priority_dict = PriorityDict()\n\n self.__update_sample_size = update_sample_size",
"def __init__(self, N, K, sliding_window=True):\n self.K = K\n self.N = N\n self.sliding_window = sliding_window",
"def __init__(self, k, p, sample_p=1):\n # Maximum sample size\n self.k = k\n\n # A dictionary containing the sampled elements\n # The dictionary key is the key of the element\n # The value is a tuple (seed, count)\n self.elements = {}\n\n # The function of the frequencies that the sketch estimates\n # For now it's the p-th frequency moment, but in the future we may\n # support other functions (passed as a parameter)\n self.func_of_freq = lambda x: x**p\n\n # The power of values used for the sampling weights\n self.sample_p = sample_p",
"def __init__(self, ksize_low, ksize_high=None): \n self._sigma_low = 0.3*(ksize_low//2 - 1) + 0.8\n \n if ksize_high is None:\n self._sigma_high = np.sqrt(2)*self._sigma_low\n else:\n self._sigma_high = 0.3*(ksize_high//2 - 1) + 0.8",
"def __init__(self, dimensions=2):\n assert dimensions > 0\n for d in range(0,dimensions+1):\n self.weight.append(0)",
"def __init__(self, width, length):\n self.width = width\n self.length = length",
"def __init__(self, k):\n self.k = k\n self.N = 2**self.k"
] | [
"0.5303504",
"0.5293798",
"0.52392584",
"0.5238729",
"0.52039605",
"0.516473",
"0.51517797",
"0.5144335",
"0.5137341",
"0.5133678",
"0.51300824",
"0.5129396",
"0.51235527",
"0.5118941",
"0.508737",
"0.5040831",
"0.5036856",
"0.50319624",
"0.50160795",
"0.50102866",
"0.5008397",
"0.49952224",
"0.4993463",
"0.49930155",
"0.49906266",
"0.49699995",
"0.49453956",
"0.49380907",
"0.49358666",
"0.49339226"
] | 0.7107055 | 0 |
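Usage sketch for ``cmsInitByDim`` above (wraps CMS.INITBYDIM), with hypothetical dimensions:

    # 2000 counters per row and 10 hashed rows: width trades memory for lower
    # per-count error, depth lowers the probability of a bad estimate
    client.cmsInitByDim('cms:pageviews', 2000, 10)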
Initializes a CountMin Sketch ``key`` to characteristics (``error``, ``probability``) specified by the user. | def cmsInitByProb(self, key, error, probability):
params = [key, error, probability]
return self.execute_command(self.CMS_INITBYPROB, *params) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def initialize(X, k):\n if not isinstance(X, np.ndarray) or X.ndim != 2:\n return None, None, None\n if not isinstance(k, int) or k <= 0:\n return None, None, None\n _, d = X.shape\n C, clss = kmeans(X, k)\n pi = 1 / k * np.ones(k)\n m = C\n S = np.array([np.identity(d)] * k)\n return pi, m, S",
"def __init__(self, k, p, sample_p=1):\n # Maximum sample size\n self.k = k\n\n # A dictionary containing the sampled elements\n # The dictionary key is the key of the element\n # The value is a tuple (seed, count)\n self.elements = {}\n\n # The function of the frequencies that the sketch estimates\n # For now it's the p-th frequency moment, but in the future we may\n # support other functions (passed as a parameter)\n self.func_of_freq = lambda x: x**p\n\n # The power of values used for the sampling weights\n self.sample_p = sample_p",
"def __init__(self, probability, nodeKeys):\n self.probability = float(probability)\n self.nodeKeys = nodeKeys",
"def __init__(self, kp, ki, kd):\n self.kp = kp\n self.ki = ki\n self.kd = kd\n self.error_last = 0\n self.error_sum = 0\n self.delta_error = 0",
"def set_min_prob(self, disease, probability):\n self.min_probs[disease] = probability",
"def __init__(self, probability: float):\n super().__init__()\n\n # store input parameters\n self.probability = probability",
"def __init__(self, key: bytearray):\n self.__key = key\n self.__KSA(bytearray([i for i in range(256)]))",
"def __init__(self,k,data,max_guess=(100,100),min_guess=(-100,-100)):\r\n\t\tself.k = k\r\n\t\tself.data = data\r\n\t\tself.max_guess = max_guess\r\n\t\tself.min_guess = min_guess",
"def initialise_source(self, c, key):\n if key == 'p':\n return 1e5\n elif key == 'h':\n if self.Q.val < 0 and self.Q.is_set:\n return 1e5\n elif self.Q.val > 0 and self.Q.is_set:\n return 5e5\n else:\n return 3e5",
"def __init__(self, key, initial_prng):\n self.cipher = key\n self.prng = initial_prng\n self.nonce = None",
"def __init__(self, k: int, training_set: np.ndarray):\n self._k = k\n self._training_set = training_set",
"def __init__(\n self, seq: Sequence, probabilities: Optional[List[float]] = None, k: int = 0\n ):\n super().__init__()\n\n # store input parameters\n self.seq = seq\n self.probabilities = probabilities\n self.k = k",
"def __init__(self, key=None):\n self.key = key",
"def __init__(self, k: int) -> None:\n\n assert k > 2, \"for k = 2 use Bernoulli distribution.\"\n\n self.k = k",
"def __init__(self, weights, keys=None):\n n = self.n = len(weights)\n if keys is None:\n self.keys = keys\n else:\n self.keys = array(keys)\n \n if isinstance(weights, (list, tuple)):\n weights = array(weights, dtype=float)\n elif isinstance(weights, numpy.ndarray):\n if weights.dtype != float:\n weights = weights.astype(float)\n else:\n weights = array(list(weights), dtype=float)\n \n if weights.ndim != 1:\n raise ValueError(\"weights must be a vector\")\n \n weights = weights * n / weights.sum()\n \n inx = -ones(n, dtype=int)\n short = where(weights < 1)[0].tolist()\n long = where(weights > 1)[0].tolist()\n while short and long:\n j = short.pop()\n k = long[-1]\n \n inx[j] = k\n weights[k] -= (1 - weights[j])\n if weights[k] < 1:\n short.append( k )\n long.pop()\n \n self.prob = weights\n self.inx = inx",
"def initialize(X, k):\n\n if not isinstance(X, np.ndarray) or X.ndim != 2:\n return None\n\n # n: number of dada points\n # d: dimension of each data point\n n, d = X.shape\n # print(X.shape)\n # print(X)\n\n if not isinstance(k, int) or k <= 0 or k > n:\n return None\n\n # Sample k centroids from a random.uniform distribution;\n # output is an array of coordinates\n C = np.random.uniform(low=np.min(X, axis=0),\n high=np.max(X, axis=0),\n size=(k, d))\n return C",
"def __init__(self, min_player_count):\n self.min_player_count = min_player_count",
"def initialise_source(self, c, key):\n return 0",
"def __init__(self, x=None):\n # Unpack the parameters or use default values.\n if x is None:\n self.nt_probs = np.ones(4) / 4\n self.kappa = 2.0\n self.penalty = 0\n else:\n info = self._unpack_params(x)\n self.nt_probs, self.kappa, self.penalty = info\n\n # Mark some downstream attributes as not initialized.\n self._invalidate()",
"def initialise_target(self, c, key):\n if key == 'p':\n return 1e5\n elif key == 'h':\n if self.Q.val < 0 and self.Q.is_set:\n return 5e5\n elif self.Q.val > 0 and self.Q.is_set:\n return 1e5\n else:\n return 3e5",
"def __init__(self, k, hash_func, p, advice_obj):\n # Maximum sample size\n self.k = k\n\n # The following hash function defines all the randomness used for\n # picking the sample\n self.hash_func = hash_func\n\n # A dictionary containing the sampled elements\n # The dictionary key is the key of the element\n # The value is a tuple (seed, count)\n self.elements = {}\n\n # The advice object\n self.advice_obj = advice_obj\n\n # The function of the frequencies that the sketch estimates\n # For now it's the p-th frequency moment, but in the future we may\n # support other functions (passed as a parameter)\n self.func_of_freq = lambda x: x**p",
"def __init__(self, top_k: int):\n self._topk_acc_dict: Dict[int, Mean] = defaultdict(Mean)\n self.top_k: int = top_k\n\n self.__torchmetrics_requires_task = version.parse(\n torchmetrics.__version__\n ) >= version.parse(\"0.11.0\")",
"def __init__(self, kp, ki, kd, ts):\n self.__kp = kp # Controller's P constant\n self.__kd = kd / ts # Controller's D constant\n self.__ki = ki * ts # Controller's I constant\n self.__ts = ts # Controller's sampling time\n self.__err_previous = None # Controller's previous error (there is no error before t = 0s)\n self.__error_sum = 0 # Controller's cumulative error",
"def __init__(self, k=1):\n self.k = k\n self.x = None\n self.y = None\n self.classes_ = None",
"def __init__(self, key):\n self.key = key",
"def init(self, rng_key, num_warmup, init_params, model_args, model_kwargs):\n raise NotImplementedError",
"def __init__(self, k:int, **kwargs):\n self.k = k",
"def init(self, start_sample, fhat, budget):\n self.proposed_points = start_sample\n self.n0 = start_sample.shape[0]\n self.budget = budget\n self.fhat = fhat",
"def __init__(__self__, *,\n key_data: pulumi.Input[str]):\n pulumi.set(__self__, \"key_data\", key_data)",
"def initialize(self, k, stats):\n\n k = k + 5\n\n qbin_sizes = 0.5 / k # Quantile sizes\n qbin_edges = 0.25 + qbin_sizes*np.arange(0, k+1) # Edge locations (in quantile terms)\n\n bin_edges = np.interp(qbin_edges, stats['quantile_basis'], stats['quantiles'])\n\n self.k = k\n self.n_bins = k + 2\n self.classes = list(range(1, self.n_bins + 2))\n self.edges = [-np.Inf] + [edge for edge in bin_edges] + [np.Inf]\n self.chi = np.zeros((2, self.n_bins + 1))\n\n dist = np.linspace(2, 1, self.n_bins) # Bins captured by observations\n scaled_dist = 0.9 * dist / dist.sum() # Scaling by 0.9 to allow for 0.1 emission prob of NaN\n self.chi[1, :-1] = scaled_dist # Paired emission dist\n self.chi[0, :-1] = np.flip(scaled_dist) # Unpaired emission dist\n self.chi[1, -1] = 0.1 # NaN observations\n self.chi[0, -1] = 0.1 # NaN observations\n\n self.n_params = 2*(self.n_bins-2)"
] | [
"0.5790586",
"0.5750808",
"0.55292356",
"0.5522919",
"0.5522306",
"0.53946745",
"0.5369128",
"0.53256714",
"0.5308711",
"0.5307491",
"0.5304574",
"0.52648705",
"0.5253688",
"0.5247738",
"0.52242655",
"0.52212495",
"0.52023345",
"0.518744",
"0.5175941",
"0.51755065",
"0.5169663",
"0.515917",
"0.5135909",
"0.5134228",
"0.5133494",
"0.5123331",
"0.5118834",
"0.509812",
"0.50863886",
"0.5085439"
] | 0.6565891 | 0 |
Adds/increases ``items`` to a CountMin Sketch ``key`` by ``increments``. Both ``items`` and ``increments`` are lists. Example: cmsIncrBy('A', ['foo'], [1]) | def cmsIncrBy(self, key, items, increments):
params = [key]
self.appendItemsAndIncrements(params, items, increments)
return self.execute_command(self.CMS_INCRBY, *params) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def incr(self, key, delta=1, callback=None):\n self._incrdecr(\"incr\", key, delta, callback=callback)",
"def incr(self, key, delta=1):\n\t\treturn self._incrdecr(\"incr\", key, delta)",
"def increase(self, key:str) -> None:\n\n hash_key = self.hash_key(key)\n head = self.array[hash_key] \n \n while head.next: \n if head.next.key == key:\n head.next.value +=1\n head = head.next",
"def __inc__(self, key, value):\n # TODO: value is not used in this method. Can it be removed?\n if key in ['upper_index', 'lower_index']:\n inc = self.num_cells\n elif key in ['shared_boundaries']:\n inc = self.num_cells_down\n elif key == 'shared_coboundaries':\n inc = self.num_cells_up\n elif key == 'boundary_index':\n boundary_inc = self.num_cells_down if self.num_cells_down is not None else 0\n cell_inc = self.num_cells if self.num_cells is not None else 0\n inc = [[boundary_inc], [cell_inc]]\n else:\n inc = 0\n if inc is None:\n inc = 0\n\n return inc",
"def inc(self, key):\n if key in self.keyCountMap:\n self._updateCount(key, 1)\n else:\n self.keyCountMap[key] = 1\n if self.head.next.count != 1:\n self._addBucketAfter(Bucket(1), self.head)\n self.head.next.keySet.add(key)\n self.countBucketMap[1] = self.head.next",
"def incr_proof_item(item, start, n):\n item.id = incr_id_after(item.id, start, n)\n item.prevs = [incr_id_after(id, start, n) for id in item.prevs]\n if item.subproof:\n for subitem in item.subproof.items:\n incr_proof_item(subitem, start, n)",
"def incr(self, key, delta=1):\r\n if delta < 0:\r\n return self._incrdecr(\"decr\", key, -delta)\r\n else:\r\n return self._incrdecr(\"incr\", key, delta)",
"def inc(self, key: str) -> None:\n if key not in self.bucket_of_keys:\n self.bucket_of_keys[key] = self.buckets.insert(self.buckets.begin(), Node(0, {key}))\n bucket, next_bucket = self.bucket_of_keys[key], self.bucket_of_keys[key].next\n if next_bucket is self.buckets.end() or next_bucket.value > bucket.value + 1:\n next_bucket = self.buckets.insert(next_bucket, Node(bucket.value + 1, set()))\n next_bucket.keys.add(key)\n self.bucket_of_keys[key] = next_bucket\n\n bucket.keys.remove(key)\n if not bucket.keys:\n self.buckets.erase(bucket)",
"def __iadd__(self, increment):\n self.update(self.val + increment)\n return self",
"def inc(self, key):\n if key in self.cache:\n curr_freq = self.cache[key]\n self.freq[curr_freq].remove(key)\n\n if len(self.freq[curr_freq]) == 0:\n del self.freq[curr_freq]\n\n curr_freq += 1\n self.freq[curr_freq].add(key)\n self.cache[key] = curr_freq\n\n else:\n self.cache[key] = 1\n self.freq[1].add(key)",
"def inc(self, key: str) -> None:\n if key not in self.mapping:\n cur_block = self.head\n else:\n cur_block = self.mapping[key]\n cur_block.keys.remove(key)\n\n if cur_block.val + 1 != cur_block.next.val:\n new_block = Block(cur_block.val + 1)\n cur_block.insert_after(new_block)\n else:\n new_block = cur_block.next\n new_block.keys.add(key)\n self.mapping[key] = new_block\n\n if not cur_block.keys and cur_block.val != 0:\n cur_block.remove()",
"def inc(self, key: str) -> None:\n if key in self.keyCnt:\n self.changeKey(key, 1)\n else:\n self.keyCnt[key] = 1\n # 说明没有计数为1的节点,在self.head后面加入\n if self.head.next.cnt != 1:\n self.addNodeAfter(Node(1), self.head)\n self.head.next.keySet.add(key)\n self.cntKey[1] = self.head.next",
"def handle_incr(self, api, command):\n key = self._sandboxed_key(api.sandbox_id, command.get('key'))\n if not (yield self.check_keys(api, key)):\n returnValue(self._too_many_keys(command))\n amount = command.get('amount', 1)\n try:\n value = yield self.redis.incr(key, amount=amount)\n except Exception, e:\n returnValue(self.reply(command, success=False, reason=unicode(e)))\n returnValue(self.reply(command, value=int(value), success=True))",
"def _insert_item_run_length_encoded(cls, incremental_items, aggregated_items,\n num_runs): # pragma: no cover\n for item in incremental_items:\n if len(aggregated_items) and item[1] == aggregated_items[0][1]:\n aggregated_items[0][0] = min(aggregated_items[0][0] + item[0], num_runs)\n else:\n aggregated_items.insert(0, item)",
"def __iterate(\n self,\n items: List[ClientWorklistItem],\n inc: Union[InitialIncClientWorklistData, IncClientWorklistData],\n ):\n if inc is None:\n return\n # append the items\n if inc.items_flat:\n items += inc.items_flat\n else:\n return\n # iterator is used up\n if inc.dropped:\n return\n\n # fetch next\n inc_cl: IncClientWorklistsApi = self.__service_provider.get_service(IncClientWorklistsApi)\n next_it: IncClientWorklistData = inc_cl.inc_client_wl_get_next(inc.inc_wl_id)\n self.__iterate(items, next_it)",
"def inc(self, key):\n # update key node\n if key not in self.hash_table:\n self.hash_table[key] = ListNode(key, 1)\n else:\n self.hash_table[key].val += 1\n node = self.hash_table[key]\n val = node.val\n\n #print 'inc', key, val\n # delete node from original List\n if node._prev:\n node._prev._next = node._next\n if node._next:\n node._next._prev = node._prev\n\n # insert node to new List\n if val not in self.count_table:\n cl_node = CountListNode()\n cl_node._next = node\n node._prev = cl_node\n node._next = None\n if not self.head.next_cl and not self.tail.prev_cl:\n self.head.next_cl = cl_node\n cl_node.prev_cl = self.head\n self.tail.prev_cl = cl_node\n cl_node.next_cl = self.tail\n else:\n if val == 1:\n next_cl_node = self.tail\n else:\n next_cl_node = self.count_table[val-1]\n cl_node.next_cl = next_cl_node\n cl_node.prev_cl = next_cl_node.prev_cl\n next_cl_node.prev_cl.next_cl = cl_node\n next_cl_node.prev_cl = cl_node\n\n #print key, val, cl_node.prev_cl == self.head\n self.count_table[val] = cl_node\n else:\n node._next = self.count_table[val]._next\n node._prev = self.count_table[val]\n self.count_table[val]._next._prev = node\n self.count_table[val]._next = node\n\n print 'inc', key, val\n if val - 1 in self.count_table and not self.count_table[val - 1]._next:\n #print key, val, val-1\n del_node = self.count_table[val-1]\n #print del_node.prev_cl._next\n del_node.prev_cl.next_cl = del_node.next_cl\n del_node.next_cl.prev_cl = del_node.prev_cl\n del (self.count_table[val - 1])\n\n for v in self.count_table:\n print 'v:',v,\n node = self.count_table[v]._next\n while node:\n print node.key, node.val\n node = node._next\n\n if self.head.next_cl:\n print 'head',self.head.next_cl._next.key, self.head.next_cl._next.val\n print '\\n'",
"def incrby(self, key, value, timeBucket=None,\n retentionSecs=None, labels={}):\n params = [key, value]\n self.appendTimeBucket(params, timeBucket)\n self.appendRetention(params, retentionSecs)\n self.appendLabels(params, labels)\n\n return self.execute_command(self.INCRBY_CMD, *params)",
"def incr(self, key, delta=1, version=None, client=None):\r\n return self._incr(key=key, delta=delta, version=version, client=client)",
"def inc(self, key):\n if key in self.key_dict:\n self.increase(key)\n return\n self.key_dict[key] = key_node = KeyNode(key, 1)\n value_node = self.value_dict.get(1)\n if value_node is None:\n self.value_dict[1] = value_node = ValueNode(1, None, self.head)\n if self.head:\n self.head.prev = value_node\n self.head = value_node\n if self.last is None:\n self.last = value_node\n self.insert_key_node(key_node)",
"def incr(self, n=1):\n return _SALOMERuntime.SALOMERuntime_PySwigIterator_incr(self, n)",
"def incr(self, n=1):\n return _libsbml.SwigPyIterator_incr(self, n)",
"def incr(self, n=1):\n return _elas.SwigPyIterator_incr(self, n)",
"def _update_prepend_key(self):\n self.prepend_key -= 1",
"def increment(self):\r\n return self.add(1)",
"def increment_counter(self) -> None:",
"def incr(self, key, delta=1):\n try:\n key = self.prepare_key(key)\n return super(CacheClass, self).incr(key, delta)\n except Exception as err:\n return self.warn_or_error(err, delta)",
"def increment(self) -> global___Expression:",
"def add_to_inv(self, item):\n for obj in self.inv:\n if obj.name == item.name:\n self.inv[obj] += 1\n break\n else:\n self.inv[item] = 1",
"def inc_counter(self, *_, **__): # pylint: disable=arguments-differ\n pass",
"def update(self, *items):\n for item in items:\n self.add(item)"
] | [
"0.5571012",
"0.54712886",
"0.5445248",
"0.53391284",
"0.53194755",
"0.53172624",
"0.5278319",
"0.52269477",
"0.52106184",
"0.51903224",
"0.5171463",
"0.5151687",
"0.5137558",
"0.5133662",
"0.5103444",
"0.5102637",
"0.5099352",
"0.5090726",
"0.5089509",
"0.5078452",
"0.5074509",
"0.50743383",
"0.50702477",
"0.50593024",
"0.5052929",
"0.5044189",
"0.49818596",
"0.49762857",
"0.49444923",
"0.49391705"
] | 0.815581 | 0 |
Merges ``numKeys`` sketches into ``destKey``. The sketches to merge are specified in ``srcKeys``. All sketches must have identical width and depth. ``weights`` can be used to multiply certain sketches; the default weight is 1. Both ``srcKeys`` and ``weights`` are lists. | def cmsMerge(self, destKey, numKeys, srcKeys, weights=[]):
params = [destKey, numKeys]
params += srcKeys
self.appendWeights(params, weights)
return self.execute_command(self.CMS_MERGE, *params) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def copySkinWeights(*args, destinationSkin: Union[AnyStr, bool]=\"\", influenceAssociation:\n Union[AnyStr, List[AnyStr], bool]=\"\", mirrorInverse: bool=True, mirrorMode:\n Union[AnyStr, bool]=\"\", noBlendWeight: bool=True, noMirror: bool=True,\n normalize: bool=True, sampleSpace: Union[int, bool]=0, smooth: bool=True,\n sourceSkin: Union[AnyStr, bool]=\"\", surfaceAssociation: Union[AnyStr,\n bool]=\"\", uvSpace: Union[List[AnyStr, AnyStr], bool]=None, q=True,\n query=True, e=True, edit=True, **kwargs)->Union[None, Any]:\n pass",
"def join_w(targs, srcs, ws):\n # convert targs/srcs to dicts if given as arrays\n if not isinstance(targs, dict):\n targs_ = copy(targs)\n targs = {\n cell_type: targs_ == cell_type for cell_type in set(targs_)\n }\n if not isinstance(srcs, dict):\n srcs_ = copy(srcs)\n srcs = {\n cell_type: srcs_ == cell_type for cell_type in set(srcs_)\n }\n \n # make sure all targ/src masks have same shape\n targ_shapes = [mask.shape for mask in targs.values()]\n src_shapes = [mask.shape for mask in srcs.values()]\n \n if len(set(targ_shapes)) > 1:\n raise Exception('All targ masks must have same shape.')\n \n if len(set(src_shapes)) > 1:\n raise Exception('All targ masks must have same shape.')\n \n n_targ = targ_shapes[0][0]\n n_src = src_shapes[0][0]\n \n # make sure weight matrix dimensions match sizes\n # of targ/src classes\n for syn, ws_ in ws.items():\n for (targ, src), w_ in ws_.items():\n if not w_.shape == (targs[targ].sum(), srcs[src].sum()):\n raise Exception(\n 'Weight matrix for {}: ({}, {}) does not match '\n 'dimensionality specified by targ/src masks.')\n \n # loop through synapse types\n dtype = list(list(ws.values())[0].values())[0].dtype\n ws_full = {}\n \n for syn, ws_ in ws.items():\n \n w = np.zeros((n_targ, n_src), dtype=dtype)\n \n # loop through population pairs\n for (targ, src), w_ in ws_.items():\n \n # get mask of all cxns from src to targ\n mask = np.outer(targs[targ], srcs[src])\n \n assert mask.sum() == w_.size\n \n w[mask] = w_.flatten()\n \n ws_full[syn] = w\n \n return ws_full",
"def transfer_weights(src_model, dest_model):\r\n # ingore the first layer Input()\r\n # layer 1-24 to 1-24\r\n for i in range(1, 24):\r\n dest_model.layers[i].set_weights(src_model.layers[i].get_weights())\r\n print(\"Partially load weights from layer 1-24 successfully!\")\r\n\r\n # layer 25-45 to 65-85\r\n for i in range(25, 45):\r\n dest_model.layers[i+40].set_weights(src_model.layers[i].get_weights())\r\n print(\"Partially load weights from layer 25-45 successfully!\")\r\n\r\n # layer 46-65 to 126-145\r\n for i in range(46, 65):\r\n dest_model.layers[i+80].set_weights(src_model.layers[i].get_weights())\r\n print(\"Partially load weights from layer 46-65 successfully!\")\r\n\r\n # 69 to 189\r\n dest_model.layers[69+120].set_weights(src_model.layers[69].get_weights())\r\n print(\"Partially load weights from layer 69 successfully!\")",
"def load_embeddings(self, weight, words,\n target_embeddings='src', verbose=False):\n if isinstance(weight, np.ndarray):\n weight = torch.from_numpy(weight)\n assert weight.size(1) == self.emb_dim, \\\n \"Mismatched embedding dim %d for model with dim %d\" % \\\n (weight.size(1), self.emb_dim)\n target_words = {word: idx for idx, word in enumerate(words)}\n for idx, word in enumerate(self.src_dict.vocab):\n if word not in target_words:\n if verbose:\n logging.warn(\"Couldn't find word [%s]\" % word)\n continue\n if target_embeddings == 'src':\n self.src_embeddings.weight.data[idx, :].copy_(\n weight[target_words[word], :])\n elif target_embeddings == 'trg':\n self.trg_embeddings.weight.data[idx, :].copy_(\n weight[target_words[word], :])\n else:\n raise ValueError('target_embeddings must be `src` or `trg`')",
"def weightKmers(self, weightDict):\n for k, w in weightDict.iteritems():\n assert k in self.kmers\n self.G.edge[k + \"_L\"][k + \"_R\"]['weight'] = w",
"def add_edge(self, src_key, dest_key, weight=1):\n self.vertices[src_key].add_neighbour(self.vertices[dest_key], weight)",
"def copyDeformerWeights(*args, destinationDeformer: Union[AnyStr, bool]=\"\", destinationShape:\n Union[AnyStr, bool]=\"\", mirrorInverse: bool=True, mirrorMode:\n Union[AnyStr, bool]=\"\", noMirror: bool=True, smooth: bool=True,\n sourceDeformer: Union[AnyStr, bool]=\"\", sourceShape: Union[AnyStr,\n bool]=\"\", surfaceAssociation: Union[AnyStr, bool]=\"\", uvSpace:\n Union[List[AnyStr, AnyStr], bool]=None, q=True, query=True, e=True,\n edit=True, **kwargs)->Union[None, Any]:\n pass",
"def copy_skin_weights(source_skin, target_skin):\n\n # gets the shape back from the source_skin and target_skin\n # need to do this as providing the sourceSkin and destinationSkin arguments\n # to the copySkinWeights command does not update correctly the shapes\n\n source_shape = cmds.ls(cmds.listHistory(\"{}.outputGeometry\".format(\n source_skin), pdo=False, future=True), dag=True,\n noIntermediate=True)\n target_shape = cmds.ls(cmds.listHistory(\n \"{}.outputGeometry\".format(target_skin),\n pdo=False, future=True), dag=True,\n noIntermediate=True)\n\n # checks if source and target shapes list are bigger than 1\n if len(source_shape) > 1:\n source_shape = source_shape[0]\n if len(target_shape) > 1:\n target_shape = target_shape[0]\n\n cmds.select(source_shape, target_shape)\n\n # copy skin command\n cmds.copySkinWeights(surfaceAssociation=\"closestPoint\", noMirror=True,\n influenceAssociation=(\"label\",\n \"closestJoint\",\n \"oneToOne\"))\n\n # forces refresh\n cmds.refresh()",
"def hard_copy_weights(self, target, source):\n for target_param, param in zip(target.parameters(), source.parameters()):\n target_param.data.copy_(param.data)",
"def add_images_weighted(input1:Image, input2:Image, output :Image = None, weight1:float=1, weight2:float=1):\n\n parameters = {\n \"src\":input1,\n \"src1\":input2,\n \"dst\":output,\n \"factor\":float(weight1),\n \"factor1\":float(weight2)\n };\n\n execute(__file__, 'add_images_weighted_' + str(len(output.shape)) + 'd_x.cl', 'add_images_weighted_' + str(len(output.shape)) + 'd', output.shape, parameters);\n\n return output",
"def merge_images(sources, targets, opts, k=10):\n _, _, h, w = sources.shape\n row = int(np.sqrt(opts.batch_size))\n merged = np.zeros([3, row*h, row*w*2])\n for idx, (s, t) in enumerate(zip(sources, targets)):\n i = idx // row\n j = idx % row\n merged[:, i*h:(i+1)*h, (j*2)*h:(j*2+1)*h] = s\n merged[:, i*h:(i+1)*h, (j*2+1)*h:(j*2+2)*h] = t\n return merged.transpose(1, 2, 0)",
"def apply_weights(src, dest_shape, n_s, n_b, row, col, s):\n\n dest = np.ndarray(dest_shape).flatten()\n dest[:] = 0.0\n src = src.flatten()\n\n for i in range(n_s):\n dest[row[i]-1] = dest[row[i]-1] + s[i]*src[col[i]-1]\n\n return dest.reshape(dest_shape)",
"def distribute_by_weights(path: Tensor, nimages: int, path_target: Tensor = None, weights: Tensor = None, climbing_pivots: list = None):\n # Ensure storage for coordinates\n if path_target is None:\n path_target = path.new(nimages, path.shape[1])\n else:\n assert path_target is not path, \"Source must be unequal to target for redistribution\"\n assert path_target.shape[0] == nimages\n # Ensure weights\n if weights is None:\n weights = path.new(nimages - 1).fill_(1)\n else:\n assert len(weights.shape) == 1\n assert weights.shape[0] == nimages - 1\n\n # In climbing mode, reinterpolate only between the climbing images\n if climbing_pivots is not None:\n assert path.shape[0] == nimages, \"Cannot change number of items when reinterpolating with respect to climbing images.\"\n assert len(climbing_pivots) == nimages\n assert all(isinstance(b, bool) for b in climbing_pivots), \"Image must be climbing or not.\"\n start = 0\n for i, is_climbing in enumerate(climbing_pivots):\n if is_climbing or i == nimages - 1:\n distribute_by_weights(path[start:i + 1], i + 1 - start, path_target[start:i + 1], weights[start:i])\n start = i\n return path_target\n\n if path is path_target:\n # For the computation the original path is necessary\n path_source = path.clone()\n else:\n path_source = path\n\n # The current distances between elements on chain\n current_distances = (path_source[:-1] - path_source[1:]).norm(2, 1)\n target_positions = (weights / weights.sum()).cumsum(0) * current_distances.sum() # Target positions of elements (spaced by weights)\n\n # Put each new item spaced by weights (measured along line) on the line\n last_idx = 0 # Index of previous pivot\n pos_prev = 0. # Position of previous pivot on chain\n pos_next = current_distances[last_idx].item() # Position of next pivot on chain\n path_target[0] = path_source[0]\n for i in range(1, nimages - 1):\n position = target_positions[i - 1].item()\n while position > pos_next:\n last_idx += 1\n pos_prev = pos_next\n pos_next += current_distances[last_idx].item()\n\n t = (position - pos_prev) / (pos_next - pos_prev)\n path_target[i] = (t * path_source[last_idx + 1] + (1 - t) * path_source[last_idx])\n path_target[nimages - 1] = path_source[-1]\n\n return path_target",
"def linear_interpolation_keys(self, keys):\n if len(keys) != len(self.dims):\n raise ValueError(\"Number of keys must be equal to the number of\" +\n \" dimensions. (Got \" + str(len(keys)) + \"/\"\n + str(len(self.dims)) + \")\")\n \n weightedKeys = []\n for key, dim in zip(keys, self.dims):\n weightedKeys.append(dim.linear_interpolation_indexes(key))\n \n while len(weightedKeys) > 1:\n newKeys = []\n for key1 in weightedKeys[-2]:\n for key2 in weightedKeys[-1]:\n newKeys.append({'key':key1['key'] + key2['key'],\n 'weight':key1['weight']*key2['weight']})\n weightedKeys.pop(-1)\n weightedKeys[-1] = newKeys\n\n return weightedKeys[0]",
"def set_weights(self, weights):\n tuples = []\n for layer in self.layers:\n num_param = len(layer.weights)\n layer_weights = weights[:num_param]\n for sw, w in zip(layer.weights, layer_weights):\n tuples.append((sw, w))\n weights = weights[num_param:]\n K.batch_set_value(tuples)",
"def combine_cache_keys(cache_keys):\r\n if len(cache_keys) == 1:\r\n return cache_keys[0]\r\n else:\r\n combined_id = Target.maybe_readable_combine_ids(cache_key.id for cache_key in cache_keys)\r\n combined_hash = hash_all(sorted(cache_key.hash for cache_key in cache_keys))\r\n combined_num_sources = sum(cache_key.num_sources for cache_key in cache_keys)\r\n combined_sources = \\\r\n sorted(list(itertools.chain(*[cache_key.sources for cache_key in cache_keys])))\r\n return CacheKey(combined_id, combined_hash, combined_num_sources, combined_sources)",
"def get_weights(self, nn_weights, rov_id): # Get weights from CCEA population\n\n for w in range(self.n_weights):\n self.weights[rov_id, w] = nn_weights[w]",
"def update(self, x_dict, y_dict, weight):\n assert len(x_dict) == len(y_dict), \"invalid # of qids\"\n \n qids = self.__get_shuffled_qids(x_dict, y_dict, weight.epoch)\n w = weight.get_dense_weight()\n for qid in tqdm(qids):\n w = approx_ap(x_dict[qid].toarray(), y_dict[qid], w, self.eta, self.alpha, self.beta)\n weight.set_weight(sp.csr_matrix(w.reshape((1, weight.dims))))\n weight.epoch += 1",
"def load_weights(self, weights):\n\n i = 0\n for l in range(1, self.num_layers()):\n for n in range(self.get_layer(l).num_nodes):\n for w in range(len(self.get_node_with_layer(l, n).weights)):\n self.get_node_with_layer(l, n).weights[w] = weights[i]\n i += 1",
"def set_weight(self, dest, weight):\n self.points_to[dest] = weight",
"def write_weights_images(self):\n for weight_name, weight in self._weights.items():\n self._write_weight_image_to_tensorboard(\n name=f\"{self._Sections.WEIGHTS}/{weight_name}\",\n weight=weight,\n step=self._epochs,\n )",
"def tie_weights(self):\n if hasattr(self, \"get_output_embeddings\") and hasattr(\n self, \"get_input_embeddings\"):\n output_embeddings = self.get_output_embeddings()\n if output_embeddings is not None:\n self._tie_or_clone_weights(output_embeddings,\n self.get_input_embeddings())",
"def weight_paste(pixSrc, pixPng, src_id, logo_id):\n weight = pixPng[:, :, 3] / 255\n weight = weight[:, :, np.newaxis]\n alpha = weight[logo_id]\n beta = 1 - alpha\n pixSrc[src_id] = pixSrc[src_id] * beta + pixPng[logo_id] * alpha\n return pixSrc",
"def weights(self, weights):\n\n self._weights = weights",
"def set_weights(self, weights: Dict[PolicyID, dict]):\n self.workers.local_worker().set_weights(weights)",
"def copy_keys(source: str, destination: str) -> None:\n try:\n keys = [filename for filename in os.listdir(source) if filename.lower().endswith(\".bikey\")]\n except FileNotFoundError:\n logging.debug(f\"Error when searching for *.bikey files to copy at {source}\", exc_info=True)\n keys = []\n\n if len(keys) == 0:\n logging.warning(f\"No *.bikey files found in {source}\")\n return\n\n os.makedirs(destination, exist_ok=True)\n\n for key in keys:\n shutil.copy2(os.path.join(source, key), destination)",
"def update_weights(self, exp_ids, raw_weights):\n assert len(set(exp_ids)) == len(exp_ids),\\\n \"Invalid Argument: must pass a unique set of experience ids.\"\n\n new_weights = (raw_weights + self.weight_offset) ** self.alpha\n\n # Update the weights used for sampling each of the experiences.\n for idx, weight in zip(exp_ids, new_weights):\n self.experiences.update_weight(idx, weight)\n\n # Update beta which is used to weight the importance sampling.\n if self.beta < self.beta_f:\n self.beta = min(self.beta_f, self.beta + self.beta_update)",
"def copy_conv_weights_from(self, source: \"Encoder\") -> None:\n pass",
"def add_keys(destdict, srclist, value=None):\n if len(srclist) > 1:\n destdict[srclist[0]] = {}\n destdict[srclist[0]] = destdict.get(srclist[0], {})\n add_keys(destdict[srclist[0]], srclist[1:], value)\n else:\n destdict[srclist[0]] = value\n return destdict",
"def prepare_weights(self, pre_exist_words, hs, negative, wv, sentences,\n nonce, update=False, replication=False,\n sum_over_set=False, weighted=False, beta=1000):\n # set initial input/projection and hidden weights\n if not update:\n raise Exception('prepare_weight on Nonce2VecTrainables should '\n 'always be used with update=True')\n else:\n self.update_weights(pre_exist_words, hs, negative, wv, sentences,\n nonce, replication, sum_over_set, weighted,\n beta)"
] | [
"0.549154",
"0.5410098",
"0.520968",
"0.49663427",
"0.4958788",
"0.4947422",
"0.48904583",
"0.4840362",
"0.4815075",
"0.47568074",
"0.47536424",
"0.47400427",
"0.4730924",
"0.4675684",
"0.46377957",
"0.46358636",
"0.46322548",
"0.46215504",
"0.4620774",
"0.46132562",
"0.46081996",
"0.45859233",
"0.45747662",
"0.45710614",
"0.45550832",
"0.45155415",
"0.4464412",
"0.44578615",
"0.4454145",
"0.44501168"
] | 0.71798104 | 0 |
Return the full list of items in the TopK list of ``key``. | def topkList(self, key):
return self.execute_command(self.TOPK_LIST, key) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_for_key(self, key) -> list:\n return [res[key] for res in self.list]",
"def get_list(key):\n ret = hookenv.action_get(key)\n return ret.split() if ret else []",
"def topkQuery(self, key, *items):\n params = [key]\n params += items\n \n return self.execute_command(self.TOPK_QUERY, *params)",
"def getlist(self, key):\n try:\n return dict.__getitem__(self, key)\n except KeyError:\n return []",
"def key_list(dict):\n list = []\n for key in dict:\n list.append(key)\n return list",
"def getlist(self, key):\n try:\n vals = _dict_getitem(self, key.lower())\n except KeyError:\n return []\n else:\n if isinstance(vals, tuple):\n return [vals[1]]\n else:\n return vals[1:]",
"def get_list(self, k: str) -> List:\n return self._redis.lrange(k, 0, -1)",
"def get_adjacent_keys(self, key: str) -> List[str]:\n return [k for k in self.get_adjacent(key)]",
"def list_values(key):\n return meta.list_values(key=key)",
"def keys(self, key=None, lo=None, hi=None, reverse=None, max=None,\n include=False, txn=None, rec=None):\n return itertools.imap(ITEMGETTER_0,\n self.items(key, lo, hi, reverse, max, include, txn, rec))",
"def list(self, key):\n\n if \"~\" in key or key == \"title\":\n v = self(key, connector=\"\\n\")\n if v == \"\": return []\n else: return v.split(\"\\n\")\n elif key in self: return self[key].split(\"\\n\")\n else: return []",
"def topk(vec, k):\n vec = torch.topk(vec, k)\n return vec.view(-1).data.tolist()",
"def keys(self) -> List:\n pass",
"def expand_nested_lists(query, key):\n items = []\n for item in query[key]:\n if isinstance(item, list):\n items.extend(item)\n else:\n items.extend([item])\n return items",
"def GetSubkeys(self):",
"def hgetall(self, key):\n return self._command(b'HGETALL', key, handler=list_to_dict)",
"def uplink_buys_by_key(self, key):\n buys = []\n for buy in self.uplinkbuys:\n if buy.mindkey == key:\n buys.append(buy)\n return buys",
"def get_descendants(self, key: str) -> Sequence[str]:\n raise NotImplementedError",
"def getall(self, key):\n return self.values.get(key, [])",
"async def get_all(self, key: datastore.Key) -> RV:\n\t\treturn await (await self.get(key)).collect() # type: ignore[return-value]",
"def champion_keys():\n keys = []\n for champion_id in champions[\"data\"]:\n keys.append(champions[\"data\"][str(champion_id)][\"key\"])\n return sorted(keys)",
"def get_list(key, nodename=None):\n return _get_property(key, nodename, [])",
"def keys(self, *args, **kwargs):\n return self._list(*args, **kwargs)",
"def topkInfo(self, key):\n \n return self.execute_command(self.TOPK_INFO, key)",
"def list_all_keys(riak_host,riak_port,bucket):\n url='http://%s:%s/buckets/%s/keys?keys=true' % (riak_host,riak_port,bucket)\n #print url\n r=requests.get(url)\n print json.dumps(r.json(), sort_keys=True, indent=4)",
"def summarize(\n self,\n key: Callable[['Classification'], Union[Iterable[str], str]] = None,\n top_k: int = None,\n ):\n ctr = Counter(sorted(filter(None, flatmap(key, self.values()))))\n return [elt for elt, _ in ctr.most_common(top_k)]",
"def get_items(self, value, key=None):\n if key is None:\n return self.dicts(value)\n else:\n items = self.dicts(value)\n return [item[key] for item in items]",
"def fetch_all_keys():\n response = TIME_TABLE.scan()\n items = response['Items']\n items.sort(key=lambda x: x['timeStamp'])\n response = ''\n for item in items:\n response = '{0}\\n{1}'.format(response, item)\n return response",
"def _getbundlelistkeysparts(\n bundler, repo, source, bundlecaps=None, b2caps=None, **kwargs\n):\n listkeys = kwargs.get('listkeys', ())\n for namespace in listkeys:\n part = bundler.newpart(b'listkeys')\n part.addparam(b'namespace', namespace)\n keys = repo.listkeys(namespace).items()\n part.data = pushkey.encodekeys(keys)",
"def _force_key_as_list(self, key):\r\n return [key] if isinstance(key, (str, unicode)) else key"
] | [
"0.6962184",
"0.657635",
"0.63970435",
"0.631337",
"0.6151614",
"0.6138156",
"0.6085079",
"0.6063242",
"0.59806406",
"0.59505635",
"0.59002477",
"0.58530796",
"0.5838963",
"0.58134776",
"0.57977164",
"0.57843095",
"0.5766967",
"0.57466274",
"0.57404083",
"0.5712583",
"0.57029986",
"0.57026654",
"0.57009965",
"0.57004935",
"0.5691315",
"0.56544966",
"0.56457067",
"0.5645122",
"0.5624143",
"0.56213534"
] | 0.82646406 | 0 |
Return a new pipeline object that can queue multiple commands for later execution. ``transaction`` indicates whether all commands should be executed atomically. Apart from making a group of operations atomic, pipelines are useful for reducing the back-and-forth overhead between the client and server. Overridden in order to provide the right client through the pipeline. | def pipeline(self, transaction=True, shard_hint=None):
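        # Build the pipeline on this client's own connection pool and response
        # callbacks so queued module commands are parsed the same way as
        # commands issued directly on the client.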
p = Pipeline(
connection_pool=self.connection_pool,
response_callbacks=self.response_callbacks,
transaction=transaction,
shard_hint=shard_hint)
return p | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def pipeline(self, transaction=True, shard_hint=None):\n p = AsyncPipeline(\n connection_pool=self.client.connection_pool,\n response_callbacks=self._MODULE_CALLBACKS,\n transaction=transaction,\n shard_hint=shard_hint,\n )\n p.index_name = self.index_name\n return p",
"def pipeline(self, transaction=True, shard_hint=None):\n p = Pipeline(\n connection_pool=self.client.connection_pool,\n response_callbacks=self._MODULE_CALLBACKS,\n transaction=transaction,\n shard_hint=shard_hint,\n )\n p.index_name = self.index_name\n return p",
"def pipeline(self, transaction=True, shard_hint=None):\n return MockRedisPipeline(self, transaction, shard_hint)",
"def createPipe(self, transaction):\n pipe = detectPipeClass(transaction.dev, transaction.endpt)(self)\n name = \"Dev %s, %s\" % (transaction.dev, transaction.getTransferString())\n self.appendCanvas(name, pipe.stack)\n return pipe",
"def pipelines(self):\n return PipelineManager(session=self._session)",
"def pipeline(self) -> Pipeline:\n if self._to_pipeline is None:\n raise AttributeError(\n \"pipeline not available because `to_pipeline` was not set on __init__.\"\n )\n return self._to_pipeline(self)",
"def wrap_transaction(self):\n new_script = self.__class__()\n new_script.append(\n [BeginStatement()] + self.statements + [CommitStatement()])\n\n return new_script",
"def multi(self):\n if self._transaction_state not in (None, \"watch\"):\n raise ValueError(\"MULTI calls can not be nested\")\n ret = self._command(b'MULTI', handler=\"OK\")\n self._transaction_state = [] # This is used in self._command\n return ret",
"def _pipeline(self):\n try:\n b = self._pipeline_cache\n except AttributeError:\n r = open_redis_connection()\n b = self._pipeline_cache = r.pipeline()\n return b",
"def from_pipeline(cls, pipeline, proba=None, repeat=None):\n if proba is None:\n if repeat is None:\n new_p = cls(pipeline=pipeline)\n else:\n if pipeline.num_actions == 1 and pipeline.get_last_action_proba() is None:\n new_p = cls(pipeline=pipeline, repeat=repeat)\n else:\n new_p = cls()\n new_p.append_pipeline(pipeline, repeat=repeat)\n else:\n if pipeline.num_actions == 1 and pipeline.get_last_action_repeat() is None:\n new_p = cls(pipeline=pipeline, proba=proba)\n else:\n new_p = cls()\n new_p.append_pipeline(pipeline, proba=proba)\n return new_p",
"def pipeline(self):\n # gotta avoid circular imports by deferring\n from .pipeline import Pipeline\n return Pipeline().from_source(self._collection)",
"def transaction(self):\n return Transaction(self)",
"async def connect_pipeline(\n *, connect=None, bind=None, loop=None, translation_table=None\n):\n if loop is None:\n loop = asyncio.get_event_loop()\n\n transp, proto = await create_zmq_connection(\n lambda: _ClientProtocol(loop, translation_table=translation_table),\n zmq.PUSH,\n connect=connect,\n bind=bind,\n loop=loop,\n )\n return PipelineClient(loop, proto)",
"def make_pipeline():\n \n # Base universe set to the QTradableStocksUS\n base_universe = QTradableStocksUS()#Q500US()\n base_universe = (base_universe & Q500US())\n base_universe = (base_universe & Fundamentals.market_cap.latest.top(150))\n \n # Factor of yesterday's close price.\n #yesterday_close = USEquityPricing.close.latest\n \n pipe = Pipeline(\n columns={\n #'close': yesterday_close,\n 'sector': Sector(),\n },\n screen=base_universe\n )\n return pipe",
"def delay_pipeline(pipeline, pipe):\n _pipeline = delayed(pipeline[0].curry())(pipe)\n for task in pipeline[1:]:\n _pipeline = delayed(task.curry())(_pipeline)\n\n return _pipeline",
"def make_pipeline(context):\n \n # Base universe of top 500 US stocks.\n base_universe_filter = Q500US()\n\n # Stocks of only tech sector.\n tech_sector = Sector(mask=base_universe_filter)\n tech_universe_filter = base_universe_filter & tech_sector.eq(311)\n\n # Top 10 tech stocks with largest market cap.\n mkt_cap_filter = morningstar.valuation.market_cap.latest\n top_mkt_cap_tech_filter = mkt_cap_filter.top(context.NUM_SYMBOLS, mask=tech_universe_filter)\n\n # Bollinger band factor with Stdev factor 2.\n lower_band_factor, middle_factor, upper_band_factor = BollingerBands(window_length=22, k=2, mask=top_mkt_cap_tech_filter)\n\n # Percent difference between (price, lower_band) and (price, upper_band).\n price = USEquityPricing.close.latest\n buy_percent_factor = ((lower_band_factor - price)*100)/price\n sell_percent_factor = ((price - upper_band_factor)*100)/price\n\n # Mean reversion buy and sell filters.\n # Sell when price exceeds upper-band and buy when price is below lower-band.\n buy_filter = buy_percent_factor > 0\n sell_filter = sell_percent_factor > 0\n\n # Build and return the Pipeline.\n pipe_bbands = Pipeline(columns={'buy_percent': buy_percent_factor,\n 'lower_band': lower_band_factor,\n 'buy': buy_filter,\n 'price': price,\n 'sell': sell_filter,\n 'upper_band': upper_band_factor,\n 'sell_percent': sell_percent_factor}, screen=top_mkt_cap_tech_filter)\n \n return pipe_bbands",
"def pipelines(self):\r\n return pipelines.Pipelines(self)",
"def make_pipeline():\r\n base_universe = Q1500US()\r\n sector = Sector() \r\n # screen is based off of returns\r\n returns = Returns(window_length = 2)\r\n # check if stock price has good strength, but not necessarily overbought\r\n rsi = RSI() \r\n price = USEquityPricing.close.latest\r\n # creating filter by specifying the type of returns desired\r\n top_return_stocks = returns.top(1,mask=base_universe, groupby=sector)\r\n pipe = Pipeline(\r\n columns = {\r\n 'rsi': rsi,\r\n 'price': price\r\n },\r\n # filter top return stocks, and stocks that are not being overbought\r\n # but are not too oversold either\r\n screen = base_universe & top_return_stocks & (20 < rsi < 80)\r\n # the above is equivalent to: choose stocks from the base universe that have had the top returns in their sectors and have a good RSI value\r\n )\r\n return pipe",
"def pipeline(self):\n return self._pipeline",
"def pipeline(self):\n return self._pipeline",
"def pipeline(\n self,\n name: str,\n description: Optional[str] = None,\n labels: Optional[Sequence[PipelineLabel]] = None,\n ) -> \"Client\":\n _args = [\n Arg(\"name\", name),\n Arg(\"description\", description, None),\n Arg(\"labels\", labels, None),\n ]\n _ctx = self._select(\"pipeline\", _args)\n return Client(_ctx)",
"def async_pipe(self, **kwargs):\n return AsyncPipe(source=self.async_fetch(), **kwargs)",
"def pipeline(args) :\n from pipeliner import create_pipeline\n create_pipeline(args)",
"def make_pipeline():\n # exchange = Fundamentals.exchange_id.latest\n # nyse_filter = exchange.eq('NYS')\n symbol_filter = StaticSids([TRADING_SID])\n set_benchmark(TRADING_SID) \n # volume_filter = VolumeFilter(\n # inputs=[USEquityPricing.volume],\n # window_length=1,\n # mask=symbol_filter\n # )\n\n # is_setup = volume_filter & alpha_long_weekly & alpha_long_daily\n weekly_high = WeeklyHigh(\n inputs=[USEquityPricing.high],\n mask=symbol_filter\n )\n weekly_low = WeeklyLow(\n inputs=[USEquityPricing.low],\n mask=symbol_filter\n )\n weekly_classifier = WeeklyClassifier(\n inputs=[\n USEquityPricing.open,\n USEquityPricing.high,\n USEquityPricing.low,\n USEquityPricing.close\n ],\n mask=symbol_filter\n )\n daily_classifier = DailyClassifier(\n inputs=[\n USEquityPricing.open,\n USEquityPricing.high,\n USEquityPricing.low,\n USEquityPricing.close\n ],\n mask=symbol_filter\n\n )\n\n pipe = Pipeline(\n screen=symbol_filter, # & (daily_classifier > 0),\n columns={\n 'daily_classifier': daily_classifier,\n 'daily_high': USEquityPricing.high.latest,\n 'daily_low': USEquityPricing.low.latest,\n 'weekly_classifier': weekly_classifier,\n 'weekly_high': weekly_high,\n 'weekly_low': weekly_low\n }\n )\n return pipe",
"def make_pipeline():\n universe = TradableStocksUS('Real Estate') | TradableStocksUS('Utilities') | \\\n TradableStocksUS('Consumer Staples') | TradableStocksUS('Technology') | \\\n TradableStocksUS('Financials') | TradableStocksUS('Energy') | \\\n TradableStocksUS('Materials') | TradableStocksUS('Health Care') | \\\n TradableStocksUS('Industrials') | TradableStocksUS('Consumer Discretionary') | \\\n TradableStocksUS('Communications')\n\n roic = shfd.slice(dimension='MRT', period_offset=0).ROIC.latest\n ebit = shfd.slice(dimension='MRQ', period_offset=0).EBIT.latest\n ev = shfd.slice(dimension='MRQ', period_offset=0).EV.latest\n volatility = AnnualizedVolatility(window_length=100)\n value = ebit / ev\n\n roic_rank = roic.rank(mask=universe)\n value_rank = value.rank(mask=universe)\n volatility_rank = volatility.rank(mask=universe, ascending=False)\n\n spy_ma100_price = SMA(inputs=[USEquityPricing.close], \n window_length=100)[algo.sid(\"FIBBG000BDTBL9\")]\n spy_price = USEquityPricing.close.latest[algo.sid(\"FIBBG000BDTBL9\")]\n\n momentum_score = MomentumScore()\n\n overall_rank = roic_rank + value_rank + volatility_rank\n\n # seven_month_returns = Returns(window_length=148, mask=universe,)\n # one_month_returns = Returns(window_length=30, mask=universe,)\n\n pipeline = Pipeline(\n columns={\n 'stock' : master.SecuritiesMaster.Symbol.latest,\n 'sid': master.SecuritiesMaster.Sid.latest,\n 'sector' : master.SecuritiesMaster.usstock_Sector.latest,\n 'average_dollar_volume': AverageDollarVolume(window_length=200),\n 'price': EquityPricing.close.latest,\n 'volume': EquityPricing.volume.latest,\n 'roic' : roic,\n 'value' : value,\n 'volatility': volatility,\n 'roic_rank' : roic_rank,\n 'value_rank' : value_rank,\n 'momentum': momentum_score,\n 'momentum_decile': momentum_score.deciles(),\n 'volatility_decile' : volatility.deciles(),\n 'overall_rank' : overall_rank,\n 'overall_rank_decile': overall_rank.deciles(),\n 'trend_filter': spy_price > spy_ma100_price,\n # 'returns' : one_month_returns - seven_month_returns\n }, \n screen = universe\n )\n\n return pipeline",
"def pipeline(self, pipeline_id):\r\n return pipelines.Pipeline(self, pipeline_id)",
"def transaction(self, *args, **kwargs):\n # Build database transaction class\n class DBTransaction(BaseTransaction, self.__class__):\n pass\n return DBTransaction(self, *args, **kwargs)",
"def pipe(self, func, *args, **kwargs):\n return func(self, *args, **kwargs)",
"def make_pipeline():\r\n\r\n # Custom universe containing only desired assets (stocks with flag data)\r\n universe = StaticSids(my_stocks)\r\n\r\n return Pipeline(\r\n columns={\r\n #'flag_type': algo_data_full.flag_type.latest,\r\n #'flag_price': algo_data_full.flag_price.latest,\r\n #'end_flag_date': algo_data_full.end_flag_date.latest,\r\n #'end_flag_price': algo_data_full.end_flag_price.latest,\r\n 'up_flags': flag_counts.up.latest,\r\n 'down_flags': flag_counts.down.latest,\r\n 'up_ratio': up_ratios_2.up_ratio.latest,\r\n 'close': USEquityPricing.close.latest,\r\n },\r\n screen=universe\r\n )",
"def trace_pipeline(pipe):\n _patch_multi_exec_execute(pipe)"
] | [
"0.6506651",
"0.6506439",
"0.60975367",
"0.5716096",
"0.5687337",
"0.5610756",
"0.5471087",
"0.5375021",
"0.5373202",
"0.5361515",
"0.53164816",
"0.52698827",
"0.5247132",
"0.5190451",
"0.5172265",
"0.51619154",
"0.5102271",
"0.5086458",
"0.5023004",
"0.5023004",
"0.49856988",
"0.4985457",
"0.4974305",
"0.49494478",
"0.49182144",
"0.4911432",
"0.49040595",
"0.4904034",
"0.48949367",
"0.48897997"
] | 0.7290739 | 0 |
This will return the graph data for the outage module | def get_outage(self):
try:
assert self._db_connection, {
STATUS_KEY: HTTP_500_INTERNAL_SERVER_ERROR,
MESSAGE_KEY: DB_ERROR}
if self.equipment == COKE_DRUM_VALUE and self.module == OUTAGE_VALUE:
"""
This will return the graph data for the selected outage module
"""
query_params = {
TAG_NAME_REQUEST: self.query_params.GET[TAG_NAME_REQUEST],
START_DATE_REQUEST: self.query_params.GET[START_DATE_REQUEST],
END_DATE_REQUEST: self.query_params.GET[END_DATE_REQUEST]
}
MODULE_LEVEL_MULTILINE_TAG = tuple(LIST_OF_OUTAGE_MODULE_LEVEL_MULTILINE_TAGS_GRAPH)
if MULTILINE_REQUEST in self.query_params.GET:
"""
This will return the graph data for the actual and predicted tags for the selected outage module
"""
query_params[MULTILINE_REQUEST] = self.query_params.GET[MULTILINE_REQUEST]
if query_params:
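                # Pick the query based on whether a start date was supplied and
                # whether a multiline (actual vs. predicted) comparison was requested.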
if START_DATE_REQUEST not in query_params or not query_params[START_DATE_REQUEST] and \
MULTILINE_REQUEST not in query_params:
graph_data = django_search_query_all(
DETAILED_OUTAGE_GRAPH_NULL_START_DATE.format(
self.module,
query_params[TAG_NAME_REQUEST],
query_params[END_DATE_REQUEST]))
elif query_params[START_DATE_REQUEST] and MULTILINE_REQUEST not in query_params:
graph_data = django_search_query_all(
DETAILED_OUTAGE_GRAPH.format(
self.module,
query_params[TAG_NAME_REQUEST],
query_params[START_DATE_REQUEST],
query_params[END_DATE_REQUEST]))
elif query_params[START_DATE_REQUEST] and query_params[MULTILINE_REQUEST]:
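                    # Multiline request: module-level multiline tags are fetched as a pair
                    # (actual and predicted) in a single query; any other tag falls back to
                    # the normal single-series query.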
if query_params[TAG_NAME_REQUEST] in LIST_OF_OUTAGE_MODULE_LEVEL_MULTILINE_TAGS_GRAPH:
graph_data = django_search_query_all(
DETAILED_OUTAGE_MODULE_MULTILINE_GRAPH.format(
self.module,
MODULE_LEVEL_MULTILINE_TAG,
query_params[START_DATE_REQUEST],
query_params[END_DATE_REQUEST]))
else:
graph_data = django_search_query_all(
DETAILED_OUTAGE_GRAPH.format(
self.module,
query_params[TAG_NAME_REQUEST],
query_params[START_DATE_REQUEST],
query_params[END_DATE_REQUEST]))
df_data = pd.DataFrame(graph_data)
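                # Fetch the configured min/max limits for this tag; they are attached
                # to the response payload further below.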
min_max = django_search_query_all(
MIN_MAX_DATA.format(
self.module,
query_params[TAG_NAME_REQUEST]
))
df_min_max_data = pd.DataFrame(min_max)
graph = []
if not df_data.empty:
df_data = df_data.where(pd.notnull(df_data) == True, None)
df_data.sort_values(TIMESTAMP_KEY, ascending=True, inplace=True)
df_unit = df_data[UNIT].iloc[0]
df_description = df_data[DESCRIPTION].iloc[0]
df_timestamp = list(dict.fromkeys(list(df_data[TIMESTAMP_KEY])))
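                    # Module-level multiline tags are split by tag name into actual and
                    # predicted series for the north and south drums.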
if query_params[TAG_NAME_REQUEST] in LIST_OF_OUTAGE_MODULE_LEVEL_MULTILINE_TAGS_GRAPH:
df_result = df_data.groupby(TAG_NAME_REQUEST)
actual_north_data = []
predicted_north_data = []
actual_south_data = []
predicted_south_data = []
if len(df_result) == 2:
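                            # Both the actual and the predicted groups are present,
                            # so all four series can be extracted.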
df_description = \
df_data[df_data[TAG_NAME_REQUEST] == query_params[TAG_NAME_REQUEST]][
DESCRIPTION].iloc[0]
df_north_actual = df_result.get_group(OUTAGE_MODULE_LEVEL_ACTUAL_TAG)
actual_north_data = list(df_north_actual['north_drum_tag_value'])
df_north_predicted = df_result.get_group(OUTAGE_MODULE_LEVEL_PREDICTED_TAG)
predicted_north_data = list(df_north_predicted['north_drum_tag_value'])
df_south_actual = df_result.get_group(OUTAGE_MODULE_LEVEL_ACTUAL_TAG)
actual_south_data = list(df_south_actual['south_drum_tag_value'])
df_south_predicted = df_result.get_group(OUTAGE_MODULE_LEVEL_PREDICTED_TAG)
predicted_south_data = list(df_south_predicted['south_drum_tag_value'])
elif len(df_result) == 1:
if df_result[TAG_NAME_REQUEST] == OUTAGE_MODULE_LEVEL_ACTUAL_TAG:
df_description = \
df_data[df_data[TAG_NAME_REQUEST] == OUTAGE_MODULE_LEVEL_ACTUAL_TAG][
DESCRIPTION].iloc[0]
df_north_actual = df_result.get_group(OUTAGE_MODULE_LEVEL_ACTUAL_TAG)
actual_north_data = list(df_north_actual['north_drum_tag_value'])
df_south_actual = df_result.get_group(OUTAGE_MODULE_LEVEL_ACTUAL_TAG)
actual_south_data = list(df_south_actual['south_drum_tag_value'])
elif df_result[TAG_NAME_REQUEST] != OUTAGE_MODULE_LEVEL_ACTUAL_TAG:
df_description = \
df_data[df_data[TAG_NAME_REQUEST] == OUTAGE_MODULE_LEVEL_PREDICTED_TAG][
DESCRIPTION].iloc[0]
df_north_predicted = df_result.get_group(OUTAGE_MODULE_LEVEL_PREDICTED_TAG)
predicted_north_data = list(df_north_predicted['north_drum_tag_value'])
df_south_predicted = df_result.get_group(OUTAGE_MODULE_LEVEL_PREDICTED_TAG)
predicted_south_data = list(df_south_predicted['south_drum_tag_value'])
temp = {"north_actual": actual_north_data, "north_predicted": predicted_north_data,
"south_actual": actual_south_data, "south_predicted": predicted_south_data,
"x_axis": df_timestamp,
"unit": df_unit,
"description": df_description}
else:
temp = {"y_axis": list(df_data[TAG_VALUE]), "x_axis": df_timestamp,
"unit": df_unit, "description": df_description}
if not df_min_max_data.empty:
temp["min_data"] = df_min_max_data[MIN_VALUE].iloc[0]
temp["max_data"] = df_min_max_data[MAX_VALUE].iloc[0]
else:
temp["min_data"] = None
temp["max_data"] = None
graph.append(temp)
return graph
except AssertionError as e:
log_error("Exception due to : %s" + str(e))
return asert_res(e)
except Exception as e:
log_error("Exception due to : %s" + str(e))
return json_InternalServerError | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_outage_graph(request, equipment_name=None, module_name=None):\r\n query_params, obj = None, None\r\n try:\r\n\r\n query_params = request\r\n\r\n except:\r\n pass\r\n\r\n try:\r\n if request.method == GET_REQUEST:\r\n obj = OutageGraph(query_params, equipment_name, module_name)\r\n return obj.get_outage()\r\n\r\n log_debug(METHOD_NOT_ALLOWED)\r\n return JsonResponse({MESSAGE_KEY: METHOD_NOT_ALLOWED},\r\n status=HTTP_405_METHOD_NOT_ALLOWED)\r\n\r\n except Exception as e:\r\n\r\n excMsg = \"get_outage_graph_data API : \" + str(error_instance(e))\r\n\r\n return excMsg\r\n\r\n finally:\r\n\r\n if obj:\r\n del obj",
"def get_graph(self) -> dict:\n response = requests.get(self.channel, params=\"get_graph\")\n return json_to_graph(response.content)",
"def get_graph_summary(self):\n\n pass",
"def _get_full_graph(self):",
"def getGraph(self):\n\t\treturn self.graph",
"def graphs(self):\n return self.__graphs",
"def get_graphs_data_connection(self):\n return self.m_connection.graphs_data",
"def get_graph(**options):\n graph = bonobo.Graph()\n\n split_dbs = bonobo.noop\n\n graph.add_chain(\n GetOrderXML(\n prefix=\"/etl/ivm\",\n glob=[\n 'Mozilla_Corporation{timestamp:%Y_%m_%d}*.xml'.format(\n timestamp=options['now'])\n ]),\n ParseDates(['Transactionlog_Tranenddatetime']),\n truncate_description,\n bonobo.UnpackItems(0),\n bonobo.Rename(\n transaction_date='Transactionlog_Tranenddatetime',\n item_number='Transactionlog_Itemnumber',\n transaction_id='Transactionlog_Tlid',\n item_description='Transactionlog_Itemdesc'),\n bonobo.Rename(\n user_id='Transactionlog_User',\n quantity='Transactionlog_Qty',\n transaction_code='Transactionlog_Transcode',\n description='Vendingmachines_Descr',\n ),\n split_dbs,\n _name=\"main\")\n\n #insert into ivm (description, transaction_id, item_number, item_description, user_id, quantity, transaction_date, transaction_code) values\n\n for engine in list(set(options['engine'])):\n graph.add_chain(\n bonobo_sqlalchemy.InsertOrUpdate(\n table_name=options['table_name'] + options['table_suffix'],\n discriminant=('transaction_id', ),\n engine=engine),\n _input=split_dbs)\n\n return graph",
"def get_output_nodes(self):\n \n\n self.buildings = self.dataset.groups['buildings']\n self.building_nodes = self.buildings.groups['nodes']\n\n eta_output_added = getattr(self.building_nodes,'eta_output_added')\n uv_output_added = getattr(self.building_nodes,'uv_output_added')\n\n eta = []\n uv = []\n nodeIds = []\n time = []\n \n if(eta_output_added or uv_output_added ):\n time = self.building_nodes.variables['time'][:].tolist()\n nodeIds = self.building_nodes.variables['id'][:].tolist()\n if eta_output_added: eta = self.building_nodes.variables['eta'][:].tolist()\n if uv_output_added: uv = self.building_nodes.variables['uv'][:].tolist()\n\n \n return nodeIds,eta, uv, time",
"def get_graph(self):\n return json.dumps(self.graph.get_edgelist(), separators=(',',':'))",
"def graph(self):\n ...",
"def dump_graph(self):\n # TODO\n return",
"def getOutageHistory(self):\n return self._OutageHistory",
"def get_graph_data(\n self, episode_queryset\n ):\n total = episode_queryset.count()\n count = 0\n result = {}\n\n if total == 0:\n amount = 0\n else:\n count = episode_queryset.annotate(\n subrecord_count=Count(self.subrecord_api_name)\n ).filter(subrecord_count=0).count()\n\n amount = round(float(count)/total, 3) * 100\n result[\"total\"] = total\n result[\"count\"] = count\n aggregate = [['None', amount]]\n links = {\"None\": self.to_link(\"None\")}\n result[\"graph_vals\"] = json.dumps(dict(\n aggregate=aggregate,\n subrecord=self.subrecord_api_name,\n links=links\n ))\n return result",
"def data_graph():\n station_reference = request.args.get(\"stationReference\")\n station_name = request.args.get(\"stationName\")\n station_name = station_name.replace(\" \",\"+\")\n\n if station_name is not None:\n # station_data = station_data.replace(\" \", \"+\")\n station = station_data.loc[station_data.stationName == station_name]\n else:\n station = station_data.loc[station_data.stationReference == station_reference]\n result_station = station.iloc[0]\n\n # Get optional parameters\n time_from = request.args.get(\"from\")\n time_to = request.args.get(\"to\")\n if time_from:\n pass\n else:\n time_from = None\n if time_to:\n pass\n else:\n time_to = None\n # plot pic\n magic_trick= data.station_graph(result_station.stationName, time_from, time_to)\n # img_stream = io.BytesIO(img)\n # img = Image.open(img_stream)\n # imgByteArr = io.BytesIO()\n # img.save(imgByteArr,format='PNG')\n # imgByteArr = imgByteArr.getvalue()\n # return send_file(io.BytesIO(imgByteArr),\n # mimetype = 'image/png',\n # as_attachment = True,\n # attachment_filename = 'tmp.png')\n image_data = open(\"tmp.png\", \"rb\").read()\n response = make_response(image_data)\n response.headers['Content-Type'] = 'image/png'\n return response",
"def graph(self):\n return self.__graph",
"def get_graph(**options):\r\n graph = bonobo.Graph()\r\n graph.add_chain(get_stock_list,extract, process, load)\r\n\r\n return graph",
"def buildGraph(self):\n return None",
"def export_nodes(self):\n return ['lon', 'lat', 'speed', 'heading'], \\\n [{'speed': self.node_speed_limit[v],\n 'lon': self.node_locations[v][0],\n 'lat': self.node_locations[v][1],\n 'heading': self.node_heading[v]} for v in self.graph.vertices()]",
"def getOutEdges(self):\n edges = []\n for edict in mm.G[self].values():\n for k in edict.keys():\n edges.append(edict.get(k).get(\"edge\"))\n \n return edges",
"def gen_graph(self):",
"def graph(self):\n return self._graph",
"def graph(self):\n return self._graph",
"def graph(self) -> dict:\n return self.flat_graph()",
"def get_graph(self):\n return self._graph",
"def export_json_graph(self, destpath):\n export = {}\n export['vertices'] = self.vertices\n export['edges'] = self.edges\n export['_totals'] = {}\n export['_photo'] = {}\n export['_photo']['credit'] = self.photo['credit']\n export['_photo']['entity_max'] = self.photo['max']\n export['_totals']['media'] = len(self.media)\n export['_totals']['wilds'] = len(self.wilds)\n export['_totals']['zoos'] = len(self.zoos)\n export['_totals']['locations'] = len(self.wilds) + len(self.zoos)\n export['_totals']['pandas'] = self.sum_pandas()\n export['_totals']['last_born'] = self.summary['birthday']\n export['_totals']['last_died'] = self.summary['death']\n with open(destpath, 'wb') as wfh:\n wfh.write(json.dumps(export, \n ensure_ascii=False,\n indent=4,\n sort_keys=True).encode('utf8'))\n print(\"Dataset exported: %d pandas at %d locations (%d wild, %d zoo)\"\n % (export['_totals']['pandas'], export['_totals']['locations'],\n export['_totals']['wilds'], export['_totals']['zoos']))",
"def graph():\n # Try to get params request\n params = extract_variables(['start_time', 'end_time', 'sensor_id'], request)\n # Fetch data from database\n results = query_climate_range(**params)\n\n # Turn it in to lists which can be graphed\n dates = []\n humids = []\n temps = []\n pressures = []\n for result in results:\n dates.append(datetime.datetime.fromtimestamp(result['time']))\n humids.append(result['humid'])\n temps.append(result['temp'])\n pressures.append(result['pressure'])\n\n # Graph it\n fig = Figure()\n # First y axis (temp and humid)\n axis = fig.add_subplot(1, 1, 1)\n # Plot humidity and temp on the same scale\n axis.plot_date(dates, humids, '-', color=COLORS['blue'])\n axis.plot_date(dates, temps, '-', color=COLORS['red'])\n axis.xaxis.set_major_formatter(DateFormatter('%d/%m/%y %H:%M'))\n axis.set_ylabel('Humidity in % & Temps in C')\n axis.set_xlabel('Time')\n # Second y axis (pressure)\n axis_pressure = axis.twinx()\n # Plot pressure\n axis_pressure.plot_date(dates, pressures, '-', color=COLORS['green'])\n axis_pressure.xaxis.set_major_formatter(DateFormatter('%d/%m/%y %H:%M'))\n axis_pressure.set_ylabel('Pressure in mbar')\n # Configure the figure\n fig.autofmt_xdate()\n fig.legend(['Humidity', 'Temperature', 'Pressure'], loc='lower right')\n fig.set_tight_layout(True)\n canvas = FigureCanvas(fig)\n # Save output\n png_output = BytesIO()\n canvas.print_png(png_output)\n\n # Create the response and send it\n response = make_response(png_output.getvalue())\n response.headers['Content-Type'] = 'image/png'\n return response",
"def graph():\n return jsonify(app.config[\"jsonified\"])",
"def graph(self):\n\n return self._graph",
"def gen_graph():\n if config_pagination:\n gdata = tgraph.call_graph(offset=offset, limit=limit)\n else:\n gdata = tgraph.call_graph(start=start, end=end, contineous=contineous)\n\n for data in gdata:\n yield data"
] | [
"0.7131809",
"0.6295313",
"0.62823576",
"0.62179625",
"0.6100377",
"0.59277797",
"0.5841857",
"0.58356446",
"0.5830653",
"0.5827107",
"0.5826806",
"0.57876396",
"0.5763705",
"0.57545763",
"0.56935877",
"0.5691411",
"0.5688853",
"0.56795114",
"0.5666311",
"0.56479836",
"0.5642819",
"0.56165665",
"0.56165665",
"0.56140625",
"0.5609281",
"0.55508524",
"0.5513983",
"0.5496717",
"0.5490275",
"0.5454037"
] | 0.7193129 | 0 |
Return a mock component of a general model. | def mock_component():
component = Mock()
component.free_parameters = flex.double([1.0])
component.free_parameter_esds = None
component.n_params = 1
component.var_cov_matrix = sparse.matrix(1, 1)
return component | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_get_model(self) -> None:\n get_model()",
"def real_model(request):\n return request.config.option.real_model",
"def model(self) -> Type[Model]:",
"def model_name(self) -> str:\n return \"mock-model-name\"",
"def test_get_model_method(self):\n # arrange\n model_manager = ModelManager()\n\n model_manager.load_model(\"tests.mocks.MLModelMock\")\n\n # act\n exception_raised = False\n model = None\n try:\n model = model_manager.get_model(qualified_name=\"qualified_name\")\n except Exception as e:\n exception_raised = True\n\n # assert\n self.assertFalse(exception_raised)\n self.assertTrue(type(model) is MLModelMock)",
"def test_get(self):\n self.assertEqual(self.expected_described_model, self.mapped_model.get(\"described_model_type\"))",
"def testGetReigsteredModel(self):\n from soc.models.student import Student\n model = models_logic.getModel('soc.models.student.Student')\n self.assertEqual(model, Student)",
"def get_model(model=gin.REQUIRED):\n return model",
"def model() -> Model:\n return Model()",
"def test_get_item(self):\n self.assertEqual(self.expected_described_model, self.mapped_model[\"described_model_type\"])",
"def get_model():\n return UNISAL",
"def get_main_model(self):\n return self",
"def test_coupledmodels_get(self):\n pass",
"def get_response_model_ctor(self):\n return self._response_model_ctor",
"def test_get_object(self, detail_view, employee_model):\n\n employee = Mock()\n employee_model.objects.get.return_value = Mock()\n detail_view.get_object.return_value = employee\n\n emp = detail_view.get_object(1)\n self.assertEqual(employee, emp)",
"def model(self) -> Model:\n return self.software_system.get_model()",
"def _get_card_model(self, model: str) -> Any:\n return self.collection.models.byName(model)",
"def create_model(self):\n self.skipTest(\"Base module should not be tested.\")",
"def get_model(self):\n\t\treturn self.object.__class__",
"def MakeModel(self):\n pass",
"def get_model(*args):\n return Model()",
"def test_get_model_component(requests_mock):\n from DarktraceMBs import Client, get_model_component_command\n\n # GIVEN an integration is configured and you would like to find similar devices\n mock_api_response = util_load_json('test_data/component.json')\n requests_mock.get('https://mock.darktrace.com/components?cid=254503',\n json=mock_api_response)\n\n client = Client(\n base_url='https://mock.darktrace.com',\n verify=False,\n auth=('examplepub', 'examplepri')\n )\n\n # WHEN the specified device id is 1 and there are 2 results max desired\n args = {\n 'cid': '254503'\n }\n\n # THEN the context will be updated and information about similar devices will be fetched and pulled\n integration_response = get_model_component_command(client, args)\n expected_response = util_load_json('test_data/formatted_component.json')\n\n assert integration_response.outputs == expected_response\n assert integration_response.outputs_prefix == 'Darktrace.Model.Component'",
"def modelClass(self):\n raise NotImplementedError",
"def get_model(self):\n return self.model",
"def get_model(self):\n return self.model",
"def get_model(self):\n return self.model",
"def get_model(self):\n return self.model",
"def get_model(self):\n return self.model",
"def get_model(self):\n return self.model",
"def get_model(self):\n return self.model"
] | [
"0.6227962",
"0.61123717",
"0.60828376",
"0.60729104",
"0.59207785",
"0.58533525",
"0.5756956",
"0.5729371",
"0.5705438",
"0.5661981",
"0.56480694",
"0.55965275",
"0.5589341",
"0.55885005",
"0.553621",
"0.55356354",
"0.5533403",
"0.55187505",
"0.55115426",
"0.5495379",
"0.5476795",
"0.54682446",
"0.5439469",
"0.5429802",
"0.5429802",
"0.5429802",
"0.5429802",
"0.5429802",
"0.5429802",
"0.5429802"
] | 0.63239694 | 0 |
Return a mock data manager of a general model. | def mock_data_manager(components):
dm = Mock()
dm.components = components
dm.fixed_components = []
return dm | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def setup_dummy_data_manager():\n import repoze.filesafe\n repoze.filesafe._local.manager = mgr = DummyDataManager()\n return mgr",
"def _get_data_manager(self):\n\n ftype = self.conf['General']['save_as']\n if ftype == 'npz':\n return NPZDataManager(self.conf, self.log)\n elif ftype == 'hdf5':\n return HDF5DataManager(self.conf, self.log)\n else:\n raise ValueError('Invalid file type in config')",
"def setUp(self):\n super().setUp()\n self.database.datamodels.find_one.return_value = self.DATA_MODEL",
"def get_data_manager(self):\n\n return self._data_manager",
"def setUp(self):\n self.mock_model = Mock()",
"def data_manager_fixture():\n\n class DataManager:\n def __init__(self):\n self.gen = 1000\n self.cfg = get_cfg_defaults()\n mode = \"test_inference\"\n self.dataset = Dataset(None, self.cfg, mode)\n self.auto_anchors = AutoAnchors(self.dataset, self.cfg.model, self.gen)\n self.k_points = torch.ones((12, 2)) * 2.0\n self.wh = torch.ones((1000, 2)) * 2.0\n\n return DataManager()",
"def manager(model):\n return model.objects",
"def test_get_model_method(self):\n # arrange\n model_manager = ModelManager()\n\n model_manager.load_model(\"tests.mocks.MLModelMock\")\n\n # act\n exception_raised = False\n model = None\n try:\n model = model_manager.get_model(qualified_name=\"qualified_name\")\n except Exception as e:\n exception_raised = True\n\n # assert\n self.assertFalse(exception_raised)\n self.assertTrue(type(model) is MLModelMock)",
"def test_default_manager(self):\n\n class Book(RestObject):\n pass\n\n class Author(RestObject):\n pass\n \n self.assertTrue(isinstance(Book.objects, RestManager))\n self.assertTrue(Book.objects.object_class, Book)\n\n self.assertTrue(isinstance(Author.objects, RestManager))\n self.assertTrue(Author.objects.object_class, Author)\n\n self.assertNotEqual(Book.objects, Author.objects)\n \n book = Book()\n # Cannot test AttributeError with self.assertRaises\n try:\n book.objects.all()\n except AttributeError, e:\n self.assertEqual('%s' % e, 'Manager is not accessible via Book instances')",
"def make_test_object(self):\n return self.orm_cls.testing_create()",
"def create_model(self):\n self.skipTest(\"Base module should not be tested.\")",
"def as_manager(cls):\n manager = DefaultManager.from_queryset(cls)()\n manager._built_with_as_manager = True\n return manager",
"def test_get_model(self) -> None:\n get_model()",
"def get_data_manager(\n hass: HomeAssistantType, entry: ConfigEntry\n) -> WithingsDataManager:\n profile = entry.data.get(const.PROFILE)\n\n if not hass.data.get(const.DOMAIN):\n hass.data[const.DOMAIN] = {}\n\n if not hass.data[const.DOMAIN].get(const.DATA_MANAGER):\n hass.data[const.DOMAIN][const.DATA_MANAGER] = {}\n\n if not hass.data[const.DOMAIN][const.DATA_MANAGER].get(profile):\n hass.data[const.DOMAIN][const.DATA_MANAGER][\n profile\n ] = create_withings_data_manager(hass, entry)\n\n return hass.data[const.DOMAIN][const.DATA_MANAGER][profile]",
"def test_model_manager_will_return_same_instance_when_instantiated_many_times(self):\n # arrange, act\n # instantiating the model manager class twice\n first_model_manager = ModelManager()\n second_model_manager = ModelManager()\n\n # loading the MLModel objects from configuration\n first_model_manager.load_model(\"tests.mocks.MLModelMock\")\n\n first_model_object = first_model_manager.get_model(qualified_name=\"qualified_name\")\n second_model_object = second_model_manager.get_model(qualified_name=\"qualified_name\")\n\n # assert\n self.assertTrue(str(first_model_manager) == str(second_model_manager))\n self.assertTrue(str(first_model_object) == str(second_model_object))",
"def cleanup_dummy_data_manager():\n import repoze.filesafe\n manager = getattr(repoze.filesafe._local, 'manager', None)\n if isinstance(manager, DummyDataManager):\n del repoze.filesafe._local.manager\n return manager",
"def get_fake_model(fields=None, model_base=PostgresModel, meta_options={}):\n\n model = define_fake_model(fields, model_base, meta_options)\n\n class TestProject:\n def clone(self, *_args, **_kwargs):\n return self\n\n @property\n def apps(self):\n return self\n\n class TestMigration(migrations.Migration):\n operations = [HStoreExtension()]\n\n with connection.schema_editor() as schema_editor:\n migration_executor = MigrationExecutor(schema_editor.connection)\n migration_executor.apply_migration(\n TestProject(), TestMigration(\"eh\", \"postgres_extra\")\n )\n\n schema_editor.create_model(model)\n\n return model",
"def setUp(self):\n self.my_model1 = BaseModel()\n self.my_model1.name = \"hello\"\n self.my_model1.number = 9\n self.my_model2 = BaseModel()\n self.my_model2.name = \"goodbye\"\n self.my_model2.number = 19\n self.mock_stdin = create_autospec(sys.stdin)\n self.mock_stdout = create_autospec(sys.stdout)",
"def test_get_model_moderator(self, *mocks):\n moderator = get_model_moderator(Article)\n self.assertIsNotNone(moderator)",
"def test_load_model_method(self):\n # arrange\n # instantiating the model manager class\n model_manager = ModelManager()\n\n # adding the model\n model_manager.load_model(\"tests.mocks.MLModelMock\")\n\n # act\n exception_raised = False\n model_object = None\n # accessing the MLModelMock model object\n try:\n model_object = model_manager.get_model(qualified_name=\"qualified_name\")\n except Exception as e:\n exception_raised = True\n print_tb(e)\n\n # assert\n self.assertFalse(exception_raised)\n self.assertTrue(model_object is not None)",
"def mock_rdata(): \n return {\n \"authors\": [{\"full_name\": \"N. Ame\"}],\n \"owners\": [{\"full_name\": \"N. Ame\"}],\n \"submitter\": {\"full_name\": \"N. Ame\"},\n \"paper_id\": \"1234.56789\",\n \"title\": \"some title\",\n \"abstract\": \"An abstract with math $/alpha * /alpha$ for you.\",\n }",
"def test_model(base, fake_session):\n\n # Make a dummy model\n\n # these fields should be ignored and should not appear in the model\n ignored = (\"field1\", \"field2\", \"field3\")\n\n # these fields are in the model, but should not get dumped to json\n loadonly = (\"field6\", \"field7\")\n\n @add_schema\n class MyModel(base):\n fields = dict(ignore=ignored, load_only=loadonly)\n\n # load the model from dummy data\n values = range(10)\n keys = [\"field{}\".format(x) for x in values]\n data = dict(zip(keys, values))\n m = MyModel.load_from(data, fake_session)\n\n return m, ignored, loadonly, data, MyModel",
"def mock_object(cls, profile=None):\n mo = ManagedObject()\n if profile:\n mo.profile = Profile.get_by_name(profile)\n mo.is_mock = True\n return mo",
"def test_default_manager(self):\n self.assertIsInstance(FlatPage._default_manager, UrlNodeManager)\n self.assertIsInstance(FlatPage.objects.all(), UrlNodeQuerySet)",
"def data_model(self) -> DataModel:\n return self._data_model",
"def setUpClass(self):\n\n base_model = BaseModel()",
"def setUp(self):\n self.base1 = BaseModel()",
"def test_dataloader(self) -> DataLoader:\n return self._custom_data_loader()",
"def mock_labware_data_provider(decoy: Decoy) -> LabwareDataProvider:\n return decoy.mock(cls=LabwareDataProvider)",
"def test_get_object(self, detail_view, employee_model):\n\n employee = Mock()\n employee_model.objects.get.return_value = Mock()\n detail_view.get_object.return_value = employee\n\n emp = detail_view.get_object(1)\n self.assertEqual(employee, emp)"
] | [
"0.63638645",
"0.62232745",
"0.5958567",
"0.5882889",
"0.5777589",
"0.57463723",
"0.5660761",
"0.5626389",
"0.5614345",
"0.5575118",
"0.55164546",
"0.5475766",
"0.54707235",
"0.54639786",
"0.54465616",
"0.5437837",
"0.5368954",
"0.53603864",
"0.5357883",
"0.5350542",
"0.5341743",
"0.5313242",
"0.5307788",
"0.5295979",
"0.52796817",
"0.5273456",
"0.52717143",
"0.5263591",
"0.5258852",
"0.52574456"
] | 0.6330217 | 1 |
Test for the general multi_active_parameter_manager class. | def test_multi_apm():
components_1 = {
"scale": mock_component(),
"decay": mock_component(),
"absorption": mock_component(),
}
components_2 = {"scale": mock_component(), "decay": mock_component()}
multi_apm = multi_active_parameter_manager(
ScalingTarget(),
[components_1, components_2],
[["scale", "decay"], ["scale"]],
active_parameter_manager,
)
# Test correct setup of apm_list attribute.
for apm in multi_apm.apm_list:
assert isinstance(apm, active_parameter_manager)
assert len(multi_apm.apm_list) == 2
assert multi_apm.components_list == ["scale", "decay", "scale"]
assert multi_apm.n_active_params == 3
assert multi_apm.apm_data[0] == {"start_idx": 0, "end_idx": 2}
assert multi_apm.apm_data[1] == {"start_idx": 2, "end_idx": 3}
# Test parameter selection.
multi_apm.set_param_vals(flex.double([3.0, 2.5, 2.0]))
assert multi_apm.get_param_vals() == flex.double([3.0, 2.5, 2.0])
assert multi_apm.select_parameters(0) == flex.double([3.0, 2.5])
assert multi_apm.select_parameters(1) == flex.double([2.0])
# Test setting parameter esds.
multi_apm.set_param_esds(flex.double([0.1, 0.2, 0.3]))
assert components_1["scale"].free_parameter_esds == flex.double([0.1])
assert components_1["decay"].free_parameter_esds == flex.double([0.2])
assert components_2["scale"].free_parameter_esds == flex.double([0.3])
# Test setting var_cov matrices for each component.
var_cov = flex.double([1.0, 0.5, 0.5, 0.5, 2.0, 0.5, 0.5, 0.5, 3.0])
var_cov.reshape(flex.grid(3, 3))
multi_apm.calculate_model_state_uncertainties(var_cov)
assert components_1["scale"].var_cov_matrix[0, 0] == 1.0
assert components_1["decay"].var_cov_matrix[0, 0] == 2.0
assert components_2["scale"].var_cov_matrix[0, 0] == 3.0 | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_scaling_active_parameter_manager():\n components_2 = {\"1\": mock_scaling_component(2), \"2\": mock_scaling_component(2)}\n scaling_apm = scaling_active_parameter_manager(components_2, [\"1\"])\n assert list(scaling_apm.constant_g_values[0]) == list(\n components_2[\"2\"].calculate_scales()\n )\n assert len(scaling_apm.constant_g_values) == 1\n assert scaling_apm.n_obs == [2]\n\n # Test that no constant_g_values if both components selected\n scaling_apm = scaling_active_parameter_manager(components_2, [\"1\", \"2\"])\n assert scaling_apm.constant_g_values is None\n\n # Check that one can't initialise with an unequal number of reflections,\n # either within the selection or overall.\n with pytest.raises(AssertionError):\n components_2 = {\"1\": mock_scaling_component(2), \"2\": mock_scaling_component(1)}\n scaling_apm = scaling_active_parameter_manager(components_2, [\"1\", \"2\"])\n with pytest.raises(AssertionError):\n components_2 = {\"1\": mock_scaling_component(2), \"2\": mock_scaling_component(1)}\n scaling_apm = scaling_active_parameter_manager(components_2, [\"1\"])\n\n data_manager = mock_data_manager(components_2)\n pmg = ScalingParameterManagerGenerator(\n [data_manager], target=ScalingTarget(), mode=\"concurrent\"\n )\n assert isinstance(pmg.apm_type, type(scaling_active_parameter_manager))",
"def test_general_apm():\n components = {\n \"scale\": mock_component(),\n \"decay\": mock_component(),\n \"absorption\": mock_component(),\n }\n\n apm = active_parameter_manager(components, [\"scale\", \"decay\"])\n assert \"decay\" in apm.components_list\n assert \"scale\" in apm.components_list\n assert \"absorption\" not in apm.components_list\n assert apm.n_active_params == (\n components[\"scale\"].n_params + components[\"decay\"].n_params\n )\n n_cumul = 0\n for component in apm.components:\n assert apm.components[component][\"n_params\"] == components[component].n_params\n assert apm.components[component][\"start_idx\"] == n_cumul\n assert (\n apm.components[component][\"end_idx\"]\n == n_cumul + apm.components[component][\"n_params\"]\n )\n n_cumul += apm.components[component][\"n_params\"]\n\n apm.set_param_vals(flex.double([2.0, 1.5]))\n assert apm.get_param_vals() == flex.double([2.0, 1.5])\n # Test params were updated in components\n assert list(components[\"scale\"].free_parameters) == [2.0]\n assert list(components[\"decay\"].free_parameters) == [1.5]\n # Test selection of parameters\n decay_params = apm.select_parameters(\"decay\")\n assert len(decay_params) == 1\n assert decay_params[0] == 1.5\n\n # Test calculate model state uncertainties\n var_cov = flex.double([1.0, 0.5, 0.5, 2.0])\n var_cov.reshape(flex.grid(2, 2))\n apm.calculate_model_state_uncertainties(var_cov)\n assert components[\"scale\"].var_cov_matrix[0, 0] == 1.0\n assert components[\"decay\"].var_cov_matrix[0, 0] == 2.0\n\n # Test set param esds.\n apm.set_param_esds(flex.double([0.1, 0.2]))\n assert components[\"scale\"].free_parameter_esds == flex.double([0.1])\n assert components[\"decay\"].free_parameter_esds == flex.double([0.2])",
"def test_options(self):\n for module in Parameters.__modules__:\n m = getattr(Parameters, module)\n if type(m) == AnyOf:\n for o in m.options:\n setattr(self.p, module, o)\n Parameters(1, **{module: o})",
"def test_multi(self):\n self.assertEqual(6, foo.multi(2, 3))",
"def test_class_callparams(self):\n\n @Configurable(\n conf=[\n Parameter('test0', value=True),\n Parameter('test1', value=False)\n ]\n )\n class Test(object):\n\n def __init__(self, test0=None):\n\n super(Test, self).__init__()\n\n self.test0 = test0\n\n test = Test()\n\n self.assertTrue(test.test0)\n self.assertFalse(test.test1)",
"def test_test_group_parameters(self):\n pass",
"def test_all_params(self):\n persistence_helper = PersistenceHelper(use_riak=True, is_sync=True)\n self.assertEqual(persistence_helper.use_riak, True)\n self.assertEqual(persistence_helper.is_sync, True)",
"def test_overridable_parameter() -> None:\n param_dict = ParamClass.get_overridable_parameters()\n assert \"name\" in param_dict\n assert \"flag\" in param_dict\n assert \"not_flag\" in param_dict\n assert \"seed\" in param_dict\n assert \"number\" in param_dict\n assert \"integers\" in param_dict\n assert \"optional_int\" in param_dict\n assert \"optional_float\" in param_dict\n assert \"tuple1\" in param_dict\n assert \"int_tuple\" in param_dict\n assert \"enum\" in param_dict\n assert \"readonly\" not in param_dict\n assert \"_non_override\" not in param_dict\n assert \"constant\" not in param_dict",
"def _validate_params(self, request_set, target_set=None, context=None):\n\n # Perform first-pass validation in Function.__init__():\n # - returns full set of params based on subclass paramClassDefaults\n super(Mechanism, self)._validate_params(request_set,target_set,context)\n\n params = target_set\n\n #region VALIDATE TIME SCALE\n try:\n param_value = params[TIME_SCALE]\n except KeyError:\n if COMMAND_LINE in context:\n pass\n else:\n self.timeScale = timeScaleSystemDefault\n else:\n if isinstance(param_value, TimeScale):\n self.timeScale = params[TIME_SCALE]\n else:\n if self.prefs.verbosePref:\n print(\"Value for {0} ({1}) param of {2} must be of type {3}; default will be used: {4}\".\n format(TIME_SCALE, param_value, self.name, type(TimeScale), timeScaleSystemDefault))\n #endregion\n\n #region VALIDATE INPUT STATE(S)\n\n # MODIFIED 6/10/16\n # FIX: SHOULD CHECK LENGTH OF INPUT_STATES PARAM (LIST OF NAMES OR SPECIFICATION DICT) AGAINST LENGTH OF\n # FIX: self.variable 2D ARRAY AND COMPARE variable SPECS, IF PROVIDED, WITH CORRESPONDING ELEMENTS OF\n # FIX: self.variable 2D ARRAY\n try:\n param_value = params[INPUT_STATES]\n\n except KeyError:\n if COMMAND_LINE in context:\n pass\n else:\n # INPUT_STATES not specified:\n # - set to None, so that it is set to default (self.variable) in instantiate_inputState\n # - if in VERBOSE mode, warn in instantiate_inputState, where default value is known\n params[INPUT_STATES] = None\n\n else:\n # INPUT_STATES is specified, so validate:\n # If it is a single item or a non-OrderedDict, place in a list (for use here and in instantiate_inputState)\n if not isinstance(param_value, (list, OrderedDict)):\n param_value = [param_value]\n # Validate each item in the list or OrderedDict\n # Note:\n # * number of inputStates is validated against length of the owner mechanism's execute method variable (EMV)\n # in instantiate_inputState, where an inputState is assigned to each item (value) of the EMV\n i = 0\n for key, item in param_value if isinstance(param_value, dict) else enumerate(param_value):\n from PsyNeuLink.Components.States.InputState import InputState\n # If not valid...\n if not ((isclass(item) and (issubclass(item, InputState) or # InputState class ref\n issubclass(item, Projection))) or # Project class ref\n isinstance(item, InputState) or # InputState object\n isinstance(item, dict) or # InputState specification dict\n isinstance(item, ParamValueProjection) or # ParamValueProjection tuple\n isinstance(item, str) or # Name (to be used as key in inputStates dict)\n iscompatible(item, **{kwCompatibilityNumeric: True})): # value\n # set to None, so it is set to default (self.variable) in instantiate_inputState\n param_value[key] = None\n if self.prefs.verbosePref:\n print(\"Item {0} of {1} param ({2}) in {3} is not a\"\n \" InputState, specification dict or value, nor a list of dict of them; \"\n \"variable ({4}) of execute method for {5} will be used\"\n \" to create a default outputState for {3}\".\n format(i,\n INPUT_STATES,\n param_value,\n self.__class__.__name__,\n self.variable,\n self.execute.__self__.name))\n i += 1\n params[INPUT_STATES] = param_value\n #endregion\n\n #region VALIDATE EXECUTE METHOD PARAMS\n try:\n function_param_specs = params[FUNCTION_PARAMS]\n except KeyError:\n if COMMAND_LINE in context:\n pass\n elif self.prefs.verbosePref:\n print(\"No params specified for {0}\".format(self.__class__.__name__))\n else:\n if not (isinstance(function_param_specs, dict)):\n raise MechanismError(\"{0} in {1} must be a dict of param 
specifications\".\n format(FUNCTION_PARAMS, self.__class__.__name__))\n # Validate params\n from PsyNeuLink.Components.States.ParameterState import ParameterState\n for param_name, param_value in function_param_specs.items():\n try:\n default_value = self.paramInstanceDefaults[FUNCTION_PARAMS][param_name]\n except KeyError:\n raise MechanismError(\"{0} not recognized as a param of execute method for {1}\".\n format(param_name, self.__class__.__name__))\n if not ((isclass(param_value) and\n (issubclass(param_value, ParameterState) or\n issubclass(param_value, Projection))) or\n isinstance(param_value, ParameterState) or\n isinstance(param_value, Projection) or\n isinstance(param_value, dict) or\n isinstance(param_value, ParamValueProjection) or\n iscompatible(param_value, default_value)):\n params[FUNCTION_PARAMS][param_name] = default_value\n if self.prefs.verbosePref:\n print(\"{0} param ({1}) for execute method {2} of {3} is not a ParameterState, \"\n \"projection, ParamValueProjection, or value; default value ({4}) will be used\".\n format(param_name,\n param_value,\n self.execute.__self__.componentName,\n self.__class__.__name__,\n default_value))\n #endregion\n # FIX: MAKE SURE OUTPUT OF EXECUTE FUNCTION / SELF.VALUE IS 2D ARRAY, WITH LENGTH == NUM OUTPUT STATES\n\n #region VALIDATE OUTPUT STATE(S)\n\n # FIX: MAKE SURE # OF OUTPUTS == LENGTH OF OUTPUT OF EXECUTE FUNCTION / SELF.VALUE\n try:\n param_value = params[OUTPUT_STATES]\n\n except KeyError:\n if COMMAND_LINE in context:\n pass\n else:\n # OUTPUT_STATES not specified:\n # - set to None, so that it is set to default (self.value) in instantiate_outputState\n # Notes:\n # * if in VERBOSE mode, warning will be issued in instantiate_outputState, where default value is known\n # * number of outputStates is validated against length of owner mechanism's execute method output (EMO)\n # in instantiate_outputState, where an outputState is assigned to each item (value) of the EMO\n params[OUTPUT_STATES] = None\n\n else:\n # OUTPUT_STATES is specified, so validate:\n # If it is a single item or a non-OrderedDict, place in a list (for use here and in instantiate_outputState)\n if not isinstance(param_value, (list, OrderedDict)):\n param_value = [param_value]\n # Validate each item in the list or OrderedDict\n i = 0\n for key, item in param_value if isinstance(param_value, dict) else enumerate(param_value):\n from PsyNeuLink.Components.States.OutputState import OutputState\n # If not valid...\n if not ((isclass(item) and issubclass(item, OutputState)) or # OutputState class ref\n isinstance(item, OutputState) or # OutputState object\n isinstance(item, dict) or # OutputState specification dict\n isinstance(item, str) or # Name (to be used as key in outputStates dict)\n iscompatible(item, **{kwCompatibilityNumeric: True})): # value\n # set to None, so it is set to default (self.value) in instantiate_outputState\n param_value[key] = None\n if self.prefs.verbosePref:\n print(\"Item {0} of {1} param ({2}) in {3} is not a\"\n \" OutputState, specification dict or value, nor a list of dict of them; \"\n \"output ({4}) of execute method for {5} will be used\"\n \" to create a default outputState for {3}\".\n format(i,\n OUTPUT_STATES,\n param_value,\n self.__class__.__name__,\n self.value,\n self.execute.__self__.name))\n i += 1\n params[OUTPUT_STATES] = param_value",
"def test_configure_to_reconfigure_param(self):\n\n class ToConfigure(object):\n \"\"\"Class to configure.\"\"\"\n\n def __init__(self):\n super(ToConfigure, self).__init__()\n self.test = None\n\n target = ToConfigure()\n\n param = 'test'\n\n conf = configuration(category('TEST', Parameter(param, value=True)))\n\n self.configurable.configure(conf=conf, targets=[target])\n self.assertTrue(target.test)",
"def check_params(self):\n raise NotImplementedError",
"def test_multi(self):\n self.assertEqual(6, multi(2, 3))",
"def test_ParameterManagerGenerator_concurrent():\n components_1 = {\n \"scale\": mock_component(),\n \"decay\": mock_component(),\n \"absorption\": mock_component(),\n }\n data_manager = mock_data_manager(components_1)\n\n pmg = ParameterManagerGenerator(\n [data_manager],\n apm_type=active_parameter_manager,\n target=ScalingTarget(),\n mode=\"concurrent\",\n )\n apms = pmg.parameter_managers()\n assert len(apms) == 1\n apm = apms[0]\n assert isinstance(apm, multi_active_parameter_manager)\n assert \"scale\" in apm.components_list\n assert \"decay\" in apm.components_list\n assert \"absorption\" in apm.components_list\n\n components_1 = {\n \"scale\": mock_component(),\n \"decay\": mock_component(),\n \"absorption\": mock_component(),\n }\n components_2 = {\"1\": mock_component(), \"2\": mock_component()}\n data_manager_1 = mock_data_manager(components_1)\n data_manager_2 = mock_data_manager(components_2)\n\n pmg = ParameterManagerGenerator(\n [data_manager_1, data_manager_2],\n apm_type=active_parameter_manager,\n target=ScalingTarget(),\n mode=\"concurrent\",\n )\n multi_apms = pmg.parameter_managers()\n assert len(multi_apms) == 1\n multi_apm = multi_apms[0]\n assert isinstance(multi_apm, multi_active_parameter_manager)\n for apm in multi_apm.apm_list:\n assert isinstance(apm, active_parameter_manager)\n assert \"scale\" in multi_apm.apm_list[0].components_list\n assert \"decay\" in multi_apm.apm_list[0].components_list\n assert \"absorption\" in multi_apm.apm_list[0].components_list\n assert \"1\" in multi_apm.apm_list[1].components_list\n assert \"2\" in multi_apm.apm_list[1].components_list\n\n # now try fixing a component\n data_manager.fixed_components = [\"absorption\"]\n pmg = ParameterManagerGenerator(\n [data_manager],\n apm_type=active_parameter_manager,\n target=ScalingTarget(),\n mode=\"concurrent\",\n )\n apms = pmg.parameter_managers()\n assert len(apms) == 1\n apm = apms[0]\n assert isinstance(apm, multi_active_parameter_manager)\n assert \"scale\" in apm.components_list\n assert \"decay\" in apm.components_list\n assert \"absorption\" not in apm.components_list",
"def test_verify_set_multi(self):\n self._verify([self.applied_commands['setm']])",
"def test_get_mt_settings(self):\n pass",
"def test_set_params():\n\n tpot_obj = TPOTClassifier()\n assert tpot_obj.set_params() is tpot_obj",
"def Check(self, parameters):",
"def test_checkCustoms(self):\n self.failUnlessEqual(self.nice.opts['myflag'], \"PONY!\")\n self.failUnlessEqual(self.nice.opts['myparam'], \"Tofu WITH A PONY!\")",
"def test_subsystems(self):\n pass",
"def test_set_params_2():\n tpot_obj = TPOTClassifier(generations=2)\n tpot_obj.set_params(generations=3)\n\n assert tpot_obj.generations == 3",
"def test_parameters(self):\n self.assert_initialize_driver()\n #reply = self.driver_client.cmd_dvr('get_resource', Parameter.ALL)\n #self.assert_driver_parameters(reply, verify_sample_interval=True)",
"def test_provider(self):\n msg = 'Wrong number of processing algorithm loaded.'\n self.assertEqual(len(self.provider.alglist), 6, msg)\n\n msg = 'InaSAFE should be activated by default in Processing.'\n self.assertEqual(self.provider.activate, True, msg)\n\n msg = 'Wrong processing provide.'\n for algorithm in self.provider.alglist:\n self.assertEqual(algorithm.provider, self.provider, msg)",
"def test_create_hyperflex_feature_limit_internal(self):\n pass",
"def test_intent_classifier_set_params(self):\n pass",
"def supported_parameters(cls):\n raise NotImplementedError()",
"def define_parameters(self):",
"def test_direct_access_params(self):\n self.assertTrue(self.driver._protocol._param_dict.get(\"foo\"), 10)\n self.assertTrue(self.driver._protocol._param_dict.get(\"bar\"), 15)\n # use real driver sets here, the direct poke of the param dict is just\n # a test-with-base-class thing\n self.driver._protocol._param_dict.update(\"bar=20\")\n self.assertTrue(self.driver._protocol._param_dict.get(\"bar\"), 20)\n\n # pretend to go into direct access mode,\n running_config = self.driver._protocol.get_cached_config()\n # make some changes to both, (foo to 100, bar to 200)\n self.driver._protocol._param_dict.update(\"foo=100\")\n self.driver._protocol._param_dict.update(\"bar=200\")\n # its like we came out of DA mode\n self.driver.restore_direct_access_params(running_config)\n\n # confirm that the default values were set back appropriately.\n self.assertTrue(self.driver._protocol._param_dict.get(\"foo\"), 10)\n self.assertTrue(self.driver._protocol._param_dict.get(\"bar\"), 200)",
"def test_change_param(self):\n test_adc = ads1115_single(assume_defaults)\n\n # Check to make sure an assertion is thrown if a position beyond 3 is\n # chosen.\n try:\n test_adc.change_param(4, 0)\n # If the assertion error hasn't been thrown, fail the test.\n self.fail()\n except AssertionError:\n # An assertion error is expected.\n pass\n\n # Reset the test_adc to make sure it's a clean slate after the expected\n # failure.\n test_adc = ads1115_single(assume_defaults)\n\n # Check to make sure assertion errors are still thrown for invalid\n # parameters. Only one check is done because test_device_creation\n # has already walked the check_params internal function to make sure\n # that all invalid parametes are failed.\n try:\n # Try changing address to an invalid value.\n test_adc.change_param(2, 0)\n # If the assertion error hasn't been thrown, fail the test.\n except AssertionError:\n # An assertion error is expected.\n pass\n\n # Reset the test_adc to make sure it's a clean slate after the expected\n # failure.\n test_adc = ads1115_single(assume_defaults)\n\n # Check to make sure that all parameters can be changed when the change\n # is valid.\n\n # Set channel to 3.\n test_adc.change_param(0, 3)\n self.assertEqual(test_adc.parameters[0], 3)\n\n # Set gain to 4.\n test_adc.change_param(1, 4)\n self.assertEqual(test_adc.parameters[1], 4)\n\n # Set address to 0x49.\n test_adc.change_param(2, 0x49)\n self.assertEqual(test_adc.parameters[2], 0x49)\n\n # Set busnum to 0.\n try:\n test_adc.change_param(3, 0)\n # If the 0th I2C bus exists, then assert that the parameter has\n # changed.\n self.assertEqual(test_adc.parameters[3], 0)\n except IOError:\n # This is just because the current system does not have a 0th I2C\n # bus.\n pass",
"def test_act_on_settings(self):\n pass # TODO(tlarsen)",
"def test_act_on_settings(self):\n pass # TODO(tlarsen)"
] | [
"0.5979494",
"0.564233",
"0.56316334",
"0.56300414",
"0.55950177",
"0.55662066",
"0.5561874",
"0.5522249",
"0.55102235",
"0.5508482",
"0.5448773",
"0.54069316",
"0.5395452",
"0.53479075",
"0.5345451",
"0.532887",
"0.53067946",
"0.5266664",
"0.5219508",
"0.52126247",
"0.5177413",
"0.51731807",
"0.5144085",
"0.51240355",
"0.5108058",
"0.5104126",
"0.5083353",
"0.50732505",
"0.5071742",
"0.5071742"
] | 0.5946211 | 1 |
Test the apm factory for concurrent refinement. | def test_ParameterManagerGenerator_concurrent():
components_1 = {
"scale": mock_component(),
"decay": mock_component(),
"absorption": mock_component(),
}
data_manager = mock_data_manager(components_1)
pmg = ParameterManagerGenerator(
[data_manager],
apm_type=active_parameter_manager,
target=ScalingTarget(),
mode="concurrent",
)
apms = pmg.parameter_managers()
assert len(apms) == 1
apm = apms[0]
assert isinstance(apm, multi_active_parameter_manager)
assert "scale" in apm.components_list
assert "decay" in apm.components_list
assert "absorption" in apm.components_list
components_1 = {
"scale": mock_component(),
"decay": mock_component(),
"absorption": mock_component(),
}
components_2 = {"1": mock_component(), "2": mock_component()}
data_manager_1 = mock_data_manager(components_1)
data_manager_2 = mock_data_manager(components_2)
pmg = ParameterManagerGenerator(
[data_manager_1, data_manager_2],
apm_type=active_parameter_manager,
target=ScalingTarget(),
mode="concurrent",
)
multi_apms = pmg.parameter_managers()
assert len(multi_apms) == 1
multi_apm = multi_apms[0]
assert isinstance(multi_apm, multi_active_parameter_manager)
for apm in multi_apm.apm_list:
assert isinstance(apm, active_parameter_manager)
assert "scale" in multi_apm.apm_list[0].components_list
assert "decay" in multi_apm.apm_list[0].components_list
assert "absorption" in multi_apm.apm_list[0].components_list
assert "1" in multi_apm.apm_list[1].components_list
assert "2" in multi_apm.apm_list[1].components_list
# now try fixing a component
data_manager.fixed_components = ["absorption"]
pmg = ParameterManagerGenerator(
[data_manager],
apm_type=active_parameter_manager,
target=ScalingTarget(),
mode="concurrent",
)
apms = pmg.parameter_managers()
assert len(apms) == 1
apm = apms[0]
assert isinstance(apm, multi_active_parameter_manager)
assert "scale" in apm.components_list
assert "decay" in apm.components_list
assert "absorption" not in apm.components_list | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_add_multiple_pis_simultaneously_to_vpg_check_reallocation(self):\n proj_obj, fabric_obj, pr_objs = self._create_prerequisites(\n create_second_pr=True)\n test_id = self.id()\n VPG_CLASS = self._api_server.get_resource_class('virtual-port-group')\n org_process_ae_id = VPG_CLASS._process_ae_id\n\n class MockVpg(VPG_CLASS):\n org_process_ae_id = VPG_CLASS._process_ae_id\n HOLD_API = True\n @classmethod\n def mock_process_ae_id(cls, db_obj_dict, vpg_name, obj_dict=None):\n while cls.HOLD_API:\n print('sleeping for HOLD_API to clear for '\n 'args = %s' % obj_dict)\n gevent.sleep(0.5)\n return cls.org_process_ae_id(db_obj_dict, vpg_name, obj_dict)\n\n def process_ae_ids(x):\n return [int(i) for i in sorted(x) if i is not None]\n\n def get_zk_ae_ids(prs=None):\n prefix = os.path.join(\n self.__class__.__name__,\n 'id', 'aggregated-ethernet')\n zk_client = self._api_server._db_conn._zk_db._zk_client._zk_client\n if not prs:\n prs = [os.path.join(prefix, pr.name) for pr in pr_objs]\n else:\n if not isinstance(prs, list):\n prs = [prs]\n prs = [os.path.join(prefix, pr) for pr in prs]\n ae_ids = {}\n for pr in prs:\n pr_org = os.path.split(pr)[-1]\n ae_ids[pr_org] = zk_client.get_children(pr)\n return ae_ids\n\n pi_per_pr = 6\n pi_objs = {}\n pr1_pi_names = ['%s_pr1_pi%d' % (test_id, i) for\n i in range(1, pi_per_pr + 1)]\n pr2_pi_names = ['%s_pr2_pi%d' % (test_id, i) for\n i in range(1, pi_per_pr + 1)]\n pr1_pi_objs = self._create_pi_objects(pr_objs[0], pr1_pi_names)\n pr2_pi_objs = self._create_pi_objects(pr_objs[1], pr2_pi_names)\n pi_objs.update(pr1_pi_objs)\n pi_objs.update(pr2_pi_objs)\n\n # create a VPG\n vpg_count = 3\n vpg_names = ['vpg_%s_%s' % (test_id, i) for i in range(\n 1, vpg_count + 1)]\n vpg_objs = self._create_vpgs(fabric_obj, vpg_names)\n\n # record AE-IDs in ZK before creating any VPG\n ae_ids = [x for x in get_zk_ae_ids().values() if x]\n self.assertEqual(len(ae_ids), 0)\n\n def _attach_pi_simultaneously(vpg_obj, pi_uuids):\n # Attach PIs from PR1 to VPG-1\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n try:\n # mock _process_ae_id at VPG resource\n VPG_CLASS._process_ae_id = MockVpg.mock_process_ae_id\n MockVpg.HOLD_API = True\n for pi_uuid in pi_uuids:\n gevent.spawn(\n self.api.ref_update,\n \"virtual-port-group\",\n vpg_obj.uuid,\n \"physical-interface\",\n pi_uuid,\n None,\n \"ADD\",\n None)\n gevent.sleep(2)\n MockVpg.HOLD_API = False\n gevent.sleep(3)\n except gevent.timeout.Timeout:\n self.assertFalse(\n False,\n '%s failed unexpectedly' % VPG_CLASS._process_ae_id)\n finally:\n # reset mock to original\n VPG_CLASS._process_ae_id = org_process_ae_id\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_refs = vpg_obj.get_physical_interface_refs()\n return vpg_obj, pi_refs\n\n # Case 1\n # Attach 2 PIs from PR1 to VPG-1\n vpg_name = vpg_names[0]\n vpg_obj = vpg_objs[vpg_name]\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_uuids = [pi.uuid for pi in list(pr1_pi_objs.values())[0:2]]\n vpg_obj, pi_refs = _attach_pi_simultaneously(vpg_obj, pi_uuids)\n # verify PI-refs are correct\n self.assertEqual(len(pi_refs), 2)\n vpg_ae_ids = {ref['href'].split('/')[-1]: ref['attr'].ae_num\n for ref in pi_refs}\n # verify all AE-IDs allocated per prouter are unique\n self.assertEqual(len(set(vpg_ae_ids.keys())), len(pi_refs))\n self.assertEqual(len(list(vpg_ae_ids.values())), 2)\n self.assertEqual(len(set(vpg_ae_ids.values())), 1)\n ae_id_sorted = process_ae_ids(vpg_ae_ids.values())\n self.assertEqual(ae_id_sorted, 
[0, 0])\n # verification at ZK for AE-IDs in Physical Routers\n pr_ae_ids = get_zk_ae_ids()\n self.assertEqual(len(pr_ae_ids[pr_objs[0].name]), 1)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[0].name]), [0])\n self.assertEqual(len(pr_ae_ids[pr_objs[1].name]), 0)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[1].name]), [])\n\n # Case 2\n # Attach 2 PIs from PR1 to VPG-2\n vpg_name = vpg_names[1]\n vpg_obj = vpg_objs[vpg_name]\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_uuids = [pi.uuid for pi in list(pr1_pi_objs.values())[2:4]]\n vpg_obj, pi_refs = _attach_pi_simultaneously(vpg_obj, pi_uuids)\n # verify PI-refs are correct\n self.assertEqual(len(pi_refs), 2)\n vpg_ae_ids = {ref['href'].split('/')[-1]: ref['attr'].ae_num\n for ref in pi_refs}\n # verify all AE-IDs allocated per prouter are unique\n self.assertEqual(len(set(vpg_ae_ids.keys())), len(pi_refs))\n self.assertEqual(len(list(vpg_ae_ids.values())), 2)\n self.assertEqual(len(set(vpg_ae_ids.values())), 1)\n ae_id_sorted = process_ae_ids(vpg_ae_ids.values())\n self.assertEqual(ae_id_sorted, [1, 1])\n # verification at ZK for AE-IDs in Physical Routers\n pr_ae_ids = get_zk_ae_ids()\n self.assertEqual(len(pr_ae_ids[pr_objs[0].name]), 2)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[0].name]), [0, 1])\n self.assertEqual(len(pr_ae_ids[pr_objs[1].name]), 0)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[1].name]), [])\n\n # Case 3\n # Deattach 1 PIs from PR1 to VPG-1\n vpg_name = vpg_names[0]\n vpg_obj = vpg_objs[vpg_name]\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_obj = list(pr1_pi_objs.values())[0]\n pi_obj = self._vnc_lib.physical_interface_read(id=pi_obj.uuid)\n vpg_obj.del_physical_interface(pi_obj)\n self.api.virtual_port_group_update(vpg_obj)\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_refs = vpg_obj.get_physical_interface_refs()\n # verify PI-refs are correct\n self.assertEqual(len(pi_refs), 1)\n vpg_ae_ids = {ref['href'].split('/')[-1]: ref['attr'].ae_num\n for ref in pi_refs}\n # verify all AE-IDs allocated per prouter are unique\n self.assertEqual(len(set(vpg_ae_ids.keys())), len(pi_refs))\n self.assertIsNone(list(vpg_ae_ids.values())[0])\n ae_id_sorted = process_ae_ids(vpg_ae_ids.values())\n self.assertEqual(ae_id_sorted, [])\n # verification at ZK for AE-IDs in Physical Routers\n pr_ae_ids = get_zk_ae_ids()\n self.assertEqual(len(pr_ae_ids[pr_objs[0].name]), 1)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[0].name]), [1])\n self.assertEqual(len(pr_ae_ids[pr_objs[1].name]), 0)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[1].name]), [])\n\n # Case 4\n # Attach 2 PIs from PR1 to VPG-3\n vpg_name = vpg_names[2]\n vpg_obj = vpg_objs[vpg_name]\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_uuids = [pi.uuid for pi in list(pr1_pi_objs.values())[4:6]]\n vpg_obj, pi_refs = _attach_pi_simultaneously(vpg_obj, pi_uuids)\n # verify PI-refs are correct\n self.assertEqual(len(pi_refs), 2)\n vpg_ae_ids = {ref['href'].split('/')[-1]: ref['attr'].ae_num\n for ref in pi_refs}\n # verify all AE-IDs allocated per prouter are unique\n self.assertEqual(len(set(vpg_ae_ids.keys())), len(pi_refs))\n self.assertEqual(len(list(vpg_ae_ids.values())), 2)\n self.assertEqual(len(set(vpg_ae_ids.values())), 1)\n ae_id_sorted = process_ae_ids(vpg_ae_ids.values())\n self.assertEqual(ae_id_sorted, [0, 0])\n # verification at ZK for AE-IDs in Physical Routers\n pr_ae_ids = get_zk_ae_ids()\n 
self.assertEqual(len(pr_ae_ids[pr_objs[0].name]), 2)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[0].name]), [0, 1])\n self.assertEqual(len(pr_ae_ids[pr_objs[1].name]), 0)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[1].name]), [])\n\n # Case 5\n # Attach 1 PIs from PR1 to VPG-1\n vpg_name = vpg_names[0]\n vpg_obj = vpg_objs[vpg_name]\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_obj = list(pr1_pi_objs.values())[0]\n pi_obj = self._vnc_lib.physical_interface_read(id=pi_obj.uuid)\n vpg_obj.add_physical_interface(pi_obj)\n self._vnc_lib.virtual_port_group_update(vpg_obj)\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_refs = vpg_obj.get_physical_interface_refs()\n # verify PI-refs are correct\n self.assertEqual(len(pi_refs), 2)\n vpg_ae_ids = {ref['href'].split('/')[-1]: ref['attr'].ae_num\n for ref in pi_refs}\n # verify all AE-IDs allocated per prouter are unique\n self.assertEqual(len(set(vpg_ae_ids.keys())), len(pi_refs))\n self.assertEqual(len(list(vpg_ae_ids.values())), 2)\n self.assertEqual(len(set(vpg_ae_ids.values())), 1)\n ae_id_sorted = process_ae_ids(vpg_ae_ids.values())\n self.assertEqual(ae_id_sorted, [2, 2])\n # verification at ZK for AE-IDs in Physical Routers\n pr_ae_ids = get_zk_ae_ids()\n self.assertEqual(len(pr_ae_ids[pr_objs[0].name]), 3)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[0].name]), [0, 1, 2])\n self.assertEqual(len(pr_ae_ids[pr_objs[1].name]), 0)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[1].name]), [])",
"def test_multiple_factories(self, mocker):\n sdk_ready_flag = threading.Event()\n\n def _init(self, ready_flag, some, auth_api, streaming_enabled, telemetry_runtime_producer, telemetry_init_consumer, sse_url=None):\n self._ready_flag = ready_flag\n self._synchronizer = mocker.Mock(spec=Synchronizer)\n self._streaming_enabled = False\n self._telemetry_runtime_producer = telemetry_runtime_producer\n self._telemetry_init_consumer = telemetry_init_consumer\n mocker.patch('splitio.sync.manager.Manager.__init__', new=_init)\n\n def _start(self, *args, **kwargs):\n sdk_ready_flag.set()\n mocker.patch('splitio.sync.manager.Manager.start', new=_start)\n\n def _stop(self, *args, **kwargs):\n pass\n mocker.patch('splitio.sync.manager.Manager.stop', new=_stop)\n\n mockManager = Manager(sdk_ready_flag, mocker.Mock(), mocker.Mock(), False, mocker.Mock(), mocker.Mock())\n\n def _make_factory_with_apikey(apikey, *_, **__):\n return SplitFactory(apikey, {}, True, mocker.Mock(spec=ImpressionsManager), mockManager, mocker.Mock(), mocker.Mock(), mocker.Mock())\n\n factory_module_logger = mocker.Mock()\n build_in_memory = mocker.Mock()\n build_in_memory.side_effect = _make_factory_with_apikey\n build_redis = mocker.Mock()\n build_redis.side_effect = _make_factory_with_apikey\n build_localhost = mocker.Mock()\n build_localhost.side_effect = _make_factory_with_apikey\n mocker.patch('splitio.client.factory._LOGGER', new=factory_module_logger)\n mocker.patch('splitio.client.factory._build_in_memory_factory', new=build_in_memory)\n mocker.patch('splitio.client.factory._build_redis_factory', new=build_redis)\n mocker.patch('splitio.client.factory._build_localhost_factory', new=build_localhost)\n\n _INSTANTIATED_FACTORIES.clear() # Clear all factory counters for testing purposes\n\n factory1 = get_factory('some_api_key')\n assert _INSTANTIATED_FACTORIES['some_api_key'] == 1\n assert factory_module_logger.warning.mock_calls == []\n\n factory2 = get_factory('some_api_key')\n assert _INSTANTIATED_FACTORIES['some_api_key'] == 2\n assert factory_module_logger.warning.mock_calls == [mocker.call(\n \"factory instantiation: You already have %d %s with this SDK Key. \"\n \"We recommend keeping only one instance of the factory at all times \"\n \"(Singleton pattern) and reusing it throughout your application.\",\n 1,\n 'factory'\n )]\n\n factory_module_logger.reset_mock()\n factory3 = get_factory('some_api_key')\n assert _INSTANTIATED_FACTORIES['some_api_key'] == 3\n assert factory_module_logger.warning.mock_calls == [mocker.call(\n \"factory instantiation: You already have %d %s with this SDK Key. \"\n \"We recommend keeping only one instance of the factory at all times \"\n \"(Singleton pattern) and reusing it throughout your application.\",\n 2,\n 'factories'\n )]\n\n factory_module_logger.reset_mock()\n factory4 = get_factory('some_other_api_key')\n assert _INSTANTIATED_FACTORIES['some_api_key'] == 3\n assert _INSTANTIATED_FACTORIES['some_other_api_key'] == 1\n assert factory_module_logger.warning.mock_calls == [mocker.call(\n \"factory instantiation: You already have an instance of the Split factory. \"\n \"Make sure you definitely want this additional instance. 
\"\n \"We recommend keeping only one instance of the factory at all times \"\n \"(Singleton pattern) and reusing it throughout your application.\"\n )]\n\n event = threading.Event()\n factory1.destroy(event)\n event.wait()\n assert _INSTANTIATED_FACTORIES['some_other_api_key'] == 1\n assert _INSTANTIATED_FACTORIES['some_api_key'] == 2\n factory2.destroy()\n factory3.destroy()\n factory4.destroy()",
"def test_add_multiple_pis_simultaneously_to_vpg_check_deallocation(self):\n proj_obj, fabric_obj, pr_objs = self._create_prerequisites(\n create_second_pr=True)\n test_id = self.id()\n VPG_CLASS = self._api_server.get_resource_class('virtual-port-group')\n org_process_ae_id = VPG_CLASS._process_ae_id\n\n class MockVpg(VPG_CLASS):\n org_process_ae_id = VPG_CLASS._process_ae_id\n HOLD_API = True\n @classmethod\n def mock_process_ae_id(cls, db_obj_dict, vpg_name, obj_dict=None):\n while cls.HOLD_API:\n print('sleeping for HOLD_API to clear for '\n 'args = %s' % obj_dict)\n gevent.sleep(0.5)\n return cls.org_process_ae_id(db_obj_dict, vpg_name, obj_dict)\n\n def process_ae_ids(x):\n return [int(i) for i in sorted(x) if i is not None]\n\n def get_zk_ae_ids(prs=None):\n prefix = os.path.join(\n self.__class__.__name__,\n 'id', 'aggregated-ethernet')\n zk_client = self._api_server._db_conn._zk_db._zk_client._zk_client\n if not prs:\n prs = [os.path.join(prefix, pr.name) for pr in pr_objs]\n else:\n if not isinstance(prs, list):\n prs = [prs]\n prs = [os.path.join(prefix, pr) for pr in prs]\n ae_ids = {}\n for pr in prs:\n pr_org = os.path.split(pr)[-1]\n ae_ids[pr_org] = zk_client.get_children(pr)\n return ae_ids\n\n pi_per_pr = 1\n pi_objs = {}\n pr1_pi_names = ['%s_pr1_pi%d' % (test_id, i) for\n i in range(1, pi_per_pr + 1)]\n pr2_pi_names = ['%s_pr2_pi%d' % (test_id, i) for\n i in range(1, pi_per_pr + 1)]\n pr1_pi_objs = self._create_pi_objects(pr_objs[0], pr1_pi_names)\n pr2_pi_objs = self._create_pi_objects(pr_objs[1], pr2_pi_names)\n pi_objs.update(pr1_pi_objs)\n pi_objs.update(pr2_pi_objs)\n\n # create a VPG\n vpg_count = 1\n vpg_names = ['vpg_%s_%s' % (test_id, i) for i in range(\n 1, vpg_count + 1)]\n vpg_objs = self._create_vpgs(fabric_obj, vpg_names)\n\n # record AE-IDs in ZK before creating any VPG\n ae_ids = [x for x in get_zk_ae_ids().values() if x]\n self.assertEqual(len(ae_ids), 0)\n\n def _attach_pi_simultaneously(vpg_obj, pi_uuids):\n # Attach PIs from PR1 to VPG-1\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n try:\n # mock _process_ae_id at VPG resource\n VPG_CLASS._process_ae_id = MockVpg.mock_process_ae_id\n MockVpg.HOLD_API = True\n for pi_uuid in pi_uuids:\n gevent.spawn(\n self.api.ref_update,\n \"virtual-port-group\",\n vpg_obj.uuid,\n \"physical-interface\",\n pi_uuid,\n None,\n \"ADD\",\n None)\n gevent.sleep(2)\n MockVpg.HOLD_API = False\n gevent.sleep(3)\n except gevent.timeout.Timeout:\n self.assertFalse(\n False,\n '%s failed unexpectedly' % VPG_CLASS._process_ae_id)\n finally:\n # reset mock to original\n VPG_CLASS._process_ae_id = org_process_ae_id\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_refs = vpg_obj.get_physical_interface_refs()\n return vpg_obj, pi_refs\n\n # Case 1\n # Attach 2 PIs from PR1 to VPG-1\n vpg_name = vpg_names[0]\n vpg_obj = vpg_objs[vpg_name]\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pr1_pi_uuids = list(pr1_pi_objs.values())[0].uuid\n pr2_pi_uuids = list(pr2_pi_objs.values())[0].uuid\n pi_uuids = [pr1_pi_uuids, pr2_pi_uuids]\n vpg_obj, pi_refs = _attach_pi_simultaneously(vpg_obj, pi_uuids)\n # verify PI-refs are correct\n self.assertEqual(len(pi_refs), 2)\n vpg_ae_ids = {ref['href'].split('/')[-1]: ref['attr'].ae_num\n for ref in pi_refs}\n # verify all AE-IDs allocated per prouter are unique\n self.assertEqual(len(set(vpg_ae_ids.keys())), len(pi_refs))\n self.assertEqual(len(list(vpg_ae_ids.values())), 2)\n self.assertEqual(len(set(vpg_ae_ids.values())), 1)\n 
ae_id_sorted = process_ae_ids(vpg_ae_ids.values())\n self.assertEqual(ae_id_sorted, [0, 0])\n # verification at ZK for AE-IDs in Physical Routers\n pr_ae_ids = get_zk_ae_ids()\n self.assertEqual(len(pr_ae_ids[pr_objs[0].name]), 1)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[0].name]), [0])\n self.assertEqual(len(pr_ae_ids[pr_objs[1].name]), 1)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[1].name]), [0])\n\n # Case 2\n # Deattach 1 PIs from PR1 to VPG-1\n vpg_name = vpg_names[0]\n vpg_obj = vpg_objs[vpg_name]\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_obj = list(pr1_pi_objs.values())[0]\n pi_obj = self._vnc_lib.physical_interface_read(id=pi_obj.uuid)\n vpg_obj.del_physical_interface(pi_obj)\n self.api.virtual_port_group_update(vpg_obj)\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_refs = vpg_obj.get_physical_interface_refs()\n # verify PI-refs are correct\n self.assertEqual(len(pi_refs), 1)\n vpg_ae_ids = {ref['href'].split('/')[-1]: ref['attr'].ae_num\n for ref in pi_refs}\n # verify all AE-IDs allocated per prouter are unique\n self.assertEqual(len(set(vpg_ae_ids.keys())), len(pi_refs))\n self.assertIsNone(list(vpg_ae_ids.values())[0])\n ae_id_sorted = process_ae_ids(vpg_ae_ids.values())\n self.assertEqual(ae_id_sorted, [])\n # verification at ZK for AE-IDs in Physical Routers\n pr_ae_ids = get_zk_ae_ids()\n self.assertEqual(len(pr_ae_ids[pr_objs[0].name]), 0)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[0].name]), [])\n self.assertEqual(len(pr_ae_ids[pr_objs[1].name]), 0)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[1].name]), [])",
"def test_multi_apm():\n\n components_1 = {\n \"scale\": mock_component(),\n \"decay\": mock_component(),\n \"absorption\": mock_component(),\n }\n components_2 = {\"scale\": mock_component(), \"decay\": mock_component()}\n\n multi_apm = multi_active_parameter_manager(\n ScalingTarget(),\n [components_1, components_2],\n [[\"scale\", \"decay\"], [\"scale\"]],\n active_parameter_manager,\n )\n\n # Test correct setup of apm_list attribute.\n for apm in multi_apm.apm_list:\n assert isinstance(apm, active_parameter_manager)\n assert len(multi_apm.apm_list) == 2\n assert multi_apm.components_list == [\"scale\", \"decay\", \"scale\"]\n assert multi_apm.n_active_params == 3\n assert multi_apm.apm_data[0] == {\"start_idx\": 0, \"end_idx\": 2}\n assert multi_apm.apm_data[1] == {\"start_idx\": 2, \"end_idx\": 3}\n\n # Test parameter selection.\n multi_apm.set_param_vals(flex.double([3.0, 2.5, 2.0]))\n assert multi_apm.get_param_vals() == flex.double([3.0, 2.5, 2.0])\n assert multi_apm.select_parameters(0) == flex.double([3.0, 2.5])\n assert multi_apm.select_parameters(1) == flex.double([2.0])\n\n # Test setting parameter esds.\n multi_apm.set_param_esds(flex.double([0.1, 0.2, 0.3]))\n assert components_1[\"scale\"].free_parameter_esds == flex.double([0.1])\n assert components_1[\"decay\"].free_parameter_esds == flex.double([0.2])\n assert components_2[\"scale\"].free_parameter_esds == flex.double([0.3])\n\n # Test setting var_cov matrices for each component.\n var_cov = flex.double([1.0, 0.5, 0.5, 0.5, 2.0, 0.5, 0.5, 0.5, 3.0])\n var_cov.reshape(flex.grid(3, 3))\n multi_apm.calculate_model_state_uncertainties(var_cov)\n assert components_1[\"scale\"].var_cov_matrix[0, 0] == 1.0\n assert components_1[\"decay\"].var_cov_matrix[0, 0] == 2.0\n assert components_2[\"scale\"].var_cov_matrix[0, 0] == 3.0",
"def acquire(ABC) -> bool:",
"def test_run_alpha_rarefaction_parallel(self):\r\n\r\n run_alpha_rarefaction(\r\n self.test_data['biom'][0],\r\n self.test_data['map'][0],\r\n self.test_out,\r\n call_commands_serially,\r\n self.params,\r\n self.qiime_config,\r\n tree_fp=self.test_data['tree'][0],\r\n num_steps=5,\r\n parallel=True,\r\n min_rare_depth=3,\r\n max_rare_depth=18,\r\n status_update_callback=no_status_updates)\r\n\r\n html_fp = join(self.test_out, 'alpha_rarefaction_plots',\r\n 'rarefaction_plots.html')\r\n pd_averages_fp = join(self.test_out, 'alpha_rarefaction_plots',\r\n 'average_tables', 'PD_whole_treeSampleType.txt')\r\n pd_collated_fp = join(self.test_out, 'alpha_div_collated',\r\n 'PD_whole_tree.txt')\r\n\r\n # Confirm that palm and gut alpha diversities are different,\r\n # and suggestive of statistical significance (we only have a\r\n # few sequences, so we don't get significant results)\r\n ttest_res, alpha_avg = compare_alpha_diversities(open(pd_collated_fp),\r\n open(\r\n self.test_data[\r\n 'map'][0]),\r\n 'SampleType',\r\n 18,\r\n test_type='parametric')\r\n feces_palm_t = ttest_res[('feces', 'L_palm')][0]\r\n self.assertTrue(feces_palm_t < 0,\r\n \"t-statistic too high: %1.3f, but should be less than 0\"\r\n % feces_palm_t)\r\n\r\n # check that final output files have non-zero size\r\n self.assertTrue(getsize(html_fp) > 0)\r\n\r\n # Check that the log file is created and has size > 0\r\n log_fp = glob(join(self.test_out, 'log*.txt'))[0]\r\n self.assertTrue(getsize(log_fp) > 0)",
"def test_general_apm():\n components = {\n \"scale\": mock_component(),\n \"decay\": mock_component(),\n \"absorption\": mock_component(),\n }\n\n apm = active_parameter_manager(components, [\"scale\", \"decay\"])\n assert \"decay\" in apm.components_list\n assert \"scale\" in apm.components_list\n assert \"absorption\" not in apm.components_list\n assert apm.n_active_params == (\n components[\"scale\"].n_params + components[\"decay\"].n_params\n )\n n_cumul = 0\n for component in apm.components:\n assert apm.components[component][\"n_params\"] == components[component].n_params\n assert apm.components[component][\"start_idx\"] == n_cumul\n assert (\n apm.components[component][\"end_idx\"]\n == n_cumul + apm.components[component][\"n_params\"]\n )\n n_cumul += apm.components[component][\"n_params\"]\n\n apm.set_param_vals(flex.double([2.0, 1.5]))\n assert apm.get_param_vals() == flex.double([2.0, 1.5])\n # Test params were updated in components\n assert list(components[\"scale\"].free_parameters) == [2.0]\n assert list(components[\"decay\"].free_parameters) == [1.5]\n # Test selection of parameters\n decay_params = apm.select_parameters(\"decay\")\n assert len(decay_params) == 1\n assert decay_params[0] == 1.5\n\n # Test calculate model state uncertainties\n var_cov = flex.double([1.0, 0.5, 0.5, 2.0])\n var_cov.reshape(flex.grid(2, 2))\n apm.calculate_model_state_uncertainties(var_cov)\n assert components[\"scale\"].var_cov_matrix[0, 0] == 1.0\n assert components[\"decay\"].var_cov_matrix[0, 0] == 2.0\n\n # Test set param esds.\n apm.set_param_esds(flex.double([0.1, 0.2]))\n assert components[\"scale\"].free_parameter_esds == flex.double([0.1])\n assert components[\"decay\"].free_parameter_esds == flex.double([0.2])",
"def test_fleur_relax_continue_converged(self, run_with_cache, mock_code_factory):\n assert False",
"def test_ipam_services_update(self):\n pass",
"def testA_StraightThrough(self):\n # Do pre-submit job check\n nRunning = getCondorRunningJobs()\n self.assertEqual(nRunning, 0, \"User currently has %i running jobs. Test will not continue\" % (nRunning))\n\n myThread = threading.currentThread()\n workload = self.createTestWorkload()\n config = self.getConfig()\n\n\n name = 'WMAgent_Test1'\n site = self.sites[0]\n nSubs = 5\n nFiles = 10\n workloadPath = os.path.join(self.testDir, 'workloadTest',\n 'TestWorkload', 'WMSandbox',\n 'WMWorkload.pkl')\n\n # Create a collection of files\n self.createFileCollection(name = name, nSubs = nSubs,\n nFiles = nFiles,\n workflowURL = workloadPath,\n site = site)\n\n\n\n ############################################################\n # Test the JobCreator\n\n\n config.Agent.componentName = 'JobCreator'\n testJobCreator = JobCreatorPoller(config = config)\n\n testJobCreator.algorithm()\n time.sleep(5)\n\n\n # Did all jobs get created?\n getJobsAction = self.daoFactory(classname = \"Jobs.GetAllJobs\")\n result = getJobsAction.execute(state = 'Created', jobType = \"Processing\")\n self.assertEqual(len(result), nSubs*nFiles)\n\n\n # Count database objects\n result = myThread.dbi.processData('SELECT * FROM wmbs_sub_files_acquired')[0].fetchall()\n self.assertEqual(len(result), nSubs * nFiles)\n\n\n # Find the test directory\n testDirectory = os.path.join(self.testDir, 'TestWorkload', 'ReReco')\n self.assertTrue('JobCollection_1_0' in os.listdir(testDirectory))\n self.assertTrue(len(os.listdir(testDirectory)) <= 20)\n\n groupDirectory = os.path.join(testDirectory, 'JobCollection_1_0')\n\n # First job should be in here\n self.assertTrue('job_1' in os.listdir(groupDirectory))\n jobFile = os.path.join(groupDirectory, 'job_1', 'job.pkl')\n self.assertTrue(os.path.isfile(jobFile))\n with open(jobFile, 'rb') as f:\n job = pickle.load(f)\n\n\n self.assertEqual(job['workflow'], name)\n self.assertEqual(len(job['input_files']), 1)\n self.assertEqual(os.path.basename(job['sandbox']), 'TestWorkload-Sandbox.tar.bz2')\n\n\n\n\n\n\n\n\n\n\n ###############################################################\n # Now test the JobSubmitter\n\n config.Agent.componentName = 'JobSubmitter'\n testJobSubmitter = JobSubmitterPoller(config = config)\n\n\n testJobSubmitter.algorithm()\n\n\n # Check that jobs are in the right state\n result = getJobsAction.execute(state = 'Created', jobType = \"Processing\")\n self.assertEqual(len(result), 0)\n result = getJobsAction.execute(state = 'Executing', jobType = \"Processing\")\n self.assertEqual(len(result), nSubs * nFiles)\n\n\n\n # Check assigned locations\n getLocationAction = self.daoFactory(classname = \"Jobs.GetLocation\")\n for id in result:\n loc = getLocationAction.execute(jobid = id)\n self.assertEqual(loc, [[site]])\n\n\n # Check to make sure we have running jobs\n nRunning = getCondorRunningJobs()\n self.assertEqual(nRunning, nFiles * nSubs)\n\n\n #################################################################\n # Now the JobTracker\n\n\n config.Agent.componentName = 'JobTracker'\n testJobTracker = JobTrackerPoller(config = config)\n testJobTracker.setup()\n\n testJobTracker.algorithm()\n\n # Running the algo without removing the jobs should do nothing\n result = getJobsAction.execute(state = 'Executing', jobType = \"Processing\")\n self.assertEqual(len(result), nSubs * nFiles)\n\n\n condorRM()\n time.sleep(1)\n\n # All jobs gone?\n nRunning = getCondorRunningJobs()\n self.assertEqual(nRunning, 0)\n\n\n testJobTracker.algorithm()\n time.sleep(5)\n\n # Running the algo without 
removing the jobs should do nothing\n result = getJobsAction.execute(state = 'Executing', jobType = \"Processing\")\n self.assertEqual(len(result), 0)\n result = getJobsAction.execute(state = 'Complete', jobType = \"Processing\")\n self.assertEqual(len(result), nSubs * nFiles)\n\n\n\n\n #################################################################\n # Now the JobAccountant\n\n # First you need to load all jobs\n\n\n self.getFWJRAction = self.daoFactory(classname = \"Jobs.GetFWJRByState\")\n completeJobs = self.getFWJRAction.execute(state = \"complete\")\n\n\n # Create reports for all jobs\n self.createReports(jobs = completeJobs, retryCount = 0)\n\n\n\n\n\n\n config.Agent.componentName = 'JobAccountant'\n testJobAccountant = JobAccountantPoller(config = config)\n testJobAccountant.setup()\n\n\n # It should do something with the jobs\n testJobAccountant.algorithm()\n\n\n # All the jobs should be done now\n result = getJobsAction.execute(state = 'Complete', jobType = \"Processing\")\n self.assertEqual(len(result), 0)\n result = getJobsAction.execute(state = 'Success', jobType = \"Processing\")\n self.assertEqual(len(result), nSubs * nFiles)\n\n\n\n #######################################################################\n # Now the JobArchiver\n\n\n config.Agent.componentName = 'JobArchiver'\n testJobArchiver = JobArchiverPoller(config = config)\n\n\n testJobArchiver.algorithm()\n\n # All the jobs should be cleaned up\n result = getJobsAction.execute(state = 'Success', jobType = \"Processing\")\n self.assertEqual(len(result), 0)\n result = getJobsAction.execute(state = 'Cleanout', jobType = \"Processing\")\n self.assertEqual(len(result), nSubs * nFiles)\n\n\n logDir = os.path.join(self.testDir, 'logs')\n\n for job in completeJobs:\n self.assertFalse(os.path.exists(job['fwjr_path']))\n jobFolder = 'JobCluster_%i' \\\n % (int(job['id']/config.JobArchiver.numberOfJobsToCluster))\n jobPath = os.path.join(logDir, jobFolder, 'Job_%i.tar' %(job['id']))\n self.assertTrue(os.path.isfile(jobPath))\n self.assertTrue(os.path.getsize(jobPath) > 0)\n\n\n\n\n ###########################################################################\n # Now the TaskAchiver\n\n\n config.Agent.componentName = 'TaskArchiver'\n testTaskArchiver = TaskArchiverPoller(config = config)\n\n\n testTaskArchiver.algorithm()\n\n\n result = getJobsAction.execute(state = 'Cleanout', jobType = \"Processing\")\n self.assertEqual(len(result), 0)\n\n\n for jdict in completeJobs:\n job = Job(id = jdict['id'])\n self.assertFalse(job.exists())\n\n\n\n\n\n if os.path.isdir('testDir'):\n shutil.rmtree('testDir')\n shutil.copytree('%s' %self.testDir, os.path.join(os.getcwd(), 'testDir'))\n\n\n\n\n return",
"def test_active_inference_SPM_1b(self):",
"def test_parego(facade, make_scenario, configspace):\n N_TRIALS = 64\n RETRAIN_AFTER = 8\n\n scenario: Scenario = make_scenario(configspace, use_multi_objective=True, n_trials=N_TRIALS)\n multi_objective_algorithm = WrapStrategy(ParEGO, scenario=scenario)\n intensifier = Intensifier(scenario, max_config_calls=1, max_incumbents=10)\n config_selector = ConfigSelector(scenario, retrain_after=RETRAIN_AFTER)\n initial_design = RandomInitialDesign(scenario, n_configs=1)\n\n smac = facade(\n scenario=scenario,\n target_function=tae,\n multi_objective_algorithm=multi_objective_algorithm,\n intensifier=intensifier,\n config_selector=config_selector,\n initial_design=initial_design,\n overwrite=True,\n )\n incumbents = smac.optimize()\n\n sorted_incumbents = []\n for incumbent in incumbents:\n x, y = func(incumbent[\"x\"])\n sorted_incumbents.append((x, y))\n\n sorted_incumbents = sorted(sorted_incumbents, key=lambda x: x[0])\n previous_y = np.inf\n for x, y in sorted_incumbents:\n assert y <= previous_y\n previous_y = y\n\n # We expect N_TRIALS/RETRAIN_AFTER updates\n assert multi_objective_algorithm._n_calls_update_on_iteration_start == int(N_TRIALS / RETRAIN_AFTER)",
"def test_lama_job_runner():\n\n configs = registration_root.glob('*.toml')\n\n for cfg in configs:\n delete_previous_files()\n\n print(f\"\\n{'#'*8} Doing config {cfg.name} {'#'*8}\")\n\n lama_job_runner.lama_job_runner(cfg, wt_registration_dir, make_job_file=True, log_level=logging.ERROR)\n lama_job_runner.lama_job_runner(cfg, wt_registration_dir, log_level=logging.ERROR)\n\n lama_job_runner.lama_job_runner(cfg, mut_registration_dir, make_job_file=True, log_level=logging.ERROR)\n lama_job_runner.lama_job_runner(cfg, mut_registration_dir, log_level=logging.ERROR)\n # return # Just do the first",
"def test_active(self):\n nodes = [create_node(\"a\", \"service1\"),\n create_node(\"b\", \"service2\")]\n static = StaticRoutes(nodes).create(self.disco, self.runtime)\n self.runtime.dispatcher.startActor(static)\n self.runtime.dispatcher.pump()\n\n self.assertEqual(knownNodes(self.disco, \"service1\", \"sandbox\"), [nodes[0]])\n self.assertEqual(knownNodes(self.disco, \"service2\", \"sandbox\"), [nodes[1]])",
"def test_delete_pi_simultaneously_to_vpg_with_multiple_pi(self):\n proj_obj, fabric_obj, pr_objs = self._create_prerequisites(\n create_second_pr=True)\n test_id = self.id()\n VPG_CLASS = self._api_server.get_resource_class('virtual-port-group')\n org_process_ae_id = VPG_CLASS._process_ae_id\n\n class MockVpg(VPG_CLASS):\n org_process_ae_id = VPG_CLASS._process_ae_id\n HOLD_API = True\n @classmethod\n def mock_process_ae_id(cls, db_obj_dict, vpg_name, obj_dict=None):\n while cls.HOLD_API:\n print('sleeping for HOLD_API to clear for '\n 'args = %s' % obj_dict)\n gevent.sleep(0.5)\n return cls.org_process_ae_id(db_obj_dict, vpg_name, obj_dict)\n\n def process_ae_ids(x):\n return [int(i) for i in sorted(x) if i is not None]\n\n def get_zk_ae_ids(prs=None):\n prefix = os.path.join(\n self.__class__.__name__,\n 'id', 'aggregated-ethernet')\n zk_client = self._api_server._db_conn._zk_db._zk_client._zk_client\n if not prs:\n prs = [os.path.join(prefix, pr.name) for pr in pr_objs]\n else:\n if not isinstance(prs, list):\n prs = [prs]\n prs = [os.path.join(prefix, pr) for pr in prs]\n ae_ids = {}\n for pr in prs:\n pr_org = os.path.split(pr)[-1]\n ae_ids[pr_org] = zk_client.get_children(pr)\n return ae_ids\n\n pi_per_pr = 3\n pi_objs = {}\n pr1_pi_names = ['%s_pr1_pi%d' % (test_id, i) for\n i in range(1, pi_per_pr + 1)]\n pr2_pi_names = ['%s_pr2_pi%d' % (test_id, i) for\n i in range(1, pi_per_pr + 1)]\n pr1_pi_objs = self._create_pi_objects(pr_objs[0], pr1_pi_names)\n pr2_pi_objs = self._create_pi_objects(pr_objs[1], pr2_pi_names)\n pi_objs.update(pr1_pi_objs)\n pi_objs.update(pr2_pi_objs)\n\n # create a VPG\n vpg_count = 1\n vpg_names = ['vpg_%s_%s' % (test_id, i) for i in range(\n 1, vpg_count + 1)]\n vpg_objs = self._create_vpgs(fabric_obj, vpg_names)\n\n # record AE-IDs in ZK before creating any VPG\n ae_ids = [x for x in get_zk_ae_ids().values() if x]\n self.assertEqual(len(ae_ids), 0)\n\n def _attach_pi_simultaneously(\n vpg_obj, create_pi_uuids=None, delete_pi_uuids=None):\n if create_pi_uuids is None:\n create_pi_uuids = []\n if delete_pi_uuids is None:\n delete_pi_uuids = []\n # Attach PIs from PR1 to VPG-1\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n try:\n # mock _process_ae_id at VPG resource\n VPG_CLASS._process_ae_id = MockVpg.mock_process_ae_id\n # MockVpg.HOLD_API = True\n MockVpg.HOLD_API = False\n for pi_uuid in create_pi_uuids:\n gevent.spawn(\n self.api.ref_update,\n \"virtual-port-group\",\n vpg_obj.uuid,\n \"physical-interface\",\n pi_uuid,\n None,\n \"ADD\",\n None)\n for pi_uuid in delete_pi_uuids:\n gevent.spawn(\n self.api.ref_update,\n \"virtual-port-group\",\n vpg_obj.uuid,\n \"physical-interface\",\n pi_uuid,\n None,\n \"DELETE\",\n None)\n gevent.sleep(2)\n MockVpg.HOLD_API = False\n gevent.sleep(3)\n except gevent.timeout.Timeout:\n self.assertFalse(\n False,\n '%s failed unexpectedly' % VPG_CLASS._process_ae_id)\n finally:\n # reset mock to original\n VPG_CLASS._process_ae_id = org_process_ae_id\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_refs = vpg_obj.get_physical_interface_refs()\n return vpg_obj, pi_refs\n\n # Case 1\n # Attach 3 PIs/PR1 and 3 PIs/PR2 to VPG1\n vpg_name = vpg_names[0]\n vpg_obj = vpg_objs[vpg_name]\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pr1_pi_uuids = [pi_objs[pr1_pi_names[pi]].uuid for pi in range(3)]\n pr2_pi_uuids = [pi_objs[pr2_pi_names[pi]].uuid for pi in range(3)]\n pi_uuids = pr1_pi_uuids + pr2_pi_uuids\n vpg_obj, pi_refs = _attach_pi_simultaneously(vpg_obj, 
pi_uuids)\n # verify PI-refs are correct\n self.assertEqual(len(pi_refs), 6)\n vpg_ae_ids = {ref['href'].split('/')[-1]: ref['attr'].ae_num\n for ref in pi_refs}\n # verify all AE-IDs allocated per prouter are unique\n self.assertEqual(len(set(vpg_ae_ids.keys())), len(pi_refs))\n self.assertEqual(len(vpg_ae_ids.values()), 6)\n ae_id_sorted = process_ae_ids(vpg_ae_ids.values())\n self.assertEqual(ae_id_sorted, [0] * 6)\n # verification at Physical Routers\n pr_ae_ids = get_zk_ae_ids()\n self.assertEqual(len(pr_ae_ids[pr_objs[0].name]), 1)\n self.assertEqual(len(pr_ae_ids[pr_objs[1].name]), 1)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[0].name]), [0])\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[1].name]), [0])\n\n # Case 2\n # Deattach PI-1/PR-1, PI-1/PR-2 from VPG-1\n vpg_name = vpg_names[0]\n vpg_obj = vpg_objs[vpg_name]\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_uuids = [pr1_pi_uuids[0], pr2_pi_uuids[0]]\n vpg_obj, pi_refs = _attach_pi_simultaneously(\n vpg_obj, delete_pi_uuids=pi_uuids)\n # verify PI-refs are correct\n self.assertEqual(len(pi_refs), 4)\n vpg_ae_ids = {ref['href'].split('/')[-1]: ref['attr'].ae_num\n for ref in pi_refs}\n # verify all AE-IDs allocated per prouter are unique\n self.assertEqual(len(set(vpg_ae_ids.keys())), len(pi_refs))\n self.assertEqual(len(vpg_ae_ids.values()), 4)\n ae_id_sorted = process_ae_ids(vpg_ae_ids.values())\n self.assertEqual(ae_id_sorted, [0] * 4)\n # verification at Physical Routers\n pr_ae_ids = get_zk_ae_ids()\n self.assertEqual(len(pr_ae_ids[pr_objs[0].name]), 1)\n self.assertEqual(len(pr_ae_ids[pr_objs[1].name]), 1)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[0].name]), [0])\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[1].name]), [0])\n\n # Case 3\n # Deattach all PIs/PR-1. AE-IDs at PR-1 to be de-allocated\n vpg_name = vpg_names[0]\n vpg_obj = vpg_objs[vpg_name]\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_uuids = pr1_pi_uuids[1:3]\n vpg_obj, pi_refs = _attach_pi_simultaneously(\n vpg_obj, delete_pi_uuids=pi_uuids)\n # verify PI-refs are correct\n self.assertEqual(len(pi_refs), 2)\n vpg_ae_ids = {ref['href'].split('/')[-1]: ref['attr'].ae_num\n for ref in pi_refs}\n # verify all AE-IDs allocated per prouter are unique\n self.assertEqual(len(set(vpg_ae_ids.keys())), len(pi_refs))\n self.assertEqual(len(vpg_ae_ids.values()), 2)\n ae_id_sorted = process_ae_ids(vpg_ae_ids.values())\n self.assertEqual(ae_id_sorted, [0] * 2)\n # verification at Physical Routers\n pr_ae_ids = get_zk_ae_ids()\n self.assertEqual(len(pr_ae_ids[pr_objs[0].name]), 0)\n self.assertEqual(len(pr_ae_ids[pr_objs[1].name]), 1)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[0].name]), [])\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[1].name]), [0])",
"def test_raylet_infeasible_tasks(shutdown_only):\n addresses = ray.init(num_gpus=3)\n\n @ray.remote(num_gpus=5)\n class ActorRequiringGPU:\n def __init__(self):\n pass\n\n ActorRequiringGPU.remote()\n\n def test_infeasible_actor(ray_addresses):\n assert (wait_until_server_available(addresses[\"webui_url\"]) is True)\n webui_url = ray_addresses[\"webui_url\"].replace(\"localhost\",\n \"http://127.0.0.1\")\n raylet_info = requests.get(webui_url + \"/api/raylet_info\").json()\n actor_info = raylet_info[\"result\"][\"actors\"]\n assert len(actor_info) == 1\n\n _, infeasible_actor_info = actor_info.popitem()\n assert infeasible_actor_info[\"state\"] == -1\n assert infeasible_actor_info[\"invalidStateType\"] == \"infeasibleActor\"\n\n assert (wait_until_succeeded_without_exception(\n test_infeasible_actor,\n (AssertionError, requests.exceptions.ConnectionError),\n addresses,\n timeout_ms=30000,\n retry_interval_ms=1000) is True)",
"def test_add_multiple_pis_simultaneously_to_vpg_with_1_pi(self):\n proj_obj, fabric_obj, pr_objs = self._create_prerequisites(\n create_second_pr=True)\n test_id = self.id()\n VPG_CLASS = self._api_server.get_resource_class('virtual-port-group')\n org_process_ae_id = VPG_CLASS._process_ae_id\n\n class MockVpg(VPG_CLASS):\n org_process_ae_id = VPG_CLASS._process_ae_id\n HOLD_API = True\n @classmethod\n def mock_process_ae_id(cls, db_obj_dict, vpg_name, obj_dict=None):\n while cls.HOLD_API:\n print('sleeping for HOLD_API to clear for '\n 'args = %s' % obj_dict)\n gevent.sleep(0.5)\n return cls.org_process_ae_id(db_obj_dict, vpg_name, obj_dict)\n\n def process_ae_ids(x):\n return [int(i) for i in sorted(x) if i is not None]\n\n def get_zk_ae_ids(prs=None):\n prefix = os.path.join(\n self.__class__.__name__,\n 'id', 'aggregated-ethernet')\n zk_client = self._api_server._db_conn._zk_db._zk_client._zk_client\n if not prs:\n prs = [os.path.join(prefix, pr.name) for pr in pr_objs]\n else:\n if not isinstance(prs, list):\n prs = [prs]\n prs = [os.path.join(prefix, pr) for pr in prs]\n ae_ids = {}\n for pr in prs:\n pr_org = os.path.split(pr)[-1]\n ae_ids[pr_org] = zk_client.get_children(pr)\n return ae_ids\n\n pi_per_pr = 150\n pi_objs = {}\n pr1_pi_names = ['%s_pr1_pi%d' % (test_id, i) for\n i in range(1, pi_per_pr + 1)]\n pr2_pi_names = ['%s_pr2_pi%d' % (test_id, i) for\n i in range(1, pi_per_pr + 1)]\n pr1_pi_objs = self._create_pi_objects(pr_objs[0], pr1_pi_names)\n pr2_pi_objs = self._create_pi_objects(pr_objs[1], pr2_pi_names)\n pi_objs.update(pr1_pi_objs)\n pi_objs.update(pr2_pi_objs)\n\n # create a VPG\n vpg_count = 1\n vpg_names = ['vpg_%s_%s' % (test_id, i) for i in range(\n 1, vpg_count + 1)]\n vpg_objs = self._create_vpgs(fabric_obj, vpg_names)\n\n # record AE-IDs in ZK before creating any VPG\n ae_ids = [x for x in get_zk_ae_ids().values() if x]\n self.assertEqual(len(ae_ids), 0)\n\n def _attach_pi_simultaneously(vpg_obj, pi_uuids):\n # Attach PIs from PR1 to VPG-1\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n try:\n # mock _process_ae_id at VPG resource\n VPG_CLASS._process_ae_id = MockVpg.mock_process_ae_id\n MockVpg.HOLD_API = True\n for pi_uuid in pi_uuids:\n gevent.spawn(\n self.api.ref_update,\n \"virtual-port-group\",\n vpg_obj.uuid,\n \"physical-interface\",\n pi_uuid,\n None,\n \"ADD\",\n None)\n gevent.sleep(2)\n MockVpg.HOLD_API = False\n gevent.sleep(6)\n except gevent.timeout.Timeout:\n self.assertFalse(\n False,\n '%s failed unexpectedly' % VPG_CLASS._process_ae_id)\n finally:\n # reset mock to original\n VPG_CLASS._process_ae_id = org_process_ae_id\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_refs = vpg_obj.get_physical_interface_refs()\n return vpg_obj, pi_refs\n\n # Case 1\n # Attach 1 PIs from PR1 to VPG-1\n vpg_name = vpg_names[0]\n vpg_obj = vpg_objs[vpg_name]\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_uuids = [pi_objs[pr1_pi_names[pi]].uuid for pi in range(1)]\n vpg_obj, pi_refs = _attach_pi_simultaneously(vpg_obj, pi_uuids)\n # verify PI-refs are correct\n self.assertEqual(len(pi_refs), 1)\n vpg_ae_ids = {ref['href'].split('/')[-1]: ref['attr'].ae_num\n for ref in pi_refs}\n # verify all AE-IDs allocated per prouter are unique\n self.assertEqual(len(set(vpg_ae_ids.keys())), len(pi_refs))\n self.assertIsNone(list(vpg_ae_ids.values())[0])\n ae_id_sorted = process_ae_ids(vpg_ae_ids.values())\n self.assertEqual(ae_id_sorted, [])\n # verification at ZK for AE-IDs in Physical Routers\n 
pr_ae_ids = get_zk_ae_ids()\n self.assertEqual(len(pr_ae_ids[pr_objs[0].name]), 0)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[0].name]), [])\n\n # Case 2\n # Attach rest of 149 PIs from PR1 to VPG-1\n vpg_name = vpg_names[0]\n vpg_obj = vpg_objs[vpg_name]\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_uuids = [pi_objs[pr1_pi_names[pi]].uuid for pi in range(1, 150)]\n vpg_obj, pi_refs = _attach_pi_simultaneously(vpg_obj, pi_uuids)\n # verify PI-refs are correct\n self.assertEqual(len(pi_refs), 150)\n vpg_ae_ids = {ref['href'].split('/')[-1]: ref['attr'].ae_num\n for ref in pi_refs}\n # verify all AE-IDs allocated per prouter are unique\n self.assertEqual(len(set(vpg_ae_ids.keys())), len(pi_refs))\n self.assertEqual(len(set(vpg_ae_ids.values())), 1)\n ae_id_sorted = process_ae_ids(vpg_ae_ids.values())\n self.assertEqual(ae_id_sorted, [0] * 150)\n # verification at ZK for AE-IDs in Physical Routers\n pr_ae_ids = get_zk_ae_ids()\n self.assertEqual(len(pr_ae_ids[pr_objs[0].name]), 1)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[0].name]), [0])",
"def __init__(self, asa_factory: AsaFactory):\n self.step_in_progress = False\n self.asa_factory = asa_factory",
"def test_ipam_services_partial_update(self):\n pass",
"def test_add_delete_a_pi_simultaneously_to_vpg_with_1_pi(self):\n proj_obj, fabric_obj, pr_objs = self._create_prerequisites(\n create_second_pr=True)\n test_id = self.id()\n VPG_CLASS = self._api_server.get_resource_class('virtual-port-group')\n org_process_ae_id = VPG_CLASS._process_ae_id\n\n class MockVpg(VPG_CLASS):\n org_process_ae_id = VPG_CLASS._process_ae_id\n HOLD_API = True\n @classmethod\n def mock_process_ae_id(cls, db_obj_dict, vpg_name, obj_dict=None):\n while cls.HOLD_API:\n print('sleeping for HOLD_API to clear for '\n 'args = %s' % obj_dict)\n gevent.sleep(0.5)\n return cls.org_process_ae_id(db_obj_dict, vpg_name, obj_dict)\n\n def process_ae_ids(x):\n return [int(i) for i in sorted(x) if i is not None]\n\n def get_zk_ae_ids(prs=None):\n prefix = os.path.join(\n self.__class__.__name__,\n 'id', 'aggregated-ethernet')\n zk_client = self._api_server._db_conn._zk_db._zk_client._zk_client\n if not prs:\n prs = [os.path.join(prefix, pr.name) for pr in pr_objs]\n else:\n if not isinstance(prs, list):\n prs = [prs]\n prs = [os.path.join(prefix, pr) for pr in prs]\n ae_ids = {}\n for pr in prs:\n pr_org = os.path.split(pr)[-1]\n ae_ids[pr_org] = zk_client.get_children(pr)\n return ae_ids\n\n pi_per_pr = 3\n pi_objs = {}\n pr1_pi_names = ['%s_pr1_pi%d' % (test_id, i) for\n i in range(1, pi_per_pr + 1)]\n pr1_pi_objs = self._create_pi_objects(pr_objs[0], pr1_pi_names)\n pi_objs.update(pr1_pi_objs)\n\n # create a VPG\n vpg_count = 1\n vpg_names = ['vpg_%s_%s' % (test_id, i) for i in range(\n 1, vpg_count + 1)]\n vpg_objs = self._create_vpgs(fabric_obj, vpg_names)\n\n # record AE-IDs in ZK before creating any VPG\n ae_ids = [x for x in get_zk_ae_ids().values() if x]\n self.assertEqual(len(ae_ids), 0)\n\n def _attach_pi_simultaneously(\n vpg_obj, create_pi_uuids, delete_pi_uuids=None):\n if delete_pi_uuids is None:\n delete_pi_uuids = []\n # Attach PIs from PR1 to VPG-1\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n try:\n # mock _process_ae_id at VPG resource\n VPG_CLASS._process_ae_id = MockVpg.mock_process_ae_id\n MockVpg.HOLD_API = True\n for pi_uuid in delete_pi_uuids:\n gevent.spawn(\n self.api.ref_update,\n \"virtual-port-group\",\n vpg_obj.uuid,\n \"physical-interface\",\n pi_uuid,\n None,\n \"DELETE\")\n for pi_uuid in create_pi_uuids:\n gevent.spawn(\n self.api.ref_update,\n \"virtual-port-group\",\n vpg_obj.uuid,\n \"physical-interface\",\n pi_uuid,\n None,\n \"ADD\",\n None)\n gevent.sleep(2)\n MockVpg.HOLD_API = False\n gevent.sleep(2)\n except gevent.timeout.Timeout:\n self.assertFalse(\n False,\n '%s failed unexpectedly' % VPG_CLASS._process_ae_id)\n finally:\n # reset mock to original\n VPG_CLASS._process_ae_id = org_process_ae_id\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_refs = vpg_obj.get_physical_interface_refs()\n return vpg_obj, pi_refs\n\n # Case 1\n # Attach PI-1/PR-1 to VPG-1\n vpg_name = vpg_names[0]\n vpg_obj = vpg_objs[vpg_name]\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_uuids = [list(pr1_pi_objs.values())[0].uuid]\n # vpg_obj, pi_refs = _attach_pi_simultaneously(vpg_obj, pi_uuids)\n pi_obj = self._vnc_lib.physical_interface_read(id=pi_uuids[0])\n vpg_obj.add_physical_interface(pi_obj)\n self.api.virtual_port_group_update(vpg_obj)\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_refs = vpg_obj.get_physical_interface_refs()\n # verify PI-refs are correct\n self.assertEqual(len(pi_refs), 1)\n vpg_ae_ids = {ref['href'].split('/')[-1]: ref['attr'].ae_num\n for 
ref in pi_refs}\n # verify all AE-IDs allocated per prouter are unique\n self.assertEqual(len(set(vpg_ae_ids.keys())), len(pi_refs))\n self.assertIsNone(list(vpg_ae_ids.values())[0])\n ae_id_sorted = process_ae_ids(vpg_ae_ids.values())\n self.assertEqual(ae_id_sorted, [])\n # verification at ZK for AE-IDs in Physical Routers\n pr_ae_ids = get_zk_ae_ids()\n self.assertEqual(len(pr_ae_ids[pr_objs[0].name]), 0)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[0].name]), [])\n\n # Case 2\n # Attach PI-2 from PR1 to VPG-1 and delete exiting PI-1/PR-1\n # simultaneously\n vpg_name = vpg_names[0]\n vpg_obj = vpg_objs[vpg_name]\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_refs = vpg_obj.get_physical_interface_refs()\n existing_pi_uuids = [ref['uuid'] for ref in pi_refs]\n pi_uuids = [list(pr1_pi_objs.values())[1].uuid]\n vpg_obj, pi_refs = _attach_pi_simultaneously(\n vpg_obj, pi_uuids, existing_pi_uuids)\n # verify PI-refs are correct\n self.assertEqual(len(pi_refs), 1)\n vpg_ae_ids = {ref['href'].split('/')[-1]: ref['attr'].ae_num\n for ref in pi_refs}\n # verify all AE-IDs allocated per prouter are unique\n self.assertEqual(len(set(vpg_ae_ids.keys())), len(pi_refs))\n self.assertIsNone(list(vpg_ae_ids.values())[0])\n ae_id_sorted = process_ae_ids(vpg_ae_ids.values())\n self.assertEqual(ae_id_sorted, [])\n # verification at ZK for AE-IDs in Physical Routers\n pr_ae_ids = get_zk_ae_ids()\n self.assertEqual(len(pr_ae_ids[pr_objs[0].name]), 0)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[0].name]), [])",
"def test_update_instances_schedule_state(self):\n pass",
"def test_active_inference_SPM_1a(self):\n array_path = os.path.join(os.getcwd(), DATA_PATH + \"vbx_test_1a.mat\")\n mat_contents = loadmat(file_name=array_path)\n\n A = mat_contents[\"A\"][0]\n B = mat_contents[\"B\"][0]\n C = to_arr_of_arr(mat_contents[\"C\"][0][0][:,0])\n obs_matlab = mat_contents[\"obs\"].astype(\"int64\")\n policy = mat_contents[\"policies\"].astype(\"int64\") - 1\n t_horizon = mat_contents[\"t_horizon\"][0, 0].astype(\"int64\")\n actions_matlab = mat_contents[\"actions\"].astype(\"int64\") - 1\n qs_matlab = mat_contents[\"qs\"][0]\n xn_matlab = mat_contents[\"xn\"][0]\n vn_matlab = mat_contents[\"vn\"][0]\n\n likelihoods_matlab = mat_contents[\"likelihoods\"][0]\n\n num_obs, num_states, _, num_factors = get_model_dimensions(A, B)\n obs = convert_observation_array(obs_matlab, num_obs)\n T = len(obs)\n\n agent = Agent(A=A, B=B, C=C, inference_algo=\"MMP\", policy_len=1, \n inference_horizon=t_horizon, use_BMA = False, \n policy_sep_prior = True)\n \n actions_python = np.zeros(T)\n\n for t in range(T):\n o_t = (np.where(obs[t])[0][0],)\n qx, xn_t, vn_t = agent.infer_states_test(o_t)\n q_pi, efe= agent.infer_policies()\n action = agent.sample_action()\n\n actions_python[t] = action\n\n xn_python = build_xn_vn_array(xn_t)\n vn_python = build_xn_vn_array(vn_t)\n\n if t == T-1:\n xn_python = xn_python[:,:,:-1,:]\n vn_python = vn_python[:,:,:-1,:]\n\n start_tstep = max(0, agent.curr_timestep - agent.inference_horizon)\n end_tstep = min(agent.curr_timestep + agent.policy_len, T)\n\n xn_validation = xn_matlab[0][:,:,start_tstep:end_tstep,t,:]\n vn_validation = vn_matlab[0][:,:,start_tstep:end_tstep,t,:]\n\n self.assertTrue(np.isclose(xn_python, xn_validation).all())\n self.assertTrue(np.isclose(vn_python, vn_validation).all())\n \n self.assertTrue(np.isclose(actions_matlab[0,:],actions_python[:-1]).all())",
"def test_ipam_services_create(self):\n pass",
"def test_ParameterManagerGenerator_consecutive():\n components_1 = {\n \"scale\": mock_component(),\n \"decay\": mock_component(),\n \"absorption\": mock_component(),\n }\n\n data_manager = mock_data_manager(components_1)\n data_manager.consecutive_refinement_order = [[\"scale\", \"decay\"], [\"absorption\"]]\n\n # Test single dataset case.\n pmg = ParameterManagerGenerator(\n [data_manager],\n apm_type=active_parameter_manager,\n target=ScalingTarget(),\n mode=\"consecutive\",\n )\n apms = list(pmg.parameter_managers())\n assert len(apms) == 2\n apm = apms[0]\n assert isinstance(apm, multi_active_parameter_manager)\n assert \"scale\" in apm.components_list\n assert \"decay\" in apm.components_list\n assert \"absorption\" not in apm.components_list\n apm = apms[1]\n assert isinstance(apm, multi_active_parameter_manager)\n assert \"scale\" not in apm.components_list\n assert \"decay\" not in apm.components_list\n assert \"absorption\" in apm.components_list\n\n # Test multi dataset case.\n components_2 = {\"1\": mock_component(), \"2\": mock_component()}\n data_manager_2 = mock_data_manager(components_2)\n data_manager_2.consecutive_refinement_order = [[\"1\"], [\"2\"]]\n\n pmg = ParameterManagerGenerator(\n [data_manager, data_manager_2],\n apm_type=active_parameter_manager,\n target=ScalingTarget(),\n mode=\"consecutive\",\n )\n apms = list(pmg.parameter_managers())\n assert len(apms) == 2\n multi_apm = apms[0]\n assert isinstance(multi_apm, multi_active_parameter_manager)\n apm_1 = multi_apm.apm_list[0]\n assert \"scale\" in apm_1.components_list\n assert \"decay\" in apm_1.components_list\n assert \"absorption\" not in apm_1.components_list\n assert multi_apm.apm_list[1].components_list == [\"1\"]\n multi_apm = apms[1]\n assert isinstance(multi_apm, multi_active_parameter_manager)\n assert multi_apm.apm_list[0].components_list == [\"absorption\"]\n assert multi_apm.apm_list[1].components_list == [\"2\"]\n\n # Test multi dataset case with different number of cycles for each data_manager.\n components_2 = {\"1\": mock_component()}\n data_manager_2 = mock_data_manager(components_2)\n data_manager_2.consecutive_refinement_order = [[\"1\"], [\"2\"]]\n pmg = ParameterManagerGenerator(\n [data_manager, data_manager_2],\n apm_type=active_parameter_manager,\n target=ScalingTarget(),\n mode=\"consecutive\",\n )\n assert pmg.param_lists[0] == [[\"scale\", \"decay\"], [\"absorption\"]]\n assert pmg.param_lists[1] == [[\"1\"]]\n apms = list(pmg.parameter_managers())\n assert len(apms) == 2\n multi_apm = apms[0]\n assert isinstance(multi_apm, multi_active_parameter_manager)\n apm_1 = multi_apm.apm_list[0]\n assert \"scale\" in apm_1.components_list\n assert \"decay\" in apm_1.components_list\n assert \"absorption\" not in apm_1.components_list\n assert multi_apm.apm_list[1].components_list == [\"1\"]\n multi_apm = apms[1]\n assert isinstance(multi_apm, multi_active_parameter_manager)\n assert multi_apm.apm_list[0].components_list == [\"absorption\"]\n # Only change relative to previous test case.\n assert multi_apm.apm_list[1].components_list == []\n\n # Test fixing the decay parameter.\n data_manager.fixed_components = [\"decay\"]\n pmg = ParameterManagerGenerator(\n [data_manager],\n apm_type=active_parameter_manager,\n target=ScalingTarget(),\n mode=\"consecutive\",\n )\n apms = list(pmg.parameter_managers())\n assert len(apms) == 2\n apm = apms[0]\n assert isinstance(apm, multi_active_parameter_manager)\n assert \"scale\" in apm.components_list\n assert \"decay\" not in apm.components_list\n 
assert \"absorption\" not in apm.components_list\n apm = apms[1]\n assert isinstance(apm, multi_active_parameter_manager)\n assert \"scale\" not in apm.components_list\n assert \"decay\" not in apm.components_list\n assert \"absorption\" in apm.components_list",
"def case_real_runs(\n automl: AutoML,\n make_ensemble_builder_manager: Callable[..., EnsembleBuilderManager],\n) -> EnsembleBuilderManager:\n manager = make_ensemble_builder_manager(\n backend=automl._backend,\n metric=automl._metrics[0],\n task=automl._task,\n dataset_name=automl._dataset_name,\n seed=automl._seed,\n logger_port=automl._logger_port,\n random_state=DEFAULT_SEED,\n )\n return manager",
"def test_destroy(self, mocker):\n def _split_task_init_mock(self, api, storage, period, event):\n self._task = mocker.Mock()\n self._api = api\n self._storage = storage\n self._period = period\n self._event = event\n event.set()\n mocker.patch('splitio.client.factory.SplitSynchronizationTask.__init__', new=_split_task_init_mock)\n\n def _segment_task_init_mock(self, api, storage, split_storage, period, event):\n self._task = mocker.Mock()\n self._worker_pool = mocker.Mock()\n self._api = api\n self._segment_storage = storage\n self._split_storage = split_storage\n self._period = period\n self._event = event\n event.set()\n mocker.patch('splitio.client.factory.SegmentSynchronizationTask.__init__', new=_segment_task_init_mock)\n\n imp_async_task_mock = mocker.Mock(spec=asynctask.AsyncTask)\n def _imppression_task_init_mock(self, api, storage, refresh_rate, bulk_size):\n self._logger = mocker.Mock()\n self._impressions_api = api\n self._storage = storage\n self._period = refresh_rate\n self._task = imp_async_task_mock\n self._failed = mocker.Mock()\n self._bulk_size = bulk_size\n mocker.patch('splitio.client.factory.ImpressionsSyncTask.__init__', new=_imppression_task_init_mock)\n\n evt_async_task_mock = mocker.Mock(spec=asynctask.AsyncTask)\n def _event_task_init_mock(self, api, storage, refresh_rate, bulk_size):\n self._logger = mocker.Mock()\n self._impressions_api = api\n self._storage = storage\n self._period = refresh_rate\n self._task = evt_async_task_mock\n self._failed = mocker.Mock()\n self._bulk_size = bulk_size\n mocker.patch('splitio.client.factory.EventsSyncTask.__init__', new=_event_task_init_mock)\n\n # Start factory and make assertions\n factory = get_factory('some_api_key')\n factory.block_until_ready()\n time.sleep(1) # give a chance for the bg thread to set the ready status\n assert factory.ready\n assert factory.destroyed is False\n\n factory.destroy()\n assert imp_async_task_mock.stop.mock_calls == [mocker.call(None)]\n assert evt_async_task_mock.stop.mock_calls == [mocker.call(None)]\n assert factory.destroyed is True",
"def test_concurrent_instances(self):\n cm = contextlib.ExitStack() # TODO: clean this up\n\n work_dir1 = Path(cm.enter_context(tempfile.TemporaryDirectory())) # TODO: make these delete only if no exception occured\n work_dir2 = Path(cm.enter_context(tempfile.TemporaryDirectory()))\n\n archive = RemotePrometheusArchive.for_tag('latest').download()\n prometheus1: PrometheusInstance = cm.enter_context(PrometheusInstance(archive, work_dir1))\n prometheus2: PrometheusInstance = cm.enter_context(PrometheusInstance(archive, work_dir2))\n\n prometheus1.start()\n\n with self.assertRaisesRegex(Exception, 'certificate verify failed'):\n prometheus2.start()\n\n\n cm.close()",
"def test_concurrent_access(self):\n num_threads = 4\n thread_pool = ThreadPool(num_threads)\n\n def test_func(x):\n \"\"\"Create, get, delete models.\"\"\"\n for i in range(32):\n handle = self.model_manager.create(name='%s-%s' % (x, i))\n self.assertTrue(\n handle in [m.handle for m in self.model_manager.models()])\n self.model_manager.delete(handle)\n self.assertTrue(\n handle not in\n [m.handle for m in self.model_manager.models()])\n return True\n for x in range(num_threads):\n thread_pool.add_func(test_func, x)\n thread_pool.join()\n self.assertTrue(len(self.model_manager.models()) == 0,\n 'Expecting no models to stick around')",
"def test_run_started(self):",
"async def test_change_pools_reorg(self, setup, trusted_and_fee, bt, self_hostname):\n trusted, fee = trusted_and_fee\n full_nodes, wallet_nodes, receive_address, client, rpc_cleanup = setup\n our_ph = receive_address[0]\n pool_a_ph = receive_address[1]\n wallets = [wallet_n.wallet_state_manager.main_wallet for wallet_n in wallet_nodes]\n pool_b_ph = await wallets[1].get_new_puzzlehash()\n full_node_api = full_nodes[0]\n WAIT_SECS = 30\n if trusted:\n wallet_nodes[0].config[\"trusted_peers\"] = {\n full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex()\n }\n else:\n wallet_nodes[0].config[\"trusted_peers\"] = {}\n\n await wallet_nodes[0].server.start_client(\n PeerInfo(self_hostname, uint16(full_node_api.full_node.server._port)), None\n )\n\n try:\n assert len(await client.get_wallets(WalletType.POOLING_WALLET)) == 0\n\n async def have_chia():\n await farm_blocks(full_node_api, our_ph, 1)\n return (await wallets[0].get_confirmed_balance()) > 0\n\n await time_out_assert(timeout=WAIT_SECS, function=have_chia)\n await time_out_assert(20, wallet_is_synced, True, wallet_nodes[0], full_node_api)\n\n creation_tx: TransactionRecord = await client.create_new_pool_wallet(\n pool_a_ph, \"https://pool-a.org\", 5, f\"{self_hostname}:5000\", \"new\", \"FARMING_TO_POOL\", fee\n )\n\n await time_out_assert(\n 10,\n full_node_api.full_node.mempool_manager.get_spendbundle,\n creation_tx.spend_bundle,\n creation_tx.name,\n )\n\n await farm_blocks(full_node_api, our_ph, 6)\n assert full_node_api.full_node.mempool_manager.get_spendbundle(creation_tx.name) is None\n\n await time_out_assert(20, wallet_is_synced, True, wallet_nodes[0], full_node_api)\n\n summaries_response = await client.get_wallets(WalletType.POOLING_WALLET)\n assert len(summaries_response) == 1\n wallet_id: int = summaries_response[0][\"id\"]\n status: PoolWalletInfo = (await client.pw_status(wallet_id))[0]\n\n assert status.current.state == PoolSingletonState.FARMING_TO_POOL.value\n assert status.target is None\n\n async def status_is_farming_to_pool():\n await farm_blocks(full_node_api, our_ph, 1)\n pw_status: PoolWalletInfo = (await client.pw_status(wallet_id))[0]\n return pw_status.current.state == PoolSingletonState.FARMING_TO_POOL.value\n\n await time_out_assert(timeout=WAIT_SECS, function=status_is_farming_to_pool)\n\n pw_info: PoolWalletInfo = (await client.pw_status(wallet_id))[0]\n assert pw_info.current.pool_url == \"https://pool-a.org\"\n assert pw_info.current.relative_lock_height == 5\n\n join_pool_tx: TransactionRecord = (\n await client.pw_join_pool(\n wallet_id,\n pool_b_ph,\n \"https://pool-b.org\",\n 10,\n fee,\n )\n )[\"transaction\"]\n assert join_pool_tx is not None\n await time_out_assert(\n 10,\n full_node_api.full_node.mempool_manager.get_spendbundle,\n join_pool_tx.spend_bundle,\n join_pool_tx.name,\n )\n await farm_blocks(full_node_api, our_ph, 1)\n\n async def status_is_leaving_no_blocks():\n pw_status: PoolWalletInfo = (await client.pw_status(wallet_id))[0]\n return pw_status.current.state == PoolSingletonState.LEAVING_POOL.value\n\n async def status_is_farming_to_pool_no_blocks():\n pw_status: PoolWalletInfo = (await client.pw_status(wallet_id))[0]\n return pw_status.current.state == PoolSingletonState.FARMING_TO_POOL.value\n\n await time_out_assert(timeout=WAIT_SECS, function=status_is_leaving_no_blocks)\n\n current_blocks = await full_node_api.get_all_full_blocks()\n more_blocks = full_node_api.bt.get_consecutive_blocks(\n 3,\n farmer_reward_puzzle_hash=pool_a_ph,\n 
pool_reward_puzzle_hash=pool_b_ph,\n block_list_input=current_blocks[:-1],\n force_overflow=True,\n guarantee_transaction_block=True,\n seed=32 * b\"4\",\n transaction_data=join_pool_tx.spend_bundle,\n )\n\n for block in more_blocks[-3:]:\n await full_node_api.full_node.respond_block(RespondBlock(block))\n\n await asyncio.sleep(5)\n await time_out_assert(timeout=WAIT_SECS, function=status_is_leaving_no_blocks)\n\n # Eventually, leaves pool\n await time_out_assert(timeout=WAIT_SECS, function=status_is_farming_to_pool)\n\n finally:\n client.close()\n await client.await_closed()\n await rpc_cleanup()"
] | [
"0.588525",
"0.58404994",
"0.5700447",
"0.5668184",
"0.5639154",
"0.5630297",
"0.5601512",
"0.558073",
"0.5506044",
"0.5464134",
"0.5464046",
"0.5454045",
"0.54427963",
"0.5430056",
"0.54291415",
"0.54067427",
"0.54062814",
"0.5388401",
"0.53826404",
"0.53818905",
"0.53586805",
"0.5358201",
"0.5352884",
"0.5345808",
"0.53335357",
"0.5314364",
"0.5313943",
"0.5313571",
"0.5288687",
"0.52776027"
] | 0.63022053 | 0 |
Test the apm factory for consecutive refinement. | def test_ParameterManagerGenerator_consecutive():
components_1 = {
"scale": mock_component(),
"decay": mock_component(),
"absorption": mock_component(),
}
data_manager = mock_data_manager(components_1)
data_manager.consecutive_refinement_order = [["scale", "decay"], ["absorption"]]
# Test single dataset case.
pmg = ParameterManagerGenerator(
[data_manager],
apm_type=active_parameter_manager,
target=ScalingTarget(),
mode="consecutive",
)
apms = list(pmg.parameter_managers())
assert len(apms) == 2
apm = apms[0]
assert isinstance(apm, multi_active_parameter_manager)
assert "scale" in apm.components_list
assert "decay" in apm.components_list
assert "absorption" not in apm.components_list
apm = apms[1]
assert isinstance(apm, multi_active_parameter_manager)
assert "scale" not in apm.components_list
assert "decay" not in apm.components_list
assert "absorption" in apm.components_list
# Test multi dataset case.
components_2 = {"1": mock_component(), "2": mock_component()}
data_manager_2 = mock_data_manager(components_2)
data_manager_2.consecutive_refinement_order = [["1"], ["2"]]
pmg = ParameterManagerGenerator(
[data_manager, data_manager_2],
apm_type=active_parameter_manager,
target=ScalingTarget(),
mode="consecutive",
)
apms = list(pmg.parameter_managers())
assert len(apms) == 2
multi_apm = apms[0]
assert isinstance(multi_apm, multi_active_parameter_manager)
apm_1 = multi_apm.apm_list[0]
assert "scale" in apm_1.components_list
assert "decay" in apm_1.components_list
assert "absorption" not in apm_1.components_list
assert multi_apm.apm_list[1].components_list == ["1"]
multi_apm = apms[1]
assert isinstance(multi_apm, multi_active_parameter_manager)
assert multi_apm.apm_list[0].components_list == ["absorption"]
assert multi_apm.apm_list[1].components_list == ["2"]
# Test multi dataset case with different number of cycles for each data_manager.
components_2 = {"1": mock_component()}
data_manager_2 = mock_data_manager(components_2)
data_manager_2.consecutive_refinement_order = [["1"], ["2"]]
pmg = ParameterManagerGenerator(
[data_manager, data_manager_2],
apm_type=active_parameter_manager,
target=ScalingTarget(),
mode="consecutive",
)
assert pmg.param_lists[0] == [["scale", "decay"], ["absorption"]]
assert pmg.param_lists[1] == [["1"]]
apms = list(pmg.parameter_managers())
assert len(apms) == 2
multi_apm = apms[0]
assert isinstance(multi_apm, multi_active_parameter_manager)
apm_1 = multi_apm.apm_list[0]
assert "scale" in apm_1.components_list
assert "decay" in apm_1.components_list
assert "absorption" not in apm_1.components_list
assert multi_apm.apm_list[1].components_list == ["1"]
multi_apm = apms[1]
assert isinstance(multi_apm, multi_active_parameter_manager)
assert multi_apm.apm_list[0].components_list == ["absorption"]
# Only change relative to previous test case.
assert multi_apm.apm_list[1].components_list == []
# Test fixing the decay parameter.
data_manager.fixed_components = ["decay"]
pmg = ParameterManagerGenerator(
[data_manager],
apm_type=active_parameter_manager,
target=ScalingTarget(),
mode="consecutive",
)
apms = list(pmg.parameter_managers())
assert len(apms) == 2
apm = apms[0]
assert isinstance(apm, multi_active_parameter_manager)
assert "scale" in apm.components_list
assert "decay" not in apm.components_list
assert "absorption" not in apm.components_list
apm = apms[1]
assert isinstance(apm, multi_active_parameter_manager)
assert "scale" not in apm.components_list
assert "decay" not in apm.components_list
assert "absorption" in apm.components_list | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_general_apm():\n components = {\n \"scale\": mock_component(),\n \"decay\": mock_component(),\n \"absorption\": mock_component(),\n }\n\n apm = active_parameter_manager(components, [\"scale\", \"decay\"])\n assert \"decay\" in apm.components_list\n assert \"scale\" in apm.components_list\n assert \"absorption\" not in apm.components_list\n assert apm.n_active_params == (\n components[\"scale\"].n_params + components[\"decay\"].n_params\n )\n n_cumul = 0\n for component in apm.components:\n assert apm.components[component][\"n_params\"] == components[component].n_params\n assert apm.components[component][\"start_idx\"] == n_cumul\n assert (\n apm.components[component][\"end_idx\"]\n == n_cumul + apm.components[component][\"n_params\"]\n )\n n_cumul += apm.components[component][\"n_params\"]\n\n apm.set_param_vals(flex.double([2.0, 1.5]))\n assert apm.get_param_vals() == flex.double([2.0, 1.5])\n # Test params were updated in components\n assert list(components[\"scale\"].free_parameters) == [2.0]\n assert list(components[\"decay\"].free_parameters) == [1.5]\n # Test selection of parameters\n decay_params = apm.select_parameters(\"decay\")\n assert len(decay_params) == 1\n assert decay_params[0] == 1.5\n\n # Test calculate model state uncertainties\n var_cov = flex.double([1.0, 0.5, 0.5, 2.0])\n var_cov.reshape(flex.grid(2, 2))\n apm.calculate_model_state_uncertainties(var_cov)\n assert components[\"scale\"].var_cov_matrix[0, 0] == 1.0\n assert components[\"decay\"].var_cov_matrix[0, 0] == 2.0\n\n # Test set param esds.\n apm.set_param_esds(flex.double([0.1, 0.2]))\n assert components[\"scale\"].free_parameter_esds == flex.double([0.1])\n assert components[\"decay\"].free_parameter_esds == flex.double([0.2])",
"def test_multi_apm():\n\n components_1 = {\n \"scale\": mock_component(),\n \"decay\": mock_component(),\n \"absorption\": mock_component(),\n }\n components_2 = {\"scale\": mock_component(), \"decay\": mock_component()}\n\n multi_apm = multi_active_parameter_manager(\n ScalingTarget(),\n [components_1, components_2],\n [[\"scale\", \"decay\"], [\"scale\"]],\n active_parameter_manager,\n )\n\n # Test correct setup of apm_list attribute.\n for apm in multi_apm.apm_list:\n assert isinstance(apm, active_parameter_manager)\n assert len(multi_apm.apm_list) == 2\n assert multi_apm.components_list == [\"scale\", \"decay\", \"scale\"]\n assert multi_apm.n_active_params == 3\n assert multi_apm.apm_data[0] == {\"start_idx\": 0, \"end_idx\": 2}\n assert multi_apm.apm_data[1] == {\"start_idx\": 2, \"end_idx\": 3}\n\n # Test parameter selection.\n multi_apm.set_param_vals(flex.double([3.0, 2.5, 2.0]))\n assert multi_apm.get_param_vals() == flex.double([3.0, 2.5, 2.0])\n assert multi_apm.select_parameters(0) == flex.double([3.0, 2.5])\n assert multi_apm.select_parameters(1) == flex.double([2.0])\n\n # Test setting parameter esds.\n multi_apm.set_param_esds(flex.double([0.1, 0.2, 0.3]))\n assert components_1[\"scale\"].free_parameter_esds == flex.double([0.1])\n assert components_1[\"decay\"].free_parameter_esds == flex.double([0.2])\n assert components_2[\"scale\"].free_parameter_esds == flex.double([0.3])\n\n # Test setting var_cov matrices for each component.\n var_cov = flex.double([1.0, 0.5, 0.5, 0.5, 2.0, 0.5, 0.5, 0.5, 3.0])\n var_cov.reshape(flex.grid(3, 3))\n multi_apm.calculate_model_state_uncertainties(var_cov)\n assert components_1[\"scale\"].var_cov_matrix[0, 0] == 1.0\n assert components_1[\"decay\"].var_cov_matrix[0, 0] == 2.0\n assert components_2[\"scale\"].var_cov_matrix[0, 0] == 3.0",
"def test_active_inference_SPM_1b(self):",
"def test_add_multiple_pis_simultaneously_to_vpg_check_reallocation(self):\n proj_obj, fabric_obj, pr_objs = self._create_prerequisites(\n create_second_pr=True)\n test_id = self.id()\n VPG_CLASS = self._api_server.get_resource_class('virtual-port-group')\n org_process_ae_id = VPG_CLASS._process_ae_id\n\n class MockVpg(VPG_CLASS):\n org_process_ae_id = VPG_CLASS._process_ae_id\n HOLD_API = True\n @classmethod\n def mock_process_ae_id(cls, db_obj_dict, vpg_name, obj_dict=None):\n while cls.HOLD_API:\n print('sleeping for HOLD_API to clear for '\n 'args = %s' % obj_dict)\n gevent.sleep(0.5)\n return cls.org_process_ae_id(db_obj_dict, vpg_name, obj_dict)\n\n def process_ae_ids(x):\n return [int(i) for i in sorted(x) if i is not None]\n\n def get_zk_ae_ids(prs=None):\n prefix = os.path.join(\n self.__class__.__name__,\n 'id', 'aggregated-ethernet')\n zk_client = self._api_server._db_conn._zk_db._zk_client._zk_client\n if not prs:\n prs = [os.path.join(prefix, pr.name) for pr in pr_objs]\n else:\n if not isinstance(prs, list):\n prs = [prs]\n prs = [os.path.join(prefix, pr) for pr in prs]\n ae_ids = {}\n for pr in prs:\n pr_org = os.path.split(pr)[-1]\n ae_ids[pr_org] = zk_client.get_children(pr)\n return ae_ids\n\n pi_per_pr = 6\n pi_objs = {}\n pr1_pi_names = ['%s_pr1_pi%d' % (test_id, i) for\n i in range(1, pi_per_pr + 1)]\n pr2_pi_names = ['%s_pr2_pi%d' % (test_id, i) for\n i in range(1, pi_per_pr + 1)]\n pr1_pi_objs = self._create_pi_objects(pr_objs[0], pr1_pi_names)\n pr2_pi_objs = self._create_pi_objects(pr_objs[1], pr2_pi_names)\n pi_objs.update(pr1_pi_objs)\n pi_objs.update(pr2_pi_objs)\n\n # create a VPG\n vpg_count = 3\n vpg_names = ['vpg_%s_%s' % (test_id, i) for i in range(\n 1, vpg_count + 1)]\n vpg_objs = self._create_vpgs(fabric_obj, vpg_names)\n\n # record AE-IDs in ZK before creating any VPG\n ae_ids = [x for x in get_zk_ae_ids().values() if x]\n self.assertEqual(len(ae_ids), 0)\n\n def _attach_pi_simultaneously(vpg_obj, pi_uuids):\n # Attach PIs from PR1 to VPG-1\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n try:\n # mock _process_ae_id at VPG resource\n VPG_CLASS._process_ae_id = MockVpg.mock_process_ae_id\n MockVpg.HOLD_API = True\n for pi_uuid in pi_uuids:\n gevent.spawn(\n self.api.ref_update,\n \"virtual-port-group\",\n vpg_obj.uuid,\n \"physical-interface\",\n pi_uuid,\n None,\n \"ADD\",\n None)\n gevent.sleep(2)\n MockVpg.HOLD_API = False\n gevent.sleep(3)\n except gevent.timeout.Timeout:\n self.assertFalse(\n False,\n '%s failed unexpectedly' % VPG_CLASS._process_ae_id)\n finally:\n # reset mock to original\n VPG_CLASS._process_ae_id = org_process_ae_id\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_refs = vpg_obj.get_physical_interface_refs()\n return vpg_obj, pi_refs\n\n # Case 1\n # Attach 2 PIs from PR1 to VPG-1\n vpg_name = vpg_names[0]\n vpg_obj = vpg_objs[vpg_name]\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_uuids = [pi.uuid for pi in list(pr1_pi_objs.values())[0:2]]\n vpg_obj, pi_refs = _attach_pi_simultaneously(vpg_obj, pi_uuids)\n # verify PI-refs are correct\n self.assertEqual(len(pi_refs), 2)\n vpg_ae_ids = {ref['href'].split('/')[-1]: ref['attr'].ae_num\n for ref in pi_refs}\n # verify all AE-IDs allocated per prouter are unique\n self.assertEqual(len(set(vpg_ae_ids.keys())), len(pi_refs))\n self.assertEqual(len(list(vpg_ae_ids.values())), 2)\n self.assertEqual(len(set(vpg_ae_ids.values())), 1)\n ae_id_sorted = process_ae_ids(vpg_ae_ids.values())\n self.assertEqual(ae_id_sorted, 
[0, 0])\n # verification at ZK for AE-IDs in Physical Routers\n pr_ae_ids = get_zk_ae_ids()\n self.assertEqual(len(pr_ae_ids[pr_objs[0].name]), 1)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[0].name]), [0])\n self.assertEqual(len(pr_ae_ids[pr_objs[1].name]), 0)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[1].name]), [])\n\n # Case 2\n # Attach 2 PIs from PR1 to VPG-2\n vpg_name = vpg_names[1]\n vpg_obj = vpg_objs[vpg_name]\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_uuids = [pi.uuid for pi in list(pr1_pi_objs.values())[2:4]]\n vpg_obj, pi_refs = _attach_pi_simultaneously(vpg_obj, pi_uuids)\n # verify PI-refs are correct\n self.assertEqual(len(pi_refs), 2)\n vpg_ae_ids = {ref['href'].split('/')[-1]: ref['attr'].ae_num\n for ref in pi_refs}\n # verify all AE-IDs allocated per prouter are unique\n self.assertEqual(len(set(vpg_ae_ids.keys())), len(pi_refs))\n self.assertEqual(len(list(vpg_ae_ids.values())), 2)\n self.assertEqual(len(set(vpg_ae_ids.values())), 1)\n ae_id_sorted = process_ae_ids(vpg_ae_ids.values())\n self.assertEqual(ae_id_sorted, [1, 1])\n # verification at ZK for AE-IDs in Physical Routers\n pr_ae_ids = get_zk_ae_ids()\n self.assertEqual(len(pr_ae_ids[pr_objs[0].name]), 2)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[0].name]), [0, 1])\n self.assertEqual(len(pr_ae_ids[pr_objs[1].name]), 0)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[1].name]), [])\n\n # Case 3\n # Deattach 1 PIs from PR1 to VPG-1\n vpg_name = vpg_names[0]\n vpg_obj = vpg_objs[vpg_name]\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_obj = list(pr1_pi_objs.values())[0]\n pi_obj = self._vnc_lib.physical_interface_read(id=pi_obj.uuid)\n vpg_obj.del_physical_interface(pi_obj)\n self.api.virtual_port_group_update(vpg_obj)\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_refs = vpg_obj.get_physical_interface_refs()\n # verify PI-refs are correct\n self.assertEqual(len(pi_refs), 1)\n vpg_ae_ids = {ref['href'].split('/')[-1]: ref['attr'].ae_num\n for ref in pi_refs}\n # verify all AE-IDs allocated per prouter are unique\n self.assertEqual(len(set(vpg_ae_ids.keys())), len(pi_refs))\n self.assertIsNone(list(vpg_ae_ids.values())[0])\n ae_id_sorted = process_ae_ids(vpg_ae_ids.values())\n self.assertEqual(ae_id_sorted, [])\n # verification at ZK for AE-IDs in Physical Routers\n pr_ae_ids = get_zk_ae_ids()\n self.assertEqual(len(pr_ae_ids[pr_objs[0].name]), 1)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[0].name]), [1])\n self.assertEqual(len(pr_ae_ids[pr_objs[1].name]), 0)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[1].name]), [])\n\n # Case 4\n # Attach 2 PIs from PR1 to VPG-3\n vpg_name = vpg_names[2]\n vpg_obj = vpg_objs[vpg_name]\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_uuids = [pi.uuid for pi in list(pr1_pi_objs.values())[4:6]]\n vpg_obj, pi_refs = _attach_pi_simultaneously(vpg_obj, pi_uuids)\n # verify PI-refs are correct\n self.assertEqual(len(pi_refs), 2)\n vpg_ae_ids = {ref['href'].split('/')[-1]: ref['attr'].ae_num\n for ref in pi_refs}\n # verify all AE-IDs allocated per prouter are unique\n self.assertEqual(len(set(vpg_ae_ids.keys())), len(pi_refs))\n self.assertEqual(len(list(vpg_ae_ids.values())), 2)\n self.assertEqual(len(set(vpg_ae_ids.values())), 1)\n ae_id_sorted = process_ae_ids(vpg_ae_ids.values())\n self.assertEqual(ae_id_sorted, [0, 0])\n # verification at ZK for AE-IDs in Physical Routers\n pr_ae_ids = get_zk_ae_ids()\n 
self.assertEqual(len(pr_ae_ids[pr_objs[0].name]), 2)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[0].name]), [0, 1])\n self.assertEqual(len(pr_ae_ids[pr_objs[1].name]), 0)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[1].name]), [])\n\n # Case 5\n # Attach 1 PIs from PR1 to VPG-1\n vpg_name = vpg_names[0]\n vpg_obj = vpg_objs[vpg_name]\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_obj = list(pr1_pi_objs.values())[0]\n pi_obj = self._vnc_lib.physical_interface_read(id=pi_obj.uuid)\n vpg_obj.add_physical_interface(pi_obj)\n self._vnc_lib.virtual_port_group_update(vpg_obj)\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_refs = vpg_obj.get_physical_interface_refs()\n # verify PI-refs are correct\n self.assertEqual(len(pi_refs), 2)\n vpg_ae_ids = {ref['href'].split('/')[-1]: ref['attr'].ae_num\n for ref in pi_refs}\n # verify all AE-IDs allocated per prouter are unique\n self.assertEqual(len(set(vpg_ae_ids.keys())), len(pi_refs))\n self.assertEqual(len(list(vpg_ae_ids.values())), 2)\n self.assertEqual(len(set(vpg_ae_ids.values())), 1)\n ae_id_sorted = process_ae_ids(vpg_ae_ids.values())\n self.assertEqual(ae_id_sorted, [2, 2])\n # verification at ZK for AE-IDs in Physical Routers\n pr_ae_ids = get_zk_ae_ids()\n self.assertEqual(len(pr_ae_ids[pr_objs[0].name]), 3)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[0].name]), [0, 1, 2])\n self.assertEqual(len(pr_ae_ids[pr_objs[1].name]), 0)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[1].name]), [])",
"def test_add_multiple_pis_simultaneously_to_vpg_check_deallocation(self):\n proj_obj, fabric_obj, pr_objs = self._create_prerequisites(\n create_second_pr=True)\n test_id = self.id()\n VPG_CLASS = self._api_server.get_resource_class('virtual-port-group')\n org_process_ae_id = VPG_CLASS._process_ae_id\n\n class MockVpg(VPG_CLASS):\n org_process_ae_id = VPG_CLASS._process_ae_id\n HOLD_API = True\n @classmethod\n def mock_process_ae_id(cls, db_obj_dict, vpg_name, obj_dict=None):\n while cls.HOLD_API:\n print('sleeping for HOLD_API to clear for '\n 'args = %s' % obj_dict)\n gevent.sleep(0.5)\n return cls.org_process_ae_id(db_obj_dict, vpg_name, obj_dict)\n\n def process_ae_ids(x):\n return [int(i) for i in sorted(x) if i is not None]\n\n def get_zk_ae_ids(prs=None):\n prefix = os.path.join(\n self.__class__.__name__,\n 'id', 'aggregated-ethernet')\n zk_client = self._api_server._db_conn._zk_db._zk_client._zk_client\n if not prs:\n prs = [os.path.join(prefix, pr.name) for pr in pr_objs]\n else:\n if not isinstance(prs, list):\n prs = [prs]\n prs = [os.path.join(prefix, pr) for pr in prs]\n ae_ids = {}\n for pr in prs:\n pr_org = os.path.split(pr)[-1]\n ae_ids[pr_org] = zk_client.get_children(pr)\n return ae_ids\n\n pi_per_pr = 1\n pi_objs = {}\n pr1_pi_names = ['%s_pr1_pi%d' % (test_id, i) for\n i in range(1, pi_per_pr + 1)]\n pr2_pi_names = ['%s_pr2_pi%d' % (test_id, i) for\n i in range(1, pi_per_pr + 1)]\n pr1_pi_objs = self._create_pi_objects(pr_objs[0], pr1_pi_names)\n pr2_pi_objs = self._create_pi_objects(pr_objs[1], pr2_pi_names)\n pi_objs.update(pr1_pi_objs)\n pi_objs.update(pr2_pi_objs)\n\n # create a VPG\n vpg_count = 1\n vpg_names = ['vpg_%s_%s' % (test_id, i) for i in range(\n 1, vpg_count + 1)]\n vpg_objs = self._create_vpgs(fabric_obj, vpg_names)\n\n # record AE-IDs in ZK before creating any VPG\n ae_ids = [x for x in get_zk_ae_ids().values() if x]\n self.assertEqual(len(ae_ids), 0)\n\n def _attach_pi_simultaneously(vpg_obj, pi_uuids):\n # Attach PIs from PR1 to VPG-1\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n try:\n # mock _process_ae_id at VPG resource\n VPG_CLASS._process_ae_id = MockVpg.mock_process_ae_id\n MockVpg.HOLD_API = True\n for pi_uuid in pi_uuids:\n gevent.spawn(\n self.api.ref_update,\n \"virtual-port-group\",\n vpg_obj.uuid,\n \"physical-interface\",\n pi_uuid,\n None,\n \"ADD\",\n None)\n gevent.sleep(2)\n MockVpg.HOLD_API = False\n gevent.sleep(3)\n except gevent.timeout.Timeout:\n self.assertFalse(\n False,\n '%s failed unexpectedly' % VPG_CLASS._process_ae_id)\n finally:\n # reset mock to original\n VPG_CLASS._process_ae_id = org_process_ae_id\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_refs = vpg_obj.get_physical_interface_refs()\n return vpg_obj, pi_refs\n\n # Case 1\n # Attach 2 PIs from PR1 to VPG-1\n vpg_name = vpg_names[0]\n vpg_obj = vpg_objs[vpg_name]\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pr1_pi_uuids = list(pr1_pi_objs.values())[0].uuid\n pr2_pi_uuids = list(pr2_pi_objs.values())[0].uuid\n pi_uuids = [pr1_pi_uuids, pr2_pi_uuids]\n vpg_obj, pi_refs = _attach_pi_simultaneously(vpg_obj, pi_uuids)\n # verify PI-refs are correct\n self.assertEqual(len(pi_refs), 2)\n vpg_ae_ids = {ref['href'].split('/')[-1]: ref['attr'].ae_num\n for ref in pi_refs}\n # verify all AE-IDs allocated per prouter are unique\n self.assertEqual(len(set(vpg_ae_ids.keys())), len(pi_refs))\n self.assertEqual(len(list(vpg_ae_ids.values())), 2)\n self.assertEqual(len(set(vpg_ae_ids.values())), 1)\n 
ae_id_sorted = process_ae_ids(vpg_ae_ids.values())\n self.assertEqual(ae_id_sorted, [0, 0])\n # verification at ZK for AE-IDs in Physical Routers\n pr_ae_ids = get_zk_ae_ids()\n self.assertEqual(len(pr_ae_ids[pr_objs[0].name]), 1)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[0].name]), [0])\n self.assertEqual(len(pr_ae_ids[pr_objs[1].name]), 1)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[1].name]), [0])\n\n # Case 2\n # Deattach 1 PIs from PR1 to VPG-1\n vpg_name = vpg_names[0]\n vpg_obj = vpg_objs[vpg_name]\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_obj = list(pr1_pi_objs.values())[0]\n pi_obj = self._vnc_lib.physical_interface_read(id=pi_obj.uuid)\n vpg_obj.del_physical_interface(pi_obj)\n self.api.virtual_port_group_update(vpg_obj)\n vpg_obj = self._vnc_lib.virtual_port_group_read(id=vpg_obj.uuid)\n pi_refs = vpg_obj.get_physical_interface_refs()\n # verify PI-refs are correct\n self.assertEqual(len(pi_refs), 1)\n vpg_ae_ids = {ref['href'].split('/')[-1]: ref['attr'].ae_num\n for ref in pi_refs}\n # verify all AE-IDs allocated per prouter are unique\n self.assertEqual(len(set(vpg_ae_ids.keys())), len(pi_refs))\n self.assertIsNone(list(vpg_ae_ids.values())[0])\n ae_id_sorted = process_ae_ids(vpg_ae_ids.values())\n self.assertEqual(ae_id_sorted, [])\n # verification at ZK for AE-IDs in Physical Routers\n pr_ae_ids = get_zk_ae_ids()\n self.assertEqual(len(pr_ae_ids[pr_objs[0].name]), 0)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[0].name]), [])\n self.assertEqual(len(pr_ae_ids[pr_objs[1].name]), 0)\n self.assertEqual(process_ae_ids(pr_ae_ids[pr_objs[1].name]), [])",
"def test_alchemical_phase_factory_building(self):\n with mmtools.utils.temporary_directory() as tmp_dir:\n template_script = self.get_implicit_template_script(tmp_dir)\n\n # AbsoluteAlchemicalFactory options.\n template_script['options']['alchemical_pme_treatment'] = 'exact'\n\n # Test that options are passed to AlchemicalPhaseFactory correctly.\n exp_builder = ExperimentBuilder(script=template_script)\n for experiment in exp_builder.build_experiments():\n for phase_factory in experiment.phases:\n assert phase_factory.alchemical_factory.alchemical_pme_treatment == 'exact'\n # Overwrite AbsoluteAlchemicalFactory default for disable_alchemical_dispersion_correction.\n assert phase_factory.alchemical_factory.disable_alchemical_dispersion_correction == True",
"def test_02(self, test):\r\n\r\n return test.MANUAL()",
"def test_04(self, test):\r\n globalConfig.test = test\r\n\r\n self.check_delayed_activation(SCHEDULED_ABSOLUTE_ACTIVATION)\r\n\r\n return test.PASS()",
"def test_bookkeeping():\n\n ## CASE 1: alanine dipeptide in vacuum\n # Create vanilla system\n ala = AlanineDipeptideVacuum()\n system = ala.system\n positions = ala.positions\n\n # Create REST system\n system.removeForce(4)\n res1 = list(ala.topology.residues())[1]\n rest_atoms = [atom.index for atom in res1.atoms()]\n factory = RESTTopologyFactory(system, solute_region=rest_atoms)\n REST_system = factory.REST_system\n\n # Compare energy components\n compare_energy_components(REST_system, system, positions)\n\n ## CASE 2: alanine dipeptide in solvent\n # Create vanilla system\n ala = AlanineDipeptideExplicit()\n system = ala.system\n positions = ala.positions\n\n # Create REST system\n system.removeForce(4)\n res1 = list(ala.topology.residues())[1]\n rest_atoms = [atom.index for atom in res1.atoms()]\n factory = RESTTopologyFactory(system, solute_region=rest_atoms, use_dispersion_correction=True)\n REST_system = factory.REST_system\n\n # Compare energy components\n compare_energy_components(REST_system, system, positions)\n\n ## CASE 3: alanine dipeptide in solvent with repartitioned hybrid system\n # Create repartitioned hybrid system for lambda 0 endstate\n atp, system_generator = generate_atp(phase='solvent')\n htf = generate_dipeptide_top_pos_sys(atp.topology,\n new_res='THR',\n system=atp.system,\n positions=atp.positions,\n system_generator=system_generator,\n conduct_htf_prop=True,\n generate_repartitioned_hybrid_topology_factory=True,\n endstate=0,\n validate_endstate_energy=False)\n\n # Create REST-ified hybrid system\n res1 = list(htf.hybrid_topology.residues)[1]\n rest_atoms = [atom.index for atom in list(res1.atoms)]\n factory = RESTTopologyFactory(htf.hybrid_system, solute_region=rest_atoms, use_dispersion_correction=True)\n REST_system = factory.REST_system\n\n # Compare energy components\n compare_energy_components(REST_system, htf.hybrid_system, htf.hybrid_positions)",
"def run(self):\n\n from dials.algorithms.refinement.refiner import phil_scope\n params = phil_scope.fetch(source=phil.parse('')).extract()\n\n # disable outlier rejection for speed of refiner construction\n params.refinement.reflections.outlier.algorithm='null'\n\n refiner = RefinerFactory.from_parameters_data_experiments(params,\n self._reflections, self._experiments)\n\n d1 = self._experiments[0].detector\n d2 = refiner.get_experiments()[0].detector\n\n assert d1.is_similar_to(d2)\n print \"OK\"\n return",
"def test_1(self):\n # Generate constraint periods\n constr = {\"program\": {\"version\": \"python\"}}\n # Generate random initialization file\n params_spec, options_spec = generate_random_model(point_constr=constr)\n respy_obj = RespyCls(params_spec, options_spec)\n respy_obj = simulate_observed(respy_obj)\n\n # Extract class attributes\n (\n state_space,\n states_all,\n mapping_state_idx,\n periods_rewards_systematic,\n periods_emax,\n num_periods,\n num_draws_emax,\n edu_spec,\n optim_paras,\n num_types,\n ) = dist_class_attributes(\n respy_obj,\n \"state_space\",\n \"states_all\",\n \"mapping_state_idx\",\n \"periods_rewards_systematic\",\n \"periods_emax\",\n \"num_periods\",\n \"num_draws_emax\",\n \"edu_spec\",\n \"optim_paras\",\n \"num_types\",\n )\n\n # Sample draws\n draws_emax_standard = np.random.multivariate_normal(\n np.zeros(4), np.identity(4), num_draws_emax\n )\n draws_emax_risk = transform_disturbances(\n draws_emax_standard, np.zeros(4), optim_paras[\"shocks_cholesky\"]\n )\n\n # Sampling of random period and admissible state index\n period = np.random.choice(range(num_periods))\n k = np.random.choice(range(state_space.states_per_period[period]))\n\n # Select systematic rewards\n rewards_systematic = periods_rewards_systematic[period, k, :]\n\n # Evaluation of simulated expected future values. Limit to one individual as the\n # Fortran version.\n rewards_period = state_space.get_attribute_from_period(\"rewards\", period)[k]\n emaxs_period = state_space.get_attribute_from_period(\"emaxs\", period)[k, :4]\n max_education_period = (\n state_space.get_attribute_from_period(\"states\", period)[k, 3]\n >= edu_spec[\"max\"]\n )\n\n py = construct_emax_risk(\n rewards_period[-2:],\n rewards_period[:4],\n emaxs_period,\n draws_emax_risk,\n optim_paras[\"delta\"],\n max_education_period,\n )\n\n f90 = fort_debug.wrapper_construct_emax_risk(\n num_periods,\n num_draws_emax,\n period,\n k,\n draws_emax_risk,\n rewards_systematic,\n periods_emax,\n states_all,\n mapping_state_idx,\n edu_spec[\"start\"],\n edu_spec[\"max\"],\n optim_paras[\"delta\"],\n optim_paras[\"coeffs_common\"],\n optim_paras[\"coeffs_a\"],\n optim_paras[\"coeffs_b\"],\n num_types,\n )\n\n assert_allclose(py, f90)",
"def test_parego(facade, make_scenario, configspace):\n N_TRIALS = 64\n RETRAIN_AFTER = 8\n\n scenario: Scenario = make_scenario(configspace, use_multi_objective=True, n_trials=N_TRIALS)\n multi_objective_algorithm = WrapStrategy(ParEGO, scenario=scenario)\n intensifier = Intensifier(scenario, max_config_calls=1, max_incumbents=10)\n config_selector = ConfigSelector(scenario, retrain_after=RETRAIN_AFTER)\n initial_design = RandomInitialDesign(scenario, n_configs=1)\n\n smac = facade(\n scenario=scenario,\n target_function=tae,\n multi_objective_algorithm=multi_objective_algorithm,\n intensifier=intensifier,\n config_selector=config_selector,\n initial_design=initial_design,\n overwrite=True,\n )\n incumbents = smac.optimize()\n\n sorted_incumbents = []\n for incumbent in incumbents:\n x, y = func(incumbent[\"x\"])\n sorted_incumbents.append((x, y))\n\n sorted_incumbents = sorted(sorted_incumbents, key=lambda x: x[0])\n previous_y = np.inf\n for x, y in sorted_incumbents:\n assert y <= previous_y\n previous_y = y\n\n # We expect N_TRIALS/RETRAIN_AFTER updates\n assert multi_objective_algorithm._n_calls_update_on_iteration_start == int(N_TRIALS / RETRAIN_AFTER)",
"def test_multiple_factories(self, mocker):\n sdk_ready_flag = threading.Event()\n\n def _init(self, ready_flag, some, auth_api, streaming_enabled, telemetry_runtime_producer, telemetry_init_consumer, sse_url=None):\n self._ready_flag = ready_flag\n self._synchronizer = mocker.Mock(spec=Synchronizer)\n self._streaming_enabled = False\n self._telemetry_runtime_producer = telemetry_runtime_producer\n self._telemetry_init_consumer = telemetry_init_consumer\n mocker.patch('splitio.sync.manager.Manager.__init__', new=_init)\n\n def _start(self, *args, **kwargs):\n sdk_ready_flag.set()\n mocker.patch('splitio.sync.manager.Manager.start', new=_start)\n\n def _stop(self, *args, **kwargs):\n pass\n mocker.patch('splitio.sync.manager.Manager.stop', new=_stop)\n\n mockManager = Manager(sdk_ready_flag, mocker.Mock(), mocker.Mock(), False, mocker.Mock(), mocker.Mock())\n\n def _make_factory_with_apikey(apikey, *_, **__):\n return SplitFactory(apikey, {}, True, mocker.Mock(spec=ImpressionsManager), mockManager, mocker.Mock(), mocker.Mock(), mocker.Mock())\n\n factory_module_logger = mocker.Mock()\n build_in_memory = mocker.Mock()\n build_in_memory.side_effect = _make_factory_with_apikey\n build_redis = mocker.Mock()\n build_redis.side_effect = _make_factory_with_apikey\n build_localhost = mocker.Mock()\n build_localhost.side_effect = _make_factory_with_apikey\n mocker.patch('splitio.client.factory._LOGGER', new=factory_module_logger)\n mocker.patch('splitio.client.factory._build_in_memory_factory', new=build_in_memory)\n mocker.patch('splitio.client.factory._build_redis_factory', new=build_redis)\n mocker.patch('splitio.client.factory._build_localhost_factory', new=build_localhost)\n\n _INSTANTIATED_FACTORIES.clear() # Clear all factory counters for testing purposes\n\n factory1 = get_factory('some_api_key')\n assert _INSTANTIATED_FACTORIES['some_api_key'] == 1\n assert factory_module_logger.warning.mock_calls == []\n\n factory2 = get_factory('some_api_key')\n assert _INSTANTIATED_FACTORIES['some_api_key'] == 2\n assert factory_module_logger.warning.mock_calls == [mocker.call(\n \"factory instantiation: You already have %d %s with this SDK Key. \"\n \"We recommend keeping only one instance of the factory at all times \"\n \"(Singleton pattern) and reusing it throughout your application.\",\n 1,\n 'factory'\n )]\n\n factory_module_logger.reset_mock()\n factory3 = get_factory('some_api_key')\n assert _INSTANTIATED_FACTORIES['some_api_key'] == 3\n assert factory_module_logger.warning.mock_calls == [mocker.call(\n \"factory instantiation: You already have %d %s with this SDK Key. \"\n \"We recommend keeping only one instance of the factory at all times \"\n \"(Singleton pattern) and reusing it throughout your application.\",\n 2,\n 'factories'\n )]\n\n factory_module_logger.reset_mock()\n factory4 = get_factory('some_other_api_key')\n assert _INSTANTIATED_FACTORIES['some_api_key'] == 3\n assert _INSTANTIATED_FACTORIES['some_other_api_key'] == 1\n assert factory_module_logger.warning.mock_calls == [mocker.call(\n \"factory instantiation: You already have an instance of the Split factory. \"\n \"Make sure you definitely want this additional instance. 
\"\n \"We recommend keeping only one instance of the factory at all times \"\n \"(Singleton pattern) and reusing it throughout your application.\"\n )]\n\n event = threading.Event()\n factory1.destroy(event)\n event.wait()\n assert _INSTANTIATED_FACTORIES['some_other_api_key'] == 1\n assert _INSTANTIATED_FACTORIES['some_api_key'] == 2\n factory2.destroy()\n factory3.destroy()\n factory4.destroy()",
"def test_01_lighting(self):",
"def test_4(self):\n params_spec, options_spec = generate_random_model()\n respy_obj = RespyCls(params_spec, options_spec)\n\n # Ensure that backward induction routines use the same grid for the\n # interpolation.\n write_interpolation_grid(respy_obj)\n\n # Extract class attributes\n (\n num_periods,\n edu_spec,\n optim_paras,\n num_draws_emax,\n seed_emax,\n is_debug,\n is_interpolated,\n num_points_interp,\n optimizer_options,\n file_sim,\n num_types,\n ) = dist_class_attributes(\n respy_obj,\n \"num_periods\",\n \"edu_spec\",\n \"optim_paras\",\n \"num_draws_emax\",\n \"seed_emax\",\n \"is_debug\",\n \"is_interpolated\",\n \"num_points_interp\",\n \"optimizer_options\",\n \"file_sim\",\n \"num_types\",\n )\n\n shocks_cholesky = optim_paras[\"shocks_cholesky\"]\n coeffs_common = optim_paras[\"coeffs_common\"]\n coeffs_home = optim_paras[\"coeffs_home\"]\n coeffs_edu = optim_paras[\"coeffs_edu\"]\n coeffs_a = optim_paras[\"coeffs_a\"]\n coeffs_b = optim_paras[\"coeffs_b\"]\n delta = optim_paras[\"delta\"]\n\n type_spec_shifts = optim_paras[\"type_shifts\"]\n type_spec_shares = optim_paras[\"type_shares\"]\n\n min_idx = edu_spec[\"max\"] + 1\n\n # Check the state space creation.\n state_space = StateSpace(\n num_periods, num_types, edu_spec[\"start\"], edu_spec[\"max\"], optim_paras\n )\n\n states_all, mapping_state_idx, _, _ = state_space._get_fortran_counterparts()\n\n pyth = (\n states_all,\n state_space.states_per_period,\n mapping_state_idx,\n state_space.states_per_period.max(),\n )\n\n f2py = fort_debug.wrapper_create_state_space(\n num_periods, num_types, edu_spec[\"start\"], edu_spec[\"max\"], min_idx\n )\n for i in range(4):\n # Slice Fortran output to shape of Python output.\n if isinstance(f2py[i], np.ndarray):\n f2py_reduced = f2py[i][tuple(map(slice, pyth[i].shape))]\n else:\n f2py_reduced = f2py[i]\n\n assert_allclose(pyth[i], f2py_reduced)\n\n _, _, pyth, _ = state_space._get_fortran_counterparts()\n\n f2py = fort_debug.wrapper_calculate_rewards_systematic(\n num_periods,\n state_space.states_per_period,\n states_all,\n state_space.states_per_period.max(),\n coeffs_common,\n coeffs_a,\n coeffs_b,\n coeffs_edu,\n coeffs_home,\n type_spec_shares,\n type_spec_shifts,\n )\n\n assert_allclose(pyth, f2py)\n\n # Carry some results from the systematic rewards calculation for future use and\n # create the required set of disturbances.\n periods_draws_emax = create_draws(\n num_periods, num_draws_emax, seed_emax, is_debug\n )\n\n # Save result for next test.\n periods_rewards_systematic = pyth.copy()\n\n # Fix for hardcoded myopic agents.\n optim_paras[\"delta\"] = 0.00000000000000001\n\n # Check backward induction procedure.\n state_space = pyth_backward_induction(\n periods_draws_emax,\n state_space,\n is_debug,\n is_interpolated,\n num_points_interp,\n optim_paras,\n file_sim,\n False,\n )\n _, _, _, pyth = state_space._get_fortran_counterparts()\n\n f2py = fort_debug.wrapper_backward_induction(\n num_periods,\n False,\n state_space.states_per_period.max(),\n periods_draws_emax,\n num_draws_emax,\n state_space.states_per_period,\n periods_rewards_systematic,\n mapping_state_idx,\n states_all,\n is_debug,\n is_interpolated,\n num_points_interp,\n edu_spec[\"start\"],\n edu_spec[\"max\"],\n shocks_cholesky,\n delta,\n coeffs_common,\n coeffs_a,\n coeffs_b,\n file_sim,\n False,\n )\n\n assert_allclose(pyth, f2py)",
"def test_bare_pass_manager_multiple(self):\n qc0 = QuantumCircuit(1)\n qc1 = QuantumCircuit(2)\n\n pm = PassManager([])\n result = pm.run([qc0, qc1])\n\n self.assertIsInstance(result, list)\n self.assertEqual(len(result), 2)\n\n for qc, new_qc in zip([qc0, qc1], result):\n self.assertIsInstance(new_qc, QuantumCircuit)\n self.assertEqual(new_qc, qc) # pm has no passes",
"async def test_signal_repetitions_alternation(hass: HomeAssistant, monkeypatch) -> None:\n config = {\n \"rflink\": {\"port\": \"/dev/ttyABC0\"},\n DOMAIN: {\n \"platform\": \"rflink\",\n \"devices\": {\n \"protocol_0_0\": {\"name\": \"test\", \"signal_repetitions\": 2},\n \"protocol_0_1\": {\"name\": \"test1\", \"signal_repetitions\": 2},\n },\n },\n }\n\n # setup mocking rflink module\n _, _, protocol, _ = await mock_rflink(hass, config, DOMAIN, monkeypatch)\n\n await hass.services.async_call(\n DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: f\"{DOMAIN}.test\"}\n )\n await hass.services.async_call(\n DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: f\"{DOMAIN}.test1\"}\n )\n\n await hass.async_block_till_done()\n\n assert protocol.send_command_ack.call_args_list[0][0][0] == \"protocol_0_0\"\n assert protocol.send_command_ack.call_args_list[1][0][0] == \"protocol_0_1\"\n assert protocol.send_command_ack.call_args_list[2][0][0] == \"protocol_0_0\"\n assert protocol.send_command_ack.call_args_list[3][0][0] == \"protocol_0_1\"",
"async def test_form_multiple_services(hass: HomeAssistant) -> None:\n result = await hass.config_entries.flow.async_init(\n DOMAIN, context={\"source\": config_entries.SOURCE_USER}\n )\n assert result[\"type\"] == RESULT_TYPE_FORM\n assert result[\"errors\"] is None\n\n with patch(\"aussiebb.asyncio.AussieBB.__init__\", return_value=None), patch(\n \"aussiebb.asyncio.AussieBB.login\", return_value=True\n ), patch(\"aussiebb.asyncio.AussieBB.get_services\", return_value=FAKE_SERVICES):\n result2 = await hass.config_entries.flow.async_configure(\n result[\"flow_id\"],\n FAKE_DATA,\n )\n await hass.async_block_till_done()\n\n assert result2[\"type\"] == RESULT_TYPE_FORM\n assert result2[\"step_id\"] == \"service\"\n assert result2[\"errors\"] is None\n\n with patch(\n \"homeassistant.components.aussie_broadband.async_setup_entry\",\n return_value=True,\n ) as mock_setup_entry:\n result3 = await hass.config_entries.flow.async_configure(\n result[\"flow_id\"],\n {CONF_SERVICES: [FAKE_SERVICES[1][\"service_id\"]]},\n )\n await hass.async_block_till_done()\n\n assert result3[\"type\"] == RESULT_TYPE_CREATE_ENTRY\n assert result3[\"title\"] == TEST_USERNAME\n assert result3[\"data\"] == FAKE_DATA\n assert result3[\"options\"] == {\n CONF_SERVICES: [FAKE_SERVICES[1][\"service_id\"]],\n }\n assert len(mock_setup_entry.mock_calls) == 1",
"def test_check_occurs_once(self, test_generator):\n feature = test_generator()\n with mock_availability_test(feature) as check:\n check.assert_not_called()\n if feature:\n pass\n check.assert_called_once()\n\n if feature:\n feature.require_now(\"no message\")\n feature.require_in_call(lambda: None)()\n feature.require_in_call(\"no message\")(lambda: None)()\n feature.require_in_instance(type(\"Dummy\", (), {}))()\n feature.require_in_instance(\"no message\")(type(\"Dummy\", (), {}))()\n\n check.assert_called_once()",
"def test_ParameterManagerGenerator_concurrent():\n components_1 = {\n \"scale\": mock_component(),\n \"decay\": mock_component(),\n \"absorption\": mock_component(),\n }\n data_manager = mock_data_manager(components_1)\n\n pmg = ParameterManagerGenerator(\n [data_manager],\n apm_type=active_parameter_manager,\n target=ScalingTarget(),\n mode=\"concurrent\",\n )\n apms = pmg.parameter_managers()\n assert len(apms) == 1\n apm = apms[0]\n assert isinstance(apm, multi_active_parameter_manager)\n assert \"scale\" in apm.components_list\n assert \"decay\" in apm.components_list\n assert \"absorption\" in apm.components_list\n\n components_1 = {\n \"scale\": mock_component(),\n \"decay\": mock_component(),\n \"absorption\": mock_component(),\n }\n components_2 = {\"1\": mock_component(), \"2\": mock_component()}\n data_manager_1 = mock_data_manager(components_1)\n data_manager_2 = mock_data_manager(components_2)\n\n pmg = ParameterManagerGenerator(\n [data_manager_1, data_manager_2],\n apm_type=active_parameter_manager,\n target=ScalingTarget(),\n mode=\"concurrent\",\n )\n multi_apms = pmg.parameter_managers()\n assert len(multi_apms) == 1\n multi_apm = multi_apms[0]\n assert isinstance(multi_apm, multi_active_parameter_manager)\n for apm in multi_apm.apm_list:\n assert isinstance(apm, active_parameter_manager)\n assert \"scale\" in multi_apm.apm_list[0].components_list\n assert \"decay\" in multi_apm.apm_list[0].components_list\n assert \"absorption\" in multi_apm.apm_list[0].components_list\n assert \"1\" in multi_apm.apm_list[1].components_list\n assert \"2\" in multi_apm.apm_list[1].components_list\n\n # now try fixing a component\n data_manager.fixed_components = [\"absorption\"]\n pmg = ParameterManagerGenerator(\n [data_manager],\n apm_type=active_parameter_manager,\n target=ScalingTarget(),\n mode=\"concurrent\",\n )\n apms = pmg.parameter_managers()\n assert len(apms) == 1\n apm = apms[0]\n assert isinstance(apm, multi_active_parameter_manager)\n assert \"scale\" in apm.components_list\n assert \"decay\" in apm.components_list\n assert \"absorption\" not in apm.components_list",
"def test_mutate(self):\n f0 = 5 * (np.random.rand(10, 5) - 0.5)\n ga = population.Evolver(f0, eval_one_max)\n\n self.assertFalse(ga.generations[-1].new)\n\n for i in range(10):\n ga.mutate()\n\n self.assertTrue(ga.generations[-1].new)",
"def __init__(self, asa_factory: AsaFactory):\n self.step_in_progress = False\n self.asa_factory = asa_factory",
"def test_02_visit_again(self):",
"def test_fleur_relax_continue_converged(self, run_with_cache, mock_code_factory):\n assert False",
"def test_setup_multiple_parameters_system():\n with mmtools.utils.temporary_directory() as tmp_dir:\n yaml_script = get_template_script(tmp_dir)\n\n # Force antechamber parametrization of benzene to output frcmod file\n exp_builder = ExperimentBuilder(yaml_script)\n exp_builder._db._setup_molecules('benzene')\n benzene_dir = exp_builder._db.get_molecule_dir('benzene')\n frcmod_path = os.path.join(benzene_dir, 'benzene.frcmod')\n benzene_path = os.path.join(benzene_dir, 'benzene.gaff.mol2')\n\n # Redefine benzene to use leaprc.gaff and benzene.frcmod\n # and set up system for hydration free energy calculation\n yaml_script['molecules'] = {\n 'benzene-frcmod': {'filepath': benzene_path,\n 'leap': {'parameters': ['leaprc.gaff', frcmod_path]}}}\n yaml_script['systems'] = {\n 'system':\n {'solute': 'benzene-frcmod', 'solvent1': 'PME', 'solvent2': 'vacuum',\n 'leap': {'parameters': 'oldff/leaprc.ff14SB'}}\n }\n del yaml_script['experiments']\n\n exp_builder = ExperimentBuilder(yaml_script)\n system_files_path = exp_builder._db.get_system('system')\n\n # Check that output exist:\n for phase in system_files_path:\n assert os.path.exists(phase.parameters_path)\n assert os.path.exists(phase.position_path)\n assert os.path.getsize(phase.parameters_path) > 0\n assert os.path.getsize(phase.position_path) > 0",
"def test_ajuste(self):\n\n def test(clk, nrst, tick, ajuste, ajuste_hora, ajuste_min, ajuste_seg, hora, min, seg):\n\n yield delay(tick_period * randint(60, 180))\n ajuste.next = 1\n ajuste_hora.next = 5\n ajuste_min.next = 10\n ajuste_seg.next = 0\n\n yield delay(tick_period*2)\n self.assertEqual(5, hora)\n self.assertEqual(10, min)\n self.assertEqual(0, seg)\n\n ajuste.next = 0\n yield delay(tick_period)\n self.assertEqual(5, hora)\n self.assertEqual(10, min)\n self.assertEqual(1, seg)\n\n runSim(test, 60*60*3*tick_period)",
"def test_AFQ_FA():\n _, bids_path, _ = get_temp_hardi()\n myafq = api.AFQ(\n bids_path=bids_path,\n dmriprep='vistasoft',\n reg_template='dti_fa_template',\n reg_subject='dti_fa_subject')\n myafq.rois",
"def test_ipam_services_partial_update(self):\n pass",
"def test_forces_and_energies(simulation_factory, lattice_snapshot_factory,\n external_params):\n # unpack parameters\n cls_obj, param_attr, list_params, evaluator = external_params\n\n for param in list_params:\n # create class instance\n obj_instance = cls_obj()\n getattr(obj_instance, param_attr)['A'] = param\n\n # set up simulation and run a bit\n snap = lattice_snapshot_factory(n=2)\n if snap.communicator.rank == 0:\n snap.particles.charge[:] = np.random.random(\n snap.particles.N) * 2 - 1\n sim = simulation_factory(snap)\n sim.operations.integrator = hoomd.md.Integrator(dt=0.001)\n sim.operations.integrator.forces.append(obj_instance)\n sim.run(10)\n\n # test energies\n new_snap = sim.state.get_snapshot()\n forces = sim.operations.integrator.forces[0].forces\n energies = sim.operations.integrator.forces[0].energies\n if new_snap.communicator.rank == 0:\n expected_forces, expected_energies = evaluator(new_snap, param)\n # Set atol as the energies and forces very close to 0.\n # It would be better to run a test that applies appreciable forces\n # and energies.\n np.testing.assert_allclose(expected_forces, forces, atol=1e-5)\n np.testing.assert_allclose(expected_energies, energies, atol=1e-5)",
"def test_10(self, test):\r\n return test.MANUAL()"
] | [
"0.5912187",
"0.57896715",
"0.57230085",
"0.56915814",
"0.5550722",
"0.55056584",
"0.5462291",
"0.54244",
"0.54005516",
"0.53685725",
"0.53471875",
"0.53402764",
"0.5339042",
"0.53294677",
"0.528481",
"0.5281426",
"0.52730227",
"0.5270677",
"0.52555203",
"0.5252101",
"0.5242871",
"0.5232969",
"0.5232711",
"0.522285",
"0.5222638",
"0.52195626",
"0.5218014",
"0.52177167",
"0.5209131",
"0.5196099"
] | 0.6325197 | 0 |
Test the scaling-specific parameter manager. | def test_scaling_active_parameter_manager():
components_2 = {"1": mock_scaling_component(2), "2": mock_scaling_component(2)}
scaling_apm = scaling_active_parameter_manager(components_2, ["1"])
assert list(scaling_apm.constant_g_values[0]) == list(
components_2["2"].calculate_scales()
)
assert len(scaling_apm.constant_g_values) == 1
assert scaling_apm.n_obs == [2]
# Test that no constant_g_values if both components selected
scaling_apm = scaling_active_parameter_manager(components_2, ["1", "2"])
assert scaling_apm.constant_g_values is None
# Check that one can't initialise with an unequal number of reflections,
# either within the selection or overall.
with pytest.raises(AssertionError):
components_2 = {"1": mock_scaling_component(2), "2": mock_scaling_component(1)}
scaling_apm = scaling_active_parameter_manager(components_2, ["1", "2"])
with pytest.raises(AssertionError):
components_2 = {"1": mock_scaling_component(2), "2": mock_scaling_component(1)}
scaling_apm = scaling_active_parameter_manager(components_2, ["1"])
data_manager = mock_data_manager(components_2)
pmg = ScalingParameterManagerGenerator(
[data_manager], target=ScalingTarget(), mode="concurrent"
)
assert isinstance(pmg.apm_type, type(scaling_active_parameter_manager)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_get_measure_parameters(self):\n pass",
"def test_scale(app):\n\n assert False",
"def test_general_apm():\n components = {\n \"scale\": mock_component(),\n \"decay\": mock_component(),\n \"absorption\": mock_component(),\n }\n\n apm = active_parameter_manager(components, [\"scale\", \"decay\"])\n assert \"decay\" in apm.components_list\n assert \"scale\" in apm.components_list\n assert \"absorption\" not in apm.components_list\n assert apm.n_active_params == (\n components[\"scale\"].n_params + components[\"decay\"].n_params\n )\n n_cumul = 0\n for component in apm.components:\n assert apm.components[component][\"n_params\"] == components[component].n_params\n assert apm.components[component][\"start_idx\"] == n_cumul\n assert (\n apm.components[component][\"end_idx\"]\n == n_cumul + apm.components[component][\"n_params\"]\n )\n n_cumul += apm.components[component][\"n_params\"]\n\n apm.set_param_vals(flex.double([2.0, 1.5]))\n assert apm.get_param_vals() == flex.double([2.0, 1.5])\n # Test params were updated in components\n assert list(components[\"scale\"].free_parameters) == [2.0]\n assert list(components[\"decay\"].free_parameters) == [1.5]\n # Test selection of parameters\n decay_params = apm.select_parameters(\"decay\")\n assert len(decay_params) == 1\n assert decay_params[0] == 1.5\n\n # Test calculate model state uncertainties\n var_cov = flex.double([1.0, 0.5, 0.5, 2.0])\n var_cov.reshape(flex.grid(2, 2))\n apm.calculate_model_state_uncertainties(var_cov)\n assert components[\"scale\"].var_cov_matrix[0, 0] == 1.0\n assert components[\"decay\"].var_cov_matrix[0, 0] == 2.0\n\n # Test set param esds.\n apm.set_param_esds(flex.double([0.1, 0.2]))\n assert components[\"scale\"].free_parameter_esds == flex.double([0.1])\n assert components[\"decay\"].free_parameter_esds == flex.double([0.2])",
"def test_parameters(self):\n self.assert_initialize_driver()\n #reply = self.driver_client.cmd_dvr('get_resource', Parameter.ALL)\n #self.assert_driver_parameters(reply, verify_sample_interval=True)",
"def test_get_all_scaled_scores_success(self):\n with mock.patch('score.ScoresGenerator.split_data') as mock_split_data:\n with mock.patch('score.ScoresGenerator.create_category_scaled_score') \\\n as mock_scaled_category:\n with mock.patch('score.ScoresGenerator.create_total_scaled_score') \\\n as mock_scaled_total:\n for test in self.success_get_all_scaled_score_test_params:\n score_test = score.ScoresGenerator()\n score_test.get_all_scaled_scores(test[KEY_INPUT])\n self.assertDictEqual(score_test.SCALED_SCORES, test[KEY_EXPECTED])",
"def test_arguments(self):\n\n h.test_function_arguments(\n func=ScalingTransformer.__init__,\n expected_arguments=[\"self\", \"columns\", \"scaler\", \"scaler_kwargs\"],\n expected_default_values=({},),\n )",
"def test_arguments(self):\n\n h.test_function_arguments(\n func=ScalingTransformer.fit,\n expected_arguments=[\"self\", \"X\", \"y\"],\n expected_default_values=(None,),\n )",
"def is_scale_enabled(self) -> bool:\r\n ...",
"async def test_floating_point_scale(hass, mock_hub):\n register_config = {\n CONF_COUNT: 1,\n CONF_DATA_TYPE: DATA_TYPE_INT,\n CONF_SCALE: 2.4,\n CONF_OFFSET: 0,\n CONF_PRECISION: 2,\n }\n await run_test(\n hass,\n mock_hub,\n register_config,\n SENSOR_DOMAIN,\n register_words=[1],\n expected=\"2.40\",\n )",
"def test_multi_apm():\n\n components_1 = {\n \"scale\": mock_component(),\n \"decay\": mock_component(),\n \"absorption\": mock_component(),\n }\n components_2 = {\"scale\": mock_component(), \"decay\": mock_component()}\n\n multi_apm = multi_active_parameter_manager(\n ScalingTarget(),\n [components_1, components_2],\n [[\"scale\", \"decay\"], [\"scale\"]],\n active_parameter_manager,\n )\n\n # Test correct setup of apm_list attribute.\n for apm in multi_apm.apm_list:\n assert isinstance(apm, active_parameter_manager)\n assert len(multi_apm.apm_list) == 2\n assert multi_apm.components_list == [\"scale\", \"decay\", \"scale\"]\n assert multi_apm.n_active_params == 3\n assert multi_apm.apm_data[0] == {\"start_idx\": 0, \"end_idx\": 2}\n assert multi_apm.apm_data[1] == {\"start_idx\": 2, \"end_idx\": 3}\n\n # Test parameter selection.\n multi_apm.set_param_vals(flex.double([3.0, 2.5, 2.0]))\n assert multi_apm.get_param_vals() == flex.double([3.0, 2.5, 2.0])\n assert multi_apm.select_parameters(0) == flex.double([3.0, 2.5])\n assert multi_apm.select_parameters(1) == flex.double([2.0])\n\n # Test setting parameter esds.\n multi_apm.set_param_esds(flex.double([0.1, 0.2, 0.3]))\n assert components_1[\"scale\"].free_parameter_esds == flex.double([0.1])\n assert components_1[\"decay\"].free_parameter_esds == flex.double([0.2])\n assert components_2[\"scale\"].free_parameter_esds == flex.double([0.3])\n\n # Test setting var_cov matrices for each component.\n var_cov = flex.double([1.0, 0.5, 0.5, 0.5, 2.0, 0.5, 0.5, 0.5, 3.0])\n var_cov.reshape(flex.grid(3, 3))\n multi_apm.calculate_model_state_uncertainties(var_cov)\n assert components_1[\"scale\"].var_cov_matrix[0, 0] == 1.0\n assert components_1[\"decay\"].var_cov_matrix[0, 0] == 2.0\n assert components_2[\"scale\"].var_cov_matrix[0, 0] == 3.0",
"def test_scaling(self):\n def runs_successfully(use_scal, coeffs):\n prob = om.Problem()\n prob.model.add_subsystem('row1', ScalingTestComp(row=1, coeffs=coeffs,\n use_scal=use_scal))\n prob.model.add_subsystem('row2', ScalingTestComp(row=2, coeffs=coeffs,\n use_scal=use_scal))\n prob.model.connect('row1.y', 'row2.x')\n prob.model.connect('row2.y', 'row1.x')\n prob.model.nonlinear_solver = om.NewtonSolver(solve_subsystems=False, maxiter=2, atol=1e-5, rtol=0)\n prob.model.nonlinear_solver.linear_solver = om.ScipyKrylov(maxiter=1)\n\n prob.set_solver_print(level=0)\n\n prob.setup()\n prob.run_model()\n\n return np.linalg.norm(prob.model._residuals.asarray()) < 1e-5\n\n # ---------------------------\n # coeffs: r1, r2, c1, c2\n coeffs = [1.e0, 1.e0, 1.e0, 1.e0]\n\n # Don't use scaling - but there's no need\n use_scal = False\n self.assertTrue(runs_successfully(use_scal, coeffs))\n # Use scaling - but there's no need\n use_scal = True\n self.assertTrue(runs_successfully(use_scal, coeffs))\n\n # ---------------------------\n # coeffs: r1, r2, c1, c2 - test output scaling:\n coeffs = [1.e0, 1.e0, 1.e10, 1.e0]\n\n # Don't use scaling - but output scaling needed\n use_scal = False\n self.assertTrue(not runs_successfully(use_scal, coeffs))\n # Use scaling - output scaling works successfully\n use_scal = True\n self.assertTrue(runs_successfully(use_scal, coeffs))\n\n # ---------------------------\n # coeffs: r1, r2, c1, c2 - test residual scaling:\n coeffs = [1.e10, 1.e0, 1.e10, 1.e0]\n\n # Don't use scaling - but residual scaling needed\n use_scal = False\n self.assertTrue(not runs_successfully(use_scal, coeffs))\n # Use scaling - residual scaling works successfully\n use_scal = True\n self.assertTrue(runs_successfully(use_scal, coeffs))",
"def _validate_params(self, request_set, target_set=None, context=None):\n\n # Perform first-pass validation in Function.__init__():\n # - returns full set of params based on subclass paramClassDefaults\n super(Mechanism, self)._validate_params(request_set,target_set,context)\n\n params = target_set\n\n #region VALIDATE TIME SCALE\n try:\n param_value = params[TIME_SCALE]\n except KeyError:\n if COMMAND_LINE in context:\n pass\n else:\n self.timeScale = timeScaleSystemDefault\n else:\n if isinstance(param_value, TimeScale):\n self.timeScale = params[TIME_SCALE]\n else:\n if self.prefs.verbosePref:\n print(\"Value for {0} ({1}) param of {2} must be of type {3}; default will be used: {4}\".\n format(TIME_SCALE, param_value, self.name, type(TimeScale), timeScaleSystemDefault))\n #endregion\n\n #region VALIDATE INPUT STATE(S)\n\n # MODIFIED 6/10/16\n # FIX: SHOULD CHECK LENGTH OF INPUT_STATES PARAM (LIST OF NAMES OR SPECIFICATION DICT) AGAINST LENGTH OF\n # FIX: self.variable 2D ARRAY AND COMPARE variable SPECS, IF PROVIDED, WITH CORRESPONDING ELEMENTS OF\n # FIX: self.variable 2D ARRAY\n try:\n param_value = params[INPUT_STATES]\n\n except KeyError:\n if COMMAND_LINE in context:\n pass\n else:\n # INPUT_STATES not specified:\n # - set to None, so that it is set to default (self.variable) in instantiate_inputState\n # - if in VERBOSE mode, warn in instantiate_inputState, where default value is known\n params[INPUT_STATES] = None\n\n else:\n # INPUT_STATES is specified, so validate:\n # If it is a single item or a non-OrderedDict, place in a list (for use here and in instantiate_inputState)\n if not isinstance(param_value, (list, OrderedDict)):\n param_value = [param_value]\n # Validate each item in the list or OrderedDict\n # Note:\n # * number of inputStates is validated against length of the owner mechanism's execute method variable (EMV)\n # in instantiate_inputState, where an inputState is assigned to each item (value) of the EMV\n i = 0\n for key, item in param_value if isinstance(param_value, dict) else enumerate(param_value):\n from PsyNeuLink.Components.States.InputState import InputState\n # If not valid...\n if not ((isclass(item) and (issubclass(item, InputState) or # InputState class ref\n issubclass(item, Projection))) or # Project class ref\n isinstance(item, InputState) or # InputState object\n isinstance(item, dict) or # InputState specification dict\n isinstance(item, ParamValueProjection) or # ParamValueProjection tuple\n isinstance(item, str) or # Name (to be used as key in inputStates dict)\n iscompatible(item, **{kwCompatibilityNumeric: True})): # value\n # set to None, so it is set to default (self.variable) in instantiate_inputState\n param_value[key] = None\n if self.prefs.verbosePref:\n print(\"Item {0} of {1} param ({2}) in {3} is not a\"\n \" InputState, specification dict or value, nor a list of dict of them; \"\n \"variable ({4}) of execute method for {5} will be used\"\n \" to create a default outputState for {3}\".\n format(i,\n INPUT_STATES,\n param_value,\n self.__class__.__name__,\n self.variable,\n self.execute.__self__.name))\n i += 1\n params[INPUT_STATES] = param_value\n #endregion\n\n #region VALIDATE EXECUTE METHOD PARAMS\n try:\n function_param_specs = params[FUNCTION_PARAMS]\n except KeyError:\n if COMMAND_LINE in context:\n pass\n elif self.prefs.verbosePref:\n print(\"No params specified for {0}\".format(self.__class__.__name__))\n else:\n if not (isinstance(function_param_specs, dict)):\n raise MechanismError(\"{0} in {1} must be a dict of param 
specifications\".\n format(FUNCTION_PARAMS, self.__class__.__name__))\n # Validate params\n from PsyNeuLink.Components.States.ParameterState import ParameterState\n for param_name, param_value in function_param_specs.items():\n try:\n default_value = self.paramInstanceDefaults[FUNCTION_PARAMS][param_name]\n except KeyError:\n raise MechanismError(\"{0} not recognized as a param of execute method for {1}\".\n format(param_name, self.__class__.__name__))\n if not ((isclass(param_value) and\n (issubclass(param_value, ParameterState) or\n issubclass(param_value, Projection))) or\n isinstance(param_value, ParameterState) or\n isinstance(param_value, Projection) or\n isinstance(param_value, dict) or\n isinstance(param_value, ParamValueProjection) or\n iscompatible(param_value, default_value)):\n params[FUNCTION_PARAMS][param_name] = default_value\n if self.prefs.verbosePref:\n print(\"{0} param ({1}) for execute method {2} of {3} is not a ParameterState, \"\n \"projection, ParamValueProjection, or value; default value ({4}) will be used\".\n format(param_name,\n param_value,\n self.execute.__self__.componentName,\n self.__class__.__name__,\n default_value))\n #endregion\n # FIX: MAKE SURE OUTPUT OF EXECUTE FUNCTION / SELF.VALUE IS 2D ARRAY, WITH LENGTH == NUM OUTPUT STATES\n\n #region VALIDATE OUTPUT STATE(S)\n\n # FIX: MAKE SURE # OF OUTPUTS == LENGTH OF OUTPUT OF EXECUTE FUNCTION / SELF.VALUE\n try:\n param_value = params[OUTPUT_STATES]\n\n except KeyError:\n if COMMAND_LINE in context:\n pass\n else:\n # OUTPUT_STATES not specified:\n # - set to None, so that it is set to default (self.value) in instantiate_outputState\n # Notes:\n # * if in VERBOSE mode, warning will be issued in instantiate_outputState, where default value is known\n # * number of outputStates is validated against length of owner mechanism's execute method output (EMO)\n # in instantiate_outputState, where an outputState is assigned to each item (value) of the EMO\n params[OUTPUT_STATES] = None\n\n else:\n # OUTPUT_STATES is specified, so validate:\n # If it is a single item or a non-OrderedDict, place in a list (for use here and in instantiate_outputState)\n if not isinstance(param_value, (list, OrderedDict)):\n param_value = [param_value]\n # Validate each item in the list or OrderedDict\n i = 0\n for key, item in param_value if isinstance(param_value, dict) else enumerate(param_value):\n from PsyNeuLink.Components.States.OutputState import OutputState\n # If not valid...\n if not ((isclass(item) and issubclass(item, OutputState)) or # OutputState class ref\n isinstance(item, OutputState) or # OutputState object\n isinstance(item, dict) or # OutputState specification dict\n isinstance(item, str) or # Name (to be used as key in outputStates dict)\n iscompatible(item, **{kwCompatibilityNumeric: True})): # value\n # set to None, so it is set to default (self.value) in instantiate_outputState\n param_value[key] = None\n if self.prefs.verbosePref:\n print(\"Item {0} of {1} param ({2}) in {3} is not a\"\n \" OutputState, specification dict or value, nor a list of dict of them; \"\n \"output ({4}) of execute method for {5} will be used\"\n \" to create a default outputState for {3}\".\n format(i,\n OUTPUT_STATES,\n param_value,\n self.__class__.__name__,\n self.value,\n self.execute.__self__.name))\n i += 1\n params[OUTPUT_STATES] = param_value",
"def test_get_measure_parameters_by_id(self):\n pass",
"def test_sample_quality(self):\r\n self.assertEqual(self.test_sample.quality, 'medium')",
"def __init__(self, params={}, verbosity=0, testing_level=1, testing_verbosity=1):\r\n self.verbosity = verbosity\r\n self.testing_unit = UnitTests.ParticleSwarmUnitTests(testing_level=testing_level, verbosity=testing_verbosity)\r\n\r\n for key, val in params.items():\r\n self.set(key, val) # invoke set so that all continuous checking for changed parameters happens only once\r\n # place\r",
"def test_test_group_parameters(self):\n pass",
"def describe_scaling_parameters(DomainName=None):\n pass",
"def plane_scale(self, scale):\n cmd = '{}testPlaneScale {}'.format(self.console, scale)\n self.write_command(cmd)",
"def test_get_scale_factors(generate_workchain, generate_eos_inputs, scaling_inputs, expected):\n inputs = generate_eos_inputs()\n\n # This conditional and conversion is necessary because for `aiida-core<2.0` the `list` type is not automatically\n # serialized to a `List` node. Once we require `aiida-core>=2.0`, this can be removed. The reason we couldn't\n # already simply turn the ``scaling_inputs`` into a ``orm.List`` is that during the parametrization done by pytest\n # no AiiDA profile will have been loaded yet and so creating a node will raise an exception.\n if 'scale_factors' in scaling_inputs and isinstance(scaling_inputs['scale_factors'], list):\n scaling_inputs['scale_factors'] = orm.List(list=scaling_inputs['scale_factors'])\n\n inputs.update(scaling_inputs)\n process = generate_workchain('common_workflows.eos', inputs)\n assert process.get_scale_factors() == expected",
"def test_SMEL_args():\n testing_function('sme', bilinear=False)",
"def test_arguments(self):\n\n h.test_function_arguments(\n func=ScalingTransformer.transform,\n expected_arguments=[\"self\", \"X\"],\n expected_default_values=None,\n )",
"def mock_scaling_component(n_refl):\n component = mock_component()\n component.calculate_scales.return_value = flex.double(n_refl, 1.0)\n component.n_refl = [n_refl]\n return component",
"def testSimParametersCorrectlyStored(self):\n sim_pars = self.tree.get_simulation_parameters()\n self.assertEqual(10 ** -8, sim_pars[\"m_probability\"])\n self.assertEqual(160, sim_pars[\"cutoff\"])",
"async def test_scale_and_offset(hass, mock_hub):\n register_config = {\n CONF_COUNT: 1,\n CONF_DATA_TYPE: DATA_TYPE_INT,\n CONF_SCALE: 3,\n CONF_OFFSET: 13,\n CONF_PRECISION: 0,\n }\n await run_test(\n hass,\n mock_hub,\n register_config,\n SENSOR_DOMAIN,\n register_words=[7],\n expected=\"34\",\n )",
"def testSimParamsStored(self):\n params = self.tree.get_simulation_parameters()\n actual_sim_parameters = dict(\n seed=1,\n task=30,\n output_dir=\"output\",\n speciation_rate=0.1,\n sigma=4.0,\n tau=4.0,\n deme=1,\n sample_size=0.1,\n max_time=2.0,\n dispersal_relative_cost=1.0,\n min_num_species=1,\n habitat_change_rate=0.0,\n gen_since_historical=0.0,\n time_config_file=\"null\",\n coarse_map_file=\"sample/SA_sample_coarse.tif\",\n coarse_map_x=35,\n coarse_map_y=41,\n coarse_map_x_offset=11,\n coarse_map_y_offset=14,\n coarse_map_scale=1.0,\n fine_map_file=\"sample/SA_sample_fine.tif\",\n fine_map_x=13,\n fine_map_y=13,\n fine_map_x_offset=0,\n fine_map_y_offset=0,\n sample_file=\"null\",\n grid_x=13,\n grid_y=13,\n sample_x=13,\n sample_y=13,\n sample_x_offset=0,\n sample_y_offset=0,\n historical_coarse_map=\"none\",\n historical_fine_map=\"none\",\n sim_complete=1,\n dispersal_method=\"normal\",\n m_probability=0.0,\n cutoff=0.0,\n landscape_type=\"tiled_coarse\",\n protracted=0,\n min_speciation_gen=0.0,\n max_speciation_gen=0.0,\n dispersal_map=\"none\",\n )\n for key in params.keys():\n self.assertEqual(params[key], actual_sim_parameters[key], msg=\"Error in {}\".format(key))\n self.assertEqual(self.tree.get_job()[0], 1)\n self.assertEqual(self.tree.get_job()[1], 30)",
"def test_sample_one_quality(self):\r\n self.assertEqual(self.test_sample.quality, 'medium')",
"def testParamsAreStrings(self):\n self.chart.display.extra_params['test'] = 32\n self.assertEqual(self.Param('test'), '32')",
"def test_base_hyper_parameters_reg(self):\n hyper_parameter_set = modelgen.generate_base_hyper_parameter_set()\n assert 'regularization_rate' in hyper_parameter_set.keys()",
"def test_get_mt_settings(self):\n pass",
"def test_set_params_2():\n tpot_obj = TPOTClassifier(generations=2)\n tpot_obj.set_params(generations=3)\n\n assert tpot_obj.generations == 3"
] | [
"0.6702554",
"0.6387912",
"0.6046434",
"0.6030869",
"0.59028023",
"0.5901838",
"0.5870547",
"0.58489865",
"0.5837942",
"0.583072",
"0.5788725",
"0.57385606",
"0.57318795",
"0.5716319",
"0.56859964",
"0.56810164",
"0.56458664",
"0.5612986",
"0.56105846",
"0.5608693",
"0.5561435",
"0.55288047",
"0.55212134",
"0.55166095",
"0.5496967",
"0.5490376",
"0.54730386",
"0.54667073",
"0.5458826",
"0.5451223"
] | 0.7040565 | 0 |
Receive a request from the worker. work_socket - receive a request on this socket. timeout - if request isn't received by the timeout, raise six.moves.queue.Empty; default = blocks forever. This polls on both the worker and up_queue sockets and will throw an exception if there is anything available on the up-queue, as this indicates that nothing is running. | def recv(self, work_socket, timeout=None):
poller = zmq.Poller()
poller.register(self.up_queue_recv_socket, zmq.POLLIN)
poller.register(work_socket, zmq.POLLIN)
for socket, state in poller.poll(timeout):
if socket == self.up_queue_recv_socket and state == zmq.POLLIN:
result, e = self.up_queue.get()
if e is not None:
raise e
else:
raise cellprofiler_core.pipeline.event.CancelledException(
"Unexpected exit during recv"
)
if socket == work_socket and state == zmq.POLLIN:
return cellprofiler_core.utilities.zmq.communicable.Communicable.recv(
work_socket
)
raise six.moves.queue.Empty | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def process_request_thread(self):\n while True:\n try:\n request, client_address = self._request_queue.get(\n timeout=self.timeout_on_get,\n )\n except Queue.Empty:\n # You wouldn't believe how much crap this can end up leaking,\n # so we clear the exception.\n sys.exc_clear()\n if self._shutdown_event.isSet():\n return\n continue\n try:\n self.finish_request(request, client_address)\n self.shutdown_request(request)\n except:\n self.handle_error(request, client_address)\n self.shutdown_request(request)\n self._request_queue.task_done()",
"def poll(self, timeout=-1):\n future = self._Future()\n if timeout == 0:\n try:\n result = super(_AsyncPoller, self).poll(0)\n except Exception as e:\n future.set_exception(e)\n else:\n future.set_result(result)\n return future\n \n loop = self._default_loop()\n \n # register Future to be called as soon as any event is available on any socket\n watcher = self._Future()\n \n # watch raw sockets:\n raw_sockets = []\n def wake_raw(*args):\n if not watcher.done():\n watcher.set_result(None)\n\n watcher.add_done_callback(lambda f: self._unwatch_raw_sockets(loop, *raw_sockets))\n\n for socket, mask in self.sockets:\n if isinstance(socket, _zmq.Socket):\n if not isinstance(socket, self._socket_class):\n # it's a blocking zmq.Socket, wrap it in async\n socket = self._socket_class.from_socket(socket)\n if mask & _zmq.POLLIN:\n socket._add_recv_event('poll', future=watcher)\n if mask & _zmq.POLLOUT:\n socket._add_send_event('poll', future=watcher)\n else:\n raw_sockets.append(socket)\n evt = 0\n if mask & _zmq.POLLIN:\n evt |= self._READ\n if mask & _zmq.POLLOUT:\n evt |= self._WRITE\n self._watch_raw_socket(loop, socket, evt, wake_raw)\n\n def on_poll_ready(f):\n if future.done():\n return\n if watcher.cancelled():\n try:\n future.cancel()\n except RuntimeError:\n # RuntimeError may be called during teardown\n pass\n return\n if watcher.exception():\n future.set_exception(watcher.exception())\n else:\n try:\n result = super(_AsyncPoller, self).poll(0)\n except Exception as e:\n future.set_exception(e)\n else:\n future.set_result(result)\n watcher.add_done_callback(on_poll_ready)\n \n if timeout is not None and timeout > 0:\n # schedule cancel to fire on poll timeout, if any\n def trigger_timeout():\n if not watcher.done():\n watcher.set_result(None)\n \n timeout_handle = loop.call_later(\n 1e-3 * timeout,\n trigger_timeout\n )\n def cancel_timeout(f):\n if hasattr(timeout_handle, 'cancel'):\n timeout_handle.cancel()\n else:\n loop.remove_timeout(timeout_handle)\n future.add_done_callback(cancel_timeout)\n\n def cancel_watcher(f):\n if not watcher.done():\n watcher.cancel()\n future.add_done_callback(cancel_watcher)\n\n return future",
"def ecute(self):\n msg = self.up_queue_recv_socket.recv()\n result, e = self.up_queue.get()\n if e is not None:\n raise e\n return result",
"def poll(self):\n while self.running and reactor._started and not reactor._stopped:\n self.check_response_queue()\n sleep(0.5)",
"def _recv(self) -> None:\n if not self.connected or now() < self.next_poll:\n return\n self.next_poll += self.poll_interval\n data = []\n while True:\n try:\n data.append(self.endpoint.recv(BUFFSIZE))\n except BlockingIOError:\n break\n if data:\n stream = io.BytesIO(b\"\".join(data))\n while True:\n try:\n info = pickle.load(stream)\n msg = Message(*info)\n self.inq.append(msg)\n except EOFError:\n break",
"def WaitForRequest(self):\r\n selector = selectors.DefaultSelector()\r\n selector.register(self.listenSocket, selectors.EVENT_READ)\r\n while True:\r\n events = selector.select(timeout = 10)\r\n for __, __ in events:\r\n\r\n self.listenSocket.setblocking(True)\r\n sock, address = self.listenSocket.accept()\r\n self.listenSocket.setblocking(False) \r\n bgThread = threading.Thread(target=self.HandleRemoteCall, args=(sock, address))\r\n bgThread.start()",
"def receive(self):\n events = self.poller.poll(self.timeout)\n\n # If there is control socket, he has the priority\n if len(events) == 2:\n return self._recv_serialized(self.control_socket)\n elif len(events) == 1:\n return self._recv_serialized(events[0][0])\n return None",
"def _receive(self):\n # initialize sockets map\n r, w, x = [self.socket], [], []\n r, w, x = select.select(r, w, x, self.sessiondata.timeout)\n if r:\n return self.socket.recv(4096)\n # return nothing on timeout\n return None",
"def _worker(self):\n while True:\n request = self.queue.get()\n self.worker(request)\n self.queue.task_done()",
"def _run(self):\n\n while self._thread_alive_event.is_set():\n reported_events = self._poll.poll(self.POLL_TIMEOUT)\n\n for fd_event_pair in reported_events:\n fd, event = fd_event_pair\n\n if event & select.POLLIN or event & select.POLLPRI:\n self._recv(fd)\n\n elif event & select.POLLERR:\n self.logger.error(\"Error condition of some sort\")\n self._thread_alive_event.clear()\n break\n\n elif event & select.POLLNVAL:\n self.logger.error(\"Invalid request: descriptor not open\")\n self._thread_alive_event.clear()\n break",
"def run(self):\n while True:\n # Check to see if we should stop\n if self._stop.isSet():\n logger.debug(\"Worker thread stopping.\")\n break\n\n # Try to pull from the queue\n try:\n func, args, kwargs = self.queue.get_nowait()\n func(*args, **kwargs)\n except Queue.Empty:\n time.sleep(5)\n continue\n except Exception as e:\n logger.exception(e)",
"def work():\n with rq.Connection(create_connection()):\n worker = rq.Worker(list(map(rq.Queue, listen)))\n worker.work()",
"def receive(self, request_id, timeout=None):\n res = None\n start_time = time.time()\n while res is None:\n with self.connlock:\n res = self.conn.do_receive(request_id)\n if res is None:\n time.sleep(0.1)\n if timeout and (time.time() - start_time > timeout):\n raise RequestTimeout(request_id)\n\n if 'Error' in res:\n raise ServerError(res['Error'], res)\n\n try:\n return res['Response']\n except:\n raise BadResponseError(\"Failed to parse response: {}\".format(res))",
"def run(self) -> None:\n\n while not self.stop_event.is_set():\n if self.my_queue:\n # if heartbeat received at '/heartbeat' route from the monitored peer,\n # sleep until next\n self.my_queue.clear()\n time.sleep(7)\n\n else:\n # else drop peer data from database and inform central server appending '0'\n # to my queue\n self.db_access.drop_peer(self.peer_id)\n self.my_queue.append(0)\n break",
"async def read(self) -> None:\n make_non_blocking(self.stream)\n\n while not self.stream.closed:\n message = None\n try:\n message = await self.read_one()\n\n if not message:\n await self.sleep()\n continue\n else:\n self.wake()\n\n IOLoop.current().add_callback(self.queue.put_nowait, message)\n except Exception as e: # pragma: no cover\n self.log.exception(\n \"%s couldn't enqueue message: %s (%s)\", self, message, e\n )\n await self.sleep()",
"def run(self):\n self.poller = select.epoll()\n self.pollmask = select.EPOLLIN | select.EPOLLHUP | select.EPOLLERR\n self.poller.register(self.server,self.pollmask)\n self.timeout = float(self.webconfig.parameters[\"timeout\"])\n lastSweep = time.time()\n\n while True:\n # poll sockets\n\n if (time.time() - lastSweep) > .5: #sweet through every half second\n self.socketCheck()\n lastSweep = time.time()\n try:\n fds = self.poller.poll(timeout=1.0)\n except:\n return\n fd = 0\n for (fd,event) in fds:\n # handle errors\n if event & (select.POLLHUP | select.POLLERR):\n self.handleError(fd)\n continue\n # handle the server socket\n if fd == self.server.fileno():\n self.handleServer()\n continue\n # handle client socket\n result = self.handleClient(fd)",
"def work(self, worker_id):\n\n try:\n while self.running:\n # blocking request - timeout 3 seconds\n messageSent = False\n try:\n # throws queue.Empty exception if it fails to get an item in 3 seconds\n priorityItem = self.message_queue.get(True, 3)\n topic = priorityItem.item.topic\n self.metric_handler.increment_observed()\n print(f\"sending message on topic {topic} approximate queue size: {self.message_queue.qsize()}\")\n\n if self.sampling == True:\n if self.worker_sample_counts[worker_id][topic] == self.topic_sample_rates[topic]:\n self.send_message(priorityItem.item, worker_id)\n self.worker_sample_counts[worker_id][topic] = 0\n else:\n self.worker_sample_counts[worker_id][topic] += 1\n else:\n self.send_message(priorityItem.item, worker_id)\n \n # might not have actually been sent if we are sampling, but dont attempt to send it in finally\n messageSent = True\n\n except (ConnectionResetError, BrokenPipeError, ConnectionResetError) as e:\n # should maybe record number of times connection breaks? Will get wordy\n self.get_logger().error(f\"Error sending socket message: {str(e)}\")\n self.init_socket_with_rety(worker_id)\n except queue.Empty:\n priorityItem = None\n pass\n finally:\n # give one more attempt at sending the message if we failed\n if not messageSent and priorityItem is not None:\n try:\n self.send_message(priorityItem.item, worker_id)\n except:\n pass\n except Exception as ex:\n self.get_logger().error(f\"Worker thread {worker_id} exitting unexpectedly with error: {str(ex)}\")\n self.get_logger().error(traceback.format_exc())\n finally:\n self.get_logger().info(f\"Worker thread {worker_id} finishing.\")",
"def _poll(self):\n return self.zmq_core.poll(10)",
"def queue_processor(self):\n\n while self.state != consts.SMPP_CLIENT_STATE_CLOSED:\n try:\n p = self.queue.get(timeout=1)\n self._request_handler(p)\n self.queue.task_done()\n except Empty:\n pass",
"def run(self):\n self.channel.queue_declare(self._request_queue)\n self.channel.basic_consume(self._request_queue, self.on_message)\n try:\n msg = \"Waiting for message ...\"\n print(msg)\n logging.info(msg)\n self.channel.start_consuming()\n except KeyboardInterrupt:\n self.channel.stop_consuming()\n\n self.connection.close()",
"def _client(self):\n while True:\n body = self.queue.get(True)\n print \"Sending %s bytes (%s/%s)\" % (len(body), self.queue.qsize(), self.queue.maxsize)\n\n try:\n req = urllib2.Request(self.endpoint, body)\n urllib2.urlopen(req).read()\n except:\n print \"Cannot send request. Retrying in 5 seconds\"\n print_exception(*sys.exc_info())\n print \"continuing...\"\n self.enqueue(body)\n sleep(5)",
"def monitor_queue(self):\n\n while True:\n job = self.queue.next()\n if job:\n # print(\"found %s\" % (job.job_id))\n\n job_name = job.payload[\"job_name\"]\n\n if job_name in self.mul_func_map:\n\n t = self.mul_func_map[job_name]\n p = multiprocessing.Process(target=t, args=(job,))\n p.daemon = True\n p.start()\n\n elif job_name in self.th_func_map:\n\n t = self.th_func_map[job_name]\n # create a thread to process the job\n p = threading.Thread(target=t, args=(job,))\n p.daemon = True\n # start the thread, going into the worker function\n p.start()\n\n elif job_name in self.fk_func_map:\n t = self.fk_func_map[job_name]\n if not os.fork():\n os.setsid()\n t(job)\n exit()\n else:\n # jobs in this queue that are unknown are presently being skipped\n # however they could probably get moved to a 'dead letter' queue\n # for closer examination\n print(\"unknown job name %s, skipping\" % (job_name))\n\n # throttle so that other worker subscribers get a chance\n time.sleep(self.queue_delay)\n else:\n time.sleep(self.poll_delay)\n\n # prints the number of threads\n # print len(threading.enumerate())",
"def dequeue(self, timeout=0):\n result = self.connection.dequeue_any([self], timeout)\n if result:\n job, queue = result\n return job\n else:\n return None",
"def listen_for_messages(self, callback):\n # generate get requests for all input queues\n requests = [port.in_queue.get() for port in self.ports]\n while requests:\n # helper variable for the asserts\n queues_with_pending_requests = [req.resource for req in requests]\n # There is a request for each input queue.\n assert set(self.input_queues) == set(queues_with_pending_requests)\n # For each input queue there's exactly one request.\n assert (\n len(queues_with_pending_requests) ==\n len(set(queues_with_pending_requests)))\n\n log.debug(\"{} waiting for next reception\".format(self))\n completed_requests = (yield self.env.any_of(requests))\n received_messages = list(completed_requests.values())\n log.debug(\"{} received {}\".format(\n self, received_messages))\n\n callback(received_messages)\n\n # Only leave the requests which have not been completed yet\n remaining_requests = [\n req for req in requests if req not in completed_requests]\n # Input queues that have been emptied since the last wake up.\n emptied_queues = [req.resource for req in completed_requests]\n # Add new get requests for the input queues that have been emptied.\n new_requests = []\n for input_queue in emptied_queues:\n new_requests.append(input_queue.get())\n requests = remaining_requests + new_requests",
"async def _process_queue(self, callback, socket_info,\n has_heartbeat_seq=True):\n pending_callback = False\n while True:\n unparsed_message = await socket_info.queue.get()\n #log.debug(\"Received: \" + unparsed_message)\n response = json.loads(unparsed_message)\n # Sometimes the response is a list sometimes not. Convert to list.\n message_list = response if type(response) == list else [response]\n if not message_list:\n log.warning(\"Received empty message from Gemini. This isn't a \"\n \"type of response documented in their API docs.\")\n continue\n if message_list[0]['type'] == 'heartbeat':\n if has_heartbeat_seq:\n self._process_heartbeat(message_list[0], socket_info)\n self._check_sequence(message_list[0], socket_info)\n continue\n # A non heartbeat message.\n for message in message_list:\n self._check_sequence(message, socket_info)\n state_update = callback(message)\n if state_update:\n pending_callback = True\n if not socket_info.queue.empty():\n continue\n if pending_callback and self.is_setup():\n self.exchange_state.update_publisher.notify()\n pending_callback = False",
"def _read_from_socket(self):\n data = \"\"\n try:\n data = self.__socket.recv(SOCKET_BUFFER_SIZE)\n except socket.timeout:\n self.state[\"Errors\"] = True\n raise socket.timeout(\"Error! Socket did not get info, when expected\")\n if not data:\n s = \"Empty\"\n else:\n s = data.decode('utf-8')\n print(\"\\n === Read from socket === \\n%s\\n\" % s)\n self._load_to_queue(s)",
"async def run(self):\n while True:\n await asyncio.sleep(0)\n # See if any sockets have anything\n try:\n socks, events = self.poller.poll(1000)\n for sock, event in zip(socks,events):\n if sock in self.subscriptions:\n states = sock.recv_json()\n await self.main_server.sync_states(states)\n\n # Nothing to report - Poller did not find any sockets with updates\n except ValueError:\n pass\n # Exiting\n except KeyboardInterrupt:\n break",
"def receive_bytes(self, timeout: Optional[float] = None) -> Optional[bytes]:\n if timeout is None:\n return self._recv_socket.recv()\n\n # * 1000 as ms required here but seconds are used everywhere else\n if self._recv_poller.poll(timeout * 1000):\n return self._recv_socket.recv(flags=zmq.NOBLOCK)\n return None",
"def wait(self):\n while self._worker is None:\n # wait() before self._run()\n time.sleep(0.1)\n self._worker.join()\n return self.poll()",
"def _reply_remove_job(self):\n self.remove_job_socket.linger = 0\n self.remove_job_socket.setsockopt(zmq.RCVTIMEO, remote_constants.HEARTBEAT_RCVTIMEO_S * 1000)\n while self.worker_is_alive and self.master_is_alive:\n try:\n message = self.remove_job_socket.recv_multipart()\n tag = message[0]\n assert tag == remote_constants.KILLJOB_TAG\n to_remove_job_address = to_str(message[1])\n logger.info(\"[Worker] A job requests the worker to stop this job.\")\n self._remove_job(to_remove_job_address)\n self.remove_job_socket.send_multipart([remote_constants.NORMAL_TAG])\n except zmq.error.Again as e:\n #detect whether `self.worker_is_alive` is True periodically\n pass"
] | [
"0.6019085",
"0.6004343",
"0.58936167",
"0.5832211",
"0.5812502",
"0.58101034",
"0.5788062",
"0.5670236",
"0.5595741",
"0.55511177",
"0.55298686",
"0.54770106",
"0.54572827",
"0.5441434",
"0.5433049",
"0.54328305",
"0.5424303",
"0.5412369",
"0.5375704",
"0.5372488",
"0.5343022",
"0.5339628",
"0.5337792",
"0.53236455",
"0.5305691",
"0.5303386",
"0.52980363",
"0.5281956",
"0.5263355",
"0.52613515"
] | 0.7843548 | 0 |
Artificially set up the worker's work socket This sets self.aw.work_socket so that methods other than "run" can be tested in the worker. | def set_work_socket(self):
self.analysis_id = uuid.uuid4().hex
def do_set_work_socket(aw):
aw.work_socket = cellprofiler_core.constants.worker.the_zmq_context.socket(
zmq.REQ
)
aw.work_socket.connect(self.work_addr)
aw.work_request_address = self.work_addr
aw.current_analysis_id = self.analysis_id
self.awthread.execute(do_set_work_socket, self.awthread.aw) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def setup(self) -> None:\n self.running = True\n self.listen()\n self.start_workers()\n\n # Send server socket to workers.\n assert self.socket is not None\n for work_queue in self.work_queues:\n work_queue[0].send(self.family)\n send_handle(work_queue[0], self.socket.fileno(),\n self.workers[self.current_worker_id].pid)\n self.socket.close()",
"def setWorker(self, worker):\n pass",
"def setup_for_run(self):\n self.server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\n self.server.bind((self.ip_address, self.port))\n self.server.listen(100)",
"async def _setup(self):\n\n Reporter.info('Setting up workers...')\n self.workers = [asyncio.Task(self._work(), loop=self.loop)\n for _ in range(self.MAX_WORKERS)]\n Reporter.info('Starting scan...')\n await self.q.join()",
"def setup(self):\n # create the pull socket (to communicate with this actor, others\n # process have to connect a push socket to this socket)\n self.pull_socket, pull_port = self._create_socket(zmq.PULL, -1)\n\n # create the control socket (to control this actor, a process have to\n # connect a pair socket to this socket with the `control` method)\n self.control_socket, ctrl_port = self._create_socket(zmq.PAIR, 0)\n\n self.pull_socket_address = LOCAL_ADDR + ':' + str(pull_port)\n self.control_socket_address = LOCAL_ADDR + ':' + str(ctrl_port)\n\n self._pull_port.value = pull_port\n self._ctrl_port.value = ctrl_port\n self._values_available.set()",
"def worker(self, worker):\n\n self._worker = worker",
"def prepare(self):\r\n self.socket.listen()\r\n for _ in xrange(self.threads):\r\n thread = Worker(self.tasks)\r\n thread.setDaemon(True)\r\n thread.start()\r\n self.prepared = True",
"def setup(self):\n global log_th, conf_th, header_th, command_w_th\n self.conf_th_ic = conf_th\n self.header_th_ic = header_th\n self.command_w_th_inc = command_w_th\n self.hostname = conf_th.get_item(q_key='general').get('hostname')\n self.std_recv_size = int(conf_th.get_item(q_key='general').get('std_recv_size'))\n self.data_recv_size = int(conf_th.get_item(q_key='general').get('data_recv_size'))\n self.mail_save_enable = int(conf_th.get_item(q_key='general').get('mail_save_enable'))\n self.mail_save_path = conf_th.get_item(q_key='general').get('mail_save_path')\n self.no_answer = int(conf_th.get_item(q_key='general').get('no_answer'))\n self.sleep_between = int(conf_th.get_item(q_key='general').get('sleep_between'))\n self.message_id = library.q_id_generate(size=16)\n self.client_ip = tuple(self.client_address).__getitem__(0)\n self.client_port = int(tuple(self.client_address).__getitem__(1))\n # Running\n self.header_th_ic.write_header(ip=self.client_ip, qid=self.message_id)\n message = '220 ' + self.hostname\n self.func_sender(message)\n log_th.log_info('{} connected to {} thread'.format(self.client_ip, threading.current_thread().name))",
"def setup(self):\n self.context = zmq.Context()\n self.sub_socket = self.context.socket(zmq.SUB)\n if self.filter:\n self.sub_socket.setsockopt(zmq.SUBSCRIBE, self.filter)\n self.sub_socket.connect('tcp://'+self.host+':'+str(self.com_port))\n return self",
"def __init__(self, worker_id=0, base_port=5005):",
"def connect_to_worker():\n socket = context.socket(zmq.REQ)\n socket.connect(\"tcp://localhost:5555\")\n return socket",
"def setDefaultWorker(self, worker):\n pass",
"def __init__(self, worker_id=0,\n base_port=5005):\n self.port = base_port + worker_id\n self.worker_id = worker_id\n self.server = None\n self.unity_to_external = None\n self.is_open = False",
"def setUp(self) :\n self.longMessage = True\n logger = corAna.makeLogger(isTestMode=True,isMaster=True,isViewer=True,isServer=True,rank=0)\n isFirstWorker = True\n self.numTimes = 5\n numDataPointsThisWorker = 1\n\n self.workerData = corAna.WorkerData(logger, isFirstWorker, self.numTimes,\n numDataPointsThisWorker, addRemoveCallbackObject = None)",
"def initialize(self,init):\n logger.info('*** initialize: worker id=%d',self._agent.wid)\n self.commands = {'initialize':None, 'before_do_work':None, 'after_do_work':None, 'finalize':None}\n self.commands.update(init.get(self._agent.wid,{}))\n exec_command(self.commands['initialize'])",
"def init_socket_with_rety(self, worker_id):\n\n if self.mode == \"tcp\":\n # acquire lock for this socket in 100 ms or abandon, another thread is handling the socket reconnect\n with self.socket_locks[worker_id].acquire_timeout(0.1):\n connected = False\n while not connected:\n try:\n self._init_socket_tcp(worker_id)\n connected = True\n self.get_logger().info('Connection successful!')\n except Exception as e:\n self.get_logger().error(f\"Error initializing socket exception: {str(e)} worker id {worker_id}\")\n for i in range(1, 5):\n self.get_logger().info(f'Retrying in {5-i}')\n time.sleep(1)\n elif self.mode == \"udp\": \n self._init_socket_udp(worker_id)\n else:\n raise Exception(\"Mode must be one of 'udp' or 'tcp'\")",
"def main() -> None:\n worker = Worker()\n worker.do_work()",
"def setup_socket(self):\n self.server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)\n self.server_socket.bind((self.config['HOST_NAME'], self.config['BIND_PORT']))\n self.server_socket.listen(10)",
"def _run(self):\n try:\n # Send a connect message\n self.socket.send_json({\"worker_id\": self.socket_id, \"message\": \"connect\"})\n while not self.stop_event.is_set():\n job = self.socket.recv_json()\n if self.debug:\n print(\"Received task %s\" % job)\n value = self._do_work(job)\n self.socket.send_json(\n {\n \"worker_id\": self.socket_id,\n \"message\": \"job_done\",\n \"job\": Job.get_result(job, value),\n }\n )\n except KeyboardInterrupt:\n pass\n except Exception as e:\n print(e)\n finally:\n self._disconnect()",
"def init(self, job_start):\n self.server_addr = self.server.start(self)\n self.job_start = job_start\n self._start_worker()",
"def _init_socket_tcp(self, worker_id):\n\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n sock.connect((self.host, self.port))\n if len(self.sockets) - 1 < worker_id:\n self.sockets.append(MessageSocket(sock))\n else:\n # socket was already initialized, MessageSocket implements a try:catch\n self.sockets[worker_id].close()\n self.sockets[worker_id] = MessageSocket(sock)",
"def __init__(self, run, expname):\n logger.debug('Initializing worker {}.'.format(rank))\n self.run = int(run)\n self.expname = expname\n bcast_var = None\n dsname = comm.bcast(bcast_var, root=0)\n print(dsname)\n \n print('********** Start setup.')\n t0 = time.time()\n self.dsIdx = psana.DataSource(str(dsname))\n logger.info('********** Datasource on rank {}: {}s'.format(rank, time.time()-t0))\n self.dsIdxRun = next(self.dsIdx.runs())\n self.parse_detectors()\n logger.info('Rank {} has datasource and detectors.'.format(rank))\n print('********** Setup on rank {}: {}s'.format(rank, time.time()-t0))\n return",
"def setup(self, runner):\n msg = None\n try:\n msg = \"Failed to start protocol connection\"\n self.connect()\n\n msg = None\n\n for cls in self.implements:\n getattr(self, cls.name).setup()\n\n msg = \"Post-connection steps failed\"\n self.after_connect()\n except Exception:\n if msg is not None:\n self.logger.warning(msg)\n self.logger.warning(traceback.format_exc())\n raise",
"def __init__( self, app, nworkers, **kwds ):\n super( LwrJobRunner, self ).__init__( app, nworkers, runner_param_specs=LWR_PARAM_SPECS, **kwds )\n self._init_worker_threads()\n galaxy_url = self.runner_params.galaxy_url\n if galaxy_url:\n galaxy_url = galaxy_url.rstrip(\"/\")\n self.galaxy_url = galaxy_url\n self.__init_client_manager()\n if self.runner_params.url:\n # This is a message queue driven runner, don't monitor\n # just setup required callback.\n self.client_manager.ensure_has_status_update_callback(self.__async_update)\n else:\n self._init_monitor_thread()",
"def setup(self):\n self.ip = self.client_address[0]\n self.port = self.client_address[1]\n self.connection = self.request #TCP socket object for the client\n self.server.clients[(self.ip, self.port)] = self\n self.server.peers.append((self.connection)) \n for client in self.server.clients:\n print(\"Connected client: \", client)\n\n #for peer in self.server.peers:\n # print(\"Peers: \", peer)",
"def __init__(self, worker):\n self._worker = worker\n self._jobs = Queue()\n self._results, self._errors = [], []\n self._jobfinished = Condition()",
"def init_worker (self):\n print(\"initializing map worker in directory: \", os.getcwd ())\n\n context = zmq.Context()\n\n # Socket to receive messages on. Worker uses PULL from the master\n # To that end, we connect to the server. The map worker pulls info\n # from the base port of the master\n self.receiver = context.socket (zmq.PULL)\n self.receiver.setsockopt (zmq.RCVHWM, 0)\n connect_addr = \"tcp://\"+ self.master_ip + \":\" + str (self.master_port)\n print(\"Using PULL, map worker connecting to \", connect_addr)\n self.receiver.connect (connect_addr)\n \n # As part of the initialization, we tell the master that we are up.\n # This information is to be pushed to the master at a port which is\n # 2 more than the base of the master.\n self.init_sender = context.socket (zmq.PUSH)\n self.init_sender.setsockopt (zmq.LINGER, -1)\n connect_addr = \"tcp://\" + self.master_ip + \":\" + str (self.master_port+2)\n print(\"Using PUSH, map worker connecting to worker up barrier at \", connect_addr)\n self.init_sender.connect (connect_addr)\n #bind_addr = \"tcp://\" + self.master_ip + \":\" + str (self.master_port+2)\n #print \"Using PUSH, map worker binding to worker up barrier at \", bind_addr\n #self.init_sender.bind (bind_addr)\n\n # now send an ACK to the barrier to let it know that we are up\n self.init_sender.send (b'0')\n\n # close the socket\n # self.init_sender.close ()\n\n # To send the results, we need to initialize the send address to point\n # to the map results barrier\n #\n # Note that the port number of the maps result barrier is 3 more than\n # the port of the master. Initialize it so we can send results \n self.results_sender = context.socket (zmq.PUSH)\n self.results_sender.setsockopt (zmq.LINGER, -1)\n self.results_sender.setsockopt (zmq.SNDHWM, 0)\n connect_addr = \"tcp://\" + self.master_ip + \":\" + str (self.master_port+3)\n print(\"Using PUSH, map worker connecting to map results barrier at \", connect_addr)\n self.results_sender.connect (connect_addr)\n #bind_addr = \"tcp://\" + self.master_ip + \":\" + str (self.master_port+3)\n #print \"Using PUSH, map worker binding to map results barrier at \", bind_addr\n #self.results_sender.bind (bind_addr)",
"def run_worker(self):\n # TODO(xiejw): To allow execution framework to add train hooks.\n return self._start_distributed_training()",
"def activate(self):\n self.socket.listen(self.request_queue_size)",
"def _setup_communication(self):\n state = self.ui.checkBox_comm.checkState()\n if state:\n try:\n sys.path.append(\"..\")\n from zmq_interface.gui_interface import ZmqInterface\n except ImportError as e:\n self.write_text(\"ZMQ interface failed to import. No remote control for this session.\")\n self.disable_visualizer()\n return\n try:\n ##TODO: let user specify ports\n self.com = ZmqInterface(rep_port=REPLY_PORT,\n gui_handle=self)\n except Exception as e:\n #traceback.print_exc(file=sys.stdout)\n self.write_text(\"ZMQ interface failed to start. No remote control for this session. Reason: %s\" % e)\n self.disable_visualizer()\n return\n self.start = self._start_session\n self.stop = self._stop_session\n self.load_config = self._load_state\n self.save_config = self._save_state\n self.com_timer = QtCore.QTimer()\n self.com_timer.timeout.connect(self._check_coms)\n self.com_timer.start(200)\n self.write_text(\"ZMQ interface set up. Reply port on %s\" % self.com.rep_port)\n self.enable_visualizer()\n else:\n if self.com:\n self.com.close()\n if self.com_timer:\n self.com_timer.stop()\n self.com = None\n self.com_timer = None\n self.enable_visualizer()\n self.write_text(\"ZMQ interface closed.\")"
] | [
"0.74050635",
"0.6771996",
"0.66032565",
"0.64634144",
"0.6445292",
"0.62212235",
"0.6142211",
"0.6116784",
"0.6084497",
"0.6056881",
"0.5957323",
"0.59568125",
"0.5929769",
"0.5915095",
"0.5910893",
"0.5893372",
"0.5853307",
"0.5753932",
"0.5749135",
"0.57407844",
"0.57199585",
"0.5707923",
"0.5697508",
"0.5696799",
"0.5664185",
"0.56582713",
"0.56572866",
"0.565072",
"0.5644031",
"0.5639019"
] | 0.8267066 | 0 |
Announce the work address until we get some sort of a request | def send_announcement_get_work_request(self):
self.analysis_id = uuid.uuid4().hex
while True:
self.announce_socket.send_json(((self.analysis_id, self.work_addr),))
try:
return self.awthread.recv(self.work_socket, 250)
except six.moves.queue.Empty:
continue | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def request_work(self):\n self.workRequested.emit()",
"def make_work_request(self):\n request = StoreRequest()\n self.bb_client.read_wait(request, self.handle_request)",
"def answer_waiting_call(self) -> None:",
"def do_work(self):",
"async def send_referral(self) -> None:\n # pause logic\n if not self.running.is_set():\n self.add_to_output(\"Paused...\")\n await self.running.wait()\n # check if the referral file exists\n if os.path.exists(REFERRAL_FILE_S9):\n try:\n # tell the user we are sending the referral\n self.add_to_output(\"Sending referral IPK...\")\n # create ssh connection to miner\n await self.send_file(REFERRAL_FILE_S9, '/tmp/referral.ipk')\n await self.send_file(CONFIG_FILE, '/etc/bosminer.toml')\n\n await self.run_command(f'opkg install /tmp/referral.ipk && /etc/init.d/bosminer restart')\n # tell the user the referral completed\n self.add_to_output(f\"Referral configuration completed...\")\n except OSError as e:\n print(e)\n self.add_to_output(f\"Unknown error...\")\n else:\n self.add_to_output(\"No referral file, skipping referral install\")",
"def test_answer_background(network):\n work_item_id = bfq.ipOwners().answer(background=True)\n bf_get_work_status(work_item_id)",
"def work(self, job):\n pass",
"def receive_completion_notification(self, data, *args, **kwargs):\n self.log.debug('Received completion message from {ip}'.format(\n ip = data['ip_addr']\n ))\n self.completion_messages.add(data['ip_addr'])",
"def _work(self):\n return \"Task Done\"",
"def link_up_respond(self, neighbor):\n neighbor.is_killed = False\n neighbor.send_timer = time.time()\n neighbor.kill_timer = time.time()\n if self.update_dv():\n for name in self.neighbors:\n self.neighbors[name].update_ready = True\n self.neighbors[name].send_timer = time.time()",
"def alert_for_pending_mails_1(request):\n\n\tprint(\">>>>>>>>>>>>>>>>>>>>>Beginning of alert_for_pending_mails_1<<<<<<<<<<<<<<<<<<<<\")\n\n\tThread(target=alert_for_pending_mails_1_worker).start()\n\n\tprint(\">>>>>>>>>>>>>>>>>>>>>End of alert_for_pending_mails_1<<<<<<<<<<<<<<<<<<<<\")\n\n\tresponse = {}\n\n\tresponse[\"info_to_contact\"] = \"Ok\"\n\n\treturn response",
"def address(self):\n ...",
"def director_address():\n while True:\n #addr = etcd.watch(\"director_publish_addr\")\n #director_address = addr.value\n break",
"async def announce(self, ctx, *, msg):\n if self._announce_msg is not None:\n await self.bot.say(\"Already announcing, wait until complete to\"\n \" issue a new announcement.\")\n else:\n self._announce_msg = msg",
"def test_solicitation_no_reply_resend(self):\n waittime = self.autoconflayer._solicitation_timeout * 4.0\n self.autoconflayer.start_process()\n interest = Interest(Name('/foo/bar'))\n self.queue_from_higher.put([None, interest])\n\n # Catch all data the autoconfig layer sends downwards for 3 seconds\n deadline = datetime.utcnow() + timedelta(seconds=waittime)\n tolower = []\n while datetime.utcnow() < deadline:\n try:\n data = self.queue_to_lower.get(timeout=waittime/10)\n tolower.append(data)\n except queue.Empty:\n pass\n # Make sure the broadcast face was actually created and get its face id\n bcfid = self.faceidtable.get_or_create_faceid(AddressInfo(('127.255.255.255', 4242), 0))\n self.assertIsNotNone(bcfid)\n # Make sure the forwarder solicitation was sent more than once\n solictiation = Interest(Name('/autoconfig/forwarders'))\n solictiation_count = len([1 for data in tolower if data == [bcfid, solictiation]])\n self.assertGreater(solictiation_count, 1)",
"def send_completion_notification(self):\n msg_data={'ip_addr': self.this_drone[0], 'id': self.drone_id}\n drone_ip, drone_port = self.master_drone\n self.log.debug('Sending completion ({msg}) to summoner at {ip}:{port}'.format(\n msg=msg_data,\n ip=drone_ip,\n port=drone_port\n ))\n\n send(\n drone_ip=drone_ip,\n mission_id=self.mission_id,\n endpoint='/receive-completion',\n data=msg_data,\n skyserve_port=self.this_drone[1]\n #async=True\n )",
"def is_ready(self, addr: int, /) -> bool:",
"def startworking():\r\n #In the future have the manager program or from the website implement this arguments to a route\r\n #the program will download the file from the website\r\n global exe_name\r\n global Task_Conditional\r\n task_data = None\r\n while task_data is None:\r\n task_data = recieve_data_from_server(\"get_task\")\r\n if task_data is None:\r\n time.sleep(5)\r\n else:\r\n exe_name = task_data[\"exe_name\"]\r\n print('Working on the task \"{}\"'.format(exe_name))\r\n get_file(exe_name)\r\n Task_Conditional = task_data[\"Task_conditional\"]\r\n print(\"loading\")\r\n t1 = time.time()\r\n task_divider(task_data[\"first_num\"], task_data[\"last_num\"])\r\n t2 = time.time()\r\n print(\"ready {}\".format(t2-t1))",
"async def request(self) -> str:\n self._comment = [None] * 3\n await self.addr_conn.conn.segment_scan_completed_event.wait()\n self.comment_known.clear()\n for trh in self.trhs:\n trh.activate()\n await self.comment_known.wait()\n return self.comment",
"def _run_notice_event(look_for_work):\n while True:\n try:\n found = look_for_work()\n if not found:\n break\n except ConcurrentUpdate as e:\n # retry if we had a race-condition while claiming work\n sys.stderr.write('Handling ErmrestConcurrentUpdate exception...\\n')\n pass",
"def work(self, request):\n raise NotImplementedError",
"def notify(self) -> None:\n pass",
"def notify(self) -> None:\n pass",
"def hook_request_assistance(self, data):\n request_id = data[\"request_id\"]\n log.info(\"NEW request for assistance %s\", request_id)\n volunteers_to_contact = data[\"volunteers\"]\n\n needs = \"\"\n for item in data[\"needs\"]:\n needs += f\"- {item}\\n\"\n\n assistance_request = c.MSG_REQUEST_ANNOUNCEMENT % (data[\"address\"], needs)\n\n for chat_id in volunteers_to_contact:\n if chat_id not in self.updater.persistence.user_data:\n log.debug(\"User %s hasn't added the updater to their contacts, skipping.\", chat_id)\n continue\n\n current_state = self.updater.persistence.user_data[chat_id].get(\"state\", None)\n\n if current_state in [c.State.REQUEST_IN_PROGRESS, c.State.REQUEST_ASSIGNED]:\n log.debug(\"Vol%s is already working on a request, skippint\")\n continue\n\n self.updater.bot.send_message(\n chat_id=chat_id,\n text=assistance_request,\n parse_mode=ParseMode.MARKDOWN,\n reply_markup=ReplyKeyboardMarkup(k.initial_responses, one_time_keyboard=True),\n )\n\n # update this user's state and keep the request_id as well, so we can use it later\n updated_state = {\"state\": c.State.REQUEST_SENT, \"reviewed_request\": request_id}\n self.updater.dispatcher.user_data[chat_id].update(updated_state)\n\n self.updater.dispatcher.bot_data.update({request_id: data})\n self.updater.dispatcher.update_persistence()",
"def _future_work_():\n pass",
"def feed(self, instruction):\n assert self.future_inst is None, 'BranchUnit fed when full'\n self.future_inst = instruction\n self.future_timer = max(0, instruction.DELAY - 1)",
"def run(self):\n while True:\n try:\n target_url = self.TO_PROCESS.get(block=True, timeout=4)\n if target_url[\"url\"].startswith(\"mailto:\"):\n email = target_url[\"url\"][len(\"mailto:\") :]\n self.mailto_links.append(email)\n\n elif target_url[\"url\"] not in self.visited:\n self.visited.add(target_url[\"url\"])\n job = self.pool.submit(\n self.load_url, target_url, self.config.timeout\n )\n job.add_done_callback(self.handle_future)\n except Empty:\n return\n except Exception as e:\n print(e)",
"def find_address():\n while True:\n business_object = query_business_name()\n if business_object == \"back\":\n return\n elif business_object is None:\n continue\n\n print(f'{business_object[\"name\"]}\\'s address is:'\n f'{business_object[\"address\"]}, {business_object[\"city\"]} '\n f'{business_object[\"state\"]}')",
"def test_lookup_some_pending_some_contacted(self):\n lookup = Lookup(FindValue, self.target, self.node, self.event_loop)\n # Reset in order to manually create the correct state.\n lookup.pending_requests = {}\n lookup.contacted = set()\n self.node.send_find.call_count = 0\n\n # Add a single pending request.\n pending_uuid = str(uuid.uuid4())\n pending_future = asyncio.Future()\n lookup.pending_requests[pending_uuid] = pending_future\n # Add a single contact to the contacted list.\n lookup.contacted.add(lookup.shortlist[0])\n # Sanity check.\n self.assertEqual(1, len(lookup.pending_requests))\n self.assertEqual(1, len(lookup.contacted))\n # Re-run _lookup and check state has been correctly updated.\n lookup._lookup()\n self.assertEqual(ALPHA - 1, self.node.send_find.call_count)\n self.assertEqual(ALPHA, len(lookup.pending_requests))\n self.assertEqual(ALPHA, len(lookup.contacted))",
"def jmp(self, addr):\n\n self.reg.ip = addr"
] | [
"0.5951986",
"0.5589441",
"0.5423254",
"0.53269106",
"0.52976644",
"0.52254903",
"0.52201986",
"0.520611",
"0.52030164",
"0.5180417",
"0.51489556",
"0.5138724",
"0.51302093",
"0.5128945",
"0.510327",
"0.50935763",
"0.50749195",
"0.5074746",
"0.5072681",
"0.505735",
"0.50519097",
"0.5027512",
"0.5027512",
"0.501978",
"0.5019467",
"0.50015664",
"0.4992609",
"0.49729005",
"0.4969123",
"0.4968418"
] | 0.6637995 | 0 |
Returns the X window id of the window whose title matches regex `title_regex` | def get_window_id(title_regex):
cmd = "wmctrl -l"
logit(cmd)
output = subprocess.check_output(cmd.split()).decode("utf-8").splitlines()
logit(output)
for line in output:
w_id = line.split()[0]
title = line.split(" ", 3)[3]
if re.match(title_regex, title):
return w_id
raise Exception(f"Could not find window with title matching regex: {title_regex}") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def GetProcessIdByWindowTitle(window_title: str) -> int:\n result = ctypes.c_uint32(0)\n\n string_buffer_size = len(window_title) + 2 # (+2) for the next possible character of a title and the NULL char.\n string_buffer = ctypes.create_unicode_buffer(string_buffer_size)\n\n def callback(hwnd, size):\n \"\"\"\n This callback is used to get a window handle and compare\n its title with the target window title.\n\n To continue enumeration, the callback function must return TRUE;\n to stop enumeration, it must return FALSE.\n \"\"\"\n nonlocal result, string_buffer\n\n user32.GetWindowTextW(hwnd, string_buffer, size)\n\n # Compare the window titles and get the process ID.\n if window_title == string_buffer.value:\n user32.GetWindowThreadProcessId(hwnd, ctypes.byref(result))\n return False\n\n # Indicate it must continue enumeration.\n return True\n\n # Enumerates all top-level windows on the screen by passing the handle to each window,\n # in turn, to an application-defined callback function.\n user32.EnumWindows(WNDENUMPROC(callback), string_buffer_size)\n\n return result.value",
"def getCurrentWindowId(*args):",
"def get_window_title(self): # real signature unknown; restored from __doc__\n return \"\"",
"def _get_title_id(cursor, title):\n # run query to find title id for given title\n title_id_query = cursor.execute(dbq.SELECT_TITLE_ID, [title])\n\n if title_id_query:\n return _fetch_value(cursor)\n else:\n return None",
"def get_window_title(self):\n\n return self.window_title",
"def getCurrentWindowDialogId(*args):",
"def getApplicationwindowId(ReferenceID):\n try:\n ldtp.wait(5)\n window = ReferenceID.windows()[0]\n logging.info(\"Application id of the window : %s\" % window)\n except Exception as er:\n logging.info('Not able to get window name of Application')\n return False\n return window",
"def id_by_title(self, title):\n logging.debug('id_by_title(%s)', title)\n if not self.list_loaded_:\n self.load_shows()\n\n for show_id in self.shows_data:\n next_show = self.shows_data[show_id]\n logging.debug('id_by_title(%s) = %s', next_show['title'], show_id)\n if next_show['title'] == title:\n logging.debug('Found id_by_title(%s) = %s', title, show_id)\n return show_id\n\n print('Unknown title - {0}'.format(title))\n sys.exit(1)",
"def get_title_id(title=_(\"Name the element\"),\n text=_(\"Choose a name for the element\"),\n element_title=None,\n element_id=None):\n d = title_id_dialog(title=title,\n element_title=element_title,\n element_id=element_id,\n text=text)\n d.show_all()\n center_on_mouse(d)\n\n res=d.run()\n if res == gtk.RESPONSE_OK:\n try:\n t=unicode(d.title_entry.get_text())\n i=unicode(d.id_entry.get_text())\n except ValueError:\n t=None\n i=None\n else:\n t=None\n i=None\n\n d.destroy()\n\n return t, i",
"def extract_channel_number(title):\n # Generate re\n p = _re.compile(\"(lower)|(upper)\")\n result = _re.search(p, title)\n idx = result.lastindex\n return idx",
"def title(self):\n return win32gui.GetWindowText(self.hwnd)",
"def draw_title_window(self, screen: curses.window, height: int, width: int, y: int, x: int) -> None:\n title_win = screen.subwin(height, width, y, x)\n title_win.border()\n\n title = \"XKCD Extractor\"\n centered_x = width // 2 - len(title) // 2\n title_win.addstr(1, centered_x, title)",
"def winTitle(self, title):\n winTitle = title\n window = self.window\n window.setWindowTitle(winTitle)",
"def getTitleAndPos(self, pos, windowname = \"\"):\n \n wnd = WindowFromPoint(pos)\n while True:\n if not GetParent(wnd): break\n if windowname:\n if windowname in GetWindowText(wnd):\n break\n wnd = GetParent(wnd)\n\n # if the user-specified window is a valid top-level window, use it\n # except that the click took place on the genius application window or\n # on a child window that has the user-specified name\n if GetWindowText(wnd) != \"Operation Genius\" and windowname:\n try:\n w = winutil.getWindowHandle(windowname)\n except WindowNotFound:\n pass\n else:\n if windowname not in GetWindowText(wnd):\n wnd = w\n \n title = GetWindowText(wnd)\n wPos = winutil.ScreenToWindow(wnd, pos)\n return (title, wPos)",
"def callback(hwnd, size):\n nonlocal result, string_buffer\n\n user32.GetWindowTextW(hwnd, string_buffer, size)\n\n # Compare the window titles and get the process ID.\n if window_title == string_buffer.value:\n user32.GetWindowThreadProcessId(hwnd, ctypes.byref(result))\n return False\n\n # Indicate it must continue enumeration.\n return True",
"def find_window(**kwargs):\r\n try:\r\n kwargs['backend'] = 'win32'\r\n element = find_element(**kwargs)\r\n return element.handle\r\n except ElementNotFoundError:\r\n raise WindowNotFoundError\r\n except ElementAmbiguousError:\r\n raise WindowAmbiguousError",
"def getActiveWindowName(display):\n\n # we can't get it via powershell as a system process, so we need to get it from a file \n # that gets written to through a scheduled task (hopefully)\n\n # get user's TEMP path\n tempPath = getCurrentUserTempPath()\n\n if tempPath:\n windowOutput = ''\n try:\n with open(tempPath+\"mqttNanny-activeWindow.txt\", encoding=\"utf-16\") as file:\n windowOutput = file.read()\n except IOError as e:\n logger.error(\"Error while reading active window name: {}\".format(str(e)))\n return ''\n\n # File contents looks like this:\n #\n #ProcessName AppTitle \n #----------- -------- \n #WindowsTerminal Windows PowerShell \n \n processNameLength = 0\n dashesMatched = False\n activeWindows = []\n\n for line in iter(windowOutput.splitlines()):\n #ignore blank lines\n if re.match('^\\s*$', line):\n continue\n logger.debug(line)\n # look for ----------- --------\n matchDashes = re.match(r'^([-]+\\s+)([-]+\\s*)', line, re.UNICODE)\n if matchDashes:\n # we need to count the length of the columns so that we can more easily parse it\n processNameLength = len(matchDashes.group(1))\n logger.debug(\"processNameLength = {}\".format(processNameLength))\n dashesMatched = True\n continue\n \n if dashesMatched:\n # we'll split the line based on length\n # some lines may not have all the data, skip them\n if len(line) >= processNameLength:\n processName = line[0:processNameLength].rstrip(\" \")\n title = line[processNameLength:].rstrip(\" \")\n \n activeWindows.append(processName + \": \" + title)\n \n if len(activeWindows) == 1:\n #this is normal, one active window\n return activeWindows[0]\n elif len(activeWindows) == 0:\n return \"No window\"\n else:\n # more than one active window is a problem - couldn't get active windows...\n logger.warning(\"Found \"+str(len(activeWindows))+\" active windows. This is not ok.\")\n return \"Error - couldn't get active window\"",
"def getTitle(self):\n cmdId = self.executeCommand(Command.GET_TITLE)\n return cmdId",
"def strip_winids(string):\n return re.sub(r'0x([0-9a-f]+)', '<windowid>', string)",
"def title_contains(title_substring):\n title_substring = title_substring.encode('ascii')\n def f(win):\n t = conv(win.title)\n return title_substring in t\n return f",
"def get_window_x_y(windowid):\n return commands.getoutput(\"xwininfo -id \"+windowid+\" | grep 'Corners' | cut -d' ' -f5 | cut -d'+' -f2,3\").split(\"+\")",
"def the_tvdb_dot_com_id(title):\n pass",
"def doGetPageTitle(self, timeout=10.0):\n TestAdapterLib.check_timeout(caller=TestAdapterLib.caller(), timeout=timeout)\n \n ret = True\n cmdId = self.getTitle()\n rsp = self.hasWindowTitle(timeout=timeout, commandId=cmdId)\n if rsp is None:\n ret = False\n else:\n elementVall = rsp.get('GUI', 'value')\n ret = elementVall.get('value') # title of the window\n return ret",
"def title(self):\n with switch_window(self._browser, self.name):\n return self._browser.title",
"def get_window_id_by_pid(pid):\n from subprocess import check_output\n # Looks like:\n # 0x03c00041 0 3498 skipper Mozilla Firefox\n # WindowID ? PID USER Window Name\n # Needs sudo apt-get install wmctrl -lp\n\n output = check_output('wmctrl -lp', shell=True)\n # Find the line with the PID we are looking for\n for line in output.splitlines():\n fields = line.split()\n if len(fields) >= 3:\n this_pid = int(fields[2])\n if this_pid == pid:\n return int(fields[0], 16)\n return None",
"def set_title( self , winTitle ):\r\n self.rootWin.wm_title( str( winTitle ) )",
"def get_valid_title(title):\n if len(title) >= 254:\n title = title[:254]\n return title",
"def tv_tropes_id(title):\n pass",
"def get_title(self):\n return self.run_command('get_title')[0]",
"def _set_window(video_path, window_name, title):\n\n assert os.path.isfile(video_path), \"Path error\"\n vc = cv2.VideoCapture()\n vc.open(video_path)\n im_width = int(vc.get(cv2.CAP_PROP_FRAME_WIDTH))\n im_height = int(vc.get(cv2.CAP_PROP_FRAME_HEIGHT))\n\n return (im_width, im_height)"
] | [
"0.67164993",
"0.62230086",
"0.607506",
"0.6065819",
"0.60467404",
"0.58529276",
"0.5789825",
"0.57747257",
"0.5757205",
"0.57141364",
"0.5661283",
"0.56199354",
"0.56173295",
"0.5543509",
"0.55334383",
"0.548825",
"0.5437465",
"0.5373055",
"0.5367433",
"0.5316267",
"0.53121114",
"0.5308796",
"0.52915806",
"0.5228942",
"0.51771426",
"0.51694065",
"0.51619935",
"0.5161497",
"0.51402414",
"0.5100269"
] | 0.8586612 | 0 |
Ensure we can't create a student user without academic_ fields. | def test_create_new_student_user_missing_field(self):
data = {
'email': '[email protected]',
'password': 'test123!',
}
response = self.client.post(
reverse('user-list'),
data,
format='json',
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_can_not_create_education_instance_without_user(self):\n\t\twith self.assertRaises(\n\t\t\tIntegrityError,\n\t\t\tmsg = 'Should raise IntegrityError if user not provided.'\n\t\t\t):\n\n\t\t\tEducation.objects.create(\n\t\t\t\tschool_name=self.school_name,\n\t\t\t\tcourse_name=self.course_name,\n\t\t\t\tstart_date=self.start_date,\n\t\t\t)",
"def test_careers_invalid_student(self):\n student_id = '1234567890'\n result = self.ucuenca.careers(student_id)\n self.assertFalse(result)",
"def test_invalid_student(self):\n # request\n request_body = {\n 'wwuid': '123456789', # too long\n 'labgroup': self.labgroup.id,\n 'enroll_key': self.labgroup.enroll_key\n }\n response = self.client.post(reverse(self.view_name), request_body)\n # test response\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)\n # test database\n self.assertEqual(len(Student.objects.all()), 0)",
"def test_no_user(self):\n form = self._get_form()\n self.assertTrue(self._validate_form(form), form.errors)\n self.assertRaises(IntegrityError, form.save)",
"def test_create_student_missing_role(self):\n response = self.client.post(self.url, data=json.dumps(self.payload_missing_role),\n content_type='application/json')\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, msg=response.content.decode())\n self.assertEqual(Student.objects.count(), self.qty)",
"def test_staff_permission_required(self):\r\n with self.assertRaises(PermissionDenied):\r\n add_user_with_status_granted(self.user, self.user)\r\n\r\n with self.assertRaises(PermissionDenied):\r\n update_course_creator_group(self.user, self.user, True)",
"def test_can_not_create_education_instance_without_start_date(self):\n\t\twith self.assertRaises(\n\t\t\tIntegrityError,\n\t\t\tmsg = 'Should raise IntegrityError if start_date not provided.'\n\t\t\t):\n\n\t\t\tEducation.objects.create(\n\t\t\t\tuser=self.user,\n\t\t\t\tschool_name=self.school_name,\n\t\t\t\tcourse_name=self.course_name,\n\t\t\t)",
"def test_private_create_user_without_email(self):\n with pytest.raises(ValueError) as exinfo:\n EmailUser.objects._create_user(None, None, False, False)\n self.assertIn('email must be set', str(exinfo.value))",
"def test_validate_user(self):\n with self.assertRaises(ValidationError):\n self.make_assignment(\n self.category, self.user_alice, self.role_contributor\n )",
"def test_bad_student(self):\r\n staff_page = self._goto_staff_page()\r\n staff_page.answer_problem()\r\n\r\n staff_debug_page = staff_page.open_staff_debug_info()\r\n staff_debug_page.delete_state('INVALIDUSER')\r\n msg = staff_debug_page.idash_msg[0]\r\n self.assertEqual(u'Failed to delete student state. '\r\n 'User does not exist.', msg)",
"def test06_add_student_with_empty_fields(self):\n student_data = self.students_page.\\\n click_edit_students_list_button(). \\\n click_add_new_student_button()\n student_data.save_data_changes_button.click()\n actual_warnings = \\\n student_data.warnings_text_for_adding_student_with_empty_fields()\n self.assertEqual(actual_warnings, data['expected_warnings'])",
"def test_creation_without_password(self, user):\n with pytest.raises(mongoengine.errors.ValidationError):\n user.save()",
"def test_dont_create_user(self):\n self.assertFalse(User.objects.exists())",
"def test_cannot_create_user_without_email(self):\n with self.assertRaises(TypeError):\n User.objects.create_user(username=\"username\", password=\"password\", email=None)",
"def test_invalid_data_course_add(self, app, auth, field):\n app.admin.add_new_course()\n course_data = CreateCourse.random()\n setattr(course_data, field, None)\n app.course.create_course(course_data)\n assert (\n not app.course.all_required_fields_filled()\n ), \"Empty fields are ignored and user data changed successfully!\"",
"def test_add_user_to_course_group_permission_denied(self):\r\n add_users(self.global_admin, CourseInstructorRole(self.course_key), self.creator)\r\n add_users(self.global_admin, CourseStaffRole(self.course_key), self.creator)\r\n with self.assertRaises(PermissionDenied):\r\n add_users(self.staff, CourseStaffRole(self.course_key), self.staff)",
"def test_not_creator_cannot_update(self):\n\n logged_user = utils.create_user_and_authenticate(self)\n self.group.users.add(logged_user)\n expected_url = reverse('my_groups_view')\n\n utils.test_cannot_access(self, self.url,\n expected_url=expected_url,\n data=self.data)",
"def test_create_user_missing_fields(self):\n payload = {\n 'email': 'email',\n 'password': ''\n }\n res = self.client.post(CREATE_USER_API, payload)\n self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)",
"def test_careers_invalid_student(self):\n student_id = '1234567890'\n result = self.ucuenca.schedule(student_id)\n self.assertFalse(result)",
"def test_creator_group_not_enabled(self):\r\n self.assertTrue(has_access(self.user, CourseCreatorRole()))",
"def test_can_have_no_assessor(self):\n title = \"Smart thesis title\"\n\n thesis = Thesis(student=self.student,\n assessor=None,\n supervisor=self.supervisor,\n title=title,\n begin_date=datetime.now().date(),\n due_date=datetime(2018, 1, 30))\n\n thesis.save()\n\n self.assertEqual(None, thesis.assessor)",
"def test_creating_a_new_user_without_email(self):\n\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(None, \"Test1234\")",
"def test_08_create_user_not_exists(self):\n\n _, user = self.get_random_item(models.User)\n utils.create_user(user, session=self.session)\n success, error = utils.create_user(user, session=self.session)\n self.assertFalse(success)\n self.assertTrue(error)",
"def test_create_instructor_missing_role(self):\n response = self.client.post(self.url, data=json.dumps(self.payload_missing_role),\n content_type='application/json')\n self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST, msg=response.content.decode())\n self.assertEqual(Instructor.objects.count(), self.qty)",
"def _add_user(user, state):\r\n if not user.is_staff and CourseCreator.objects.filter(user=user).count() == 0:\r\n entry = CourseCreator(user=user, state=state)\r\n entry.save()\r\n return True\r\n\r\n return False",
"def test_create_course_no_course_creators_not_staff(self):\r\n with mock.patch.dict('django.conf.settings.FEATURES', {\"ENABLE_CREATOR_GROUP\": True}):\r\n self.user.is_staff = False\r\n self.user.save()\r\n self.assert_course_permission_denied()",
"def test_cannot_create_group_with_empty_field(self):\n\n utils.create_user_and_authenticate(self)\n group_fields = ['name', 'description']\n utils.test_cannot_post_with_empty_fields(self, self.url, group_fields)",
"def test_creation_throws_error_on_missing_fields(self, test_domain):\n with pytest.raises(ValidationError) as err:\n test_domain.repository_for(Person)._dao.create(last_name=\"Doe\")\n\n assert err.value.messages == {\"first_name\": [\"is required\"]}",
"def test_signup_missing_first_name(self):\n\n invalid_u = User.signup(\"[email protected]\", \"testuser\", \"testpass\", None, \"User\", None)\n \n uid = 99999\n invalid_u.id = uid\n\n with self.assertRaises(exc.IntegrityError) as context:\n db.session.commit()",
"def test_create_user_invalid_type(self):\r\n print(\"Create user invalid type\")\r\n u_id = 3\r\n username = \"newtestuser\"\r\n password = \"password\"\r\n u_type = 5\r\n\r\n prev_noUsers = len(User.query.all())\r\n self.assertEqual(self.system.create_user(u_id, username, password, u_type), 0)\r\n curr_noUsers = len(User.query.all())\r\n self.assertEqual(prev_noUsers, curr_noUsers)"
] | [
"0.67956454",
"0.63918",
"0.636316",
"0.6349266",
"0.63289773",
"0.6238365",
"0.623791",
"0.6165642",
"0.61546856",
"0.61181474",
"0.6080576",
"0.60766065",
"0.60527575",
"0.60310954",
"0.60178196",
"0.6006018",
"0.59811884",
"0.5958835",
"0.59572184",
"0.59439963",
"0.5943449",
"0.5939593",
"0.5896514",
"0.5877708",
"0.58449924",
"0.5835229",
"0.582459",
"0.5813666",
"0.5810574",
"0.5803253"
] | 0.64296496 | 1 |
Ensure we can't create a new user with an invalid phone number | def test_create_new_user_invalid_phone(self):
data = {
'username': 'John',
'email': '[email protected]',
'password': '1fasd6dq#$%',
'phone': '12345',
'other_phone': '23445dfg',
'first_name': 'Chuck',
'last_name': 'Norris',
'university': {
"name": "random_university"
},
'academic_field': {'name': "random_field"},
'academic_level': {'name': "random_level"},
'gender': "M",
'birthdate': "1999-11-11",
}
response = self.client.post(
reverse('user-list'),
data,
format='json',
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
content = {
"phone": ['Invalid format.'],
"other_phone": ['Invalid format.']
}
self.assertEqual(json.loads(response.content), content) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def validate_phone_number(self, phone_number):\n if User.objects.filter(phone_number=phone_number).exists():\n raise serializers.ValidationError('Phone Number already registered.')\n return phone_number",
"def test_new_user_invalid_email(self):\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(\"\", \"test42837492374923749\")",
"def test_duplicate_phone_number(self):\n params = {\n 'first_name': \"David\",\n 'last_name': \"Smith\",\n 'password': '******',\n 'email': \"[email protected]\",\n 'phone_number': \"012-345-6789\"\n }\n self.register(params)\n response = self.register(params)\n self.assertEqual(response.status_code, 400)\n self.assertDictContainsSubset({'message': \"Phone number/email already exists\"}, response.json())",
"def test_new_user_invalid_email(self):\n\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(None, 'test1234')",
"def test_new_user_invalid_email(self):\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(None, '3232Ze')",
"def test_new_user_invalid_email(self):\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(None, '123')",
"def test_new_user_invalid_email(self):\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(None, 'test123456')",
"def test_create_use_with_invalid_email(self):\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(None, password='open@123')",
"def test_new_user_invalid_email(self):\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(None, \"test123\")",
"def test_new_user_invalid_email(self):\n\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(None, 'test123')",
"def test_create_user_with_invalid_input(self, user, status_code, len_):\n # setup: none\n\n # test\n resp = self.create_user(user)\n try:\n assert resp.status_code == status_code\n assert resp.headers[\"Content-Type\"] == \"application/json; charset=utf-8\"\n assert resp.text.find(\"Passwords must have at least one non alphanumeric character\") == len_\n except AssertionError:\n raise\n finally:\n self.pprint_request(resp.request)\n self.pprint_response(resp)\n\n # teardown: none",
"def test_new_user_with_invalid_email(self):\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(None, 'userpass123')",
"def test_new_user_invalid_email(self):\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(None, 'test123')",
"def test_new_user_invalid_email(self):\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(None, 'test123')",
"def test_new_user_invalid_email(self):\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(None, 'test123')",
"def test_new_user_invalid_email(self):\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(None, 'test123')",
"def test_new_user_invalid_email(self):\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(None, 'test123')",
"def test_create_user_invalid_id(self):\r\n print(\"Create user invalid id (already taken)\")\r\n u_id = 100\r\n username = \"newtestuser\"\r\n password = \"test9999\"\r\n u_type = 1\r\n\r\n prev_noUsers = len(User.query.all())\r\n self.assertEqual(self.system.create_user(u_id, username, password, u_type), 0)\r\n curr_noUsers = len(User.query.all())\r\n self.assertEqual(prev_noUsers, curr_noUsers)",
"def test_new_user_invalid_email(self):\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(\n None,\n 'Password'\n )",
"def test_new_user_invalid_email(self):\n user_number_before = get_user_model().objects.count()\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(\n None,\n password=\"1234Test\"\n )\n user_number_after = get_user_model().objects.count()\n self.assertEqual(user_number_before, user_number_after)",
"def validate_phone(self, data):\n value = data.strip()\n if re.match(constant.NUMBER_ONLY, value):\n if User.objects.filter(phone=value).exists():\n raise serializers.ValidationError('phone number already registered')\n return value\n raise serializers.ValidationError(VALIDATION['phone']['invalid'])",
"def post(self):\r\n args = user_parser.parse_args()\r\n\r\n phone_number = args[\"phone_number\"]\r\n\r\n first_three = phone_number[:3]\r\n\r\n if first_three not in prefix_list and first_three != \"+23\":\r\n response = {\r\n \"status\": \"error\",\r\n \"details\": {\r\n \"message\": \"Pass in a valid phone-number\"\r\n }\r\n }\r\n return response, http.client.BAD_REQUEST\r\n\r\n if not (len(phone_number) == 11 or len(phone_number) == 14):\r\n\r\n response = {\r\n \"status\": \"error\",\r\n \"details\": {\r\n \"message\": \"The lenth of number passed is invalid\"\r\n }\r\n }\r\n return response, http.client.BAD_REQUEST\r\n\r\n user = (UserModel.query.filter(\r\n UserModel.phone_number == phone_number).first())\r\n\r\n if user:\r\n result = {\r\n \"status\": \"error\",\r\n \"result\": {\r\n 'message': 'Phone Number already exists, try another one.'\r\n }\r\n }\r\n return result, http.client.CONFLICT\r\n\r\n if not validators.email(args[\"email\"]):\r\n response = {\r\n \"status\": \"error\",\r\n \"details\": {\r\n \"message\": \"Input a valid email address\"\r\n }\r\n }\r\n return response, http.client.BAD_REQUEST\r\n\r\n user = (UserModel.query.filter(\r\n UserModel.email == args['email']).first())\r\n if user:\r\n result = {\r\n \"status\": \"error\",\r\n \"result\": {\r\n 'message': 'Email already exists, try another one.'\r\n }\r\n }\r\n return result, http.client.CONFLICT\r\n\r\n email = args['email'].lower()\r\n new_user = UserModel(email=email,\r\n phone_number=args[\"phone_number\"],\r\n password=args['password'],\r\n role=args['role'],\r\n created_at=datetime.utcnow(),\r\n firebase_token=args['firebase_token'])\r\n db.session.add(new_user)\r\n try:\r\n db.session.commit()\r\n except IntegrityError:\r\n db.session.rollback()\r\n result = {\r\n \"status\": \"error\",\r\n \"result\": {\r\n 'message':\r\n 'Email or Phone Number already exists, try another one.'\r\n }\r\n }\r\n return result, http.client.CONFLICT\r\n\r\n result = admin_namespace.marshal(new_user, user_model)\r\n\r\n response = {\"status\": \"success\", \"result\": result}\r\n\r\n return response, http.client.CREATED",
"def test_profile_phone_number_exceptions(self, bad_number):\n with mute_signals(post_save):\n profile = ExamProfileFactory(profile__phone_number=bad_number)\n with self.assertRaises(InvalidProfileDataException):\n CDDWriter.profile_phone_number_to_raw_number(profile)\n with self.assertRaises(InvalidProfileDataException):\n CDDWriter.profile_phone_number_to_country_code(profile)",
"def test_user_invalid_email(self):\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(email=None, password=\"123\")",
"def test_creating_a_new_user_without_email(self):\n\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(None, \"Test1234\")",
"def test_new_user_invalid_email(self):\n\n # It should raise a ValueError\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(\n None,\n password=\"password\"\n )",
"def validate_username(self, attrs, source):\n phone_no = attrs[source]\n if not phoneCleaner(phone_no):\n raise serializers.ValidationError(\"Please check your phone no. the format is incorrect\")\n\n try:\n us = User.objects.get(username__iexact=phone_no)\n except User.DoesNotExist:\n raise serializers.ValidationError(\"Phone number must already be registered before doing this\")\n\n if us.hierarchy != 'master':\n raise serializers.ValidationError(\"Phone number must not be a slave to another user\")\n\n return attrs",
"def test_invalid_user_without_email(self):\n email = ''\n password = None\n with self.assertRaises(ValueError):\n self.user_manager.create_user(email=email, password=password)",
"def test_new_user_invalid_email(self):\n with self.assertRaises(ValueError):\n get_user_model().objects.create_user(\n email=None,\n password=self.test_user_pass,\n name=self.test_user_name,\n )",
"def test_valid_phone_invalid():\n assert not valid_phone(\"\")\n assert not valid_phone(\"000-000-00000\")\n assert not valid_phone(\"000-0000-0000\")\n assert not valid_phone(\"0000-000-0000\")\n assert not valid_phone(\"00000000000\")\n assert not valid_phone(\"foobar\")"
] | [
"0.7145561",
"0.70290124",
"0.6975835",
"0.68602705",
"0.68565595",
"0.6848795",
"0.684489",
"0.6838828",
"0.68144745",
"0.6813772",
"0.6810992",
"0.6809171",
"0.68087786",
"0.68087786",
"0.68087786",
"0.68087786",
"0.68087786",
"0.6709067",
"0.67028594",
"0.6652703",
"0.66233295",
"0.66155636",
"0.6599365",
"0.6571285",
"0.6551518",
"0.6550027",
"0.6540607",
"0.6514016",
"0.6495744",
"0.64895654"
] | 0.7310096 | 0 |
Ensure we can't list users without authentication. | def test_list_users_without_authenticate(self):
response = self.client.get(reverse('user-list'))
content = {"detail": "Authentication credentials were not provided."}
self.assertEqual(json.loads(response.content), content)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_list_users_without_permissions(self):\n self.client.force_authenticate(user=self.user)\n\n response = self.client.get(reverse('user-list'))\n\n content = {\n 'detail': 'You do not have permission to perform this action.'\n }\n self.assertEqual(json.loads(response.content), content)\n\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)",
"def test_unauthenticated_user_denial(self):\n\n self.response = self.client.get(\"/api/users/users_list/\")\n self.assertEqual(self.response.status_code, status.HTTP_403_FORBIDDEN)\n self.assertEqual(\n 'Authentication credentials were not provided.', self.response.data['detail'])",
"def test_admin_user_list_all_users_permission_denied(self):\n self.client.logout()\n self.client.login(\n username=self.invalid_user.username,\n password=self.invalid_user.password\n )\n response = self.client.get(CONSTS.USER_ADMIN_LIST)\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)",
"def test_get_users_non_admin(client: FlaskClient) -> None:\n # Non-admin users are not allowed to make the request\n username = create_random_username()\n auth_token = create_auth_token(username)\n response = get_users(client, auth_token.signed)\n assert_error_response(response, HTTPStatus.FORBIDDEN)",
"def test_if_forbiden_for_authenticated_permissions(self):\r\n res = self.client_authenticated.get(reverse(LIST_USER_URL),data={})\r\n self.assertEqual(res.status_code, status.HTTP_403_FORBIDDEN)",
"def test_get_users_unauthenticated(client: FlaskClient) -> None:\n # Unauthenticated users are not allowed to make the request\n response = get_users(client)\n assert_error_response(response, HTTPStatus.UNAUTHORIZED)",
"def test_user_get_all(self):\n response = self.app.get('/api/v3/users', headers=self.user_header)\n self.assertEqual(response.status_code, 401)",
"def test_cannot_view_all_users_with_blacklisted_token(self):\n resp = self.admin_create_user()\n reply = self.admin_create_user2()\n resp = self.admin_login()\n token = resp['token']\n\n resp = self.client.delete(\n '/api/v1/logout',\n headers={'Authorization': 'Bearer {}'.format(token)}\n )\n reply = json.loads(resp.data.decode())\n self.assertEqual(reply['message'], 'You are successfully logged out!')\n self.assertEqual(resp.status_code, 200)\n\n resp = self.client.get(\n '/api/v1/users',\n headers={'Authorization': 'Bearer {}'.format(token)}\n )\n reply = json.loads(resp.data.decode())\n self.assertEqual(reply['message'], 'Invalid Authentication, Please Login!')\n self.assertEqual(resp.status_code, 401)",
"def test_10_admin_user_not_listed(self):\r\n self.register()\r\n res = self.app.get('/admin/users', follow_redirects=True)\r\n assert \"Manage Admin Users\" in res.data, res.data\r\n assert \"Current Users with Admin privileges\" not in res.data, res.data\r\n assert \"John\" not in res.data, res.data",
"def test_if_not_available_for_unauthorized(self):\r\n res = self.not_authenticated.get(reverse(LIST_USER_URL),data={})\r\n self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)",
"def test_show_private_lists_invalid(self):\n with self.client as c:\n with c.session_transaction() as sess:\n sess[CURR_USER_KEY] = self.user2.id\n \n res = c.get(\"/users/tester1/private-lists\")\n\n self.assertEqual(res.status_code, 302)",
"def test_anonymous_user_read(self):\r\n with self.flask_app.test_request_context('/'):\r\n for token in self.auth_providers:\r\n assert_raises(Unauthorized,\r\n getattr(require, 'token').read,\r\n token)",
"def test_not_authenticated(self):\n pass # lint-amnesty, pylint: disable=unnecessary-pass",
"def test_not_authenticated(self):\n pass # lint-amnesty, pylint: disable=unnecessary-pass",
"def test_no_token_get_all(self):\n response = self.app.get('/api/v3/users')\n self.assertEqual(response.status_code, 401)",
"def test_unauthenticated_resource_allowed(self):\n raise NotImplementedError # FIXME",
"def test_authenticated_user_read(self):\r\n with self.flask_app.test_request_context('/'):\r\n for token in self.auth_providers:\r\n assert_raises(Forbidden,\r\n getattr(require, 'token').read,\r\n token)",
"def test_user_list_get_without_auth(client):\n\n response = client.get(\"/users\", headers={\"Accept\": \"application/vnd.api+json\"})\n assert response.status_code == 401\n assert get_content_type(response) == \"application/vnd.api+json\"\n assert json.loads(response.data.decode()) == {\n \"errors\": [\n {\n \"status\": 401,\n \"title\": \"Unauthorized\",\n \"detail\": \"Missing Authorization Header\",\n }\n ]\n }",
"def test_list_user(self):\n pass",
"def test_unauthenticated_get(self):\n url = reverse('edit-list')\n\n response = self.client.get(url)\n self.assertEqual(403, response.status_code)\n self.assertEqual('Forbidden', response.status_text)\n self.assertTrue(\n 'credentials were not provided.' in response.data.get('detail'))",
"def test_list_not_authenticated(self):\n response = self.client.get(\n reverse('retreat:waitqueuenotification-list'),\n format='json',\n )\n\n content = {'detail': 'Authentication credentials were not provided.'}\n\n self.assertEqual(json.loads(response.content), content)\n\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)",
"def test_need_login_to_see_usagelist(self):\n response = self.client.get(reverse('api_v1:usage-list'), follow=True)\n self.assertEqual(response.status_code, 403)",
"def test_list_members_without_auth(self):\n url = '/api/v1/communities/3/retrieve_members/'\n\n response = self.client.get(url)\n self.assertEqual(status.HTTP_401_UNAUTHORIZED, response.status_code)",
"def test_get_work_list_forbidden(self):\n # Attempt to get works list\n response = self.client.get(self.url)\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)",
"def test_get_list_of_followers_without_auth(self):\n with self.settings(\n EMAIL_BACKEND='django.core.mail.backends.locmem.EmailBackend'):\n response = self.client.get(self.followers_url)\n self.assertEqual(response.data['detail'],\n \"Authentication credentials were not provided.\")\n\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)",
"def get_illegal_users(sessions, users):\n\n # Don't care about specific users being allowed to log in\n if not users:\n return []\n\n illegal_users = []\n\n # Record user sessions not whitelisted by the check\n for session in sessions:\n user = sessions[session]['user']\n if users:\n if user not in users:\n illegal_users.append(session + ':' + user)\n\n return illegal_users",
"def test_show_private_lists_valid(self):\n with self.client as c:\n with c.session_transaction() as sess:\n sess[CURR_USER_KEY] = self.user1.id\n \n res = c.get(\"/users/tester1/private-lists\")\n\n self.assertEqual(res.status_code, 200)",
"def test_list_playlists_by_anonymous_user(self):\n factories.PlaylistFactory()\n response = self.client.get(\"/api/playlists/\")\n self.assertEqual(response.status_code, 401)",
"def test_list_not_authenticated(self):\n response = self.client.get('/api/products/')\n expected = {'detail': 'Authentication credentials were not provided.'}\n self.assertEqual(response.status_code, 401)\n self.assertEqual(response.json(), expected)",
"def test_not_logged_user_cannot_access(self):\n\n utils.test_not_logged_cannot_access(self, self.url)"
] | [
"0.7592878",
"0.7291725",
"0.7268098",
"0.72500527",
"0.7094657",
"0.6990975",
"0.6927358",
"0.69206643",
"0.6902929",
"0.68474746",
"0.67786574",
"0.6738562",
"0.67326707",
"0.67326707",
"0.6700241",
"0.664444",
"0.6609902",
"0.6586935",
"0.6504748",
"0.6471352",
"0.64705676",
"0.64099103",
"0.63810873",
"0.634859",
"0.63304645",
"0.63026404",
"0.62980145",
"0.62765384",
"0.6274346",
"0.6273391"
] | 0.75815666 | 1 |
Ensure we can't list users without permissions. | def test_list_users_without_permissions(self):
self.client.force_authenticate(user=self.user)
response = self.client.get(reverse('user-list'))
content = {
'detail': 'You do not have permission to perform this action.'
}
self.assertEqual(json.loads(response.content), content)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_admin_user_list_all_users_permission_denied(self):\n self.client.logout()\n self.client.login(\n username=self.invalid_user.username,\n password=self.invalid_user.password\n )\n response = self.client.get(CONSTS.USER_ADMIN_LIST)\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)",
"def test_if_forbiden_for_authenticated_permissions(self):\r\n res = self.client_authenticated.get(reverse(LIST_USER_URL),data={})\r\n self.assertEqual(res.status_code, status.HTTP_403_FORBIDDEN)",
"def test_filter_user_permissions(self):\n data = {\n \"users\": {\n 1: \"view\",\n 2: \"NONE\",\n }\n }\n\n with self.assertRaises(exceptions.PermissionDenied):\n check_user_permissions(data, 1)\n\n with self.assertRaises(exceptions.PermissionDenied):\n check_user_permissions(data, 2)\n\n check_user_permissions(data, 3)",
"def test_10_admin_user_not_listed(self):\r\n self.register()\r\n res = self.app.get('/admin/users', follow_redirects=True)\r\n assert \"Manage Admin Users\" in res.data, res.data\r\n assert \"Current Users with Admin privileges\" not in res.data, res.data\r\n assert \"John\" not in res.data, res.data",
"def DeniedPermissions(self) -> _n_6_t_0:",
"def test_get_users_non_admin(client: FlaskClient) -> None:\n # Non-admin users are not allowed to make the request\n username = create_random_username()\n auth_token = create_auth_token(username)\n response = get_users(client, auth_token.signed)\n assert_error_response(response, HTTPStatus.FORBIDDEN)",
"def test_requester_is_no_admin(self) -> None:\n\n channel = self.make_request(\n \"GET\",\n self.url,\n access_token=self.other_user_tok,\n )\n\n self.assertEqual(403, channel.code, msg=channel.json_body)\n self.assertEqual(Codes.FORBIDDEN, channel.json_body[\"errcode\"])",
"def test_requester_is_no_admin(self) -> None:\n\n channel = self.make_request(\n \"GET\",\n self.url,\n access_token=self.other_user_tok,\n )\n\n self.assertEqual(403, channel.code, msg=channel.json_body)\n self.assertEqual(Codes.FORBIDDEN, channel.json_body[\"errcode\"])",
"def test_get_no_permission(self):\n self.user.user_permissions.clear()\n response = self._get()\n self.assertRedirectsToLogin(response)",
"def test_get_no_permission(self):\n self.user.user_permissions.clear()\n response = self._get()\n self.assertRedirectsToLogin(response)",
"def get_queryset(self):\n user = self.request.user\n if not (user.is_authenticated and user.check_permstring(\"builders\")):\n raise Http404(\"Not staff\")\n return super(IncompleteRosterListView, self).get_queryset()",
"def test_if_allowed_for_superusers_permissions(self):\r\n res = self.client_superuser.get(reverse(LIST_USER_URL),data={})\r\n self.assertEqual(res.status_code, status.HTTP_200_OK)",
"def test_user_without_share(self):\n set_permission(Permission.EDIT, self.user1, self.collection)\n\n # Can not add permissions to users.\n data = {\"users\": {self.user2.pk: \"view\"}}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)\n\n # Can not add permissions to groups.\n data = {\"users\": {self.group.pk: \"view\"}}\n resp = self._detail_permissions(self.collection.pk, data, self.user1)\n self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)",
"def test_get_no_permission(self):\n self.user.user_permissions.clear()\n response = self._get()\n self.assertRedirectsToLogin(response)\n self._assert_no_change()",
"def cant(self, permissions: Union[str, List]) -> bool:",
"def test_unauthenticated_user_denial(self):\n\n self.response = self.client.get(\"/api/users/users_list/\")\n self.assertEqual(self.response.status_code, status.HTTP_403_FORBIDDEN)\n self.assertEqual(\n 'Authentication credentials were not provided.', self.response.data['detail'])",
"def get_everyone_denied(self):",
"def test_get_no_permission(self):\n self.user.user_permissions.clear()\n response = self._get()\n self.assertRedirectsToLogin(response)\n self.assertEquals(self.model.objects.count(), 0)",
"def test_need_login_to_see_usagelist(self):\n response = self.client.get(reverse('api_v1:usage-list'), follow=True)\n self.assertEqual(response.status_code, 403)",
"def attendants_cannot_view_user_accounts(self):\n reply = self.admin_create_user()\n resp = self.attendant_login()\n token = resp['token']\n resp = self.client.get(\n '/api/v1/users',\n headers={'Authorization': 'Bearer {}'.format(token)}\n )\n reply = json.loads(resp.data.decode())\n self.assertEqual(reply['message'], 'Unauthorized Access!')\n self.assertEqual(resp.status_code, 401)",
"def test_requester_is_no_admin(self) -> None:\n\n channel = self.make_request(\n \"DELETE\",\n self.url,\n access_token=self.other_user_tok,\n )\n\n self.assertEqual(403, channel.code, msg=channel.json_body)\n self.assertEqual(Codes.FORBIDDEN, channel.json_body[\"errcode\"])",
"def test_list_user(self):\n pass",
"def test_no_permission(self):\n self.user.user_permissions.remove(*self.permissions)\n response = self._get()\n self.assertEqual(response.status_code, 302)",
"def test_no_permission(self):\n self.user.user_permissions.remove(*self.permissions)\n response = self._get()\n self.assertEqual(response.status_code, 302)",
"def test_cannot_view_all_users_with_blacklisted_token(self):\n resp = self.admin_create_user()\n reply = self.admin_create_user2()\n resp = self.admin_login()\n token = resp['token']\n\n resp = self.client.delete(\n '/api/v1/logout',\n headers={'Authorization': 'Bearer {}'.format(token)}\n )\n reply = json.loads(resp.data.decode())\n self.assertEqual(reply['message'], 'You are successfully logged out!')\n self.assertEqual(resp.status_code, 200)\n\n resp = self.client.get(\n '/api/v1/users',\n headers={'Authorization': 'Bearer {}'.format(token)}\n )\n reply = json.loads(resp.data.decode())\n self.assertEqual(reply['message'], 'Invalid Authentication, Please Login!')\n self.assertEqual(resp.status_code, 401)",
"def test_unauthenticated_resource_allowed(self):\n raise NotImplementedError # FIXME",
"def check_admin():\n\tif not current_user.is_admin:\n\t\tabort(403)",
"def test__user_passed_as_none(self):\r\n access.has_access(None, 'staff', 'global', None)",
"def test_list_users_without_authenticate(self):\n response = self.client.get(reverse('user-list'))\n\n content = {\"detail\": \"Authentication credentials were not provided.\"}\n self.assertEqual(json.loads(response.content), content)\n\n self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)",
"def testGetAccessDenied(self):\n self.runGet(None)\n self.response_401()\n for user in (self.norole, self.unrelated_owner):\n self.runGet(user)\n self.response_403()"
] | [
"0.7638809",
"0.69624966",
"0.6810429",
"0.6799262",
"0.67936945",
"0.6770797",
"0.6763287",
"0.6763287",
"0.66203946",
"0.66203946",
"0.6566771",
"0.65271413",
"0.6525158",
"0.64896446",
"0.64557964",
"0.6414002",
"0.63701904",
"0.6349665",
"0.63055766",
"0.62822396",
"0.626694",
"0.62578756",
"0.62459654",
"0.62459654",
"0.6243862",
"0.6235741",
"0.6225688",
"0.6202697",
"0.61954355",
"0.6195404"
] | 0.7912165 | 0 |
Ensure we can send notification for membership end | def test_send_notification_end_membership(self):
fixed_time = timezone.now()
end_time_membership = fixed_time + relativedelta(days=28)
self.user.membership = self.membership
self.user.membership_end = end_time_membership
self.user.save()
with mock.patch(
'store.serializers.timezone.now',
return_value=fixed_time
):
response = self.client.get(
reverse('user-execute-automatic-email-membership-end')
)
content = {
'stop': False,
'email_send_count': 1
}
self.assertEqual(
response.status_code,
status.HTTP_200_OK,
response.content
)
self.assertEqual(
json.loads(response.content),
content
)
self.assertEqual(len(mail.outbox), 1)
self.user.refresh_from_db()
self.assertEqual(self.user.membership_end_notification, fixed_time)
with mock.patch(
'store.serializers.timezone.now',
return_value=fixed_time
):
response = self.client.get(
reverse('user-execute-automatic-email-membership-end')
)
content = {
'stop': False,
'email_send_count': 0
}
self.assertEqual(
response.status_code,
status.HTTP_200_OK,
response.content
)
self.assertEqual(
json.loads(response.content),
content
)
# no new mail
self.assertEqual(len(mail.outbox), 1) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_successful_subscriptions_notifies_pm(self) -> None:\n invitee = self.example_user(\"iago\")\n\n current_stream = self.get_streams(invitee)[0]\n invite_streams = self.make_random_stream_names([current_stream])[:1]\n self.common_subscribe_to_streams(\n invitee,\n invite_streams,\n extra_post_data={\n \"announce\": \"true\",\n \"principals\": orjson.dumps([self.user_profile.id]).decode(),\n },\n )",
"def test_notify_user(self):\n foo = Foo.objects.create(name='foo', description='foo object')\n notify_users([self.user_a], foo, notification_type='foo')\n self.assertEqual(len(mail.outbox), 1)",
"def test_admin_approval_complete_email(self):\n new_user = UserModel().objects.create_user(**self.user_info)\n profile = self.registration_profile.objects.create_profile(new_user)\n profile.send_admin_approve_complete_email(Site.objects.get_current())\n self.assertEqual(len(mail.outbox), 1)\n self.assertEqual(mail.outbox[0].to, [self.user_info['email']])",
"def test_registered_no_notifications(self):\n msg = self._send(self.reg_conn, '1')\n self.assertEqual(len(msg.responses), 1)\n self.assertEqual(msg.responses[0].text,\n self.app.no_reminders)",
"def test_registered_no_notifications(self):\n msg = self._send(self.reg_conn, '1')\n self.assertEqual(len(msg.responses), 1)\n self.assertEqual(msg.responses[0].text,\n self.app.no_reminders)",
"def send_notification(self):\n s1 = System()\n b1 = Books(\"1984\", \"George Orwell\", \"Harvill Secker\", \"1949\", \"0123456789123\")\n m1 = Members(\"Richard\", \"Blackmore\", \"14-04-1945\", \"Weston\")\n s1.send_notification(\"Please return book\")\n self.assertEqual(m1.get_notifications(), None)\n s1.add_resource(b1)\n s1.lending_process(b1, m1)\n s1.send_notification(\"Please return book\")\n self.assertEqual(m1.get_notifications(), \"-Please return boo- \")",
"def test_admin_approval_email(self):\n new_user = UserModel().objects.create_user(**self.user_info)\n profile = self.registration_profile.objects.create_profile(new_user)\n profile.activated = True\n self.registration_profile.objects.send_admin_approve_email(\n new_user, Site.objects.get_current())\n self.assertEqual(len(mail.outbox), 1)\n admins_emails = [value[1] for value in settings.REGISTRATION_ADMINS]\n for email in mail.outbox[0].to:\n self.assertIn(email, admins_emails)",
"def test_notify_users(self):\n foo = Foo.objects.create(name='foo', description='foo object')\n notify_users(User.objects.all(), foo, notification_type='foo')\n self.assertEqual(len(mail.outbox), 2)",
"def send_reminder(self):\n pass",
"async def check_notify(self) -> None:\n async with self.lock:\n # We loop through a list of keys because we are going to\n # mutate the dictionary as we loop through it.\n for message_id in copy.copy(list(self.upcoming_events.keys())):\n upcoming_event = self.upcoming_events[message_id]\n if not upcoming_event.time_to_notify():\n continue\n\n # Delete upcoming event if it's a member event\n if isinstance(upcoming_event, MemberEvent):\n # Delete upcoming if it's a member event\n await self.delete_upcoming_event(message_id)\n\n # Prepare message from the queue if it's recurring\n stop_notifying = False\n if isinstance(upcoming_event, RecurringEvent):\n stop_notifying = (\n upcoming_event.event_cancelled\n or upcoming_event.notified\n )\n\n if not stop_notifying:\n # Send ongoing event message\n ongoing_message = await upcoming_event.send_ongoing_message(\n notif_message=self.ongoing_template,\n channel=self.calendar_channel\n )\n\n # Distribute DM\n await upcoming_event.distribute_dm(\n self.dm_template,\n self.organizer_dm_template\n )\n\n # Create new ongoing event\n ongoing_event = OngoingEvent(\n countdown_time=upcoming_event.start_time,\n timeout_length=self.event_timeout,\n organizer_id=upcoming_event.organizer.id,\n message_text=ongoing_message.content,\n message_embed=ongoing_message.embeds[0]\n )\n\n self.ongoing_events[ongoing_message.id] = ongoing_event",
"def notifyNewMember(self, request, context):\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')",
"def test_resend_inactive(self):\n self.invite.active = False\n self.invite.save()\n url = reverse(\n 'projectroles:api_invite_resend',\n kwargs={'projectinvite': self.invite.sodar_uuid},\n )\n response = self.request_knox(url, method='POST')\n self.assertEqual(response.status_code, 400, msg=response.content)\n self.assertEqual(len(mail.outbox), 0)",
"def test_registered_with_notification(self):\n now = datetime.datetime.now()\n notification = reminders.Notification.objects.create(num_days=1,\n time_of_day=now)\n reminders.SentNotification.objects.create(notification=notification,\n recipient=self.contact,\n status='sent',\n message='abc',\n appt_date=now,\n date_to_send=now)\n msg = self._send(self.reg_conn, '1')\n self.assertEqual(len(msg.responses), 1)\n self.assertEqual(msg.responses[0].text,\n self.app.thank_you)\n sent_notif = reminders.SentNotification.objects.all()\n self.assertEqual(sent_notif.count(), 1)\n self.assertEqual(sent_notif[0].status, 'confirmed')",
"def test_registered_with_notification(self):\n now = datetime.datetime.now()\n notification = reminders.Notification.objects.create(num_days=1,\n time_of_day=now)\n reminders.SentNotification.objects.create(notification=notification,\n recipient=self.contact,\n status='sent',\n message='abc',\n appt_date=now,\n date_to_send=now,\n date_queued=now)\n msg = self._send(self.reg_conn, '1')\n self.assertEqual(len(msg.responses), 1)\n self.assertEqual(msg.responses[0].text,\n self.app.thank_you)\n sent_notif = reminders.SentNotification.objects.all()\n self.assertEqual(sent_notif.count(), 1)\n self.assertEqual(sent_notif[0].status, 'confirmed')",
"def test_resend_delegate_no_perms(self):\n self.invite.role = self.role_delegate\n self.invite.save()\n delegate = self.make_user('delegate')\n self.make_assignment(self.project, delegate, self.role_delegate)\n\n url = reverse(\n 'projectroles:api_invite_resend',\n kwargs={'projectinvite': self.invite.sodar_uuid},\n )\n response = self.request_knox(\n url, method='POST', token=self.get_token(delegate)\n )\n self.assertEqual(response.status_code, 403, msg=response.content)\n self.assertEqual(len(mail.outbox), 0)",
"def test_private_message_sends_email(self, get_current):\n get_current.return_value.domain = \"testserver\"\n\n s, c = Setting.objects.get_or_create(user=self.to, name=\"email_private_messages\")\n s.value = True\n s.save()\n # User has setting, and should recieve notification email.\n\n assert Setting.get_for_user(self.to, \"email_private_messages\")\n\n self.client.login(username=self.sender.username, password=\"testpass\")\n post(self.client, \"messages.new\", {\"to\": self.to, \"message\": \"a message\"})\n subject = \"[SUMO] You have a new private message from [{sender}]\"\n\n attrs_eq(\n mail.outbox[0],\n to=[self.to.email],\n subject=subject.format(sender=self.sender.profile.name),\n )\n starts_with(\n mail.outbox[0].body, PRIVATE_MESSAGE_EMAIL.format(sender=self.sender.profile.name)\n )",
"def test_set_send_email_notifications(self):\n # Setup scenario\n username = 'tester'\n password = 'secret'\n user = Account.objects.create_user(username=username, email='[email protected]', password=password)\n\n self.assertTrue(self.client.login(username=username, password=password))\n\n # Verify initial assumptions\n self.assertTrue(user.send_email_notifications)\n\n # Run code\n resp = self.client.post(reverse('account.api.configure_email'), {\n 'send_email_notifications': False,\n }, format='json')\n\n # Verify expectations\n self.assertEquals(status.HTTP_201_CREATED, resp.status_code)\n self.assertTrue(user.send_email_notifications)",
"def test_api_user_resend_confirmation_post(self):\n pass",
"def can_notify(self, last_notification):\n return (\n features.is_enabled(features.EMAIL_NOTIFICATIONS)\n and self.notification_settings.via_email\n and api.can_email_user(self.user)\n and super().can_notify(last_notification)\n )",
"def notify(message):\n # TODO: clean up this ugly mess\n\n global notify_flag\n\n if not notify_flag:\n notify_flag = True\n message.reply(\":gear: Started expiration checking process; users will now \"\n \"be notified if their access is about to expire.\")\n else:\n message.reply(\"Cannot have more than one running instance of the notify \"\n \"function.\")\n return\n\n flag = \"tenmins\"\n while True:\n if flag is \"deleted\":\n info = sql.notify_users(\"hour\")\n flag = \"hour\"\n elif flag is \"hour\":\n info = sql.notify_users(\"tenmins\")\n flag = \"tenmins\"\n elif flag is \"tenmins\":\n info = sql.notify_users(\"deleted\")\n flag = \"deleted\"\n\n for person in info:\n if len(info[person]) == 0:\n continue\n try:\n users = hf.get_users()\n for user in users:\n if user[\"name\"] == person:\n dbs = []\n servers = []\n for grant in info[person]:\n dbs.append(grant[\"db\"])\n servers.append(grant[\"server\"])\n chan = hf.find_channel(message._client.channels, user[\"id\"])\n\n if flag is \"hour\":\n message._client.send_message(chan,\n Strings['NOTIFY_EXPIRE_HOUR'].format(\", \".join(dbs)) + \"\\n\"\n \"\" + Strings[\"NOTIFY_EXPIRE_INFO\"])\n for db, server in zip(dbs, servers):\n logging.info(\"{} reason=[NOTIFIED OF DATABASE ACCESS EXPIRING IN AN HOUR]\\n\".format(user[\"name\"]), server, db, \"notifyhour\")\n elif flag is \"tenmins\":\n message._client.send_message(chan,\n Strings['NOTIFY_EXPIRE_TENMINS'].format(\", \".join(dbs)) + \"\\n\"\n \"\" + Strings[\"NOTIFY_EXPIRE_INFO\"])\n for db, server in zip(dbs, servers):\n logging.info(\"{} reason=[NOTIFIED OF DATABASE ACCESS EXPIRING IN TEN MINUTES]\\n\".format(user[\"name\"]), server, db, \"notifyten\")\n elif flag is \"deleted\":\n message._client.send_message(chan,\n Strings['EXPIRE'].format(\", \".join(dbs)))\n message._client.send_message(public_channel,\n Strings[\"EXPIRE_PING\"].format(user[\"name\"],\n \", \".join(dbs)))\n for db, server in zip(dbs, servers):\n logging.info(\"{} reason=[NOTIFIED OF DATABASE ACCESS EXPIRING]\\n\".format(user[\"name\"]), server, db, \"notifyexpire\")\n\n except Exception as e:\n message._client.send_message(errors_channel, \"```{}```\".format(e))\n\n with open(\"data/jobs.json\") as f:\n jobs = json.load(f)\n\n new_jobs = []\n if len(jobs) > 0:\n for job in jobs:\n if not job.endswith(\"DONE\"):\n job_string = job.replace(\"10.132.140.160\", \"SQLCLUSTER02\").replace(\"10.132.140.150\", \"SQLCLUSTER01\")\n message._client.send_message(public_channel,\n Strings[\"LOGOUT_PLEASE\"].format(job_string.split(\":\")[0],\n job_string.split(\":\")[1]))\n new_jobs.append(job + \":DONE\")\n else:\n new_jobs.append(job)\n\n with open(\"data/jobs.json\", \"w\") as f:\n json.dump(new_jobs, f)\n\n # For use with Datadog\n with open(\"/opt/opsbot35/data/status.txt\", \"w\") as f:\n f.write(str(datetime.now()))\n\n time.sleep(5)",
"def test_resend_activation_email_nonexistent_user(self):\n self.assertFalse(self.registration_profile.objects.resend_activation_mail(\n email=self.user_info['email'],\n site=Site.objects.get_current(),\n ))\n self.assertEqual(len(mail.outbox), 0)",
"def test_alert_create_for_site_members(self):\n pass",
"def test_sending_mail(self):\n\n appt_date = datetime.date.today() + datetime.timedelta(days=7) # Default for email\n confirmed = self.create_confirmed_notification(self.test_patient, appt_date)\n\n # run email job\n daily_email_callback(self.router)\n\n self.assertEqual(len(mail.outbox), 1)\n message = mail.outbox[0]\n self.assertTrue(self.test_contact.email in message.to)",
"def test_private_message_not_sends_email(self, get_current):\n get_current.return_value.domain = \"testserver\"\n\n s, c = Setting.objects.get_or_create(user=self.to, name=\"email_private_messages\")\n # Now user should not recieve email.\n s.value = False\n s.save()\n assert not Setting.get_for_user(self.to, \"email_private_messages\")\n\n self.client.login(username=self.sender.username, password=\"testpass\")\n post(self.client, \"messages.new\", {\"to\": self.to, \"message\": \"a message\"})\n\n assert not mail.outbox",
"def test_resend_delegate(self):\n self.invite.role = self.role_delegate\n self.invite.save()\n url = reverse(\n 'projectroles:api_invite_resend',\n kwargs={'projectinvite': self.invite.sodar_uuid},\n )\n response = self.request_knox(url, method='POST')\n self.assertEqual(response.status_code, 200, msg=response.content)\n self.assertEqual(len(mail.outbox), 1)",
"async def anticipation(self, ctx: commands.Context):\n role = ctx.guild.get_role(529447810127495168)\n\n if role.id not in (r.id for r in ctx.author.roles):\n await ctx.author.add_roles(role, reason=\"/anticipation\")\n embed = discord.Embed(\n colour=discord.Colour.green(),\n description=\"Anticipation Notifications successfully added.\"\n )\n await ctx.send(embed=embed)\n\n else:\n await ctx.author.remove_roles(role, reason=\"/anticipation\")\n embed = discord.Embed(\n colour=discord.Colour.red(),\n description=\"Anticipation Notifications successfully removed.\"\n )\n await ctx.send(embed=embed)",
"def handle_sent(self, instance):\n if not instance.created_by:\n return\n\n activity = Activity(\n actor=instance.created_by,\n verb=RestrictedMailSent,\n object=instance,\n time=instance.used,\n extra_context={},\n )\n self.manager.add_activity(\n activity, [instance.created_by.pk], [NotificationFeed]\n )\n\n # Send notification\n notification = RestrictedMailSentNotification(instance.created_by)\n notification.notify()",
"def test_notify_reached_end_of_wait_queue(self):\n # self.client.force_authenticate(user=self.admin)\n\n notification_count = WaitQueueNotification.objects.all().count()\n\n self.retreat.next_user_notified = 2\n self.retreat.save()\n\n response = self.client.get(\n '/'.join([\n reverse('retreat:waitqueuenotification-list'),\n 'notify',\n ])\n )\n\n self.retreat.refresh_from_db()\n\n self.assertEqual(\n self.retreat.next_user_notified,\n 0,\n \"next_user_notified index invalid\"\n )\n\n # Assert that 0 reserved seats remain (since 0 users are waiting)\n self.assertEqual(\n self.retreat.reserved_seats,\n 0,\n \"reserved_seats index invalid\"\n )\n\n # Assert that 0 notification has been created\n # The old one has been deleted\n self.assertEqual(\n WaitQueueNotification.objects.all().count(),\n notification_count - 1,\n \"WaitQueueNotification count invalid\"\n )\n\n self.assertEqual(\n response.status_code,\n status.HTTP_200_OK,\n response.content,\n )\n\n response_data = json.loads(response.content)\n\n content = {\n 'detail': 'No reserved seats.',\n 'stop': True\n }\n\n self.assertEqual(response_data, content)\n\n self.assertEqual(len(mail.outbox), 0)",
"def send_registration_handle(sender, instance, **kwargs):\n #import ipdb; ipdb.set_trace()\n if instance._verifying: return\n #url=instance.get_absolute_url()\n url=reverse('registration-verify', request=instance.request, format=None)\n try:\n send_mail(\n 'Registration to ItalyInformaticaProject',\n #\"Please click on the link to validate your registration: /verify/%s/%s\"%(url.rstrip('/'),instance.token),\n \"Please click on the link to validate your registration: %s/%s/%s\"%(url.rstrip('/'),repr(instance.id),instance.token),\n '[email protected]',\n [instance.owner.email],\n fail_silently=False,\n )\n except Exception as e:\n instance.owner.delete()\n raise APIException(\"Cannot send email notification:%s\"%repr(e))",
"def notify_students():\n time_now = datetime.datetime.now(get_localzone())\n emails_to_send = Email.objects.all()\n for email in emails_to_send:\n if email.assignment.date_assigned <= time_now:\n send_mail(subject=email.subject,\n message=email.message,\n recipient_list=Student.objects.filter(assignments=email.assignment),\n from_email=None,\n fail_silently=False)\n email.delete()"
] | [
"0.631417",
"0.61985207",
"0.61863524",
"0.6167759",
"0.6167759",
"0.61630905",
"0.6139909",
"0.6068332",
"0.60387355",
"0.60372734",
"0.6013381",
"0.60123867",
"0.6006325",
"0.59600574",
"0.5957167",
"0.5927392",
"0.5894092",
"0.5890335",
"0.5887974",
"0.5873129",
"0.5872538",
"0.58361214",
"0.5829908",
"0.58294815",
"0.5777522",
"0.57497036",
"0.5746095",
"0.5745145",
"0.5735348",
"0.5732637"
] | 0.7043496 | 0 |
Ensure admin can credit tickets to a user | def test_credit_ticket_as_admin(self):
user = UserFactory()
self.assertEqual(user.tickets, 1)
nb_tickets_to_add = 5
data = {
'nb_tickets': nb_tickets_to_add,
}
self.client.force_authenticate(user=self.admin)
response = self.client.post(
reverse(
'user-credit-tickets',
kwargs={'pk': user.id},
),
data,
format='json',
)
self.assertEqual(
response.status_code,
status.HTTP_200_OK,
)
self.assertEqual(
User.objects.get(pk=user.id).tickets,
1 + nb_tickets_to_add
) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_credit_ticket_as_user(self):\n user = UserFactory()\n self.assertEqual(user.tickets, 1)\n nb_tickets_to_add = 5\n data = {\n 'nb_tickets': nb_tickets_to_add,\n }\n\n self.client.force_authenticate(user=self.user)\n response = self.client.post(\n reverse(\n 'user-credit-tickets',\n kwargs={'pk': user.id},\n ),\n data,\n format='json',\n )\n self.assertEqual(\n response.status_code,\n status.HTTP_403_FORBIDDEN,\n )",
"async def ticket_add(self, ctx, user: discord.Member):\n guild_settings = await self.config.guild(ctx.guild).all()\n is_admin = await is_admin_or_superior(self.bot, ctx.author) or any(\n [ur.id in guild_settings[\"supportroles\"] for ur in ctx.author.roles]\n )\n must_be_admin = not guild_settings[\"usercanmodify\"]\n\n if not is_admin and must_be_admin:\n await ctx.send(\"Only Administrators can add/remove other users to tickets.\")\n return\n elif not is_admin:\n author = ctx.author\n author_id = author.id\n elif is_admin:\n # Since the author isn't specified, and it's an admin, we need to guess on who\n # the author is\n inverted = {}\n for author_id, tickets in guild_settings[\"created\"].items():\n for ticket in tickets:\n inverted[ticket[\"channel\"]] = author_id\n try:\n author = ctx.guild.get_member(int(inverted[ctx.channel.id]))\n if author:\n author_id = author.id\n else:\n author_id = int(inverted[ctx.channel.id])\n except KeyError:\n author = ctx.author\n author_id = author.id\n\n index = None\n\n if not guild_settings[\"created\"][str(author_id)]:\n await ctx.send(\"You don't have any open tickets.\")\n return\n elif len(guild_settings[\"created\"][str(author_id)]) == 1:\n index = 0\n else:\n for i, ticket in enumerate(guild_settings[\"created\"][str(author_id)]):\n if ticket[\"channel\"] == ctx.channel.id:\n index = i\n break\n\n if index is None:\n await ctx.send(\n \"You have multiple tickets open. \"\n \"Please run this command in the ticket channel you wish to edit.\"\n )\n return\n\n channel = self.bot.get_channel(guild_settings[\"created\"][str(author_id)][index][\"channel\"])\n\n if user.id in guild_settings[\"created\"][str(author_id)][index][\"added\"]:\n await ctx.send(\"That user is already added.\")\n return\n\n adding_is_admin = await is_admin_or_superior(self.bot, user) or any(\n [ur.id in guild_settings[\"supportroles\"] for ur in user.roles]\n )\n\n if adding_is_admin:\n await ctx.send(\"You cannot add a user in support or admin team.\")\n return\n\n channel = self.bot.get_channel(guild_settings[\"created\"][str(author_id)][index][\"channel\"])\n if not channel:\n await ctx.send(\"The ticket channel has been deleted.\")\n return\n\n try:\n await channel.set_permissions(user, send_messages=True, read_messages=True)\n except discord.Forbidden:\n await ctx.send(\n \"The Manage Permissions channel for me has been removed. \"\n \"I am unable to modify this ticket.\"\n )\n return\n\n async with self.config.guild(ctx.guild).created() as created:\n created[str(author_id)][index][\"added\"].append(user.id)\n\n await ctx.send(f\"{user.mention} has been added to the ticket.\")",
"def test_credit_ticket_negative_int(self):\n user = UserFactory()\n self.assertEqual(user.tickets, 1)\n nb_tickets_to_add = -5\n data = {\n 'nb_tickets': nb_tickets_to_add,\n }\n\n self.client.force_authenticate(user=self.admin)\n response = self.client.post(\n reverse(\n 'user-credit-tickets',\n kwargs={'pk': user.id},\n ),\n data,\n format='json',\n )\n self.assertEqual(\n response.status_code,\n status.HTTP_400_BAD_REQUEST,\n )",
"async def ticket_remove(self, ctx, user: discord.Member):\n guild_settings = await self.config.guild(ctx.guild).all()\n is_admin = await is_admin_or_superior(self.bot, ctx.author) or any(\n [ur.id in guild_settings[\"supportroles\"] for ur in ctx.author.roles]\n )\n must_be_admin = not guild_settings[\"usercanmodify\"]\n\n if not is_admin and must_be_admin:\n await ctx.send(\"Only Administrators can add/remove other users to tickets.\")\n return\n elif not is_admin:\n author = ctx.author\n author_id = author.id\n elif is_admin:\n # Since the author isn't specified, and it's an admin, we need to guess on who\n # the author is\n inverted = {}\n for author_id, tickets in guild_settings[\"created\"].items():\n for ticket in tickets:\n inverted[ticket[\"channel\"]] = author_id\n try:\n author = ctx.guild.get_member(int(inverted[ctx.channel.id]))\n if author:\n author_id = author.id\n else:\n author_id = int(inverted[ctx.channel.id])\n except KeyError:\n author = ctx.author\n author_id = author.id\n\n index = None\n\n if not guild_settings[\"created\"][str(author_id)]:\n await ctx.send(\"You don't have any open tickets.\")\n return\n elif len(guild_settings[\"created\"][str(author_id)]) == 1:\n index = 0\n else:\n for i, ticket in enumerate(guild_settings[\"created\"][str(author_id)]):\n if ticket[\"channel\"] == ctx.channel.id:\n index = i\n break\n\n if index is None:\n await ctx.send(\n \"You have multiple tickets open. \"\n \"Please run this command in the ticket channel you wish to edit.\"\n )\n return\n\n if user.id not in guild_settings[\"created\"][str(author_id)][index][\"added\"]:\n await ctx.send(\"That user is not added.\")\n return\n\n removing_is_admin = await is_admin_or_superior(self.bot, user) or any(\n [ur.id in guild_settings[\"supportroles\"] for ur in user.roles]\n )\n\n if removing_is_admin:\n await ctx.send(\"You cannot remove a user in support or admin team.\")\n return\n\n channel = self.bot.get_channel(guild_settings[\"created\"][str(author_id)][index][\"channel\"])\n if not channel:\n await ctx.send(\"The ticket channel has been deleted.\")\n\n try:\n await channel.set_permissions(user, send_messages=False, read_messages=False)\n except discord.Forbidden:\n await ctx.send(\n \"The Manage Permissions channel for me has been removed. \"\n \"I am unable to modify this ticket.\"\n )\n return\n\n async with self.config.guild(ctx.guild).created() as created:\n created[str(author_id)][index][\"added\"].remove(user.id)\n\n await ctx.send(f\"{user.mention} has been removed from the ticket.\")",
"def test_credit_ticket_not_int(self):\n user = UserFactory()\n self.assertEqual(user.tickets, 1)\n nb_tickets_to_add = 'this is not an int'\n data = {\n 'nb_tickets': nb_tickets_to_add,\n }\n\n self.client.force_authenticate(user=self.admin)\n response = self.client.post(\n reverse(\n 'user-credit-tickets',\n kwargs={'pk': user.id},\n ),\n data,\n format='json',\n )\n self.assertEqual(\n response.status_code,\n status.HTTP_400_BAD_REQUEST,\n )",
"def user_requested_access(user):\r\n user = CourseCreator.objects.get(user=user)\r\n if user.state != CourseCreator.GRANTED:\r\n user.state = CourseCreator.PENDING\r\n user.save()",
"async def admin_credit(self, ctx, target: discord.Member, sum: int = 100):\n if is_registered(target.id):\n \n inventories = get_file(\"inventories\")\n inventories[str(target.id)][\"balance\"] += sum\n update_file(\"inventories\", inventories)\n\n embed = discord.Embed(color=admin_color)\n embed.set_author(name=\"🛠️ Admin\")\n embed.add_field(name=\"💰 Credit\",\n value=f\"{ctx.author.mention}, {target.mention} a été crédité de `{sum}` PO (pièces d'or)\")\n embed = set_footer(embed, ctx)\n await ctx.send(embed=embed)",
"def AdminTicket(ticket):\n try:\n data, = xmlrpclib.loads(ticket)[0]\n name = data['slivers'][0]['name']\n if data != None:\n deliver_ticket(data)\n logger.log('api_calls: Admin Ticket delivered for %s' % name)\n Create(database.db.get(name))\n except Exception, err:\n raise xmlrpclib.Fault(102, 'Ticket error: ' + str(err))",
"def user_allow_credit(self):\n try:\n return self.user.creditAllowed()\n except AttributeError:\n return False",
"def test_user_can_change_admin(self):\n self.assertTrue(self.story.user_can_change(self.admin_user))",
"def check_credit(self):\n self.ensure_one()\n getattr(self, '%s_check_credit' % self.provider, lambda: None)()",
"def allowed(self, user, amount):\n return True",
"def add_ticket(self, user):\n profile = user.get_profile()\n if profile.available_tickets() <= 0:\n raise Exception(\"This user does not have any tickets to allocate.\")\n \n ticket = RaffleTicket(raffle_prize=self, user=user)\n ticket.save()",
"async def adduser(ctx, user: discord.Member):\n channel = ctx.channel\n if not IsATicket(channel.id):\n await ctx.send(\n \"This is not a ticket! Users can only be added to a ticket channel\"\n )\n return\n\n await channel.set_permissions(user, read_messages=True, send_messages=True)\n await ctx.message.delete()",
"def issue_ticket(database, user):\n try:\n # check if user is an officer\n c = database.cursor()\n c.execute('SELECT utype FROM users WHERE uid = ?', (user, ))\n user_type = c.fetchone()[0]\n\n # If user is an officer \n if user_type == 'o':\n reg_num = int(input(\"Registration number: \"))\n c.execute(\"\"\"SELECT p.fname, p.lname, v.make, v.model, v.year, v.color FROM registrations r JOIN\n persons p ON (r.fname, r.lname) = (p.fname, p.lname) JOIN vehicles v ON r.vin = v.vin WHERE r.regno = ?\"\"\",(reg_num,))\n result = c.fetchone()\n fname = result[0]\n lname = result[1]\n make = result[2]\n model = result[3]\n year = result[4]\n color = result[5]\n print(\"\\n--------------------------\\nInformation\\n--------------------------\\n\")\n print(\"First Name: \", fname)\n print(\"Last Name: \", lname)\n print(\"Make: \", make)\n print(\"Model: \", model)\n print(\"Year: \", year)\n print(\"Color: \", color)\n\n print(\"\\n-------------------------\\nTicket the registra: \\n------------------------\\n\")\n violation_date = str(input(\"Violation Date: \")) # if not provided, today's date\n if violation_date == \"\":\n violation_date = datetime.today().strftime('%Y-%m-%d')\n violation_text = str(input(\"violation Text: \"))\n amount = str(input(\"Amount: \"))\n tno = randrange(1001, 9867699)\n\n c.execute(q.insert_into_tickets, (tno, reg_num, amount, violation_text, violation_date))\n\n database.commit()\n print(pm.all_done)\n # if user is not an officer\n else:\n print(pm.for_officers_only)\n sys.exit()\n except:\n print(pm.something_went_wrong)\n sys.exit()",
"def write_authorize(cls, user, obj):\n if not obj.delivery.deadline.assignment_group.is_examiner(user):\n raise PermissionDenied()",
"def test_func(self, user):\n return self.get_object().admin == user",
"def create_ticket(self, user):\n return Ticket.objects.create_ticket('test', user)",
"def can_approve(self, user, **data):\n raise Return(False)",
"def credit_deliverer():\n return True",
"async def plaguebearer(self, ctx):\n currency = await bank.get_currency_name(ctx.guild)\n await self.config.user(ctx.author).gameRole.set(\"Plaguebearer\")\n await self.notify_user(ctx=ctx, user=ctx.author, notificationType=\"plaguebearer\")\n await ctx.send(f\"{ctx.author} has spent 10,000 {currency} and become a Plaguebearer.\")",
"def userreject_admin(user_id):\n\n # User objects list which includes list of all users which can be broken down into editors and sponsors\n # get individual user\n user = db.session.query(User).filter(User.id==user_id).first()\n # update status to approved\n user.user_status = 'rejected'\n # commit to database\n db.session.commit()\n\n return redirect(url_for('admin_bp.usersview_admin'))",
"def test_creating_supply_user(self):\n request = self.factory.post(\n '/api/supplies/', {'name': '3d printer 2', 'state': 'good state', 'description': 'prints 3d objects'})\n force_authenticate(request, user=self.testuser1)\n response = SupplyListView.as_view()(request)\n\n self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)\n try:\n Supply.objects.get(name='3d printer')\n self.fail()\n except Supply.DoesNotExist:\n pass",
"def test_user_can_change_as_author(self):\n self.assertTrue(self.story.user_can_change(self.user1))",
"def prepare_ticket(self, req, ticket, fields, actions):",
"def test_admin_update_user_taskrun(self):\r\n\r\n with self.flask_app.test_request_context('/'):\r\n user_taskrun = TaskRunFactory.create()\r\n\r\n assert self.mock_admin.id != user_taskrun.user.id\r\n assert_raises(Forbidden,\r\n getattr(require, 'taskrun').update,\r\n user_taskrun)",
"def accept(self, responder):\n try:\n with transaction.atomic():\n self._apply_decision(self.Status.ACCEPTED, responder)\n # update the user credentials\n user = self.user\n user.is_credentialed = True\n user.credential_datetime = timezone.now()\n user.save()\n except DatabaseError:\n messages.error(request, 'Database error. Please try again.')",
"def check_admin():\r\n if not current_user.is_admin:\r\n abort(403)",
"def write_authorize_examinercommon(cls, user, obj):\n if obj.delivered_by != None:\n raise PermissionDenied()",
"def confirm_meal(request, e_id):\n enrolment = Enrolment.objects.get(pk=e_id)\n total_meal = enrolment.day_meal_count + enrolment.night_meal_count\n price = enrolment.plan.price\n extended_user = ExtendedUser.objects.get(user=request.user)\n extended_user.balance -= price * total_meal\n if extended_user.balance >= 0:\n extended_user.save()\n owner = enrolment.plan.store.owner\n owner = ExtendedUser.objects.get(user=owner)\n owner.balance += price * total_meal\n owner.save()\n return view_enrolments(request)"
] | [
"0.76057625",
"0.7198469",
"0.663993",
"0.65313613",
"0.6304915",
"0.6245688",
"0.62093043",
"0.60432243",
"0.5984338",
"0.59788305",
"0.5956783",
"0.5948479",
"0.5885243",
"0.58366257",
"0.5809721",
"0.5722764",
"0.569822",
"0.5689488",
"0.5654343",
"0.5625085",
"0.56015855",
"0.5596882",
"0.5558705",
"0.554677",
"0.55456024",
"0.5541014",
"0.5533439",
"0.55198914",
"0.5513701",
"0.550214"
] | 0.8034523 | 0 |
Ensure user can't credit tickets to a user | def test_credit_ticket_as_user(self):
user = UserFactory()
self.assertEqual(user.tickets, 1)
nb_tickets_to_add = 5
data = {
'nb_tickets': nb_tickets_to_add,
}
self.client.force_authenticate(user=self.user)
response = self.client.post(
reverse(
'user-credit-tickets',
kwargs={'pk': user.id},
),
data,
format='json',
)
self.assertEqual(
response.status_code,
status.HTTP_403_FORBIDDEN,
) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_credit_ticket_negative_int(self):\n user = UserFactory()\n self.assertEqual(user.tickets, 1)\n nb_tickets_to_add = -5\n data = {\n 'nb_tickets': nb_tickets_to_add,\n }\n\n self.client.force_authenticate(user=self.admin)\n response = self.client.post(\n reverse(\n 'user-credit-tickets',\n kwargs={'pk': user.id},\n ),\n data,\n format='json',\n )\n self.assertEqual(\n response.status_code,\n status.HTTP_400_BAD_REQUEST,\n )",
"def test_credit_ticket_not_int(self):\n user = UserFactory()\n self.assertEqual(user.tickets, 1)\n nb_tickets_to_add = 'this is not an int'\n data = {\n 'nb_tickets': nb_tickets_to_add,\n }\n\n self.client.force_authenticate(user=self.admin)\n response = self.client.post(\n reverse(\n 'user-credit-tickets',\n kwargs={'pk': user.id},\n ),\n data,\n format='json',\n )\n self.assertEqual(\n response.status_code,\n status.HTTP_400_BAD_REQUEST,\n )",
"def test_credit_ticket_as_admin(self):\n user = UserFactory()\n self.assertEqual(user.tickets, 1)\n nb_tickets_to_add = 5\n data = {\n 'nb_tickets': nb_tickets_to_add,\n }\n\n self.client.force_authenticate(user=self.admin)\n response = self.client.post(\n reverse(\n 'user-credit-tickets',\n kwargs={'pk': user.id},\n ),\n data,\n format='json',\n )\n self.assertEqual(\n response.status_code,\n status.HTTP_200_OK,\n )\n\n self.assertEqual(\n User.objects.get(pk=user.id).tickets,\n 1 + nb_tickets_to_add\n )",
"def check_credit(self):\n self.ensure_one()\n getattr(self, '%s_check_credit' % self.provider, lambda: None)()",
"def cant(user, action):\n\n return not can(user, action)",
"def test_not_permitted(self):\r\n test_user_client, test_user = self.create_non_staff_authed_user_client()\r\n CourseEnrollment.enroll(test_user, self.course.id)\r\n response = test_user_client.get(self.orphan_url)\r\n self.assertEqual(response.status_code, 403)\r\n response = test_user_client.delete(self.orphan_url)\r\n self.assertEqual(response.status_code, 403)",
"def allowed(self, user, amount):\n return True",
"def test_validate_ticket_no_ticket(self):\n with self.assertRaises(InvalidRequest):\n ProxyGrantingTicket.objects.validate_ticket(None, 'https://www.example.com')",
"def test_noTicket():\n assert testUser1.buyTicket(None) == False",
"def user_allow_credit(self):\n try:\n return self.user.creditAllowed()\n except AttributeError:\n return False",
"def user_requested_access(user):\r\n user = CourseCreator.objects.get(user=user)\r\n if user.state != CourseCreator.GRANTED:\r\n user.state = CourseCreator.PENDING\r\n user.save()",
"def test_validate_ticket_invalid_ticket(self):\n with self.assertRaises(InvalidTicket):\n ProxyGrantingTicket.objects.validate_ticket('12345', 'https://www.example.com')",
"def test_negative_conditions(self):\r\n outline_url = reverse_course_url('course_handler', self.course.id)\r\n # register a non-staff member and try to delete the course branch\r\n non_staff_client, _ = self.create_non_staff_authed_user_client()\r\n response = non_staff_client.delete(outline_url, {}, HTTP_ACCEPT='application/json')\r\n self.assertEqual(response.status_code, 403)",
"def test_validate_ticket_consumed_ticket(self):\n pgt = ProxyGrantingTicketFactory(consume=True)\n with self.assertRaises(InvalidTicket):\n ProxyGrantingTicket.objects.validate_ticket(pgt.ticket, 'https://www.example.com')",
"def test_buyTicket_AlreadySold():\n assert not testUser2.buyTicket(testTicket1)\n assert testTicket1 in testUser1.inventory\n assert testTicket1 not in testUser2.inventory\n assert not testTicket1.for_sale\n assert testUser2.wallet == 500",
"def test_validate_ticket_does_not_exist(self):\n ticket = 'PGT-0000000000-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'\n with self.assertRaises(InvalidTicket):\n ProxyGrantingTicket.objects.validate_ticket(ticket, 'https://www.example.com')",
"def test_case_user_not_yet_customer(self):\n pass",
"def test_buyTicket_NotForSale():\n old_venue_balance = testVenue.wallet\n assert not testUser2.buyTicket(testTicket2)\n assert testTicket2 not in testUser2.inventory\n assert not testTicket1.for_sale\n assert testUser2.wallet == 500\n assert testVenue.wallet == old_venue_balance",
"def test_deny_pending_payment(self):\n pass",
"def _is_ticket_blocked(self, registration, **kwargs):\n if not self._is_ticketing_handled(registration.registration_form):\n return False\n req = registration.cern_access_request\n return not req or not req.is_active or not req.has_identity_info",
"async def ticket_add(self, ctx, user: discord.Member):\n guild_settings = await self.config.guild(ctx.guild).all()\n is_admin = await is_admin_or_superior(self.bot, ctx.author) or any(\n [ur.id in guild_settings[\"supportroles\"] for ur in ctx.author.roles]\n )\n must_be_admin = not guild_settings[\"usercanmodify\"]\n\n if not is_admin and must_be_admin:\n await ctx.send(\"Only Administrators can add/remove other users to tickets.\")\n return\n elif not is_admin:\n author = ctx.author\n author_id = author.id\n elif is_admin:\n # Since the author isn't specified, and it's an admin, we need to guess on who\n # the author is\n inverted = {}\n for author_id, tickets in guild_settings[\"created\"].items():\n for ticket in tickets:\n inverted[ticket[\"channel\"]] = author_id\n try:\n author = ctx.guild.get_member(int(inverted[ctx.channel.id]))\n if author:\n author_id = author.id\n else:\n author_id = int(inverted[ctx.channel.id])\n except KeyError:\n author = ctx.author\n author_id = author.id\n\n index = None\n\n if not guild_settings[\"created\"][str(author_id)]:\n await ctx.send(\"You don't have any open tickets.\")\n return\n elif len(guild_settings[\"created\"][str(author_id)]) == 1:\n index = 0\n else:\n for i, ticket in enumerate(guild_settings[\"created\"][str(author_id)]):\n if ticket[\"channel\"] == ctx.channel.id:\n index = i\n break\n\n if index is None:\n await ctx.send(\n \"You have multiple tickets open. \"\n \"Please run this command in the ticket channel you wish to edit.\"\n )\n return\n\n channel = self.bot.get_channel(guild_settings[\"created\"][str(author_id)][index][\"channel\"])\n\n if user.id in guild_settings[\"created\"][str(author_id)][index][\"added\"]:\n await ctx.send(\"That user is already added.\")\n return\n\n adding_is_admin = await is_admin_or_superior(self.bot, user) or any(\n [ur.id in guild_settings[\"supportroles\"] for ur in user.roles]\n )\n\n if adding_is_admin:\n await ctx.send(\"You cannot add a user in support or admin team.\")\n return\n\n channel = self.bot.get_channel(guild_settings[\"created\"][str(author_id)][index][\"channel\"])\n if not channel:\n await ctx.send(\"The ticket channel has been deleted.\")\n return\n\n try:\n await channel.set_permissions(user, send_messages=True, read_messages=True)\n except discord.Forbidden:\n await ctx.send(\n \"The Manage Permissions channel for me has been removed. \"\n \"I am unable to modify this ticket.\"\n )\n return\n\n async with self.config.guild(ctx.guild).created() as created:\n created[str(author_id)][index][\"added\"].append(user.id)\n\n await ctx.send(f\"{user.mention} has been added to the ticket.\")",
"def test_not_owner(self):\n creating_user = create_user()\n creating_user.save()\n festival = create_festival('test', creating_user)\n festival.save()\n\n concert = create_concert(festival, 'test')\n concert.save()\n\n login(self.client)\n\n client = create_client('test')\n client.delete_access = True\n client.save()\n\n response = self.client.post('/backend/u/conc/', {'client': 'test', 'id': concert.pk})\n self.assertEqual(response.status_code, 200)\n self.assertEqual('Permission not granted', response.content.decode('utf-8'))",
"def test_create_ticket_no_expires(self):\n st = ServiceTicket.objects.create_ticket(user=self.user)\n self.assertTrue(st.expires > now())",
"def add_ticket(self, user):\n profile = user.get_profile()\n if profile.available_tickets() <= 0:\n raise Exception(\"This user does not have any tickets to allocate.\")\n \n ticket = RaffleTicket(raffle_prize=self, user=user)\n ticket.save()",
"def test_can_not_cancel_past_block(self):\n date = datetime.now().replace(minute=0, second=0, microsecond=0) - timedelta(hours=1)\n\n response = self.client.post(\n reverse('bookings', kwargs={'facility': 'g'}), {'cancel': str(date.timestamp())})\n\n context = response.context\n bookings = context[\"bookings\"]\n\n self.assertEqual(response.status_code, 403)\n self.assertEqual(context[\"quota\"], settings.BOOKINGS_QUOTA)",
"def validate_ownership(item, user_id):\n if item.user_id != user_id:\n raise Forbidden('You are not allowed to modify this item.')",
"def test_can_not_cancel_current_block(self):\n date = datetime.now().replace(minute=0, second=0, microsecond=0)\n\n response = self.client.post(\n reverse('bookings', kwargs={'facility': 'g'}), {'cancel': str(date.timestamp())})\n\n context = response.context\n bookings = context[\"bookings\"]\n\n self.assertEqual(response.status_code, 403)\n self.assertEqual(context[\"quota\"], settings.BOOKINGS_QUOTA)",
"def test_user_can_change_not_author(self):\n self.assertFalse(self.story.user_can_change(self.user2))",
"def test_validate_ticket(self):\n pgt = ProxyGrantingTicketFactory()\n ticket = ProxyGrantingTicket.objects.validate_ticket(pgt.ticket, 'https://www.example.com')\n self.assertEqual(ticket, pgt)\n self.assertFalse(ticket.is_consumed())",
"def test_no_enable_paid_course_registration(self):\r\n self.add_to_cart()\r\n self.request.user = self.user\r\n context = user_has_cart_context_processor(self.request)\r\n self.assertFalse(context['display_shopping_cart'])"
] | [
"0.69946736",
"0.6597223",
"0.64734346",
"0.6470661",
"0.63877785",
"0.6298837",
"0.62688833",
"0.62648696",
"0.62245417",
"0.61735904",
"0.6159603",
"0.6098433",
"0.6039654",
"0.60156566",
"0.600577",
"0.5985433",
"0.5973899",
"0.5934116",
"0.59265006",
"0.59215",
"0.5908241",
"0.58887154",
"0.5870351",
"0.5846558",
"0.583821",
"0.58281153",
"0.58258164",
"0.58253825",
"0.58220494",
"0.5781086"
] | 0.6947074 | 1 |
Ensure admin can't credit negative tickets to a user | def test_credit_ticket_negative_int(self):
user = UserFactory()
self.assertEqual(user.tickets, 1)
nb_tickets_to_add = -5
data = {
'nb_tickets': nb_tickets_to_add,
}
self.client.force_authenticate(user=self.admin)
response = self.client.post(
reverse(
'user-credit-tickets',
kwargs={'pk': user.id},
),
data,
format='json',
)
self.assertEqual(
response.status_code,
status.HTTP_400_BAD_REQUEST,
) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_credit_ticket_as_admin(self):\n user = UserFactory()\n self.assertEqual(user.tickets, 1)\n nb_tickets_to_add = 5\n data = {\n 'nb_tickets': nb_tickets_to_add,\n }\n\n self.client.force_authenticate(user=self.admin)\n response = self.client.post(\n reverse(\n 'user-credit-tickets',\n kwargs={'pk': user.id},\n ),\n data,\n format='json',\n )\n self.assertEqual(\n response.status_code,\n status.HTTP_200_OK,\n )\n\n self.assertEqual(\n User.objects.get(pk=user.id).tickets,\n 1 + nb_tickets_to_add\n )",
"def test_credit_ticket_as_user(self):\n user = UserFactory()\n self.assertEqual(user.tickets, 1)\n nb_tickets_to_add = 5\n data = {\n 'nb_tickets': nb_tickets_to_add,\n }\n\n self.client.force_authenticate(user=self.user)\n response = self.client.post(\n reverse(\n 'user-credit-tickets',\n kwargs={'pk': user.id},\n ),\n data,\n format='json',\n )\n self.assertEqual(\n response.status_code,\n status.HTTP_403_FORBIDDEN,\n )",
"def test_credit_ticket_not_int(self):\n user = UserFactory()\n self.assertEqual(user.tickets, 1)\n nb_tickets_to_add = 'this is not an int'\n data = {\n 'nb_tickets': nb_tickets_to_add,\n }\n\n self.client.force_authenticate(user=self.admin)\n response = self.client.post(\n reverse(\n 'user-credit-tickets',\n kwargs={'pk': user.id},\n ),\n data,\n format='json',\n )\n self.assertEqual(\n response.status_code,\n status.HTTP_400_BAD_REQUEST,\n )",
"def test_negative_conditions(self):\r\n outline_url = reverse_course_url('course_handler', self.course.id)\r\n # register a non-staff member and try to delete the course branch\r\n non_staff_client, _ = self.create_non_staff_authed_user_client()\r\n response = non_staff_client.delete(outline_url, {}, HTTP_ACCEPT='application/json')\r\n self.assertEqual(response.status_code, 403)",
"def test_not_permitted(self):\r\n test_user_client, test_user = self.create_non_staff_authed_user_client()\r\n CourseEnrollment.enroll(test_user, self.course.id)\r\n response = test_user_client.get(self.orphan_url)\r\n self.assertEqual(response.status_code, 403)\r\n response = test_user_client.delete(self.orphan_url)\r\n self.assertEqual(response.status_code, 403)",
"def allowed(self, user, amount):\n return True",
"async def ticket_add(self, ctx, user: discord.Member):\n guild_settings = await self.config.guild(ctx.guild).all()\n is_admin = await is_admin_or_superior(self.bot, ctx.author) or any(\n [ur.id in guild_settings[\"supportroles\"] for ur in ctx.author.roles]\n )\n must_be_admin = not guild_settings[\"usercanmodify\"]\n\n if not is_admin and must_be_admin:\n await ctx.send(\"Only Administrators can add/remove other users to tickets.\")\n return\n elif not is_admin:\n author = ctx.author\n author_id = author.id\n elif is_admin:\n # Since the author isn't specified, and it's an admin, we need to guess on who\n # the author is\n inverted = {}\n for author_id, tickets in guild_settings[\"created\"].items():\n for ticket in tickets:\n inverted[ticket[\"channel\"]] = author_id\n try:\n author = ctx.guild.get_member(int(inverted[ctx.channel.id]))\n if author:\n author_id = author.id\n else:\n author_id = int(inverted[ctx.channel.id])\n except KeyError:\n author = ctx.author\n author_id = author.id\n\n index = None\n\n if not guild_settings[\"created\"][str(author_id)]:\n await ctx.send(\"You don't have any open tickets.\")\n return\n elif len(guild_settings[\"created\"][str(author_id)]) == 1:\n index = 0\n else:\n for i, ticket in enumerate(guild_settings[\"created\"][str(author_id)]):\n if ticket[\"channel\"] == ctx.channel.id:\n index = i\n break\n\n if index is None:\n await ctx.send(\n \"You have multiple tickets open. \"\n \"Please run this command in the ticket channel you wish to edit.\"\n )\n return\n\n channel = self.bot.get_channel(guild_settings[\"created\"][str(author_id)][index][\"channel\"])\n\n if user.id in guild_settings[\"created\"][str(author_id)][index][\"added\"]:\n await ctx.send(\"That user is already added.\")\n return\n\n adding_is_admin = await is_admin_or_superior(self.bot, user) or any(\n [ur.id in guild_settings[\"supportroles\"] for ur in user.roles]\n )\n\n if adding_is_admin:\n await ctx.send(\"You cannot add a user in support or admin team.\")\n return\n\n channel = self.bot.get_channel(guild_settings[\"created\"][str(author_id)][index][\"channel\"])\n if not channel:\n await ctx.send(\"The ticket channel has been deleted.\")\n return\n\n try:\n await channel.set_permissions(user, send_messages=True, read_messages=True)\n except discord.Forbidden:\n await ctx.send(\n \"The Manage Permissions channel for me has been removed. \"\n \"I am unable to modify this ticket.\"\n )\n return\n\n async with self.config.guild(ctx.guild).created() as created:\n created[str(author_id)][index][\"added\"].append(user.id)\n\n await ctx.send(f\"{user.mention} has been added to the ticket.\")",
"def raise_not_editable(self, viewer):\n if not self.id or viewer.has_perm(\"bookwyrm.create_invites\"):\n return\n raise PermissionDenied()",
"async def ticket_remove(self, ctx, user: discord.Member):\n guild_settings = await self.config.guild(ctx.guild).all()\n is_admin = await is_admin_or_superior(self.bot, ctx.author) or any(\n [ur.id in guild_settings[\"supportroles\"] for ur in ctx.author.roles]\n )\n must_be_admin = not guild_settings[\"usercanmodify\"]\n\n if not is_admin and must_be_admin:\n await ctx.send(\"Only Administrators can add/remove other users to tickets.\")\n return\n elif not is_admin:\n author = ctx.author\n author_id = author.id\n elif is_admin:\n # Since the author isn't specified, and it's an admin, we need to guess on who\n # the author is\n inverted = {}\n for author_id, tickets in guild_settings[\"created\"].items():\n for ticket in tickets:\n inverted[ticket[\"channel\"]] = author_id\n try:\n author = ctx.guild.get_member(int(inverted[ctx.channel.id]))\n if author:\n author_id = author.id\n else:\n author_id = int(inverted[ctx.channel.id])\n except KeyError:\n author = ctx.author\n author_id = author.id\n\n index = None\n\n if not guild_settings[\"created\"][str(author_id)]:\n await ctx.send(\"You don't have any open tickets.\")\n return\n elif len(guild_settings[\"created\"][str(author_id)]) == 1:\n index = 0\n else:\n for i, ticket in enumerate(guild_settings[\"created\"][str(author_id)]):\n if ticket[\"channel\"] == ctx.channel.id:\n index = i\n break\n\n if index is None:\n await ctx.send(\n \"You have multiple tickets open. \"\n \"Please run this command in the ticket channel you wish to edit.\"\n )\n return\n\n if user.id not in guild_settings[\"created\"][str(author_id)][index][\"added\"]:\n await ctx.send(\"That user is not added.\")\n return\n\n removing_is_admin = await is_admin_or_superior(self.bot, user) or any(\n [ur.id in guild_settings[\"supportroles\"] for ur in user.roles]\n )\n\n if removing_is_admin:\n await ctx.send(\"You cannot remove a user in support or admin team.\")\n return\n\n channel = self.bot.get_channel(guild_settings[\"created\"][str(author_id)][index][\"channel\"])\n if not channel:\n await ctx.send(\"The ticket channel has been deleted.\")\n\n try:\n await channel.set_permissions(user, send_messages=False, read_messages=False)\n except discord.Forbidden:\n await ctx.send(\n \"The Manage Permissions channel for me has been removed. \"\n \"I am unable to modify this ticket.\"\n )\n return\n\n async with self.config.guild(ctx.guild).created() as created:\n created[str(author_id)][index][\"added\"].remove(user.id)\n\n await ctx.send(f\"{user.mention} has been removed from the ticket.\")",
"def cant(user, action):\n\n return not can(user, action)",
"def check_credit(self):\n self.ensure_one()\n getattr(self, '%s_check_credit' % self.provider, lambda: None)()",
"def user_allow_credit(self):\n try:\n return self.user.creditAllowed()\n except AttributeError:\n return False",
"def disallow_handler(update, _):\n global TEMPORARILY_ALLOWED\n user_id = update.message.chat.id\n if user_id == ADMIN_ID:\n TEMPORARILY_ALLOWED = False\n update.message.reply_text(\"Temprarily allowed disabled!\")",
"def get_everyone_denied(self):",
"def test_can_not_cancel_past_block(self):\n date = datetime.now().replace(minute=0, second=0, microsecond=0) - timedelta(hours=1)\n\n response = self.client.post(\n reverse('bookings', kwargs={'facility': 'g'}), {'cancel': str(date.timestamp())})\n\n context = response.context\n bookings = context[\"bookings\"]\n\n self.assertEqual(response.status_code, 403)\n self.assertEqual(context[\"quota\"], settings.BOOKINGS_QUOTA)",
"def write_authorize(cls, user, obj):\n if not obj.delivery.deadline.assignment_group.is_examiner(user):\n raise PermissionDenied()",
"def test_requester_is_no_admin(self) -> None:\n\n channel = self.make_request(\n \"GET\",\n self.url,\n access_token=self.other_user_tok,\n )\n\n self.assertEqual(403, channel.code, msg=channel.json_body)\n self.assertEqual(Codes.FORBIDDEN, channel.json_body[\"errcode\"])",
"def test_requester_is_no_admin(self) -> None:\n\n channel = self.make_request(\n \"GET\",\n self.url,\n access_token=self.other_user_tok,\n )\n\n self.assertEqual(403, channel.code, msg=channel.json_body)\n self.assertEqual(Codes.FORBIDDEN, channel.json_body[\"errcode\"])",
"def test_deny_pending_payment(self):\n pass",
"def check_admin():\n\tif not current_user.is_admin:\n\t\tabort(403)",
"def check_admin():\r\n if not current_user.is_admin:\r\n abort(403)",
"def no_reason(message, db):\n #message.reply(Strings['GRANT_EXAMPLE'].format(db))\n try:\n hf.grant(message, db.lower(), \"[EXTENDING ACCESS TIME]\", False)\n except Exception as e:\n message._client.send_message(errors_channel, \"```{}```\".format(e))",
"def test_buyTicket_NotForSale():\n old_venue_balance = testVenue.wallet\n assert not testUser2.buyTicket(testTicket2)\n assert testTicket2 not in testUser2.inventory\n assert not testTicket1.for_sale\n assert testUser2.wallet == 500\n assert testVenue.wallet == old_venue_balance",
"def test_can_not_cancel_current_block(self):\n date = datetime.now().replace(minute=0, second=0, microsecond=0)\n\n response = self.client.post(\n reverse('bookings', kwargs={'facility': 'g'}), {'cancel': str(date.timestamp())})\n\n context = response.context\n bookings = context[\"bookings\"]\n\n self.assertEqual(response.status_code, 403)\n self.assertEqual(context[\"quota\"], settings.BOOKINGS_QUOTA)",
"def temporarily_allow_handler(update, _):\n global TEMPORARILY_ALLOWED\n user_id = update.message.chat.id\n if user_id == ADMIN_ID:\n TEMPORARILY_ALLOWED = True\n update.message.reply_text(\"Temprarily allowed!\")",
"def no_reason(message, db):\n #message.reply(Strings['GRANT_EXAMPLE'].format(db))\n try:\n hf.grant(message, db.lower(), \"[EXTENDING ACCESS TIME]\", True)\n except Exception as e:\n message._client.send_message(errors_channel, \"```{}```\".format(e))",
"def admin_reject(user):\n if user.comments in (None or \"\"):\n return\n\n subject = \"ECE/CIS Account - Account Application rejected for %s\" % user.username\n application = \"https://www.eecis.udel.edu/NewAccount/\"\n helprequest = \"https://www.eecis.udel.edu/service\"\n sponsor = \"%[email protected]\" % user.sponsor\n \n message = \"Your ECE/CIS Account has been rejected by ECE/CIS faculty adminstrators.\\n\" % user.sponsor\n message += \"The reason given for rejection was:\\n\\n%s\\n\\n\" % user.comments\n message += \"You may re-apply with corrected information at %s\\n\" % application\n message += \"Please don't reply to this email. If have any questions, please \\n\"\n message += \"please post a ticket as an outsider at %s\" % helprequest\n message += \"-- ECE\\CIS Labstaff\"\n\n\n send('[email protected]', 'ECE/CIS Account System', \\\n [user.email, sponsor], subject, message, MAILHOST)",
"def check_admin():\n if not current_user.is_admin:\n abort(403)",
"def check_admin():\n if not current_user.is_admin:\n abort(403)",
"def test_requester_is_no_admin(self) -> None:\n\n channel = self.make_request(\n \"DELETE\",\n self.url,\n access_token=self.other_user_tok,\n )\n\n self.assertEqual(403, channel.code, msg=channel.json_body)\n self.assertEqual(Codes.FORBIDDEN, channel.json_body[\"errcode\"])"
] | [
"0.69149536",
"0.65459514",
"0.63113207",
"0.63042814",
"0.617169",
"0.6162346",
"0.60777926",
"0.60088956",
"0.5990186",
"0.5983708",
"0.59674823",
"0.59468544",
"0.59359336",
"0.59180915",
"0.5916562",
"0.58903867",
"0.58831507",
"0.58831507",
"0.58763945",
"0.58660877",
"0.5841624",
"0.5831001",
"0.5813524",
"0.5813499",
"0.58131415",
"0.5809147",
"0.57989734",
"0.57705516",
"0.57705516",
"0.57644874"
] | 0.73568875 | 0 |
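Editor's aside on the record above: the negative-ticket test expects the endpoint to answer with HTTP 400. A minimal, hypothetical server-side check that such a test would exercise (class and field names are invented for illustration, not taken from the dataset) could look like:

    from rest_framework import serializers

    class CreditTicketsSerializer(serializers.Serializer):
        # min_value=1 makes DRF reject zero or negative credits with HTTP 400
        nb_tickets = serializers.IntegerField(min_value=1)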
Returns a value in a nested associative structure, where `ks` is a sequence of keys. Returns `None` if the key is not present, or the `default` value if supplied. | def get_in(d, ks, default=None):
*ks_, last = ks
d_ = d
for k in ks_:
if type(d_) != dict or k not in d_:
return default
d_ = d_[k]
if type(d_) == dict:
return d_.get(last, default)
return default | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def do_get(d, *ks, **kwargs):\n try:\n res = reduce (lambda acc, k: acc[k], ks, d)\n except (KeyError, TypeError):\n if \"default\" in kwargs:\n return kwargs[\"default\"]\n else:\n t, v, tb = sys.exc_info()\n if t == KeyError:\n msg = \"nested keys {} not found in {}\".format(ks, d)\n else:\n msg = \"nesting of keys {} too is too deep for {}\".format(ks, d)\n raise KeyError, msg, tb\n else:\n return res",
"def _get_default(ddict, key, default):\n if ddict is None or key not in ddict or ddict[key] is None:\n return default\n return ddict[key]",
"def get(self, key, default=None):\n if self.root is not None:\n res = self._get(key, self.root)\n if res:\n return res\n else:\n return default\n return default",
"def get(dd, kk, default=0):\n if kk in dd.keys():\n return dd[kk]\n else:\n return default",
"def get(self, key: str, default=None):\n value = self._get(key)\n\n if value is None:\n return self._parent.get(key, default) if self._parent else default\n\n return value",
"def get(self, key, default=None):\n def find(found_item, _):\n \"\"\" This is the closer function which will be passed to find by key function , if key found than return the value \n otherwise return blanck\"\"\"\n if found_item:\n return found_item[1]\n else:\n return default\n\n return self._find_by_key(key, find)",
"def get(self, key, default=None):\n try:\n val = self[key]\n except KeyError:\n return default\n if val == []:\n return default\n return val",
"def get(self, key: str, default=None) -> Any:\n try:\n return self[key][0]\n except KeyError:\n return default",
"def value(\n self, key: _K = 0, default: t.Optional[object] = None\n ) -> t.Any:\n try:\n index = self.index(key)\n except (IndexError, KeyError):\n return default\n else:\n return self[index]",
"def get(self, keyname: str, default: Optional[Any] = None) -> Any:\n try:\n return self[keyname]\n except KeyError:\n return default",
"def safe_get(root_dict, list_keys, default_value=None):\n if root_dict is None:\n return default_value\n\n if list_keys is None or len(list_keys) == 0:\n return default_value\n\n dict_types = [dict, defaultdict]\n\n curr_obj = root_dict\n for k in list_keys:\n if type(curr_obj) in dict_types and k in curr_obj:\n curr_obj = curr_obj[k]\n else:\n curr_obj = default_value\n break\n\n return curr_obj",
"def get(self, key, default=None):\n try:\n return self.__getitem__(key)\n except ValueError:\n if default is not None:\n return default\n else:\n raise",
"def lookup(my_dict, my_key, default_value=None):\n if my_key in my_dict:\n return my_dict[my_key]\n else:\n return default_value",
"def get(self, key: Any, default: Optional[Any] = None) -> Any:\n try:\n return self[key]\n except (KeyError, ValueError, IndexError):\n return default",
"def get_default(section, option=\"\"):\n\tif not option:\n\t\tif defaults.has_key(section):\n\t\t\treturn defaults[section]\n\telse:\n\t\tif defaults.has_key(section):\n\t\t\tif defaults[section].has_key(option):\n\t\t\t\treturn defaults[section][option]\n\treturn None",
"def search_up(d, k, *default):\n if k in d:\n return d[k]\n if d.parent is d:\n if len(default):\n return default[0]\n else:\n raise AttributeError(k)\n else:\n return search_up(d.parent, k, *default)",
"def get_value(key, dic, default_dic):\n\n v = dic.get(key)\n\n if v is None:\n if key in default_dic:\n v = default_dic.get(key)\n else:\n print_log_msg(\n 'ERROR', 'get_param', 'key not in default_dic', key\n )\n\n return v",
"def get(self, key, default=None):\n try:\n return self[key]\n except KeyError:\n return default",
"def get(self, key, default=None):\r\n try:\r\n return self[key]\r\n except KeyError:\r\n return default",
"def peek(self, key, default=None):\n try:\n return iter(self.get(key, [])).next()\n except StopIteration:\n return default",
"def search_value(d, key, default=None):\n stack = [iter(d.items())]\n while stack:\n for k, v in stack[-1]:\n if isinstance(v, dict):\n stack.append(iter(v.items()))\n break\n elif k == key:\n return v\n else:\n stack.pop()\n return default",
"def get(self, k, default=None):\n return super(OrderedMultiDict, self).get(k, [default])[-1]",
"def get(self, key, default=None):\n return self[key] if key in self else default",
"def setdefault(self, k, default=_MISSING):\n if not super(OrderedMultiDict, self).__contains__(k):\n self[k] = None if default is _MISSING else default\n return self[k]",
"def getKey(kwargs,key,default=None):\n value = kwarg.get(key,default)\n if value==None:\n return default\n return value",
"def get_in(self, key=None, default=None):\n if key is None:\n raise KeyError(\"'Dict' attribute key can't be empty\")\n key_list = key.strip().split('.')\n data = self\n size = len(key_list)\n for index, k in enumerate(key_list):\n data = data.get(k)\n if index < size-1 and not isinstance(data, dict):\n return default\n return data",
"def deep_get(nested_dict, keys, default=None):\n if not isinstance(nested_dict, dict):\n raise exception.SysinvException(_(\n \"Expected a dictionary, cannot get keys {}.\".format(keys)))\n\n def _reducer(d, key):\n if isinstance(d, dict):\n return d.get(key, default)\n return default\n\n return functools.reduce(_reducer, keys, nested_dict)",
"def getFromDefaults(inDict, inKey, inLastDefault, *args):\n\n if inKey in inDict:\n return inDict[inKey]\n\n for defaultDict in args:\n if inKey in defaultDict:\n return defaultDict[inKey]\n\n return inLastDefault",
"def get(self, target_key: str, default_value: Any = None) -> Any:\n _keys = target_key.split('.')\n iteration = len(_keys)\n if iteration > 1:\n result = None\n counter = 1\n for key_holder in _keys:\n if counter == 1:\n result = self._data.get(key_holder, {})\n elif counter < iteration:\n result = result.get(key_holder, {})\n else:\n result = result.get(key_holder, default_value)\n counter += 1\n return result\n else:\n return self._data.get(_keys[0], default_value)",
"def dict_default(x, key=None):\n if isinstance(x, NoDefault):\n if key is None:\n raise KeyError()\n else:\n raise KeyError(key)\n elif isinstance(x, CallIfDefault):\n return x()\n else:\n return x"
] | [
"0.66533864",
"0.6498185",
"0.63302475",
"0.6233764",
"0.62260294",
"0.6200173",
"0.6188591",
"0.6064579",
"0.604745",
"0.6032425",
"0.6025601",
"0.60055006",
"0.6003231",
"0.59860516",
"0.59845686",
"0.5940937",
"0.59354126",
"0.5911613",
"0.5899532",
"0.5888177",
"0.5871198",
"0.58551186",
"0.5841778",
"0.5826931",
"0.5815506",
"0.57729095",
"0.5767121",
"0.5760459",
"0.57500494",
"0.5739984"
] | 0.7002469 | 0 |
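A brief usage sketch (editor's illustration; the `config` dictionary and keys are made up) of the `get_in` helper documented in the record above:

    # get_in walks the key sequence and falls back to the default on any miss
    config = {'server': {'http': {'port': 8080}}}
    get_in(config, ['server', 'http', 'port'])          # -> 8080
    get_in(config, ['server', 'https', 'port'])         # -> None
    get_in(config, ['server', 'https', 'port'], 443)    # -> 443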
Associates a value in a nested associative structure, where `ks` is a sequence of keys and `v` is the new value, and returns a nested structure. If any levels do not exist, `dict`s will be created. | def assoc_in(d, ks, v):
*ks_, last = ks
d_ = d
for k in ks_:
if k not in d_:
d_[k] = {}
d_ = d_[k]
d_[last] = v
return d | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def nested_set(d: t.Dict, keys: t.Sequence[str], value: t.Any) -> t.Dict:\n if not keys:\n return d\n\n if len(keys) == 1:\n d[keys[0]] = value\n return d\n\n subd = d\n for key in keys[:-1]:\n if key not in subd:\n subd = subd.setdefault(key, {})\n else:\n subd = subd[key]\n\n subd[keys[-1]] = value\n return d",
"def insert_to_dict(dc: dict, k: str, v: dict):\n if k not in dc:\n dc[k] = list()\n dc[k].append(v)",
"def deep_update(d, u):\n for k, v in six.iteritems(u):\n dv = d.get(k, {})\n if not isinstance(dv, collections.abc.Mapping):\n d[k] = v\n elif isinstance(v, collections.abc.Mapping):\n d[k] = deep_update(dv, v)\n else:\n d[k] = v\n return d",
"def add_or_update_key_in_dict(dictionary, keys_list, level=-1, value=None):\n dictionary = deepcopy(dictionary) # make a copy of the dictionary to avoid changing the state of the original dictionary\n\n is_terminal_key = False\n\n if level == len(keys_list) - 1 or level == -1:\n is_terminal_key = True\n\n if not is_terminal_key and value:\n raise ValueError('Value cannot be set on non terminal keys')\n\n '''get the reference to the dictionary that holds the key within the nesting'''\n current_location = get_object_in_dict(dictionary, keys_list[:level])\n key_index_info = extract_key_and_index(keys_list[level])\n parent_key_info = extract_key_and_index(keys_list[0])\n\n key_type = key_index_info[0]\n key_name = key_index_info[1]\n\n if key_type == 'key':\n if is_terminal_key:\n current_location[key_name] = value\n if parent_key_info[0] == 'simple_list' or parent_key_info[0] == 'compound_simple_list':\n current_location[key_name] = value.split(';') if value else []\n else:\n '''if key is not a terminal key then it must be a dictionary'''\n current_location[key_name] = {}\n elif key_type == 'simple_list':\n if level == 0 or level == -1:\n if is_terminal_key:\n current_location[key_name] = value.split(';') if value else []\n else:\n current_location[key_name] = {}\n else:\n raise KeyError('* should be on the top most key.')\n\n elif key_type == 'compound_list' or key_type == 'compound_simple_list':\n key_index = int(key_index_info[2])\n\n '''if the dictionary doesn't contain the key then initialize it'''\n if key_name not in current_location:\n current_location[key_name] = []\n\n curr_list = current_location[key_name]\n if key_index < len(curr_list):\n current_location[key_name][level] = value if is_terminal_key else {}\n else:\n current_list = current_location[key_name]\n '''if the index exceeds the size of compound list then expand the list.'''\n for index in range(len(current_list), key_index):\n current_list.append(None if is_terminal_key else {})\n current_list.append(value if is_terminal_key else {})\n\n return dictionary",
"def createDict(given_dict, words, value):\n\tresult_dict = given_dict\n\t# base case: if list is empty, add the value to the dict\n\tif not words:\n\t\tif '$value' in result_dict:\n\t\t\tresult_dict['$value'].append(value)\n\t\telse:\n\t\t\tresult_dict['$value'] = [value]\n\telse:\n\t\t# if the first word is already in dict, traverse through treemap with that word\n\t\t# call createDict with the tail of the words list\n\t\tif words[0] in result_dict:\n\t\t\tresult_dict[words[0]] = createDict(result_dict[words[0]], words[1:], value)\n\t\telse:\n\t\t\t# if the first word is not in the dict, create a new path\n\t\t\t# call createDict with the tail of the words list\n\t\t\tresult_dict[words[0]] = createDict({}, words[1:], value)\n\n\treturn result_dict",
"def _set_by_path(dic, keys, value, create_missing=True):\n d = dic\n i = 0\n n_key = len(keys) - 1\n while i < n_key:\n k = keys[i]\n if isinstance(k, int):\n assert isinstance(d, list), \"Internal Error: %s is Expected as a list for %s.\" % (d, k)\n\n while len(d) <= k:\n d.insert(k, {})\n d = d[k]\n elif k in d:\n d = d[k]\n elif create_missing:\n next_key = keys[i + 1]\n if isinstance(next_key, int):\n if isinstance(d, list):\n d.insert(k, [])\n else:\n d[k] = []\n else:\n d[k] = {}\n d = d[k]\n else:\n return dic\n i += 1\n\n if isinstance(d, list) and keys[-1] >= len(d):\n d.insert(keys[-1], value)\n else:\n d[keys[-1]] = value\n return dic",
"def nested_set(data, keys, value):\n for key in keys[:-1]:\n data = data.setdefault(key, {})\n data[keys[-1]] = value",
"def recursive_mapping_update(d, u):\n if u is not None:\n for k, v in u.items():\n if isinstance(v, collections.Mapping):\n r = recursive_mapping_update(d.get(k, {}), v)\n d[k] = r\n else:\n d[k] = u[k]\n return d",
"def recursive_update_cfg(d, u):\n for k, v in u.iteritems():\n if isinstance(v, collections.Mapping):\n r = update(d.get(k, {}), v)\n d[k] = r\n else:\n d[k] = u[k]\n return d",
"def _update_dict(full_key, val, d):\n for vk, vv in val.items():\n # The key of value is not in d.\n # if vk not in d:\n # # Exit.\n # raise ValueError(\"{}.{} does not exist in options\".format(full_key, vk))\n # else: # The key of val is in d.\n if isinstance(vv, list): # The value of the key is list.\n d[vk] = np.array(vv) # Store it as a numpy array.\n elif isinstance(vv, dict): # The value of the key is dictionary.\n _update_dict(full_key + \".\" + vk, vv, d[vk]) # Call the function again.\n else: # At the leaf of the dictionary.\n d[vk] = vv",
"def flatten(d: MutableMapping, sep: str = \".\", parent_key: str = \"\") -> dict:\n items = []\n for k, v in d.items():\n new_key = parent_key + sep + k if parent_key else k\n if isinstance(v, MutableMapping):\n items.extend(flatten(v, sep=sep, parent_key=new_key).items())\n else:\n items.append((new_key, v))\n return dict(items)",
"def update(d, u):\n\n for k, v in u.items():\n if isinstance(v, collections.Mapping):\n d[k] = update(d.get(k, {}), v)\n else:\n d[k] = v\n return d",
"def update(d, u):\n for k, v in u.items():\n if isinstance(v, collections.Mapping):\n d[k] = update(d.get(k, {}), v)\n else:\n d[k] = v\n return d",
"def nested_set(dictionary: dict, keys: list, value):\n nested_dict = dictionary\n for key in keys[:-1]:\n nested_dict = nested_dict[key]\n nested_dict[keys[-1]] = value\n return dictionary",
"def dict_deep_update(d, u, handlers=None):\n if handlers is None:\n handlers = {}\n for k, v in u.items():\n if isinstance(v, collections.Mapping):\n r = dict_deep_update(d.get(k, {}), v, handlers)\n d[k] = r\n elif k in d:\n h = handlers.get(type(v), None)\n if h is not None:\n d[k] = h(d[k], u[k])\n else:\n d[k] = u[k]\n else:\n d[k] = u[k]\n return d",
"def nest_dict(dct, keys):\n nested_dict = dct\n for key in reversed(keys):\n nested_dict = RecursiveDict({key: nested_dict})\n return nested_dict",
"def nested_dict():\n return defaultdict(nested_dict)",
"def create_level(dict, path_list, value):\n\tif len(path_list) == 0:\n\t\treturn\n\n\tfor k in path_list[:-1]:\n\t\tdict = dict[k]\n\t\n\tdict[path_list[-1]] = value",
"def add_or_append_dict_entry(main_dict, main_key, sub_key, value):\n # type: (dict, str, str, Any) -> dict\n if main_key not in main_dict:\n main_dict[main_key] = dict()\n if sub_key not in main_dict[main_key]:\n main_dict[main_key][sub_key] = [value]\n else:\n main_dict[main_key][sub_key].append(value)\n return main_dict",
"def set_nested_item(data_dict: dict, key_list: tuple or list, value):\r\n reduce(getitem, key_list[:-1], data_dict)[key_list[-1]] = value\r\n return data_dict",
"def deep_merge(d, u):\n stack = [(d, u)]\n while stack:\n d, u = stack.pop(0)\n for k, v in u.items():\n if not isinstance(v, collections.abc.Mapping):\n d[k] = v\n else:\n dv = d.setdefault(k, {})\n if not isinstance(dv, collections.abc.Mapping):\n d[k] = v\n else:\n stack.append((dv, v))",
"def update(d, u):\n for k, v in u.items():\n if isinstance(v, Mapping):\n d[k] = update(d.get(k, {}), v)\n else:\n d[k] = v\n return d",
"def flatten_dict(\n d, parent_key=\"\", sep=\".\", ignore_under_prefixed=True, mark_value=True\n):\n items = {}\n for k in d:\n if ignore_under_prefixed and k.startswith(\"__\"):\n continue\n v = d[k]\n if mark_value and k.startswith(\"_\") and not k.startswith(\"__\"):\n v = MarkValue(repr(v))\n\n new_key = sep.join((parent_key, k)) if parent_key else k\n if isinstance(v, collections.MutableMapping):\n items.update(\n flatten_dict(\n v, new_key, sep=sep, ignore_under_prefixed=True, mark_value=True\n )\n )\n else:\n items[new_key] = v\n\n return items",
"def build_dct(dic, keys, value):\n key = keys.pop(0)\n if len(keys):\n dic.setdefault(key, {})\n build_dct(dic[key], keys, value)\n else:\n # Transform cookbook default attribute strings into proper booleans\n if value == \"false\":\n value = False\n elif value == \"true\":\n value = True\n # It's a leaf, assign value\n dic[key] = value",
"def extend(d, k, v):\n\tn = d.copy()\n\tn[k] = v\n\treturn n",
"def FlattenDictionary(value, keys=[]):\n result = {}\n if type(value) is dict:\n for key in value:\n result.update(FlattenDictionary(value[key], keys + [key]))\n return result\n else:\n key = '.'.join(keys)\n return {key: value}",
"def flatten_dict(d, parent_key=\"\", sep=\"_\"):\n items = []\n for k, v in d.items():\n new_key = parent_key + sep + k if parent_key else k\n if isinstance(v, MutableMapping):\n items.extend(flatten_dict(v, new_key, sep=sep).items())\n else:\n items.append((new_key, v))\n return dict(items)",
"def _add_val_to_dict(self, d, k, v):\n if k in list(d):\n d[k] += v\n else:\n d[k] = [v]\n return d",
"def flatten_dict(d, sep=' ', parent_key=''):\n items = []\n for k, v in d.items():\n new_key = parent_key + sep + k if parent_key else k\n if isinstance(v, collections.MutableMapping):\n items.extend(flatten_dict(v, sep=sep, parent_key=new_key).items())\n else:\n items.append((new_key, v))\n return dict(items)",
"def create_recursive_dot_dict(data: Dict[str, Any], cls=DotDict) -> Union[DotDict, DotDefaultDict]:\n res = cls()\n for k, v in data.items():\n k = k.split(\".\")\n target = res\n for i in range(0, len(k)-1):\n t2 = target.get(k[i])\n if t2 is None:\n t2 = cls()\n target[k[i]] = t2\n\n assert isinstance(t2, cls), f\"Trying to overwrite key {'.'.join(k[:i+1])}\"\n target = t2\n\n assert isinstance(target, cls), f\"Trying to overwrite key {'.'.join(k)}\"\n target[k[-1]] = v\n return res"
] | [
"0.6572794",
"0.63907355",
"0.6336366",
"0.62305176",
"0.6214868",
"0.61555994",
"0.59882414",
"0.59860635",
"0.5962133",
"0.590882",
"0.58780855",
"0.5875496",
"0.5871324",
"0.58609086",
"0.58548045",
"0.582082",
"0.5820722",
"0.5794419",
"0.5784228",
"0.5780479",
"0.5768079",
"0.5665515",
"0.56541914",
"0.5653878",
"0.5646067",
"0.5603715",
"0.55895406",
"0.55769056",
"0.5568427",
"0.5537123"
] | 0.70314556 | 0 |
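A matching sketch (editor's illustration with made-up keys) for the `assoc_in` helper above; note that, unlike its Clojure namesake, this version mutates the input dict in place and returns it:

    settings = {}
    assoc_in(settings, ['db', 'pool', 'size'], 10)
    # settings is now {'db': {'pool': {'size': 10}}}
    assoc_in(settings, ['db', 'pool', 'timeout'], 30)
    # existing levels are reused: {'db': {'pool': {'size': 10, 'timeout': 30}}}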
Print a `middleware_name` with a right arrow if `_VERBOSE_MODE` is on. | def _print_inwards(middleware_name):
if _VERBOSE_MODE:
print('{}--->'.format(middleware_name)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _print_outwards(middleware_name):\n if _VERBOSE_MODE:\n print('<---{}'.format(middleware_name))",
"def named(name):\n\n def new_annotate(mware):\n def new_middleware(handler):\n\n new_handler = mware(handler)\n\n def verbose_handler(ctx):\n _print_inwards(name)\n\n new_ctx = new_handler(ctx)\n\n _print_outwards(name)\n\n return new_ctx\n\n return verbose_handler\n\n return new_middleware\n\n return new_annotate",
"def _verboseHeader(self):\n\n if verbose:\n name = self._getName()\n methodName = self._getMethodName()\n\n title = f\"Running {name}.{methodName}\"\n print('{}\\n{}'.format(title, '-' * len(title)))",
"def v_print(msg):\n if (VERBOSE == 1):\n print(msg)",
"def showsession():\n for property,value in middleware.__dict__.items():\n if property.startswith('_') and not callable(property): continue\n if property in ['ixn', 'portMgmtObj', 'fileMgmtObj', 'protocolObj', 'statsObj']: continue\n print('\\t{0}: {1}'.format(property, value))",
"def show(self, *args, prefix=None):\n if prefix is None:\n prefix = '$'\n if self.verbose >= 2:\n print(prefix, *args)",
"def print_debug(message):\n if current_app.debug:\n print(message)",
"def verbose_print(msg: str = '') -> None:\n assert isinstance(msg, str)\n if __verbose:\n print(msg)",
"def cli(debug):\n print(f\"Debug mode is {'on' if debug else 'off'}\")",
"def DEBUG(*args, **kwargs):\n if __name__ != \"__main__\":\n print(*args, **kwargs)",
"def debug_print(self, *args, **kwargs):\n print(\"APP_DEBUG_PRINT\", args, kwargs)",
"def debug(string):\n if verbose:\n print string\n return",
"def show_details(name, f, is_partial=False):\n print '%s:' % name\n print '\\tobject:', f\n if not is_partial:\n print '\\t__name__:', f.__name__\n print '\\t__doc__', repr(f.__doc__)\n if is_partial:\n print '\\tfunc:', f.func\n print '\\targs:', f.args\n print '\\tkeywords:', f.keywords\n return",
"def debug():\n def _debug(x):\n return e.String(x.as_source())\n yield (\"(λ any . str)\", _debug)",
"def vprint(string):\n global verbose\n if verbose:\n print(string)",
"def stk_logger(context, msg: str):\n if not context:\n logger.info(msg)\n return\n uc = context.use_case()\n if (msg[:2] != \"->\") or (uc == \"\"):\n logger.info(msg)\n return\n logger.info(f\"-> {msg[2:]} uc={uc}\")\n return",
"def debug(s):\n if app.config['DEBUG']:\n print(s)",
"def debug(s):\n if app.config['DEBUG']:\n print(s)",
"def vprint(msg):\n if defaults.verbose:\n print(msg)",
"def verbose_print(verbose, print_function=None):\n\n if verbose:\n return print_function or print\n else:\n def vprint(*args, **kwars):\n pass\n return vprint",
"def __debugInfo(self, msg):\n\t\tif self.verbosity:\n\t\t\tprint(stylize(\"[*] DEBUG: {}\".format(msg), colored.fg(\"wheat_1\")))",
"def debug(msg):\n if settings.DEBUG:\n print \"DEBUG: cli.%(msg)s\" % locals()",
"def _verbose(self,text):\n if self.verbose:\n print(text)",
"def write_debug_info(self):\n #path = self.request.uri.split('?')[0]\n #method = path.split('/')[-1]\n \n self.write(\"Handler: \" + str(self.__class__.__name__)+\"<br>\")\n self.write(\"<hr>\")\n self.write(str(dir(self.request)))\n self.write(\"<br><hr>\")\n self.write(\"query_arguments:\" + str(self.request.query_arguments))\n self.write(\"<br>\")\n self.write(\"uri:\" + self.uri)\n self.write(\"<br>\")\n self.write(\"path:\" + self.path)\n self.write(\"<br>\")\n self.write(\"method to call: \" + self.request.method.lower() + \"_\" + self.method)\n self.write(\"<hr>\")\n self.write(\"request method: \" + self.request.method)\n self.write(\"<hr>\")\n self.write(\"request headers: \" + str(self.request.headers))\n self.write(\"<hr>\")\n self.flush()",
"def print_debug(context: str = \"\") -> None:\r\n print(context)\r\n print(\"This is the current board\")\r\n print(example)\r\n print(\"This is the conflict space\")\r\n print(conflict_space)\r\n print(\"This is the safeboard\")\r\n print(safeboard)",
"def vprint(*args, **kwargs ):\n\n forceprint = False\n for key in kwargs:\n if key == \"forceprint\":\n forceprint =kwargs[key]\n \n line = ''\n if debug or forceprint : \n for arg in args:\n line += str(arg) +\" \"\n log = open(exepath + 'pyframe.log', 'a') \n log.write(line + \"\\n\")\n log.close() \n print line",
"def _vprint(self, string):\n if self.verbose:\n print(string)",
"def verbose_print(text,verbose_level):\n if Args.verbose >= verbose_level:\n print '\\t' * (verbose_level-1) + text",
"def debug_print(debug_data):\n if DEBUG_MODE == \"true\":\n pp.pprint(debug_data)",
"def main(debug):\n click.echo('Debug mode is {{}}'.format(debug))"
] | [
"0.8443991",
"0.59464276",
"0.5638737",
"0.560429",
"0.5601479",
"0.5314763",
"0.5303445",
"0.5292417",
"0.528729",
"0.5279358",
"0.52545273",
"0.5227158",
"0.5212542",
"0.5212174",
"0.52042764",
"0.51971495",
"0.5194814",
"0.5194814",
"0.5190525",
"0.5150018",
"0.5143002",
"0.5138631",
"0.5076536",
"0.5073388",
"0.5059901",
"0.50518113",
"0.5045145",
"0.50286466",
"0.5023738",
"0.5022795"
] | 0.85856086 | 0 |
Print a `middleware_name` with a left arrow if `_VERBOSE_MODE` is on. | def _print_outwards(middleware_name):
if _VERBOSE_MODE:
print('<---{}'.format(middleware_name)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _print_inwards(middleware_name):\n if _VERBOSE_MODE:\n print('{}--->'.format(middleware_name))",
"def named(name):\n\n def new_annotate(mware):\n def new_middleware(handler):\n\n new_handler = mware(handler)\n\n def verbose_handler(ctx):\n _print_inwards(name)\n\n new_ctx = new_handler(ctx)\n\n _print_outwards(name)\n\n return new_ctx\n\n return verbose_handler\n\n return new_middleware\n\n return new_annotate",
"def _verboseHeader(self):\n\n if verbose:\n name = self._getName()\n methodName = self._getMethodName()\n\n title = f\"Running {name}.{methodName}\"\n print('{}\\n{}'.format(title, '-' * len(title)))",
"def showsession():\n for property,value in middleware.__dict__.items():\n if property.startswith('_') and not callable(property): continue\n if property in ['ixn', 'portMgmtObj', 'fileMgmtObj', 'protocolObj', 'statsObj']: continue\n print('\\t{0}: {1}'.format(property, value))",
"def v_print(msg):\n if (VERBOSE == 1):\n print(msg)",
"def stk_logger(context, msg: str):\n if not context:\n logger.info(msg)\n return\n uc = context.use_case()\n if (msg[:2] != \"->\") or (uc == \"\"):\n logger.info(msg)\n return\n logger.info(f\"-> {msg[2:]} uc={uc}\")\n return",
"def show(self, *args, prefix=None):\n if prefix is None:\n prefix = '$'\n if self.verbose >= 2:\n print(prefix, *args)",
"def DEBUG(*args, **kwargs):\n if __name__ != \"__main__\":\n print(*args, **kwargs)",
"def debug():\n def _debug(x):\n return e.String(x.as_source())\n yield (\"(λ any . str)\", _debug)",
"def print_debug(message):\n if current_app.debug:\n print(message)",
"def cli(debug):\n print(f\"Debug mode is {'on' if debug else 'off'}\")",
"def debug(msg):\n if settings.DEBUG:\n print \"DEBUG: cli.%(msg)s\" % locals()",
"def debug_print(self, *args, **kwargs):\n print(\"APP_DEBUG_PRINT\", args, kwargs)",
"def debug(s):\n if app.config['DEBUG']:\n print(s)",
"def debug(s):\n if app.config['DEBUG']:\n print(s)",
"def debug(string):\n if verbose:\n print string\n return",
"def debugargs(prefix='***'):\n def debug(func):\n @wraps(func)\n def wrapper(*args, **kwargs):\n print(prefix + ': ' + func.__qualname__)\n return func(*args, **kwargs)\n return wrapper\n return debug",
"def verbose_print(msg: str = '') -> None:\n assert isinstance(msg, str)\n if __verbose:\n print(msg)",
"def main(debug):\n click.echo('Debug mode is {{}}'.format(debug))",
"def __debugInfo(self, msg):\n\t\tif self.verbosity:\n\t\t\tprint(stylize(\"[*] DEBUG: {}\".format(msg), colored.fg(\"wheat_1\")))",
"def print_debug(msg):\n if IS_DEBUG:\n print(msg)",
"def print_debug(context: str = \"\") -> None:\r\n print(context)\r\n print(\"This is the current board\")\r\n print(example)\r\n print(\"This is the conflict space\")\r\n print(conflict_space)\r\n print(\"This is the safeboard\")\r\n print(safeboard)",
"def vprint(msg):\n if defaults.verbose:\n print(msg)",
"def vprint(string):\n global verbose\n if verbose:\n print(string)",
"def vprint(*args, **kwargs ):\n\n forceprint = False\n for key in kwargs:\n if key == \"forceprint\":\n forceprint =kwargs[key]\n \n line = ''\n if debug or forceprint : \n for arg in args:\n line += str(arg) +\" \"\n log = open(exepath + 'pyframe.log', 'a') \n log.write(line + \"\\n\")\n log.close() \n print line",
"def show_details(name, f, is_partial=False):\n print '%s:' % name\n print '\\tobject:', f\n if not is_partial:\n print '\\t__name__:', f.__name__\n print '\\t__doc__', repr(f.__doc__)\n if is_partial:\n print '\\tfunc:', f.func\n print '\\targs:', f.args\n print '\\tkeywords:', f.keywords\n return",
"def create_print(prefix):\n def inner(*args):\n print prefix + str(args)\n return inner",
"def debug_print(msg):\n\n if not MKL.MKL_DEBUG:\n return\n else:\n print(msg)",
"def debug_print(debug_data):\n if DEBUG_MODE == \"true\":\n pp.pprint(debug_data)",
"def write_debug_info(self):\n #path = self.request.uri.split('?')[0]\n #method = path.split('/')[-1]\n \n self.write(\"Handler: \" + str(self.__class__.__name__)+\"<br>\")\n self.write(\"<hr>\")\n self.write(str(dir(self.request)))\n self.write(\"<br><hr>\")\n self.write(\"query_arguments:\" + str(self.request.query_arguments))\n self.write(\"<br>\")\n self.write(\"uri:\" + self.uri)\n self.write(\"<br>\")\n self.write(\"path:\" + self.path)\n self.write(\"<br>\")\n self.write(\"method to call: \" + self.request.method.lower() + \"_\" + self.method)\n self.write(\"<hr>\")\n self.write(\"request method: \" + self.request.method)\n self.write(\"<hr>\")\n self.write(\"request headers: \" + str(self.request.headers))\n self.write(\"<hr>\")\n self.flush()"
] | [
"0.85265154",
"0.59112906",
"0.5598949",
"0.5553952",
"0.5419853",
"0.52704614",
"0.52555555",
"0.5231697",
"0.5227046",
"0.5205608",
"0.5202568",
"0.5120578",
"0.51193374",
"0.5041665",
"0.5041665",
"0.50310254",
"0.50283116",
"0.50258374",
"0.5003555",
"0.49982905",
"0.49935257",
"0.49706155",
"0.49673685",
"0.49571374",
"0.49556172",
"0.49550134",
"0.49333254",
"0.49314147",
"0.49069685",
"0.4895962"
] | 0.8331396 | 1 |
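Taken together, the two print helpers in this record and the previous one produce the verbose trace markers; a tiny sketch of their output (editor's illustration, assuming the module-level `_VERBOSE_MODE` flag has been switched on):

    # with _VERBOSE_MODE switched on:
    _print_inwards('auth')     # prints: auth--->
    _print_outwards('auth')    # prints: <---auth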
This function is used to decorate generators with exactly two `yield` statements and turn them into middleware. For examples, see the documentation for this module and its tests. Extra arguments beyond `name` are passed to the generator being decorated during instantiation. If they are not known when this module is interpreted, this function can be used as a regular callable rather than as an annotation. | def middleware(name, *args, **kwargs):
def new_annotate(g_fn):
def new_middleware(handler):
def new_handler(ctx):
_print_inwards(name)
g = g_fn(ctx, *args, **kwargs)
changed_ctx = next(g)
new_ctx = handler(changed_ctx)
last_ctx = g.send(new_ctx)
_print_outwards(name)
return last_ctx
return new_handler
return new_middleware
return new_annotate | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def consumer(func):\n\n from functools import wraps\n\n @wraps(func)\n def wrapper(*args,**kw):\n gen = func(*args, **kw)\n gen.next()\n return gen\n return wrapper",
"def named(name):\n\n def new_annotate(mware):\n def new_middleware(handler):\n\n new_handler = mware(handler)\n\n def verbose_handler(ctx):\n _print_inwards(name)\n\n new_ctx = new_handler(ctx)\n\n _print_outwards(name)\n\n return new_ctx\n\n return verbose_handler\n\n return new_middleware\n\n return new_annotate",
"def writer_wrapper_2(coroutine):\n yield from coroutine",
"def with_outer(*args):\n def generator():\n for i in args:\n yield i\n return generator",
"def construct_result_generator_middleware(result_generators):\n def result_generator_middleware(make_request, web3):\n def middleware(method, params):\n if method in result_generators:\n result = result_generators[method](method, params)\n return {'result': result}\n else:\n return make_request(method, params)\n return middleware\n return result_generator_middleware",
"def test_decorated(*args):\n for i in args:\n yield i",
"def get_generator(generator: Generator, **kwargs) -> Generator:\n return generator(**kwargs)",
"def writer_wrapper_3(coroutine):\n coroutine.send(None) # prime the coro\n while True:\n try:\n try:\n x = (yield)\n except SpamException as e: # This catches the SpamException\n coroutine.throw(e)\n else:\n coroutine.send(x)\n except StopIteration:\n pass",
"def _wrap_in_generator(func, source, namer, overload):\n\n nonlocals = []\n\n for var in six.get_function_code(func).co_freevars:\n # We must generate dummy vars so the generated function has the same closure\n # as the original function.\n free_template = 'var = None'\n nonlocal_node = templates.replace(free_template, var=var)\n nonlocals.extend(nonlocal_node)\n\n gen_fun_name = namer.new_symbol('gen_fun', set())\n template = \"\"\"\n def gen_fun(overload):\n nonlocals\n\n program\n\n return f_name\n \"\"\"\n\n ret = templates.replace(\n template,\n gen_fun=gen_fun_name,\n nonlocals=nonlocals,\n overload=overload.symbol_name,\n program=source,\n f_name=func.__name__)\n\n converted_module, _ = parsing.ast_to_object(ret)\n outer_func = getattr(converted_module, gen_fun_name)\n return outer_func(overload.module)",
"def simple():\n yield 1\n yield 2\n yield 3",
"def index(*args, **kwargs):\n yield from _generate(*args, **kwargs)",
"def coroutine(func):\n @wraps(func)\n def primer(*args, **kwargs):\n gen = func(*args, **kwargs)\n next(gen)\n return gen\n return primer",
"def simple_generator():\n yield 'horse'\n # just going to do it...\n yield 'cow'\n yield 'mouse'",
"def func_star(a_b):\n return insideroutine(*a_b)",
"def generator(func):\n\n @fn\n @wraps(func)\n def gen(*args, **kwargs):\n return Iter(func(*args, **kwargs))\n\n return gen",
"def _wrap_generator(self, func):\n @functools.wraps(func)\n def generator_context(*args, **kwargs):\n gen = func(*args, **kwargs)\n\n # Generators are suspended and unsuspended at `yield`, hence we\n # make sure the grad mode is properly set every time the execution\n # flow returns into the wrapped generator and restored when it\n # returns through our `yield` to our caller (see PR #49017).\n cls = type(self)\n try:\n # Issuing `None` to a generator fires it up\n with cls():\n response = gen.send(None)\n\n while True:\n try:\n # Forward the response to our caller and get its next request\n request = yield response\n\n except GeneratorExit:\n # Inform the still active generator about its imminent closure\n with cls():\n gen.close()\n raise\n\n except BaseException:\n # Propagate the exception thrown at us by the caller\n with cls():\n response = gen.throw(*sys.exc_info())\n\n else:\n # Pass the last request to the generator and get its response\n with cls():\n response = gen.send(request)\n\n # We let the exceptions raised above by the generator's `.throw` or\n # `.send` methods bubble up to our caller, except for StopIteration\n except StopIteration as e:\n # The generator informed us that it is done: take whatever its\n # returned value (if any) was and indicate that we're done too\n # by returning it (see docs for python's return-statement).\n return e.value\n\n return generator_context",
"def _wrap_generator(ctx_factory, func):\n @functools.wraps(func)\n def generator_context(*args, **kwargs):\n gen = func(*args, **kwargs)\n\n # Generators are suspended and unsuspended at `yield`, hence we\n # make sure the grad mode is properly set every time the execution\n # flow returns into the wrapped generator and restored when it\n # returns through our `yield` to our caller (see PR #49017).\n try:\n # Issuing `None` to a generator fires it up\n with ctx_factory():\n response = gen.send(None)\n\n while True:\n try:\n # Forward the response to our caller and get its next request\n request = yield response\n\n except GeneratorExit:\n # Inform the still active generator about its imminent closure\n with ctx_factory():\n gen.close()\n raise\n\n except BaseException:\n # Propagate the exception thrown at us by the caller\n with ctx_factory():\n response = gen.throw(*sys.exc_info())\n\n else:\n # Pass the last request to the generator and get its response\n with ctx_factory():\n response = gen.send(request)\n\n # We let the exceptions raised above by the generator's `.throw` or\n # `.send` methods bubble up to our caller, except for StopIteration\n except StopIteration as e:\n # The generator informed us that it is done: take whatever its\n # returned value (if any) was and indicate that we're done too\n # by returning it (see docs for python's return-statement).\n return e.value\n\n return generator_context",
"def test_nested_yield():\n yield (yield (yield 1))",
"def patched_generator(self, *args, **kwargs):\n self.validate(*args, **kwargs)\n yield from self.function(*args, **kwargs)",
"def test_func_generator_name():\n for i in range(0, 4):\n yield 'try_odd', i",
"def new_generator(self):\n return self.generator_function(*self.args, **self.kwargs)",
"def multiple_decorator(x):\n return x",
"def construct_error_generator_middleware(error_generators):\n def error_generator_middleware(make_request, web3):\n def middleware(method, params):\n if method in error_generators:\n error_msg = error_generators[method](method, params)\n return {'error': error_msg}\n else:\n return make_request(method, params)\n return middleware\n return error_generator_middleware",
"def test_generator_method_name(self):\n for i in range(0, 4):\n yield 'try_odd', i",
"def req_as_decorator(req_output, *args, **kwargs):\r\n return req_output(dummy_func)(*args, **kwargs)",
"def counter_wrapper_2(generator):\n yield from generator",
"def abc():\r\n yield \"a\"\r\n yield \"b\"\r\n yield \"c\"",
"def _context(name, func):\n\tpush_aspect(name, func)\n\tyield\n\tpop_aspect(name)",
"def wrap_generator(generator, wrapper_function):\n for item in generator:\n yield wrapper_function(item)",
"def testExplicitGeneratorConvenienceFunctionUsage(self):\n\t\tc = Controller()\n\t\tx = c.mock()\n\t\tc.generator(x.g(8, 9), [10, 11])\n\t\tc.replay()\n\t\tself.failUnless([k for k in x.g(8, 9)] == [10, 11])"
] | [
"0.61025184",
"0.6074266",
"0.60481",
"0.59272635",
"0.587607",
"0.57251585",
"0.5700559",
"0.565485",
"0.5628643",
"0.5623533",
"0.55874825",
"0.5568008",
"0.5555406",
"0.5531668",
"0.55288386",
"0.55071145",
"0.55039895",
"0.54155296",
"0.53743196",
"0.5331949",
"0.5328424",
"0.5326343",
"0.53241307",
"0.5315651",
"0.5297026",
"0.5269264",
"0.5256659",
"0.52475005",
"0.5220585",
"0.52136374"
] | 0.68625414 | 0 |
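A usage sketch (editor's illustration; the middleware and handler names are invented) for the `middleware` decorator documented above — the first `yield` hands the context to the wrapped handler, the second returns the final context:

    @middleware('tagger')
    def tagging(ctx):
        ctx['request_id'] = 42       # "before" section, runs on the way in
        new_ctx = yield ctx          # wrapped handler chain runs here
        new_ctx['tagged'] = True     # "after" section, runs on the way out
        yield new_ctx

    def handler(ctx):
        return {**ctx, 'result': 'ok'}

    tagging(handler)({})  # -> {'request_id': 42, 'result': 'ok', 'tagged': True}

The same decorated middleware can also be layered around a handler with `wrap_and_call`, shown in the following record.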
This function layers `middleware` left to right around the `handler` and calls it all with `ctx` as an argument. Setting `verbose` to `True` prints when handlers start their before and after sections. | def wrap_and_call(ctx, handler, *middleware, verbose=False):
global _VERBOSE_MODE
_VERBOSE_MODE = verbose
middleware_ = list(middleware)
return compose(*reversed(middleware_))(handler)(ctx) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _print_inwards(middleware_name):\n if _VERBOSE_MODE:\n print('{}--->'.format(middleware_name))",
"def _print_outwards(middleware_name):\n if _VERBOSE_MODE:\n print('<---{}'.format(middleware_name))",
"def middleware(name, *args, **kwargs):\n\n def new_annotate(g_fn):\n def new_middleware(handler):\n def new_handler(ctx):\n _print_inwards(name)\n\n g = g_fn(ctx, *args, **kwargs)\n\n changed_ctx = next(g)\n new_ctx = handler(changed_ctx)\n last_ctx = g.send(new_ctx)\n\n _print_outwards(name)\n\n return last_ctx\n\n return new_handler\n\n return new_middleware\n\n return new_annotate",
"def wrap_in_middleware(app, global_conf, application_stack, **local_conf):\n stack = application_stack\n # Merge the global and local configurations\n conf = global_conf.copy()\n conf.update(local_conf)\n debug = asbool(conf.get('debug', False))\n # First put into place httpexceptions, which must be most closely\n # wrapped around the application (it can interact poorly with\n # other middleware):\n app = wrap_if_allowed(app, stack, httpexceptions.make_middleware, name='paste.httpexceptions', args=(conf,))\n # The recursive middleware allows for including requests in other\n # requests or forwarding of requests, all on the server side.\n if asbool(conf.get('use_recursive', True)):\n from paste import recursive\n app = wrap_if_allowed(app, stack, recursive.RecursiveMiddleware, args=(conf,))\n # Various debug middleware that can only be turned on if the debug\n # flag is set, either because they are insecure or greatly hurt\n # performance\n if debug:\n # Middleware to check for WSGI compliance\n if asbool(conf.get('use_lint', True)):\n from paste import lint\n app = wrap_if_allowed(app, stack, lint.make_middleware, name='paste.lint', args=(conf,))\n # Middleware to run the python profiler on each request\n if asbool(conf.get('use_profile', False)):\n import profile\n app = wrap_if_allowed(app, stack, profile.ProfileMiddleware, args=(conf,))\n # Middleware that intercepts print statements and shows them on the\n # returned page\n if asbool(conf.get('use_printdebug', True)):\n from paste.debug import prints\n app = wrap_if_allowed(app, stack, prints.PrintDebugMiddleware, args=(conf,))\n if debug and asbool(conf.get('use_interactive', False)):\n # Interactive exception debugging, scary dangerous if publicly\n # accessible, if not enabled we'll use the regular error printing\n # middleware.\n try:\n from weberror import evalexception\n app = wrap_if_allowed_or_fail(app, stack, evalexception.EvalException,\n args=(conf,),\n kwargs=dict(templating_formatters=build_template_error_formatters()))\n except MiddlewareWrapUnsupported as exc:\n log.warning(str(exc))\n import galaxy.web.framework.middleware.error\n app = wrap_if_allowed(app, stack, galaxy.web.framework.middleware.error.ErrorMiddleware, args=(conf,))\n else:\n # Not in interactive debug mode, just use the regular error middleware\n import galaxy.web.framework.middleware.error\n app = wrap_if_allowed(app, stack, galaxy.web.framework.middleware.error.ErrorMiddleware, args=(conf,))\n # Transaction logging (apache access.log style)\n if asbool(conf.get('use_translogger', True)):\n from paste.translogger import TransLogger\n app = wrap_if_allowed(app, stack, TransLogger)\n # X-Forwarded-Host handling\n from galaxy.web.framework.middleware.xforwardedhost import XForwardedHostMiddleware\n app = wrap_if_allowed(app, stack, XForwardedHostMiddleware)\n return app",
"def load_middleware(*args, **kwargs):\n inject_middleware()\n BaseHandler.load_middleware = original_load_middleware\n return original_load_middleware(*args, **kwargs)",
"async def test_middleware_multiple(self, dm):\n\n async def _first(ctx, responder, handler):\n responder.frame[\"middles\"] = responder.frame.get(\"middles\", []) + [\"first\"]\n await handler(ctx, responder)\n\n async def _second(ctx, responder, handler):\n responder.frame[\"middles\"] = responder.frame.get(\"middles\", []) + [\"second\"]\n await handler(ctx, responder)\n\n async def _handler(ctx, responder):\n # '_first' should have been called first, then '_second'\n assert responder.frame[\"middles\"] == [\"first\", \"second\"]\n\n dm.add_middleware(_first)\n dm.add_middleware(_second)\n dm.add_dialogue_rule(\"middleware_test\", _handler, intent=\"middle\")\n ctx = create_request(\"domain\", \"middle\")\n result = await dm.apply_handler(ctx, create_responder(ctx))\n assert result.dialogue_state == \"middleware_test\"",
"def _inject_trace_middleware_to_args(trace_middleware, args, kwargs):\n # type: (Callable, Tuple, Dict) -> Tuple[Tuple, Dict]\n middlewares_arg = 8\n if _graphql_version >= (3, 2):\n # middleware is the 10th argument graphql.execute(..) version 3.2+\n middlewares_arg = 9\n\n # get middlewares from args or kwargs\n try:\n middlewares = get_argument_value(args, kwargs, middlewares_arg, \"middleware\") or []\n if isinstance(middlewares, MiddlewareManager):\n # First we must get the middlewares iterable from the MiddlewareManager then append\n # trace_middleware. For the trace_middleware to be called a new MiddlewareManager will\n # need to initialized. This is handled in graphql.execute():\n # https://github.com/graphql-python/graphql-core/blob/v3.2.1/src/graphql/execution/execute.py#L254\n middlewares = middlewares.middlewares # type: Iterable\n except ArgumentError:\n middlewares = []\n\n # Note - graphql middlewares are called in reverse order\n # add trace_middleware to the end of the list to wrap the execution of resolver and all middlewares\n middlewares = list(middlewares) + [trace_middleware]\n\n # update args and kwargs to contain trace_middleware\n args, kwargs = set_argument_value(args, kwargs, middlewares_arg, \"middleware\", middlewares)\n return args, kwargs",
"async def test_middleware_single(self, dm):\n\n async def _middle(request, responder, handler):\n responder.middle = True\n await handler(request, responder)\n\n async def _handler(request, responder):\n assert responder.middle\n responder.handler = True\n\n dm.add_middleware(_middle)\n dm.add_dialogue_rule(\"middleware_test\", _handler, intent=\"middle\")\n request = create_request(\"domain\", \"middle\")\n response = create_responder(request)\n result = await dm.apply_handler(request, response)\n assert result.dialogue_state == \"middleware_test\"\n assert result.handler",
"def middleware_after(self):\n pass",
"async def raven_middleware(app, handler):\n async def middleware_handler(request):\n try:\n return await handler(request)\n except aiohttp.web.HTTPClientError:\n # Do not capture client errors\n raise\n except Exception:\n raven_client.captureException()\n raise\n return middleware_handler",
"def instrument_flask():\n oc_trace_config = app.config.get('OPENCENSUS_TRACE', {})\n oc_trace_config.update({\n 'EXPORTER': trace_exporter.TraceExporter,\n 'PROPAGATOR': trace_context_http_header_format.TraceContextPropagator\n })\n app.config.update(OPENCENSUS_TRACE=oc_trace_config)\n return flask_middleware.FlaskMiddleware(app)",
"def main(ctx, debug):\n if debug:\n logger.setLevel(logging.DEBUG)",
"def named(name):\n\n def new_annotate(mware):\n def new_middleware(handler):\n\n new_handler = mware(handler)\n\n def verbose_handler(ctx):\n _print_inwards(name)\n\n new_ctx = new_handler(ctx)\n\n _print_outwards(name)\n\n return new_ctx\n\n return verbose_handler\n\n return new_middleware\n\n return new_annotate",
"def on_load_middleware():\n\n # protect middleware wrapping: only a single thread proceeds\n global load_middleware_lock # lock gets overwritten as None after init\n if not load_middleware_lock: # already initialized? abort\n return\n mwlock = load_middleware_lock\n mwlock.acquire() # acquire global lock\n if not load_middleware_lock: # check again\n mwlock.release() # abort\n return\n load_middleware_lock = None # mark global as \"init done\"\n\n try:\n # middleware hooks\n from django.conf import settings\n for i in settings.MIDDLEWARE_CLASSES:\n if i.startswith('oboe'):\n continue\n dot = i.rfind('.')\n if dot < 0 or dot+1 == len(i):\n continue\n objname = i[dot+1:]\n imports.whenImported(i[:dot],\n functools.partial(middleware_hooks, objname=objname)) # XXX Not Python2.4-friendly\n\n # ORM\n if oboe.config['inst_enabled']['django_orm']:\n from oboeware import inst_django_orm\n imports.whenImported('django.db.backends', inst_django_orm.wrap)\n\n # templates\n if oboe.config['inst_enabled']['django_templates']:\n from oboeware import inst_django_templates\n import django\n if StrictVersion(django.get_version()) >= StrictVersion('1.3'):\n imports.whenImported('django.template.base', inst_django_templates.wrap)\n else:\n imports.whenImported('django.template', inst_django_templates.wrap)\n\n # load pluggaable instrumentation\n from loader import load_inst_modules\n load_inst_modules()\n\n # it's usually a tuple, but sometimes it's a list\n if type(settings.MIDDLEWARE_CLASSES) is tuple:\n settings.MIDDLEWARE_CLASSES = ('oboeware.djangoware.OboeDjangoMiddleware',) + settings.MIDDLEWARE_CLASSES\n elif type(settings.MIDDLEWARE_CLASSES) is list:\n settings.MIDDLEWARE_CLASSES = ['oboeware.djangoware.OboeDjangoMiddleware'] + settings.MIDDLEWARE_CLASSES\n else:\n print >> sys.stderr, \"Oboe error: thought MIDDLEWARE_CLASSES would be either a tuple or a list, got \" + \\\n str(type(settings.MIDDLEWARE_CLASSES))\n\n finally: # release instrumentation lock\n mwlock.release()\n\n try:\n add_rum_template_tags()\n except Exception, e:\n print >> sys.stderr, \"Oboe error: couldn't add RUM template tags: %s\" % (e,)",
"def test_middleware_loads(self):\n self.client.get(\"/__debug__\")",
"def log_meta_context(**kwargs):\n if not hasattr(_meta_local, 'meta'):\n _meta_local.meta = []\n\n if len(_meta_local.meta):\n # Seems to be a nested context. Include meta from the parent\n # context\n d = _meta_local.meta[-1].to_dict()\n d.update(kwargs)\n kwargs = d\n\n _meta_local.meta.append(LogMeta(**kwargs))\n\n yield _meta_local.meta[-1]\n # Remove the current meta from the stack after the context exits\n _meta_local.meta.pop()",
"def test_continue_on_early_trace_ending(context):\n env = get_env(\n {\n \"AWS_LAMBDA_FUNCTION_NAME\": \"finishing_spans_early_handler\",\n \"DD_LAMBDA_HANDLER\": \"tests.contrib.aws_lambda.handlers.finishing_spans_early_handler\",\n }\n )\n\n with override_env(env):\n patch()\n\n datadog(finishing_spans_early_handler)({}, context())",
"def main(ctx, verbose):\n ctx.ensure_object(dict)\n if verbose:\n log_level = 'DEBUG'\n else:\n log_level = 'INFO'\n\n init_logger('reVX', log_level=log_level)",
"def verbose(ctx, msg, *args):\n if ctx.verbose:\n info(msg, *args)",
"def apply_middleware(self, page, html):\n for middleware in self.middleware:\n html = middleware(page, html)\n return html",
"def ContextLog(logger, oline, cline):\n logger('{}...'.format(oline))\n yield\n logger('{}.'.format(cline))",
"async def pre_middleware(\n self,\n event: T_contra,\n context_variables: Optional[dict] = None,\n ) -> Optional[List[BaseMiddleware]]:\n mw_instances = []\n\n for middleware in self.middlewares:\n mw_instance = middleware(event, view=self)\n await mw_instance.pre()\n if not mw_instance.can_forward:\n logger.debug(\"{} pre returned error {}\", mw_instance, mw_instance.error)\n return None\n\n mw_instances.append(mw_instance)\n\n if context_variables is not None:\n context_variables.update(mw_instance.context_update)\n\n return mw_instances",
"async def test_async_middleware(dm):\n\n async def _middle(request, responder, handler):\n responder.middle = True\n await handler(request, responder)\n\n async def _handler(request, responder):\n assert responder.middle\n responder.handler = True\n\n dm.add_middleware(_middle)\n dm.add_dialogue_rule(\"middleware_test\", _handler, intent=\"middle\")\n request = create_request(\"domain\", \"middle\")\n response = create_responder(request)\n result = await dm.apply_handler(request, response)\n dm.apply_handler(request, response)\n assert result.dialogue_state == \"middleware_test\"\n assert result.handler",
"def trace(filler):\n @decorator\n def dec(func):\n def wrapper(*args, **kwargs):\n indent = filler * wrapper.level\n arguments = ', '.join(str(x) for x in args)\n print('{} --> {}({})'.format(indent, func.__name__, arguments))\n wrapper.level += 1\n\n result = func(*args, **kwargs)\n print('{} <-- {}({}) == {}'.format(indent, func.__name__, arguments, result))\n wrapper.level -= 1\n return result\n wrapper.level = 0\n return wrapper\n return dec",
"def QueryParametersMidware(event, context):\n\n def decorator_function(func):\n \"\"\"Decorator: performs query parameter checking and passes response\n\n Arguments:\n func (function): inner function that takes the response\n\n Returns:\n (function): wrapped function\n \"\"\"\n\n def wrapper(resp):\n \"\"\"Inner function: performs media type checking and passes response\n\n Arguments:\n resp (Response): Response object\n \n Returns:\n (Response): Response object, modified by query parameter midware\n \"\"\"\n\n # perform query param middleware function, which modifies the \n # response status code/headers/body as necessary\n # if status code is still OK at end of function, then execute the\n # inner function\n QueryParametersMW.middleware_func(event, resp)\n if resp.get_status_code() == SC.OK:\n return func(resp)\n else:\n return resp\n \n return wrapper\n\n return decorator_function",
"def _app(ctx, logfile, verbose):\n log_levels = {\n 0: logging.WARNING,\n 1: logging.INFO,\n 2: logging.DEBUG,\n }\n loglevel = log_levels.get(verbose, logging.DEBUG)\n # TODO more flexible logging config\n logging.basicConfig(format='%(name)s: %(levelname)s: %(message)s',\n level=loglevel, filename=logfile)\n\n tasks = ctx.obj['tasks']\n tasks.context = ctx",
"def _log_nested_outer(self):\n def _log_nested_inner():\n logging.info('info nested')\n return _log_nested_inner",
"def session_thread(**kwargs):\n mw = Middleware()\n try:\n mw.on_request_start()\n except Exception:\n mw.on_request_error('')\n finally:\n mw.on_response()",
"def context(subcontext=None) -> None:\n if subcontext is None:\n subcontext = []\n args = subcontext\n\n if len(args) == 0:\n args = config_context_sections.split()\n\n sections = [(\"legend\", lambda *args, **kwargs: [M.legend()])] if args else []\n sections += [(arg, context_sections.get(arg[0], None)) for arg in args]\n\n result = defaultdict(list)\n result_settings: DefaultDict[str, dict] = defaultdict(dict)\n for section, func in sections:\n if func:\n target = output(section)\n # Last section of an output decides about output settings\n settings = output_settings.get(section, {})\n result_settings[target].update(settings)\n with target as out:\n result[target].extend(\n func(\n target=out,\n width=settings.get(\"width\", None),\n with_banner=settings.get(\"banner_top\", True),\n )\n )\n\n for target, res in result.items():\n settings = result_settings[target]\n if len(res) > 0 and settings.get(\"banner_bottom\", True):\n with target as out:\n res.append(pwndbg.ui.banner(\"\", target=out, width=settings.get(\"width\", None)))\n\n for target, lines in result.items():\n with target as out:\n if result_settings[target].get(\"clearing\", config_clear_screen) and lines:\n clear_screen(out)\n out.write(\"\\n\".join(lines))\n if out is sys.stdout:\n out.write(\"\\n\")\n out.flush()",
"def handle_request(request, *args, **kwargs):\n root = request.route.grit_params[\"root\"]\n logger.info(\"WSGIApplication::handle_request path: %s method: %s\", request.path_qs, request.method)\n\n reqctx = RequestCtx(request, request.response, kwargs)\n def run_pipeline(l):\n if l:\n handler_cls = l.pop(0)\n logger.debug(\"running pipeline entry %s\", handler_cls)\n with handler_cls.begin(reqctx):\n if 0 <= reqctx.response.status_int <= 299:\n run_pipeline(l)\n\n run_pipeline(list(root.pipeline))\n\n rv = request.response\n if isinstance(rv, basestring):\n rv = webapp2.Response(rv)\n elif isinstance(rv, tuple):\n rv = webapp2.Response(*rv)\n request.response = rv\n logger.debug(\"Pipeline completed with response status %s\", rv.status)"
] | [
"0.61028963",
"0.5979751",
"0.553887",
"0.5415598",
"0.5203609",
"0.5072327",
"0.5070068",
"0.50015277",
"0.4928767",
"0.4807581",
"0.47827762",
"0.4747526",
"0.46803856",
"0.46597835",
"0.46321198",
"0.46194658",
"0.4610729",
"0.4543063",
"0.4516026",
"0.45135337",
"0.45097077",
"0.45091388",
"0.4435848",
"0.44111866",
"0.43837216",
"0.4375146",
"0.43694988",
"0.43451947",
"0.43430796",
"0.43322003"
] | 0.7304824 | 0 |
converts kml files to open airspace files | def kml_2_open_airspace_and_json_format(self, full_path):
# read file
f = open(full_path,'r')
kml = f.readlines()
f.close()
# find airspaces
"""Placemark >
< name > Bremen - Blumenthal
Thermikplatte < / name >
< styleUrl > # inline10</styleUrl>
< Polygon >
< tessellate > 1 < / tessellate >
< outerBoundaryIs >
< LinearRing >
< coordinates >
8.529121049900063, 53.19549566929423, 0
8.52324583919868, 53.21131939607898, 0
8.545439298799483, 53.23055800702935, 0
8.588991466114615, 53.23047069814625, 0
8.575289966189502, 53.20745451706468, 0
8.560633120477348, 53.19724609335408, 0
8.529121049900063, 53.19549566929423, 0
< / coordinates >
< / LinearRing >
< / outerBoundaryIs >
< / Polygon >
< / Placemark >"""
container = []
idxLine = 0
did_not_pass_main_folder = True
list_of_airspace_types_included = []
while idxLine < len(kml):
#print(kml[idxLine])
#if '<Folder>' in kml[idxLine] and did_not_pass_main_folder:
# # we have to jump over the first folder
# print(f'Reading everything inside folder: {kml[idxLine]}')
# did_not_pass_main_folder = False
if '<Folder>' in kml[idxLine]: # begin of airspace
as_type = kml[idxLine+1].replace('\t','').replace('<name>','').replace('</name>\n','') # <name>B</name>
print('Reading AS-types: ' + as_type)
list_of_airspace_types_included.append(as_type)
#if not (as_type == 'A' or as_type == 'B'):
# print('#### Check Folder / Airspace Types, must be "A" or "B" and try again (current %s)' % as_type)
# msgbox('Check Folder / Airspace Types, are not "A" or "B" (current %s). Airspace E will be used for export.' % as_type)
# as_type = 'E'
if '<Placemark' in kml[idxLine]: # begin of airspace
container = []
if '</Placemark' in kml[idxLine]: # end of airspace
# make sure only Polygons are stored
for as_line in container:
if '<Polygon>' in as_line:
idx_lookAt_start = None
for idx, line_of_container in enumerate(container):
if "<LookAt>" in line_of_container:
idx_lookAt_start = idx
if "</LookAt>" in line_of_container:
idx_lookAt_end = idx
# Remove lookAt lines if necessary
if idx_lookAt_start:
container = container[0:idx_lookAt_start] + container[idx_lookAt_end+1::] # cut out look at part
# append airspace to airspace list as airspace class
self.airspaces.append(Airspace(lines=container, file_type='kml', as_type=as_type))
container.append(kml[idxLine])
idxLine += 1
print('Loaded %d airspaces from KML-file (%s)' %(len(self.airspaces),full_path))
# summary
outlines = ['* KML conversion file, rename this line']
json_dict = {"circles": [], "polygons": []}
for airspace in self.airspaces:
# prepare open-airspace format
outlines.append('\n\n') # separate airspaces
outlines.extend(airspace.txt_lines)
# prepare json
json_dict['polygons'].append(airspace.json_dict)
# write open airspace format
target_path = full_path[:-4] + '_converted.txt'
# ui save dialog
target_path = filesavebox(default=target_path, filetypes="*.txt")
if target_path is None:
print('Airspace conversion was aborted by the user')
quit()
f = open(target_path,'w')
f.writelines(outlines)
f.close()
print('Result was written to: %s' % target_path)
# write json:
target_path_json = target_path[:-4] + '.json'
json_string = json.dumps(json_dict)
json_file = open(target_path_json, "w")
json_file.write(json_string)
json_file.close()
# print the airspace types that were converted (intended for the index.html of the leaflet map)
print('The following airspace types have been converted:')
print(list_of_airspace_types_included) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def open_airspace_format_2_kml(self, source_file_txt):\n # load template for kml file\n self.load_kml_template(self.full_path_kml_template)\n # load airspace source\n self.load_airspace_open_air_format(source_file_txt)\n\n self.kml_lines = self.kml_template['header']\n self.kml_lines.extend(self.kml_template['good_subdivided']['head'])\n # collect all A and B kml lines\n kml_A = []\n kml_B = []\n # transform airspaces and attach to A and B collect-lists\n for airspace in self.airspaces:\n airspace.make_kml_format(self.kml_template)\n if airspace.as_type == 'A':\n kml_A.extend(airspace.kml_lines)\n if airspace.as_type == 'B':\n kml_B.extend(airspace.kml_lines)\n\n self.kml_lines.extend(kml_A)\n self.kml_lines.extend(self.kml_template['good_subdivided']['tail'])\n # start B part\n self.kml_lines.extend(self.kml_template['bad_subdivided']['head'])\n self.kml_lines.extend(kml_B)\n self.kml_lines.extend(self.kml_template['bad_subdivided']['tail'])\n\n full_path_kml = source_file_txt[:-4] + '_converted.kml'\n # uisave dialog\n full_path_kml = filesavebox(default=full_path_kml, filetypes=\"*.kml\")\n if full_path_kml is None:\n print('Airspace conversion was aborted by the user')\n quit()\n\n # write to file\n f = open(full_path_kml, 'w')\n f.writelines(self.kml_lines)\n f.close()\n print('Resulting KML files was saved to: %s' % full_path_kml)",
"def keyholemarkup2x(file,output='df'):\n r = re.compile(r'(?<=\\.)km+[lz]?',re.I)\n try:\n extension = r.search(file).group(0) #(re.findall(r'(?<=\\.)[\\w]+',file))[-1]\n \n \n except IOError as e:\n logging.error(\"I/O error {0}\".format(e))\n if (extension.lower()=='kml') is True:\n buffer = file\n elif (extension.lower()=='kmz') is True:\n kmz = ZipFile(file, 'r')\n \n vmatch = np.vectorize(lambda x:bool(r.search(x)))\n A = np.array(kmz.namelist())\n sel = vmatch(A)\n buffer = kmz.open(A[sel][0],'r')\n \n else:\n raise ValueError('Incorrect file format entered. Please provide the '\n 'path to a valid KML or KMZ file.') \n \n \n parser = xml.sax.make_parser()\n handler = PlacemarkHandler()\n parser.setContentHandler(handler)\n parser.parse(buffer)\n \n try:\n kmz.close()\n except:\n pass\n \n df = pd.DataFrame(handler.mapping).T\n names = list(map(lambda x: x.lower(),df.columns))\n if 'description' in names:\n extradata = df.apply(PlacemarkHandler.htmlizer,axis=1)\n df = df.join(extradata)\n \n \n output = output.lower()\n \n if output=='df' or output=='dataframe' or output == None:\n result = df\n \n elif output=='csv':\n out_filename = file[:-3] + \"csv\"\n df.to_csv(out_filename,encoding='utf-8',sep=\"\\t\")\n result = (\"Successfully converted {0} to CSV and output to\"\n \" disk at {1}\".format(file,out_filename))\n \n elif output=='gpd' or output == 'gdf' or output=='geoframe' or output == 'geodataframe':\n try:\n import shapely\n from shapely.geometry import Polygon,LineString,Point\n except ImportError as e:\n raise ImportError('This operation requires shapely. {0}'.format(e))\n try:\n import fiona\n except ImportError as e:\n raise ImportError('This operation requires fiona. {0}'.format(e))\n try:\n import geopandas as gpd\n except ImportError as e:\n raise ImportError('This operation requires geopandas. {0}'.format(e))\n \n geos = gpd.GeoDataFrame(df.apply(PlacemarkHandler.spatializer,axis=1))\n result = gpd.GeoDataFrame(pd.concat([df,geos],axis=1))\n \n \n elif output=='geojson' or output=='json':\n try:\n import shapely\n from shapely.geometry import Polygon,LineString,Point\n except ImportError as e:\n raise ImportError('This operation requires shapely. {0}'.format(e))\n try:\n import fiona\n except ImportError as e:\n raise ImportError('This operation requires fiona. {0}'.format(e))\n try:\n import geopandas as gpd\n except ImportError as e:\n raise ImportError('This operation requires geopandas. {0}'.format(e))\n try:\n import geojson\n except ImportError as e:\n raise ImportError('This operation requires geojson. {0}'.format(e))\n \n geos = gpd.GeoDataFrame(df.apply(PlacemarkHandler.spatializer,axis=1))\n gdf = gpd.GeoDataFrame(pd.concat([df,geos],axis=1))\n out_filename = file[:-3] + \"geojson\"\n gdf.to_file(out_filename,driver='GeoJSON')\n validation = geojson.is_valid(geojson.load(open(out_filename)))['valid']\n if validation == 'yes':\n \n result = (\"Successfully converted {0} to GeoJSON and output to\"\n \" disk at {1}\".format(file,out_filename))\n else:\n raise ValueError('The geojson conversion did not create a '\n 'valid geojson object. Try to clean your '\n 'data or try another file.')\n \n elif output=='shapefile' or output=='shp' or output =='esri shapefile':\n try:\n import shapely\n from shapely.geometry import Polygon,LineString,Point\n except ImportError as e:\n raise ImportError('This operation requires shapely. {0}'.format(e))\n try:\n import fiona\n except ImportError as e:\n raise ImportError('This operation requires fiona. 
{0}'.format(e))\n \n try:\n import geopandas as gpd\n except ImportError as e:\n raise ImportError('This operation requires geopandas. {0}'.format(e))\n \n try:\n import shapefile\n except ImportError as e:\n raise ImportError('This operation requires pyshp. {0}'.format(e))\n \n \n geos = gpd.GeoDataFrame(df.apply(PlacemarkHandler.spatializer,axis=1))\n gdf = gpd.GeoDataFrame(pd.concat([df,geos],axis=1))\n out_filename = file[:-3] + \"shp\"\n gdf.to_file(out_filename,driver='ESRI Shapefile')\n sf = shapefile.Reader(out_filename)\n import shapefile\n sf = shapefile.Reader(out_filename)\n if len(sf.shapes())>0:\n validation = \"yes\"\n else:\n validation = \"no\"\n if validation == 'yes':\n \n result = (\"Successfully converted {0} to Shapefile and output to\"\n \" disk at {1}\".format(file,out_filename))\n else:\n raise ValueError('The Shapefile conversion did not create a '\n 'valid shapefile object. Try to clean your '\n 'data or try another file.') \n else:\n raise ValueError('The conversion returned no data; check if'\n ' you entered a correct output file type. '\n 'Valid output types are geojson, shapefile,'\n ' csv, geodataframe, and/or pandas dataframe.')\n \n return result",
"def __init__(self, full_path_of_source=''):\n if len(full_path_of_source) == 0:\n full_path_of_source = fileopenbox(default=os.path.curdir, filetypes=[\"*.txt\", \"*.kml\"])\n if full_path_of_source is None:\n print('Airspace conversion was aborted by the user')\n quit()\n # set template (this should not be changed)\n self.full_path_kml_template = r'Thermal_Map_Template5.kml' # set template file here: Folder must be named \"good\" and \"bad\"\n\n self.airspaces = [] # airspace container\n self.kml_template = {'header': [], 'good': [], 'bad': [], # will be filled after loading template\n 'good_subdivided': {'head':[], 'placemark': [], 'tail': []},\n 'bad_subdivided': {'head':[], 'placemark': [], 'tail': []}}\n self.txt_lines = [] # airspace file in open airspace format\n self.kml_lines = [] # airspace file in kml format\n \"\"\" handle conversion from and to KML / airspace format\"\"\"\n if full_path_of_source.lower().endswith('.kml'):\n self.kml_2_open_airspace_and_json_format(full_path_of_source)\n if full_path_of_source.lower().endswith('.txt'):\n self.open_airspace_format_2_kml(full_path_of_source)\n self.plot_all() # works for now only for TXT input",
"def make_open_airspace_format(self):\n # Extract coordinates from KML\n for idxline in range(len(self.kml_lines)):\n if '<name>' in self.kml_lines[idxline]:\n self.name = self.kml_lines[idxline].replace('\\t', '').replace('<name>', '').replace('</name>', '').replace('\\n','')\n if not self.name.startswith('TS'):\n self.name = 'TS_' + self.name\n print('Type: %s | Name: %s' % (self.as_type, self.name))\n if '<coordinates>' in self.kml_lines[idxline]:\n self.coordinates_kml = self.kml_lines[idxline + 1].replace('\\t', '').replace('\\n', '')\n break\n # start conversion to airspace format\n \"\"\" AC A\n AN TS_Erzgeb\n AL FL98\n AH FL99\n DP 50:26:22 N 012:17:59 E\n DP 50:25:25 N 012:18:26 E\n DP 50:24:40 N 012:19:01 E\n DP 50:24:06 N 012:19:46 E\"\"\"\n\n # AC A\n self.txt_lines.append('AC %s\\n' % self.as_type)\n # AN TS_Erzgeb\n self.txt_lines.append('AN %s\\n' % self.name)\n # heights\n self.txt_lines.append('AL FL98\\n')\n self.txt_lines.append('AH FL99\\n')\n # coordinates\n for coo_pt in self.coordinates_kml.split(' ')[:-1]:\n # Target format: DP 50:26:22 N 012:17:59 E\n lat_long = coo_pt.split(',')\n # latitude\n latDecAsStr = lat_long[1].split('.')\n #if '.' not in latDecAsStr: # take care of case \"51\" instead of \"51.123456\"\n # latDecAsStr += '.000000'\n lat_degree = abs(int(latDecAsStr[0]))\n #print(f'latDecAsStr {latDecAsStr}')\n if len(latDecAsStr)==1:\n latDecAsStr.append('0')\n lat_secondDec = (float('0.' + latDecAsStr[1])*60) % 1\n lat_minute = round((float('0.' + latDecAsStr[1])*60) - lat_secondDec)\n lat_second = round(lat_secondDec*60)\n cooString = ('DP %02d:%02d:%02d' %(lat_degree,lat_minute,lat_second))\n if latDecAsStr[0].startswith('-'):\n cooString += ' S'\n else:\n cooString += ' N'\n # longitude\n #print(f'converting lat_long {lat_long}')\n # take care of case: no decimal sign included, case \"11\" instead of \"11.123456\"\n if '.' not in lat_long[0]:\n lat_long[0] += '.0'\n lonDecAsStr = lat_long[0].split('.')\n lon_degree = abs(int(lonDecAsStr[0]))\n lon_secondDec = (float('0.' + lonDecAsStr[1]) * 60) % 1\n lon_minute = round((float('0.' + lonDecAsStr[1]) * 60) - lon_secondDec)\n lon_second = round(lon_secondDec * 60)\n cooString += (' %03d:%02d:%02d' % (lon_degree, lon_minute, lon_second))\n if lonDecAsStr[0].startswith('-'):\n cooString += ' W'\n else:\n cooString += ' E'\n cooString += '\\n'\n self.txt_lines.append(cooString)",
"def make_kml_format(self,kml_template):\n if self.as_type == 'A':\n self.kml_lines = kml_template['good_subdivided']['placemark']\n elif self.as_type == 'B':\n self.kml_lines = kml_template['bad_subdivided']['placemark']\n else:\n print('Unknown airspace type')\n # get idx of name and coordinates\n idxLine = 0\n while idxLine < len(self.kml_lines):\n #print(self.kml_lines[idxLine]\n if self.kml_lines[idxLine].startswith('\\t\\t\\t\\t<name>'): # begin of airspace\n idx_name = idxLine\n if '\\t\\t\\t\\t\\t\\t\\t<coordinates>\\n' in self.kml_lines[idxLine]: # begin of airspace\n idx_coordinates = idxLine+1\n idxLine += 1\n # transform coordinates\n # add all coordinates: Format is:\n # source: 'DP 50:26:22 N 012:17:59 E\\n'\n # target: 9.025830271397426,53.46493577242719,0 8.986157446488383,53.46952117358134,0\n coo_list = [] # collect list of coorinates as strings\n for line in self.txt_lines:\n if line.startswith('AN'):\n self.name = line[3:].replace('\\n','')\n self.kml_lines[idx_name] = '\\t\\t\\t\\t<name>%s</name>\\n' % self.name\n\n if line.startswith('DP'):\n # lon\n lon_deg = float(line[14:17])\n lon_min = float(line[18:20])\n lon_sec = float(line[21:23])\n lon_dec = (lon_sec / 60 + lon_min) / 60 + lon_deg\n if line[24] == 'W':\n lon_dec *= -1 # negative if west\n # lat\n lat_deg = float(line[3:5])\n lat_min = float(line[6:8])\n lat_sec = float(line[9:11])\n lat_dec = (lat_sec / 60 + lat_min) / 60 + lat_deg\n if line[12] == 'S':\n lat_dec *= -1 # negative if west\n # attach coordinates\n coo_list.append('%1.16f,%1.16f,0 ' % (lon_dec,lat_dec))\n # store for later plotting\n self.lat_dec.append(lat_dec)\n self.lon_dec.append(lon_dec)\n\n # make sure that shape is closed --> first an last point must be the same\n if coo_list[0] != coo_list[-1]:\n coo_list.append(coo_list[0])\n self.lat_dec.append(self.lat_dec[0])\n self.lon_dec.append(self.lon_dec[0])\n\n # write coordinate strings into kml\n self.kml_lines[idx_coordinates] = '\\t\\t\\t\\t\\t\\t\\t\\t' # is prefix. Coordinates to be added as string below\n for pt in coo_list:\n self.kml_lines[idx_coordinates] += pt\n print('Converted airspace %s' % self.name)",
"def importKML(filepath):\n\tf = open(filepath, 'r')\n\tstr = f.read()\n\treturn etree.fromstring(str)",
"def make_input_data_kmls(rundata):\n \n import os\n from . import topotools, dtopotools\n\n regions2kml(rundata, combined=False)\n gauges2kml(rundata)\n\n topofiles = rundata.topo_data.topofiles\n for f in topofiles:\n topo_file_name = f[-1]\n topo_type = f[0]\n topo2kml(topo_file_name, topo_type)\n \n dtopofiles = rundata.dtopo_data.dtopofiles\n for f in dtopofiles:\n dtopo_file_name = f[-1]\n dtopo_type = f[0]\n dtopo2kml(dtopo_file_name, dtopo_type)",
"def tdump2kml(inputDir):\n # Check inputdir\n if not os.path.exists(inputDir):\n print(\"Entered directory is invalid.\")\n sys.exit()\n\n os.chdir(inputDir)\n\n # Main loop\n for run in os.walk('.').next()[1]:\n\n os.chdir(run)\n\n # Filter tdump files\n files = glob.glob(\"*.tdump\")\n\n # Conversion\n for entry in files:\n p = subprocess.Popen(\"C:\\\\hysplit4\\\\exec\\\\trajplot.exe -i%s -o%s.ps -a3 -v1 -l1\" % \\\n (entry, entry), shell=True, stdout=subprocess.PIPE)\n p.wait()\n os.remove(entry[:-6])\n #p_out = p.communicate()\n #print p_out[0], p_out[1]\n\n # Move all kmls into dir kmls\n #sys.stdout.flush()\n kmls = glob.glob(\"*.kml\")\n\n if not os.path.exists(\"kmls\"):\n os.makedirs(\"kmls\")\n\n for kml in kmls:\n os.rename(kml, \"kmls\\\\%s\" % kml)\n\n # Remove redundant ps files\n pss = glob.glob(\"*.ps\")\n\n for ps in pss:\n os.remove(ps)\n\n print \"DONE : %s %s\\kmls\" % (run, os.getcwd())\n os.chdir('../')",
"def makepkl():\n # Old osgeo.ogr approach\n from osgeo import ogr\n # USTimeZones.kml source is unknown, but was freely available and\n # Has been converted to a pkl file\n kmlpath = os.path.join(os.path.dirname(__file__), 'USTimeZones.kml')\n driver = ogr.GetDriverByName('KML')\n datasource = driver.Open(kmlpath)\n layer = datasource.GetLayer()\n layerDefn = layer.GetLayerDefn()\n oldfeats = [i_ for i_ in layer]\n featDefn = layer.GetLayerDefn()\n feat = ogr.Feature(featDefn)\n nbFeat = layer.GetFeatureCount()\n outfeat = file(uspklpath, 'w')\n featout = [(feat.GetField(0), feat.GetGeometryRef().ExportToWkt()) for feat in oldfeats]\n pickle.dump(featout, file(uspklpath, 'w'))\n\n # WorldTimeZones.kml source is below and was freely available and\n # Has been converted to a pkl file\n # https://productforums.google.com/forum/?fromgroups=#!msg/gec-tools/EdR18tz_5k8/MRPV85OxXIkJ\n kmlpath = os.path.join(os.path.dirname(__file__), 'WorldTimeZones.kml')\n driver = ogr.GetDriverByName('KML')\n datasource = driver.Open(kmlpath)\n layer = datasource.GetLayer()\n layerDefn = layer.GetLayerDefn()\n oldfeats = [i_ for i_ in layer]\n featDefn = layer.GetLayerDefn()\n feat = ogr.Feature(featDefn)\n nbFeat = layer.GetFeatureCount()\n outfeat = file(worldpklpath, 'w')\n featout = [(feat.GetField(0), feat.GetGeometryRef().ExportToWkt()) for feat in oldfeats]\n pickle.dump(featout, file(worldpklpath, 'w'))",
"def get_kml_object(filename: str) -> fastkml.kml.KML:\n\t\n\tkml_obj = fastkml.kml.KML()\n\t\n\twith open(filename) as file:\n\t\tkml_obj.from_string(file.read().encode(\"utf-8\"))\n\t\n\treturn kml_obj",
"def read_kml():\n global kmldata\n global CONFIG\n if type(kmldata) == type(None):\n if not os.path.exists(CONFIG[\"kmlfile\"]):\n fiona.drvsupport.supported_drivers['KML'] = 'rw'\n kmldata = geopandas.read_file(CONFIG[\"kmlrepo\"], driver=\"KML\")\n os.makedirs(CONFIG[\"cachedir\"],exist_ok=True)\n with open(CONFIG[\"kmlfile\"], \"wb\") as fh:\n pickle.dump(kmldata,fh)\n else:\n with open(CONFIG[\"kmlfile\"], \"rb\") as fh:\n kmldata = pickle.load(fh)\n return kmldata",
"def export_kml(self, kmz=False):\n orderby = self.orderby.get()\n currentregion = self.region.get()\n if kmz:\n outputfile = tkinter.filedialog.asksaveasfilename(\n defaultextension=\".kmz\",\n filetypes=((\"keyhole markup language\", \"*.kmz\"),\n (\"All Files\", \"*.*\")))\n else:\n outputfile = tkinter.filedialog.asksaveasfilename(\n defaultextension=\".kml\",\n filetypes=((\"keyhole markup language\", \"*.kml\"),\n (\"All Files\", \"*.*\")))\n if outputfile:\n self.tabs.window.aistracker.create_kml_map(\n outputfile, kmzoutput=kmz, orderby=orderby,\n region=currentregion)\n else:\n raise ExportAborted('Export cancelled by user.')",
"def kml(cls, user, logs, kml, kml_doc):\n # KML Compliant Datetime Formatter\n kml_datetime_format = \"%Y-%m-%dT%H:%M:%S.%fZ\"\n icon = 'http://maps.google.com/mapfiles/kml/shapes/airports.png'\n threshold = 1 # Degrees\n\n kml_folder = kml.newfolder(name=user.username)\n\n flights = TakeoffOrLandingEvent.flights(user)\n if len(flights) == 0:\n return\n\n logs = filter(lambda log: cls._is_bad_position(log, threshold), logs)\n for i, flight in enumerate(flights):\n label = 'Flight {}'.format(i + 1) # Flights are one-indexed\n kml_flight = kml_folder.newfolder(name=label)\n\n flight_logs = filter(lambda x: flight.within(x.timestamp), logs)\n if len(flight_logs) < 2:\n continue\n\n coords = []\n angles = []\n when = []\n for entry in flight_logs:\n pos = entry.uas_position.gps_position\n # Spatial Coordinates\n coord = (pos.longitude, pos.latitude,\n units.feet_to_meters(entry.uas_position.altitude_msl))\n coords.append(coord)\n\n # Time Elements\n time = entry.timestamp.strftime(kml_datetime_format)\n when.append(time)\n\n # Degrees heading, tilt, and roll\n angle = (entry.uas_heading, 0.0, 0.0)\n angles.append(angle)\n\n # Create a new track in the folder\n trk = kml_flight.newgxtrack(name='Flight Path')\n trk.altitudemode = AltitudeMode.absolute\n\n # Append flight data\n trk.newwhen(when)\n trk.newgxcoord(coords)\n trk.newgxangle(angles)\n\n # Set styling\n trk.extrude = 1 # Extend path to ground\n trk.style.linestyle.width = 2\n trk.style.linestyle.color = Color.blue\n trk.iconstyle.icon.href = icon\n\n for obstacle in MovingObstacle.objects.all():\n obstacle.kml(path=flight_logs, kml=kml_flight, kml_doc=kml_doc)",
"def kml_file_to_open511_element(filename):\n ds = DataSource(filename)\n base_element = get_base_open511_element(lang='fr')\n for layer in ds:\n for feature in layer:\n base_element.append(feature_to_open511_element(feature))\n return base_element",
"def convert(input_filename, output_filename):\n c_file = pkg_resources.resource_filename('ShapelyChipDesigns', 'convert.rb')\n os.system('klayout -z -rd input='+input_filename+' -rd output='+output_filename+' -r '+c_file)",
"def process(kml_file, kmz=False):\n\ttry:\n\t\tif kmz:\n\t\t\tzipped = zipfile.ZipFile(kml_file)\n\t\t\tkml = Kml(zipped.open('doc.kml'))\n\t\telse: \n\t\t\tkml = Kml(open(kml_file))\n\texcept Exception as e:\n\t\tprint('Failed for %s: %s' % (kml_file, e))\n\telse:\n\t\tprint('FILE NAME: %s' % kml_file)\n\t\tif not is_duplicate(kml.as_dict(), collection): \n\t\t\t# try to update database AND\n\t\t\t# extract files to right place; if one\n\t\t\t# fails, undo the other:\t\n\t\t\ttry:\n\t\t\t\tcollection.insert_one(kml.as_dict())\n\t\t\texcept Exception as e:\n\t\t\t\tprint('Failed to update database with %s: %s' % (kml, e))\n\t\t\telse:\n\t\t\t\ttry:\n\t\t\t\t\tdest = 'static/kml/%s' % kml.uid\n\t\t\t\t\tif kmz:\n\t\t\t\t\t\tzipped.extractall(dest)\n\t\t\t\t\telse:\n\t\t\t\t\t\tif not os.path.exists(os.path.dirname(dest)): os.makedirs(os.path.dirname(dest))\n\t\t\t\t\t\tshutil.copy(kml_file, '%s/doc.kml' % dest)\n\t\t\t\texcept Exception as e:\n\t\t\t\t\tprint('Failed to extract files: %s\\n\\tTrying to remove record from database...' % e)\n\t\t\t\t\ttry:\n\t\t\t\t\t\tcollection.remove(kml.as_json())\n\t\t\t\t\texcept Exception as e:\n\t\t\t\t\t\tprint('Failed to remove item from database -- db is no longer consistent w/ file system: %s' % e)\n\tfinally:\n\t\tif kmz:\n\t\t\tzipped.close()\n\t\telse:\n\t\t\tkml.close()",
"def kml_extract_RDD(xml_file):\n soup = BeautifulSoup(xml_file, \"lxml-xml\")\n return get_kml_content(soup)",
"def show_kml_list():\n out = []\n\n for filename in os.listdir(settings.KML_OUTPUT_DIR):\n path = os.path.join(settings.KML_OUTPUT_DIR, filename)\n if os.path.isdir(path):\n continue\n f = open(path)\n content = f.read(300)\n f.close()\n name = KML_NAME_RE.search(content)\n if not name:\n continue\n out.append((name.group(1), filename))\n\n return {'items': sorted(out, cmp=lambda a, b: dumb_czech_cmp(a, b)), 'MEDIA_URL': settings.MEDIA_URL}",
"def readKML(filename):\n\n kml_file = path.join(filename)\n\n #### se leen los elementos del KML\n with open(kml_file) as f:\n folder = parser.parse(f).getroot().Document.Folder\n\n #### se separan los elementos, nombres de los puntos y las coordenadas\n plnm=[]\n cordi=[]\n for pm in folder.Placemark:\n plnm1 = pm.name\n plcs1 = pm.Point.coordinates\n plnm.append(plnm1.text)\n cordi.append(plcs1.text)\n # print(cordi)\n # print(plnm) \n\n #### se genera el objeto pandas\n db=pd.DataFrame()\n db['point_name']=plnm\n db['cordinates']=cordi\n\n db['Longitude'], db['Latitude'], db['value'] = zip(*db['cordinates'].apply(lambda x: x.split(',', 2)))\n db[\"Longitude\"] = pd.to_numeric(db[\"Longitude\"])\n db[\"Latitude\"] = pd.to_numeric(db[\"Latitude\"])\n del db['cordinates']\n del db['value']\n\n db['Coordinates'] = list(zip(db.Longitude, db.Latitude))\n db['Coordinates'] = db['Coordinates'].apply(Point)\n\n # print(db)\n\n return db",
"def read(self, url: str):\n\n log.info(f\"Downloading KMZ file {basename(url)}\")\n kml = self.fetch(url)\n\n log.info(\"Parsing KML data\")\n self.iter_elems = iterparse(BytesIO(kml), events=(\"start\", \"end\"), resolve_entities=False)\n\n prod_items = {\n \"issuer\": \"Issuer\",\n \"product_id\": \"ProductID\",\n \"generating_process\": \"GeneratingProcess\",\n \"issue_time\": \"IssueTime\",\n }\n\n nsmap = None\n\n # Get Basic Metadata\n prod_definition = None\n prod_definition_tag = None\n for event, element in self.iter_elems:\n if event == \"start\":\n # get namespaces from root element\n if nsmap is None:\n nsmap = element.nsmap\n prod_definition_tag = f\"{{{nsmap['dwd']}}}ProductDefinition\"\n elif event == \"end\":\n if element.tag == prod_definition_tag:\n prod_definition = element\n # stop processing after head\n # leave forecast data for iteration\n break\n\n self.metadata = {k: prod_definition.find(f\"{{{nsmap['dwd']}}}{v}\").text for k, v in prod_items.items()}\n self.metadata[\"issue_time\"] = dt.datetime.fromisoformat(self.metadata[\"issue_time\"])\n\n # Get time steps.\n timesteps = prod_definition.findall(\n \"dwd:ForecastTimeSteps\",\n nsmap,\n )[0]\n self.timesteps = [dt.datetime.fromisoformat(i.text) for i in timesteps.getchildren()]\n\n # save namespace map for later iteration\n self.nsmap = nsmap",
"def load_asterix_category_format(k):\n global filenames\n try:\n __basePath__ = os.path.abspath(os.path.join(os.getcwd(), '../../../..'))\n\n # Look for file in current executing directory\n path_filename1 = filenames[k]\n\n # On default directory (absolute)\n path_filename2 = __basePath__ + \"/\" +filenames[k]\n\n # On default directory (relative)\n path_filename3 = os.path.dirname(os.path.realpath(__file__)) + \"/xml/\" + filenames[k]\n\n if os.path.isfile(path_filename1):\n # print \"Loading file '%s'\" % path_filename1\n return minidom.parse(path_filename1)\n\n if os.path.isfile(path_filename2):\n # print \"Loading file '%s'\" % path_filename2\n return minidom.parse(path_filename2)\n\n if os.path.isfile(path_filename3):\n # print \"Loading file '%s'\" % path_filename3\n return minidom.parse(path_filename3)\n\n return None\n\n except:\n traceback.print_exc()\n\n return None",
"def main():\n input_file_path = sys.argv[1]\n output_file_path = sys.argv[2]\n gps_df = create_df(input_file_path) # creates a data frame\n gps_df = clean_data(gps_df) # cleans the data\n print('Cleaning done')\n write_to_kml(gps_df, output_file_path) # writes to kml file",
"def funcion_escribe_kml():\n\n DB = \"geoinfo\" # default database name\n LOGIN = \"gast\" # default login\n PASSWORD = \"gast\" # default password\n\n cnx = MySQLdb.connect(db=DB, user=LOGIN, passwd=PASSWORD)\n cursor = cnx.cursor()\n\n cursor.execute(\"SELECT * from wlan order by essid\")\n results = cursor.fetchall()\n\n print \"Total APs: %s\" % len(results) # print total AP count\n\n f = open(sys.argv[1], 'w')\n f.write('<?xml version=\"1.0\" encoding=\"UTF-8\"?>\\n')\n f.write('<kml xmlns=\"http://earth.google.com/kml/2.2\">\\n')\n f.write(' <Folder>\\n')\n f.write(' <name>GpsDrive+Kismet wifis</name>\\n')\n # By default folder is showed\n f.write(' <visibility>1</visibility>\\n')\n # GpsDrive icon\n f.write(' <ScreenOverlay>\\n')\n f.write(' <name>Info</name>\\n')\n f.write(' <description>Wifi data</description>\\n')\n f.write(' <visibility>1</visibility>\\n')\n f.write(' <Icon>\\n')\n f.write(' <href>https://raw.github.com/rodrigorega/GpsDriveToGoogleEarth/master/img/gpsdrivelogo.png</href>\\n')\n f.write(' </Icon>\\n')\n f.write(' <overlayXY x=\"0\" y=\"-1\" xunits=\"fraction\" yunits=\"fraction\"/>\\n')\n f.write(' <screenXY x=\"0\" y=\"0\" xunits=\"fraction\" yunits=\"fraction\"/>\\n')\n f.write(' <rotationXY x=\"0\" y=\"0\" xunits=\"fraction\" yunits=\"fraction\"/>\\n')\n f.write(' <size x=\"0\" y=\"0\" xunits=\"fraction\" yunits=\"fraction\"/>\\n')\n f.write(' </ScreenOverlay>')\n\n # write all APs to .kml file\n for line in results:\n name = line[6].replace('&', 'and') # To avoid Google Earth errors\n wep = line[8]\n lat = line[1]\n lon = line[2]\n mac = line[5]\n\n f.write('\\n')\n f.write(' <Placemark>\\n')\n f.write(' <name>%s</name>\\n' % name)\n f.write(' <description>')\n f.write(' <![CDATA[ <table width=\"300\"><tr><td>')\n f.write(' - EESID: %s\\n <br />' % name)\n f.write(' - BBSID: %s\\n <br />' % mac)\n tipo_ap = funcion_tipo_ap(wep)\n f.write(' - Security: %s\\n <br />' % tipo_ap)\n f.write(' - GPS coords.: %s, %s\\n <br />' % (lon, lat))\n f.write(' </td></tr></table> ]]>')\n f.write(' </description>\\n')\n f.write(' <visibility>1</visibility>\\n')\n\n tipo_ap = funcion_tipo_ap(wep) # get AP type\n\n # Draw AP icon\n f.write('<Style>')\n f.write('<IconStyle>')\n f.write(' <Icon><href>https://raw.github.com/rodrigorega/GpsDriveToGoogleEarth/master/img/%s.png</href></Icon>\\n' % tipo_ap)\n f.write('</IconStyle>')\n f.write('</Style>')\n f.write(' <Point><coordinates>%s,%s,45</coordinates></Point>\\n' % (lon, lat))\n f.write(' </Placemark>\\n')\n\n f.write(' </Folder>\\n')\n f.write('</kml>')",
"def line2kml(xy,fname='line.kml',name='line',color='00FFFF',width=3,\n verbose=True):\n \n if type(xy[0]) is tuple:\n x1,x2 = xy[0]\n y1,y2 = xy[1]\n else:\n x1,x2,y1,y2 = xy[0:]\n\n if verbose:\n print(\"Line: %10.6f %10.6f %10.6f %10.6f\" % (x1,x2,y1,y2))\n\n elev = 0.\n kml_text = kml_header(fname)\n\n mapping = {}\n mapping['x1'] = x1\n mapping['x2'] = x2\n mapping['y1'] = y1\n mapping['y2'] = y2\n mapping['elev'] = elev\n mapping['name'] = name\n mapping['desc'] = \" x1 = %s, x2 = %s\\n\" % (f2s(x1),f2s(x2)) \\\n + \" y1 = %s, y2 = %s\" % (f2s(y1),f2s(y2))\n mapping['color'] = color\n mapping['width'] = width\n\n region_text = kml_line(mapping)\n\n kml_text = kml_text + region_text + kml_footer()\n kml_file = open(fname,'w')\n kml_file.write(kml_text)\n kml_file.close()\n if verbose:\n print(\"Created \",fname)",
"def dtopo2kml(dtopo_file_name, dtopo_type, color='8888FF'):\n\n import os\n from clawpack.geoclaw import dtopotools\n dtopo = dtopotools.DTopography()\n dtopo.read(dtopo_file_name, dtopo_type)\n x1 = dtopo.x.min()\n x2 = dtopo.x.max()\n y1 = dtopo.y.min()\n y2 = dtopo.y.max()\n xy = (x1,x2,y1,y2)\n name = os.path.splitext(os.path.split(dtopo_file_name)[-1])[0]\n file_name = '%s.kml' % name\n box2kml(xy, file_name, name, color)",
"def kmlWriter(output_data, output_dir, output_name):\n msg = 'Writing ' + output_name + ' KML output.'\n print '[+]', msg\n logging.info(msg)\n # Instantiate a Kml object and pass along the output filename\n kml = simplekml.Kml(name=output_name)\n for exif in output_data:\n if 'Latitude' in exif.keys() and 'Latitude Reference' in exif.keys() and 'Longitude Reference' in exif.keys() and 'Longitude' in exif.keys():\n\n if 'Original Date' in exif.keys():\n dt = exif['Original Date']\n else:\n dt = 'N/A'\n\n if exif['Latitude Reference'] == 'S':\n latitude = '-' + exif['Latitude']\n else:\n latitude = exif['Latitude']\n\n if exif['Longitude Reference'] == 'W':\n longitude = '-' + exif['Longitude']\n else:\n longitude = exif['Longitude']\n\n kml.newpoint(name=exif['Name'], description='Originally Created: ' + dt,\n coords=[(longitude, latitude)])\n else:\n pass\n kml.save(os.path.join(output_dir, output_name))",
"def export_kmz(self):\n self.export_kml(kmz=True)",
"def write_kml_object(kml_object: fastkml.kml.KML, filename: str) -> None:\n\t\n\twith open(filename, \"w+\") as file:\n\t\tfile.write(kml_object.to_string())",
"def test_convert_csv_to_kml(self):\n import tempfile\n from pykml.util import convert_csv_to_kml\n\n # create a CSV file for testing\n csvfile = tempfile.TemporaryFile(mode='w+')\n csvfile.write('name,snippet,lat,lon\\n')\n csvfile.write('first,The first one,45.0,-90.0\\n')\n csvfile.write('second,The second one,46.0,-89.0\\n')\n csvfile.write('third,\"The third one (with quotes)\",45.0,-88.0\\n')\n csvfile.seek(0)\n\n kmlobj = convert_csv_to_kml(csvfile)\n csvfile.close()\n\n target = etree.fromstring(\n '<kml '\n 'xmlns:atom=\"http://www.w3.org/2005/Atom\" '\n 'xmlns:gx=\"http://www.google.com/kml/ext/2.2\" '\n 'xmlns=\"http://www.opengis.net/kml/2.2\">'\n '<Document>'\n '<Folder>'\n '<name>KmlFile</name>'\n '<Placemark>'\n '<name>first</name>'\n '<Snippet maxLines=\"2\">The first one</Snippet>'\n '<description>'\n '<![CDATA['\n '<table border=\"1\"'\n '<tr><th>name</th><td>first</td></tr>'\n '<tr><th>snippet</th><td>The first one</td></tr>'\n '<tr><th>lat</th><td>45.0</td></tr>'\n '<tr><th>lon</th><td>-90.0</td></tr>'\n '</table>'\n ']]>'\n '</description>'\n '<Point>'\n '<coordinates>-90.0,45.0</coordinates>'\n '</Point>'\n '</Placemark>'\n '<Placemark>'\n '<name>second</name>'\n '<Snippet maxLines=\"2\">The second one</Snippet>'\n '<description><![CDATA[<table border=\"1\"<tr><th>name</th><td>second</td></tr><tr><th>snippet</th><td>The second one</td></tr><tr><th>lat</th><td>46.0</td></tr><tr><th>lon</th><td>-89.0</td></tr></table>]]></description>'\n '<Point>'\n '<coordinates>-89.0,46.0</coordinates>'\n '</Point>'\n '</Placemark>'\n '<Placemark>'\n '<name>third</name>'\n '<Snippet maxLines=\"2\">The third one (with quotes)</Snippet>'\n '<description><![CDATA[<table border=\"1\"<tr><th>name</th><td>third</td></tr><tr><th>snippet</th><td>The third one (with quotes)</td></tr><tr><th>lat</th><td>45.0</td></tr><tr><th>lon</th><td>-88.0</td></tr></table>]]></description>'\n '<Point>'\n '<coordinates>-88.0,45.0</coordinates>'\n '</Point>'\n '</Placemark>'\n '</Folder>'\n '</Document>'\n '</kml>'\n )\n self.assertTrue(compare_xml(target, kmlobj))",
"def saveKML(kmlFile):\n\n tilePath = os.path.basename('map-NYC_heatmap.png')\n north = topLeftLat\n south = bottomRightLat\n east = topLeftLon\n west = bottomRightLon\n \n bytes = KML % (tilePath, north, south, east, west)\n file(kmlFile, \"w\").write(bytes)"
] | [
"0.7400537",
"0.66784334",
"0.64540035",
"0.62201595",
"0.6089382",
"0.60512364",
"0.5901473",
"0.5742026",
"0.5689828",
"0.566298",
"0.5614185",
"0.5548671",
"0.55181766",
"0.54977727",
"0.54335207",
"0.54022497",
"0.53411245",
"0.5290524",
"0.51947296",
"0.5126708",
"0.5105023",
"0.51038444",
"0.5083665",
"0.5041383",
"0.50178385",
"0.49815032",
"0.4978497",
"0.49693292",
"0.49666274",
"0.49582243"
] | 0.68759656 | 1 |
convert to open airspace format | def make_open_airspace_format(self):
# Extract coordinates from KML
for idxline in range(len(self.kml_lines)):
if '<name>' in self.kml_lines[idxline]:
self.name = self.kml_lines[idxline].replace('\t', '').replace('<name>', '').replace('</name>', '').replace('\n','')
if not self.name.startswith('TS'):
self.name = 'TS_' + self.name
print('Type: %s | Name: %s' % (self.as_type, self.name))
if '<coordinates>' in self.kml_lines[idxline]:
self.coordinates_kml = self.kml_lines[idxline + 1].replace('\t', '').replace('\n', '')
break
# start conversion to airspace format
""" AC A
AN TS_Erzgeb
AL FL98
AH FL99
DP 50:26:22 N 012:17:59 E
DP 50:25:25 N 012:18:26 E
DP 50:24:40 N 012:19:01 E
DP 50:24:06 N 012:19:46 E"""
# AC A
self.txt_lines.append('AC %s\n' % self.as_type)
# AN TS_Erzgeb
self.txt_lines.append('AN %s\n' % self.name)
# heights
self.txt_lines.append('AL FL98\n')
self.txt_lines.append('AH FL99\n')
# coordinates
for coo_pt in self.coordinates_kml.split(' ')[:-1]:
# Target format: DP 50:26:22 N 012:17:59 E
lat_long = coo_pt.split(',')
# latitude
latDecAsStr = lat_long[1].split('.')
#if '.' not in latDecAsStr: # take care of case "51" instead of "51.123456"
# latDecAsStr += '.000000'
lat_degree = abs(int(latDecAsStr[0]))
#print(f'latDecAsStr {latDecAsStr}')
if len(latDecAsStr)==1:
latDecAsStr.append('0')
lat_secondDec = (float('0.' + latDecAsStr[1])*60) % 1
lat_minute = round((float('0.' + latDecAsStr[1])*60) - lat_secondDec)
lat_second = round(lat_secondDec*60)
cooString = ('DP %02d:%02d:%02d' %(lat_degree,lat_minute,lat_second))
if latDecAsStr[0].startswith('-'):
cooString += ' S'
else:
cooString += ' N'
# longitude
#print(f'converting lat_long {lat_long}')
# take care of case: no decimal sign included, case "11" instead of "11.123456"
if '.' not in lat_long[0]:
lat_long[0] += '.0'
lonDecAsStr = lat_long[0].split('.')
lon_degree = abs(int(lonDecAsStr[0]))
lon_secondDec = (float('0.' + lonDecAsStr[1]) * 60) % 1
lon_minute = round((float('0.' + lonDecAsStr[1]) * 60) - lon_secondDec)
lon_second = round(lon_secondDec * 60)
cooString += (' %03d:%02d:%02d' % (lon_degree, lon_minute, lon_second))
if lonDecAsStr[0].startswith('-'):
cooString += ' W'
else:
cooString += ' E'
cooString += '\n'
self.txt_lines.append(cooString) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def make_json_airspace_format(self):\n # The previous fct make_open_airspace_format already stored, coordinates_kml, name and type\n # This data is collected in an dictionary, which then is stored as json.\n # initialize dict\n coordinates_as_list_of_floats = []\n # run through coordinates\n coordinates_as_list_of_floats = []\n for coo_pt in self.coordinates_kml.split(' ')[:-1]:\n lat_long = coo_pt.split(',')\n coordinates_as_list_of_floats.append([float(lat_long[1]), float(lat_long[0])])\n # make json dict\n # rename name if not thermal space\n if self.name.startswith('TS_') and not (self.as_type == 'A' or self.as_type == 'B'):\n name_for_json = self.name[3:]\n else:\n name_for_json = self.name\n # rename airspace type for json:\n if self.as_type == 'A':\n self.as_type = 'Good_thermals'\n if self.as_type == 'B':\n self.as_type = 'Bad_thermals'\n self.json_dict = {\"AL\": \"FL98\", \"AH\": \"FL99\", \"AC\": self.as_type, \"AN\": name_for_json, \"data\": coordinates_as_list_of_floats}",
"def open_airspace_format_2_kml(self, source_file_txt):\n # load template for kml file\n self.load_kml_template(self.full_path_kml_template)\n # load airspace source\n self.load_airspace_open_air_format(source_file_txt)\n\n self.kml_lines = self.kml_template['header']\n self.kml_lines.extend(self.kml_template['good_subdivided']['head'])\n # collect all A and B kml lines\n kml_A = []\n kml_B = []\n # transform airspaces and attach to A and B collect-lists\n for airspace in self.airspaces:\n airspace.make_kml_format(self.kml_template)\n if airspace.as_type == 'A':\n kml_A.extend(airspace.kml_lines)\n if airspace.as_type == 'B':\n kml_B.extend(airspace.kml_lines)\n\n self.kml_lines.extend(kml_A)\n self.kml_lines.extend(self.kml_template['good_subdivided']['tail'])\n # start B part\n self.kml_lines.extend(self.kml_template['bad_subdivided']['head'])\n self.kml_lines.extend(kml_B)\n self.kml_lines.extend(self.kml_template['bad_subdivided']['tail'])\n\n full_path_kml = source_file_txt[:-4] + '_converted.kml'\n # uisave dialog\n full_path_kml = filesavebox(default=full_path_kml, filetypes=\"*.kml\")\n if full_path_kml is None:\n print('Airspace conversion was aborted by the user')\n quit()\n\n # write to file\n f = open(full_path_kml, 'w')\n f.writelines(self.kml_lines)\n f.close()\n print('Resulting KML files was saved to: %s' % full_path_kml)",
"def kml_2_open_airspace_and_json_format(self, full_path):\n # read file\n f = open(full_path,'r')\n kml = f.readlines()\n f.close()\n # find airspaces\n \"\"\"Placemark >\n < name > Bremen - Blumenthal\n Thermikplatte < / name >\n < styleUrl > # inline10</styleUrl>\n < Polygon >\n < tessellate > 1 < / tessellate >\n < outerBoundaryIs >\n < LinearRing >\n < coordinates >\n 8.529121049900063, 53.19549566929423, 0\n 8.52324583919868, 53.21131939607898, 0\n 8.545439298799483, 53.23055800702935, 0\n 8.588991466114615, 53.23047069814625, 0\n 8.575289966189502, 53.20745451706468, 0\n 8.560633120477348, 53.19724609335408, 0\n 8.529121049900063, 53.19549566929423, 0\n < / coordinates >\n \n < / LinearRing >\n < / outerBoundaryIs >\n < / Polygon >\n < / Placemark >\"\"\"\n container = []\n idxLine = 0\n did_not_pass_main_folder = True\n list_of_airspace_types_included = []\n while idxLine < len(kml):\n #print(kml[idxLine])\n #if '<Folder>' in kml[idxLine] and did_not_pass_main_folder:\n # # we have to jump over the first folder\n # print(f'Reading everything inside folder: {kml[idxLine]}')\n # did_not_pass_main_folder = False\n if '<Folder>' in kml[idxLine]: # begin of airspace\n as_type = kml[idxLine+1].replace('\\t','').replace('<name>','').replace('</name>\\n','') # <name>B</name>\n print('Reading AS-types: ' + as_type)\n list_of_airspace_types_included.append(as_type)\n #if not (as_type == 'A' or as_type == 'B'):\n # print('#### Check Folder / Airspace Types, must be \"A\" or \"B\" and try again (current %s)' % as_type)\n # msgbox('Check Folder / Airspace Types, are not \"A\" or \"B\" (current %s). Airspace E will be used for export.' % as_type)\n # as_type = 'E'\n\n if '<Placemark' in kml[idxLine]: # begin of airspace\n container = []\n if '</Placemark' in kml[idxLine]: # end of airspace\n # make sure only Polygons are stored\n for as_line in container:\n if '<Polygon>' in as_line:\n idx_lookAt_start = None\n for idx, line_of_container in enumerate(container):\n if \"<LookAt>\" in line_of_container:\n idx_lookAt_start = idx\n if \"</LookAt>\" in line_of_container:\n idx_lookAt_end = idx\n # Remove lookAt lines if necessary\n if idx_lookAt_start:\n container = container[0:idx_lookAt_start] + container[idx_lookAt_end+1::] # cut out look at part\n # append airspace to airspace list as airspace class\n self.airspaces.append(Airspace(lines=container, file_type='kml', as_type=as_type))\n container.append(kml[idxLine])\n idxLine += 1\n print('Loaded %d airspaces from KML-file (%s)' %(len(self.airspaces),full_path))\n # summary\n outlines = ['* KML conversion file, rename this line']\n json_dict = {\"circles\": [], \"polygons\": []}\n for airspace in self.airspaces:\n # prepare open-airspace formate\n outlines.append('\\n\\n') # separate airspaces\n outlines.extend(airspace.txt_lines)\n # prepare json\n json_dict['polygons'].append(airspace.json_dict)\n\n # write open airspace format\n target_path = full_path[:-4] + '_converted.txt'\n # uisave dialog\n\n target_path = filesavebox(default=target_path, filetypes=\"*.txt\")\n if target_path is None:\n print('Airspace conversion was aborted by the user')\n quit()\n\n f = open(target_path,'w')\n f.writelines(outlines)\n f.close()\n print('Result was written to: %s' % target_path)\n\n # write json:\n target_path_json = target_path[:-4] + '.json'\n\n json_string = json.dumps(json_dict)\n json_file = open(target_path_json, \"w\")\n json_file.write(json_string)\n json_file.close()\n\n # write list of airspace files for index.html for leaflet map\n print('The 
following airspace types have been converted:')\n print(list_of_airspace_types_included)",
"def airports(osm_path): \n return (retrieve(osm_path,'multipolygons',['aeroway'],**{'aeroway':[\"='aerodrome'\"]})).rename(columns={'aeroway': 'asset'})",
"def marshall(self):\n try:\n data = [\"x02\"] #start token\n data.extend(ac.getCarState(0, acsys.CS.CurrentTyresCoreTemp)) #0-3 - Core tyre temperatures, Degrees celcius\n data.extend(info.physics.tyreWear) #4-7 #tyre wear\n data.extend(ac.getCarState(0, acsys.CS.DynamicPressure)) #8-11 pressure of each tyre in PSI\n data.extend(ac.getCarState(0, acsys.CS.TyreDirtyLevel)) #12-15 amount of dirt on each tyre\n data.append(ac.getCarState(0, acsys.CS.SpeedMS)) #16 speed in metres/sec\n data.append(ac.getCarState(0, acsys.CS.Gear)) #17 gear number\n data.append(ac.getCarState(0, acsys.CS.BestLap)) #18 best lap time in ms\n data.append(ac.getCarState(0, acsys.CS.RPM)) #19 rpm\n data.append(ac.getCarState(0, acsys.CS.LapCount)) #20 lap count\n data.append(ac.getCarState(0, acsys.CS.LapInvalidated)) #21 is lap invalid? 0-no, 1-yes\n data.append(ac.getCarState(0, acsys.CS.LapTime)) #22 current lap time in ms\n data.append(ac.getCarState(0, acsys.CS.LastLap)) #23 last lap in ms\n data.append(ac.getCarState(0, acsys.CS.PerformanceMeter)) #24 delta time in ms from best lap?? (haven't checked)\n data.append(ac.getCarState(0, acsys.CS.Steer)) #25 steering rotation in radians\n data.append(ac.getCarName(0)) #26 name of car being driven by player\n data.append(ac.getTrackName(0)) #27 track name\n\n data.append(\"x04\") #end token\n except Exception as e:\n ac.console(\"{}\".format(e))\n return \",\".join(str(v) for v in data).encode()",
"def make_kml_format(self,kml_template):\n if self.as_type == 'A':\n self.kml_lines = kml_template['good_subdivided']['placemark']\n elif self.as_type == 'B':\n self.kml_lines = kml_template['bad_subdivided']['placemark']\n else:\n print('Unknown airspace type')\n # get idx of name and coordinates\n idxLine = 0\n while idxLine < len(self.kml_lines):\n #print(self.kml_lines[idxLine]\n if self.kml_lines[idxLine].startswith('\\t\\t\\t\\t<name>'): # begin of airspace\n idx_name = idxLine\n if '\\t\\t\\t\\t\\t\\t\\t<coordinates>\\n' in self.kml_lines[idxLine]: # begin of airspace\n idx_coordinates = idxLine+1\n idxLine += 1\n # transform coordinates\n # add all coordinates: Format is:\n # source: 'DP 50:26:22 N 012:17:59 E\\n'\n # target: 9.025830271397426,53.46493577242719,0 8.986157446488383,53.46952117358134,0\n coo_list = [] # collect list of coorinates as strings\n for line in self.txt_lines:\n if line.startswith('AN'):\n self.name = line[3:].replace('\\n','')\n self.kml_lines[idx_name] = '\\t\\t\\t\\t<name>%s</name>\\n' % self.name\n\n if line.startswith('DP'):\n # lon\n lon_deg = float(line[14:17])\n lon_min = float(line[18:20])\n lon_sec = float(line[21:23])\n lon_dec = (lon_sec / 60 + lon_min) / 60 + lon_deg\n if line[24] == 'W':\n lon_dec *= -1 # negative if west\n # lat\n lat_deg = float(line[3:5])\n lat_min = float(line[6:8])\n lat_sec = float(line[9:11])\n lat_dec = (lat_sec / 60 + lat_min) / 60 + lat_deg\n if line[12] == 'S':\n lat_dec *= -1 # negative if west\n # attach coordinates\n coo_list.append('%1.16f,%1.16f,0 ' % (lon_dec,lat_dec))\n # store for later plotting\n self.lat_dec.append(lat_dec)\n self.lon_dec.append(lon_dec)\n\n # make sure that shape is closed --> first an last point must be the same\n if coo_list[0] != coo_list[-1]:\n coo_list.append(coo_list[0])\n self.lat_dec.append(self.lat_dec[0])\n self.lon_dec.append(self.lon_dec[0])\n\n # write coordinate strings into kml\n self.kml_lines[idx_coordinates] = '\\t\\t\\t\\t\\t\\t\\t\\t' # is prefix. Coordinates to be added as string below\n for pt in coo_list:\n self.kml_lines[idx_coordinates] += pt\n print('Converted airspace %s' % self.name)",
"def igra2_ascii_to_dataframe(file=''):\n if debug:\n print(\"Running igra2_ascii_to_dataframe for: \", file) \n \n data = check_read_file(file=file, read=True)\n #source_file = [l for l in file.split('/') if '.txt' in l][0]\n read_data = [] # Lists containing the raw data from the ascii file, and the observation dates\n \"\"\" Data to be extracted and stored from the igra2 station files \n Some info is contained in the header of each ascent, some in the following data \"\"\"\n\n \"\"\" Initialize the variables that can be read from the igra2 files \"\"\"\n ident,year,month,day,hour,reltime,p_src,np_src,lat, lon = np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan \n lvltyp1,lvltyp2,etime,press,pflag,gph,zflag,temp,tflag,rh,dpdep,wdir,wspd = np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan # initialize to zeros\n stations_id = []\n idate = np.nan\n count = 0\n head_count = 0\n \n obs_id = 0\n \n def make_release_time(date_time, hour, release):\n \"\"\" build a sonde release time \n ex 2019 02 20 00 2349 \n ex 2019 01 10 00 0011 \n They round the release time to the closest hour. \n It can be the same day or the following !!!\n date_time = date_time pytohn object, \n date, time, release = original strings \n \"\"\"\n release_h = int(release[:2])\n release_m = int(release[2:4])\n \n if release_h == 99:\n return 0 #largest integer number int 64 \n \n else:\n if release_m == 99:\n release_m = 0\n release_date_time = date_time.replace(hour= release_h, minute= release_m) \n \n \"\"\" Here, I have to subtract one day to the release time stamp if the hour of the time stamp is in th evening,\n but the nominal time is reported at midnight hence in the following day. For example 2019 02 20 00 2349 from file VMM00048820 \"\"\"\n if hour == '00':\n if release_h > 20:\n release_date_time = release_date_time - timedelta(days=1)\n else:\n pass\n \n return release_date_time \n \n \n for i, line in enumerate(data):\n if line[0] == '#':\n head_count = head_count +1 \n # Info from the Header line of each ascent \n ident = line[1:12] # station identifier\n ident = ident[6:12]\n if ident not in stations_id:\n stations_id.append(ident)\n \n year = line[13:17] # year, months, day, hour of the observation\n month = line[18:20]\n day = line[21:23]\n hour = line[24:26] \n reltime = line[27:31] # release time of the sounding.\n numlev = int(line[32:36]) # number of levels in the sounding == number of data recorded in the ascent\n p_src = line[37:45] # data source code for the pressure levels \n np_src = line[46:54] # data source code for non-pressure levels\n lat = int(line[55:62]) / 10000. 
# latitude and longitude\n lon = int(line[63:71]) / 10000.\n #observation_id = i\n if int(hour) == 99:\n time = reltime + '00'\n else:\n time = hour + '0000'\n \n if '99' in time:\n time = time.replace('99', '00')\n\n idate = datetime.strptime(year + month + day + time, '%Y%m%d%H%M%S') # constructed according to CDM\n \n release_time = make_release_time(idate, hour, reltime) # making the release time \n \n \n iday = int(year + month + day)\n count = count + 1\n else:\n # Data of each ascent\n lvltyp1 = int(line[0]) # 1- 1 integer major level type indicator\n lvltyp2 = int(line[1]) # 2- 2 integer minor level type indicator\n etime = int(line[3:8]) # 4- 8 integer elapsed time since launch\n press = int(line[9:15]) # 10- 15 integer reported pressure\n \n if press == -9999:\n press = np.nan\n \n pflag = line[15] # 16- 16 character pressure processing flag\n \n gph = int(line[16:21]) # 17- 21 integer geopotential height [m]\n \n if gph == -9999 or gph == -8888: # reading the values andh check if they are missing or removed as -9999 or -8888 before dividing by 10 as the instructions say \n gph = np.nan # 23- 27 integer temperature, [Celsius to Kelvin ] \n \n zflag = line[21] # 22- 22 character gph processing flag, \n \n temp = int(line[22:27]) \n if temp != -9999 and temp != -8888: # reading the values andh check if they are missing or removed as -9999 or -8888 before dividing by 10 as the instructions say \n temp = temp / 10. + 273.15 # 23- 27 integer temperature, [Celsius to Kelvin ] \n else:\n temp = np.nan \n \n tflag = line[27] # 28- 28 character temperature processing flag\n \n rh = int(line[28:33]) # 30- 34 integer relative humidity [%] \n if rh != -8888 and rh != -9999:\n rh = rh / 1000. # converting from percentage to absolute ratio \n else:\n rh = np.nan\n \n dpdp = int(line[34:39]) \n if dpdp != -9999 and dpdp !=-8888: \n dpdp = dpdp / 10. # 36- 40 integer dew point depression (degrees to tenth e.g. 11=1.1 C) \n else:\n dpdp = np.nan \n \n wdir = int(line[40:45]) # 41- 45 integer wind direction (degrees from north, 90 = east)\n if wdir == -8888 or wdir == -9999 :\n wdir = np.nan \n \n wspd = int(line[46:51]) # 47- 51 integer wind speed (meters per second to tenths, e.g. 11 = 1.1 m/s [m/s]\n if wspd != -8888 and wspd != -9999 :\n wspd = wspd / 10. 
\n else:\n wspd = np.nan \n if reltime == 9999.0:\n reltime = np.nan \n \n z_type = np.nan\n if not (np.isnan(press)):\n z_type = 1\n elif (np.isnan(press) and not np.isnan(gph) ) :\n z_type = 2 \n \n for value,var in zip([gph, temp, wspd, wdir, rh, dpdp], ['gph', 'temperature', 'wind_speed', 'wind_direction', 'relative_humidity' , 'dew_point_depression'] ):\n obs_id = obs_id +1 \n if not np.isnan(press): # when pressure is available, z_coord== pressure and z_type==1 \n z_type = 1 \n read_data.append ( ( 'IGRA2'.rjust(10), head_count, int(obs_id), idate, iday, ident, lat, lon, press, value, cdmvar_dic[var]['cdm_var'], int(cdmvar_dic[var]['cdm_unit']), numlev, z_type, release_time ) )\n elif (np.isnan(press) and not np.isnan(gph) ) : # when pressure is not available, z_coord== gph and z_type==2 \n z_type = 2 \n read_data.append ( ( 'IGRA2'.rjust(10), head_count, int(obs_id), idate, iday, ident, lat, lon, gph, value, cdmvar_dic[var]['cdm_var'], int(cdmvar_dic[var]['cdm_unit']), numlev, z_type, release_time ) )\n else:\n z_type = -2147483648 \n read_data.append ( ( 'IGRA2'.rjust(10), head_count, int(obs_id), idate, iday, ident, lat, lon, press, value, cdmvar_dic[var]['cdm_var'], int(cdmvar_dic[var]['cdm_unit']), numlev, z_type, release_time ) )\n\n\n df = pd.DataFrame(data= read_data, columns= column_names_igra2)\n \n df['observation_id'] = np.chararray.zfill( (df['observation_id'].astype(int)) .astype('S'+str(id_string_length ) ), id_string_length ) #converting to fixed length bite objects \n df['report_id'] = np.chararray.zfill( (df['report_id'].astype(int)).astype ('S'+str(id_string_length ) ), id_string_length )\n \n df = df.replace([-999.9, -9999, -999, -999.0, -99999.0, -99999.9, 99999.0, -99999.00 ], np.nan)\n \n df = df.sort_values(by = ['record_timestamp', 'vertco_reference_1@body' ] ) # FF check here !!!! \n \n return df, stations_id",
"def alom():\n #\n # this is the alpha\n inlist = list(\"begin\") # change data into a list element\n outlist[0:5] = inlist # place data in the list in the correct place\n # print(\"\".join(outlist)) # see result\n #\n # this is the omega\n inlist = list(\"end\")\n #\n # change data into a list element\n outlist[1247:1250] = inlist # place data in the list in the correct place\n outstr = \"\".join(outlist)\n print(outstr)\n print(len(outstr))\n # of = open(\"workfile\", \"w\")\n # of.write(outstr)",
"def isochrone_to_aa(*args, **kwargs):\n return isochrone_xv_to_aa(*args, **kwargs)",
"def __init__(self, full_path_of_source=''):\n if len(full_path_of_source) == 0:\n full_path_of_source = fileopenbox(default=os.path.curdir, filetypes=[\"*.txt\", \"*.kml\"])\n if full_path_of_source is None:\n print('Airspace conversion was aborted by the user')\n quit()\n # set template (this should not be changed)\n self.full_path_kml_template = r'Thermal_Map_Template5.kml' # set template file here: Folder must be named \"good\" and \"bad\"\n\n self.airspaces = [] # airspace container\n self.kml_template = {'header': [], 'good': [], 'bad': [], # will be filled after loading template\n 'good_subdivided': {'head':[], 'placemark': [], 'tail': []},\n 'bad_subdivided': {'head':[], 'placemark': [], 'tail': []}}\n self.txt_lines = [] # airspace file in open airspace format\n self.kml_lines = [] # airspace file in kml format\n \"\"\" handle conversion from and to KML / airspace format\"\"\"\n if full_path_of_source.lower().endswith('.kml'):\n self.kml_2_open_airspace_and_json_format(full_path_of_source)\n if full_path_of_source.lower().endswith('.txt'):\n self.open_airspace_format_2_kml(full_path_of_source)\n self.plot_all() # works for now only for TXT input",
"def __init__(self):\n self.__deviceselected__ = \"SR-DMS4AP{LOCALBUMP}DEV:Sel-SP\"\n self.__source__ = \"SR-DMS4AP{LOCALBUMP}S-SP\"\n self.__plane__ = \"SR-DMS4AP{LOCALBUMP}PLANE-SP\"\n #self.__xshift__ = \"SR-DMS4AP{LOCALBUMP}SHIFT:X-SP\"\n #self.__yshift__ = \"SR-DMS4AP{LOCALBUMP}SHIFT:Y-SP\"\n #self.__xangle__ = \"SR-DMS4AP{LOCALBUMP}ANGLE:X-SP\"\n #self.__yangle__ = \"SR-DMS4AP{LOCALBUMP}ANGLE:Y-SP\"\n self.__shift__ = \"SR-DMS4AP{LOCALBUMP}SHIFT-SP\"\n self.__angle__ = \"SR-DMS4AP{LOCALBUMP}ANGLE-SP\"\n # with all offsets\n self.__anglerb__ = \"SR-DMS4AP{LOCALBUMP}ANGLE-I\"\n self.__positionrb__ = \"SR-DMS4AP{LOCALBUMP}POS-I\"\n # with BBA offset only\n self.__anglerb0__ = \"SR-DMS4AP{LOCALBUMP}ANGLE:BBA-I\"\n self.__positionrb0__ = \"SR-DMS4AP{LOCALBUMP}POS:BBA-I\"\n\n self.__bpmposition__ = \"SR-DMS4AP{LOCALBUMP:BPM}Pos-I\"\n self.__bpmorbitx__ = \"SR-DMS4AP{LOCALBUMP:BPM}ORB:X-I\"\n self.__bpmorbity__ = \"SR-DMS4AP{LOCALBUMP:BPM}ORB:Y-I\"\n self.__bpmorbitx0__ = \"SR-DMS4AP{LOCALBUMP:BPM}ORB:X0-I\"\n self.__bpmorbity0__ = \"SR-DMS4AP{LOCALBUMP:BPM}ORB:Y0-I\"\n\n self.__correctorposition__ = \"SR-DMS4AP{LOCALBUMP:COR}Pos-I\"\n self.__hcorrectorcurrent__ = \"SR-DMS4AP{LOCALBUMP:HCOR}PS-SP\"\n self.__hcorrectordiff__ = \"SR-DMS4AP{LOCALBUMP:HCOR}PS:Delta-SP\"\n self.__vcorrectorcurrent__ = \"SR-DMS4AP{LOCALBUMP:VCOR}PS-SP\"\n self.__vcorrectordiff__ = \"SR-DMS4AP{LOCALBUMP:VCOR}PS:Delta-SP\"\n\n self.__undo__ = \"SR-DMS4AP{LOCALBUMP}Enbl:Undo-Cmd\"\n self.__apply__ = \"SR-DMS4AP{LOCALBUMP}Enbl-Cmd\"\n self.__status__ = \"SR-DMS4AP{LOCALBUMP}TS-I\"\n self.__idposinfo__ = \"SR-DMS4AP{LOCALBUMP}S-I\"\n self.__srcposition__ = \"SR-DMS4AP{LOCALBUMP}SRC-SP\"",
"def export_db_macserial(db, path, year):\n\n with open(path, 'w') as fh:\n print('#ifndef GENSERIAL_MODELINFO_AUTOGEN_H', file=fh)\n print('#define GENSERIAL_MODELINFO_AUTOGEN_H\\n', file=fh)\n print('// DO NOT EDIT! This is an autogenerated file.\\n', file=fh)\n print('#include \"macserial.h\"\\n', file=fh)\n\n print('typedef enum {', file=fh)\n\n for info in db:\n print(' {}, // {}'.format(\n info['SystemProductName'].replace(',', '_'),\n info['Specifications']['CPU'][0]\n ), file=fh)\n\n print('} AppleModel;\\n', file=fh)\n print('#define APPLE_MODEL_MAX {}\\n'.format(len(db)), file=fh)\n\n print('static PLATFORMDATA ApplePlatformData[] = {', file=fh)\n for info in db:\n print(' {{ \"{}\", \"{}\" }},'.format(\n info['SystemProductName'],\n info['SystemSerialNumber']\n ), file=fh)\n\n print('};\\n', file=fh)\n\n print('#define APPLE_MODEL_CODE_MAX {}'.format(max(len(info['AppleModelCode']) for info in db)), file=fh)\n print('static const char *AppleModelCode[][APPLE_MODEL_CODE_MAX] = {', file=fh)\n\n for info in db:\n print(' /* {:14} */ {{\"{}\"}},'.format(\n info['SystemProductName'],\n '\", \"'.join(info['AppleModelCode'])\n ), file=fh)\n\n print('};\\n', file=fh)\n\n print('#define APPLE_BOARD_CODE_MAX {}'.format(max(len(info['AppleBoardCode']) for info in db)), file=fh)\n print('static const char *AppleBoardCode[][APPLE_BOARD_CODE_MAX] = {', file=fh)\n\n for info in db:\n print(' /* {:14} */ {{\"{}\"}},'.format(\n info['SystemProductName'],\n '\", \"'.join(info['AppleBoardCode'])\n ), file=fh)\n\n print('};\\n', file=fh)\n\n print('#define APPLE_MODEL_YEAR_MAX {}'.format(max(len(info['AppleModelYear']) for info in db)), file=fh)\n print('static uint32_t AppleModelYear[][APPLE_MODEL_YEAR_MAX] = {', file=fh)\n for info in db:\n print(' /* {:14} */ {{{}}},'.format(\n info['SystemProductName'],\n ', '.join(str(year) for year in info['AppleModelYear'])\n ), file=fh)\n\n print('};\\n', file=fh)\n\n print('static uint32_t ApplePreferredModelYear[] = {', file=fh)\n for info in db:\n print(' /* {:14} */ {},'.format(\n info['SystemProductName'],\n info.get('MacserialModelYear', 0)\n ), file=fh)\n\n print('};\\n', file=fh)\n\n print('#endif // GENSERIAL_MODELINFO_AUTOGEN_H', file=fh)",
"def xephemFormat(self):\n line = []\n #Field 1: names\n names = [self.getName()]\n identifiers = self.getIdentifiers()\n if identifiers[0] is not None:\n names.append(identifiers[0])\n for i in range(1,4):\n if identifiers[i] is not None:\n names.extend(identifiers[i])\n line.append(\"|\".join(names))\n\n #Field 2: type designation\n objType = self.getType()\n if objType in (\"Galaxy Pair\", \"Galaxy Triplet\", \"Group of galaxies\"):\n line.append(\"f|A\")\n elif objType == \"Globular Cluster\":\n line.append(\"f|C\")\n elif objType == \"Double star\":\n line.append(\"f|D\")\n elif objType in (\"HII Ionized region\", \"Nebula\"):\n line.append(\"f|F\")\n elif objType == \"Galaxy\":\n if self.getHubble().startswith(\"S\"):\n line.append(\"f|G\")\n else:\n line.append(\"f|H\")\n elif objType == \"Dark Nebula\":\n line.append(\"f|K\")\n elif objType in (\"Emission Nebula\", \"Reflection Nebula\"):\n line.append(\"f|N\")\n elif objType in (\"Association of stars\", \"Open Cluster\"):\n line.append(\"f|O\")\n elif objType == \"Planetary Nebula\":\n line.append(\"f|P\")\n elif objType == \"Supernova remnant\":\n line.append(\"f|R\")\n elif objType == \"Star\":\n line.append(\"f|S\")\n elif objType == \"Star cluster + Nebula\":\n line.append(\"f|U\")\n else:\n line.append(\"f\")\n\n #Field 3: Right Ascension\n line.append(self.getRA())\n\n #Field 4: Declination\n line.append(self.getDec())\n\n #Field 5: Magnitude\n #We use the first available magnitude in the sequence b,v,j,h,k\n for mag in self.getMagnitudes():\n if mag is not None:\n line.append(str(mag))\n break\n\n #Field 6: optional Epoch, we let it empty\n line.append(\"\")\n\n #Field 7: Dimensions\n dimensions = []\n #Xephem format wants axes espressed in arcsec, we have arcmin\n for value in (self.getDimensions()[0],self.getDimensions()[1]):\n if value is not None:\n dimensions.append(str(value*60))\n else:\n dimensions.append(\"\")\n if self.getDimensions()[2] is not None:\n dimensions.append(str(value))\n else:\n dimensions.append(\"\")\n line.append(\"|\".join(dimensions))\n\n return \",\".join(line)",
"def mac_ntoa(mac):\n return '%.2x:%.2x:%.2x:%.2x:%.2x:%.2x' % tuple(map(ord, list(mac)))",
"def american_date_to_iso(connection):\n _update_date_by_regexp(connection=connection,\n regexp=\"^[0-9]{2}/[0-9]{2}/[0-9]{4}$\",\n new_value=\"\"\"CONCAT_WS('-',\n SUBSTR(cav.attribute_value, 7, 4),\n SUBSTR(cav.attribute_value, 1, 2),\n SUBSTR(cav.attribute_value, 4, 2))\n \"\"\")",
"def to_ole_auto(self):\n try:\n dt_obj = duparser.parse(timestamp)\n self.out_ole_auto = \"{0:.12f}\".format((dt_obj - self.epoch_1899).total_seconds() / 86400)\n except Exception as e:\n if not args.log:\n pass\n else:\n logging.error(str(type(e)) + \",\" + str(e))\n self.out_ole_auto = False\n return self.out_ole_auto",
"def to_ole_auto(self):\n ts_type = self.ts_types['ole_auto']\n try:\n dt_obj = duparser.parse(self.timestamp)\n if hasattr(dt_obj.tzinfo, '_offset'):\n dt_tz = dt_obj.tzinfo._offset.total_seconds()\n dt_obj = duparser.parse(self.timestamp, ignoretz=True)\n else:\n dt_tz = 0\n self.out_ole_auto = \"{0:.12f}\".format(((dt_obj - self.epoch_1899).total_seconds() - int(dt_tz)) / 86400)\n ts_output = str(\"{}\\t\\t{}\".format(ts_type, self.out_ole_auto))\n except Exception:\n exc_type, exc_obj, exc_tb = sys.exc_info()\n print(str(exc_type) + \" - \" + str(exc_obj) + \" - line \" + str(exc_tb.tb_lineno))\n self.out_ole_auto = ts_output = False\n return self.out_ole_auto, ts_output",
"def operator_aircraft_info(self, apath):\r\n opfolder_path = apath.split(\"0 NEW\")[-1]\r\n opfolder = opfolder_path.replace(\"/\", \"\")\r\n opfolder = opfolder.replace(\"\\\\\", \"\")\r\n opfolder = opfolder.split(\" \")\r\n operator = opfolder[0].strip()\r\n aircraft = opfolder[1].strip()\r\n return operator, aircraft",
"def octa_cox_data_to_ss(data):\n t = pandas.Series((\n data['TIME_StartTime'] -\n data['TIME_StartTime'].values[0]) / 1.0e6, name='t, sec')\n xh = pandas.DataFrame(\n data[[\n 'LPOS_X', 'LPOS_Y', 'LPOS_Z',\n 'LPOS_VX', 'LPOS_VY', 'LPOS_VZ',\n 'ATT_Roll', 'ATT_Pitch', 'ATT_Yaw',\n 'ATT_RollRate', 'ATT_PitchRate', 'ATT_YawRate']].values,\n columns=[\n 'X', 'Y', 'Z', 'V_X', 'V_Y', 'V_Z',\n 'Phi', 'Theta', 'Psi',\n 'P', 'Q', 'R'], index=t)\n y = pandas.DataFrame(\n data[[\n 'GPS_Lat', 'GPS_Lon', 'GPS_Alt',\n 'SENS_BaroAlt',\n 'IMU1_AccX', 'IMU1_AccY', 'IMU1_AccZ',\n 'IMU1_GyroX', 'IMU1_GyroY', 'IMU1_GyroZ',\n 'IMU1_MagX', 'IMU1_MagY', 'IMU1_MagZ']].values,\n columns=[\n 'GPS_Lat', 'GPS_Lon', 'GPS_Alt',\n 'Baro_Alt',\n 'Acc_X', 'Acc_Y', 'Acc_Z',\n 'Gyro_X', 'Gyro_Y', 'Gyro_Z',\n 'Mag_X', 'Mag_Y', 'Mag_Z'], index=t)\n u_raw = pandas.DataFrame(\n ((data[[\n 'OUT0_Out0', 'OUT0_Out1', 'OUT0_Out2',\n 'OUT0_Out3', 'OUT0_Out4', 'OUT0_Out5', 'OUT0_Out6',\n 'OUT0_Out7']] - 1000.0) / 1000.0).values,\n columns=['1', '2', '3', '4', '5', '6', '7', '8'], index=t)\n c_mix_octo = np.array([\n [1, 1, 1, 1, 1, 1, 1, 1], # thrust\n [-1, 1, 1, -1, -1, 1, 1, -1], # roll\n [-1, -1, 1, 1, -1, -1, 1, 1], # pitch\n [1, -1, 1, -1, 1, -1, 1, -1], # yaw\n ]) / 8.0\n u = pandas.DataFrame(\n c_mix_octo.dot(u_raw.T).T,\n columns=['thrust', 'roll', 'pitch', 'yaw'],\n index=t)\n return t, xh, u, y, u_raw",
"def read_aircraft_nav_into_awot(\n AmprTB, project='OLYMPEX', platform='NASA ER-2', flight_number=None):\n\n if not hasattr(AmprTB, 'Aircraft_Nav'):\n print('No aircraft information in argument, failing ...')\n return\n\n flight = {}\n varlist = ['latitude', 'longitude', 'altitude', 'time']\n for var in varlist:\n flight[var] = {}\n flight['latitude']['data'] = AmprTB.Aircraft_Nav['GPS Latitude']\n flight['longitude']['data'] = AmprTB.Aircraft_Nav['GPS Longitude']\n flight['altitude']['data'] = AmprTB.Aircraft_Nav['GPS Altitude']\n\n ampr_datetime = []\n for et in AmprTB.Epoch_Time:\n ampr_datetime.append(dt.datetime(1970, 1, 1) +\n dt.timedelta(seconds=np.float(et)))\n flight['time']['data'] = ampr_datetime\n\n for var in varlist:\n flight[var]['data'] = np.ma.masked_array(\n flight[var]['data'], mask=False)\n flight['flight_number'] = flight_number\n flight['project'] = project\n flight['platform'] = platform\n flight['Uwind'] = None\n flight['Vwind'] = None\n return flight",
"def vac2air(w):\n return w / (1.0 + 2.735182E-4 + 131.4182 / w ** 2 + 2.76249E8 / w ** 4)",
"def to_ole_be(self):\n try:\n dt_obj = duparser.parse(timestamp)\n delta = (dt_obj - self.epoch_1899).total_seconds() / 86400\n conv = struct.unpack('<Q', struct.pack('<d', delta))[0]\n self.out_ole_be = str(hexlify(struct.pack('>Q', conv))).strip(\"b'\").strip(\"'\")\n except Exception as e:\n if not args.log:\n pass\n else:\n logging.error(str(type(e)) + \",\" + str(e))\n self.out_ole_be = False\n return self.out_ole_be",
"def to_amdl(self):\n from .adipls import ADIPLSStellarModel\n\n ioff = (0 if self.r[0] < 1e6 else 1) # mimic ADIPLS's FGONG to AMDL script\n A = np.zeros((len(self.data) + ioff, 6))\n\n # we can safely ignore division by 0 here\n with np.errstate(divide='ignore', invalid='ignore'):\n A[ioff:,0] = self.x\n A[ioff:,1] = self.q/self.x**3\n A[ioff:,2] = self.Vg\n A[ioff:,3] = self.Gamma_1\n A[ioff:,4] = self.AA\n A[ioff:,5] = self.U\n\n A[0,0] = 0.\n A[0,1] = 4.*np.pi/3.*self.rho[0]*self.R**3/self.M\n A[0,2] = 0.\n A[0,3] = self.Gamma_1[0]\n A[0,4] = 0.\n A[0,5] = 3.\n\n D = np.zeros(8)\n D[0] = self.M\n D[1] = self.R\n D[2] = self.P[0]\n D[3] = self.rho[0]\n D[4] = 4.*np.pi/3.*self.G*(self.rho[0]*self.R)**2/(self.P[0]*self.Gamma_1[0])\n D[5] = D[4]\n D[6] = -1.0\n D[7] = 0.0\n\n return ADIPLSStellarModel(D, A, G=self.G)",
"def data_airline():\n return load_airline()",
"def to_ir(self):",
"def format_data(self, raw_data):\n opz = raw_data.copy()\n opz['datetime'] = pd.to_datetime(opz['Datum-tijd'], format='%Y-%m-%dT%H:%M:%SZ')\n opz.drop(['Datum-tijd'],axis=1, inplace=True)\n opz['dag']=opz['datetime'].dt.day\n opz['tijd'] = opz['datetime'].dt.time\n #voeg open/dicht data toe en bepaal momenten waarop dit wisselt\n opz['Opzetstuk Noord (°)'] = opz['Opzetstuk Noord (°)'].str.replace(',', '.').astype(float)\n opz['Opzetstuk Zuid (°)'] = opz['Opzetstuk Zuid (°)'].str.replace(',', '.').astype(float)\n opz['Opzetstuk Noord (°)'].fillna(opz['Opzetstuk Zuid (°)'], inplace=True)\n opz['Opzetstuk Zuid (°)'].fillna(opz['Opzetstuk Noord (°)'], inplace=True)\n return opz",
"def indoor_air_quality(self):\n # name, command, signals, delay\n return self._i2c_read_words_from_cmd(command=[0x20, 0x08], reply_size=2, delay=0.05)",
"def to_abivars(self):",
"def to_ACEScg():\r\n selNodes = nuke.selectedNodes()\r\n for node in selNodes:\r\n if node.Class() == 'Read':\r\n inputDataType = {\r\n '8-bit fixed': 169, '16-bit fixed': 169,\r\n '16-bit half float': 163, '32-bit float': 163\r\n }\r\n bitDepth = node.metadata('input/bitsperchannel')\r\n node['colorspace'].setValue(inputDataType[bitDepth])\r\n fileParm = node['file'].value()\r\n fileName = str(fileParm.split('/')[-1])\r\n newName = str(fileName.split('.')[0] + '_ACEScg')\r\n fileName = fileName.replace(str(fileName.split('.')[0]), newName)\r\n filename, fileExt = os.path.splitext(fileName)\r\n newFileName = filename + '.exr'\r\n newPath = fileParm.replace(\r\n str(fileParm.split('/')[-1]), newFileName)\r\n\r\n # Create write node and save out as ACEScg\r\n wNode = nuke.nodes.Write()\r\n wNode.setInput(0, node)\r\n wNode['file'].setValue(newPath)\r\n wNode['file_type'].setValue(3)\r\n wNode['colorspace'].setValue(16)\r\n nuke.execute(wNode, start=1, end=1, incr=1)",
"def __make_geo(self):\n # gmsh freecad_part.iges -o out_iges.geo -0\n fname_list = self.__fname.split('.')\n geo_file = fname_list[0]+'.geo'\n runstr = \"%s %s -o %s -0\" % (environment.GMSH, self.__fname, geo_file)\n print(runstr)\n subprocess.call(runstr, shell=True)\n print('Wrote file: %s' % geo_file)"
] | [
"0.6572235",
"0.5615951",
"0.5423645",
"0.53658104",
"0.51459473",
"0.51208514",
"0.50073093",
"0.5003773",
"0.49911514",
"0.49624845",
"0.48734692",
"0.48479107",
"0.4842932",
"0.48100558",
"0.47674647",
"0.4759654",
"0.47496668",
"0.4749038",
"0.4746655",
"0.4734282",
"0.47204",
"0.47190318",
"0.4710448",
"0.4704642",
"0.46851444",
"0.46822503",
"0.4676338",
"0.46681082",
"0.4667365",
"0.46631727"
] | 0.76431525 | 0 |
uses template in order to make kml format | def make_kml_format(self,kml_template):
if self.as_type == 'A':
self.kml_lines = kml_template['good_subdivided']['placemark']
elif self.as_type == 'B':
self.kml_lines = kml_template['bad_subdivided']['placemark']
else:
print('Unknown airspace type')
# get idx of name and coordinates
idxLine = 0
while idxLine < len(self.kml_lines):
#print(self.kml_lines[idxLine])
if self.kml_lines[idxLine].startswith('\t\t\t\t<name>'): # name line of the airspace
idx_name = idxLine
if '\t\t\t\t\t\t\t<coordinates>\n' in self.kml_lines[idxLine]: # coordinates line of the airspace
idx_coordinates = idxLine+1
idxLine += 1
# transform coordinates
# add all coordinates: Format is:
# source: 'DP 50:26:22 N 012:17:59 E\n'
# target: 9.025830271397426,53.46493577242719,0 8.986157446488383,53.46952117358134,0
coo_list = [] # collect list of coordinates as strings
for line in self.txt_lines:
if line.startswith('AN'):
self.name = line[3:].replace('\n','')
self.kml_lines[idx_name] = '\t\t\t\t<name>%s</name>\n' % self.name
if line.startswith('DP'):
# lon
lon_deg = float(line[14:17])
lon_min = float(line[18:20])
lon_sec = float(line[21:23])
lon_dec = (lon_sec / 60 + lon_min) / 60 + lon_deg
if line[24] == 'W':
lon_dec *= -1 # negative if west
# lat
lat_deg = float(line[3:5])
lat_min = float(line[6:8])
lat_sec = float(line[9:11])
lat_dec = (lat_sec / 60 + lat_min) / 60 + lat_deg
if line[12] == 'S':
lat_dec *= -1 # negative if south
# attach coordinates
coo_list.append('%1.16f,%1.16f,0 ' % (lon_dec,lat_dec))
# store for later plotting
self.lat_dec.append(lat_dec)
self.lon_dec.append(lon_dec)
# make sure that shape is closed --> first and last point must be the same
if coo_list[0] != coo_list[-1]:
coo_list.append(coo_list[0])
self.lat_dec.append(self.lat_dec[0])
self.lon_dec.append(self.lon_dec[0])
# write coordinate strings into kml
self.kml_lines[idx_coordinates] = '\t\t\t\t\t\t\t\t' # prefix; coordinate strings are appended below
for pt in coo_list:
self.kml_lines[idx_coordinates] += pt
print('Converted airspace %s' % self.name) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def generate_document_kml(self, title, content):\n return \"\"\"\\\n<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<kml xmlns=\"http://earth.google.com/kml/2.1\">\n <Document>\n <name>%s</name>\n <description></description>\n <Style>\n <ListStyle id=\"hideChildren\">\n <listItemType>checkHideChildren</listItemType>\n </ListStyle>\n </Style>\n%s\n </Document>\n</kml>\"\"\" % (title.replace('\\\\','/'), content)",
"def generate_kml(tx, ty, tz, tileext, tilesize, tileswne, options, children=None, **args):\n if not children:\n children = []\n\n args['tx'], args['ty'], args['tz'] = tx, ty, tz\n args['tileformat'] = tileext\n if 'tilesize' not in args:\n args['tilesize'] = tilesize\n\n if 'minlodpixels' not in args:\n args['minlodpixels'] = int(args['tilesize'] / 2)\n if 'maxlodpixels' not in args:\n args['maxlodpixels'] = int(args['tilesize'] * 8)\n if children == []:\n args['maxlodpixels'] = -1\n\n if tx is None:\n tilekml = False\n args['title'] = options.title\n else:\n tilekml = True\n args['title'] = \"%d/%d/%d.kml\" % (tz, tx, ty)\n args['south'], args['west'], args['north'], args['east'] = tileswne(tx, ty, tz)\n\n if tx == 0:\n args['drawOrder'] = 2 * tz + 1\n elif tx is not None:\n args['drawOrder'] = 2 * tz\n else:\n args['drawOrder'] = 0\n\n url = options.url\n if not url:\n if tilekml:\n url = \"../../\"\n else:\n url = \"\"\n\n s = \"\"\"<?xml version=\"1.0\" encoding=\"utf-8\"?>\n<kml xmlns=\"http://www.opengis.net/kml/2.2\">\n <Document>\n <name>%(title)s</name>\n <description></description>\n <Style>\n <ListStyle id=\"hideChildren\">\n <listItemType>checkHideChildren</listItemType>\n </ListStyle>\n </Style>\"\"\" % args\n if tilekml:\n s += \"\"\"\n <Region>\n <LatLonAltBox>\n <north>%(north).14f</north>\n <south>%(south).14f</south>\n <east>%(east).14f</east>\n <west>%(west).14f</west>\n </LatLonAltBox>\n <Lod>\n <minLodPixels>%(minlodpixels)d</minLodPixels>\n <maxLodPixels>%(maxlodpixels)d</maxLodPixels>\n </Lod>\n </Region>\n <GroundOverlay>\n <drawOrder>%(drawOrder)d</drawOrder>\n <Icon>\n <href>%(ty)d.%(tileformat)s</href>\n </Icon>\n <LatLonBox>\n <north>%(north).14f</north>\n <south>%(south).14f</south>\n <east>%(east).14f</east>\n <west>%(west).14f</west>\n </LatLonBox>\n </GroundOverlay>\n\"\"\" % args\n\n for cx, cy, cz in children:\n csouth, cwest, cnorth, ceast = tileswne(cx, cy, cz)\n s += \"\"\"\n <NetworkLink>\n <name>%d/%d/%d.%s</name>\n <Region>\n <LatLonAltBox>\n <north>%.14f</north>\n <south>%.14f</south>\n <east>%.14f</east>\n <west>%.14f</west>\n </LatLonAltBox>\n <Lod>\n <minLodPixels>%d</minLodPixels>\n <maxLodPixels>-1</maxLodPixels>\n </Lod>\n </Region>\n <Link>\n <href>%s%d/%d/%d.kml</href>\n <viewRefreshMode>onRegion</viewRefreshMode>\n <viewFormat/>\n </Link>\n </NetworkLink>\n \"\"\" % (cz, cx, cy, args['tileformat'], cnorth, csouth, ceast, cwest,\n args['minlodpixels'], url, cz, cx, cy)\n\n s += \"\"\" </Document>\n</kml>\n \"\"\"\n return s",
"def graphs_kelly():\n return render_template(\"graphs-Kelly.html\")",
"def generate_leaf_kml(self, d, content=\"\"):\n return (\"\"\"\\\n <Folder>\n <Region>\n <Lod>\n <minLodPixels>%(minlodpixels)d</minLodPixels>\n <maxLodPixels>%(maxlodpixels)d</maxLodPixels>\n </Lod>\n <LatLonAltBox>\n <north>%(north).14f</north>\n <south>%(south).14f</south>\n <east>%(east).14f</east>\n <west>%(west).14f</west>\n </LatLonAltBox>\n </Region>\n <GroundOverlay>\n <drawOrder>%(draw_order)d</drawOrder>\n <Icon>\n <href>%(image_url)s</href>\n </Icon>\n <LatLonBox>\n <north>%(north).14f</north>\n <south>%(south).14f</south>\n <east>%(east).14f</east>\n <west>%(west).14f</west>\n </LatLonBox>\n </GroundOverlay>\"\"\" % d\n + \"\"\"\\\n%s\n </Folder>\"\"\" % content)",
"def write_kml(self,varnames):\n if type(varnames) is str:\n varnames=(varnames,)\n content=[]\n for varname in varnames:\n content.append(self.image2kml(varname))\n kml=self.__class__.kmlstr % \\\n {'content':'\\n'.join(content),\\\n 'prog':self.__class__.progname}\n f=open(self.__class__.kmlname,'w')\n f.write(kml)\n f.close()",
"def __str__(self):\n buf = []\n for var, val in self._kml.items():\n if val is not None: # Exclude all variables that are None\n if var.endswith(\"_\"):\n buf.append(\"{0}\".format(val)) # Use the variable's __str__ as is\n else:\n if var in ['name', 'description', 'text'] and Kmlable._parse: # Parse value for HTML and convert\n val = Kmlable._chrconvert(val)\n elif (var == 'href' and os.path.exists(val) and Kmlable._kmz == True)\\\n or (var == 'targetHref' and os.path.exists(val) and Kmlable._kmz == True): # Check for images\n Kmlable._addimage(val)\n val = os.path.join('files', os.path.split(val)[1])\n buf.append(u(\"<{0}>{1}</{0}>\").format(var, val)) # Enclose the variable's __str__ with its name\n # Add namespaces\n if var.startswith(\"atom:\") and 'xmlns:atom=\"http://www.w3.org/2005/Atom\"' not in Kmlable._namespaces:\n Kmlable._namespaces.append('xmlns:atom=\"http://www.w3.org/2005/Atom\"')\n elif var.startswith(\"xal:\") and 'xmlns:xal=\"urn:oasis:names:tc:ciq:xsdschema:xAL:2.0\"' not in Kmlable._namespaces:\n Kmlable._namespaces.append('xmlns:xal=\"urn:oasis:names:tc:ciq:xsdschema:xAL:2.0\"')\n return \"\".join(buf)",
"def gen_wtml(base_dir, depth, **kwargs):\n kwargs.setdefault('FolderName', 'Toasty')\n kwargs.setdefault('BandPass', 'Visible')\n kwargs.setdefault('Name', 'Toasty map')\n kwargs.setdefault('Credits', 'Toasty')\n kwargs.setdefault('CreditsUrl', 'http://github.com/ChrisBeaumont/toasty')\n kwargs.setdefault('ThumbnailUrl', '')\n kwargs['url'] = base_dir\n kwargs['depth'] = depth\n\n template = ('<Folder Name=\"{FolderName}\">\\n'\n '<ImageSet Generic=\"False\" DataSetType=\"Sky\" '\n 'BandPass=\"{BandPass}\" Name=\"{Name}\" '\n 'Url=\"{url}/{{1}}/{{3}}/{{3}}_{{2}}.png\" BaseTileLevel=\"0\" '\n 'TileLevels=\"{depth}\" BaseDegreesPerTile=\"180\" '\n 'FileType=\".png\" BottomsUp=\"False\" Projection=\"Toast\" '\n 'QuadTreeMap=\"\" CenterX=\"0\" CenterY=\"0\" OffsetX=\"0\" '\n 'OffsetY=\"0\" Rotation=\"0\" Sparse=\"False\" '\n 'ElevationModel=\"False\">\\n'\n '<Credits> {Credits} </Credits>\\n'\n '<CreditsUrl>{CreditsUrl}</CreditsUrl>\\n'\n '<ThumbnailUrl>{ThumbnailUrl}</ThumbnailUrl>\\n'\n '<Description/>\\n</ImageSet>\\n</Folder>')\n return template.format(**kwargs)",
"def get_data_mrk():\n return render_template(\"l_markers.html\")",
"def generate_garmin_kml(self, d ):\n return (\"\"\"\n <GroundOverlay>\n <Icon>\n <href>%(image_url)s</href>\n <DrawOrder>%(draw_order)d</DrawOrder>\n </Icon>\n <LatLonBox>\n <north>%(north).14f</north>\n <south>%(south).14f</south>\n <east>%(east).14f</east>\n <west>%(west).14f</west>\n </LatLonBox>\n </GroundOverlay>\"\"\" % d )",
"def test_starting_template(checker):\n contents = labeled.contents(label=\"template\")\n _ = tomllib.loads(contents)",
"def generate(self, namespace: Optional[str], template: str, func: Callable, call_args: Dict) -> str:",
"def get_kml_dict(self, tx, ty_tms, tz, image_format, draworder = 0):\n d = {}\n\n d[\"south\"], d[\"west\"], d[\"north\"], d[\"east\"] = self.tileswne(tx, ty_tms, tz)\n\n image_filename = get_tile_filename(tx, ty_tms, tz, format_extension[image_format],False)\n d[\"image_filename\"] = image_filename\n d[\"image_filename\"] = d[\"image_filename\"].replace(\"\\\\\",\"/\")\n\n if self.options.url is None:\n d[\"image_url\"] = \"../../%s\" % image_filename\n else:\n d[\"image_url\"] = \"%s%s\" % (self.options.url, image_filename)\n d[\"image_url\"] = d[\"image_url\"].replace(\"\\\\\",\"/\")\n\n url = self.options.url\n if url is None:\n # Top level KML is linked from `doc.kml' and it needs different path.\n if tz == self.tminz:\n url = \"\"\n else:\n url = \"../../\"\n\n if self.options.kmz:\n extension = \"kmz\"\n else:\n extension = \"kml\"\n\n d[\"link_url\"] = \"%s%s\" % (url, get_tile_filename(tx, ty_tms, tz, extension,False))\n d[\"link_url\"] = d[\"link_url\"].replace(\"\\\\\",\"/\")\n\n d[\"minlodpixels\"] = int(self.tilesize / 2)\n d[\"maxlodpixels\"] = -1 # int(self.tilesize * 8)\n\n if tx == 0:\n d[\"draw_order\"] = draworder + 2 * tz + 1\n else:\n d[\"draw_order\"] = draworder + 2 * tz\n\n return d",
"def machinelearn2():\n return render_template('frontml.html')",
"def create_key(template, outtype=('nii.gz',), annotation_classes=None):\n\n if template is None or not template:\n raise ValueError('Template must be a valid format string')\n return template, outtype, annotation_classes",
"def create_key(template, outtype=('nii.gz',), annotation_classes=None):\n\n if template is None or not template:\n raise ValueError('Template must be a valid format string')\n return template, outtype, annotation_classes",
"def GenerateXML(dictionary, fileName=\"labelling.xml\") : \n root = gfg.Element(\"annotation\") \n #the big section is called Annotation\n for key in dictionary:\n #for every polygon list in inside object witho subelement name and attributes and the type \"polygon\"\n objectElement = gfg.Element(\"object\") \n root.append(objectElement) \n subElement1 = gfg.SubElement(objectElement, \"name:\".strip(\":\"))\n subElement1.text = str(dictionary[key][\"name\"])\n subElement2 = gfg.SubElement(objectElement, \"attributes\".strip(\":\"))\n subElement2.text = str(dictionary[key][\"attributes\"])\n subElement3 = gfg.SubElement(objectElement, \"polygon\")\n \n for i in range(0, len(dictionary[key])-2):\n #for every vertex of the polygon list it's rounded x, y on xml\n SubInsidePolygon = gfg.SubElement(subElement3, \"pt\")\n sub_x = gfg.SubElement(SubInsidePolygon, \"x\")\n sub_y = gfg.SubElement(SubInsidePolygon, \"y\")\n sub_x.text = str(int(round(dictionary[key][\"x_y_\" + str(i)][0])))\n sub_y.text = str(int(round(dictionary[key][\"x_y_\" + str(i)][1])))\n tree = gfg.ElementTree(root) \n #create the xml tree\n with open (fileName, \"wb\") as files : \n tree.write(files) \n #if xml does not exist create one otherwise rewrite to it",
"def template(c, release=\"url-shortener\"):\n c.run(f\"helm template {release} {HELM_CHART_DIR} > ./generated-deployment.yml\")",
"def generate_lookat_kml_block(self, lng, lat, viewrange):\n return \"\"\"\n <LookAt>\n <longitude>%.14f</longitude>\n <latitude>%.14f</latitude>\n <altitude>0</altitude>\n <range>%.f</range>\n <tilt>0</tilt>\n <heading>0</heading>\n </LookAt>\n\"\"\" % (lng, lat, viewrange)",
"def __fill_template__(self,template_file,output_fname):\n dictionary = {}\n for k,v in self.__dict__.iteritems():\n if k == 'sample_key':\n try:\n int(v)\n new_sample_key = \"Sample_\" + str(v)\n dictionary.update({k:new_sample_key})\n continue\n except ValueError:\n pass\n dictionary.update({k:str(v)})\n dictionary.update({'restats_tail': self.restats_file + '.tail'})\n with open(output_fname,'w') as f:\n string = fill_template(template_file,dictionary)\n f.write(string)",
"def image2kml(self,varname,filename=None):\n\n vdata=self.get_array(varname)\n im=self.get_image(vdata)\n if filename is None:\n filename='%s.png' % varname\n f=open(filename,'w')\n f.write(im)\n f.close()\n d=self.get_kml_dict(varname,filename)\n pylab.close('all')\n return self.__class__.kmlimage % d",
"def template1(self, width, height):\n style = '\\n'.join(self.style_lines())\n defs = '\\n'.join(self.defs_lines())\n body = '\\n'.join(self.body_lines())\n defs_block = '' if not (style or defs) else '''<defs>\n <style type=\"text/css\"><![CDATA[\n%s\\n ]]></style>\\n%s\\n</defs>''' % (style, defs)\n txt = '''<?xml version=\"1.0\" standalone=\"no\"?>\n<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.1//EN\" \n \"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd\">\n<svg width=\"%s\" height=\"%s\" version=\"1.1\" xmlns=\"http://www.w3.org/2000/svg\">\n%s\\n%s\\n</svg>\\n''' % (width, height, defs_block, body)\n return txt",
"def test_create_namespaced_template(self):\n pass",
"def Template(Fenetre_largeur,Fenetre_hauteur):\r\n li= Select_ligne(\"Nombre de lignes: \",Fenetre_largeur,Fenetre_hauteur)\r\n nom=\"Template\"\r\n fich=\"Template\"\r\n version=0\r\n while Path(\"stages/\"+fich+\".txt\").is_file() == True:\r\n version+=1\r\n fich=nom+str(version)\r\n fichier=open(\"stages/\"+fich+\".txt\",'w')\r\n fichier.write(str(li))\r\n fichier.write(\"\\n\")\r\n fichier.write(\"\\n\")\r\n for i in range(li):\r\n for j in range(10):\r\n fichier.write(\"0,0|\")\r\n fichier.write(\"\\n\")\r\n fichier.write(\"\\n\")\r\n fichier.write(\"gauche: resistance, droite: bonus\")\r\n fichier.write(\"\\n\")\r\n fichier.write(\"resistance max: 3\")\r\n fichier.write(\"\\n\")\r\n fichier.write(\"6=barre+\")\r\n fichier.write(\"\\n\")\r\n fichier.write(\"7=score+\")\r\n fichier.close()",
"def saved_template(self, template_id):\n\n # From user params get the wanted type and size\n category, size = template_id.split('_')\n\n # Parse the xml file\n template_tree = Etree.parse(\"patron.xml\")\n root = template_tree.getroot()\n\n # Find The selected template\n for template in root.findall(\"./type[@name='%s']/template[@size='%s']\" % (category, size)):\n # Find useful data\n info = 'T-shirt_template_%s_%s' % (category, size)\n transform = template.find('transform')\n\n # Creation of a main group for the Template\n template_attribs = {\n inkex.addNS('label', 'inkscape'): info,\n 'transform': transform.text if transform is not None else ''\n }\n template_group = inkex.etree.SubElement(self.current_layer, 'g', template_attribs)\n\n # For each pieces of the template\n for piece in template.findall('piece'):\n # Find useful data\n pieceinfo = info + \"_\" + piece.find('name').text\n transform = piece.find('transform')\n\n # Create a group for the piece\n piece_attribs = {\n inkex.addNS('label', 'inkscape'): pieceinfo,\n 'transform': transform.text if transform is not None else ''\n }\n piece_group = inkex.etree.SubElement(template_group, 'g', piece_attribs)\n\n # Add a text to display the piece info\n add_text(piece_group, pieceinfo.replace('_', ' '), piece.find('info').text, 15)\n\n # For each paths of the piece\n for part in piece.findall('part'):\n # Find useful data\n label = part.find('name').text\n partinfo = pieceinfo + \"_\" + label\n transform = part.find('transform')\n\n # Create a group for the shape\n part_attribs = {\n inkex.addNS('label', 'inkscape'): partinfo,\n 'transform': transform.text if transform is not None else ''\n }\n part_group = inkex.etree.SubElement(piece_group, 'g', part_attribs)\n\n # Add the path to the group\n style = self.normal_line if self.options.style == 'print' or label != 'offset' else self.cut_line\n path_attribs = {\n inkex.addNS('label', 'inkscape'): partinfo,\n 'style': simplestyle.formatStyle(style),\n 'd': part.find('path').text\n }\n inkex.etree.SubElement(part_group, inkex.addNS('path', 'svg'), path_attribs)",
"def create_custom_template(morph_links):\n template_text = \"<?xml version='1.0' encoding='UTF-8'?>\\n\"\n template_text += \"<templates>\\n\"\n\n template_text += \"<template name='AEtransform'>\\n\"\n for link in morph_links:\n morph_label = morph_links[link][\"Label\"]\n morph_label_ns = morph_label.replace(\" \", \"\")\n template_text += \"<attribute name='\" + morph_label_ns + \"' type='maya.double'>\\n\"\n template_text += \"<label>\" + morph_label + \"</label>\\n\"\n template_text += \"</attribute>\\n\"\n template_text += \"</template>\\n\"\n\n template_text += \"<view name='Morphs' template='AEtransform'>\\n\"\n for link in morph_links:\n groups = morph_links[link][\"Path\"].split('/')\n groups = list(filter(None, groups))\n for group in groups:\n group = group.replace(\" \", \"\")\n #template_text += \"<group name='\" + group + \"'>\\n\"\n morph_label = morph_links[link][\"Label\"]\n morph_label_ns = morph_label.replace(\" \", \"\")\n template_text += \"<property name='\" + morph_label_ns + \"'/>\\n\"\n for group in groups:\n template_text += \"\" #\"</group>\\n\"\n template_text += \"</view>\\n\"\n\n template_text += \"</templates>\\n\"\n\n template_path = os.path.abspath(\"..\\scripts\\\\AETemplates\\AEtransform.MorphsTemplate.xml\")\n template_file = open(template_path, \"w\")\n template_file.write(template_text)\n template_file.close()\n\n cmds.refreshEditorTemplates()",
"def test_create_namespaced_processed_template(self):\n pass",
"def create_html(text, template, output):\n\n # TODO uncomment this for orginal DMP format (right now difficult with differing section sizes)\n #templateLoader = jinja2.FileSystemLoader(searchpath=\"../templates/new\")\n templateLoader = jinja2.FileSystemLoader(searchpath=\"../templates\")\n templateEnv = jinja2.Environment(loader=templateLoader)\n TEMPLATE_FILE = \"template_\" + template.lower() + \".html\"\n real_template = templateEnv.get_template(TEMPLATE_FILE)\n\n outputText = real_template.render(contact=text)\n html_file = open(output + \".html\", \"w\")\n html_file.write(outputText)\n html_file.close()\n\n return output + \".html\"",
"def generate_link_kml(self, d):\n return \"\"\"\\\n <NetworkLink>\n <name>%(image_filename)s</name>\n <Region>\n <Lod>\n <minLodPixels>%(minlodpixels)d</minLodPixels>\n <maxLodPixels>-1</maxLodPixels>\n </Lod>\n <LatLonAltBox>\n <north>%(north).14f</north>\n <south>%(south).14f</south>\n <east>%(east).14f</east>\n <west>%(west).14f</west>\n </LatLonAltBox>\n </Region>\n <Link>\n <href>%(link_url)s</href>\n <viewRefreshMode>onRegion</viewRefreshMode>\n </Link>\n </NetworkLink>\"\"\" % d",
"def render_from_lms(template_name, dictionary, context=None, namespace='main'):\r\n return render_to_string(template_name, dictionary, context, namespace=\"lms.\" + namespace)",
"def printKml (params, out = sys.stdout):\n \n out.write(\" <Placemark>\\n\")\n out.write(\" <name>\" + str(params['UserID']) + \"</name>\\n\")\n out.write(\" <description>\\n\")\n import io\n \n buf = io.StringIO()\n printHtml(params, buf)\n import cgi\n \n out.write(cgi.escape(buf.getvalue()))\n out.write(\" </description>\\n\")\n out.write(\" <styleUrl>#m_ylw-pushpin_copy0</styleUrl>\\n\")\n out.write(\" <Point>\\n\")\n out.write(\" <coordinates>\")\n out.write(str(params['longitude']))\n out.write(',')\n out.write(str(params['latitude']))\n out.write(\",0</coordinates>\\n\")\n out.write(\" </Point>\\n\")\n out.write(\" </Placemark>\\n\")"
] | [
"0.65473634",
"0.6186442",
"0.59212077",
"0.58994913",
"0.5786561",
"0.5749856",
"0.57343155",
"0.5694609",
"0.56501126",
"0.5589115",
"0.5586314",
"0.5547935",
"0.5522636",
"0.5514267",
"0.5514267",
"0.54982287",
"0.54740417",
"0.54516935",
"0.5447252",
"0.54235315",
"0.54164743",
"0.54114723",
"0.53889513",
"0.53844714",
"0.5379983",
"0.53570396",
"0.53538585",
"0.5350417",
"0.53376365",
"0.5326569"
] | 0.6787827 | 0 |
New() > itkTernaryAddImageFilterID2ID2ID2ID2_Superclass Create a new object of the class itkTernaryAddImageFilterID2ID2ID2ID2_Superclass and set the input and the parameters if some named or nonnamed arguments are passed to that method. New() tries to assign all the non named parameters to the input of the new objects the first non named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF22ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.762808",
"0.75490963",
"0.74511015",
"0.7423488",
"0.7417358",
"0.73689806",
"0.7338099",
"0.72890794",
"0.7233506",
"0.72322845",
"0.71680635",
"0.7162203",
"0.7157877",
"0.7050944",
"0.7018189",
"0.7015261",
"0.701475",
"0.7012063",
"0.69447744",
"0.69441223",
"0.6889131",
"0.6881423",
"0.6868967",
"0.6853723",
"0.68408406",
"0.6826385",
"0.6813031",
"0.68121004",
"0.68096787",
"0.6790982"
] | 0.7865722 | 0 |
New() > itkTernaryAddImageFilterID3ID3ID3ID3_Superclass Create a new object of the class itkTernaryAddImageFilterID3ID3ID3ID3_Superclass and set the input and the parameters if some named or nonnamed arguments are passed to that method. New() tries to assign all the non named parameters to the input of the new objects the first non named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkEdgePotentialImageFilterICVF33ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)"
] | [
"0.77666956",
"0.76079875",
"0.75039846",
"0.75035",
"0.7479952",
"0.74662477",
"0.7298497",
"0.72855514",
"0.72777826",
"0.72717106",
"0.72706074",
"0.7184689",
"0.7160855",
"0.71421075",
"0.71154994",
"0.7073263",
"0.7060131",
"0.70412266",
"0.7006315",
"0.6987071",
"0.6958632",
"0.6957956",
"0.6952372",
"0.69433296",
"0.6930342",
"0.6929591",
"0.6909524",
"0.68802714",
"0.68798286",
"0.68414164"
] | 0.7919466 | 0 |
New() > itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass Create a new object of the class itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass and set the input and the parameters if some named or nonnamed arguments are passed to that method. New() tries to assign all the non named parameters to the input of the new objects the first non named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.78903687",
"0.7618442",
"0.7587125",
"0.7554806",
"0.7497415",
"0.7490903",
"0.7460643",
"0.7349206",
"0.73199266",
"0.7273647",
"0.7265122",
"0.7225199",
"0.72199607",
"0.7203722",
"0.7196587",
"0.71783614",
"0.71586716",
"0.7110979",
"0.7084644",
"0.70412594",
"0.70065254",
"0.69687927",
"0.69413334",
"0.69385123",
"0.69342095",
"0.692517",
"0.6903904",
"0.69023705",
"0.68812555",
"0.68713856"
] | 0.8061528 | 0 |
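The New() docstring in the row above describes ITK's generic object-factory convention: positional arguments are assigned to the filter inputs in order, and keyword arguments are forwarded to the method of the same name prefixed by 'Set'. A minimal sketch of that calling convention (assuming the standard itk Python package, NumPy, and the TernaryAddImageFilter wrapping these rows come from; the image values are illustrative only) might look like:

import itk
import numpy as np

# Three small float 2-D images built from NumPy arrays (illustrative values only).
a = itk.GetImageFromArray(np.full((8, 8), 1.0, dtype=np.float32))
b = itk.GetImageFromArray(np.full((8, 8), 2.0, dtype=np.float32))
c = itk.GetImageFromArray(np.full((8, 8), 3.0, dtype=np.float32))

# Non-named arguments are assigned to the inputs in order: first argument to
# the first input, second to the second, and so on.
add3 = itk.TernaryAddImageFilter.New(a, b, c)
add3.Update()
out = add3.GetOutput()  # every pixel is 1.0 + 2.0 + 3.0 = 6.0

# Named arguments are routed to 'Set' + name, so this keyword form is
# equivalent to calling SetInput1 / SetInput2 / SetInput3 explicitly.
add3_kw = itk.TernaryAddImageFilter.New(Input1=a, Input2=b, Input3=c)
add3_kw.Update()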
itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(itkLightObject obj) > itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass | def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF2IF2IF2_Superclass_cast(*args)",
"def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkCosImageFilterIF2IF2_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF2IF2_Superclass_cast(*args)",
"def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)"
] | [
"0.88076603",
"0.8572629",
"0.85263824",
"0.84869593",
"0.84707963",
"0.8447603",
"0.8407247",
"0.83666754",
"0.8311725",
"0.82673705",
"0.82649153",
"0.8258107",
"0.8230317",
"0.82138693",
"0.82006955",
"0.8125339",
"0.8115553",
"0.80756104",
"0.8072043",
"0.8067883",
"0.8057454",
"0.805316",
"0.8037226",
"0.79890996",
"0.7967445",
"0.79483676",
"0.7922309",
"0.7908108",
"0.7870035",
"0.78291464"
] | 0.8668573 | 1 |
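The *_cast rows wrap the SWIG helpers that downcast a generic itkLightObject reference back to a typed wrapped class: the plain filter cast returns the concrete filter type, and the _Superclass_cast variant in the row above does the same for the filter's base-class wrapping. A short sketch of where such a helper is used (again assuming the standard itk package and the float 2-D instantiation listed in these rows) might be:

import itk

ImageType = itk.Image[itk.F, 2]
FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]

f = FilterType.New()

# Suppose only a generic base-class reference is available, for example an
# object returned from a pipeline query or stored in a heterogeneous list.
generic_ref = f  # in real code this could be typed as itk.LightObject / itk.Object

# The static cast() helper on the wrapped class recovers the concrete type so
# the filter-specific API is usable again.
concrete = FilterType.cast(generic_ref)
print(concrete.GetNameOfClass())  # -> 'TernaryAddImageFilter'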
New() > itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass Create a new object of the class itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass and set the input and the parameters if some named or nonnamed arguments are passed to that method. New() tries to assign all the non named parameters to the input of the new objects the first non named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.78577113",
"0.7592699",
"0.7584402",
"0.74809074",
"0.7461834",
"0.74548477",
"0.7382315",
"0.7356721",
"0.7290633",
"0.72715217",
"0.7263018",
"0.7174303",
"0.7171283",
"0.7165325",
"0.7131815",
"0.7109765",
"0.7042447",
"0.7011581",
"0.69516927",
"0.69480693",
"0.6946724",
"0.6933001",
"0.6930969",
"0.6927596",
"0.691683",
"0.6914075",
"0.68908465",
"0.68890315",
"0.6888107",
"0.6853245"
] | 0.7961178 | 0 |
itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(itkLightObject obj) > itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass | def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def itkCosImageFilterIF3IF3_Superclass_cast(*args):\n return _itkCosImageFilterPython.itkCosImageFilterIF3IF3_Superclass_cast(*args)",
"def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterID3ID3ID3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMF3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args):\n return _itkEdgePotentialImageFilterPython.itkEdgePotentialImageFilterICVF33IF3_Superclass_cast(*args)"
] | [
"0.889163",
"0.872401",
"0.8716521",
"0.8686031",
"0.86727256",
"0.8645202",
"0.86261636",
"0.85349524",
"0.8480945",
"0.84491646",
"0.8429972",
"0.8390216",
"0.8305966",
"0.82382905",
"0.82324976",
"0.81321186",
"0.81260115",
"0.8123955",
"0.80859166",
"0.80845314",
"0.8056589",
"0.8052327",
"0.804861",
"0.80232453",
"0.79935324",
"0.7979531",
"0.7918457",
"0.790081",
"0.7882919",
"0.78771764"
] | 0.87746197 | 1 |
New() > itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass Create a new object of the class itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass and set the input and the parameters if some named or nonnamed arguments are passed to that method. New() tries to assign all the non named parameters to the input of the new objects the first non named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.77771425",
"0.777552",
"0.7686807",
"0.7643416",
"0.7587692",
"0.75774217",
"0.73941374",
"0.73560876",
"0.73111916",
"0.7292542",
"0.72911847",
"0.72615075",
"0.7257298",
"0.72467935",
"0.71607935",
"0.71508795",
"0.7145032",
"0.71084183",
"0.7099382",
"0.70673823",
"0.7050572",
"0.7045471",
"0.7041566",
"0.70305765",
"0.70130855",
"0.6998498",
"0.6995835",
"0.6992183",
"0.6981598",
"0.6981034"
] | 0.79703885 | 0 |
itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(itkLightObject obj) > itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass | def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)"
] | [
"0.8583774",
"0.8431196",
"0.8420218",
"0.83839583",
"0.8363702",
"0.83492815",
"0.82900065",
"0.82717526",
"0.82178116",
"0.8217027",
"0.8207864",
"0.820718",
"0.8156194",
"0.80921507",
"0.8092105",
"0.8075868",
"0.8073345",
"0.8053059",
"0.8038925",
"0.8010338",
"0.79771775",
"0.7878296",
"0.7858739",
"0.78540045",
"0.78158575",
"0.7761325",
"0.77299756",
"0.77172834",
"0.76812",
"0.76592106"
] | 0.85070795 | 1 |
New() > itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass Create a new object of the class itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass and set the input and the parameters if some named or nonnamed arguments are passed to that method. New() tries to assign all the non named parameters to the input of the new objects the first non named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.78084457",
"0.7777113",
"0.7743919",
"0.7609945",
"0.75737077",
"0.7548231",
"0.73495024",
"0.7265904",
"0.7243554",
"0.7237153",
"0.7220631",
"0.7212902",
"0.72032726",
"0.7179373",
"0.7160813",
"0.7156109",
"0.7147796",
"0.71228814",
"0.71199644",
"0.70842755",
"0.70775044",
"0.7048789",
"0.7013777",
"0.70094776",
"0.69766766",
"0.6965889",
"0.69315624",
"0.6931002",
"0.68471116",
"0.6837332"
] | 0.7868488 | 0 |
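A note on the class-name suffixes that repeat throughout these rows: under ITK's wrapping naming convention (stated here as background knowledge, not something the rows themselves spell out), 'I' stands for itk::Image, the following letters give the pixel type (F float, D double, UC unsigned char, US unsigned short, UL unsigned long, SS signed short), and the trailing digit is the image dimension, so itkTernaryAddImageFilterIUC3IUC3IUC3IUC3 is the unsigned-char, 3-D instantiation. In Python the same instantiations are reached by indexing the template class:

import itk

# Unsigned char, 3-D: matches itkTernaryAddImageFilterIUC3IUC3IUC3IUC3 above.
IUC3 = itk.Image[itk.UC, 3]
FilterIUC3 = itk.TernaryAddImageFilter[IUC3, IUC3, IUC3, IUC3]

# Float, 2-D: matches itkTernaryAddImageFilterIF2IF2IF2IF2.
IF2 = itk.Image[itk.F, 2]
FilterIF2 = itk.TernaryAddImageFilter[IF2, IF2, IF2, IF2]

print(FilterIUC3.__name__)  # itkTernaryAddImageFilterIUC3IUC3IUC3IUC3
print(FilterIF2.__name__)   # itkTernaryAddImageFilterIF2IF2IF2IF2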
itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(itkLightObject obj) > itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass | def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)"
] | [
"0.8532647",
"0.85305256",
"0.8516313",
"0.849879",
"0.84499377",
"0.84234107",
"0.8399052",
"0.83886224",
"0.8355858",
"0.8344065",
"0.83316773",
"0.8331413",
"0.83286583",
"0.82926065",
"0.8276312",
"0.8266724",
"0.826375",
"0.8225655",
"0.8214009",
"0.8193666",
"0.8193466",
"0.81501955",
"0.78534245",
"0.7805978",
"0.7802641",
"0.7802427",
"0.7762043",
"0.7754395",
"0.77441114",
"0.7734065"
] | 0.86180055 | 0 |
New() > itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass Create a new object of the class itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass and set the input and the parameters if some named or nonnamed arguments are passed to that method. New() tries to assign all the non named parameters to the input of the new objects the first non named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.76136214",
"0.75883645",
"0.75875884",
"0.7529568",
"0.74792403",
"0.7449526",
"0.74375355",
"0.7260869",
"0.7239844",
"0.7108787",
"0.70131814",
"0.69569594",
"0.6922677",
"0.6912971",
"0.68873686",
"0.687519",
"0.6859417",
"0.6853394",
"0.68529296",
"0.68343085",
"0.67922634",
"0.67798215",
"0.6776158",
"0.6775277",
"0.6766944",
"0.67660344",
"0.67555183",
"0.67503774",
"0.67455685",
"0.67155784"
] | 0.77367264 | 0 |
itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(itkLightObject obj) > itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass | def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL2IUL2IUL2_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMF2_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF2_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_Superclass_cast(obj)"
] | [
"0.83347076",
"0.8259786",
"0.8254877",
"0.82273674",
"0.8213395",
"0.81952333",
"0.81827587",
"0.81737477",
"0.81565833",
"0.81444824",
"0.81440467",
"0.8123669",
"0.81183785",
"0.8090652",
"0.80894965",
"0.8077059",
"0.80663335",
"0.80111235",
"0.79480046",
"0.7916144",
"0.78519887",
"0.77994204",
"0.7795864",
"0.7681974",
"0.76066643",
"0.7603395",
"0.7585965",
"0.7562357",
"0.7551937",
"0.7528142"
] | 0.8331188 | 1 |
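The `*_cast` free functions wrapped in these entries (and the `cast()` static methods visible among the negative examples) down-cast a generic `itkLightObject` reference back to a concrete wrapped type; the `_Superclass` variants appear to target the filter's `TernaryFunctorImageFilter` base as it exists in upstream ITK. A minimal usage sketch, assuming a build whose wrapping includes `TernaryAddImageFilter` as these entries do — the image type and variable names are placeholders:

```python
import itk

ImageType = itk.Image[itk.F, 2]
FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]

f = FilterType.New()

# Pretend all we hold is an untyped reference (e.g. pulled out of a generic
# container or returned through an itk.LightObject-typed API):
obj = f

# The generated cast recovers the concrete wrapper, so the filter-specific
# API (SetInput1/2/3, Update, ...) is usable again:
concrete = FilterType.cast(obj)
print(concrete.GetNameOfClass())   # "TernaryAddImageFilter"
```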
New() > itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass Create a new object of the class itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass and set the input and the parameters if some named or nonnamed arguments are passed to that method. New() tries to assign all the non named parameters to the input of the new objects the first non named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.77514553",
"0.765869",
"0.76537627",
"0.7553569",
"0.749837",
"0.7385497",
"0.7229729",
"0.72107583",
"0.71896565",
"0.71452445",
"0.71168566",
"0.7091152",
"0.70909774",
"0.7024852",
"0.70056397",
"0.6996257",
"0.69950944",
"0.69864553",
"0.69812405",
"0.69450486",
"0.6925556",
"0.69152147",
"0.6881612",
"0.6864559",
"0.68400246",
"0.68397975",
"0.68339914",
"0.6825365",
"0.67540365",
"0.67413706"
] | 0.7761621 | 0 |
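Every `New()` in these documents hands the freshly created object to `itkTemplate.New`, which implements the convention the docstring describes: positional arguments are wired to the filter's inputs in order, and each keyword argument `Name=value` becomes a `SetName(value)` call. A hypothetical sketch of both spellings — the images are placeholders, and `TernaryAddImageFilter` is assumed to be wrapped as in these entries:

```python
import itk

ImageType = itk.Image[itk.F, 2]
img1, img2, img3 = (ImageType.New() for _ in range(3))   # stand-ins for real images

FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]

# Positional form: per the docstring, arguments are assigned to the inputs
# in order (first argument -> first input, and so on).
add = FilterType.New(img1, img2, img3)

# Keyword form: each name is prefixed with "Set" and called as a method.
add2 = FilterType.New(Input1=img1, Input2=img2, Input3=img3)
```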
itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(itkLightObject obj) > itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass | def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIF3IF3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkNotImageFilterISS3ISS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_Superclass_cast(obj)",
"def itkNotImageFilterIUC3IUC3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkNotImageFilterIUS3IUS3_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_Superclass_cast(obj)",
"def itkNotImageFilterIF2IF2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkNotImageFilterIUS2IUS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_Superclass_cast(obj)",
"def itkNotImageFilterIUC2IUC2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkNotImageFilterISS2ISS2_Superclass_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2_Superclass *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_Superclass_cast(obj)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass_cast(*args)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_Superclass_cast(*args)",
"def itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIUL3IUL3IUL3_Superclass_cast(*args)",
"def itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj: 'itkLightObject') -> \"itkHistogramToIntensityImageFilterHFIF3_Superclass *\":\n return _itkHistogramToIntensityImageFilterPython.itkHistogramToIntensityImageFilterHFIF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMD3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD3_Superclass_cast(obj)",
"def itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args):\n return _itkSubtractImageFilterPython.itkSubtractImageFilterIF3IF3IF3_Superclass_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_Superclass_cast(*args)",
"def itkVTKPolyDataReaderMF3_Superclass_cast(obj: 'itkLightObject') -> \"itkVTKPolyDataReaderMF3_Superclass *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF3_Superclass_cast(obj)"
] | [
"0.85246396",
"0.84796506",
"0.84340036",
"0.84302866",
"0.84288764",
"0.8423453",
"0.8416096",
"0.83491045",
"0.8327571",
"0.83225805",
"0.82702595",
"0.8268719",
"0.82654333",
"0.8251263",
"0.82445973",
"0.8234296",
"0.8218139",
"0.8206639",
"0.81782633",
"0.8108415",
"0.8069083",
"0.8034119",
"0.7786232",
"0.77656925",
"0.77128786",
"0.7711452",
"0.7677795",
"0.7654404",
"0.7647236",
"0.7645991"
] | 0.85044765 | 1 |
New() > itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass Create a new object of the class itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass and set the input and the parameters if some named or nonnamed arguments are passed to that method. New() tries to assign all the non named parameters to the input of the new objects the first non named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.7596377",
"0.75910646",
"0.7560997",
"0.75100327",
"0.74321425",
"0.73726755",
"0.73566854",
"0.734661",
"0.7272658",
"0.72386825",
"0.7217317",
"0.7106021",
"0.7098527",
"0.70696133",
"0.7002581",
"0.6983039",
"0.6974576",
"0.69361895",
"0.6912299",
"0.68864566",
"0.6862588",
"0.6853599",
"0.68524754",
"0.6848617",
"0.6841335",
"0.6814946",
"0.6791793",
"0.6789456",
"0.67701083",
"0.6760358"
] | 0.7922314 | 0 |
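Beyond the per-instantiation `New()` wrapped here, ITK's Python layer also lets the un-instantiated template object choose the wrapping itself: calling `New()` on `itk.TernaryAddImageFilter` inspects the inputs' types and dispatches to the matching suffix (IUS2, ID3, ...). A sketch under that assumption; the file names are placeholders:

```python
import itk

# Hypothetical unsigned-short inputs; itk.imread returns itk.Image[itk.US, dim].
a = itk.imread("a.mha", itk.US)
b = itk.imread("b.mha", itk.US)
c = itk.imread("c.mha", itk.US)

# The template-level New() picks the instantiation from the inputs, so the
# explicit suffix (here IUS2 or IUS3, depending on the files) never appears.
add = itk.TernaryAddImageFilter.New(a, b, c)
add.Update()
result = add.GetOutput()
```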
New() > itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass Create a new object of the class itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass and set the input and the parameters if some named or nonnamed arguments are passed to that method. New() tries to assign all the non named parameters to the input of the new objects the first non named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS3ISS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass_cast(*args)",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.76287913",
"0.7548939",
"0.7542766",
"0.74978733",
"0.74911535",
"0.73237395",
"0.7311357",
"0.7223127",
"0.7221928",
"0.7203291",
"0.711763",
"0.70980036",
"0.7094449",
"0.70661205",
"0.7045585",
"0.69802654",
"0.69774556",
"0.6953183",
"0.6930293",
"0.6927179",
"0.69252926",
"0.68612796",
"0.6853266",
"0.68411696",
"0.68341136",
"0.6805854",
"0.676833",
"0.676823",
"0.67421156",
"0.6724591"
] | 0.7858551 | 0 |
New() > itkTernaryAddImageFilterID2ID2ID2ID2 Create a new object of the class itkTernaryAddImageFilterID2ID2ID2ID2 and set the input and the parameters if some named or nonnamed arguments are passed to that method. New() tries to assign all the non named parameters to the input of the new objects the first non named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterID2ID2ID2ID2.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterISS2ISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.79866374",
"0.7950948",
"0.7902202",
"0.76756924",
"0.7476825",
"0.74265414",
"0.742054",
"0.7409127",
"0.7372828",
"0.73234135",
"0.7315541",
"0.7296767",
"0.7270058",
"0.7263262",
"0.7241665",
"0.7233361",
"0.72253376",
"0.72154576",
"0.715046",
"0.7147575",
"0.71471334",
"0.71427715",
"0.71396875",
"0.7122744",
"0.7106084",
"0.70948124",
"0.70886177",
"0.7087755",
"0.70497006",
"0.70360106"
] | 0.8147333 | 0 |
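The concrete (non-`_Superclass`) instantiation in this entry computes a pixel-wise sum of its three inputs. A small self-contained sketch with the double-pixel 2-D wrapping this entry names — array sizes and values are arbitrary, and `itk.image_from_array` / `itk.array_from_image` are the newer spellings of `GetImageFromArray` / `GetArrayFromImage`:

```python
import itk
import numpy as np

ImageType = itk.Image[itk.D, 2]

def constant_image(value):
    # float64 arrays map onto double-pixel (itk.D) images
    return itk.image_from_array(np.full((8, 8), value, dtype=np.float64))

a, b, c = constant_image(1.0), constant_image(2.0), constant_image(3.0)

FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]
add = FilterType.New(Input1=a, Input2=b, Input3=c)
add.Update()

out = itk.array_from_image(add.GetOutput())
assert np.allclose(out, 6.0)   # 1 + 2 + 3, everywhere
```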
itkTernaryAddImageFilterID2ID2ID2ID2_cast(itkLightObject obj) > itkTernaryAddImageFilterID2ID2ID2ID2 | def itkTernaryAddImageFilterID2ID2ID2ID2_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args)",
"def itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args)",
"def itkBinaryContourImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_cast(*args)",
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def itkNotImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterISS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def itkNotImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS2IUS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args)"
] | [
"0.76308167",
"0.758188",
"0.7517058",
"0.7454919",
"0.74101835",
"0.73981917",
"0.7383619",
"0.73698163",
"0.7358109",
"0.7336409",
"0.7328914",
"0.7323717",
"0.7288171",
"0.72586155",
"0.725105",
"0.72349465",
"0.7195933",
"0.7163779",
"0.7159076",
"0.7150028",
"0.7110235",
"0.71037704",
"0.7098279",
"0.70937634",
"0.70878726",
"0.70763785",
"0.7071058",
"0.7063865",
"0.7051465",
"0.7048067"
] | 0.7765551 | 0 |
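The module-level function in this document and the `cast()` static method shown among the negatives are two spellings of the same generated cast; the static method is the one normally reached from user code, while the `_itk...Python` module-level helper is an internal detail of the SWIG layer. A short sketch of the public spelling (types are placeholders):

```python
import itk

ImageType = itk.Image[itk.D, 2]
FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]

f = FilterType.New()
same = FilterType.cast(f)        # public spelling of the generated cast
assert same.GetNameOfClass() == "TernaryAddImageFilter"
```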
New() > itkTernaryAddImageFilterID3ID3ID3ID3 Create a new object of the class itkTernaryAddImageFilterID3ID3ID3ID3 and set the input and the parameters if some named or nonnamed arguments are passed to that method. New() tries to assign all the non named parameters to the input of the new objects the first non named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterID3ID3ID3ID3.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterIF3IF3F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.8122643",
"0.80405945",
"0.79705286",
"0.7826779",
"0.7583411",
"0.75830215",
"0.7572595",
"0.756218",
"0.74430597",
"0.74388206",
"0.7398677",
"0.73933834",
"0.7389276",
"0.7356299",
"0.73425376",
"0.732044",
"0.73061585",
"0.7295671",
"0.7292391",
"0.7291407",
"0.7245295",
"0.7238673",
"0.72325724",
"0.7228048",
"0.72212756",
"0.7216588",
"0.72103286",
"0.7194369",
"0.7192333",
"0.719084"
] | 0.81664336 | 0 |
itkTernaryAddImageFilterID3ID3ID3ID3_cast(itkLightObject obj) > itkTernaryAddImageFilterID3ID3ID3ID3 | def itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args)",
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def itkBinaryContourImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def itkHuangThresholdImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def itkNotImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args)",
"def itkHuangThresholdImageFilterISS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)"
] | [
"0.78832257",
"0.785561",
"0.77852875",
"0.7771489",
"0.7718299",
"0.7687708",
"0.76786035",
"0.7662282",
"0.7625325",
"0.7598742",
"0.7597835",
"0.7562758",
"0.7558518",
"0.75351113",
"0.75327724",
"0.7522676",
"0.75222576",
"0.7520495",
"0.75182086",
"0.7504889",
"0.7499525",
"0.748609",
"0.74735343",
"0.74709594",
"0.7463708",
"0.7444209",
"0.74399316",
"0.7431289",
"0.7430193",
"0.7428997"
] | 0.80553585 | 0 |
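A cast like the one in this entry only succeeds when the object's dynamic type actually matches; what a failed cast yields (a null wrapper or an exception) depends on the wrapping, so one defensive pattern is to check the run-time class name first. A hypothetical guard, with `obj` standing in for any generically typed reference:

```python
import itk

ImageType = itk.Image[itk.D, 3]
FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]

obj = FilterType.New()   # placeholder for an itk.LightObject-typed reference

if obj.GetNameOfClass() == "TernaryAddImageFilter":
    filt = FilterType.cast(obj)   # dynamic type matches, cast is safe
else:
    filt = None                   # leave mismatched objects alone
```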
New() > itkTernaryAddImageFilterIF2IF2IF2IF2 Create a new object of the class itkTernaryAddImageFilterIF2IF2IF2IF2 and set the input and the parameters if some named or nonnamed arguments are passed to that method. New() tries to assign all the non named parameters to the input of the new objects the first non named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIF2IF2IF2IF2.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterIF2IF2F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.8164579",
"0.796104",
"0.7792533",
"0.76417816",
"0.74947166",
"0.7469685",
"0.7461249",
"0.745837",
"0.7394439",
"0.73799187",
"0.7362876",
"0.735769",
"0.7347056",
"0.73209006",
"0.73039687",
"0.7292726",
"0.72827333",
"0.72694397",
"0.7240936",
"0.7170967",
"0.7154845",
"0.7142686",
"0.713682",
"0.7132094",
"0.7106825",
"0.71029645",
"0.7097051",
"0.70945853",
"0.70834047",
"0.7050367"
] | 0.8288121 | 0 |
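The query above describes the calling convention shared by every New() wrapper in this file: non-named arguments are assigned to the filter inputs in order, and named arguments are routed to the matching Set* method. A minimal usage sketch of that convention, assuming the itk Python package with this wrapping is installed; the input file names are placeholders, not values taken from this dataset:

import itk

ImageType = itk.Image[itk.F, 2]                       # float pixels, 2-D
img1 = itk.imread("a.png", itk.F)                     # placeholder inputs
img2 = itk.imread("b.png", itk.F)
img3 = itk.imread("c.png", itk.F)

# The three non-named arguments become the first, second and third input,
# exactly as the docstring above describes.
add3 = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType].New(img1, img2, img3)
add3.Update()
result = add3.GetOutput()                             # pixel-wise sum of the three inputs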
itkTernaryAddImageFilterIF2IF2IF2IF2_cast(itkLightObject obj) > itkTernaryAddImageFilterIF2IF2IF2IF2 | def itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args)",
"def itkBinaryContourImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args)",
"def itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_cast(*args)",
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass_cast(*args)",
"def itkNotImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterISS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS2ISS2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterISS2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHalfHermitianToRealInverseFFTImageFilterICF2IF2 *\":\n return _itkHalfHermitianToRealInverseFFTImageFilterPython.itkHalfHermitianToRealInverseFFTImageFilterICF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)"
] | [
"0.8325339",
"0.81298554",
"0.7900186",
"0.789129",
"0.7829227",
"0.7820389",
"0.78154504",
"0.7814893",
"0.7799674",
"0.7781515",
"0.77740836",
"0.7751609",
"0.77474713",
"0.7740967",
"0.77069825",
"0.7687321",
"0.7653217",
"0.7551432",
"0.75447375",
"0.7498614",
"0.74841386",
"0.7444034",
"0.7427243",
"0.74118775",
"0.74117213",
"0.7411362",
"0.74018717",
"0.74007183",
"0.7393822",
"0.7392059"
] | 0.82992935 | 1 |
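The cast wrappers in this and the following rows all follow the same pattern: they down-cast a generic itkLightObject reference to the concrete filter type. A hedged sketch of how such a cast is typically invoked through the wrapped Python API; the variable names are illustrative only:

import itk

ImageType = itk.Image[itk.F, 2]
FilterType = itk.TernaryAddImageFilter[ImageType, ImageType, ImageType, ImageType]

generic = FilterType.New()             # stands in for a reference held only as itk.LightObject
concrete = FilterType.cast(generic)    # dispatches to itkTernaryAddImageFilterIF2IF2IF2IF2_cast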
New() > itkTernaryAddImageFilterIF3IF3IF3IF3

Create a new object of the class itkTernaryAddImageFilterIF3IF3IF3IF3 and set the input and the parameters if some named or non-named arguments are passed to that method.

New() tries to assign all the non-named parameters to the inputs of the new object: the first non-named parameter goes to the first input, etc.

The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterIF3IF3F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.81593454",
"0.8019483",
"0.77812564",
"0.7755564",
"0.76374006",
"0.7600967",
"0.7552205",
"0.75319076",
"0.75210726",
"0.75069284",
"0.7500817",
"0.74600273",
"0.7445691",
"0.7439426",
"0.74359894",
"0.7410714",
"0.7394769",
"0.7375656",
"0.73547715",
"0.73338944",
"0.7309137",
"0.7308926",
"0.7308153",
"0.73044044",
"0.7296827",
"0.72965664",
"0.7272183",
"0.72698",
"0.7251409",
"0.7247766"
] | 0.83370554 | 0 |
itkTernaryAddImageFilterIF3IF3IF3IF3_cast(itkLightObject obj) > itkTernaryAddImageFilterIF3IF3IF3IF3 | def itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def itkHuangThresholdImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def itkBinaryContourImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def itkHuangThresholdImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args)",
"def itkHuangThresholdImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterISS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def itkNotImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterISS3ISS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)"
] | [
"0.8314334",
"0.8146819",
"0.81391484",
"0.8064029",
"0.8047938",
"0.80301446",
"0.8027858",
"0.80160296",
"0.8015273",
"0.7977002",
"0.79115695",
"0.7899026",
"0.7853632",
"0.78422785",
"0.78316253",
"0.78305656",
"0.78131515",
"0.7811875",
"0.7801812",
"0.77994573",
"0.7799187",
"0.7798523",
"0.7791438",
"0.779052",
"0.7758929",
"0.7754176",
"0.7751458",
"0.7724332",
"0.77170706",
"0.7716697"
] | 0.83667874 | 0 |
New() > itkTernaryAddImageFilterIUC2IUC2IUC2IUC2

Create a new object of the class itkTernaryAddImageFilterIUC2IUC2IUC2IUC2 and set the input and the parameters if some named or non-named arguments are passed to that method.

New() tries to assign all the non-named parameters to the inputs of the new object: the first non-named parameter goes to the first input, etc.

The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.80834025",
"0.79318345",
"0.7846418",
"0.7789117",
"0.7781465",
"0.76959866",
"0.7689662",
"0.7657546",
"0.76215744",
"0.7617233",
"0.76065916",
"0.75819594",
"0.7497899",
"0.7480199",
"0.7466372",
"0.7465572",
"0.7454194",
"0.74479383",
"0.74334204",
"0.7425108",
"0.74227524",
"0.74216574",
"0.74011093",
"0.73974645",
"0.7395684",
"0.7388915",
"0.7365073",
"0.73386604",
"0.73286253",
"0.73279756"
] | 0.82164854 | 0 |
itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(itkLightObject obj) > itkTernaryAddImageFilterIUC2IUC2IUC2IUC2 | def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkBinaryContourImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC2IUC2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_cast(obj)",
"def itkLabelStatisticsImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIUC2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC2IUC2IUC2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC2IUC2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC2IUC2_cast(obj)",
"def itkLabelStatisticsImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIF2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC2IUC2IUC2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIUC2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIUS2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def itkScalarImageKmeansImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUC2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUC2_cast(obj)",
"def itkScalarImageKmeansImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF2IUC2 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkLabelStatisticsImageFilterIUC2IUC2 *\":\n return _itkLabelStatisticsImageFilterPython.itkLabelStatisticsImageFilterIUC2IUC2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args)",
"def itkHuangThresholdImageFilterISS2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS2IUC2_cast(obj)",
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)"
] | [
"0.7892496",
"0.78112376",
"0.77925205",
"0.77598757",
"0.77170867",
"0.76765",
"0.764932",
"0.7625104",
"0.7615173",
"0.7612952",
"0.7601345",
"0.7593909",
"0.75822866",
"0.7575358",
"0.7571743",
"0.7553365",
"0.75141734",
"0.74964494",
"0.7489112",
"0.7484641",
"0.7465378",
"0.7461833",
"0.7454941",
"0.7440599",
"0.74391294",
"0.74350655",
"0.7432016",
"0.74281514",
"0.74166363",
"0.7403122"
] | 0.7989687 | 0 |
New() > itkTernaryAddImageFilterIUC3IUC3IUC3IUC3

Create a new object of the class itkTernaryAddImageFilterIUC3IUC3IUC3IUC3 and set the input and the parameters if some named or non-named arguments are passed to that method.

New() tries to assign all the non-named parameters to the inputs of the new object: the first non-named parameter goes to the first input, etc.

The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.81388545",
"0.810595",
"0.8090037",
"0.7969986",
"0.7956292",
"0.78857464",
"0.785059",
"0.77959174",
"0.77761877",
"0.7631814",
"0.75843716",
"0.7583665",
"0.7542179",
"0.75403064",
"0.7498079",
"0.74783415",
"0.74772173",
"0.74764365",
"0.7475445",
"0.7461655",
"0.7461627",
"0.7452697",
"0.74179375",
"0.74173796",
"0.74080294",
"0.7403118",
"0.7399328",
"0.73914623",
"0.7391003",
"0.73779005"
] | 0.82122886 | 0 |
itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(itkLightObject obj) > itkTernaryAddImageFilterIUC3IUC3IUC3IUC3 | def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def itkBinaryContourImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def itkBinaryContourImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUC3IUC3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUC3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)"
] | [
"0.7892304",
"0.78337157",
"0.78077155",
"0.7804674",
"0.77679163",
"0.7764134",
"0.7759513",
"0.7749586",
"0.7723623",
"0.7722778",
"0.77218026",
"0.7688984",
"0.7677636",
"0.76723707",
"0.76719034",
"0.76536036",
"0.7652875",
"0.7634969",
"0.76083463",
"0.7607304",
"0.7603553",
"0.76028025",
"0.7595124",
"0.75915504",
"0.7586499",
"0.7568826",
"0.7559071",
"0.755819",
"0.75473166",
"0.75273514"
] | 0.81195134 | 0 |
New() > itkTernaryAddImageFilterIUL2IUL2IUL2IUL2

Create a new object of the class itkTernaryAddImageFilterIUL2IUL2IUL2IUL2 and set the input and the parameters if some named or non-named arguments are passed to that method.

New() tries to assign all the non-named parameters to the inputs of the new object: the first non-named parameter goes to the first input, etc.

The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.7961385",
"0.7959622",
"0.79339135",
"0.7845965",
"0.7836587",
"0.78198117",
"0.7744429",
"0.765472",
"0.7626084",
"0.75983095",
"0.75397074",
"0.7475012",
"0.73143613",
"0.7257502",
"0.72472817",
"0.72370017",
"0.7167874",
"0.71628785",
"0.7112177",
"0.71041477",
"0.70785636",
"0.7064429",
"0.7063965",
"0.7046232",
"0.70395935",
"0.7029957",
"0.70141953",
"0.70115495",
"0.70110214",
"0.70063776"
] | 0.81316435 | 0 |
itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(itkLightObject obj) > itkTernaryAddImageFilterIUL2IUL2IUL2IUL2 | def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_cast(*args)",
"def itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def itkHuangThresholdImageFilterIF2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUS2_cast(obj)",
"def itkNotImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF2IF2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF2IF2_cast(obj)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args)",
"def itkNotImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def itkHuangThresholdImageFilterIF2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIF2ISS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF2ISS2_cast(obj)",
"def itkTernaryAddImageFilterID2ID2ID2ID2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID2ID2ID2ID2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF2IF2IF2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF2IF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC2IUC2 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUS2_cast(obj)",
"def itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC2IUC2IUC2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC2IUC2IUC2 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC2IUC2IUC2_cast(obj)",
"def itkHuangThresholdImageFilterIUS2IUS2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS2IUS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS2IUS2_cast(obj)",
"def itkHuangThresholdImageFilterIUC2IUC2_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2IUC2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2IUC2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC2ISS2 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC2ISS2_cast(obj)",
"def itkBinaryContourImageFilterIF2IF2_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUC3IUC3IUC3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUC3IUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF2IF2 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF2IF2_cast(obj)"
] | [
"0.7352389",
"0.7324347",
"0.7310399",
"0.7304036",
"0.72892624",
"0.7284477",
"0.72233516",
"0.7186214",
"0.7185038",
"0.7158732",
"0.71572274",
"0.7150702",
"0.7124124",
"0.7089004",
"0.7087673",
"0.7077956",
"0.70659965",
"0.7064473",
"0.7033919",
"0.70215225",
"0.70097744",
"0.70002913",
"0.6992403",
"0.69826216",
"0.69639754",
"0.6947931",
"0.694189",
"0.69389534",
"0.69347817",
"0.693306"
] | 0.7681724 | 0 |
New() -> itkTernaryAddImageFilterIUL3IUL3IUL3IUL3 Create a new object of the class itkTernaryAddImageFilterIUL3IUL3IUL3IUL3 and set the input and the parameters if some named or nonnamed arguments are passed to that method. New() tries to assign all the non named parameters to the input of the new objects: the first non named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkReinitializeLevelSetImageFilterIF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkThresholdSegmentationLevelSetImageFilterIF3IF3F.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkCosImageFilterIF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.81462044",
"0.8110021",
"0.7995434",
"0.7912326",
"0.79099345",
"0.787577",
"0.77468467",
"0.7723048",
"0.77158433",
"0.7480538",
"0.74527663",
"0.73376215",
"0.7280301",
"0.7275997",
"0.7273901",
"0.7256137",
"0.723621",
"0.72328305",
"0.7226152",
"0.7203478",
"0.7197948",
"0.71940047",
"0.7193793",
"0.7184354",
"0.7168656",
"0.7158746",
"0.7146881",
"0.7145578",
"0.71421826",
"0.7138644"
] | 0.814467 | 1 |
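The New() entries in this section all carry the same itkTemplate convention: positional arguments are routed to the filter inputs and keyword arguments become calls to the matching Set* method. A minimal sketch of that equivalence, assuming ITK's Python wrapping is importable and that img1, img2, img3 are existing images of the matching pixel type (those names and the InPlace parameter are illustrative, not taken from the entries):

f = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.New(img1, img2, img3, InPlace=False)
# ...which itkTemplate.New expands to roughly:
f = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.New()
f.SetInput1(img1)    # first non-named argument goes to the first input
f.SetInput2(img2)
f.SetInput3(img3)
f.SetInPlace(False)  # named argument 'InPlace' calls SetInPlace()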
itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(itkLightObject obj) -> itkTernaryAddImageFilterIUL3IUL3IUL3IUL3 | def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterIF3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUC3_cast(obj)",
"def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUC3_cast(obj)",
"def itkHuangThresholdImageFilterISS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_cast(*args)",
"def itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIF3IF3IF3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIF3IF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIF3IF3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIF3IF3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def itkHuangThresholdImageFilterISS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3ISS3_cast(obj)"
] | [
"0.7748961",
"0.76095223",
"0.7589307",
"0.75607187",
"0.7520681",
"0.7497383",
"0.74870694",
"0.74818844",
"0.7464951",
"0.74644643",
"0.74344575",
"0.74030006",
"0.7387342",
"0.73710954",
"0.7369231",
"0.7366797",
"0.7360392",
"0.73567164",
"0.73386925",
"0.7336549",
"0.7334058",
"0.73039967",
"0.72962046",
"0.729028",
"0.7278138",
"0.7268886",
"0.72635156",
"0.72612345",
"0.7257982",
"0.72538674"
] | 0.78951705 | 0 |
New() -> itkTernaryAddImageFilterIUS2IUS2IUS2IUS2 Create a new object of the class itkTernaryAddImageFilterIUS2IUS2IUS2IUS2 and set the input and the parameters if some named or nonnamed arguments are passed to that method. New() tries to assign all the non named parameters to the input of the new objects: the first non named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF2IF2IF2IF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID2ID2ID2ID2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL2IUL2IUL2IUL2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC2IUC2IUC2IUC2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS2ISS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBinaryContourImageFilterIUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.80503565",
"0.7489746",
"0.7481182",
"0.74704665",
"0.7466978",
"0.7464551",
"0.74596614",
"0.74402666",
"0.74363726",
"0.7434975",
"0.73753166",
"0.734277",
"0.73379105",
"0.7327973",
"0.7306081",
"0.7305786",
"0.7299731",
"0.7296037",
"0.7261096",
"0.7260247",
"0.7259942",
"0.72414213",
"0.72326684",
"0.7224352",
"0.7211549",
"0.7182855",
"0.7161747",
"0.7136908",
"0.7096544",
"0.7092104"
] | 0.83108383 | 0 |
New() -> itkTernaryAddImageFilterIUS3IUS3IUS3IUS3 Create a new object of the class itkTernaryAddImageFilterIUS3IUS3IUS3IUS3 and set the input and the parameters if some named or nonnamed arguments are passed to that method. New() tries to assign all the non named parameters to the input of the new objects: the first non named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. | def New(*args, **kargs):
obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3.__New_orig__()
import itkTemplate
itkTemplate.New(obj, *args, **kargs)
return obj | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIF3IF3IF3IF3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIF3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterID3ID3ID3ID3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSpeckleNoiseImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkNotImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkShotNoiseImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterISS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSquaredDifferenceImageFilterIUS3IUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3ISS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkSubtractImageFilterIUS3IUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterIF3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUC3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUS2IUS2IUS2IUS2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkBoundedReciprocalImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarConnectedComponentImageFilterIUS3IUS3_Superclass.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkHuangThresholdImageFilterIUS3IUC3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def New(*args, **kargs):\n obj = itkScalarImageKmeansImageFilterISS3IUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.81209564",
"0.76765245",
"0.76381725",
"0.7637697",
"0.76059175",
"0.75846446",
"0.75640565",
"0.75020564",
"0.7486624",
"0.747826",
"0.7462136",
"0.7449853",
"0.74443847",
"0.7438893",
"0.7429914",
"0.74282986",
"0.74251896",
"0.74120355",
"0.73986375",
"0.7391535",
"0.7381102",
"0.7380992",
"0.73763245",
"0.7364215",
"0.7332884",
"0.7310022",
"0.72656566",
"0.72583055",
"0.7250104",
"0.72257584"
] | 0.83841634 | 0 |
itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(itkLightObject obj) -> itkTernaryAddImageFilterIUS3IUS3IUS3IUS3 | def itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(*args):
return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(*args) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def itkHuangThresholdImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUS3_cast(obj)",
"def itkNotImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIF3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIF3IUS3_cast(obj)",
"def itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUL3IUL3IUL3IUL3_cast(*args)",
"def itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUC3IUC3IUC3IUC3_cast(*args)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUS3IUS3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUS3IUS3_cast(obj)",
"def itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterID3ID3ID3ID3_cast(*args)",
"def itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF3IF3IF3IF3_cast(*args)",
"def itkHuangThresholdImageFilterIUS3ISS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterISS3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3ISS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3ISS3_cast(obj)",
"def itkHuangThresholdImageFilterIUC3IUS3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUC3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUC3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterISS3IUS3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterISS3IUS3_cast(obj)",
"def itkNotImageFilterIF3IF3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(*args):\n return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(*args)",
"def itkBinaryContourImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkBinaryContourImageFilterIUS3IUS3 *\":\n return _itkBinaryContourImageFilterPython.itkBinaryContourImageFilterIUS3IUS3_cast(obj)",
"def itkHuangThresholdImageFilterIUS3IUC3_cast(obj: 'itkLightObject') -> \"itkHuangThresholdImageFilterIUS3IUC3 *\":\n return _itkHuangThresholdImageFilterPython.itkHuangThresholdImageFilterIUS3IUC3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3IUS3_cast(obj)",
"def itkScalarImageKmeansImageFilterIF3IUS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIF3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIF3IUS3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkNotImageFilterIF3IF3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIF3IF3_cast(obj)",
"def cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUS3IUS3IUS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUS3IUS3IUS3_cast(obj)",
"def itkSquaredDifferenceImageFilterIUS3IUS3IUS3_cast(obj: 'itkLightObject') -> \"itkSquaredDifferenceImageFilterIUS3IUS3IUS3 *\":\n return _itkSquaredDifferenceImageFilterPython.itkSquaredDifferenceImageFilterIUS3IUS3IUS3_cast(obj)",
"def itkScalarImageKmeansImageFilterIUS3IUS3_cast(obj: 'itkLightObject') -> \"itkScalarImageKmeansImageFilterIUS3IUS3 *\":\n return _itkScalarImageKmeansImageFilterPython.itkScalarImageKmeansImageFilterIUS3IUS3_cast(obj)",
"def itkNotImageFilterIUC3IUC3_cast(obj: 'itkLightObject') -> \"itkNotImageFilterIUC3IUC3 *\":\n return _itkNotImageFilterPython.itkNotImageFilterIUC3IUC3_cast(obj)"
] | [
"0.80789053",
"0.80319655",
"0.8008976",
"0.79430354",
"0.79271203",
"0.78627837",
"0.78258866",
"0.782384",
"0.781271",
"0.779851",
"0.77923465",
"0.7790801",
"0.7773563",
"0.77652675",
"0.7721575",
"0.770351",
"0.7701595",
"0.7673285",
"0.7649216",
"0.76348764",
"0.7603356",
"0.7602878",
"0.76006067",
"0.7584524",
"0.7565475",
"0.75587696",
"0.75365216",
"0.7534538",
"0.75322545",
"0.7508018"
] | 0.81228995 | 0 |
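The *_cast entries in this section follow one pattern: they take a reference held generically as an itkLightObject and return it typed as the concrete wrapped filter, so the full filter interface becomes available again. A brief sketch, assuming the wrapping above is importable; holding the filter only as a light object is contrived here and purely illustrative:

f = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3.New()
light_ref = f  # elsewhere this reference would come from a type-erased ITK interface
typed = itkTernaryAddImageFilterIUS3IUS3IUS3IUS3_cast(light_ref)
# 'typed' exposes the itkTernaryAddImageFilterIUS3IUS3IUS3IUS3 interface again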
Compute the aperture radius necessary to have a certain SPAXEL SCALE [in mas] at a certain WAVELENGTH [in microns]. That would be the aperture radius in an array ranging from [-1, 1] in physical length. For example, if rho = 0.5, then the necessary aperture is a circle of half the size of the array. We can use the inverse of that to get the "oversize" in physical units in our arrays to match a given scale. | def rho_spaxel_scale(spaxel_scale=4.0, wavelength=1.0):
scale_rad = spaxel_scale / MILIARCSECS_IN_A_RAD
rho = scale_rad * ELT_DIAM / (wavelength * 1e-6)
return rho | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def spaxel_scale(scale=4, wave=1.0):\n\n scale_rad = scale / MILIARCSECS_IN_A_RAD\n rho = scale_rad * ELT_DIAM / (wave * 1e-6)\n print(rho)",
"def check_spaxel_scale(rho_aper, wavelength):\n\n SPAXEL_RAD = rho_aper * wavelength / ELT_DIAM * 1e-6\n SPAXEL_MAS = SPAXEL_RAD * MILIARCSECS_IN_A_RAD\n print('%.2f mas spaxels at %.2f microns' %(SPAXEL_MAS, wavelength))",
"def _compute_mass(box_size, evo_config):\n\n # ensure format\n standard_volume = evo_config['individuals']['standard_volume']\n if isinstance(box_size, list):\n if len(box_size) == 1: # sphere\n box_size = box_size[0]\n box_size = np.asarray(box_size)\n\n if np.prod(box_size.shape) < 2: # sphere\n return 4 / 3 * np.pi * box_size**3 / standard_volume\n else: # box\n if np.ndim(box_size) == 1:\n return np.prod(box_size * 2) / standard_volume\n else:\n return np.prod(box_size * 2, axis=1) / standard_volume",
"def sphere_volume(sphere_radius):\n return (4 / 3 * np.pi * sphere_radius**3)",
"def totalMass(self, trunc=None):\n if trunc is None:\n trunc = self.trunc\n rVir = self.U.rVir(m, z)\n rS, rhoS, c = self.rS_rhoS_c(m, z)\n # truncation radius over scale radius\n xMax = trunc * rVir/rS\n result = 4./3. * np.pi * rS**3 * rhoS\n result = xMax - np.log(1 + xMax)\n return result",
"def totalMass(self, trunc=None):\n if trunc is None:\n trunc = self.trunc\n rVir = self.U.rVir(m, z)\n rS, rhoS, c = self.rS_rhoS_c(m, z)\n # truncation radius over scale radius\n xMax = trunc * rVir/rS\n result = 4./3. * np.pi * rS**3 * rhoS\n result = xMax - np.log(1 + xMax)\n return result",
"def getSphereRadius(self):\n return 1.5",
"def sphere_volume(r):\n return (4/3) * 3.14159 * r**3",
"def sphere_volume(r):\n\treturn 4/3. * math.pi * r ** 3",
"def sphere_volume(radius : number) -> number:\n volume = 4/3*(pi*radius*radius*radius)\n return volume",
"def sphrad(vol):\n return (3.*vol/(4.*np.pi))**(1./3.)",
"def rscale(mag=10.0):\n if mag > 11.5:\n return 0.5\n elif mag > 11.0:\n return 1.0\n elif mag > 10.5:\n return 1.5\n elif mag > 10.0:\n return 1.5\n elif mag > 9.5:\n return 2.0\n elif mag > 9.0:\n return 2.5\n elif mag > 8.5:\n return 3.0\n else:\n return 3.5",
"def sphere_sre(solution):\n a = 0\n bias = 0.2\n x = solution.get_x()\n x1 = x[:10]\n x2 = x[10:]\n value1 = sum([(i-bias)*(i-bias) for i in x1])\n value2 = 1/len(x) * sum([(i-bias)*(i-bias) for i in x2])\n return value1 + value2",
"def get_mag_for_size(slide, size):\n max_size = slide.dimensions\n max_mag = highest_mag(slide)\n downsample = np.average([max_dim/size_dim for max_dim, size_dim in zip(max_size, size)])\n return max_mag/downsample",
"def airy_and_slicer(surface, wavelength, scale_mas, PSF_window, N_window):\n\n # Print message to know we are updating the cache\n print('Recalculating Airy Pattern for %.3f microns' % wavelength)\n\n # Plate scales [Px, Py] for each spaxel scale in mm / arcsec,\n # depending on the surface [IS: Image Slicer, DET: Detector]\n plate_scales = {'IS': {4.0: [125, 250], 60.0: [16.67, 16.67]},\n 'DET': {4.0: [3.75, 7.5], 60.0: [0.5, 0.5]}}\n plate_x = plate_scales[surface][scale_mas][0]\n plate_y = plate_scales[surface][scale_mas][1]\n\n # We know how many Microns the pixels of the Geometric PSF span [PSF_window / N_window]\n pix_sampling = PSF_window / N_window # micron at the detector plane\n # Using the plate scale we calculate how many m.a.s each of those pixels have to span\n pix_scale_x = pix_sampling / plate_x # milliarcsec / pixel\n pix_scale_y = pix_sampling / plate_y # milliarcsec / pixel\n\n # Calculate the relative size of the pupil aperture needed to ensure the PSF is\n # sampled with the given pix_scale at the focal plane\n ELT_DIAM = 39\n MILIARCSECS_IN_A_RAD = 206265000\n pix_rad_x = pix_scale_x / MILIARCSECS_IN_A_RAD # radians / pixel\n pix_rad_y = pix_scale_y / MILIARCSECS_IN_A_RAD\n RHO_APER_x = pix_rad_x * ELT_DIAM / (wavelength * 1e-6)\n RHO_APER_y = pix_rad_y * ELT_DIAM / (wavelength * 1e-6)\n RHO_OBSC_x = 0.30 * RHO_APER_x # ELT central obscuration\n RHO_OBSC_y = 0.30 * RHO_APER_y # ELT central obscuration\n\n # Sanity check\n PIX_RAD_x = RHO_APER_x * wavelength / ELT_DIAM * 1e-6\n PIX_RAD_y = RHO_APER_y * wavelength / ELT_DIAM * 1e-6\n PIX_MAS_x = PIX_RAD_x * MILIARCSECS_IN_A_RAD\n PIX_MAS_y = PIX_RAD_y * MILIARCSECS_IN_A_RAD\n\n # Define the ELT pupil mask. Note that we use a central obscuration too\n N = 2048\n x = np.linspace(-1, 1, N)\n xx, yy = np.meshgrid(x, x)\n\n # To get the anamorphic scaling we define the equation for an ellipse\n rho = np.sqrt((xx / RHO_APER_x) ** 2 + (yy / RHO_APER_y) ** 2)\n\n # (1) Propagate to the Image Slicer Focal plane\n elt_mask = (RHO_OBSC_x / RHO_APER_x < rho) & (rho < 1.0)\n pupil = elt_mask * np.exp(1j * elt_mask)\n image_electric = fftshift(fft2(pupil))\n\n if surface == 'IS':\n # print(\"IS\")\n # We are already at the Image Slicer, don't do anything else\n min_pix, max_pix = N // 2 - N_window // 2, N // 2 + N_window // 2\n final_psf = (np.abs(image_electric))**2\n final_psf /= np.max(final_psf)\n crop_psf = final_psf[min_pix:max_pix, min_pix:max_pix]\n\n elif surface == 'DET':\n # print(\"DET\")\n # (1.1) Add slicer effect by masking\n # We mask the PSF covering a band of size 1x SPAXEL, depending on the scale\n # If we have 4x4 mas, then we cover a band of 4 mas over the PSF\n x_min, x_max = -N/2 * PIX_MAS_x, N/2 * PIX_MAS_x\n y_min, y_max = -N/2 * PIX_MAS_y, N/2 * PIX_MAS_y\n x_slice = np.linspace(x_min, x_max, N, endpoint=True)\n y_slice = np.linspace(y_min, y_max, N, endpoint=True)\n x_grid, y_grid = np.meshgrid(x_slice, y_slice)\n slicer_mask = np.abs(y_grid) < scale_mas / 2\n\n # ## Show the PSF both in [mas] space where it should be circular and in [pixel] space where it should be anamorphic\n # fig, ax = plt.subplots(1, 1)\n # img1 = ax.imshow((np.abs(image_electric))**2, extent=[x_min, x_max, y_min, y_max], cmap='bwr')\n # # plt.colorbar(img1, ax=ax)\n # ax.set_title(r'Airy Pattern | %.1f mas scale | Wavelength: %.3f $\\mu$m' % (scale_mas, wavelength))\n # ax.set_xlabel(r'X [mas]')\n # ax.set_ylabel(r'Y [mas]')\n # ax.set_xlim([-10, 10])\n # ax.set_ylim([-10, 10])\n #\n # fig, ax = plt.subplots(1, 1)\n # img1 = 
ax.imshow((np.abs(image_electric))**2, extent=[-N/2, N/2, -N/2, N/2], cmap='bwr')\n # ax.set_title(r'Airy Pattern | %.1f mas scale | Wavelength: %.3f $\\mu$m' % (scale_mas, wavelength))\n # ax.set_xlabel(r'Pixels [ ]')\n # ax.set_ylabel(r'Pixels [ ]')\n # ax.set_xlim([-100, 100])\n # ax.set_ylim([-100, 100])\n\n # plt.show()\n\n # (2) Propagate the masked electric field to Pupil Plane\n pup_grating = ifft2(fftshift(slicer_mask * image_electric))\n # (2.1) Add pupil mask, this time without the central obscuration\n aperture_mask = rho < 1.0\n\n # (3) Propagate back to Focal Plane\n final_focal = fftshift(fft2(aperture_mask * pup_grating))\n final_psf = (np.abs(final_focal))**2\n final_psf /= np.max(final_psf)\n\n # (4) Crop the PSF to fit to the necessary window to ease the convolutions\n min_pix, max_pix = N//2 - N_window//2, N//2 + N_window//2\n crop_psf = final_psf[min_pix:max_pix, min_pix:max_pix]\n\n # If we want to show the plots for Documentation\n\n # fig, (ax1, ax2, ax3) = plt.subplots(1, 3)\n # psf_airy = (np.abs(image_electric))**2\n # img1 = ax1.imshow(psf_airy, extent=[x_min, x_max, y_min, y_max], cmap='bwr')\n # ax1.axhline(y=scale_mas/2, linestyle='--', color='black')\n # ax1.axhline(y=-scale_mas/2, linestyle='--', color='black')\n # ax1.set_xlabel(r'X [mas]')\n # ax1.set_ylabel(r'Y [mas]')\n # ax1.set_xlim([-15, 15])\n # ax1.set_ylim([-15, 15])\n # ax1.set_title(r'Airy Pattern | Slicer Mask %.1f mas' % scale_mas)\n #\n # img2 = ax2.imshow(aperture_mask * (np.abs(pup_grating)**2), extent=[-1, 1, -1, 1], cmap='bwr')\n # ax2.set_title(r'Pupil Plane | Aperture Mask')\n # ax2.set_xlim([-0.25, 0.25])\n # ax2.set_ylim([-0.25, 0.25])\n #\n # img3 = ax3.imshow(final_psf, extent=[x_min, x_max, y_min, y_max], cmap='bwr')\n # ax3.set_xlabel(r'X [mas]')\n # ax3.set_ylabel(r'Y [mas]')\n # ax3.set_xlim([-15, 15])\n # ax3.set_ylim([-15, 15])\n # ax3.set_title(r'Diffraction Effects')\n # plt.show()\n\n return crop_psf",
"def sphvol(r):\n return (4./3.)*np.pi*(r**3.)",
"def nsphere_volume(n, r):\n return math.pi ** (n / 2) * (r ** n) / gamma(n / 2 + 1)",
"def sphere_r_intensity(img):\n pixels = []\n for j in range(0, img.shape[0]):\n for i in range(1, 40):\n pixels.append(img[j, img.shape[1] - i])\n\n return np.mean(pixels)",
"def rms(a):\n\treturn np.sqrt(np.sum(np.power(a,2))/len(a))",
"def spherearea(dia):\n r = dia*1e-4 # convert to cm\n return(4*np.pi*r**2)",
"def get_scale(units, compartmentId, volume, extracellularVolume):\r\n if compartmentId == 'c':\r\n V = volume\r\n else:\r\n V = extracellularVolume\r\n\r\n if units == 'uM':\r\n return 1. / N_AVOGADRO / V * 1e6\r\n elif units == 'mM':\r\n return 1. / N_AVOGADRO / V * 1e3\r\n elif units == 'molecules':\r\n return 1.\r\n else:\r\n raise Exception('Invalid units \"%s\"' % units)",
"def surfaceIntSphere(r: float) -> float:\n return 4.0 * np.pi * r * r",
"def sphere_area(radius : number) -> number:\n area = 4*pi*radius*radius\n return area",
"def asphericity(Rnm_eg):\n num = (Rnm_eg[0] - Rnm_eg[2])**2 + (Rnm_eg[1] - Rnm_eg[2])**2 + (Rnm_eg[0] - Rnm_eg[1])**2\n dem = 2*(Rnm_eg[0] + Rnm_eg[1] + Rnm_eg[2])**2\n Asphere = num/dem\n return Asphere",
"def asphericity(Rnm_eg):\n num = (Rnm_eg[0] - Rnm_eg[2])**2 + (Rnm_eg[1] - Rnm_eg[2])**2 + (Rnm_eg[0] - Rnm_eg[1])**2\n dem = 2*(Rnm_eg[0] + Rnm_eg[1] + Rnm_eg[2])**2\n Asphere = num/dem\n return Asphere",
"def calc_hypersphere_volume(r: float, n: int) -> float:\n return (math.pi ** (n / 2) * r ** n) / gamma((n / 2) + 1)",
"def apply_spectral_radius(w,spectral_radius):\n assert len(w.shape)==2 and w.shape[0]==w.shape[1],\\\n \"Error: apply_spectral_radius must receive 'w' as a square matrix.\"\n\n new_w = np.array(w)\n spectral_radius_w = calc_spectral_radius(w)\n if spectral_radius_w > 0.0:\n new_w = (w / spectral_radius_w) * spectral_radius\n else:\n print(\"Warning: Spectral radius of 'w' is zero (because of small size). Therefore, spectral radius does not changed.\")\n\n return new_w",
"def beam_radius(self, x, Amp, beam_type='vortex', Amp_Flag=True):\r\n \r\n# dx = x[[0],[1]]-x[[0],[0]]\r\n# \r\n# Intensity = (Amp*Amp.conjugate()).real\r\n# N,N = Amp.shape\r\n# \r\n# if beam_type == 'vortex':\r\n# \r\n# \r\n# m,n = matrix_Lib.getPositon(Intensity)\r\n# \r\n# elif beam_type == 'gauss':\r\n# \r\n# m,n = matrix_Lib.getPositon(Intensity,value=np.max(Intensity)/np.e**2)\r\n# \r\n# # cartesian coordinate only;\r\n# radius = np.sqrt(((m-N/2)*dx)**2+((n-N/2)*dx)**2)\r\n# \r\n# return radius\r\n \r\n dx = x[[0],[1]]-x[[0],[0]]\r\n \r\n if Amp_Flag:\r\n Intensity = (Amp*Amp.conjugate()).real\r\n else:\r\n Intensity = Amp\r\n \r\n N,N = Amp.shape\r\n \r\n if beam_type == 'vortex':\r\n \r\n radius = 0\r\n Max = np.max(Intensity)\r\n \r\n NumofDots = 0\r\n \r\n for i in range(N):\r\n for j in range(N):\r\n if Intensity[i,j] > math.floor(Max*1e8)/1e8:\r\n radius += np.sqrt(((i-N/2)*dx)**2+((j-N/2)*dx)**2)\r\n NumofDots += 1\r\n \r\n radius = radius/NumofDots\r\n \r\n elif beam_type == 'gauss':\r\n \r\n m,n = self.getPositon(Intensity, value = np.max(Intensity)/np.e**2)\r\n # appropriate for cartesian coordinate only;\r\n radius = np.sqrt(((m-N/2)*dx)**2+((n-N/2)*dx)**2)\r\n \r\n return radius*2",
"def calculate_soma_surface(data: Data) -> float:\n\n soma = data.morphology.get_soma()\n return 4.0 * math.pi * soma['radius'] * soma['radius']",
"def resolution(self, radius, wave = None):\n dev = Prism.minDeviation(self,wave)\n alpha = dev/2 + self.angle/2\n\n # Form path difference between top and bottom of the beam\n d = 4*radius*math.sin(self.angle/2)/math.cos(alpha)\n dmax = 2.0*self.height*math.tan(self.angle/2) # Length of bottom of prism\n if d > dmax:\n d = dmax\n print(\"Resolution limited by size of prism\")\n\n\n dn = self.n.getDerivative(wave) # dn/d lambda\n return 1000*d*dn # scale to microms"
] | [
"0.6397559",
"0.59061825",
"0.58994806",
"0.5894734",
"0.58598316",
"0.58598316",
"0.5819259",
"0.5739598",
"0.57345325",
"0.56258273",
"0.5618448",
"0.5549182",
"0.55437136",
"0.5513248",
"0.55029523",
"0.5493522",
"0.54327166",
"0.54285365",
"0.5425453",
"0.54176575",
"0.5415356",
"0.5405274",
"0.5397435",
"0.5390997",
"0.5390997",
"0.5389578",
"0.53839207",
"0.5378676",
"0.53710204",
"0.5360433"
] | 0.6352933 | 1 |
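The rho_spaxel_scale entry above and the check_spaxel_scale entry below are inverses of each other. The constants ELT_DIAM = 39 and MILIARCSECS_IN_A_RAD = 206265000 are taken from the neighbouring entries; the 4 mas / 1.5 micron values are only illustrative. A self-contained sketch of the round trip:

MILIARCSECS_IN_A_RAD = 206265000
ELT_DIAM = 39  # metres

def rho_spaxel_scale(spaxel_scale=4.0, wavelength=1.0):
    scale_rad = spaxel_scale / MILIARCSECS_IN_A_RAD
    return scale_rad * ELT_DIAM / (wavelength * 1e-6)

rho = rho_spaxel_scale(spaxel_scale=4.0, wavelength=1.5)  # ~0.504
# check_spaxel_scale inverts the relation to recover the spaxel scale in mas:
spaxel_mas = rho * 1.5 * 1e-6 / ELT_DIAM * MILIARCSECS_IN_A_RAD  # 4.0 mas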
Checks the spaxel scale at a certain wavelength, for a given aperture radius defined for a [-1, 1] physical array | def check_spaxel_scale(rho_aper, wavelength):
SPAXEL_RAD = rho_aper * wavelength / ELT_DIAM * 1e-6
SPAXEL_MAS = SPAXEL_RAD * MILIARCSECS_IN_A_RAD
print('%.2f mas spaxels at %.2f microns' %(SPAXEL_MAS, wavelength)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def spaxel_scale(scale=4, wave=1.0):\n\n scale_rad = scale / MILIARCSECS_IN_A_RAD\n rho = scale_rad * ELT_DIAM / (wave * 1e-6)\n print(rho)",
"def rho_spaxel_scale(spaxel_scale=4.0, wavelength=1.0):\n\n scale_rad = spaxel_scale / MILIARCSECS_IN_A_RAD\n rho = scale_rad * ELT_DIAM / (wavelength * 1e-6)\n return rho",
"def guess_scaling(name, spectrum):\n spectra = '%s/disp/%s.1d.fits' % (name, zerocount(spectrum))\n skyname = '%s/sky.1d.fits' % name\n spectrafits = pyfits.open(spectra)\n skyfits = pyfits.open(skyname)\n scalings = []\n for line in LINES:\n spec_peak, spec_cont = get_peak_cont(spectrafits, line, 5)\n sky_peak, sky_cont = get_peak_cont(skyfits, line, 5)\n scale = ((spec_peak - spec_cont) / (sky_peak - sky_cont))\n scalings.append(scale)\n return avg(*scalings)",
"def powerlaw(self, wavelength, AKs):\n # If input entry is a single float, turn it into an array\n try:\n len(wavelength)\n except:\n wavelength = [wavelength]\n\n # Return error if any wavelength is beyond interpolation range of\n # extinction law\n if ((min(wavelength) < (self.low_lim*10**-4)) | (max(wavelength) > (self.high_lim*10**-4))):\n return ValueError('{0}: wavelength values beyond interpolation range'.format(self))\n \n # Extract wave and A/AKs from law, turning wave into micron units\n wave = self.wave * (10**-4)\n law = self.obscuration\n\n # Find the value of the law at the closest points\n # to wavelength\n A_AKs_at_wave = []\n for ii in wavelength:\n idx = np.where( abs(wave - ii) == min(abs(wave - ii)) )\n A_AKs_at_wave.append(law[idx][0])\n\n # Now multiply by AKs (since law assumes AKs = 1)\n A_at_wave = np.array(A_AKs_at_wave) * AKs\n\n return A_at_wave",
"def analysis_function_rms_wfe(system, wavelength_idx, config, spaxels_per_slice, surface, pupil_sampling,\n remove_slicer=False):\n if config % 20 == 0:\n print(config)\n\n # Set Current Configuration\n system.MCE.SetCurrentConfiguration(config)\n\n # [WARNING]: for the 4x4 spaxel scale we noticed that a significant fraction of the rays get vignetted at the slicer\n # this introduces a bias in the RMS WFE calculation. To avoid this, we modify the Image Slicer aperture definition\n # so that all rays get through. Consequently, enough pupil rays are traced to get an unbiased estimation of RMS WFE\n if remove_slicer is True:\n\n expand_slicer_aperture(system)\n\n # [1] Some housekeeping and pre-processing operations\n # Get the Field Points for that configuration\n sysField = system.SystemData.Fields\n # Problem with the MC files. Before, all the E2E files had only 3 fields, now there's more, some spurious ones\n # So N_fields is no longer 3. Let's just hardcode the value to 3 temporarily\n # N_fields = sysField.NumberOfFields\n N_fields = 3\n N_waves = len(wavelength_idx)\n N_rays = N_waves * spaxels_per_slice\n\n # The only valid Field Points we should care about are 1-3 as defined by Matthias\n # The default Field Point definition of the E2E files is 1 & 3 are the edges of the slice, 2 is the centre\n fx_min, fy_min = sysField.GetField(1).X, sysField.GetField(1).Y\n fx_max, fy_max = sysField.GetField(3).X, sysField.GetField(3).Y\n\n # Note that this assumes Rectangular Normalization, the default in the E2E files.\n X_MAX = np.max([np.abs(sysField.GetField(i + 1).X) for i in range(N_fields)])\n Y_MAX = np.max([np.abs(sysField.GetField(i + 1).Y) for i in range(N_fields)])\n\n # Normalized field coordinates (hx, hy)\n hx_min, hx_max = fx_min / X_MAX, fx_max / X_MAX\n hy_min, hy_max = fy_min / Y_MAX, fy_max / Y_MAX\n\n # Sample between the edges of the slice as given by \"spaxels_per_slice\" to include as many points as we want\n hx = np.linspace(hx_min, hx_max, spaxels_per_slice)\n hy = np.linspace(hy_min, hy_max, spaxels_per_slice)\n\n # The useful data that we'll store\n obj_xy = np.array([X_MAX * hx, Y_MAX * hy]).T # The Field coordinates for the Object plane\n RMS_WFE = np.empty((N_waves, spaxels_per_slice)) # The RMS WFE results\n foc_xy = np.empty((N_waves, spaxels_per_slice, 2)) # The Chief Ray coordinates at the Detector\n\n # [2] This is where the core of the RMS WFE calculation takes place\n # First, we begin by defining the Raytrace\n raytrace = system.Tools.OpenBatchRayTrace()\n normUnPolData = raytrace.CreateNormUnpol(N_rays, constants.RaysType_Real, surface)\n\n # Start creating the Merit Function\n theMFE = system.MFE\n\n # Clear any operands that could be left from the E2E files\n nops = theMFE.NumberOfOperands\n theMFE.RemoveOperandsAt(1, nops)\n\n # Build the Merit Function\n # Set first operand to current configuration\n op = theMFE.GetOperandAt(1)\n op.ChangeType(constants.MeritOperandType_CONF)\n op.GetOperandCell(constants.MeritColumn_Param1).Value = config\n wfe_op = constants.MeritOperandType_RWRE # The Type of RMS WFE Operand: RWRE rectangular\n\n # Populate the Merit Function with RMS WFE Operands\n # Loop over the wavelengths\n for i_wave, wave_idx in enumerate(wavelength_idx):\n\n # Loop over all Spaxels in the Slice\n for j_field, (h_x, h_y) in enumerate(zip(hx, hy)):\n\n op = theMFE.AddOperand()\n op.ChangeType(wfe_op)\n op.GetOperandCell(constants.MeritColumn_Param1).Value = int(pupil_sampling)\n op.GetOperandCell(constants.MeritColumn_Param2).Value = 
int(wave_idx)\n op.GetOperandCell(constants.MeritColumn_Param3).Value = float(h_x)\n op.GetOperandCell(constants.MeritColumn_Param4).Value = float(h_y)\n op.GetOperandCell(constants.MeritColumn_Weight).Value = 0\n\n # Take advantage of the loop to simultaneously add the ray to the RayTrace\n normUnPolData.AddRay(wave_idx, h_x, h_y, 0, 0, constants.OPDMode_None)\n\n # time_1 = time() - start0\n # print(\"\\nTime spent setting up MF and Raytrace: %.3f sec\" % time_1)\n # start = time()\n\n # update the Merit Function\n theMFE.CalculateMeritFunction()\n # time_mf = time() - start\n # print(\"Time spent updating MF: %.3f sec\" % time_mf)\n\n # start = time()\n # Run the RayTrace for the whole Slice\n CastTo(raytrace, 'ISystemTool').RunAndWaitForCompletion()\n # time_ray = time() - start\n # print(\"Time spent running Raytrace: %.3f sec\" % time_ray)\n\n # start = time()\n # [3] Time to start reading the results of the RMS WFE Operands + Raytrace coordinates\n normUnPolData.StartReadingResults()\n # Loop over the wavelengths\n for i_wave, wave_idx in enumerate(wavelength_idx):\n # Loop over all Spaxels in the Slice\n for j_field, (h_x, h_y) in enumerate(zip(hx, hy)):\n\n # Calculate the Row index we need to get the Operand\n irow = 2 + i_wave * spaxels_per_slice + j_field\n # print(irow)\n\n op = theMFE.GetOperandAt(irow)\n\n # print(op.GetOperandCell(constants.MeritColumn_Param1).Value)\n # print(op.GetOperandCell(constants.MeritColumn_Param2).Value)\n # print(op.GetOperandCell(constants.MeritColumn_Param3).Value)\n # print(op.GetOperandCell(constants.MeritColumn_Param4).Value)\n rms = op.Value\n\n wavelength = system.SystemData.Wavelengths.GetWavelength(wave_idx).Wavelength\n\n RMS_WFE[i_wave, j_field] = wavelength * 1e3 * rms # We assume the Wavelength comes in Microns\n\n # If we get an RMS value of 0.0, print the data so we can double check the Zemax file\n # This is bad news and it mean the Rays are being vignetted somewhere\n if RMS_WFE[i_wave, j_field] == 0.0:\n print(\"\\nConfig #%d | Wave #%d | Field #%d\" % (config, wave_idx, j_field + 1))\n # raise ValueError\n\n output = normUnPolData.ReadNextResult()\n if output[2] == 0:\n x, y = output[4], output[5]\n foc_xy[i_wave, j_field, 0] = x\n foc_xy[i_wave, j_field, 1] = y\n\n vignetting_code = output[3]\n if vignetting_code != 0:\n vignetting_surface = system.LDE.GetSurfaceAt(vignetting_code).Comment\n # print(\"\\nConfig #%d\" % (config))\n # print(\"Vignetting at surface #%d: %s\" % (vignetting_code, vignetting_surface))\n # if config == 1:\n # raise ValueError\n\n normUnPolData.ClearData()\n CastTo(raytrace, 'ISystemTool').Close()\n # time_res = time() - start\n # print(\"Time spent reading results: %.3f sec\" % time_res)\n\n # time_total = time() - start0\n # print(\"TOTAL Time: %.3f sec\" % time_total)\n # sec_per_wave = time_total / N_waves * 1000\n # print(\"%3.f millisec per Wavelength\" % sec_per_wave)\n\n return [RMS_WFE, obj_xy, foc_xy]",
"def robust_scale(X, *, axis=..., with_centering=..., with_scaling=..., quantile_range=..., copy=..., unit_variance=...):\n ...",
"def apply_spectral_radius(w,spectral_radius):\n assert len(w.shape)==2 and w.shape[0]==w.shape[1],\\\n \"Error: apply_spectral_radius must receive 'w' as a square matrix.\"\n\n new_w = np.array(w)\n spectral_radius_w = calc_spectral_radius(w)\n if spectral_radius_w > 0.0:\n new_w = (w / spectral_radius_w) * spectral_radius\n else:\n print(\"Warning: Spectral radius of 'w' is zero (because of small size). Therefore, spectral radius does not changed.\")\n\n return new_w",
"def any_scale(scale):\n return scale",
"def param_scale_check(shape_x, shape_scale):\n\n length_x = len(shape_x)\n length_scale = len(shape_scale)\n\n if not(length_scale == 1 and shape_scale[0] == 1):\n if length_x != length_scale:\n raise RuntimeError(\n \"length_x and length_scale must be equal\")\n for i in range(length_scale):\n if shape_scale[i] != shape_x[i] and shape_scale[i] != 1:\n raise RuntimeError(\n \"shape_scale is not match to broadcast\")",
"def scale_mag_1(x):\n return np.array([np.true_divide(ui, mag(x)) for ui in x])",
"def norm_spectra(spectra, add_infinity=True):\n from scipy import interpolate\n start_n=np.array([3770.,3796.,3835.,3895.,3995.,4130.,4490.,4620.,5070.,5200.,\n 6000.,7000.,7550.,8400.])\n end_n=np.array([3795.,3830.,3885.,3960.,4075.,4290.,4570.,4670.,5100.,5300.,\n 6100.,7050.,7600.,8450.])\n n_range_s=np.array(['P','P','P','P','P','P','M','M','M','M','M','M','M','M'])\n if len(spectra[0])>2:\n snr = np.zeros([len(start_n),3])\n spectra[:,2][spectra[:,2]==0.] = spectra[:,2].max()\n else: \n snr = np.zeros([len(start_n),2])\n wav = spectra[:,0]\n for j in range(len(start_n)):\n if (start_n[j] < wav.max()) & (end_n[j] > wav.min()):\n _s = spectra[(wav>=start_n[j])&(wav<=end_n[j])]\n _w = _s[:,0]\n #Avoids gappy spectra\n k=3 # Check if there are more points than 3\n if len(_s)>k:\n #interpolate onto 10* resolution\n l = np.linspace(_w.min(),_w.max(),(len(_s)-1)*10+1)\n if len(spectra[0])>2:\n tck = interpolate.splrep(_w,_s[:,1],w=1/_s[:,2], s=1000)\n #median errors for max/mid point\n snr[j,2] = np.median(_s[:,2]) / np.sqrt(len(_w))\n else: tck = interpolate.splrep(_w,_s[:,1],s=0.0)\n f = interpolate.splev(l,tck)\n #find maxima and save\n if n_range_s[j]=='P': snr[j,0], snr[j,1] = l[f==f.max()][0], f.max()\n #find mean and save\n elif n_range_s[j]=='M': snr[j,0:2] = np.mean(l), np.mean(f)\n else: print('Unknown n_range_s, ignoring')\n snr = snr[ snr[:,0] != 0 ]\n #t parameter chosen by eye. Position of knots.\n if snr[:,0].max() < 6460: knots = [3000,4900,4100,4340,4860,int(snr[:,0].max()-5)]\n else: knots = [3885,4340,4900,6460]\n if snr[:,0].min() > 3885:\n print('Warning: knots used for spline norm unsuitable for high order fitting')\n knots=knots[1:]\n if (snr[:,0].min() > 4340) or (snr[:,0].max() < 4901): \n knots=None # 'Warning: knots used probably bad'\n if add_infinity: # Adds points at inf & 0 for spline to fit to err = mean(spec err)\n if snr.shape[1] > 2:\n mean_snr = np.mean(snr[:,2])\n snr = np.vstack([ snr, np.array([90000. ,0., mean_snr ]) ])\n snr = np.vstack([ snr, np.array([100000.,0., mean_snr ]) ])\n else:\n snr = np.vstack([ snr, np.array([90000.,0.]) ])\n snr = np.vstack([ snr, np.array([100000.,0.]) ])\n try: #weight by errors\n if len(spectra[0])>2: \n tck = interpolate.splrep(snr[:,0],snr[:,1], w=1/snr[:,2], t=knots, k=3)\n else: tck = interpolate.splrep(snr[:,0],snr[:,1], t=knots, k=3)\n except ValueError:\n knots=None\n if len(spectra[0])>2: \n tck = interpolate.splrep(snr[:,0],snr[:,1], w=1/snr[:,2], t=knots, k=3)\n else: tck = interpolate.splrep(snr[:,0],snr[:,1], t=knots, k=3)\n cont_flux = interpolate.splev(wav,tck).reshape(wav.size, 1)\n spectra_ret = np.copy(spectra)\n spectra_ret[:,1:] = spectra_ret[:,1:]/cont_flux\n return spectra_ret, cont_flux",
"def scale_sky_spectrum(wlm, sky_spectrum, spectra, cut_sky=4., fmax=10, fmin=1, valid_wave_min=0, valid_wave_max=0, \n fibre_list=[100,200,300,400,500,600,700,800,900], plot=True, verbose=True, warnings=True): \n \n# # Read sky lines provided by 2dFdr\n# sky_line_,flux_sky_line_ = read_table(\"sky_lines_2dfdr.dat\", [\"f\", \"f\"] )\n# # Choose those lines in the range\n# sky_line=[]\n# flux_sky_line=[]\n# valid_wave_min = 6240\n# valid_wave_max = 7355\n# for i in range(len(sky_line_)):\n# if valid_wave_min < sky_line_[i] < valid_wave_max:\n# sky_line.append(sky_line_[i])\n# flux_sky_line.append(flux_sky_line_[i])\n \n \n if valid_wave_min == 0: valid_wave_min = wlm[0]\n if valid_wave_max == 0: valid_wave_max = wlm[-1]\n \n if verbose: print(\"\\n> Identifying sky lines using cut_sky =\",cut_sky,\", allowed SKY/OBJ values = [\",fmin,\",\",fmax,\"]\")\n if verbose: print(\" Using fibres = \",fibre_list)\n\n peaks,peaks_name,peaks_rest,continuum_limits=search_peaks(wlm,sky_spectrum, plot=plot, cut=cut_sky, fmax=fmax, only_id_lines=False, verbose=False) \n\n ratio_list=[]\n valid_peaks=[]\n \n if verbose: print(\"\\n Sky line Gaussian ratio Flux ratio\")\n n_sky_lines_found=0\n for i in range(len(peaks)):\n sky_spectrum_data=fluxes(wlm,sky_spectrum, peaks[i], fcal=False, lowlow=50,highhigh=50, plot=False, verbose=False, warnings=False)\n \n sky_median_continuum = np.nanmedian(sky_spectrum_data[11])\n \n object_spectrum_data_gauss=[]\n object_spectrum_data_integrated=[] \n median_list=[]\n for fibre in fibre_list: \n object_spectrum_flux=fluxes(wlm, spectra[fibre], peaks[i], fcal=False, lowlow=50,highhigh=50, plot=False, verbose=False, warnings=False)\n object_spectrum_data_gauss.append(object_spectrum_flux[3]) # Gaussian flux is 3\n object_spectrum_data_integrated.append(object_spectrum_flux[7]) # integrated flux is 7\n median_list.append(np.nanmedian(object_spectrum_flux[11]))\n object_spectrum_data=np.nanmedian(object_spectrum_data_gauss)\n object_spectrum_data_i=np.nanmedian(object_spectrum_data_integrated)\n \n object_median_continuum=np.nanmin(median_list) \n \n if fmin < object_spectrum_data/sky_spectrum_data[3] * sky_median_continuum/object_median_continuum < fmax :\n n_sky_lines_found = n_sky_lines_found + 1\n valid_peaks.append(peaks[i])\n ratio_list.append(object_spectrum_data/sky_spectrum_data[3])\n if verbose: print(\"{:3.0f} {:5.3f} {:2.3f} {:2.3f}\".format(n_sky_lines_found,peaks[i],object_spectrum_data/sky_spectrum_data[3], object_spectrum_data_i/sky_spectrum_data[7])) \n\n\n #print \"ratio_list =\", ratio_list\n #fit = np.polyfit(valid_peaks, ratio_list, 0) # This is the same that doing an average/mean\n #fit_line = fit[0]+0*wlm\n fit_line =np.nanmedian(ratio_list) # We just do a median\n #fit_line = fit[1]+fit[0]*wlm\n #fit_line = fit[2]+fit[1]*wlm+fit[0]*wlm**2\n #fit_line = fit[3]+fit[2]*wlm+fit[1]*wlm**2+fit[0]*wlm**3\n \n \n if plot:\n plt.plot(valid_peaks,ratio_list,\"+\")\n #plt.plot(wlm,fit_line)\n plt.axhline(y=fit_line, color='k', linestyle='--')\n plt.xlim(valid_wave_min-10, valid_wave_max+10) \n #if len(ratio_list) > 0:\n plt.ylim(np.nanmin(ratio_list)-0.2,np.nanmax(ratio_list)+0.2)\n plt.title(\"Scaling sky spectrum to object spectra\")\n plt.xlabel(\"Wavelength [$\\mathrm{\\AA}$]\")\n plt.ylabel(\"OBJECT / SKY\")\n plt.minorticks_on()\n plt.show()\n plt.close()\n \n if verbose: print(\" Using this fit to scale sky spectrum to object, the median value is \",np.round(fit_line,3),\"...\") \n \n sky_corrected = sky_spectrum * fit_line\n\n# plt.plot(wlm,sky_spectrum, 
\"r\", alpha=0.3)\n# plt.plot(wlm,sky_corrected, \"g\", alpha=0.3)\n# plt.show()\n# plt.close()\n \n return sky_corrected, np.round(fit_line,3)",
"def test_scale_value(make_rampmodel):\n\n datmod = make_rampmodel(2, 2, 4, 2048, 2048)\n\n # Calculate the scale based off of the input.\n scale = datmod.meta.exposure.frame_divisor / datmod.meta.exposure.nframes\n\n output = GroupScaleStep.call(datmod)\n\n scale_from_data = np.unique(output.data / datmod.data)\n\n # Since the scale value is applied uniformly to the array, if we divide the output\n # by the input then we should get a single unique value (ie the scale) calculated\n # by the pipeline.\n assert len(scale_from_data) == 1\n\n # Make sure the scale calculated manually from the data model above matched what the\n # pipeline calculated.\n assert scale == scale_from_data[0]",
"def Fitzpactrick09(self, wavelength, AKs):\n # If input entry is a single float, turn it into an array\n try:\n len(wavelength)\n except:\n wavelength = [wavelength]\n\n # Return error if any wavelength is beyond interpolation range of\n # extinction law\n if ((min(wavelength) < (self.low_lim*10**-4)) | (max(wavelength) > (self.high_lim*10**-4))):\n return ValueError('{0}: wavelength values beyond interpolation range'.format(self))\n \n # Extract wave and A/AKs from law, turning wave into micron units\n wave = self.wave * (10**-4)\n law = self.obscuration\n\n # Find the value of the law at the closest points\n # to wavelength\n A_AKs_at_wave = []\n for ii in wavelength:\n idx = np.where( abs(wave - ii) == min(abs(wave - ii)) )\n A_AKs_at_wave.append(law[idx][0])\n\n # Now multiply by AKs (since law assumes AKs = 1)\n A_at_wave = np.array(A_AKs_at_wave) * AKs\n\n return A_at_wave",
"def test_constructed_is_small(self):\n self.assertTrue(all(elt<10 for elt in goodwinsheaf.checkradii()))#check all entries have small radii",
"def Schlafly16(self, wavelength, AKs):\n # If input entry is a single float, turn it into an array\n try:\n len(wavelength)\n except:\n wavelength = [wavelength]\n\n # Return error if any wavelength is beyond interpolation range of\n # extinction law\n if ((min(wavelength) < (self.low_lim*10**-4)) | (max(wavelength) > (self.high_lim*10**-4))):\n return ValueError('{0}: wavelength values beyond interpolation range'.format(self))\n \n # Extract wave and A/AKs from law, turning wave into micron units\n wave = self.wave * (10**-4)\n law = self.obscuration\n\n # Find the value of the law at the closest points\n # to wavelength\n A_AKs_at_wave = []\n for ii in wavelength:\n idx = np.where( abs(wave - ii) == min(abs(wave - ii)) )\n A_AKs_at_wave.append(law[idx][0])\n\n # Now multiply by AKs (since law assumes AKs = 1)\n A_at_wave = np.array(A_AKs_at_wave) * AKs\n\n return A_at_wave",
"def quality(\n wavelength: Union[Quantity, ndarray],\n flux: Union[Quantity, ndarray],\n mask: Optional[ndarray] = None,\n **kwargs,\n) -> float:\n flux = flux * u.dimensionless_unscaled # Turn into Quantity if not already\n flux = flux / flux.unit # Remove units from flux (sqrt(N_e) is unitless)\n\n wis = sqrt_sum_wis(wavelength, flux, mask=mask, **kwargs)\n q = wis / np.sqrt(np.nansum(flux))\n return q.value",
"def Fritz11(self, wavelength, AKs):\n # If input entry is a single float, turn it into an array\n try:\n len(wavelength)\n except:\n wavelength = [wavelength]\n\n # Return error if any wavelength is beyond interpolation range of\n # extinction law\n if ((min(wavelength) < (self.low_lim*10**-4)) | (max(wavelength) > (self.high_lim*10**-4))):\n return ValueError('{0}: wavelength values beyond interpolation range'.format(self))\n \n # Extract wave and A/AKs from law, turning wave into micron units\n wave = self.wave * (10**-4)\n law = self.obscuration\n\n # Find the value of the law at the closest points\n # to wavelength\n A_AKs_at_wave = []\n for ii in wavelength:\n idx = np.where( abs(wave - ii) == min(abs(wave - ii)) )\n A_AKs_at_wave.append(law[idx][0])\n\n # Now multiply by AKs (since law assumes AKs = 1)\n A_at_wave = np.array(A_AKs_at_wave) * AKs\n\n return A_at_wave",
"def _scale_param(self, resid_us):\n return((resid_us**2).sum().sum() / self.dof)",
"def fluxes(wavelength, s, line, lowlow= 14, lowhigh=6, highlow=6, highhigh = 14, lmin=0, lmax=0, fmin=0, fmax=0, \n broad=2.355, plot=True, verbose=True, plot_sus = False, fcal = True, fit_continuum = True, median_kernel=35, warnings = True ): # Broad is FWHM for Gaussian sigma= 1,\n # s must be an array, no a list\n try: \n index_maximo_del_rango = s.tolist().index(np.nanmax(s))\n #print \" is AN ARRAY\"\n except Exception:\n #print \" s is A LIST -> must be converted into an ARRAY\" \n s = np.array(s)\n \n # Setup wavelength limits\n if lmin == 0 :\n lmin = line-65. # By default, +-65 A with respect to line\n if lmax == 0 :\n lmax = line+65.\n \n # Extract subrange to fit\n w_spec = []\n f_spec = []\n w_spec.extend((wavelength[i]) for i in range(len(wavelength)) if (wavelength[i] > lmin and wavelength[i] < lmax) ) \n f_spec.extend((s[i]) for i in range(len(wavelength)) if (wavelength[i] > lmin and wavelength[i] < lmax) ) \n \n if np.isnan(np.nanmedian(f_spec)): \n # The data are NAN!! Nothing to do\n if verbose or warnings: print(\" There is no valid data in the wavelength range [{},{}] !!\".format(lmin,lmax))\n \n resultado = [0, line, 0, 0, 0, 0, 0, 0, 0, 0, 0, s ] \n\n return resultado\n \n else: \n \n ## 20 Sep 2020\n f_spec_m=signal.medfilt(f_spec,median_kernel) # median_kernel = 35 default\n \n \n # Remove nans\n median_value = np.nanmedian(f_spec)\n f_spec = [median_value if np.isnan(x) else x for x in f_spec] \n \n \n # Setup min and max flux values in subrange to fit\n if fmin == 0 :\n fmin = np.nanmin(f_spec) \n if fmax == 0 :\n fmax = np.nanmax(f_spec) \n \n # We have to find some \"guess numbers\" for the Gaussian. Now guess_centre is line\n guess_centre = line\n \n # Define continuum regions: [-lowlow, -lowhigh] and [highlow,highhigh] in Angstroms with respect to guess_centre\n \n w_cont=[]\n f_cont=[]\n w_cont.extend((w_spec[i]) for i in range(len(w_spec)) if (w_spec[i] > guess_centre-lowlow and w_spec[i] < guess_centre-lowhigh) or (w_spec[i] > guess_centre+highlow and w_spec[i] < guess_centre+highhigh) ) \n f_cont.extend((f_spec[i]) for i in range(len(w_spec)) if (w_spec[i] > guess_centre-lowlow and w_spec[i] < guess_centre-lowhigh) or (w_spec[i] > guess_centre+highlow and w_spec[i] < guess_centre+highhigh) ) \n \n if fit_continuum:\n # Linear Fit to continuum \n f_cont_filtered=sig.medfilt(f_cont,np.int(median_kernel))\n #print line #f_cont\n # if line == 8465.0:\n # print w_cont\n # print f_cont_filtered\n # plt.plot(w_cont,f_cont_filtered)\n # plt.show()\n # plt.close()\n # warnings=True\n try: \n mm,bb = np.polyfit(w_cont, f_cont_filtered, 1)\n except Exception:\n bb = np.nanmedian(f_cont_filtered)\n mm = 0.\n if warnings: \n print(\" WARNING: Impossible to get the continuum!\")\n print(\" Scaling the continuum to the median value b = \",bb,\": cont = 0 * w_spec + \", bb)\n continuum = mm*np.array(w_spec)+bb \n c_cont = mm*np.array(w_cont)+bb \n \n else: \n # Median value in each continuum range # NEW 15 Sep 2019\n w_cont_low = []\n f_cont_low = []\n w_cont_low.extend((w_spec[i]) for i in range(len(w_spec)) if (w_spec[i] > guess_centre-lowlow and w_spec[i] < guess_centre-lowhigh) ) \n f_cont_low.extend((f_spec[i]) for i in range(len(w_spec)) if (w_spec[i] > guess_centre-lowlow and w_spec[i] < guess_centre-lowhigh) ) \n median_w_cont_low = np.nanmedian(w_cont_low)\n median_f_cont_low = np.nanmedian(f_cont_low)\n w_cont_high = []\n f_cont_high = []\n w_cont_high.extend((w_spec[i]) for i in range(len(w_spec)) if (w_spec[i] > guess_centre+highlow and w_spec[i] < 
guess_centre+highhigh) ) \n f_cont_high.extend((f_spec[i]) for i in range(len(w_spec)) if (w_spec[i] > guess_centre+highlow and w_spec[i] < guess_centre+highhigh) ) \n median_w_cont_high = np.nanmedian(w_cont_high)\n median_f_cont_high = np.nanmedian(f_cont_high) \n \n b = (median_f_cont_low-median_f_cont_high)/(median_w_cont_low-median_w_cont_high)\n a = median_f_cont_low- b * median_w_cont_low\n \n continuum = a + b*np.array(w_spec)\n c_cont = a + b*np.array(w_cont) \n \n \n # rms continuum\n rms_cont = np.nansum([ np.abs(f_cont[i] - c_cont[i]) for i in range(len(w_cont)) ]) / len(c_cont)\n \n # Search for index here w_spec(index) closest to line\n min_w = np.abs(np.array(w_spec)-line)\n mini = np.nanmin(min_w)\n # guess_peak = f_spec[min_w.tolist().index(mini)] # WE HAVE TO SUSTRACT CONTINUUM!!!\n guess_peak = f_spec[min_w.tolist().index(mini)] - continuum[min_w.tolist().index(mini)]\n \n # LOW limit\n low_limit=0\n w_fit = []\n f_fit = []\n w_fit.extend((w_spec[i]) for i in range(len(w_spec)) if (w_spec[i] > guess_centre-15 and w_spec[i] < guess_centre)) \n f_fit.extend((f_spec[i]) for i in range(len(w_spec)) if (w_spec[i] > guess_centre-15 and w_spec[i] < guess_centre)) \n if fit_continuum: \n c_fit=mm*np.array(w_fit)+bb \n else: \n c_fit=b*np.array(w_fit)+a \n \n fs=[]\n ws=[]\n for ii in range(len(w_fit)-1,1,-1):\n if f_fit[ii]/c_fit[ii] < 1.05 and f_fit[ii-1]/c_fit[ii-1] < 1.05 and low_limit == 0: low_limit = w_fit[ii]\n # if f_fit[ii]/c_fit[ii] < 1.05 and low_limit == 0: low_limit = w_fit[ii]\n fs.append(f_fit[ii]/c_fit[ii])\n ws.append(w_fit[ii])\n if low_limit == 0: \n sorted_by_flux=np.argsort(fs)\n try:\n low_limit = ws[sorted_by_flux[0]]\n except Exception:\n plot=True\n low_limit = 0\n \n # HIGH LIMIT \n high_limit=0\n w_fit = []\n f_fit = []\n w_fit.extend((w_spec[i]) for i in range(len(w_spec)) if (w_spec[i] > guess_centre and w_spec[i] < guess_centre+15)) \n f_fit.extend((f_spec[i]) for i in range(len(w_spec)) if (w_spec[i] > guess_centre and w_spec[i] < guess_centre+15)) \n if fit_continuum: \n c_fit=mm*np.array(w_fit)+bb \n else: \n c_fit=b*np.array(w_fit)+a\n \n fs=[]\n ws=[]\n for ii in range(len(w_fit)-1):\n if f_fit[ii]/c_fit[ii] < 1.05 and f_fit[ii+1]/c_fit[ii+1] < 1.05 and high_limit == 0: high_limit = w_fit[ii]\n # if f_fit[ii]/c_fit[ii] < 1.05 and high_limit == 0: high_limit = w_fit[ii]\n fs.append(f_fit[ii]/c_fit[ii])\n ws.append(w_fit[ii])\n if high_limit == 0: \n sorted_by_flux=np.argsort(fs)\n try:\n high_limit = ws[sorted_by_flux[0]] \n except Exception:\n plot=True\n high_limit = 0 \n \n # Guess centre will be the highest value in the range defined by [low_limit,high_limit]\n \n try: \n rango = np.where((high_limit >= wavelength ) & (low_limit <= wavelength)) \n index_maximo_del_rango = s.tolist().index(np.nanmax(s[rango]))\n guess_centre = wavelength[index_maximo_del_rango]\n except Exception:\n guess_centre = line #### It was 0 before\n \n \n # Fit a Gaussian to data - continuum \n p0 = [guess_centre, guess_peak, broad/2.355] # broad is the Gaussian sigma, 1.0 for emission lines\n try:\n fit, pcov = curve_fit(gauss, w_spec, f_spec-continuum, p0=p0, maxfev=10000) # If this fails, increase maxfev...\n fit_error = np.sqrt(np.diag(pcov))\n \n # New 28th Feb 2019: Check central value between low_limit and high_limit\n # Better: between guess_centre - broad, guess_centre + broad\n # If not, redo fit fixing central value to the peak (it does not work... 
just fix FWHM= (high_limit-low_limit)/2.5 )\n \n if verbose != False: print(\" ----------------------------------------------------------------------------------------\")\n # if low_limit < fit[0] < high_limit:\n if fit[0] < guess_centre - broad or fit[0] > guess_centre + broad:\n # if verbose: print \" Fitted center wavelength\", fit[0],\"is NOT in the range [\",low_limit,\",\",high_limit,\"]\"\n if verbose: print(\" Fitted center wavelength\", fit[0],\"is NOT in the expected range [\",guess_centre - broad,\",\",guess_centre + broad,\"]\")\n \n # print \"Re-do fitting fixing center wavelength\"\n # p01 = [guess_peak, broad]\n # fit1, pcov1 = curve_fit(gauss_fix_x0, w_spec, f_spec-continuum, p0=p01, maxfev=100000) # If this fails, increase maxfev...\n # fit_error1 = np.sqrt(np.diag(pcov1))\n # fit[0]=guess_centre\n # fit_error[0] = 0.\n # fit[1] = fit1[0]\n # fit_error[1] = fit_error1[0]\n # fit[2] = fit1[1]\n # fit_error[2] = fit_error1[1] \n \n fit[0]=guess_centre\n fit_error[0] = 0.000001\n fit[1]=guess_peak\n fit_error[1] = 0.000001\n fit[2] = broad/2.355\n fit_error[2] = 0.000001 \n else:\n if verbose: print(\" Fitted center wavelength\", fit[0],\"IS in the expected range [\",guess_centre - broad,\",\",guess_centre + broad,\"]\")\n \n \n if verbose: print(\" Fit parameters = \", fit[0], fit[1], fit[2])\n if fit[2] == broad and warnings == True : \n print(\" WARNING: Fit in\",fit[0],\"failed! Using given centre wavelength (cw), peak at (cv) & sigma = broad/2.355 given.\") \n gaussian_fit = gauss(w_spec, fit[0], fit[1], fit[2])\n \n \n # Estimate rms of the Gaussian fit in range [low_limit, high_limit]\n residuals = f_spec-gaussian_fit-continuum\n rms_fit = np.nansum([ ((residuals[i]**2)/(len(residuals)-2))**0.5 for i in range(len(w_spec)) if (w_spec[i] >= low_limit and w_spec[i] <= high_limit) ]) \n \n # Fluxes, FWHM and Eq. 
Width calculations\n gaussian_flux = gauss_flux(fit[1],fit[2])\n error1 = np.abs(gauss_flux(fit[1]+fit_error[1],fit[2]) - gaussian_flux)\n error2 = np.abs(gauss_flux(fit[1],fit[2]+fit_error[2]) - gaussian_flux)\n gaussian_flux_error = 1 / ( 1/error1**2 + 1/error2**2 )**0.5\n \n \n fwhm=fit[2]*2.355\n fwhm_error = fit_error[2] *2.355\n fwhm_vel = fwhm / fit[0] * C \n fwhm_vel_error = fwhm_error / fit[0] * C \n \n gaussian_ew = gaussian_flux/np.nanmedian(f_cont)\n gaussian_ew_error = gaussian_ew * gaussian_flux_error/gaussian_flux \n \n # Integrated flux\n # IRAF: flux = sum ((I(i)-C(i)) * (w(i2) - w(i1)) / (i2 - i2) \n flux = np.nansum([ (f_spec[i]-continuum[i])*(w_spec[i+1]-w_spec[i]) for i in range(len(w_spec)) if (w_spec[i] >= low_limit and w_spec[i] <= high_limit) ]) \n flux_error = rms_cont * (high_limit - low_limit)\n wave_resolution = (wavelength[-1]-wavelength[0])/len(wavelength)\n ew = wave_resolution * np.nansum ([ (1 - f_spec[i]/continuum[i]) for i in range(len(w_spec)) if (w_spec[i] >= low_limit and w_spec[i] <= high_limit) ]) \n ew_error = np.abs(ew*flux_error/flux) \n gauss_to_integrated = gaussian_flux/flux * 100.\n \n index=0\n s_s=np.zeros_like(s)\n for wave in range(len(wavelength)):\n s_s[wave]=s[wave]\n if wavelength[wave] == w_spec[0] : \n s_s[wave] = f_spec[0]-gaussian_fit[0]\n index=1\n if wavelength[wave] > w_spec[0] and wavelength[wave] <= w_spec[-1]:\n s_s[wave] = f_spec[index]-gaussian_fit[index]\n index=index+1\n \n # Plotting \n ptitle = 'Fit: x0=%.2f y0=%.2e sigma=%.2f flux=%.2e rms=%.3e' % (fit[0], fit[1], fit[2], gaussian_flux, rms_fit)\n if plot :\n plt.figure(figsize=(10, 4))\n # Plot input spectrum\n plt.plot(np.array(w_spec),np.array(f_spec), \"b\", lw=3, alpha = 0.8)\n # Plot median input spectrum\n plt.plot(np.array(w_spec),np.array(f_spec_m), \"orange\", lw=3, alpha = 0.5) # 2021: era \"g\"\n # Plot spectrum - gauss subtracted\n plt.plot(wavelength,s_s,\"g\",lw=3, alpha = 0.6)\n \n plt.minorticks_on() \n plt.xlabel(\"Wavelength [$\\mathrm{\\AA}$ ]\")\n if fcal:\n plt.ylabel(\"Flux [ erg cm$^{-2}$ s$^{-1}$ $\\mathrm{\\AA}^{-1}$ ]\")\n else:\n plt.ylabel(\"Flux [ counts ]\")\n plt.xlim(lmin,lmax)\n plt.ylim(fmin,fmax)\n \n # Vertical line at guess_centre\n plt.axvline(x=guess_centre, color='r', linestyle='-', alpha=0.3)\n # Horizontal line at y = 0\n plt.axhline(y=0, color='k', linestyle=':', alpha=0.5) \n # Dashed green regions for continuum, defined by [lowlow, lowhigh] and [highlow,highhigh]\n plt.axvspan(guess_centre+highlow, guess_centre+highhigh, facecolor='g', alpha=0.15,zorder=3)\n plt.axvspan(guess_centre-lowlow, guess_centre-lowhigh, facecolor='g', alpha=0.15,zorder=3)\n # Plot linear fit for continuum\n plt.plot(w_spec, continuum,\"g--\")\n # Plot Gaussian fit \n plt.plot(w_spec, gaussian_fit+continuum, 'r-', alpha=0.8) \n # Vertical line at Gaussian center\n plt.axvline(x=fit[0], color='k', linestyle='-', alpha=0.5)\n # Vertical lines to emission line\n plt.axvline(x= low_limit, color='k', linestyle=':', alpha=0.5)\n plt.axvline(x= high_limit, color='k', linestyle=':', alpha=0.5) \n # Plot residuals\n plt.plot(w_spec, residuals, 'k')\n plt.title(ptitle)\n plt.show()\n \n # Printing results\n if verbose :\n print(\"\\n - Gauss and continuum fitting + integrated flux calculations:\\n\")\n print(\" rms continuum = %.3e erg/cm/s/A \" % (rms_cont)) \n print(\" Gaussian Fit parameters: x0 = ( %.2f +- %.2f ) A \" % (fit[0], fit_error[0]))\n print(\" y0 = ( %.3f +- %.3f ) 1E-16 erg/cm2/s/A\" % (fit[1]/1E-16, fit_error[1]/1E-16 ))\n print(\" sigma = ( 
%.3f +- %.3f ) A\" % (fit[2], fit_error[2])) \n print(\" rms fit = %.3e erg/cm2/s/A\" % (rms_fit))\n print(\" Gaussian Flux = ( %.2f +- %.2f ) 1E-16 erg/s/cm2 (error = %.1f per cent)\" % (gaussian_flux/1E-16, gaussian_flux_error/1E-16, gaussian_flux_error/gaussian_flux*100))\n print(\" FWHM = ( %.3f +- %.3f ) A = ( %.1f +- %.1f ) km/s \" % (fwhm, fwhm_error, fwhm_vel, fwhm_vel_error))\n print(\" Eq. Width = ( %.1f +- %.1f ) A\" % (-gaussian_ew, gaussian_ew_error)) \n print(\"\\n Integrated flux = ( %.2f +- %.2f ) 1E-16 erg/s/cm2 (error = %.1f per cent) \" % ( flux/1E-16, flux_error/1E-16, flux_error/flux *100)) \n print(\" Eq. Width = ( %.1f +- %.1f ) A\" % (ew, ew_error))\n print(\" Gauss/Integrated = %.2f per cent \" % gauss_to_integrated)\n \n \n # Plot independent figure with substraction if requested \n if plot_sus: plot_plot(wavelength,[s,s_s], xmin=lmin, xmax=lmax, ymin=fmin, ymax=fmax, fcal=fcal, frameon=True, ptitle=ptitle)\n \n # 0 1 2 3 4 5 6 7 8 9 10 11\n resultado = [rms_cont, fit[0], fit_error[0], gaussian_flux, gaussian_flux_error, fwhm, fwhm_error, flux, flux_error, ew, ew_error, s_s ]\n return resultado \n except Exception:\n if verbose: \n print(\" - Gaussian fit failed!\")\n print(\" However, we can compute the integrated flux and the equivalent width:\")\n \n flux = np.nansum([ (f_spec[i]-continuum[i])*(w_spec[i+1]-w_spec[i]) for i in range(len(w_spec)) if (w_spec[i] >= low_limit and w_spec[i] <= high_limit) ]) \n flux_error = rms_cont * (high_limit - low_limit)\n wave_resolution = (wavelength[-1]-wavelength[0])/len(wavelength)\n ew = wave_resolution * np.nansum ([ (1 - f_spec[i]/continuum[i]) for i in range(len(w_spec)) if (w_spec[i] >= low_limit and w_spec[i] <= high_limit) ]) \n ew_error = np.abs(ew*flux_error/flux) \n \n if verbose:\n print(\" Integrated flux = ( %.2f +- %.2f ) 1E-16 erg/s/cm2 (error = %.1f per cent) \" % ( flux/1E-16, flux_error/1E-16, flux_error/flux *100)) \n print(\" Eq. 
Width = ( %.1f +- %.1f ) A\" % (ew, ew_error))\n \n resultado = [0, guess_centre, 0, 0, 0, 0, 0, flux, flux_error, ew, ew_error, s ] # guess_centre was identified at maximum value in the [low_limit,high_limit] range but Gaussian fit failed\n \n \n # Plotting \n if plot :\n plt.figure(figsize=(10, 4))\n plt.plot(np.array(w_spec),np.array(f_spec), \"b\", lw=3, alpha = 0.5)\n plt.minorticks_on() \n plt.xlabel(\"Wavelength [$\\mathrm{\\AA}$]\")\n if fcal:\n plt.ylabel(\"Flux [ erg cm$^{-2}$ s$^{-1}$ $\\mathrm{\\AA}^{-1}$ ]\")\n else:\n plt.ylabel(\"Flux [ counts ]\") \n plt.xlim(lmin,lmax)\n plt.ylim(fmin,fmax)\n \n # Vertical line at guess_centre\n plt.axvline(x=guess_centre, color='r', linestyle='-', alpha=0.5)\n # Horizontal line at y = 0\n plt.axhline(y=0, color='k', linestyle=':', alpha=0.5) \n # Dashed green regions for continuum, defined by [lowlow, lowhigh] and [highlow,highhigh]\n plt.axvspan(guess_centre+highlow, guess_centre+highhigh, facecolor='g', alpha=0.15,zorder=3)\n plt.axvspan(guess_centre-lowlow, guess_centre-lowhigh, facecolor='g', alpha=0.15,zorder=3)\n # Plot linear fit for continuum\n plt.plot(w_spec, continuum,\"g--\")\n # Plot Gaussian fit \n # plt.plot(w_spec, gaussian_fit+continuum, 'r-', alpha=0.8) \n # Vertical line at Gaussian center\n # plt.axvline(x=fit[0], color='k', linestyle='-', alpha=0.5)\n # Vertical lines to emission line\n plt.axvline(x= low_limit, color='k', linestyle=':', alpha=0.5)\n plt.axvline(x= high_limit, color='k', linestyle=':', alpha=0.5) \n # Plot residuals\n # plt.plot(w_spec, residuals, 'k')\n plt.title(\"No Gaussian fit obtained...\")\n plt.show()\n \n \n return resultado",
"def get_surfaceflux_from_wavelength_and_laser_power(wavelength, rover_specs, laser_powers, receiver_areas,\n power_reqs, pointing_error=[1e-7, 1e-7]):\n assert len(power_reqs) == len(receiver_areas)\n assert len(power_reqs) == len(rover_specs)\n\n # Set the parameter space\n trans_radius = np.logspace(-3, 1, 1000)\n altitudes = np.logspace(4, 7, 1001)\n R, Z = np.meshgrid(trans_radius, altitudes, indexing=\"ij\")\n\n fig, ax = plt.subplots(len(power_reqs), len(laser_powers), sharey=True, sharex=True, figsize=(12, 7))\n for i, laser_power in enumerate(laser_powers):\n for j in range(len(power_reqs)):\n rover_spec = rover_specs[j]\n receiver_area = receiver_areas[j]\n power_req = power_reqs[j]\n\n # Get the beam radius\n beam_radius = R * np.sqrt(1.0 + (Z * wavelength / (np.pi * R ** 2)) ** 2)\n receiver_radius = np.sqrt(receiver_area / np.pi)\n radius_constraint_one = pointing_error[j] * Z + receiver_radius\n radius_constraint_two = pointing_error[j] * Z + beam_radius\n mask_one = beam_radius < radius_constraint_one\n mask_two = receiver_radius > radius_constraint_two\n final_mask = np.logical_and(mask_one, np.logical_not(mask_two))\n beam_radius[final_mask] = np.nan\n\n # Calculate the resulting surface flux\n receiver_power = laser_power/ (np.pi * beam_radius ** 2) * receiver_area\n receiver_power[np.pi * beam_radius ** 2 < receiver_area] = laser_power\n receiver_power[receiver_power < power_req] = np.nan\n\n # Normalise result by input power to get total efficiency\n receiver_power /= laser_power\n receiver_power[receiver_power < 0.001] = np.nan\n\n log_power = np.log10(receiver_power * 100)\n ax[j, i].contourf(np.log10(R), Z / 1e3, log_power, 100)\n m = cm.ScalarMappable()\n m.set_array(log_power)\n m.set_clim(-1.0, 2.0)\n fig.colorbar(m, ax=ax[j, i])\n ax[j, 0].set_ylabel('{} \\n Transmission distance [km]'.format(rover_spec))\n ax[0, i].set_title('Laser Power: {}kW'.format(laser_power / 1e3))\n ax[1, i].set_xlabel('Logarithm of Transmitter Radius [m]')\n plt.tight_layout()\n plt.show()\n\n return beam_radius, receiver_power",
"def scaleLandsat(self,img):\n\t\tthermal = img.select(ee.List(['thermal'])).multiply(0.1)\n\t\tscaled = ee.Image(img).select(self.env.divideBands).multiply(ee.Number(0.0001))\n\t\t\n\t\treturn img.select([]).addBands(scaled).addBands(thermal)",
"def scale_volume(self, random_cell):\n\n # compute the volume to scale to\n composition = random_cell.composition\n total_volume = 0\n for specie in composition:\n total_volume += composition[specie]*self.vpas[specie.symbol]\n\n # scale the volume\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n random_cell.scale_lattice(total_volume)\n if str(random_cell.lattice.a) == 'nan' or \\\n random_cell.lattice.a > 100:\n return False\n else:\n return True",
"def ensquared_one_pix(array, pix_scale, new_scale=40, plot=True):\n\n n = int(new_scale // pix_scale)\n minPix, maxPix = (pix + 1 - n) // 2, (pix + 1 + n) // 2\n ens = array[minPix:maxPix, minPix:maxPix]\n # print(ens.shape)\n energy = np.sum(ens)\n\n if plot:\n mapp = 'viridis'\n f, (ax1, ax2) = plt.subplots(1, 2)\n ax1 = plt.subplot(1, 2, 1)\n square = Rectangle((minPix-0.5, minPix-0.5), n, n, linestyle='--', fill=None, color='white')\n ax1.add_patch(square)\n img1 = ax1.imshow(array, cmap=mapp)\n ax1.set_title('%.1f mas pixels' % (pix_scale))\n img1.set_clim(0, 1)\n plt.colorbar(img1, ax=ax1, orientation='horizontal')\n\n ax2 = plt.subplot(1, 2, 2)\n img2 = ax2.imshow(ens, cmap=mapp)\n ax2.set_title('%d mas window' %new_scale)\n img1.set_clim(0, 1)\n plt.colorbar(img2, ax=ax2, orientation='horizontal')\n\n return energy",
"def fun(self, x_s):\n i, q, u, v = [x_s[..., k] for k in range(4)]\n if np.min(i) < -np.spacing(np.max(i)):\n # negative intensity (trace of 2x2 block), obviously not PSD\n return np.inf\n else:\n i_pol = np.sqrt(q ** 2 + u ** 2 + v ** 2)\n i_diff = i - i_pol\n if np.min(i_diff) < -np.spacing(np.max(i_diff)):\n # polarized intensity higher than total (det of 2x2 block < 0)\n return np.inf\n else:\n return 0",
"def rscale(mag=10.0):\n if mag > 11.5:\n return 0.5\n elif mag > 11.0:\n return 1.0\n elif mag > 10.5:\n return 1.5\n elif mag > 10.0:\n return 1.5\n elif mag > 9.5:\n return 2.0\n elif mag > 9.0:\n return 2.5\n elif mag > 8.5:\n return 3.0\n else:\n return 3.5",
"def substract_given_gaussian(wavelength, spectrum, centre, peak=0, sigma=0, flux=0, search_peak=False, allow_absorptions = False,\n lowlow= 20, lowhigh=10, highlow=10, highhigh = 20, \n lmin=0, lmax=0, fmin=0, fmax=0, plot=True, fcal=False, verbose = True, warnings=True): \n do_it = False\n # Check that we have the numbers!\n if peak != 0 and sigma != 0 : do_it = True\n\n if peak == 0 and flux != 0 and sigma != 0:\n #flux = peak * sigma * np.sqrt(2*np.pi)\n peak = flux / (sigma * np.sqrt(2*np.pi))\n do_it = True \n\n if sigma == 0 and flux != 0 and peak != 0 :\n #flux = peak * sigma * np.sqrt(2*np.pi)\n sigma = flux / (peak * np.sqrt(2*np.pi)) \n do_it = True \n \n if flux == 0 and sigma != 0 and peak != 0 :\n flux = peak * sigma * np.sqrt(2*np.pi)\n do_it = True\n\n if sigma != 0 and search_peak == True: do_it = True \n\n if do_it == False:\n print(\"> Error! We need data to proceed! Give at least two of [peak, sigma, flux], or sigma and force peak to f[centre]\")\n s_s = spectrum\n else:\n # Setup wavelength limits\n if lmin == 0 :\n lmin = centre-65. # By default, +-65 A with respect to line\n if lmax == 0 :\n lmax = centre+65.\n \n # Extract subrange to fit\n w_spec = []\n f_spec = []\n w_spec.extend((wavelength[i]) for i in range(len(wavelength)) if (wavelength[i] > lmin and wavelength[i] < lmax) ) \n f_spec.extend((spectrum[i]) for i in range(len(wavelength)) if (wavelength[i] > lmin and wavelength[i] < lmax) ) \n \n # Setup min and max flux values in subrange to fit\n if fmin == 0 :\n fmin = np.nanmin(f_spec) \n if fmax == 0 :\n fmax = np.nanmax(f_spec) \n \n # Define continuum regions: [-lowlow, -lowhigh] and [highlow,highhigh] in Angstroms with respect to centre\n w_cont=[]\n f_cont=[]\n w_cont.extend((w_spec[i]) for i in range(len(w_spec)) if (w_spec[i] > centre-lowlow and w_spec[i] < centre-lowhigh) or (w_spec[i] > centre+highlow and w_spec[i] < centre+highhigh) ) \n f_cont.extend((f_spec[i]) for i in range(len(w_spec)) if (w_spec[i] > centre-lowlow and w_spec[i] < centre-lowhigh) or (w_spec[i] > centre+highlow and w_spec[i] < centre+highhigh) ) \n \n # Linear Fit to continuum \n try: \n mm,bb = np.polyfit(w_cont, f_cont, 1)\n except Exception:\n bb = np.nanmedian(spectrum)\n mm = 0.\n if verbose or warnings: \n print(\" WARNING! Impossible to get the continuum!\")\n print(\" Scaling the continuum to the median value\") \n continuum = mm*np.array(w_spec)+bb \n # c_cont = mm*np.array(w_cont)+bb \n # rms continuum\n # rms_cont = np.nansum([ np.abs(f_cont[i] - c_cont[i]) for i in range(len(w_cont)) ]) / len(c_cont)\n\n if search_peak:\n # Search for index here w_spec(index) closest to line\n try:\n min_w = np.abs(np.array(w_spec)-centre)\n mini = np.nanmin(min_w)\n peak = f_spec[min_w.tolist().index(mini)] - continuum[min_w.tolist().index(mini)]\n flux = peak * sigma * np.sqrt(2*np.pi) \n if verbose: print(\" Using peak as f[\",np.round(centre,2),\"] = \",np.round(peak,2),\" and sigma = \", np.round(sigma,2), \" flux = \",np.round(flux,2))\n except Exception:\n if verbose or warnings: print(\" Error trying to get the peak as requested wavelength is \",np.round(centre,2),\"! Ignoring this fit!\")\n peak = 0.\n flux = -0.0001\n \n no_substract = False\n if flux < 0:\n if allow_absorptions == False:\n if np.isnan(centre) == False:\n if verbose or warnings : print(\" WARNING! This is an ABSORPTION Gaussian! 
As requested, this Gaussian is NOT substracted!\")\n no_substract = True\n if no_substract == False: \n if verbose: print(\" Substracting Gaussian at {:7.1f} with peak ={:10.4f} sigma ={:6.2f} and flux ={:9.4f}\".format(centre, peak,sigma,flux))\n \n gaussian_fit = gauss(w_spec, centre, peak, sigma)\n \n \n index=0\n s_s=np.zeros_like(spectrum)\n for wave in range(len(wavelength)):\n s_s[wave]=spectrum[wave]\n if wavelength[wave] == w_spec[0] : \n s_s[wave] = f_spec[0]-gaussian_fit[0]\n index=1\n if wavelength[wave] > w_spec[0] and wavelength[wave] <= w_spec[-1]:\n s_s[wave] = f_spec[index]-gaussian_fit[index]\n index=index+1\n if plot: \n plt.figure(figsize=(10, 4))\n plt.plot(np.array(w_spec),np.array(f_spec), \"b\", lw=3, alpha = 0.5)\n plt.minorticks_on() \n plt.xlabel(\"Wavelength [$\\mathrm{\\AA}$]\")\n if fcal:\n plt.ylabel(\"Flux [ erg cm$^{-2}$ s$^{-1}$ $\\mathrm{\\AA}^{-1}$ ]\")\n else:\n plt.ylabel(\"Flux [ counts ]\")\n plt.xlim(lmin,lmax)\n plt.ylim(fmin,fmax)\n \n # Vertical line at line\n plt.axvline(x=centre, color='k', linestyle='-', alpha=0.8)\n # Horizontal line at y = 0\n plt.axhline(y=0, color='k', linestyle=':', alpha=0.5) \n # Dashed green regions for continuum, defined by [lowlow, lowhigh] and [highlow,highhigh]\n plt.axvspan(centre+highlow, centre+highhigh, facecolor='g', alpha=0.15,zorder=3)\n plt.axvspan(centre-lowlow, centre-lowhigh, facecolor='g', alpha=0.15,zorder=3)\n # Plot linear fit for continuum\n plt.plot(w_spec, continuum,\"g--\")\n # Plot Gaussian fit \n plt.plot(w_spec, gaussian_fit+continuum, 'r-', alpha=0.8) \n # Vertical lines to emission line\n #plt.axvline(x= low_limit, color='k', linestyle=':', alpha=0.5)\n #plt.axvline(x= high_limit, color='k', linestyle=':', alpha=0.5) \n # Plot residuals\n #plt.plot(w_spec, residuals, 'k')\n #plt.title('Fit: x0=%.2f y0=%.2e sigma=%.2f flux=%.2e rms=%.3e' % (fit[0], fit[1], fit[2], gaussian_flux, rms_fit))\n plt.show() \n plt.close()\n \n plt.figure(figsize=(10, 4))\n plt.plot(wavelength,spectrum, \"r\")\n plt.plot(wavelength,s_s, \"c\")\n plt.minorticks_on() \n plt.xlabel(\"Wavelength [$\\mathrm{\\AA}$]\")\n if fcal:\n plt.ylabel(\"Flux [ erg cm$^{-2}$ s$^{-1}$ $\\mathrm{\\AA}^{-1}$ ]\")\n else:\n plt.ylabel(\"Flux [ counts ]\")\n plt.xlim(lmin,lmax)\n plt.ylim(fmin,fmax)\n plt.show()\n plt.close()\n else:\n s_s = spectrum\n return s_s",
"def eeg_rms(array, axis=0):\t\t\n\treturn np.sqrt(np.mean(array ** 2,axis))",
"def Hosek18(self, wavelength, AKs):\n # If input entry is a single float, turn it into an array\n try:\n len(wavelength)\n except:\n wavelength = [wavelength]\n\n # Return error if any wavelength is beyond interpolation range of\n # extinction law\n if ((min(wavelength) < (self.low_lim*10**-4)) | (max(wavelength) > (self.high_lim*10**-4))):\n return ValueError('{0}: wavelength values beyond interpolation range'.format(self))\n \n # Extract wave and A/AKs from law, turning wave into micron units\n wave = self.wave * (10**-4)\n law = self.obscuration\n\n # Find the value of the law at the closest points\n # to wavelength\n A_AKs_at_wave = []\n for ii in wavelength:\n idx = np.where( abs(wave - ii) == min(abs(wave - ii)) )\n A_AKs_at_wave.append(law[idx][0])\n\n # Now multiply by AKs (since law assumes AKs = 1)\n A_at_wave = np.array(A_AKs_at_wave) * AKs\n\n return A_at_wave",
"def evaluate_peak_norm(x, y, amplitude, x_0, y_0, r_in, width):\n rr = (x - x_0) ** 2 + (y - y_0) ** 2\n rr_in = r_in ** 2\n rr_out = (r_in + width) ** 2\n\n # Because np.select evaluates on the whole rr array\n # we have to catch the invalid value warnings\n # Note: for r > r_out 'np.select' fills automatically zeros!\n with np.errstate(invalid='ignore'):\n values = np.select([rr <= rr_in, rr <= rr_out],\n [np.sqrt(rr_out - rr) - np.sqrt(rr_in - rr),\n np.sqrt(rr_out - rr)])\n return amplitude * values / np.sqrt(rr_out - rr_in)"
] | [
"0.63555455",
"0.6032541",
"0.5720245",
"0.56559825",
"0.55820227",
"0.5571117",
"0.55641395",
"0.5554358",
"0.55440706",
"0.5535525",
"0.54904956",
"0.5435876",
"0.5406285",
"0.5405981",
"0.5359477",
"0.53531414",
"0.5340021",
"0.53357816",
"0.53222436",
"0.53126466",
"0.5305457",
"0.5287403",
"0.52856266",
"0.5281704",
"0.5274005",
"0.5266809",
"0.5258586",
"0.52553356",
"0.52535915",
"0.5251993"
] | 0.71937263 | 0 |
Returns a dictionary of the triangular numbers associated with the Zernike pyramid | def triangular_numbers(N_levels):
zernike_rows = list(np.arange(1, N_levels + 1))
triangular = {}
for i, zernike_per_row in enumerate(zernike_rows):
total = np.sum(zernike_rows[:i+1])
triangular[zernike_per_row] = total
return triangular | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _get_tri_dict(self):\n tri_dict = dict(\n vertices=np.concatenate([self.contour.vertices] + [hole.vertices for hole in self.holes]),\n segments=list(self._segment_pairs())\n )\n if self.holes:\n tri_dict['holes'] = np.array([hole.interior_point for hole in self.holes])\n return tri_dict",
"def create_triad_counts():\n triads = [str(i) + str(j) + str(k) for i in range(2) for j in range(2) for k in range(2)]\n triad_counts = {}\n\n for triad in triads:\n triad_counts[triad] = [0, 0]\n\n return triad_counts",
"def _make_limb_dict():\n\n return {'left_arm_y': 10, 'right_arm_y': 13,\n 'left_arm_z': 11, 'right_arm_z': 14,\n 'left_leg_y': 4, 'right_leg_y': 7,\n 'left_leg_z': 5, 'right_leg_z': 8,\n 'hip_y': 2, 'hip_x': 1}",
"def pascal_segmentation_lut():\n\n classes_lut = dict([(0, 'background'), (255, 'teeth')])\n\n return classes_lut",
"def template(self) -> Dict[Union[int, str], int]:\r\n if not hasattr(self, '_template'):\r\n index = 0\r\n self._template = {}\r\n n = sum(self.sequence)\r\n for i, step in enumerate(self):\r\n max = index + step - 1\r\n max_next = max + self.sequence[i + 1] if i < len(self.sequence) - 1 else 0\r\n for j in range(index, index + step):\r\n if j < max:\r\n self._template[j] = j + 1\r\n if j + step < n and j + step <= max_next:\r\n self._template[str(j)] = j + step\r\n index += step\r\n return self._template",
"def create_dictionary():\n d = {}\n for y in range(HEIGHT):\n if (y % 2) != 0:\n pos = (10*y)+10\n else:\n pos =((10*y)-9)+10 \n for x in range(WIDTH):\n xy_tuple = (x,y)\n d[pos] = xy_tuple\n if (y % 2) != 0:\n pos = pos - 1\n else:\n pos = pos + 1\n \n return d",
"def make_pt_2_neighbors(tri):\n pt_dict=dict()\n for vlist in tri.vertices:\n for i in vlist:\n if not i in pt_dict:\n pt_dict[i]=list()\n for k in vlist:\n if k != i:\n pt_dict[i].insert(0,k)\n for i in range(tri.points.shape[0]):\n pt_dict[i]=np.unique(pt_dict[i]).tolist()\n return pt_dict",
"def test_get_triangle_dict_all_int(self):\n triangle = {'a': 1, 'b': 2, 'c': 3}\n result = get_triangle_type(triangle)\n self.assertEqual(result, 'scalene')",
"def dimension_homology_sc(self):\r\n vec_dic = {}\r\n for k in range(self.dimension()+1):\r\n p = k \r\n A = self.matrix_simmetric_representate(p)\r\n dn = 0\r\n dc = 0\r\n if (p == 0):\r\n dn = A.shape[1]\r\n if (p > 0 and (p <= self.dimension())):\r\n null = null_space(A)\r\n if (null.size != 0):\r\n dn = len(null[0])\r\n if (all(elem == 0 for elem in null[0])):\r\n dn = 0 \r\n p = k + 1\r\n if (p>0 and (p <= self.dimension())):\r\n A1=self.matrix_simmetric_representate(p)\r\n col = orth(A1)\r\n if (col.size != 0):\r\n dc = len(col[0])\r\n else: \r\n dc = 0\r\n vec_dic[k] = dn - dc\r\n return vec_dic",
"def get_all_potential_edges(self) -> Dict[str,\n Tuple[int, int, int, int]]:\n orig_rows = self.tile_rows\n\n ret = dict()\n\n for i in range(0, 4):\n self.rotate_right(i)\n for j in range(0, 2):\n self.flip_l_r(j)\n for k in range(0, 2):\n self.flip_t_b(k)\n edges = self.get_current_edges()\n if edges not in ret.values():\n ret[f'rr{i}_lr{j}_tb{k}'] = edges\n\n self.tile_rows = orig_rows\n\n for j in range(0, 2):\n self.flip_l_r(j)\n for i in range(0, 4):\n self.rotate_right(i)\n for k in range(0, 2):\n self.flip_t_b(k)\n edges = self.get_current_edges()\n if edges not in ret.values():\n ret[f'lr{j}_rr{i}_tb{k}'] = edges\n\n self.tile_rows = orig_rows\n\n for j in range(0, 2):\n self.flip_l_r(j)\n for k in range(0, 2):\n self.flip_t_b(k)\n for i in range(0, 4):\n self.rotate_right(i)\n edges = self.get_current_edges()\n if edges not in ret.values():\n ret[f'lr{j}_tb{k}_rr{i}'] = edges\n\n self.tile_rows = orig_rows\n\n for k in range(0, 2):\n self.flip_t_b(k)\n for j in range(0, 2):\n self.flip_l_r(j)\n for i in range(0, 4):\n self.rotate_right(i)\n edges = self.get_current_edges()\n if edges not in ret.values():\n ret[f'tb{k}_lr{j}_rr{i}'] = edges\n\n self.tile_rows = orig_rows\n\n for k in range(0, 2):\n self.flip_t_b(k)\n for i in range(0, 4):\n self.rotate_right(i)\n for j in range(0, 2):\n self.flip_l_r(j)\n edges = self.get_current_edges()\n if edges not in ret.values():\n ret[f'tb{k}_rr{i}_lr{j}'] = edges\n\n self.tile_rows = orig_rows\n\n for i in range(0, 4):\n self.rotate_right(i)\n for k in range(0, 2):\n self.flip_t_b(k)\n for j in range(0, 2):\n self.flip_l_r(j)\n edges = self.get_current_edges()\n if edges not in ret.values():\n ret[f'rr{i}_tb{k}_lr{j}'] = edges\n\n self.tile_rows = orig_rows\n\n return ret",
"def trios(self):\n return self._trios",
"def tripTrian(G):\r\n\tn = len(G)\r\n\ttrip = set()\r\n\ttrian = set()\r\n\tfor u in range(n):\r\n\t\tfor v in G[u]:\r\n\t\t\tfor w in G[v]:\r\n\t\t\t\tif v !=u and v!=w and u != w:\r\n\t\t\t\t\ttrip.add((u,v,w))\r\n\t\t\t\t\ts = getTrip((u,v,w))\r\n\t\t\t\t\tif u in G[w]:\r\n\t\t\t\t\t\ttrip.add((u,v,w))\r\n\t\t\t\t\t\ttrian.update(s)\r\n\t\t\t\t\t\ttrip.update(s)\r\n\t\t\t\t\telse:\r\n\t\t\t\t\t\ttrip.update(s)\r\n\r\n\treturn (len(trian)/6,len(trip)/6)",
"def wc_matrix(matrix):\n return [{\"A\": position[\"T\"], \"T\": position[\"A\"], \"C\": position[\"G\"], \"G\": position[\"C\"]} for position in matrix[::-1]]",
"def creating_dict(i, states):\n # base case\n if i == 5:\n # no more edges - recursion ends here\n return {'barcode': []}\n\n # iterative case\n else:\n # this is a tree structure where the node contains timepoint information and barcode information\n # and three edges link to other nodes that represent lineages in three differnet states\n updated_dict = {'t{}'.format(i): {state: creating_dict(i + 1, states) for state in states}}\n updated_dict['t{}'.format(i)].update({'barcode': []})\n return updated_dict",
"def grid_vals(grid):\n\tletters = list(grid)\n\t#print \"---------------------------------\\n-------------------\"\n\t#print letters\n\t#print \"----------------------------------\\n-------------------\"\n\tassert len(letters) == 81\n\ttempdict = zip(squares, letters)\n\treturn dict(tempdict)",
"def create_dictionary_indexes(self):\n direction_dictionary = {}\n direction_dictionary[UP] = self.direction_list(UP)\n direction_dictionary[DOWN] = self.direction_list(DOWN)\n direction_dictionary[LEFT] = self.direction_list(LEFT)\n direction_dictionary[RIGHT] = self.direction_list(RIGHT)\n return direction_dictionary",
"def second_round_output(self, ram_dict):\n\t\tresult = {}\n\t\tfor key in ram_dict:\n\t\t\tresult[key] = [len(ram_dict[key]), ram_dict[key]]\n\t\treturn result",
"def _get_tri_edges(tri):\n return [[tri[1], tri[2]], [tri[2], tri[0]], [tri[0], tri[1]]]",
"def getFi():\n fi = {}\n for i in range(4):\n for k in range(1,9):\n arg = i+1+(4*(k-1))\n val = (8*i)+k\n if arg <= 32 :\n fi[arg]=val\n return fi",
"def _upward_triangle_indicies(height=3):\n return [(height-r,c) for r in range(height) for c in range(-abs(r),abs(r)+1)]",
"def get_triangle_numbers(n):\n r = []\n for i in xrange(1, n + 1):\n t = ((i * (i + 1)) / 2)\n r.append(t)\n return r",
"def DictFunction2():\r\n print \"Create Second Dictionary\"\r\n NumberDict = dict(zip((i for i in range(16)), (hex(i) for i in range(16))))\r\n print NumberDict",
"def zernike_visuo__pyramid(zbasis, n, m, nlevels, figsize=(12, 12), cmap='jet', fontsize=20, colorbar_labelsize=10):\n \n cmap = plt.get_cmap('%s' %cmap)\n \n index = 0\n if not (nlevels>=0):\n print('Input parameter must be >= 0')\n raise AssertionError() \n \n axlist = []\n if (nlevels == 0):\n \n fig = plt.figure(num = 1, figsize=figsize)\n ax = fig.add_subplot(1,1,1)\n axlist.append(ax)\n im = ax.imshow(zbasis, cmap=cmap, interpolation='lanczos')\n ax.set_title(r'$Z_{%d}^{%d}$' %(n,m), fontsize=fontsize)\n\tax.axis('off')\n\n \n else:\n \n # ++++ Defining layout for row number n and colunmn number m ++++++++\n \n fig = plt.figure(1, figsize=figsize)\n row_n = nlevels + 1\n col_m = 2*nlevels + 1\n\n top = (col_m + 1)/2\n leftside = row_n*col_m - col_m + 1\n rightside = row_n*col_m \n\n k1 = 0; k2 = 0\n \n\n for i in xrange(top,row_n*col_m+1, 2*col_m):\n\n ax = fig.add_subplot(row_n,col_m,i)\n axlist.append(ax)\n im=ax.imshow(zbasis[index], cmap=cmap, interpolation='lanczos', alpha=None)\n ax.set_title(r'$Z_{%d}^{%d}$' %(n[index],m[index]), fontsize=fontsize)\n ax.axis('off')\n index += 1\n s1 = i + col_m + 1\n s2 = i + col_m - 1 \n jj1 = k1\n jj2 = k2\n\n\n while (s2 <= leftside): \n\n ax = fig.add_subplot(row_n,col_m,s2)\n axlist.append(ax)\n im=ax.imshow(zbasis[index], cmap=cmap, interpolation='lanczos')\n ax.set_title(r'$Z_{%d}^{%d}$' %(n[index],m[index]), fontsize=fontsize)\n ax.axis('off')\n index += 1\n s2 +=col_m - 1\n jj1 += 1\n jj2 -= 1\n\n leftside +=2\n\n jj1 = k1\n jj2 = k2\n\n while (s1 <= rightside):\n \n ax = fig.add_subplot(row_n,col_m,s1)\n axlist.append(ax)\n im=ax.imshow(zbasis[index], cmap=cmap, interpolation='lanczos')\n ax.set_title(r'$Z_{%d}^{%d}$' %(n[index],m[index]), fontsize=fontsize)\n ax.axis('off')\n index += 1\n s1 +=col_m + 1\n jj1 += 1\n jj2 += 1\n\n rightside -=2\n k1 = 0; k2 += 2\n\n\n cbar = fig.colorbar(im, ax=axlist,fraction=0.05, orientation='horizontal') \n cbar.ax.tick_params(labelsize=colorbar_labelsize)\n fig.subplots_adjust(wspace=0,hspace=0, right=0.72, bottom=0.2)\n fig.savefig('zernike_orders.png', dpi=300)\n\n return None",
"def tris(self):\n return self.nlegomena(3)",
"def merchandise(t):\n d = {}\n for wagon in t:\n d[wagon[0]] = d.get(wagon[0],0) + wagon[1]\n return d",
"def _pettifor_numbers():\n return { \"Li\": 0.45,\n \"Be\": 1.5,\n \"B\": 2.0,\n \"C\": 2.5,\n \"N\": 3.0, \n \"O\": 3.5,\n \"F\": 4.0,\n \n \"Na\": 0.4,\n \"Mg\": 1.28,\n \"Al\": 1.66,\n \"Si\": 1.92,\n \"P\": 2.18,\n \"S\": 2.44,\n \"Cl\": 2.70,\n \n \"K\": 0.35,\n \"Ca\": 0.60,\n \"Sc\": 0.74,\n \"Ti\": 0.79,\n \"V\": 0.84,\n \"Cr\": 0.89,\n \"Mn\": 0.94,\n \"Fe\": 0.99,\n \"Co\": 1.04,\n \"Ni\": 1.09,\n \"Cu\": 1.20,\n \"Zn\": 1.44,\n \"Ga\": 1.68,\n \"Ge\": 1.92,\n \"As\": 2.16,\n \"Se\": 2.40,\n \"Br\": 2.64,\n\n \"Rb\": 0.30,\n \"Sr\": 0.55,\n \"Y\": 0.70,\n \"Zr\": 0.76,\n \"Nb\": 0.82,\n \"Mo\": 0.88,\n \"Tc\": 0.94,\n \"Ru\": 1.00,\n \"Rh\": 1.06,\n \"Pd\": 1.12,\n \"Ag\": 1.18,\n \"Cd\": 1.36,\n \"In\": 1.60,\n \"Sn\": 1.84,\n \"Sb\": 2.08,\n \"Te\": 2.32,\n \"I\": 2.56,\n \n \"Cs\": 0.25,\n \"Ba\": 0.50,\n \"La\": 0.748,\n \"Hf\": 0.775,\n \"Ta\": 0.83,\n \"W\": 0.885,\n \"Re\": 0.94,\n \"Os\": 0.995,\n \"Ir\": 1.05,\n \"Pt\": 1.105,\n \"Au\": 1.16,\n \"Hg\": 1.32,\n \"Tl\": 1.56,\n \"Pb\": 1.80,\n \"Bi\": 2.04,\n \"Po\": 2.28, \n \"At\": 2.52 }",
"def print_triangular_numbers(n):\r\n\r\n\tfor i in range(1, n+1):\r\n\t\tsum = int((i / 2)*(1 + i))\r\n\t\tprint(i, \"\\t\", sum)",
"def gen_triangle_level(self, i):\r\n if i == 1:\r\n return list([1])\r\n\r\n # Select previous level\r\n t = self.gen_triangle_level(i - 1)\r\n m = len(t) + 1\r\n\r\n return [1\r\n if (j == 0 or\r\n j == m - 1)\r\n else t[j - 1] + t[j]\r\n for j\r\n in range(0, m, 1)]",
"def get_triangles_per_surface(my_core, entity_ranges):\n\n t_p_s = {}\n for surface in entity_ranges['Surfaces']:\n t_p_s[surface] = my_core.get_entities_by_type(\n surface, types.MBTRI).size()\n return t_p_s",
"def grid_values(self, grid):\n chars = [col for col in grid if col in self.digits or col in '0.']\n assert len(chars) == 81\n return dict(zip(self.squares, chars))"
] | [
"0.67723066",
"0.6626647",
"0.5800086",
"0.5686751",
"0.5580213",
"0.55674565",
"0.5506504",
"0.54808724",
"0.5421397",
"0.54160047",
"0.5405297",
"0.5372639",
"0.5343492",
"0.5306792",
"0.530101",
"0.5289107",
"0.5287096",
"0.5276585",
"0.52750313",
"0.52660143",
"0.52657723",
"0.52529484",
"0.5239551",
"0.5237266",
"0.52153945",
"0.51736224",
"0.5171337",
"0.51637864",
"0.5154374",
"0.5137404"
] | 0.6907697 | 0 |
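A minimal usage sketch of the triangular_numbers document above, assuming only that NumPy is imported as np, exactly as in the snippet itself; for N_levels = 3 the mapping works out to row 1 -> 1, row 2 -> 3, row 3 -> 6 Zernike polynomials in total.

    import numpy as np

    # Reproduces the logic of the document field above so the sketch runs standalone;
    # keys and values are cast to plain ints purely for readable output.
    def triangular_numbers(N_levels):
        zernike_rows = list(np.arange(1, N_levels + 1))
        triangular = {}
        for i, zernike_per_row in enumerate(zernike_rows):
            triangular[int(zernike_per_row)] = int(np.sum(zernike_rows[:i + 1]))
        return triangular

    print(triangular_numbers(3))    # -> {1: 1, 2: 3, 3: 6}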
Computes the (Xc, Yc) coordinates of actuator centres inside a circle of rho_aper, assuming there are N_actuators along the [-1, 1] line | def actuator_centres(N_actuators, rho_aper=RHO_APER, rho_obsc=RHO_OBSC):
x0 = np.linspace(-1., 1., N_actuators, endpoint=True)
delta = x0[1] - x0[0]
N_in_D = 2*rho_aper/delta
print('%.2f actuators in D' %N_in_D)
max_freq = N_in_D / 2 # Max spatial frequency we can sense
xx, yy = np.meshgrid(x0, x0)
x_f = xx.flatten()
y_f = yy.flatten()
act = []
for x_c, y_c in zip(x_f, y_f):
r = np.sqrt(x_c ** 2 + y_c ** 2)
if r < 0.97 * rho_aper and r > 1.05 * rho_obsc:
act.append([x_c, y_c])
total_act = len(act)
print('Total Actuators: ', total_act)
return [act, delta], max_freq | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def center_of_charge(self):\n ret = [0.0, 0.0, 0.0]\n total_c = 0.0\n\n for at in range(self.natom()):\n c = self.charge(at)\n ret = add(ret, scale(self.xyz(at), c))\n total_c += c\n\n ret = scale(ret, 1.0 / total_c)\n return ret",
"def gen_centers(self):\n\n \"\"\"x_track = self.cs.discrete_rollout()\n t = np.arange(len(x_track))*self.dt\n # choose the points in time we'd like centers to be at\n c_des = np.linspace(0, self.cs.run_time, self.n_bfs)\n self.c = np.zeros(len(c_des))\n for ii, point in enumerate(c_des):\n diff = abs(t - point)\n self.c[ii] = x_track[np.where(diff == min(diff))[0][0]]\"\"\"\n\n # desired activations throughout time\n des_c = jnp.linspace(0, self.cs.run_time, self.n_bfs)\n\n self.c = np.ones(len(des_c))\n for n in range(len(des_c)):\n # finding x for desired times t\n self.c[n] = jnp.exp(-self.cs.ax * des_c[n])\n self.c = jnp.array(self.c)",
"def compute_center(self, mole_object):\r\n if mole_object.plugin_type == \"PyMOL\":\r\n sel = PymolPlugin.PymolPlugin().get_model('all')\r\n cnt = len(sel.atom)\r\n\r\n else:\r\n sel = ChimeraPlugin.ChimeraPlugin().select()\r\n cnt = len(ChimeraPlugin.ChimeraPlugin().current_atoms())\r\n\r\n cent_x = 0\r\n cent_y = 0\r\n cent_z = 0\r\n\r\n if cnt == 0:\r\n return 0, 0, 0\r\n\r\n if mole_object.plugin_type == \"PyMOL\":\r\n\r\n for a in sel.atom:\r\n cent_x += a.coord[0]\r\n cent_y += a.coord[1]\r\n cent_z += a.coord[2]\r\n\r\n else:\r\n\r\n for a in ChimeraPlugin.ChimeraPlugin().current_atoms():\r\n cent_x += a.coord()[0]\r\n cent_y += a.coord()[1]\r\n cent_z += a.coord()[2]\r\n\r\n cent_x /= cnt\r\n cent_y /= cnt\r\n cent_z /= cnt\r\n\r\n self.point_x.component('entryfield').setentry(cent_x)\r\n self.point_y.component('entryfield').setentry(cent_y)\r\n self.point_z.component('entryfield').setentry(cent_z)\r\n\r\n self.show_crisscross(mole_object)",
"def find_center(r):\n cx=r.corner.x+(r.width/2)\n cy=r.corner.y+(r.height/2)\n return cx,cy",
"def circle_center(self):\n return self.container.width / 2, self.container.height / 2",
"def find_cea_coord(header,phi_c,lambda_c,nx,ny,dx,dy):\n nx = int(nx)\n ny = int(ny)\n\n # Array of CEA coords\n x = []\n y = []\n\n for j in range(ny):\n col = []\n row = []\n for i in range(nx):\n col.append(np.radians((i-(nx-1)/2)*dx))\n row.append(np.radians((j-(ny-1)/2)*dy))\n x.append(col)\n y.append(row)\n\n x = np.array(x)\n y = np.array(y)\n\n # Relevant header values\n rSun = header['rsun_obs']/header['cdelt1'] #solar radius in pixels\n disk_latc = np.radians(header['CRLT_OBS'])\n disk_lonc = np.radians(header['CRLN_OBS'])\n disk_xc = header['CRPIX1'] - 1 #disk center wrt lower left of patch\n disk_yc = header['CRPIX2'] - 1\n pa = np.radians(header['CROTA2']*-1)\n\n latc = np.radians(lambda_c)\n lonc = np.radians(phi_c) - disk_lonc\n\n # Convert coordinates\n lat = []\n lon = []\n xi = []\n eta = []\n\n for j in range(ny):\n lat_col = []\n lon_col = []\n xi_col = []\n eta_col = []\n for i in range(nx):\n lat0,lon0 = plane2sphere(x[j,i],y[j,i],latc,lonc)\n lat_col.append(lat0)\n lon_col.append(lon0)\n\n xi0,eta0 = sphere2img(lat0,lon0,disk_latc,0.0,disk_xc,disk_yc,rSun,pa)\n xi_col.append(xi0)\n eta_col.append(eta0)\n lat.append(lat_col)\n lon.append(lon_col)\n xi.append(xi_col)\n eta.append(eta_col)\n\n lat = np.array(lat)\n lon = np.array(lon)\n xi = np.array(xi)\n eta = np.array(eta)\n\n return xi,eta,lat,lon",
"def points_on_circumference(center=(0, 0), r=50, n=100):\n\treturn [\n (\n center[0]+(cos(2 * pi / n * x) * r), \n center[1] + (sin(2 * pi / n * x) * r) \n\n ) for x in range(0, n + 1)]",
"def getCartesianPoints2(r, theta, center):\n x = r * np.cos(theta) + center[0]\n y = r * np.sin(theta) + center[1]\n\n return x, y",
"def centers(self):\n return self.xc, self.yc",
"def get_circle_coords(center, r):\n circle = [[r, 180* phi/3.14159265] for phi in range(0, 180, 5)]\n circle = [pol2cart(p[0], p[1]) + (center[0], center[1]) for p in circle]\n return circle",
"def centroid(cnt):\n\tM = cv2.moments(cnt)\n\tcx = int(M['m10']/M['m00'])\n\tcy = int(M['m01']/M['m00'])\n\treturn (cx, cy)",
"def generate_circle(R,center,N=100,t0=0.0,t1=2.0*np.pi):\r\n theta = np.linspace(t0,t0+t1,N)\r\n y = R*np.sin(theta) + center[1]\r\n x = R*np.cos(theta) + center[0]\r\n return x,y",
"def get_arc_center(self):\n # First two anchors and handles\n a1, h1, h2, a2 = self.points[:4]\n # Tangent vectors\n t1 = h1 - a1\n t2 = h2 - a2\n # Normals\n n1 = rotate_vector(t1, TAU / 4)\n n2 = rotate_vector(t2, TAU / 4)\n try:\n return line_intersection(\n line1=(a1, a1 + n1),\n line2=(a2, a2 + n2),\n )\n except Exception:\n warnings.warn(\"Can't find Arc center, using ORIGIN instead\")\n return np.array(ORIGIN)",
"def image_proc(self):\r\n res_erode, res_in_rect = self._get_res_image()\r\n\r\n Moment_rect = cv2.moments(res_in_rect)\r\n if (Moment_rect[\"m00\"] != 0):\r\n self.rect_x = int(Moment_rect[\"m10\"] / Moment_rect[\"m00\"])\r\n self.rect_y = int(Moment_rect[\"m01\"] / Moment_rect[\"m00\"])\r\n\r\n # afin de trouver les centres de la rectangulaire.\r\n cnts, hierarchy = cv2.findContours(res_erode,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)\r\n for i in cnts:\r\n #Moment = cv2.moments(cnts)\r\n moment = cv2.moments(res_erode)\r\n self.cx = int(moment[\"m10\"] / moment[\"m00\"])\r\n self.cy = int(moment[\"m01\"] / moment[\"m00\"])\r\n cv2.circle(output_image,(self.cx,self.cy),7,(255,255,255),-1)\r\n cv2.putText(output_image, \"center\", (self.cx, self.cy), cv2.FONT_HERSHEY_SIMPLEX, 1, (255, 255, 255), 2)\r\n #print(cX,cY)\r",
"def get_circle(a, b, c):\n vec = [a[0]**2 + a[1]**2, b[0]**2 + b[1]**2, c[0]**2 + c[1]**2]\n x_mat = [vec, [a[1], b[1], c[1]], [1]*3]\n y_mat = [vec, [a[0], b[0], c[0]], [1]*3]\n d_mat = [[a[0], b[0], c[0]], [a[1], b[1], c[1]], [1] * 3]\n d = 2 * det(d_mat)\n x = 1 / d * det(x_mat)\n y = -1 / d * det(y_mat)\n center = [x, y]\n #r = norm(center - a)\n r = norm([center[0]-a[0], center[1]-a[1]])\n return center, r",
"def calculate_center(self):\n return [(self.startX + self.endX) / 2., (self.startY + self.endY) / 2.]",
"def center(self):\n xc = (self.x.max() + self.x.min())/2.\n yc = (self.y.max() + self.y.min())/2.\n return (xc, yc)",
"def center(self):\n return np.array([0,0,1/self.C+self.pos()])",
"def center(self):\n return [self.position[i]+self.radius for i in range(2)]",
"def plotCentroid(img, cnt, radius = 3, color=(255, 255, 0)):\n\tcx, cy = centroid(cnt)\n\tdrawCircle(img, (cx, cy), radius = radius, color = color)\n\treturn (cx, cy)",
"def attacker_position(inputs):\n rho, theta, _, _, _ = inputs\n x = rho * np.cos(theta)\n y = rho * np.sin(theta)\n return x, y",
"def center_of_coor(coordinates):\n return (np.sum(coordinates, axis=0) / coordinates.shape[0])",
"def calc_R(center):\r\n xc = center[0]\r\n yc = center[1]\r\n return np.sqrt((x-xc)**2 + (y-yc)**2)",
"def get_circle_coords(self, radius, divider, count,center_x, center_y):\n\n angle_deg = (360/divider)*count\n angle = radians(angle_deg-(90 + (360/divider)))\n x = radius*cos(angle) + center_x;\n y = radius*sin(angle) + center_y;\n return (int(x), int(y))",
"def getAfinityCenter(width, height, point, center, radius=7, img_affinity=None):\n tensor = torch.zeros(2, height, width).float()\n\n # Create the canvas for the affinity output\n imgAffinity = Image.new(\"RGB\", (width, height), \"black\")\n totensor = transforms.Compose([transforms.ToTensor()])\n\n draw = ImageDraw.Draw(imgAffinity)\n r1 = radius\n p = point\n draw.ellipse((p[0] - r1, p[1] - r1, p[0] + r1, p[1] + r1), (255, 255, 255))\n\n del draw\n\n # Compute the array to add the affinity\n array = (np.array(imgAffinity) / 255)[:, :, 0]\n\n angle_vector = np.array(center) - np.array(point)\n angle_vector = normalize(angle_vector)\n affinity = np.concatenate([[array * angle_vector[0]], [array * angle_vector[1]]])\n\n # print (tensor)\n if not img_affinity is None:\n # Find the angle vector\n # print (angle_vector)\n if length(angle_vector) > 0:\n angle = py_ang(angle_vector)\n else:\n angle = 0\n # print(angle)\n c = np.array(colorsys.hsv_to_rgb(angle / 360, 1, 1)) * 255\n draw = ImageDraw.Draw(img_affinity)\n draw.ellipse((p[0] - r1, p[1] - r1, p[0] + r1, p[1] + r1), fill=(int(c[0]), int(c[1]), int(c[2])))\n del draw\n re = torch.from_numpy(affinity).float() + tensor\n return re, img_affinity",
"def aerodynamic_center(self, chord_fraction: float = 0.25) -> np.ndarray:\n sectional_areas = self.area(_sectional=True)\n sectional_ACs = []\n\n for inner_xsec, outer_xsec in zip(self.xsecs[:-1], self.xsecs[1:]):\n\n section_taper_ratio = outer_xsec.chord / inner_xsec.chord\n section_MAC_length = (2 / 3) * inner_xsec.chord * (\n (1 + section_taper_ratio + section_taper_ratio ** 2) /\n (1 + section_taper_ratio)\n )\n section_MAC_le = (\n inner_xsec.xyz_le +\n (outer_xsec.xyz_le - inner_xsec.xyz_le) *\n (1 + 2 * section_taper_ratio) /\n (3 + 3 * section_taper_ratio)\n )\n section_AC = section_MAC_le + np.array([ # TODO rotate this vector by the local twist angle\n chord_fraction * section_MAC_length,\n 0,\n 0\n ])\n\n sectional_ACs.append(section_AC)\n\n sectional_AC_area_products = [\n AC * area\n for AC, area in zip(\n sectional_ACs,\n sectional_areas,\n )\n ]\n\n aerodynamic_center = sum(sectional_AC_area_products) / sum(sectional_areas)\n\n aerodynamic_center += self.xyz_le\n\n if self.symmetric:\n aerodynamic_center[1] = 0\n\n return aerodynamic_center",
"def get_center_ball(self, output):\n output = output.reshape((360, 640))\n\n # cv2 image must be numpy.uint8, convert numpy.int64 to numpy.uint8\n output = output.astype(np.uint8)\n\n # reshape the image size as original input image\n heatmap = cv2.resize(output, (640, 360))\n\n # heatmap is converted into a binary image by threshold method.\n ret, heatmap = cv2.threshold(heatmap, 127, 255, cv2.THRESH_BINARY)\n\n # find the circle in image with 2<=radius<=7\n circles = cv2.HoughCircles(heatmap, cv2.HOUGH_GRADIENT, dp=1, minDist=1, param1=50, param2=2, minRadius=2,\n maxRadius=7)\n # check if there have any tennis be detected\n if circles is not None:\n # if only one tennis be detected\n if len(circles) == 1:\n x = int(circles[0][0][0])\n y = int(circles[0][0][1])\n\n return x, y\n return None, None",
"def circle(draw, centrex, centrey, radius, color=\"#AAAAAAFF\") -> None:\n # convert cartesian centre to pixel centre\n cx, cy = pixelcoord(centrex, centrey)\n # top left and bottom right coordinates\n rect = [(cx-radius, cy-radius), (cx+radius, cy+radius)]\n # draw\n draw.arc(rect, 0, 360, color)",
"def find_center(self) -> tuple:\r\n \r\n # Add up all the x values of pixels in the plant\r\n # Then divide by total pixels in the plant\r\n avg_x = sum([i[0] for i in self.cluster]) / len(self.cluster)\r\n\r\n # Add up all the y values of pixels in the plant\r\n # Then divide by total pixels in the plant\r\n avg_y = sum([i[1] for i in self.cluster]) / len(self.cluster)\r\n\r\n self.center = (int(round(avg_x)), int(round(avg_y)))\r\n \r\n # return the results in a tuple of integers\r\n return self.center",
"def _compute_ball_visualization(self, center, radius, angle):\r\n x_coord = [center[0]]\r\n y_coord = [center[1]]\r\n\r\n angles = np.linspace(angle, angle + 2 * np.pi, 100)\r\n\r\n x_coord.extend([center[0] - radius * np.sin(a) for a in angles])\r\n y_coord.extend([center[1] + radius * np.cos(a) for a in angles])\r\n\r\n return [x_coord, y_coord]"
] | [
"0.6132837",
"0.5954457",
"0.5869159",
"0.5858206",
"0.5853505",
"0.5796136",
"0.57292473",
"0.5709713",
"0.5697524",
"0.5693983",
"0.56876165",
"0.56802106",
"0.56581277",
"0.56567794",
"0.5652664",
"0.5648251",
"0.56471676",
"0.5623604",
"0.561589",
"0.56110007",
"0.5595283",
"0.5580179",
"0.5579294",
"0.55617416",
"0.5560451",
"0.55415034",
"0.55311775",
"0.5514491",
"0.550758",
"0.54843956"
] | 0.69552195 | 0 |
Compute the PEAK of the PSF without aberrations so that we can normalize everything by it | def peak_PSF(self):
im, strehl = self.compute_PSF(np.zeros(self.N_act))
return strehl | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def pulp_smash():",
"def peak_PSF(self):\n return self.compute_PSF(np.zeros(self.N_zern))",
"def cal_pn(grams_set, grams, candidate, reference):\n count = 0\n for gram in grams_set:\n # print(gram)\n count += count_clip(gram, grams, reference)\n # calculate log() for p, so '+10**-8' avoid 'p==0'\n p = count / len(grams) + 10**-8 \n return p",
"def f_UPPS_pc(v, P_0, r_f, d, s, T, wealth, phi, n_s, n_o, K):\n W_T = f_W_T_pc(v, P_0, r_f, d, s, T, wealth, phi, n_s, n_o, K)\n value = pow(W_T, -gamma) * f_W_T_to_P_T_pc(v, P_0, r_f, d, s, T, wealth, phi, n_s, n_o, K) * f_P_T_to_P_0(v, r_f, d, s, T)\n return value",
"def precompute_scoring():\n global volume_void_inclusion\n global attract_point_distances\n global perlin_values\n \n volume_void_inclusion = []\n for i,void in enumerate(volumes_void):\n inclusion = gh.PointInBrep(void,points_input,False)\n volume_void_inclusion.append(inclusion)\n \n attract_point_distances = []\n for i,point in enumerate(points_attractor):\n distances = gh.Division(gh.Distance(point,points_input),max_dist)\n attract_point_distances.append(distances)",
"def _get_single_PSF(SCA, bandpass, SCA_pos, pupil_bin,\n n_waves, extra_aberrations, wavelength,\n pupil_plane_type, gsparams):\n from .. import OpticalPSF, ChromaticOpticalPSF\n from . import diameter\n from ..bandpass import Bandpass\n from .roman_bandpass import getBandpasses\n\n if wavelength is None:\n wave = zemax_wavelength\n elif isinstance(wavelength, Bandpass):\n wave = wavelength = wavelength.effective_wavelength\n else:\n wave = wavelength\n\n # All parameters relevant to the aperture. We may be able to use a cached version.\n aper = _make_aperture(SCA, pupil_plane_type, pupil_bin, wave, gsparams)\n\n # Start reading in the aberrations for that SCA\n aberrations, x_pos, y_pos = _read_aberrations(SCA)\n # Do bilinear interpolation, unless we're exactly at the center (default).\n use_aberrations = _interp_aberrations_bilinear(aberrations, x_pos, y_pos, SCA_pos)\n\n if extra_aberrations is not None:\n use_aberrations[:len(extra_aberrations)] += extra_aberrations\n # We don't want to use piston, tip, or tilt aberrations. The former doesn't affect the\n # appearance of the PSF, and the latter cause centroid shifts. So, we set the first 4\n # numbers (corresponding to a place-holder, piston, tip, and tilt) to zero.\n use_aberrations[0:4] = 0.\n\n # Now set up the PSF, including the option to interpolate over waves\n if wavelength is None:\n PSF = ChromaticOpticalPSF(lam=zemax_wavelength,\n diam=diameter, aberrations=use_aberrations,\n aper=aper, gsparams=gsparams)\n if n_waves is not None:\n # To decide the range of wavelengths to use, check the bandpass.\n bp_dict = getBandpasses()\n bp = bp_dict[bandpass]\n PSF = PSF.interpolate(waves=np.linspace(bp.blue_limit, bp.red_limit, n_waves),\n oversample_fac=1.5)\n else:\n tmp_aberrations = use_aberrations * zemax_wavelength / wavelength\n PSF = OpticalPSF(lam=wavelength, diam=diameter,\n aberrations=tmp_aberrations,\n aper=aper, gsparams=gsparams)\n\n return PSF",
"def test_nuke_psfs():\n # Without multiprocessing\n mt.nuke_psfs(mprocessing=False)\n\n # With multiprocessing\n mt.nuke_psfs()",
"def calculate_gmpe(src_keys, station, output_file, rrups, gmpe_group_name):\n gmpe_group = gmpe_config.GMPES[gmpe_group_name]\n origin = (src_keys['lon_top_center'], src_keys['lat_top_center'])\n dims = (src_keys['fault_length'], src_keys['dlen'],\n src_keys['fault_width'], src_keys['dwid'],\n src_keys['depth_to_top'])\n mech = (src_keys['strike'], src_keys['dip'], src_keys['rake'])\n\n # Station location\n site_geom = [float(station.lon), float(station.lat), 0.0]\n (fault_trace1, upper_seis_depth,\n lower_seis_depth, ave_dip,\n dummy1, dummy2) = putils.FaultTraceGen(origin, dims, mech)\n rjb, rrup, rx = putils.DistanceToSimpleFaultSurface(site_geom,\n fault_trace1,\n upper_seis_depth,\n lower_seis_depth,\n ave_dip)\n\n print \"station: %s, Rrup: %f\" % (station.scode, rrup)\n rrups.append(rrup)\n\n vs30 = 1000\n z10 = None # Let PyNGA calculate it\n z25 = None # Let PyNGA calculate it\n\n # Compute PSA for this stations\n station_median = []\n for period in gmpe_group[\"periods\"]:\n period_medians = []\n for nga_model in gmpe_group[\"models\"]:\n median = gmpe_config.calculate_gmpe(gmpe_group_name,\n nga_model,\n src_keys['magnitude'],\n rjb, vs30,\n period,\n rake=src_keys['rake'],\n dip=src_keys['dip'],\n W=src_keys['fault_width'],\n Ztor=src_keys['depth_to_top'],\n Rrup=rrup, Rx=rx,\n Z10=z10, Z25=z25)\n period_medians.append(median)\n station_median.append((period, period_medians))\n\n # Create label\n file_label = \"\"\n for nga_model in gmpe_group[\"models\"]:\n file_label = \"%s %s\" % (file_label, nga_model)\n # Output data to file\n outfile = open(output_file, 'w')\n outfile.write(\"#station: %s\\n\" % (station.scode))\n outfile.write(\"#period%s\\n\" % (file_label))\n for item in station_median:\n period = item[0]\n vals = item[1]\n out_str = \"%.4f\" % (period)\n for method in vals:\n out_str = out_str + \"\\t%.6f\" % (method)\n outfile.write(\"%s\\n\" % (out_str))\n outfile.close()\n\n # Return list\n return station_median",
"def ppf(self,x):\n # TODO speed this up by doing it in Crow, not in python\n if hasattr(x,'__len__'):\n returnPpf = np.array([self.ppf(i) for i in x])\n else:\n returnPpf = self._distribution.inverseCdf(x)\n return returnPpf",
"def ppf(self,x):\n return self.categoricalDist.ppf(x)",
"def compute_PSSM_self_information(p):\n return -sp.sum(p*sp.log(p))",
"def compute_PSF(self, coef, crop=True):\n\n phase = np.dot(self.RBF_mat, coef) + self.defocus\n\n pupil_function = self.pupil_mask * np.exp(2 * np.pi * 1j * phase)\n image = (np.abs(fftshift(fft2(pupil_function))))**2\n\n try:\n image /= self.PEAK\n\n except AttributeError:\n # If self.PEAK is not defined, self.compute_PSF will compute the peak\n pass\n\n strehl = np.max(image)\n\n if crop:\n image = image[self.minPix:self.maxPix, self.minPix:self.maxPix]\n else:\n pass\n return image, strehl",
"def calc_prob_local(self, *args):\n return 0",
"def getpval(teststat, statlist):\n \n propzero = 0\n bootvals = []\n for val in statlist:\n if val == 0:\n propzero += 1\n else:\n bootvals.append(val)\n \n propzero = float(propzero) / len(statlist)\n \n shapeinit = getstartingshape(statlist)\n \n shape = optimiselike(getlikeweibull, bootvals, shapeinit)\n scale = (sum(bootvals) / len(bootvals)) / scipy.special.gamma(1 + 1/shape)\n \n pvalue = math.exp(- (teststat/scale) ** shape)\n \n return pvalue * (1 - propzero)",
"def do_pnp(pts3d_for_pnp, pts2d_for_pnp, K, iterations=200, reprojThresh=5):\n list_pts3d_for_pnp = pts3d_for_pnp\n list_pts2d_for_pnp = pts2d_for_pnp\n pts3d_for_pnp = np.array(pts3d_for_pnp)\n # pts2d_for_pnp = np.expand_dims(np.squeeze(np.array(pts2d_for_pnp)), axis=1)\n # print(pts3d_for_pnp)\n # print(pts2d_for_pnp.shape)\n num_pts = len(pts3d_for_pnp)\n print(num_pts)\n highest_inliers = 0\n for j in range(iterations):\n pt_idxs = np.random.choice(num_pts, 6, replace=False)\n pts3 = np.array([pts3d_for_pnp[pt_idxs[i]] for i in range(len(pt_idxs))])\n # print(\"pts\",pts3)\n pts2 = np.array([pts2d_for_pnp[pt_idxs[i]] for i in range(len(pt_idxs))])\n _, rvec, tvec = cv2.solvePnP(pts3, pts2, K, distCoeffs=np.array([]), flags=cv2.SOLVEPNP_ITERATIVE)\n R, _ = cv2.Rodrigues(rvec)\n pnp_errors, projpts, avg_err, perc_inliers = test_reproj_pnp_points(list_pts3d_for_pnp, list_pts2d_for_pnp, R, tvec, K, rep_thresh=reprojThresh)\n if highest_inliers < perc_inliers:\n highest_inliers = perc_inliers\n best_R = R\n best_tvec = tvec\n R = best_R\n tvec = best_tvec\n # print('rvec:', rvec,'\\n\\ntvec:', tvec)\n print(\"avg\",avg_err)\n print(\"inlier\",perc_inliers)\n return R, tvec",
"def find_mpe(fbn, sbn, compat, beta, e):\n evars = set(e)\n freevars = [v for v in fbn.V if v.name not in evars]\n\n # para instanaciar las variables splitted primero. Ver popsition 1\n # del paper\n freevars.sort(key=lambda x: x.name in compat) \n \n t = datetime.now()\n ac = dnnf.todnnf(sbn)\n print datetime.now() - t\n print \"dfs\", freevars\n def dfs(q, varsleft, z, k):\n \"\"\"\n q: cota actual\n varsleft: variables que faltan por instanciar. Se sacan del final.\n z: instanciacion parcial actual\n k: numero de variables splitted que falta por instanciar\n \"\"\"\n var = varsleft.pop()\n varname = var.name\n domain = var.Domain\n k -= 1\n clones = []\n if varname in compat:\n for clone in compat[varname]:\n clones.append(clone)\n\n # probar todos sus posibles valores\n for value in domain:\n # agregar ese valor a la instancia parcial\n z[varname] = value\n for clone in clones:\n z[clone] = value\n p = ac.mpe(z)\n\n if varsleft:\n # si todavia quedan variables por asignar\n # hacer prune si podemos\n \n if k<=0:\n # ya todas las variables splitted estan\n # asignadas. Ahora el MPE(sbn) = MPE(fbn), no hace\n # falta hacer mas asignaciones para obtener el\n # valor exacto (Proposicion 1 del paper)\n q = max(q, beta*p)\n else:\n if p*beta <= q:\n # la cota superior sobre sbc es menor que la\n # cota inferior q que llevamos. Por aqui no\n # hay nada mejor\n continue\n else:\n # todavia puede haber algo bueno por aqui\n q = max(q, dfs(q, varsleft, z, k))\n else:\n # si no queda ninguna variable por asignar.\n # por un teorema, el MPE(fbn, x) == beta*MPE(sbn, x)\n q = max(q, beta*p)\n\n # regresar todo al estado orignal\n varsleft.append(var)\n del z[varname]\n for clone in clones:\n del z[clone]\n return q\n\n return dfs(0.0, freevars, e, len(compat))",
"def test_AFQ_pft():\n _, bids_path, sub_path = get_temp_hardi()\n\n bundle_names = [\"SLF\", \"ARC\", \"CST\", \"FP\"]\n\n f_pve_csf, f_pve_gm, f_pve_wm = get_fnames('stanford_pve_maps')\n os.rename(f_pve_wm, op.join(sub_path, \"sub-01_ses-01_WMprobseg.nii.gz\"))\n os.rename(f_pve_gm, op.join(sub_path, \"sub-01_ses-01_GMprobseg.nii.gz\"))\n os.rename(f_pve_csf, op.join(sub_path, \"sub-01_ses-01_CSFprobseg.nii.gz\"))\n\n stop_mask = PFTMask(\n MaskFile(\"WMprobseg\"),\n MaskFile(\"GMprobseg\"),\n MaskFile(\"CSFprobseg\"))\n\n my_afq = api.AFQ(\n bids_path,\n dmriprep='vistasoft',\n bundle_info=bundle_names,\n tracking_params={\n \"stop_mask\": stop_mask,\n \"stop_threshold\": \"CMC\",\n \"tracker\": \"pft\"\n })\n my_afq.export_streamlines()",
"def ps(image):\n\timage = image.astype(float)\n\tps_img = abs(pow(fft2(image), 2))\n\treturn ps_img",
"def calculate_precinct_score(pt, dstrct):\n return pt.F(dstrct)",
"def calculate_prp(self, ref_point: np.ndarray, f_current: np.ndarray) -> np.ndarray:\n\n # distance\n d = np.linalg.norm(np.atleast_2d(ref_point - f_current))\n\n # unit vectors\n ei = np.array([np.zeros(len(ref_point))])\n es = np.repeat(ei, len(ref_point), axis=0)\n\n for i, j in enumerate(es):\n for ind, _ in enumerate(j):\n if ind == i:\n j[ind] = 1\n\n return ref_point + (d * es)",
"def pdb2pka_sugelm(self):\n import Protool\n P=Protool.structureIO()\n P.readpdb(self.pdbfile)\n P.RemoveALT()\n #import Protool.mutate\n #MUT=Protool.mutate.Mutate(P)\n #\n # Construct arrays\n #\n import pKD_dict\n self.data=pKD_dict.pKD_dict()\n self.atom_data=pKD_dict.pKD_dict()\n #\n # Create dir for mutant PDB files\n #\n import os\n mutdir=os.path.join(self.topdir,self.pdbfile+'.pdbs')\n if not os.path.isdir(mutdir):\n os.mkdir(mutdir)\n #\n # Loop over all residues\n #\n residues=P.residues.keys()\n residues.sort()\n for residue in residues:\n orgres=P.resname(residue)\n print 'Calculating for %s %s' %(residue,P.resname(residue))\n #\n # If neutral mutate to Asp, Glu, Lys, Arg, His\n #\n targets=[]\n for res in ['ARG','LYS','HIS','ASP','GLU']:\n if P.resname(residue)!=res:\n targets.append(res)\n #if orgres=='GLU':\n # targets.append('GLN')\n #elif orgres=='ASP':\n # targets.append('ASN')\n #elif orgres=='HIS':\n # targets.append('PHE')\n #elif orgres=='ARG' or P.resname(residue)=='LYS':\n # targets.append('MET')\n #\n # Target identified. Now model each\n #\n for target in targets:\n import pKD_tools\n resid=pKD_tools.get_resid_from_res(residue)\n orgres=P.resname(residue)\n filename=os.path.join(mutdir,'%s:%s:%s.pdb' %(residue,orgres,target))\n mutation='%s:%s:%s' %(residue,orgres,target)\n if not os.path.isfile(filename):\n import Design_pKa_help\n Design_pKa_help.make_mutation(self.pdbfile,mutation)\n NP=Protool.structureIO()\n NP.readpdb(filename)\n NP.writepdb(filename,TER=None)\n #\n # Calculate the interaction energies\n #\n protein,routines,forcefield,apbs_setup,lig_titgrps = pdb2pka.pre_init(pdbfilename=filename,\n ff='parse',\n ligand=None,\n verbose=1)\n mypkaRoutines = pdb2pka.pKaRoutines(protein, routines, forcefield,apbs_setup)\n #\n # Find our group\n #\n sp=residue.split(':')\n chainid=sp[0]\n resnum=int(sp[1])\n mypkaRoutines.findTitratableGroups()\n this_pKa=None\n for pKa in mypkaRoutines.pKas:\n print pKa.residue.resSeq,resnum\n print pKa.residue.chainID,chainid\n print pKa.residue.name,target\n print pKa.pKaGroup.name,target\n print '--------------'\n print 'ChainID',pKa.residue.chainID\n if pKa.residue.resSeq==resnum and pKa.residue.chainID==chainid and pKa.residue.name==target and pKa.pKaGroup.name==target:\n #print 'Found group',pKa.residue.resSeq,pKa.pKaGroup.name\n this_pKa=pKa\n break\n if not this_pKa:\n raise Exception,'Could not find inserted titratable group'\n mypkaRoutines.get_interaction_energies_setup(this_pKa,mode='pKD')\n matrix=mypkaRoutines.matrix\n #\n # Dig the interaction energies out of the pdb2pka array\n #\n for titration1 in matrix[this_pKa].keys():\n for state1 in matrix[this_pKa][titration1].keys():\n grp_sub=matrix[this_pKa][titration1][state1]\n if mypkaRoutines.is_charged(this_pKa,titration1,state1):\n for pKa2 in grp_sub.keys(): \n import string\n chainID2=pKa.residue.chainID\n resid2='%s:%s' %(chainID2,string.zfill(pKa2.residue.resSeq,4))\n for titration2 in grp_sub[pKa2].keys():\n for state2 in grp_sub[pKa2][titration2].keys():\n if mypkaRoutines.is_charged(pKa2,titration2,state2):\n #\n # Both states are charged, so now we can pull the\n # interaction energies out\n #\n if not self.data.has_key(mutation):\n self.data[mutation]={}\n self.data[mutation][resid2]=grp_sub[pKa2][titration2][state2]\n #\n # Get the potentials at all atoms too\n #\n all_pots=mypkaRoutines.all_potentials[this_pKa][titration1][state1]\n sub_all_pots=all_pots[pKa2][titration2][state2]\n for atom in sub_all_pots.keys():\n resid=mutation\n import 
pKD_tools\n resid2=pKD_tools.get_resid_from_res(atom)\n atomname=atom.split(':')[-1] #atom.name\n if atomname[0]=='H' or atomname in ['N','C','O']:\n continue # Skip all H atoms and all non-CA backbone atoms to save memory\n if not self.atom_data.has_key(resid):\n self.atom_data[resid]={}\n if not self.atom_data[resid].has_key(resid2):\n self.atom_data[resid][resid2]={}\n self.atom_data[resid][resid2][atomname]=abs(sub_all_pots[atom])\n return self.data,self.atom_data",
"def compute_pvalue(self):\n # Run permutation test\n self.PermutationTest()\n # TS obtained from the original B,T samples\n self.compute_obs_TS()\n \n # Mean and std of the TS distribution\n self.mu = np.mean(self.TS_tilde)\n self.sigma = np.std(self.TS_tilde)\n \n # Standardized test statistic (zero mean, unit variance)\n self.TS_prime = (self.TS_tilde - self.mu)/self.sigma\n self.TS_prime_obs = (self.TS_obs - self.mu)/self.sigma\n \n # Two-sided p-value from TS' distribution\n self.p_value = 2*(1 - 0.01 * stats.percentileofscore(self.TS_prime,\n abs(self.TS_prime_obs)) )\n \n # if 0, compute it from standard normal\n if self.p_value == 0.0:\n self.p_value = self.pvalue_gaussian()\n \n print(\"\")\n print(\"p-value = {:e}\".format(self.p_value))",
"def __init__(self, peak, pki, parent, multiColorPeak=None):\n # Peak object\n self.peak = peak\n # int, peak index number\n self.pki = pki\n self.parent = parent\n self.multiColorPeak = multiColorPeak\n # union of all the ways of failing...\n self.skip = False\n\n self.outOfBounds = False\n self.tinyFootprint = False\n self.noValidPixels = False\n self.deblendedAsPsf = False\n self.degenerate = False\n\n # Field set during _fitPsf:\n self.psfFitFailed = False\n self.psfFitBadDof = False\n # (chisq, dof) for PSF fit without decenter\n self.psfFit1 = None\n # (chisq, dof) for PSF fit with decenter\n self.psfFit2 = None\n # (chisq, dof) for PSF fit after applying decenter\n self.psfFit3 = None\n # decentered PSF fit wanted to move the center too much\n self.psfFitBigDecenter = False\n # was the fit with decenter better?\n self.psfFitWithDecenter = False\n #\n self.psfFitR0 = None\n self.psfFitR1 = None\n self.psfFitStampExtent = None\n self.psfFitCenter = None\n self.psfFitBest = None\n self.psfFitParams = None\n self.psfFitFlux = None\n self.psfFitNOthers = None\n\n # Things only set in _fitPsf when debugging is turned on:\n self.psfFitDebugPsf0Img = None\n self.psfFitDebugPsfImg = None\n self.psfFitDebugPsfDerivImg = None\n self.psfFitDebugPsfModel = None\n\n self.failedSymmetricTemplate = False\n\n # The actual template Image and Footprint\n self.templateImage = None\n self.templateFootprint = None\n\n # The flux assigned to this template -- a MaskedImage\n self.fluxPortion = None\n\n # The stray flux assigned to this template (may be None), a HeavyFootprint\n self.strayFlux = None\n\n self.hasRampedTemplate = False\n\n self.patched = False\n\n # debug -- a copy of the original symmetric template\n self.origTemplate = None\n self.origFootprint = None\n # MaskedImage\n self.rampedTemplate = None\n # MaskedImage\n self.medianFilteredTemplate = None\n\n # when least-squares fitting templates, the template weight.\n self.templateWeight = 1.0",
"def _paa(self):\n self.paa = np.array([self.series[i * self.points_per_symbol : (i + 1) * self.points_per_symbol].mean() for i in range(len(self.series) / self.points_per_symbol)])",
"def calculate_p(candidate, reference):\n matches = 0\n for grama in candidate:\n if grama in reference:\n matches += 1\n return matches/len(candidate)",
"def get_fpp(self, i_seq):\n if (not self.use_fdp[i_seq]):\n return None\n if (self.fpp_from_phaser_ax_sites is None):\n return self.site_fdp[i_seq]\n fpp = self.fpp_from_phaser_ax_sites[i_seq]\n if (fpp < 0) : fpp = None\n return fpp",
"def do_pnp(pts3d_for_pnp, pts2d_for_pnp, K, iterations=200, reprojThresh=5):\n list_pts3d_for_pnp = pts3d_for_pnp\n list_pts2d_for_pnp = pts2d_for_pnp\n pts3d_for_pnp = np.squeeze(np.array(pts3d_for_pnp))\n pts2d_for_pnp = np.expand_dims(np.squeeze(np.array(pts2d_for_pnp)), axis=1)\n num_pts = len(pts3d_for_pnp)\n\n highest_inliers = 0\n for i in range(iterations):\n pt_idxs = np.random.choice(num_pts, 6, replace=False)\n pts3 = np.array([pts3d_for_pnp[pt_idxs[i]] for i in range(len(pt_idxs))])\n pts2 = np.array([pts2d_for_pnp[pt_idxs[i]] for i in range(len(pt_idxs))])\n _, rvec, tvec = cv2.solvePnP(pts3, pts2, K, distCoeffs=np.array([]), flags=cv2.SOLVEPNP_ITERATIVE)\n R, _ = cv2.Rodrigues(rvec)\n pnp_errors, projpts, avg_err, perc_inliers = test_reproj_pnp_points(list_pts3d_for_pnp, list_pts2d_for_pnp, R, tvec, K, rep_thresh=reprojThresh)\n if highest_inliers < perc_inliers:\n highest_inliers = perc_inliers\n best_R = R\n best_tvec = tvec\n R = best_R\n tvec = best_tvec\n print('rvec:', rvec,'\\n\\ntvec:', tvec)\n\n return R, tvec",
"def pss(self):\n return (self.table[0, 0] * self.table[1, 1] - self.table[0, 1] * self.table[1, 0]) / \\\n ((self.table[0, 0] + self.table[1, 0]) * (self.table[0, 1] + self.table[1, 1]))",
"def getPSF(self, z_value, shape = None, normalize = False):\n assert False",
"def infer_pvalues(self, p=0.05, mcp='maxstat'):\n # ---------------------------------------------------------------------\n # check that pac and surrogates has already been computed\n assert hasattr(self, 'pac'), (\"You should compute PAC first. Use the \"\n \"`fit` method\")\n assert hasattr(self, 'surrogates'), \"No surrogates computed\"\n\n # mean pac and surrogates across trials\n m_pac, m_surro = self.pac.mean(2), self.surrogates.mean(3)\n self._pvalues = self._infer_pvalues(m_pac, m_surro, p=p, mcp=mcp)\n\n return self._pvalues"
] | [
"0.58542764",
"0.58267516",
"0.5633884",
"0.5620691",
"0.5568786",
"0.5546477",
"0.5538438",
"0.5512286",
"0.5491357",
"0.5486278",
"0.53827614",
"0.535951",
"0.5333681",
"0.53270435",
"0.5324692",
"0.5322697",
"0.53162724",
"0.5314971",
"0.5281472",
"0.5278054",
"0.5277989",
"0.52655154",
"0.5253501",
"0.5233621",
"0.5231598",
"0.5224775",
"0.52214766",
"0.5193918",
"0.5190423",
"0.51874524"
] | 0.60214907 | 0 |
Plot an image of the PSF | def plot_PSF(self, coef, wave_idx):
PSF, strehl = self.compute_PSF(coef, wave_idx)
plt.figure()
plt.imshow(PSF)
plt.title('Strehl: %.3f' %strehl)
plt.colorbar()
plt.clim(vmin=0, vmax=1) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def plot_prodata_psf(self,font_size=28,img_name='prodata_psf.pdf',img_id=0):\n rawimage = self.raw_image\n dataimage = self.data\n len_mask = self.lens_mask\n plu_mask_out = self.plu_mask\n\n fig, (ax1, ax2, ax3, ax4,ax5) = plt.subplots(1, 5, figsize=(19, 10))\n ax1.imshow((rawimage), origin='lower', cmap=\"gist_heat\")\n ax1.set_title('Original Image', fontsize=font_size)\n ax1.text(rawimage.shape[0] * 0.55, rawimage.shape[0] * 0.8, 'ID='+repr(img_id), size=12, color='white',\n weight=\"bold\")\n ax1.text(rawimage.shape[0] * 0.2, rawimage.shape[0] * 0.05, 'observation', size=20, color='white', weight=\"bold\")\n ax1.axis('off')\n #\n ax2.imshow((dataimage), origin='lower', cmap=\"gist_heat\")\n ax2.set_title('Image Data', fontsize=font_size)\n ax2.text(dataimage.shape[0] * 0.2, dataimage.shape[0] * 0.05, 'image data', size=20, color='white', weight=\"bold\")\n ax2.axis('off')\n #\n ax3.imshow(len_mask, origin='lower')\n ax3.set_title('Lens light', fontsize=font_size)\n ax3.axis('off')\n #\n ax4.imshow(plu_mask_out, origin='lower')\n ax4.set_title('Mask', fontsize=font_size)\n ax4.axis('off')\n#\n psf=self.psf\n ax5.imshow(np.log10(psf), origin='lower', cmap=\"gist_heat\")\n ax5.set_title('lg(PSF)', fontsize=font_size)\n ax5.axis('off')\n\n plt.show()\n fig.savefig(img_name)\n return 0",
"def plot_fppy(self,LAXIS,xbl,xbr,ybu,ybd,ilg): \n\t\t\n # load x GRID\n grd1 = self.xzn0\n\t\n # load DATA to plot\n plt1 = self.fppy\n\t\t\t\t\n # create FIGURE\n plt.figure(figsize=(7,6))\n\t\t\n # format AXIS, make sure it is exponential\n plt.gca().yaxis.get_major_formatter().set_powerlimits((0,0))\t\t\n\n # set plot boundaries \n to_plot = [plt1]\t\t\n self.set_plt_axis(LAXIS,xbl,xbr,ybu,ybd,to_plot)\n\t\t\t\t\n # plot DATA \n plt.title(r'pressure flux y')\n plt.plot(grd1,plt1,color='brown',label = r'f$_{py}$')\n\n # define and show x/y LABELS\n setxlabel = r\"r (cm)\"\n setylabel = r\"$f_{py}$ (erg cm$^{-2}$ s$^{-1}$)\"\n plt.xlabel(setxlabel)\n plt.ylabel(setylabel)\n\t\t\n # show LEGEND\n plt.legend(loc=ilg,prop={'size':18})\n\n # display PLOT\n plt.show(block=False)\n\n # save PLOT\n plt.savefig('RESULTS/'+self.data_prefix+'mean_fppy.png')",
"def plot_pz(pz_inst):\n plt.figure()\n \n plt.plot(pz_inst.z_data, pz_inst.p_data)\n \n plt.savefig(\"pz_figure.png\")\n plt.close()",
"def show(image):\n fig = pyplot.figure()\n ax = fig.add_subplot(1,1,1)\n imgplot = ax.imshow(image, cmap=mpl.cm.Greys)\n imgplot.set_interpolation('nearest')\n ax.xaxis.set_ticks_position('top')\n ax.yaxis.set_ticks_position('left')\n pyplot.show()",
"def plot(self,id=1,dpi=150):\n fig = plt.figure(id)\n ax1 = fig.add_subplot(111)\n ax1.imshow(self.image,interpolation='nearest',extent=[self.xmin,self.xmax,\n self.ymin,self.ymax], origin='lower')\n #plt.savefig('.png',dpi=dpi)\n plt.draw()",
"def plotSate(s,i,seed):\r\n fig, ax = plt.subplots()\r\n\r\n im = ax.imshow(s)\r\n\r\n plt.xticks([i for i in range(dim)], \"\")\r\n plt.yticks([i for i in range(dim)], \"\")\r\n\r\n fig.tight_layout()\r\n plt.savefig(\"Systems/\" + str(dim) + \"_\" + str(seed) + \"/Images/\" + str(i) +\r\n \".jpeg\",quality=80,optimize=True,\r\n dpi=80,progressive=True,transparent=True)\r\n fig.clear()\r\n plt.close(fig)",
"def show(image):\n from matplotlib import pyplot\n import matplotlib as mpl\n fig = pyplot.figure()\n ax = fig.add_subplot(1,1,1)\n imgplot = ax.imshow(image, cmap=mpl.cm.Greys)\n imgplot.set_interpolation('nearest')\n ax.xaxis.set_ticks_position('top')\n ax.yaxis.set_ticks_position('left')\n pyplot.show()",
"def show(image):\n from matplotlib import pyplot\n import matplotlib as mpl\n fig = pyplot.figure()\n ax = fig.add_subplot(1,1,1)\n imgplot = ax.imshow(image, cmap=mpl.cm.Greys)\n imgplot.set_interpolation('nearest')\n ax.xaxis.set_ticks_position('top')\n ax.yaxis.set_ticks_position('left')\n pyplot.show()",
"def show(image):\n from matplotlib import pyplot\n import matplotlib as mpl\n fig = pyplot.figure()\n ax = fig.add_subplot(1,1,1)\n imgplot = ax.imshow(image, cmap=mpl.cm.Greys)\n imgplot.set_interpolation('nearest')\n ax.xaxis.set_ticks_position('top')\n ax.yaxis.set_ticks_position('left')\n pyplot.show()",
"def plot(self):\n\t\tself.plotOfTF().plot()",
"def pf_plot(pf, t):\n xx = pf.XS[t, :, 0]\n yy = pf.XS[t, :, 1]\n ww = pf.WS[t, :]\n plt.scatter(xx, yy, s=ww * 5000)",
"def show(image):\n fig = pyplot.figure()\n axis = fig.add_subplot(1, 1, 1)\n imgplot = axis.imshow(image)\n imgplot.set_interpolation('nearest')\n axis.xaxis.set_ticks_position('top')\n axis.yaxis.set_ticks_position('left')\n pyplot.show()",
"def psf_plot(self, irfname=None, outfile='psf.csv', title=''):\n psf = self.get_psf(irfname)\n \n def bkg_size(e, ct):\n f2 = lambda delta: psf(e,ct, delta)**2 * 2*np.pi*delta\n return np.degrees(1./np.sqrt(np.pi*integrate.quad(f2, 0, np.inf)[0]))\n \n def loc_size(e, ct):\n func = lambda x : psf(e,ct, x)\n fprime = lambda x : misc.derivative(func, x, dx=0.0001, order=5)\n integrand = lambda rp : rp * fprime(rp)**2/func(rp) * np.pi\n return np.degrees(1/np.sqrt(integrate.quad(integrand, 0, np.radians(5))[0]))\n \n \n egev = np.logspace(-1.+1/8., 2.5+1/8., 3.5*4+1)\n front, back = [[bkg_size(e*1e3,ct) for e in egev] for ct in range(2)]\n floc, bloc = [[loc_size(e*1e3,ct) for e in egev] for ct in range(2)]\n f68,b68 = [[psf.inverse_integral(e*1e3, ct) for e in egev] for ct in range(2)]\n fig,ax = plt.subplots(figsize=(6,6))\n for x, s, label in zip((front, back, floc, bloc, f68, b68),\n ('-g', 'r', '--g', '--r', ':g', ':r'),\n ('front bkg', 'back bkg','front loc', 'back loc', 'front 68', 'back 68')):\n ax.plot(egev, x, s, lw=2, label=label)\n \n plt.setp(ax, xlabel='Energy (GeV)', ylabel='PSF size (deg)', xscale='log', yscale='log',\n xlim=(0.1, 100), ylim=(0.02, 8), title=title)\n ax.legend(prop=dict(size=10)); ax.grid()\n #x.set_xticklabels('0.1 1 10 100'.split())\n #ax.set_yticklabels('0.01 0.1 1'.split())\n if outfile is None: return fig\n self.psf_df = pd.DataFrame(dict(front=front, floc=floc, back=back, bloc=bloc,f68=f68,b68=b68), \n index=egev.round(3))\n self.psf_df.index.name='energy'\n self.psf_df.to_csv(os.path.join(self.plotfolder, outfile))\n print ('wrote file %s' % os.path.join(self.plotfolder, outfile))\n return fig",
"def show_image(path):\n img = mpimg.imread(path)\n imgplot = plt.imshow(img)\n plt.show()\n plt.close()",
"def Plot(self):\n\n ### Create the path names ###\n folder_string = self.params.folder+\"/plots/\"\n u_string = self.params.folder+\"/plots/u.pdf\"\n p_string = self.params.folder+\"/plots/p.pdf\"\n\n ### Check if folder exists ###\n if not os.path.exists(folder_string): os.makedirs(folder_string)\n\n ### Plot the x component of velocity ###\n plot(self.u_next[0],title=\"Velocity in the x Direction\")\n plt.savefig(u_string)\n plt.figure()\n\n ### Plot the pressure ###\n plot(self.p_next,title=\"Pressure\")\n plt.savefig(p_string)\n plt.show()",
"def test_random_single_image():\n\n shap.image_plot(np.random.randn(3, 20, 20), np.random.randn(3, 20, 20), show=False)",
"def display_image(X):\n\n\tim = X.reshape(28, 28)\n\ttemp = plt.imshow(im)\n\tplt.show()",
"def show(image,label,pred):\n from matplotlib import pyplot\n import matplotlib as mpl\n fig = pyplot.figure()\n ax = fig.add_subplot(1,1,1)\n imgplot = ax.imshow(image, cmap=mpl.cm.Greys)\n imgplot.set_interpolation('nearest')\n s=\"True Label : \"+str(label)+\" Predicted label : \"+str(pred)\n pyplot.xlabel(s,fontname=\"Arial\", fontsize=20 )\n ax.xaxis.set_ticks_position('top')\n ax.yaxis.set_ticks_position('left')\n pyplot.show()",
"def _init_plot(self) -> None:\n\n # create a grayscale plot\n out = sys.stdout\n sys.stdout = open(\"/dev/null\", \"w\")\n hdu = self.image_generator.image(self.ra, self.dec)\n self.plot = aplpy.FITSFigure(hdu)\n self.plot.show_grayscale()\n self.plot.set_theme(\"publication\")\n sys.stdout = out\n\n # label for the position angle\n pa_string = \"PA = %.1f\" % self.mode_details.position_angle().to_value(u.deg)\n if self.mode_details.automated_position_angle():\n pa_string += \" (auto)\"\n self.draw_label(0.95, -0.05, pa_string, style=\"italic\", weight=\"bold\")\n\n # label for the title\n if self.title:\n self.draw_label(\n 0.5, 1.03, self.title, style=\"italic\", weight=\"bold\", size=\"large\"\n )\n\n # label for the image source\n self.draw_label(\n -0.05,\n -0.05,\n \"%s\" % self.image_generator.source(),\n style=\"italic\",\n weight=\"bold\",\n )\n\n # grid overlay\n self.plot.add_grid()\n self.plot.grid.set_alpha(0.2)\n self.plot.grid.set_color(\"b\")\n\n # indicate the RSS field of view\n self.draw_circle(self.ra, self.dec, 4.0 * u.arcmin, \"g\")\n self.draw_label(\n 0.79,\n 0.79,\n \"RSS\",\n style=\"italic\",\n weight=\"bold\",\n size=\"large\",\n horizontalalignment=\"left\",\n color=(0, 0, 1),\n )\n\n # indicate the Salticam field of view\n self.draw_circle(self.ra, self.dec, 5.0 * u.arcmin, \"g\")\n self.draw_label(\n 0.86,\n 0.86,\n \"SCAM\",\n style=\"italic\",\n weight=\"bold\",\n size=\"large\",\n horizontalalignment=\"left\",\n color=(0, 0, 1),\n )\n\n # labels for north and east direction\n self.draw_label(\n self.ra,\n self.dec + 4.8 * u.arcmin,\n \"N\",\n style=\"italic\",\n weight=\"bold\",\n size=\"large\",\n color=(0, 0.5, 1),\n )\n self.draw_label(\n self.ra + 4.8 * u.arcmin / np.abs(np.cos(self.dec)),\n self.dec,\n \"E\",\n style=\"italic\",\n weight=\"bold\",\n size=\"large\",\n horizontalalignment=\"right\",\n color=(0, 0.5, 1),\n )\n\n # add cross hairs\n self.draw_centered_line(\n 0 * u.deg,\n 8 * u.arcmin,\n self.ra,\n self.dec,\n color=\"g\",\n linewidth=0.5,\n alpha=1.0,\n )\n self.draw_centered_line(\n 90 * u.deg,\n 8 * u.arcmin,\n self.ra,\n self.dec,\n color=\"g\",\n linewidth=0.5,\n alpha=1.0,\n )\n\n # label for the magnitude range and bandpass\n if self.magnitude_range:\n self._show_magnitudes()\n\n # add mode specific content\n if not self.basic_annotations:\n self.mode_details.annotate_finder_chart(self)",
"def imshow(img):\n imadd(img)\n plt.ion()\n plt.show()",
"def show_image(image):\r\n plt.imshow(image, cmap='gray')\r\n plt.show()",
"def plot(self):\n self.fig = plt.figure('black hole')\n self.fig.clf() #clear the graph to avoir superposing data from the same set (can be deactivated if need to superpose)\n self.ax = plt.subplot()\n\n if self.img2 is not None:\n self.ax.imshow(self.img2)\n else:\n print(\"No black hole deformation in the memory, displayed the original image instead.\")\n self.ax.imshow(self.img_debut)\n\n self.fig.canvas.set_window_title('Black hole')\n self.ax.set_title(\"scrool to zoom in or out \\nright click to add an offset in the background \\nleft click to refresh image \\n close the option windows to stop the program\")\n self.fig.canvas.mpl_connect('scroll_event', self.onscroll)\n self.fig.canvas.mpl_connect('button_press_event', self.onclick)\n self.fig.canvas.mpl_connect('axes_leave_event', self.disconnect)\n self.fig.canvas.mpl_connect('axes_enter_event', self.connect)\n\n self.draw()",
"def plot(self):\n\t\tself.plotOfSpect().plot()",
"def show_flow(filename):\n flow = read_flow(filename)\n img = flow_to_image(flow)\n plt.imshow(img)\n plt.show()",
"def draw_image(self):\n \n pixel_array = self.imageprepare(self.image_path)\n newArr = self.reshape_pixel_array(pixel_array)\n plt.imshow(newArr, interpolation='nearest')\n plt.savefig('MNIST_IMAGE.png')#save MNIST image\n plt.show()#Show / plot that image",
"def plot_fr_and_spikes(self, t):\n plt.figure(figsize=(10, 8))\n\n plt.subplot(2, 2, 1)\n self.plot_base_image()\n\n plt.subplot(2, 2, 2)\n self.plot_firing_rates(t, mode='ON')\n plt.title('Retinal Image')\n\n # Spikes\n ax = plt.subplot(2, 2, 3)\n self.plot_spikes(ax, t, mode='ON', moving_average=True)\n\n ax = plt.subplot(2, 2, 4)\n self.plot_spikes(ax, t, mode='OFF', moving_average=True)",
"def plot(self):\n\t\tself.plotOfSpect()",
"def plot_sample(x):\n plt.imshow(x[:,:,0])\n plt.title(\"gasf\")\n plt.colorbar()\n plt.show()\n\n plt.imshow(x[:,:,1])\n plt.title(\"gadf\")\n plt.colorbar()\n plt.show()\n\n plt.imshow(x[:,:,2])\n plt.title(\"mtf\")\n plt.colorbar()\n plt.show()",
"def show_plot(img, title):\n plt.imshow(cv2.cvtColor(img, cv2.COLOR_BGR2RGB))\n plt.title(\"Hand Number: \" + title)\n plt.show()",
"def plot_image(img, label=\"\"): \n if img.shape[0] == 3:\n img = img.transpose(1,2,0)\n fig,ax = plt.subplots(1)\n sns.set_style('white')\n ax.imshow(np.asarray(img))\n if label!=\"\":\n plt.title(number_label[label])\n return fig,ax"
] | [
"0.68390614",
"0.667332",
"0.6661213",
"0.662219",
"0.655755",
"0.655406",
"0.6526125",
"0.6526125",
"0.6526125",
"0.6519338",
"0.64991987",
"0.64458805",
"0.6437348",
"0.643601",
"0.6406592",
"0.63417786",
"0.6327013",
"0.63187444",
"0.6312867",
"0.6310806",
"0.63035756",
"0.62978315",
"0.62460405",
"0.62263805",
"0.62197894",
"0.6213917",
"0.61846524",
"0.6184388",
"0.6179067",
"0.6170574"
] | 0.6686292 | 1 |
Given an oversampled PSF (typically 0.5-1.0 mas spaxels), it calculates the Ensquared Energy of the central spaxel in a new_scale (4, 10, 20 mas). It selects a window of size new_scale and adds up the Intensity of those pixels | def ensquared_one_pix(array, pix_scale, new_scale=40, plot=True):
n = int(new_scale // pix_scale)
minPix, maxPix = (pix + 1 - n) // 2, (pix + 1 + n) // 2
ens = array[minPix:maxPix, minPix:maxPix]
# print(ens.shape)
energy = np.sum(ens)
if plot:
mapp = 'viridis'
f, (ax1, ax2) = plt.subplots(1, 2)
ax1 = plt.subplot(1, 2, 1)
square = Rectangle((minPix-0.5, minPix-0.5), n, n, linestyle='--', fill=None, color='white')
ax1.add_patch(square)
img1 = ax1.imshow(array, cmap=mapp)
ax1.set_title('%.1f mas pixels' % (pix_scale))
img1.set_clim(0, 1)
plt.colorbar(img1, ax=ax1, orientation='horizontal')
ax2 = plt.subplot(1, 2, 2)
img2 = ax2.imshow(ens, cmap=mapp)
ax2.set_title('%d mas window' %new_scale)
        img2.set_clim(0, 1)
plt.colorbar(img2, ax=ax2, orientation='horizontal')
return energy | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _scale_psf(self, input_irf_file, config):\n\n # Find all \"sigma\" values - tells how many PSF components we have in the IRF file\n column_names = [col.name.lower() for col in input_irf_file['POINT SPREAD FUNCTION'].columns]\n sigma_columns = list(filter(lambda s: \"sigma\" in s.lower(), column_names))\n\n # --------------------------\n # Reading the PSF parameters\n self._psf = dict()\n self._psf['Elow'] = input_irf_file['POINT SPREAD FUNCTION'].data['Energ_lo'][0].copy()\n self._psf['Ehigh'] = input_irf_file['POINT SPREAD FUNCTION'].data['Energ_hi'][0].copy()\n self._psf['ThetaLow'] = input_irf_file['POINT SPREAD FUNCTION'].data['Theta_lo'][0].copy()\n self._psf['ThetaHi'] = input_irf_file['POINT SPREAD FUNCTION'].data['Theta_hi'][0].copy()\n\n for i in range(0, len(sigma_columns)):\n sigma_name = 'sigma_{:d}'.format(i + 1)\n self._psf[sigma_name] = input_irf_file['POINT SPREAD FUNCTION'].data[sigma_name][0].transpose().copy()\n\n self._psf['E'] = scipy.sqrt(self._psf['Elow'] * self._psf['Ehigh'])\n self._psf['Theta'] = (self._psf['ThetaLow'] + self._psf['ThetaHi']) / 2.0\n # --------------------------\n\n # Creating the energy-theta mesh grid\n energy, theta = scipy.meshgrid(self._psf['E'], self._psf['Theta'], indexing='ij')\n\n # ---------------------------------\n # Scaling the PSF energy dependence\n\n # Constant error function\n if config['energy_scaling']['err_func_type'] == \"constant\":\n scale_params = config['energy_scaling'][\"constant\"]\n # Constant scaling. Loop over all \"sigma\" values and scale them by the same factor.\n for sigma_column in sigma_columns:\n self._psf[sigma_column + '_new'] = scale_params['scale'] * self._psf[sigma_column]\n\n # Gradients error function\n elif config['energy_scaling']['err_func_type'] == \"gradient\":\n scale_params = config['energy_scaling'][\"gradient\"]\n for sigma_column in sigma_columns:\n self._psf[sigma_column + '_new'] = self._psf[sigma_column] * (\n 1 + scale_params['scale'] * gradient(scipy.log10(energy),\n scipy.log10(scale_params['range_min']),\n scipy.log10(scale_params['range_max']))\n )\n\n # Step error function\n elif config['energy_scaling']['err_func_type'] == \"step\":\n scale_params = config['energy_scaling'][\"step\"]\n break_points = list(zip(scipy.log10(scale_params['transition_pos']),\n scale_params['transition_widths']))\n\n for sigma_column in sigma_columns:\n self._psf[sigma_column + '_new'] = self._psf[sigma_column] * (\n 1 + scale_params['scale'] * step(scipy.log10(energy), break_points)\n )\n\n else:\n raise ValueError(\"Unknown PSF scaling function {:s}\"\n .format(config['energy_scaling']['err_func_type']))\n # ---------------------------------\n\n # ---------------------------------\n # Scaling the PSF angular dependence\n\n # Constant error function\n if config['angular_scaling']['err_func_type'] == \"constant\":\n scale_params = config['angular_scaling'][\"constant\"]\n # Constant scaling. 
Loop over all \"sigma\" values and scale them by the same factor.\n for sigma_column in sigma_columns:\n # input_irf_file['POINT SPREAD FUNCTION'].data[sigma_column] *= scale_params['scale']\n self._psf[sigma_column + '_new'] = scale_params['scale'] * self._psf[sigma_column + '_new']\n\n # Gradients error function\n elif config['angular_scaling']['err_func_type'] == \"gradient\":\n scale_params = config['angular_scaling'][\"gradient\"]\n for sigma_column in sigma_columns:\n self._psf[sigma_column + '_new'] = self._psf[sigma_column + '_new'] * (\n 1 + scale_params['scale'] * gradient(theta,\n scale_params['range_min'],\n scale_params['range_max'])\n )\n\n # Step error function\n elif config['angular_scaling']['err_func_type'] == \"step\":\n scale_params = config['angular_scaling'][\"step\"]\n break_points = list(zip(scale_params['transition_pos'],\n scale_params['transition_widths']))\n\n for sigma_column in sigma_columns:\n self._psf[sigma_column + '_new'] = self._psf[sigma_column + '_new'] * (\n 1 + scale_params['scale'] * step(theta, break_points)\n )\n\n else:\n raise ValueError(\"Unknown PSF scaling function {:s}\"\n .format(config['angular_scaling']['err_func_type']))\n # ---------------------------------\n\n # Recording the scaled PSF\n for i in range(0, len(sigma_columns)):\n sigma_name = 'sigma_{:d}'.format(i + 1)\n\n input_irf_file['POINT SPREAD FUNCTION'].data[sigma_name][0] = self._psf[sigma_name + '_new'].transpose()",
"def createIntegratedPsf(self):\n\n (wavelengths, weights) = self.filter\n for i in range(len(wavelengths)):\n\n wavelength = wavelengths[i]\n weight = weights[i]\n self.convertToOpd(wavelength) # creates self.opd\n opd = self.embedOpd()\n zf = numpy.fft.fft2(opd)\n del opd\n # Compute the amplitude squared.\n # (psf is not really the point spread function yet)\n psf = np.conjugate(zf)\n # psf will now be the point spread function, but still complex\n np.multiply(psf, zf, psf)\n del zf\n # normalize the PSF, and convert to single precision\n psf = psf.real / psf.size\n psf = psf.astype(np.float32)\n\n self.center(psf)\n\n # This describes the image scale if no resampling is done.\n cdelt_before_resampling = (wavelength * MICRONStoMETERS) / \\\n (self.D * self.oversample) * RADIANStoDEGREES\n if self.pixel_size is None:\n # we won't resample the output image\n self.cdelt = cdelt_before_resampling\n # Extract a subset.\n if self.output_size < self.npix:\n o_npix = self.output_size\n n0 = (self.npix - o_npix) // 2\n self.integrated_psf += \\\n (psf[n0:n0 + o_npix, n0:n0 + o_npix] * weight)\n else:\n self.integrated_psf += (psf * weight)\n else:\n # we'll resample to this image scale\n self.cdelt = self.pixel_size / self.oversample * ARCSECtoDEGREES\n # These three parameters are only used by mapPsf and for\n # normalizing the weight after resampling.\n self.rescale = self.cdelt / cdelt_before_resampling\n self.input_center = (self.npix + 1) // 2\n self.output_center = (self.output_size + 1) // 2\n sub_psf = np.zeros((self.output_size, self.output_size),\n dtype=np.float32)\n # Do the resampling, writing the output to sub_psf.\n ndimage.geometric_transform(psf, self.mapPsf,\n output_shape=(self.output_size, self.output_size),\n output=sub_psf, prefilter=True)\n weight = weight * self.rescale**2\n self.integrated_psf += (sub_psf * weight)\n del sub_psf\n\n if self.verbose:\n print(\"PSF for wavelength %g has been computed\" % wavelength)",
"def new_scaled_energy(run, smoother=\"pol2\"):\n get_from_ccdb(run)\n endpoint_calib = ROOT.pstags().endpoint_calib\n endpoint_energy = ROOT.pstags().endpoint_energy\n fout = open(f\"new_scaled_energy.{run}\", \"w\")\n Eps_tagm = ROOT.gROOT.FindObject(\"Epair_Etagm_fit\")\n if not Eps_tagm:\n Eps_tagm = ROOT.gROOT.FindObject(\"Epair_Etagm\")\n if not Eps_tagm:\n Eps_tagm = plot_Etagm_Epair(run)[0]\n Eps_tagm.Fit(smoother)\n for func in Eps_tagm.GetListOfFunctions():\n ntagm = Eps_tagm.GetNbinsX()\n for i in range(ntagm):\n Elow = Eps_tagm.GetXaxis().GetBinLowEdge(102-i)\n Ehigh = Eps_tagm.GetXaxis().GetBinUpEdge(102-i)\n f = [(endpoint_calib - endpoint_energy + func.Eval(E)) /\n endpoint_calib for E in (Elow, Ehigh)]\n fout.write(f\"{i+1} {f[0]} {f[1]}\\n\")\n break",
"def _scale_edisp(self, input_irf_file, config):\n\n # Reading the Energy parameters\n self._edisp = dict()\n self._edisp['Mlow'] = input_irf_file['ENERGY DISPERSION'].data['MIGRA_LO'][0].copy()\n self._edisp['Mhigh'] = input_irf_file['ENERGY DISPERSION'].data['MIGRA_HI'][0].copy()\n self._edisp['M'] = (self._edisp['Mlow'] + self._edisp['Mhigh']) / 2.0\n\n # -------------------------------------------\n # Scaling the Energy dependence\n\n # Constant error function\n if config['energy_scaling']['err_func_type'] == \"constant\":\n scaling_params = config['energy_scaling']['constant']['scale']\n self._edisp['Mhigh_new'] = self._edisp['Mhigh'] * (scaling_params)\n self._edisp['Mlow_new'] = self._edisp['Mlow'] * (scaling_params)\n\n # Gradients error function\n elif config['energy_scaling']['err_func_type'] == \"gradient\":\n scaling_params = config['energy_scaling']['gradient']\n self._edisp['Mhigh_new'] = self._edisp['Mhigh'] * (\n 1. + scaling_params['scale'] * gradient(scipy.log10(self._edisp['Mhigh']),\n scipy.log10(scaling_params['range_min']),\n scipy.log10(scaling_params['range_max'])) \n )\n self._edisp['Mlow_new'] = self._edisp['Mlow'] * (\n 1. + scaling_params['scale'] * gradient(scipy.log10(self._edisp['Mlow']),\n scipy.log10(scaling_params['range_min']),\n scipy.log10(scaling_params['range_max'])) \n )\n # Step error function\n elif config['energy_scaling']['err_func_type'] == \"step\":\n scaling_params = config['energy_scaling']['step']\n break_points = list(zip(scipy.log10(scaling_params['transition_pos']),\n scaling_params['transition_widths']))\n self._edisp['Mhigh_new'] = self._edisp['Mhigh']* (\n 1 + scaling_params['scale'] * step(scipy.log10(self._edisp['Mhigh']), break_points)\n )\n self._edisp['Mlow_new'] = self._edisp['Mlow']* (\n 1 + scaling_params['scale'] * step(scipy.log10(self._edisp['Mlow']), break_points)\n )\n else:\n raise ValueError(\"Edisp energy scaling: unknown scaling function type '{:s}'\"\n .format(config['energy_scaling']['err_func_type'])\n )\n # ------------------------------------------\n # Recording the scaled variables\n input_irf_file['ENERGY DISPERSION'].data['MIGRA_HI'][0] = self._edisp['Mhigh_new']\n input_irf_file['ENERGY DISPERSION'].data['MIGRA_LO'][0] = self._edisp['Mlow_new']\n self._edisp['M_new'] = (self._edisp['Mlow_new'] + self._edisp['Mhigh_new']) / 2.0",
"def generate_fgs_fsw_coefficients(siaf=None, verbose=False, scale=0.06738281367):\n if siaf is None:\n siaf = pysiaf.Siaf('fgs')\n\n instrument = 'FGS'\n\n pre_delivery_dir = os.path.join(JWST_DELIVERY_DATA_ROOT, instrument)\n if not os.path.isdir(pre_delivery_dir):\n os.makedirs(pre_delivery_dir)\n\n for aperture_name in ['FGS1_FULL_OSS', 'FGS2_FULL_OSS']:\n\n aperture = siaf[aperture_name]\n\n # center_offset_x = 1023.5\n # center_offset_y = 1023.5\n center_offset_x = aperture.XSciRef - 1.\n center_offset_y = aperture.YSciRef - 1.\n\n if verbose:\n print('External scale {}'.format(scale))\n print(aperture.get_polynomial_scales())\n\n # get SIAF coefficients\n coefficients = aperture.get_polynomial_coefficients()\n\n ar = coefficients['Sci2IdlX']\n br = coefficients['Sci2IdlY']\n cr = coefficients['Idl2SciX']\n dr = coefficients['Idl2SciY']\n\n a_fsw, b_fsw, c_fsw, d_fsw = polynomial.rescale(ar, br, cr, dr, 1. / scale)\n factor = -1.\n\n if 'FGS1' in aperture_name:\n b_fsw *= -1\n c_fsw = polynomial.flip_y(c_fsw)\n d_fsw = polynomial.flip_y(d_fsw)\n\n a_fsw = polynomial.shift_coefficients(a_fsw, factor * center_offset_x,\n factor * center_offset_y)\n b_fsw = polynomial.shift_coefficients(b_fsw, factor * center_offset_x,\n factor * center_offset_y)\n c_fsw = polynomial.shift_coefficients(c_fsw, factor * center_offset_x,\n factor * center_offset_y)\n d_fsw = polynomial.shift_coefficients(d_fsw, factor * center_offset_x,\n factor * center_offset_y)\n\n a_fsw[0] += center_offset_x\n b_fsw[0] += center_offset_y\n c_fsw[0] += center_offset_x\n d_fsw[0] += center_offset_y\n\n # print FSW coefficients to screen\n fsw_coefficients = Table((c_fsw, d_fsw, a_fsw, b_fsw), names=(\n 'IDEALPTOREALPXCOE', 'IDEALPTOREALPYCOE', 'REALPTOIDEALPXCOE', 'REALPTOIDEALPYCOE'))\n if verbose:\n fsw_coefficients.pprint()\n\n table = Table(names=('parameter_name', 'value'), dtype=(object, float))\n table.add_row(['XOFFSET', center_offset_x])\n table.add_row(['YOFFSET', center_offset_y])\n table.add_row(['PLATESCALE', scale])\n for colname in fsw_coefficients.colnames:\n for i in range(len(fsw_coefficients[colname])):\n table.add_row(['{}_{}'.format(colname, i), fsw_coefficients[colname][i]])\n table['parameter_name'] = np.array(table['parameter_name']).astype(str)\n\n # write to file\n fsw_distortion_file = os.path.join(pre_delivery_dir, 'ifgs{}_distortion_tbl.txt'.format(aperture_name[3]))\n comments = []\n comments.append('FGS distortion coefficients for FSW')\n comments.append('')\n comments.append('Derived from SIAF distortion coefficients.')\n comments.append('')\n comments.append('Generated {} {}'.format(timestamp.isot, timestamp.scale))\n comments.append('by {}'.format(username))\n comments.append('')\n table.meta['comments'] = comments\n formats={'parameter_name': '%-20s', 'value': '%+2.6e'}\n table.write(fsw_distortion_file, format='ascii.fixed_width',\n delimiter=',', delimiter_pad=' ', bookend=False,\n overwrite=True, formats=formats)",
"def scaling():\n \n for i in range(cfg.nfea):\n dm = 0\n var = 0\n for j in range(cfg.ntrain):\n dm += cfg.a[j,i]\n dm = dm/cfg.ntrain\n \n for j in range(cfg.ntrain):\n var += (cfg.a[j,i]-dm)**2\n\n var = var/cfg.ntrain\n var = np.sqrt(var)\n \n if var >= 10**(-5):\n cfg.clin[i] = 1.0/var \n cfg.dlin[i] = -dm/var \n \n else: \n if np.abs(dm)<=1.0:\n cfg.clin[i] = 1.0\n cfg.dlin[i] = 0.0 \n else: \n cfg.clin[i] = 1.0/dm\n cfg.dlin[i] = 0.0 \n \n for j in range(cfg.ntrain):\n cfg.a_scaled[j,i] = cfg.clin[i]*cfg.a[j,i] + cfg.dlin[i]\n \n return",
"def get_psf_scale_map(self):\n\n scale_map = dict()\n\n scale_map['E_edges'] = scipy.concatenate((self._psf['Elow'], [self._psf['Ehigh'][-1]]))\n scale_map['Theta_edges'] = scipy.concatenate((self._psf['ThetaLow'], [self._psf['ThetaHi'][-1]]))\n\n # Find all \"sigma\" values - tells how many PSF components we have in the IRF file\n column_names = self._psf.keys()\n sigma_columns = list(filter(lambda s: (\"sigma\" in s.lower()) and not (\"new\" in s.lower()),\n column_names))\n\n for sigma_column in sigma_columns:\n # Avoiding division by zero\n can_divide = self._psf[sigma_column] > 0\n\n scale_map[sigma_column] = scipy.zeros_like(self._psf[sigma_column])\n scale_map[sigma_column][can_divide] = self._psf[sigma_column + '_new'][can_divide] / self._psf[sigma_column][can_divide]\n\n wh_nan = scipy.where(scipy.isnan(scale_map[sigma_column]))\n scale_map[sigma_column][wh_nan] = 0\n scale_map[sigma_column] -= 1\n\n return scale_map",
"def scale(self, sf):\n self.scale(sf, sf)",
"def reScaleLandsat(self,img):\n \n\t\tthermalBand = ee.List(['thermal'])\n\t\tthermal = ee.Image(img).select(thermalBand).multiply(10)\n \n\t\totherBands = ee.Image(img).bandNames().removeAll(thermalBand)\n\t\tscaled = ee.Image(img).select(otherBands).divide(0.0001)\n \n\t\timage = ee.Image(scaled.addBands(thermal)).int16()\n \n\t\treturn image.copyProperties(img)",
"def get_scale():\r\n\r\n \r\n return 0.5",
"def _eta_sfr_scaling(self,x,q):\n i = self.enum[q]\n A = self.scaling_params['A'][i]\n b = self.scaling_params['b'][i]\n return A*x**b",
"def finalSES(mat, C):\n\tscaleToMin, medianScore, maxScore = SES(mat, C)\n\tprint(f\"Background Score: {scaleToMin} \\tMax Score: {maxScore} \\tMedian Score: {medianScore}\")\n\treturn scaling(mat, medianScore)",
"def scale_psf_fluxes(frame, psf):\n scale_factor = (max_flux(frame) / max_flux(psf))\n return psf.profile * scale_factor, psf.fluxes * scale_factor",
"def s_multiplier(self):\n return 4 * np.pi * (self.bins[:, 1]/2)**2",
"def scale(self):",
"def gauss_seeing(npix = None,fwhm=None,e1=None,e2=None,scale=scale):\n fwhm = fwhm/scale\n M20 = 2.*(fwhm/2.35482)**2\n row,col = np.mgrid[-npix/2:npix/2,-npix/2:npix/2]\n rowc = row.mean()\n colc = col.mean()\n Mcc = 0.5*M20*(1+e1)\n Mrc = 0.5*e2*M20\n Mrr = 0.5*M20*(1-e1)\n rho = Mrc/np.sqrt(Mcc*Mrr)\n img = np.exp(-0.5/(1-rho**2)*(row**2/Mrr + col**2/Mcc - 2*rho*row*col/np.sqrt(Mrr*Mcc)))\n res = img/img.sum()\n return res",
"def run(self):\n #calculate platescale of first input image\n try:\n det = np.linalg.det(wcs.WCS(self.datain[0].header).wcs.cd)\n pscale = np.sqrt(np.abs(det))*3600.\n except:\n try:\n det = np.linalg.det(wcs.WCS(self.datain[0].header).wcs.pc)\n pscale = np.sqrt(np.abs(det))*3600.\n except:\n pscale = self.datain[0].header['PIXSCAL']\n #filtering out images which are too far away from the others\n #passing images added to a list of (image, WCS) tuples\n '''\n image_centers = []\n for f in self.datain:\n image_centers.append((f.header['CRVAL1'], f.header['CRVAL2']))\n filtered_datain = []\n dist_list = [[[0]*(len(image_centers)-1)]*len(image_centers)]\n for i in range(len(image_centers)):\n for j in range(len(image_centers)-1):\n dist_list[i][j+1] = np.sqrt((image_)**2+()**2)\n '''\n #calculations necessary for updating wcs information\n px = []\n py = []\n \n #in order to avoid NaN interactions, creating weight map\n weights=[]\n for f in self.datain:\n weights.append((np.where(np.isnan(f.image) == True, 0, 1)))\n \n for f in self.datain:\n px.extend(wcs.WCS(f.header).calc_footprint()[:,0])\n py.extend(wcs.WCS(f.header).calc_footprint()[:,1])\n x0 = (max(px)+min(px))/2.\n y0 = (max(py)+min(py))/2.\n sx = (max(px)-min(px))*np.cos(y0/180*np.pi) # arcsec\n sy = (max(py)-min(py)) # arcsec\n size = (sx*3600+self.getarg('pad')*2, sy*3600+self.getarg('pad')*2)\n xpix = size[0]//pscale\n ypix = size[1]//pscale\n cdelt = [pscale/3600.]*2\n \n #create self.dataout and give it a copy of an input's header\n self.dataout = DataFits(config = self.config)\n self.dataout.header = self.datain[0].header.copy()\n \n #update header wcs information\n self.log.info('Creating new WCS header')\n \n self.dataout.header['CRPIX1'] = xpix/2\n self.dataout.header['CRPIX2'] = ypix/2\n self.dataout.header['CRVAL1'] = x0\n self.dataout.header['CRVAL2'] = y0\n self.dataout.header['CD1_1'] = -cdelt[0]\n self.dataout.header['CD1_2'] = self.dataout.header['CD2_1'] = 0.\n self.dataout.header['CD2_2'] = cdelt[1]\n self.dataout.header['NAXIS1'] = int(xpix)\n self.dataout.header['NAXIS2'] = int(ypix)\n self.dataout.header['CTYPE1'] = 'RA---TAN-SIP'\n self.dataout.header['CTYPE2'] = 'DEC--TAN-SIP'\n self.dataout.header['RADESYS'] = 'ICRS'\n self.dataout.header['EQUINOX'] = 2000\n self.dataout.header['LATPOLE'] = self.datain[0].header['CRVAL2']\n self.dataout.header['LONPOLE'] = 180\n self.dataout.header['PIXASEC'] = pscale\n \n theta_rad = np.deg2rad(self.getarg('outangle'))\n rot_matrix = np.array([[np.cos(theta_rad), -np.sin(theta_rad)], \n [np.sin(theta_rad), np.cos(theta_rad)]])\n rot_cd = np.dot(rot_matrix, np.array([[self.dataout.header['CD1_1'], 0.],[0., self.dataout.header['CD2_2']]]))\n for i in [0,1]:\n for j in [0,1]:\n self.dataout.header['CD{0:d}_{1:d}'.format(i+1, j+1)] = rot_cd[i,j]\n \n #check drizzle arguments\n if self.getarg('kernel') == 'smoothing':\n kernel = 'lanczos3'\n elif self.getarg('kernel') in ['square', 'point', 'gaussian', 'tophat']:\n kernel = self.getarg('kernel')\n else:\n self.log.error('Kernel name not recognized, using default')\n kernel = 'square'\n if self.getarg('drizzleweights') == 'uniform':\n driz_wt = ''\n elif self.getarg('drizzleweights') in ['exptime', 'expsq']:\n driz_wt = self.getarg('drizzleweights')\n else:\n self.log.error('Drizzle weighting not recognized, using default')\n driz_wt = ''\n \n #create drizzle object and add input images\n fullwcs = wcs.WCS(self.dataout.header)\n self.log.info('Starting drizzle')\n driz = drz.Drizzle(outwcs = fullwcs, 
pixfrac=self.getarg('pixfrac'), \\\n kernel=kernel, fillval='10000', wt_scl=driz_wt)\n for i,f in enumerate(self.datain):\n self.log.info('Adding %s to drizzle stack' % f.filename)\n driz.add_image(f.imgdata[0], wcs.WCS(f.header), inwht=weights[i])\n \n try:\n fillval=float(self.getarg('fillval'))\n except:\n fillval=np.nan\n self.log.error('Fillvalue not recognized or missing, using default')\n \n #creates output fits file from drizzle output\n self.dataout.imageset(np.where(driz.outsci == 10000, fillval, driz.outsci))\n self.dataout.imageset(driz.outwht,'OutWeight', self.dataout.header)\n self.dataout.filename = self.datain[0].filename\n\n #add history\n self.dataout.setheadval('HISTORY','Coadd: %d files combined with %s kernel, pixfrac %f at %f times resolution' \\\n % (len(self.datain), kernel, self.getarg('pixfrac'), self.getarg('resolution')))",
"def get_scale_freq():\n return sf / 2 / (num_freq-1)",
"def scaleLandsat(self,img):\n\t\tthermal = img.select(ee.List(['thermal'])).multiply(0.1)\n\t\tscaled = ee.Image(img).select(self.env.divideBands).multiply(ee.Number(0.0001))\n\t\t\n\t\treturn img.select([]).addBands(scaled).addBands(thermal)",
"def fit_ePSF(self, sci, center=None, origin=[0,0], ivar=1, N=7, \n filter='F140W', tol=1.e-4, guess=None, get_extended=False):\n from scipy.optimize import minimize\n \n sh = sci.shape\n if center is None:\n y0, x0 = np.array(sh)/2.-1\n else:\n x0, y0 = center\n \n xd = x0+origin[1]\n yd = y0+origin[0]\n \n xc, yc = int(x0), int(y0)\n \n psf_xy = self.get_at_position(x=xd, y=yd, filter=filter)\n \n yp, xp = np.indices(sh)\n \n if guess is None:\n if np.isscalar(ivar):\n ix = np.argmax(sci.flatten())\n else:\n ix = np.argmax((sci*(ivar > 0)).flatten())\n \n xguess = xp.flatten()[ix]\n yguess = yp.flatten()[ix]\n else:\n xguess, yguess = guess\n \n guess = [sci[yc-N:yc+N, xc-N:xc+N].sum()/psf_xy.sum(), xguess, yguess, 0, 0, 0, 0]\n sly = slice(yc-N, yc+N); slx = slice(xc-N, xc+N)\n sly = slice(yguess-N, yguess+N); slx = slice(xguess-N, xguess+N)\n \n if get_extended:\n extended_data = self.extended_epsf[filter]\n else:\n extended_data = None\n \n args = (self, psf_xy, sci[sly, slx], ivar[sly, slx], xp[sly, slx], yp[sly, slx], extended_data, 'chi2')\n \n out = minimize(self.objective_epsf, guess, args=args, method='Powell', tol=tol)\n \n psf_params = out.x\n psf_params[1] -= x0\n psf_params[2] -= y0\n \n return psf_params\n \n # dx = xp-psf_params[1]\n # dy = yp-psf_params[2]\n # output_psf = self.eval_ePSF(psf_xy, dx, dy)*psf_params[0]\n # \n # return output_psf, psf_params",
"def shear_est(self, gal_image, psf_image, noise=None, F=False):\n # gal_ps = self.pow_spec(gal_image)\n gal_ps = gal_image\n # gal_ps = hk_tool_box.smooth(gal_ps,self.size)\n if noise is not None:\n nbg = self.pow_spec(noise)\n self.flux2 = numpy.sqrt(gal_ps[int(self.size/2), int(self.size/2)]/numpy.sum(self.rim*gal_ps)*numpy.sum(self.rim))\n # nbg = hk_tool_box.smooth(nbg,self.size)\n # rim = self.border(2, size)\n # n = numpy.sum(rim)\n # gal_pn = numpy.sum(gal_ps*rim)/n # the Possion noise of galaxy image\n # nbg_pn = numpy.sum(nbg*rim)/n # the Possion noise of background noise image\n gal_ps = gal_ps - nbg# + nbg_pn - gal_pn\n\n if F:\n psf_ps = psf_image\n else:\n psf_ps = self.pow_spec(psf_image)\n # self.get_radius_new(psf_ps, 2)\n wb, beta = self.wbeta(self.hlr)\n maxi = numpy.max(psf_ps)\n idx = psf_ps < maxi / 100000.\n wb[idx] = 0\n psf_ps[idx] = 1.\n tk = wb/psf_ps * gal_ps\n\n # ky, kx = self.ky, self.kx\n # #\n # kx2 = kx*kx\n # ky2 = ky*ky\n # kxy = kx*ky\n # k2 = kx2 + ky2\n # k4 = k2*k2\n # mn1 = (-0.5)*(kx2 - ky2) # (-0.5)*(kx**2 - ky**2)\n # mn2 = -kxy # -kx*ky\n # mn3 = k2 - 0.5*beta**2*k4 # kx**2 + ky**2 - 0.5*beta**2*(kx**2 + ky**2)**2\n # mn4 = k4 - 8*kx2*ky2 # kx**4 - 6*kx**2*ky**2 + ky**4\n # mn5 = kxy*(kx2 - ky2) # kx**3*ky - kx*ky**3\n\n # mn1 = self.mn1\n # mn2 = self.mn2\n mn3 = self.k2 - 0.5*beta**2*self.k4\n # mn4 = self.mn4\n # mn5 = self.mn5\n\n mg1 = numpy.sum(self.mn1 * tk)*self.alpha\n mg2 = numpy.sum(self.mn2 * tk)*self.alpha\n mn = numpy.sum(mn3 * tk)*self.alpha\n mu = numpy.sum(self.mn4 * tk)*(-0.5*beta**2)*self.alpha\n mv = numpy.sum(self.mn5 * tk)*(-2.*beta**2)*self.alpha\n\n return mg1, mg2, mn, mu, mv",
"def ellipse_sbprofile(ellipsefit, minerr=0.0, snrmin=1.0, sma_not_radius=False,\n cut_on_cog=False, sdss=False, linear=False):\n sbprofile = dict()\n bands = ellipsefit['bands']\n if 'refpixscale' in ellipsefit.keys():\n pixscale = ellipsefit['refpixscale']\n else:\n pixscale = ellipsefit['pixscale']\n eps = ellipsefit['eps_moment']\n if 'redshift' in ellipsefit.keys():\n sbprofile['redshift'] = ellipsefit['redshift'] \n \n for filt in bands:\n psfkey = 'psfsize_{}'.format(filt.lower())\n if psfkey in ellipsefit.keys():\n sbprofile[psfkey] = ellipsefit[psfkey]\n\n sbprofile['minerr'] = minerr\n sbprofile['smaunit'] = 'pixels'\n sbprofile['radiusunit'] = 'arcsec'\n\n # semi-major axis and circularized radius\n #sbprofile['sma'] = ellipsefit[bands[0]].sma * pixscale # [arcsec]\n\n for filt in bands:\n #area = ellipsefit[filt].sarea[indx] * pixscale**2\n\n sma = np.atleast_1d(ellipsefit['sma_{}'.format(filt.lower())]) # semi-major axis [pixels]\n sb = np.atleast_1d(ellipsefit['intens_{}'.format(filt.lower())]) # [nanomaggies/arcsec2]\n sberr = np.atleast_1d(np.sqrt(ellipsefit['intens_err_{}'.format(filt.lower())]**2 + (0.4 * np.log(10) * sb * minerr)**2))\n \n if sma_not_radius:\n radius = sma * pixscale # [arcsec]\n else:\n radius = sma * np.sqrt(1 - eps) * pixscale # circularized radius [arcsec]\n\n with warnings.catch_warnings():\n warnings.simplefilter('ignore')\n if linear:\n keep = np.isfinite(sb)\n else:\n keep = np.isfinite(sb) * ((sb / sberr) > snrmin)\n #if filt == 'FUV':\n # pdb.set_trace()\n \n if cut_on_cog:\n keep *= (ellipsefit['sma_{}'.format(filt.lower())] * pixscale) <= np.max(ellipsefit['cog_sma_{}'.format(filt.lower())])\n keep = np.where(keep)[0]\n \n sbprofile['keep_{}'.format(filt.lower())] = keep\n\n if len(keep) == 0 or sma[0] == -1:\n sbprofile['sma_{}'.format(filt.lower())] = np.array([-1.0]).astype('f4') # [pixels]\n sbprofile['radius_{}'.format(filt.lower())] = np.array([-1.0]).astype('f4') # [arcsec]\n sbprofile['mu_{}'.format(filt.lower())] = np.array([-1.0]).astype('f4') # [nanomaggies/arcsec2]\n sbprofile['muerr_{}'.format(filt.lower())] = np.array([-1.0]).astype('f4') # [nanomaggies/arcsec2]\n else:\n sbprofile['sma_{}'.format(filt.lower())] = sma[keep] # [pixels]\n sbprofile['radius_{}'.format(filt.lower())] = radius[keep] # [arcsec]\n if linear:\n sbprofile['mu_{}'.format(filt.lower())] = sb[keep] # [nanomaggies/arcsec2]\n sbprofile['muerr_{}'.format(filt.lower())] = sberr[keep] # [nanomaggies/arcsec2]\n continue\n else:\n sbprofile['mu_{}'.format(filt.lower())] = 22.5 - 2.5 * np.log10(sb[keep]) # [mag/arcsec2]\n sbprofile['muerr_{}'.format(filt.lower())] = 2.5 * sberr[keep] / sb[keep] / np.log(10) # [mag/arcsec2]\n\n #sbprofile[filt] = 22.5 - 2.5 * np.log10(ellipsefit[filt].intens)\n #sbprofile['mu_{}_err'.format(filt.lower())] = 2.5 * ellipsefit[filt].int_err / \\\n # ellipsefit[filt].intens / np.log(10)\n #sbprofile['mu_{}_err'.format(filt.lower())] = np.sqrt(sbprofile['mu_{}_err'.format(filt.lower())]**2 + minerr**2)\n\n # Just for the plot use a minimum uncertainty\n #sbprofile['{}_err'.format(filt.lower())][sbprofile['{}_err'.format(filt.lower())] < minerr] = minerr\n\n if 'g' in bands and 'r' in bands and 'z' in bands:\n radius_gr, indx_g, indx_r = np.intersect1d(sbprofile['radius_g'], sbprofile['radius_r'], return_indices=True)\n sbprofile['gr'] = sbprofile['mu_g'][indx_g] - sbprofile['mu_r'][indx_r]\n sbprofile['gr_err'] = np.sqrt(sbprofile['muerr_g'][indx_g]**2 + sbprofile['muerr_r'][indx_r]**2)\n sbprofile['radius_gr'] = radius_gr\n\n radius_rz, 
indx_r, indx_z = np.intersect1d(sbprofile['radius_r'], sbprofile['radius_z'], return_indices=True)\n sbprofile['rz'] = sbprofile['mu_r'][indx_r] - sbprofile['mu_z'][indx_z]\n sbprofile['rz_err'] = np.sqrt(sbprofile['muerr_r'][indx_r]**2 + sbprofile['muerr_z'][indx_z]**2)\n sbprofile['radius_rz'] = radius_rz\n \n # SDSS\n if sdss and 'g' in bands and 'r' in bands and 'i' in bands:\n radius_gr, indx_g, indx_r = np.intersect1d(sbprofile['radius_g'], sbprofile['radius_r'], return_indices=True)\n sbprofile['gr'] = sbprofile['mu_g'][indx_g] - sbprofile['mu_r'][indx_r]\n sbprofile['gr_err'] = np.sqrt(sbprofile['muerr_g'][indx_g]**2 + sbprofile['muerr_r'][indx_r]**2)\n sbprofile['radius_gr'] = radius_gr\n\n radius_ri, indx_r, indx_i = np.intersect1d(sbprofile['radius_r'], sbprofile['radius_i'], return_indices=True)\n sbprofile['ri'] = sbprofile['mu_r'][indx_r] - sbprofile['mu_i'][indx_i]\n sbprofile['ri_err'] = np.sqrt(sbprofile['muerr_r'][indx_r]**2 + sbprofile['muerr_i'][indx_i]**2)\n sbprofile['radius_ri'] = radius_ri\n \n # Just for the plot use a minimum uncertainty\n #sbprofile['gr_err'][sbprofile['gr_err'] < minerr] = minerr\n #sbprofile['rz_err'][sbprofile['rz_err'] < minerr] = minerr\n\n # # Add the effective wavelength of each bandpass, although this needs to take\n # # into account the DECaLS vs BASS/MzLS filter curves.\n # from speclite import filters\n # filt = filters.load_filters('decam2014-g', 'decam2014-r', 'decam2014-z', 'wise2010-W1', 'wise2010-W2')\n # for ii, band in enumerate(('g', 'r', 'z', 'W1', 'W2')):\n # sbprofile.update({'{}_wave_eff'.format(band): filt.effective_wavelengths[ii].value})\n\n return sbprofile",
"def get_edisp_scale_map(self):\n \n scale_map = dict()\n\n scale_map['M_edges'] = scipy.concatenate((self._edisp['Mlow'], [self._edisp['Mhigh'][-1]]))\n scale_map['M_edges_new'] = scipy.concatenate((self._edisp['Mlow_new'], [self._edisp['Mhigh_new'][-1]]))\n\n #can_divide = self._edisp['M'] > 0\n #scale_map['Map'] = scipy.zeros_like(scale_map['M_edges'])\n #scale_map['Map'][can_divide] = self._edisp['M_new'][can_divide]/self._edisp['M'][can_divide]\n #wh_nan = scipy.where(scipy.isnan(scale_map['Map']))\n #scale_map['Map'][wh_nan] = 0\n #scale_map['Map'] -= 1 \n\n return scale_map",
"def get_scale_parameter(self):\n\n shape_in_gamma_func = float(1 + (1 / self._shape_parameter))\n gamma_func = special.gamma(shape_in_gamma_func)\n self._scale_parameter = self._mean_fire_recurrence / gamma_func",
"def scale(original_train, new_train):\n # find magnitude original training data\n o_mag = np.linalg.norm(np.stack(original_train[:,1]))\n # find magnitude new data\n n_mag = np.linalg.norm(np.stack(new_train[:,1]))\n # scale new data\n scale = o_mag / n_mag\n return scale",
"def analysis_function_ensquared(system, wavelength_idx, surface, config, px, py, box_size):\n\n det_pix = 15e-3 # Size of the detector pixel [mm]\n\n # Set Current Configuration\n system.MCE.SetCurrentConfiguration(config)\n\n # First of all, we need to find the Surface Number for the IMAGE SLICER \"Image Plane\"\n N_surfaces = system.LDE.NumberOfSurfaces\n surface_names = {} # A dictionary of surface number -> surface comment\n for k in np.arange(1, N_surfaces):\n surface_names[k] = system.LDE.GetSurfaceAt(k).Comment\n # find the Slicer surface number\n try:\n slicer_num = list(surface_names.keys())[list(surface_names.values()).index('Image Plane')]\n except ValueError:\n slicer_num = list(surface_names.keys())[list(surface_names.values()).index('IFU SRM FP')]\n slicer_surface = slicer_num\n # slicer = system.LDE.GetSurfaceAt(slicer_num)\n\n # Get the Field Points for that configuration\n sysField = system.SystemData.Fields\n N_fields = sysField.NumberOfFields\n N_waves = len(wavelength_idx)\n\n X_MAX = np.max([np.abs(sysField.GetField(i + 1).X) for i in range(N_fields)])\n Y_MAX = np.max([np.abs(sysField.GetField(i + 1).Y) for i in range(N_fields)])\n\n # Use the Field Point at the centre of the Slice\n fx, fy = sysField.GetField(2).X, sysField.GetField(2).Y\n hx, hy = fx / X_MAX, fy / Y_MAX # Normalized field coordinates (hx, hy)\n obj_xy = np.array([fx, fy])\n\n N_pupil = px.shape[0] # Number of rays in the Pupil for a given field point and wavelength\n N_rays = N_waves * N_pupil\n\n EE = np.empty(N_waves)\n sli_foc_xy = np.empty((N_waves, 2))\n det_foc_xy = np.empty((N_waves, 2))\n\n slicer_xy = np.empty((N_waves, N_pupil, 2))\n slicer_xy[:] = np.nan\n detector_xy = np.empty((N_waves, N_pupil, 2))\n detector_xy[:] = np.nan\n\n # (1) Run the raytrace up to the IMAGE SLICER\n raytrace = system.Tools.OpenBatchRayTrace()\n # remember to specify the surface to which you are tracing!\n rays_slicer = raytrace.CreateNormUnpol(N_rays, constants.RaysType_Real, slicer_surface)\n\n # Loop over all wavelengths\n for i_wave, wave_idx in enumerate(wavelength_idx):\n\n for (p_x, p_y) in zip(px, py): # Add the ray to the RayTrace\n rays_slicer.AddRay(wave_idx, hx, hy, p_x, p_y, constants.OPDMode_None)\n\n CastTo(raytrace, 'ISystemTool').RunAndWaitForCompletion()\n rays_slicer.StartReadingResults()\n checksum_slicer = 0\n for k in range(N_rays): # Get Raytrace results at the Image Slicer\n i_wave = k // N_pupil\n j_pupil = k % N_pupil\n # print(i_wave, j_pupil)\n output = rays_slicer.ReadNextResult()\n if output[2] == 0 and output[3] == 0:\n slicer_xy[i_wave, j_pupil, 0] = output[4]\n slicer_xy[i_wave, j_pupil, 1] = output[5]\n checksum_slicer += 1\n # this might have to change. 
We assume no vignetting should occur before the slicer\n # but for the MC this might happen\n if output[2] == 0 and output[3] != 0:\n vignetting_code = output[3]\n vignetting_surface = system.LDE.GetSurfaceAt(vignetting_code).Comment\n print(\"\\nConfig #%d\" % config)\n print(\"Vignetting at surface #%d: %s\" % (vignetting_code, vignetting_surface))\n\n if checksum_slicer < N_rays:\n raise ValueError('Some rays were lost before the Image Slicer')\n\n rays_slicer.ClearData()\n\n # Count how many rays fall inside a +- 1 mm window in Y, wrt the centroid\n slicer_cent_x = np.nanmean(slicer_xy[:, :, 0], axis=1)\n slicer_cent_y = np.nanmean(slicer_xy[:, :, 1], axis=1)\n sli_foc_xy[:, 0] = slicer_cent_x\n sli_foc_xy[:, 1] = slicer_cent_y\n\n # print(slicer_cent_y)\n below_slicer = slicer_xy[:, :, 1] < slicer_cent_y[:, np.newaxis] + 1.0 * box_size / 2\n above_slicer = slicer_xy[:, :, 1] > slicer_cent_y[:, np.newaxis] - 1.0 * box_size / 2\n inside_slicer = (np.logical_and(below_slicer, above_slicer))\n # print(inside_slicer[0, :10])\n\n # Now, for each wavelength, we calculate which rays fulfil the Image Slicer conditions\n index_valid_slicer = [np.argwhere(inside_slicer[i, :] == True)[:, 0] for i in range(N_waves)]\n # print(index_valid_slicer[1][:10])\n # print(index_valid_slicer[2][:10])\n\n # (2) Run the raytrace up to the DETECTOR\n # For speed, we re-use the same Raytrace, just define new rays!\n # raytrace_det = system.Tools.OpenBatchRayTrace()\n # Detector is always the last surface\n detector_surface = system.LDE.NumberOfSurfaces - 1\n rays_detector = raytrace.CreateNormUnpol(N_rays, constants.RaysType_Real, detector_surface)\n # Loop over all wavelengths\n for i_wave, wave_idx in enumerate(wavelength_idx):\n for (p_x, p_y) in zip(px, py):\n rays_detector.AddRay(wave_idx, hx, hy, p_x, p_y, constants.OPDMode_None)\n\n CastTo(raytrace, 'ISystemTool').RunAndWaitForCompletion()\n\n rays_detector.StartReadingResults()\n checksum_detector = 0\n # index_valid_detector = [] # Valid means they make it to the detector even if vignetted at the Slicer\n vignetted = []\n index_vignetted = []\n index_valid_detector = np.empty((N_waves, N_pupil))\n index_valid_detector[:] = np.nan\n for k in range(N_rays): # Get Raytrace results at the Detector\n i_wave = k // N_pupil\n j_pupil = k % N_pupil\n output = rays_detector.ReadNextResult()\n if output[2] == 0 and output[3] == 0: # ErrorCode & VignetteCode\n detector_xy[i_wave, j_pupil, 0] = output[4]\n detector_xy[i_wave, j_pupil, 1] = output[5]\n checksum_detector += 1\n index_valid_detector[i_wave, j_pupil] = j_pupil\n\n elif output[2] == 0 and output[3] != 0:\n # Some rays are vignetted\n vignetted.append([output[4], output[5]])\n detector_xy[i_wave, j_pupil, 0] = output[4]\n detector_xy[i_wave, j_pupil, 1] = output[5]\n checksum_detector += 1\n index_valid_detector[i_wave, j_pupil] = j_pupil\n index_vignetted.append(k)\n\n # index_valid_detector = np.array(index_valid_detector)\n # # print(index_valid_detector.shape)\n # # print(index_valid_detector)\n # index_valid_detector = index_valid_detector.reshape((N_waves, N_pupil))\n # # print(index_valid_detector.shape)\n\n rays_detector.ClearData()\n CastTo(raytrace, 'ISystemTool').Close()\n\n # (3) Calculate the ENSQUARED ENERGY\n # We only count the rays that where inside the slicer to begin with and the ones that make it to the detector\n for i_wave in range(N_waves):\n valid_both = []\n for k in range(N_pupil):\n # print(index_valid_detector[i_wave])\n if k in index_valid_slicer[i_wave] and k in 
index_valid_detector[i_wave]:\n valid_both.append(k)\n\n valid_det_x = detector_xy[i_wave, :, 0][valid_both]\n valid_det_y = detector_xy[i_wave, :, 1][valid_both]\n\n # Now, out of the VALID rays, we calculate which detector rays fall inside a 2x pixel box along X\n dcx = np.mean(valid_det_x) # Detector Centroid X\n dcy = np.mean(valid_det_y)\n det_foc_xy[i_wave] = [dcx, dcy]\n\n left_detector = valid_det_x < dcx + det_pix * box_size / 2\n right_detector = valid_det_x > dcx - det_pix * box_size / 2\n inside_detector = (np.logical_and(left_detector, right_detector))\n total_detector = np.sum(inside_detector)\n ensq = total_detector / N_pupil\n # print(ensq)\n EE[i_wave] = ensq * 0.98\n\n # SHOW THIS in the methodology\n\n # fig, axes = plt.subplots(2, N_waves)\n # colors = cm.Reds(np.linspace(0.5, 1, N_waves))\n # for j in range(N_waves):\n # ax1 = axes[0][j]\n # scy = sli_foc_xy[j, 1]\n # scx = sli_foc_xy[j, 0]\n # ax1.axhline(y=scy + 1.0 * box_size / 2, color='black', linestyle='--')\n # ax1.axhline(y=scy - 1.0 * box_size / 2, color='black', linestyle='--')\n # ax1.scatter(slicer_xy[j, :, 0], slicer_xy[j, :, 1], s=3, color=colors[j])\n # ax1.scatter(sli_foc_xy[j, 0], sli_foc_xy[j, 1], s=3, color='black')\n # wavelength = system.SystemData.Wavelengths.GetWavelength(wavelength_idx[j]).Wavelength\n # ax1.set_title(\"IMG SLI | %.3f $\\mu$m\" % wavelength)\n # ax1.set_aspect('equal')\n # ax1.get_yaxis().set_visible(False)\n # ax1.get_xaxis().set_visible(False)\n #\n # p = 1.2\n # ax1.set_xlim([scx - p * box_size / 2, scx + p * box_size / 2])\n # ax1.set_ylim([scy - p * box_size / 2, scy + p * box_size / 2])\n #\n # ax2 = axes[1][j]\n # dcx = det_foc_xy[j, 0]\n # dcy = det_foc_xy[j, 1]\n # ax2.scatter(detector_xy[j, :, 0], detector_xy[j, :, 1], s=3, color=colors[j])\n # ax2.scatter(det_foc_xy[j, 0], det_foc_xy[j, 1], s=3, color='black')\n # ax2.axvline(x=dcx + det_pix * box_size / 2, color='black', linestyle='--')\n # ax2.axvline(x=dcx - det_pix * box_size / 2, color='black', linestyle='--')\n # ax2.set_title(\"DET | %.3f $\\mu$m\" % wavelength)\n # ax2.set_aspect('equal')\n # ax2.get_yaxis().set_visible(False)\n # ax2.get_xaxis().set_visible(False)\n # ax2.set_xlim([dcx - p * det_pix * box_size / 2, dcx + p * det_pix * box_size / 2])\n # ax2.set_ylim([dcy - p * det_pix * box_size / 2, dcy + p * det_pix * box_size / 2])\n #\n #\n # plt.show()\n\n return EE, obj_xy, sli_foc_xy, det_foc_xy",
"def scaleHF_fluxspace(self, PFC, lqEich, S, P):\n # Get R and Z vectors at the midplane\n R_omp_sol = PFC.ep.g['lcfs'][:,0].max()\n R_omp_min = R_omp_sol - 5.0*lqEich*(1e-3) #in meters now\n R_omp_max = R_omp_sol + 20.0*lqEich*(1e-3) #in meters now\n #if R_omp_max is outside EFIT grid, cap at maximum R of grid\n if R_omp_max > max(PFC.ep.g['R']):\n R_omp_max = max(PFC.ep.g['R']) #in meters now\n R_omp = np.linspace(R_omp_min, R_omp_max, 1000)\n Z_omp = np.zeros(R_omp.shape)\n #Calculate flux at midplane using gfile\n psiN = PFC.ep.psiFunc.ev(R_omp,Z_omp)\n psi = psiN * (PFC.ep.g['psiSep']-PFC.ep.g['psiAxis']) + PFC.ep.g['psiAxis']\n PFC.psiMinLCFS = PFC.ep.psiFunc.ev(R_omp_sol,0.0)\n s_hat = psiN - PFC.psiMinLCFS\n # Evaluate B at outboard midplane\n Bp_omp = PFC.ep.BpFunc.ev(R_omp,Z_omp)\n Bt_omp = PFC.ep.BtFunc.ev(R_omp,Z_omp)\n B_omp = np.sqrt(Bp_omp**2 + Bt_omp**2)\n\n #Get q|| profile then integrate in Psi\n q_hat = self.eich_profile_fluxspace(PFC, lqEich, S, R_omp, Bp_omp, psiN)\n\n #Menard's method\n P0 = 2*np.pi * simps(q_hat, psi)\n #Matt's Method\n# P0 = 2*np.pi * simps(q_hat / B_omp, psi)\n #account for nonphysical power\n if P0 < 0: P0 = -P0\n #Scale to input power\n q0 = P/P0\n return q0",
"def apply_gaussian_resolution(self,params,data,fwhm=1,dE=0.01,E_max=100):\n print('\\n################### CONVOLUTION #####################\\n')\n print(f'\\n\\tConvolution with Gaussian function, FWHM = {fwhm} meV\\n')\n\n data.fwhm = fwhm\n c = fwhm/2.35482\n\n data.dE = dE\n data.E_max = E_max\n data.spectra_E = np.arange(0,data.E_max+data.dE,data.dE)\n data.spectra_num_E = len(data.spectra_E)\n data.spectra = np.zeros((data.spectra_num_E,params.num_Qpoints))\n data.smooth_spectra = np.zeros((data.spectra_num_E,params.num_Qpoints))\n structure_factors = []\n energies = []\n\n ### sum intensity of degenerate bands\n if params.sum_degenerate_bands == True:\n print('\\n\\tSumming degenerate bands before convolution (using convolution dE as tolerance)\\n')\n for q in range(params.num_Qpoints):\n sfac = data.structure_factors[:,q]\n energy = data.frequencies[f'{q}']\n reduced_energies = []\n summed_sfac = []\n while True:\n if len(energy) == 0:\n break\n test_energy = energy[0]\n reduced_energies.append(test_energy)\n indicies = np.intersect1d(np.argwhere(energy <= (test_energy+data.dE)),\n np.argwhere(energy > (test_energy-data.dE)))\n summed_sfac.append(sfac[indicies].sum())\n sfac = np.delete(sfac,indicies)\n energy = np.delete(energy,indicies)\n energies.append(reduced_energies)\n structure_factors.append(summed_sfac)\n else:\n print('\\n\\tWARNING: You should definitely sum degenerate bands!!!\\n')\n for q in range(params.num_Qpoints):\n energies.append(data.frequencies[f'{q}'])\n structure_factors.append(data.structure_factors[:,q])\n\n ### populate array for heatmap\n ### try statement takes care of negative energies\n for q in range(params.num_Qpoints):\n for b in range(len(structure_factors[q][:])):\n try: # if there are negative modes, argwhere returns an empty vector and the slice crashes\n data.spectra[np.argwhere(data.spectra_E <= \n energies[q][b]).max(),q] = structure_factors[q][b]\n except:\n continue\n\n if params.bose_factor == True:\n print('\\n\\tWARNING: Bose factor isnt verified. Need to compare to SNAXS.\\n')\n if params.temperature < 5:\n temperature = 5\n else:\n temperature = params.temperature\n inds = np.argwhere(data.spectra_E <= 0.5)\n tmp_e = np.copy(data.spectra_E)\n tmp_e[inds] = 0.5\n bose = 1+1/(np.exp(tmp_e/(constants.kb*1000*temperature))-1)\n bose = np.tile(bose.reshape((data.spectra_num_E,1)),reps=(1,params.num_Qpoints))\n data.spectra = np.multiply(data.spectra,bose)\n data.spectra = data.spectra/np.max(data.spectra)\n\n ### gaussian convolution using for loops, slow but very little memory utilization\n g_energy = np.append(data.spectra_E-data.spectra_E.max(),data.spectra_E[1:])\n gaussian = np.exp(-0.5*g_energy**2/c**2)/c/np.sqrt(2*np.pi)\n gaussian = np.tile(gaussian.reshape((gaussian.shape[0],1)),(1,data.num_Qpoints))\n tmp = np.append(data.spectra,data.spectra,axis=0)[1:,:]\n for e in range(data.spectra_num_E):\n if e%50 == 0:\n print(f'\\t------ {e}/{data.spectra_num_E} -------')\n data.smooth_spectra[e,:] = np.trapz(tmp*np.roll(gaussian,shift=e,axis=0),g_energy,axis=0)\n print('\\n\\tDone convolving!\\n')\n data.smooth_spectra = data.smooth_spectra/np.max(data.smooth_spectra)\n\n# if params.random_background == True:\n# data.smooth_spectra = data.smooth_spectra+(np.random.normal(0,1,\n# (data.smooth_spectra.shape[0],data.smooth_spectra.shape[1])))*0.001\n \n plt.imshow(data.smooth_spectra,origin='lower',aspect='auto',cmap='hot')\n plt.show()",
"def _scale_param(self, resid_us):\n return((resid_us**2).sum().sum() / self.dof)",
"def _scale_aeff(self, input_irf_file, config):\n\n # Reading the Aeff parameters\n self._aeff['Elow'] = input_irf_file['Effective area'].data['Energ_lo'][0].copy()\n self._aeff['Ehigh'] = input_irf_file['Effective area'].data['Energ_hi'][0].copy()\n self._aeff['ThetaLow'] = input_irf_file['Effective area'].data['Theta_lo'][0].copy()\n self._aeff['ThetaHi'] = input_irf_file['Effective area'].data['Theta_hi'][0].copy()\n self._aeff['Area'] = input_irf_file['Effective area'].data['EffArea'][0].transpose().copy()\n self._aeff['E'] = scipy.sqrt(self._aeff['Elow'] * self._aeff['Ehigh'])\n self._aeff['Theta'] = (self._aeff['ThetaLow'] + self._aeff['ThetaHi']) / 2.0\n \n # Creating the energy-theta mesh grid\n energy, theta = scipy.meshgrid(self._aeff['E'], self._aeff['Theta'], indexing='ij')\n\n # ----------------------------------\n # Scaling the Aeff energy dependence\n\n # Constant error function\n if config['energy_scaling']['err_func_type'] == \"constant\":\n self._aeff['Area_new'] = self._aeff['Area'] * config['energy_scaling']['constant']['scale']\n\n # Gradients error function\n elif config['energy_scaling']['err_func_type'] == \"gradient\":\n scaling_params = config['energy_scaling']['gradient']\n self._aeff['Area_new'] = self._aeff['Area'] * (\n 1 + scaling_params['scale'] * gradient(scipy.log10(energy),\n scipy.log10(scaling_params['range_min']),\n scipy.log10(scaling_params['range_max']))\n )\n \n # Step error function\n elif config['energy_scaling']['err_func_type'] == \"step\":\n scaling_params = config['energy_scaling']['step']\n break_points = list(zip(scipy.log10(scaling_params['transition_pos']),\n scaling_params['transition_widths']))\n self._aeff['Area_new'] = self._aeff['Area'] * (\n 1 + scaling_params['scale'] * step(scipy.log10(energy), break_points)\n )\n else:\n raise ValueError(\"Aeff energy scaling: unknown scaling function type '{:s}'\"\n .format(config['energy_scaling']['err_func_type']))\n # ----------------------------------\n\n # ------------------------------------------\n # Scaling the Aeff off-axis angle dependence\n\n # Constant error function\n if config['angular_scaling']['err_func_type'] == \"constant\":\n self._aeff['Area_new'] = self._aeff['Area_new'] * config['angular_scaling']['constant']['scale']\n\n # Gradients error function\n elif config['angular_scaling']['err_func_type'] == \"gradient\":\n scaling_params = config['angular_scaling']['gradient']\n self._aeff['Area_new'] = self._aeff['Area_new'] * (\n 1 + scaling_params['scale'] * gradient(theta,\n scaling_params['range_min'],\n scaling_params['range_max'])\n )\n\n # Step error function\n elif config['angular_scaling']['err_func_type'] == \"step\":\n scaling_params = config['angular_scaling']['step']\n break_points = list(zip(scaling_params['transition_pos'],\n scaling_params['transition_widths']))\n self._aeff['Area_new'] = self._aeff['Area_new'] * (\n 1 + scaling_params['scale'] * step(theta, break_points)\n )\n else:\n raise ValueError(\"Aeff angular scaling: unknown scaling function type '{:s}'\"\n .format(config['angular_scaling']['err_func_type']))\n # ------------------------------------------\n\n # Recording the scaled Aeff\n input_irf_file['Effective area'].data['EffArea'][0] = self._aeff['Area_new'].transpose()"
] | [
"0.61338806",
"0.6089663",
"0.6083601",
"0.60192114",
"0.58811396",
"0.5845473",
"0.57790524",
"0.56270677",
"0.56215096",
"0.5540787",
"0.5538469",
"0.5513849",
"0.55025077",
"0.54857355",
"0.5447501",
"0.5444619",
"0.5426935",
"0.5422314",
"0.5416228",
"0.5408127",
"0.53833055",
"0.5352067",
"0.53422153",
"0.5338832",
"0.5322048",
"0.53011394",
"0.5271971",
"0.52600205",
"0.5258094",
"0.5248959"
] | 0.65217674 | 0 |
update kth(0indexed) value with a | def set_val(self, k, a):
k += self.n - 1
self.dat[k] = a
while k > 0:
k = (k - 1) // 2 # parent
self.dat[k] = self.op(self.dat[k * 2 + 1], self.dat[k * 2 + 2]) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __setitem__(self, k, v):\n\n self.valores[( zero - k )%self.longitud] = v",
"def _bucket_setitem(self, j, k, v):\n if self._table[j] is None:\n self._table[j] = UnsortedTableMap() # create new bucket at index j\n oldSize = len(self._table[j])\n self._table[j][k] = v\n if len(self._table[j]) > oldSize: # key is new to the table\n self._n += 1",
"def _bucket_setitem(self, j, k, v):\n pass",
"def __setitem__(self, k, v):\n j = self._hash_function(k)\n self._bucket_setitem(j, k, v)\n if self._n > len(self._table) // 2:\n self._resize(2 * len(self._table) -1)",
"def __setitem__(self, k, value):\n self._coords[k] = value",
"def put(k, v):\n index = get_index(k)\n hash_data[index] = v",
"def update(self, idx, value):\n idx = self.__capacity - 1 + idx\n self.__tree[idx] = value\n self.__update(idx)",
"def __setitem__(self, k, value):\n if k < 0:\n k += len(self)\n if value is not None:\n self.store_array.add_list_item(ListNode(value , k))",
"def modify_pos(self, k, delta):\n self.pos[k] += delta",
"def put(self, k: Any, v: Any):\n i = abs(hash(k)) % self.size\n current = self.data[i]\n while current is not None:\n if current.key == k:\n current.value = v\n return\n current = current.next\n new_node = self.Node(k, v)\n new_node.next = self.data[i]\n self.data[i] = new_node",
"def __setitem__(self, key, val):\n x, y = key\n self.matrix[y][x] = val",
"def __setitem__(self, idx, val):\n self.rows[idx[0]][idx[1]] = val",
"def __setitem__(self,k,v):\n self.insert(k,v)",
"def put(self, key, value):\n i = key //1000\n j = key%1000\n self.container[i][j] = value",
"def __setitem__(self, key, item):\n assert isinstance(key,list) and isinstance(item,list) and len(key)==2 and len(item)==2\n self._data[self.__ptBin(key[0])][self.__etaBin(key[1])] = item",
"def set_v_item(self, vindex, new_val):\n\n i = [((0, 0),),\n ((1, 1),),\n ((2, 2),),\n ([1, 2], [2, 1]),\n ([2, 0], [0, 2]),\n ([0, 1], [1, 0])]\n\n for j, k in i[vindex]:\n self[j, k] = new_val",
"def __setitem__(self, key: Tuple[int, int], value: complex) -> None:\n self.coeff[self._core.index_alpha(key[0]),\n self._core.index_beta(key[1])] = value",
"def set_idx(self, i, other, tensor_value):\n for k, v in self.variables.items():\n if k not in other.variables:\n self.variables[k][i] *= 0\n\n for k, v in other.variables.items():\n if k not in self.variables:\n self.variables[k] = np.zeros(tensor_value.shape)\n self.variables[k][i] = other.variables[k]",
"def __getitem__(self, k) :\n raise NotImplementedError",
"def __setitem__(self, key, value):\n mixed_positions, vindex_positions = _advanced_indexer_subspaces(key)\n self._array[key] = np.moveaxis(value, vindex_positions, mixed_positions)",
"def __setitem__(self, index, value):\n self.elem[index] = value",
"def __setitem__(self, ind: int, value: float) -> None:",
"def update(self, index: int, x: int):\n index += self.n2\n self.tree[index] = self.binary(self.tree[index], x)\n while index > 1:\n # (index ^ 1) はiと1の排他的論理和(XOR)\n x = self.binary(x, self.tree[index ^ 1])\n index >>= 1 # 右ビットシフトで親ノードのインデックスへ移動\n self.tree[index] = self.binary(self.tree[index], x)",
"def __setitem__(key, value):",
"def update_memory_x(self, x_k):\n self.mem_x[self.mem_idx, :] = x_k",
"def put(self, i, value):\n\t\tif(i < 0 or i >= self.d):\n\t\t\traise ValueError(\"Illegal index\")\n\t\tif(value == 0.0):\n\t\t\tself.st.delete(i)\n\t\telse:\n\t\t\tself.st.put(i,value)",
"def _bucket_getitem(self, j, k):\n pass",
"def updateH(self,k_vec,it):\n self.k_vec = k_vec\n self.it = it\n self.H_kc = fl.H_k(k_vec, self.it, self.delta)",
"def update(self, key, value):\n hash_key = hash(key) % self.length\n bucket = self.array[hash_key]\n if not bucket:\n raise ValueError('Key does not exist')\n for key_val_pair in bucket:\n if key_val_pair[0] == key:\n key_val_pair[1] = value\n break",
"def put(self, key: int, value: int) -> None:\n idx = key % self.size\n if self.mp[idx]:\n for i in range(len(self.mp[idx])):\n if self.mp[idx][i][0] == key:\n self.mp[idx][i][1] = value\n return\n self.mp[idx].append([key, value])\n else:\n self.mp[idx].append([key, value])"
] | [
"0.69229925",
"0.6456467",
"0.6364326",
"0.62770015",
"0.6199403",
"0.60915345",
"0.6048265",
"0.6025952",
"0.60192573",
"0.59893894",
"0.59650415",
"0.59532386",
"0.5934357",
"0.59224254",
"0.58959097",
"0.5847799",
"0.5826606",
"0.5785241",
"0.577905",
"0.5765922",
"0.57567644",
"0.57482696",
"0.5742578",
"0.57411206",
"0.57360464",
"0.57339394",
"0.5730614",
"0.57084453",
"0.5708278",
"0.5704075"
] | 0.7316253 | 0 |
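
For context, the `set_val` document in the record above is the point-update half of a power-of-two segment tree. Below is a minimal, self-contained sketch of the structure it assumes: only `set_val` and the names `n`, `dat`, `op` come from the record; the constructor, the `identity` element, and the `query` method are illustrative assumptions added so the snippet runs on its own.

```python
class SegmentTree:
    def __init__(self, size, op, identity):
        self.op = op                      # associative merge, e.g. min or +
        self.identity = identity          # neutral element of op
        self.n = 1
        while self.n < size:              # round size up to a power of two
            self.n *= 2
        self.dat = [identity] * (2 * self.n - 1)

    def set_val(self, k, a):
        """Update the k-th (0-indexed) leaf to value a, then fix its ancestors."""
        k += self.n - 1                   # position of the leaf in the flat array
        self.dat[k] = a
        while k > 0:
            k = (k - 1) // 2              # move to parent
            self.dat[k] = self.op(self.dat[2 * k + 1], self.dat[2 * k + 2])

    def query(self, a, b, k=0, l=0, r=None):
        """Fold op over the half-open index range [a, b)."""
        if r is None:
            r = self.n
        if r <= a or b <= l:              # no overlap
            return self.identity
        if a <= l and r <= b:             # full overlap
            return self.dat[k]
        left = self.query(a, b, 2 * k + 1, l, (l + r) // 2)
        right = self.query(a, b, 2 * k + 2, (l + r) // 2, r)
        return self.op(left, right)


# Example: point updates followed by a range-minimum query.
st = SegmentTree(5, min, float("inf"))
for i, v in enumerate([5, 3, 7, 9, 6]):
    st.set_val(i, v)
st.set_val(2, 1)
print(st.query(1, 4))  # -> 1
```
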
Decodes and yields each game event from the contents byte string. | def decode_replay_game_events(contents):
decoder = BitPackedDecoder(contents, typeinfos)
for event in _decode_event_stream(decoder,
game_eventid_typeid,
game_event_types,
decode_user_id=True):
yield event | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def decode_replay_message_events(contents):\n decoder = BitPackedDecoder(contents, typeinfos)\n for event in _decode_event_stream(decoder,\n message_eventid_typeid,\n message_event_types,\n decode_user_id=True):\n yield event",
"def decode_replay_tracker_events(contents):\n decoder = VersionedDecoder(contents, typeinfos)\n for event in _decode_event_stream(decoder,\n tracker_eventid_typeid,\n tracker_event_types,\n decode_user_id=False):\n yield event",
"def chunks(raw):\n for i in range(0, len(raw), EVENT_SIZE):\n yield struct.unpack(EVENT_FORMAT, raw[i:i+EVENT_SIZE])",
"def iter_unpack(raw):\n return struct.iter_unpack(EVENT_FORMAT, raw)",
"def decode(self, s):\r\n (tsec, tfrac, self.eventType, self.eventCode,\r\n self.eventValue) = struct.unpack(Format.Event, s)\r\n\r\n self.time = tsec + tfrac / 1000000.0",
"def parse_event(self):\n event_id = self.replay.read_string()\n group = self.replay.read_string()\n metadata = self.replay.read_string()\n start_time = self.replay.read_uint32()\n end_time = self.replay.read_uint32()\n size = self.replay.read_uint32()\n\n buffer = self.decrypt_buffer(size)\n\n if group == EventTypes.PLAYER_ELIMINATION.value:\n try:\n self.parse_elimination_event(buffer, start_time)\n except:\n logger.error(\"Couldnt parse event PLAYER_ELIMINATION\")\n\n if metadata == EventTypes.MATCH_STATS.value:\n self.parse_matchstats_event(buffer)\n\n if metadata == EventTypes.TEAM_STATS.value:\n self.parse_teamstats_event(buffer)",
"def _decode1(self, body, data):\r\n if \" \" in body:\r\n evtype,body = body.split(\" \",1)\r\n else:\r\n evtype,body = body,\"\"\r\n evtype = evtype.upper()\r\n if evtype == \"CIRC\":\r\n m = re.match(r\"(\\d+)\\s+(\\S+)(\\s\\S+)?(\\s\\S+)?(\\s\\S+)?(\\s\\S+)?\", body)\r\n if not m:\r\n raise ProtocolError(\"CIRC event misformatted.\")\r\n ident,status,path,purpose,reason,remote = m.groups()\r\n ident = int(ident)\r\n if path:\r\n if \"PURPOSE=\" in path:\r\n remote = reason\r\n reason = purpose\r\n purpose=path\r\n path=[]\r\n elif \"REASON=\" in path:\r\n remote = reason\r\n reason = path\r\n purpose = \"\"\r\n path=[]\r\n else:\r\n path_verb = path.strip().split(\",\")\r\n path = []\r\n for p in path_verb:\r\n path.append(p.replace(\"~\", \"=\").split(\"=\")[0])\r\n else:\r\n path = []\r\n\r\n if purpose and \"REASON=\" in purpose:\r\n remote=reason\r\n reason=purpose\r\n purpose=\"\"\r\n\r\n if purpose: purpose = purpose[9:]\r\n if reason: reason = reason[8:]\r\n if remote: remote = remote[15:]\r\n event = CircuitEvent(evtype, ident, status, path, purpose, reason,\r\n remote, body)\r\n elif evtype == \"STREAM\":\r\n #plog(\"DEBUG\", \"STREAM: \"+body)\r\n m = re.match(r\"(\\S+)\\s+(\\S+)\\s+(\\S+)\\s+(\\S+)?:(\\d+)(\\sREASON=\\S+)?(\\sREMOTE_REASON=\\S+)?(\\sSOURCE=\\S+)?(\\sSOURCE_ADDR=\\S+)?(\\s+PURPOSE=\\S+)?\", body)\r\n if not m:\r\n raise ProtocolError(\"STREAM event misformatted.\")\r\n ident,status,circ,target_host,target_port,reason,remote,source,source_addr,purpose = m.groups()\r\n ident,circ = map(int, (ident,circ))\r\n if not target_host: # This can happen on SOCKS_PROTOCOL failures\r\n target_host = \"(none)\"\r\n if reason: reason = reason[8:]\r\n if remote: remote = remote[15:]\r\n if source: source = source[8:]\r\n if source_addr: source_addr = source_addr[13:]\r\n if purpose:\r\n purpose = purpose.lstrip()\r\n purpose = purpose[8:]\r\n event = StreamEvent(evtype, ident, status, circ, target_host,\r\n int(target_port), reason, remote, source, source_addr,\r\n purpose, body)\r\n elif evtype == \"ORCONN\":\r\n m = re.match(r\"(\\S+)\\s+(\\S+)(\\sAGE=\\S+)?(\\sREAD=\\S+)?(\\sWRITTEN=\\S+)?(\\sREASON=\\S+)?(\\sNCIRCS=\\S+)?\", body)\r\n if not m:\r\n raise ProtocolError(\"ORCONN event misformatted.\")\r\n target, status, age, read, wrote, reason, ncircs = m.groups()\r\n\r\n #plog(\"DEBUG\", \"ORCONN: \"+body)\r\n if ncircs: ncircs = int(ncircs[8:])\r\n else: ncircs = 0\r\n if reason: reason = reason[8:]\r\n if age: age = int(age[5:])\r\n else: age = 0\r\n if read: read = int(read[6:])\r\n else: read = 0\r\n if wrote: wrote = int(wrote[9:])\r\n else: wrote = 0\r\n event = ORConnEvent(evtype, status, target, age, read, wrote,\r\n reason, ncircs, body)\r\n elif evtype == \"STREAM_BW\":\r\n m = re.match(r\"(\\d+)\\s+(\\d+)\\s+(\\d+)\", body)\r\n if not m:\r\n raise ProtocolError(\"STREAM_BW event misformatted.\")\r\n event = StreamBwEvent(evtype, body, *m.groups())\r\n elif evtype == \"BW\":\r\n m = re.match(r\"(\\d+)\\s+(\\d+)\", body)\r\n if not m:\r\n raise ProtocolError(\"BANDWIDTH event misformatted.\")\r\n read, written = map(long, m.groups())\r\n event = BWEvent(evtype, read, written, body)\r\n elif evtype in (\"DEBUG\", \"INFO\", \"NOTICE\", \"WARN\", \"ERR\"):\r\n event = LogEvent(evtype, body)\r\n elif evtype == \"NEWDESC\":\r\n ids_verb = body.split(\" \")\r\n ids = []\r\n for i in ids_verb:\r\n ids.append(i.replace(\"~\", \"=\").split(\"=\")[0].replace(\"$\",\"\"))\r\n event = NewDescEvent(evtype, ids, body)\r\n elif evtype == \"ADDRMAP\":\r\n # TODO: Also parse 
errors and GMTExpiry\r\n m = re.match(r'(\\S+)\\s+(\\S+)\\s+(\\\"[^\"]+\\\"|\\w+)', body)\r\n if not m:\r\n raise ProtocolError(\"ADDRMAP event misformatted.\")\r\n fromaddr, toaddr, when = m.groups()\r\n if when.upper() == \"NEVER\": \r\n when = None\r\n else:\r\n when = time.strptime(when[1:-1], \"%Y-%m-%d %H:%M:%S\")\r\n event = AddrMapEvent(evtype, fromaddr, toaddr, when, body)\r\n elif evtype == \"NS\":\r\n event = NetworkStatusEvent(evtype, parse_ns_body(data), data)\r\n elif evtype == \"NEWCONSENSUS\":\r\n event = NewConsensusEvent(evtype, parse_ns_body(data), data)\r\n elif evtype == \"BUILDTIMEOUT_SET\":\r\n m = re.match(\r\n r\"(\\S+)\\sTOTAL_TIMES=(\\d+)\\sTIMEOUT_MS=(\\d+)\\sXM=(\\d+)\\sALPHA=(\\S+)\\sCUTOFF_QUANTILE=(\\S+)\",\r\n body)\r\n set_type, total_times, timeout_ms, xm, alpha, quantile = m.groups()\r\n event = BuildTimeoutSetEvent(evtype, set_type, int(total_times),\r\n int(timeout_ms), int(xm), float(alpha),\r\n float(quantile), body)\r\n elif evtype == \"GUARD\":\r\n m = re.match(r\"(\\S+)\\s(\\S+)\\s(\\S+)\", body)\r\n entry, guard, status = m.groups()\r\n event = GuardEvent(evtype, entry, guard, status, body)\r\n elif evtype == \"TORCTL_TIMER\":\r\n event = TimerEvent(evtype, data)\r\n else:\r\n event = UnknownEvent(evtype, body)\r\n\r\n return event",
"def decode(self, s):",
"def decode(self, s):",
"def decode(data: bytes) -> Iterable:\r\n decoder = Decoder(data)\r\n return decoder.decode()",
"def get_game_events(self):\n\t\tcontents = self.archive.read_file('replay.game.events')\n\t\treturn self.protocol.decode_replay_game_events(contents)",
"def get_messages(self):\n\t\tcontents = self.archive.read_file('replay.message.events')\n\t\treturn self.protocol.decode_replay_message_events(contents)",
"def parse_event_elements(bv: binaryninja.binaryview.BinaryView, stream: Stream) -> List[Event]:\n number_of_event = stream.read_u32()\n stream.read(4) # padding\n\n events = []\n for i in range(0, number_of_event):\n event_id = stream.read_u16()\n version = stream.read_u8()\n channel = stream.read_u8()\n level = stream.read_u8()\n opcode = stream.read_u8()\n task = stream.read_u16()\n keywords = stream.read_u64()\n message_identifier = stream.read_u32()\n template_offset = stream.read_u32()\n opcode_offset = stream.read_u32()\n level_offset = stream.read_u32()\n task_offset = stream.read_u32()\n stream.read(12)\n events.append(Event(bv, event_id, version, channel, level, opcode, task, keywords))\n\n return events",
"def parse(self):\n i = 1\n times = []\n while 1:\n byte = yield\n if byte== 0xaa:\n byte = yield # This byte should be \"\\aa\" too\n if byte== 0xaa:\n # packet synced by 0xaa 0xaa\n packet_length = yield\n packet_code = yield\n if packet_code == 0xd4:\n # standing by\n self.state = \"standby\"\n elif packet_code == 0xd0:\n self.state = \"connected\"\n elif packet_code == 0xd2:\n data_len = yield\n headset_id = yield\n headset_id += yield\n self.dongle_state = \"disconnected\"\n else:\n self.sending_data = True\n left = packet_length - 2\n while left>0:\n if packet_code ==0x80: # raw value\n row_length = yield\n a = yield\n b = yield\n value = struct.unpack(\"<h\",chr(b)+chr(a))[0]\n self.dispatch_data(\"raw\", value)\n left -= 2\n elif packet_code == 0x02: # Poor signal\n a = yield\n\n left -= 1\n elif packet_code == 0x04: # Attention (eSense)\n a = yield\n if a>0:\n v = struct.unpack(\"b\",chr(a))[0]\n if 0 < v <= 100:\n self.dispatch_data(\"attention\", v)\n left-=1\n elif packet_code == 0x05: # Meditation (eSense)\n a = yield\n if a>0:\n v = struct.unpack(\"b\",chr(a))[0]\n if 0 < v <= 100:\n self.dispatch_data(\"meditation\", v)\n left-=1\n elif packet_code == 0x16: # Blink Strength\n self.current_blink_strength = yield\n \n left-=1\n elif packet_code == 0x83:\n vlength = yield\n self.current_vector = []\n for row in range(8):\n a = yield\n b = yield\n c = yield\n value = a*255*255+b*255+c\n left -= vlength\n self.dispatch_data(\"bands\", self.current_vector)\n packet_code = yield\n else:\n pass # sync failed\n else:\n pass # sync failed",
"def test_textAsEvent_encoding(self):\n self.assertEquals(\n textAsEvent(u\"S\\xe1nchez\"),\n b\"data: S\\xc3\\xa1nchez\\n\\n\"\n )",
"def decode_replay_attributes_events(contents):\n buffer = BitPackedBuffer(contents, 'little')\n attributes = {}\n if not buffer.done():\n attributes['source'] = buffer.read_bits(8)\n attributes['mapNamespace'] = buffer.read_bits(32)\n count = buffer.read_bits(32)\n attributes['scopes'] = {}\n while not buffer.done():\n value = {}\n value['namespace'] = buffer.read_bits(32)\n value['attrid'] = attrid = buffer.read_bits(32)\n scope = buffer.read_bits(8)\n value['value'] = buffer.read_aligned_bytes(4)[::-1].strip(b'\\x00')\n if not scope in attributes['scopes']:\n attributes['scopes'][scope] = {}\n if not attrid in attributes['scopes'][scope]:\n attributes['scopes'][scope][attrid] = []\n attributes['scopes'][scope][attrid].append(value)\n return attributes",
"def events_from_bytes(cls, data, res, frame_num):\n\t\tall_events = [np.zeros(res) for t in range(frame_num - 1)]\n\t\tfor i in range(res[0]):\n\t\t\tfor j in range(res[1]):\n\t\t\t\tevents = cls._pixel_events_from_bytes(data)\n\t\t\t\tfor event in events:\n\t\t\t\t\tall_events[event[1]][i, j] = event[0]\n\n\t\treturn all_events",
"def time_decode(self):\n for ii in range(100):\n msg = DIMSEMessage()\n for fragment in self.fragments:\n msg.decode_msg(fragment)",
"def decode(data): #@NoSelf",
"def msgs_from_bytes(self, b):\n msgs = []\n # User remainder bytes\n parse_bytes = self.remainder + b.decode('ascii')\n # Find the first frame delimiter\n i = parse_bytes.find('\\r\\n')\n while i >= 0:\n # Try to parse a single message\n m = self._parse_msg(parse_bytes[:i])\n # Remove parsed bytes and delimter\n parse_bytes = parse_bytes[i+2:]\n # Add parsed message, if any\n if m:\n msgs.append(m)\n self.logger.debug('Parsed ASCII frame: address={}, function={}, len={}'.format(m.address, m.function, len(m.data) if m.data else 0))\n #else - warn?\n i = parse_bytes.find('\\r\\n')\n # Store any remaining bytes for the next pass\n self.remainder = parse_bytes\n return msgs",
"def load(f):\n while True:\n c = f.read(1)\n if len(c) == 1:\n msg_len = _read_int(f, already_read=c)\n msg_str = f.read(msg_len)\n if len(msg_str) < msg_len:\n raise ValueError(\"Unexpected EOF while parsing message\")\n yield javascript.loads(msg_str.decode())\n else:\n break",
"def decode_stream(self):\n io = self.io\n result = None\n\n while True:\n opcode = io.read(1)\n if not opcode:\n break\n else:\n opcode = ord(opcode)\n\n klass = MicroOpDecoder.opcode_to_class.get(opcode)\n yield klass.decode(io)",
"def stream(cls, fd):\n\n #\n # advance until the title appears\n # \"Adapter: 0 - Number of Events : 9987\"\n #\n\n sd = {\n 'seqnum': None,\n 'sslr': None,\n 'time': None,\n 'code': None,\n 'level': None,\n 'locale': None,\n 'description': None,\n 'linestack': [],\n }\n def emit():\n assert sd['linestack'][0] == '==========='\n sd['linestack'].pop(0)\n event_data = '\\n'.join(sd['linestack']).strip()\n return cls(\n id=sd['seqnum'],\n code=sd['code'],\n level=sd['level'],\n locale=sd['locale'],\n description=sd['description'],\n data=event_data,\n sslr=sd['sslr'],\n time=sd['time'],\n )\n def reset():\n sd['sslr'] = None\n sd['time'] = None\n sd['linestack'] = []\n\n emit_count = 0\n for line in fd:\n\n if line.startswith('seqNum:'):\n match = re.match(r\"seqNum:\\W*0x([0-9a-f]+)\", line)\n if sd['seqnum']:\n yield emit()\n emit_count += 1\n reset()\n seqnum_hex, = match.groups()\n sd['seqnum'] = int(seqnum_hex, 16)\n elif line.startswith('Time:'):\n _, timestr = megasplit(line)\n sd['time'] = decode_event_time(timestr)\n elif line.startswith('Seconds since last reboot:'):\n match = re.match(r\"Seconds since last reboot:\\W*([0-9]+)\", line)\n sd['sslr'], = match.groups()\n elif line.startswith('Code:'):\n match = re.match(r\"Code:\\W*0x([0-9a-f]+)\", line)\n code_hex, = match.groups()\n sd['code'] = int(code_hex, 16)\n elif line.startswith('Locale:'):\n match = re.match(r\"Locale:\\W*0x([0-9a-f]+)\", line)\n locale_hex, = match.groups()\n sd['locale'] = int(locale_hex, 16)\n elif line.startswith('Class:'):\n match = re.match(r\"Class:\\W*([0-9]+)\", line)\n levelstr, = match.groups()\n sd['level'] = int(levelstr)\n elif line.startswith('Event Description:'):\n _, sd['description'] = megasplit(line)\n elif line.startswith('Event Data:'):\n sd['linestack'] = []\n else:\n sd['linestack'].append(line.strip())\n\n #endfor streamlines\n if sd['seqnum']:\n yield emit()\n emit_count += 1\n\n \"\"\"\n if emit_count != total_events:\n raise Exception(\"input stream indicated %d events, but %d events were detected\" % (total_events, emit_count))\n \"\"\"",
"def test_decode():\n decoding = d.decode()\n assert type(decoding) == list\n assert len(decoding) == 7\n assert decoding[0] == '-12;-1\\n\\nESS'\n assert decoding[-1] == '2;-2\\n\\nWSWESNESSS'\n for x in decoding:\n assert \"\\n\" in x",
"def decode_payload(self, bytes):\n packets = []\n while bytes:\n if six.byte2int(bytes[0:1]) <= 1:\n packet_len = 0\n i = 1\n while six.byte2int(bytes[i:i + 1]) != 255:\n packet_len = packet_len * 10 + six.byte2int(bytes[i:i + 1])\n i += 1\n packet_start = i+1\n else:\n bytes = bytes.decode('utf-8')\n i = bytes.find(b':')\n if i == -1:\n raise ValueError('Invalid payload')\n packet_len = int(bytes[0:i])\n packet_start = i+1\n\n packet = self.decode_packet(bytes[packet_start:packet_start+packet_len])\n packets.append(packet)\n bytes = bytes[packet_start+packet_len:]\n\n return packets",
"def unpack(self, s):\n\n raise NotImplementedError()",
"def _unpack_ies(buf):\n\t\t# each IE starts with an ID and a length\n\t\ties = []\n\t\toff = 0\n\t\tbuflen = len(buf)\n\t\t# logger.debug(\"lazy dissecting: %s\" % buf)\n\n\t\twhile off < buflen:\n\t\t\tie_id = buf[off]\n\t\t\ttry:\n\t\t\t\tparser = IEEE80211.ie_decoder[ie_id]\n\t\t\texcept KeyError:\n\t\t\t\t# some unknown tag, use standard format\n\t\t\t\tparser = IEEE80211.IE\n\n\t\t\tdlen = buf[off + 1]\n\t\t\t# logger.debug(\"IE parser is: %d = %s = %s\" % (ie_id, parser, buf[off: off+2+dlen]))\n\t\t\tie = parser(buf[off: off + 2 + dlen])\n\t\t\ties.append(ie)\n\t\t\toff += 2 + dlen\n\n\t\treturn ies",
"def _decrypt_string(self, event):\n _LOGGER.debug(\"Hub: Decrypt String: Original: %s\", str(event.encrypted_content))\n resmsg = self._decrypter.decrypt(unhexlify(event.encrypted_content)).decode(\n encoding=\"UTF-8\", errors=\"replace\"\n )\n _LOGGER.debug(\"Hub: Decrypt String: Decrypted: %s\", resmsg)\n event.parse_decrypted(resmsg)",
"def deserialize(self, str):\n try:\n if self.cnt is None:\n self.cnt = None\n end = 0\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.cnt = []\n for i in range(0, length):\n val1 = dgvmsg.msg.Encounter()\n _v4 = val1.header\n start = end\n end += 4\n (_v4.seq,) = _get_struct_I().unpack(str[start:end])\n _v5 = _v4.stamp\n _x = _v5\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v4.frame_id = str[start:end].decode('utf-8')\n else:\n _v4.frame_id = str[start:end]\n start = end\n end += 4\n (val1.devadd,) = _get_struct_i().unpack(str[start:end])\n _v6 = val1.now\n _x = _v6\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])\n start = end\n end += 4\n (val1.encounter,) = _get_struct_I().unpack(str[start:end])\n self.cnt.append(val1)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill",
"def messaging_events(payload):\n data = json.loads(payload)\n messaging_events = data[\"entry\"][0][\"messaging\"]\n for event in messaging_events:\n if \"message\" in event and \"text\" in event[\"message\"]:\n yield event[\"sender\"][\"id\"], event[\"message\"][\"text\"].encode('unicode_escape')\n else:\n yield event[\"sender\"][\"id\"], \"rez can't parse this\""
] | [
"0.7527568",
"0.70205873",
"0.6208984",
"0.6139739",
"0.6114342",
"0.60903746",
"0.59156847",
"0.5900353",
"0.5900353",
"0.58041835",
"0.5764921",
"0.5743358",
"0.5727057",
"0.57248867",
"0.57141185",
"0.5698565",
"0.55629605",
"0.55396146",
"0.55039734",
"0.5499922",
"0.54963344",
"0.5470103",
"0.54284495",
"0.5427908",
"0.5280547",
"0.52510417",
"0.5215433",
"0.521527",
"0.5208973",
"0.5187036"
] | 0.7618051 | 0 |
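
For context, the `decode_replay_game_events` document above (and its `decode_replay_message_events` counterpart in the next record) are typically driven by reading the corresponding files out of a StarCraft II replay archive, as the neighbouring negatives that call `archive.read_file('replay.game.events')` suggest. The sketch below is an assumption-laden illustration, not part of the dataset: the `mpyq` archive usage is modeled on those negatives, and the build-matched `protocol` module must be supplied by the caller.

```python
import mpyq  # MPQ archive reader commonly paired with these decoders


def iter_replay_events(replay_path, protocol):
    """Yield (kind, event) pairs for game and message events in a replay."""
    archive = mpyq.MPQArchive(replay_path)
    game_bytes = archive.read_file('replay.game.events')
    message_bytes = archive.read_file('replay.message.events')
    for event in protocol.decode_replay_game_events(game_bytes):
        yield 'game', event
    for event in protocol.decode_replay_message_events(message_bytes):
        yield 'message', event


# Example (paths and the protocol module are placeholders):
# from s2protocol import versions
# protocol = versions.latest()
# for kind, event in iter_replay_events('example.SC2Replay', protocol):
#     print(kind, event)
```
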
Decodes and yields each message event from the contents byte string. | def decode_replay_message_events(contents):
decoder = BitPackedDecoder(contents, typeinfos)
for event in _decode_event_stream(decoder,
message_eventid_typeid,
message_event_types,
decode_user_id=True):
yield event | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def decode_replay_game_events(contents):\n decoder = BitPackedDecoder(contents, typeinfos)\n for event in _decode_event_stream(decoder,\n game_eventid_typeid,\n game_event_types,\n decode_user_id=True):\n yield event",
"def decode_replay_tracker_events(contents):\n decoder = VersionedDecoder(contents, typeinfos)\n for event in _decode_event_stream(decoder,\n tracker_eventid_typeid,\n tracker_event_types,\n decode_user_id=False):\n yield event",
"def msgs_from_bytes(self, b):\n msgs = []\n # User remainder bytes\n parse_bytes = self.remainder + b.decode('ascii')\n # Find the first frame delimiter\n i = parse_bytes.find('\\r\\n')\n while i >= 0:\n # Try to parse a single message\n m = self._parse_msg(parse_bytes[:i])\n # Remove parsed bytes and delimter\n parse_bytes = parse_bytes[i+2:]\n # Add parsed message, if any\n if m:\n msgs.append(m)\n self.logger.debug('Parsed ASCII frame: address={}, function={}, len={}'.format(m.address, m.function, len(m.data) if m.data else 0))\n #else - warn?\n i = parse_bytes.find('\\r\\n')\n # Store any remaining bytes for the next pass\n self.remainder = parse_bytes\n return msgs",
"def decode(data: bytes) -> Iterable:\r\n decoder = Decoder(data)\r\n return decoder.decode()",
"def _decode1(self, body, data):\r\n if \" \" in body:\r\n evtype,body = body.split(\" \",1)\r\n else:\r\n evtype,body = body,\"\"\r\n evtype = evtype.upper()\r\n if evtype == \"CIRC\":\r\n m = re.match(r\"(\\d+)\\s+(\\S+)(\\s\\S+)?(\\s\\S+)?(\\s\\S+)?(\\s\\S+)?\", body)\r\n if not m:\r\n raise ProtocolError(\"CIRC event misformatted.\")\r\n ident,status,path,purpose,reason,remote = m.groups()\r\n ident = int(ident)\r\n if path:\r\n if \"PURPOSE=\" in path:\r\n remote = reason\r\n reason = purpose\r\n purpose=path\r\n path=[]\r\n elif \"REASON=\" in path:\r\n remote = reason\r\n reason = path\r\n purpose = \"\"\r\n path=[]\r\n else:\r\n path_verb = path.strip().split(\",\")\r\n path = []\r\n for p in path_verb:\r\n path.append(p.replace(\"~\", \"=\").split(\"=\")[0])\r\n else:\r\n path = []\r\n\r\n if purpose and \"REASON=\" in purpose:\r\n remote=reason\r\n reason=purpose\r\n purpose=\"\"\r\n\r\n if purpose: purpose = purpose[9:]\r\n if reason: reason = reason[8:]\r\n if remote: remote = remote[15:]\r\n event = CircuitEvent(evtype, ident, status, path, purpose, reason,\r\n remote, body)\r\n elif evtype == \"STREAM\":\r\n #plog(\"DEBUG\", \"STREAM: \"+body)\r\n m = re.match(r\"(\\S+)\\s+(\\S+)\\s+(\\S+)\\s+(\\S+)?:(\\d+)(\\sREASON=\\S+)?(\\sREMOTE_REASON=\\S+)?(\\sSOURCE=\\S+)?(\\sSOURCE_ADDR=\\S+)?(\\s+PURPOSE=\\S+)?\", body)\r\n if not m:\r\n raise ProtocolError(\"STREAM event misformatted.\")\r\n ident,status,circ,target_host,target_port,reason,remote,source,source_addr,purpose = m.groups()\r\n ident,circ = map(int, (ident,circ))\r\n if not target_host: # This can happen on SOCKS_PROTOCOL failures\r\n target_host = \"(none)\"\r\n if reason: reason = reason[8:]\r\n if remote: remote = remote[15:]\r\n if source: source = source[8:]\r\n if source_addr: source_addr = source_addr[13:]\r\n if purpose:\r\n purpose = purpose.lstrip()\r\n purpose = purpose[8:]\r\n event = StreamEvent(evtype, ident, status, circ, target_host,\r\n int(target_port), reason, remote, source, source_addr,\r\n purpose, body)\r\n elif evtype == \"ORCONN\":\r\n m = re.match(r\"(\\S+)\\s+(\\S+)(\\sAGE=\\S+)?(\\sREAD=\\S+)?(\\sWRITTEN=\\S+)?(\\sREASON=\\S+)?(\\sNCIRCS=\\S+)?\", body)\r\n if not m:\r\n raise ProtocolError(\"ORCONN event misformatted.\")\r\n target, status, age, read, wrote, reason, ncircs = m.groups()\r\n\r\n #plog(\"DEBUG\", \"ORCONN: \"+body)\r\n if ncircs: ncircs = int(ncircs[8:])\r\n else: ncircs = 0\r\n if reason: reason = reason[8:]\r\n if age: age = int(age[5:])\r\n else: age = 0\r\n if read: read = int(read[6:])\r\n else: read = 0\r\n if wrote: wrote = int(wrote[9:])\r\n else: wrote = 0\r\n event = ORConnEvent(evtype, status, target, age, read, wrote,\r\n reason, ncircs, body)\r\n elif evtype == \"STREAM_BW\":\r\n m = re.match(r\"(\\d+)\\s+(\\d+)\\s+(\\d+)\", body)\r\n if not m:\r\n raise ProtocolError(\"STREAM_BW event misformatted.\")\r\n event = StreamBwEvent(evtype, body, *m.groups())\r\n elif evtype == \"BW\":\r\n m = re.match(r\"(\\d+)\\s+(\\d+)\", body)\r\n if not m:\r\n raise ProtocolError(\"BANDWIDTH event misformatted.\")\r\n read, written = map(long, m.groups())\r\n event = BWEvent(evtype, read, written, body)\r\n elif evtype in (\"DEBUG\", \"INFO\", \"NOTICE\", \"WARN\", \"ERR\"):\r\n event = LogEvent(evtype, body)\r\n elif evtype == \"NEWDESC\":\r\n ids_verb = body.split(\" \")\r\n ids = []\r\n for i in ids_verb:\r\n ids.append(i.replace(\"~\", \"=\").split(\"=\")[0].replace(\"$\",\"\"))\r\n event = NewDescEvent(evtype, ids, body)\r\n elif evtype == \"ADDRMAP\":\r\n # TODO: Also parse 
errors and GMTExpiry\r\n m = re.match(r'(\\S+)\\s+(\\S+)\\s+(\\\"[^\"]+\\\"|\\w+)', body)\r\n if not m:\r\n raise ProtocolError(\"ADDRMAP event misformatted.\")\r\n fromaddr, toaddr, when = m.groups()\r\n if when.upper() == \"NEVER\": \r\n when = None\r\n else:\r\n when = time.strptime(when[1:-1], \"%Y-%m-%d %H:%M:%S\")\r\n event = AddrMapEvent(evtype, fromaddr, toaddr, when, body)\r\n elif evtype == \"NS\":\r\n event = NetworkStatusEvent(evtype, parse_ns_body(data), data)\r\n elif evtype == \"NEWCONSENSUS\":\r\n event = NewConsensusEvent(evtype, parse_ns_body(data), data)\r\n elif evtype == \"BUILDTIMEOUT_SET\":\r\n m = re.match(\r\n r\"(\\S+)\\sTOTAL_TIMES=(\\d+)\\sTIMEOUT_MS=(\\d+)\\sXM=(\\d+)\\sALPHA=(\\S+)\\sCUTOFF_QUANTILE=(\\S+)\",\r\n body)\r\n set_type, total_times, timeout_ms, xm, alpha, quantile = m.groups()\r\n event = BuildTimeoutSetEvent(evtype, set_type, int(total_times),\r\n int(timeout_ms), int(xm), float(alpha),\r\n float(quantile), body)\r\n elif evtype == \"GUARD\":\r\n m = re.match(r\"(\\S+)\\s(\\S+)\\s(\\S+)\", body)\r\n entry, guard, status = m.groups()\r\n event = GuardEvent(evtype, entry, guard, status, body)\r\n elif evtype == \"TORCTL_TIMER\":\r\n event = TimerEvent(evtype, data)\r\n else:\r\n event = UnknownEvent(evtype, body)\r\n\r\n return event",
"def iter_unpack(raw):\n return struct.iter_unpack(EVENT_FORMAT, raw)",
"def get_messages(self):\n\t\tcontents = self.archive.read_file('replay.message.events')\n\t\treturn self.protocol.decode_replay_message_events(contents)",
"def chunks(raw):\n for i in range(0, len(raw), EVENT_SIZE):\n yield struct.unpack(EVENT_FORMAT, raw[i:i+EVENT_SIZE])",
"def decode(self, s):",
"def decode(self, s):",
"def messaging_events(payload):\n data = json.loads(payload)\n messaging_events = data[\"entry\"][0][\"messaging\"]\n for event in messaging_events:\n if \"message\" in event and \"text\" in event[\"message\"]:\n yield event[\"sender\"][\"id\"], event[\"message\"][\"text\"].encode('unicode_escape')\n else:\n yield event[\"sender\"][\"id\"], \"rez can't parse this\"",
"def load(f):\n while True:\n c = f.read(1)\n if len(c) == 1:\n msg_len = _read_int(f, already_read=c)\n msg_str = f.read(msg_len)\n if len(msg_str) < msg_len:\n raise ValueError(\"Unexpected EOF while parsing message\")\n yield javascript.loads(msg_str.decode())\n else:\n break",
"def _parse_msg(self, msg):\n try:\n self.received_msg += msg.decode()\n except:\n self.log.warning(\"invalid parse frame '%s'\" % msg)\n\n while True:\n pos = self.received_msg.find('\\r')\n if pos == -1: # no full msg\n break\n m = self.received_msg[:pos].strip()\n if not len(m):\n break\n self.platform.process_received_message(m)\n self.received_msg = self.received_msg[pos + 1:]",
"def decode(self, s):\r\n (tsec, tfrac, self.eventType, self.eventCode,\r\n self.eventValue) = struct.unpack(Format.Event, s)\r\n\r\n self.time = tsec + tfrac / 1000000.0",
"def time_decode(self):\n for ii in range(100):\n msg = DIMSEMessage()\n for fragment in self.fragments:\n msg.decode_msg(fragment)",
"def receive_bytes(self, bytes):\n self.client.reader.feed_data(bytes)",
"def parse_bytes_stream_from_message(msg: bytes,\n length_bytes: int,\n code_bytes: int\n ) -> Dict:\n\n code = int.from_bytes(msg[length_bytes:\n length_bytes + code_bytes],\n byteorder)\n data = msg[length_bytes + code_bytes:]\n\n return {\"code\": code,\n \"data\": data}",
"def decode_payload(self, bytes):\n packets = []\n while bytes:\n if six.byte2int(bytes[0:1]) <= 1:\n packet_len = 0\n i = 1\n while six.byte2int(bytes[i:i + 1]) != 255:\n packet_len = packet_len * 10 + six.byte2int(bytes[i:i + 1])\n i += 1\n packet_start = i+1\n else:\n bytes = bytes.decode('utf-8')\n i = bytes.find(b':')\n if i == -1:\n raise ValueError('Invalid payload')\n packet_len = int(bytes[0:i])\n packet_start = i+1\n\n packet = self.decode_packet(bytes[packet_start:packet_start+packet_len])\n packets.append(packet)\n bytes = bytes[packet_start+packet_len:]\n\n return packets",
"def parse_event_elements(bv: binaryninja.binaryview.BinaryView, stream: Stream) -> List[Event]:\n number_of_event = stream.read_u32()\n stream.read(4) # padding\n\n events = []\n for i in range(0, number_of_event):\n event_id = stream.read_u16()\n version = stream.read_u8()\n channel = stream.read_u8()\n level = stream.read_u8()\n opcode = stream.read_u8()\n task = stream.read_u16()\n keywords = stream.read_u64()\n message_identifier = stream.read_u32()\n template_offset = stream.read_u32()\n opcode_offset = stream.read_u32()\n level_offset = stream.read_u32()\n task_offset = stream.read_u32()\n stream.read(12)\n events.append(Event(bv, event_id, version, channel, level, opcode, task, keywords))\n\n return events",
"def decode_stream(self):\n io = self.io\n result = None\n\n while True:\n opcode = io.read(1)\n if not opcode:\n break\n else:\n opcode = ord(opcode)\n\n klass = MicroOpDecoder.opcode_to_class.get(opcode)\n yield klass.decode(io)",
"def messaging_events(payload):\n data = json.loads(payload)\n message = data[\"entry\"][0][\"messaging\"]\n for event in message:\n if \"message\" in event and \"text\" in event[\"message\"]:\n # if message in event and text in message set id and text\n sender_id = event[\"sender\"][\"id\"]\n text = event[\"message\"][\"text\"]\n quick_reply_payload = None\n\n if \"quick_reply\" in event[\"message\"]:\n # if quick_reply i message set payload\n quick_reply_payload = event[\"message\"][\"quick_reply\"][\"payload\"]\n yield sender_id, text, quick_reply_payload\n else:\n yield event[\"sender\"][\"id\"], \"I can't echo this\", None",
"def _read_message(self):\n msg = ''.join(self.received_data)\n self.l.debug('msg = %s', msg)\n try:\n cr = CollectorResponse()\n cr.ParseFromString(msg)\n s_resp = text_format.MessageToString(cr, as_one_line=True)\n self.l.debug('Received Response: %s' % s_resp)\n if self.json_file != None:\n json_str = json_format.MessageToJson(cr, including_default_value_fields=True)\n json_obj = json.loads(json_str)\n json_obj['utctime'] = str(datetime.datetime.utcnow())\n json.dump(json_obj, self.json_file)\n self.json_file.write('\\n')\n #self.json_file.write('%s\\n'%(json_format.MessageToJson(cr, including_default_value_fields=True)))\n print(json.dumps(json_obj))\n except Exception as e:\n self.l.exception('Failed to convert CollectorResponse') \n self.set_terminator(4)\n self.process_data = self._read_length\n self.received_data = []",
"def _decode_text(self):\n\n print(f\"Hex decode; received message is {self.message}\")\n return bytes.fromhex(self.message).decode('utf-8')",
"def decode(data): #@NoSelf",
"def _decode(self, message):\n raise NotImplementedError(\"_decode needs to be implemented in {} subclass\".format(type(self).__name__))",
"def unpack(self, s):\n\n raise NotImplementedError()",
"def read_message(self):\n\n while True:\n try:\n return sirf.from_bytes(self._read_binary_sirf_msg())\n except sirf.UnrecognizedMessageException:\n pass",
"def decode_message(self, message):\r\n\r\n\t\tprint(\"Decoding message '{}'\".format(message))\r\n\r\n\t\tmessage_split = message[1:-1].split('||')\r\n\r\n\t\tif len(message_split) > 1: # Several messages are queued\r\n\t\t\tfor m in message_split:\r\n\t\t\t\tself.decode_message('|' + m + '|')\r\n\t\t\treturn\r\n\t\telse:\r\n\t\t\tmessage = message_split[0]\r\n\r\n\t\tmessage_split = message.split('|')\r\n\r\n\t\tif message_split[0] == 'LA':\r\n\r\n\t\t\tlist_bars = message_split[1].split(',')\r\n\t\t\tself.send_bar_names.emit(list_bars) # Sending the list to the UI\r\n\r\n\t\telif message_split[0] == 'ME':\r\n\r\n\t\t\tprint(\"New message received : '{}'\".format(message))\r\n\r\n\t\t\tif len(message_split) == 3: # Author was found\r\n\t\t\t\tinfos = (message_split[2], message_split[1])\r\n\t\t\telif len(message_split) == 2: # No author\r\n\t\t\t\tinfos = (message_split[1],)\r\n\t\t\ttry:\r\n\t\t\t\tself.message_received.emit(infos)\r\n\t\t\texcept UnboundLocalError:\r\n\t\t\t\tself._window.open_dialog(\"Message de chat incompréhensible\",\r\n\t\t\t\t\t\t\t\t\t\t \"Le message de chat suivant n'a pas pu être décodé : {}\".format(message),\r\n\t\t\t\t\t\t\t\t\t\t type=\"warning\")\r\n\r\n\t\telif message_split[0] == 'LO': # Message is '|LO|' so just ignoring it\r\n\r\n\t\t\tself.name_set.emit() # Warning the UI about the name being set\r\n\r\n\t\telif message_split[0] == \"CH\":\r\n\r\n\t\t\tpass\r\n\t\t\r\n\t\telif message_split[0] == 'UR':\r\n\r\n\t\t\tprint(\"New message received : '{}'\".format(message))\r\n\r\n\t\t\tif len(message_split) == 3: # Author was found\r\n\t\t\t\tinfos = (message_split[2], message_split[1])\r\n\t\t\telif len(message_split) == 2: # No author\r\n\t\t\t\tinfos = (message_split[1],)\r\n\t\t\ttry:\r\n\t\t\t\tself.urgent_message_received.emit(infos)\r\n\t\t\texcept UnboundLocalError:\r\n\t\t\t\tself._window.open_dialog(\"Message de chat incompréhensible\",\r\n\t\t\t\t\t\t\t\t\t\t \"Le message de chat suivant n'a pas pu être décodé : {}\".format(message),\r\n\t\t\t\t\t\t\t\t\t\t type=\"warning\")\r\n\t\t\t\r\n\t\telif message_split[0] == \"LE\": # Getting the list of products\r\n\r\n\t\t\tif message_split[1]:\r\n\t\t\t\ttuples = message_split[1].split(',')\r\n\t\t\t\tfor t in tuples:\r\n\t\t\t\t\ti, f = t.split(':')\r\n\t\t\t\t\tself.__food[int(i)] = f\r\n\r\n\t\telif message_split[0] == \"RS\": # A new order for Restal\r\n\r\n\t\t\ttry:\r\n\t\t\t\tfood = self.__food[int(message_split[2])]\r\n\t\t\texcept KeyError:\r\n\t\t\t\tfood = \"Inconnue\"\r\n\t\t\t\tprint(\"Unable to get the name of food '{}'\".format(message_split[2]))\r\n\t\t\tprint(message_split[1],message_split[3],message_split[2])\r\n\t\t\tself.add_order.emit(message_split[1], food, int(message_split[3]))\r\n\r\n\t\telse:\r\n\t\t\tself._window.open_dialog(\"Message du serveur incompréhensible\",\r\n\t\t\t\t\t\t\t\t\t \"Le message suivant n'a pas pu être décodé : {}\".format(message), type=\"warning\")\r\n\t\t\tprint(\"Error : message '{}' could not be decoded\".format(message))",
"def parse(self):\n i = 1\n times = []\n while 1:\n byte = yield\n if byte== 0xaa:\n byte = yield # This byte should be \"\\aa\" too\n if byte== 0xaa:\n # packet synced by 0xaa 0xaa\n packet_length = yield\n packet_code = yield\n if packet_code == 0xd4:\n # standing by\n self.state = \"standby\"\n elif packet_code == 0xd0:\n self.state = \"connected\"\n elif packet_code == 0xd2:\n data_len = yield\n headset_id = yield\n headset_id += yield\n self.dongle_state = \"disconnected\"\n else:\n self.sending_data = True\n left = packet_length - 2\n while left>0:\n if packet_code ==0x80: # raw value\n row_length = yield\n a = yield\n b = yield\n value = struct.unpack(\"<h\",chr(b)+chr(a))[0]\n self.dispatch_data(\"raw\", value)\n left -= 2\n elif packet_code == 0x02: # Poor signal\n a = yield\n\n left -= 1\n elif packet_code == 0x04: # Attention (eSense)\n a = yield\n if a>0:\n v = struct.unpack(\"b\",chr(a))[0]\n if 0 < v <= 100:\n self.dispatch_data(\"attention\", v)\n left-=1\n elif packet_code == 0x05: # Meditation (eSense)\n a = yield\n if a>0:\n v = struct.unpack(\"b\",chr(a))[0]\n if 0 < v <= 100:\n self.dispatch_data(\"meditation\", v)\n left-=1\n elif packet_code == 0x16: # Blink Strength\n self.current_blink_strength = yield\n \n left-=1\n elif packet_code == 0x83:\n vlength = yield\n self.current_vector = []\n for row in range(8):\n a = yield\n b = yield\n c = yield\n value = a*255*255+b*255+c\n left -= vlength\n self.dispatch_data(\"bands\", self.current_vector)\n packet_code = yield\n else:\n pass # sync failed\n else:\n pass # sync failed",
"def decode_message(self, raw):\n return raw.decode('utf-8')"
] | [
"0.6809163",
"0.6627737",
"0.65401894",
"0.61294484",
"0.6119063",
"0.60791254",
"0.6045092",
"0.60242575",
"0.6023794",
"0.6023794",
"0.58951175",
"0.58344936",
"0.5778005",
"0.5745545",
"0.571627",
"0.57093483",
"0.57050645",
"0.5697271",
"0.5670528",
"0.56580216",
"0.5656734",
"0.56545204",
"0.56460965",
"0.5629065",
"0.56099993",
"0.5568512",
"0.5522078",
"0.5507449",
"0.54773897",
"0.54639155"
] | 0.78489214 | 0 |
Decodes and yields each tracker event from the contents byte string. | def decode_replay_tracker_events(contents):
decoder = VersionedDecoder(contents, typeinfos)
for event in _decode_event_stream(decoder,
tracker_eventid_typeid,
tracker_event_types,
decode_user_id=False):
yield event | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def decode_replay_message_events(contents):\n decoder = BitPackedDecoder(contents, typeinfos)\n for event in _decode_event_stream(decoder,\n message_eventid_typeid,\n message_event_types,\n decode_user_id=True):\n yield event",
"def decode_replay_game_events(contents):\n decoder = BitPackedDecoder(contents, typeinfos)\n for event in _decode_event_stream(decoder,\n game_eventid_typeid,\n game_event_types,\n decode_user_id=True):\n yield event",
"def iter_unpack(raw):\n return struct.iter_unpack(EVENT_FORMAT, raw)",
"def decode(self, s):\r\n (tsec, tfrac, self.eventType, self.eventCode,\r\n self.eventValue) = struct.unpack(Format.Event, s)\r\n\r\n self.time = tsec + tfrac / 1000000.0",
"def chunks(raw):\n for i in range(0, len(raw), EVENT_SIZE):\n yield struct.unpack(EVENT_FORMAT, raw[i:i+EVENT_SIZE])",
"def decode(self, s):",
"def decode(self, s):",
"def decode(data: bytes) -> Iterable:\r\n decoder = Decoder(data)\r\n return decoder.decode()",
"def stream(cls, fd):\n\n #\n # advance until the title appears\n # \"Adapter: 0 - Number of Events : 9987\"\n #\n\n sd = {\n 'seqnum': None,\n 'sslr': None,\n 'time': None,\n 'code': None,\n 'level': None,\n 'locale': None,\n 'description': None,\n 'linestack': [],\n }\n def emit():\n assert sd['linestack'][0] == '==========='\n sd['linestack'].pop(0)\n event_data = '\\n'.join(sd['linestack']).strip()\n return cls(\n id=sd['seqnum'],\n code=sd['code'],\n level=sd['level'],\n locale=sd['locale'],\n description=sd['description'],\n data=event_data,\n sslr=sd['sslr'],\n time=sd['time'],\n )\n def reset():\n sd['sslr'] = None\n sd['time'] = None\n sd['linestack'] = []\n\n emit_count = 0\n for line in fd:\n\n if line.startswith('seqNum:'):\n match = re.match(r\"seqNum:\\W*0x([0-9a-f]+)\", line)\n if sd['seqnum']:\n yield emit()\n emit_count += 1\n reset()\n seqnum_hex, = match.groups()\n sd['seqnum'] = int(seqnum_hex, 16)\n elif line.startswith('Time:'):\n _, timestr = megasplit(line)\n sd['time'] = decode_event_time(timestr)\n elif line.startswith('Seconds since last reboot:'):\n match = re.match(r\"Seconds since last reboot:\\W*([0-9]+)\", line)\n sd['sslr'], = match.groups()\n elif line.startswith('Code:'):\n match = re.match(r\"Code:\\W*0x([0-9a-f]+)\", line)\n code_hex, = match.groups()\n sd['code'] = int(code_hex, 16)\n elif line.startswith('Locale:'):\n match = re.match(r\"Locale:\\W*0x([0-9a-f]+)\", line)\n locale_hex, = match.groups()\n sd['locale'] = int(locale_hex, 16)\n elif line.startswith('Class:'):\n match = re.match(r\"Class:\\W*([0-9]+)\", line)\n levelstr, = match.groups()\n sd['level'] = int(levelstr)\n elif line.startswith('Event Description:'):\n _, sd['description'] = megasplit(line)\n elif line.startswith('Event Data:'):\n sd['linestack'] = []\n else:\n sd['linestack'].append(line.strip())\n\n #endfor streamlines\n if sd['seqnum']:\n yield emit()\n emit_count += 1\n\n \"\"\"\n if emit_count != total_events:\n raise Exception(\"input stream indicated %d events, but %d events were detected\" % (total_events, emit_count))\n \"\"\"",
"def _decode1(self, body, data):\r\n if \" \" in body:\r\n evtype,body = body.split(\" \",1)\r\n else:\r\n evtype,body = body,\"\"\r\n evtype = evtype.upper()\r\n if evtype == \"CIRC\":\r\n m = re.match(r\"(\\d+)\\s+(\\S+)(\\s\\S+)?(\\s\\S+)?(\\s\\S+)?(\\s\\S+)?\", body)\r\n if not m:\r\n raise ProtocolError(\"CIRC event misformatted.\")\r\n ident,status,path,purpose,reason,remote = m.groups()\r\n ident = int(ident)\r\n if path:\r\n if \"PURPOSE=\" in path:\r\n remote = reason\r\n reason = purpose\r\n purpose=path\r\n path=[]\r\n elif \"REASON=\" in path:\r\n remote = reason\r\n reason = path\r\n purpose = \"\"\r\n path=[]\r\n else:\r\n path_verb = path.strip().split(\",\")\r\n path = []\r\n for p in path_verb:\r\n path.append(p.replace(\"~\", \"=\").split(\"=\")[0])\r\n else:\r\n path = []\r\n\r\n if purpose and \"REASON=\" in purpose:\r\n remote=reason\r\n reason=purpose\r\n purpose=\"\"\r\n\r\n if purpose: purpose = purpose[9:]\r\n if reason: reason = reason[8:]\r\n if remote: remote = remote[15:]\r\n event = CircuitEvent(evtype, ident, status, path, purpose, reason,\r\n remote, body)\r\n elif evtype == \"STREAM\":\r\n #plog(\"DEBUG\", \"STREAM: \"+body)\r\n m = re.match(r\"(\\S+)\\s+(\\S+)\\s+(\\S+)\\s+(\\S+)?:(\\d+)(\\sREASON=\\S+)?(\\sREMOTE_REASON=\\S+)?(\\sSOURCE=\\S+)?(\\sSOURCE_ADDR=\\S+)?(\\s+PURPOSE=\\S+)?\", body)\r\n if not m:\r\n raise ProtocolError(\"STREAM event misformatted.\")\r\n ident,status,circ,target_host,target_port,reason,remote,source,source_addr,purpose = m.groups()\r\n ident,circ = map(int, (ident,circ))\r\n if not target_host: # This can happen on SOCKS_PROTOCOL failures\r\n target_host = \"(none)\"\r\n if reason: reason = reason[8:]\r\n if remote: remote = remote[15:]\r\n if source: source = source[8:]\r\n if source_addr: source_addr = source_addr[13:]\r\n if purpose:\r\n purpose = purpose.lstrip()\r\n purpose = purpose[8:]\r\n event = StreamEvent(evtype, ident, status, circ, target_host,\r\n int(target_port), reason, remote, source, source_addr,\r\n purpose, body)\r\n elif evtype == \"ORCONN\":\r\n m = re.match(r\"(\\S+)\\s+(\\S+)(\\sAGE=\\S+)?(\\sREAD=\\S+)?(\\sWRITTEN=\\S+)?(\\sREASON=\\S+)?(\\sNCIRCS=\\S+)?\", body)\r\n if not m:\r\n raise ProtocolError(\"ORCONN event misformatted.\")\r\n target, status, age, read, wrote, reason, ncircs = m.groups()\r\n\r\n #plog(\"DEBUG\", \"ORCONN: \"+body)\r\n if ncircs: ncircs = int(ncircs[8:])\r\n else: ncircs = 0\r\n if reason: reason = reason[8:]\r\n if age: age = int(age[5:])\r\n else: age = 0\r\n if read: read = int(read[6:])\r\n else: read = 0\r\n if wrote: wrote = int(wrote[9:])\r\n else: wrote = 0\r\n event = ORConnEvent(evtype, status, target, age, read, wrote,\r\n reason, ncircs, body)\r\n elif evtype == \"STREAM_BW\":\r\n m = re.match(r\"(\\d+)\\s+(\\d+)\\s+(\\d+)\", body)\r\n if not m:\r\n raise ProtocolError(\"STREAM_BW event misformatted.\")\r\n event = StreamBwEvent(evtype, body, *m.groups())\r\n elif evtype == \"BW\":\r\n m = re.match(r\"(\\d+)\\s+(\\d+)\", body)\r\n if not m:\r\n raise ProtocolError(\"BANDWIDTH event misformatted.\")\r\n read, written = map(long, m.groups())\r\n event = BWEvent(evtype, read, written, body)\r\n elif evtype in (\"DEBUG\", \"INFO\", \"NOTICE\", \"WARN\", \"ERR\"):\r\n event = LogEvent(evtype, body)\r\n elif evtype == \"NEWDESC\":\r\n ids_verb = body.split(\" \")\r\n ids = []\r\n for i in ids_verb:\r\n ids.append(i.replace(\"~\", \"=\").split(\"=\")[0].replace(\"$\",\"\"))\r\n event = NewDescEvent(evtype, ids, body)\r\n elif evtype == \"ADDRMAP\":\r\n # TODO: Also parse 
errors and GMTExpiry\r\n m = re.match(r'(\\S+)\\s+(\\S+)\\s+(\\\"[^\"]+\\\"|\\w+)', body)\r\n if not m:\r\n raise ProtocolError(\"ADDRMAP event misformatted.\")\r\n fromaddr, toaddr, when = m.groups()\r\n if when.upper() == \"NEVER\": \r\n when = None\r\n else:\r\n when = time.strptime(when[1:-1], \"%Y-%m-%d %H:%M:%S\")\r\n event = AddrMapEvent(evtype, fromaddr, toaddr, when, body)\r\n elif evtype == \"NS\":\r\n event = NetworkStatusEvent(evtype, parse_ns_body(data), data)\r\n elif evtype == \"NEWCONSENSUS\":\r\n event = NewConsensusEvent(evtype, parse_ns_body(data), data)\r\n elif evtype == \"BUILDTIMEOUT_SET\":\r\n m = re.match(\r\n r\"(\\S+)\\sTOTAL_TIMES=(\\d+)\\sTIMEOUT_MS=(\\d+)\\sXM=(\\d+)\\sALPHA=(\\S+)\\sCUTOFF_QUANTILE=(\\S+)\",\r\n body)\r\n set_type, total_times, timeout_ms, xm, alpha, quantile = m.groups()\r\n event = BuildTimeoutSetEvent(evtype, set_type, int(total_times),\r\n int(timeout_ms), int(xm), float(alpha),\r\n float(quantile), body)\r\n elif evtype == \"GUARD\":\r\n m = re.match(r\"(\\S+)\\s(\\S+)\\s(\\S+)\", body)\r\n entry, guard, status = m.groups()\r\n event = GuardEvent(evtype, entry, guard, status, body)\r\n elif evtype == \"TORCTL_TIMER\":\r\n event = TimerEvent(evtype, data)\r\n else:\r\n event = UnknownEvent(evtype, body)\r\n\r\n return event",
"def decode_replay_attributes_events(contents):\n buffer = BitPackedBuffer(contents, 'little')\n attributes = {}\n if not buffer.done():\n attributes['source'] = buffer.read_bits(8)\n attributes['mapNamespace'] = buffer.read_bits(32)\n count = buffer.read_bits(32)\n attributes['scopes'] = {}\n while not buffer.done():\n value = {}\n value['namespace'] = buffer.read_bits(32)\n value['attrid'] = attrid = buffer.read_bits(32)\n scope = buffer.read_bits(8)\n value['value'] = buffer.read_aligned_bytes(4)[::-1].strip(b'\\x00')\n if not scope in attributes['scopes']:\n attributes['scopes'][scope] = {}\n if not attrid in attributes['scopes'][scope]:\n attributes['scopes'][scope][attrid] = []\n attributes['scopes'][scope][attrid].append(value)\n return attributes",
"def decode(data): #@NoSelf",
"def get_messages(self):\n\t\tcontents = self.archive.read_file('replay.message.events')\n\t\treturn self.protocol.decode_replay_message_events(contents)",
"def deserialize(self, str):\n try:\n if self.cnt is None:\n self.cnt = None\n end = 0\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n self.cnt = []\n for i in range(0, length):\n val1 = dgvmsg.msg.Encounter()\n _v4 = val1.header\n start = end\n end += 4\n (_v4.seq,) = _get_struct_I().unpack(str[start:end])\n _v5 = _v4.stamp\n _x = _v5\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n _v4.frame_id = str[start:end].decode('utf-8')\n else:\n _v4.frame_id = str[start:end]\n start = end\n end += 4\n (val1.devadd,) = _get_struct_i().unpack(str[start:end])\n _v6 = val1.now\n _x = _v6\n start = end\n end += 8\n (_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])\n start = end\n end += 4\n (val1.encounter,) = _get_struct_I().unpack(str[start:end])\n self.cnt.append(val1)\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill",
"def msgs_from_bytes(self, b):\n msgs = []\n # User remainder bytes\n parse_bytes = self.remainder + b.decode('ascii')\n # Find the first frame delimiter\n i = parse_bytes.find('\\r\\n')\n while i >= 0:\n # Try to parse a single message\n m = self._parse_msg(parse_bytes[:i])\n # Remove parsed bytes and delimter\n parse_bytes = parse_bytes[i+2:]\n # Add parsed message, if any\n if m:\n msgs.append(m)\n self.logger.debug('Parsed ASCII frame: address={}, function={}, len={}'.format(m.address, m.function, len(m.data) if m.data else 0))\n #else - warn?\n i = parse_bytes.find('\\r\\n')\n # Store any remaining bytes for the next pass\n self.remainder = parse_bytes\n return msgs",
"def test_decode_trace(self):\n self.assertEqual(td.trace(), decoder.decode_trace(BytesIO(td.trace(True))))",
"def time_decode(self):\n for ii in range(100):\n msg = DIMSEMessage()\n for fragment in self.fragments:\n msg.decode_msg(fragment)",
"def parse_event_elements(bv: binaryninja.binaryview.BinaryView, stream: Stream) -> List[Event]:\n number_of_event = stream.read_u32()\n stream.read(4) # padding\n\n events = []\n for i in range(0, number_of_event):\n event_id = stream.read_u16()\n version = stream.read_u8()\n channel = stream.read_u8()\n level = stream.read_u8()\n opcode = stream.read_u8()\n task = stream.read_u16()\n keywords = stream.read_u64()\n message_identifier = stream.read_u32()\n template_offset = stream.read_u32()\n opcode_offset = stream.read_u32()\n level_offset = stream.read_u32()\n task_offset = stream.read_u32()\n stream.read(12)\n events.append(Event(bv, event_id, version, channel, level, opcode, task, keywords))\n\n return events",
"def carve(self, bs, dataFile, verbose=False):\n _bs = bs\n records = []\n headers = []\n\n i = 0\n # Find all occurrences of the magic string\n found = _bs.findall(evt_header.MagicString, bytealigned=False)\n readSoFarBits = 0\n for idx in found:\n _bs.pos = idx\n r = EvtRecord()\n r.setPathname(dataFile)\n r.setPosition(_bs.pos)\n\n # Read an EVT header field:\n # The algorithm here is to find the message separator \n # and use that as a basis for locating the other fields.\n # Since we split large input files, \"offset\" fields are\n # invalid. \n\n # Message length\n fieldBits = 32\n lenIdx = idx - fieldBits # Set position to idx of length\n _bs.pos = lenIdx\n recordLength = _bs.read(fieldBits).uintle\n r.setField(\"length\", recordLength)\n readSoFarBits += fieldBits\n\n # Calculate size of variable data at end of record \n varDataSize = evt_record.FixedSize - recordLength \n # When reading the size in a header\n if varDataSize < 0: \n varDataSize = 0\n\n # Reset stream position\n _bs.pos = idx\n\n # Message separator\n fieldBits = 32 \n # Check to see if we are reading past end of stream\n data = self.carveField(_bs, \"reserved\", \"uint\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"reserved\", data)\n\n # Record number\n fieldBits = 32 \n data = self.carveField(_bs, \"recordNumber\", \"uintle\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"recordNumber\", data)\n\n # Date created\n fieldBits = 32 \n data = self.carveField(_bs, \"timeGenerated\", \"uintle\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"timeGenerated\", data)\n\n # Date written\n fieldBits = 32 \n data = self.carveField(_bs, \"timeWritten\", \"uintle\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"timeWritten\", data)\n\n # Event ID\n fieldBits = 16 \n data = self.carveField(_bs, \"eventID\", \"uintle\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"eventID\", data)\n \n # Event RVA offset\n fieldBits = 16 \n data = self.carveField(_bs, \"eventRVA\", \"uintle\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"eventRVA\", data)\n\n # Event type\n fieldBits = 16 \n data = self.carveField(_bs, \"eventType\", \"uintle\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"eventType\", data)\n\n # Num strings\n fieldBits = 16 \n data = self.carveField(_bs, \"numStrings\", \"uintle\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"numStrings\", data)\n\n # Category\n fieldBits = 16 \n data = self.carveField(_bs, \"eventCategory\", \"uintle\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"eventCategory\", data)\n\n # Reserved flags \n fieldBits = 16 \n data = self.carveField(_bs, \"reservedFlags\", \"uint\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"reservedFlags\", data)\n\n # Closing record number\n fieldBits = 32 \n data = self.carveField(_bs, \"closingRecordNumber\", \"uint\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"closingRecordNumber\", data)\n\n # String offset\n fieldBits = 32 \n data = self.carveField(_bs, \"stringOffset\", \"uint\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"stringOffset\", data)\n\n # User SID length\n fieldBits = 
32\n data = self.carveField(_bs, \"userSidLength\", \"uintle\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"userSidLength\", data)\n\n # User SID offset\n fieldBits = 32 \n data = self.carveField(_bs, \"userSidOffset\", \"uintle\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"userSidOffset\", data)\n\n # Data length\n fieldBits = 32 \n data = self.carveField(_bs, \"dataLength\", \"uintle\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"dataLength\", data)\n\n # Data offset\n fieldBits = 32\n data = self.carveField(_bs, \"dataOffset\", \"uintle\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"dataOffset\", data)\n\n # Variable data\n # FIXME: dont rely on peek() to avoid reading past end of stream\n fieldBits = int(r.getField(\"length\"))\n try:\n data = _bs.peek(\"bytes\" + \":\" + str(fieldBits))\n except bitstring.ReadError:\n if verbose:\n print \"[EVT]: Unable to read EVT data field; \"\\\n \"it would be truncated\"\n break\n data = self.carveField(_bs, \"varData\", \"bytes\",\\\n fieldBits, verbose)\n if data == self.ERROR_END_OF_STREAM:\n break\n r.setField(\"varData\", data)\n\n # SID\n # FIXME: find out why sidLength is so weird\n #sidLength = r.getField(\"userSidLength\")\n #if sidLength > 0:\n # sidOffset = r.getField(\"userSidOffset\")\n # if sidOffset <= _bs.length:\n # _bs.pos = sidOffset\n # fieldBits = sidLength\n # if readSoFarBits + fieldBits >= _bs.len:\n # fieldBits = _bs.len - _bs.pos\n # sid = _bs.read(fieldBits).uint\n # r.setField(\"sid\", sid)\n # break\n # sid = _bs.read(fieldBits).uint\n # r.setField(\"sid\", sid)\n #readSoFarBits += fieldBits\n records.append(r)\n return (headers, records)",
"def events_from_bytes(cls, data, res, frame_num):\n\t\tall_events = [np.zeros(res) for t in range(frame_num - 1)]\n\t\tfor i in range(res[0]):\n\t\t\tfor j in range(res[1]):\n\t\t\t\tevents = cls._pixel_events_from_bytes(data)\n\t\t\t\tfor event in events:\n\t\t\t\t\tall_events[event[1]][i, j] = event[0]\n\n\t\treturn all_events",
"def timestamp_decode(e: Encoding) -> List[int]:\n return _decode(e, Decoder)",
"def _unpack_ies(buf):\n\t\t# each IE starts with an ID and a length\n\t\ties = []\n\t\toff = 0\n\t\tbuflen = len(buf)\n\t\t# logger.debug(\"lazy dissecting: %s\" % buf)\n\n\t\twhile off < buflen:\n\t\t\tie_id = buf[off]\n\t\t\ttry:\n\t\t\t\tparser = IEEE80211.ie_decoder[ie_id]\n\t\t\texcept KeyError:\n\t\t\t\t# some unknown tag, use standard format\n\t\t\t\tparser = IEEE80211.IE\n\n\t\t\tdlen = buf[off + 1]\n\t\t\t# logger.debug(\"IE parser is: %d = %s = %s\" % (ie_id, parser, buf[off: off+2+dlen]))\n\t\t\tie = parser(buf[off: off + 2 + dlen])\n\t\t\ties.append(ie)\n\t\t\toff += 2 + dlen\n\n\t\treturn ies",
"def get_trackers(self):\n\t\tcontents = self.archive.read_file('replay.tracker.events')\n\t\treturn self.protocol.decode_replay_tracker_events(contents)",
"def parse(self, buf):\r\n # Initialize variables\r\n pg_count = 0\r\n\r\n # Call the date finder for current fsevent file\r\n FSEventHandler.find_date(self, buf)\r\n self.valid_record_check = True\r\n\r\n # Iterate through DLS pages found in current fsevent file\r\n for i in self.my_dls:\r\n # Assign current DLS offsets\r\n start_offset = self.my_dls[pg_count]['Start Offset']\r\n end_offset = self.my_dls[pg_count]['End Offset']\r\n\r\n # Extract the raw DLS page from the fsevents file\r\n raw_page = buf[start_offset:end_offset]\r\n\r\n self.page_offset = start_offset\r\n\r\n # Reverse byte stream to match byte order little-endian\r\n m_dls_chk = raw_page[0:4]\r\n # Assign DLS version based off magic header in page\r\n if m_dls_chk == b\"1SLD\":\r\n self.dls_version = 1\r\n elif m_dls_chk == b\"2SLD\":\r\n self.dls_version = 2\r\n else:\r\n self.logfile.write(\"%s: Unknown DLS Version.\" % (self.src_filename))\r\n break\r\n\r\n # Pass the raw page + a start offset to find records within page\r\n FSEventHandler.find_page_records(\r\n self,\r\n raw_page,\r\n start_offset\r\n )\r\n # Increment the DLS page count by 1\r\n pg_count += 1",
"def parse_event(self):\n event_id = self.replay.read_string()\n group = self.replay.read_string()\n metadata = self.replay.read_string()\n start_time = self.replay.read_uint32()\n end_time = self.replay.read_uint32()\n size = self.replay.read_uint32()\n\n buffer = self.decrypt_buffer(size)\n\n if group == EventTypes.PLAYER_ELIMINATION.value:\n try:\n self.parse_elimination_event(buffer, start_time)\n except:\n logger.error(\"Couldnt parse event PLAYER_ELIMINATION\")\n\n if metadata == EventTypes.MATCH_STATS.value:\n self.parse_matchstats_event(buffer)\n\n if metadata == EventTypes.TEAM_STATS.value:\n self.parse_teamstats_event(buffer)",
"def receive_bytes(self, bytes):\n self.client.reader.feed_data(bytes)",
"def parse(self):\n i = 1\n times = []\n while 1:\n byte = yield\n if byte== 0xaa:\n byte = yield # This byte should be \"\\aa\" too\n if byte== 0xaa:\n # packet synced by 0xaa 0xaa\n packet_length = yield\n packet_code = yield\n if packet_code == 0xd4:\n # standing by\n self.state = \"standby\"\n elif packet_code == 0xd0:\n self.state = \"connected\"\n elif packet_code == 0xd2:\n data_len = yield\n headset_id = yield\n headset_id += yield\n self.dongle_state = \"disconnected\"\n else:\n self.sending_data = True\n left = packet_length - 2\n while left>0:\n if packet_code ==0x80: # raw value\n row_length = yield\n a = yield\n b = yield\n value = struct.unpack(\"<h\",chr(b)+chr(a))[0]\n self.dispatch_data(\"raw\", value)\n left -= 2\n elif packet_code == 0x02: # Poor signal\n a = yield\n\n left -= 1\n elif packet_code == 0x04: # Attention (eSense)\n a = yield\n if a>0:\n v = struct.unpack(\"b\",chr(a))[0]\n if 0 < v <= 100:\n self.dispatch_data(\"attention\", v)\n left-=1\n elif packet_code == 0x05: # Meditation (eSense)\n a = yield\n if a>0:\n v = struct.unpack(\"b\",chr(a))[0]\n if 0 < v <= 100:\n self.dispatch_data(\"meditation\", v)\n left-=1\n elif packet_code == 0x16: # Blink Strength\n self.current_blink_strength = yield\n \n left-=1\n elif packet_code == 0x83:\n vlength = yield\n self.current_vector = []\n for row in range(8):\n a = yield\n b = yield\n c = yield\n value = a*255*255+b*255+c\n left -= vlength\n self.dispatch_data(\"bands\", self.current_vector)\n packet_code = yield\n else:\n pass # sync failed\n else:\n pass # sync failed",
"def decode_payload(self, bytes):\n packets = []\n while bytes:\n if six.byte2int(bytes[0:1]) <= 1:\n packet_len = 0\n i = 1\n while six.byte2int(bytes[i:i + 1]) != 255:\n packet_len = packet_len * 10 + six.byte2int(bytes[i:i + 1])\n i += 1\n packet_start = i+1\n else:\n bytes = bytes.decode('utf-8')\n i = bytes.find(b':')\n if i == -1:\n raise ValueError('Invalid payload')\n packet_len = int(bytes[0:i])\n packet_start = i+1\n\n packet = self.decode_packet(bytes[packet_start:packet_start+packet_len])\n packets.append(packet)\n bytes = bytes[packet_start+packet_len:]\n\n return packets",
"def unpack(self, s):\n\n raise NotImplementedError()",
"def _decode(encoding: Encoding, decoder: Decoder) -> List[int]:\n tss = [encoding.initial_timestamp]\n dec = decoder(encoding.initial_timestamp)\n for v in encoding.values:\n tss.append(dec.decode(v))\n return tss"
] | [
"0.7260997",
"0.6863218",
"0.60744035",
"0.59773415",
"0.5972554",
"0.58067507",
"0.58067507",
"0.57750344",
"0.5721968",
"0.5715137",
"0.5576841",
"0.55634767",
"0.55306965",
"0.55270785",
"0.5488471",
"0.54650974",
"0.5433999",
"0.5426289",
"0.5408526",
"0.5392383",
"0.539066",
"0.53622603",
"0.53343755",
"0.53276783",
"0.53214365",
"0.53071254",
"0.5255964",
"0.52360755",
"0.5223233",
"0.5191241"
] | 0.78533643 | 0 |
Decodes and returns the replay header from the contents byte string. | def decode_replay_header(contents):
decoder = VersionedDecoder(contents, typeinfos)
return decoder.instance(replay_header_typeid) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def decode_header(byte_iter):\n try:\n return MMSDecoder.decode_mms_header(byte_iter)\n except wsp_pdu.DecodeError:\n return wsp_pdu.Decoder.decode_header(byte_iter)",
"def get_decoded_header(value):\n decoded_header_items = decode_header(value)\n decoded_header_value = ''\n for item in decoded_header_items:\n try:\n decoded_item = item[0].decode(item[1], 'ignore') if item[1] is not None else item[0]\n except:\n logger.warning(f\"Decoding went wrong for value '{value}'!\")\n # Pretend decoded item is empty :-(\n decoded_item = ''\n if isinstance(decoded_item, bytes):\n decoded_item = decoded_item.decode('ascii', 'ignore')\n decoded_header_value += decoded_item\n return decoded_header_value",
"def decode_replay_details(contents):\n decoder = VersionedDecoder(contents, typeinfos)\n return decoder.instance(game_details_typeid)",
"def unpackRecHeader(self):\n return self.unpack('4s3i',16,'REC_HEAD')",
"def decode_content(raw_content):\n return raw_content",
"def deserialize(self, str):\n try:\n if self.header is None:\n self.header = std_msgs.msg._Header.Header()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n self.header.frame_id = str[start:end]\n _x = self\n start = end\n end += 3\n (_x.gear, _x.front_diff, _x.rear_diff,) = _struct_3B.unpack(str[start:end])\n return self\n except struct.error as e:\n raise roslib.message.DeserializationError(e) #most likely buffer underfill",
"def parse_header(self, size):\n logger.info('parse_header()')\n\n magic = self.replay.read_uint32()\n if (magic != NETWORK_MAGIC):\n raise InvalidReplayException()\n network_version = self.replay.read_uint32()\n network_checksum = self.replay.read_uint32()\n engine_network_version = self.replay.read_uint32()\n game_network_protocol = self.replay.read_uint32()\n\n if network_version > HeaderTypes.HISTORY_HEADER_GUID.value:\n guid = self.replay.read_guid()\n else:\n guid = \"\"\n\n major = self.replay.read_uint16()\n minor = self.replay.read_uint16()\n patch = self.replay.read_uint16()\n changelist = self.replay.read_uint32()\n branch = self.replay.read_string()\n\n levelnames_and_times = self.replay.read_tuple_array(\n self.replay.read_string, self.replay.read_uint32)\n flags = self.replay.read_uint32()\n game_specific_data = self.replay.read_array(self.replay.read_string)\n\n self.header = Header(\n network_version=network_version,\n network_checksum=network_checksum,\n engine_network_version=engine_network_version,\n game_network_protocol=game_network_protocol,\n guid=guid,\n major=major,\n minor=minor,\n patch=patch,\n changelist=changelist,\n branch=branch,\n levelnames_and_times=levelnames_and_times,\n flags=flags,\n game_specific_data=game_specific_data,\n )",
"def decode(cls, raw: bytes) -> \"EthernetHeader\":\n # unsigned char dmac[6];\n # unsigned char smac[6];\n # uint16_t ethertype;\n # unsigned char payload[];\n dmac = raw[:6]\n smac = raw[6:12]\n typ = socket.htons(struct.unpack(\"H\", raw[12:14])[0])\n payload = raw[14:]\n return EthernetHeader(dmac=dmac, smac=smac, typ=typ, payload=payload)",
"def _unserialize_header(self, data, persistent_start):\n name = \"\"\n sbuffer = data\n # Skip characters until a valid message id appears\n while len(sbuffer) >= self.header_size:\n header = sbuffer[:self.header_size]\n if repr(header) in self.messages:\n name = header\n break\n if not persistent_start:\n break\n sbuffer = sbuffer[1:]\n return name, len(data) - len(sbuffer)",
"def decode_replay_initdata(contents):\n decoder = BitPackedDecoder(contents, typeinfos)\n return decoder.instance(replay_initdata_typeid)",
"def decode_replay(replay_file_obj):\n decoder = zstd.ZstdDecompressor()\n # Rewind to the beginning of the file obj, because\n # gcloud might have read it first\n replay_file_obj.seek(0)\n replay_data = replay_file_obj.read()\n try:\n decoded_data = decoder.decompress(replay_data)\n json_data = json.loads(decoded_data.decode('utf-8').strip())\n return json_data\n except zstd.ZstdError:\n # The replay file can't be decoded.\n return None\n finally:\n # Seek the replay file back to start so we can upload it.\n replay_file_obj.seek(0)",
"def _decode_header(self, buf):\n ord_data = self._decode_vint(buf)\n f_type = ord_data & 7\n f_id = ord_data >> 3\n return f_type, f_id",
"def deserialize(self, str):\n try:\n if self.header is None:\n self.header = std_msgs.msg.Header()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.header.frame_id = str[start:end].decode('utf-8')\n else:\n self.header.frame_id = str[start:end]\n _x = self\n start = end\n end += 56\n (_x.command, _x.set_num, _x.paraset_byte54, _x.paraset_byte53, _x.paraset_byte52, _x.paraset_byte51, _x.paraset_byte50, _x.paraset_byte49, _x.paraset_byte48, _x.paraset_byte47, _x.paraset_byte46, _x.paraset_byte45, _x.paraset_byte44, _x.paraset_byte43, _x.paraset_byte42, _x.paraset_byte41, _x.paraset_byte40, _x.paraset_byte39, _x.paraset_byte38, _x.paraset_byte37, _x.paraset_byte36, _x.paraset_byte35, _x.paraset_byte34, _x.paraset_byte33, _x.paraset_byte32, _x.paraset_byte31, _x.paraset_byte30, _x.paraset_byte29, _x.paraset_byte28, _x.paraset_byte27, _x.paraset_byte26, _x.paraset_byte25, _x.paraset_byte24, _x.paraset_byte23, _x.paraset_byte22, _x.paraset_byte21, _x.paraset_byte20, _x.paraset_byte19, _x.paraset_byte18, _x.paraset_byte17, _x.paraset_byte16, _x.paraset_byte15, _x.paraset_byte14, _x.paraset_byte13, _x.paraset_byte12, _x.paraset_byte11, _x.paraset_byte10, _x.paraset_byte9, _x.paraset_byte8, _x.paraset_byte7, _x.paraset_byte6, _x.paraset_byte5, _x.paraset_byte4, _x.paraset_byte3, _x.paraset_byte2, _x.paraset_byte1,) = _get_struct_56B().unpack(str[start:end])\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill",
"def recv_frame(self):\r\n header_bytes = self._recv_strict(2)\r\n if not header_bytes:\r\n return None\r\n b1 = ord(header_bytes[0])\r\n fin = b1 >> 7 & 1\r\n rsv1 = b1 >> 6 & 1\r\n rsv2 = b1 >> 5 & 1\r\n rsv3 = b1 >> 4 & 1\r\n opcode = b1 & 0xf\r\n b2 = ord(header_bytes[1])\r\n mask = b2 >> 7 & 1\r\n length = b2 & 0x7f\r\n\r\n length_data = \"\"\r\n if length == 0x7e:\r\n length_data = self._recv_strict(2)\r\n length = struct.unpack(\"!H\", length_data)[0]\r\n elif length == 0x7f:\r\n length_data = self._recv_strict(8)\r\n length = struct.unpack(\"!Q\", length_data)[0]\r\n\r\n mask_key = \"\"\r\n if mask:\r\n mask_key = self._recv_strict(4)\r\n data = self._recv_strict(length)\r\n if traceEnabled:\r\n recieved = header_bytes + length_data + mask_key + data\r\n logger.debug(\"recv: \" + repr(recieved))\r\n\r\n if mask:\r\n data = ABNF.mask(mask_key, data)\r\n\r\n frame = ABNF(fin, rsv1, rsv2, rsv3, opcode, mask, data)\r\n return frame",
"def _unpack(self, headerBytes):\n pass",
"def decode(self, s):",
"def decode(self, s):",
"def pre_dissect(self, s):\n # We commit the pending read state if it has been triggered.\n if self.tls_session.triggered_prcs_commit:\n if self.tls_session.prcs is not None:\n self.tls_session.rcs = self.tls_session.prcs\n self.tls_session.prcs = None\n self.tls_session.triggered_prcs_commit = False\n if len(s) < 5:\n raise Exception(\"Invalid record: header is too short.\")\n\n self.type = orb(s[0])\n if (isinstance(self.tls_session.rcs.cipher, Cipher_NULL) or\n self.type == 0x14):\n self.deciphered_len = None\n return s\n else:\n msglen = struct.unpack('!H', s[3:5])[0]\n hdr, efrag, r = s[:5], s[5:5 + msglen], s[msglen + 5:]\n frag, auth_tag = self._tls_auth_decrypt(efrag)\n self.deciphered_len = len(frag)\n return hdr + frag + auth_tag + r",
"def decode_message_header(self):\n data_iter = PreviewIterator(self._mms_data)\n\n # First 3 headers (in order\n ############################\n # - X-Mms-Message-Type\n # - X-Mms-Transaction-ID\n # - X-Mms-Version\n # TODO: reimplement strictness - currently we allow these 3 headers\n # to be mixed with any of the other headers (this allows the\n # decoding of \"broken\" MMSs, but is technically incorrect)\n\n # Misc headers\n ##############\n # The next few headers will not be in a specific order, except for\n # \"Content-Type\", which should be the last header\n # According to [4], MMS header field names will be short integers\n content_type_found = False\n header = ''\n while content_type_found == False:\n try:\n header, value = self.decode_header(data_iter)\n except StopIteration:\n break\n\n if header == mms_field_names[0x04][0]:\n content_type_found = True\n else:\n self._mms_message.headers[header] = value\n\n if header == 'Content-Type':\n # Otherwise it might break Content-Location\n # content_type, params = value\n self._mms_message.headers[header] = value\n\n return data_iter",
"def deserialize(self, str):\n try:\n if self.header is None:\n self.header = std_msgs.msg._Header.Header()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n self.header.frame_id = str[start:end]\n _x = self\n start = end\n end += 9\n (_x.dvl_sts, _x.svs_sts, _x.fog_sts, _x.nav_sts, _x.bat_sts, _x.t_sts, _x.h_sts, _x.p_sts, _x.water_sts,) = _struct_9B.unpack(str[start:end])\n self.dvl_sts = bool(self.dvl_sts)\n self.svs_sts = bool(self.svs_sts)\n self.fog_sts = bool(self.fog_sts)\n self.nav_sts = bool(self.nav_sts)\n self.bat_sts = bool(self.bat_sts)\n self.t_sts = bool(self.t_sts)\n self.h_sts = bool(self.h_sts)\n self.p_sts = bool(self.p_sts)\n self.water_sts = bool(self.water_sts)\n return self\n except struct.error as e:\n raise roslib.message.DeserializationError(e) #most likely buffer underfill",
"def deserialize(self, str):\n try:\n if self.header is None:\n self.header = std_msgs.msg.Header()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.header.frame_id = str[start:end].decode('utf-8')\n else:\n self.header.frame_id = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.cmd = str[start:end].decode('utf-8')\n else:\n self.cmd = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.cat = str[start:end].decode('utf-8')\n else:\n self.cat = str[start:end]\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill",
"def readframeheader(self):\n numbytes = self.readdword()\n magic = self.readword()\n assert magic == 0xF1FA\n oldchunks = self.readword()\n frameduration = self.readword()\n _ = self.readbytearr(2)\n newchunks = self.readdword()\n numchunks = oldchunks\n if oldchunks == 0xFFFF and newchunks != 0:\n numchunks = newchunks\n return {\n \"framebytes\": numbytes,\n \"frameduration\": frameduration,\n \"numchunks\": numchunks,\n }",
"def decodevaluefromheaders(req, headerprefix):\n chunks = []\n i = 1\n while True:\n v = req.headers.get(b'%s-%d' % (headerprefix, i))\n if v is None:\n break\n chunks.append(pycompat.bytesurl(v))\n i += 1\n\n return b''.join(chunks)",
"def decode_mms_header(byte_iter):\n # Get the MMS-field-name\n mms_field_name = ''\n preview = byte_iter.preview()\n byte = wsp_pdu.Decoder.decode_short_integer_from_byte(preview)\n\n if byte in mms_field_names:\n byte_iter.next()\n mms_field_name = mms_field_names[byte][0]\n else:\n byte_iter.reset_preview()\n raise wsp_pdu.DecodeError('Invalid MMS Header: could '\n 'not decode MMS field name')\n\n # Now get the MMS-value\n mms_value = ''\n try:\n name = mms_field_names[byte][1]\n mms_value = getattr(MMSDecoder, 'decode_%s' % name)(byte_iter)\n except wsp_pdu.DecodeError, msg:\n raise wsp_pdu.DecodeError('Invalid MMS Header: Could '\n 'not decode MMS-value: %s' % msg)\n except:\n raise RuntimeError('A fatal error occurred, probably due to an '\n 'unimplemented decoding operation. Tried to '\n 'decode header: %s' % mms_field_name)\n\n return mms_field_name, mms_value",
"def decode(self, s):\n o = self._decoder.decode(s)\n return o",
"def deserialize(self, str):\n if python3:\n codecs.lookup_error(\"rosmsg\").msg_type = self._type\n try:\n if self.Header is None:\n self.Header = std_msgs.msg.Header()\n end = 0\n _x = self\n start = end\n end += 12\n (_x.Header.seq, _x.Header.stamp.secs, _x.Header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.Header.frame_id = str[start:end].decode('utf-8', 'rosmsg')\n else:\n self.Header.frame_id = str[start:end]\n _x = self\n start = end\n end += 11\n (_x.x_pos, _x.y_pos, _x.angle, _x.code_type, _x.code_num,) = _get_struct_2hHBI().unpack(str[start:end])\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) # most likely buffer underfill",
"def decode_header(header):\n new_header = {}\n\n for item in header:\n split = item.split('\\t')\n new_header[split[0].replace(':', '')] = split[1].replace(\"\\r\\n\", \"\")\n\n return new_header",
"def decode_message(self, raw):\n return raw.decode('utf-8')",
"def _decode_str(self, buf):\n length = self._decode_vint(buf)\n result = buf.read(length)\n if len(result) != length:\n raise EndOfMessage(True)\n return result",
"def decode_network_string(msgtype, plen, buf):\n return buf[header.size:plen - 1]"
] | [
"0.65073955",
"0.6290337",
"0.61638",
"0.6045373",
"0.6032726",
"0.60009605",
"0.5976727",
"0.59020257",
"0.587665",
"0.58728296",
"0.58578587",
"0.58342755",
"0.57190514",
"0.5715883",
"0.57114273",
"0.5698579",
"0.5698579",
"0.56286454",
"0.56231004",
"0.5622471",
"0.55946314",
"0.5586041",
"0.55800074",
"0.55469674",
"0.5545549",
"0.5533282",
"0.5530749",
"0.5525593",
"0.55174595",
"0.54975855"
] | 0.8301495 | 0 |
Decodes and returns the game details from the contents byte string. | def decode_replay_details(contents):
decoder = VersionedDecoder(contents, typeinfos)
return decoder.instance(game_details_typeid) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def decode_content(raw_content):\n return raw_content",
"def decode(self, s):",
"def decode(self, s):",
"def loads(data):\n return Decoder().decode(data)",
"def decode_replay_header(contents):\n decoder = VersionedDecoder(contents, typeinfos)\n return decoder.instance(replay_header_typeid)",
"def decode(data): #@NoSelf",
"def read_string(self):\n return self.bits.read('bytes:{0}'.format(self.read_int())).decode(\"utf-8\", 'replace')",
"def _decode_5104(data):\n\n text = []\n start_byte = 0\n while start_byte + 2 < len(data):\n tag = data[start_byte:start_byte + 2]\n if tag == b'#u':\n start_byte += 2\n text_size = struct.unpack(\n '<h', data[start_byte:start_byte + 2])[0]\n start_byte += 2\n text.append(data[start_byte:start_byte + text_size].decode('utf8'))\n start_byte += text_size\n start_byte += 6\n elif tag == b'$u':\n start_byte += 2\n text.append(struct.unpack(\n '<h', data[start_byte:start_byte + 2])[0])\n start_byte += 2\n start_byte += 6\n elif tag == b',u':\n start_byte += 2\n text.append(struct.unpack(\n '<h', data[start_byte:start_byte + 2])[0])\n start_byte += 2\n else:\n start_byte += 1\n\n return {'analyst': text[0],\n 'date': text[2],\n 'image_name': text[4],\n 'instrument_model': text[5],\n 'instrument_serial_number': text[6],\n 'instrument_software_version': text[7],\n 'accumulations': text[9],\n 'detector': text[11],\n 'source': text[12],\n 'beam_splitter': text[13],\n 'apodization': text[15],\n 'spectrum_type': text[16],\n 'beam_type': text[17],\n 'phase_correction': text[20],\n 'ir_accessory': text[26],\n 'igram_type': text[28],\n 'scan_direction': text[29],\n 'background_scans': text[32]}",
"def decode_message(self, raw):\n return raw.decode('utf-8')",
"def decode(self, data):\n encoding = getattr(self, 'encoding', 'ascii')\n return data.decode(encoding, 'ignore')",
"def decode(self, encoded):",
"def decode_replay_initdata(contents):\n decoder = BitPackedDecoder(contents, typeinfos)\n return decoder.instance(replay_initdata_typeid)",
"def decode(self, s):\n o = self._decoder.decode(s)\n return o",
"def decode_content(self, raw_content):\n try:\n obj = pickle.loads(raw_content)\n return obj\n except Exception:\n raise IkatsException(\"Failed to load picked object. Context={}\".format(str(self)))",
"def decode(binary):\n return json_mod.loads(binary.decode(\"utf-8\"))",
"def decode(self,data):\n import yaml\n return yaml.load(data.decode('utf-8'))",
"def getData(self):\n return utf8decoder(self.data)[0]",
"def decode(cls, data):\n h = struct.unpack('B', data)[0]\n # Bits 7-5 define the message type\n mtype = (h & 224) >> 5\n # Bits 1-0 define the major version\n major = h & 3\n m = MACHeader(mtype, major)\n return m",
"def test_decode():\n decoding = d.decode()\n assert type(decoding) == list\n assert len(decoding) == 7\n assert decoding[0] == '-12;-1\\n\\nESS'\n assert decoding[-1] == '2;-2\\n\\nWSWESNESSS'\n for x in decoding:\n assert \"\\n\" in x",
"def _decode_str(self, buf):\n length = self._decode_vint(buf)\n result = buf.read(length)\n if len(result) != length:\n raise EndOfMessage(True)\n return result",
"def _encoded_string_to_string(encoded_blob):\n try:\n return encoded_blob.decode(\"base64\")\n except Exception:\n raise InvalidDeckDataException(\"Cannot decode deck data into anything readable.\")",
"def _decode_text(self):\n\n print(f\"Hex decode; received message is {self.message}\")\n return bytes.fromhex(self.message).decode('utf-8')",
"def decode(self, s, _w=WHITESPACE.match):\n obj, end = self.raw_decode(s, idx=_w(s, 0).end())\n end = _w(s, end).end()\n if end != len(s):\n raise ValueError(errmsg(\"Extra data\", s, end, len(s)))\n return obj",
"def decode(self, s, _w=WHITESPACE.match):\n obj, end = self.raw_decode(s, idx=_w(s, 0).end())\n end = _w(s, end).end()\n if end != len(s):\n raise ValueError(errmsg(\"Extra data\", s, end, len(s)))\n return obj",
"def decode(data):\n raise NotImplementedError",
"def decode(self, data: bytes) -> bytes:\n ...",
"def loads(value):\n return unpackb(value)",
"def decode(self):\n s = self.encoded_content\n if self.encoded_content:\n if self.encoding:\n if self.encoding == u'base64':\n s = decode_base64(s)\n else:\n raise Exception(u'unknown data encoding %s' % (self.encoding))\n if self.compression:\n if self.compression == u'gzip':\n s = decompress_gzip(s)\n else:\n raise Exception(u'unknown data compression %s' %(self.compression))\n else:\n raise Exception(u'no encoded content to decode')\n self.decoded_content = []\n for idx in xrange(0, len(s), 4):\n val = ord(str(s[idx])) | (ord(str(s[idx + 1])) << 8) | \\\n (ord(str(s[idx + 2])) << 16) | (ord(str(s[idx + 3])) << 24)\n self.decoded_content.append(val)\n # generate the 2D version\n self._gen_2D()",
"def decode_replay_game_events(contents):\n decoder = BitPackedDecoder(contents, typeinfos)\n for event in _decode_event_stream(decoder,\n game_eventid_typeid,\n game_event_types,\n decode_user_id=True):\n yield event",
"def unpack(self, data_type):\n\t\tif data_type in data_types:\n\t\t\tformat = data_types[data_type]\n\t\t\treturn self.unpack_real(format[0], format[1])\n\t\t\n\t\tif data_type == \"string8\":\n\t\t\tlength = self.unpack('short')\n\t\t\tif length < 0:\n\t\t\t\traise Exception(\"Negative length for string\")\n\t\t\tif len(self.buff) < length:\n\t\t\t\traise IncompleteData()\n\t\t\tstring = self.buff[:length]\n\t\t\tself.buff = self.buff[length:]\n\t\t\treturn string\n\t\tif data_type == \"string16\":\n\t\t\tlength = self.unpack('short')\n\t\t\tif length < 0:\n\t\t\t\traise Exception(\"Negative length for string\")\n\t\t\tif len(self.buff) < 2*length:\n\t\t\t\traise IncompleteData()\n\t\t\tstring = self.buff[:2*length].decode('utf-16be')\n\t\t\tself.buff = self.buff[2*length:]\n\t\t\treturn string\n\t\tif data_type == \"slot\":\n\t\t\to = {}\n\t\t\to[\"id\"] = self.unpack('short')\n\t\t\tif o[\"id\"] > 0:\n\t\t\t\to[\"amount\"] = self.unpack('byte')\n\t\t\t\to[\"damage\"] = self.unpack('short')\n\t\t\tif o[\"id\"] in SLOT_EXTRA_DATA_IDS:\n\t\t\t\textra_len = self.unpack('short')\n\t\t\t\tif extra_len <= 0:\n\t\t\t\t\to[\"extra\"] = None\n\t\t\t\telse:\n\t\t\t\t\tif len(self.buff) < extra_len:\n\t\t\t\t\t\traise IncompleteData()\n\t\t\t\t\textra_buff = self.buff[:extra_len]\n\t\t\t\t\tself.buff = self.buff[extra_len:]\n\t\t\t\t\to[\"extra\"] = extra_buff\n\t\t\treturn o\n\t\tif data_type == \"metadata\":\n\t\t\t#[(17, 0), (0, 0), (16, -1)]\n\t\t\to = []\n\t\t\tmtype = self.unpack('byte')\n\t\t\twhile mtype != 127:\n\t\t\t\tmtype2 = mtype >> 5\n\t\t\t\tt = 0\n\t\t\t\tif mtype2 == 0: t = self.unpack('byte') \n\t\t\t\tif mtype2 == 1: t = self.unpack('short') \n\t\t\t\tif mtype2 == 2: t = self.unpack('int') \n\t\t\t\tif mtype2 == 3: t = self.unpack('float') \n\t\t\t\tif mtype2 == 4: t = self.unpack('string16')\n\t\t\t\tif mtype2 == 5:\n\t\t\t\t\tt = {}\n\t\t\t\t\tt[\"id\"] = self.unpack('short')\n\t\t\t\t\tt[\"count\"] = self.unpack('byte')\n\t\t\t\t\tt[\"damage\"] = self.unpack('short')\n\t\t\t\tif mtype2 == 6:\n\t\t\t\t\tt = []\n\t\t\t\t\tfor i in range(3):\n\t\t\t\t\t\ts = self.unpack('int')\n\t\t\t\t\t\tt.append(s)\n\t\t\t\tt = (mtype, t)\n\t\t\t\to.append(t)\n\t\t\t\tmtype = self.unpack('byte')\n\t\t\treturn o"
] | [
"0.6129731",
"0.61129194",
"0.61129194",
"0.60414606",
"0.601643",
"0.5972789",
"0.5718805",
"0.57178694",
"0.5708704",
"0.57070786",
"0.5675133",
"0.56692094",
"0.5652299",
"0.56330013",
"0.5612766",
"0.5607002",
"0.55968446",
"0.5579638",
"0.5538174",
"0.55369866",
"0.55223185",
"0.552126",
"0.55139786",
"0.55139786",
"0.5510791",
"0.55053794",
"0.55030257",
"0.54962105",
"0.5448999",
"0.54448557"
] | 0.7517338 | 0 |
Decodes and returns the replay init data from the contents byte string. | def decode_replay_initdata(contents):
decoder = BitPackedDecoder(contents, typeinfos)
return decoder.instance(replay_initdata_typeid) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def decode_replay_header(contents):\n decoder = VersionedDecoder(contents, typeinfos)\n return decoder.instance(replay_header_typeid)",
"def get_protocol_init_data(self):\n\t\tcontents = self.archive.read_file('replay.initData')\n\t\treturn self.protocol.decode_replay_initdata(contents)",
"def decode(self, s):",
"def decode(self, s):",
"def loads(data):\n return Decoder().decode(data)",
"def decode_replay_details(contents):\n decoder = VersionedDecoder(contents, typeinfos)\n return decoder.instance(game_details_typeid)",
"def decode(self, s):\n o = self._decoder.decode(s)\n return o",
"def decode(data): #@NoSelf",
"def decode_content(raw_content):\n return raw_content",
"def decode(self, data: bytes) -> bytes:\n ...",
"def decode(self, data):\n encoding = getattr(self, 'encoding', 'ascii')\n return data.decode(encoding, 'ignore')",
"def deserialize(self, str):\n try:\n end = 0\n start = end\n end += 2580\n self.Rscanpose = _get_struct_645f().unpack(str[start:end])\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill",
"def decode(self, data):\n return self.__cipher.decrypt(data)",
"def decode(self, s, _w=WHITESPACE.match):\n obj, end = self.raw_decode(s, idx=_w(s, 0).end())\n end = _w(s, end).end()\n if end != len(s):\n raise ValueError(errmsg(\"Extra data\", s, end, len(s)))\n return obj",
"def decode(self, s, _w=WHITESPACE.match):\n obj, end = self.raw_decode(s, idx=_w(s, 0).end())\n end = _w(s, end).end()\n if end != len(s):\n raise ValueError(errmsg(\"Extra data\", s, end, len(s)))\n return obj",
"def decode(data):\n raise NotImplementedError",
"def deserialize(self, str):\n try:\n end = 0\n _x = self\n start = end\n end += 12\n (_x.hlive, _x.hstate, _x.hfinished, _x.pressure, _x.c1, _x.c2, _x.c3, _x.c4, _x.c5, _x.c6, _x.c7, _x.c8,) = _struct_12B.unpack(str[start:end])\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill",
"def decode(self):\n s = self.encoded_content\n if self.encoded_content:\n if self.encoding:\n if self.encoding == u'base64':\n s = decode_base64(s)\n else:\n raise Exception(u'unknown data encoding %s' % (self.encoding))\n if self.compression:\n if self.compression == u'gzip':\n s = decompress_gzip(s)\n else:\n raise Exception(u'unknown data compression %s' %(self.compression))\n else:\n raise Exception(u'no encoded content to decode')\n self.decoded_content = []\n for idx in xrange(0, len(s), 4):\n val = ord(str(s[idx])) | (ord(str(s[idx + 1])) << 8) | \\\n (ord(str(s[idx + 2])) << 16) | (ord(str(s[idx + 3])) << 24)\n self.decoded_content.append(val)\n # generate the 2D version\n self._gen_2D()",
"def decode(self, s, _w=WHITESPACE.match):\n obj, end = self.raw_decode(s, idx=_w(s, 0).end())\n end = _w(s, end).end()\n\n return obj",
"def deserialize(self, str):\n try:\n end = 0\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.tsp_turtles = str[start:end].decode('utf-8')\n else:\n self.tsp_turtles = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.conveyor_turtle = str[start:end].decode('utf-8')\n else:\n self.conveyor_turtle = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.catch_turtle = str[start:end].decode('utf-8')\n else:\n self.catch_turtle = str[start:end]\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill",
"def decode(self, encoded):",
"def deserialize(self, str):\n try:\n end = 0\n start = end\n end += 8\n (self.i,) = _struct_d.unpack(str[start:end])\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill",
"def deserialize(self, str):\n try:\n end = 0\n _x = self\n start = end\n end += 152\n (_x.tcp, _x.ori, _x.zone, _x.vacuum, _x.workx, _x.worky, _x.workz, _x.workq0, _x.workqx, _x.workqy, _x.workqz, _x.toolx, _x.tooly, _x.toolz, _x.toolq0, _x.toolqx, _x.toolqy, _x.toolqz, _x.ret,) = _struct_2d2q14dq.unpack(str[start:end])\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.msg = str[start:end].decode('utf-8')\n else:\n self.msg = str[start:end]\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill",
"def deserializer():\n return bytes.decode",
"def decode(self):\n self.decoded_content = []\n if self.encoded_content:\n s = self.encoded_content\n if self.encoding:\n if self.encoding.lower() == u'base64':\n s = decode_base64(s)\n elif self.encoding.lower() == u'csv':\n list_of_lines = s.split()\n for line in list_of_lines:\n self.decoded_content.extend(line.split(','))\n self.decoded_content = map(int, [val for val in self.decoded_content if val])\n s = \"\"\n else:\n raise Exception(u'unknown data encoding %s' % (self.encoding))\n else:\n # in the case of xml the encoded_content already contains a list of integers\n self.decoded_content = map(int, self.encoded_content)\n s = \"\"\n if self.compression:\n if self.compression == u'gzip':\n s = decompress_gzip(s)\n elif self.compression == u'zlib':\n s = decompress_zlib(s)\n else:\n raise Exception(u'unknown data compression %s' %(self.compression))\n else:\n raise Exception(u'no encoded content to decode')\n for idx in xrange(0, len(s), 4):\n val = ord(str(s[idx])) | (ord(str(s[idx + 1])) << 8) | \\\n (ord(str(s[idx + 2])) << 16) | (ord(str(s[idx + 3])) << 24)\n self.decoded_content.append(val)\n #print len(self.decoded_content)\n # generate the 2D version\n self._gen_2D()",
"def decode(cls, data: bytes):\n\n return cls()",
"def decode(cls, data: bytes):\n\n return cls()",
"def deserialize(self, str):\n try:\n end = 0\n _x = self\n start = end\n end += 36\n (_x.mask, _x.dynModel, _x.fixMode, _x.fixedAlt, _x.fixedAltVar, _x.minElev, _x.drLimit, _x.pDop, _x.tDop, _x.pAcc, _x.tAcc, _x.staticHoldThresh, _x.dgpsTimeOut, _x.reserved2, _x.reserved3, _x.reserved4,) = _get_struct_H2BiIbB4H2B3I().unpack(str[start:end])\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill",
"def test_decode(self):\n pass # TODO(tlarsen)",
"def deserialize(self, str):\n try:\n end = 0\n _x = self\n start = end\n end += 57\n (_x.decision, _x.distance, _x.oriX, _x.oriY, _x.oriZ, _x.placX, _x.placY, _x.placZ,) = _get_struct_b7d().unpack(str[start:end])\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill"
] | [
"0.68792987",
"0.6816869",
"0.6576485",
"0.6576485",
"0.64634705",
"0.636914",
"0.6290958",
"0.6227351",
"0.6151208",
"0.60679924",
"0.6021028",
"0.6016486",
"0.60010266",
"0.5966884",
"0.5966884",
"0.59334475",
"0.592061",
"0.5876956",
"0.58746934",
"0.5855629",
"0.58289397",
"0.5826482",
"0.5810384",
"0.5804921",
"0.5801953",
"0.57944113",
"0.57944113",
"0.57887924",
"0.57804334",
"0.5776404"
] | 0.7790975 | 0 |
Decodes and yields each attribute from the contents byte string. | def decode_replay_attributes_events(contents):
buffer = BitPackedBuffer(contents, 'little')
attributes = {}
if not buffer.done():
attributes['source'] = buffer.read_bits(8)
attributes['mapNamespace'] = buffer.read_bits(32)
count = buffer.read_bits(32)
attributes['scopes'] = {}
while not buffer.done():
value = {}
value['namespace'] = buffer.read_bits(32)
value['attrid'] = attrid = buffer.read_bits(32)
scope = buffer.read_bits(8)
value['value'] = buffer.read_aligned_bytes(4)[::-1].strip(b'\x00')
if not scope in attributes['scopes']:
attributes['scopes'][scope] = {}
if not attrid in attributes['scopes'][scope]:
attributes['scopes'][scope][attrid] = []
attributes['scopes'][scope][attrid].append(value)
return attributes | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def decode(self) -> Iterable:\r\n if self.data[0:1] not in (b'd', b'l'):\r\n return self.__wrap_with_tuple()\r\n return self.__parse()",
"def decode(self, s):",
"def decode(self, s):",
"def decode(data: bytes) -> Iterable:\r\n decoder = Decoder(data)\r\n return decoder.decode()",
"def parse_attrs(buf):\r\n attrs = []\r\n while buf:\r\n t = ord(buf[0])\r\n l = ord(buf[1])\r\n if l < 2:\r\n break\r\n d, buf = buf[2:l], buf[l:]\r\n attrs.append((t, d))\r\n return attrs",
"def __iter__(self):\n attr = gv.firstattr(self.handle)\n while gv.ok(attr):\n yield gv.nameof(attr), \\\n decode_page(gv.getv(self.handle, attr))\n attr = gv.nextattr(self.handle, attr)",
"def unpack(self, s):\n\n raise NotImplementedError()",
"def _iterattrs(self, handle=\"\"):\n if not handle:\n handle = self.handle\n attr = gv.firstattr(handle)\n while gv.ok(attr):\n yield gv.nameof(attr), decode_page(gv.getv(handle, attr))\n attr = gv.nextattr(handle, attr)",
"def decode(data): #@NoSelf",
"def read_attribs(self):\n\n attribs = {}\n while self.index < self.length:\n self.ignore_whitespaces()\n if self.xtext[self.index] == '>':\n break\n name = self.read_until('=')\n self.index += 1\n self.read_until('\"')\n self.index += 1\n value = self.read_until('\"')\n self.index += 1\n\n attribs[name] = value\n\n return attribs",
"def read(self, istream):\n super(GetAttributeListResponsePayload, self).read(istream)\n tstream = utils.BytearrayStream(istream.read(self.length))\n\n if self.is_tag_next(enums.Tags.UNIQUE_IDENTIFIER, tstream):\n self._unique_identifier = primitives.TextString(\n tag=enums.Tags.UNIQUE_IDENTIFIER\n )\n self._unique_identifier.read(tstream)\n else:\n self._unique_identifier = None\n\n names = list()\n while self.is_tag_next(enums.Tags.ATTRIBUTE_NAME, tstream):\n name = primitives.TextString(tag=enums.Tags.ATTRIBUTE_NAME)\n name.read(tstream)\n names.append(name)\n self._attribute_names = names\n\n self.is_oversized(tstream)",
"def decode(self, encoded):",
"def deserialize(self, str):\n try:\n end = 0\n _x = self\n start = end\n end += 12\n (_x.hlive, _x.hstate, _x.hfinished, _x.pressure, _x.c1, _x.c2, _x.c3, _x.c4, _x.c5, _x.c6, _x.c7, _x.c8,) = _struct_12B.unpack(str[start:end])\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill",
"def decode(self, value):\r\n pass",
"def deserialize(self, str):\n try:\n end = 0\n _x = self\n start = end\n end += 72\n (_x.health, _x.utcA0, _x.utcA1, _x.utcTOW, _x.utcWNT, _x.utcLS, _x.utcWNF, _x.utcDN, _x.utcLSF, _x.utcSpare, _x.klobA0, _x.klobA1, _x.klobA2, _x.klobA3, _x.klobB0, _x.klobB1, _x.klobB2, _x.klobB3, _x.flags,) = _get_struct_I2di6h8fI().unpack(str[start:end])\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill",
"def deserialize(self, str):\n try:\n end = 0\n _x = self\n start = end\n end += 57\n (_x.decision, _x.distance, _x.oriX, _x.oriY, _x.oriZ, _x.placX, _x.placY, _x.placZ,) = _get_struct_b7d().unpack(str[start:end])\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill",
"def decode(self): # pragma: no cover\n pass",
"def deserialize(self, str):\n try:\n end = 0\n _x = self\n start = end\n end += 36\n (_x.mask, _x.dynModel, _x.fixMode, _x.fixedAlt, _x.fixedAltVar, _x.minElev, _x.drLimit, _x.pDop, _x.tDop, _x.pAcc, _x.tAcc, _x.staticHoldThresh, _x.dgpsTimeOut, _x.reserved2, _x.reserved3, _x.reserved4,) = _get_struct_H2BiIbB4H2B3I().unpack(str[start:end])\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill",
"def deserialize(self, str):\n try:\n end = 0\n start = end\n end += 8\n (self.i,) = _struct_d.unpack(str[start:end])\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill",
"def decrypt_attr(data, key):\n data = MegaCrypto.base64_decode(data)\n k, iv, meta_mac = MegaCrypto.get_cipher_key(key)\n attr = MegaCrypto.cbc_decrypt(data, k)\n\n #: Data is padded, 0-bytes must be stripped\n return json.loads(\n re.search(r'{.+?}', attr).group(0)) if attr[:6] == 'MEGA{\"' else False",
"def decode(self, data):\n encoding = getattr(self, 'encoding', 'ascii')\n return data.decode(encoding, 'ignore')",
"def deserialize(self, data):\n self.vals = iter(data.split()) ### split() convert string to list's iterator\n return self.decode()",
"def deserialize(self, str):\n try:\n end = 0\n _x = self\n start = end\n end += 72\n (_x.lnid, _x.did, _x.blid, _x.flid, _x.bnid, _x.fnid, _x.jct, _x.blid2, _x.blid3, _x.blid4, _x.flid2, _x.flid3, _x.flid4, _x.clossid, _x.span, _x.lcnt, _x.lno,) = _struct_14id2i.unpack(str[start:end])\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill",
"def deserialize(self, byte: bytes):\n pass",
"def deserialize(self, data):\n self.data_vals = iter(data.split())\n return self.decode()",
"def deserialize(self, str):\n try:\n end = 0\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.tsp_turtles = str[start:end].decode('utf-8')\n else:\n self.tsp_turtles = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.conveyor_turtle = str[start:end].decode('utf-8')\n else:\n self.conveyor_turtle = str[start:end]\n start = end\n end += 4\n (length,) = _struct_I.unpack(str[start:end])\n start = end\n end += length\n if python3:\n self.catch_turtle = str[start:end].decode('utf-8')\n else:\n self.catch_turtle = str[start:end]\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill",
"def from_bytes(self, ???):",
"def deserialize(self, str):\n try:\n end = 0\n _x = self\n start = end\n end += 16\n (_x.FL_vel, _x.FR_vel, _x.BL_vel, _x.BR_vel,) = _struct_4i.unpack(str[start:end])\n return self\n except struct.error as e:\n raise genpy.DeserializationError(e) #most likely buffer underfill",
"def decode(self, data: bytes) -> bytes:\n ...",
"def decode(self, s):\n o = self._decoder.decode(s)\n return o"
] | [
"0.6104779",
"0.6021927",
"0.6021927",
"0.59359056",
"0.58786607",
"0.57934487",
"0.57358",
"0.5714988",
"0.56852806",
"0.56393176",
"0.56005883",
"0.55895346",
"0.555217",
"0.5540884",
"0.5514059",
"0.549559",
"0.5486493",
"0.54844517",
"0.5476387",
"0.54693216",
"0.5431158",
"0.5426125",
"0.54167664",
"0.539964",
"0.5369697",
"0.53627986",
"0.5360254",
"0.53487927",
"0.5343453",
"0.5336834"
] | 0.60899234 | 1 |
Computes the squareroot Wiener filter (WF) gain function. | def srwf(xi):
return np.sqrt(wienergain(xi)) # SRWF gain function. | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def mw_f(mw):\n return np.power(mw, 0.5)",
"def fnutofwave(warr, farr):\n c= 2.99792458e18 #spped of light in Angstroms/s\n return farr*c/warr**2",
"def acWF(self):\n cg = self.surfaceW / self.spanW # mean geometric chord\n A = self.acW / self.cMACW\n B = 1.8 * self.fuselageDiameter * self.fuselageDiameter * self.lfn / (self.clAlphaWF * self.surfaceW * self.cMACW)\n C = 0.273 * self.fuselageDiameter * cg * (self.spanW - self.fuselageDiameter) * tan(radians(self.sweep25W))\n D = ((1 + self.taperRatioW) * (self.spanW + 2.15 * self.fuselageDiameter) * self.cMACW**2)\n return (A - B + C / D) * self.cMACW",
"def feature_weighted_energy(wv, dropoff=.1, s_pre=8, s_post=16):\n kernel = np.ones(s_pre + s_post)\n raise NotImplemented('This feature has not been implemented yet.')",
"def qwf(self, vw, ev, gp, psi_l, lai, dt):\n\t\t#if the amount of water in storage is less than amount that will be absorbed by plant in timestep dt, then what's left will be absorbed \n\t qw = (self.gwf(self.psi_wf(self.vw,self.d1, self.d1, self.ns, self.tl), self.H, self.J)*(self.psi_wf(self.vw, self.d1, self.d1, self.ns, self.tl) - (ev*(1. - self.F_CAP))/(lai*gp) - psi_l)*lai)\n\t if self.vw == 0:\n\t return 0.\n\t elif self.vw*10**6 <= qw*dt:\n\t return (self.vw*10**6/dt)\n\t else:\n\t return qw",
"def weight_update(u_ff, u_wc, alpha, beta, w, fan_all):\r\n mult_wc = np.matmul(np.reshape(hard_sigmoid_array(u_wc), (fan_all, 1)),\r\n np.reshape(hard_sigmoid_array(u_wc), (1, fan_all)))\r\n mult_ff = np.matmul(np.reshape(hard_sigmoid_array(u_ff), (fan_all, 1)),\r\n np.reshape(hard_sigmoid_array(u_ff), (1, fan_all)))\r\n delta_w = alpha * (1 / beta) * (mult_wc - mult_ff)\r\n delta_w[np.diag_indices(fan_all)] = 0\r\n w = w + delta_w\r\n return w",
"def A_weighting(fs):\n b, a = A_weighting_analog()\n\n # Use the bilinear transformation to get the digital filter.\n return bilinear(b, a, fs)",
"def butterworth_filter(freq):\n\tf_raw = 1/(0.00000002*100*33)\n\tb = np.array([[-32092,15750],[-31238,14895]])*2.0**(-14)\n\tomega = 2*np.pi*freq/f_raw\n\te1, e2 = np.exp(-1j*omega), np.exp(-2j*omega)\n\ttmp = (1+2*e1+e2)**2/(1+b[0,0]*e1+b[0,1]*e2)/(1+b[1,0]*e1+b[1,1]*e2)\n\treturn tmp * (1+sum(b[0]))*(1+sum(b[1]))/16",
"def digital_gain():\n def r(x):\n return x/512.\n\n def w(x):\n return int(x*512)\n return r, w",
"def f_mw(f):\n return np.power(f, 2)",
"def gwf(self, psi_w, H, J):\n\t if self.vw <= 0.:\n\t return 0.00001\n\t else:\n\t return self.GWMAX*exp(-(-psi_w/J)**H)",
"def spectral(w, s=1.0):\n n_in, n_out = w.size()\n n = max(n_out, n_in)\n gain = s / math.sqrt(n)\n return w.normal_(0, 1).mul_(gain)",
"def sharpness_penalty(self):\n # This polynomial function gives the gain for peaking filter which achieves 18 dB / octave max derivative\n # The polynomial estimate is accurate in the vicinity of 18 dB / octave\n gain_limit = -0.09503189270199464 + 20.575128011847003 * (1 / self.q)\n # Scaled sigmoid function as penalty coefficient\n x = self.gain / gain_limit - 1\n sharpness_penalty_coefficient = 1 / (1 + np.e ** (-x * 100))\n return np.mean(np.square(self.fr * sharpness_penalty_coefficient))",
"def vwf(self, vw, ev, gp, psi_l, lai, dt):\n\t return min(vw - self.qwf(self.vw, self.th, self.gp, self.psi_l, self.LAI, dt)*dt/10.**6, self.ZW)",
"def sharpen_weights(w, gamma):\n\n w = w**gamma\n w /= np.sum(w)\n\n return w",
"def schlichtkrull_std(shape, gain):\n fan_in, fan_out = shape[0], shape[1]\n return gain * 3.0 / sqrt(float(fan_in + fan_out))",
"def Equ_wave (previous_U):\n return lambda U: (U-previous_U)/DELTA_t+G((U + previous_U)/2)",
"def weighting(wb, m, a):\n s = control.tf([1, 0], [1])\n return (s/m + wb) / (s + wb*a)",
"def sweep25W(self):\n return 28.8",
"def f_W_T_gc(u, P_0, r_f, d, s, T, wealth, alpha_0, alpha_1):\n \n (beta_0, beta_1, beta_2) = f_beta(P_0, r_f, d, s, T)\n \n P_T = f_P_T(u, P_0, r_f, d, s, T)\n ln_P_T = f_ln_P_T(u, P_0, r_f, d, s, T)\n \n # If ln_P_T is smaller than certain shreshold, force it to be the shreshold.\n ln_P_T = max(ln_P_T, - beta_1 / (2 * beta_2))\n# print('u: ', end='')\n# print(u)\n# print(beta_2 * math.pow(ln_P_T, 2) + beta_1 * ln_P_T + beta_0)\n\n R_T = math.exp(beta_2 * math.pow(ln_P_T, 2) + beta_1 * ln_P_T + beta_0) * (alpha_1 * ln_P_T + alpha_0)\n \n if R_T > math.pow(wealth_min, gamma):\n W_T = math.pow(R_T, 1/gamma)\n else:\n W_T = wealth_min\n \n return W_T",
"def band_penalty(self):\n fc_ix = np.argmin(np.abs(self.f - self.fc)) # Index to frequency array closes to center frequency\n # Number of indexes on each side of center frequency, not extending outside, only up to 10 kHz\n n = min(fc_ix, self.ix10k - fc_ix)\n if n == 0:\n return 0.0\n return np.mean(np.square(self.fr[fc_ix - n:fc_ix] - (self.gain - self.fr[fc_ix + n - 1:fc_ix - 1:-1])))",
"def evolve_fqe_givens(wfn: Wavefunction, u: np.ndarray) -> Wavefunction:\n wfn = evolve_fqe_givens_sector(wfn, u, sector='alpha')\n wfn = evolve_fqe_givens_sector(wfn, u, sector='beta')\n return wfn",
"def calc_gain(s, i):\n return math.sqrt((i + s) / (6 * s))",
"def gaussbroad(w, s, hwhm):\n \"\"\"\n History\n --------\n Dec-90 GB,GM\n Rewrote with fourier convolution algorithm.\n Jul-91 AL\n Translated from ANA to IDL.\n 22-Sep-91 JAV\n Relaxed constant dispersion check# vectorized, 50% faster.\n 05-Jul-92 JAV\n Converted to function, handle nonpositive hwhm.\n Oct-18 AW\n Python version\n \"\"\"\n\n # Warn user if hwhm is negative.\n if hwhm < 0:\n logger.warning(\"Forcing negative smoothing width to zero.\")\n\n # Return input argument if half-width is nonpositive.\n if hwhm <= 0:\n return s # true: no broadening\n\n # Calculate (uniform) dispersion.\n nw = len(w) ## points in spectrum\n wrange = w[-1] - w[0]\n dw = wrange / (nw - 1) # wavelength change per pixel\n\n # Make smoothing gaussian; extend to 4 sigma.\n # 4.0 / sqrt(2.0 * alog(2.0)) = 3.3972872\n # sqrt(alog(2.0)) = 0.83255461\n # sqrt(alog(2.0) / pi) = 0.46971864\n # (*1.0000632 to correct for >4 sigma wings)\n if hwhm >= 5 * wrange:\n return np.full(nw, np.sum(s) / nw)\n ## points in half gaussian\n nhalf = int(3.3972872 * hwhm / dw)\n ## points in gaussian (odd!)\n ng = 2 * nhalf + 1\n # wavelength scale of gaussian\n wg = dw * (np.arange(ng, dtype=float) - (ng - 1) / 2)\n # convenient absisca\n xg = (0.83255461 / hwhm) * wg\n # unit area gaussian w / FWHM\n gpro = (0.46974832 * dw / hwhm) * np.exp(-xg * xg)\n gpro = gpro / np.sum(gpro)\n\n # Pad spectrum ends to minimize impact of Fourier ringing.\n sout = convolve(s, gpro, mode=\"nearest\")\n\n return sout",
"def downwashGradW(self):\n A = self.r / (self.r**2 + self.mTV**2)\n B = 0.4876 / (sqrt(self.r**2 + 0.6319 + self.mTV**2))\n C = 1 + (self.r**2 / (self.r**2 + 0.7915 + 5.0734 * self.mTV**2))**0.3113\n D = 1 - sqrt(self.mTV**2 / (1 + self.mTV**2))\n return self.Kepsilon * (A * B + C * D) * self.clAlphaW / (pi * self.aspectRatioW)",
"def computeW(self):\n E = np.where(self.v > 0, 1, -1)\n # theshold the connections to only -1,1\n binary_weights = np.where(self.c > 0, 1, self.c)\n binary_weights = np.where(binary_weights < 0, -1, binary_weights)\n W = np.sum(binary_weights * np.dot(E.reshape(-1,1), E.reshape(1,-1))) # W = C * E * E\n self.W = W\n if np.sum(binary_weights) != 0:\n self.W = self.W / np.sum(binary_weights) # W / W*\n return self.W",
"def _update_samples_weight(self):\n m, n = 0, self.u.shape[0]\n T = self.u.shape[1]\n N = n + T\n d_0 = matrix(self.d_0.reshape(n, 1))\n\n # Linear Inequallity Constraints, Gx <= h\n G = matrix(-1 * np.eye(N))\n h = matrix(np.zeros(shape=(N, 1)))\n\n # Linear Equality Constraints, Ax = b\n A = matrix(np.concatenate((np.ones(shape=(T, 1)), np.zeros(shape=(n, 1))), axis=0).T)\n b = matrix(1.0)\n\n def F(x=None, z=None):\n if x is None: return 0, matrix(0.5, (N, 1))\n w = x[:T, :]\n phi = x[T:, :]\n reg_inv = 1 / self.reg\n\n weighted_u = np.dot(self.u, w) # n x 1\n scores = -1 * reg_inv * (weighted_u + phi) # n x 1\n\n # Numeric correction\n scores -= max(scores)\n\n # Auxilliaries\n weighted_scores_exp = np.multiply(d_0, np.exp(scores))\n sum_weighted_scores_exp = np.sum(weighted_scores_exp)\n sum_weighted_scores_exp_square = sum_weighted_scores_exp ** 2\n squared_weighted_scores_exp = np.square(weighted_scores_exp)\n weighted_scores_exp_mults = np.dot(weighted_scores_exp, weighted_scores_exp.T)\n uw_mult = np.multiply(self.u, weighted_scores_exp)\n uw_mult_sum = np.sum(np.multiply(self.u, weighted_scores_exp), axis=0)\n\n f = self.reg * np.log(sum_weighted_scores_exp) + self.kappa * np.sum(phi) # f(x)\n\n dfdw = -1 * uw_mult_sum.T / sum_weighted_scores_exp\n dfdphi = (-1 * weighted_scores_exp / sum_weighted_scores_exp) + self.kappa\n Df = np.concatenate((dfdw, dfdphi), axis=0) # Gradient\n\n mf = matrix(f)\n mDf = matrix(Df.T)\n if z is None:\n return mf, mDf\n # Assumes d_0 is uniform\n H = np.zeros(shape=(N, N)) # Hessian\n dfdwiwi = np.zeros(shape=(T, 1))\n dfdphiiphij = -1 * reg_inv * (np.tril(weighted_scores_exp_mults)) / sum_weighted_scores_exp_square\n dfdphiiphii = reg_inv * (np.multiply(weighted_scores_exp,\n sum_weighted_scores_exp - weighted_scores_exp) / sum_weighted_scores_exp_square)\n # dfdwiwj, dfwiphij are zeros\n dfdphiiwj = reg_inv * ((\n uw_mult * sum_weighted_scores_exp - weighted_scores_exp * uw_mult_sum) / sum_weighted_scores_exp_square)\n\n H[T:, T:] = dfdphiiphij\n H[T:, :T] = dfdphiiwj\n H_diagonal = np.concatenate((dfdwiwi, dfdphiiphii), axis=0)\n np.fill_diagonal(H, H_diagonal)\n\n mH = matrix(z[0] * H)\n return mf, mDf, mH\n\n prev_w = self.w\n prev_slacks = self.slacks\n try:\n wphi = solvers.cp(F, G=G, h=h, A=A, b=b)['x']\n self.w = wphi[:T, :]\n self.slacks = wphi[T:, :]\n except Exception as e: # Catch rank errors and continue to next iteration\n self.slacks = prev_slacks\n self.w = prev_w\n try:\n self.w = np.concatenate((self.w, [[1 / (len(self.w) + 1)]]), axis=0)\n except:\n self.w = np.concatenate((self.w, [1 / (len(self.w) + 1)]), axis=0)\n self.w /= np.sum(self.w)\n\n scores = ((-1 / self.reg) * np.squeeze(np.asarray(np.dot(self.u, self.w) + self.slacks))) + np.log(\n self.d_0) # Update according to Equation (6)\n return self.softmax(scores)",
"def field_strength_to_power_flux(field: float) -> float:\n\n power = np.float_power(np.abs(field), 2)\n power *= (0.5 * speed_of_light * epsilon_0)\n\n return power",
"def get_weight(ew1, ew2):\n dw = flu.delta_epiweeks(ew1, ew2)\n yr = 52.2\n hl1, hl2, bw = yr, 1, 4\n a = 0.05\n #b = (np.cos(2 * np.pi * (dw / yr)) + 1) / 2\n b = np.exp(-((min(dw % yr, yr - dw % yr) / bw) ** 2))\n c = 2 ** -(dw / hl1)\n d = 1 - 2 ** -(dw / hl2)\n return (a + (1 - a) * b) * c * d",
"def A_weight(signal, fs):\n\n b, a = A_weighting(fs)\n return lfilter(b, a, signal)"
] | [
"0.6441982",
"0.6433172",
"0.64330995",
"0.6421363",
"0.6408753",
"0.6317383",
"0.62898636",
"0.622086",
"0.62058926",
"0.61769265",
"0.61759466",
"0.60642016",
"0.60255593",
"0.60095376",
"0.60083437",
"0.6006155",
"0.59878665",
"0.5971654",
"0.5965204",
"0.5912926",
"0.5908855",
"0.5893369",
"0.5883959",
"0.5883606",
"0.58701265",
"0.58523345",
"0.58490455",
"0.58448744",
"0.5839206",
"0.5832312"
] | 0.72039974 | 0 |
Returns the xpath to user folder link | def get_user_folder_link_xpath():
return links['users_folder'].get('folder_xpath') | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_enrolment_methods_link_xpath():\n return links['users_folder']['enrolment_link'].get('xpath')",
"def get_home_page_link_xpath():\n return links['home_page_link'].get('xpath')",
"def get_home_directory(self, user: str) -> str:\n process = self.run(\n \"/\",\n \"root\",\n [\"sh\", \"-c\", f\"realpath ~{user}\"],\n encoding=\"utf-8\",\n stdout=subprocess.PIPE,\n )\n return process.stdout.strip()",
"def dir_user(assignment, user):\n return os.path.join(repository, assignment, user)",
"def get_links(folder):\n tree = etree.parse(folder +\"/PostLinks.xml\")\n return tree.getroot()",
"def getFolderPath(self) -> unicode:\n ...",
"def GetHomeFolder(self): # real signature unknown; restored from __doc__\n pass",
"def check_userfolder(item_container):\n return get_item_container_by_path_and_name(item_container.container.path + ACL_USERS + '/', '')",
"def user_directories():\r\n section = document.add_section()\r\n new_width, new_height = section.page_height, section.page_width\r\n section.orientation = WD_ORIENT.LANDSCAPE\r\n section.page_width = 10058400\r\n section.page_height = 7772400\r\n document.add_heading('User Directories', level=1)\r\n userdirectories = get_qlik_sense.get_userdirectory()\r\n num_of_udc = len(userdirectories)\r\n table = document.add_table(rows=num_of_udc+1, cols=6)\r\n table.style = 'Grid Table 1 Light Accent 1'\r\n row = table.rows[0]\r\n row.cells[0].text = 'name'\r\n row.cells[1].text = 'userDirectoryName'\r\n row.cells[2].text = 'configured'\r\n row.cells[3].text = 'operational'\r\n row.cells[4].text = 'type'\r\n row.cells[5].text = 'syncOnlyLoggedInUsers'\r\n for directory in range(num_of_udc):\r\n row = table.rows[directory+1]\r\n row.cells[0].text = str(userdirectories[directory][0])\r\n row.cells[1].text = str(userdirectories[directory][1])\r\n row.cells[2].text = str(userdirectories[directory][2])\r\n row.cells[3].text = str(userdirectories[directory][3])\r\n row.cells[4].text = str(userdirectories[directory][4])\r\n row.cells[5].text = str(userdirectories[directory][5])\r\n\r\n # document.add_page_break()\r",
"def get_downloadpath(user_id):\r\n path = settings.DOCUMENT_PATH + str(user_id) + '/'\r\n if not os.path.isdir(path):\r\n os.mkdir(path)\r\n return path",
"def get_main_courses_link_xpath():\n return links['main_courses_page_link'].get('xpath')",
"def getRootURL():",
"def path(self):\n return api.BASE_URI + 'apps/%s/app_users/%s' % (self._app_id, self._user_id)",
"def owncloud_folder_list(node_addon, user_addon, **kwargs):\n path = request.args.get('path')\n return node_addon.get_folders(path=path)",
"def get_download_path():\r\n sub_key = r'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders'\r\n downloads_guid = '{374DE290-123F-4565-9164-39C4925E467B}'\r\n with winreg.OpenKey(winreg.HKEY_CURRENT_USER, sub_key) as key:\r\n location = winreg.QueryValueEx(key, downloads_guid)[0]\r\n return location",
"def get_absolute_url(self) -> str:\n return \"/users/%s/\" % self.email",
"def test_getLinkrelToParentDirectory(self):\n linkrel = self.builder.getLinkrel(FilePath(\"/foo\"),\n FilePath(\"/foo/bar\"))\n self.assertEquals(linkrel, \"../\")",
"def path(self):\n return self._selenium.current_url.replace(\n 'http://{}'.format(self._address), '')",
"def getFSUserDir(self):\n\n return self.config.get(\"FileMan\",\"homedir\") + self.getRole()[\"roleName\"]",
"def user(self):\n ret = libxml2mod.xmlURIGetUser(self._o)\n return ret",
"def userlist_path(address):\n return path.join(conf.userlistdir, match_userlist(address))",
"def get_current_directory_uri(self): # real signature unknown; restored from __doc__\n return \"\"",
"def get_uri_for_user(self, target_user):\r\n users = self.get_json(USER_LIST_URI)[\"results\"]\r\n for user in users:\r\n if user[\"id\"] == target_user.id:\r\n return user[\"url\"]\r\n self.fail()",
"def getUserDir() -> str:\n\n if os.name == \"nt\": # Windows system, try to return documents directory\n try:\n import ctypes.wintypes\n CSIDL_PERSONAL = 5 # Documents\n SHGFP_TYPE_CURRENT = 0 # Current value\n\n buf = ctypes.create_unicode_buffer(ctypes.wintypes.MAX_PATH)\n ctypes.windll.shell32.SHGetFolderPathW(0, CSIDL_PERSONAL, 0, SHGFP_TYPE_CURRENT, buf)\n\n return buf.value\n except ImportError:\n pass\n\n return os.path.expanduser(\"~\") # Non-Windows system, return home directory",
"def get_user_folders(user):\n folders = Folder.objects.filter(user=user)\n return folders",
"def getGroupFolder(self):\n if platform.system()==\"Windows\":\n groupFolder = os.path.join(\"\\\\\\\\ursa\",\"AQOGroupFolder\")\n if platform.system()==\"Linux\":\n groupFolder = os.path.join(\"/media\",\"ursa\",\"AQOGroupFolder\")\n return groupFolder",
"def abspath(self, ref):\n \n directory, path = get_location(self.directory, ref.strip(),\n current=dirname(self.relative))\n path = join_fb_root(join(directory, path))\n return path",
"def get_download_path():\r\n if os.name == 'nt':\r\n import winreg\r\n sub_key = r'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders'\r\n downloads_guid = '{374DE290-123F-4565-9164-39C4925E467B}'\r\n with winreg.OpenKey(winreg.HKEY_CURRENT_USER, sub_key) as key:\r\n location = winreg.QueryValueEx(key, downloads_guid)[0]\r\n return location\r\n else:\r\n return os.path.join(os.path.expanduser('~'), 'downloads')",
"def test_get_object_link_folder(self):\n plugin = ProjectAppPluginPoint.get_plugin(PLUGIN_NAME)\n url = reverse(\n 'filesfolders:list', kwargs={'folder': self.folder.sodar_uuid}\n )\n ret = plugin.get_object_link('Folder', self.folder.sodar_uuid)\n self.assertEqual(ret['url'], url)\n self.assertEqual(ret['label'], self.folder.name)",
"def realPath(self):\n \n return (self.useLink and [self.linkPath] or [self.installPath])[0]"
] | [
"0.6328082",
"0.6125375",
"0.5475622",
"0.5455934",
"0.5450685",
"0.5408658",
"0.5387829",
"0.5355797",
"0.53383553",
"0.51845807",
"0.5153756",
"0.5123689",
"0.5121326",
"0.51196957",
"0.5119497",
"0.5108499",
"0.5081348",
"0.50647926",
"0.5047986",
"0.5043348",
"0.50315887",
"0.50256455",
"0.5017014",
"0.5012016",
"0.50069356",
"0.49670488",
"0.49559262",
"0.49402562",
"0.49370635",
"0.4934787"
] | 0.90168035 | 0 |
Returns the xpath to enrolment methods link | def get_enrolment_methods_link_xpath():
return links['users_folder']['enrolment_link'].get('xpath') | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_main_courses_link_xpath():\n return links['main_courses_page_link'].get('xpath')",
"def functionURI(self):\n ret = libxml2mod.xmlXPathGetFunctionURI(self._o)\n return ret",
"def get_xpath_next_button(self) -> str:\n\n return self.__xpath_next_button",
"def getLink(self):",
"def _GetOpener(self):\r\n raise NotImplementedError()",
"def _GetOpener(self):\n raise NotImplementedError()",
"def getExpandedLinks():",
"def get_accomplishment_link(element):\n try:\n return element.find_element_by_class_name(\n \"pv-accomplishment-entity__external-source\"\n ).get_attribute(\"href\")\n except NoSuchElementException:\n return \"\"",
"def navigate_to():\n return Navi.navigate_to(\"XML Repoll Files\")",
"def etlWorkflowUrl(self):\n return self.sdaUrl + \"/workflows/_etl\"",
"def function(self):\n ret = libxml2mod.xmlXPathGetFunction(self._o)\n return ret",
"def oed_url(self):\n return 'http://www.oed.com/view/th/class/%d' % self.id",
"def xpathLangFunction(self, nargs):\n libxml2mod.xmlXPathLangFunction(self._o, nargs)",
"def test_view_enabled(self, method, url):\n with override_waffle_switch(COURSE_ENROLLMENT_ADMIN_SWITCH, active=True):\n response = getattr(self.client, method)(url)\n assert response.status_code == 200",
"def get_home_page_link_xpath():\n return links['home_page_link'].get('xpath')",
"def methods():\n list_groups_text = '<a href=\"/groups\">List Groups</a>'\n list_users_text = '<a href=\"/users\">List Users</a>'\n page_links = list_groups_text + \"<br>\" + list_users_text\n return page_links",
"def next_url(self) -> str:\n return 'https://www.mta-dialog.de/stellenmarkt.html?tx_jobs_pi1[action]=next'",
"def actionURL(self):\n raise NotImplementedError()",
"def online_documentation(self,event=None):\n import webbrowser\n link='http://enzyme.ucd.ie/PEAT/'\n webbrowser.open(link,autoraise=1)\n return",
"def href(self, request) -> str:\n return request.route_path(self.url)",
"def get_xpath(xpath=\"\"):\n query = {\"type\": \"config\", \"action\": \"get\", \"xpath\": xpath}\n\n return __proxy__[\"panos.call\"](query)",
"def href(self, request) -> str:\n raise NotImplementedError()",
"def __getStub(self):\n return re.findall(\"direct\\\\('(.*?)'\\\\);\", self.soup.find('a', onclick=True)['onclick'])[0]",
"def get_user_folder_link_xpath():\n return links['users_folder'].get('folder_xpath')",
"def get_token_link(self):\n return self.env['ir.config_parameter'].search([('key', '=', 'web.base.url')]).value + \"/web/signup?inv_id={}\".format(self.name)",
"def get_element_locator(self):\n return self._find_by_locator().locator",
"def create_url_for_supplements(class_name, class_urn, scope):\n filename = classname_to_filename(class_name)\n if \"ses:eurocontrol\" in class_urn:#target is in eur supp\n if scope == \"european-supplement/\":#current page in eur supp\n path = \"\"\n else:#current page in global\n path = \"european-supplement/\"\n else:#target is in global\n if scope == \"european-supplement/\":\n path = \"../\"\n else:\n path = \"\"\n url = path+filename\n return url",
"def wepay_docs_role(name, rawtext, text, lineno, inliner,\n options={}, content=[]):\n\n # get the application\n app = inliner.document.settings.env.app\n\n # parse the text entered in the role.\n # here, we simply split on space to define the two parts of our url\n # if a function parameter is not given, then we don't use one\n # example: /account is the account lookup call but it doesn't have a function attached.\n # We can also use _-/some/string to override the name of the link because not all of the documentation follows this pattern\n endpoint, name_override = text.split(\" -\") if ' -' in text else (text, None)\n endpoint, function = endpoint.split(\" \") if ' ' in endpoint else (endpoint, None)\n\n # make the node\n node = make_wepay_link(app, rawtext, endpoint, function, name_override, options)\n return ([node], [])",
"def rule_ext(self, rule_name, method):\n self.children[0].expr_ext(rule_name, method)",
"def get_end_effector_link(self):\n return self._g.get_end_effector_link()"
] | [
"0.5303405",
"0.49750277",
"0.49695107",
"0.49531114",
"0.48514926",
"0.47544903",
"0.47476315",
"0.4720942",
"0.47161612",
"0.46954942",
"0.46684697",
"0.46565244",
"0.46222523",
"0.4620573",
"0.45703828",
"0.45701542",
"0.45397356",
"0.4507773",
"0.44758257",
"0.44708025",
"0.44527555",
"0.4448393",
"0.44122392",
"0.4406212",
"0.43991593",
"0.437187",
"0.43470457",
"0.43439656",
"0.43287688",
"0.432435"
] | 0.8597276 | 0 |