query | document | metadata | negatives | negative_scores | document_score | document_rank |
---|---|---|---|---|---|---|
assert that calling func(*args, **kwargs) triggers a DeprecationWarning. | def deprecated_call(func, *args, **kwargs):
    warningmodule = py.std.warnings
    l = []
    oldwarn_explicit = getattr(warningmodule, 'warn_explicit')
    def warn_explicit(*args, **kwargs):
        l.append(args)
        oldwarn_explicit(*args, **kwargs)
    oldwarn = getattr(warningmodule, 'warn')
    def warn(*args, **kwargs):
        l.append(args)
        oldwarn(*args, **kwargs)
    warningmodule.warn_explicit = warn_explicit
    warningmodule.warn = warn
    try:
        ret = func(*args, **kwargs)
    finally:
        # restore the originals, not the recording wrappers
        warningmodule.warn_explicit = oldwarn_explicit
        warningmodule.warn = oldwarn
    if not l:
        print(warningmodule)
        raise AssertionError("%r did not produce DeprecationWarning" % (func,))
    return ret | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_deprecate_args(self):\n @deprecate(arguments={\"bar\": \"use foo instead\"})\n def foo(a, foo=None, bar=None):\n return 2*a\n\n with warnings.catch_warnings(record=True) as w:\n self.assertEqual(foo(1, bar=True), 2,\n \"Decorated function does not return original \"\n \"return value\")\n self.assertTrue(len(w) > 0, \"No warning raised!\")\n\n with warnings.catch_warnings(record=True) as w:\n self.assertEqual(foo(1, foo=True), 2,\n \"Decorated function does not return original \"\n \"return value\")\n self.assertEqual(len(w), 0, \"Warning raised, but deprecated argument was not given.\")",
"def test_deprecate_kwargs(self):\n @deprecate(bar=\"use baz instead\")\n def foo(a, bar=None, baz=None):\n return 2*a\n\n with warnings.catch_warnings(record=True) as w:\n self.assertEqual(foo(1, bar=True), 2,\n \"Decorated function does not return original \"\n \"return value\")\n self.assertTrue(len(w) > 0, \"No warning raised!\")\n\n with warnings.catch_warnings(record=True) as w:\n self.assertEqual(foo(1, baz=True), 2,\n \"Decorated function does not return original \"\n \"return value\")\n self.assertEqual(len(w), 0, \"Warning raised, but deprecated argument was not given.\")",
"def deprecated( deprecated_function, *args, **kwargs ):\n\n @wraps( deprecated_function )\n def wrapper( *args, **kwargs ):\n warnings.filterwarnings( 'always' )\n warnings.warn( \"deprecated\", DeprecationWarning )\n deprecated_function( *args, **kwargs )\n\n return wrapper",
"def _deprecation_warning(func):\n\n deprecation_message = (\n \"The typename and field_names parameters will be removed in \"\n \"namedzip v2.0.0. Please use the named_tuple parameter instead.\"\n )\n\n @wraps(func)\n def wrapper(*args, **kwargs):\n deprecated_kwargs = bool(\n \"typename\" in kwargs.keys() or \"field_names\" in kwargs.keys()\n )\n if deprecated_kwargs:\n warnings.filterwarnings(\"always\", message=deprecation_message)\n warnings.warn(\n category=DeprecationWarning, message=deprecation_message, stacklevel=2\n )\n if func.__name__ == \"namedzip\":\n return _namedzip_v1(*args, **kwargs)\n else:\n return _namedzip_longest_v1(*args, **kwargs)\n else:\n return func(*args, **kwargs)\n\n return wrapper",
"def __call__(self, *args, **kwargs):\n self._Deprecator__warn()\n return self._Deprecator__todeprecate(*args, **kwargs)",
"def deprecated(func):\n\n @functools.wraps(func)\n def new_func(*args, **kwargs):\n warnings.warn(\n \"Call to deprecated function {}.\".format(func.__name__),\n category=DeprecationWarning,\n stacklevel=2,\n )\n return func(*args, **kwargs)\n\n return new_func",
"def deprecated(func):\n @functools.wraps(func)\n def new_func(*args, **kwargs):\n warnings.warn(\"Call to deprecated function {}.\".format(func.__name__),\n category=DeprecationWarning, stacklevel=2)\n return func(*args, **kwargs)\n return new_func",
"def test_instances(self):\n\n @deprecate(bar=\"use baz instead\")\n def foo(bar=None, baz=None):\n pass\n\n @deprecate(baz=\"use bar instead\")\n def food(bar=None, baz=None):\n pass\n\n with warnings.catch_warnings(record=True) as w:\n foo(bar=True)\n food(baz=True)\n self.assertEqual(len(w), 2, \"Not all warnings preserved.\")",
"def assert_warns(warning_class, func, *args, **kw):\n with warnings.catch_warnings(record=True) as warn:\n # Cause all warnings to always be triggered.\n warnings.simplefilter(\"always\")\n # Trigger a warning.\n result = func(*args, **kw)\n if hasattr(np, 'FutureWarning'):\n # Filter out numpy-specific warnings in numpy >= 1.9\n warn = [e for e in warn\n if e.category is not np.VisibleDeprecationWarning]\n\n # Verify some things\n if not len(warn) > 0:\n raise AssertionError(\"No warning raised when calling %s\"\n % func.__name__)\n\n found = any(warning.category is warning_class for warning in warn)\n if not found:\n raise AssertionError(\"%s did not give warning: %s( is %s)\"\n % (func.__name__, warning_class, warn))\n return result",
"def deprecated_call():\n # TODO: Remove this when testing requires pytest>=3.9.\n pieces = pytest.__version__.split(\".\")\n pytest_major_minor = (int(pieces[0]), int(pieces[1]))\n if pytest_major_minor < (3, 9):\n return pytest.warns((DeprecationWarning, PendingDeprecationWarning))\n return pytest.deprecated_call()",
"def test_deprecate(self):\n @deprecate\n def foo(a):\n return 2*a\n\n @deprecate(\"use baz instead\", version=\"0.2.0\")\n def bar(a):\n return 4*a\n\n with warnings.catch_warnings(record=True) as w:\n self.assertEqual(foo(1), 2,\n \"Decorated function does not return original \"\n \"return value\")\n self.assertTrue(len(w) > 0, \"No warning raised!\")\n self.assertEqual(w[0].category, DeprecationWarning,\n \"Raised warning is not a DeprecationWarning\")\n\n with warnings.catch_warnings(record=True) as w:\n self.assertEqual(bar(1), 4,\n \"Decorated function does not return original \"\n \"return value\")\n\n expected_message = \"use baz instead. It is not guaranteed to be in \" \\\n \"service in vers. 0.2.0\"\n self.assertTrue( w[0].message.args[0].endswith(expected_message),\n \"Warning message does not reflect decorator arguments.\")\n\n @deprecate_soon\n def baz(a):\n return 3*a\n\n with warnings.catch_warnings(record=True) as w:\n self.assertEqual(baz(1), 3,\n \"Decorated function does not return original \"\n \"return value\")\n self.assertEqual(w[0].category, PendingDeprecationWarning,\n \"Raised warning is not a PendingDeprecationWarning\")",
"def guarded_deprecation_warning(*args, **kwargs):\n if os.environ.get(\"SERVE_WARN_V1_DEPRECATIONS\", \"0\") == \"1\":\n from ray._private.utils import deprecated\n\n return deprecated(*args, **kwargs)\n else:\n\n def noop_decorator(func):\n return func\n\n return noop_decorator",
"def deprecated(func):\n @functools.wraps(func)\n def new_func(*args, **kwargs):\n warnings.simplefilter('always', DeprecationWarning) # turn off filter\n warnings.warn(\"Call to deprecated function {}.\".format(func.__name__),\n category=DeprecationWarning,\n stacklevel=2)\n warnings.simplefilter('default', DeprecationWarning) # reset filter\n return func(*args, **kwargs)\n return new_func",
"def deprecated(func):\n\n @functools.wraps(func)\n def new_func(*args, **kwargs):\n warnings.simplefilter('always', DeprecationWarning) # turn off filter\n warnings.warn(\"Call to deprecated function {}.\".format(func.__name__),\n category=DeprecationWarning,\n stacklevel=2)\n warnings.simplefilter('default', DeprecationWarning) # reset filter\n return func(*args, **kwargs)\n\n return new_func",
"def wrapper(self, *args, **kwargs):\n if sys.version_info >= (3, 2):\n warnings.simplefilter(\"ignore\", ResourceWarning)\n with warnings.catch_warnings():\n my_func(self, *args, **kwargs)",
"def obsolete(func, fail=True):\n\n def inner(*args, **kwargs):\n if not fail:\n logging.error('Called obsolete function %s' % func.__name__)\n return func(*args, **kwargs)\n raise ObsoleteError('Tried to call function %s but it is marked as obsolete' % func.__name__)\n\n return inner",
"def deprecated(func): # pragma: no cover\n\n def new_func(*args, **kwargs):\n warnings.simplefilter('always', DeprecationWarning) # Turn off filter\n warnings.warn(\n 'Call to deprecated function %s.' % func.__name__,\n category=DeprecationWarning,\n stacklevel=2\n )\n warnings.simplefilter('default', DeprecationWarning) # Reset filter\n return func(*args, **kwargs)\n\n new_func.__name__ = func.__name__\n new_func.__doc__ = func.__doc__\n new_func.__dict__.update(func.__dict__)\n return new_func",
"def deprecated(func):\n\n @functools.wraps(func)\n def new_func(*args, **kwargs):\n warnings.warn_explicit(\n \"Call to deprecated function. %s {}\".format(func.__name__),\n category=DeprecationWarning,\n filename=func.func_code.co_filename,\n lineno=func.func_code.co_firstlineno + 1\n )\n return func(*args, **kwargs)\n\n return new_func",
"def deprecated(func):\n\n @functools.wraps(func)\n def new_func(*args, **kwargs):\n \"\"\"Wrapper function.\"\"\"\n warnings.warn_explicit(\n \"Call to deprecated function %(funcname)s.\" % {\n 'funcname': func.__name__,\n },\n category=DeprecationWarning,\n filename=func.func_code.co_filename,\n lineno=func.func_code.co_firstlineno + 1\n )\n return func(*args, **kwargs)\n\n return new_func",
"def deprecated(func):\n\n @functools.wraps(func)\n def new_func(*args, **kwargs):\n warnings.warn_explicit(\n \"Call to deprecated function %(funcname)s.\" % {\n 'funcname': func.__name__,\n },\n category=DeprecationWarning,\n filename=func.__code__.co_filename,\n lineno=func.__code__.co_firstlineno + 1\n )\n return func(*args, **kwargs)\n\n return new_func",
"def wrapper(*args, **kwargs):\n logger.warn(\"Deprecated function {0}. Please use '{1}' instead.\".format(func.__name__, use_instead))\n return func(*args, **kwargs)",
"def deprecated(func):\n def newFunc(*args, **kwargs):\n warnings.simplefilter('always', DeprecationWarning) # turn off filter\n warnings.warn(\"Call to deprecated function {}.\".format(\n func.__name__), category=DeprecationWarning, stacklevel=2)\n warnings.simplefilter('default', DeprecationWarning) # reset filter\n return func(*args, **kwargs)\n newFunc.__name__ = func.__name__\n newFunc.__doc__ = func.__doc__\n newFunc.__dict__.update(func.__dict__)\n return newFunc",
"def _check_deprecated(self, dest: str, kwargs, print_warning: bool = True) -> None:\n removal_version = kwargs.get(\"removal_version\", None)\n if removal_version is not None:\n warn_or_error(\n removal_version=removal_version,\n entity=f\"option '{dest}' in {self._scope_str()}\",\n start_version=kwargs.get(\"deprecation_start_version\", None),\n hint=kwargs.get(\"removal_hint\", None),\n print_warning=print_warning,\n )",
"def warning(self, *args, **kwargs): # real signature unknown\n pass",
"def ignore_warnings(my_func):\n\n def wrapper(self, *args, **kwargs):\n \"\"\"\n This is where the warning suppression occurs.\n \"\"\"\n if sys.version_info >= (3, 2):\n warnings.simplefilter(\"ignore\", ResourceWarning)\n with warnings.catch_warnings():\n my_func(self, *args, **kwargs)\n\n return wrapper",
"def warning(self, *args, **kwargs):",
"def deprecated(func):\n def new_func(*args, **kwargs):\n warnings.simplefilter('default') #this will render these deprecation warnings visible to everyone (default is switched off in python >=2.7)\n warnings.warn(\"Call to deprecated function {n} of {f}.\".format(n=func.__name__, f=func.__doc__),\n category=DeprecationWarning)\n return func(*args, **kwargs)\n new_func.__name__ = func.__name__\n new_func.__doc__ = func.__doc__\n new_func.__dict__.update(func.__dict__)\n return new_func",
"def _not_assert(function):\n @wraps(function)\n def flipped(*args, **kwargs):\n try:\n function(*args, **kwargs)\n raise AssertionError()\n except AssertionError:\n return\n return flipped",
"def issues_warnings(*warnings):\r\n valid = ' or '.join([w.__name__ for w in warnings])\r\n\r\n def decorate(func):\r\n name = func.__name__\r\n\r\n def newfunc(*arg, **kw):\r\n with warnings_mod.catch_warnings(record=True) as issued_warnings:\r\n warnings_mod.simplefilter('always')\r\n\r\n func(*arg, **kw)\r\n\r\n interesting = [\r\n w for w in issued_warnings\r\n if issubclass(w.category, warnings)\r\n ]\r\n if not interesting:\r\n message = \"%s() did not issue %s\" % (name, valid)\r\n raise AssertionError(message)\r\n\r\n newfunc = make_decorator(func)(newfunc)\r\n return newfunc\r\n\r\n return decorate",
"def _canDisable(func):\n def wrapper(*args, **kwargs):\n if _DISABLE_ASSERTIONS == 0:\n return func(*args, **kwargs)\n return wrapper"
] | [
"0.76134264",
"0.7457519",
"0.69317734",
"0.68314976",
"0.6774156",
"0.67599773",
"0.6758293",
"0.6725724",
"0.67210484",
"0.6709473",
"0.66816026",
"0.6671405",
"0.6668715",
"0.66650754",
"0.65562075",
"0.6553829",
"0.6536344",
"0.6527607",
"0.6516987",
"0.6516459",
"0.6488438",
"0.64712715",
"0.64520466",
"0.64443344",
"0.64421946",
"0.6429064",
"0.6412323",
"0.6380749",
"0.6340044",
"0.63179594"
] | 0.7718524 | 0 |
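
A minimal usage sketch for the `deprecated_call` helper in the row above, assuming that helper is in scope and the legacy `py` library is importable; the toy `old_api` function is hypothetical, added only for illustration.

```python
import warnings

def old_api(x):
    # hypothetical legacy function that warns on every call
    warnings.warn("old_api is deprecated", DeprecationWarning)
    return x * 2

# deprecated_call records the warning and passes through the return value
assert deprecated_call(old_api, 21) == 42
```
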
Guess the bean name from a WSDL type. Assume the bean name is the type name with the first letter capitalized. | def guessbeanname(self):
    t = self.name
    return t[0].upper() + t[1:] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def service_type_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"service_type_name\")",
"def get_type_name(type):\n name = type.name\n if type.is_simple:\n return _get_simple_type_mapping(name)\n elif type.is_enum:\n return _get_simple_type_mapping('str')\n elif type.is_complex:\n return get_class_name(name)",
"def _type_name(cls, manual_name):\r\n cf_name = ''\r\n if manual_name:\r\n cf_name = manual_name.lower()\r\n else:\r\n camelcase = re.compile(r'([a-z])([A-Z])')\r\n ccase = lambda s: camelcase.sub(lambda v: '{}_{}'.format(v.group(1), v.group(2).lower()), s)\r\n \r\n cf_name += ccase(cls.__name__)\r\n cf_name = cf_name.lower()\r\n if cls.__use_module_name__:\r\n cf_name = cls.__module__ + '_{}'.format(cf_name)\r\n return cf_name",
"def _get_service_type(service):\n\n return service.split(':')[3]",
"def _get_type_name(type_):\n # type: (type) -> str\n name = repr(type_)\n if name.startswith(\"<\"):\n name = getattr(type_, \"__qualname__\", getattr(type_, \"__name__\", \"\"))\n return name.rsplit(\".\", 1)[-1] or repr(type_)",
"def get_type_doc_name(type):\n name = type.name\n if type.is_simple:\n return _get_simple_type_mapping(name)\n elif type.is_enum:\n return '{0}.{1}'.format(get_package_name(name), get_enum_name(name))\n elif type.is_complex:\n return '{0}.{1}'.format(get_package_name(name), get_class_name(name))",
"def pyxb_get_type_name(obj_pyxb):\n return pyxb_get_namespace_name(obj_pyxb).split('}')[-1]",
"def service_type_name(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"service_type_name\")",
"def fixType(typeStr):\n pos = typeStr.rfind('.')\n if pos != -1:\n typeStr = typeStr[pos+1:]\n return classMap.get(typeStr, typeStr)",
"def _get_type_name(self, st_type):\n if st_type <= 2045: return 'str' + str(st_type)\n return self._type_names[st_type]",
"def _get_type(self, obj):\n typever = obj['Type']\n typesplit = typever.split('.')\n return typesplit[0] + '.' + typesplit[1]",
"def _get_type_name(self, st_type):\n if st_type <= 244: return 'str' + str(st_type)\n return self._type_names[st_type]",
"def type_name_to_type(name):\n if name in SIMPLE_TYPES:\n return SIMPLE_TYPES[name]\n elif name in PROXY_TYPES:\n return PROXY_TYPES[name]\n return None",
"def infer_abbr(class_type):\r\n if not inspect.isclass(class_type):\r\n raise TypeError(\r\n f'class_type must be a type, but got {type(class_type)}')\r\n if hasattr(class_type, '_abbr_'):\r\n return class_type._abbr_\r\n if issubclass(class_type, _InstanceNorm): # IN is a subclass of BN\r\n return 'in'\r\n elif issubclass(class_type, _BatchNorm):\r\n return 'bn'\r\n elif issubclass(class_type, nn.GroupNorm):\r\n return 'gn'\r\n elif issubclass(class_type, nn.LayerNorm):\r\n return 'ln'\r\n else:\r\n class_name = class_type.__name__.lower()\r\n if 'batch' in class_name:\r\n return 'bn'\r\n elif 'group' in class_name:\r\n return 'gn'\r\n elif 'layer' in class_name:\r\n return 'ln'\r\n elif 'instance' in class_name:\r\n return 'in'\r\n else:\r\n return 'norm'",
"def get_type_s(self, type):\r\n\r\n return HTTP2_NAMES.get(type, None)",
"def class_name(name: str) -> str:\n return text.pascal_case(utils.safe_snake(name, \"type\"))",
"def wsdl_service_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"wsdl_service_name\")",
"def get_type_functional_name(type):\n name = type.name\n if type.is_simple:\n return name\n elif type.is_enum:\n return 'str'\n elif type.is_complex:\n return get_class_name(name)",
"def to_type_name(self, text) -> str:\n return util.to_snake_case(self.split_to_body_and_ext(text)[0]).capitalize()",
"def type_name(attr_type: AttrType) -> str:\n return attr_type.native_name or class_name(attr_type.name)",
"def wsdl_endpoint_name(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"wsdl_endpoint_name\")",
"def typeToName(type: int) -> unicode:\n ...",
"def name_for(element_defn: JSON, type_defn: JSON) -> str:\n return element_defn.path.replace('[x]', PathElement.type_name(type_defn))",
"def name_to_type(self, name):\n return self.CUSTOM_PREFIX + name",
"def _extract_table_type(type):\n if isinstance(type, str):\n type = type.lower()\n if type[0:7] == 'binary':\n table_type = BINARY_TBL\n elif type[0:6] == 'ascii':\n table_type = ASCII_TBL\n else:\n raise ValueError(\n \"table type string should begin with 'binary' or 'ascii' \"\n \"(case insensitive)\")\n else:\n type = int(type)\n if type not in [BINARY_TBL, ASCII_TBL]:\n raise ValueError(\n \"table type num should be BINARY_TBL (%d) or \"\n \"ASCII_TBL (%d)\" % (BINARY_TBL, ASCII_TBL))\n table_type = type\n\n return table_type",
"def get_type_label(type_url):\n return type_dict[type_url]",
"def get_service_type_for_service_name(self, service_name):\n from ranger_performance_tool import perf_globals\n service_type_mapping = perf_globals.CONFIG_READER.get_config_value(\"secondary\", \"service_type_mapping\")\n if service_name not in service_type_mapping.keys():\n raise Exception(f\"Unknown service name:{service_name}.\"\n f\"Add it to service_type_mapping in secondary config file\")\n return service_type_mapping[service_name]",
"def get_type_from_str(type_str: str) -> str:\n query = [x\n for x in PRIMITIVE_TYPES\n if type_str.lower() in PRIMITIVE_TYPES[x]]\n return query[0] if len(query) > 0 else 'None'",
"def test_resolves_name(self):\n class Foo(pyperry.Base):\n pass\n\n self.assertEqual(pyperry.Base.resolve_name('Foo')[-1], Foo)",
"def _get_short_type_name(cls, type_name: str) -> str:\n import re\n match = re.match('(typing\\.)?(?P<type>\\w+)(?:\\[.+\\])?', type_name)\n return match.group('type') if match else type_name"
] | [
"0.6055587",
"0.59900916",
"0.5906633",
"0.5842989",
"0.5832207",
"0.57966083",
"0.5786233",
"0.5717361",
"0.56284714",
"0.56159526",
"0.54767513",
"0.5473429",
"0.5454887",
"0.5423297",
"0.5414507",
"0.5383779",
"0.5352961",
"0.5352691",
"0.5325332",
"0.5307059",
"0.52658063",
"0.5262978",
"0.52502173",
"0.52407354",
"0.52342284",
"0.5225188",
"0.52102333",
"0.5206654",
"0.5204661",
"0.52033675"
] | 0.6790827 | 0 |
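
The rule above restated as a standalone sketch: the bean name is the WSDL type name with its first letter upper-cased. The sample type names are illustrative assumptions.

```python
def guess_bean_name(type_name: str) -> str:
    # capitalize only the first letter, leaving the rest untouched
    return type_name[0].upper() + type_name[1:]

assert guess_bean_name("investigationUser") == "InvestigationUser"
assert guess_bean_name("facility") == "Facility"
```
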
Return the many to one relations (relType == ONE). | def getrelations(self):
    return self.getfieldnames('ONE') | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _filter_related_one2one(self, rel):\n field = rel.field\n if isinstance(field, models.OneToOneField):\n if self._join_allowed(rel.parent_model, rel.model, field):\n return rel",
"def relationship(cls):\n return relationship.many_to_one(cls, 'relationship')",
"def many_to_one(table, backref):\n return relationship(table, back_populates=backref, viewonly=True)",
"def relation(self):\n # [(2,OBJ), (3,OBJ)])] => 2\n return len(self.relations) > 0 and self.relations[0][0] or None",
"def relationships(self):",
"def _filter_one2one(self, field):\n if isinstance(field, models.OneToOneField):\n if self._join_allowed(field.model, field.rel.to, field):\n return field",
"def relations(self):\n return set(self.triples()[\"relation\"])",
"def one_to_many(table, backref):\n return relationship(table, back_populates=backref, lazy=\"dynamic\", viewonly=True)",
"def relation(self) -> Optional[Relation]:\n return self.model.get_relation(self.endpoint, relation_id=self.relation_id)",
"def relationship_template(cls):\n return relationship.many_to_one(cls, 'relationship_template')",
"def relationship(self):\r\n return relationships.Relationship(self)",
"def _get_all_or_one(model, schema, _id=None):\n if _id:\n query = model.query.get(_id)\n else:\n query = model.query.all()\n\n return schema.dump(query, many=_id is None).data",
"def getmanyrelations(self):\n return self.getfieldnames('MANY')",
"def relation( self, obj, relType ):\n raise NotImplementedError(\"relation\")",
"def _get_lsp_config_frr_one_to_many(self):\n return self.__lsp_config_frr_one_to_many",
"def _filter_related_m2m(self, rel):\n field = rel.field\n if isinstance(field, models.ManyToManyField):\n if self._join_allowed(rel.parent_model, rel.model, field):\n return rel",
"def _do_relation(self):\n if self.chunks:\n ch = self.chunks[-1]\n for relation, role in ch.relations:\n if role == \"SBJ\" or role == \"OBJ\":\n self.relations[role][relation] = ch\n if ch.type in (\"VP\",):\n self.relations[ch.type][ch.relation] = ch",
"def relationship(self):\n return relationships.Relationship(self)",
"def related_to(self, name=None):\n\t\treturn self.related(name, True)",
"def getUniversal(cls):\n temp = cls.A * cls.A\n l = []\n for i in temp:\n l.append(i)\n return Relation(*l,name = 'Universal Relation')",
"def _produce_none_relations(self):\n print \"Producing NONE-relations\"\n relations = self.relations\n events = self.events\n none_relations = []\n\n for source in events:\n for target in events:\n new_relation = Relation(\"NONE\", self.text_obj, source, target, RelationType.NONE)\n print new_relation\n\n if new_relation in relations:\n continue\n else:\n none_relations.append(new_relation)\n\n self.relations = self.relations + none_relations\n\n print \"Finished producing NONE-relations\"",
"def import_single_object_relations(self, content_object):\n ct = ContentType.objects.get_for_model(content_object)\n object_id = content_object.id\n return self.import_relations(ct, asset_id=object_id)",
"def allow_relation(self, obj1, obj2, **hints):\n if obj1._meta.app_label == 'data_collection' or \\\n obj2._meta.app_label == 'data_collection':\n return True\n return None",
"def RelatedRecords(self, default=[{}]):\n tmp = self.data.get('metadata', {}).get('related_records', default)\n return [HEP.RelatedRecordObject(i) for i in tmp]",
"def allow_relation(self, obj1, obj2, **hints):\n return None",
"def create_relation_superset(self):\n return filter(lambda x: x[0] != x[1],\n super().create_relation_superset())",
"def listOneToOne(self, **kwargs):\n response = self._post(path='/do/listOneToOne', params=kwargs)\n return response",
"def get_related(this_obj, other_obj, m2m=False):\n # is het niet raar dat je voor twee concrete objecten ophaalt naar welke van het ene type\n # verwezen wordt vanuit het andere type? Of is dat om de vorige/volgende te kunnen bepalen?\n # als ik kijk naar het gebruik in GetRelations dan is het tweede argument ook niet een object\n # maar een relatie (uit de fields verzameling)\n if m2m:\n fields = [x for x in other_obj._meta.many_to_many]\n else:\n fields = [x for x in other_obj._meta.get_fields() if x.name != 'project' and\n x.get_internal_type() == 'ForeignKey']\n for fld in fields:\n if fld.related_model == this_obj._meta.model:\n related_name = fld.related_query_name()\n break\n else:\n return None # not found\n try:\n return this_obj.__getattribute__(related_name).all()\n except UnboundLocalError:\n return None\n # zou je deze ook kunnen vervangen door een aanroep van get_relation en dan met de opgehaalde\n # naam de gerelateerde objecten ophalen en meteen de vorige en de volgende bepalen?\n # (heeft uiteraard konsekwenties voor de aanroepende code)\n # oorspronkelijk lijkt dat ook zo geweest te zijn, de functie heette toen get_relation en het\n # gedeelte dat nu nog zo heet was daarin hardgecodeerd\n # deze functie wordt alleen aangeroepen in een paar methoden van de hieronder opgenomen klasse\n # GetRelations, namelijk om de namen van relaties uit andere objecten naar het huidige te kunnen\n # bepalen.\n # Als je get_relation zoals die nu is gebruikt zou je dat onderscheid (van versus naar relaties)\n # met dezelfde functie kunnen afhandelen",
"def get_related_objects(self):\n result = []\n if self['name'] != None:\n tmp = ObjectDefinition.objects.filter(use__has_field=self['name'], object_type=self['object_type'])\n for i in tmp: result.append(i)\n return result",
"def prepare_actor_entity_relation(self, object):\n relations = [\n actor_role.get_relation_status_display() for actor_role in\n ActorRole.objects.filter(actor__in=[object]).all()]\n\n result = relations\n result = filter(None, result)\n\n return list(set(result))"
] | [
"0.67867416",
"0.6581883",
"0.60937566",
"0.6083249",
"0.6047415",
"0.603017",
"0.58113414",
"0.5705639",
"0.55900675",
"0.55657053",
"0.5440979",
"0.54282725",
"0.5387697",
"0.53740245",
"0.5348175",
"0.5317037",
"0.53037167",
"0.5272414",
"0.5238443",
"0.51848626",
"0.5180718",
"0.51781887",
"0.5160339",
"0.51417154",
"0.5135437",
"0.5123248",
"0.50880307",
"0.508412",
"0.50814706",
"0.5049806"
] | 0.7039067 | 0 |
Return the one to many relations (relType == MANY). | def getmanyrelations(self):
    return self.getfieldnames('MANY') | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def getrelations(self):\n return self.getfieldnames('ONE')",
"def relations(self):\n return set(self.triples()[\"relation\"])",
"def relationships(self):",
"def get_relations(self):\n triples = list(self.get_triples())\n\n for s, p, o in triples:\n if not p.startswith(\"rel\"):\n s, o = int(s.id), int(o.id)\n yield {\"predicate\": p,\n \"subject\": s,\n \"subject_nodes\": list(self.get_descendants(s, triples)),\n \"object\": o,\n \"object_nodes\": list(self.get_descendants(o, triples)),\n }",
"def _filter_related_m2m(self, rel):\n field = rel.field\n if isinstance(field, models.ManyToManyField):\n if self._join_allowed(rel.parent_model, rel.model, field):\n return rel",
"def relationship_types(self):\n return frozenset(self._relationships_by_type.keys())",
"def relationship(cls):\n return relationship.many_to_one(cls, 'relationship')",
"def one_to_many(table, backref):\n return relationship(table, back_populates=backref, lazy=\"dynamic\", viewonly=True)",
"def get_related_objects(self):\n result = []\n if self['name'] != None:\n tmp = ObjectDefinition.objects.filter(use__has_field=self['name'], object_type=self['object_type'])\n for i in tmp: result.append(i)\n return result",
"def _find_relations(self, node, depth=0):\n depth += 1\n\n model = node.model\n opts = model._meta\n\n # determine relational fields to determine paths\n forward_fields = opts.fields\n reverse_fields = opts.get_all_related_objects()\n\n forward_o2o = filter(self._filter_one2one, forward_fields)\n reverse_o2o = filter(self._filter_related_one2one, reverse_fields)\n\n forward_fk = filter(self._filter_fk, forward_fields)\n reverse_fk = filter(self._filter_related_fk, reverse_fields)\n\n forward_m2m = filter(self._filter_m2m, opts.many_to_many)\n reverse_m2m = filter(self._filter_related_m2m,\n opts.get_all_related_many_to_many_objects())\n\n # iterate m2m relations\n for f in forward_m2m:\n kwargs = {\n 'parent': node,\n 'model': f.rel.to,\n 'relation': 'manytomany',\n 'reverse': False,\n 'related_name': f.name,\n 'accessor_name': f.name,\n 'nullable': True,\n 'depth': depth,\n }\n self._add_node(**kwargs)\n\n # iterate over related m2m fields\n for r in reverse_m2m:\n kwargs = {\n 'parent': node,\n 'model': r.model,\n 'relation': 'manytomany',\n 'reverse': True,\n 'related_name': r.field.related_query_name(),\n 'accessor_name': r.get_accessor_name(),\n 'nullable': True,\n 'depth': depth,\n }\n self._add_node(**kwargs)\n\n # iterate over one2one fields\n for f in forward_o2o:\n kwargs = {\n 'parent': node,\n 'model': f.rel.to,\n 'relation': 'onetoone',\n 'reverse': False,\n 'related_name': f.name,\n 'accessor_name': f.name,\n 'nullable': False,\n 'depth': depth,\n }\n self._add_node(**kwargs)\n\n # iterate over related one2one fields\n for r in reverse_o2o:\n kwargs = {\n 'parent': node,\n 'model': r.model,\n 'relation': 'onetoone',\n 'reverse': True,\n 'related_name': r.field.related_query_name(),\n 'accessor_name': r.get_accessor_name(),\n 'nullable': False,\n 'depth': depth,\n }\n self._add_node(**kwargs)\n\n # iterate over fk fields\n for f in forward_fk:\n kwargs = {\n 'parent': node,\n 'model': f.rel.to,\n 'relation': 'foreignkey',\n 'reverse': False,\n 'related_name': f.name,\n 'accessor_name': f.name,\n 'nullable': f.null,\n 'depth': depth,\n }\n self._add_node(**kwargs)\n\n # iterate over related foreign keys\n for r in reverse_fk:\n kwargs = {\n 'parent': node,\n 'model': r.model,\n 'relation': 'foreignkey',\n 'reverse': True,\n 'related_name': r.field.related_query_name(),\n 'accessor_name': r.get_accessor_name(),\n 'nullable': True,\n 'depth': depth,\n }\n self._add_node(**kwargs)\n\n return node",
"def get_relations(self):\n if not hasattr(self, '_BasePublication__relations_cache'):\n tree_opts = Rubric._mptt_meta\n self.__relations_cache = self.forward_relations.select_related('rubric', 'to_publication').order_by(\n 'rubric__%s' % tree_opts.tree_id_attr, 'rubric__%s' % tree_opts.left_attr)\n return self.__relations_cache",
"def get_all_relations(\n self,\n node: Tuple[str, str],\n relation: Optional[str] = None,\n ) -> List[Relation]:\n source_rels = self.get_source_relations(target=node, relation=relation)\n target_rels = self.get_target_relations(source=node, relation=relation)\n all_rels = source_rels + target_rels\n return all_rels",
"def get_all_associations(self):\n return",
"def get_related(this_obj, other_obj, m2m=False):\n # is het niet raar dat je voor twee concrete objecten ophaalt naar welke van het ene type\n # verwezen wordt vanuit het andere type? Of is dat om de vorige/volgende te kunnen bepalen?\n # als ik kijk naar het gebruik in GetRelations dan is het tweede argument ook niet een object\n # maar een relatie (uit de fields verzameling)\n if m2m:\n fields = [x for x in other_obj._meta.many_to_many]\n else:\n fields = [x for x in other_obj._meta.get_fields() if x.name != 'project' and\n x.get_internal_type() == 'ForeignKey']\n for fld in fields:\n if fld.related_model == this_obj._meta.model:\n related_name = fld.related_query_name()\n break\n else:\n return None # not found\n try:\n return this_obj.__getattribute__(related_name).all()\n except UnboundLocalError:\n return None\n # zou je deze ook kunnen vervangen door een aanroep van get_relation en dan met de opgehaalde\n # naam de gerelateerde objecten ophalen en meteen de vorige en de volgende bepalen?\n # (heeft uiteraard konsekwenties voor de aanroepende code)\n # oorspronkelijk lijkt dat ook zo geweest te zijn, de functie heette toen get_relation en het\n # gedeelte dat nu nog zo heet was daarin hardgecodeerd\n # deze functie wordt alleen aangeroepen in een paar methoden van de hieronder opgenomen klasse\n # GetRelations, namelijk om de namen van relaties uit andere objecten naar het huidige te kunnen\n # bepalen.\n # Als je get_relation zoals die nu is gebruikt zou je dat onderscheid (van versus naar relaties)\n # met dezelfde functie kunnen afhandelen",
"def settable_relations(cls):\n return [r for r in cls.relations\n if getattr(cls, r).property.viewonly is False]",
"def prepare_actor_entity_relation(self, object):\n relations = [\n actor_role.get_relation_status_display() for actor_role in\n ActorRole.objects.filter(actor__in=[object]).all()]\n\n result = relations\n result = filter(None, result)\n\n return list(set(result))",
"def RelatedRecords(self, default=[{}]):\n tmp = self.data.get('metadata', {}).get('related_records', default)\n return [HEP.RelatedRecordObject(i) for i in tmp]",
"def associated_objects(self):\n return self._associated_objects",
"def relationships(self, r_type=None, n_ids=()):\n if r_type is None:\n r_sets = []\n else:\n r_sets = [self._relationships_by_type.get(r_type, frozenset())]\n if not n_ids or (hasattr(n_ids, \"__iter__\") and all(n_id is None for n_id in n_ids)):\n pass\n elif isinstance(n_ids, Sequence):\n for n_index, n_id in enumerate_nodes(n_ids):\n if n_id is not None:\n r_sets.append({r_id for r_id, i in self._relationships_by_node.get(n_id, ())\n if i == n_index})\n elif isinstance(n_ids, Set):\n for n_id in n_ids:\n if n_id is not None:\n r_sets.append({r_id for r_id, i in self._relationships_by_node.get(n_id, ())})\n else:\n raise TypeError(\"Nodes must be supplied as a Sequence or a Set\")\n if r_sets:\n return iter(reduce(and_operator, r_sets))\n else:\n return iter(self._relationships)",
"def relations(self):\n\t\treturn [(self.factions[k][0], self._faction_affinity.get(k, 50)) for k in self.factions.keys()]",
"def fk_associations(cls):\n return cls._fk_associations",
"def relations(cls):\n return [c.key for c in cls.__mapper__.iterate_properties\n if isinstance(c, RelationshipProperty)]",
"def _filter_related_one2one(self, rel):\n field = rel.field\n if isinstance(field, models.OneToOneField):\n if self._join_allowed(rel.parent_model, rel.model, field):\n return rel",
"def associatedObjects (self):\n return self.__associatedObjects",
"def many_to_many(name, fromtable, totable):\r\n lfromtable = fromtable.lower()\r\n ltotable = totable.lower()\r\n table = db.Table(name,\r\n Column(ltotable + '_id', Integer, ForeignKey(ltotable + '.id')),\r\n Column(lfromtable + '_id', Integer, ForeignKey(lfromtable + '.id'))\r\n )\r\n\r\n return relationship(totable, secondary=table,\r\n backref=backref(name + '_' + lfromtable + 's', lazy='dynamic'))",
"def has_many(self, keys):\n return self.has_many_values(keys)",
"def joins(self):\n return self._joins",
"def get_queryset(self):\n\n return Relationship.objects.filter(\n Q(from_person=self.request.user.person) |\n Q(to_person=self.request.user.person))",
"def get_goterms_upper(self):\n # Requires GODag is created with 'relationship' in optional_attrs argument\n # pylint: disable=no-member\n return set.union(self.parents, *self.relationship.values())",
"def references(self):\n return self._get_related_resources(False)"
] | [
"0.7096476",
"0.655924",
"0.6366319",
"0.61227846",
"0.60044825",
"0.5877629",
"0.58542055",
"0.5840653",
"0.5739393",
"0.5713277",
"0.56678456",
"0.56351084",
"0.56170446",
"0.55665904",
"0.54818785",
"0.5468127",
"0.5463287",
"0.5452604",
"0.54127836",
"0.53986883",
"0.5376185",
"0.53722906",
"0.53529125",
"0.5346061",
"0.53427976",
"0.5336209",
"0.53302187",
"0.53227335",
"0.5322308",
"0.5321556"
] | 0.72177327 | 0 |
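
A hypothetical sketch of the `getfieldnames` filter that both relation getters above delegate to; the `Field` shape and the relType values beyond ONE/MANY are assumptions, not the actual implementation.

```python
from collections import namedtuple

# assumed minimal field descriptor; the real field objects may differ
Field = namedtuple("Field", ["name", "relType"])

def getfieldnames(fields, rel_type):
    """Return the names of fields whose relType matches rel_type."""
    return [f.name for f in fields if f.relType == rel_type]

fields = [Field("facility", "ONE"), Field("datasets", "MANY"), Field("name", "ATTRIBUTE")]
assert getfieldnames(fields, "ONE") == ["facility"]    # many to one
assert getfieldnames(fields, "MANY") == ["datasets"]   # one to many
```
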
Check whether the entity is consistent with this entity info. The entity is supposed to be a subclass of Entity. Report any abnormalities as warnings to the logger. Return the number of warnings emitted. | def check(self, entity):
    nwarn = 0
    if entity is None:
        return nwarn
    if not issubclass(entity, Entity):
        raise TypeError("invalid argument %s, expect subclass of Entity" %
                        entity)
    cname = entity.__name__
    beanname = self.beanname
    if entity.BeanName is not None and entity.BeanName != beanname:
        log.warning("%s: wrong BeanName '%s', should be '%s'",
                    cname, entity.BeanName, beanname)
        nwarn += 1
    constraint = self.getconstraint()
    if entity.Constraint != constraint:
        log.warning("%s: wrong Constraint '%s', should be '%s'",
                    cname, entity.Constraint, constraint)
        nwarn += 1
    nwarn += self._cmpattrs(self.getattrs(), entity.InstAttr,
                            cname, "attributes")
    nwarn += self._cmpattrs(self.getrelations(), entity.InstRel,
                            cname, "many to one relations")
    nwarn += self._cmpattrs(self.getmanyrelations(), entity.InstMRel,
                            cname, "one to many relations")
    return nwarn | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def check(self):\n\n nwarn = 0\n\n # Check that the set of entity types is the same as in the\n # schema.\n schemanames = set(self.schema.keys())\n clientnames = set(self.client.typemap.keys())\n missing = schemanames - clientnames\n if missing:\n log.warning(\"missing entities: %s\", list(missing))\n nwarn += 1\n spurious = clientnames - schemanames\n if spurious:\n log.warning(\"spurious entities: %s\", list(spurious))\n nwarn += 1\n\n # For each entity type, check that its definition is\n # consistent with the schema.\n for n in schemanames & clientnames:\n log.debug(\"checking entity type %s ...\", n)\n nwarn += self.schema[n].check(self.client.typemap[n])\n\n return nwarn",
"def _validate_entities(self):\n valid = True\n for NAME, ID in self.pattern['classes'].items():\n if not self.ont.knowsClass(ID):\n warnings.warn(\"Pattern contains unknown class %s ; %s.\" % (NAME, ID))\n valid = False\n for NAME, ID in self.pattern['relations'].items():\n if not self.ont.knowsObjectProperty(ID):\n warnings.warn(\"Pattern contains unknown relation %s, %s.\" % (NAME, ID))\n valid = False\n \n # TODO - add check for obsoletion status\n return valid",
"def checkExceptions(self):\n\n nwarn = 0\n\n icatExceptionType = self.client.factory.create('icatExceptionType')\n schemaexceptions = set(icatExceptionType.__keylist__)\n clientexceptions = set(icat.exception.IcatExceptionTypeMap.keys())\n missing = schemaexceptions - clientexceptions\n if missing:\n log.warning(\"missing exception types: %s\", list(missing))\n nwarn += 1\n\n return nwarn",
"def _validate_entity_type(cls, item):\n if item.entity_type not in TARGET_TYPE_TO_TARGET_MODEL:\n cls._add_error(\n 'entity %s' % base_model_validators.ERROR_CATEGORY_TYPE_CHECK,\n 'Entity id %s: Entity type %s is not allowed' % (\n item.id, item.entity_type))",
"def check_influence_sanity(self):\n for influence in crest.get_all_influences(self.model):\n assert influence._name is not None, f\"There is an Influence in {influence._parent._name} ({influence._parent.__class__.__name__}) whose name is 'None'\"\n assert influence._name != \"\", f\"There is an Update in {influence._parent._name} ({influence._parent.__class__.__name__}) whose name is empty string\"\n\n assert isinstance(influence.source, crest.Port), f\"Influence {influence._name}'s source is not a crest.Port\"\n assert influence.source in api.get_sources(influence._parent), f\"Influence's source {influence.source._name} ({influence.source}) is not in the sources of entity {influence._parent._name} ({influence._parent})\"\n\n assert isinstance(influence.target, crest.Port), f\"Influence {influence._name}'s target is not a crest.Port\"\n assert influence.target in api.get_targets(influence._parent), f\"Influence's target {influence.target._name} ({influence.target}) is not in the targets of entity {influence._parent._name} ({influence._parent})\"\n\n assert isinstance(influence.function, (crestml.LearnedFunction, types.FunctionType)), f\"Influence {influence._name}'s function needs to be of type types.FunctionType or crestdsl.ml.LearnedFunction\"\n assert len(inspect.signature(influence.function).parameters) == 1, f\"An influence should not have arguments (except the input value)\"",
"def check_consistency(self):\n raise NotImplementedError()",
"def validate(self):\n self._check_type()",
"def _validate(self):\n fields, schema = self.__dict__, self._def.default\n extra_fields = fields.viewkeys() - schema.viewkeys()\n if len(extra_fields) > 0:\n raise AttributeError('Fields found that are not in the schema: %r' % (list(extra_fields)))\n for key in fields.iterkeys():\n if type(fields[key]) is not type(schema[key]):\n raise AttributeError('Invalid %s for field \"%s\", should be %s' %\n (type(fields[key]), key, type(schema[key])))",
"def _validate_entity_type_and_entity_id_feedback_reference(cls, item):\n for reference in item.feedback_message_references:\n try:\n split_thread_id = reference['thread_id'].split('.')\n if split_thread_id[0] != reference['entity_type'] or (\n split_thread_id[1] != reference['entity_id']):\n cls._add_error(\n 'feedback message %s' % (\n base_model_validators.ERROR_CATEGORY_REFERENCE_CHECK\n ),\n 'Entity id %s: Invalid feedback reference: %s' % (\n item.id, reference))\n except Exception:\n cls._add_error(\n 'feedback message %s' % (\n base_model_validators.ERROR_CATEGORY_REFERENCE_CHECK),\n 'Entity id %s: Invalid feedback reference: %s' % (\n item.id, reference))",
"def _check_consistency(self) -> None:\n lbl_vals_from_metadata = set(self.infos.keys())\n lbl_vals_from_data = set(np.unique(self.data))\n # TODO: check if numerical datatype shenanigans ruin the day\n # i.e. something along the lines of 1.0 != 1\n symm_diff = lbl_vals_from_data ^ lbl_vals_from_metadata\n\n if len(symm_diff) != 0:\n msg = (f'Label mismatch between data and metadata! Expected vanishing '\n f'symmetric difference but got: {symm_diff}')\n raise ValueError(msg)",
"def _is_consistent(self) -> bool:\n try:\n enforce(\n isinstance(self.dialogue_reference, tuple),\n \"Invalid type for 'dialogue_reference'. Expected 'tuple'. Found '{}'.\".format(\n type(self.dialogue_reference)\n ),\n )\n enforce(\n isinstance(self.dialogue_reference[0], str),\n \"Invalid type for 'dialogue_reference[0]'. Expected 'str'. Found '{}'.\".format(\n type(self.dialogue_reference[0])\n ),\n )\n enforce(\n isinstance(self.dialogue_reference[1], str),\n \"Invalid type for 'dialogue_reference[1]'. Expected 'str'. Found '{}'.\".format(\n type(self.dialogue_reference[1])\n ),\n )\n enforce(\n type(self.message_id) is int,\n \"Invalid type for 'message_id'. Expected 'int'. Found '{}'.\".format(\n type(self.message_id)\n ),\n )\n enforce(\n type(self.target) is int,\n \"Invalid type for 'target'. Expected 'int'. Found '{}'.\".format(\n type(self.target)\n ),\n )\n\n # Light Protocol Rule 2\n # Check correct performative\n enforce(\n isinstance(self.performative, SigningMessage.Performative),\n \"Invalid 'performative'. Expected either of '{}'. Found '{}'.\".format(\n self.valid_performatives, self.performative\n ),\n )\n\n # Check correct contents\n actual_nb_of_contents = len(self._body) - DEFAULT_BODY_SIZE\n expected_nb_of_contents = 0\n if self.performative == SigningMessage.Performative.SIGN_TRANSACTION:\n expected_nb_of_contents = 2\n enforce(\n isinstance(self.terms, CustomTerms),\n \"Invalid type for content 'terms'. Expected 'Terms'. Found '{}'.\".format(\n type(self.terms)\n ),\n )\n enforce(\n isinstance(self.raw_transaction, CustomRawTransaction),\n \"Invalid type for content 'raw_transaction'. Expected 'RawTransaction'. Found '{}'.\".format(\n type(self.raw_transaction)\n ),\n )\n elif self.performative == SigningMessage.Performative.SIGN_MESSAGE:\n expected_nb_of_contents = 2\n enforce(\n isinstance(self.terms, CustomTerms),\n \"Invalid type for content 'terms'. Expected 'Terms'. Found '{}'.\".format(\n type(self.terms)\n ),\n )\n enforce(\n isinstance(self.raw_message, CustomRawMessage),\n \"Invalid type for content 'raw_message'. Expected 'RawMessage'. Found '{}'.\".format(\n type(self.raw_message)\n ),\n )\n elif self.performative == SigningMessage.Performative.SIGNED_TRANSACTION:\n expected_nb_of_contents = 1\n enforce(\n isinstance(self.signed_transaction, CustomSignedTransaction),\n \"Invalid type for content 'signed_transaction'. Expected 'SignedTransaction'. Found '{}'.\".format(\n type(self.signed_transaction)\n ),\n )\n elif self.performative == SigningMessage.Performative.SIGNED_MESSAGE:\n expected_nb_of_contents = 1\n enforce(\n isinstance(self.signed_message, CustomSignedMessage),\n \"Invalid type for content 'signed_message'. Expected 'SignedMessage'. Found '{}'.\".format(\n type(self.signed_message)\n ),\n )\n elif self.performative == SigningMessage.Performative.ERROR:\n expected_nb_of_contents = 1\n enforce(\n isinstance(self.error_code, CustomErrorCode),\n \"Invalid type for content 'error_code'. Expected 'ErrorCode'. Found '{}'.\".format(\n type(self.error_code)\n ),\n )\n\n # Check correct content count\n enforce(\n expected_nb_of_contents == actual_nb_of_contents,\n \"Incorrect number of contents. Expected {}. Found {}\".format(\n expected_nb_of_contents, actual_nb_of_contents\n ),\n )\n\n # Light Protocol Rule 3\n if self.message_id == 1:\n enforce(\n self.target == 0,\n \"Invalid 'target'. Expected 0 (because 'message_id' is 1). 
Found {}.\".format(\n self.target\n ),\n )\n except (AEAEnforceError, ValueError, KeyError) as e:\n _default_logger.error(str(e))\n return False\n\n return True",
"def is_valid_entity(self):\n return is_correct_cve_id(self.cve_id)",
"def verify(self):\n if len(self.headers) not in [1, 5]:\n raise IncorrectNumberOfExtensions(\"header\", \"5\", self)\n if len(self.pixeldata) not in [1, 2, 3]:\n raise IncorrectNumberOfExtensions(\"pixel\", \"1, 2, or 3\", self)\n if len(self.tabledata) not in [0,4]:\n raise IncorrectNumberOfExtensions(\"table\", \"4\", self)",
"def _ValidateFields(self, entity):\n # if field_universe is not defined just return true\n if not self._field_universe:\n return True\n\n valid = True\n for field_tuple in entity.local_field_names.values():\n if not self._ValidateField(field_tuple.field, entity):\n valid = False\n return valid",
"def check(self):\n raise NotImplementedError('Must be implemented by subclass.')",
"def checkDiffTypes(self):\n count = 0\n for t in self.types:\n if t > 0:\n count = count + 1\n return count",
"def _check_consistency(self):\n # check that all required attributes in the schema are contained in the description\n required_attributes = [\n attribute.name\n for attribute in self.data_model.attributes\n if attribute.is_required\n ]\n if not all(\n attribute_name in self.values for attribute_name in required_attributes\n ):\n raise AttributeInconsistencyException(\"Missing required attribute.\")\n\n # check that all values are defined in the data model\n all_attributes = [attribute.name for attribute in self.data_model.attributes]\n if not all(key in all_attributes for key in self.values.keys()):\n raise AttributeInconsistencyException(\n \"Have extra attribute not in data model.\"\n )\n\n # check that each of the provided values are consistent with that specified in the data model\n for key, value in self.values.items():\n attribute = next(\n (\n attribute\n for attribute in self.data_model.attributes\n if attribute.name == key\n ),\n None,\n )\n if not isinstance(value, attribute.type):\n # values does not match type in data model\n raise AttributeInconsistencyException(\n \"Attribute {} has incorrect type: {}\".format(\n attribute.name, attribute.type\n )\n )\n if not type(value) in ALLOWED_ATTRIBUTE_TYPES:\n # value type matches data model, but it is not an allowed type\n raise AttributeInconsistencyException(\n \"Attribute {} has unallowed type: {}. Allowed types: {}\".format(\n attribute.name, type(value), ALLOWED_ATTRIBUTE_TYPES,\n )\n )",
"def assert_goodness(self):\n if self._setted:\n self.assert_stored_iss()\n self.assert_stored_ks()\n ## Check idxs\n self.assert_stored_idxs()\n ## Check sp_relative_pos\n self.assert_stored_sp_rel_pos()",
"def test_entities__Entities__1():\n zope.interface.verify.verifyObject(IEntities, Entities())",
"def __class_validation(cls):\n\n # check if this class is a subClass of Model\n if not issubclass(cls, db.Model):\n raise AttributeError(cls.__name__ + \" is not subclass of \" + db.Model.__name__)",
"def checkConsistency(self):\n return _libsbml.SBMLDocumentPlugin_checkConsistency(self)",
"def ensure_valid_data():\n cursor = connection.cursor()\n cursor.execute(\"SELECT id, name, st_area(geom) FROM firestation_firedepartment where st_area(geom)>6.99\")\n messages = []\n\n for id, name, area in cursor.fetchall():\n messages.append('{0} ({1}) has an area of {2}.'.format(name, id, area))\n\n if messages:\n mail_admins('Invalid Geometries Detected', message='\\n'.join(messages))\n\n cursor.execute(\"SELECT COUNT(*) FROM genericm2m_relatedobject;\")\n generic_count = cursor.fetchone()\n\n if generic_count[0] < 2940:\n generic_count_message = \"Related government units has dropped below 2,940.\"\n mail_admins('Low number of government units alert.', message=generic_count_message)",
"def schema_check(self):\n\n try:\n self.schema.assertValid(self.get_content())\n except lxml.etree.DocumentInvalid:\n logger.error(\"PDU failed schema check\")\n for line in self.pretty_print_content().splitlines():\n logger.warning(line)\n raise",
"def log_check_warnings(self):\n self._log_check_warnings_object(self._info)\n self._log_check_warnings_object(self._tags)\n self._log_check_warnings_object(self._schemes)\n self._log_check_warnings_object(self._paths)\n self._log_check_warnings_object(self._securityDefinitions)\n self._log_check_warnings_object(self._definitions)\n pass",
"async def test_one_low_severity_warning(self):\n response = await self.collect(get_request_json_return_value=self.vulnerabilities_json)\n self.assert_measurement(response, value=\"1\", entities=[self.expected_entity])",
"def entities_check(self, token_sequence, entities):\n entities_count = {k: len(v) for k, v in entities.items()}\n for token in token_sequence:\n for special_token in self.possible_entities:\n if token == special_token:\n if special_token not in entities_count:\n return False\n else:\n entities_count[special_token] -= 1\n\n for _, v in entities_count.items():\n if v < 0:\n return False\n\n return True",
"def sanity_check(self):\n return True",
"def check(self):\n raise NotImplementedError",
"def has_warnings(self) -> bool:",
"def check_errors(self):\n raise NotImplementedError(\"Implement it in a subclass.\")"
] | [
"0.7488003",
"0.59518033",
"0.5554449",
"0.5366329",
"0.5356038",
"0.52137417",
"0.51991415",
"0.5177108",
"0.51752853",
"0.51362544",
"0.5122849",
"0.5115364",
"0.5110129",
"0.5092445",
"0.5081767",
"0.5057919",
"0.50373167",
"0.50053465",
"0.4978771",
"0.49643213",
"0.49566138",
"0.49473724",
"0.49453786",
"0.49441618",
"0.49192446",
"0.4916991",
"0.49077857",
"0.4906207",
"0.4900924",
"0.49001974"
] | 0.7879822 | 0 |
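
A sketch of a driver loop around `EntityInfo.check`, in the style of the first negative above; `schema` (type name -> EntityInfo) and `typemap` (type name -> Entity subclass) are assumed inputs, not part of the original code.

```python
def check_all(schema, typemap):
    # schema: dict mapping type name -> EntityInfo (assumption)
    # typemap: dict mapping type name -> Entity subclass (assumption)
    nwarn = 0
    for name in sorted(set(schema) & set(typemap)):
        nwarn += schema[name].check(typemap[name])
    return nwarn  # total number of warnings emitted
```
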
Search for entities defined at the server. Return a dict with type names as keys and EntityInfo objects as values. | def getentities(self):
    entities = {}
    # The following will create lots of errors in suds.client, one
    # for every type that is not an entity. Disable their logger
    # temporarily to avoid cluttering the log.
    sudslog = logging.getLogger('suds.client')
    sudssav = sudslog.disabled
    sudslog.disabled = True
    for t in self.gettypes():
        try:
            info = EntityInfo(t, self.client)
        except ICATError:
            continue
        entities[t] = info
    sudslog.disabled = sudssav
    return entities | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def readEntities(self):\r\n entities = {}\r\n \r\n # Regexes must be greedy to prevent matching outer entity and end_entity strings\r\n # Regexes have re.DOTALL to match newlines\r\n for m in re.finditer(\"ENTITY (.*?)END_ENTITY;\", self.data, re.DOTALL):\r\n entity = {}\r\n raw_entity_str = m.groups()[0]\r\n\r\n entity[\"name\"] = re.search(\"(.*?)[;|\\s]\", raw_entity_str).groups()[0].upper()\r\n\r\n subtypeofmatch = re.search(\".*SUBTYPE OF \\((.*?)\\);\", raw_entity_str)\r\n entity[\"supertype\"] = subtypeofmatch.groups()[0].upper() if subtypeofmatch else None\r\n\r\n # find the shortest string matched from the end of the entity type header to the\r\n # first occurence of a NO_ATTR string (when it occurs on a new line)\r\n inner_str = re.search(\";(.*?)$\", raw_entity_str, re.DOTALL).groups()[0] \r\n\r\n attrs_str = min([inner_str.partition(\"\\r\\n \"+a)[0] for a in self.NO_ATTR])\r\n attrs = []\r\n for am in re.finditer(\"(.*?) : (.*?);\", attrs_str, re.DOTALL):\r\n name, attr_type = [s.replace(\"\\r\\n\\t\",\"\") for s in am.groups()]\r\n attrs.append((name, attr_type))\r\n \r\n entity[\"attributes\"] = attrs\r\n entities[entity[\"name\"]] = entity\r\n \r\n\r\n return entities",
"def _extract_entity_class_registry(self):\n for description in self.sa_query.column_descriptions:\n if \"entity\" in description:\n declarative_meta = description[\"entity\"]\n _class_registry = getattr(\n declarative_meta, \"_decl_class_registry\", None)\n if _class_registry is not None:\n entity_class_registry = {}\n for elmnt in _class_registry.values():\n if type(elmnt) is DeclarativeMeta:\n description = elmnt.__table__.description\n entity_class_registry[description] = elmnt\n return entity_class_registry\n return None",
"def entities(self, params=None, **kwargs):\n entities = entity_map()\n\n # Sort entities into type => <set of aliases>.\n type_to_aliases = {}\n for alias in entities:\n entity = entities[alias]\n\n if isinstance(entity, Facility):\n type_name = 'Facilities'\n elif isinstance(entity, Ship):\n type_name = 'Ships'\n elif isinstance(entity, Defense):\n type_name = 'Defense'\n elif isinstance(entity, Technology):\n type_name = 'Technology'\n\n if type_name not in type_to_aliases:\n type_to_aliases[type_name] = set()\n type_to_aliases[type_name].add(alias)\n\n nick = self.irc.source.split('!')[0]\n self.irc.reply('Sending list of entities to %s.' % nick)\n\n for type_name in type_to_aliases:\n aliases = sorted(list(type_to_aliases[type_name]))\n self.irc.privmsg(nick, '%s: %s' % (type_name, ', '.join(aliases)))",
"def readEntities(self):\n entities = {}\n \n # Regexes must be greedy to prevent matching outer entity and end_entity strings\n # Regexes have re.DOTALL to match newlines\n for m in re.finditer(\"ENTITY (.*?)END_ENTITY;\", self.data, re.DOTALL):\n entity = {}\n raw_entity_str = m.groups()[0]\n\n entity[\"name\"] = re.search(\"(.*?)[;|\\s]\", raw_entity_str).groups()[0].upper()\n \n is_supertype = re.search(\"SUPERTYPE\", raw_entity_str) != None\n if is_supertype:\n supertypeofmatch = re.search(\".*SUPERTYPE OF \\((.*?)\\)\", raw_entity_str)\n is_abstract_supertype = re.search(\"ABSTRACT SUPERTYPE\", raw_entity_str) != None\n \n is_subtype = re.search(\"SUBTYPE\", raw_entity_str) != None\n\n if is_supertype and is_subtype:\n if is_abstract_supertype:\n # abstract supertype of ... subtype of ... ;\n match = re.search(\".*ABSTRACT SUPERTYPE OF\\s+\\((.*?)\\)\\s+SUBTYPE OF\\s+\\((.*?)\\);\", raw_entity_str, re.DOTALL)\n entity[\"isabstract\"] = True\n supertypeof,subtypeof = match.groups()\n entity[\"supertype\"] = subtypeof.upper()\n supertypeof = re.sub('\\s', '', supertypeof)\n supertypeofmatch = re.search(\".*\\((.*?)\\)\", supertypeof, re.DOTALL)\n subtypes = supertypeofmatch.groups()[0].upper().split(',')\n entity[\"subtypes\"] = subtypes\n else:\n # supertype of ... subtype of ... ;\n match = re.search(\".*SUPERTYPE OF\\s+\\((.*?)\\)\\s+SUBTYPE OF\\s+\\((.*?)\\);\", raw_entity_str, re.DOTALL)\n entity[\"isabstract\"] = False\n supertypeof,subtypeof = match.groups()\n entity[\"supertype\"] = subtypeof.upper()\n supertypeof = re.sub('\\s', '', supertypeof)\n supertypeofmatch = re.search(\".*\\((.*?)\\)\", supertypeof, re.DOTALL)\n subtypes = supertypeofmatch.groups()[0].upper().split(',')\n entity[\"subtypes\"] = subtypes\n elif is_subtype:\n # subtype of ... ;\n subtypeofmatch = re.search(\".*SUBTYPE OF \\((.*?)\\);\", raw_entity_str)\n entity[\"supertype\"] = subtypeofmatch.groups()[0].upper() if subtypeofmatch else None\n\n # find the shortest string matched from the end of the entity type header to the\n # first occurence of a NO_ATTR string (when it occurs on a new line)\n inner_str = re.search(\";(.*?)$\", raw_entity_str, re.DOTALL).groups()[0]\n\n attrs_str = min([inner_str.partition(\"\\r\\n \"+a)[0] for a in self.NO_ATTR])\n attrs = []\n for am in re.finditer(\"(\\S*?) : (.*?);\", attrs_str, re.DOTALL):\n name, attr_type = [s.replace(\"\\r\\n\\t\",\"\") for s in am.groups()]\n attrs.append((name, attr_type))\n \n entity[\"attributes\"] = attrs\n entities[entity[\"name\"]] = entity\n \n\n return entities",
"def extract_entities(self) :\n entities = []\n googleEntityList = self.googleLanguageModel.analyze_entities() \n watsonEntityList = self.watsonLanguageModel['entities']\n\n for entity in googleEntityList.entities[:self.entitySizeLimit]:\n if len(entity.metadata) > 0:\n entities.append({ 'name' : entity.name, 'metadata' : entity.metadata})\n \n for entity in watsonEntityList[:self.entitySizeLimit]: \n entities.append({ 'name': entity['text'], 'metadata': entity.get('disambiguation', {})}) \n\n return entities",
"def list_foundation_entity_instances(entity):\n return jsonify([serialise_instance(instance) for instance in STORE.values() if instance.entity == entity])",
"def load_entities():\n # TODO dynamic look into entities folder\n return ['location']",
"def get_entities(self, data):\n\n entities = None\n\n if \"d\" in data:\n logger.debug(f\"'d' found.\")\n if \"results\" in data.get(\"d\"):\n logger.debug(f\"'d.results' found.\")\n entities = data[\"d\"].get(\"results\")\n else:\n entities = data.get(\"d\")\n elif \"value\" in data:\n logger.debug(f\"'value' found.\")\n entities = data.get(\"value\")\n else:\n logger.debug(f\"No entities found.\")\n\n return entities",
"def get_entities_dict(p_str):\n nlp = en_core_web_sm.load()\n doc = nlp(p_str)\n entities = {}\n relevant_keywords = []\n list_of_types = ['NORP', 'ORG', 'GPE', 'LAW', 'LANGUAGE']\n for X in doc.ents:\n if not(X.label_ in entities):\n entities[X.label_] = []\n entities[X.label_].append(X.text)\n if X.label_ in list_of_types:\n relevant_keywords.append(X.text)\n print(entities)\n print(\"HERE\")\n print(relevant_keywords)\n return entities, relevant_keywords",
"def entity_types(self, eid):\n types = self._load_entity_types()\n return types[eid]",
"def _get_entity_mappings(query_list: ProcessedQueryList) -> Dict:\n entity_labels = set()\n logger.info(\"Generating Entity Labels...\")\n for d, i, entities in zip(\n query_list.domains(), query_list.intents(), query_list.entities()\n ):\n if len(entities):\n for entity in entities:\n e = str(entity.entity.type)\n entity_labels.add(f\"{d}.{i}.B|{e}\")\n entity_labels.add(f\"{d}.{i}.I|{e}\")\n entity_labels.add(f\"{d}.{i}.S|{e}\")\n entity_labels.add(f\"{d}.{i}.E|{e}\")\n\n e = \"O|\"\n entity_labels.add(f\"{d}.{i}.{e}\")\n\n entity_labels = sorted(list(entity_labels))\n return dict(zip(entity_labels, range(len(entity_labels))))",
"def entity_sources(self, code: str) -> Dict[str, Tuple[str, str]]:\n return {}",
"def get_named_entities(\n self,\n identity: Optional[str] = None,\n type: Optional[str] = None,\n subtype: Optional[str] = None,\n ) -> List[NamedEntity]:\n found: List[NamedEntity] = []\n for named_entity in [\n e for h in self.headlines for s in h.sentences for e in s.named_entities\n ]:\n if identity and (identity != named_entity.identity):\n continue\n if type and (type != named_entity.type):\n continue\n if subtype and (subtype != named_entity.subtype):\n continue\n found.append(named_entity)\n return found",
"def find_objects_by_type():\n try:\n keyword = request.form[\"keyword\"]\n object_type = request.form[\"object_type\"]\n\n # Get entities based on the selection\n entities = g.user.get_api().get_by_object_types(keyword, object_type)\n\n # Parse response object into table data\n data = raw_entities_to_table_data(entities)\n\n # If no entities were found reutrn with failure state and message\n result = get_result_template()\n if len(data[\"data\"]) == 0:\n result[\"status\"] = \"FAIL\"\n result[\"message\"] = 'No entities of type \"{TYPE}\" were found.'.format(\n TYPE=object_type\n )\n else:\n result[\"status\"] = \"SUCCESS\"\n result[\"data\"] = {\"table_field\": data}\n return jsonify(result_decorator(result))\n\n except Exception as e:\n result = get_result_template()\n result[\"status\"] = \"FAIL\"\n result[\"message\"] = str(e)\n return jsonify(result_decorator(result))",
"def entities(self):\n return self._entities",
"def getRunEntities(self) -> dict:\n return self._entities",
"def getEntities(self, request_data: dict = None) -> dict:\n if self.loggingEnabled:\n self.logger.debug(f\"Starting getEntities\")\n path = \"/access/entities\"\n if request_data is None or type(request_data) != dict:\n raise Exception(\"Expected a dictionary to fetch entities\")\n res = self.connector.postData(\n self.endpoint + path, data=request_data, headers=self.header\n )\n return res",
"def entities_data(self):\n entities_item = self.data['entities']\n my_entities_dict = {\"hashtags\": \"\"}\n for tag in entities_item['hashtags']:\n # Delimits hashtags with ` this is temporary, eventually there will be foreign keys linkng these values\n my_entities_dict['hashtags'] += tag['text'] + '`'\n my_entities_dict['tweet_mentions'] = \"\"\n my_entities_dict['links_mention'] = ''\n for person in entities_item['user_mentions']:\n # This is similar to the above\n my_entities_dict['tweet_mentions'] += person['id_str'] + '`'\n for links in entities_item['urls']:\n # Similar to the above\n my_entities_dict['links_mention'] += links['url'] + '`'\n return my_entities_dict",
"def get_entities(self, type, offset=0, limit=20):\n # url = '{}/ngsi-ld/v1/entities?type={}&offset={}&limit={}'.format(self.url, type, offset, limit)\n url = '{}/ngsi-ld/v1/entities?type={}'.format(self.url, type, offset, limit)\n r = requests.get(url, headers=self.headers_with_link)\n return r.json()",
"def getEntities(dbn='core', env=None):\n global gDbEnv\n\n if env is None:\n env = gDbEnv\n\n if env is None:\n raise DatabaseError(\"Database environment not set up\")\n\n entries = []\n subDb = gDbEnv.open_db(dbn.encode(\"utf-8\"), dupsort=True) # open named sub db named dbn within env\n with gDbEnv.begin(db=subDb) as txn: # txn is a Transaction object\n with txn.cursor() as cursor:\n if cursor.first(): # first key in database\n while True:\n key = cursor.key().decode()\n if len(key) == DID_LENGTH and \"/\" not in key:\n value = cursor.value().decode()\n ser, sep, sig = value.partition(SEPARATOR)\n try:\n dat = json.loads(ser, object_pairs_hook=ODict)\n except ValueError as ex:\n if cursor.next():\n continue\n else:\n break\n\n try:\n did, index = dat[\"signer\"].rsplit(\"#\", maxsplit=1)\n except (AttributeError, ValueError) as ex:\n if cursor.next():\n continue\n else:\n break\n\n entry = ODict(did=key)\n if did == key: # self signed so agent\n entry[\"kind\"] = \"agent\"\n else: # not self signed so thing\n entry[\"kind\"] = \"thing\"\n entries.append(entry)\n\n if not cursor.next(): # next key in database if any\n break\n return entries",
"def generate_entities(self, data):\r\n\t\t# create an empty dictionary to hold entities\r\n\t\tent_dic = {}\r\n\r\n\t\tfor row in data.itertuples():\r\n\t\t\t# feed nlp the first line's set of keywords\r\n\t\t\tdoc = self.nlp(row.keywords)\t\r\n\t\t\t# begin iterating through the nlp's entities\r\n\t\t\tfor ent in doc.ents:\r\n\r\n\t\t\t\t# For each entity, check if the label exists in 'ent_dic'.\r\n\t\t\t\t# If it does, append the entity into the key, value pair.\r\n\t\t\t\t# If it doesn't, create a new key, value pair\r\n\t\t\t\tkey = str(ent.label_) + ''\r\n\t\t\t\tif ent.label_ in ent_dic:\r\n\t\t\t\t\tent_dic[key].append(str(ent)) if not str(ent) in ent_dic[key] else print(f'The entity: {ent} is already in the array')\r\n\t\t\t\telse: \r\n\t\t\t\t\tent_dic[key] = [str(ent)]\r\n\r\n\t\t# return the dictionary of entities\r\n\t\treturn ent_dic",
"def entity_search():\n data = {'EntityType': entity_type}\n parameters = data_to_json(data)\n url = base_url + 'ams/entity/search'\n response = make_request(url, parameters)\n r_value = ''\n if response['Status'] == 0:\n r_value = response['Value']['Records']\n return r_value",
"async def entities_controller(self, request):\n result = {\n \"transports\": [transport.to_json() for transport in self.transport_agents.values()],\n \"customers\": [customer.to_json() for customer in self.customer_agents.values()],\n \"tree\": self.generate_tree(),\n \"stats\": self.get_stats(),\n \"stations\": [station.to_json() for station in self.station_agents.values()]\n }\n return result",
"def _get_global_entities(nest):\n entities = nest.get('global_entities', None)\n if not entities:\n return []\n return list(entities.keys())",
"def get_entities(self):\n return list(self._entities.values())",
"def find_entities(self, area=EVERYWHERE, tags=frozenset()):\n\t\t#entities are added into binary box tree by their binary box cover[s] for the corresponding tags\n\t\t#when searching build a set of all possibly affected entities\n\t\t#then do a real test on the hitboxes\n\t\tif isinstance(tags, str):\n\t\t\ttags = {tags} #M# could use frozenset instead\n\t\tfor entity in self.entities:\n\t\t\tif tags.issubset(entity[\"tags\"]):\n\t\t\t\tif area.collides_with(entity.HITBOX + entity[\"position\"]):\n\t\t\t\t\tyield entity",
"def get_entities_handler(response):\n\n if response.status_code != HTTP_CODE_OK:\n raise HttpError('HTTP GET for Entity Set {0} failed with status code {1}'\n .format(self._name, response.status_code), response)\n\n content = response.json()\n\n return content",
"def fetch_entity_information(\n values: Tuple[str, ...],\n language: str,\n batch_size: int = 5000,\n) -> Tuple[Dict[str, schemas.Entity], Set[str], Dict[str, str]]:\n\n assert 1 <= batch_size, f\"Batch size has to be at least 1.\"\n\n pbar = tqdm.tqdm(total=len(values))\n failed_lookups: Set[str] = set()\n name_qid_map: Dict[str, str] = {}\n entities: Dict[str, schemas.Entity] = {}\n\n for i in range(0, len(values), batch_size):\n chunk = tuple([v.replace(\"_\", \" \") for v in values[i : i + batch_size]])\n entities_chunk = load_entities(language, chunk)\n _failed_lookups = set(chunk)\n\n # Replace entity titles keys in dict with Wikidata QIDs. Add entity description.\n for entity in entities_chunk.values():\n entities[entity.qid] = entity\n name_qid_map[entity.name] = entity.qid\n _failed_lookups.remove(entity.qid)\n\n failed_lookups |= _failed_lookups\n pbar.update(len(chunk))\n\n pbar.close()\n\n return entities, failed_lookups, name_qid_map",
"def get_entity_type_subclass_map(entity_types):\n _logger.info(f'Requesting entity type subclass map from Wikidata ({len(entity_types)} types)...')\n\n subclass_map = {}\n for entity_type in entity_types:\n subclasses = _wikidata.get_type_subclasses(entity_type)\n subclass_map[entity_type] = subclasses\n\n _logger.info(f'Requested entity type subclass map from Wikidata')\n\n return subclass_map",
"def resolve_entities(root, info, ids: list[int], **kwargs):\n return Entity.objects.filter(id__in=ids)"
] | [
"0.654492",
"0.63510257",
"0.6295548",
"0.61492395",
"0.5928663",
"0.58731294",
"0.581502",
"0.580751",
"0.579876",
"0.5771325",
"0.57671726",
"0.5759269",
"0.57302684",
"0.5673989",
"0.5636686",
"0.55597204",
"0.55591005",
"0.5551724",
"0.55411315",
"0.55391514",
"0.55371106",
"0.5534439",
"0.55160815",
"0.54988354",
"0.54765004",
"0.5438689",
"0.54070693",
"0.53790414",
"0.53787494",
"0.532779"
] | 0.7634091 | 0 |
Check consistency of the ICAT client with the server schema. Report any abnormalities as warnings to the logger. Returns the number of warnings emitted. | def check(self):
nwarn = 0
# Check that the set of entity types is the same as in the
# schema.
schemanames = set(self.schema.keys())
clientnames = set(self.client.typemap.keys())
missing = schemanames - clientnames
if missing:
log.warning("missing entities: %s", list(missing))
nwarn += 1
spurious = clientnames - schemanames
if spurious:
log.warning("spurious entities: %s", list(spurious))
nwarn += 1
# For each entity type, check that its definition is
# consistent with the schema.
for n in schemanames & clientnames:
log.debug("checking entity type %s ...", n)
nwarn += self.schema[n].check(self.client.typemap[n])
return nwarn | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def checkExceptions(self):\n\n nwarn = 0\n\n icatExceptionType = self.client.factory.create('icatExceptionType')\n schemaexceptions = set(icatExceptionType.__keylist__)\n clientexceptions = set(icat.exception.IcatExceptionTypeMap.keys())\n missing = schemaexceptions - clientexceptions\n if missing:\n log.warning(\"missing exception types: %s\", list(missing))\n nwarn += 1\n\n return nwarn",
"def test_check_cds_21(self):\n import_genome.check_cds(self.cds1, self.eval_flags,\n description_field = \"function\")\n count = count_status(self.cds1, \"error\", \"warning\")\n self.assertEqual(count, 1)",
"def test_conformance_server(self):\n style = pycodestyle.StyleGuide(quiet=True)\n result = style.check_files(['server'])\n self.assertEqual(result.total_errors, 0,\n \"Found code style errors (and warnings).\")",
"def test_check_cds_18(self):\n self.cds1.gene = \"A\"\n import_genome.check_cds(self.cds1, self.eval_flags)\n count = count_status(self.cds1, \"error\", \"warning\")\n self.assertEqual(count, 2)",
"def schema_check(self):\n\n try:\n self.schema.assertValid(self.get_content())\n except lxml.etree.DocumentInvalid:\n logger.error(\"PDU failed schema check\")\n for line in self.pretty_print_content().splitlines():\n logger.warning(line)\n raise",
"def test_check_cds_15(self):\n self.cds1.locus_tag = \"\"\n import_genome.check_cds(self.cds1, self.eval_flags)\n count = count_status(self.cds1, \"error\", \"warning\")\n self.assertEqual(count, 2)",
"def _check_write_consistency(self):\n self.logger.warning('Not checking write consistency')",
"def test_check_cds_20(self):\n self.cds1.gene = \"11\"\n import_genome.check_cds(self.cds1, self.eval_flags)\n count = count_status(self.cds1, \"error\", \"warning\")\n self.assertEqual(count, 1)",
"def test_check_cds_17(self):\n self.cds1.gene = \"\"\n import_genome.check_cds(self.cds1, self.eval_flags)\n count = count_status(self.cds1, \"error\", \"warning\")\n self.assertEqual(count, 3)",
"def check(self):\n\n constrains = pm.ls(type='constraint')\n uselessConstrains = []\n\n for const in constrains:\n connections = const.listConnections(scn=True, s=False, d=True)\n if const in connections:\n connections.remove(const)\n\n if len(connections) == 0:\n uselessConstrains.append(const)\n\n if not uselessConstrains:\n self.status = \"OK\"\n else:\n self.status = self.errorMode\n self.errorNodes = uselessConstrains\n for obj in uselessConstrains:\n self.addError(\"%s doesn't have outgoing connections.\" % obj)\n self.errorMessage = \"%s useless constrains\" % (\n len(uselessConstrains))",
"def log_check_warnings(self):\n self._log_check_warnings_object(self._info)\n self._log_check_warnings_object(self._tags)\n self._log_check_warnings_object(self._schemes)\n self._log_check_warnings_object(self._paths)\n self._log_check_warnings_object(self._securityDefinitions)\n self._log_check_warnings_object(self._definitions)\n pass",
"def test_check_cds_19(self):\n self.cds1.gene = \"11\"\n import_genome.check_cds(self.cds1, self.eval_flags)\n count = count_status(self.cds1, \"error\", \"warning\")\n self.assertEqual(count, 1)",
"def log_check_warnings(self):\n pass",
"def log_check_warnings(self):\n pass",
"def log_check_warnings(self):\n pass",
"def log_check_warnings(self):\n pass",
"def log_check_warnings(self):\n pass",
"def log_check_warnings(self):\n pass",
"def log_check_warnings(self):\n pass",
"def log_check_warnings(self):\n pass",
"def test_check_cds_16(self):\n self.cds1.locus_tag = \"ABCXYZ\"\n import_genome.check_cds(self.cds1, self.eval_flags)\n count = count_status(self.cds1, \"error\", \"warning\")\n self.assertEqual(count, 1)",
"def checkConsistency(self):\n return _libsbml.CompSBMLDocumentPlugin_checkConsistency(self)",
"def test_validate_and_logger(self):\n data = V20CredProblemReport(\n description={\n \"en\": \"Insufficient credit\",\n \"code\": \"invalid_code\",\n },\n ).serialize()\n self._caplog.set_level(logging.WARNING)\n V20CredProblemReportSchema().validate_fields(data)\n assert \"Unexpected error code received\" in self._caplog.text",
"def checkConsistency(self):\n return _libsbml.SBMLDocumentPlugin_checkConsistency(self)",
"def checkConsistency(self):\n return _libsbml.MultiSBMLDocumentPlugin_checkConsistency(self)",
"def test_check_cds_10(self):\n self.cds1.translation_table = 1\n import_genome.check_cds(self.cds1, self.eval_flags)\n count = count_status(self.cds1, \"error\", \"warning\")\n self.assertEqual(count, 1)",
"def check_consistency(self):\n raise NotImplementedError()",
"def validate_schema_consistent(self, node):\n debug(\"validate_schema_consistent() \" + node.name)\n\n response = node.nodetool('describecluster', True)[0]\n schemas = response.split('Schema versions:')[1].strip()\n num_schemas = len(re.findall('\\[.*?\\]', schemas))\n assert num_schemas == 1, \"There were multiple schema versions: \" + pprint.pformat(schemas)",
"def has_warnings(self) -> bool:",
"def has_warnings_active(self) -> bool:"
] | [
"0.62300956",
"0.5898869",
"0.5719125",
"0.5677171",
"0.56404877",
"0.5635841",
"0.56192327",
"0.55733836",
"0.5571429",
"0.551054",
"0.54961216",
"0.54919946",
"0.54901636",
"0.54901636",
"0.54901636",
"0.54901636",
"0.54901636",
"0.54901636",
"0.54901636",
"0.54901636",
"0.5455475",
"0.5416501",
"0.53812456",
"0.53642064",
"0.53529096",
"0.5268285",
"0.5262363",
"0.5223018",
"0.5213465",
"0.52008325"
] | 0.6281774 | 0 |
Check consistency of exceptions. Check that all icatExceptionTypes defined in the WSDL have a corresponding exception class defined in icat.exception. Report missing exceptions as a warning to the logger. Return the number of warnings emitted. | def checkExceptions(self):
nwarn = 0
icatExceptionType = self.client.factory.create('icatExceptionType')
schemaexceptions = set(icatExceptionType.__keylist__)
clientexceptions = set(icat.exception.IcatExceptionTypeMap.keys())
missing = schemaexceptions - clientexceptions
if missing:
log.warning("missing exception types: %s", list(missing))
nwarn += 1
return nwarn | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def count_error_types(graph: BELGraph) -> typing.Counter[str]:\n return Counter(exc.__class__.__name__ for _, exc, _ in graph.warnings)",
"def check(self):\n\n nwarn = 0\n\n # Check that the set of entity types is the same as in the\n # schema.\n schemanames = set(self.schema.keys())\n clientnames = set(self.client.typemap.keys())\n missing = schemanames - clientnames\n if missing:\n log.warning(\"missing entities: %s\", list(missing))\n nwarn += 1\n spurious = clientnames - schemanames\n if spurious:\n log.warning(\"spurious entities: %s\", list(spurious))\n nwarn += 1\n\n # For each entity type, check that its definition is\n # consistent with the schema.\n for n in schemanames & clientnames:\n log.debug(\"checking entity type %s ...\", n)\n nwarn += self.schema[n].check(self.client.typemap[n])\n\n return nwarn",
"def test_conformance_server_api_exceptions(self):\n style = pycodestyle.StyleGuide(quiet=True)\n result = style.check_files(['server/api_exceptions.py'])\n self.assertEqual(result.total_errors, 0,\n \"Found code style errors (and warnings).\")",
"def warnings(self) -> List[Error]:",
"def has_warnings(self) -> bool:",
"def warnings_active(self) -> List[Error]:",
"def log_check_warnings(self):\n self._log_check_warnings_object(self._info)\n self._log_check_warnings_object(self._tags)\n self._log_check_warnings_object(self._schemes)\n self._log_check_warnings_object(self._paths)\n self._log_check_warnings_object(self._securityDefinitions)\n self._log_check_warnings_object(self._definitions)\n pass",
"def warnings(self) -> List[Error]:\n return self._get_warnings()",
"def FilterExceptions(image_name, errors):\n exceptions = _EXCEPTIONS.get(image_name, [])\n\n def _HasNoException(error):\n # Iterate over all the exceptions.\n for (severity, layer, stopcode, regexp) in exceptions:\n # And see if they match, first by type.\n if (error.severity == severity and\n error.layer == layer and\n error.stopcode == stopcode):\n # And then by regexpr match to the trace symbols.\n for trace in error.trace:\n if trace.symbol and re.match(regexp, trace.symbol):\n return False\n\n return True\n\n filtered_errors = filter(_HasNoException, errors)\n error_count = len(filtered_errors)\n filtered_count = len(errors) - error_count\n\n if error_count:\n suffix = '' if error_count == 1 else 's'\n filtered_errors.append(\n 'Error: Encountered %d AppVerifier exception%s for %s.' %\n (error_count, suffix, image_name))\n\n if filtered_count:\n suffix1 = '' if filtered_count == 1 else 's'\n suffix2 = '' if len(exceptions) == 1 else 's'\n filtered_errors.append(\n 'Warning: Filtered %d AppVerifier exception%s for %s using %d rule%s.' %\n (filtered_count, suffix1, image_name, len(exceptions), suffix2))\n\n return (error_count, filtered_errors)",
"def validate_types(self):\n for req in self.requests:\n required_types = req.get_required_types()\n available_types = self.substrate.get_types()\n if not (required_types <= available_types):\n print required_types - available_types, ' missing'\n return False\n return True",
"def _raise_warnings(image_properties): # pragma: no cover\n ip = image_properties\n if ip.unsupported_dtype:\n warn(\"Non-standard image type; displaying image with \"\n \"stretched contrast.\")\n if ip.out_of_range_float:\n warn(\"Float image out of standard range; displaying \"\n \"image with stretched contrast.\")",
"def checkDiffTypes(self):\n count = 0\n for t in self.types:\n if t > 0:\n count = count + 1\n return count",
"def threat_exceptions(self) -> pulumi.Output[Sequence[str]]:\n return pulumi.get(self, \"threat_exceptions\")",
"def has_warnings_active(self) -> bool:",
"def verify(self):\n if len(self.headers) not in [1, 5]:\n raise IncorrectNumberOfExtensions(\"header\", \"5\", self)\n if len(self.pixeldata) not in [1, 2, 3]:\n raise IncorrectNumberOfExtensions(\"pixel\", \"1, 2, or 3\", self)\n if len(self.tabledata) not in [0,4]:\n raise IncorrectNumberOfExtensions(\"table\", \"4\", self)",
"def test_non_reportable_error(self):\n self._check_initialized()\n configuration = self._style_checker_configuration()\n\n confidence = 1\n # Confirm the error is not reportable.\n self.assertFalse(configuration.is_reportable(self._category,\n confidence,\n self._file_path))\n error_handler = self._error_handler(configuration)\n self._call_error_handler(error_handler, confidence)\n\n self.assertEquals(0, self._error_count)\n self.assertEquals([], self._error_messages)",
"def checkIssnErrorCounts(pubmedMeta, ignoreIssns, outDir):\n global issnYearErrorCounts\n issnYear = getIssnYear(pubmedMeta)\n if issnYearErrorCounts[issnYear] > MAXISSNERRORCOUNT:\n blacklistIssnYear(outDir, issnYear, pubmedMeta['journal'])\n raise pubGetError('during this run, too many errors for ISSN %s and year %s' % issnYear, 'issnYearErrorExceed-new', str(issnYear))\n if issnYear in ignoreIssns:\n raise pubGetError('a previous run disabled this issn+year', 'issnYearErrorExceed-old', '%s %s' % issnYear)",
"def test_check_cds_21(self):\n import_genome.check_cds(self.cds1, self.eval_flags,\n description_field = \"function\")\n count = count_status(self.cds1, \"error\", \"warning\")\n self.assertEqual(count, 1)",
"def log_check_warnings(self):\n pass",
"def log_check_warnings(self):\n pass",
"def log_check_warnings(self):\n pass",
"def log_check_warnings(self):\n pass",
"def log_check_warnings(self):\n pass",
"def log_check_warnings(self):\n pass",
"def log_check_warnings(self):\n pass",
"def log_check_warnings(self):\n pass",
"def _Check(self, solution_type):\n zero_range = check_range.Singleton(0)\n for error_name, error_value in NOVATEL_SOLUTION_STATUS_HELPER:\n # Skip 'SolComputed' because that is what we want the status to be.\n if error_value == 0:\n continue\n # Raise a warning if the status is equal to the error_value.\n self._CheckForFailure(self._source + ' ' + error_name,\n numpy.array([int(s == error_value) for s in\n solution_type]),\n zero_range, False)",
"def test_raises_exceptions(recwarn, code):\n res = DummyResource()\n res.set_exception_type(code)\n try:\n res.raise_exception()\n except exceptions.WandException as e:\n assert not e.__class__.__name__.endswith('Warning')\n assert str(e) == 'Dummy exception'\n else:\n w = recwarn.pop()\n assert w.category.__name__.endswith('Warning')\n assert \"Dummy exception\" in str(w.message)\n assert recwarn.list == []",
"def check_errors(self):\n\n errors = []\n while True:\n err = self.values(\"SYST:ERR?\")\n if int(err[0]) != 0:\n errmsg = \"Agilent 5313xA: {0}: {1}\".format(err[0], err[1])\n log.error(errmsg + '\\n')\n errors.append(errmsg)\n else:\n break\n\n return errors",
"def eval_dep_warnings(warns, check_msgs):\n\n # Initialize the output\n found_msgs = [False for msg in check_msgs]\n\n # Test the warning messages, ensuring each attribute is present\n for iwar in warns:\n if iwar.category == DeprecationWarning:\n for i, msg in enumerate(check_msgs):\n if str(iwar.message).find(msg) >= 0:\n found_msgs[i] = True\n\n return found_msgs"
] | [
"0.6489571",
"0.5932624",
"0.57542944",
"0.5629398",
"0.54877156",
"0.53069246",
"0.52667695",
"0.52545625",
"0.51867276",
"0.5141248",
"0.5109995",
"0.5093013",
"0.50528795",
"0.5051258",
"0.50483483",
"0.5048305",
"0.50454915",
"0.5043633",
"0.50357765",
"0.50357765",
"0.50357765",
"0.50357765",
"0.50357765",
"0.50357765",
"0.50357765",
"0.50357765",
"0.50292325",
"0.50207555",
"0.49990594",
"0.4971445"
] | 0.8771143 | 0 |
Generate Python source code matching the ICAT schema. Generate source code for a set of classes that match the entity info found at the server. The source code is returned as a string. The Python classes are created as a hierarchy. It is assumed that there is one abstract base type which is the root of the genealogy tree. In the case of the ICAT 4.2. schema, this assumption holds, the base is | def pythonsrc(self, genealogyrules=None, baseclassname='Entity'):
if genealogyrules is None:
genealogyrules = [(r'','entityBaseBean')]
tree = self._genealogy(genealogyrules)
base = [t for t in tree if tree[t]['base'] is None][0]
self.schema[base].classname = baseclassname
# Abstract entity classes are marked by setting BeanName to
# None.
for t in tree:
if tree[t]['level'] > 0:
self.schema[t].beanname = None
types = tree.keys()
types.sort(key=lambda t: (-tree[t]['level'], t))
src = ""
for t in types:
try:
b = self.schema[tree[t]['base']]
except KeyError:
b = None
src += self.schema[t].pythonsrc(b)
src += "\n"
return src | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def pythonsrc(self, baseclass=None):\n\n classname = self.classname\n baseclassname = 'object'\n classcomment = getattr(self.info, 'classComment', None)\n beanname = self.beanname\n addbeanname = True\n constraint = self.getconstraint()\n attrs = self.getattrs()\n rels = self.getrelations()\n mrels = self.getmanyrelations()\n\n if baseclass is not None and baseclass is not self:\n baseclassname = baseclass.classname\n if beanname == baseclass.beanname:\n addbeanname = False\n if constraint == baseclass.getconstraint():\n constraint = None\n if attrs == baseclass.getattrs():\n attrs = None\n if rels == baseclass.getrelations():\n rels = None\n if mrels == baseclass.getmanyrelations():\n mrels = None\n\n src = \"class %s(%s):\\n\" % (classname, baseclassname)\n if classcomment:\n src += \" \\\"\\\"\\\"%s\\\"\\\"\\\"\\n\" % (classcomment)\n if addbeanname:\n src += \" BeanName = %s\\n\" % (repr(beanname))\n if constraint is not None:\n src += \" Constraint = %s\\n\" % (repr(constraint))\n if attrs is not None:\n src += \" InstAttr = %s\\n\" % (attrs)\n if rels is not None:\n src += \" InstRel = %s\\n\" % (rels)\n if mrels is not None:\n src += \" InstMRel = %s\\n\" % (mrels)\n src += \"\\n\"\n\n return src",
"def build_mapping() -> str:\n templates = make_module_docstring(\"Template classes for GBD entities\", __file__)\n templates += make_import(\"typing\", [\"Union\", \"Tuple\"])\n templates += (\n make_import(\n \".id\",\n [\n \"c_id\",\n \"s_id\",\n \"hs_id\",\n \"me_id\",\n \"cov_id\",\n \"rei_id\",\n \"scalar\",\n ],\n )\n + SPACING\n )\n templates += make_gbd_record()\n\n for entity, info in get_base_types().items():\n templates += SPACING\n templates += make_record(entity, **info)\n\n return templates",
"def genCode(self, fileName, allowedTypes, genGraph = 1, isRootNode = 0, \r\n metaModelName = None, export = 0, newTypes = None, \r\n nodesToGenList = [], openModelStringList=[], attrGenFix=False):\r\n file = open(fileName, \"w+t\" )\r\n\r\n dir, fil = os.path.split(fileName)\r\n funcName = string.split (fil, \".\")\t\t\t\t\t# compose class name\r\n\r\n if export == 0:\r\n file.write('\"\"\"\\n')\r\n file.write(\"__\"+ fil +\"_____________________________________________________\\n\")\r\n file.write(\"\\n\") \r\n file.write(\"Automatically generated AToM3 Model File (Do not modify directly)\\n\")\r\n file.write(\"Author: \"+USER_NAME+\"\\n\")\r\n file.write(\"Modified: \"+time.asctime()+\"\\n\") \r\n file.write(\"__\"+ len(fil)*\"_\" +\"_____________________________________________________\\n\")\r\n file.write('\"\"\"\\n')\r\n #file.write('from graph_ASG_ERmetaMetaModel import *\\n')\t\t# just for the case!\r\n file.write('from stickylink import *\\n')\t\t\t\t# necessary if we describe some graphLinks...\r\n file.write('from widthXfillXdecoration import *\\n')\t\t\t# necessary if we describe some graphLinks...\r\n\r\n # import the subclass ...\r\n if( self.getClass() not in self.nodeTypes ):\r\n file.write('from '+self.getClass()+' import *\\n')\r\n \r\n # import all the node types...\r\n for nodetype in self.nodeTypes:\r\n if( self.listNodes[nodetype] != [] ): \r\n file.write('from '+nodetype+' import *\\n') \r\n \r\n # Import all the graphical appearences of the node types... that\r\n # are actually used! \r\n # Added by Denis Dube, last modified on Sept. 9, 2004\r\n if( genGraph ): \r\n # STEP 1: Find all graphObjects used in the model\r\n graph_objectDict = dict()\r\n for nodetype in self.listNodes.keys():\r\n for node in self.listNodes[nodetype]:\r\n if( node.graphClass_ ):\r\n graph_objectDict[ node.graphObject_.getGraphClassName() ]=1\r\n # STEP 2: Create the import statements for each graphObject\r\n for graphObject in graph_objectDict.keys():\r\n file.write('from '+graphObject+' import *\\n')\r\n # NOTE: I think the next two statements are caution overkill...\r\n #file.write('try: from '+graphObject+' import *\\n')\r\n #file.write('except: print \"WARNING: unable to load the graphical appearence file: '+graphObject+'.py\" \\n')\r\n \r\n # import the basic types...\r\n for typ in allowedTypes.keys():\r\n typeInstance, params = allowedTypes[typ]\r\n typeName = typeInstance.__name__\r\n file.write('from '+typeName+' import *\\n')\r\n \r\n # Generate the ASG constructor\r\n if( attrGenFix ):\r\n self.__genASGconstructor( file, funcName ) \r\n else:\r\n # Old way\r\n file.write('\\ndef '+funcName[0]+'(self, rootNode):\\n')\r\n \r\n # Generate code for the ASGroot attributes\r\n if( isRootNode ): \r\n # Should attrGenFix be always true? 
More testing required\r\n #todo: attrGenFix == True always?\r\n if( attrGenFix ): self.__genAttributesROOT( file )\r\n else: self.genAttributesCode(file, genGraph, \"rootNode\")\r\n\r\n self.writeGraph2File(file, genGraph, isRootNode, None, \" \", 1, funcName[0], nodesToGenList=nodesToGenList)\r\n\r\n # generate code for the sub-models\r\n counter = 0\r\n if( not nodesToGenList ):\r\n for nodetype in self.nodeTypes:\r\n for node in self.listNodes[nodetype]: \r\n newFile = funcName[0]+str(counter)\r\n res = node.genCode(os.path.join(dir, newFile+'.py'), allowedTypes, genGraph, 0)\r\n counter = counter + 1\r\n else: \r\n for node in nodesToGenList:\r\n newFile = funcName[0]+str(counter)\r\n res = node.genCode(os.path.join(dir, newFile+'.py'), allowedTypes, genGraph, 0)\r\n counter = counter + 1\r\n \r\n\r\n if isRootNode:\r\n hierarchical = self.isHierarchical()\r\n if export == 0:\r\n if hierarchical:\r\n file.write('def main'+funcName[0]+'(self, ASGroot):\\n')\r\n # file.write(' self.ASGroot = '+self.getClass()+'(self)\\n')\r\n file.write(' self.'+funcName[0]+'(self, ASGroot)\\n\\n')\r\n file.write(' self.'+funcName[0]+'_connections(self, ASGroot)\\n\\n')\r\n file.write('newfunction = main'+funcName[0]+'\\n\\n')\r\n else:\r\n file.write('newfunction = '+funcName[0]+'\\n\\n')\r\n if newTypes and len(newTypes)>0: # generate a list of newly added types\r\n file.write('loadedTypes = [')\r\n counter = 0\r\n for nt in newTypes:\r\n if counter > 0: file.write(',')\r\n file.write(str(nt))\r\n counter = counter + 1\r\n file.write(']\\n')\r\n \r\n self.genLoadedMMName( file )\r\n if( attrGenFix ): file.write( '\\natom3version = \\'0.3\\'\\n' )\r\n file.close()\r\n return funcName[0] \t\t\t\t# this indicates that we've done something\r",
"def GenerateCode(self):\n print \"Generating code...\"\n for type in self.getObjectTypes():\n generator = __import__(\"codegen.Cpp\" + type, globals(), locals(), [''])\n print \"Generating code for objects of type: %s\" % type\n generator.GenerateCode(self)",
"def generate_glue_code(self, node, data, symbol_table):\n def gen(node): return self.generate_glue_code(node, data, symbol_table)\n\n def generate_ext_stub(cls):\n \"\"\"\n shared code for class/interface\n \"\"\"\n # Qualified name (C Version)\n qname = '_'.join(symbol_table.prefix+[cls.name])\n self.exts.append(qname)\n\n if self.config.verbose:\n import sys\n mod_name = '.'.join(symbol_table.prefix[1:]+[cls.name])\n sys.stdout.write('\\r'+' '*80)\n sys.stdout.write('\\rgenerating glue code for %s'%mod_name)\n sys.stdout.flush()\n\n # Consolidate all methods, defined and inherited\n cls.scan_methods()\n \n # chpl_defs = ChapelScope(chpl_stub)\n ci = self.ClassInfo(cls)\n\n # if self.server:\n # ci.impl = self.pkg_impl\n\n ci.stub.new_def(babel.externals(cls.get_scoped_id()))\n ci.stub.new_def(babel.builtin_stub_functions(cls.get_scoped_id()))\n \n has_contracts = ior_template.generateContractChecks(cls)\n self.gen_default_methods(cls, has_contracts, ci)\n\n #print qname, map(lambda x: x[2][1]+x[2][2], cls.all_methods)\n for method in cls.all_methods:\n (Method, Type, Name, Attrs, Args, \n Except, From, Requires, Ensures, DocComment) = method\n ci.epv.add_method((method, Type, Name, Attrs, \n babel.drop_rarray_ext_args(Args),\n Except, From, Requires, Ensures, DocComment))\n\n # all the methods for which we would generate a server impl\n impl_methods = babel.builtins+cls.get_methods()\n impl_methods_names = [sidlir.method_method_name(m) for m in impl_methods]\n\n # client\n for method in cls.all_methods:\n has_impl = sidlir.method_method_name(method) in impl_methods_names\n self.generate_client_method(symbol_table, method, ci, has_impl)\n\n if self.server:\n class_methods = filter(sidlir.is_not_static, impl_methods)\n static_methods = filter(sidlir.is_static, impl_methods)\n\n # # Class\n # ci.impl.new_def(gen_doc_comment(cls.doc_comment, chpl_stub)+\n # 'class %s_Impl {'%qname)\n # splicer = '.'.join(cls.qualified_name+['Impl'])\n # ci.impl.new_def('// DO-NOT-DELETE splicer.begin(%s)'%splicer)\n # ci.impl.new_def('// DO-NOT-DELETE splicer.end(%s)'%splicer)\n # for method in class_methods: \n # self.generate_server_method(symbol_table, method, ci)\n\n # ci.impl.new_def('} // class %s_Impl'%qname)\n # ci.impl.new_def('')\n # ci.impl.new_def('')\n\n # # Static\n # if static_methods:\n # ci.impl.new_def('// all static member functions of '+qname)\n # ci.impl.new_def(gen_doc_comment(cls.doc_comment, chpl_stub)+\n # '// FIXME: chpl allows only one module per library //'+\n # ' module %s_static_Impl {'%qname)\n\n # for method in static_methods:\n # self.generate_server_method(symbol_table, method, ci)\n\n # ci.impl.new_def('//} // module %s_static_Impl'%qname)\n # ci.impl.new_def('')\n # ci.impl.new_def('')\n\n\n # # Chapel Stub (client-side Chapel bindings)\n # self.generate_chpl_stub(chpl_stub, qname, ci)\n \n # # Because of Chapel's implicit (filename-based) modules it\n # # is important for the Chapel stub to be one file, but we\n # # generate separate files for the cstubs\n # self.pkg_chpl_stub.new_def(chpl_stub)\n\n # Stub (in C), the order of these definitions is somewhat sensitive\n ci.stub.genh_top(ir.Import(qname+'_IOR'))\n ci.stub.gen(ir.Import(ci.stub._name))\n\n pkg_name = '_'.join(symbol_table.prefix)\n ci.stub.gen(ir.Import(pkg_name))\n ci.stub.write()\n\n # IOR\n ior_template.generate_ior(ci, with_ior_c=self.server, _braid_config=self.config )\n ci.ior.write()\n\n # Skeleton\n if self.server:\n self.generate_skeleton(ci, qname)\n\n # Convenience header\n ext_h = CFile(qname)\n 
ext_h.genh(ir.Import(qname+'_IOR'))\n ext_h.genh(ir.Import(qname+'_Stub'))\n ext_h.write()\n\n # Makefile\n self.classes.append(qname)\n\n\n if not symbol_table:\n raise Exception()\n\n with match(node):\n if (sidlir.class_, (Name), Extends, Implements, Invariants, Methods, DocComment):\n expect(data, None)\n generate_ext_stub(sidlobjects.Class(symbol_table, node, self.class_attrs))\n\n elif (sidlir.struct, (Name), Items, DocComment):\n # Generate Chapel stub\n # self.pkg_chpl_stub.gen(ir.Type_decl(lower_ir(symbol_table, node, struct_suffix='')))\n\n # record it for later, when the package is being finished\n self.pkg_enums_and_structs.append(struct_ior_names(node))\n\n elif (sidlir.interface, (Name), Extends, Invariants, Methods, DocComment):\n # Interfaces also have an IOR to be generated\n expect(data, None)\n generate_ext_stub(sidlobjects.Interface(symbol_table, node, self.class_attrs))\n\n elif (sidlir.enum, Name, Items, DocComment):\n # Generate Chapel stub\n # self.pkg_chpl_stub.gen(ir.Type_decl(node))\n\n # record it for later, when the package is being finished\n self.pkg_enums_and_structs.append(node)\n \n elif (sidlir.package, Name, Version, UserTypes, DocComment):\n # Generate the chapel stub\n qname = '_'.join(symbol_table.prefix+[Name])\n _, pkg_symbol_table = symbol_table[sidlir.Scoped_id([], Name, '')]\n\n if self.in_package:\n # nested modules are generated in-line\n # self.pkg_chpl_stub.new_def('module %s {'%Name)\n self.generate_glue_code(UserTypes, data, pkg_symbol_table)\n # self.pkg_chpl_stub.new_def('}')\n else:\n # server-side Chapel implementation template\n if self.server: self.begin_impl(qname)\n\n # new file for the toplevel package\n # self.pkg_chpl_stub = ChapelFile(relative_indent=0)\n self.pkg_enums_and_structs = []\n self.in_package = True\n \n # recursion!\n self.generate_glue_code(UserTypes, data, pkg_symbol_table)\n # write_to(qname+'.chpl', str(self.pkg_chpl_stub))\n\n # server-side Chapel implementation template\n if self.server: self.end_impl(qname)\n \n # Makefile\n self.pkgs.append(qname)\n\n pkg_h = CFile(qname)\n pkg_h = pkg_h\n pkg_h.genh(ir.Import('sidl_header'))\n for es in self.pkg_enums_and_structs:\n es_ior = babel.lower_ir(pkg_symbol_table, es, header=pkg_h, qualify_names=True)\n pkg_h.gen(ir.Type_decl(es_ior))\n\n for ext in self.exts:\n pkg_h.genh(ir.Import(ext))\n\n pkg_h.write()\n\n\n elif (sidlir.user_type, Attrs, Cipse):\n self.class_attrs = Attrs\n gen(Cipse)\n\n elif (sidlir.file, Requires, Imports, UserTypes):\n self.in_package = False\n gen(UserTypes)\n\n elif A:\n if (isinstance(A, list)):\n for defn in A:\n gen(defn)\n else:\n raise Exception(\"NOT HANDLED:\"+repr(A))\n else:\n raise Exception(\"match error\")\n return data",
"def generate_schema(class_name, schema):\n superclass = schema[\"superclass\"]\n if superclass == \"Schema\": superclass = \"Object\"\n\n has_map = False\n is_model_builder = False\n has_inherited = False\n for field in schema[\"fields\"]:\n if field[\"name\"] == \"__meta\": continue\n if field[\"is_inherited\"]:\n has_inherited = True\n continue\n if field[\"type\"].startswith(\"Map\"): has_map = True\n if field[\"name\"] == \"can_build\": is_model_builder = True\n\n fields = []\n for field in schema[\"fields\"]:\n if field[\"name\"] == \"__meta\": continue\n java_type = translate_type(field[\"type\"], field[\"schema_name\"])\n java_value = get_java_value(field)\n\n # hackery: we flatten the parameters up into the ModelBuilder schema, rather than nesting them in the\n # parameters schema class...\n if False and is_model_builder and field[\"name\"] == \"parameters\":\n fields.append((\"parameters\", \"null\", \"ModelParameterSchemaV3[]\", field[\"help\"], field[\"is_inherited\"]))\n else:\n fields.append((field[\"name\"], java_value, java_type, field[\"help\"], field[\"is_inherited\"]))\n\n class_decl = class_name\n if \"generics\" in schema:\n class_decl += \"<\" + \", \".join(\"%s extends %s\" % (t, long_type) for t, long_type in schema[\"generics\"]) + \">\"\n super_decl = superclass\n if \"super_generics\" in schema:\n super_decl += \"<\" + \", \".join(schema[\"super_generics\"]) + \">\"\n\n yield \"/*\"\n yield \" * This file is auto-generated by h2o-3/h2o-bindings/bin/gen_java.py\"\n yield \" * Copyright 2016 H2O.ai; Apache License Version 2.0 (see LICENSE for details)\"\n yield \" */\"\n yield \"package water.bindings.pojos;\"\n yield \"\"\n yield \"import com.google.gson.Gson;\"\n yield \"import com.google.gson.annotations.*;\"\n yield \"import java.util.Map;\" if has_map else None\n yield \"\"\n yield \"\"\n yield \"public class %s extends %s {\" % (class_decl, super_decl) if super_decl != \"Object\" else None\n yield \"public class %s {\" % (class_decl) if super_decl == \"Object\" else None\n yield \"\"\n for name, value, ftype, fhelp, inherited in fields:\n if inherited: continue\n ccname = translate_name(name)\n yield \" /**\"\n yield bi.wrap(fhelp, indent=\" * \")\n yield \" */\"\n yield \" @SerializedName(\\\"%s\\\")\" % name if name != ccname else None\n yield \" public %s %s;\" % (ftype, ccname)\n yield \"\"\n if has_inherited:\n yield \"\"\n yield \" /*\" + (\"-\" * 114)\n yield \" //\" + (\" \" * 50) + \"INHERITED\"\n yield \" //\" + (\"-\" * 114)\n yield \"\"\n for name, value, ftype, fhelp, inherited in fields:\n if not inherited: continue\n yield bi.wrap(fhelp, \" // \")\n yield \" public %s %s;\" % (ftype, translate_name(name))\n yield \"\"\n yield \" */\"\n yield \"\"\n yield \" /**\"\n yield \" * Public constructor\"\n yield \" */\"\n yield \" public %s() {\" % class_name\n for name, value, _, _, _ in fields:\n if name == \"parameters\": continue\n if value == \"null\": continue\n yield \" %s = %s;\" % (translate_name(name), value)\n yield \" }\"\n yield \"\"\n yield \" /**\"\n yield \" * Return the contents of this object as a JSON String.\"\n yield \" */\"\n yield \" @Override\"\n yield \" public String toString() {\"\n yield \" return new Gson().toJson(this);\"\n yield \" }\"\n yield \"\"\n yield \"}\"",
"def generate_class_template(self):\n template = {\n \"@id\": \"uri or curie of the class\",\n \"@type\": \"rdfs:Class\",\n \"rdfs:comment\": \"description of the class\",\n \"rdfs:label\": \"class label, should match @id\",\n \"rdfs:subClassOf\": {\n \"@id\": \"parent class, could be list\"\n },\n \"schema:isPartOf\": {\n \"@id\": \"http://schema.biothings.io\"\n }\n }\n return template",
"def test_class_definition_with_base(self):\n self.script(\"# script.py\\n\"\n \"class C(object):\\n\"\n \" 'cdoc'\\n\"\n \" pass\\n\")\n self.compile()\n\n script = self.find_code_component(name=\"script.py\")\n class_def = self.find_code_component(name=\"C\")\n var_object = self.find_code_component(name=\"object\")\n\n self.assertEqual(class_def.type, \"class_def\")\n self.assertEqual(class_def.mode, \"w\")\n self.assertEqual(class_def.first_char_line, 2)\n self.assertEqual(class_def.first_char_column, 0)\n self.assertEqual(class_def.last_char_line, 4)\n self.assertEqual(class_def.last_char_column, 8)\n self.assertEqual(class_def.container_id, script.id)\n\n self.assertEqual(var_object.type, \"name\")\n self.assertEqual(var_object.mode, \"r\")\n self.assertEqual(var_object.first_char_line, 2)\n self.assertEqual(var_object.first_char_column, 8)\n self.assertEqual(var_object.last_char_line, 2)\n self.assertEqual(var_object.last_char_column, 14)\n self.assertEqual(var_object.container_id, class_def.id)\n\n class_def_block = self.metascript.code_blocks_store[class_def.id]\n self.assertEqual(class_def_block.code,\n \"class C(object):\\n\"\n \" 'cdoc'\\n\"\n \" pass\")\n self.assertEqual(class_def_block.docstring, \"cdoc\")\n self.assertTrue(bool(class_def_block.code_hash))",
"def generate_type_hierarchy(ctx):\n ctx.run(\"./env/bin/python -m puresnmp.types > doc/typetree.rst\")",
"def generate_ext_stub(cls):\n # Qualified name (C Version)\n qname = '_'.join(symbol_table.prefix+[cls.name])\n self.exts.append(qname)\n\n if self.config.verbose:\n import sys\n mod_name = '.'.join(symbol_table.prefix[1:]+[cls.name])\n sys.stdout.write('\\r'+' '*80)\n sys.stdout.write('\\rgenerating glue code for %s'%mod_name)\n sys.stdout.flush()\n\n # Consolidate all methods, defined and inherited\n cls.scan_methods()\n \n # chpl_defs = ChapelScope(chpl_stub)\n ci = self.ClassInfo(cls)\n\n # if self.server:\n # ci.impl = self.pkg_impl\n\n ci.stub.new_def(babel.externals(cls.get_scoped_id()))\n ci.stub.new_def(babel.builtin_stub_functions(cls.get_scoped_id()))\n \n has_contracts = ior_template.generateContractChecks(cls)\n self.gen_default_methods(cls, has_contracts, ci)\n\n #print qname, map(lambda x: x[2][1]+x[2][2], cls.all_methods)\n for method in cls.all_methods:\n (Method, Type, Name, Attrs, Args, \n Except, From, Requires, Ensures, DocComment) = method\n ci.epv.add_method((method, Type, Name, Attrs, \n babel.drop_rarray_ext_args(Args),\n Except, From, Requires, Ensures, DocComment))\n\n # all the methods for which we would generate a server impl\n impl_methods = babel.builtins+cls.get_methods()\n impl_methods_names = [sidlir.method_method_name(m) for m in impl_methods]\n\n # client\n for method in cls.all_methods:\n has_impl = sidlir.method_method_name(method) in impl_methods_names\n self.generate_client_method(symbol_table, method, ci, has_impl)\n\n if self.server:\n class_methods = filter(sidlir.is_not_static, impl_methods)\n static_methods = filter(sidlir.is_static, impl_methods)\n\n # # Class\n # ci.impl.new_def(gen_doc_comment(cls.doc_comment, chpl_stub)+\n # 'class %s_Impl {'%qname)\n # splicer = '.'.join(cls.qualified_name+['Impl'])\n # ci.impl.new_def('// DO-NOT-DELETE splicer.begin(%s)'%splicer)\n # ci.impl.new_def('// DO-NOT-DELETE splicer.end(%s)'%splicer)\n # for method in class_methods: \n # self.generate_server_method(symbol_table, method, ci)\n\n # ci.impl.new_def('} // class %s_Impl'%qname)\n # ci.impl.new_def('')\n # ci.impl.new_def('')\n\n # # Static\n # if static_methods:\n # ci.impl.new_def('// all static member functions of '+qname)\n # ci.impl.new_def(gen_doc_comment(cls.doc_comment, chpl_stub)+\n # '// FIXME: chpl allows only one module per library //'+\n # ' module %s_static_Impl {'%qname)\n\n # for method in static_methods:\n # self.generate_server_method(symbol_table, method, ci)\n\n # ci.impl.new_def('//} // module %s_static_Impl'%qname)\n # ci.impl.new_def('')\n # ci.impl.new_def('')\n\n\n # # Chapel Stub (client-side Chapel bindings)\n # self.generate_chpl_stub(chpl_stub, qname, ci)\n \n # # Because of Chapel's implicit (filename-based) modules it\n # # is important for the Chapel stub to be one file, but we\n # # generate separate files for the cstubs\n # self.pkg_chpl_stub.new_def(chpl_stub)\n\n # Stub (in C), the order of these definitions is somewhat sensitive\n ci.stub.genh_top(ir.Import(qname+'_IOR'))\n ci.stub.gen(ir.Import(ci.stub._name))\n\n pkg_name = '_'.join(symbol_table.prefix)\n ci.stub.gen(ir.Import(pkg_name))\n ci.stub.write()\n\n # IOR\n ior_template.generate_ior(ci, with_ior_c=self.server, _braid_config=self.config )\n ci.ior.write()\n\n # Skeleton\n if self.server:\n self.generate_skeleton(ci, qname)\n\n # Convenience header\n ext_h = CFile(qname)\n ext_h.genh(ir.Import(qname+'_IOR'))\n ext_h.genh(ir.Import(qname+'_Stub'))\n ext_h.write()\n\n # Makefile\n self.classes.append(qname)",
"def generate_skeleton(self, ci, qname):\n symbol_table = ci.epv.symbol_table\n cls = ci.co\n\n\n # Skeleton (in Chapel)\n self.pkg_chpl_skel.gen(ir.Import('.'.join(symbol_table.prefix)))\n\n self.pkg_chpl_skel.new_def('use sidl;')\n objname = '.'.join(ci.epv.symbol_table.prefix+[ci.epv.name]) + '_Impl'\n\n self.pkg_chpl_skel.new_def('extern record %s__object { var d_data: opaque; };'\n %qname)#,objname))\n self.pkg_chpl_skel.new_def('extern proc %s__createObject('%qname+\n 'd_data: int, '+\n 'out ex: sidl_BaseInterface__object)'+\n ': %s__object;'%qname)\n self.pkg_chpl_skel.new_def(ci.chpl_skel)\n\n\n # Skeleton (in C)\n cskel = ci.chpl_skel.cstub\n cskel._name = qname+'_Skel'\n cskel.gen(ir.Import('stdint'))\n cskel.gen(ir.Import('stdio'))\n cskel.gen(ir.Import(cskel._name))\n cskel.gen(ir.Import(qname+'_IOR'))\n cskel.gen(ir.Fn_defn([], ir.pt_void, qname+'__call_load', [],\n [ir.Comment(\"FIXME: [ir.Stmt(ir.Call('_load', []))\")], ''))\n\n # set_epv ... Setup the entry-point vectors (EPV)s\n #\n # there are 2*3 types of EPVs:\n # epv: regular methods\n # sepv: static methods\n # pre_(s)epv: pre-hooks\n # post_(s)epv: post-hooks\n epv_t = ci.epv.get_ir()\n sepv_t = ci.epv.get_sepv_ir()\n pre_epv_t = ci.epv.get_pre_epv_ir()\n pre_sepv_t = ci.epv.get_pre_sepv_ir()\n post_epv_t = ci.epv.get_post_epv_ir()\n post_sepv_t = ci.epv.get_post_sepv_ir()\n cskel.gen(ir.Fn_decl([], ir.pt_void, 'ctor', [], ''))\n cskel.gen(ir.Fn_decl([], ir.pt_void, 'dtor', [], ''))\n\n epv_init = []\n sepv_init = []\n for m in builtins+cls.get_methods():\n fname = m[2][1] + m[2][2]\n attrs = sidlir.method_method_attrs(m)\n static = member_chk(sidlir.static, attrs)\n def entry(stmts, epv_t, table, field, pointer):\n stmts.append(ir.Set_struct_item_stmt(epv_t, ir.Deref(table), field, pointer))\n\n if static: entry(sepv_init, sepv_t, 'sepv', 'f_'+fname, '%s_%s_skel'%(qname, fname))\n else: entry(epv_init, epv_t, 'epv', 'f_'+fname, '%s_%s_skel'%(qname, fname))\n\n builtin_names = ['_ctor', '_ctor2', '_dtor']\n with_hooks = member_chk(ir.hooks, attrs)\n if fname not in builtin_names and with_hooks:\n if static: entry(sepv_init, pre_sepv_t, 'pre_sepv', 'f_%s_pre'%fname, 'NULL')\n else: entry(epv_init, pre_epv_t, 'pre_epv', 'f_%s_pre'%fname, 'NULL')\n if static: entry(sepv_init, post_sepv_t, 'post_sepv', 'f_%s_post'%fname, 'NULL')\n else: entry(epv_init, post_epv_t, 'post_epv', 'f_%s_post'%fname, 'NULL')\n\n pkgname = '_'.join(ci.epv.symbol_table.prefix)\n\n dummyargv = '''\n char* argv[] = { \n babel_program_name,\n \"-nl\", /* number of locales */\n \"\",\n \"-v\", /* verbose chapel runtime */\n NULL\n };\n argv[2] = getenv(\"SLURM_NTASKS\");\n if (argv[2] == NULL) {\n fprintf(stdout, \"**ERROR: please set the SLURM_NTASKS environment variable\\\\n\"\n \" to the desired number of Chapel locales.\");\n argv[2] = \"0\";\n }\n int ignored = setenv(\"GASNET_BACKTRACE\", \"1\", 1);\n'''\n cskel.genh(ir.Import('stdlib'))\n cskel.pre_def('extern int chpl_init_library(int argc, char* argv[]);')\n cskel.pre_def('// You can set this to argv[0] in main() to get better debugging output')\n cskel.pre_def('char* __attribute__((weak)) babel_program_name = \"BRAID_LIBRARY\";')\n # These are now called by chpl_init_library -> chpl_gen_init\n #cskel.pre_def('extern void chpl__init_chpl__Program(int, const char*);')\n #cskel.pre_def('extern void chpl__init_%s_Impl(int, const char*);'%pkgname)\n init_code = [dummyargv,\n 'int locale_id = chpl_init_library(4, argv)',\n # 'chpl__init_chpl__Program(__LINE__, __FILE__)',\n # 
'chpl__init_%s_Impl(__LINE__, __FILE__)'%pkgname\n ]\n init_code = map(lambda x: (ir.stmt, x), init_code)\n epv_init.extend(init_code)\n sepv_init.extend(init_code)\n\n cskel.gen(ir.Fn_defn(\n [], ir.pt_void, qname+'__set_epv',\n [ir.Arg([], ir.out, epv_t, 'epv'),\n ir.Arg([], ir.out, pre_epv_t, 'pre_epv'),\n ir.Arg([], ir.out, post_epv_t, 'post_epv')],\n epv_init, ''))\n\n if sepv_t:\n cskel.gen(ir.Fn_defn(\n [], ir.pt_void, qname+'__set_sepv',\n [ir.Arg([], ir.out, sepv_t, 'sepv'),\n ir.Arg([], ir.out, pre_sepv_t, 'pre_sepv'),\n ir.Arg([], ir.out, post_sepv_t, 'post_sepv')],\n sepv_init, ''))\n\n # C Skel\n for code in cskel.optional:\n cskel.new_global_def(code)\n cskel.write()",
"def exportTypes( c ) :\n assert str(type(c)) == \"<type '_mysql.connection'>\"\n xml = \"\"\n cT = sqlQuery ( c, \"select * from CrisisKind;\" )\n oT = sqlQuery ( c, \"select * from OrganizationKind;\" )\n pT = sqlQuery ( c, \"select * from PersonKind;\" ) \n for i in cT:\n xml += openTagAtt (\"CrisisKind\", \"crisisKindIdent\", i[0])\n xml += openCloseTag (\"Name\", i[1])\n xml += openCloseTag (\"Description\", i[2])\n xml += closeTag (\"CrisisKind\") \n for i in oT:\n xml += openTagAtt (\"OrganizationKind\", \"organizationKindIdent\", i[0])\n xml += openCloseTag (\"Name\", i[1])\n xml += openCloseTag (\"Description\", i[2])\n xml += closeTag (\"OrganizationKind\")\n for i in pT:\n xml += openTagAtt (\"PersonKind\", \"personKindIdent\", i[0])\n xml += openCloseTag (\"Name\", i[1])\n xml += openCloseTag (\"Description\", i[2])\n xml += closeTag (\"PersonKind\")\n assert str ( type ( xml ) ) == \"<type 'str'>\"\n return xml",
"def compile(data_gen, script_type):\n try:\n question_class = abstract_question.IliasQuestion.available_types()[script_type]\n item_list = [question_class(**data).xml() for data in data_gen]\n except KeyError:\n messages.abort('Question type not found.')\n\n return create_xml_tree(item_list)",
"def genCode(self):\n # Init variables and environment.\n if 'Sys' in self.inputs:\n self.cg_bootstrap_sys()\n\n for ns, vm_code in self.inputs.items():\n input = Parser(vm_code).parse()\n self.ns = ns\n\n if input['errors']:\n # No meaningful output can be generated.\n # Print errors to stderr and return empty string.\n print(\"ERRORS\", file=sys.stderr)\n for err in input['errors']:\n print(\"Line {}: {}\".format(err.line, err.msg))\n return f\"// Encountered errors parsing {filename}\"\n\n # Translate input.\n self.asm(f\"// Input file: {ns}\")\n for cmd in input['commands']:\n # Print original VM command as a comment.\n self.asm('// ' + cmd.src)\n self.cg(cmd)\n self.asm(f\"// End of input file: {ns}\")\n\n # Add the stop loop.\n self.cg_stop()\n\n return '\\n'.join(self.text)",
"def __init__(self):\n super().__init__()\n self.name = '' # name of this istance (alias)\n self.type = type(self).__name__ # specific type within this class\n self.verbosity = None # verbosity level (see message handler)\n self.globalAttributes = {} # this is a dictionary that contains parameters that are set at the level of the base classes defining the types\n self._knownAttribute = [] # this is a list of strings representing the allowed attribute in the xml input for the class\n self._knownAttribute += ['name','verbosity'] # attributes that are known\n self.printTag = 'BaseType' # the tag that refers to this class in all the specific printing\n self.variableGroups = {} # the variables this class needs to be aware of\n self.metadataKeys = set() # list of registered metadata keys to expect from this entity\n self.metadataParams = {} # dictionary of registered metadata keys with repect to their indexes",
"def generate_from(ast: ast_pb2.AST,\n include_paths: List[str]) -> Generator[str, None, None]:\n includes = set(ast.usertype_includes)\n\n for include in includes:\n # Not generating type casters for the builtin types.\n # Not scanning headers generated by pybind11 code generator because the\n # `// CLIF USE` in those headers do not have associated `Clif_PyObjFrom` or\n # `Clif_PyObjAs`.\n if (include.startswith('clif/python') or\n # Excluding absl::Status and absl::StatusOr\n include.startswith('util/task/python')):\n continue\n clif_uses = _get_clif_uses(include, include_paths)\n for clif_use in clif_uses:\n yield from _generate_type_caster(clif_use.py_name, clif_use.cpp_name,\n clif_use.generate_load,\n clif_use.generate_cast)",
"def node_catalogue():\r\n\r\n classes = node_subclasses(Node)\r\n\r\n catalogue = {}\r\n\r\n for node_class in classes:\r\n try:\r\n name = node_class.identifier()\r\n except AttributeError:\r\n # If node does not provide identifier, we consider it to be\r\n # private or abstract class\r\n continue\r\n\r\n # Get copy of node info\r\n info = dict(get_node_info(node_class))\r\n info[\"name\"] = name\r\n info[\"factory\"] = node_class\r\n\r\n # Get node type based on superclass, if not provided\r\n\r\n if \"type\" not in info:\r\n if issubclass(node_class, SourceNode):\r\n info[\"type\"] = \"source\"\r\n elif not issubclass(node_class, SourceNode) \\\r\n and not issubclass(node_class, TargetNode):\r\n info[\"type\"] = \"processing\"\r\n elif issubclass(node_class, TargetNode):\r\n info[\"type\"] = \"target\"\r\n else:\r\n info[\"type\"] = \"unknown\"\r\n\r\n catalogue[name] = info\r\n\r\n return catalogue",
"def gen_extractor_classes():\n from .extractors import _ALL_CLASSES\n\n return _ALL_CLASSES",
"def generateModelClass(self):\n\t\tself.printt_cls(\"export class {} {}\".format(self.objName, \"{\"))\n\t\tfor col in self.objSchema[\"fields\"]:\n\t\t\tcolName = col[\"name\"]\n\t\t\tcolType = col[\"type\"]\n\t\t\tself.printt_cls(\"\\t{} : {};\".format(colName, self.JS_DATA_TYPES[colType]))\n\t\tself.printt_cls(\"}\")\n\t\tself.printt_cls(\"\")",
"def get_src(self):\n\n self.codegen = json.loads(self.cmod.get_source(\"json\"))\n self.sub_module_name = self.codegen[\"symbol\"]\n self.nodes = self.codegen[\"nodes\"]\n self.clml_code.append(self.MakeHeader.substitute(module=self.sub_module_name))\n\n def get_tensor_from_map(\n node_seq, shape=None, layout=\"CL_TENSOR_LAYOUT_OPTIMAL_QCOM\", dtype=\"float32\"\n ):\n if node_seq in self.node_map:\n return self.node_map[node_seq]\n else:\n node = self.nodes[node_seq]\n dtype = str(node[\"attrs\"][\"dtype\"][0][0])\n if node[\"op\"] == \"input\":\n self.clml_code.append(\"// Input Node\")\n node_out_name = self.sub_module_name + \"_\" + \"input_\" + str(node_seq)\n else:\n node_out_name = node[\"name\"]\n if shape is None:\n shape = str(tuple(node[\"attrs\"][\"shape\"][0][0]))[1:-1]\n\n self.clml_code.append(\n self.MakeCLMLTensor.substitute(\n name=node_out_name, shape=shape, dtype=dtype, layout=layout\n )\n )\n self.clml_code.append(\n self.MapInsert.substitute(nid=node_out_name, tensor_desc=node_out_name)\n )\n if node[\"op\"] == \"input\":\n self.clml_code.append(\n Template(\"runner.inputs.push_back($clml_input);\").substitute(\n clml_input=node_out_name\n )\n )\n self.input_meta.append(\n self.MakeInputMetaInfo.substitute(\n in_name=node_out_name, dtype=dtype, shape=shape\n )\n )\n\n if self.nodes[node_seq][\"op\"] == \"const\":\n self.clml_code.append(\n Template('runner.consts.push_back(\"$nid\");').substitute(nid=node[\"name\"])\n )\n self.node_map[node_seq] = node_out_name\n return node_out_name\n\n def make_output_tensor(\n node, node_seq, shape=None, layout=\"CL_TENSOR_LAYOUT_OPTIMAL_QCOM\", dtype=\"float32\"\n ):\n if dtype is None:\n dtype = str(node[\"attrs\"][\"dtype\"][0][0])\n if shape is None:\n shape = str(tuple(node[\"attrs\"][\"shape\"][0][0]))[1:-1]\n node_out_name = self.sub_module_name + \"_\" + \"layer_out_\" + str(node_seq)\n self.clml_code.append(\n self.MakeCLMLTensor.substitute(\n name=node_out_name,\n shape=shape,\n dtype=dtype,\n layout=layout,\n )\n )\n return node_out_name\n\n for node_seq, node in enumerate(self.nodes):\n if node[\"op\"] == \"kernel\":\n self.clml_code.append(\"// Kernel Node : \" + node[\"name\"])\n if node[\"name\"] == \"nn.conv2d\" or node[\"name\"] == \"nn.depthwise_conv2d\":\n if \"padding\" in node[\"attrs\"]:\n padding = str(tuple(int(x) for x in node[\"attrs\"][\"padding\"][0]))[1:-1]\n else:\n padding = \"0, 0, 0, 0\"\n dilation = str(tuple(int(x) for x in node[\"attrs\"][\"dilation\"][0]))[1:-1]\n strides = str(tuple(int(x) for x in node[\"attrs\"][\"strides\"][0]))[1:-1]\n groups = node[\"attrs\"][\"groups\"][0][0]\n if node[\"name\"] == \"nn.conv2d\":\n mode = \"CL_CONVOLUTION_MODE_CONVOLUTION_QCOM\"\n else:\n mode = \"CL_CONVOLUTION_MODE_DEPTHWISE_QCOM\"\n activation = \"CL_ACTIVATION_RELU\"\n has_act = False\n if \"activation_type\" in node[\"attrs\"]:\n has_act = True\n activation = node[\"attrs\"][\"activation_type\"][0][0]\n if activation == \"relu\":\n activation = \"CL_ACTIVATION_RELU\"\n elif activation == \"relu6\":\n activation = \"CL_ACTIVATION_RELU6\"\n else:\n RuntimeError(\"Unknown activation:\" + activation)\n has_bias = bool((node[\"inputs\"] == 3) or (node[\"inputs\"] == 7))\n has_bn = bool((node[\"inputs\"] == 6) or (node[\"inputs\"] == 7))\n input_tensor = get_tensor_from_map(node[\"inputs\"][0][0])\n weight_tensor = get_tensor_from_map(node[\"inputs\"][1][0])\n if not has_bias:\n bias_tensor = \"runner.unusedTensor\"\n else:\n bias_tensor = get_tensor_from_map(node[\"inputs\"][2][0])\n\n node_out_name = 
make_output_tensor(node, node_seq)\n\n if not has_bn:\n self.clml_code.append(\n self.MakeConv2D.substitute(\n input_tensor=input_tensor,\n weight_tensor=weight_tensor,\n bias_tensor=bias_tensor,\n output_tensor=node_out_name,\n padding=padding,\n dilation=dilation,\n strides=strides,\n groups=groups,\n mode=mode,\n activation=activation,\n has_bias=\"true\" if has_bias else \"false\",\n has_act=\"true\" if has_act else \"false\",\n dtype=node[\"attrs\"][\"dtype\"][0][0],\n )\n )\n else:\n bn_index = 3 if has_bias else 2\n bn_attrs = tuple(node[\"attrs\"][\"batchnorm\"][0][0])\n axis = bn_attrs[0]\n bn_shape = [1, 1, 1, 1]\n bn_node = self.nodes[node[\"inputs\"][bn_index][0]]\n bn_shape[axis] = bn_node[\"attrs\"][\"shape\"][0][0]\n dtype = bn_node[\"attrs\"][\"dtype\"][0][0]\n\n bn_scale_tensor = get_tensor_from_map(\n node[\"inputs\"][bn_index][0],\n shape=str(tuple(bn_shape))[1:-1],\n dtype=dtype,\n )\n\n bn_bias_tensor = get_tensor_from_map(\n node[\"inputs\"][bn_index + 1][0],\n shape=str(tuple(bn_shape))[1:-1],\n dtype=dtype,\n )\n\n bn_mean_tensor = get_tensor_from_map(\n node[\"inputs\"][bn_index + 2][0],\n shape=str(tuple(bn_shape))[1:-1],\n dtype=dtype,\n )\n\n bn_var_tensor = get_tensor_from_map(\n node[\"inputs\"][bn_index + 3][0],\n shape=str(tuple(bn_shape))[1:-1],\n dtype=dtype,\n )\n\n self.clml_code.append(\n self.MakeConv2DWithBN.substitute(\n input_tensor=input_tensor,\n weight_tensor=weight_tensor,\n bias_tensor=bias_tensor,\n output_tensor=node_out_name,\n bn_scale_tensor=bn_scale_tensor,\n bn_bias_tensor=bn_bias_tensor,\n bn_mean_tensor=bn_mean_tensor,\n bn_var_tensor=bn_var_tensor,\n bn_attrs=str(bn_attrs)[1:-1],\n padding=padding,\n dilation=dilation,\n strides=strides,\n groups=groups,\n mode=mode,\n activation=activation,\n has_bias=\"true\" if has_bias else \"false\",\n has_act=\"true\" if has_act else \"false\",\n dtype=node[\"attrs\"][\"dtype\"][0][0],\n )\n )\n elif node[\"name\"] == \"nn.relu6\" or node[\"name\"] == \"nn.relu\":\n input_tensor = get_tensor_from_map(node[\"inputs\"][0][0])\n node_out_name = make_output_tensor(node, node_seq)\n relu_type = (\n \"CL_ACTIVATION_RELU\" if node[\"name\"] == \"nn.relu\" else \"CL_ACTIVATION_RELU6\"\n )\n self.clml_code.append(\n self.MakeRelu.substitute(\n input_tensor=input_tensor,\n output_tensor=node_out_name,\n relu_type=relu_type,\n dtype=node[\"attrs\"][\"dtype\"][0][0],\n )\n )\n elif node[\"name\"] == \"nn.batch_norm\":\n bn_attrs = tuple(node[\"attrs\"][\"batchnorm\"][0][0])\n axis = bn_attrs[0]\n bn_shape = [1, 1, 1, 1]\n bn_node = self.nodes[node[\"inputs\"][0][0]]\n bn_shape[axis] = bn_node[\"attrs\"][\"shape\"][0][0]\n dtype = bn_node[\"attrs\"][\"dtype\"][0][0]\n bn_scale_tensor = get_tensor_from_map(\n node[\"inputs\"][0][0], shape=str(tuple(bn_shape))[1:-1], dtype=dtype\n )\n bn_bias_tensor = get_tensor_from_map(\n node[\"inputs\"][1][0], shape=str(tuple(bn_shape))[1:-1], dtype=dtype\n )\n bn_mean_tensor = get_tensor_from_map(\n node[\"inputs\"][2][0], shape=str(tuple(bn_shape))[1:-1], dtype=dtype\n )\n bn_var_tensor = get_tensor_from_map(\n node[\"inputs\"][3][0], shape=str(tuple(bn_shape))[1:-1], dtype=dtype\n )\n\n input_tensor = get_tensor_from_map(node[\"inputs\"][0][0])\n node_out_name = make_output_tensor(node, node_seq)\n\n self.clml_code.append(\n self.MakeBN.substitute(\n input_tensor=input_tensor,\n output_tensor=node_out_name,\n bn_scale_tensor=bn_scale_tensor,\n bn_bias_tensor=bn_bias_tensor,\n bn_mean_tensor=bn_mean_tensor,\n bn_var_tensor=bn_var_tensor,\n bn_attrs=str(bn_attrs)[1:-1],\n 
dtype=node[\"attrs\"][\"dtype\"][0][0],\n )\n )\n elif node[\"name\"] in [\"nn.max_pool2d\", \"nn.avg_pool2d\", \"nn.l2_pool2d\"]:\n input_tensor = get_tensor_from_map(node[\"inputs\"][0][0])\n node_out_name = make_output_tensor(node, node_seq)\n pool_size = str(tuple(int(x) for x in node[\"attrs\"][\"pool_size\"][0]))[1:-1]\n strides = str(tuple(int(x) for x in node[\"attrs\"][\"strides\"][0]))[1:-1]\n padding = str(tuple(int(x) for x in node[\"attrs\"][\"padding\"][0]))[1:-1]\n self.clml_code.append(\n self.MakePool2D.substitute(\n input_tensor=input_tensor,\n output_tensor=node_out_name,\n pool_size=pool_size,\n strides=strides,\n padding=padding,\n pool_type=node[\"name\"],\n dtype=node[\"attrs\"][\"dtype\"][0][0],\n )\n )\n elif node[\"name\"] in [\"nn.global_max_pool2d\", \"nn.global_avg_pool2d\"]:\n input_tensor = get_tensor_from_map(node[\"inputs\"][0][0])\n node_out_name = make_output_tensor(node, node_seq)\n in_node = self.nodes[node[\"inputs\"][0][0]]\n in_shape = str(tuple(in_node[\"attrs\"][\"shape\"][0][0]))[1:-1]\n self.clml_code.append(\n self.MakeGlobalPool2D.substitute(\n input_tensor=input_tensor,\n output_tensor=node_out_name,\n in_shape=in_shape,\n pool_type=node[\"name\"],\n dtype=node[\"attrs\"][\"dtype\"][0][0],\n )\n )\n elif node[\"name\"] == \"reshape\":\n input_tensor = get_tensor_from_map(node[\"inputs\"][0][0])\n node_out_name = make_output_tensor(node, node_seq)\n self.clml_code.append(\n self.MakeReshape.substitute(\n input_tensor=input_tensor,\n output_tensor=node_out_name,\n dtype=node[\"attrs\"][\"dtype\"][0][0],\n )\n )\n elif node[\"name\"] == \"concatenate\":\n input_len = len(node[\"inputs\"])\n in_list = str(\n [get_tensor_from_map(node[\"inputs\"][x][0]) for x in range(input_len)]\n )[1:-1]\n node_out_name = make_output_tensor(node, node_seq)\n axis = node[\"attrs\"][\"axis\"][0][0]\n self.clml_code.append(\n self.MakeConcatenate.substitute(\n in_list=in_list,\n output_tensor=node_out_name,\n axis=axis,\n dtype=node[\"attrs\"][\"dtype\"][0][0],\n )\n )\n elif node[\"name\"] == \"nn.dense\":\n in_node = self.nodes[node[\"inputs\"][0][0]]\n in_shape = tuple(in_node[\"attrs\"][\"shape\"][0][0])\n wt_shape = tuple(in_node[\"attrs\"][\"shape\"][0][0])\n input_tensor = get_tensor_from_map(\n node[\"inputs\"][0][0], layout=\"CL_TENSOR_LAYOUT_NCHW_QCOM\"\n )\n weight_tensor = get_tensor_from_map(\n node[\"inputs\"][1][0],\n shape=str(tuple([1, 1, wt_shape[0], wt_shape[1]]))[1:-1],\n layout=\"CL_TENSOR_LAYOUT_NCHW_QCOM\",\n )\n node_out_name = make_output_tensor(\n node,\n node_seq,\n shape=str(tuple([in_shape[0], wt_shape[0], 1, 1]))[1:-1],\n layout=\"CL_TENSOR_LAYOUT_NCHW_QCOM\",\n )\n self.clml_code.append(\n self.MakeDense.substitute(\n input_tensor=input_tensor,\n weight_tensor=weight_tensor,\n output_tensor=node_out_name,\n in_shape=str(in_shape)[1:-1],\n wt_shape=str(wt_shape)[1:-1],\n dtype=node[\"attrs\"][\"dtype\"][0][0],\n )\n )\n elif node[\"name\"] == \"nn.softmax\":\n input_tensor = get_tensor_from_map(node[\"inputs\"][0][0])\n node_out_name = make_output_tensor(node, node_seq)\n self.clml_code.append(\n self.MakeSoftMax.substitute(\n input_tensor=input_tensor,\n output_tensor=node_out_name,\n dtype=node[\"attrs\"][\"dtype\"][0][0],\n )\n )\n elif node[\"name\"] == \"nn.pad\":\n input_tensor = get_tensor_from_map(node[\"inputs\"][0][0])\n node_out_name = make_output_tensor(node, node_seq)\n pad_mode = node[\"attrs\"][\"pad_mode\"][0][0]\n padding = str(tuple(int(x) for x in node[\"attrs\"][\"pad_width\"][0]))[1:-1]\n self.clml_code.append(\n 
self.MakePad.substitute(\n input_tensor=input_tensor,\n output_tensor=node_out_name,\n pad_mode=pad_mode,\n padding=padding,\n dtype=node[\"attrs\"][\"dtype\"][0][0],\n )\n )\n elif node[\"name\"] == \"nn.batch_flatten\":\n input_tensor = get_tensor_from_map(node[\"inputs\"][0][0])\n node_out_name = make_output_tensor(node, node_seq)\n self.clml_code.append(\n self.MakeBatchFlatten.substitute(\n input_tensor=input_tensor,\n output_tensor=node_out_name,\n dtype=node[\"attrs\"][\"dtype\"][0][0],\n )\n )\n elif node[\"name\"] == \"clip\":\n input_tensor = get_tensor_from_map(node[\"inputs\"][0][0])\n node_out_name = make_output_tensor(node, node_seq)\n a_max = node[\"attrs\"][\"a_max\"][0][0]\n a_min = node[\"attrs\"][\"a_min\"][0][0]\n self.clml_code.append(\n self.MakeClip.substitute(\n input_tensor=input_tensor,\n output_tensor=node_out_name,\n a_max=a_max,\n a_min=a_min,\n dtype=node[\"attrs\"][\"dtype\"][0][0],\n )\n )\n elif node[\"name\"] in [\n \"add\",\n \"subtract\",\n \"multiply\",\n \"minimum\",\n \"maximum\",\n \"divide\",\n ]:\n input_a = get_tensor_from_map(node[\"inputs\"][0][0])\n input_b = get_tensor_from_map(node[\"inputs\"][1][0])\n node_out_name = make_output_tensor(node, node_seq)\n self.clml_code.append(\n self.MakeBinaryOp.substitute(\n input_a=input_a,\n input_b=input_b,\n output_tensor=node_out_name,\n op=node[\"name\"],\n dtype=node[\"attrs\"][\"dtype\"][0][0],\n )\n )\n else:\n RuntimeError(\"Unsupported Op:\" + node[\"name\"])\n self.clml_code.append(\n self.MapInsert.substitute(nid=node_out_name, tensor_desc=node_out_name)\n )\n self.node_map[node_seq] = node_out_name\n\n elif node[\"op\"] not in [\"const\", \"input\"]:\n print(\"Unknown Node type:\", node[\"op\"])\n\n # Populate outputs\n out_nodes = self.codegen[\"heads\"]\n self.clml_code.append(\"// Populate outputs\")\n for nid_triple in out_nodes:\n nid = nid_triple[0]\n out_node = self.nodes[nid]\n dtype = str(out_node[\"attrs\"][\"dtype\"][0][0])\n shape = str(tuple(out_node[\"attrs\"][\"shape\"][0][0]))[1:-1]\n out_name = self.sub_module_name + \"_\" + \"layer_out_\" + str(nid)\n self.clml_code.append(\n Template(\n 'runner.outputs.insert({\"$out_name\", runner.storage_map[\"$out_name\"]});'\n ).substitute(out_name=out_name)\n )\n self.clml_code.append(\n Template('runner.outputs_dtypes.insert({\"$out_name\", \"$dtype\"});').substitute(\n out_name=out_name, dtype=dtype\n )\n )\n self.clml_code.append(\n Template(\n \"runner.outputs_shapes.insert\" '({\"$out_name\", std::vector<size_t>({$shape})});'\n ).substitute(out_name=out_name, shape=shape)\n )\n self.output_meta.append(\n self.MakeOutputMetaInfo.substitute(out_name=out_name, dtype=dtype, shape=shape)\n )\n\n # Mem allocation & Param copy\n self.clml_code.append(\"// Allocate Tensor Memory and copy params\")\n self.clml_code.append(\"runner.AllocateMemAndPopulateParams();\")\n\n # Meta data preparation\n self.clml_code.append(\n self.MakeMetaInfo.substitute(\n name=self.sub_module_name,\n input_count=len(self.input_meta),\n output_count=len(self.output_meta),\n input_meta=\"\\\\\\n\".join(self.input_meta),\n output_meta=\"\\\\\\n\".join(self.output_meta),\n )\n )\n\n self.clml_code.append(self.MakeFooter.substitute())\n return (self.sub_module_name, self.clml_code)",
"def __repr__(self):\n\n if not self.output :\n return \"\"\n\n # if self.is_abstract:\n # result = \"\\n # Properties inherited from \" + self.class_name + \"\\n\"\n # if len(self.attributes) > 0:\n # for attribute_ in self.attributes:\n # result += repr(attribute_)\n # # else:\n # # result += \"\\n pass\"\n # result += \"\\n\"\n # result += \" # End of properties inherited from \" + self.class_name + \"\\n\"\n # return result.encode('ascii', 'ignore')\n\n # Not an abstract class, so output as a full class\n result = \"class \"\n result += self.class_name\n\n result += \"(Base): # class definition\\n\"\n\n result += \" __tablename__ = \\'\" + self.class_name.lower() + \"\\'\\n\\n\"\n result += \" id = Column(Integer, primary_key=True)\\n\"\n\n\n # If this class has a general class, then add a foreign key to it\n if self.general_class:\n #result += repr(self.general_class)\n result += \" \" + self.general_class_name.lower() + \"_id = \"\n result += \"Column('\" + self.general_class_name.lower() + \"_id', \"\n result += \"ForeignKey('\" + self.general_class_name.lower() + \".id'), nullable = True)\"\n\n if len(self.attributes) > 0:\n for attribute_ in self.attributes:\n result += repr(attribute_)\n # else:\n # result += \"\\n pass\"\n\n result += \"\\n\"\n\n return result.encode('ascii', 'ignore')",
"def readEntities(self):\n entities = {}\n \n # Regexes must be greedy to prevent matching outer entity and end_entity strings\n # Regexes have re.DOTALL to match newlines\n for m in re.finditer(\"ENTITY (.*?)END_ENTITY;\", self.data, re.DOTALL):\n entity = {}\n raw_entity_str = m.groups()[0]\n\n entity[\"name\"] = re.search(\"(.*?)[;|\\s]\", raw_entity_str).groups()[0].upper()\n \n is_supertype = re.search(\"SUPERTYPE\", raw_entity_str) != None\n if is_supertype:\n supertypeofmatch = re.search(\".*SUPERTYPE OF \\((.*?)\\)\", raw_entity_str)\n is_abstract_supertype = re.search(\"ABSTRACT SUPERTYPE\", raw_entity_str) != None\n \n is_subtype = re.search(\"SUBTYPE\", raw_entity_str) != None\n\n if is_supertype and is_subtype:\n if is_abstract_supertype:\n # abstract supertype of ... subtype of ... ;\n match = re.search(\".*ABSTRACT SUPERTYPE OF\\s+\\((.*?)\\)\\s+SUBTYPE OF\\s+\\((.*?)\\);\", raw_entity_str, re.DOTALL)\n entity[\"isabstract\"] = True\n supertypeof,subtypeof = match.groups()\n entity[\"supertype\"] = subtypeof.upper()\n supertypeof = re.sub('\\s', '', supertypeof)\n supertypeofmatch = re.search(\".*\\((.*?)\\)\", supertypeof, re.DOTALL)\n subtypes = supertypeofmatch.groups()[0].upper().split(',')\n entity[\"subtypes\"] = subtypes\n else:\n # supertype of ... subtype of ... ;\n match = re.search(\".*SUPERTYPE OF\\s+\\((.*?)\\)\\s+SUBTYPE OF\\s+\\((.*?)\\);\", raw_entity_str, re.DOTALL)\n entity[\"isabstract\"] = False\n supertypeof,subtypeof = match.groups()\n entity[\"supertype\"] = subtypeof.upper()\n supertypeof = re.sub('\\s', '', supertypeof)\n supertypeofmatch = re.search(\".*\\((.*?)\\)\", supertypeof, re.DOTALL)\n subtypes = supertypeofmatch.groups()[0].upper().split(',')\n entity[\"subtypes\"] = subtypes\n elif is_subtype:\n # subtype of ... ;\n subtypeofmatch = re.search(\".*SUBTYPE OF \\((.*?)\\);\", raw_entity_str)\n entity[\"supertype\"] = subtypeofmatch.groups()[0].upper() if subtypeofmatch else None\n\n # find the shortest string matched from the end of the entity type header to the\n # first occurence of a NO_ATTR string (when it occurs on a new line)\n inner_str = re.search(\";(.*?)$\", raw_entity_str, re.DOTALL).groups()[0]\n\n attrs_str = min([inner_str.partition(\"\\r\\n \"+a)[0] for a in self.NO_ATTR])\n attrs = []\n for am in re.finditer(\"(\\S*?) : (.*?);\", attrs_str, re.DOTALL):\n name, attr_type = [s.replace(\"\\r\\n\\t\",\"\") for s in am.groups()]\n attrs.append((name, attr_type))\n \n entity[\"attributes\"] = attrs\n entities[entity[\"name\"]] = entity\n \n\n return entities",
"def compile_class(self):\n\t\t\n\t\txml = '<class>\\n' + self.tokenizer.keyword() + self.tokenizer.identifier() + self.tokenizer.symbol()\n\n\t\tself.outfile.write(xml)",
"def build_class_view(self, dotted_name):\r\n cls = get_obj(self.dsa, self.pkg, dotted_name)\r\n # XXX is this a safe check?\r\n try:\r\n sourcefile = inspect.getsourcefile(cls)\r\n except TypeError:\r\n sourcefile = None\r\n\r\n docstring = cls.__doc__\r\n if docstring:\r\n docstring = deindent(docstring)\r\n if not hasattr(cls, '__name__'):\r\n clsname = 'instance of %s' % (cls.__class__.__name__,)\r\n else:\r\n clsname = cls.__name__\r\n bases = self.build_bases(dotted_name)\r\n properties = self.build_properties(cls)\r\n methods = self.build_methods(dotted_name)\r\n\r\n if sourcefile is None:\r\n sourcelink = H.div('no source available')\r\n else:\r\n if sourcefile[-1] in ['o', 'c']:\r\n sourcefile = sourcefile[:-1]\r\n sourcelink = H.div(H.a('view source',\r\n href=self.linker.get_lazyhref(sourcefile,\r\n self.get_anchor(cls))))\r\n\r\n snippet = H.ClassDescription(\r\n # XXX bases HTML\r\n H.ClassDef(clsname, bases, docstring, sourcelink,\r\n properties, methods),\r\n )\r\n\r\n return snippet",
"def get_code(self, parent, modname, fqname):\n\n if self.verbose:\n print >> sys.stderr, '-'*78\n print >> sys.stderr, \"Importing %s from the network ...\" % fqname\n print >> sys.stderr, '-'*78\n\n\n out = None\n for baseurl in self.path:\n\n proto_url = '/'.join([baseurl] + fqname.split('.'))\n\n\n # Is this a package?\n # ==================\n # If so, we want to look for __init__.py.\n\n is_package = self.download(proto_url + '/')\n if is_package:\n proto_url += '/__init__'\n\n\n # Try to find some code.\n # ======================\n\n for suffix in imp.get_suffixes():\n url = proto_url + suffix[0]\n fp = self.download(url)\n if fp is not None:\n\n # Prepare elements for imputil.Importer.\n # ======================================\n\n mod = imp.load_module(modname, fp, fp.name, suffix)\n out = (is_package, mod, {})\n break\n\n if out is not None:\n break\n\n return out",
"def createClassFile( p ):\n create_modules( p[\"package\"] )\n name = p[\"protocol\"][\"name\"]\n name.lower()\n path = os.path.join( *p[\"package\"].split( \".\" ) )\n with open( \"./%s/%s.py\" % ( path, name ), \"w\" ) as f:\n for i in p[\"imports\"]:\n createClassFile( i )\n\n c = Klass( package=p[\"package\"], includes=p[\"imports\"], **p[\"protocol\"] )\t\n\n f.write( c.generate() )",
"def compile_class(self):\r\n self.tokenizer.advance() # ignore 'class' keyword\r\n self.class_name = self.tokenizer.identifier()\r\n self.tokenizer.advance()\r\n self.tokenizer.advance() # ignore '{' symbol\r\n while self.tokenizer.curtok < len(self.tokenizer.tokens) - 1:\r\n dec = self.tokenizer.key_word()\r\n if dec == \"field\" or dec == \"static\":\r\n self.compile_var_dec()\r\n else:\r\n self.compile_subroutine()\r\n self.tokenizer.advance()",
"def makehxx(self, gen):\n services = []\n for serv in self.services:\n service = \" %s %s(\" % (corba_rtn_type(serv.ret,gen.module.name),serv.name)\n service = service+gen.makeArgs(serv)+\");\"\n services.append(service)\n\n if self.addedmethods:\n services.append(self.addedmethods)\n servicesdef = \"\\n\".join(services)\n\n inheritedclass=self.inheritedclass\n if self.inheritedclass:\n inheritedclass= \" public virtual \" + self.inheritedclass + \",\"\n\n return hxxCompo.substitute(component=self.name, module=gen.module.name,\n servicesdef=servicesdef, inheritedclass=inheritedclass,\n compodefs=self.compodefs)",
"def make_class(attributes, base_classes=()):\r\n \"*** YOUR CODE HERE ***\"",
"def IIAGG(self):\r\n logger.info(\"IIAGG (Indirect Inheritance AGGregation)\")\r\n logger.info(\"Step1: Get all parent and child classes\")\r\n inh = self.get_node_by_name(\"inheritance\")\r\n logger.debug(\"inheritance: %s\" % inh)\r\n logger.info(\"Step2: Get all Aggregation relation classes\")\r\n agg = self.get_node_by_name(\"aggregation\")\r\n logger.debug(\"aggregation: %s\" % agg)\r\n return self.__IIAGG_helper(inh, agg)"
] | [
"0.6546843",
"0.6342097",
"0.61589974",
"0.61360514",
"0.58900464",
"0.55907094",
"0.55879545",
"0.5527749",
"0.54475427",
"0.54270315",
"0.540438",
"0.53865635",
"0.535935",
"0.52980083",
"0.52552074",
"0.5254086",
"0.5237905",
"0.5225976",
"0.5212285",
"0.5208125",
"0.51882744",
"0.51859933",
"0.51708686",
"0.51186377",
"0.51185083",
"0.5115658",
"0.5110685",
"0.5092058",
"0.5067585",
"0.5066708"
] | 0.68348277 | 0 |
updates .coveralls.yml file to allow upload of coverage report | def update_coveralls_config(
path_to_coverage,
coveralls_token,
token_key='repo_token',
):
try:
with open(path_to_coverage, 'r') as cover_fh:
raw_file = cover_fh.read()
except FileNotFoundError:
raw_file = ''
# check if repo_token is already in .coveralls.yml
if token_key in raw_file:
return # already has coveralls credentials
# TODO: check if `repo_token` is blank
lines = raw_file.splitlines()
lines.append(token_key + ': ' + coveralls_token)
with open(path_to_coverage, 'w') as cover_fh:
        cover_fh.write('\n'.join(lines)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cover(ctx, html=False):\n header(cover.__doc__)\n extra = \"--cov-report html\" if html else \"\"\n with ctx.cd(ROOT):\n ctx.run(\n \"pytest --benchmark-skip --cov flask_restx --cov-report term --cov-report xml {0}\".format(\n extra\n ),\n pty=True,\n )",
"def task_coverage():\n return {\n 'actions': ['py.test --cov nikola --cov-report term-missing tests/'],\n 'verbosity': 2,\n }",
"def coverage(session) -> None:\n session.install(\".[test]\", \"pytest-cov\")\n session.run(\n \"pytest\", \"-n\", \"auto\", \"--cov=./\", \"--cov-report=xml\", *session.posargs\n )",
"def cuv(ctx, coverage_fname, exclude, branch):\n if coverage_fname is None:\n coverage_fname = find_coverage_data('.')\n # coverage_fname still could be None\n\n cfg = Config()\n ctx.obj = cfg\n\n cfg.nice_width = min(80, shutil.get_terminal_size()[0])\n cfg.exclude = exclude\n\n cfg.branch = branch\n if coverage_fname is not None:\n cfg.data = coverage.Coverage(data_file=coverage_fname)\n cfg.data.load()\n else:\n raise click.UsageError(\n \"No coverage data. Do you have a .coverage file?\"\n )",
"def coverage(session):\n session.install(\"coverage[toml]\", \"codecov\")\n session.run(\"coverage\", \"xml\", \"--fail-under=0\")\n session.run(\"codecov\", *session.posargs)",
"def set_coverage(self, coverage): \n self.coverage = coverage\n if os.path.isfile(TESTS_PATH + \"/\" + self.name):\n os.rename(TESTS_PATH + \"/\" + self.name, TESTS_PATH + \"/\" \\\n + self.app_pkg + \"_\"+self.timestamp + \"_\" \\\n + str(coverage) + \".sh\")\n else:\n f_out = open(TESTS_PATH + \"/\" + self.app_pkg + \"_\"+self.timestamp \\\n + \"_\" + str(coverage) + \".sh\", \"w\")\n f_out.write(self.script)\n f_out.close()\n self.name = self.app_pkg + \"_\" + self.timestamp + \"_\" + str(coverage) + \".sh\"",
"def upload_coverage(self, name, directory):\n logging.info('Not uploading coverage because no Filestore.')",
"def coverage(ctx):\n ctx.run(\"coverage run --source {PROJECT_NAME} -m pytest\".format(PROJECT_NAME=PROJECT_NAME))\n ctx.run(\"coverage report -m\")\n ctx.run(\"coverage html\")",
"def cov():\n cov = coverage.coverage(branch=True, include='project/*')\n cov.start()\n tests = unittest.TestLoader().discover('tests')\n unittest.TextTestRunner(verbosity=2).run(tests)\n cov.stop()\n cov.save()\n print('Coverage Summary:')\n cov.report()\n basedir = os.path.abspath(os.path.dirname(__file__))\n covdir = os.path.join(basedir, 'tmp/coverage')\n cov.html_report(directory=covdir)\n print('HTML version: file://%s/index.html' % covdir)\n cov.erase()",
"def cov():\n cov = coverage.coverage(\n branch=True,\n include='project/*',\n omit=\"*/__init__.py\"\n )\n cov.start()\n tests = unittest.TestLoader().discover('tests')\n unittest.TextTestRunner(verbosity=2).run(tests)\n cov.stop()\n cov.save()\n print 'Coverage Summary:'\n cov.report()\n basedir = os.path.abspath(os.path.dirname(__file__))\n covdir = os.path.join(basedir, 'tmp/coverage')\n cov.html_report(directory=covdir)\n print('HTML version: file://%s/index.html' % covdir)\n cov.erase()",
"def test(coverage):\n print('success')\n pass",
"def _update_coverage(self, msg, subtype, by):\n try:\n coverage = self.get_local(msg, \"coverage\")\n except KeyError:\n coverage = defaultdict(int)\n coverage[\"all\"] += by\n coverage[subtype] += by\n self.set_local(msg, \"coverage\", coverage)",
"def generate_cobertura_xml(self, coverage_data):\n\n dom_impl = minidom.getDOMImplementation()\n doctype = dom_impl.createDocumentType(\"coverage\", None,\n \"http://cobertura.sourceforge.net/xml/coverage-03.dtd\")\n document = dom_impl.createDocument(None, \"coverage\", doctype)\n root = document.documentElement\n summary = coverage_data['summary']\n self._attrs(root, {\n 'branch-rate': self._percent(summary['branches-total'],\n summary['branches-covered']),\n 'branches-covered': str(summary['branches-covered']),\n 'branches-valid': str(summary['branches-total']),\n 'complexity': '0',\n 'line-rate': self._percent(summary['lines-total'],\n summary['lines-covered']),\n 'lines-valid': str(summary['lines-total']),\n 'timestamp': coverage_data['timestamp'],\n 'version': '1.9'\n })\n\n sources = self._el(document, 'sources', {})\n source = self._el(document, 'source', {})\n source.appendChild(document.createTextNode(self.base_dir))\n sources.appendChild(source)\n\n root.appendChild(sources)\n\n packages_el = self._el(document, 'packages', {})\n\n packages = coverage_data['packages']\n for package_name, package_data in list(packages.items()):\n package_el = self._el(document, 'package', {\n 'line-rate': package_data['line-rate'],\n 'branch-rate': package_data['branch-rate'],\n 'name': package_name\n })\n classes_el = self._el(document, 'classes', {})\n for class_name, class_data in list(package_data['classes'].items()):\n class_el = self._el(document, 'class', {\n 'branch-rate': self._percent(class_data['branches-total'],\n class_data['branches-covered']),\n 'complexity': '0',\n 'filename': class_name,\n 'line-rate': self._percent(class_data['lines-total'],\n class_data['lines-covered']),\n 'name': class_data['name']\n })\n\n # Process methods\n methods_el = self._el(document, 'methods', {})\n for method_name, hits in list(class_data['methods'].items()):\n method_el = self._el(document, 'method', {\n 'name': method_name,\n 'signature' : '',\n 'hits': hits\n })\n methods_el.appendChild(method_el)\n\n # Process lines\n lines_el = self._el(document, 'lines', {})\n lines = list(class_data['lines'].keys())\n lines.sort()\n for line_number in lines:\n line_el = self._el(document, 'line', {\n 'branch': class_data['lines'][line_number]['branch'],\n 'hits': str(class_data['lines'][line_number]['hits']),\n 'number': str(line_number)\n })\n if class_data['lines'][line_number]['branch'] == 'true':\n total = int(class_data['lines'][line_number]['branches-total'])\n covered = int(class_data['lines'][line_number]['branches-covered'])\n percentage = int((covered * 100.0) / total)\n line_el.setAttribute('condition-coverage',\n '{0}% ({1}/{2})'.format(\n percentage, covered, total))\n lines_el.appendChild(line_el)\n\n class_el.appendChild(methods_el)\n class_el.appendChild(lines_el)\n classes_el.appendChild(class_el)\n package_el.appendChild(classes_el)\n packages_el.appendChild(package_el)\n root.appendChild(packages_el)\n\n return document.toprettyxml()",
"def main():\n import coverage\n import nose\n import os\n from shutil import rmtree\n rmtree('./covhtml', ignore_errors=True)\n try:\n os.remove('./.coverage')\n except Exception,e:\n pass\n\n # run nose in its own process because the .coverage file isn't written\n # until the process terminates and we need to read it\n nose.run()",
"def download_coverage(self, name, dst_directory):\n logging.info('Not downloading coverage because no Filestore.')",
"def gcov_it(version, num):\n subprocess.run([\"gcov-7\", \"-i\", \"replace.c\"],\n cwd=CWD_PREFIX+str(version))\n gcov = open(\"replace/versions.alt/versions.orig/v\" + str(version) +\n \"/replace.c.gcov\", 'r')\n out = []\n for line in gcov:\n info = line.split(':')[1].split(',')\n if line[0]=='l' and int(info[1]):\n out.append(\"cover(T{},S{})\".format(str(num), info[0]))\n gcov.close()\n subprocess.run([\"rm\", \"replace/versions.alt/versions.orig/v\" +\n str(version) + \"/replace.gcda\"])\n return out",
"def run(self):\n cmd = 'coverage run setup.py test && coverage report -m'\n check_call(cmd, shell=True)",
"def coverage(context):\n context.run(\" \".join([\n \"python -m pytest\",\n \"--cov=%s\" % PACKAGE_NAME,\n \"--cov-report html\",\n \"--cov-branch\",\n \"--cov-fail-under=75\"\n ]))",
"def test_coverage_2(base_settings):\n filename = base_settings[\"unittest_data_dir\"] / \"coverage-example-selfpay.json\"\n inst = coverage.Coverage.parse_file(\n filename, content_type=\"application/json\", encoding=\"utf-8\"\n )\n assert \"Coverage\" == inst.resource_type\n\n impl_coverage_2(inst)\n\n # testing reverse by generating data from itself and create again.\n data = inst.dict()\n assert \"Coverage\" == data[\"resourceType\"]\n\n inst2 = coverage.Coverage(**data)\n impl_coverage_2(inst2)",
"def cov():\n tests = unittest.TestLoader().discover('project/tests')\n result = unittest.TextTestRunner(verbosity=2).run(tests)\n if result.wasSuccessful():\n COV.stop()\n COV.save()\n print('Coverage Summary:')\n COV.report()\n basedir = os.path.abspath(os.path.dirname(__file__))\n covdir = os.path.join(basedir, 'tmp/coverage')\n COV.html_report(directory=covdir)\n print('HTML version: file://%s/index.html' % covdir)\n COV.erase()\n return 0\n return 1",
"def test_coverage_4(base_settings):\n filename = base_settings[\"unittest_data_dir\"] / \"coverage-example.json\"\n inst = coverage.Coverage.parse_file(\n filename, content_type=\"application/json\", encoding=\"utf-8\"\n )\n assert \"Coverage\" == inst.resource_type\n\n impl_coverage_4(inst)\n\n # testing reverse by generating data from itself and create again.\n data = inst.dict()\n assert \"Coverage\" == data[\"resourceType\"]\n\n inst2 = coverage.Coverage(**data)\n impl_coverage_4(inst2)",
"def coverage():\n print(\"Coverage tests always re-run\")\n with safe_cd(SRC):\n my_env = config_pythonpath()\n # You will need something like this in pytest.ini\n # By default, pytest is VERY restrictive in the file names it will match.\n #\n # [pytest]\n # DJANGO_SETTINGS_MODULE = core.settings\n # python_files = tests.py test_*.py *_tests.py test*_*.py *_test*.py\n if not os.path.exists(\"pytest.ini\") and IS_DJANGO:\n print(\n \"pytest.ini MUST exist for Django test detection or too few tests are found.\"\n )\n exit(-1)\n return\n\n my_env = config_pythonpath()\n command = \"{0} py.test {1} --cov={2} --cov-report html:coverage --cov-fail-under 55 --verbose\".format(\n PIPENV, \"test\", PROJECT_NAME\n )\n execute_with_environment(command, my_env)",
"def bump_upstream_sources(**kwargs):\n\n # Find out current tracking branch to bump\n # the services matching the branch:\n oa_folder = kwargs['workdir'] + '/openstack-ansible'\n try:\n remote_branch = tracking_branch_name(oa_folder)\n except ValueError as verr:\n raise SystemExit(verr)\n\n LOGGER.info(\"Each file can take a while to update.\")\n prevline = {}\n reporegex = re.compile('(?P<project>.*)_git_repo: (?P<remote>.*)')\n branchregex = re.compile(('(?P<project>.*)_git_install_branch: '\n '(?P<sha>[0-9a-f]{40}) '\n '# HEAD of \"(?P<branch>.*)\" '\n 'as of .*'))\n\n update_files = glob.glob(\n \"{}/playbooks/defaults/repo_packages/*.yml\".format(oa_folder))\n\n stable_branch_skips = [\n \"openstack_testing.yml\",\n \"nova_consoles.yml\",\n ]\n\n for filename in update_files:\n if remote_branch.startswith(\"stable/\") and \\\n os.path.basename(filename) in stable_branch_skips:\n LOGGER.info(\"Skipping {} for stable branch\".format(filename))\n continue\n LOGGER.info(\"Updating {}\".format(filename))\n for line in fileinput.input(filename, inplace=True):\n rrm = reporegex.match(line)\n if rrm:\n # Extract info of repo line (previous line)\n # for branch line (current line)\n prevline['project'] = rrm.group('project')\n prevline['remote'] = rrm.group('remote')\n print(branchregex.sub(\n lambda x: bump_project_sha_with_comments(x, prevline), line)),\n\n LOGGER.info(\"All files patched !\")\n msg = (\"Update all SHAs for {next_release}\\n\\n\"\n \"This patch updates all the roles to the latest available stable \\n\"\n \"SHA's, copies the release notes from the updated roles into the \\n\"\n \"integrated repo, updates all the OpenStack Service SHA's, and \\n\"\n \"updates the appropriate python requirements pins. \\n\\n\"\n \"Depends-On: {release_changeid}\").format(\n next_release=os.environ.get('next_release', '<NEW VERSION>'),\n release_changeid=os.environ.get('release_changeid', '<TODO>'),)\n if kwargs['commit']:\n repo = Repo(oa_folder)\n repo.git.add('.')\n repo.index.commit(msg)\n click.echo(\"Commit done. Please verify before review.\")\n else:\n click.echo(\"Here is a commit message you could use:\\n\")\n click.echo(msg)",
"def run_coverage(session):\n set_environment_variables(PYBAMM_ENV, session=session)\n session.run_always(\"pip\", \"install\", \"coverage\")\n session.run_always(\"pip\", \"install\", \"-e\", \".[all]\")\n if sys.platform != \"win32\":\n session.run_always(\"pip\", \"install\", \"-e\", \".[odes]\")\n session.run_always(\"pip\", \"install\", \"-e\", \".[jax]\")\n session.run(\"coverage\", \"run\", \"--rcfile=.coveragerc\", \"run-tests.py\", \"--nosub\")\n session.run(\"coverage\", \"combine\")\n session.run(\"coverage\", \"xml\")",
"def cov():\n tests = unittest.TestLoader().discover('tests')\n result = unittest.TextTestRunner(verbosity=1).run(tests)\n if result.wasSuccessful():\n COV.stop()\n COV.save()\n print('Coverage Summary:')\n COV.report()\n basedir = os.path.abspath(os.path.dirname(__file__))\n covdir = os.path.join(basedir, 'tmp/coverage')\n COV.html_report(directory=covdir)\n print('HTML version: file://%s/index.html' % covdir)\n COV.erase()\n return 0\n return 1",
"def test_coverage_1(base_settings):\n filename = base_settings[\"unittest_data_dir\"] / \"coverage-example-2.json\"\n inst = coverage.Coverage.parse_file(\n filename, content_type=\"application/json\", encoding=\"utf-8\"\n )\n assert \"Coverage\" == inst.resource_type\n\n impl_coverage_1(inst)\n\n # testing reverse by generating data from itself and create again.\n data = inst.dict()\n assert \"Coverage\" == data[\"resourceType\"]\n\n inst2 = coverage.Coverage(**data)\n impl_coverage_1(inst2)",
"def cov():\n tests = unittest.TestLoader().discover('project/tests')\n result = unittest.TextTestRunner(verbosity=2).run(tests)\n if result.wasSuccessful():\n COV.stop()\n COV.save()\n print('Coverage Summary:')\n COV.report()\n COV.html_report()\n COV.erase()\n return 0\n return 1",
"def test_coverage_3(base_settings):\n filename = base_settings[\"unittest_data_dir\"] / \"coverage-example-ehic.json\"\n inst = coverage.Coverage.parse_file(\n filename, content_type=\"application/json\", encoding=\"utf-8\"\n )\n assert \"Coverage\" == inst.resource_type\n\n impl_coverage_3(inst)\n\n # testing reverse by generating data from itself and create again.\n data = inst.dict()\n assert \"Coverage\" == data[\"resourceType\"]\n\n inst2 = coverage.Coverage(**data)\n impl_coverage_3(inst2)",
"def coverage_init(reg, options):\n from .coveragepy import CoveragePlugin\n\n reg.add_file_tracer(CoveragePlugin())",
"def _update_disco(repo: _git.Repository, github_account: accounts.GitHubAccount) -> int:\n with TemporaryDirectory() as gopath:\n os.makedirs(join(gopath, 'src'))\n check_output(['ln', '-s',\n join(repo.filepath, 'src'),\n join(gopath, 'src/discovery-artifact-manager')])\n env = os.environ.copy()\n env['GOPATH'] = gopath\n check_output(['go', 'run', 'src/main/updatedisco/main.go'],\n cwd=repo.filepath,\n env=env)\n repo.add(['discoveries'])\n if not repo.diff_name_status():\n return 0\n repo.commit('Autogenerated Discovery document update',\n github_account.name,\n github_account.email)\n return 1"
] | [
"0.57402664",
"0.55637956",
"0.5521971",
"0.5499088",
"0.54615265",
"0.5441391",
"0.54214483",
"0.5401124",
"0.53201175",
"0.51842374",
"0.5157459",
"0.51465404",
"0.5143215",
"0.5136255",
"0.5116265",
"0.5108954",
"0.506716",
"0.49769455",
"0.49514818",
"0.49292338",
"0.49191287",
"0.49159703",
"0.48945257",
"0.48787403",
"0.4871899",
"0.48485205",
"0.48069066",
"0.47754508",
"0.47708666",
"0.47613415"
] | 0.7289181 | 0 |
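A minimal sketch of the idempotent-append pattern the positive document above uses: read the file if it exists, skip the write when the key is already present, otherwise append a single "key: value" line. The helper name ensure_yaml_key is invented for illustration; only the standard library is assumed.

from pathlib import Path

def ensure_yaml_key(config_path, key, value):
    p = Path(config_path)
    text = p.read_text() if p.exists() else ''
    if key in text:
        return  # credentials already configured, nothing to do
    lines = text.splitlines()
    lines.append('%s: %s' % (key, value))
    p.write_text('\n'.join(lines) + '\n')  # join() restores the newlines splitlines() removed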
turn multiline config entry into a list of commands | def parse_command_list(config_str):
return [command for command in config_str.splitlines() if command] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def config_changes(cli):\n result = []\n in_config = False\n for line in cli.splitlines():\n if not in_config and line == 'Building configuration...':\n in_config = True\n elif in_config:\n result.append(line)\n\n return '\\n'.join(result)",
"def get_commands_list() -> list:\n return open(\"data/metadata/commands.list.txt\", \"r\").read().split(\"\\n\")",
"def config_to_list(config_name=CONFIG_FILE_NAME):\n result = []\n config = configparser.ConfigParser()\n\n if not config.read([config_name]):\n return []\n\n for section in SECTIONS:\n for name, opts in ((n, o) for n, o in SECTIONS[section].items() if config.has_option(section, n)):\n value = config.get(section, name)\n\n if value != '' and value != 'None':\n action = opts.get('action', None)\n\n if action == 'store_true' and value == 'True':\n # Only the key is on the command line for this action\n result.append('--{}'.format(name))\n\n if not action == 'store_true':\n if opts.get('nargs', None) == '+':\n result.append('--{}'.format(name))\n result.extend((v.strip() for v in value.split(',')))\n else:\n result.append('--{}={}'.format(name, value))\n\n return result",
"def commands(config, names):\n commands = {\n cmd: Command(\n **{minus_to_underscore(k): v for k, v in config.items(cmd)}\n )\n for cmd in config.sections()\n if cmd != 'packages'\n }\n\n try:\n return tuple(commands[x] for x in names)\n except KeyError as e:\n raise RuntimeError(\n 'Section [commands] in the config file does not contain the '\n 'key {.args[0]!r} you requested to execute.'.format(e))",
"def load_command_list(filename=None):\n contents = None\n if filename:\n logger.debug('Attempting to read commands from \"{}\"'.format(filename))\n with open(filename, 'r') as fp:\n contents = fp.read().strip()\n\n if not contents:\n contents = ''\n\n # Split data as lines (ignore empty)\n return [l.strip().upper() for l in contents.split('\\n') if l.strip() != '']",
"def add_config(self, config):\n clean=lambda n: n.strip().strip('\"').lower()\n for line in config.split('\\n'):\n items=line.strip().split()\n if items and len(items) >= 3:\n cmd, evt, hnd=items[:3]\n \"\"\" NOTE\n - just 'bind' command expected right now\n - '+' prepended ti the handler means REPEAT (make sense just for keyboard keys actually)\n \"\"\"\n cmd=clean(cmd)\n if cmd in ['bind']:\n evt,hnd=(clean(evt), clean(hnd))\n if not cmd in self.config: self.config[cmd]={}\n repeat=hnd.startswith('+')\n if repeat: hnd=hnd[1:]\n self.config[cmd].update([[evt, [hnd, repeat]]])",
"def get_config_lines(self, key):\n return \" \".join([key, self._config[key][\"value\"], \"#\", self._config[key][\"comments\"]] if self._config[key][\"comments\"] is not None\n else [key, self._config[key][\"value\"]])",
"def test_removes_trailing_newline_from_complex_keys(self):\n commands = list(parser.generate_commands(yaml.load(\"\"\"\n - ? >\n Line 1\n Line 2\n : retries: 1\n \"\"\")))\n assert commands == [('Line 1 Line 2', {'retries': 1})]",
"def parse_commands(command_list: List[str]) -> List[str]:\n return [' '.join(x.split('-')) for x in command_list]",
"def configToCliArguments(config):\n if not isinstance(config, dict):\n raise TypeError(\"Expected dict for config\")\n\n args = []\n for key, value in config.items():\n if value == None:\n args.append(f\"--{key}\")\n continue\n\n if isinstance(value, list):\n value = \",\".join(value)\n args.append(f\"--{key}={value}\")\n\n return args",
"def makeConfig (self):\n for line in self.lines :\n ll = line.split ('=', 1)\n if len(ll) < 2 :\n print \"Error in parsing cfg label line: \" , line\n return None\n self.config[(ll[0]).strip()] = ((ll[1]).strip())",
"def _config_list(res, ctx):\n\n if _has_error_code(res):\n return print_errors(res, ctx)\n\n lines = []\n for config in res['configs']:\n line = '* ' if config['current'] else ' '\n\n if ctx.verbose:\n line += config['mtime'] + ' '\n\n line += config['name']\n lines.append(line)\n\n return \"\\n\".join(lines)",
"def config(bot, event, cmd=None, *args):\n\n # consume arguments and differentiate beginning of a json array or object\n tokens = list(args)\n parameters = []\n value = []\n state = \"key\"\n for token in tokens:\n if token.startswith((\"{\", \"[\")):\n # apparent start of json array/object, consume into a single list item\n state = \"json\"\n if state == \"key\":\n parameters.append(token)\n elif state == \"json\":\n value.append(token)\n else:\n raise ValueError(\"unknown state\")\n if value:\n parameters.append(\" \".join(value))\n print(\"config {}\".format(parameters))\n\n if cmd == 'get' or cmd is None:\n config_args = list(parameters)\n value = bot.config.get_by_path(config_args) if config_args else dict(bot.config)\n elif cmd == 'set':\n config_args = list(parameters[:-1])\n if len(parameters) >= 2:\n bot.config.set_by_path(config_args, json.loads(parameters[-1]))\n bot.config.save()\n value = bot.config.get_by_path(config_args)\n else:\n yield from command.unknown_command(bot, event)\n return\n elif cmd == 'append':\n config_args = list(parameters[:-1])\n if len(parameters) >= 2:\n value = bot.config.get_by_path(config_args)\n if isinstance(value, list):\n value.append(json.loads(parameters[-1]))\n bot.config.set_by_path(config_args, value)\n bot.config.save()\n else:\n value = _('append failed on non-list')\n else:\n yield from command.unknown_command(bot, event)\n return\n elif cmd == 'remove':\n config_args = list(parameters[:-1])\n if len(parameters) >= 2:\n value = bot.config.get_by_path(config_args)\n if isinstance(value, list):\n value.remove(json.loads(parameters[-1]))\n bot.config.set_by_path(config_args, value)\n bot.config.save()\n else:\n value = _('remove failed on non-list')\n else:\n yield from command.unknown_command(bot, event)\n return\n else:\n yield from command.unknown_command(bot, event)\n return\n\n if value is None:\n value = _('Parameter does not exist!')\n\n config_path = ' '.join(k for k in ['config'] + config_args)\n segments = [hangups.ChatMessageSegment('{}:'.format(config_path),\n is_bold=True),\n hangups.ChatMessageSegment('\\n', hangups.SegmentType.LINE_BREAK)]\n segments.extend(text_to_segments(json.dumps(value, indent=2, sort_keys=True)))\n bot.send_message_segments(event.conv, segments)",
"def parse(self):\n raw_config_lines = self.load_config()\n self.config_lines_str = raw_config_lines\n self._create_cfg_line_objects()",
"def process_commands(self, commands: List[str]):",
"def configDict(config):\n config_dict = {}\n line_number = 0\n if type(config) == str:\n config_object = config.splitlines()\n else:\n return \"ERROR: config not type str\"\n for index, line in enumerate(config_object):\n if not bool(re.match(\"^\\s|!\", line)):\n line_number = index\n config_dict[line] = []\n elif bool(re.match(\"^\\s\", line)):\n config_dict[config_object[line_number]].append(line.strip())\n return config_dict",
"def command(self):\n with open(self.x, 'rt') as fi:\n line = next(fi) # the first line\n\n version, cmd_line = line.strip().split(';')\n version = version.split(' ')[2]\n cmd_line = re.sub('\"', '', cmd_line.strip())\n\n return [version, cmd_line]",
"def command_groups(self, lines):\n for line in lines:\n match = command_regex.match(line)\n if match:\n if self.current_group:\n yield self.current_group\n groupdict = match.groupdict()\n comm = groupdict['comm']\n param = groupdict['param']\n if param:\n param = param[1:-1]\n data = groupdict['data']\n self.current_group = (comm, param, data)\n self.parse_multiline = True\n elif self.parse_multiline:\n match = cont_regex.match(line)\n if cont_regex.match(line):\n new_data, = match.groups()\n if new_data:\n name, param, data = self.current_group\n data += ' ' + new_data\n self.current_group = (name, param, data)\n else:\n self.parse_multiline = False\n else:\n self.parse_multiline = False\n if self.current_group:\n yield self.current_group\n self.current_group = []",
"def get_by_name_as_list(cls, name, token=','):\n config = Configuration.get_by_name(name) or []\n if config:\n return [item.strip() for item in config.split(token)]\n else:\n return config",
"def _config_sections(self):\n data = []\n section_data = []\n for index, line in enumerate(self.running_config):\n if self._nextline_startswith_space(index):\n section_data.append(line)\n else:\n if len(section_data) > 0:\n section_data.append(line)\n data.append(section_data)\n section_data = []\n return data",
"def main(args):\n\n with open(args.cfg_fn, 'r') as cfg_fd:\n config = cfg_fd.read().split(\"\\n\")\n\n with open(args.opt_fn, 'r') as opt_fd:\n for oline in opt_fd:\n option, value = oline.strip().split(\"=\")\n\n conf_addition = \"%s=%s\" % (option, value)\n added = False\n for line_nr, line in enumerate(config):\n if \"# %s is not set\" % option in line or \\\n \"%s=\" % option in line:\n config[line_nr] = conf_addition\n added = True\n break\n\n if not added:\n config.append(conf_addition)\n\n with open(args.cfg_fn, 'w') as cfg_fd:\n cfg_fd.write(\"\\n\".join(config))",
"def list_config():\n console = Console()\n _config = loadConfig()\n json_data = richJSON.from_data({**asdict(_config)})\n console.print(Panel(json_data, title=\"SubmarineCliConfig\"))",
"def config(name, config, edit=True):\n\n configs = []\n for entry in config:\n key = next(iter(entry.keys()))\n configs.append(_parse_config(entry[key], key))\n\n # Python auto-correct line endings\n configstext = \"\\n\".join(salt.utils.data.decode(configs))\n if edit:\n with salt.utils.files.fopen(name, \"w\") as configfile:\n configfile.write(\"# This file is managed by Salt.\\n\")\n configfile.write(salt.utils.stringutils.to_str(configstext))\n return configstext",
"def load_commands(filename):\n lines_out = []\n try:\n with open(filename, \"r\") as inFile:\n for line in inFile.readlines():\n comment = line.find(';') # Don't read comments after ';'\n if comment != -1:\n line = line[:comment]\n line = line.strip()\n if len(line) > 0: # Don't read lines of length 0.\n lines_out.append(line)\n except FileNotFoundError:\n print(f\"File {filename} not found.\")\n return lines_out",
"def parse_from_string(self, blob: str) -> None:\n\t\tself._entries = []\n\t\tis_not_set_matcher = re.compile(CONFIG_IS_NOT_SET_PATTERN)\n\t\tconfig_matcher = re.compile(CONFIG_PATTERN)\n\t\tfor line in blob.split('\\n'):\n\t\t\tline = line.strip()\n\t\t\tif not line:\n\t\t\t\tcontinue\n\t\t\telif config_matcher.match(line) or is_not_set_matcher.match(line):\n\t\t\t\tself._entries.append(KconfigEntry(line))\n\t\t\telif line[0] == '#':\n\t\t\t\tcontinue\n\t\t\telse:\n\t\t\t\traise KconfigParseError('Failed to parse: ' + line)",
"def config(ctx):\n if not ctx.invoked_subcommand:\n cfg = ctx.obj['cfg']\n for section in cfg.sections():\n print(\"[\", section, \"]\")\n for option in cfg[section]:\n print(option, \" = \", cfg[section][option])",
"def parse(self, lines):\n cur_entry = None\n indents = []\n for line in lines:\n kv_ = _key_value(line)\n if len(kv_) > 1:\n key, value = kv_\n if key.lower() == \"host\":\n cur_entry = value\n self.hosts_.add(value)\n else:\n indents.append(_indent(line))\n self.lines_.append(ConfigLine(line=line, host=cur_entry, key=key, value=value))\n else:\n self.lines_.append(ConfigLine(line=line))\n # use most popular indent as indent for file, default ' '\n counter = Counter(indents)\n popular = list(reversed(sorted(counter.items(), key=lambda e: e[1])))\n self.indent = popular[0][0] if len(popular) > 0 else ' '",
"def config(self):\n return \"\\n\".join([ c.config(True) for p, c in self.configs_ ])",
"def normalize(self, cfg):\n clean_cfg = []\n\n for line in cfg.splitlines():\n\n if not line.strip():\n # empty line\n continue\n if \"--More--\" in line:\n # pick up anything that may be included after the \"More\"\n line = line[line.find('--More--') + 8:]\n if not line.split():\n # emptied line\n continue\n if line.startswith('#'):\n continue\n if line.startswith('!'):\n continue\n if line.startswith('Current configuration'):\n continue\n if line.rstrip().endswith(\"#\"):\n continue\n if line.split()[0] in self.skip:\n continue\n if self._check_timestamps(line):\n continue\n line = self._check_special_handles(line)\n if line is None:\n continue\n\n clean_cfg.append(line.strip())\n\n return clean_cfg",
"def config(bot, event, cmd=None, *args):\n\n # consume arguments and differentiate beginning of a json array or object\n tokens = list(args)\n parameters = []\n value = []\n state = \"key\"\n for token in tokens:\n if token.startswith((\"{\", \"[\", '\"', \"'\")):\n # apparent start of json array/object, consume into a single list item\n state = \"json\"\n if state == \"key\":\n parameters.append(token)\n elif state == \"json\":\n value.append(token)\n else:\n raise ValueError(\"unknown state\")\n if value:\n parameters.append(\" \".join(value))\n\n if cmd == 'get' or cmd is None:\n config_args = list(parameters)\n value = bot.config.get_by_path(config_args) if config_args else dict(bot.config)\n\n elif cmd == 'test':\n num_parameters = len(parameters)\n text_parameters = []\n last = num_parameters - 1\n for num, token in enumerate(parameters):\n if num == last:\n try:\n json.loads(token)\n token += \" <b>(valid json)</b>\"\n except ValueError:\n token += \" <em>(INVALID)</em>\"\n text_parameters.append(str(num + 1) + \": \" + token)\n text_parameters.insert(0, \"<b>config test</b>\")\n\n if num_parameters == 1:\n text_parameters.append(_(\"<em>note: testing single parameter as json</em>\"))\n elif num_parameters < 1:\n yield from command.unknown_command(bot, event)\n return\n\n yield from bot.coro_send_message(event.conv, \"<br />\".join(text_parameters))\n return\n\n elif cmd == 'set':\n config_args = list(parameters[:-1])\n if len(parameters) >= 2:\n bot.config.set_by_path(config_args, json.loads(parameters[-1]))\n bot.config.save()\n value = bot.config.get_by_path(config_args)\n else:\n yield from command.unknown_command(bot, event)\n return\n\n elif cmd == 'append':\n config_args = list(parameters[:-1])\n if len(parameters) >= 2:\n value = bot.config.get_by_path(config_args)\n if isinstance(value, list):\n value.append(json.loads(parameters[-1]))\n bot.config.set_by_path(config_args, value)\n bot.config.save()\n else:\n value = _('append failed on non-list')\n else:\n yield from command.unknown_command(bot, event)\n return\n\n elif cmd == 'remove':\n config_args = list(parameters[:-1])\n if len(parameters) >= 2:\n value = bot.config.get_by_path(config_args)\n if isinstance(value, list):\n value.remove(json.loads(parameters[-1]))\n bot.config.set_by_path(config_args, value)\n bot.config.save()\n else:\n value = _('remove failed on non-list')\n else:\n yield from command.unknown_command(bot, event)\n return\n\n else:\n yield from command.unknown_command(bot, event)\n return\n\n if value is None:\n value = _('Parameter does not exist!')\n\n config_path = ' '.join(k for k in ['config'] + config_args)\n segments = [hangups.ChatMessageSegment('{}:'.format(config_path),\n is_bold=True),\n hangups.ChatMessageSegment('\\n', hangups.SegmentType.LINE_BREAK)]\n segments.extend(text_to_segments(json.dumps(value, indent=2, sort_keys=True)))\n yield from bot.coro_send_message(event.conv, segments)"
] | [
"0.64328206",
"0.6407235",
"0.6104706",
"0.6073341",
"0.5869535",
"0.5853029",
"0.57613",
"0.5759418",
"0.568729",
"0.56760406",
"0.5661332",
"0.56305355",
"0.5599522",
"0.5569513",
"0.55690366",
"0.5557906",
"0.55198437",
"0.54839694",
"0.5481613",
"0.5474146",
"0.5458116",
"0.5457289",
"0.54424477",
"0.54115736",
"0.54082257",
"0.53881675",
"0.53609216",
"0.53526855",
"0.53523374",
"0.53517026"
] | 0.7687394 | 0 |
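The splitlines-and-filter idiom from the positive document above, shown with an invented input so the blank-line behavior is visible:

config_str = 'flake8 .\n\npytest -x\n'
commands = [command for command in config_str.splitlines() if command]
assert commands == ['flake8 .', 'pytest -x']  # empty lines are dropped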
atexit handler for deactivating and removing local venv even if tools crash | def atexit_deactivate_venv(
venv_name,
cwd,
logger=p_logging.DEFAULT_LOGGER
): # pragma: no cover
logger.info('Cleaning up venv post-test')
logger.info('--removing venv')
try:
rm_log = local['rm']('-rf', path.join(cwd, venv_name))
logger.debug(rm_log)
except Exception:
logger.error('Unable to remove venv files post-test', exc_info=True)
# TODO: remove .egg/pycache/dist files?
logger.info('venv cleanup complete!') | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def env_cleanup(self):\n pass",
"def tear_down(self):\n self.destroy_env()\n self.dut.kill_all()",
"def teardown(self):\n self.logger.info('Tearing down file server vm')\n self.local_env.execute('uninstall', task_retries=40,\n task_retry_interval=30)",
"def destroy_env(self):\n self.dut.send_expect(\"quit\", \"# \")\n time.sleep(2)",
"def state_failsafe_exit(cfg, app, win):",
"def __exit__(self, exc_type, exc_value, traceback):\n nvmlShutdown()",
"def pytest_unconfigure() -> None: # pragma: no cover\n if PROC.exitcode is None:\n assert PROC.pid is not None # not sure if this can happen (mypy error); if it does, be explicit\n os.kill(PROC.pid, signal.SIGINT)\n PROC.join(5)\n if PROC.exitcode is None:\n PROC.kill()\n PROC.join()\n print(\"\\nServer app terminated, logs in logs/server.log\")",
"def hook (self, *args, **kwargs):\n self.launch([\"--fastexit\"])",
"def cleanup():\n logger.critical(\"Program termination cleanup routine executing.\")\n # Using os._exit() to fix a bug in subprocess.popen that causes the\n # interpreter to hang after on regular sys.exit, exit, or quit call.\n os._exit(0)",
"def unload(args):\n subprocess.check_call([\"/bin/launchctl\", \"unload\"] + values.get(args))",
"def cli_teardown(argv):\n parser = argparse.ArgumentParser(\n prog=\"bazel_bf teardown\",\n description=\"Tear down the remote environment entirely.\")\n parser.add_argument(\"--force\", action='store_true')\n\n args = parser.parse_args(argv)\n\n lambda_config = config.read_config()\n\n if not args.force:\n print \"Configuration is: \" + json.dumps(\n lambda_config, indent=2, sort_keys=True)\n sys.stdout.write(\"Confirm tearing down the remote environment? [yes/No] \")\n choice = raw_input().lower()\n if choice == \"yes\":\n print \"Proceeding...\"\n else:\n raise CommandLineException(\"Abort!\")\n\n (next_lambda_config, err) = setup.teardown(lambda_config)\n config.write_config(next_lambda_config)\n\n if err:\n raise CommandLineException(\n \"Errors were encountered during tear down: \" +\n \"some resources might not have been properly deleted\")",
"def stop_and_restart():\n updater.stop()\n os.execl(sys.executable, sys.executable, *sys.argv)",
"def exit(self):\n self.runtime.halted = True",
"def shutdown(self):\n self.exit_app()",
"def stop_and_restart():\n updater.stop()\n os.execl(sys.executable, sys.executable, *sys.argv)",
"def cleanupAtExit():\n \n global client\n \n client.stop()",
"def cleanup_at_exit():\n \n global ifW\n \n if ifW:\n if logger: logger.info('Cleaning up at exit')\n if ifW._secondary_if and ifW._secondary_if.ifname:\n if logger: logger.debug('De-configuring interface ' + ifW._secondary_if.ifname)\n ifW.if_destroyed(ifW._secondary_if.ifname)",
"def on_exit(self):\n pass",
"def state_finish_exit(cfg, app, win):",
"def _clean_up():\n from tests.util import report\n report.update()\n if MAIN_RUNNER is not None:\n MAIN_RUNNER.on_exit()\n from tests.util.services import get_running_services\n for service in get_running_services():\n sys.stderr.write(\"Stopping service \")\n for c in service.cmd:\n sys.stderr.write(c + \" \")\n sys.stderr.write(\"...\\n\\r\")\n service.stop()",
"def cleanup():\n management.call_command('cleanup')",
"def graceful_exit(*args, **kwargs):\n if updater is not None:\n updater.bot.delete_webhook()\n\n sys.exit(1)",
"def stop():\n _with_deploy_env(['./bin/paster serve src/remix/oerpub/rhaptoslabs/production.ini --stop-daemon'])",
"def config_exit(self):\n self._master.destroy()\n self._config_status = False # ensure the world wouldn't be built",
"def DeleteEnv(self):\n if len(self.input.get()) > 0:\n VirtualEnvApp().EndEnv(self.input.get())\n self.restart_program()\n else:\n messagebox.showinfo('Error', 'Please Enter the Name of the ENV')",
"def local_uninstall(environment):\n environment.remove_cleanup(\n environment.cfy.local.execute,\n args=['uninstall'],\n )\n result = environment.cfy.local.execute('uninstall')\n assert result['returncode'] == 0, (\n 'Uninstall workflow failed!'\n )",
"def shiva_the_destroyer():\n with settings(warn_only=True):\n run('rm -Rf %(path)s' % env)\n run('rm -Rf %(log_path)s' % env)\n sudo('rm %(apache_config_path)s' % env)\n reboot()",
"def done(self, env):\n del env\n return False",
"def __exit__(self, exec_type, exec_value, traceback):\n #TODO: probably should shut down the visualization server component gracefully here",
"def exitprogram():\n sys.exit()"
] | [
"0.6747679",
"0.6670127",
"0.66516036",
"0.6634058",
"0.6194183",
"0.6155125",
"0.6096577",
"0.6070269",
"0.6064456",
"0.60605145",
"0.6057598",
"0.60506946",
"0.6045445",
"0.6012506",
"0.6011123",
"0.59959084",
"0.598898",
"0.5965664",
"0.59515667",
"0.5941659",
"0.59395814",
"0.5921367",
"0.5902089",
"0.58972305",
"0.589131",
"0.58656675",
"0.586519",
"0.5843614",
"0.5836605",
"0.5826082"
] | 0.7242372 | 0 |
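A self-contained sketch of how such a cleanup function is typically wired up with the standard library: atexit.register binds the arguments at registration time, and the handler fires on normal interpreter shutdown and after uncaught exceptions (though not on SIGKILL or os._exit). The venv name and path here are invented.

import atexit
import shutil
from os import path

def remove_venv(venv_name, cwd):
    # best-effort removal; ignore_errors mirrors the broad except in the record above
    shutil.rmtree(path.join(cwd, venv_name), ignore_errors=True)

atexit.register(remove_venv, 'dummy_venv', '/tmp')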
Test that the extension validation is working properly | def test_extensions(self):
field = TypedFileField(required=False, ext_whitelist=self.good_extensions)
for ext in self.good_extensions:
name = 'somefooname.%s' % ext
file = UploadedFile(name=name, size=1)
assert field.clean(file) is file
for ext in self.bad_extensions:
name = 'somefooname.%s' % ext
file = UploadedFile(name=name, size=1)
with pytest.raises(forms.ValidationError):
field.clean(file) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_validate(self):\n pass",
"def test_validators():",
"def validate_extension(extension):\n\n error_flag = 0\n error_string = ''\n\n if isinstance(extension, dict):\n try:\n schema = jsonref.load_uri(extension['extension_schema'])\n try:\n print(\"Loaded Extension Schema: \", schema['title'])\n name = schema['title']\n error_string, error_flag = bco_validator(schema, extension)\n\n # For if the schema has no ['title']\n except KeyError:\n print(\"Loaded Extension Schema: \", schema['$id'])\n name = schema['$id']\n\n except json.decoder.JSONDecodeError:\n print('Failed to load extension schema', schema['$id'])\n error_flag += 1\n\n except TypeError:\n print('Failed to load extension schema. \\nInvalid format ', )\n print(extension)\n error_string += json.dumps(extension)\n error_flag += 1\n\n else:\n print('Invalid BCO extension format')\n error_string += json.dumps(extension)\n error_flag = 1\n\n if error_flag == 0:\n print(name + ' PASSED \\U0001F44D')\n return error_string, error_flag",
"def test_get_built_in_extension(self):\n\n spec = {\n '$ext': {\n \"function\": \"random_string\",\n \"extra_args\": [4]\n }\n }\n\n validate_extensions(spec, None, None)",
"def _validate_extension(self, extension, allowed_extensions):\n if extension not in allowed_extensions:\n raise LiveDocxError(\"That format isn't allowed - please pick one of these: %s\" % (','.join(self.ALLOWED_TEMPLATE_EXT))\n\nclass LiveDocxError(Exception):",
"def test_11_is_allowed_file_correct_ext(self):\n\n for ext in list(ALLOWED_EXTENSIONS):\n filename = f\"somename.{ext}\"\n is_allowed = utils.is_allowed_file(filename)\n self.assertTrue(is_allowed)",
"def test_preference_extension_regex():\n\n assert not _do_test_raw('\"chrome://mozapps/skin/extensions/update1.png\"').failed()\n assert _do_test_raw('\"foo.extensions.update.bar\"').failed()",
"def test_get_extension(self):\n\n spec = {\n \"$ext\": {\n \"function\": \"operator:add\",\n }\n }\n\n validate_extensions(spec, None, None)",
"def test_allowed_file(self):\r\n u = Uploader()\r\n for ext in u.allowed_extensions:\r\n # Change extension to uppercase to check that it works too\r\n filename = 'test.%s' % ext.upper()\r\n err_msg = (\"This file: %s should be allowed, but it failed\"\r\n % filename)\r\n assert u.allowed_file(filename) is True, err_msg\r\n\r\n err_msg = \"Non allowed extensions should return false\"\r\n assert u.allowed_file('wrong.pdf') is False, err_msg",
"def validate():",
"def test_validate_and_write_extended_validation(req):\n handle = StringIO()\n req.get('http://fake/', text=u'>foo\\nMAGIC')\n r = requests.get('http://fake/')\n config = core.Config(extended_validation='loads', molecule='protein')\n core._validate_and_write(r, handle, 'FAKE', config)\n\n assert handle.getvalue() == u'>foo\\nMAGIC'",
"def validate_file_extension(value, valid_extensions):\n if not value.name.split(\".\")[-1] in valid_extensions:\n raise ValidationError(\"Invalid File Extension.\")",
"def _sanityCheckEMSExtension(other):\n if other.useExtendedMasterSecret not in (True, False):\n raise ValueError(\"useExtendedMasterSecret must be True or False\")\n if other.requireExtendedMasterSecret not in (True, False):\n raise ValueError(\"requireExtendedMasterSecret must be True \"\n \"or False\")\n if other.requireExtendedMasterSecret and \\\n not other.useExtendedMasterSecret:\n raise ValueError(\"requireExtendedMasterSecret requires \"\n \"useExtendedMasterSecret\")",
"def test_validate_file_extension_json(self):\n data_locations = open(self.test_dir + 'mannheim_short.json',\n encoding='utf-8')\n data_locations_false = open(self.test_dir + 'contacts.csv',\n encoding='utf-8')\n a = validate_file_extension_json(data_locations)\n self.assertEqual(a, None)\n with self.assertRaises(ValidationError) as context:\n validate_file_extension_json(data_locations_false)\n data_locations.close()\n data_locations_false.close()\n self.assertTrue(\"Kein gültiges JSON-File\" or \"No valid JSON file\" in\n str(context.exception))",
"def test_validation(self):\n self.validationFails()",
"def __validate():\n # TODO: implement",
"def check_validity(self):",
"def test_10_is_allowed_file_wrong_ext(self):\n\n filename = \"somename.pdf\"\n is_allowed = utils.is_allowed_file(filename)\n self.assertFalse(is_allowed)",
"def test_kyc_get_validation_legal(self):\n pass",
"def test_dcm_extension_validation(self):\n\n file_name = self.image.dcm.name\n extension = DjangoDicomConfig.data_extension\n self.image.dcm.name = file_name.replace(extension, \".abc\")\n with self.assertRaises(ValidationError):\n self.image.full_clean()",
"def _check_extension(self):\n if self.extension in Config.override_ext:\n expected_mimetype = Config.override_ext[self.extension]\n else:\n expected_mimetype, encoding = mimetypes.guess_type(self.src_path,\n strict=False)\n if expected_mimetype in Config.aliases:\n expected_mimetype = Config.aliases[expected_mimetype]\n is_known_extension = self.extension in mimetypes.types_map.keys()\n if is_known_extension and expected_mimetype != self.mimetype:\n # LOG: improve this string\n self.make_dangerous('expected_mimetype')",
"def validate(self):",
"def validate(self):",
"def test_makeExtension(self):\n try:\n markdown.Markdown(extensions=[\"regdown\"])\n except AttributeError as e: # pragma: no cover\n self.fail(\n \"Markdown failed to load regdown extension: \"\n \"{}\".format(e.message)\n )",
"def test_badge_should_have_extensions(self):\n\n badge = self.get_sample_badge()\n self.assertTrue(hasattr(badge, 'extensions'))",
"def test_both(self):\n field = TypedFileField(required=False,\n ext_whitelist=self.good_extensions,\n type_whitelist=self.good_types,\n use_magic=False)\n\n for ext in self.good_extensions:\n name = 'somefooname.%s' % ext\n\n for t in self.good_types:\n file = UploadedFile(name=name, size=1, content_type=t)\n assert field.clean(file) is file\n\n for t in self.bad_types:\n file = UploadedFile(name=name, size=1, content_type=t)\n with pytest.raises(forms.ValidationError):\n field.clean(file)\n\n for ext in self.bad_extensions:\n name = 'somefooname.%s' % ext\n\n for t in self.good_types:\n file = UploadedFile(name=name, size=1, content_type=t)\n with pytest.raises(forms.ValidationError):\n field.clean(file)\n\n for t in self.bad_types:\n file = UploadedFile(name=name, size=1, content_type=t)\n with pytest.raises(forms.ValidationError):\n field.clean(file)",
"def test_get_filename_extension(self):\r\n u = Uploader()\r\n filename = \"image.png\"\r\n err_msg = \"The extension should be PNG\"\r\n assert u.get_filename_extension(filename) == 'png', err_msg\r\n filename = \"image.jpg\"\r\n err_msg = \"The extension should be JPEG\"\r\n assert u.get_filename_extension(filename) == 'jpeg', err_msg\r\n filename = \"imagenoextension\"\r\n err_msg = \"The extension should be None\"\r\n assert u.get_filename_extension(filename) == None, err_msg",
"def test_get_extension(self):\r\n expectedyoutube = 'video/youtube'\r\n expectednotyoutube = 'video/mp4'\r\n result1 = self.mod._get_extension(self.sample_sourceurl) # pylint: disable=W0212\r\n result2 = self.mod._get_extension(self.sample_youtubeurl) # pylint: disable=W0212\r\n self.assertEqual(expectedyoutube, result2)\r\n self.assertEqual(expectednotyoutube, result1)",
"def test_kyc_get_validation(self):\n pass",
"def test_extension_json():\n path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'schema', 'extension-schema.json')\n if os.path.isfile(path):\n with open(path) as f:\n schema = json.load(f)\n else:\n url = 'https://raw.githubusercontent.com/open-contracting/standard-maintenance-scripts/main/schema/extension-schema.json' # noqa: E501\n schema = http_get(url).json()\n\n expected_codelists = {name for _, name, _, _, _ in\n walk_csv_data(top=os.path.join(extensiondir, 'codelists'))}\n expected_schemas = {name for _, name, _, _ in\n walk_json_data(patch, top=extensiondir) if name.endswith('-schema.json')}\n\n path = os.path.join(extensiondir, 'extension.json')\n if os.path.isfile(path):\n with open(path) as f:\n data = json.load(f, object_pairs_hook=rejecting_dict)\n\n validate_json_schema(path, 'extension.json', data, schema)\n\n urls = data.get('dependencies', []) + data.get('testDependencies', [])\n for url in urls:\n try:\n status_code = http_head(url).status_code\n except requests.exceptions.ConnectionError as e:\n assert False, f'{e} on {url}'\n else:\n assert status_code == 200, f'HTTP {status_code} on {url}'\n\n urls = list(data['documentationUrl'].values())\n for url in urls:\n try:\n status_code = http_get(url).status_code # allow redirects\n except requests.exceptions.ConnectionError as e:\n assert False, f'{e} on {url}'\n else:\n assert status_code == 200, f'HTTP {status_code} on {url}'\n\n actual_codelists = set(data.get('codelists', []))\n if actual_codelists != expected_codelists:\n added, removed = difference(actual_codelists, expected_codelists)\n assert False, f'{path} has mismatch with codelists{added}{removed}'\n\n actual_schemas = set(data.get('schemas', []))\n if actual_schemas != expected_schemas:\n added, removed = difference(actual_schemas, expected_schemas)\n assert False, f'{path} has mismatch with schema{added}{removed}'\n else:\n # This code is never reached, as the test is only run if there is an extension.json file.\n assert False, 'expected an extension.json file'"
] | [
"0.7298474",
"0.69515324",
"0.6950843",
"0.68759197",
"0.68593514",
"0.68426394",
"0.6806111",
"0.6804598",
"0.6562722",
"0.65436846",
"0.65347743",
"0.65035516",
"0.6489072",
"0.6402878",
"0.6388997",
"0.6346691",
"0.63387173",
"0.6312984",
"0.62796175",
"0.62742496",
"0.62511235",
"0.6248542",
"0.6248542",
"0.622795",
"0.62125057",
"0.61988544",
"0.61896884",
"0.61865985",
"0.6186477",
"0.6161799"
] | 0.75316143 | 0 |
Test that the mimetypes are validated correctly | def test_mimetypes(self):
field = TypedFileField(required=False, type_whitelist=self.good_types, use_magic=False)
for t in self.good_types:
name = 'somefooname'
file = UploadedFile(name=name, size=1, content_type=t)
assert field.clean(file) is file
for t in self.bad_types:
name = 'somefooname'
file = UploadedFile(name=name, size=1, content_type=t)
with pytest.raises(forms.ValidationError):
field.clean(file) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_mimetypes_magic(self, mock_get_content_type):\n\n def get_content_type(value):\n return value.content_type\n\n mock_get_content_type.side_effect = get_content_type\n\n field = TypedFileField(required=False, type_whitelist=self.good_types, use_magic=True)\n\n for t in self.good_types:\n name = 'somefooname'\n file = UploadedFile(name=name, size=1, content_type=t)\n assert field.clean(file) is file\n\n for t in self.bad_types:\n name = 'somefooname'\n file = UploadedFile(name=name, size=1, content_type=t)\n with pytest.raises(forms.ValidationError):\n field.clean(file)",
"def test_no_mimetype_magic(self, mock_get_content_type):\n mock_get_content_type.side_effect = ValueError\n\n field = TypedFileField(required=False, type_whitelist=self.good_types)\n\n for t in self.good_types:\n name = 'somefooname'\n file = UploadedFile(name=name, size=1, content_type=t)\n with pytest.raises(forms.ValidationError):\n field.clean(file)",
"def _check_mimetype(self):\n if self.mimetype in Config.aliases:\n mimetype = Config.aliases[self.mimetype]\n else:\n mimetype = self.mimetype\n expected_extensions = mimetypes.guess_all_extensions(mimetype,\n strict=False)\n if expected_extensions:\n if self.has_extension and self.extension not in expected_extensions:\n # LOG: improve this string\n self.make_dangerous('expected extensions')",
"def test_no_mimetype(self):\n field = TypedFileField(required=False, type_whitelist=self.good_types, use_magic=False)\n\n for t in self.good_types:\n name = 'somefooname'\n file = UploadedFile(name=name, size=1, content_type=t)\n del file.content_type\n with pytest.raises(forms.ValidationError):\n field.clean(file)",
"def test_extensions(self):\n field = TypedFileField(required=False, ext_whitelist=self.good_extensions)\n\n for ext in self.good_extensions:\n name = 'somefooname.%s' % ext\n file = UploadedFile(name=name, size=1)\n assert field.clean(file) is file\n\n for ext in self.bad_extensions:\n name = 'somefooname.%s' % ext\n file = UploadedFile(name=name, size=1)\n with pytest.raises(forms.ValidationError):\n field.clean(file)",
"def test_extension_to_content_type(self):\n assert ct.extension_to_content_type(\"jpg\") == \"image/jpg\"\n assert ct.extension_to_content_type(\"jpeg\") == \"image/jpg\"\n assert ct.extension_to_content_type(\"png\") == \"image/png\"\n ct.extension_to_content_type(\"css\",) == \"text/css\"\n ct.extension_to_content_type(\"html\") == \"text/html\"\n ct.extension_to_content_type(\"json\") == \"application/json\"\n ct.extension_to_content_type(\"xml\") == \"application/xml\"\n ct.extension_to_content_type(\"zip\") == \"application/zip\"",
"def test_content_type_to_extension(self):\n assert ct.content_type_to_extension(\"image/jpg\") == \"jpg\"\n assert ct.content_type_to_extension(\"image/jpeg\") == \"jpg\"\n assert ct.content_type_to_extension(\"image/png\",) == \"png\"\n assert ct.content_type_to_extension(\"text/css\",) == \"css\"\n assert ct.content_type_to_extension(\"text/html\") == \"html\"\n assert ct.content_type_to_extension(\"text/css\") == \"css\"\n assert ct.content_type_to_extension(\"application/json\") == \"json\"\n assert ct.content_type_to_extension(\"application/xml\") == \"xml\"\n assert ct.content_type_to_extension(\"application/zip\") == \"zip\"",
"def getMimeTypes(self): #$NON-NLS-1$\r",
"def test_general_subset_file_type():\n pass",
"def test_both(self):\n field = TypedFileField(required=False,\n ext_whitelist=self.good_extensions,\n type_whitelist=self.good_types,\n use_magic=False)\n\n for ext in self.good_extensions:\n name = 'somefooname.%s' % ext\n\n for t in self.good_types:\n file = UploadedFile(name=name, size=1, content_type=t)\n assert field.clean(file) is file\n\n for t in self.bad_types:\n file = UploadedFile(name=name, size=1, content_type=t)\n with pytest.raises(forms.ValidationError):\n field.clean(file)\n\n for ext in self.bad_extensions:\n name = 'somefooname.%s' % ext\n\n for t in self.good_types:\n file = UploadedFile(name=name, size=1, content_type=t)\n with pytest.raises(forms.ValidationError):\n field.clean(file)\n\n for t in self.bad_types:\n file = UploadedFile(name=name, size=1, content_type=t)\n with pytest.raises(forms.ValidationError):\n field.clean(file)",
"def test_invalid_filetype(self):\n rv = self.post('/queue/',\n content={'image': (StringIO('This is not an image'),\n 'text.txt')},\n token=self.user_token)\n self.assertJSONError(rv, 'TagalleryInvalidFileExtension')\n return",
"def test_available_input_formats():\n assert set([\"Mapchete\", \"raster_file\", \"vector_file\"]).issubset(\n set(available_input_formats())\n )",
"def check_eligible_mimetype(self, ctype, uid):\n self.helper.log_debug(\n 'check_eligible_mimtype: checking content-type %s of msg uid %s' %\n (ctype, uid))\n if ctype == \"application/zip\":\n return True\n elif ctype == \"application/gzip\":\n return True\n elif ctype == \"application/x-gzip\":\n return True\n elif ctype == \"application/octet-stream\":\n # Non-standard mimetype used by Amazon SES dmarc reports\n return True\n elif ctype == \"application-x-gzip\":\n # Non-standard mimetype used by Comcast dmarc reports\n return True\n elif ctype == \"application/x-zip-compressed\":\n # Non-standard mimetype used by Yahoo dmarc reports\n return True\n elif ctype == \"application/xml\":\n return True\n elif ctype == \"text/xml\":\n return True\n else:\n self.helper.log_debug(\n 'check_eligible_mimtype: skipping content-type %s of msg uid %s' %\n (ctype, uid))\n return False",
"def secure_filetype(file):\n ext_list = ['png', 'jpg', 'jpeg']\n ext_valid = file.filename.split('.')[-1] in ext_list\n\n mimetype_list = [\"image/jpeg\", \"image/jpg\", \"image/png\"]\n mimetype_valid = file.mimetype in mimetype_list\n\n return ext_valid and mimetype_valid",
"def test_allowed_file(self):\r\n u = Uploader()\r\n for ext in u.allowed_extensions:\r\n # Change extension to uppercase to check that it works too\r\n filename = 'test.%s' % ext.upper()\r\n err_msg = (\"This file: %s should be allowed, but it failed\"\r\n % filename)\r\n assert u.allowed_file(filename) is True, err_msg\r\n\r\n err_msg = \"Non allowed extensions should return false\"\r\n assert u.allowed_file('wrong.pdf') is False, err_msg",
"def test_invalid_file_type(barred_tac_list_importer):\n expect_failure(barred_tac_list_importer, exc_message='Wrong suffix')",
"def _check_extension(self):\n if self.extension in Config.override_ext:\n expected_mimetype = Config.override_ext[self.extension]\n else:\n expected_mimetype, encoding = mimetypes.guess_type(self.src_path,\n strict=False)\n if expected_mimetype in Config.aliases:\n expected_mimetype = Config.aliases[expected_mimetype]\n is_known_extension = self.extension in mimetypes.types_map.keys()\n if is_known_extension and expected_mimetype != self.mimetype:\n # LOG: improve this string\n self.make_dangerous('expected_mimetype')",
"def test_mime_lookup(self):\n mime_out_test_path = os.path.join(THIS_DIR, 'file-blobs.out')\n mime_lookup = MimeLookup(mime_out_test_path)\n self.assertEqual(mime_lookup.get_entry_count(), 5)\n self.assertEqual(mime_lookup.get_mime_string('4b11cb448cab68470c546bc52220b01fbc4572f7'),\n 'image/png; charset=binary')\n self.assertEqual(mime_lookup.get_mime_string('f8fa2aa81a623f9847436c5162d4e775e04cd948'),\n 'text/plain; charset=us-ascii')\n self.assertEqual(mime_lookup.get_mime_string('9f422292259b59ee6c9ad7a25180b0afc16f47e9'),\n LONG_MIME)\n self.assertEqual(mime_lookup.get_mime_string('d1717e616fdae20110acb51b3ba3a37350628131'),\n 'application/pdf; charset=binary')\n self.assertEqual(mime_lookup.get_mime_string('a7510ac5483396687bf670860f48d21eecede68a'),\n 'application/zip; charset=binary')",
"def test_fetch_or_create_requires_file_type():\n pytest.raises(ValueError, media.fetch_or_create_media_item, b'spam')",
"def check_file_type(fname):\n ext = path.splitext(fname)[1]\n return ext in allowed_extensions",
"def validate_image_type(filename: str) -> bool:\n supported_extensions = (\"png\", \"jpg\", \"jpeg\")\n return (filename not in (None, \"\")) and (get_extension(filename) in supported_extensions)",
"def valid_media_type(media_type):\n return media_type in ACCEPTED_MEDIA_TYPES",
"def match_mime_type(self, src: str):\n for key in self.keys():\n if Pattern.test(key, src):\n return self[key]\n return \"text/plain\"",
"def test_11_is_allowed_file_correct_ext(self):\n\n for ext in list(ALLOWED_EXTENSIONS):\n filename = f\"somename.{ext}\"\n is_allowed = utils.is_allowed_file(filename)\n self.assertTrue(is_allowed)",
"def test_get_file_type(self):\n file_list = {'events': 'monol_testA_nustar_fpma_ev',\n 'lc': 'monol_testA_E3-50_lc',\n 'pds': 'monol_testA_E3-50_pds',\n 'gti': 'monol_testA_E3-50_rebin4_gti',\n 'cpds': 'monol_test_E3-50_cpds'}\n for realtype in file_list.keys():\n fname = os.path.join(self.datadir,\n file_list[realtype] + HEN_FILE_EXTENSION)\n ftype, _ = hen.io.get_file_type(fname)\n assert ftype == realtype, \"File types do not match\"",
"def is_accept_type(file_name):\n bare_name, file_extension = os.path.splitext(file_name)\n for ext in ACCEPTED_FILES:\n if file_extension.lower() == ext:\n return True\n return False",
"def allowed_file_type(file_name):\n\treturn file_name.lower().endswith(ALLOWED_FILE_TYPES)",
"def validFiles(self, files):\n for myfile in files:\n if not ( ( myfile.get_uri_scheme() == 'file' ) or \\\n ( myfile.get_uri_scheme() == 'smb' ) ):\n return False\n elif ( not myfile.get_mime_type() in self.oootypes ) and \\\n ( not myfile.get_mime_type() in self.plaintypes ):\n return False\n return True",
"def getMimeTypeFileExtensions(mimeType):\n #getMimeTypeFileExtensions body\n\n if mimeType == applicationzlib:\n return [ \"zz\" ]\n\n if mimeType == applicationzstd:\n return [ \"zst\" ]\n\n if mimeType == applicationxzoo:\n return [ \"zoo\" ]\n\n if mimeType == applicationvndhandheldentertainment_xml:\n return [ \"zmm\" ]\n\n if mimeType == applicationvndzul:\n return [ \"zir\", \"zirz\" ]\n\n if mimeType == applicationzip:\n return [ \"zip\", \"zipx\" ]\n\n if mimeType == applicationxopenzim:\n return [ \"zim\" ]\n\n if mimeType == applicationvndzzazzdeck_xml:\n return [ \"zaz\" ]\n\n if mimeType == applicationxzmachine:\n return [ \"z1\", \"z2\", \"z3\", \"z4\", \"z5\", \"z6\", \"z7\", \"z8\" ]\n\n if mimeType == applicationxcompress:\n return [ \"z\" ]\n\n if mimeType == videovndyoutubeyt:\n return [ \"yt\" ]\n\n if mimeType == textxsuseymp:\n return [ \"ymp\" ]\n\n if mimeType == applicationyin_xml:\n return [ \"yin\" ]\n\n if mimeType == applicationyang:\n return [ \"yang\" ]\n\n if mimeType == applicationxyaml:\n return [ \"yaml\", \"yml\" ]\n\n if mimeType == applicationxxz:\n return [ \"xz\" ]\n\n if mimeType == chemicalxxyz:\n return [ \"xyz\" ]\n\n if mimeType == imagexxwindowdump:\n return [ \"xwd\" ]\n\n if mimeType == applicationvndmozillaxul_xml:\n return [ \"xul\" ]\n\n if mimeType == applicationxspf_xml:\n return [ \"xspf\" ]\n\n if mimeType == applicationvndsyncml_xml:\n return [ \"xsm\" ]\n\n if mimeType == applicationxslt_xml:\n return [ \"xsl\", \"xslt\" ]\n\n if mimeType == applicationprsxsf_xml:\n return [ \"xsf\" ]\n\n if mimeType == applicationvndinterconformnet:\n return [ \"xpw\", \"xpx\" ]\n\n if mimeType == applicationvndmsxpsdocument:\n return [ \"xps\" ]\n\n if mimeType == applicationvndisxpr:\n return [ \"xpr\" ]\n\n if mimeType == imagexxpixmap:\n return [ \"xpm\" ]\n\n if mimeType == applicationxproc_xml:\n return [ \"xpl\" ]\n\n if mimeType == applicationxxpinstall:\n return [ \"xpi\" ]\n\n if mimeType == applicationxop_xml:\n return [ \"xop\" ]\n\n if mimeType == applicationvndolpcsugar:\n return [ \"xo\" ]\n\n if mimeType == applicationxcapns_xml:\n return [ \"xns\" ]\n\n if mimeType == applicationxml:\n return [ \"xml\", \"xbl\", \"xsd\", \"rng\" ]\n\n if mimeType == textxxmi:\n return [ \"xmi\" ]\n\n if mimeType == audioxxmf:\n return [ \"xmf\" ]\n\n if mimeType == audioxxm:\n return [ \"xm\" ]\n\n if mimeType == applicationvndopenxmlformatsofficedocumentspreadsheetmltemplate:\n return [ \"xltx\" ]\n\n if mimeType == applicationvndmsexceltemplatemacroenabled12:\n return [ \"xltm\" ]\n\n if mimeType == applicationvndopenxmlformatsofficedocumentspreadsheetmlsheet:\n return [ \"xlsx\" ]\n\n if mimeType == applicationvndmsexcelsheetmacroenabled12:\n return [ \"xlsm\" ]\n\n if mimeType == applicationvndmsexcelsheetbinarymacroenabled12:\n return [ \"xlsb\" ]\n\n if mimeType == applicationvndmsexcel:\n return [ \"xls\", \"xlc\", \"xll\", \"xlm\", \"xlw\", \"xla\", \"xlt\", \"xld\" ]\n\n if mimeType == applicationxliff_xml:\n return [ \"xlf\", \"xliff\" ]\n\n if mimeType == applicationvndmsexceladdinmacroenabled12:\n return [ \"xlam\" ]\n\n if mimeType == imagevndxiff:\n return [ \"xif\" ]\n\n if mimeType == audioxxi:\n return [ \"xi\" ]\n\n if mimeType == applicationxhtml_xml:\n return [ \"xhtml\", \"xht\", \"html\", \"htm\" ]\n\n if mimeType == applicationvndpwgxhtmlprint_xml:\n return [ \"xhtm\" ]\n\n if mimeType == applicationvndxfdl:\n return [ \"xfdl\" ]\n\n if mimeType == applicationvndadobexfdf:\n return [ \"xfdf\" ]\n\n if mimeType == 
applicationpatchopserror_xml:\n return [ \"xer\" ]\n\n if mimeType == applicationxenc_xml:\n return [ \"xenc\" ]\n\n if mimeType == applicationxcapel_xml:\n return [ \"xel\" ]\n\n if mimeType == applicationvndfujixeroxdocuworks:\n return [ \"xdw\" ]\n\n if mimeType == applicationdssc_xml:\n return [ \"xdssc\" ]\n\n if mimeType == applicationvndadobexdp_xml:\n return [ \"xdp\" ]\n\n if mimeType == applicationvndsyncmldm_xml:\n return [ \"xdm\" ]\n\n if mimeType == applicationxcapdiff_xml:\n return [ \"xdf\" ]\n\n if mimeType == applicationcalendar_xml:\n return [ \"xcs\" ]\n\n if mimeType == imagexcompressedxcf:\n return [ \"xcfgz\", \"xcfbz2\" ]\n\n if mimeType == imagexxcf:\n return [ \"xcf\" ]\n\n if mimeType == applicationxcapcaps_xml:\n return [ \"xca\" ]\n\n if mimeType == imagexxbitmap:\n return [ \"xbm\" ]\n\n if mimeType == applicationxxbel:\n return [ \"xbel\" ]\n\n if mimeType == applicationvndfujixeroxdocuworksbinder:\n return [ \"xbd\" ]\n\n if mimeType == applicationxmsxbap:\n return [ \"xbap\" ]\n\n if mimeType == applicationxcapatt_xml:\n return [ \"xav\" ]\n\n if mimeType == applicationxxar:\n return [ \"xar\", \"pkg\" ]\n\n if mimeType == applicationxsilverlightapp:\n return [ \"xap\" ]\n\n if mimeType == applicationxaml_xml:\n return [ \"xaml\" ]\n\n if mimeType == imagexsigmax3f:\n return [ \"x3f\" ]\n\n if mimeType == modelx3d_vrml:\n return [ \"x3dv\", \"x3dvz\" ]\n\n if mimeType == modelx3d_binary:\n return [ \"x3db\", \"x3dbz\" ]\n\n if mimeType == modelx3d_xml:\n return [ \"x3d\", \"x3dz\" ]\n\n if mimeType == modelvndparasolidtransmittext:\n return [ \"x_t\" ]\n\n if mimeType == modelvndparasolidtransmitbinary:\n return [ \"x_b\" ]\n\n if mimeType == applicationxwwf:\n return [ \"wwf\" ]\n\n if mimeType == audioxwavpackcorrection:\n return [ \"wvc\" ]\n\n if mimeType == audioxwavpack:\n return [ \"wv\", \"wvp\" ]\n\n if mimeType == applicationvndwebturbo:\n return [ \"wtb\" ]\n\n if mimeType == applicationwspolicy_xml:\n return [ \"wspolicy\" ]\n\n if mimeType == applicationwsdl_xml:\n return [ \"wsdl\" ]\n\n if mimeType == applicationxwonderswancolorrom:\n return [ \"wsc\" ]\n\n if mimeType == applicationxwonderswanrom:\n return [ \"ws\" ]\n\n if mimeType == applicationxmswrite:\n return [ \"wri\" ]\n\n if mimeType == applicationvndwqd:\n return [ \"wqd\" ]\n\n if mimeType == applicationvndmswpl:\n return [ \"wpl\" ]\n\n if mimeType == applicationxwpg:\n return [ \"wpg\" ]\n\n if mimeType == applicationvndwordperfect:\n return [ \"wp\", \"wp4\", \"wp5\", \"wp6\", \"wpd\", \"wpp\" ]\n\n if mimeType == fontwoff2:\n return [ \"woff2\" ]\n\n if mimeType == fontwoff:\n return [ \"woff\" ]\n\n if mimeType == applicationxmswmz:\n return [ \"wmz\" ]\n\n if mimeType == videoxmswmv:\n return [ \"wmv\" ]\n\n if mimeType == applicationvndwapwmlscriptc:\n return [ \"wmlsc\" ]\n\n if mimeType == textvndwapwmlscript:\n return [ \"wmls\" ]\n\n if mimeType == applicationvndwapwmlc:\n return [ \"wmlc\" ]\n\n if mimeType == textvndwapwml:\n return [ \"wml\" ]\n\n if mimeType == imagewmf:\n return [ \"wmf\" ]\n\n if mimeType == applicationxmswmd:\n return [ \"wmd\" ]\n\n if mimeType == audioxmswma:\n return [ \"wma\" ]\n\n if mimeType == videoxmswm:\n return [ \"wm\" ]\n\n if mimeType == applicationxpartialdownload:\n return [ \"wkdownload\", \"crdownload\", \"part\" ]\n\n if mimeType == applicationxmswim:\n return [ \"wim\", \"swm\" ]\n\n if mimeType == applicationwatcherinfo_xml:\n return [ \"wif\" ]\n\n if mimeType == applicationwidget:\n return [ \"wgt\" ]\n\n if mimeType == 
applicationvndpmiwidget:\n return [ \"wg\" ]\n\n if mimeType == imagewebp:\n return [ \"webp\" ]\n\n if mimeType == applicationmanifest_json:\n return [ \"webmanifest\" ]\n\n if mimeType == videowebm:\n return [ \"webm\" ]\n\n if mimeType == applicationxwebappmanifest_json:\n return [ \"webapp\" ]\n\n if mimeType == audiowebm:\n return [ \"weba\" ]\n\n if mimeType == imagevndmsphoto:\n return [ \"wdp\" ]\n\n if mimeType == applicationvndmsworks:\n return [ \"wcm\", \"wdb\", \"wps\", \"xlr\" ]\n\n if mimeType == applicationvndwapwbxml:\n return [ \"wbxml\" ]\n\n if mimeType == applicationvndcriticaltoolswbs_xml:\n return [ \"wbs\" ]\n\n if mimeType == imagevndwapwbmp:\n return [ \"wbmp\" ]\n\n if mimeType == applicationxquattropro:\n return [ \"wb1\", \"wb2\", \"wb3\" ]\n\n if mimeType == audioxwav:\n return [ \"wav\" ]\n\n if mimeType == applicationwasm:\n return [ \"wasm\" ]\n\n if mimeType == applicationjavaarchive:\n return [ \"war\", \"ear\" ]\n\n if mimeType == applicationvndsunwadl_xml:\n return [ \"wadl\" ]\n\n if mimeType == applicationxwiiwad:\n return [ \"wad\" ]\n\n if mimeType == applicationvoicexml_xml:\n return [ \"vxml\" ]\n\n if mimeType == modelvndvtu:\n return [ \"vtu\" ]\n\n if mimeType == textvtt:\n return [ \"vtt\" ]\n\n if mimeType == imagevndvalvesourcetexture:\n return [ \"vtf\" ]\n\n if mimeType == applicationvndmsvisiotemplatemain_xml:\n return [ \"vstx\" ]\n\n if mimeType == applicationvndmsvisiotemplatemacroenabledmain_xml:\n return [ \"vstm\" ]\n\n if mimeType == applicationvndmsvisiostencilmain_xml:\n return [ \"vssx\" ]\n\n if mimeType == applicationvndmsvisiostencilmacroenabledmain_xml:\n return [ \"vssm\" ]\n\n if mimeType == applicationvndvsf:\n return [ \"vsf\" ]\n\n if mimeType == applicationvndmsvisiodrawingmain_xml:\n return [ \"vsdx\" ]\n\n if mimeType == applicationvndmsvisiodrawingmacroenabledmain_xml:\n return [ \"vsdm\" ]\n\n if mimeType == applicationvndvisio:\n return [ \"vsd\", \"vst\", \"vsw\", \"vss\" ]\n\n if mimeType == modelvrml:\n return [ \"vrm\", \"vrml\", \"wrl\" ]\n\n if mimeType == applicationxvhddisk:\n return [ \"vpc\" ]\n\n if mimeType == audioxvoc:\n return [ \"voc\" ]\n\n if mimeType == applicationxvmdkdisk:\n return [ \"vmdk\" ]\n\n if mimeType == videovndvivo:\n return [ \"viv\", \"vivo\" ]\n\n if mimeType == applicationvndvisionary:\n return [ \"vis\" ]\n\n if mimeType == applicationxvhdxdisk:\n return [ \"vhdx\" ]\n\n if mimeType == textxvhdl:\n return [ \"vhd\", \"vhdl\" ]\n\n if mimeType == modelvndsapvds:\n return [ \"vds\" ]\n\n if mimeType == applicationxvdidisk:\n return [ \"vdi\" ]\n\n if mimeType == applicationvndvcx:\n return [ \"vcx\" ]\n\n if mimeType == textcalendar:\n return [ \"vcs\", \"ics\", \"ifb\" ]\n\n if mimeType == applicationvndgroovevcard:\n return [ \"vcg\" ]\n\n if mimeType == applicationxcdlink:\n return [ \"vcd\" ]\n\n if mimeType == textvcard:\n return [ \"vcard\", \"vcf\", \"vct\", \"gcrd\" ]\n\n if mimeType == textvbscript:\n return [ \"vbs\" ]\n\n if mimeType == applicationxvirtualboxvboxextpack:\n return [ \"vbox-extpack\" ]\n\n if mimeType == applicationxvirtualboxvbox:\n return [ \"vbox\" ]\n\n if mimeType == applicationxvirtualboyrom:\n return [ \"vb\" ]\n\n if mimeType == textxvala:\n return [ \"vala\", \"vapi\" ]\n\n if mimeType == textxverilog:\n return [ \"v\" ]\n\n if mimeType == applicationvnddecezip:\n return [ \"uvz\", \"uvvz\" ]\n\n if mimeType == applicationvnddeceunspecified:\n return [ \"uvx\", \"uvvx\" ]\n\n if mimeType == videovnddecevideo:\n return [ \"uvv\", \"uvvv\" ]\n\n 
if mimeType == videovnduvvump4:\n return [ \"uvu\", \"uvvu\" ]\n\n if mimeType == applicationvnddecettml_xml:\n return [ \"uvt\", \"uvvt\" ]\n\n if mimeType == videovnddecesd:\n return [ \"uvs\", \"uvvs\" ]\n\n if mimeType == videovnddecepd:\n return [ \"uvp\", \"uvvp\" ]\n\n if mimeType == videovnddecemobile:\n return [ \"uvm\", \"uvvm\" ]\n\n if mimeType == imagevnddecegraphic:\n return [ \"uvi\", \"uvvi\", \"uvg\", \"uvvg\" ]\n\n if mimeType == videovnddecehd:\n return [ \"uvh\", \"uvvh\" ]\n\n if mimeType == applicationvnddecedata:\n return [ \"uvf\", \"uvvf\", \"uvd\", \"uvvd\" ]\n\n if mimeType == audiovnddeceaudio:\n return [ \"uva\", \"uvva\" ]\n\n if mimeType == textxuuencode:\n return [ \"uue\", \"uu\" ]\n\n if mimeType == applicationvnduiqtheme:\n return [ \"utz\" ]\n\n if mimeType == applicationxustar:\n return [ \"ustar\" ]\n\n if mimeType == modelvndusdz_zip:\n return [ \"usdz\" ]\n\n if mimeType == applicationxmswinurl:\n return [ \"url\" ]\n\n if mimeType == texturilist:\n return [ \"uri\", \"uris\", \"urls\" ]\n\n if mimeType == applicationvnduoml_xml:\n return [ \"uoml\", \"uo\" ]\n\n if mimeType == applicationvndunity:\n return [ \"unityweb\" ]\n\n if mimeType == applicationvndumajin:\n return [ \"umj\" ]\n\n if mimeType == applicationxglulx:\n return [ \"ulx\" ]\n\n if mimeType == audioxmod:\n return [ \"ult\", \"uni\", \"m15\", \"mtm\", \"669\", \"med\" ]\n\n if mimeType == textxuil:\n return [ \"uil\" ]\n\n if mimeType == applicationxdesigner:\n return [ \"ui\" ]\n\n if mimeType == applicationxufraw:\n return [ \"ufraw\" ]\n\n if mimeType == applicationvndufdl:\n return [ \"ufd\", \"ufdl\" ]\n\n if mimeType == applicationubjson:\n return [ \"ubj\" ]\n\n if mimeType == messageglobal:\n return [ \"u8msg\" ]\n\n if mimeType == messageglobaldispositionnotification:\n return [ \"u8mdn\" ]\n\n if mimeType == messageglobalheaders:\n return [ \"u8hdr\" ]\n\n if mimeType == messageglobaldeliverystatus:\n return [ \"u8dsn\" ]\n\n if mimeType == modelu3d:\n return [ \"u3d\" ]\n\n if mimeType == textplain:\n return [ \"txt\", \"text\", \"conf\", \"def\", \"list\", \"in\", \"ini\" ]\n\n if mimeType == applicationvndmobiustxf:\n return [ \"txf\" ]\n\n if mimeType == applicationvndgenomatixtuxedo:\n return [ \"txd\" ]\n\n if mimeType == textxtwig:\n return [ \"twig\" ]\n\n if mimeType == applicationvndsimtechmindmapper:\n return [ \"twd\", \"twds\" ]\n\n if mimeType == applicationxfontttx:\n return [ \"ttx\" ]\n\n if mimeType == applicationttml_xml:\n return [ \"ttml\" ]\n\n if mimeType == textturtle:\n return [ \"ttl\" ]\n\n if mimeType == fontttf:\n return [ \"ttf\" ]\n\n if mimeType == fontcollection:\n return [ \"ttc\" ]\n\n if mimeType == audioxtta:\n return [ \"tta\" ]\n\n if mimeType == texttabseparatedvalues:\n return [ \"tsv\" ]\n\n if mimeType == applicationtimestampeddata:\n return [ \"tsd\" ]\n\n if mimeType == textvndtrolltechlinguist:\n return [ \"ts\" ]\n\n if mimeType == applicationxmsterminal:\n return [ \"trm\" ]\n\n if mimeType == applicationtrig:\n return [ \"trig\" ]\n\n if mimeType == applicationvndtrueapp:\n return [ \"tra\" ]\n\n if mimeType == texttroff:\n return [ \"tr\", \"roff\" ]\n\n if mimeType == applicationvndtridtpt:\n return [ \"tpt\" ]\n\n if mimeType == applicationvndgroovetooltemplate:\n return [ \"tpl\" ]\n\n if mimeType == applicationxbittorrent:\n return [ \"torrent\" ]\n\n if mimeType == applicationtoml:\n return [ \"toml\" ]\n\n if mimeType == applicationxcdrdaotoc:\n return [ \"toc\" ]\n\n if mimeType == applicationvndmstnef:\n return [ 
\"tnef\", \"tnf\", \"winmaildat\" ]\n\n if mimeType == applicationvndtmobilelivetv:\n return [ \"tmo\" ]\n\n if mimeType == imagetiff:\n return [ \"tif\", \"tiff\" ]\n\n if mimeType == applicationvndmsofficetheme:\n return [ \"thmx\" ]\n\n if mimeType == applicationxwindowsthemepack:\n return [ \"themepack\" ]\n\n if mimeType == applicationxtheme:\n return [ \"theme\" ]\n\n if mimeType == imagextga:\n return [ \"tga\", \"icb\", \"tpic\", \"vda\" ]\n\n if mimeType == imagetifffx:\n return [ \"tfx\" ]\n\n if mimeType == applicationxtextfm:\n return [ \"tfm\" ]\n\n if mimeType == applicationthraud_xml:\n return [ \"tfi\" ]\n\n if mimeType == textxtexinfo:\n return [ \"texi\", \"texinfo\" ]\n\n if mimeType == textxtex:\n return [ \"tex\", \"ltx\", \"sty\", \"cls\", \"dtx\", \"ins\", \"latex\" ]\n\n if mimeType == applicationtei_xml:\n return [ \"tei\", \"teicorpus\" ]\n\n if mimeType == applicationvndsmartteacher:\n return [ \"teacher\" ]\n\n if mimeType == applicationurctargetdesc_xml:\n return [ \"td\" ]\n\n if mimeType == texttcl:\n return [ \"tcl\", \"tk\" ]\n\n if mimeType == applicationvnd3gpp2tcap:\n return [ \"tcap\" ]\n\n if mimeType == applicationxzstdcompressedtar:\n return [ \"tarzst\", \"tzst\" ]\n\n if mimeType == applicationxtarz:\n return [ \"tarz\", \"taz\" ]\n\n if mimeType == applicationxxzcompressedtar:\n return [ \"tarxz\", \"txz\" ]\n\n if mimeType == applicationxtzo:\n return [ \"tarlzo\", \"tzo\" ]\n\n if mimeType == applicationxlzmacompressedtar:\n return [ \"tarlzma\", \"tlz\" ]\n\n if mimeType == applicationxlz4compressedtar:\n return [ \"tarlz4\" ]\n\n if mimeType == applicationxlzipcompressedtar:\n return [ \"tarlz\" ]\n\n if mimeType == applicationxlrzipcompressedtar:\n return [ \"tarlrz\", \"tlrz\" ]\n\n if mimeType == applicationxcompressedtar:\n return [ \"targz\", \"tgz\" ]\n\n if mimeType == applicationxbzipcompressedtar:\n return [ \"tarbz2\", \"tarbz\", \"tbz2\", \"tbz\", \"tb2\" ]\n\n if mimeType == applicationxtar:\n return [ \"tar\", \"gtar\", \"gem\" ]\n\n if mimeType == imagevndtencenttap:\n return [ \"tap\" ]\n\n if mimeType == applicationvndtaointentmodulearchive:\n return [ \"tao\" ]\n\n if mimeType == audioxtak:\n return [ \"tak\" ]\n\n if mimeType == applicationvndmynfc:\n return [ \"taglet\" ]\n\n if mimeType == imaget38:\n return [ \"t38\" ]\n\n if mimeType == applicationxt3vmimage:\n return [ \"t3\" ]\n\n if mimeType == textxtxt2tags:\n return [ \"t2t\" ]\n\n if mimeType == textspreadsheet:\n return [ \"sylk\", \"slk\" ]\n\n if mimeType == applicationvndsunxmlwriter:\n return [ \"sxw\" ]\n\n if mimeType == applicationvndsunxmlmath:\n return [ \"sxm\" ]\n\n if mimeType == applicationvndsunxmlimpress:\n return [ \"sxi\" ]\n\n if mimeType == applicationvndsunxmlwriterglobal:\n return [ \"sxg\" ]\n\n if mimeType == applicationvndsunxmldraw:\n return [ \"sxd\" ]\n\n if mimeType == applicationvndsunxmlcalc:\n return [ \"sxc\" ]\n\n if mimeType == applicationswid_xml:\n return [ \"swidtag\" ]\n\n if mimeType == applicationvndaristanetworksswi:\n return [ \"swi\" ]\n\n if mimeType == applicationvndadobeflashmovie:\n return [ \"swf\", \"spl\" ]\n\n if mimeType == textxsvhdr:\n return [ \"svh\" ]\n\n if mimeType == imagesvg_xmlcompressed:\n return [ \"svgz\", \"svggz\" ]\n\n if mimeType == imagesvg_xml:\n return [ \"svg\" ]\n\n if mimeType == applicationvndsvd:\n return [ \"svd\" ]\n\n if mimeType == applicationvnddvbservice:\n return [ \"svc\" ]\n\n if mimeType == applicationxsv4crc:\n return [ \"sv4crc\" ]\n\n if mimeType == applicationxsv4cpio:\n 
return [ \"sv4cpio\" ]\n\n if mimeType == textxsvsrc:\n return [ \"sv\" ]\n\n if mimeType == applicationvndsuscalendar:\n return [ \"sus\", \"susp\" ]\n\n if mimeType == imagexsunraster:\n return [ \"sun\" ]\n\n if mimeType == textxmicrodvd:\n return [ \"sub\" ]\n\n if mimeType == textstylus:\n return [ \"stylus\", \"styl\" ]\n\n if mimeType == applicationvndsunxmlwritertemplate:\n return [ \"stw\" ]\n\n if mimeType == applicationvndpgformat:\n return [ \"str\" ]\n\n if mimeType == modelstep_zip:\n return [ \"stpz\" ]\n\n if mimeType == modelstepxml_zip:\n return [ \"stpxz\" ]\n\n if mimeType == modelstep_xml:\n return [ \"stpx\" ]\n\n if mimeType == audioxstm:\n return [ \"stm\" ]\n\n if mimeType == modelstl:\n return [ \"stl\" ]\n\n if mimeType == applicationhyperstudio:\n return [ \"stk\" ]\n\n if mimeType == applicationvndsunxmlimpresstemplate:\n return [ \"sti\" ]\n\n if mimeType == applicationvndwtstf:\n return [ \"stf\" ]\n\n if mimeType == applicationvndsunxmldrawtemplate:\n return [ \"std\" ]\n\n if mimeType == applicationvndsunxmlcalctemplate:\n return [ \"stc\" ]\n\n if mimeType == applicationvndsailingtrackertrack:\n return [ \"st\" ]\n\n if mimeType == applicationssml_xml:\n return [ \"ssml\" ]\n\n if mimeType == applicationvndepsonssf:\n return [ \"ssf\" ]\n\n if mimeType == applicationvndkodakdescriptor:\n return [ \"sse\" ]\n\n if mimeType == applicationssdl_xml:\n return [ \"ssdl\" ]\n\n if mimeType == textxssa:\n return [ \"ssa\", \"ass\" ]\n\n if mimeType == applicationsparqlresults_xml:\n return [ \"srx\" ]\n\n if mimeType == applicationsru_xml:\n return [ \"sru\" ]\n\n if mimeType == applicationxsubrip:\n return [ \"srt\" ]\n\n if mimeType == imagexsonysrf:\n return [ \"srf\" ]\n\n if mimeType == applicationxsourcerpm:\n return [ \"srcrpm\", \"spm\" ]\n\n if mimeType == applicationxwaissource:\n return [ \"src\" ]\n\n if mimeType == imagexsonysr2:\n return [ \"sr2\" ]\n\n if mimeType == applicationvndsquashfs:\n return [ \"sqsh\" ]\n\n if mimeType == applicationvndsqlite3:\n return [ \"sqlite3\" ]\n\n if mimeType == applicationxsqlite2:\n return [ \"sqlite2\" ]\n\n if mimeType == applicationsql:\n return [ \"sql\" ]\n\n if mimeType == applicationxapplesystemprofiler_xml:\n return [ \"spx\" ]\n\n if mimeType == applicationscvpvprequest:\n return [ \"spq\" ]\n\n if mimeType == applicationscvpvpresponse:\n return [ \"spp\" ]\n\n if mimeType == textvndin3dspot:\n return [ \"spot\" ]\n\n if mimeType == applicationvndyamahasmafphrase:\n return [ \"spf\" ]\n\n if mimeType == textxrpmspec:\n return [ \"spec\" ]\n\n if mimeType == textspdx:\n return [ \"spdx\" ]\n\n if mimeType == applicationxfontspeedo:\n return [ \"spd\" ]\n\n if mimeType == applicationxsharedlib:\n return [ \"so\", \"so09\" ]\n\n if mimeType == applicationxfontsnf:\n return [ \"snf\" ]\n\n if mimeType == applicationvndsnap:\n return [ \"snap\" ]\n\n if mimeType == applicationvndstepmaniapackage:\n return [ \"smzip\" ]\n\n if mimeType == videoxsmv:\n return [ \"smv\" ]\n\n if mimeType == applicationxsmsrom:\n return [ \"sms\" ]\n\n if mimeType == videovndradgamettoolssmacker:\n return [ \"smk\" ]\n\n if mimeType == applicationsmil_xml:\n return [ \"smil\", \"smi\", \"sml\", \"kino\" ]\n\n if mimeType == applicationvndstardivisionmath:\n return [ \"smf\" ]\n\n if mimeType == applicationvndstardivisionmail:\n return [ \"smd\" ]\n\n if mimeType == applicationvndstepmaniastepchart:\n return [ \"sm\" ]\n\n if mimeType == applicationvndepsonsalt:\n return [ \"slt\" ]\n\n if mimeType == applicationroutestsid_xml:\n 
return [ \"sls\" ]\n\n if mimeType == textslim:\n return [ \"slim\", \"slm\" ]\n\n if mimeType == applicationvndopenxmlformatsofficedocumentpresentationmlslide:\n return [ \"sldx\" ]\n\n if mimeType == applicationvndmspowerpointslidemacroenabled12:\n return [ \"sldm\" ]\n\n if mimeType == applicationpgpkeys:\n return [ \"skr\", \"pkr\", \"key\" ]\n\n if mimeType == applicationvndkoan:\n return [ \"skp\", \"skd\", \"skt\", \"skm\" ]\n\n if mimeType == imagexskencil:\n return [ \"sk\", \"sk1\" ]\n\n if mimeType == applicationsieve:\n return [ \"siv\", \"sieve\" ]\n\n if mimeType == applicationxstuffitx:\n return [ \"sitx\" ]\n\n if mimeType == applicationxstuffit:\n return [ \"sit\" ]\n\n if mimeType == xepocxsisxapp:\n return [ \"sisx\" ]\n\n if mimeType == applicationvndsymbianinstall:\n return [ \"sis\" ]\n\n if mimeType == audiosilk:\n return [ \"sil\" ]\n\n if mimeType == applicationpgpsignature:\n return [ \"sig\" ]\n\n if mimeType == audioprssid:\n return [ \"sid\", \"psid\" ]\n\n if mimeType == applicationxsiag:\n return [ \"siag\" ]\n\n if mimeType == texthtml:\n return [ \"shtml\" ]\n\n if mimeType == applicationxshorten:\n return [ \"shn\" ]\n\n if mimeType == applicationshf_xml:\n return [ \"shf\" ]\n\n if mimeType == textshex:\n return [ \"shex\" ]\n\n if mimeType == applicationxshar:\n return [ \"shar\" ]\n\n if mimeType == applicationxdiashape:\n return [ \"shape\" ]\n\n if mimeType == applicationxshellscript:\n return [ \"sh\" ]\n\n if mimeType == textsgml:\n return [ \"sgml\", \"sgm\" ]\n\n if mimeType == imagexsgi:\n return [ \"sgi\" ]\n\n if mimeType == applicationxgosgf:\n return [ \"sgf\" ]\n\n if mimeType == applicationxsg1000rom:\n return [ \"sg\" ]\n\n if mimeType == textxsfv:\n return [ \"sfv\" ]\n\n if mimeType == applicationvndspotfiresfs:\n return [ \"sfs\" ]\n\n if mimeType == applicationvndhydrostatixsofdata:\n return [ \"sfd-hdstx\" ]\n\n if mimeType == applicationvndnintendosnesrom:\n return [ \"sfc\", \"smc\" ]\n\n if mimeType == applicationsetregistrationinitiation:\n return [ \"setreg\" ]\n\n if mimeType == applicationsetpaymentinitiation:\n return [ \"setpay\" ]\n\n if mimeType == textxdbusservice:\n return [ \"service\" ]\n\n if mimeType == applicationjavaserializedobject:\n return [ \"ser\" ]\n\n if mimeType == applicationsensml_xml:\n return [ \"sensmlx\" ]\n\n if mimeType == applicationsenml_xml:\n return [ \"senmlx\" ]\n\n if mimeType == applicationvndsemf:\n return [ \"semf\" ]\n\n if mimeType == applicationvndsemd:\n return [ \"semd\" ]\n\n if mimeType == applicationvndsema:\n return [ \"sema\" ]\n\n if mimeType == applicationvndfdsnseed:\n return [ \"seed\", \"dataless\" ]\n\n if mimeType == applicationvndseemail:\n return [ \"see\" ]\n\n if mimeType == applicationxsea:\n return [ \"sea\" ]\n\n if mimeType == applicationvndstardivisionwriter:\n return [ \"sdw\", \"vor\", \"sgl\" ]\n\n if mimeType == applicationvndstardivisionchart:\n return [ \"sds\" ]\n\n if mimeType == applicationvndsolentsdkm_xml:\n return [ \"sdkm\", \"sdkd\" ]\n\n if mimeType == applicationvndstardivisionimpress:\n return [ \"sdd\", \"sdp\" ]\n\n if mimeType == applicationvndstardivisioncalc:\n return [ \"sdc\" ]\n\n if mimeType == applicationvndstardivisiondraw:\n return [ \"sda\" ]\n\n if mimeType == textvndcurlscurl:\n return [ \"scurl\" ]\n\n if mimeType == textxscss:\n return [ \"scss\" ]\n\n if mimeType == applicationscvpcvresponse:\n return [ \"scs\" ]\n\n if mimeType == applicationscvpcvrequest:\n return [ \"scq\" ]\n\n if mimeType == textxscons:\n return [ 
\"sconstruct\", \"sconscript\" ]\n\n if mimeType == applicationxgodotscene:\n return [ \"scn\", \"tscn\", \"escn\" ]\n\n if mimeType == textxscheme:\n return [ \"scm\", \"ss\" ]\n\n if mimeType == applicationxmsschedule:\n return [ \"scd\" ]\n\n if mimeType == textxscala:\n return [ \"scala\", \"sc\" ]\n\n if mimeType == applicationsbml_xml:\n return [ \"sbml\" ]\n\n if mimeType == applicationxspsssav:\n return [ \"sav\", \"zsav\" ]\n\n if mimeType == textxsass:\n return [ \"sass\" ]\n\n if mimeType == applicationxthomsonsapimage:\n return [ \"sap\" ]\n\n if mimeType == applicationxsami:\n return [ \"sami\" ]\n\n if mimeType == applicationxamipro:\n return [ \"sam\" ]\n\n if mimeType == textxsagemath:\n return [ \"sage\" ]\n\n if mimeType == applicationvndyamahasmafaudio:\n return [ \"saf\" ]\n\n if mimeType == audioxs3m:\n return [ \"s3m\" ]\n\n if mimeType == textxasm:\n return [ \"s\", \"asm\" ]\n\n if mimeType == imagexpanasonicrw2:\n return [ \"rw2\" ]\n\n if mimeType == videovndrnrealvideo:\n return [ \"rv\", \"rvx\" ]\n\n if mimeType == applicationrouteusd_xml:\n return [ \"rusd\" ]\n\n if mimeType == applicationxmakeself:\n return [ \"run\" ]\n\n if mimeType == textrichtext:\n return [ \"rtx\" ]\n\n if mimeType == applicationrtf:\n return [ \"rtf\" ]\n\n if mimeType == textvndrnrealtext:\n return [ \"rt\" ]\n\n if mimeType == textxrst:\n return [ \"rst\" ]\n\n if mimeType == applicationrss_xml:\n return [ \"rss\" ]\n\n if mimeType == applicationurcressheet_xml:\n return [ \"rsheet\" ]\n\n if mimeType == applicationrsd_xml:\n return [ \"rsd\" ]\n\n if mimeType == applicationatscrsat_xml:\n return [ \"rsat\" ]\n\n if mimeType == textrust:\n return [ \"rs\" ]\n\n if mimeType == applicationvndnokiaradiopreset:\n return [ \"rpst\" ]\n\n if mimeType == applicationvndnokiaradiopresets:\n return [ \"rpss\" ]\n\n if mimeType == applicationxrpm:\n return [ \"rpm\" ]\n\n if mimeType == applicationvndcloantorp9:\n return [ \"rp9\" ]\n\n if mimeType == imagevndrnrealpix:\n return [ \"rp\" ]\n\n if mimeType == applicationrpkiroa:\n return [ \"roa\" ]\n\n if mimeType == applicationrelaxngcompactsyntax:\n return [ \"rnc\" ]\n\n if mimeType == audioxpnrealaudioplugin:\n return [ \"rmp\" ]\n\n if mimeType == messagexgnurmail:\n return [ \"rmail\" ]\n\n if mimeType == applicationvndrnrealmedia:\n return [ \"rm\", \"rmj\", \"rmm\", \"rms\", \"rmx\", \"rmvb\" ]\n\n if mimeType == imagerle:\n return [ \"rle\" ]\n\n if mimeType == applicationresourcelistsdiff_xml:\n return [ \"rld\" ]\n\n if mimeType == imagevndfujixeroxedmicsrlc:\n return [ \"rlc\" ]\n\n if mimeType == applicationresourcelists_xml:\n return [ \"rl\" ]\n\n if mimeType == applicationxresearchinfosystems:\n return [ \"ris\" ]\n\n if mimeType == audiovndrip:\n return [ \"rip\" ]\n\n if mimeType == applicationreginfo_xml:\n return [ \"rif\" ]\n\n if mimeType == imagexrgb:\n return [ \"rgb\" ]\n\n if mimeType == applicationxgodotresource:\n return [ \"res\", \"tres\" ]\n\n if mimeType == applicationvndbusinessobjects:\n return [ \"rep\" ]\n\n if mimeType == applicationp2poverlay_xml:\n return [ \"relo\" ]\n\n if mimeType == textxreject:\n return [ \"rej\" ]\n\n if mimeType == textxmsregedit:\n return [ \"reg\" ]\n\n if mimeType == textxreadme:\n return [ \"readme\" ]\n\n if mimeType == applicationvnddatavisionrdz:\n return [ \"rdz\" ]\n\n if mimeType == applicationrdf_xml:\n return [ \"rdf\", \"rdfs\", \"owl\" ]\n\n if mimeType == applicationvndipunpluggedrcprofile:\n return [ \"rcprofile\" ]\n\n if mimeType == applicationxruby:\n return [ 
\"rb\" ]\n\n if mimeType == applicationxrawdiskimagexzcompressed:\n return [ \"rawdiskimagexz\", \"imgxz\" ]\n\n if mimeType == applicationxrawdiskimage:\n return [ \"rawdiskimage\", \"img\" ]\n\n if mimeType == imagexpanasonicrw:\n return [ \"raw\" ]\n\n if mimeType == imagexcmuraster:\n return [ \"ras\" ]\n\n if mimeType == applicationvndrar:\n return [ \"rar\" ]\n\n if mimeType == applicationrouteapd_xml:\n return [ \"rapd\" ]\n\n if mimeType == applicationraml_yaml:\n return [ \"raml\" ]\n\n if mimeType == applicationram:\n return [ \"ram\" ]\n\n if mimeType == imagexfujiraf:\n return [ \"raf\" ]\n\n if mimeType == audiovndrnrealaudio:\n return [ \"ra\", \"rax\" ]\n\n if mimeType == applicationvndquarkquarkxpress:\n return [ \"qxd\", \"qxt\", \"qwd\", \"qwt\", \"qxl\", \"qxb\" ]\n\n if mimeType == applicationxquicktimemedialink:\n return [ \"qtl\" ]\n\n if mimeType == imagexquicktime:\n return [ \"qtif\" ]\n\n if mimeType == applicationxqtiplot:\n return [ \"qti\", \"qtigz\" ]\n\n if mimeType == videoquicktime:\n return [ \"qt\", \"mov\", \"moov\", \"qtvr\" ]\n\n if mimeType == applicationsparqlquery:\n return [ \"qs\", \"rq\" ]\n\n if mimeType == applicationvndpublisharedeltatree:\n return [ \"qps\" ]\n\n if mimeType == applicationxqpress:\n return [ \"qp\" ]\n\n if mimeType == textxqml:\n return [ \"qml\", \"qmltypes\", \"qmlproject\" ]\n\n if mimeType == applicationxqw:\n return [ \"qif\" ]\n\n if mimeType == applicationvndintuqfx:\n return [ \"qfx\" ]\n\n if mimeType == applicationxqeddisk:\n return [ \"qed\" ]\n\n if mimeType == applicationxqemudisk:\n return [ \"qcow2\", \"qcow\" ]\n\n if mimeType == applicationvndintuqbo:\n return [ \"qbo\" ]\n\n if mimeType == applicationvndepsonquickanime:\n return [ \"qam\" ]\n\n if mimeType == textxpython:\n return [ \"pyx\", \"wsgi\" ]\n\n if mimeType == videovndmsplayreadymediapyv:\n return [ \"pyv\" ]\n\n if mimeType == applicationxpyspreadspreadsheet:\n return [ \"pysu\" ]\n\n if mimeType == applicationxpyspreadbzspreadsheet:\n return [ \"pys\" ]\n\n if mimeType == modelvndpythapyox:\n return [ \"pyox\" ]\n\n if mimeType == applicationxpythonbytecode:\n return [ \"pyc\", \"pyo\" ]\n\n if mimeType == audiovndmsplayreadymediapya:\n return [ \"pya\" ]\n\n if mimeType == textxpython3:\n return [ \"py\", \"py3\", \"py3x\", \"pyi\" ]\n\n if mimeType == applicationvnd3mpostitnotes:\n return [ \"pwn\" ]\n\n if mimeType == applicationxpw:\n return [ \"pw\" ]\n\n if mimeType == applicationvnd3gpppicbwvar:\n return [ \"pvb\" ]\n\n if mimeType == applicationvndmspublisher:\n return [ \"pub\" ]\n\n if mimeType == applicationvndpviptid1:\n return [ \"ptid\" ]\n\n if mimeType == imageprspti:\n return [ \"pti\" ]\n\n if mimeType == applicationxpocketword:\n return [ \"psw\" ]\n\n if mimeType == applicationpskc_xml:\n return [ \"pskcxml\" ]\n\n if mimeType == applicationxgzpostscript:\n return [ \"psgz\" ]\n\n if mimeType == audioxpsflib:\n return [ \"psflib\" ]\n\n if mimeType == applicationxgzfontlinuxpsf:\n return [ \"psfgz\" ]\n\n if mimeType == applicationxfontlinuxpsf:\n return [ \"psf\" ]\n\n if mimeType == imagevndadobephotoshop:\n return [ \"psd\" ]\n\n if mimeType == applicationxbzpostscript:\n return [ \"psbz2\" ]\n\n if mimeType == applicationvnd3gpppicbwsmall:\n return [ \"psb\" ]\n\n if mimeType == applicationpostscript:\n return [ \"ps\" ]\n\n if mimeType == applicationprovenance_xml:\n return [ \"provx\" ]\n\n if mimeType == applicationxgodotproject:\n return [ \"projectgodot\" ]\n\n if mimeType == applicationpicsrules:\n return [ \"prf\" 
]\n\n if mimeType == applicationvndlotusfreelance:\n return [ \"pre\" ]\n\n if mimeType == applicationvndpalm:\n return [ \"pqa\", \"oprc\" ]\n\n if mimeType == applicationvndmspowerpoint:\n return [ \"ppz\", \"ppt\", \"pps\", \"pot\" ]\n\n if mimeType == applicationvndopenxmlformatsofficedocumentpresentationmlpresentation:\n return [ \"pptx\" ]\n\n if mimeType == applicationvndmspowerpointpresentationmacroenabled12:\n return [ \"pptm\" ]\n\n if mimeType == applicationvndopenxmlformatsofficedocumentpresentationmlslideshow:\n return [ \"ppsx\" ]\n\n if mimeType == applicationvndmspowerpointslideshowmacroenabled12:\n return [ \"ppsm\" ]\n\n if mimeType == imagexportablepixmap:\n return [ \"ppm\" ]\n\n if mimeType == applicationvndcupsppd:\n return [ \"ppd\" ]\n\n if mimeType == applicationvndmspowerpointaddinmacroenabled12:\n return [ \"ppam\" ]\n\n if mimeType == applicationvndopenxmlformatsofficedocumentpresentationmltemplate:\n return [ \"potx\" ]\n\n if mimeType == applicationvndmspowerpointtemplatemacroenabled12:\n return [ \"potm\" ]\n\n if mimeType == applicationvndmacportsportpkg:\n return [ \"portpkg\" ]\n\n if mimeType == applicationxspsspor:\n return [ \"por\" ]\n\n if mimeType == textxmaven_xml:\n return [ \"pomxml\", \"settingsxml\" ]\n\n if mimeType == textxgettexttranslation:\n return [ \"po\" ]\n\n if mimeType == imagexmacpaint:\n return [ \"pntg\" ]\n\n if mimeType == imagexportableanymap:\n return [ \"pnm\" ]\n\n if mimeType == imagepng:\n return [ \"png\" ]\n\n if mimeType == applicationvndctcposml:\n return [ \"pml\" ]\n\n if mimeType == audioxscpls:\n return [ \"pls\" ]\n\n if mimeType == applicationxplanperfect:\n return [ \"pln\" ]\n\n if mimeType == applicationvndpocketlearn:\n return [ \"plf\" ]\n\n if mimeType == applicationvndmobiusplc:\n return [ \"plc\" ]\n\n if mimeType == applicationvnd3gpppicbwlarge:\n return [ \"plb\" ]\n\n if mimeType == audioxiriverpla:\n return [ \"pla\" ]\n\n if mimeType == applicationxperl:\n return [ \"pl\", \"pm\", \"al\", \"perl\", \"pod\", \"t\" ]\n\n if mimeType == applicationvndapplepkpass:\n return [ \"pkpass\" ]\n\n if mimeType == applicationpkixpkipath:\n return [ \"pkipath\" ]\n\n if mimeType == applicationpkixcmp:\n return [ \"pki\" ]\n\n if mimeType == applicationxtexpk:\n return [ \"pk\" ]\n\n if mimeType == applicationxphp:\n return [ \"php\", \"php3\", \"php4\", \"php5\", \"phps\" ]\n\n if mimeType == applicationpgpencrypted:\n return [ \"pgp\", \"gpg\", \"asc\" ]\n\n if mimeType == applicationvndchesspgn:\n return [ \"pgn\" ]\n\n if mimeType == imagexportablegraymap:\n return [ \"pgm\" ]\n\n if mimeType == applicationfonttdpfr:\n return [ \"pfr\" ]\n\n if mimeType == applicationxfonttype1:\n return [ \"pfa\", \"pfb\", \"gsf\", \"pfm\" ]\n\n if mimeType == imagexpentaxpef:\n return [ \"pef\" ]\n\n if mimeType == applicationxxzpdf:\n return [ \"pdfxz\" ]\n\n if mimeType == applicationxlzpdf:\n return [ \"pdflz\" ]\n\n if mimeType == applicationxgzpdf:\n return [ \"pdfgz\" ]\n\n if mimeType == applicationxbzpdf:\n return [ \"pdfbz2\" ]\n\n if mimeType == applicationpdf:\n return [ \"pdf\" ]\n\n if mimeType == textxprocessing:\n return [ \"pde\" ]\n\n if mimeType == applicationxaportisdoc:\n return [ \"pdb\", \"pdc\" ]\n\n if mimeType == imagevndzbrushpcx:\n return [ \"pcx\" ]\n\n if mimeType == applicationvndcurlpcurl:\n return [ \"pcurl\" ]\n\n if mimeType == imagexpict:\n return [ \"pct\", \"pict\", \"pict1\", \"pict2\", \"pic\" ]\n\n if mimeType == applicationvndhppclxl:\n return [ \"pclxl\" ]\n\n if mimeType == 
applicationvndhppcl:\n return [ \"pcl\" ]\n\n if mimeType == applicationxfontpcf:\n return [ \"pcf\", \"pcfz\", \"pcfgz\" ]\n\n if mimeType == applicationxpcenginerom:\n return [ \"pce\" ]\n\n if mimeType == imagexphotocd:\n return [ \"pcd\" ]\n\n if mimeType == applicationvndtcpdumppcap:\n return [ \"pcap\", \"cap\", \"dmp\" ]\n\n if mimeType == imagexportablebitmap:\n return [ \"pbm\" ]\n\n if mimeType == applicationvndpowerbuilder6:\n return [ \"pbd\" ]\n\n if mimeType == applicationvndpawaafile:\n return [ \"paw\" ]\n\n if mimeType == imagexgimppat:\n return [ \"pat\" ]\n\n if mimeType == applicationxpar2:\n return [ \"par2\" ]\n\n if mimeType == applicationxpak:\n return [ \"pak\" ]\n\n if mimeType == applicationvndapplepages:\n return [ \"pages\" ]\n\n if mimeType == applicationxjavapack200:\n return [ \"pack\" ]\n\n if mimeType == applicationxnsproxyautoconfig:\n return [ \"pac\" ]\n\n if mimeType == applicationpkcs8encrypted:\n return [ \"p8e\" ]\n\n if mimeType == applicationpkcs8:\n return [ \"p8\" ]\n\n if mimeType == applicationpkcs7signature:\n return [ \"p7s\" ]\n\n if mimeType == applicationxpkcs7certreqresp:\n return [ \"p7r\" ]\n\n if mimeType == applicationpkcs7mime:\n return [ \"p7c\", \"p7m\" ]\n\n if mimeType == applicationxpkcs7certificates:\n return [ \"p7b\", \"spc\" ]\n\n if mimeType == applicationxpagemaker:\n return [ \"p65\", \"pm6\", \"pmd\" ]\n\n if mimeType == applicationpkcs12:\n return [ \"p12\", \"pfx\" ]\n\n if mimeType == applicationpkcs10:\n return [ \"p10\" ]\n\n if mimeType == textxpascal:\n return [ \"p\", \"pas\" ]\n\n if mimeType == applicationvndopenofficeorgextension:\n return [ \"oxt\" ]\n\n if mimeType == applicationoxps:\n return [ \"oxps\" ]\n\n if mimeType == applicationowl_xml:\n return [ \"owx\" ]\n\n if mimeType == applicationxvirtualboxovf:\n return [ \"ovf\" ]\n\n if mimeType == applicationovf:\n return [ \"ova\" ]\n\n if mimeType == applicationvndoasisopendocumenttexttemplate:\n return [ \"ott\" ]\n\n if mimeType == applicationvndoasisopendocumentspreadsheettemplate:\n return [ \"ots\" ]\n\n if mimeType == applicationvndoasisopendocumentpresentationtemplate:\n return [ \"otp\" ]\n\n if mimeType == applicationvndoasisopendocumentimagetemplate:\n return [ \"oti\" ]\n\n if mimeType == applicationvndoasisopendocumenttextweb:\n return [ \"oth\" ]\n\n if mimeType == applicationvndoasisopendocumentgraphicstemplate:\n return [ \"otg\" ]\n\n if mimeType == applicationvndoasisopendocumentformulatemplate:\n return [ \"otf\", \"odft\" ]\n\n if mimeType == applicationvndoasisopendocumentcharttemplate:\n return [ \"otc\" ]\n\n if mimeType == applicationvndopenstreetmapdata_xml:\n return [ \"osm\" ]\n\n if mimeType == applicationvndyamahaopenscoreformatosfpvg_xml:\n return [ \"osfpvg\" ]\n\n if mimeType == applicationvndyamahaopenscoreformat:\n return [ \"osf\" ]\n\n if mimeType == textorg:\n return [ \"org\" ]\n\n if mimeType == imagexolympusorf:\n return [ \"orf\" ]\n\n if mimeType == imageopenraster:\n return [ \"ora\" ]\n\n if mimeType == textxopml_xml:\n return [ \"opml\" ]\n\n if mimeType == applicationoebpspackage_xml:\n return [ \"opf\" ]\n\n if mimeType == textxooc:\n return [ \"ooc\" ]\n\n if mimeType == applicationonenote:\n return [ \"onetoc\", \"onetoc2\", \"onetmp\", \"onepkg\" ]\n\n if mimeType == applicationomdoc_xml:\n return [ \"omdoc\" ]\n\n if mimeType == applicationxoleo:\n return [ \"oleo\" ]\n\n if mimeType == applicationogg:\n return [ \"ogx\" ]\n\n if mimeType == videoogg:\n return [ \"ogv\" ]\n\n if mimeType == 
videoxogm_ogg:\n return [ \"ogm\" ]\n\n if mimeType == modelvndopengex:\n return [ \"ogex\" ]\n\n if mimeType == audioogg:\n return [ \"oga\", \"ogg\", \"opus\" ]\n\n if mimeType == applicationvndoasisopendocumenttext:\n return [ \"odt\" ]\n\n if mimeType == applicationvndoasisopendocumentspreadsheet:\n return [ \"ods\" ]\n\n if mimeType == applicationvndoasisopendocumentpresentation:\n return [ \"odp\" ]\n\n if mimeType == applicationvndoasisopendocumenttextmaster:\n return [ \"odm\" ]\n\n if mimeType == applicationvndoasisopendocumentimage:\n return [ \"odi\" ]\n\n if mimeType == applicationvndoasisopendocumentgraphics:\n return [ \"odg\" ]\n\n if mimeType == applicationvndoasisopendocumentformula:\n return [ \"odf\" ]\n\n if mimeType == applicationvndoasisopendocumentchart:\n return [ \"odc\" ]\n\n if mimeType == applicationvndoasisopendocumentdatabase:\n return [ \"odb\" ]\n\n if mimeType == applicationoda:\n return [ \"oda\" ]\n\n if mimeType == textxocl:\n return [ \"ocl\" ]\n\n if mimeType == applicationxtgif:\n return [ \"obj\" ]\n\n if mimeType == applicationvndopenbloxgame_xml:\n return [ \"obgx\" ]\n\n if mimeType == applicationxmsbinder:\n return [ \"obd\" ]\n\n if mimeType == applicationvndfujitsuoasys:\n return [ \"oas\" ]\n\n if mimeType == applicationvndfujitsuoasys3:\n return [ \"oa3\" ]\n\n if mimeType == applicationvndfujitsuoasys2:\n return [ \"oa2\" ]\n\n if mimeType == applicationxobject:\n return [ \"o\", \"mod\" ]\n\n if mimeType == applicationxnzb:\n return [ \"nzb\" ]\n\n if mimeType == applicationvndapplenumbers:\n return [ \"numbers\" ]\n\n if mimeType == applicationvndnitf:\n return [ \"ntf\", \"nitf\" ]\n\n if mimeType == applicationntriples:\n return [ \"nt\" ]\n\n if mimeType == videoxnsv:\n return [ \"nsv\" ]\n\n if mimeType == applicationvndlotusnotes:\n return [ \"nsf\" ]\n\n if mimeType == applicationxnetshowchannel:\n return [ \"nsc\" ]\n\n if mimeType == imagexnikonnrw:\n return [ \"nrw\" ]\n\n if mimeType == applicationnquads:\n return [ \"nq\" ]\n\n if mimeType == imagevndnetfpx:\n return [ \"npx\" ]\n\n if mimeType == applicationvndnoblenetweb:\n return [ \"nnw\" ]\n\n if mimeType == applicationvndnoblenetsealer:\n return [ \"nns\" ]\n\n if mimeType == applicationvndnoblenetdirectory:\n return [ \"nnd\" ]\n\n if mimeType == applicationvndenliven:\n return [ \"nml\" ]\n\n if mimeType == applicationvndneurolanguagenlu:\n return [ \"nlu\" ]\n\n if mimeType == applicationxneogeopocketrom:\n return [ \"ngp\" ]\n\n if mimeType == applicationvndnokiangagedata:\n return [ \"ngdat\" ]\n\n if mimeType == applicationxneogeopocketcolorrom:\n return [ \"ngc\" ]\n\n if mimeType == applicationvndnokiangagesymbianinstall:\n return [ \"n-gage\" ]\n\n if mimeType == textxnfo:\n return [ \"nfo\" ]\n\n if mimeType == applicationxnesrom:\n return [ \"nes\", \"nez\", \"unf\", \"unif\" ]\n\n if mimeType == imagexnikonnef:\n return [ \"nef\" ]\n\n if mimeType == applicationxnintendodsrom:\n return [ \"nds\" ]\n\n if mimeType == applicationxdtbncx_xml:\n return [ \"ncx\" ]\n\n if mimeType == applicationvndwolframplayer:\n return [ \"nbp\" ]\n\n if mimeType == applicationmathematica:\n return [ \"nb\", \"ma\", \"mb\" ]\n\n if mimeType == applicationxn64rom:\n return [ \"n64\", \"z64\", \"v64\" ]\n\n if mimeType == textn3:\n return [ \"n3\" ]\n\n if mimeType == applicationvndtriscapemxs:\n return [ \"mxs\" ]\n\n if mimeType == applicationxv_xml:\n return [ \"mxml\", \"xhvml\", \"xvml\", \"xvm\" ]\n\n if mimeType == audiomobilexmf:\n return [ \"mxmf\" ]\n\n if mimeType == 
applicationvndrecordaremusicxml:\n return [ \"mxl\" ]\n\n if mimeType == applicationmxf:\n return [ \"mxf\" ]\n\n if mimeType == applicationvndmfer:\n return [ \"mwf\" ]\n\n if mimeType == applicationvndmapboxvectortile:\n return [ \"mvt\" ]\n\n if mimeType == applicationxmsmediaview:\n return [ \"mvb\", \"m13\", \"m14\" ]\n\n if mimeType == applicationvndrecordaremusicxml_xml:\n return [ \"musicxml\" ]\n\n if mimeType == applicationmmtusd_xml:\n return [ \"musd\" ]\n\n if mimeType == applicationvndmusician:\n return [ \"mus\" ]\n\n if mimeType == textxmup:\n return [ \"mup\", \"not\" ]\n\n if mimeType == modelmtl:\n return [ \"mtl\" ]\n\n if mimeType == applicationxmsxrom:\n return [ \"msx\" ]\n\n if mimeType == applicationvndmuveestyle:\n return [ \"msty\" ]\n\n if mimeType == imagexmsod:\n return [ \"msod\" ]\n\n if mimeType == applicationvndmobiusmsl:\n return [ \"msl\" ]\n\n if mimeType == applicationxmsi:\n return [ \"msi\" ]\n\n if mimeType == modelmesh:\n return [ \"msh\", \"mesh\", \"silo\" ]\n\n if mimeType == applicationvndmsoutlook:\n return [ \"msg\" ]\n\n if mimeType == applicationvndepsonmsf:\n return [ \"msf\" ]\n\n if mimeType == applicationvndmseq:\n return [ \"mseq\" ]\n\n if mimeType == applicationvndfdsnmseed:\n return [ \"mseed\" ]\n\n if mimeType == applicationmediaservercontrol_xml:\n return [ \"mscml\" ]\n\n if mimeType == textxtroffms:\n return [ \"ms\" ]\n\n if mimeType == imagexminoltamrw:\n return [ \"mrw\" ]\n\n if mimeType == textxmrml:\n return [ \"mrml\", \"mrl\" ]\n\n if mimeType == applicationmarcxml_xml:\n return [ \"mrcx\" ]\n\n if mimeType == applicationmarc:\n return [ \"mrc\" ]\n\n if mimeType == applicationvndmobiusmqy:\n return [ \"mqy\" ]\n\n if mimeType == applicationvndibmminipay:\n return [ \"mpy\" ]\n\n if mimeType == applicationvndmsproject:\n return [ \"mpt\" ]\n\n if mimeType == applicationvndmophunapplication:\n return [ \"mpn\" ]\n\n if mimeType == applicationvndblueicemultipass:\n return [ \"mpm\" ]\n\n if mimeType == textxmpl2:\n return [ \"mpl\" ]\n\n if mimeType == applicationvndappleinstaller_xml:\n return [ \"mpkg\" ]\n\n if mimeType == applicationmediapolicydataset_xml:\n return [ \"mpf\" ]\n\n if mimeType == videompeg:\n return [ \"mpeg\", \"mpg\", \"mpe\", \"vob\", \"090909vdr\", \"m1v\", \"m2v\" ]\n\n if mimeType == applicationdash_xml:\n return [ \"mpd\" ]\n\n if mimeType == audioxmusepack:\n return [ \"mpc\", \"mpp\", \"mp\" ]\n\n if mimeType == applicationmp4:\n return [ \"mp4s\", \"m4p\" ]\n\n if mimeType == videomp4:\n return [ \"mp4\", \"m4v\", \"f4v\", \"lrv\", \"mp4v\", \"mpg4\" ]\n\n if mimeType == audiompeg:\n return [ \"mp3\", \"mpga\", \"mp2a\", \"m2a\", \"m3a\" ]\n\n if mimeType == audiomp2:\n return [ \"mp2\" ]\n\n if mimeType == videoxsgimovie:\n return [ \"movie\" ]\n\n if mimeType == textxmof:\n return [ \"mof\" ]\n\n if mimeType == applicationmods_xml:\n return [ \"mods\" ]\n\n if mimeType == textxmoc:\n return [ \"moc\" ]\n\n if mimeType == applicationxmobipocketebook:\n return [ \"mobi\", \"prc\" ]\n\n if mimeType == audioxmo3:\n return [ \"mo3\" ]\n\n if mimeType == applicationxmsmoney:\n return [ \"mny\" ]\n\n if mimeType == videoxmng:\n return [ \"mng\" ]\n\n if mimeType == imagevndfujixeroxedmicsmmr:\n return [ \"mmr\" ]\n\n if mimeType == applicationmathml_xml:\n return [ \"mml\", \"mathml\" ]\n\n if mimeType == applicationvndsmaf:\n return [ \"mmf\", \"smaf\" ]\n\n if mimeType == applicationvndchipnutskaraokemmd:\n return [ \"mmd\" ]\n\n if mimeType == textxobjc__src:\n return [ \"mm\" ]\n\n if mimeType 
== applicationvnddolbymlp:\n return [ \"mlp\" ]\n\n if mimeType == textxocaml:\n return [ \"ml\", \"mli\" ]\n\n if mimeType == videoxmatroska:\n return [ \"mkv\", \"mks\" ]\n\n if mimeType == audioxmatroska:\n return [ \"mka\" ]\n\n if mimeType == videoxmatroska3d:\n return [ \"mk3d\" ]\n\n if mimeType == videoxmjpeg:\n return [ \"mjpeg\", \"mjpg\" ]\n\n if mimeType == videomj2:\n return [ \"mj2\", \"mjp2\" ]\n\n if mimeType == audioxminipsf:\n return [ \"minipsf\" ]\n\n if mimeType == applicationxmif:\n return [ \"mif\" ]\n\n if mimeType == applicationxmie:\n return [ \"mie\" ]\n\n if mimeType == audiomidi:\n return [ \"mid\", \"midi\", \"kar\", \"rmi\" ]\n\n if mimeType == applicationxmimearchive:\n return [ \"mhtml\", \"mht\" ]\n\n if mimeType == applicationvndproteusmagazine:\n return [ \"mgz\" ]\n\n if mimeType == applicationxmagicpoint:\n return [ \"mgp\" ]\n\n if mimeType == applicationrpkimanifest:\n return [ \"mft\" ]\n\n if mimeType == applicationvndmfmp:\n return [ \"mfm\" ]\n\n if mimeType == applicationmets_xml:\n return [ \"mets\" ]\n\n if mimeType == applicationmetalink_xml:\n return [ \"metalink\" ]\n\n if mimeType == applicationmetalink4_xml:\n return [ \"meta4\" ]\n\n if mimeType == textxmeson:\n return [ \"mesonbuild\", \"mesonoptionstxt\" ]\n\n if mimeType == textxtroffme:\n return [ \"me\" ]\n\n if mimeType == imagevndmsmodi:\n return [ \"mdi\" ]\n\n if mimeType == applicationvndmsaccess:\n return [ \"mdb\" ]\n\n if mimeType == textmarkdown:\n return [ \"md\", \"mkd\", \"markdown\" ]\n\n if mimeType == textvndcurlmcurl:\n return [ \"mcurl\" ]\n\n if mimeType == applicationvndmcd:\n return [ \"mcd\" ]\n\n if mimeType == textvndsenxwarpscript:\n return [ \"mc2\" ]\n\n if mimeType == applicationvndmedcalcdata:\n return [ \"mc1\" ]\n\n if mimeType == applicationmbox:\n return [ \"mbox\" ]\n\n if mimeType == applicationvndmobiusmbk:\n return [ \"mbk\" ]\n\n if mimeType == textcachemanifest:\n return [ \"manifest\", \"appcache\" ]\n\n if mimeType == applicationxtroffman:\n return [ \"man\", \"19\" ]\n\n if mimeType == textxmakefile:\n return [ \"makefile\", \"gnumakefile\", \"mk\", \"mak\" ]\n\n if mimeType == applicationvndecowinchart:\n return [ \"mag\" ]\n\n if mimeType == applicationmmtaei_xml:\n return [ \"maei\" ]\n\n if mimeType == applicationmads_xml:\n return [ \"mads\" ]\n\n if mimeType == applicationxmarkaby:\n return [ \"mab\" ]\n\n if mimeType == applicationxthomsoncartridgememo7:\n return [ \"m7\" ]\n\n if mimeType == videoisosegment:\n return [ \"m4s\" ]\n\n if mimeType == audioxm4r:\n return [ \"m4r\" ]\n\n if mimeType == audioxm4b:\n return [ \"m4b\", \"f4b\" ]\n\n if mimeType == audiomp4:\n return [ \"m4a\", \"f4a\", \"mp4a\" ]\n\n if mimeType == applicationxm4:\n return [ \"m4\" ]\n\n if mimeType == audioxmpegurl:\n return [ \"m3u\", \"m3u8\", \"vlc\" ]\n\n if mimeType == videomp2t:\n return [ \"m2t\", \"m2ts\", \"mts\", \"cpi\", \"clpi\", \"mpls\", \"bdm\", \"bdmv\" ]\n\n if mimeType == applicationmp21:\n return [ \"m21\", \"mp21\" ]\n\n if mimeType == videovndmpegurl:\n return [ \"m1u\", \"m4u\", \"mxu\" ]\n\n if mimeType == textxobjcsrc:\n return [ \"m\" ]\n\n if mimeType == applicationxlzop:\n return [ \"lzo\" ]\n\n if mimeType == applicationxlzma:\n return [ \"lzma\" ]\n\n if mimeType == applicationxlz4:\n return [ \"lz4\" ]\n\n if mimeType == applicationxlzip:\n return [ \"lz\" ]\n\n if mimeType == applicationxlyx:\n return [ \"lyx\" ]\n\n if mimeType == textxlilypond:\n return [ \"ly\" ]\n\n if mimeType == imagexlws:\n return [ \"lws\" ]\n\n if 
mimeType == applicationvndlotuswordpro:\n return [ \"lwp\" ]\n\n if mimeType == imagexlwo:\n return [ \"lwo\", \"lwob\" ]\n\n if mimeType == audiovndlucentvoice:\n return [ \"lvp\" ]\n\n if mimeType == applicationxluabytecode:\n return [ \"luac\" ]\n\n if mimeType == textxlua:\n return [ \"lua\" ]\n\n if mimeType == applicationvndfrogansltf:\n return [ \"ltf\" ]\n\n if mimeType == applicationxlrzip:\n return [ \"lrz\" ]\n\n if mimeType == applicationvndmslrm:\n return [ \"lrm\" ]\n\n if mimeType == applicationlost_xml:\n return [ \"lostxml\" ]\n\n if mimeType == textxlog:\n return [ \"log\" ]\n\n if mimeType == audiousac:\n return [ \"loas\", \"xhe\" ]\n\n if mimeType == applicationxatarilynxrom:\n return [ \"lnx\" ]\n\n if mimeType == applicationxmsshortcut:\n return [ \"lnk\" ]\n\n if mimeType == textcoffeescript:\n return [ \"litcoffee\" ]\n\n if mimeType == applicationvndroute66link66_xml:\n return [ \"link66\" ]\n\n if mimeType == applicationxlhz:\n return [ \"lhz\" ]\n\n if mimeType == textxliteratehaskell:\n return [ \"lhs\" ]\n\n if mimeType == applicationxlha:\n return [ \"lha\", \"lzh\" ]\n\n if mimeType == applicationlgr_xml:\n return [ \"lgr\" ]\n\n if mimeType == textless:\n return [ \"less\" ]\n\n if mimeType == applicationvndhhelessonplayer:\n return [ \"les\" ]\n\n if mimeType == textxldif:\n return [ \"ldif\" ]\n\n if mimeType == applicationvndllamagraphicslifebalanceexchange_xml:\n return [ \"lbe\" ]\n\n if mimeType == applicationvndllamagraphicslifebalancedesktop:\n return [ \"lbd\" ]\n\n if mimeType == applicationvndlaslas_xml:\n return [ \"lasxml\" ]\n\n if mimeType == applicationxsharedlibraryla:\n return [ \"la\" ]\n\n if mimeType == applicationxkword:\n return [ \"kwd\", \"kwt\" ]\n\n if mimeType == applicationxkugar:\n return [ \"kud\" ]\n\n if mimeType == applicationvndkahootz:\n return [ \"ktz\", \"ktr\" ]\n\n if mimeType == imagektx2:\n return [ \"ktx2\" ]\n\n if mimeType == imagektx:\n return [ \"ktx\" ]\n\n if mimeType == textxkotlin:\n return [ \"kt\" ]\n\n if mimeType == textxkaitaistruct:\n return [ \"ksy\" ]\n\n if mimeType == applicationxkspread:\n return [ \"ksp\" ]\n\n if mimeType == applicationxkrita:\n return [ \"kra\", \"krz\" ]\n\n if mimeType == applicationvnddskeypoint:\n return [ \"kpxx\" ]\n\n if mimeType == applicationxkpresenter:\n return [ \"kpr\", \"kpt\" ]\n\n if mimeType == applicationxkpovmodeler:\n return [ \"kpm\" ]\n\n if mimeType == applicationxkontour:\n return [ \"kon\" ]\n\n if mimeType == applicationvndkinar:\n return [ \"kne\", \"knp\" ]\n\n if mimeType == applicationvndgoogleearthkmz:\n return [ \"kmz\" ]\n\n if mimeType == applicationvndgoogleearthkml_xml:\n return [ \"kml\" ]\n\n if mimeType == applicationxkillustrator:\n return [ \"kil\" ]\n\n if mimeType == applicationvndkidspiration:\n return [ \"kia\" ]\n\n if mimeType == applicationxkformula:\n return [ \"kfo\" ]\n\n if mimeType == applicationxkexiprojectshortcut:\n return [ \"kexis\" ]\n\n if mimeType == applicationxkexiconnectiondata:\n return [ \"kexic\" ]\n\n if mimeType == applicationxkexiprojectsqlite2:\n return [ \"kexi\" ]\n\n if mimeType == imagexkodakkdc:\n return [ \"kdc\" ]\n\n if mimeType == applicationxkeepass2:\n return [ \"kdbx\" ]\n\n if mimeType == applicationxkarbon:\n return [ \"karbon\" ]\n\n if mimeType == applicationxthomsoncassette:\n return [ \"k7\" ]\n\n if mimeType == imagexkodakk25:\n return [ \"k25\" ]\n\n if mimeType == imagejxss:\n return [ \"jxss\" ]\n\n if mimeType == imagejxsi:\n return [ \"jxsi\" ]\n\n if mimeType == imagejxsc:\n return 
[ \"jxsc\" ]\n\n if mimeType == imagejxs:\n return [ \"jxs\" ]\n\n if mimeType == imagejxrs:\n return [ \"jxrs\" ]\n\n if mimeType == imagejxra:\n return [ \"jxra\" ]\n\n if mimeType == imagejxr:\n return [ \"jxr\" ]\n\n if mimeType == imagejxl:\n return [ \"jxl\" ]\n\n if mimeType == textjsx:\n return [ \"jsx\" ]\n\n if mimeType == applicationjsonpatch_json:\n return [ \"jsonpatch\" ]\n\n if mimeType == applicationjsonml_json:\n return [ \"jsonml\" ]\n\n if mimeType == applicationld_json:\n return [ \"jsonld\" ]\n\n if mimeType == applicationjson5:\n return [ \"json5\" ]\n\n if mimeType == applicationjson:\n return [ \"json\", \"map\" ]\n\n if mimeType == textjavascript:\n return [ \"js\", \"jsm\", \"mjs\" ]\n\n if mimeType == applicationjrd_json:\n return [ \"jrd\" ]\n\n if mimeType == applicationxjbuilderproject:\n return [ \"jpr\", \"jpx\" ]\n\n if mimeType == imagejpm:\n return [ \"jpm\", \"jpgm\" ]\n\n if mimeType == imagejph:\n return [ \"jph\" ]\n\n if mimeType == videojpeg:\n return [ \"jpgv\" ]\n\n if mimeType == imagejpeg:\n return [ \"jpg\", \"jpeg\", \"jpe\" ]\n\n if mimeType == imagejpx:\n return [ \"jpf\" ]\n\n if mimeType == imagejp2:\n return [ \"jp2\", \"jpg2\" ]\n\n if mimeType == applicationvndjoostjodaarchive:\n return [ \"joda\" ]\n\n if mimeType == applicationxjavajnlpfile:\n return [ \"jnlp\" ]\n\n if mimeType == imagexjng:\n return [ \"jng\" ]\n\n if mimeType == applicationvndhpjlyt:\n return [ \"jlt\" ]\n\n if mimeType == imagejls:\n return [ \"jls\" ]\n\n if mimeType == applicationxjavakeystore:\n return [ \"jks\", \"ks\", \"cacerts\" ]\n\n if mimeType == applicationvndjisp:\n return [ \"jisp\" ]\n\n if mimeType == imagejphc:\n return [ \"jhc\" ]\n\n if mimeType == applicationxjavajcekeystore:\n return [ \"jceks\" ]\n\n if mimeType == textxjava:\n return [ \"java\" ]\n\n if mimeType == applicationxjavaarchivediff:\n return [ \"jardiff\" ]\n\n if mimeType == applicationxjavaarchive:\n return [ \"jar\" ]\n\n if mimeType == applicationvndjam:\n return [ \"jam\" ]\n\n if mimeType == textjade:\n return [ \"jade\" ]\n\n if mimeType == textvndsunj2meappdescriptor:\n return [ \"jad\" ]\n\n if mimeType == imagexjp2codestream:\n return [ \"j2c\", \"j2k\", \"jpc\" ]\n\n if mimeType == applicationvndimmervisionivu:\n return [ \"ivu\" ]\n\n if mimeType == applicationvndimmervisionivp:\n return [ \"ivp\" ]\n\n if mimeType == applicationits_xml:\n return [ \"its\" ]\n\n if mimeType == applicationvndshanainformedformtemplate:\n return [ \"itp\" ]\n\n if mimeType == applicationxit87:\n return [ \"it87\" ]\n\n if mimeType == audioxit:\n return [ \"it\" ]\n\n if mimeType == applicationxcdimage:\n return [ \"iso\", \"iso9660\" ]\n\n if mimeType == applicationvndirepositorypackage_xml:\n return [ \"irp\" ]\n\n if mimeType == applicationvndibmrightsmanagement:\n return [ \"irm\" ]\n\n if mimeType == applicationxipynb_json:\n return [ \"ipynb\" ]\n\n if mimeType == textxiptables:\n return [ \"iptables\" ]\n\n if mimeType == applicationxipspatch:\n return [ \"ips\" ]\n\n if mimeType == applicationvndshanainformedpackage:\n return [ \"ipk\" ]\n\n if mimeType == applicationipfix:\n return [ \"ipfix\" ]\n\n if mimeType == applicationvndastraeasoftwareiota:\n return [ \"iota\" ]\n\n if mimeType == textxinstall:\n return [ \"install\" ]\n\n if mimeType == applicationinkml_xml:\n return [ \"ink\", \"inkml\" ]\n\n if mimeType == textximelody:\n return [ \"imy\", \"ime\" ]\n\n if mimeType == applicationvndmsims:\n return [ \"ims\" ]\n\n if mimeType == applicationvndaccpacsimplyimp:\n return [ 
\"imp\" ]\n\n if mimeType == applicationvndshanainformedinterchange:\n return [ \"iif\" ]\n\n if mimeType == applicationvndmicrografxigx:\n return [ \"igx\" ]\n\n if mimeType == modeliges:\n return [ \"igs\", \"iges\" ]\n\n if mimeType == applicationvndinsorsigm:\n return [ \"igm\" ]\n\n if mimeType == applicationvndigloader:\n return [ \"igl\" ]\n\n if mimeType == applicationvndshanainformedformdata:\n return [ \"ifm\" ]\n\n if mimeType == imagexilbm:\n return [ \"iff\", \"ilbm\", \"lbm\" ]\n\n if mimeType == imageief:\n return [ \"ief\" ]\n\n if mimeType == textxidl:\n return [ \"idl\" ]\n\n if mimeType == imagevndmicrosofticon:\n return [ \"ico\" ]\n\n if mimeType == imagexicns:\n return [ \"icns\" ]\n\n if mimeType == xconferencexcooltalk:\n return [ \"ice\" ]\n\n if mimeType == applicationvndiccprofile:\n return [ \"icc\", \"icm\" ]\n\n if mimeType == applicationxica:\n return [ \"ica\" ]\n\n if mimeType == applicationvndintergeo:\n return [ \"i2g\" ]\n\n if mimeType == applicationxhwt:\n return [ \"hwt\" ]\n\n if mimeType == applicationxhwp:\n return [ \"hwp\" ]\n\n if mimeType == applicationvndyamahahvscript:\n return [ \"hvs\" ]\n\n if mimeType == applicationvndyamahahvvoice:\n return [ \"hvp\" ]\n\n if mimeType == applicationvndyamahahvdic:\n return [ \"hvd\" ]\n\n if mimeType == applicationvndkenameaapp:\n return [ \"htke\" ]\n\n if mimeType == textxcomponent:\n return [ \"htc\" ]\n\n if mimeType == imagehsj2:\n return [ \"hsj2\" ]\n\n if mimeType == textxhaskell:\n return [ \"hs\" ]\n\n if mimeType == applicationmacbinhex40:\n return [ \"hqx\" ]\n\n if mimeType == applicationvndhphps:\n return [ \"hps\" ]\n\n if mimeType == applicationvndhphpid:\n return [ \"hpid\" ]\n\n if mimeType == applicationvndhphpgl:\n return [ \"hpgl\" ]\n\n if mimeType == applicationwinhlp:\n return [ \"hlp\" ]\n\n if mimeType == applicationhjson:\n return [ \"hjson\" ]\n\n if mimeType == textxc__hdr:\n return [ \"hh\", \"hp\", \"hpp\", \"h\", \"hxx\" ]\n\n if mimeType == applicationxhfefloppyimage:\n return [ \"hfe\" ]\n\n if mimeType == applicationatscheld_xml:\n return [ \"held\" ]\n\n if mimeType == imagehej2k:\n return [ \"hej2\" ]\n\n if mimeType == imageheifsequence:\n return [ \"heifs\" ]\n\n if mimeType == imageheicsequence:\n return [ \"heics\" ]\n\n if mimeType == imageheif:\n return [ \"heic\", \"heif\", \"hif\" ]\n\n if mimeType == applicationxhdf:\n return [ \"hdf\", \"hdf4\", \"h4\", \"hdf5\", \"h5\" ]\n\n if mimeType == applicationxvirtualboxhdd:\n return [ \"hdd\" ]\n\n if mimeType == textxhandlebarstemplate:\n return [ \"hbs\" ]\n\n if mimeType == applicationvndhbci:\n return [ \"hbci\" ]\n\n if mimeType == applicationvndhal_xml:\n return [ \"hal\" ]\n\n if mimeType == videoh264:\n return [ \"h264\" ]\n\n if mimeType == videoh263:\n return [ \"h263\" ]\n\n if mimeType == videoh261:\n return [ \"h261\" ]\n\n if mimeType == applicationgzip:\n return [ \"gz\" ]\n\n if mimeType == applicationvndgeonext:\n return [ \"gxt\" ]\n\n if mimeType == applicationgxf:\n return [ \"gxf\" ]\n\n if mimeType == textxgcodegx:\n return [ \"gx\" ]\n\n if mimeType == textxgooglevideopointer:\n return [ \"gvp\" ]\n\n if mimeType == textvndgraphviz:\n return [ \"gv\" ]\n\n if mimeType == modelvndgtw:\n return [ \"gtw\" ]\n\n if mimeType == applicationvndgroovetoolmessage:\n return [ \"gtm\" ]\n\n if mimeType == audioxgsm:\n return [ \"gsm\" ]\n\n if mimeType == applicationvndgoogleappspresentation:\n return [ \"gslides\" ]\n\n if mimeType == applicationvndgoogleappsspreadsheet:\n return [ \"gsheet\" ]\n\n if 
mimeType == textxgenie:\n return [ \"gs\" ]\n\n if mimeType == applicationsrgs_xml:\n return [ \"grxml\" ]\n\n if mimeType == applicationvndgrooveinjector:\n return [ \"grv\" ]\n\n if mimeType == textxgroovy:\n return [ \"groovy\", \"gvy\", \"gy\", \"gsh\" ]\n\n if mimeType == applicationxgrampsxml:\n return [ \"gramps\" ]\n\n if mimeType == applicationsrgs:\n return [ \"gram\" ]\n\n if mimeType == textxgradle:\n return [ \"gradle\" ]\n\n if mimeType == applicationxgraphite:\n return [ \"gra\" ]\n\n if mimeType == applicationvndgrafeq:\n return [ \"gqf\", \"gqs\" ]\n\n if mimeType == applicationgpx_xml:\n return [ \"gpx\" ]\n\n if mimeType == applicationvndflographit:\n return [ \"gph\" ]\n\n if mimeType == applicationxgnuplot:\n return [ \"gp\", \"gplt\", \"gnuplot\" ]\n\n if mimeType == textxgo:\n return [ \"go\" ]\n\n if mimeType == applicationxgnumeric:\n return [ \"gnumeric\" ]\n\n if mimeType == applicationxgnucash:\n return [ \"gnucash\", \"gnc\", \"xac\" ]\n\n if mimeType == applicationgnunetdirectory:\n return [ \"gnd\" ]\n\n if mimeType == applicationvndgmx:\n return [ \"gmx\" ]\n\n if mimeType == applicationxprofile:\n return [ \"gmonout\" ]\n\n if mimeType == applicationxgettexttranslation:\n return [ \"gmo\", \"mo\" ]\n\n if mimeType == applicationgml_xml:\n return [ \"gml\" ]\n\n if mimeType == modelgltf_json:\n return [ \"gltf\" ]\n\n if mimeType == modelgltfbinary:\n return [ \"glb\" ]\n\n if mimeType == applicationxglade:\n return [ \"glade\" ]\n\n if mimeType == applicationvndgrooveidentitymessage:\n return [ \"gim\" ]\n\n if mimeType == imagexgimpgih:\n return [ \"gih\" ]\n\n if mimeType == imagegif:\n return [ \"gif\" ]\n\n if mimeType == applicationvndgroovehelp:\n return [ \"ghf\" ]\n\n if mimeType == applicationvndgeogebratool:\n return [ \"ggt\" ]\n\n if mimeType == applicationvndgeogebrafile:\n return [ \"ggb\" ]\n\n if mimeType == applicationxgamegearrom:\n return [ \"gg\" ]\n\n if mimeType == applicationxtexgf:\n return [ \"gf\" ]\n\n if mimeType == applicationvndgeometryexplorer:\n return [ \"gex\", \"gre\" ]\n\n if mimeType == applicationgeo_json:\n return [ \"geojson\" ]\n\n if mimeType == applicationvnddynageo:\n return [ \"geo\" ]\n\n if mimeType == applicationxgenesisrom:\n return [ \"gen\", \"sgd\" ]\n\n if mimeType == applicationxgedcom:\n return [ \"ged\", \"gedcom\" ]\n\n if mimeType == applicationxgodotshader:\n return [ \"gdshader\" ]\n\n if mimeType == applicationvndgoogleappsdocument:\n return [ \"gdoc\" ]\n\n if mimeType == modelvndgdl:\n return [ \"gdl\" ]\n\n if mimeType == applicationxgdromcue:\n return [ \"gdi\" ]\n\n if mimeType == applicationxgdscript:\n return [ \"gd\" ]\n\n if mimeType == textxgcode:\n return [ \"gcode\" ]\n\n if mimeType == applicationxgcacompressed:\n return [ \"gca\" ]\n\n if mimeType == imagexgimpgbr:\n return [ \"gbr\" ]\n\n if mimeType == applicationxgameboycolorrom:\n return [ \"gbc\", \"cgb\" ]\n\n if mimeType == applicationxgbarom:\n return [ \"gba\", \"agb\" ]\n\n if mimeType == applicationxgameboyrom:\n return [ \"gb\", \"sgb\" ]\n\n if mimeType == applicationxtads:\n return [ \"gam\" ]\n\n if mimeType == applicationvndgrooveaccount:\n return [ \"gac\" ]\n\n if mimeType == applicationvndgeospace:\n return [ \"g3w\" ]\n\n if mimeType == imageg3fax:\n return [ \"g3\" ]\n\n if mimeType == applicationvndgeoplan:\n return [ \"g2w\" ]\n\n if mimeType == applicationvndfuzzysheet:\n return [ \"fzs\" ]\n\n if mimeType == applicationvndadobefxp:\n return [ \"fxp\", \"fxpl\" ]\n\n if mimeType == videoxjavafx:\n return [ 
\"fxm\" ]\n\n if mimeType == videovndfvt:\n return [ \"fvt\" ]\n\n if mimeType == applicationvndanserwebfundstransferinitiation:\n return [ \"fti\" ]\n\n if mimeType == applicationvndfluxtimeclip:\n return [ \"ftc\" ]\n\n if mimeType == imagevndfst:\n return [ \"fst\" ]\n\n if mimeType == applicationvndfscweblaunch:\n return [ \"fsc\" ]\n\n if mimeType == imagevndfpx:\n return [ \"fpx\" ]\n\n if mimeType == applicationvndoasisopendocumenttextflatxml:\n return [ \"fodt\" ]\n\n if mimeType == applicationvndoasisopendocumentspreadsheetflatxml:\n return [ \"fods\" ]\n\n if mimeType == applicationvndoasisopendocumentpresentationflatxml:\n return [ \"fodp\" ]\n\n if mimeType == applicationvndoasisopendocumentgraphicsflatxml:\n return [ \"fodg\" ]\n\n if mimeType == textxxslfo:\n return [ \"fo\", \"xslfo\" ]\n\n if mimeType == applicationvndfrogansfnc:\n return [ \"fnc\" ]\n\n if mimeType == applicationvndframemaker:\n return [ \"fm\", \"frame\", \"maker\", \"book\" ]\n\n if mimeType == textvndfly:\n return [ \"fly\" ]\n\n if mimeType == textvndfmiflexstor:\n return [ \"flx\" ]\n\n if mimeType == applicationxkivio:\n return [ \"flw\" ]\n\n if mimeType == videoxflv:\n return [ \"flv\" ]\n\n if mimeType == applicationvndmicrografxflo:\n return [ \"flo\" ]\n\n if mimeType == videoxflic:\n return [ \"fli\", \"flc\" ]\n\n if mimeType == applicationvndflatpakrepo:\n return [ \"flatpakrepo\" ]\n\n if mimeType == applicationvndflatpakref:\n return [ \"flatpakref\" ]\n\n if mimeType == applicationvndflatpak:\n return [ \"flatpak\", \"xdgapp\" ]\n\n if mimeType == audioflac:\n return [ \"flac\" ]\n\n if mimeType == applicationxfluid:\n return [ \"fl\" ]\n\n if mimeType == applicationfits:\n return [ \"fits\", \"fit\", \"fts\" ]\n\n if mimeType == imagexxfig:\n return [ \"fig\" ]\n\n if mimeType == imagexfreehand:\n return [ \"fh\", \"fhc\", \"fh4\", \"fh5\", \"fh7\" ]\n\n if mimeType == applicationvndfujitsuoasysgp:\n return [ \"fg5\" ]\n\n if mimeType == textxgherkin:\n return [ \"feature\" ]\n\n if mimeType == applicationvnddenovofcselayoutlink:\n return [ \"fe_launch\" ]\n\n if mimeType == applicationfdt_xml:\n return [ \"fdt\" ]\n\n if mimeType == applicationxfdsdisk:\n return [ \"fds\" ]\n\n if mimeType == applicationfdf:\n return [ \"fdf\" ]\n\n if mimeType == applicationxrawfloppydiskimage:\n return [ \"fd\", \"qd\" ]\n\n if mimeType == applicationvndisacfcs:\n return [ \"fcs\" ]\n\n if mimeType == applicationvndadobeformscentralfcdt:\n return [ \"fcdt\" ]\n\n if mimeType == imagevndfastbidsheet:\n return [ \"fbs\" ]\n\n if mimeType == applicationxzipcompressedfb2:\n return [ \"fb2zip\" ]\n\n if mimeType == applicationxfictionbook_xml:\n return [ \"fb2\" ]\n\n if mimeType == textxfortran:\n return [ \"f\", \"f90\", \"f95\", \"for\", \"f77\" ]\n\n if mimeType == applicationvndezpixpackage:\n return [ \"ez3\" ]\n\n if mimeType == applicationvndezpixalbum:\n return [ \"ez2\" ]\n\n if mimeType == applicationandrewinset:\n return [ \"ez\" ]\n\n if mimeType == applicationvndnovadigmext:\n return [ \"ext\" ]\n\n if mimeType == imagexexr:\n return [ \"exr\" ]\n\n if mimeType == applicationexpress:\n return [ \"exp\" ]\n\n if mimeType == applicationexi:\n return [ \"exi\" ]\n\n if mimeType == applicationxmsdosexecutable:\n return [ \"exe\" ]\n\n if mimeType == textxelixir:\n return [ \"ex\", \"exs\" ]\n\n if mimeType == applicationxenvoy:\n return [ \"evy\" ]\n\n if mimeType == applicationxeva:\n return [ \"eva\" ]\n\n if mimeType == textxsetext:\n return [ \"etx\" ]\n\n if mimeType == applicationxetheme:\n 
return [ \"etheme\" ]\n\n if mimeType == applicationvndepsonesf:\n return [ \"esf\" ]\n\n if mimeType == applicationvndosgisubsystem:\n return [ \"esa\" ]\n\n if mimeType == applicationvndeszigno3_xml:\n return [ \"es3\", \"et3\" ]\n\n if mimeType == applicationecmascript:\n return [ \"es\", \"ecma\" ]\n\n if mimeType == textxerlang:\n return [ \"erl\" ]\n\n if mimeType == applicationepub_zip:\n return [ \"epub\" ]\n\n if mimeType == imagexgzeps:\n return [ \"epsgz\", \"epsigz\", \"epsfgz\" ]\n\n if mimeType == imagexbzeps:\n return [ \"epsbz2\", \"epsibz2\", \"epsfbz2\" ]\n\n if mimeType == imagexeps:\n return [ \"eps\", \"epsi\", \"epsf\" ]\n\n if mimeType == applicationvndmsfontobject:\n return [ \"eot\" ]\n\n if mimeType == audiovnddigitalwinds:\n return [ \"eol\" ]\n\n if mimeType == applicationxmlexternalparsedentity:\n return [ \"ent\" ]\n\n if mimeType == applicationxmsmetafile:\n return [ \"emz\" ]\n\n if mimeType == applicationvndemusicemusic_package:\n return [ \"emp\" ]\n\n if mimeType == applicationemotionml_xml:\n return [ \"emotionml\" ]\n\n if mimeType == applicationemma_xml:\n return [ \"emma\" ]\n\n if mimeType == messagerfc822:\n return [ \"eml\", \"mime\" ]\n\n if mimeType == imageemf:\n return [ \"emf\" ]\n\n if mimeType == textxemacslisp:\n return [ \"el\" ]\n\n if mimeType == applicationvndpgosasli:\n return [ \"ei6\" ]\n\n if mimeType == applicationxegon:\n return [ \"egon\" ]\n\n if mimeType == applicationvndpicsel:\n return [ \"efif\" ]\n\n if mimeType == applicationvndnovadigmedx:\n return [ \"edx\" ]\n\n if mimeType == applicationvndnovadigmedm:\n return [ \"edm\" ]\n\n if mimeType == audiovndnueraecelp9600:\n return [ \"ecelp9600\" ]\n\n if mimeType == audiovndnueraecelp7470:\n return [ \"ecelp7470\" ]\n\n if mimeType == audiovndnueraecelp4800:\n return [ \"ecelp4800\" ]\n\n if mimeType == textxeiffel:\n return [ \"e\", \"eif\" ]\n\n if mimeType == applicationvndspotfiredxp:\n return [ \"dxp\" ]\n\n if mimeType == imagevnddxf:\n return [ \"dxf\" ]\n\n if mimeType == imagevnddwg:\n return [ \"dwg\" ]\n\n if mimeType == modelvnddwf:\n return [ \"dwf\" ]\n\n if mimeType == applicationatscdwd_xml:\n return [ \"dwd\" ]\n\n if mimeType == applicationxgzdvi:\n return [ \"dvigz\" ]\n\n if mimeType == applicationxbzdvi:\n return [ \"dvibz2\" ]\n\n if mimeType == applicationxdvi:\n return [ \"dvi\" ]\n\n if mimeType == videovnddvbfile:\n return [ \"dvb\" ]\n\n if mimeType == videodv:\n return [ \"dv\" ]\n\n if mimeType == textxdevicetreesource:\n return [ \"dtsi\" ]\n\n if mimeType == audiovnddtshd:\n return [ \"dtshd\" ]\n\n if mimeType == audiovnddts:\n return [ \"dts\" ]\n\n if mimeType == applicationxmldtd:\n return [ \"dtd\" ]\n\n if mimeType == textxdevicetreebinary:\n return [ \"dtb\" ]\n\n if mimeType == applicationdssc_der:\n return [ \"dssc\" ]\n\n if mimeType == textxdsl:\n return [ \"dsl\" ]\n\n if mimeType == audioxdsf:\n return [ \"dsf\" ]\n\n if mimeType == textprslinestag:\n return [ \"dsc\" ]\n\n if mimeType == imagedicomrle:\n return [ \"drle\" ]\n\n if mimeType == audiovnddra:\n return [ \"dra\" ]\n\n if mimeType == applicationvnddpgraph:\n return [ \"dpg\" ]\n\n if mimeType == applicationvndosgidp:\n return [ \"dp\" ]\n\n if mimeType == applicationvndopenxmlformatsofficedocumentwordprocessingmltemplate:\n return [ \"dotx\" ]\n\n if mimeType == applicationvndmswordtemplatemacroenabled12:\n return [ \"dotm\" ]\n\n if mimeType == applicationmswordtemplate:\n return [ \"dot\" ]\n\n if mimeType == 
applicationvndopenxmlformatsofficedocumentwordprocessingmldocument:\n return [ \"docx\" ]\n\n if mimeType == applicationvndmsworddocumentmacroenabled12:\n return [ \"docm\" ]\n\n if mimeType == applicationmsword:\n return [ \"doc\" ]\n\n if mimeType == imagexadobedng:\n return [ \"dng\" ]\n\n if mimeType == applicationvnddna:\n return [ \"dna\" ]\n\n if mimeType == applicationxapplediskimage:\n return [ \"dmg\" ]\n\n if mimeType == imagevnddjvu:\n return [ \"djvu\", \"djv\" ]\n\n if mimeType == messagedispositionnotification:\n return [ \"disposition-notification\" ]\n\n if mimeType == applicationvndmobiusdis:\n return [ \"dis\" ]\n\n if mimeType == applicationxdirector:\n return [ \"dir\", \"dxr\", \"cst\", \"cct\", \"cxt\", \"w3d\", \"fgd\", \"swa\" ]\n\n if mimeType == textxpatch:\n return [ \"diff\", \"patch\" ]\n\n if mimeType == applicationdicom:\n return [ \"dicomdir\", \"dcm\" ]\n\n if mimeType == textxc:\n return [ \"dic\" ]\n\n if mimeType == applicationxdiadiagram:\n return [ \"dia\" ]\n\n if mimeType == applicationxdgccompressed:\n return [ \"dgc\" ]\n\n if mimeType == audioxdff:\n return [ \"dff\" ]\n\n if mimeType == applicationvnddreamfactory:\n return [ \"dfac\" ]\n\n if mimeType == applicationxdesktop:\n return [ \"desktop\", \"kdelnk\" ]\n\n if mimeType == applicationxx509cacert:\n return [ \"der\", \"crt\", \"cert\", \"pem\" ]\n\n if mimeType == applicationvnddebianbinarypackage:\n return [ \"deb\", \"udeb\" ]\n\n if mimeType == imagexdds:\n return [ \"dds\" ]\n\n if mimeType == applicationvndsyncmldmddf_xml:\n return [ \"ddf\" ]\n\n if mimeType == applicationvndfujixeroxddd:\n return [ \"ddd\" ]\n\n if mimeType == applicationvndomadd2_xml:\n return [ \"dd2\" ]\n\n if mimeType == textvndcurldcurl:\n return [ \"dcurl\" ]\n\n if mimeType == imagexkodakdcr:\n return [ \"dcr\" ]\n\n if mimeType == textxdcl:\n return [ \"dcl\" ]\n\n if mimeType == applicationxdocbook_xml:\n return [ \"dbk\", \"docbook\" ]\n\n if mimeType == applicationxdbf:\n return [ \"dbf\" ]\n\n if mimeType == applicationdavmount_xml:\n return [ \"davmount\" ]\n\n if mimeType == textxdart:\n return [ \"dart\" ]\n\n if mimeType == applicationxdar:\n return [ \"dar\" ]\n\n if mimeType == applicationvndmobiusdaf:\n return [ \"daf\" ]\n\n if mimeType == modelvndcollada_xml:\n return [ \"dae\" ]\n\n if mimeType == textxdsrc:\n return [ \"d\", \"di\" ]\n\n if mimeType == applicationprscww:\n return [ \"cww\" ]\n\n if mimeType == applicationcwl:\n return [ \"cwl\" ]\n\n if mimeType == applicationxappleworksdocument:\n return [ \"cwk\" ]\n\n if mimeType == textvndcurl:\n return [ \"curl\" ]\n\n if mimeType == imagexwinbitmap:\n return [ \"cur\" ]\n\n if mimeType == applicationxcue:\n return [ \"cue\" ]\n\n if mimeType == applicationcuseeme:\n return [ \"cu\" ]\n\n if mimeType == textcsvschema:\n return [ \"csvs\" ]\n\n if mimeType == textcsv:\n return [ \"csv\" ]\n\n if mimeType == textcss:\n return [ \"css\" ]\n\n if mimeType == applicationvndcommonspace:\n return [ \"csp\" ]\n\n if mimeType == applicationxcompressediso:\n return [ \"cso\" ]\n\n if mimeType == chemicalxcsml:\n return [ \"csml\" ]\n\n if mimeType == applicationvndcitationstylesstyle_xml:\n return [ \"csl\" ]\n\n if mimeType == applicationxcsh:\n return [ \"csh\" ]\n\n if mimeType == textxcsharp:\n return [ \"cs\" ]\n\n if mimeType == applicationvndrigcryptonote:\n return [ \"cryptonote\" ]\n\n if mimeType == applicationxchromeextension:\n return [ \"crx\" ]\n\n if mimeType == imagexcanoncrw:\n return [ \"crw\" ]\n\n if mimeType == 
applicationpkixcrl:\n return [ \"crl\" ]\n\n if mimeType == textxcredits:\n return [ \"credits\" ]\n\n if mimeType == applicationxmscardfile:\n return [ \"crd\" ]\n\n if mimeType == imagexcanoncr3:\n return [ \"cr3\" ]\n\n if mimeType == imagexcanoncr2:\n return [ \"cr2\" ]\n\n if mimeType == textxcrystal:\n return [ \"cr\" ]\n\n if mimeType == applicationmaccompactpro:\n return [ \"cpt\" ]\n\n if mimeType == textxc__src:\n return [ \"cpp\", \"cxx\", \"cc\", \"c\" ]\n\n if mimeType == applicationcpl_xml:\n return [ \"cpl\" ]\n\n if mimeType == applicationxcpiocompressed:\n return [ \"cpiogz\" ]\n\n if mimeType == applicationxcpio:\n return [ \"cpio\" ]\n\n if mimeType == applicationxcore:\n return [ \"core\" ]\n\n if mimeType == textxcopying:\n return [ \"copying\" ]\n\n if mimeType == applicationxmsdownload:\n return [ \"com\", \"bat\" ]\n\n if mimeType == applicationvndcoffeescript:\n return [ \"coffee\" ]\n\n if mimeType == applicationvndrimcod:\n return [ \"cod\" ]\n\n if mimeType == imagexcmx:\n return [ \"cmx\" ]\n\n if mimeType == applicationvndyellowrivercustommenu:\n return [ \"cmp\" ]\n\n if mimeType == chemicalxcml:\n return [ \"cml\" ]\n\n if mimeType == chemicalxcmdf:\n return [ \"cmdf\" ]\n\n if mimeType == applicationvndcosmocaller:\n return [ \"cmc\" ]\n\n if mimeType == textxcmake:\n return [ \"cmake\", \"cmakeliststxt\" ]\n\n if mimeType == applicationxmsclip:\n return [ \"clp\" ]\n\n if mimeType == applicationvndcrickclicker:\n return [ \"clkx\" ]\n\n if mimeType == applicationvndcrickclickerwordbank:\n return [ \"clkw\" ]\n\n if mimeType == applicationvndcrickclickertemplate:\n return [ \"clkt\" ]\n\n if mimeType == applicationvndcrickclickerpalette:\n return [ \"clkp\" ]\n\n if mimeType == applicationvndcrickclickerkeyboard:\n return [ \"clkk\" ]\n\n if mimeType == applicationxjava:\n return [ \"class\" ]\n\n if mimeType == applicationvndclaymore:\n return [ \"cla\" ]\n\n if mimeType == textxopenclsrc:\n return [ \"cl\" ]\n\n if mimeType == applicationnode:\n return [ \"cjs\" ]\n\n if mimeType == applicationvndmsartgalry:\n return [ \"cil\" ]\n\n if mimeType == applicationvndanserwebcertificateissueinitiation:\n return [ \"cii\" ]\n\n if mimeType == chemicalxcif:\n return [ \"cif\" ]\n\n if mimeType == applicationxkchart:\n return [ \"chrt\" ]\n\n if mimeType == applicationvndmshtmlhelp:\n return [ \"chm\" ]\n\n if mimeType == applicationxmamechd:\n return [ \"chd\" ]\n\n if mimeType == applicationxchat:\n return [ \"chat\" ]\n\n if mimeType == textxchangelog:\n return [ \"changelog\" ]\n\n if mimeType == imagecgm:\n return [ \"cgm\" ]\n\n if mimeType == applicationxcfscompressed:\n return [ \"cfs\" ]\n\n if mimeType == applicationpkixcert:\n return [ \"cer\" ]\n\n if mimeType == applicationvndcinderella:\n return [ \"cdy\" ]\n\n if mimeType == applicationvndchemdraw_xml:\n return [ \"cdxml\" ]\n\n if mimeType == chemicalxcdx:\n return [ \"cdx\" ]\n\n if mimeType == applicationvndcoreldraw:\n return [ \"cdr\" ]\n\n if mimeType == applicationcdmiqueue:\n return [ \"cdmiq\" ]\n\n if mimeType == applicationcdmiobject:\n return [ \"cdmio\" ]\n\n if mimeType == applicationcdmidomain:\n return [ \"cdmid\" ]\n\n if mimeType == applicationcdmicontainer:\n return [ \"cdmic\" ]\n\n if mimeType == applicationcdmicapability:\n return [ \"cdmia\" ]\n\n if mimeType == applicationvndmediastationcdkey:\n return [ \"cdkey\" ]\n\n if mimeType == applicationxdiscjugglercdimage:\n return [ \"cdi\" ]\n\n if mimeType == applicationcdfx_xml:\n return [ \"cdfx\" ]\n\n if mimeType == 
applicationxnetcdf:\n return [ \"cdf\", \"nc\" ]\n\n if mimeType == applicationvndcontactcmsg:\n return [ \"cdbcmsg\" ]\n\n if mimeType == applicationccxml_xml:\n return [ \"ccxml\" ]\n\n if mimeType == applicationxcocoa:\n return [ \"cco\" ]\n\n if mimeType == applicationxccmx:\n return [ \"ccmx\" ]\n\n if mimeType == applicationvndcomicbook_zip:\n return [ \"cbz\" ]\n\n if mimeType == applicationxcbt:\n return [ \"cbt\" ]\n\n if mimeType == applicationvndcomicbookrar:\n return [ \"cbr\" ]\n\n if mimeType == textxcobol:\n return [ \"cbl\", \"cob\" ]\n\n if mimeType == applicationxcbr:\n return [ \"cba\" ]\n\n if mimeType == applicationxcb7:\n return [ \"cb7\" ]\n\n if mimeType == applicationvndmspkiseccat:\n return [ \"cat\" ]\n\n if mimeType == applicationvndcurlcar:\n return [ \"car\" ]\n\n if mimeType == audioxcaf:\n return [ \"caf\" ]\n\n if mimeType == applicationvndmscabcompressed:\n return [ \"cab\" ]\n\n if mimeType == applicationvndclonkc4group:\n return [ \"c4g\", \"c4d\", \"c4f\", \"c4p\", \"c4u\" ]\n\n if mimeType == applicationvndcluetrustcartomobileconfigpkg:\n return [ \"c11amz\" ]\n\n if mimeType == applicationvndcluetrustcartomobileconfig:\n return [ \"c11amc\" ]\n\n if mimeType == applicationxbzip:\n return [ \"bz2\", \"bz\" ]\n\n if mimeType == imageprsbtif:\n return [ \"btif\", \"btf\" ]\n\n if mimeType == modelvndvalvesourcecompiledmap:\n return [ \"bsp\" ]\n\n if mimeType == applicationxbsdiff:\n return [ \"bsdiff\" ]\n\n if mimeType == applicationxbpspatch:\n return [ \"bps\" ]\n\n if mimeType == applicationxbzip2:\n return [ \"boz\" ]\n\n if mimeType == applicationvndpreviewsystemsbox:\n return [ \"box\" ]\n\n if mimeType == imagebmp:\n return [ \"bmp\", \"dib\" ]\n\n if mimeType == applicationvndbalsamiqbmml_xml:\n return [ \"bmml\" ]\n\n if mimeType == applicationvndbmi:\n return [ \"bmi\" ]\n\n if mimeType == applicationxblender:\n return [ \"blend\", \"blender\" ]\n\n if mimeType == applicationxblorb:\n return [ \"blb\", \"blorb\" ]\n\n if mimeType == applicationoctetstream:\n return [ \"bin\", \"dms\", \"lrf\", \"mar\", \"dist\", \"distz\", \"bpk\", \"dump\", \"elc\", \"deploy\", \"dll\", \"msp\", \"msm\", \"buffer\" ]\n\n if mimeType == videovndradgamettoolsbink:\n return [ \"bik\", \"bk2\" ]\n\n if mimeType == textxbibtex:\n return [ \"bib\" ]\n\n if mimeType == applicationvndfujitsuoasysprs:\n return [ \"bh2\" ]\n\n if mimeType == applicationvndrealvncbed:\n return [ \"bed\" ]\n\n if mimeType == applicationbdoc:\n return [ \"bdoc\" ]\n\n if mimeType == applicationxfontbdf:\n return [ \"bdf\" ]\n\n if mimeType == applicationxbcpio:\n return [ \"bcpio\" ]\n\n if mimeType == applicationxtrash:\n return [ \"bak\", \"old\", \"sik\" ]\n\n if mimeType == imagevndpcob16:\n return [ \"b16\" ]\n\n if mimeType == applicationvndamazonmobi8ebook:\n return [ \"azw3\", \"kfx\" ]\n\n if mimeType == applicationvndamazonebook:\n return [ \"azw\" ]\n\n if mimeType == imagevndairzipacceleratorazv:\n return [ \"azv\" ]\n\n if mimeType == applicationvndairzipfilesecureazs:\n return [ \"azs\" ]\n\n if mimeType == applicationvndairzipfilesecureazf:\n return [ \"azf\" ]\n\n if mimeType == videoannodex:\n return [ \"axv\" ]\n\n if mimeType == audioannodex:\n return [ \"axa\" ]\n\n if mimeType == applicationxawk:\n return [ \"awk\" ]\n\n if mimeType == audioamrwb:\n return [ \"awb\" ]\n\n if mimeType == applicationxapplixword:\n return [ \"aw\" ]\n\n if mimeType == imageavif:\n return [ \"avif\", \"avifs\" ]\n\n if mimeType == videoxmsvideo:\n return [ \"avi\", \"avf\", \"divx\" 
]\n\n if mimeType == imageavcs:\n return [ \"avcs\" ]\n\n if mimeType == imageavci:\n return [ \"avci\" ]\n\n if mimeType == textxsystemdunit:\n return [ \"automount\", \"device\", \"mount\", \"path\", \"scope\", \"slice\", \"socket\", \"swap\", \"target\", \"timer\" ]\n\n if mimeType == textxauthors:\n return [ \"authors\" ]\n\n if mimeType == audiobasic:\n return [ \"au\", \"snd\" ]\n\n if mimeType == applicationvndantixgamecomponent:\n return [ \"atx\" ]\n\n if mimeType == applicationatomsvc_xml:\n return [ \"atomsvc\" ]\n\n if mimeType == applicationatomdeleted_xml:\n return [ \"atomdeleted\" ]\n\n if mimeType == applicationatomcat_xml:\n return [ \"atomcat\" ]\n\n if mimeType == applicationatom_xml:\n return [ \"atom\" ]\n\n if mimeType == applicationvndacucorp:\n return [ \"atc\", \"acutc\" ]\n\n if mimeType == audioxmsasx:\n return [ \"asx\", \"wax\", \"wvx\", \"wmx\" ]\n\n if mimeType == imageastc:\n return [ \"astc\" ]\n\n if mimeType == applicationxasp:\n return [ \"asp\" ]\n\n if mimeType == applicationvndaccpacsimplyaso:\n return [ \"aso\" ]\n\n if mimeType == applicationvndmsasf:\n return [ \"asf\" ]\n\n if mimeType == textxcommonlisp:\n return [ \"asd\", \"fasl\", \"lisp\", \"ros\" ]\n\n if mimeType == applicationxasar:\n return [ \"asar\" ]\n\n if mimeType == applicationxapplixspreadsheet:\n return [ \"as\" ]\n\n if mimeType == imagexsonyarw:\n return [ \"arw\" ]\n\n if mimeType == applicationxarj:\n return [ \"arj\" ]\n\n if mimeType == applicationxfreearc:\n return [ \"arc\" ]\n\n if mimeType == applicationvndlotusapproach:\n return [ \"apr\" ]\n\n if mimeType == applicationxmsapplication:\n return [ \"application\" ]\n\n if mimeType == applicationxiso9660appimage:\n return [ \"appimage\" ]\n\n if mimeType == imageapng:\n return [ \"apng\" ]\n\n if mimeType == applicationvndandroidpackagearchive:\n return [ \"apk\" ]\n\n if mimeType == audioxape:\n return [ \"ape\" ]\n\n if mimeType == applicationannodex:\n return [ \"anx\" ]\n\n if mimeType == videoxanim:\n return [ \"anim19j\" ]\n\n if mimeType == applicationxnavianimation:\n return [ \"ani\" ]\n\n if mimeType == audioxamzxml:\n return [ \"amz\" ]\n\n if mimeType == audioamr:\n return [ \"amr\" ]\n\n if mimeType == applicationvndamigaami:\n return [ \"ami\" ]\n\n if mimeType == applicationxalz:\n return [ \"alz\" ]\n\n if mimeType == applicationvnddvbait:\n return [ \"ait\" ]\n\n if mimeType == applicationvndadobeairapplicationinstallerpackage_zip:\n return [ \"air\" ]\n\n if mimeType == audioxaiff:\n return [ \"aiff\", \"aif\" ]\n\n if mimeType == audioxaifc:\n return [ \"aifc\", \"aiffc\" ]\n\n if mimeType == applicationillustrator:\n return [ \"ai\" ]\n\n if mimeType == applicationvndaheadspace:\n return [ \"ahead\" ]\n\n if mimeType == applicationvndage:\n return [ \"age\" ]\n\n if mimeType == imagexapplixgraphics:\n return [ \"ag\" ]\n\n if mimeType == applicationvndibmmodcap:\n return [ \"afp\", \"listafp\", \"list3820\" ]\n\n if mimeType == applicationxfontafm:\n return [ \"afm\" ]\n\n if mimeType == applicationvndaudiograph:\n return [ \"aep\" ]\n\n if mimeType == audioadpcm:\n return [ \"adp\" ]\n\n if mimeType == applicationxamigadiskformat:\n return [ \"adf\" ]\n\n if mimeType == textxadasrc:\n return [ \"adb\", \"ads\" ]\n\n if mimeType == applicationvndacucobol:\n return [ \"acu\" ]\n\n if mimeType == applicationxace:\n return [ \"ace\" ]\n\n if mimeType == applicationvndamericandynamicsacc:\n return [ \"acc\" ]\n\n if mimeType == audioac3:\n return [ \"ac3\" ]\n\n if mimeType == applicationpkixattrcert:\n 
return [ \"ac\" ]\n\n if mimeType == applicationxabiword:\n return [ \"abw\", \"abwcrashed\", \"abwgz\", \"zabw\" ]\n\n if mimeType == audiovndaudibleaax:\n return [ \"aax\" ]\n\n if mimeType == applicationxauthorwareseg:\n return [ \"aas\" ]\n\n if mimeType == applicationxauthorwaremap:\n return [ \"aam\" ]\n\n if mimeType == audioaac:\n return [ \"aac\", \"adts\" ]\n\n if mimeType == applicationxauthorwarebin:\n return [ \"aab\", \"x32\", \"u32\", \"vox\" ]\n\n if mimeType == audioxpnaudibleaudio:\n return [ \"aa\" ]\n\n if mimeType == applicationxatari7800rom:\n return [ \"a78\" ]\n\n if mimeType == applicationxatari2600rom:\n return [ \"a26\" ]\n\n if mimeType == applicationxarchive:\n return [ \"a\", \"ar\" ]\n\n if mimeType == applicationx7zcompressed:\n return [ \"7z\", \"7z001\" ]\n\n if mimeType == applicationxt602:\n return [ \"602\" ]\n\n if mimeType == model3mf:\n return [ \"3mf\" ]\n\n if mimeType == video3gpp:\n return [ \"3gp\", \"3gpp\", \"3ga\" ]\n\n if mimeType == video3gpp2:\n return [ \"3g2\", \"3gp2\", \"3gpp2\" ]\n\n if mimeType == applicationxnintendo3dsexecutable:\n return [ \"3dsx\" ]\n\n if mimeType == applicationxnintendo3dsrom:\n return [ \"3ds\", \"cci\" ]\n\n if mimeType == textvndin3d3dml:\n return [ \"3dml\" ]\n\n if mimeType == applicationxgenesis32xrom:\n return [ \"32x\", \"mdx\" ]\n\n if mimeType == applicationvnd1000mindsdecisionmodel_xml:\n return [ \"1km\" ]\n\n if mimeType == applicationvndlotus123:\n return [ \"123\", \"wk1\", \"wk3\", \"wk4\", \"wks\" ]\n \n return []",
"def _check_url_file_type(headers: Dict[str, str]) -> Optional[str]:\n content_type = headers.get(\"content-type\", \"\").lower()\n file_type = None\n\n for extension in SUPPORTED_MIME_TYPES.keys():\n for mime_type in SUPPORTED_MIME_TYPES.get(extension, []):\n if mime_type in content_type:\n file_type = extension\n break\n\n return file_type"
] | [
"0.7667273",
"0.763851",
"0.75550187",
"0.75023365",
"0.7006191",
"0.69769895",
"0.69073343",
"0.69062054",
"0.6781376",
"0.67640036",
"0.67325264",
"0.6723504",
"0.6719304",
"0.6687197",
"0.6671605",
"0.66456544",
"0.66422045",
"0.66077816",
"0.65924525",
"0.65606976",
"0.65226626",
"0.6508522",
"0.6476599",
"0.6474171",
"0.6473315",
"0.64422166",
"0.64230984",
"0.6411514",
"0.6403587",
"0.64011955"
] | 0.82419276 | 0 |
Test that the mimetypes are validated correctly | def test_mimetypes_magic(self, mock_get_content_type):
def get_content_type(value):
return value.content_type
mock_get_content_type.side_effect = get_content_type
field = TypedFileField(required=False, type_whitelist=self.good_types, use_magic=True)
for t in self.good_types:
name = 'somefooname'
file = UploadedFile(name=name, size=1, content_type=t)
assert field.clean(file) is file
for t in self.bad_types:
name = 'somefooname'
file = UploadedFile(name=name, size=1, content_type=t)
with pytest.raises(forms.ValidationError):
field.clean(file) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_mimetypes(self):\n field = TypedFileField(required=False, type_whitelist=self.good_types, use_magic=False)\n\n for t in self.good_types:\n name = 'somefooname'\n file = UploadedFile(name=name, size=1, content_type=t)\n assert field.clean(file) is file\n\n for t in self.bad_types:\n name = 'somefooname'\n file = UploadedFile(name=name, size=1, content_type=t)\n with pytest.raises(forms.ValidationError):\n field.clean(file)",
"def test_no_mimetype_magic(self, mock_get_content_type):\n mock_get_content_type.side_effect = ValueError\n\n field = TypedFileField(required=False, type_whitelist=self.good_types)\n\n for t in self.good_types:\n name = 'somefooname'\n file = UploadedFile(name=name, size=1, content_type=t)\n with pytest.raises(forms.ValidationError):\n field.clean(file)",
"def _check_mimetype(self):\n if self.mimetype in Config.aliases:\n mimetype = Config.aliases[self.mimetype]\n else:\n mimetype = self.mimetype\n expected_extensions = mimetypes.guess_all_extensions(mimetype,\n strict=False)\n if expected_extensions:\n if self.has_extension and self.extension not in expected_extensions:\n # LOG: improve this string\n self.make_dangerous('expected extensions')",
"def test_no_mimetype(self):\n field = TypedFileField(required=False, type_whitelist=self.good_types, use_magic=False)\n\n for t in self.good_types:\n name = 'somefooname'\n file = UploadedFile(name=name, size=1, content_type=t)\n del file.content_type\n with pytest.raises(forms.ValidationError):\n field.clean(file)",
"def test_extensions(self):\n field = TypedFileField(required=False, ext_whitelist=self.good_extensions)\n\n for ext in self.good_extensions:\n name = 'somefooname.%s' % ext\n file = UploadedFile(name=name, size=1)\n assert field.clean(file) is file\n\n for ext in self.bad_extensions:\n name = 'somefooname.%s' % ext\n file = UploadedFile(name=name, size=1)\n with pytest.raises(forms.ValidationError):\n field.clean(file)",
"def test_extension_to_content_type(self):\n assert ct.extension_to_content_type(\"jpg\") == \"image/jpg\"\n assert ct.extension_to_content_type(\"jpeg\") == \"image/jpg\"\n assert ct.extension_to_content_type(\"png\") == \"image/png\"\n ct.extension_to_content_type(\"css\",) == \"text/css\"\n ct.extension_to_content_type(\"html\") == \"text/html\"\n ct.extension_to_content_type(\"json\") == \"application/json\"\n ct.extension_to_content_type(\"xml\") == \"application/xml\"\n ct.extension_to_content_type(\"zip\") == \"application/zip\"",
"def test_content_type_to_extension(self):\n assert ct.content_type_to_extension(\"image/jpg\") == \"jpg\"\n assert ct.content_type_to_extension(\"image/jpeg\") == \"jpg\"\n assert ct.content_type_to_extension(\"image/png\",) == \"png\"\n assert ct.content_type_to_extension(\"text/css\",) == \"css\"\n assert ct.content_type_to_extension(\"text/html\") == \"html\"\n assert ct.content_type_to_extension(\"text/css\") == \"css\"\n assert ct.content_type_to_extension(\"application/json\") == \"json\"\n assert ct.content_type_to_extension(\"application/xml\") == \"xml\"\n assert ct.content_type_to_extension(\"application/zip\") == \"zip\"",
"def getMimeTypes(self): #$NON-NLS-1$\r",
"def test_general_subset_file_type():\n pass",
"def test_both(self):\n field = TypedFileField(required=False,\n ext_whitelist=self.good_extensions,\n type_whitelist=self.good_types,\n use_magic=False)\n\n for ext in self.good_extensions:\n name = 'somefooname.%s' % ext\n\n for t in self.good_types:\n file = UploadedFile(name=name, size=1, content_type=t)\n assert field.clean(file) is file\n\n for t in self.bad_types:\n file = UploadedFile(name=name, size=1, content_type=t)\n with pytest.raises(forms.ValidationError):\n field.clean(file)\n\n for ext in self.bad_extensions:\n name = 'somefooname.%s' % ext\n\n for t in self.good_types:\n file = UploadedFile(name=name, size=1, content_type=t)\n with pytest.raises(forms.ValidationError):\n field.clean(file)\n\n for t in self.bad_types:\n file = UploadedFile(name=name, size=1, content_type=t)\n with pytest.raises(forms.ValidationError):\n field.clean(file)",
"def test_invalid_filetype(self):\n rv = self.post('/queue/',\n content={'image': (StringIO('This is not an image'),\n 'text.txt')},\n token=self.user_token)\n self.assertJSONError(rv, 'TagalleryInvalidFileExtension')\n return",
"def test_available_input_formats():\n assert set([\"Mapchete\", \"raster_file\", \"vector_file\"]).issubset(\n set(available_input_formats())\n )",
"def check_eligible_mimetype(self, ctype, uid):\n self.helper.log_debug(\n 'check_eligible_mimtype: checking content-type %s of msg uid %s' %\n (ctype, uid))\n if ctype == \"application/zip\":\n return True\n elif ctype == \"application/gzip\":\n return True\n elif ctype == \"application/x-gzip\":\n return True\n elif ctype == \"application/octet-stream\":\n # Non-standard mimetype used by Amazon SES dmarc reports\n return True\n elif ctype == \"application-x-gzip\":\n # Non-standard mimetype used by Comcast dmarc reports\n return True\n elif ctype == \"application/x-zip-compressed\":\n # Non-standard mimetype used by Yahoo dmarc reports\n return True\n elif ctype == \"application/xml\":\n return True\n elif ctype == \"text/xml\":\n return True\n else:\n self.helper.log_debug(\n 'check_eligible_mimtype: skipping content-type %s of msg uid %s' %\n (ctype, uid))\n return False",
"def secure_filetype(file):\n ext_list = ['png', 'jpg', 'jpeg']\n ext_valid = file.filename.split('.')[-1] in ext_list\n\n mimetype_list = [\"image/jpeg\", \"image/jpg\", \"image/png\"]\n mimetype_valid = file.mimetype in mimetype_list\n\n return ext_valid and mimetype_valid",
"def test_allowed_file(self):\r\n u = Uploader()\r\n for ext in u.allowed_extensions:\r\n # Change extension to uppercase to check that it works too\r\n filename = 'test.%s' % ext.upper()\r\n err_msg = (\"This file: %s should be allowed, but it failed\"\r\n % filename)\r\n assert u.allowed_file(filename) is True, err_msg\r\n\r\n err_msg = \"Non allowed extensions should return false\"\r\n assert u.allowed_file('wrong.pdf') is False, err_msg",
"def test_invalid_file_type(barred_tac_list_importer):\n expect_failure(barred_tac_list_importer, exc_message='Wrong suffix')",
"def _check_extension(self):\n if self.extension in Config.override_ext:\n expected_mimetype = Config.override_ext[self.extension]\n else:\n expected_mimetype, encoding = mimetypes.guess_type(self.src_path,\n strict=False)\n if expected_mimetype in Config.aliases:\n expected_mimetype = Config.aliases[expected_mimetype]\n is_known_extension = self.extension in mimetypes.types_map.keys()\n if is_known_extension and expected_mimetype != self.mimetype:\n # LOG: improve this string\n self.make_dangerous('expected_mimetype')",
"def test_mime_lookup(self):\n mime_out_test_path = os.path.join(THIS_DIR, 'file-blobs.out')\n mime_lookup = MimeLookup(mime_out_test_path)\n self.assertEqual(mime_lookup.get_entry_count(), 5)\n self.assertEqual(mime_lookup.get_mime_string('4b11cb448cab68470c546bc52220b01fbc4572f7'),\n 'image/png; charset=binary')\n self.assertEqual(mime_lookup.get_mime_string('f8fa2aa81a623f9847436c5162d4e775e04cd948'),\n 'text/plain; charset=us-ascii')\n self.assertEqual(mime_lookup.get_mime_string('9f422292259b59ee6c9ad7a25180b0afc16f47e9'),\n LONG_MIME)\n self.assertEqual(mime_lookup.get_mime_string('d1717e616fdae20110acb51b3ba3a37350628131'),\n 'application/pdf; charset=binary')\n self.assertEqual(mime_lookup.get_mime_string('a7510ac5483396687bf670860f48d21eecede68a'),\n 'application/zip; charset=binary')",
"def test_fetch_or_create_requires_file_type():\n pytest.raises(ValueError, media.fetch_or_create_media_item, b'spam')",
"def check_file_type(fname):\n ext = path.splitext(fname)[1]\n return ext in allowed_extensions",
"def validate_image_type(filename: str) -> bool:\n supported_extensions = (\"png\", \"jpg\", \"jpeg\")\n return (filename not in (None, \"\")) and (get_extension(filename) in supported_extensions)",
"def valid_media_type(media_type):\n return media_type in ACCEPTED_MEDIA_TYPES",
"def match_mime_type(self, src: str):\n for key in self.keys():\n if Pattern.test(key, src):\n return self[key]\n return \"text/plain\"",
"def test_11_is_allowed_file_correct_ext(self):\n\n for ext in list(ALLOWED_EXTENSIONS):\n filename = f\"somename.{ext}\"\n is_allowed = utils.is_allowed_file(filename)\n self.assertTrue(is_allowed)",
"def test_get_file_type(self):\n file_list = {'events': 'monol_testA_nustar_fpma_ev',\n 'lc': 'monol_testA_E3-50_lc',\n 'pds': 'monol_testA_E3-50_pds',\n 'gti': 'monol_testA_E3-50_rebin4_gti',\n 'cpds': 'monol_test_E3-50_cpds'}\n for realtype in file_list.keys():\n fname = os.path.join(self.datadir,\n file_list[realtype] + HEN_FILE_EXTENSION)\n ftype, _ = hen.io.get_file_type(fname)\n assert ftype == realtype, \"File types do not match\"",
"def is_accept_type(file_name):\n bare_name, file_extension = os.path.splitext(file_name)\n for ext in ACCEPTED_FILES:\n if file_extension.lower() == ext:\n return True\n return False",
"def allowed_file_type(file_name):\n\treturn file_name.lower().endswith(ALLOWED_FILE_TYPES)",
"def validFiles(self, files):\n for myfile in files:\n if not ( ( myfile.get_uri_scheme() == 'file' ) or \\\n ( myfile.get_uri_scheme() == 'smb' ) ):\n return False\n elif ( not myfile.get_mime_type() in self.oootypes ) and \\\n ( not myfile.get_mime_type() in self.plaintypes ):\n return False\n return True",
"def getMimeTypeFileExtensions(mimeType):\n #getMimeTypeFileExtensions body\n\n if mimeType == applicationzlib:\n return [ \"zz\" ]\n\n if mimeType == applicationzstd:\n return [ \"zst\" ]\n\n if mimeType == applicationxzoo:\n return [ \"zoo\" ]\n\n if mimeType == applicationvndhandheldentertainment_xml:\n return [ \"zmm\" ]\n\n if mimeType == applicationvndzul:\n return [ \"zir\", \"zirz\" ]\n\n if mimeType == applicationzip:\n return [ \"zip\", \"zipx\" ]\n\n if mimeType == applicationxopenzim:\n return [ \"zim\" ]\n\n if mimeType == applicationvndzzazzdeck_xml:\n return [ \"zaz\" ]\n\n if mimeType == applicationxzmachine:\n return [ \"z1\", \"z2\", \"z3\", \"z4\", \"z5\", \"z6\", \"z7\", \"z8\" ]\n\n if mimeType == applicationxcompress:\n return [ \"z\" ]\n\n if mimeType == videovndyoutubeyt:\n return [ \"yt\" ]\n\n if mimeType == textxsuseymp:\n return [ \"ymp\" ]\n\n if mimeType == applicationyin_xml:\n return [ \"yin\" ]\n\n if mimeType == applicationyang:\n return [ \"yang\" ]\n\n if mimeType == applicationxyaml:\n return [ \"yaml\", \"yml\" ]\n\n if mimeType == applicationxxz:\n return [ \"xz\" ]\n\n if mimeType == chemicalxxyz:\n return [ \"xyz\" ]\n\n if mimeType == imagexxwindowdump:\n return [ \"xwd\" ]\n\n if mimeType == applicationvndmozillaxul_xml:\n return [ \"xul\" ]\n\n if mimeType == applicationxspf_xml:\n return [ \"xspf\" ]\n\n if mimeType == applicationvndsyncml_xml:\n return [ \"xsm\" ]\n\n if mimeType == applicationxslt_xml:\n return [ \"xsl\", \"xslt\" ]\n\n if mimeType == applicationprsxsf_xml:\n return [ \"xsf\" ]\n\n if mimeType == applicationvndinterconformnet:\n return [ \"xpw\", \"xpx\" ]\n\n if mimeType == applicationvndmsxpsdocument:\n return [ \"xps\" ]\n\n if mimeType == applicationvndisxpr:\n return [ \"xpr\" ]\n\n if mimeType == imagexxpixmap:\n return [ \"xpm\" ]\n\n if mimeType == applicationxproc_xml:\n return [ \"xpl\" ]\n\n if mimeType == applicationxxpinstall:\n return [ \"xpi\" ]\n\n if mimeType == applicationxop_xml:\n return [ \"xop\" ]\n\n if mimeType == applicationvndolpcsugar:\n return [ \"xo\" ]\n\n if mimeType == applicationxcapns_xml:\n return [ \"xns\" ]\n\n if mimeType == applicationxml:\n return [ \"xml\", \"xbl\", \"xsd\", \"rng\" ]\n\n if mimeType == textxxmi:\n return [ \"xmi\" ]\n\n if mimeType == audioxxmf:\n return [ \"xmf\" ]\n\n if mimeType == audioxxm:\n return [ \"xm\" ]\n\n if mimeType == applicationvndopenxmlformatsofficedocumentspreadsheetmltemplate:\n return [ \"xltx\" ]\n\n if mimeType == applicationvndmsexceltemplatemacroenabled12:\n return [ \"xltm\" ]\n\n if mimeType == applicationvndopenxmlformatsofficedocumentspreadsheetmlsheet:\n return [ \"xlsx\" ]\n\n if mimeType == applicationvndmsexcelsheetmacroenabled12:\n return [ \"xlsm\" ]\n\n if mimeType == applicationvndmsexcelsheetbinarymacroenabled12:\n return [ \"xlsb\" ]\n\n if mimeType == applicationvndmsexcel:\n return [ \"xls\", \"xlc\", \"xll\", \"xlm\", \"xlw\", \"xla\", \"xlt\", \"xld\" ]\n\n if mimeType == applicationxliff_xml:\n return [ \"xlf\", \"xliff\" ]\n\n if mimeType == applicationvndmsexceladdinmacroenabled12:\n return [ \"xlam\" ]\n\n if mimeType == imagevndxiff:\n return [ \"xif\" ]\n\n if mimeType == audioxxi:\n return [ \"xi\" ]\n\n if mimeType == applicationxhtml_xml:\n return [ \"xhtml\", \"xht\", \"html\", \"htm\" ]\n\n if mimeType == applicationvndpwgxhtmlprint_xml:\n return [ \"xhtm\" ]\n\n if mimeType == applicationvndxfdl:\n return [ \"xfdl\" ]\n\n if mimeType == applicationvndadobexfdf:\n return [ \"xfdf\" ]\n\n if mimeType == 
applicationpatchopserror_xml:\n return [ \"xer\" ]\n\n if mimeType == applicationxenc_xml:\n return [ \"xenc\" ]\n\n if mimeType == applicationxcapel_xml:\n return [ \"xel\" ]\n\n if mimeType == applicationvndfujixeroxdocuworks:\n return [ \"xdw\" ]\n\n if mimeType == applicationdssc_xml:\n return [ \"xdssc\" ]\n\n if mimeType == applicationvndadobexdp_xml:\n return [ \"xdp\" ]\n\n if mimeType == applicationvndsyncmldm_xml:\n return [ \"xdm\" ]\n\n if mimeType == applicationxcapdiff_xml:\n return [ \"xdf\" ]\n\n if mimeType == applicationcalendar_xml:\n return [ \"xcs\" ]\n\n if mimeType == imagexcompressedxcf:\n return [ \"xcfgz\", \"xcfbz2\" ]\n\n if mimeType == imagexxcf:\n return [ \"xcf\" ]\n\n if mimeType == applicationxcapcaps_xml:\n return [ \"xca\" ]\n\n if mimeType == imagexxbitmap:\n return [ \"xbm\" ]\n\n if mimeType == applicationxxbel:\n return [ \"xbel\" ]\n\n if mimeType == applicationvndfujixeroxdocuworksbinder:\n return [ \"xbd\" ]\n\n if mimeType == applicationxmsxbap:\n return [ \"xbap\" ]\n\n if mimeType == applicationxcapatt_xml:\n return [ \"xav\" ]\n\n if mimeType == applicationxxar:\n return [ \"xar\", \"pkg\" ]\n\n if mimeType == applicationxsilverlightapp:\n return [ \"xap\" ]\n\n if mimeType == applicationxaml_xml:\n return [ \"xaml\" ]\n\n if mimeType == imagexsigmax3f:\n return [ \"x3f\" ]\n\n if mimeType == modelx3d_vrml:\n return [ \"x3dv\", \"x3dvz\" ]\n\n if mimeType == modelx3d_binary:\n return [ \"x3db\", \"x3dbz\" ]\n\n if mimeType == modelx3d_xml:\n return [ \"x3d\", \"x3dz\" ]\n\n if mimeType == modelvndparasolidtransmittext:\n return [ \"x_t\" ]\n\n if mimeType == modelvndparasolidtransmitbinary:\n return [ \"x_b\" ]\n\n if mimeType == applicationxwwf:\n return [ \"wwf\" ]\n\n if mimeType == audioxwavpackcorrection:\n return [ \"wvc\" ]\n\n if mimeType == audioxwavpack:\n return [ \"wv\", \"wvp\" ]\n\n if mimeType == applicationvndwebturbo:\n return [ \"wtb\" ]\n\n if mimeType == applicationwspolicy_xml:\n return [ \"wspolicy\" ]\n\n if mimeType == applicationwsdl_xml:\n return [ \"wsdl\" ]\n\n if mimeType == applicationxwonderswancolorrom:\n return [ \"wsc\" ]\n\n if mimeType == applicationxwonderswanrom:\n return [ \"ws\" ]\n\n if mimeType == applicationxmswrite:\n return [ \"wri\" ]\n\n if mimeType == applicationvndwqd:\n return [ \"wqd\" ]\n\n if mimeType == applicationvndmswpl:\n return [ \"wpl\" ]\n\n if mimeType == applicationxwpg:\n return [ \"wpg\" ]\n\n if mimeType == applicationvndwordperfect:\n return [ \"wp\", \"wp4\", \"wp5\", \"wp6\", \"wpd\", \"wpp\" ]\n\n if mimeType == fontwoff2:\n return [ \"woff2\" ]\n\n if mimeType == fontwoff:\n return [ \"woff\" ]\n\n if mimeType == applicationxmswmz:\n return [ \"wmz\" ]\n\n if mimeType == videoxmswmv:\n return [ \"wmv\" ]\n\n if mimeType == applicationvndwapwmlscriptc:\n return [ \"wmlsc\" ]\n\n if mimeType == textvndwapwmlscript:\n return [ \"wmls\" ]\n\n if mimeType == applicationvndwapwmlc:\n return [ \"wmlc\" ]\n\n if mimeType == textvndwapwml:\n return [ \"wml\" ]\n\n if mimeType == imagewmf:\n return [ \"wmf\" ]\n\n if mimeType == applicationxmswmd:\n return [ \"wmd\" ]\n\n if mimeType == audioxmswma:\n return [ \"wma\" ]\n\n if mimeType == videoxmswm:\n return [ \"wm\" ]\n\n if mimeType == applicationxpartialdownload:\n return [ \"wkdownload\", \"crdownload\", \"part\" ]\n\n if mimeType == applicationxmswim:\n return [ \"wim\", \"swm\" ]\n\n if mimeType == applicationwatcherinfo_xml:\n return [ \"wif\" ]\n\n if mimeType == applicationwidget:\n return [ \"wgt\" ]\n\n if mimeType == 
applicationvndpmiwidget:\n return [ \"wg\" ]\n\n if mimeType == imagewebp:\n return [ \"webp\" ]\n\n if mimeType == applicationmanifest_json:\n return [ \"webmanifest\" ]\n\n if mimeType == videowebm:\n return [ \"webm\" ]\n\n if mimeType == applicationxwebappmanifest_json:\n return [ \"webapp\" ]\n\n if mimeType == audiowebm:\n return [ \"weba\" ]\n\n if mimeType == imagevndmsphoto:\n return [ \"wdp\" ]\n\n if mimeType == applicationvndmsworks:\n return [ \"wcm\", \"wdb\", \"wps\", \"xlr\" ]\n\n if mimeType == applicationvndwapwbxml:\n return [ \"wbxml\" ]\n\n if mimeType == applicationvndcriticaltoolswbs_xml:\n return [ \"wbs\" ]\n\n if mimeType == imagevndwapwbmp:\n return [ \"wbmp\" ]\n\n if mimeType == applicationxquattropro:\n return [ \"wb1\", \"wb2\", \"wb3\" ]\n\n if mimeType == audioxwav:\n return [ \"wav\" ]\n\n if mimeType == applicationwasm:\n return [ \"wasm\" ]\n\n if mimeType == applicationjavaarchive:\n return [ \"war\", \"ear\" ]\n\n if mimeType == applicationvndsunwadl_xml:\n return [ \"wadl\" ]\n\n if mimeType == applicationxwiiwad:\n return [ \"wad\" ]\n\n if mimeType == applicationvoicexml_xml:\n return [ \"vxml\" ]\n\n if mimeType == modelvndvtu:\n return [ \"vtu\" ]\n\n if mimeType == textvtt:\n return [ \"vtt\" ]\n\n if mimeType == imagevndvalvesourcetexture:\n return [ \"vtf\" ]\n\n if mimeType == applicationvndmsvisiotemplatemain_xml:\n return [ \"vstx\" ]\n\n if mimeType == applicationvndmsvisiotemplatemacroenabledmain_xml:\n return [ \"vstm\" ]\n\n if mimeType == applicationvndmsvisiostencilmain_xml:\n return [ \"vssx\" ]\n\n if mimeType == applicationvndmsvisiostencilmacroenabledmain_xml:\n return [ \"vssm\" ]\n\n if mimeType == applicationvndvsf:\n return [ \"vsf\" ]\n\n if mimeType == applicationvndmsvisiodrawingmain_xml:\n return [ \"vsdx\" ]\n\n if mimeType == applicationvndmsvisiodrawingmacroenabledmain_xml:\n return [ \"vsdm\" ]\n\n if mimeType == applicationvndvisio:\n return [ \"vsd\", \"vst\", \"vsw\", \"vss\" ]\n\n if mimeType == modelvrml:\n return [ \"vrm\", \"vrml\", \"wrl\" ]\n\n if mimeType == applicationxvhddisk:\n return [ \"vpc\" ]\n\n if mimeType == audioxvoc:\n return [ \"voc\" ]\n\n if mimeType == applicationxvmdkdisk:\n return [ \"vmdk\" ]\n\n if mimeType == videovndvivo:\n return [ \"viv\", \"vivo\" ]\n\n if mimeType == applicationvndvisionary:\n return [ \"vis\" ]\n\n if mimeType == applicationxvhdxdisk:\n return [ \"vhdx\" ]\n\n if mimeType == textxvhdl:\n return [ \"vhd\", \"vhdl\" ]\n\n if mimeType == modelvndsapvds:\n return [ \"vds\" ]\n\n if mimeType == applicationxvdidisk:\n return [ \"vdi\" ]\n\n if mimeType == applicationvndvcx:\n return [ \"vcx\" ]\n\n if mimeType == textcalendar:\n return [ \"vcs\", \"ics\", \"ifb\" ]\n\n if mimeType == applicationvndgroovevcard:\n return [ \"vcg\" ]\n\n if mimeType == applicationxcdlink:\n return [ \"vcd\" ]\n\n if mimeType == textvcard:\n return [ \"vcard\", \"vcf\", \"vct\", \"gcrd\" ]\n\n if mimeType == textvbscript:\n return [ \"vbs\" ]\n\n if mimeType == applicationxvirtualboxvboxextpack:\n return [ \"vbox-extpack\" ]\n\n if mimeType == applicationxvirtualboxvbox:\n return [ \"vbox\" ]\n\n if mimeType == applicationxvirtualboyrom:\n return [ \"vb\" ]\n\n if mimeType == textxvala:\n return [ \"vala\", \"vapi\" ]\n\n if mimeType == textxverilog:\n return [ \"v\" ]\n\n if mimeType == applicationvnddecezip:\n return [ \"uvz\", \"uvvz\" ]\n\n if mimeType == applicationvnddeceunspecified:\n return [ \"uvx\", \"uvvx\" ]\n\n if mimeType == videovnddecevideo:\n return [ \"uvv\", \"uvvv\" ]\n\n 
if mimeType == videovnduvvump4:\n return [ \"uvu\", \"uvvu\" ]\n\n if mimeType == applicationvnddecettml_xml:\n return [ \"uvt\", \"uvvt\" ]\n\n if mimeType == videovnddecesd:\n return [ \"uvs\", \"uvvs\" ]\n\n if mimeType == videovnddecepd:\n return [ \"uvp\", \"uvvp\" ]\n\n if mimeType == videovnddecemobile:\n return [ \"uvm\", \"uvvm\" ]\n\n if mimeType == imagevnddecegraphic:\n return [ \"uvi\", \"uvvi\", \"uvg\", \"uvvg\" ]\n\n if mimeType == videovnddecehd:\n return [ \"uvh\", \"uvvh\" ]\n\n if mimeType == applicationvnddecedata:\n return [ \"uvf\", \"uvvf\", \"uvd\", \"uvvd\" ]\n\n if mimeType == audiovnddeceaudio:\n return [ \"uva\", \"uvva\" ]\n\n if mimeType == textxuuencode:\n return [ \"uue\", \"uu\" ]\n\n if mimeType == applicationvnduiqtheme:\n return [ \"utz\" ]\n\n if mimeType == applicationxustar:\n return [ \"ustar\" ]\n\n if mimeType == modelvndusdz_zip:\n return [ \"usdz\" ]\n\n if mimeType == applicationxmswinurl:\n return [ \"url\" ]\n\n if mimeType == texturilist:\n return [ \"uri\", \"uris\", \"urls\" ]\n\n if mimeType == applicationvnduoml_xml:\n return [ \"uoml\", \"uo\" ]\n\n if mimeType == applicationvndunity:\n return [ \"unityweb\" ]\n\n if mimeType == applicationvndumajin:\n return [ \"umj\" ]\n\n if mimeType == applicationxglulx:\n return [ \"ulx\" ]\n\n if mimeType == audioxmod:\n return [ \"ult\", \"uni\", \"m15\", \"mtm\", \"669\", \"med\" ]\n\n if mimeType == textxuil:\n return [ \"uil\" ]\n\n if mimeType == applicationxdesigner:\n return [ \"ui\" ]\n\n if mimeType == applicationxufraw:\n return [ \"ufraw\" ]\n\n if mimeType == applicationvndufdl:\n return [ \"ufd\", \"ufdl\" ]\n\n if mimeType == applicationubjson:\n return [ \"ubj\" ]\n\n if mimeType == messageglobal:\n return [ \"u8msg\" ]\n\n if mimeType == messageglobaldispositionnotification:\n return [ \"u8mdn\" ]\n\n if mimeType == messageglobalheaders:\n return [ \"u8hdr\" ]\n\n if mimeType == messageglobaldeliverystatus:\n return [ \"u8dsn\" ]\n\n if mimeType == modelu3d:\n return [ \"u3d\" ]\n\n if mimeType == textplain:\n return [ \"txt\", \"text\", \"conf\", \"def\", \"list\", \"in\", \"ini\" ]\n\n if mimeType == applicationvndmobiustxf:\n return [ \"txf\" ]\n\n if mimeType == applicationvndgenomatixtuxedo:\n return [ \"txd\" ]\n\n if mimeType == textxtwig:\n return [ \"twig\" ]\n\n if mimeType == applicationvndsimtechmindmapper:\n return [ \"twd\", \"twds\" ]\n\n if mimeType == applicationxfontttx:\n return [ \"ttx\" ]\n\n if mimeType == applicationttml_xml:\n return [ \"ttml\" ]\n\n if mimeType == textturtle:\n return [ \"ttl\" ]\n\n if mimeType == fontttf:\n return [ \"ttf\" ]\n\n if mimeType == fontcollection:\n return [ \"ttc\" ]\n\n if mimeType == audioxtta:\n return [ \"tta\" ]\n\n if mimeType == texttabseparatedvalues:\n return [ \"tsv\" ]\n\n if mimeType == applicationtimestampeddata:\n return [ \"tsd\" ]\n\n if mimeType == textvndtrolltechlinguist:\n return [ \"ts\" ]\n\n if mimeType == applicationxmsterminal:\n return [ \"trm\" ]\n\n if mimeType == applicationtrig:\n return [ \"trig\" ]\n\n if mimeType == applicationvndtrueapp:\n return [ \"tra\" ]\n\n if mimeType == texttroff:\n return [ \"tr\", \"roff\" ]\n\n if mimeType == applicationvndtridtpt:\n return [ \"tpt\" ]\n\n if mimeType == applicationvndgroovetooltemplate:\n return [ \"tpl\" ]\n\n if mimeType == applicationxbittorrent:\n return [ \"torrent\" ]\n\n if mimeType == applicationtoml:\n return [ \"toml\" ]\n\n if mimeType == applicationxcdrdaotoc:\n return [ \"toc\" ]\n\n if mimeType == applicationvndmstnef:\n return [ 
\"tnef\", \"tnf\", \"winmaildat\" ]\n\n if mimeType == applicationvndtmobilelivetv:\n return [ \"tmo\" ]\n\n if mimeType == imagetiff:\n return [ \"tif\", \"tiff\" ]\n\n if mimeType == applicationvndmsofficetheme:\n return [ \"thmx\" ]\n\n if mimeType == applicationxwindowsthemepack:\n return [ \"themepack\" ]\n\n if mimeType == applicationxtheme:\n return [ \"theme\" ]\n\n if mimeType == imagextga:\n return [ \"tga\", \"icb\", \"tpic\", \"vda\" ]\n\n if mimeType == imagetifffx:\n return [ \"tfx\" ]\n\n if mimeType == applicationxtextfm:\n return [ \"tfm\" ]\n\n if mimeType == applicationthraud_xml:\n return [ \"tfi\" ]\n\n if mimeType == textxtexinfo:\n return [ \"texi\", \"texinfo\" ]\n\n if mimeType == textxtex:\n return [ \"tex\", \"ltx\", \"sty\", \"cls\", \"dtx\", \"ins\", \"latex\" ]\n\n if mimeType == applicationtei_xml:\n return [ \"tei\", \"teicorpus\" ]\n\n if mimeType == applicationvndsmartteacher:\n return [ \"teacher\" ]\n\n if mimeType == applicationurctargetdesc_xml:\n return [ \"td\" ]\n\n if mimeType == texttcl:\n return [ \"tcl\", \"tk\" ]\n\n if mimeType == applicationvnd3gpp2tcap:\n return [ \"tcap\" ]\n\n if mimeType == applicationxzstdcompressedtar:\n return [ \"tarzst\", \"tzst\" ]\n\n if mimeType == applicationxtarz:\n return [ \"tarz\", \"taz\" ]\n\n if mimeType == applicationxxzcompressedtar:\n return [ \"tarxz\", \"txz\" ]\n\n if mimeType == applicationxtzo:\n return [ \"tarlzo\", \"tzo\" ]\n\n if mimeType == applicationxlzmacompressedtar:\n return [ \"tarlzma\", \"tlz\" ]\n\n if mimeType == applicationxlz4compressedtar:\n return [ \"tarlz4\" ]\n\n if mimeType == applicationxlzipcompressedtar:\n return [ \"tarlz\" ]\n\n if mimeType == applicationxlrzipcompressedtar:\n return [ \"tarlrz\", \"tlrz\" ]\n\n if mimeType == applicationxcompressedtar:\n return [ \"targz\", \"tgz\" ]\n\n if mimeType == applicationxbzipcompressedtar:\n return [ \"tarbz2\", \"tarbz\", \"tbz2\", \"tbz\", \"tb2\" ]\n\n if mimeType == applicationxtar:\n return [ \"tar\", \"gtar\", \"gem\" ]\n\n if mimeType == imagevndtencenttap:\n return [ \"tap\" ]\n\n if mimeType == applicationvndtaointentmodulearchive:\n return [ \"tao\" ]\n\n if mimeType == audioxtak:\n return [ \"tak\" ]\n\n if mimeType == applicationvndmynfc:\n return [ \"taglet\" ]\n\n if mimeType == imaget38:\n return [ \"t38\" ]\n\n if mimeType == applicationxt3vmimage:\n return [ \"t3\" ]\n\n if mimeType == textxtxt2tags:\n return [ \"t2t\" ]\n\n if mimeType == textspreadsheet:\n return [ \"sylk\", \"slk\" ]\n\n if mimeType == applicationvndsunxmlwriter:\n return [ \"sxw\" ]\n\n if mimeType == applicationvndsunxmlmath:\n return [ \"sxm\" ]\n\n if mimeType == applicationvndsunxmlimpress:\n return [ \"sxi\" ]\n\n if mimeType == applicationvndsunxmlwriterglobal:\n return [ \"sxg\" ]\n\n if mimeType == applicationvndsunxmldraw:\n return [ \"sxd\" ]\n\n if mimeType == applicationvndsunxmlcalc:\n return [ \"sxc\" ]\n\n if mimeType == applicationswid_xml:\n return [ \"swidtag\" ]\n\n if mimeType == applicationvndaristanetworksswi:\n return [ \"swi\" ]\n\n if mimeType == applicationvndadobeflashmovie:\n return [ \"swf\", \"spl\" ]\n\n if mimeType == textxsvhdr:\n return [ \"svh\" ]\n\n if mimeType == imagesvg_xmlcompressed:\n return [ \"svgz\", \"svggz\" ]\n\n if mimeType == imagesvg_xml:\n return [ \"svg\" ]\n\n if mimeType == applicationvndsvd:\n return [ \"svd\" ]\n\n if mimeType == applicationvnddvbservice:\n return [ \"svc\" ]\n\n if mimeType == applicationxsv4crc:\n return [ \"sv4crc\" ]\n\n if mimeType == applicationxsv4cpio:\n 
return [ \"sv4cpio\" ]\n\n if mimeType == textxsvsrc:\n return [ \"sv\" ]\n\n if mimeType == applicationvndsuscalendar:\n return [ \"sus\", \"susp\" ]\n\n if mimeType == imagexsunraster:\n return [ \"sun\" ]\n\n if mimeType == textxmicrodvd:\n return [ \"sub\" ]\n\n if mimeType == textstylus:\n return [ \"stylus\", \"styl\" ]\n\n if mimeType == applicationvndsunxmlwritertemplate:\n return [ \"stw\" ]\n\n if mimeType == applicationvndpgformat:\n return [ \"str\" ]\n\n if mimeType == modelstep_zip:\n return [ \"stpz\" ]\n\n if mimeType == modelstepxml_zip:\n return [ \"stpxz\" ]\n\n if mimeType == modelstep_xml:\n return [ \"stpx\" ]\n\n if mimeType == audioxstm:\n return [ \"stm\" ]\n\n if mimeType == modelstl:\n return [ \"stl\" ]\n\n if mimeType == applicationhyperstudio:\n return [ \"stk\" ]\n\n if mimeType == applicationvndsunxmlimpresstemplate:\n return [ \"sti\" ]\n\n if mimeType == applicationvndwtstf:\n return [ \"stf\" ]\n\n if mimeType == applicationvndsunxmldrawtemplate:\n return [ \"std\" ]\n\n if mimeType == applicationvndsunxmlcalctemplate:\n return [ \"stc\" ]\n\n if mimeType == applicationvndsailingtrackertrack:\n return [ \"st\" ]\n\n if mimeType == applicationssml_xml:\n return [ \"ssml\" ]\n\n if mimeType == applicationvndepsonssf:\n return [ \"ssf\" ]\n\n if mimeType == applicationvndkodakdescriptor:\n return [ \"sse\" ]\n\n if mimeType == applicationssdl_xml:\n return [ \"ssdl\" ]\n\n if mimeType == textxssa:\n return [ \"ssa\", \"ass\" ]\n\n if mimeType == applicationsparqlresults_xml:\n return [ \"srx\" ]\n\n if mimeType == applicationsru_xml:\n return [ \"sru\" ]\n\n if mimeType == applicationxsubrip:\n return [ \"srt\" ]\n\n if mimeType == imagexsonysrf:\n return [ \"srf\" ]\n\n if mimeType == applicationxsourcerpm:\n return [ \"srcrpm\", \"spm\" ]\n\n if mimeType == applicationxwaissource:\n return [ \"src\" ]\n\n if mimeType == imagexsonysr2:\n return [ \"sr2\" ]\n\n if mimeType == applicationvndsquashfs:\n return [ \"sqsh\" ]\n\n if mimeType == applicationvndsqlite3:\n return [ \"sqlite3\" ]\n\n if mimeType == applicationxsqlite2:\n return [ \"sqlite2\" ]\n\n if mimeType == applicationsql:\n return [ \"sql\" ]\n\n if mimeType == applicationxapplesystemprofiler_xml:\n return [ \"spx\" ]\n\n if mimeType == applicationscvpvprequest:\n return [ \"spq\" ]\n\n if mimeType == applicationscvpvpresponse:\n return [ \"spp\" ]\n\n if mimeType == textvndin3dspot:\n return [ \"spot\" ]\n\n if mimeType == applicationvndyamahasmafphrase:\n return [ \"spf\" ]\n\n if mimeType == textxrpmspec:\n return [ \"spec\" ]\n\n if mimeType == textspdx:\n return [ \"spdx\" ]\n\n if mimeType == applicationxfontspeedo:\n return [ \"spd\" ]\n\n if mimeType == applicationxsharedlib:\n return [ \"so\", \"so09\" ]\n\n if mimeType == applicationxfontsnf:\n return [ \"snf\" ]\n\n if mimeType == applicationvndsnap:\n return [ \"snap\" ]\n\n if mimeType == applicationvndstepmaniapackage:\n return [ \"smzip\" ]\n\n if mimeType == videoxsmv:\n return [ \"smv\" ]\n\n if mimeType == applicationxsmsrom:\n return [ \"sms\" ]\n\n if mimeType == videovndradgamettoolssmacker:\n return [ \"smk\" ]\n\n if mimeType == applicationsmil_xml:\n return [ \"smil\", \"smi\", \"sml\", \"kino\" ]\n\n if mimeType == applicationvndstardivisionmath:\n return [ \"smf\" ]\n\n if mimeType == applicationvndstardivisionmail:\n return [ \"smd\" ]\n\n if mimeType == applicationvndstepmaniastepchart:\n return [ \"sm\" ]\n\n if mimeType == applicationvndepsonsalt:\n return [ \"slt\" ]\n\n if mimeType == applicationroutestsid_xml:\n 
return [ \"sls\" ]\n\n if mimeType == textslim:\n return [ \"slim\", \"slm\" ]\n\n if mimeType == applicationvndopenxmlformatsofficedocumentpresentationmlslide:\n return [ \"sldx\" ]\n\n if mimeType == applicationvndmspowerpointslidemacroenabled12:\n return [ \"sldm\" ]\n\n if mimeType == applicationpgpkeys:\n return [ \"skr\", \"pkr\", \"key\" ]\n\n if mimeType == applicationvndkoan:\n return [ \"skp\", \"skd\", \"skt\", \"skm\" ]\n\n if mimeType == imagexskencil:\n return [ \"sk\", \"sk1\" ]\n\n if mimeType == applicationsieve:\n return [ \"siv\", \"sieve\" ]\n\n if mimeType == applicationxstuffitx:\n return [ \"sitx\" ]\n\n if mimeType == applicationxstuffit:\n return [ \"sit\" ]\n\n if mimeType == xepocxsisxapp:\n return [ \"sisx\" ]\n\n if mimeType == applicationvndsymbianinstall:\n return [ \"sis\" ]\n\n if mimeType == audiosilk:\n return [ \"sil\" ]\n\n if mimeType == applicationpgpsignature:\n return [ \"sig\" ]\n\n if mimeType == audioprssid:\n return [ \"sid\", \"psid\" ]\n\n if mimeType == applicationxsiag:\n return [ \"siag\" ]\n\n if mimeType == texthtml:\n return [ \"shtml\" ]\n\n if mimeType == applicationxshorten:\n return [ \"shn\" ]\n\n if mimeType == applicationshf_xml:\n return [ \"shf\" ]\n\n if mimeType == textshex:\n return [ \"shex\" ]\n\n if mimeType == applicationxshar:\n return [ \"shar\" ]\n\n if mimeType == applicationxdiashape:\n return [ \"shape\" ]\n\n if mimeType == applicationxshellscript:\n return [ \"sh\" ]\n\n if mimeType == textsgml:\n return [ \"sgml\", \"sgm\" ]\n\n if mimeType == imagexsgi:\n return [ \"sgi\" ]\n\n if mimeType == applicationxgosgf:\n return [ \"sgf\" ]\n\n if mimeType == applicationxsg1000rom:\n return [ \"sg\" ]\n\n if mimeType == textxsfv:\n return [ \"sfv\" ]\n\n if mimeType == applicationvndspotfiresfs:\n return [ \"sfs\" ]\n\n if mimeType == applicationvndhydrostatixsofdata:\n return [ \"sfd-hdstx\" ]\n\n if mimeType == applicationvndnintendosnesrom:\n return [ \"sfc\", \"smc\" ]\n\n if mimeType == applicationsetregistrationinitiation:\n return [ \"setreg\" ]\n\n if mimeType == applicationsetpaymentinitiation:\n return [ \"setpay\" ]\n\n if mimeType == textxdbusservice:\n return [ \"service\" ]\n\n if mimeType == applicationjavaserializedobject:\n return [ \"ser\" ]\n\n if mimeType == applicationsensml_xml:\n return [ \"sensmlx\" ]\n\n if mimeType == applicationsenml_xml:\n return [ \"senmlx\" ]\n\n if mimeType == applicationvndsemf:\n return [ \"semf\" ]\n\n if mimeType == applicationvndsemd:\n return [ \"semd\" ]\n\n if mimeType == applicationvndsema:\n return [ \"sema\" ]\n\n if mimeType == applicationvndfdsnseed:\n return [ \"seed\", \"dataless\" ]\n\n if mimeType == applicationvndseemail:\n return [ \"see\" ]\n\n if mimeType == applicationxsea:\n return [ \"sea\" ]\n\n if mimeType == applicationvndstardivisionwriter:\n return [ \"sdw\", \"vor\", \"sgl\" ]\n\n if mimeType == applicationvndstardivisionchart:\n return [ \"sds\" ]\n\n if mimeType == applicationvndsolentsdkm_xml:\n return [ \"sdkm\", \"sdkd\" ]\n\n if mimeType == applicationvndstardivisionimpress:\n return [ \"sdd\", \"sdp\" ]\n\n if mimeType == applicationvndstardivisioncalc:\n return [ \"sdc\" ]\n\n if mimeType == applicationvndstardivisiondraw:\n return [ \"sda\" ]\n\n if mimeType == textvndcurlscurl:\n return [ \"scurl\" ]\n\n if mimeType == textxscss:\n return [ \"scss\" ]\n\n if mimeType == applicationscvpcvresponse:\n return [ \"scs\" ]\n\n if mimeType == applicationscvpcvrequest:\n return [ \"scq\" ]\n\n if mimeType == textxscons:\n return [ 
\"sconstruct\", \"sconscript\" ]\n\n if mimeType == applicationxgodotscene:\n return [ \"scn\", \"tscn\", \"escn\" ]\n\n if mimeType == textxscheme:\n return [ \"scm\", \"ss\" ]\n\n if mimeType == applicationxmsschedule:\n return [ \"scd\" ]\n\n if mimeType == textxscala:\n return [ \"scala\", \"sc\" ]\n\n if mimeType == applicationsbml_xml:\n return [ \"sbml\" ]\n\n if mimeType == applicationxspsssav:\n return [ \"sav\", \"zsav\" ]\n\n if mimeType == textxsass:\n return [ \"sass\" ]\n\n if mimeType == applicationxthomsonsapimage:\n return [ \"sap\" ]\n\n if mimeType == applicationxsami:\n return [ \"sami\" ]\n\n if mimeType == applicationxamipro:\n return [ \"sam\" ]\n\n if mimeType == textxsagemath:\n return [ \"sage\" ]\n\n if mimeType == applicationvndyamahasmafaudio:\n return [ \"saf\" ]\n\n if mimeType == audioxs3m:\n return [ \"s3m\" ]\n\n if mimeType == textxasm:\n return [ \"s\", \"asm\" ]\n\n if mimeType == imagexpanasonicrw2:\n return [ \"rw2\" ]\n\n if mimeType == videovndrnrealvideo:\n return [ \"rv\", \"rvx\" ]\n\n if mimeType == applicationrouteusd_xml:\n return [ \"rusd\" ]\n\n if mimeType == applicationxmakeself:\n return [ \"run\" ]\n\n if mimeType == textrichtext:\n return [ \"rtx\" ]\n\n if mimeType == applicationrtf:\n return [ \"rtf\" ]\n\n if mimeType == textvndrnrealtext:\n return [ \"rt\" ]\n\n if mimeType == textxrst:\n return [ \"rst\" ]\n\n if mimeType == applicationrss_xml:\n return [ \"rss\" ]\n\n if mimeType == applicationurcressheet_xml:\n return [ \"rsheet\" ]\n\n if mimeType == applicationrsd_xml:\n return [ \"rsd\" ]\n\n if mimeType == applicationatscrsat_xml:\n return [ \"rsat\" ]\n\n if mimeType == textrust:\n return [ \"rs\" ]\n\n if mimeType == applicationvndnokiaradiopreset:\n return [ \"rpst\" ]\n\n if mimeType == applicationvndnokiaradiopresets:\n return [ \"rpss\" ]\n\n if mimeType == applicationxrpm:\n return [ \"rpm\" ]\n\n if mimeType == applicationvndcloantorp9:\n return [ \"rp9\" ]\n\n if mimeType == imagevndrnrealpix:\n return [ \"rp\" ]\n\n if mimeType == applicationrpkiroa:\n return [ \"roa\" ]\n\n if mimeType == applicationrelaxngcompactsyntax:\n return [ \"rnc\" ]\n\n if mimeType == audioxpnrealaudioplugin:\n return [ \"rmp\" ]\n\n if mimeType == messagexgnurmail:\n return [ \"rmail\" ]\n\n if mimeType == applicationvndrnrealmedia:\n return [ \"rm\", \"rmj\", \"rmm\", \"rms\", \"rmx\", \"rmvb\" ]\n\n if mimeType == imagerle:\n return [ \"rle\" ]\n\n if mimeType == applicationresourcelistsdiff_xml:\n return [ \"rld\" ]\n\n if mimeType == imagevndfujixeroxedmicsrlc:\n return [ \"rlc\" ]\n\n if mimeType == applicationresourcelists_xml:\n return [ \"rl\" ]\n\n if mimeType == applicationxresearchinfosystems:\n return [ \"ris\" ]\n\n if mimeType == audiovndrip:\n return [ \"rip\" ]\n\n if mimeType == applicationreginfo_xml:\n return [ \"rif\" ]\n\n if mimeType == imagexrgb:\n return [ \"rgb\" ]\n\n if mimeType == applicationxgodotresource:\n return [ \"res\", \"tres\" ]\n\n if mimeType == applicationvndbusinessobjects:\n return [ \"rep\" ]\n\n if mimeType == applicationp2poverlay_xml:\n return [ \"relo\" ]\n\n if mimeType == textxreject:\n return [ \"rej\" ]\n\n if mimeType == textxmsregedit:\n return [ \"reg\" ]\n\n if mimeType == textxreadme:\n return [ \"readme\" ]\n\n if mimeType == applicationvnddatavisionrdz:\n return [ \"rdz\" ]\n\n if mimeType == applicationrdf_xml:\n return [ \"rdf\", \"rdfs\", \"owl\" ]\n\n if mimeType == applicationvndipunpluggedrcprofile:\n return [ \"rcprofile\" ]\n\n if mimeType == applicationxruby:\n return [ 
\"rb\" ]\n\n if mimeType == applicationxrawdiskimagexzcompressed:\n return [ \"rawdiskimagexz\", \"imgxz\" ]\n\n if mimeType == applicationxrawdiskimage:\n return [ \"rawdiskimage\", \"img\" ]\n\n if mimeType == imagexpanasonicrw:\n return [ \"raw\" ]\n\n if mimeType == imagexcmuraster:\n return [ \"ras\" ]\n\n if mimeType == applicationvndrar:\n return [ \"rar\" ]\n\n if mimeType == applicationrouteapd_xml:\n return [ \"rapd\" ]\n\n if mimeType == applicationraml_yaml:\n return [ \"raml\" ]\n\n if mimeType == applicationram:\n return [ \"ram\" ]\n\n if mimeType == imagexfujiraf:\n return [ \"raf\" ]\n\n if mimeType == audiovndrnrealaudio:\n return [ \"ra\", \"rax\" ]\n\n if mimeType == applicationvndquarkquarkxpress:\n return [ \"qxd\", \"qxt\", \"qwd\", \"qwt\", \"qxl\", \"qxb\" ]\n\n if mimeType == applicationxquicktimemedialink:\n return [ \"qtl\" ]\n\n if mimeType == imagexquicktime:\n return [ \"qtif\" ]\n\n if mimeType == applicationxqtiplot:\n return [ \"qti\", \"qtigz\" ]\n\n if mimeType == videoquicktime:\n return [ \"qt\", \"mov\", \"moov\", \"qtvr\" ]\n\n if mimeType == applicationsparqlquery:\n return [ \"qs\", \"rq\" ]\n\n if mimeType == applicationvndpublisharedeltatree:\n return [ \"qps\" ]\n\n if mimeType == applicationxqpress:\n return [ \"qp\" ]\n\n if mimeType == textxqml:\n return [ \"qml\", \"qmltypes\", \"qmlproject\" ]\n\n if mimeType == applicationxqw:\n return [ \"qif\" ]\n\n if mimeType == applicationvndintuqfx:\n return [ \"qfx\" ]\n\n if mimeType == applicationxqeddisk:\n return [ \"qed\" ]\n\n if mimeType == applicationxqemudisk:\n return [ \"qcow2\", \"qcow\" ]\n\n if mimeType == applicationvndintuqbo:\n return [ \"qbo\" ]\n\n if mimeType == applicationvndepsonquickanime:\n return [ \"qam\" ]\n\n if mimeType == textxpython:\n return [ \"pyx\", \"wsgi\" ]\n\n if mimeType == videovndmsplayreadymediapyv:\n return [ \"pyv\" ]\n\n if mimeType == applicationxpyspreadspreadsheet:\n return [ \"pysu\" ]\n\n if mimeType == applicationxpyspreadbzspreadsheet:\n return [ \"pys\" ]\n\n if mimeType == modelvndpythapyox:\n return [ \"pyox\" ]\n\n if mimeType == applicationxpythonbytecode:\n return [ \"pyc\", \"pyo\" ]\n\n if mimeType == audiovndmsplayreadymediapya:\n return [ \"pya\" ]\n\n if mimeType == textxpython3:\n return [ \"py\", \"py3\", \"py3x\", \"pyi\" ]\n\n if mimeType == applicationvnd3mpostitnotes:\n return [ \"pwn\" ]\n\n if mimeType == applicationxpw:\n return [ \"pw\" ]\n\n if mimeType == applicationvnd3gpppicbwvar:\n return [ \"pvb\" ]\n\n if mimeType == applicationvndmspublisher:\n return [ \"pub\" ]\n\n if mimeType == applicationvndpviptid1:\n return [ \"ptid\" ]\n\n if mimeType == imageprspti:\n return [ \"pti\" ]\n\n if mimeType == applicationxpocketword:\n return [ \"psw\" ]\n\n if mimeType == applicationpskc_xml:\n return [ \"pskcxml\" ]\n\n if mimeType == applicationxgzpostscript:\n return [ \"psgz\" ]\n\n if mimeType == audioxpsflib:\n return [ \"psflib\" ]\n\n if mimeType == applicationxgzfontlinuxpsf:\n return [ \"psfgz\" ]\n\n if mimeType == applicationxfontlinuxpsf:\n return [ \"psf\" ]\n\n if mimeType == imagevndadobephotoshop:\n return [ \"psd\" ]\n\n if mimeType == applicationxbzpostscript:\n return [ \"psbz2\" ]\n\n if mimeType == applicationvnd3gpppicbwsmall:\n return [ \"psb\" ]\n\n if mimeType == applicationpostscript:\n return [ \"ps\" ]\n\n if mimeType == applicationprovenance_xml:\n return [ \"provx\" ]\n\n if mimeType == applicationxgodotproject:\n return [ \"projectgodot\" ]\n\n if mimeType == applicationpicsrules:\n return [ \"prf\" 
]\n\n if mimeType == applicationvndlotusfreelance:\n return [ \"pre\" ]\n\n if mimeType == applicationvndpalm:\n return [ \"pqa\", \"oprc\" ]\n\n if mimeType == applicationvndmspowerpoint:\n return [ \"ppz\", \"ppt\", \"pps\", \"pot\" ]\n\n if mimeType == applicationvndopenxmlformatsofficedocumentpresentationmlpresentation:\n return [ \"pptx\" ]\n\n if mimeType == applicationvndmspowerpointpresentationmacroenabled12:\n return [ \"pptm\" ]\n\n if mimeType == applicationvndopenxmlformatsofficedocumentpresentationmlslideshow:\n return [ \"ppsx\" ]\n\n if mimeType == applicationvndmspowerpointslideshowmacroenabled12:\n return [ \"ppsm\" ]\n\n if mimeType == imagexportablepixmap:\n return [ \"ppm\" ]\n\n if mimeType == applicationvndcupsppd:\n return [ \"ppd\" ]\n\n if mimeType == applicationvndmspowerpointaddinmacroenabled12:\n return [ \"ppam\" ]\n\n if mimeType == applicationvndopenxmlformatsofficedocumentpresentationmltemplate:\n return [ \"potx\" ]\n\n if mimeType == applicationvndmspowerpointtemplatemacroenabled12:\n return [ \"potm\" ]\n\n if mimeType == applicationvndmacportsportpkg:\n return [ \"portpkg\" ]\n\n if mimeType == applicationxspsspor:\n return [ \"por\" ]\n\n if mimeType == textxmaven_xml:\n return [ \"pomxml\", \"settingsxml\" ]\n\n if mimeType == textxgettexttranslation:\n return [ \"po\" ]\n\n if mimeType == imagexmacpaint:\n return [ \"pntg\" ]\n\n if mimeType == imagexportableanymap:\n return [ \"pnm\" ]\n\n if mimeType == imagepng:\n return [ \"png\" ]\n\n if mimeType == applicationvndctcposml:\n return [ \"pml\" ]\n\n if mimeType == audioxscpls:\n return [ \"pls\" ]\n\n if mimeType == applicationxplanperfect:\n return [ \"pln\" ]\n\n if mimeType == applicationvndpocketlearn:\n return [ \"plf\" ]\n\n if mimeType == applicationvndmobiusplc:\n return [ \"plc\" ]\n\n if mimeType == applicationvnd3gpppicbwlarge:\n return [ \"plb\" ]\n\n if mimeType == audioxiriverpla:\n return [ \"pla\" ]\n\n if mimeType == applicationxperl:\n return [ \"pl\", \"pm\", \"al\", \"perl\", \"pod\", \"t\" ]\n\n if mimeType == applicationvndapplepkpass:\n return [ \"pkpass\" ]\n\n if mimeType == applicationpkixpkipath:\n return [ \"pkipath\" ]\n\n if mimeType == applicationpkixcmp:\n return [ \"pki\" ]\n\n if mimeType == applicationxtexpk:\n return [ \"pk\" ]\n\n if mimeType == applicationxphp:\n return [ \"php\", \"php3\", \"php4\", \"php5\", \"phps\" ]\n\n if mimeType == applicationpgpencrypted:\n return [ \"pgp\", \"gpg\", \"asc\" ]\n\n if mimeType == applicationvndchesspgn:\n return [ \"pgn\" ]\n\n if mimeType == imagexportablegraymap:\n return [ \"pgm\" ]\n\n if mimeType == applicationfonttdpfr:\n return [ \"pfr\" ]\n\n if mimeType == applicationxfonttype1:\n return [ \"pfa\", \"pfb\", \"gsf\", \"pfm\" ]\n\n if mimeType == imagexpentaxpef:\n return [ \"pef\" ]\n\n if mimeType == applicationxxzpdf:\n return [ \"pdfxz\" ]\n\n if mimeType == applicationxlzpdf:\n return [ \"pdflz\" ]\n\n if mimeType == applicationxgzpdf:\n return [ \"pdfgz\" ]\n\n if mimeType == applicationxbzpdf:\n return [ \"pdfbz2\" ]\n\n if mimeType == applicationpdf:\n return [ \"pdf\" ]\n\n if mimeType == textxprocessing:\n return [ \"pde\" ]\n\n if mimeType == applicationxaportisdoc:\n return [ \"pdb\", \"pdc\" ]\n\n if mimeType == imagevndzbrushpcx:\n return [ \"pcx\" ]\n\n if mimeType == applicationvndcurlpcurl:\n return [ \"pcurl\" ]\n\n if mimeType == imagexpict:\n return [ \"pct\", \"pict\", \"pict1\", \"pict2\", \"pic\" ]\n\n if mimeType == applicationvndhppclxl:\n return [ \"pclxl\" ]\n\n if mimeType == 
applicationvndhppcl:\n return [ \"pcl\" ]\n\n if mimeType == applicationxfontpcf:\n return [ \"pcf\", \"pcfz\", \"pcfgz\" ]\n\n if mimeType == applicationxpcenginerom:\n return [ \"pce\" ]\n\n if mimeType == imagexphotocd:\n return [ \"pcd\" ]\n\n if mimeType == applicationvndtcpdumppcap:\n return [ \"pcap\", \"cap\", \"dmp\" ]\n\n if mimeType == imagexportablebitmap:\n return [ \"pbm\" ]\n\n if mimeType == applicationvndpowerbuilder6:\n return [ \"pbd\" ]\n\n if mimeType == applicationvndpawaafile:\n return [ \"paw\" ]\n\n if mimeType == imagexgimppat:\n return [ \"pat\" ]\n\n if mimeType == applicationxpar2:\n return [ \"par2\" ]\n\n if mimeType == applicationxpak:\n return [ \"pak\" ]\n\n if mimeType == applicationvndapplepages:\n return [ \"pages\" ]\n\n if mimeType == applicationxjavapack200:\n return [ \"pack\" ]\n\n if mimeType == applicationxnsproxyautoconfig:\n return [ \"pac\" ]\n\n if mimeType == applicationpkcs8encrypted:\n return [ \"p8e\" ]\n\n if mimeType == applicationpkcs8:\n return [ \"p8\" ]\n\n if mimeType == applicationpkcs7signature:\n return [ \"p7s\" ]\n\n if mimeType == applicationxpkcs7certreqresp:\n return [ \"p7r\" ]\n\n if mimeType == applicationpkcs7mime:\n return [ \"p7c\", \"p7m\" ]\n\n if mimeType == applicationxpkcs7certificates:\n return [ \"p7b\", \"spc\" ]\n\n if mimeType == applicationxpagemaker:\n return [ \"p65\", \"pm6\", \"pmd\" ]\n\n if mimeType == applicationpkcs12:\n return [ \"p12\", \"pfx\" ]\n\n if mimeType == applicationpkcs10:\n return [ \"p10\" ]\n\n if mimeType == textxpascal:\n return [ \"p\", \"pas\" ]\n\n if mimeType == applicationvndopenofficeorgextension:\n return [ \"oxt\" ]\n\n if mimeType == applicationoxps:\n return [ \"oxps\" ]\n\n if mimeType == applicationowl_xml:\n return [ \"owx\" ]\n\n if mimeType == applicationxvirtualboxovf:\n return [ \"ovf\" ]\n\n if mimeType == applicationovf:\n return [ \"ova\" ]\n\n if mimeType == applicationvndoasisopendocumenttexttemplate:\n return [ \"ott\" ]\n\n if mimeType == applicationvndoasisopendocumentspreadsheettemplate:\n return [ \"ots\" ]\n\n if mimeType == applicationvndoasisopendocumentpresentationtemplate:\n return [ \"otp\" ]\n\n if mimeType == applicationvndoasisopendocumentimagetemplate:\n return [ \"oti\" ]\n\n if mimeType == applicationvndoasisopendocumenttextweb:\n return [ \"oth\" ]\n\n if mimeType == applicationvndoasisopendocumentgraphicstemplate:\n return [ \"otg\" ]\n\n if mimeType == applicationvndoasisopendocumentformulatemplate:\n return [ \"otf\", \"odft\" ]\n\n if mimeType == applicationvndoasisopendocumentcharttemplate:\n return [ \"otc\" ]\n\n if mimeType == applicationvndopenstreetmapdata_xml:\n return [ \"osm\" ]\n\n if mimeType == applicationvndyamahaopenscoreformatosfpvg_xml:\n return [ \"osfpvg\" ]\n\n if mimeType == applicationvndyamahaopenscoreformat:\n return [ \"osf\" ]\n\n if mimeType == textorg:\n return [ \"org\" ]\n\n if mimeType == imagexolympusorf:\n return [ \"orf\" ]\n\n if mimeType == imageopenraster:\n return [ \"ora\" ]\n\n if mimeType == textxopml_xml:\n return [ \"opml\" ]\n\n if mimeType == applicationoebpspackage_xml:\n return [ \"opf\" ]\n\n if mimeType == textxooc:\n return [ \"ooc\" ]\n\n if mimeType == applicationonenote:\n return [ \"onetoc\", \"onetoc2\", \"onetmp\", \"onepkg\" ]\n\n if mimeType == applicationomdoc_xml:\n return [ \"omdoc\" ]\n\n if mimeType == applicationxoleo:\n return [ \"oleo\" ]\n\n if mimeType == applicationogg:\n return [ \"ogx\" ]\n\n if mimeType == videoogg:\n return [ \"ogv\" ]\n\n if mimeType == 
videoxogm_ogg:\n return [ \"ogm\" ]\n\n if mimeType == modelvndopengex:\n return [ \"ogex\" ]\n\n if mimeType == audioogg:\n return [ \"oga\", \"ogg\", \"opus\" ]\n\n if mimeType == applicationvndoasisopendocumenttext:\n return [ \"odt\" ]\n\n if mimeType == applicationvndoasisopendocumentspreadsheet:\n return [ \"ods\" ]\n\n if mimeType == applicationvndoasisopendocumentpresentation:\n return [ \"odp\" ]\n\n if mimeType == applicationvndoasisopendocumenttextmaster:\n return [ \"odm\" ]\n\n if mimeType == applicationvndoasisopendocumentimage:\n return [ \"odi\" ]\n\n if mimeType == applicationvndoasisopendocumentgraphics:\n return [ \"odg\" ]\n\n if mimeType == applicationvndoasisopendocumentformula:\n return [ \"odf\" ]\n\n if mimeType == applicationvndoasisopendocumentchart:\n return [ \"odc\" ]\n\n if mimeType == applicationvndoasisopendocumentdatabase:\n return [ \"odb\" ]\n\n if mimeType == applicationoda:\n return [ \"oda\" ]\n\n if mimeType == textxocl:\n return [ \"ocl\" ]\n\n if mimeType == applicationxtgif:\n return [ \"obj\" ]\n\n if mimeType == applicationvndopenbloxgame_xml:\n return [ \"obgx\" ]\n\n if mimeType == applicationxmsbinder:\n return [ \"obd\" ]\n\n if mimeType == applicationvndfujitsuoasys:\n return [ \"oas\" ]\n\n if mimeType == applicationvndfujitsuoasys3:\n return [ \"oa3\" ]\n\n if mimeType == applicationvndfujitsuoasys2:\n return [ \"oa2\" ]\n\n if mimeType == applicationxobject:\n return [ \"o\", \"mod\" ]\n\n if mimeType == applicationxnzb:\n return [ \"nzb\" ]\n\n if mimeType == applicationvndapplenumbers:\n return [ \"numbers\" ]\n\n if mimeType == applicationvndnitf:\n return [ \"ntf\", \"nitf\" ]\n\n if mimeType == applicationntriples:\n return [ \"nt\" ]\n\n if mimeType == videoxnsv:\n return [ \"nsv\" ]\n\n if mimeType == applicationvndlotusnotes:\n return [ \"nsf\" ]\n\n if mimeType == applicationxnetshowchannel:\n return [ \"nsc\" ]\n\n if mimeType == imagexnikonnrw:\n return [ \"nrw\" ]\n\n if mimeType == applicationnquads:\n return [ \"nq\" ]\n\n if mimeType == imagevndnetfpx:\n return [ \"npx\" ]\n\n if mimeType == applicationvndnoblenetweb:\n return [ \"nnw\" ]\n\n if mimeType == applicationvndnoblenetsealer:\n return [ \"nns\" ]\n\n if mimeType == applicationvndnoblenetdirectory:\n return [ \"nnd\" ]\n\n if mimeType == applicationvndenliven:\n return [ \"nml\" ]\n\n if mimeType == applicationvndneurolanguagenlu:\n return [ \"nlu\" ]\n\n if mimeType == applicationxneogeopocketrom:\n return [ \"ngp\" ]\n\n if mimeType == applicationvndnokiangagedata:\n return [ \"ngdat\" ]\n\n if mimeType == applicationxneogeopocketcolorrom:\n return [ \"ngc\" ]\n\n if mimeType == applicationvndnokiangagesymbianinstall:\n return [ \"n-gage\" ]\n\n if mimeType == textxnfo:\n return [ \"nfo\" ]\n\n if mimeType == applicationxnesrom:\n return [ \"nes\", \"nez\", \"unf\", \"unif\" ]\n\n if mimeType == imagexnikonnef:\n return [ \"nef\" ]\n\n if mimeType == applicationxnintendodsrom:\n return [ \"nds\" ]\n\n if mimeType == applicationxdtbncx_xml:\n return [ \"ncx\" ]\n\n if mimeType == applicationvndwolframplayer:\n return [ \"nbp\" ]\n\n if mimeType == applicationmathematica:\n return [ \"nb\", \"ma\", \"mb\" ]\n\n if mimeType == applicationxn64rom:\n return [ \"n64\", \"z64\", \"v64\" ]\n\n if mimeType == textn3:\n return [ \"n3\" ]\n\n if mimeType == applicationvndtriscapemxs:\n return [ \"mxs\" ]\n\n if mimeType == applicationxv_xml:\n return [ \"mxml\", \"xhvml\", \"xvml\", \"xvm\" ]\n\n if mimeType == audiomobilexmf:\n return [ \"mxmf\" ]\n\n if mimeType == 
applicationvndrecordaremusicxml:\n return [ \"mxl\" ]\n\n if mimeType == applicationmxf:\n return [ \"mxf\" ]\n\n if mimeType == applicationvndmfer:\n return [ \"mwf\" ]\n\n if mimeType == applicationvndmapboxvectortile:\n return [ \"mvt\" ]\n\n if mimeType == applicationxmsmediaview:\n return [ \"mvb\", \"m13\", \"m14\" ]\n\n if mimeType == applicationvndrecordaremusicxml_xml:\n return [ \"musicxml\" ]\n\n if mimeType == applicationmmtusd_xml:\n return [ \"musd\" ]\n\n if mimeType == applicationvndmusician:\n return [ \"mus\" ]\n\n if mimeType == textxmup:\n return [ \"mup\", \"not\" ]\n\n if mimeType == modelmtl:\n return [ \"mtl\" ]\n\n if mimeType == applicationxmsxrom:\n return [ \"msx\" ]\n\n if mimeType == applicationvndmuveestyle:\n return [ \"msty\" ]\n\n if mimeType == imagexmsod:\n return [ \"msod\" ]\n\n if mimeType == applicationvndmobiusmsl:\n return [ \"msl\" ]\n\n if mimeType == applicationxmsi:\n return [ \"msi\" ]\n\n if mimeType == modelmesh:\n return [ \"msh\", \"mesh\", \"silo\" ]\n\n if mimeType == applicationvndmsoutlook:\n return [ \"msg\" ]\n\n if mimeType == applicationvndepsonmsf:\n return [ \"msf\" ]\n\n if mimeType == applicationvndmseq:\n return [ \"mseq\" ]\n\n if mimeType == applicationvndfdsnmseed:\n return [ \"mseed\" ]\n\n if mimeType == applicationmediaservercontrol_xml:\n return [ \"mscml\" ]\n\n if mimeType == textxtroffms:\n return [ \"ms\" ]\n\n if mimeType == imagexminoltamrw:\n return [ \"mrw\" ]\n\n if mimeType == textxmrml:\n return [ \"mrml\", \"mrl\" ]\n\n if mimeType == applicationmarcxml_xml:\n return [ \"mrcx\" ]\n\n if mimeType == applicationmarc:\n return [ \"mrc\" ]\n\n if mimeType == applicationvndmobiusmqy:\n return [ \"mqy\" ]\n\n if mimeType == applicationvndibmminipay:\n return [ \"mpy\" ]\n\n if mimeType == applicationvndmsproject:\n return [ \"mpt\" ]\n\n if mimeType == applicationvndmophunapplication:\n return [ \"mpn\" ]\n\n if mimeType == applicationvndblueicemultipass:\n return [ \"mpm\" ]\n\n if mimeType == textxmpl2:\n return [ \"mpl\" ]\n\n if mimeType == applicationvndappleinstaller_xml:\n return [ \"mpkg\" ]\n\n if mimeType == applicationmediapolicydataset_xml:\n return [ \"mpf\" ]\n\n if mimeType == videompeg:\n return [ \"mpeg\", \"mpg\", \"mpe\", \"vob\", \"090909vdr\", \"m1v\", \"m2v\" ]\n\n if mimeType == applicationdash_xml:\n return [ \"mpd\" ]\n\n if mimeType == audioxmusepack:\n return [ \"mpc\", \"mpp\", \"mp\" ]\n\n if mimeType == applicationmp4:\n return [ \"mp4s\", \"m4p\" ]\n\n if mimeType == videomp4:\n return [ \"mp4\", \"m4v\", \"f4v\", \"lrv\", \"mp4v\", \"mpg4\" ]\n\n if mimeType == audiompeg:\n return [ \"mp3\", \"mpga\", \"mp2a\", \"m2a\", \"m3a\" ]\n\n if mimeType == audiomp2:\n return [ \"mp2\" ]\n\n if mimeType == videoxsgimovie:\n return [ \"movie\" ]\n\n if mimeType == textxmof:\n return [ \"mof\" ]\n\n if mimeType == applicationmods_xml:\n return [ \"mods\" ]\n\n if mimeType == textxmoc:\n return [ \"moc\" ]\n\n if mimeType == applicationxmobipocketebook:\n return [ \"mobi\", \"prc\" ]\n\n if mimeType == audioxmo3:\n return [ \"mo3\" ]\n\n if mimeType == applicationxmsmoney:\n return [ \"mny\" ]\n\n if mimeType == videoxmng:\n return [ \"mng\" ]\n\n if mimeType == imagevndfujixeroxedmicsmmr:\n return [ \"mmr\" ]\n\n if mimeType == applicationmathml_xml:\n return [ \"mml\", \"mathml\" ]\n\n if mimeType == applicationvndsmaf:\n return [ \"mmf\", \"smaf\" ]\n\n if mimeType == applicationvndchipnutskaraokemmd:\n return [ \"mmd\" ]\n\n if mimeType == textxobjc__src:\n return [ \"mm\" ]\n\n if mimeType 
== applicationvnddolbymlp:\n return [ \"mlp\" ]\n\n if mimeType == textxocaml:\n return [ \"ml\", \"mli\" ]\n\n if mimeType == videoxmatroska:\n return [ \"mkv\", \"mks\" ]\n\n if mimeType == audioxmatroska:\n return [ \"mka\" ]\n\n if mimeType == videoxmatroska3d:\n return [ \"mk3d\" ]\n\n if mimeType == videoxmjpeg:\n return [ \"mjpeg\", \"mjpg\" ]\n\n if mimeType == videomj2:\n return [ \"mj2\", \"mjp2\" ]\n\n if mimeType == audioxminipsf:\n return [ \"minipsf\" ]\n\n if mimeType == applicationxmif:\n return [ \"mif\" ]\n\n if mimeType == applicationxmie:\n return [ \"mie\" ]\n\n if mimeType == audiomidi:\n return [ \"mid\", \"midi\", \"kar\", \"rmi\" ]\n\n if mimeType == applicationxmimearchive:\n return [ \"mhtml\", \"mht\" ]\n\n if mimeType == applicationvndproteusmagazine:\n return [ \"mgz\" ]\n\n if mimeType == applicationxmagicpoint:\n return [ \"mgp\" ]\n\n if mimeType == applicationrpkimanifest:\n return [ \"mft\" ]\n\n if mimeType == applicationvndmfmp:\n return [ \"mfm\" ]\n\n if mimeType == applicationmets_xml:\n return [ \"mets\" ]\n\n if mimeType == applicationmetalink_xml:\n return [ \"metalink\" ]\n\n if mimeType == applicationmetalink4_xml:\n return [ \"meta4\" ]\n\n if mimeType == textxmeson:\n return [ \"mesonbuild\", \"mesonoptionstxt\" ]\n\n if mimeType == textxtroffme:\n return [ \"me\" ]\n\n if mimeType == imagevndmsmodi:\n return [ \"mdi\" ]\n\n if mimeType == applicationvndmsaccess:\n return [ \"mdb\" ]\n\n if mimeType == textmarkdown:\n return [ \"md\", \"mkd\", \"markdown\" ]\n\n if mimeType == textvndcurlmcurl:\n return [ \"mcurl\" ]\n\n if mimeType == applicationvndmcd:\n return [ \"mcd\" ]\n\n if mimeType == textvndsenxwarpscript:\n return [ \"mc2\" ]\n\n if mimeType == applicationvndmedcalcdata:\n return [ \"mc1\" ]\n\n if mimeType == applicationmbox:\n return [ \"mbox\" ]\n\n if mimeType == applicationvndmobiusmbk:\n return [ \"mbk\" ]\n\n if mimeType == textcachemanifest:\n return [ \"manifest\", \"appcache\" ]\n\n if mimeType == applicationxtroffman:\n return [ \"man\", \"19\" ]\n\n if mimeType == textxmakefile:\n return [ \"makefile\", \"gnumakefile\", \"mk\", \"mak\" ]\n\n if mimeType == applicationvndecowinchart:\n return [ \"mag\" ]\n\n if mimeType == applicationmmtaei_xml:\n return [ \"maei\" ]\n\n if mimeType == applicationmads_xml:\n return [ \"mads\" ]\n\n if mimeType == applicationxmarkaby:\n return [ \"mab\" ]\n\n if mimeType == applicationxthomsoncartridgememo7:\n return [ \"m7\" ]\n\n if mimeType == videoisosegment:\n return [ \"m4s\" ]\n\n if mimeType == audioxm4r:\n return [ \"m4r\" ]\n\n if mimeType == audioxm4b:\n return [ \"m4b\", \"f4b\" ]\n\n if mimeType == audiomp4:\n return [ \"m4a\", \"f4a\", \"mp4a\" ]\n\n if mimeType == applicationxm4:\n return [ \"m4\" ]\n\n if mimeType == audioxmpegurl:\n return [ \"m3u\", \"m3u8\", \"vlc\" ]\n\n if mimeType == videomp2t:\n return [ \"m2t\", \"m2ts\", \"mts\", \"cpi\", \"clpi\", \"mpls\", \"bdm\", \"bdmv\" ]\n\n if mimeType == applicationmp21:\n return [ \"m21\", \"mp21\" ]\n\n if mimeType == videovndmpegurl:\n return [ \"m1u\", \"m4u\", \"mxu\" ]\n\n if mimeType == textxobjcsrc:\n return [ \"m\" ]\n\n if mimeType == applicationxlzop:\n return [ \"lzo\" ]\n\n if mimeType == applicationxlzma:\n return [ \"lzma\" ]\n\n if mimeType == applicationxlz4:\n return [ \"lz4\" ]\n\n if mimeType == applicationxlzip:\n return [ \"lz\" ]\n\n if mimeType == applicationxlyx:\n return [ \"lyx\" ]\n\n if mimeType == textxlilypond:\n return [ \"ly\" ]\n\n if mimeType == imagexlws:\n return [ \"lws\" ]\n\n if 
mimeType == applicationvndlotuswordpro:\n return [ \"lwp\" ]\n\n if mimeType == imagexlwo:\n return [ \"lwo\", \"lwob\" ]\n\n if mimeType == audiovndlucentvoice:\n return [ \"lvp\" ]\n\n if mimeType == applicationxluabytecode:\n return [ \"luac\" ]\n\n if mimeType == textxlua:\n return [ \"lua\" ]\n\n if mimeType == applicationvndfrogansltf:\n return [ \"ltf\" ]\n\n if mimeType == applicationxlrzip:\n return [ \"lrz\" ]\n\n if mimeType == applicationvndmslrm:\n return [ \"lrm\" ]\n\n if mimeType == applicationlost_xml:\n return [ \"lostxml\" ]\n\n if mimeType == textxlog:\n return [ \"log\" ]\n\n if mimeType == audiousac:\n return [ \"loas\", \"xhe\" ]\n\n if mimeType == applicationxatarilynxrom:\n return [ \"lnx\" ]\n\n if mimeType == applicationxmsshortcut:\n return [ \"lnk\" ]\n\n if mimeType == textcoffeescript:\n return [ \"litcoffee\" ]\n\n if mimeType == applicationvndroute66link66_xml:\n return [ \"link66\" ]\n\n if mimeType == applicationxlhz:\n return [ \"lhz\" ]\n\n if mimeType == textxliteratehaskell:\n return [ \"lhs\" ]\n\n if mimeType == applicationxlha:\n return [ \"lha\", \"lzh\" ]\n\n if mimeType == applicationlgr_xml:\n return [ \"lgr\" ]\n\n if mimeType == textless:\n return [ \"less\" ]\n\n if mimeType == applicationvndhhelessonplayer:\n return [ \"les\" ]\n\n if mimeType == textxldif:\n return [ \"ldif\" ]\n\n if mimeType == applicationvndllamagraphicslifebalanceexchange_xml:\n return [ \"lbe\" ]\n\n if mimeType == applicationvndllamagraphicslifebalancedesktop:\n return [ \"lbd\" ]\n\n if mimeType == applicationvndlaslas_xml:\n return [ \"lasxml\" ]\n\n if mimeType == applicationxsharedlibraryla:\n return [ \"la\" ]\n\n if mimeType == applicationxkword:\n return [ \"kwd\", \"kwt\" ]\n\n if mimeType == applicationxkugar:\n return [ \"kud\" ]\n\n if mimeType == applicationvndkahootz:\n return [ \"ktz\", \"ktr\" ]\n\n if mimeType == imagektx2:\n return [ \"ktx2\" ]\n\n if mimeType == imagektx:\n return [ \"ktx\" ]\n\n if mimeType == textxkotlin:\n return [ \"kt\" ]\n\n if mimeType == textxkaitaistruct:\n return [ \"ksy\" ]\n\n if mimeType == applicationxkspread:\n return [ \"ksp\" ]\n\n if mimeType == applicationxkrita:\n return [ \"kra\", \"krz\" ]\n\n if mimeType == applicationvnddskeypoint:\n return [ \"kpxx\" ]\n\n if mimeType == applicationxkpresenter:\n return [ \"kpr\", \"kpt\" ]\n\n if mimeType == applicationxkpovmodeler:\n return [ \"kpm\" ]\n\n if mimeType == applicationxkontour:\n return [ \"kon\" ]\n\n if mimeType == applicationvndkinar:\n return [ \"kne\", \"knp\" ]\n\n if mimeType == applicationvndgoogleearthkmz:\n return [ \"kmz\" ]\n\n if mimeType == applicationvndgoogleearthkml_xml:\n return [ \"kml\" ]\n\n if mimeType == applicationxkillustrator:\n return [ \"kil\" ]\n\n if mimeType == applicationvndkidspiration:\n return [ \"kia\" ]\n\n if mimeType == applicationxkformula:\n return [ \"kfo\" ]\n\n if mimeType == applicationxkexiprojectshortcut:\n return [ \"kexis\" ]\n\n if mimeType == applicationxkexiconnectiondata:\n return [ \"kexic\" ]\n\n if mimeType == applicationxkexiprojectsqlite2:\n return [ \"kexi\" ]\n\n if mimeType == imagexkodakkdc:\n return [ \"kdc\" ]\n\n if mimeType == applicationxkeepass2:\n return [ \"kdbx\" ]\n\n if mimeType == applicationxkarbon:\n return [ \"karbon\" ]\n\n if mimeType == applicationxthomsoncassette:\n return [ \"k7\" ]\n\n if mimeType == imagexkodakk25:\n return [ \"k25\" ]\n\n if mimeType == imagejxss:\n return [ \"jxss\" ]\n\n if mimeType == imagejxsi:\n return [ \"jxsi\" ]\n\n if mimeType == imagejxsc:\n return 
[ \"jxsc\" ]\n\n if mimeType == imagejxs:\n return [ \"jxs\" ]\n\n if mimeType == imagejxrs:\n return [ \"jxrs\" ]\n\n if mimeType == imagejxra:\n return [ \"jxra\" ]\n\n if mimeType == imagejxr:\n return [ \"jxr\" ]\n\n if mimeType == imagejxl:\n return [ \"jxl\" ]\n\n if mimeType == textjsx:\n return [ \"jsx\" ]\n\n if mimeType == applicationjsonpatch_json:\n return [ \"jsonpatch\" ]\n\n if mimeType == applicationjsonml_json:\n return [ \"jsonml\" ]\n\n if mimeType == applicationld_json:\n return [ \"jsonld\" ]\n\n if mimeType == applicationjson5:\n return [ \"json5\" ]\n\n if mimeType == applicationjson:\n return [ \"json\", \"map\" ]\n\n if mimeType == textjavascript:\n return [ \"js\", \"jsm\", \"mjs\" ]\n\n if mimeType == applicationjrd_json:\n return [ \"jrd\" ]\n\n if mimeType == applicationxjbuilderproject:\n return [ \"jpr\", \"jpx\" ]\n\n if mimeType == imagejpm:\n return [ \"jpm\", \"jpgm\" ]\n\n if mimeType == imagejph:\n return [ \"jph\" ]\n\n if mimeType == videojpeg:\n return [ \"jpgv\" ]\n\n if mimeType == imagejpeg:\n return [ \"jpg\", \"jpeg\", \"jpe\" ]\n\n if mimeType == imagejpx:\n return [ \"jpf\" ]\n\n if mimeType == imagejp2:\n return [ \"jp2\", \"jpg2\" ]\n\n if mimeType == applicationvndjoostjodaarchive:\n return [ \"joda\" ]\n\n if mimeType == applicationxjavajnlpfile:\n return [ \"jnlp\" ]\n\n if mimeType == imagexjng:\n return [ \"jng\" ]\n\n if mimeType == applicationvndhpjlyt:\n return [ \"jlt\" ]\n\n if mimeType == imagejls:\n return [ \"jls\" ]\n\n if mimeType == applicationxjavakeystore:\n return [ \"jks\", \"ks\", \"cacerts\" ]\n\n if mimeType == applicationvndjisp:\n return [ \"jisp\" ]\n\n if mimeType == imagejphc:\n return [ \"jhc\" ]\n\n if mimeType == applicationxjavajcekeystore:\n return [ \"jceks\" ]\n\n if mimeType == textxjava:\n return [ \"java\" ]\n\n if mimeType == applicationxjavaarchivediff:\n return [ \"jardiff\" ]\n\n if mimeType == applicationxjavaarchive:\n return [ \"jar\" ]\n\n if mimeType == applicationvndjam:\n return [ \"jam\" ]\n\n if mimeType == textjade:\n return [ \"jade\" ]\n\n if mimeType == textvndsunj2meappdescriptor:\n return [ \"jad\" ]\n\n if mimeType == imagexjp2codestream:\n return [ \"j2c\", \"j2k\", \"jpc\" ]\n\n if mimeType == applicationvndimmervisionivu:\n return [ \"ivu\" ]\n\n if mimeType == applicationvndimmervisionivp:\n return [ \"ivp\" ]\n\n if mimeType == applicationits_xml:\n return [ \"its\" ]\n\n if mimeType == applicationvndshanainformedformtemplate:\n return [ \"itp\" ]\n\n if mimeType == applicationxit87:\n return [ \"it87\" ]\n\n if mimeType == audioxit:\n return [ \"it\" ]\n\n if mimeType == applicationxcdimage:\n return [ \"iso\", \"iso9660\" ]\n\n if mimeType == applicationvndirepositorypackage_xml:\n return [ \"irp\" ]\n\n if mimeType == applicationvndibmrightsmanagement:\n return [ \"irm\" ]\n\n if mimeType == applicationxipynb_json:\n return [ \"ipynb\" ]\n\n if mimeType == textxiptables:\n return [ \"iptables\" ]\n\n if mimeType == applicationxipspatch:\n return [ \"ips\" ]\n\n if mimeType == applicationvndshanainformedpackage:\n return [ \"ipk\" ]\n\n if mimeType == applicationipfix:\n return [ \"ipfix\" ]\n\n if mimeType == applicationvndastraeasoftwareiota:\n return [ \"iota\" ]\n\n if mimeType == textxinstall:\n return [ \"install\" ]\n\n if mimeType == applicationinkml_xml:\n return [ \"ink\", \"inkml\" ]\n\n if mimeType == textximelody:\n return [ \"imy\", \"ime\" ]\n\n if mimeType == applicationvndmsims:\n return [ \"ims\" ]\n\n if mimeType == applicationvndaccpacsimplyimp:\n return [ 
\"imp\" ]\n\n if mimeType == applicationvndshanainformedinterchange:\n return [ \"iif\" ]\n\n if mimeType == applicationvndmicrografxigx:\n return [ \"igx\" ]\n\n if mimeType == modeliges:\n return [ \"igs\", \"iges\" ]\n\n if mimeType == applicationvndinsorsigm:\n return [ \"igm\" ]\n\n if mimeType == applicationvndigloader:\n return [ \"igl\" ]\n\n if mimeType == applicationvndshanainformedformdata:\n return [ \"ifm\" ]\n\n if mimeType == imagexilbm:\n return [ \"iff\", \"ilbm\", \"lbm\" ]\n\n if mimeType == imageief:\n return [ \"ief\" ]\n\n if mimeType == textxidl:\n return [ \"idl\" ]\n\n if mimeType == imagevndmicrosofticon:\n return [ \"ico\" ]\n\n if mimeType == imagexicns:\n return [ \"icns\" ]\n\n if mimeType == xconferencexcooltalk:\n return [ \"ice\" ]\n\n if mimeType == applicationvndiccprofile:\n return [ \"icc\", \"icm\" ]\n\n if mimeType == applicationxica:\n return [ \"ica\" ]\n\n if mimeType == applicationvndintergeo:\n return [ \"i2g\" ]\n\n if mimeType == applicationxhwt:\n return [ \"hwt\" ]\n\n if mimeType == applicationxhwp:\n return [ \"hwp\" ]\n\n if mimeType == applicationvndyamahahvscript:\n return [ \"hvs\" ]\n\n if mimeType == applicationvndyamahahvvoice:\n return [ \"hvp\" ]\n\n if mimeType == applicationvndyamahahvdic:\n return [ \"hvd\" ]\n\n if mimeType == applicationvndkenameaapp:\n return [ \"htke\" ]\n\n if mimeType == textxcomponent:\n return [ \"htc\" ]\n\n if mimeType == imagehsj2:\n return [ \"hsj2\" ]\n\n if mimeType == textxhaskell:\n return [ \"hs\" ]\n\n if mimeType == applicationmacbinhex40:\n return [ \"hqx\" ]\n\n if mimeType == applicationvndhphps:\n return [ \"hps\" ]\n\n if mimeType == applicationvndhphpid:\n return [ \"hpid\" ]\n\n if mimeType == applicationvndhphpgl:\n return [ \"hpgl\" ]\n\n if mimeType == applicationwinhlp:\n return [ \"hlp\" ]\n\n if mimeType == applicationhjson:\n return [ \"hjson\" ]\n\n if mimeType == textxc__hdr:\n return [ \"hh\", \"hp\", \"hpp\", \"h\", \"hxx\" ]\n\n if mimeType == applicationxhfefloppyimage:\n return [ \"hfe\" ]\n\n if mimeType == applicationatscheld_xml:\n return [ \"held\" ]\n\n if mimeType == imagehej2k:\n return [ \"hej2\" ]\n\n if mimeType == imageheifsequence:\n return [ \"heifs\" ]\n\n if mimeType == imageheicsequence:\n return [ \"heics\" ]\n\n if mimeType == imageheif:\n return [ \"heic\", \"heif\", \"hif\" ]\n\n if mimeType == applicationxhdf:\n return [ \"hdf\", \"hdf4\", \"h4\", \"hdf5\", \"h5\" ]\n\n if mimeType == applicationxvirtualboxhdd:\n return [ \"hdd\" ]\n\n if mimeType == textxhandlebarstemplate:\n return [ \"hbs\" ]\n\n if mimeType == applicationvndhbci:\n return [ \"hbci\" ]\n\n if mimeType == applicationvndhal_xml:\n return [ \"hal\" ]\n\n if mimeType == videoh264:\n return [ \"h264\" ]\n\n if mimeType == videoh263:\n return [ \"h263\" ]\n\n if mimeType == videoh261:\n return [ \"h261\" ]\n\n if mimeType == applicationgzip:\n return [ \"gz\" ]\n\n if mimeType == applicationvndgeonext:\n return [ \"gxt\" ]\n\n if mimeType == applicationgxf:\n return [ \"gxf\" ]\n\n if mimeType == textxgcodegx:\n return [ \"gx\" ]\n\n if mimeType == textxgooglevideopointer:\n return [ \"gvp\" ]\n\n if mimeType == textvndgraphviz:\n return [ \"gv\" ]\n\n if mimeType == modelvndgtw:\n return [ \"gtw\" ]\n\n if mimeType == applicationvndgroovetoolmessage:\n return [ \"gtm\" ]\n\n if mimeType == audioxgsm:\n return [ \"gsm\" ]\n\n if mimeType == applicationvndgoogleappspresentation:\n return [ \"gslides\" ]\n\n if mimeType == applicationvndgoogleappsspreadsheet:\n return [ \"gsheet\" ]\n\n if 
mimeType == textxgenie:\n return [ \"gs\" ]\n\n if mimeType == applicationsrgs_xml:\n return [ \"grxml\" ]\n\n if mimeType == applicationvndgrooveinjector:\n return [ \"grv\" ]\n\n if mimeType == textxgroovy:\n return [ \"groovy\", \"gvy\", \"gy\", \"gsh\" ]\n\n if mimeType == applicationxgrampsxml:\n return [ \"gramps\" ]\n\n if mimeType == applicationsrgs:\n return [ \"gram\" ]\n\n if mimeType == textxgradle:\n return [ \"gradle\" ]\n\n if mimeType == applicationxgraphite:\n return [ \"gra\" ]\n\n if mimeType == applicationvndgrafeq:\n return [ \"gqf\", \"gqs\" ]\n\n if mimeType == applicationgpx_xml:\n return [ \"gpx\" ]\n\n if mimeType == applicationvndflographit:\n return [ \"gph\" ]\n\n if mimeType == applicationxgnuplot:\n return [ \"gp\", \"gplt\", \"gnuplot\" ]\n\n if mimeType == textxgo:\n return [ \"go\" ]\n\n if mimeType == applicationxgnumeric:\n return [ \"gnumeric\" ]\n\n if mimeType == applicationxgnucash:\n return [ \"gnucash\", \"gnc\", \"xac\" ]\n\n if mimeType == applicationgnunetdirectory:\n return [ \"gnd\" ]\n\n if mimeType == applicationvndgmx:\n return [ \"gmx\" ]\n\n if mimeType == applicationxprofile:\n return [ \"gmonout\" ]\n\n if mimeType == applicationxgettexttranslation:\n return [ \"gmo\", \"mo\" ]\n\n if mimeType == applicationgml_xml:\n return [ \"gml\" ]\n\n if mimeType == modelgltf_json:\n return [ \"gltf\" ]\n\n if mimeType == modelgltfbinary:\n return [ \"glb\" ]\n\n if mimeType == applicationxglade:\n return [ \"glade\" ]\n\n if mimeType == applicationvndgrooveidentitymessage:\n return [ \"gim\" ]\n\n if mimeType == imagexgimpgih:\n return [ \"gih\" ]\n\n if mimeType == imagegif:\n return [ \"gif\" ]\n\n if mimeType == applicationvndgroovehelp:\n return [ \"ghf\" ]\n\n if mimeType == applicationvndgeogebratool:\n return [ \"ggt\" ]\n\n if mimeType == applicationvndgeogebrafile:\n return [ \"ggb\" ]\n\n if mimeType == applicationxgamegearrom:\n return [ \"gg\" ]\n\n if mimeType == applicationxtexgf:\n return [ \"gf\" ]\n\n if mimeType == applicationvndgeometryexplorer:\n return [ \"gex\", \"gre\" ]\n\n if mimeType == applicationgeo_json:\n return [ \"geojson\" ]\n\n if mimeType == applicationvnddynageo:\n return [ \"geo\" ]\n\n if mimeType == applicationxgenesisrom:\n return [ \"gen\", \"sgd\" ]\n\n if mimeType == applicationxgedcom:\n return [ \"ged\", \"gedcom\" ]\n\n if mimeType == applicationxgodotshader:\n return [ \"gdshader\" ]\n\n if mimeType == applicationvndgoogleappsdocument:\n return [ \"gdoc\" ]\n\n if mimeType == modelvndgdl:\n return [ \"gdl\" ]\n\n if mimeType == applicationxgdromcue:\n return [ \"gdi\" ]\n\n if mimeType == applicationxgdscript:\n return [ \"gd\" ]\n\n if mimeType == textxgcode:\n return [ \"gcode\" ]\n\n if mimeType == applicationxgcacompressed:\n return [ \"gca\" ]\n\n if mimeType == imagexgimpgbr:\n return [ \"gbr\" ]\n\n if mimeType == applicationxgameboycolorrom:\n return [ \"gbc\", \"cgb\" ]\n\n if mimeType == applicationxgbarom:\n return [ \"gba\", \"agb\" ]\n\n if mimeType == applicationxgameboyrom:\n return [ \"gb\", \"sgb\" ]\n\n if mimeType == applicationxtads:\n return [ \"gam\" ]\n\n if mimeType == applicationvndgrooveaccount:\n return [ \"gac\" ]\n\n if mimeType == applicationvndgeospace:\n return [ \"g3w\" ]\n\n if mimeType == imageg3fax:\n return [ \"g3\" ]\n\n if mimeType == applicationvndgeoplan:\n return [ \"g2w\" ]\n\n if mimeType == applicationvndfuzzysheet:\n return [ \"fzs\" ]\n\n if mimeType == applicationvndadobefxp:\n return [ \"fxp\", \"fxpl\" ]\n\n if mimeType == videoxjavafx:\n return [ 
\"fxm\" ]\n\n if mimeType == videovndfvt:\n return [ \"fvt\" ]\n\n if mimeType == applicationvndanserwebfundstransferinitiation:\n return [ \"fti\" ]\n\n if mimeType == applicationvndfluxtimeclip:\n return [ \"ftc\" ]\n\n if mimeType == imagevndfst:\n return [ \"fst\" ]\n\n if mimeType == applicationvndfscweblaunch:\n return [ \"fsc\" ]\n\n if mimeType == imagevndfpx:\n return [ \"fpx\" ]\n\n if mimeType == applicationvndoasisopendocumenttextflatxml:\n return [ \"fodt\" ]\n\n if mimeType == applicationvndoasisopendocumentspreadsheetflatxml:\n return [ \"fods\" ]\n\n if mimeType == applicationvndoasisopendocumentpresentationflatxml:\n return [ \"fodp\" ]\n\n if mimeType == applicationvndoasisopendocumentgraphicsflatxml:\n return [ \"fodg\" ]\n\n if mimeType == textxxslfo:\n return [ \"fo\", \"xslfo\" ]\n\n if mimeType == applicationvndfrogansfnc:\n return [ \"fnc\" ]\n\n if mimeType == applicationvndframemaker:\n return [ \"fm\", \"frame\", \"maker\", \"book\" ]\n\n if mimeType == textvndfly:\n return [ \"fly\" ]\n\n if mimeType == textvndfmiflexstor:\n return [ \"flx\" ]\n\n if mimeType == applicationxkivio:\n return [ \"flw\" ]\n\n if mimeType == videoxflv:\n return [ \"flv\" ]\n\n if mimeType == applicationvndmicrografxflo:\n return [ \"flo\" ]\n\n if mimeType == videoxflic:\n return [ \"fli\", \"flc\" ]\n\n if mimeType == applicationvndflatpakrepo:\n return [ \"flatpakrepo\" ]\n\n if mimeType == applicationvndflatpakref:\n return [ \"flatpakref\" ]\n\n if mimeType == applicationvndflatpak:\n return [ \"flatpak\", \"xdgapp\" ]\n\n if mimeType == audioflac:\n return [ \"flac\" ]\n\n if mimeType == applicationxfluid:\n return [ \"fl\" ]\n\n if mimeType == applicationfits:\n return [ \"fits\", \"fit\", \"fts\" ]\n\n if mimeType == imagexxfig:\n return [ \"fig\" ]\n\n if mimeType == imagexfreehand:\n return [ \"fh\", \"fhc\", \"fh4\", \"fh5\", \"fh7\" ]\n\n if mimeType == applicationvndfujitsuoasysgp:\n return [ \"fg5\" ]\n\n if mimeType == textxgherkin:\n return [ \"feature\" ]\n\n if mimeType == applicationvnddenovofcselayoutlink:\n return [ \"fe_launch\" ]\n\n if mimeType == applicationfdt_xml:\n return [ \"fdt\" ]\n\n if mimeType == applicationxfdsdisk:\n return [ \"fds\" ]\n\n if mimeType == applicationfdf:\n return [ \"fdf\" ]\n\n if mimeType == applicationxrawfloppydiskimage:\n return [ \"fd\", \"qd\" ]\n\n if mimeType == applicationvndisacfcs:\n return [ \"fcs\" ]\n\n if mimeType == applicationvndadobeformscentralfcdt:\n return [ \"fcdt\" ]\n\n if mimeType == imagevndfastbidsheet:\n return [ \"fbs\" ]\n\n if mimeType == applicationxzipcompressedfb2:\n return [ \"fb2zip\" ]\n\n if mimeType == applicationxfictionbook_xml:\n return [ \"fb2\" ]\n\n if mimeType == textxfortran:\n return [ \"f\", \"f90\", \"f95\", \"for\", \"f77\" ]\n\n if mimeType == applicationvndezpixpackage:\n return [ \"ez3\" ]\n\n if mimeType == applicationvndezpixalbum:\n return [ \"ez2\" ]\n\n if mimeType == applicationandrewinset:\n return [ \"ez\" ]\n\n if mimeType == applicationvndnovadigmext:\n return [ \"ext\" ]\n\n if mimeType == imagexexr:\n return [ \"exr\" ]\n\n if mimeType == applicationexpress:\n return [ \"exp\" ]\n\n if mimeType == applicationexi:\n return [ \"exi\" ]\n\n if mimeType == applicationxmsdosexecutable:\n return [ \"exe\" ]\n\n if mimeType == textxelixir:\n return [ \"ex\", \"exs\" ]\n\n if mimeType == applicationxenvoy:\n return [ \"evy\" ]\n\n if mimeType == applicationxeva:\n return [ \"eva\" ]\n\n if mimeType == textxsetext:\n return [ \"etx\" ]\n\n if mimeType == applicationxetheme:\n 
return [ \"etheme\" ]\n\n if mimeType == applicationvndepsonesf:\n return [ \"esf\" ]\n\n if mimeType == applicationvndosgisubsystem:\n return [ \"esa\" ]\n\n if mimeType == applicationvndeszigno3_xml:\n return [ \"es3\", \"et3\" ]\n\n if mimeType == applicationecmascript:\n return [ \"es\", \"ecma\" ]\n\n if mimeType == textxerlang:\n return [ \"erl\" ]\n\n if mimeType == applicationepub_zip:\n return [ \"epub\" ]\n\n if mimeType == imagexgzeps:\n return [ \"epsgz\", \"epsigz\", \"epsfgz\" ]\n\n if mimeType == imagexbzeps:\n return [ \"epsbz2\", \"epsibz2\", \"epsfbz2\" ]\n\n if mimeType == imagexeps:\n return [ \"eps\", \"epsi\", \"epsf\" ]\n\n if mimeType == applicationvndmsfontobject:\n return [ \"eot\" ]\n\n if mimeType == audiovnddigitalwinds:\n return [ \"eol\" ]\n\n if mimeType == applicationxmlexternalparsedentity:\n return [ \"ent\" ]\n\n if mimeType == applicationxmsmetafile:\n return [ \"emz\" ]\n\n if mimeType == applicationvndemusicemusic_package:\n return [ \"emp\" ]\n\n if mimeType == applicationemotionml_xml:\n return [ \"emotionml\" ]\n\n if mimeType == applicationemma_xml:\n return [ \"emma\" ]\n\n if mimeType == messagerfc822:\n return [ \"eml\", \"mime\" ]\n\n if mimeType == imageemf:\n return [ \"emf\" ]\n\n if mimeType == textxemacslisp:\n return [ \"el\" ]\n\n if mimeType == applicationvndpgosasli:\n return [ \"ei6\" ]\n\n if mimeType == applicationxegon:\n return [ \"egon\" ]\n\n if mimeType == applicationvndpicsel:\n return [ \"efif\" ]\n\n if mimeType == applicationvndnovadigmedx:\n return [ \"edx\" ]\n\n if mimeType == applicationvndnovadigmedm:\n return [ \"edm\" ]\n\n if mimeType == audiovndnueraecelp9600:\n return [ \"ecelp9600\" ]\n\n if mimeType == audiovndnueraecelp7470:\n return [ \"ecelp7470\" ]\n\n if mimeType == audiovndnueraecelp4800:\n return [ \"ecelp4800\" ]\n\n if mimeType == textxeiffel:\n return [ \"e\", \"eif\" ]\n\n if mimeType == applicationvndspotfiredxp:\n return [ \"dxp\" ]\n\n if mimeType == imagevnddxf:\n return [ \"dxf\" ]\n\n if mimeType == imagevnddwg:\n return [ \"dwg\" ]\n\n if mimeType == modelvnddwf:\n return [ \"dwf\" ]\n\n if mimeType == applicationatscdwd_xml:\n return [ \"dwd\" ]\n\n if mimeType == applicationxgzdvi:\n return [ \"dvigz\" ]\n\n if mimeType == applicationxbzdvi:\n return [ \"dvibz2\" ]\n\n if mimeType == applicationxdvi:\n return [ \"dvi\" ]\n\n if mimeType == videovnddvbfile:\n return [ \"dvb\" ]\n\n if mimeType == videodv:\n return [ \"dv\" ]\n\n if mimeType == textxdevicetreesource:\n return [ \"dtsi\" ]\n\n if mimeType == audiovnddtshd:\n return [ \"dtshd\" ]\n\n if mimeType == audiovnddts:\n return [ \"dts\" ]\n\n if mimeType == applicationxmldtd:\n return [ \"dtd\" ]\n\n if mimeType == textxdevicetreebinary:\n return [ \"dtb\" ]\n\n if mimeType == applicationdssc_der:\n return [ \"dssc\" ]\n\n if mimeType == textxdsl:\n return [ \"dsl\" ]\n\n if mimeType == audioxdsf:\n return [ \"dsf\" ]\n\n if mimeType == textprslinestag:\n return [ \"dsc\" ]\n\n if mimeType == imagedicomrle:\n return [ \"drle\" ]\n\n if mimeType == audiovnddra:\n return [ \"dra\" ]\n\n if mimeType == applicationvnddpgraph:\n return [ \"dpg\" ]\n\n if mimeType == applicationvndosgidp:\n return [ \"dp\" ]\n\n if mimeType == applicationvndopenxmlformatsofficedocumentwordprocessingmltemplate:\n return [ \"dotx\" ]\n\n if mimeType == applicationvndmswordtemplatemacroenabled12:\n return [ \"dotm\" ]\n\n if mimeType == applicationmswordtemplate:\n return [ \"dot\" ]\n\n if mimeType == 
applicationvndopenxmlformatsofficedocumentwordprocessingmldocument:\n return [ \"docx\" ]\n\n if mimeType == applicationvndmsworddocumentmacroenabled12:\n return [ \"docm\" ]\n\n if mimeType == applicationmsword:\n return [ \"doc\" ]\n\n if mimeType == imagexadobedng:\n return [ \"dng\" ]\n\n if mimeType == applicationvnddna:\n return [ \"dna\" ]\n\n if mimeType == applicationxapplediskimage:\n return [ \"dmg\" ]\n\n if mimeType == imagevnddjvu:\n return [ \"djvu\", \"djv\" ]\n\n if mimeType == messagedispositionnotification:\n return [ \"disposition-notification\" ]\n\n if mimeType == applicationvndmobiusdis:\n return [ \"dis\" ]\n\n if mimeType == applicationxdirector:\n return [ \"dir\", \"dxr\", \"cst\", \"cct\", \"cxt\", \"w3d\", \"fgd\", \"swa\" ]\n\n if mimeType == textxpatch:\n return [ \"diff\", \"patch\" ]\n\n if mimeType == applicationdicom:\n return [ \"dicomdir\", \"dcm\" ]\n\n if mimeType == textxc:\n return [ \"dic\" ]\n\n if mimeType == applicationxdiadiagram:\n return [ \"dia\" ]\n\n if mimeType == applicationxdgccompressed:\n return [ \"dgc\" ]\n\n if mimeType == audioxdff:\n return [ \"dff\" ]\n\n if mimeType == applicationvnddreamfactory:\n return [ \"dfac\" ]\n\n if mimeType == applicationxdesktop:\n return [ \"desktop\", \"kdelnk\" ]\n\n if mimeType == applicationxx509cacert:\n return [ \"der\", \"crt\", \"cert\", \"pem\" ]\n\n if mimeType == applicationvnddebianbinarypackage:\n return [ \"deb\", \"udeb\" ]\n\n if mimeType == imagexdds:\n return [ \"dds\" ]\n\n if mimeType == applicationvndsyncmldmddf_xml:\n return [ \"ddf\" ]\n\n if mimeType == applicationvndfujixeroxddd:\n return [ \"ddd\" ]\n\n if mimeType == applicationvndomadd2_xml:\n return [ \"dd2\" ]\n\n if mimeType == textvndcurldcurl:\n return [ \"dcurl\" ]\n\n if mimeType == imagexkodakdcr:\n return [ \"dcr\" ]\n\n if mimeType == textxdcl:\n return [ \"dcl\" ]\n\n if mimeType == applicationxdocbook_xml:\n return [ \"dbk\", \"docbook\" ]\n\n if mimeType == applicationxdbf:\n return [ \"dbf\" ]\n\n if mimeType == applicationdavmount_xml:\n return [ \"davmount\" ]\n\n if mimeType == textxdart:\n return [ \"dart\" ]\n\n if mimeType == applicationxdar:\n return [ \"dar\" ]\n\n if mimeType == applicationvndmobiusdaf:\n return [ \"daf\" ]\n\n if mimeType == modelvndcollada_xml:\n return [ \"dae\" ]\n\n if mimeType == textxdsrc:\n return [ \"d\", \"di\" ]\n\n if mimeType == applicationprscww:\n return [ \"cww\" ]\n\n if mimeType == applicationcwl:\n return [ \"cwl\" ]\n\n if mimeType == applicationxappleworksdocument:\n return [ \"cwk\" ]\n\n if mimeType == textvndcurl:\n return [ \"curl\" ]\n\n if mimeType == imagexwinbitmap:\n return [ \"cur\" ]\n\n if mimeType == applicationxcue:\n return [ \"cue\" ]\n\n if mimeType == applicationcuseeme:\n return [ \"cu\" ]\n\n if mimeType == textcsvschema:\n return [ \"csvs\" ]\n\n if mimeType == textcsv:\n return [ \"csv\" ]\n\n if mimeType == textcss:\n return [ \"css\" ]\n\n if mimeType == applicationvndcommonspace:\n return [ \"csp\" ]\n\n if mimeType == applicationxcompressediso:\n return [ \"cso\" ]\n\n if mimeType == chemicalxcsml:\n return [ \"csml\" ]\n\n if mimeType == applicationvndcitationstylesstyle_xml:\n return [ \"csl\" ]\n\n if mimeType == applicationxcsh:\n return [ \"csh\" ]\n\n if mimeType == textxcsharp:\n return [ \"cs\" ]\n\n if mimeType == applicationvndrigcryptonote:\n return [ \"cryptonote\" ]\n\n if mimeType == applicationxchromeextension:\n return [ \"crx\" ]\n\n if mimeType == imagexcanoncrw:\n return [ \"crw\" ]\n\n if mimeType == 
applicationpkixcrl:\n return [ \"crl\" ]\n\n if mimeType == textxcredits:\n return [ \"credits\" ]\n\n if mimeType == applicationxmscardfile:\n return [ \"crd\" ]\n\n if mimeType == imagexcanoncr3:\n return [ \"cr3\" ]\n\n if mimeType == imagexcanoncr2:\n return [ \"cr2\" ]\n\n if mimeType == textxcrystal:\n return [ \"cr\" ]\n\n if mimeType == applicationmaccompactpro:\n return [ \"cpt\" ]\n\n if mimeType == textxc__src:\n return [ \"cpp\", \"cxx\", \"cc\", \"c\" ]\n\n if mimeType == applicationcpl_xml:\n return [ \"cpl\" ]\n\n if mimeType == applicationxcpiocompressed:\n return [ \"cpiogz\" ]\n\n if mimeType == applicationxcpio:\n return [ \"cpio\" ]\n\n if mimeType == applicationxcore:\n return [ \"core\" ]\n\n if mimeType == textxcopying:\n return [ \"copying\" ]\n\n if mimeType == applicationxmsdownload:\n return [ \"com\", \"bat\" ]\n\n if mimeType == applicationvndcoffeescript:\n return [ \"coffee\" ]\n\n if mimeType == applicationvndrimcod:\n return [ \"cod\" ]\n\n if mimeType == imagexcmx:\n return [ \"cmx\" ]\n\n if mimeType == applicationvndyellowrivercustommenu:\n return [ \"cmp\" ]\n\n if mimeType == chemicalxcml:\n return [ \"cml\" ]\n\n if mimeType == chemicalxcmdf:\n return [ \"cmdf\" ]\n\n if mimeType == applicationvndcosmocaller:\n return [ \"cmc\" ]\n\n if mimeType == textxcmake:\n return [ \"cmake\", \"cmakeliststxt\" ]\n\n if mimeType == applicationxmsclip:\n return [ \"clp\" ]\n\n if mimeType == applicationvndcrickclicker:\n return [ \"clkx\" ]\n\n if mimeType == applicationvndcrickclickerwordbank:\n return [ \"clkw\" ]\n\n if mimeType == applicationvndcrickclickertemplate:\n return [ \"clkt\" ]\n\n if mimeType == applicationvndcrickclickerpalette:\n return [ \"clkp\" ]\n\n if mimeType == applicationvndcrickclickerkeyboard:\n return [ \"clkk\" ]\n\n if mimeType == applicationxjava:\n return [ \"class\" ]\n\n if mimeType == applicationvndclaymore:\n return [ \"cla\" ]\n\n if mimeType == textxopenclsrc:\n return [ \"cl\" ]\n\n if mimeType == applicationnode:\n return [ \"cjs\" ]\n\n if mimeType == applicationvndmsartgalry:\n return [ \"cil\" ]\n\n if mimeType == applicationvndanserwebcertificateissueinitiation:\n return [ \"cii\" ]\n\n if mimeType == chemicalxcif:\n return [ \"cif\" ]\n\n if mimeType == applicationxkchart:\n return [ \"chrt\" ]\n\n if mimeType == applicationvndmshtmlhelp:\n return [ \"chm\" ]\n\n if mimeType == applicationxmamechd:\n return [ \"chd\" ]\n\n if mimeType == applicationxchat:\n return [ \"chat\" ]\n\n if mimeType == textxchangelog:\n return [ \"changelog\" ]\n\n if mimeType == imagecgm:\n return [ \"cgm\" ]\n\n if mimeType == applicationxcfscompressed:\n return [ \"cfs\" ]\n\n if mimeType == applicationpkixcert:\n return [ \"cer\" ]\n\n if mimeType == applicationvndcinderella:\n return [ \"cdy\" ]\n\n if mimeType == applicationvndchemdraw_xml:\n return [ \"cdxml\" ]\n\n if mimeType == chemicalxcdx:\n return [ \"cdx\" ]\n\n if mimeType == applicationvndcoreldraw:\n return [ \"cdr\" ]\n\n if mimeType == applicationcdmiqueue:\n return [ \"cdmiq\" ]\n\n if mimeType == applicationcdmiobject:\n return [ \"cdmio\" ]\n\n if mimeType == applicationcdmidomain:\n return [ \"cdmid\" ]\n\n if mimeType == applicationcdmicontainer:\n return [ \"cdmic\" ]\n\n if mimeType == applicationcdmicapability:\n return [ \"cdmia\" ]\n\n if mimeType == applicationvndmediastationcdkey:\n return [ \"cdkey\" ]\n\n if mimeType == applicationxdiscjugglercdimage:\n return [ \"cdi\" ]\n\n if mimeType == applicationcdfx_xml:\n return [ \"cdfx\" ]\n\n if mimeType == 
applicationxnetcdf:\n return [ \"cdf\", \"nc\" ]\n\n if mimeType == applicationvndcontactcmsg:\n return [ \"cdbcmsg\" ]\n\n if mimeType == applicationccxml_xml:\n return [ \"ccxml\" ]\n\n if mimeType == applicationxcocoa:\n return [ \"cco\" ]\n\n if mimeType == applicationxccmx:\n return [ \"ccmx\" ]\n\n if mimeType == applicationvndcomicbook_zip:\n return [ \"cbz\" ]\n\n if mimeType == applicationxcbt:\n return [ \"cbt\" ]\n\n if mimeType == applicationvndcomicbookrar:\n return [ \"cbr\" ]\n\n if mimeType == textxcobol:\n return [ \"cbl\", \"cob\" ]\n\n if mimeType == applicationxcbr:\n return [ \"cba\" ]\n\n if mimeType == applicationxcb7:\n return [ \"cb7\" ]\n\n if mimeType == applicationvndmspkiseccat:\n return [ \"cat\" ]\n\n if mimeType == applicationvndcurlcar:\n return [ \"car\" ]\n\n if mimeType == audioxcaf:\n return [ \"caf\" ]\n\n if mimeType == applicationvndmscabcompressed:\n return [ \"cab\" ]\n\n if mimeType == applicationvndclonkc4group:\n return [ \"c4g\", \"c4d\", \"c4f\", \"c4p\", \"c4u\" ]\n\n if mimeType == applicationvndcluetrustcartomobileconfigpkg:\n return [ \"c11amz\" ]\n\n if mimeType == applicationvndcluetrustcartomobileconfig:\n return [ \"c11amc\" ]\n\n if mimeType == applicationxbzip:\n return [ \"bz2\", \"bz\" ]\n\n if mimeType == imageprsbtif:\n return [ \"btif\", \"btf\" ]\n\n if mimeType == modelvndvalvesourcecompiledmap:\n return [ \"bsp\" ]\n\n if mimeType == applicationxbsdiff:\n return [ \"bsdiff\" ]\n\n if mimeType == applicationxbpspatch:\n return [ \"bps\" ]\n\n if mimeType == applicationxbzip2:\n return [ \"boz\" ]\n\n if mimeType == applicationvndpreviewsystemsbox:\n return [ \"box\" ]\n\n if mimeType == imagebmp:\n return [ \"bmp\", \"dib\" ]\n\n if mimeType == applicationvndbalsamiqbmml_xml:\n return [ \"bmml\" ]\n\n if mimeType == applicationvndbmi:\n return [ \"bmi\" ]\n\n if mimeType == applicationxblender:\n return [ \"blend\", \"blender\" ]\n\n if mimeType == applicationxblorb:\n return [ \"blb\", \"blorb\" ]\n\n if mimeType == applicationoctetstream:\n return [ \"bin\", \"dms\", \"lrf\", \"mar\", \"dist\", \"distz\", \"bpk\", \"dump\", \"elc\", \"deploy\", \"dll\", \"msp\", \"msm\", \"buffer\" ]\n\n if mimeType == videovndradgamettoolsbink:\n return [ \"bik\", \"bk2\" ]\n\n if mimeType == textxbibtex:\n return [ \"bib\" ]\n\n if mimeType == applicationvndfujitsuoasysprs:\n return [ \"bh2\" ]\n\n if mimeType == applicationvndrealvncbed:\n return [ \"bed\" ]\n\n if mimeType == applicationbdoc:\n return [ \"bdoc\" ]\n\n if mimeType == applicationxfontbdf:\n return [ \"bdf\" ]\n\n if mimeType == applicationxbcpio:\n return [ \"bcpio\" ]\n\n if mimeType == applicationxtrash:\n return [ \"bak\", \"old\", \"sik\" ]\n\n if mimeType == imagevndpcob16:\n return [ \"b16\" ]\n\n if mimeType == applicationvndamazonmobi8ebook:\n return [ \"azw3\", \"kfx\" ]\n\n if mimeType == applicationvndamazonebook:\n return [ \"azw\" ]\n\n if mimeType == imagevndairzipacceleratorazv:\n return [ \"azv\" ]\n\n if mimeType == applicationvndairzipfilesecureazs:\n return [ \"azs\" ]\n\n if mimeType == applicationvndairzipfilesecureazf:\n return [ \"azf\" ]\n\n if mimeType == videoannodex:\n return [ \"axv\" ]\n\n if mimeType == audioannodex:\n return [ \"axa\" ]\n\n if mimeType == applicationxawk:\n return [ \"awk\" ]\n\n if mimeType == audioamrwb:\n return [ \"awb\" ]\n\n if mimeType == applicationxapplixword:\n return [ \"aw\" ]\n\n if mimeType == imageavif:\n return [ \"avif\", \"avifs\" ]\n\n if mimeType == videoxmsvideo:\n return [ \"avi\", \"avf\", \"divx\" 
]\n\n if mimeType == imageavcs:\n return [ \"avcs\" ]\n\n if mimeType == imageavci:\n return [ \"avci\" ]\n\n if mimeType == textxsystemdunit:\n return [ \"automount\", \"device\", \"mount\", \"path\", \"scope\", \"slice\", \"socket\", \"swap\", \"target\", \"timer\" ]\n\n if mimeType == textxauthors:\n return [ \"authors\" ]\n\n if mimeType == audiobasic:\n return [ \"au\", \"snd\" ]\n\n if mimeType == applicationvndantixgamecomponent:\n return [ \"atx\" ]\n\n if mimeType == applicationatomsvc_xml:\n return [ \"atomsvc\" ]\n\n if mimeType == applicationatomdeleted_xml:\n return [ \"atomdeleted\" ]\n\n if mimeType == applicationatomcat_xml:\n return [ \"atomcat\" ]\n\n if mimeType == applicationatom_xml:\n return [ \"atom\" ]\n\n if mimeType == applicationvndacucorp:\n return [ \"atc\", \"acutc\" ]\n\n if mimeType == audioxmsasx:\n return [ \"asx\", \"wax\", \"wvx\", \"wmx\" ]\n\n if mimeType == imageastc:\n return [ \"astc\" ]\n\n if mimeType == applicationxasp:\n return [ \"asp\" ]\n\n if mimeType == applicationvndaccpacsimplyaso:\n return [ \"aso\" ]\n\n if mimeType == applicationvndmsasf:\n return [ \"asf\" ]\n\n if mimeType == textxcommonlisp:\n return [ \"asd\", \"fasl\", \"lisp\", \"ros\" ]\n\n if mimeType == applicationxasar:\n return [ \"asar\" ]\n\n if mimeType == applicationxapplixspreadsheet:\n return [ \"as\" ]\n\n if mimeType == imagexsonyarw:\n return [ \"arw\" ]\n\n if mimeType == applicationxarj:\n return [ \"arj\" ]\n\n if mimeType == applicationxfreearc:\n return [ \"arc\" ]\n\n if mimeType == applicationvndlotusapproach:\n return [ \"apr\" ]\n\n if mimeType == applicationxmsapplication:\n return [ \"application\" ]\n\n if mimeType == applicationxiso9660appimage:\n return [ \"appimage\" ]\n\n if mimeType == imageapng:\n return [ \"apng\" ]\n\n if mimeType == applicationvndandroidpackagearchive:\n return [ \"apk\" ]\n\n if mimeType == audioxape:\n return [ \"ape\" ]\n\n if mimeType == applicationannodex:\n return [ \"anx\" ]\n\n if mimeType == videoxanim:\n return [ \"anim19j\" ]\n\n if mimeType == applicationxnavianimation:\n return [ \"ani\" ]\n\n if mimeType == audioxamzxml:\n return [ \"amz\" ]\n\n if mimeType == audioamr:\n return [ \"amr\" ]\n\n if mimeType == applicationvndamigaami:\n return [ \"ami\" ]\n\n if mimeType == applicationxalz:\n return [ \"alz\" ]\n\n if mimeType == applicationvnddvbait:\n return [ \"ait\" ]\n\n if mimeType == applicationvndadobeairapplicationinstallerpackage_zip:\n return [ \"air\" ]\n\n if mimeType == audioxaiff:\n return [ \"aiff\", \"aif\" ]\n\n if mimeType == audioxaifc:\n return [ \"aifc\", \"aiffc\" ]\n\n if mimeType == applicationillustrator:\n return [ \"ai\" ]\n\n if mimeType == applicationvndaheadspace:\n return [ \"ahead\" ]\n\n if mimeType == applicationvndage:\n return [ \"age\" ]\n\n if mimeType == imagexapplixgraphics:\n return [ \"ag\" ]\n\n if mimeType == applicationvndibmmodcap:\n return [ \"afp\", \"listafp\", \"list3820\" ]\n\n if mimeType == applicationxfontafm:\n return [ \"afm\" ]\n\n if mimeType == applicationvndaudiograph:\n return [ \"aep\" ]\n\n if mimeType == audioadpcm:\n return [ \"adp\" ]\n\n if mimeType == applicationxamigadiskformat:\n return [ \"adf\" ]\n\n if mimeType == textxadasrc:\n return [ \"adb\", \"ads\" ]\n\n if mimeType == applicationvndacucobol:\n return [ \"acu\" ]\n\n if mimeType == applicationxace:\n return [ \"ace\" ]\n\n if mimeType == applicationvndamericandynamicsacc:\n return [ \"acc\" ]\n\n if mimeType == audioac3:\n return [ \"ac3\" ]\n\n if mimeType == applicationpkixattrcert:\n 
return [ \"ac\" ]\n\n if mimeType == applicationxabiword:\n return [ \"abw\", \"abwcrashed\", \"abwgz\", \"zabw\" ]\n\n if mimeType == audiovndaudibleaax:\n return [ \"aax\" ]\n\n if mimeType == applicationxauthorwareseg:\n return [ \"aas\" ]\n\n if mimeType == applicationxauthorwaremap:\n return [ \"aam\" ]\n\n if mimeType == audioaac:\n return [ \"aac\", \"adts\" ]\n\n if mimeType == applicationxauthorwarebin:\n return [ \"aab\", \"x32\", \"u32\", \"vox\" ]\n\n if mimeType == audioxpnaudibleaudio:\n return [ \"aa\" ]\n\n if mimeType == applicationxatari7800rom:\n return [ \"a78\" ]\n\n if mimeType == applicationxatari2600rom:\n return [ \"a26\" ]\n\n if mimeType == applicationxarchive:\n return [ \"a\", \"ar\" ]\n\n if mimeType == applicationx7zcompressed:\n return [ \"7z\", \"7z001\" ]\n\n if mimeType == applicationxt602:\n return [ \"602\" ]\n\n if mimeType == model3mf:\n return [ \"3mf\" ]\n\n if mimeType == video3gpp:\n return [ \"3gp\", \"3gpp\", \"3ga\" ]\n\n if mimeType == video3gpp2:\n return [ \"3g2\", \"3gp2\", \"3gpp2\" ]\n\n if mimeType == applicationxnintendo3dsexecutable:\n return [ \"3dsx\" ]\n\n if mimeType == applicationxnintendo3dsrom:\n return [ \"3ds\", \"cci\" ]\n\n if mimeType == textvndin3d3dml:\n return [ \"3dml\" ]\n\n if mimeType == applicationxgenesis32xrom:\n return [ \"32x\", \"mdx\" ]\n\n if mimeType == applicationvnd1000mindsdecisionmodel_xml:\n return [ \"1km\" ]\n\n if mimeType == applicationvndlotus123:\n return [ \"123\", \"wk1\", \"wk3\", \"wk4\", \"wks\" ]\n \n return []",
"def _check_url_file_type(headers: Dict[str, str]) -> Optional[str]:\n content_type = headers.get(\"content-type\", \"\").lower()\n file_type = None\n\n for extension in SUPPORTED_MIME_TYPES.keys():\n for mime_type in SUPPORTED_MIME_TYPES.get(extension, []):\n if mime_type in content_type:\n file_type = extension\n break\n\n return file_type"
] | [
"0.82419276",
"0.763851",
"0.75550187",
"0.75023365",
"0.7006191",
"0.69769895",
"0.69073343",
"0.69062054",
"0.6781376",
"0.67640036",
"0.67325264",
"0.6723504",
"0.6719304",
"0.6687197",
"0.6671605",
"0.66456544",
"0.66422045",
"0.66077816",
"0.65924525",
"0.65606976",
"0.65226626",
"0.6508522",
"0.6476599",
"0.6474171",
"0.6473315",
"0.64422166",
"0.64230984",
"0.6411514",
"0.6403587",
"0.64011955"
] | 0.7667273 | 1 |
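The row above stores a MIME-type-to-extension mapping as a long chain of if-comparisons against sanitized constants. The same lookup is usually expressed as a dictionary; a minimal Python sketch follows (the table below is a small illustrative subset of the full mapping, and the stdlib fallback is an assumption, not part of the original):

import mimetypes
from typing import Dict, List

# Illustrative subset of the full MIME-type -> extensions table above.
MIME_TO_EXTENSIONS: Dict[str, List[str]] = {
    "application/json": ["json", "map"],
    "text/javascript": ["js", "jsm", "mjs"],
    "image/jpeg": ["jpg", "jpeg", "jpe"],
    "application/gzip": ["gz"],
}

def extensions_for(mime_type: str) -> List[str]:
    # Dictionary hit first; otherwise fall back to the stdlib's guess.
    if mime_type in MIME_TO_EXTENSIONS:
        return MIME_TO_EXTENSIONS[mime_type]
    guess = mimetypes.guess_extension(mime_type, strict=False)
    return [guess.lstrip(".")] if guess else []

A dict keeps the lookup O(1) and turns the table into data rather than control flow, which is also how _check_url_file_type above iterates SUPPORTED_MIME_TYPES.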
Make sure ``ValidationError`` is raised if uploaded file has no mimetype | def test_no_mimetype(self):
field = TypedFileField(required=False, type_whitelist=self.good_types, use_magic=False)
for t in self.good_types:
name = 'somefooname'
file = UploadedFile(name=name, size=1, content_type=t)
del file.content_type
with pytest.raises(forms.ValidationError):
field.clean(file) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_no_mimetype_magic(self, mock_get_content_type):\n mock_get_content_type.side_effect = ValueError\n\n field = TypedFileField(required=False, type_whitelist=self.good_types)\n\n for t in self.good_types:\n name = 'somefooname'\n file = UploadedFile(name=name, size=1, content_type=t)\n with pytest.raises(forms.ValidationError):\n field.clean(file)",
"def test_mimetypes(self):\n field = TypedFileField(required=False, type_whitelist=self.good_types, use_magic=False)\n\n for t in self.good_types:\n name = 'somefooname'\n file = UploadedFile(name=name, size=1, content_type=t)\n assert field.clean(file) is file\n\n for t in self.bad_types:\n name = 'somefooname'\n file = UploadedFile(name=name, size=1, content_type=t)\n with pytest.raises(forms.ValidationError):\n field.clean(file)",
"def test_invalid_file_type(self):\n file = SimpleUploadedFile(\"test.csv\", b\"\\xe8\")\n form = MomConnectImportForm(\n data={\"source\": \"MomConnect Import\"}, files={\"file\": file}\n )\n self.assertTrue(form.is_valid())\n instance = form.save()\n self.assertEqual(instance.status, MomConnectImport.Status.ERROR)\n [error] = instance.errors.all()\n self.assertEqual(error.error, \"File is not a CSV\")",
"def is_file_type_error(self):\n return self._tag == 'file_type_error'",
"def validate(self, file):\n if self.maxFileSize and file['size'] < self.maxFileSize:\n raise HTTPError(406, \"File too big\")\n\n if not self.acceptedFileType(file['type']):\n raise HTTPError(406, \"File of unsupported type\")",
"def _check_mimetype(self):\n if self.mimetype in Config.aliases:\n mimetype = Config.aliases[self.mimetype]\n else:\n mimetype = self.mimetype\n expected_extensions = mimetypes.guess_all_extensions(mimetype,\n strict=False)\n if expected_extensions:\n if self.has_extension and self.extension not in expected_extensions:\n # LOG: improve this string\n self.make_dangerous('expected extensions')",
"def test_upload_wrong_file_type(self):\n file = dict(\n file=(BytesIO(b'my file contents'), \"foto.doc\"),\n )\n response = self.client.post('/upload',\n content_type='multipart/form-data',\n data=file)\n self.assertEqual(status.HTTP_400_BAD_REQUEST, response.status_code)\n if os.path.exists(PHOTOS_SAVE_PATH):\n self.assertNotIn('foto.jpg', os.listdir(PHOTOS_SAVE_PATH))",
"def check_file(file: UploadFile) -> bool:\n # accept all image, video and audio types\n mimetype = mimetypes.guess_type(file.filename)[0]\n if mimetype is not None and mimetype.split(\"/\")[0] in {\"image\", \"audio\", \"video\"}:\n return True\n # if not, only accept whitelisted file extensions\n ext = os.path.splitext(file.filename)[1]\n if ext not in settings.FILE_EXTENSION_WHITELIST:\n raise FileValidationError(f\"{file.filename} is an invalid file type\")\n return True",
"def file_extension_not_allowed(self, request, *args, **kwargs):\n raise FileTypeNotAllowed",
"def _validateFilename(self, filePath):\n # assert True\n raise NotImplementedError",
"def test_invalid_filetype(self):\n rv = self.post('/queue/',\n content={'image': (StringIO('This is not an image'),\n 'text.txt')},\n token=self.user_token)\n self.assertJSONError(rv, 'TagalleryInvalidFileExtension')\n return",
"def check_filekind(self):\n assert self.filekind in self.obs_package.FILEKINDS, \\\n \"Invalid filekind \" + repr(self.filekind) + \" in \" + repr(self.filename)",
"def test_mimetypes_magic(self, mock_get_content_type):\n\n def get_content_type(value):\n return value.content_type\n\n mock_get_content_type.side_effect = get_content_type\n\n field = TypedFileField(required=False, type_whitelist=self.good_types, use_magic=True)\n\n for t in self.good_types:\n name = 'somefooname'\n file = UploadedFile(name=name, size=1, content_type=t)\n assert field.clean(file) is file\n\n for t in self.bad_types:\n name = 'somefooname'\n file = UploadedFile(name=name, size=1, content_type=t)\n with pytest.raises(forms.ValidationError):\n field.clean(file)",
"def validateFilename(self, filePath):\n self._validateFilename(filePath)\n return self._fileType",
"def _validate(self):\n if not self._contents.has_key('type'):\n raise ValidationFailed(\"Metadata file %s contains no type field\" % (self._filename))\n \n if not self._contents.has_key('version'):\n raise ValidationFailed(\"Metadata file %s contains no version field\" %\n (self._filename))",
"def validate_file_extension(self):\n extension = os.path.splitext(self.name)[1] # [0] returns path+filename\n if extension.lower() in settings.CONTENT_TYPES:\n if self.size > int(settings.MAX_UPLOAD_SIZE):\n raise ValidationError(_(f'Veličina fajl-a mora da bude ispod'\n f' {filesizeformat(settings.MAX_UPLOAD_SIZE)}.'\n f' Trenutna veličina je {filesizeformat(self.size)}'))\n else:\n raise ValidationError('Nije podržan ovaj tip fajl-a. Mora biti .pdf formata!')",
"def test_upload_step__invalid_file(self):\n # Set Up\n self.go_to_step(FeedUpdateWizard.UPLOAD_STEP)\n\n # Test\n with open(f\"{ETL_TEST_DATA_DIR}invalid_extension.txt\", \"r\") as fp:\n response = self.client.post(\n self.WIZARD_URL,\n {\n self.WIZARD_CURRENT_STEP: FeedUpdateWizard.UPLOAD_STEP,\n self.SELECTED_ITEM: self.ITEM_UPLOAD_FILE,\n \"upload_file\": fp,\n \"submit\": \"submit\",\n },\n )\n\n # Assert\n self.assertEqual(response.status_code, 200)\n error_data = response.context_data[\"wizard\"][\"form\"].errors.get_json_data()\n self.assertEqual(\n response.context[\"wizard\"][\"steps\"].current, FeedUpdateWizard.UPLOAD_STEP\n )\n self.assertEqual(\n error_data,\n {\n \"upload_file\": [\n {\n \"code\": \"invalid\",\n \"message\": \"The file is not in a correct format\",\n }\n ]\n },\n )",
"def validate(self, data):\n logger.debug(data)\n upload = data['upload']\n config_type = data['config_type']\n content_type = validators.validate_content_type(upload, config_type)\n if config_type == 'PRESET':\n validators.validate_preset(upload)\n data['content_type'] = content_type\n fname = data['upload'].name\n data['filename'] = fname.replace(' ', '_').lower()\n return data",
"def test_missing_extension(client: FlaskClient):\n file = get_example_file(ExampleFileType.Txt)\n file.filename = \"test\"\n response = util.upload_file(client, DEFAULT_USER, file)\n assert response.status == \"400 BAD REQUEST\"",
"def validate(self, document) -> None:\n path = Path(document.text).expanduser()\n if self._is_file and not path.is_file():\n raise ValidationError(\n message=self._message,\n cursor_position=document.cursor_position,\n )\n elif self._is_dir and not path.is_dir():\n raise ValidationError(\n message=self._message,\n cursor_position=document.cursor_position,\n )\n elif not path.exists():\n raise ValidationError(\n message=self._message,\n cursor_position=document.cursor_position,\n )",
"def test_extensions(self):\n field = TypedFileField(required=False, ext_whitelist=self.good_extensions)\n\n for ext in self.good_extensions:\n name = 'somefooname.%s' % ext\n file = UploadedFile(name=name, size=1)\n assert field.clean(file) is file\n\n for ext in self.bad_extensions:\n name = 'somefooname.%s' % ext\n file = UploadedFile(name=name, size=1)\n with pytest.raises(forms.ValidationError):\n field.clean(file)",
"def _check_format(file_path, content):\n if not content:\n # testcase file content is empty\n err_msg = u\"Testcase file conetent is empty: {}\".format(file_path)\n logger.log_error(err_msg)\n raise exception.FileFormatError(err_msg)",
"def test_metadata_schema_json_invalid_file_upload(invalid_schema_file, mock_irods):\n\n schema_file_path = 'pytest/assets/{}'.format(invalid_schema_file)\n file_size = os.stat(schema_file_path).st_size\n assert file_size > 0\n file_to_upload = UploadedFile(file=open(schema_file_path, 'rb'),\n name=os.path.basename(schema_file_path), size=file_size)\n\n form_data = {\"mp_program_type\": \"Test Model Program\"}\n files = {\"mi_json_schema_file\": file_to_upload}\n metadata_validation_form = ModelProgramMetadataValidationForm(data=form_data, files=files)\n assert not metadata_validation_form.is_valid()",
"def _check_extension(self):\n if self.extension in Config.override_ext:\n expected_mimetype = Config.override_ext[self.extension]\n else:\n expected_mimetype, encoding = mimetypes.guess_type(self.src_path,\n strict=False)\n if expected_mimetype in Config.aliases:\n expected_mimetype = Config.aliases[expected_mimetype]\n is_known_extension = self.extension in mimetypes.types_map.keys()\n if is_known_extension and expected_mimetype != self.mimetype:\n # LOG: improve this string\n self.make_dangerous('expected_mimetype')",
"def validate_data(self, data):\n # TODO use schema\n assert \"file_contents\" in data, data\n assert \"type\" in data, data",
"def _validate_file(self, filepath: str):\n if not os.path.exists(filepath):\n raise FileNotFoundError(f\"No such file or directory: {filepath}\")\n if not os.path.isfile(filepath):\n raise IsADirectoryError(f\"Is a directory: {filepath}\")",
"def test_upload_bad_file(self):\n url = image_upload_url(self.reteta.id)\n res = self.client.post(url, {'image': 'notimage'}, format='multipart')\n self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)",
"def validate_single_file(self, **kwargs):\n if self.file_type not in self._format_registry:\n valid_result_cls = example_filetype_format.ValidationResults(\n errors=\"Your filename is incorrect! Please change your filename before you run the validator or specify --filetype if you are running the validator locally\",\n warnings=\"\",\n )\n else:\n mykwargs = {}\n for required_parameter in self._validate_kwargs:\n assert required_parameter in kwargs.keys(), (\n \"%s not in parameter list\" % required_parameter\n )\n mykwargs[required_parameter] = kwargs[required_parameter]\n mykwargs[\"project_id\"] = self._project.id\n\n validator_cls = self._format_registry[self.file_type]\n validator = validator_cls(\n syn=self._synapse_client,\n center=self.center,\n genie_config=self.genie_config,\n ancillary_files=self.ancillary_files,\n )\n filepathlist = [entity.path for entity in self.entitylist]\n valid_result_cls = validator.validate(filePathList=filepathlist, **mykwargs)\n\n # Complete error message\n message = valid_result_cls.collect_errors_and_warnings()\n return (valid_result_cls, message)",
"def validate(self,value):\n \n if value is not None and not isinstance(value, self.data_type):\n try: \n value = value.file.read()\n except AttributeError:\n value = None\n \n value = super(FileTypeBlobProperty, self).validate(value)\n \n return value",
"def test_fetch_or_create_requires_file_type():\n pytest.raises(ValueError, media.fetch_or_create_media_item, b'spam')"
] | [
"0.7776475",
"0.72709423",
"0.7118607",
"0.7027781",
"0.7002521",
"0.69914484",
"0.694859",
"0.68884635",
"0.686069",
"0.6736035",
"0.6707866",
"0.66616106",
"0.66365695",
"0.66283804",
"0.6582579",
"0.6581422",
"0.6486669",
"0.64784694",
"0.6478206",
"0.6458501",
"0.632179",
"0.6311145",
"0.6307271",
"0.6291811",
"0.6257448",
"0.62417513",
"0.6206984",
"0.61903656",
"0.6188878",
"0.61560404"
] | 0.79070336 | 0 |
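Rows like the one above all test the same contract: a whitelisting file field must raise ValidationError when an upload's mimetype is missing or not allowed. A minimal sketch of such a field — hypothetical, not TypedFileField's actual implementation — assuming Django is installed:

from django import forms

class WhitelistedFileField(forms.FileField):
    # Hypothetical field illustrating the behavior the tests above assert.
    def __init__(self, *args, type_whitelist=(), **kwargs):
        self.type_whitelist = set(type_whitelist)
        super().__init__(*args, **kwargs)

    def clean(self, data, initial=None):
        data = super().clean(data, initial)
        if data is None or not self.type_whitelist:
            return data
        content_type = getattr(data, "content_type", None)
        if content_type is None:
            # test_no_mimetype: an absent mimetype is itself a failure.
            raise forms.ValidationError("Could not determine the file's mimetype.")
        if content_type not in self.type_whitelist:
            raise forms.ValidationError("Unsupported mimetype: %s" % content_type)
        return data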
Make sure ``ValidationError`` is raised if uploaded file has no mimetype | def test_no_mimetype_magic(self, mock_get_content_type):
mock_get_content_type.side_effect = ValueError
field = TypedFileField(required=False, type_whitelist=self.good_types)
for t in self.good_types:
name = 'somefooname'
file = UploadedFile(name=name, size=1, content_type=t)
with pytest.raises(forms.ValidationError):
field.clean(file) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_no_mimetype(self):\n field = TypedFileField(required=False, type_whitelist=self.good_types, use_magic=False)\n\n for t in self.good_types:\n name = 'somefooname'\n file = UploadedFile(name=name, size=1, content_type=t)\n del file.content_type\n with pytest.raises(forms.ValidationError):\n field.clean(file)",
"def test_mimetypes(self):\n field = TypedFileField(required=False, type_whitelist=self.good_types, use_magic=False)\n\n for t in self.good_types:\n name = 'somefooname'\n file = UploadedFile(name=name, size=1, content_type=t)\n assert field.clean(file) is file\n\n for t in self.bad_types:\n name = 'somefooname'\n file = UploadedFile(name=name, size=1, content_type=t)\n with pytest.raises(forms.ValidationError):\n field.clean(file)",
"def test_invalid_file_type(self):\n file = SimpleUploadedFile(\"test.csv\", b\"\\xe8\")\n form = MomConnectImportForm(\n data={\"source\": \"MomConnect Import\"}, files={\"file\": file}\n )\n self.assertTrue(form.is_valid())\n instance = form.save()\n self.assertEqual(instance.status, MomConnectImport.Status.ERROR)\n [error] = instance.errors.all()\n self.assertEqual(error.error, \"File is not a CSV\")",
"def is_file_type_error(self):\n return self._tag == 'file_type_error'",
"def validate(self, file):\n if self.maxFileSize and file['size'] < self.maxFileSize:\n raise HTTPError(406, \"File too big\")\n\n if not self.acceptedFileType(file['type']):\n raise HTTPError(406, \"File of unsupported type\")",
"def _check_mimetype(self):\n if self.mimetype in Config.aliases:\n mimetype = Config.aliases[self.mimetype]\n else:\n mimetype = self.mimetype\n expected_extensions = mimetypes.guess_all_extensions(mimetype,\n strict=False)\n if expected_extensions:\n if self.has_extension and self.extension not in expected_extensions:\n # LOG: improve this string\n self.make_dangerous('expected extensions')",
"def test_upload_wrong_file_type(self):\n file = dict(\n file=(BytesIO(b'my file contents'), \"foto.doc\"),\n )\n response = self.client.post('/upload',\n content_type='multipart/form-data',\n data=file)\n self.assertEqual(status.HTTP_400_BAD_REQUEST, response.status_code)\n if os.path.exists(PHOTOS_SAVE_PATH):\n self.assertNotIn('foto.jpg', os.listdir(PHOTOS_SAVE_PATH))",
"def check_file(file: UploadFile) -> bool:\n # accept all image, video and audio types\n mimetype = mimetypes.guess_type(file.filename)[0]\n if mimetype is not None and mimetype.split(\"/\")[0] in {\"image\", \"audio\", \"video\"}:\n return True\n # if not, only accept whitelisted file extensions\n ext = os.path.splitext(file.filename)[1]\n if ext not in settings.FILE_EXTENSION_WHITELIST:\n raise FileValidationError(f\"{file.filename} is an invalid file type\")\n return True",
"def file_extension_not_allowed(self, request, *args, **kwargs):\n raise FileTypeNotAllowed",
"def _validateFilename(self, filePath):\n # assert True\n raise NotImplementedError",
"def test_invalid_filetype(self):\n rv = self.post('/queue/',\n content={'image': (StringIO('This is not an image'),\n 'text.txt')},\n token=self.user_token)\n self.assertJSONError(rv, 'TagalleryInvalidFileExtension')\n return",
"def check_filekind(self):\n assert self.filekind in self.obs_package.FILEKINDS, \\\n \"Invalid filekind \" + repr(self.filekind) + \" in \" + repr(self.filename)",
"def test_mimetypes_magic(self, mock_get_content_type):\n\n def get_content_type(value):\n return value.content_type\n\n mock_get_content_type.side_effect = get_content_type\n\n field = TypedFileField(required=False, type_whitelist=self.good_types, use_magic=True)\n\n for t in self.good_types:\n name = 'somefooname'\n file = UploadedFile(name=name, size=1, content_type=t)\n assert field.clean(file) is file\n\n for t in self.bad_types:\n name = 'somefooname'\n file = UploadedFile(name=name, size=1, content_type=t)\n with pytest.raises(forms.ValidationError):\n field.clean(file)",
"def validateFilename(self, filePath):\n self._validateFilename(filePath)\n return self._fileType",
"def validate_file_extension(self):\n extension = os.path.splitext(self.name)[1] # [0] returns path+filename\n if extension.lower() in settings.CONTENT_TYPES:\n if self.size > int(settings.MAX_UPLOAD_SIZE):\n raise ValidationError(_(f'Veličina fajl-a mora da bude ispod'\n f' {filesizeformat(settings.MAX_UPLOAD_SIZE)}.'\n f' Trenutna veličina je {filesizeformat(self.size)}'))\n else:\n raise ValidationError('Nije podržan ovaj tip fajl-a. Mora biti .pdf formata!')",
"def _validate(self):\n if not self._contents.has_key('type'):\n raise ValidationFailed(\"Metadata file %s contains no type field\" % (self._filename))\n \n if not self._contents.has_key('version'):\n raise ValidationFailed(\"Metadata file %s contains no version field\" %\n (self._filename))",
"def test_upload_step__invalid_file(self):\n # Set Up\n self.go_to_step(FeedUpdateWizard.UPLOAD_STEP)\n\n # Test\n with open(f\"{ETL_TEST_DATA_DIR}invalid_extension.txt\", \"r\") as fp:\n response = self.client.post(\n self.WIZARD_URL,\n {\n self.WIZARD_CURRENT_STEP: FeedUpdateWizard.UPLOAD_STEP,\n self.SELECTED_ITEM: self.ITEM_UPLOAD_FILE,\n \"upload_file\": fp,\n \"submit\": \"submit\",\n },\n )\n\n # Assert\n self.assertEqual(response.status_code, 200)\n error_data = response.context_data[\"wizard\"][\"form\"].errors.get_json_data()\n self.assertEqual(\n response.context[\"wizard\"][\"steps\"].current, FeedUpdateWizard.UPLOAD_STEP\n )\n self.assertEqual(\n error_data,\n {\n \"upload_file\": [\n {\n \"code\": \"invalid\",\n \"message\": \"The file is not in a correct format\",\n }\n ]\n },\n )",
"def test_missing_extension(client: FlaskClient):\n file = get_example_file(ExampleFileType.Txt)\n file.filename = \"test\"\n response = util.upload_file(client, DEFAULT_USER, file)\n assert response.status == \"400 BAD REQUEST\"",
"def validate(self, data):\n logger.debug(data)\n upload = data['upload']\n config_type = data['config_type']\n content_type = validators.validate_content_type(upload, config_type)\n if config_type == 'PRESET':\n validators.validate_preset(upload)\n data['content_type'] = content_type\n fname = data['upload'].name\n data['filename'] = fname.replace(' ', '_').lower()\n return data",
"def validate(self, document) -> None:\n path = Path(document.text).expanduser()\n if self._is_file and not path.is_file():\n raise ValidationError(\n message=self._message,\n cursor_position=document.cursor_position,\n )\n elif self._is_dir and not path.is_dir():\n raise ValidationError(\n message=self._message,\n cursor_position=document.cursor_position,\n )\n elif not path.exists():\n raise ValidationError(\n message=self._message,\n cursor_position=document.cursor_position,\n )",
"def test_extensions(self):\n field = TypedFileField(required=False, ext_whitelist=self.good_extensions)\n\n for ext in self.good_extensions:\n name = 'somefooname.%s' % ext\n file = UploadedFile(name=name, size=1)\n assert field.clean(file) is file\n\n for ext in self.bad_extensions:\n name = 'somefooname.%s' % ext\n file = UploadedFile(name=name, size=1)\n with pytest.raises(forms.ValidationError):\n field.clean(file)",
"def _check_format(file_path, content):\n if not content:\n # testcase file content is empty\n err_msg = u\"Testcase file conetent is empty: {}\".format(file_path)\n logger.log_error(err_msg)\n raise exception.FileFormatError(err_msg)",
"def test_metadata_schema_json_invalid_file_upload(invalid_schema_file, mock_irods):\n\n schema_file_path = 'pytest/assets/{}'.format(invalid_schema_file)\n file_size = os.stat(schema_file_path).st_size\n assert file_size > 0\n file_to_upload = UploadedFile(file=open(schema_file_path, 'rb'),\n name=os.path.basename(schema_file_path), size=file_size)\n\n form_data = {\"mp_program_type\": \"Test Model Program\"}\n files = {\"mi_json_schema_file\": file_to_upload}\n metadata_validation_form = ModelProgramMetadataValidationForm(data=form_data, files=files)\n assert not metadata_validation_form.is_valid()",
"def _check_extension(self):\n if self.extension in Config.override_ext:\n expected_mimetype = Config.override_ext[self.extension]\n else:\n expected_mimetype, encoding = mimetypes.guess_type(self.src_path,\n strict=False)\n if expected_mimetype in Config.aliases:\n expected_mimetype = Config.aliases[expected_mimetype]\n is_known_extension = self.extension in mimetypes.types_map.keys()\n if is_known_extension and expected_mimetype != self.mimetype:\n # LOG: improve this string\n self.make_dangerous('expected_mimetype')",
"def validate_data(self, data):\n # TODO use schema\n assert \"file_contents\" in data, data\n assert \"type\" in data, data",
"def _validate_file(self, filepath: str):\n if not os.path.exists(filepath):\n raise FileNotFoundError(f\"No such file or directory: {filepath}\")\n if not os.path.isfile(filepath):\n raise IsADirectoryError(f\"Is a directory: {filepath}\")",
"def test_upload_bad_file(self):\n url = image_upload_url(self.reteta.id)\n res = self.client.post(url, {'image': 'notimage'}, format='multipart')\n self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)",
"def validate(self,value):\n \n if value is not None and not isinstance(value, self.data_type):\n try: \n value = value.file.read()\n except AttributeError:\n value = None\n \n value = super(FileTypeBlobProperty, self).validate(value)\n \n return value",
"def validate_single_file(self, **kwargs):\n if self.file_type not in self._format_registry:\n valid_result_cls = example_filetype_format.ValidationResults(\n errors=\"Your filename is incorrect! Please change your filename before you run the validator or specify --filetype if you are running the validator locally\",\n warnings=\"\",\n )\n else:\n mykwargs = {}\n for required_parameter in self._validate_kwargs:\n assert required_parameter in kwargs.keys(), (\n \"%s not in parameter list\" % required_parameter\n )\n mykwargs[required_parameter] = kwargs[required_parameter]\n mykwargs[\"project_id\"] = self._project.id\n\n validator_cls = self._format_registry[self.file_type]\n validator = validator_cls(\n syn=self._synapse_client,\n center=self.center,\n genie_config=self.genie_config,\n ancillary_files=self.ancillary_files,\n )\n filepathlist = [entity.path for entity in self.entitylist]\n valid_result_cls = validator.validate(filePathList=filepathlist, **mykwargs)\n\n # Complete error message\n message = valid_result_cls.collect_errors_and_warnings()\n return (valid_result_cls, message)",
"def test_fetch_or_create_requires_file_type():\n pytest.raises(ValueError, media.fetch_or_create_media_item, b'spam')"
] | [
"0.7906906",
"0.7269755",
"0.7116972",
"0.7030246",
"0.7002452",
"0.69941306",
"0.6949124",
"0.6888954",
"0.6862505",
"0.67371505",
"0.6708364",
"0.6663474",
"0.66362625",
"0.66306883",
"0.658149",
"0.65809655",
"0.64851123",
"0.647826",
"0.64769727",
"0.64585227",
"0.63203585",
"0.63111985",
"0.63056624",
"0.62946105",
"0.6255754",
"0.62430406",
"0.6206093",
"0.6189184",
"0.6188331",
"0.6156691"
] | 0.7776174 | 1 |
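The magic-based variant above differs only in where the mimetype comes from: content sniffing (e.g. with the python-magic package) rather than the upload's declared content_type, with ValueError as the "cannot determine" signal that the test mocks. A minimal sketch of that sniffing step — the helper name is illustrative:

import magic  # python-magic (libmagic bindings)

def sniff_content_type(uploaded_file) -> str:
    # Sniff a small prefix, then rewind so later consumers can re-read it.
    head = uploaded_file.read(2048)
    uploaded_file.seek(0)
    content_type = magic.from_buffer(head, mime=True)
    if not content_type:
        # Same failure mode the test simulates with side_effect = ValueError.
        raise ValueError("could not determine content type")
    return content_type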
Initialize class with lfp data | def __init__(self, lfp_data):
self.lfp_data = lfp_data | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __init__(self):\n \n self.load_PSF_data()",
"def __init__(self, *args, **kwargs):\n super(AbsLoopinData, self).__init__(\n # All set outside\n ('linl_lis', LinlLis()),\n ('linh', Byte()),\n *args, **kwargs\n )",
"def __init__(self, features, labels, bigdl_type=\"float\"):\n self.feature = features[0]\n self.features = features\n self.label = labels[0]\n self.bigdl_type = bigdl_type\n self.labels = labels",
"def __init__(self, data):\n self.data = data\n return",
"def __init__(self, data=None):\n self.data = data",
"def __init__(self, **kwargs):\n DataLoader.__init__(self, **kwargs)",
"def __init__(self, data):\n self.data = data",
"def __init__(self, data):\n self.data = data",
"def __init__(self, data):\n self.data = data",
"def __init__(self, data):\n self.data = data",
"def __init__(self, fp):\n self._fp = fp",
"def __init__(self, data_filename):\n with open(data_filename, 'rb') as data_file:\n loaded_features = pickle.load(data_file)\n self.title_nlp_tfidf_features = loaded_features['title_NLP_TFIDF_features']\n self.other_features = loaded_features['other_features']\n self.category1_features = loaded_features['category1_features']\n self.category2_features = loaded_features['category2_features']\n self.category3_features = loaded_features['category3_features']\n self.material_features = loaded_features['material_features']\n self.who_made_features = loaded_features['whoMade_features']\n self.when_made_features = loaded_features['whenMade_features']\n self.style1_features = loaded_features['style1_features']\n self.style2_features = loaded_features['style2_features']\n self.feature_labels = loaded_features['feature_labels']",
"def _init_data(self) -> None:\n self.dtype = dict()\n self.shape = dict()\n self.size = dict()\n self.attrs = dict()\n self.data_ptr = dict()\n\n if self.mode == 'r':\n for k in self.fp.keys():\n self.dtype[k] = self.fp[k].dtype\n self.shape[k] = self.fp[k].shape\n self.size[k] = self.fp[k].shape[0]\n self.data_ptr[k] = 0",
"def __init__(self, data):\n self.jssp_instance_data = data",
"def __init__(self, logFP):\n self.logFP = logFP",
"def __init__(self, data):\n\t\tself.protocol_version, self.le_state, self.playback_state, \\\n\t\t self.source, self.le_flags, self.playback_flags, \\\n\t\t self.source_flags, self.fullness, self.point_rate, \\\n\t\t self.point_count = \\\n\t\t\tstruct.unpack(\"<BBBBHHHHII\", data)",
"def __init__(self):\n\n data_extract=DataExtracter()\n self.data = tuple()",
"def __init__(self, data):\n self.__data = data\n self.__next = None",
"def __init__(self, dat):\n self.data = dat",
"def __init__(self,data):\n\n self.data = data",
"def __init__(self):\n self.bpf_lcut = 10\n self.bpf_hcut = 425\n self.lpf_lcut = 5\n self.lp_butter_order = 4\n self.bp_butter_order = 2\n self.data_rate = None\n self.process_time = []",
"def __init__(self, *args):\n _snap.TLFlt_swiginit(self, _snap.new_TLFlt(*args))",
"def __init__(self, data_path):\n self.perf_data = dill.load(open(data_path, 'rb'))\n #print(self.perf_data[0])\n print(len(self.perf_data))\n self.length = len(self.perf_data)\n\n # perform a few pre-processing steps\n for i in range(self.length):\n # store the length of the pitch contours for use later\n self.perf_data[i]['length'] = len(\n self.perf_data[i]['pitch_contour'])\n # store the length of the pitch contours for use later\n self.perf_data[i]['pitch_contour'] = self.normalize_pitch_contour(\n self.perf_data[i]['pitch_contour'])\n print(self.perf_data[0])",
"def __init__(self, data=None, next=None):\r\n pass",
"def __init__(self, data):\n\n self.data = data\n self.fhat = None\n self.dtol = 1e-3 * math.sqrt(data.dim)\n self.proposed_points = None\n self.budget = None\n self.fhat = None",
"def __init__(self, directory, fName, data_instance):\n self.fName = fName\n self.data_instance = data_instance\n self.directory = directory",
"def __init__(self, dataset_dir, listfile=None):\n Reader.__init__(self, dataset_dir, listfile)\n self._data = [line.split(',') for line in self._data]\n\n def process_ihm(x):\n return list(map(int, x.split(';')))\n\n def process_los(x):\n x = x.split(';')\n if x[0] == '':\n return ([], [])\n return (list(map(int, x[:len(x)//2])), list(map(float, x[len(x)//2:])))\n\n def process_ph(x):\n return list(map(int, x.split(';')))\n\n def process_decomp(x):\n x = x.split(';')\n if x[0] == '':\n return ([], [])\n return (list(map(int, x[:len(x)//2])), list(map(int, x[len(x)//2:])))\n\n self._data = [(fname, float(t), process_ihm(ihm), process_los(los),\n process_ph(pheno), process_decomp(decomp))\n for fname, t, ihm, los, pheno, decomp in self._data]",
"def __init__(self, data=None, filename=None, schema=None):\n self.data = None\n self.schema = None\n self.filename = None\n if schema:\n self.load_schema(schema)\n if filename:\n self.load_file(filename)\n if data:\n self.load_data(data)",
"def __init__(self, **kwargs):\n DataLoader.__init__(self, **kwargs)\n \n self._results_ = None",
"def __init__( self\n , _o_data\n ):\n self.o_data = _o_data"
] | [
"0.75963",
"0.67351204",
"0.6706532",
"0.66661716",
"0.6611448",
"0.6609953",
"0.66079646",
"0.66079646",
"0.66079646",
"0.66079646",
"0.6534196",
"0.6520125",
"0.65192205",
"0.650805",
"0.6506647",
"0.6488057",
"0.6477326",
"0.6463792",
"0.64620787",
"0.64137286",
"0.6397195",
"0.63915974",
"0.6308256",
"0.630286",
"0.6290625",
"0.62902385",
"0.62775457",
"0.62727344",
"0.6255645",
"0.6254759"
] | 0.88286996 | 0 |
Remove temporal mean from each trial | def remove_temporal_mean(self):
if not hasattr(self, 'detrended_data'):
self.detrend_data()
self.mean_removed_data = self.detrended_data - \
np.mean(self.detrended_data, axis=-1, keepdims=True) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def subtract_mean_across_trials(self):\n if not hasattr(self, 'std_divided_data'):\n self.divide_by_temporal_std()\n self.mean_across_trials_subtracted_data = \\\n self.std_divided_data - \\\n np.mean(self.std_divided_data, axis=1, keepdims=True)",
"def divide_by_temporal_std(self):\n if not hasattr(self, 'mean_removed_data'):\n self.remove_temporal_mean()\n self.std_divided_data = self.mean_removed_data / \\\n np.std(self.mean_removed_data, axis=-1, keepdims=True)",
"def demeaned(self):\n return self.data - self.mean",
"def remove_mean_drift(t, data, T=1.):\n # the convolution function is a Heaviside function to get the mean\n conv_func = np.ones(int(T/(t[1]-t[0]))) \n # the number of convoluted points is variable (boundary effect)\n conv_number = convolve(np.ones(len(data)), conv_func,\n mode='same')\n # the sliding mean that depends on the frequency\n sliding_mean = convolve(data, conv_func,\n mode='same')/conv_number\n return data-sliding_mean",
"def remove_invariable_features(tX):\n\n features = tX.T\n stds = np.std(features, axis=1)\n indices = np.where(stds == 0)\n new_tX = np.delete(features, indices, 0).T\n return new_tX",
"def rm_trend(self, dim=\"time\", nan_policy=\"none\"):\n return rm_trend(self._obj, dim=dim, nan_policy=nan_policy)",
"def untruncatedMean(self):\n return self._distribution.untrMean()",
"def time_per_part():\r\n return random.normalvariate(PT_MEAN, PT_SIGMA)",
"def trial_atr(trial, omit_missing_frames=True):\n frames = trial.HMM_MLE\n if omit_missing_frames:\n frames = frames[frames >= 0]\n\n runs = calc_run_lengths(trial.HMM_MLE)\n return_times = []\n current_return_time = 0\n for run in runs:\n if run.object == 0:\n return_times.append(current_return_time/60)\n current_return_time = 0\n else:\n current_return_time += run.length\n return np.mean(return_times)",
"def avgtr(self):\n return np.diff(self.trtimes).mean()",
"def isolate(samples):\n zeroed = samples.groupby(axis=1, level=0).apply(\n lambda group: group - group.iloc[0, 0]\n )\n return zeroed.groupby(axis=1, level=0).apply(_remove_other_timings)",
"def drop_table(self):\n for ss in self.spectrae:\n ss.tau[('H',1,1215)] = np.array([0])",
"def trial_ndt(trial, omit_missing_frames=True):\n frames = trial.HMM_MLE\n\n group_lengths = [(g[0], len(list(g[1]))) for g in itertools.groupby(frames)]\n\n if omit_missing_frames:\n group_lengths = [l for l in group_lengths if l[0] >= 0]\n\n mean_on_target_group_length = np.mean(\n [l[1] for l in group_lengths if l[0] == 0])\n mean_nonmissing_group_length = np.mean(\n [l[1] for l in group_lengths])\n\n return (mean_on_target_group_length - mean_nonmissing_group_length)/60",
"def removeData(vals, ti, ws, sensor, frac):\n removed = vals.copy()\n\n # Determine times at which values were measured\n knownTimes = np.where(~np.isnan(removed[ti:ti+ws, sensor]))[0] + ti\n\n # Choose times at which to delete known data\n removedTimes = np.sort(np.random.choice(knownTimes, int(frac * ws), replace=False))\n # Delete the data\n removed[removedTimes, sensor] = np.nan\n\n return removed, removedTimes",
"def reset_mean(cls, sensor):\n if sensor == 't':\n cls.mean_t.clear()\n return cls.mean_t == []\n if sensor == 'l':\n cls.mean_l.clear()\n return cls.mean_l == []",
"def unwhiten_back(self, sample):\n sample = sample*self.Y_std.unsqueeze(1) + self.Y_mean.unsqueeze(1)\n return sample",
"def unwhiten_back(self, sample):\n sample = sample*self.Y_std.unsqueeze(1) + self.Y_mean.unsqueeze(1)\n return sample",
"def untruncatedMean(self):\n self.raiseAnError(NotImplementedError,'untruncatedMean not yet implemented for ' + self.type)",
"def untruncatedMean(self):\n self.raiseAnError(NotImplementedError,'untruncatedMean not yet implemented for ' + self.type)",
"def test_reset_temporal_axis(PM_ds_control_3d_full):\r\n smooth = 10\r\n tsmooth_kws = {\"time\": smooth}\r\n first_ori = PM_ds_control_3d_full.time[0].values\r\n first_actual = _reset_temporal_axis(\r\n PM_ds_control_3d_full, tsmooth_kws=tsmooth_kws, dim=\"time\"\r\n ).time.values[0]\r\n first_expected = f\"{first_ori}-{first_ori+smooth*1-1}\"\r\n assert first_actual == first_expected",
"def _sample_from_null_frm_dist(mean_spike_count, total_baseline_time, total_effect_time, sample_size=10 ** 6):\n total_time = total_baseline_time + total_effect_time\n\n samples = (\n st.poisson(mean_spike_count * total_effect_time / total_time).rvs(sample_size) / total_effect_time\n -\n st.poisson(mean_spike_count * total_baseline_time / total_time).rvs(sample_size) / total_baseline_time\n )\n\n # convert 1/ms to 1/s (Hz)\n samples = samples / MS_TO_S\n\n return samples",
"def untruncatedMean(self, x):\n self.raiseAnError(NotImplementedError,'untruncatedMean not yet implemented for ' + self.type)",
"def samples_keep(self,index):\n\n\t\tif isinstance(index, (int, long)): index = range(self.samples)[-index:]\n\n\t\tself.sampled_topics = np.take(self.sampled_topics,index,axis=0)\n\t\tself.tt = np.take(self.tt,index,axis=2)\n\t\tself.dt = np.take(self.dt,index,axis=2)\n\n\t\tself.samples = len(index)",
"def trial_pfot(trial, omit_missing_frames=True):\n frames = trial.HMM_MLE\n if omit_missing_frames:\n frames = frames[frames >= 0]\n return np.mean(frames == 0)",
"def remove_annual_mean(data,data_obs,lats,lons,lats_obs,lons_obs):\n \n ### Import modulates\n import numpy as np\n import calc_Utilities as UT\n \n ### Create 2d grid\n lons2,lats2 = np.meshgrid(lons,lats)\n lons2_obs,lats2_obs = np.meshgrid(lons_obs,lats_obs)\n \n ### Calculate weighted average and remove mean\n data = data - UT.calc_weightedAve(data,lats2)[:,:,np.newaxis,np.newaxis]\n data_obs = data_obs - UT.calc_weightedAve(data_obs,lats2_obs)[:,np.newaxis,np.newaxis]\n \n return data,data_obs",
"def test_basic_orbit_mean(self):\n orbit_info = {'kind': 'local time', 'index': 'mlt'}\n self.testInst = pysat.Instrument('pysat', 'testing',\n clean_level='clean',\n orbit_info=orbit_info)\n self.testInst.bounds = self.bounds2\n ans = avg.mean_by_orbit(self.testInst, 'mlt')\n\n # Note last orbit is incomplete thus not expected to satisfy relation\n ans = ans[:-1]\n\n assert np.allclose(ans.values.tolist(), np.full(len(ans), 12.), 1.0E-2)\n\n return",
"def remove_mean(self, axes=None):\n axes = self._get_axes_numbers(axes)\n out = self\n if 0 in axes:\n out = self - self.mean(0)\n if 1 in axes:\n out = (self.T - self.mean(1)).T\n return out",
"def average_over_trials(metric: Callable, experiment):\n return np.nanmean(\n [metric(experiment.datatypes['eyetrack'].trials[trial_idx])\n for trial_idx in _TRIALS_TO_KEEP])",
"def reset_mean(self,new_mean):\n self.mean = new_mean\n return",
"def reset_mean(self,new_mean):\n self.mean = new_mean\n return"
] | [
"0.7029652",
"0.62682873",
"0.598299",
"0.5920819",
"0.5916785",
"0.58507323",
"0.5822636",
"0.5814729",
"0.5688639",
"0.5680869",
"0.5662162",
"0.5650724",
"0.5647669",
"0.55737066",
"0.5537115",
"0.55138284",
"0.55138284",
"0.5509475",
"0.5509475",
"0.54969525",
"0.54960907",
"0.5477475",
"0.54711515",
"0.5459071",
"0.5443633",
"0.53824466",
"0.5380055",
"0.5358148",
"0.53514886",
"0.53514886"
] | 0.7598146 | 0 |
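The remove_temporal_mean record above centers each trial over time after detrending. A minimal standalone sketch of the same step on a plain array, assuming a channels x trials x time layout (the detrend_data dependency and the class context are omitted here):

import numpy as np

def remove_temporal_mean(data):
    # Subtract each trial's mean over time (last axis); keepdims=True
    # keeps a trailing length-1 axis so the mean broadcasts back.
    return data - np.mean(data, axis=-1, keepdims=True)

rng = np.random.default_rng(0)
data = rng.normal(size=(4, 10, 500))  # channels x trials x time
centered = remove_temporal_mean(data)
assert np.allclose(centered.mean(axis=-1), 0.0)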
Divide by temporal standard deviation | def divide_by_temporal_std(self):
if not hasattr(self, 'mean_removed_data'):
self.remove_temporal_mean()
self.std_divided_data = self.mean_removed_data / \
np.std(self.mean_removed_data, axis=-1, keepdims=True) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def stdev(values):\n mean = avg(values)\n diffs = [(value - mean) ** 2 for value in values]\n return avg(diffs) ** 0.5",
"def stdev(items):\n return Series.std(Series(items))",
"def stdev_from_mean(x):\r\n x = array(x)\r\n return (x - mean(x)) / std(x)",
"def _std(self, data):\n var = stats.var(data)\n if var>0.0:\n sd = math.sqrt(var)\n else:\n sd = 0.0\n return sd",
"def calculate_std_dev(temps, temp_average):\n\n variance_sum = 0\n for temp in temps:\n variance = (temp - temp_average) ** 2\n variance_sum += variance\n\n variance = variance_sum / len(temps)\n standard_deviation = variance ** 0.5\n\n return standard_deviation",
"def stdev(headers, data):\n\tcolumn_matrix=data.get_data(headers)\n\tmean_values=column_matrix.std(0)\n\tstd_values=mean_values.tolist()\n\treturn std_values",
"def divide_by_std_across_trials(self):\n if not hasattr(self, 'mean_across_trials_subtracted_data'):\n self.subtract_mean_across_trials()\n self.std_across_trials_divided_data = \\\n self.mean_across_trials_subtracted_data / \\\n np.std(self.mean_across_trials_subtracted_data,\n axis=1, keepdims=True)",
"def standard_deviation(data):\n\n return np.sqrt(variance(data))",
"def standard_deviation(data):\n\n return np.sqrt(variance(data))",
"def get_stddev(self):\r\n for i in range(1,len(self.data[0])):\r\n self.stddev.append(np.std(self.data[:,i]))",
"def z_normalize(ts):\n\n ts -= np.mean(ts)\n std = np.std(ts)\n\n if std == 0:\n raise ValueError(\"The Standard Deviation cannot be zero\")\n\n #ts /= std\n return ts / std",
"def stddev(data, ddof=0):\n n = len(data)\n if n < 2:\n return 0\n ss = _ss(data)\n pvar = ss/(n-ddof)\n return pvar**0.5",
"def normalize_standard_deviation(dataset):\n return dataset*(1/np.std(dataset))",
"def standard_deviation(self):\r\n\t\treturn self.variance()**(1/2)",
"def standard_deviation(xs: List[float]) -> float:\n return math.sqrt(variance(xs))",
"def standard_deviation(xs: List[float]) -> float:\n return math.sqrt(variance(xs))",
"def stddev(x: pd.Series, d: int or float) -> pd.Series:\n if isinstance(d, float):\n d = math.floor(d)\n\n if isinstance(x.index, pd.MultiIndex):\n return x.groupby(level=1).rolling(d).std()\n else:\n return x.rolling(d).std()",
"def _standardize(self):\n deviation = np.std(self.series)\n self.series = (self.series - np.mean(self.series)) / (deviation if deviation != 0 else 1)",
"def stddev(r):\n avg = average(r)\n sdsq = sum([(i - avg) ** 2 for i in r])\n return (sdsq / (len(r) - 1 or 1)) ** 0.5",
"def calculate_std(self) -> float:\n\n if self.data:\n return np.std(self.data)\n else:\n return self.sigma",
"def sd(vals):",
"def test_stdev_from_mean(self):\r\n x = [2.1, 4.2, 5.9, 8.4, 9.6]\r\n result = stdev_from_mean(x)\r\n self.assertFloatEqual(\r\n result,\r\n [-1.292463399014413,\r\n -0.60358696806764478,\r\n -0.045925095396451399,\r\n 0.77416589382589174,\r\n 1.1678095686526162])",
"def standard_deviation( values, sample=False ):\n return ma.sqrt( variance( values, sample ) )",
"def get_std_dev(self, data):\n mean = 0\n data_arr = []\n for i in data:\n data_arr.append(i[1])\n return statistics.stdev(data_arr)",
"def stdev(data, xbar=None):\n return math.sqrt(variance(data, xbar))",
"def std_dev(self) -> float:\n return math.sqrt(self.variance())",
"def standard_dev(self):\n return self.variance()**0.5",
"def std(self, data):\n ts_ = self.ts(data)\n if 'year' not in ts_.coords:\n return ts_\n return ts_.std('year')",
"def stddev(data, ddof=0):\n n = len(data)\n if n < 2:\n raise ValueError('variance requires at least two data points')\n ss = _ss(data)\n pvar = ss/(n-ddof)\n return pvar**0.5",
"def stdDev(data):\r\n sum = 0\r\n ave = average(data)\r\n for i in data:\r\n sum += (i-ave)**2\r\n return math.sqrt(sum/len(data))"
] | [
"0.70303154",
"0.70081013",
"0.6894062",
"0.68192405",
"0.680471",
"0.67917585",
"0.67652786",
"0.6757293",
"0.6757293",
"0.6733499",
"0.671972",
"0.6718157",
"0.67139",
"0.6705973",
"0.6677462",
"0.6677462",
"0.66751814",
"0.6662546",
"0.6652855",
"0.66227466",
"0.66132027",
"0.6595932",
"0.65955365",
"0.65746874",
"0.6557191",
"0.65359795",
"0.65242887",
"0.6504171",
"0.6487202",
"0.6485269"
] | 0.8091293 | 0 |
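Combined with the previous record, divide_by_temporal_std z-scores every trial along the time axis. A standalone sketch under the same channels x trials x time assumption; the eps guard against zero-variance trials is an addition for safety, not part of the original:

import numpy as np

def zscore_over_time(data, eps=1e-12):
    # Per-trial, per-channel z-score along the last (time) axis.
    centered = data - np.mean(data, axis=-1, keepdims=True)
    return centered / (np.std(centered, axis=-1, keepdims=True) + eps)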
Subtract mean across trials from each trial (for each timepoint) | def subtract_mean_across_trials(self):
if not hasattr(self, 'std_divided_data'):
self.divide_by_temporal_std()
self.mean_across_trials_subtracted_data = \
self.std_divided_data - \
np.mean(self.std_divided_data, axis=1, keepdims=True) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def avgtr(self):\n return np.diff(self.trtimes).mean()",
"def trial_atr(trial, omit_missing_frames=True):\n frames = trial.HMM_MLE\n if omit_missing_frames:\n frames = frames[frames >= 0]\n\n runs = calc_run_lengths(trial.HMM_MLE)\n return_times = []\n current_return_time = 0\n for run in runs:\n if run.object == 0:\n return_times.append(current_return_time/60)\n current_return_time = 0\n else:\n current_return_time += run.length\n return np.mean(return_times)",
"def mean_STD(self,counter):\n \n \n pass",
"def average_over_trials(metric: Callable, experiment):\n return np.nanmean(\n [metric(experiment.datatypes['eyetrack'].trials[trial_idx])\n for trial_idx in _TRIALS_TO_KEEP])",
"def divide_by_std_across_trials(self):\n if not hasattr(self, 'mean_across_trials_subtracted_data'):\n self.subtract_mean_across_trials()\n self.std_across_trials_divided_data = \\\n self.mean_across_trials_subtracted_data / \\\n np.std(self.mean_across_trials_subtracted_data,\n axis=1, keepdims=True)",
"def averageTime(self):\n \n pass",
"def _get_mean(self, sums, step):\n\n return sums/step",
"def demeaned(self):\n return self.data - self.mean",
"def mean(self):\n\n return time_stat(self, stat=\"mean\")",
"def msub(trace):\n \n return(trace - np.mean(trace))",
"def averageTrialsByTriggers(trigger_indices, np_data):\n trialLen = trigger_indices[1] -trigger_indices[0] -1\n data_avg = [] \n data_std = [] \n\n for i in trigger_indices:\n data_avg.append(numpy.average(np_data[i+1:i+trialLen-1])) \n data_std.append(numpy.std(np_data[i+1:i+trialLen-1])) \n \n return (data_avg, data_std)",
"def mean_subtract(dataset):\n data = [dataset[i] for i in range(len(dataset))]\n data_numpy = [dataset[i].numpy() for i in range(len(dataset))]\n\n # mean\n mean = np.mean(data_numpy)\n\n # standard deviation\n std = np.std(data_numpy)\n\n # perform mean subtract\n new_dataset = []\n for i in range(len(dataset)):\n data[i] -= mean\n data[i] /= std\n new_dataset.append(data[i])\n return new_dataset, mean",
"def avg_temps(self):\r\n average_temp = 0\r\n for j in range(len(self.trip)):\r\n average_temp += self.trip[j].get_temperature(j)\r\n average_temp /= len(self.trip)\r\n return average_temp",
"def t_test(dataType):\n\n\t# read the data\n\tparser = ExperimentUtils()\n\tdata = parser.parse_data(dataType)\n\n\tN = len(data.keys()) # number participants\n\n\t# - for trial 1 and trial 2:\n\t# \tL2 norm over each timestep, then sum all the values together\n\t# - average over two trials for each participant \n\ttask_avgs = {}\n\n\t# participant ID can take values 0 - 9\n\tfor ID in data.keys():\n\t\tfor task in data[ID]:\n\t\t\t# dont include the familiarization task (task can take values 1,2,3)\n\t\t\tif task != 0:\n\t\t\t\tif task not in task_avgs:\n\t\t\t\t\ttask_avgs[task] = {}\n\t\t\t\t\ttask_avgs[task][\"A\"] = np.array([0.0]*N)\n\t\t\t\t\ttask_avgs[task][\"B\"] = np.array([0.0]*N)\n\n\t\t\t\ttrialAsum = [0.0,0.0]\n\t\t\t\ttrialBsum = [0.0,0.0]\n\t\t\t\t# trial can take values 1 or 2\n\t\t\t\tfor trial in data[ID][task]:\n\t\t\t\t\t# only compute metrics over data, not timestamps\n\t\t\t\t\tAdata = data[ID][task][trial]['A'][1:8]\n\t\t\t\t\tBdata = data[ID][task][trial]['B'][1:8]\n\t\t\t\n\t\t\t\t\t#print str(ID)+str(task)+str(trial)+\"A\"\n\t\t\t\t\t#print \"Adata: \" + str(Adata)\n\t\t\t\t\t#print str(ID)+str(task)+str(trial)+\"B\"\n\t\t\t\t\t#print \"Bdata: \" + str(Bdata)\n\n\t\t\t\t\t(h, w) = np.shape(Adata)\n\t\t\t\t\tfor i in range(w):\n\t\t\t\t\t\ttrialAsum[trial-1] += np.linalg.norm(Adata[:,i])\n\t\t\t\t\t(h, w) = np.shape(Bdata)\n\t\t\t\t\tfor i in range(w):\n\t\t\t\t\t\ttrialBsum[trial-1] += np.linalg.norm(Bdata[:,i])\n\t\t\t\tavg_methodA = (trialAsum[0]+trialAsum[1])/2.0\n\t\t\t\tavg_methodB = (trialBsum[0]+trialBsum[1])/2.0\n\n\t\t\t\ttask_avgs[task][\"A\"][ID] = avg_methodA\n\t\t\t\ttask_avgs[task][\"B\"][ID] = avg_methodB\n\n\t# comput independent two-sample t-test \n\t# NOTE: we can assume that the two sample sizes are the same, and \n\t#\t\tthat the two distributions have the same variance\n\tfor task in range(1,4):\n\t\ttaskA = task_avgs[task][\"A\"]\n\t\ttaskB = task_avgs[task][\"B\"]\n\n\t\tmeanA = np.mean(taskA)\n\t\tmeanB = np.mean(taskB)\n\t\tprint \"meanA: \" + str(meanA)\n\t\tprint \"meanB: \" + str(meanB)\n\t\tdiff = meanA - meanB\n\t\tprint \"diff: \" + str(diff)\n\n\t\t(statistic, pvalue) = stats.ttest_ind(a=taskA, b=taskB, equal_var=True)\n\n\t\tprint \"\\n\"\n\t\tprint \"task\"+str(task)+\" statistic: \" + str(statistic)\n\t\tprint \"task\"+str(task)+\" pvalue: \" + str(pvalue)",
"def reduce_by_averaging(trials, total, amount):\n new_averages = []\n\n for i in range(0, total):\n if i % amount == 0:\n new_averages.append(reduce(lambda x, y: x + y, trials[i:i+amount]) / \\\n amount)\n\n return new_averages",
"def current_mean(self):\r\n values = self._timings\r\n return np.mean(values)",
"def avg(self):\n return sum(self.times) / len(self.times)",
"def avg(self):\n return sum(self.times) / len(self.times)",
"def avg(self):\n return sum(self.times) / len(self.times)",
"def mean(self):\r\n\t\treturn sum(self.sample)/len(self.sample)",
"def trial_atd(trial, omit_missing_frames=True):\n frames = trial.HMM_MLE\n if omit_missing_frames:\n frames = frames[frames >= 0]\n total_frames = len(frames)\n num_runs = len([run for run in calc_run_lengths(frames)])\n if num_runs == 0:\n return float('nan')\n return (total_frames/num_runs)/60",
"def mean_subtraction_cumulation(timeseries):\r\n\r\n series = pandas.Series([x[1] if x[1] else 0 for x in timeseries])\r\n series = series - series[0:len(series) - 1].mean()\r\n stdDev = series[0:len(series) - 1].std()\r\n expAverage = pandas.stats.moments.ewma(series, com=15)\r\n\r\n return abs(series.iget(-1)) > 3 * stdDev",
"def mean_run_time(self) -> float:\n return float(self.result_array.sum(axis=0).mean())",
"def subMeanAll(data=None):\n datamean = data.mean(axis = 0)\n data[:,3:] = data[:,3:] - datamean[3:]\n return data",
"def mean_diff(dataSet, n):\n return np.mean(np.diff(dataSet, n))",
"def getAverage(die, numRolls, numTrials):",
"def mean_deviation(self):\r\n\t\t_mean = sum(self.sample)/len(self.sample)\r\n\t\treturn sum(map(lambda x: abs(x - _mean), self.sample))/len(self.sample)",
"def trial_pfot(trial, omit_missing_frames=True):\n frames = trial.HMM_MLE\n if omit_missing_frames:\n frames = frames[frames >= 0]\n return np.mean(frames == 0)",
"def trial_ndt(trial, omit_missing_frames=True):\n frames = trial.HMM_MLE\n\n group_lengths = [(g[0], len(list(g[1]))) for g in itertools.groupby(frames)]\n\n if omit_missing_frames:\n group_lengths = [l for l in group_lengths if l[0] >= 0]\n\n mean_on_target_group_length = np.mean(\n [l[1] for l in group_lengths if l[0] == 0])\n mean_nonmissing_group_length = np.mean(\n [l[1] for l in group_lengths])\n\n return (mean_on_target_group_length - mean_nonmissing_group_length)/60",
"def time_average(new_cube):\n\n time_average_cube = new_cube.collapsed('time', iris.analysis.MEAN)\n\n return time_average_cube"
] | [
"0.6891229",
"0.6561155",
"0.6152613",
"0.61516047",
"0.6104623",
"0.60979486",
"0.60725135",
"0.6048731",
"0.598965",
"0.5979206",
"0.59755933",
"0.5951833",
"0.5951288",
"0.5923516",
"0.58769214",
"0.58767086",
"0.5859882",
"0.5859882",
"0.5859882",
"0.584206",
"0.5766064",
"0.5751522",
"0.57475996",
"0.5738472",
"0.5732349",
"0.56854135",
"0.5685282",
"0.5681741",
"0.56461734",
"0.56402975"
] | 0.759768 | 0 |
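subtract_mean_across_trials removes the trial-averaged (evoked) response at each timepoint, leaving single-trial fluctuations, a common step before connectivity measures so stimulus-locked components are not mistaken for coupling. A sketch, again assuming trials live on axis 1:

import numpy as np

def subtract_trial_mean(data):
    # data: channels x trials x time; the mean over axis=1 is the
    # evoked response, broadcast back over trials and subtracted.
    return data - np.mean(data, axis=1, keepdims=True)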
Divide by standard deviation across trials (for each timepoint) | def divide_by_std_across_trials(self):
if not hasattr(self, 'mean_across_trials_subtracted_data'):
self.subtract_mean_across_trials()
self.std_across_trials_divided_data = \
self.mean_across_trials_subtracted_data / \
np.std(self.mean_across_trials_subtracted_data,
axis=1, keepdims=True) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def stdev(items):\n return Series.std(Series(items))",
"def calc_standard_deviation(data: list) -> float:\n mean = calc_mean(data)\n acc = 0.0\n for n in data:\n acc += (n - mean) ** 2\n acc /= len(data) - 1\n return math.sqrt(acc)",
"def stdDev(data):\r\n sum = 0\r\n ave = average(data)\r\n for i in data:\r\n sum += (i-ave)**2\r\n return math.sqrt(sum/len(data))",
"def stdev(values):\n mean = avg(values)\n diffs = [(value - mean) ** 2 for value in values]\n return avg(diffs) ** 0.5",
"def divide_by_temporal_std(self):\n if not hasattr(self, 'mean_removed_data'):\n self.remove_temporal_mean()\n self.std_divided_data = self.mean_removed_data / \\\n np.std(self.mean_removed_data, axis=-1, keepdims=True)",
"def std_run_time(self) -> float:\n return float(self.result_array.sum(axis=0).std())",
"def stddev(r):\n avg = average(r)\n sdsq = sum([(i - avg) ** 2 for i in r])\n return (sdsq / (len(r) - 1 or 1)) ** 0.5",
"def standard_deviation(xs: List[float]) -> float:\n return math.sqrt(variance(xs))",
"def standard_deviation(xs: List[float]) -> float:\n return math.sqrt(variance(xs))",
"def standard_deviation( values, sample=False ):\n return ma.sqrt( variance( values, sample ) )",
"def sd(vals):",
"def get_stddev(self):\r\n for i in range(1,len(self.data[0])):\r\n self.stddev.append(np.std(self.data[:,i]))",
"def normalize_standard_deviation(dataset):\n return dataset*(1/np.std(dataset))",
"def std(self, dset):\n avg = self.mean(dset)\n variance = sum([math.pow(x - avg, 2) for x in dset])\n std = math.sqrt(variance)\n return std",
"def calc_stdev(a, b, c, d, e):\n mean_of_num = (a + b + c + d + e) / 5\n return (((a - mean_of_num)**2 + (b - mean_of_num)**2 + (c - mean_of_num)**2\n + (d - mean_of_num)**2 + (e - mean_of_num)**2) / 5) ** 0.5",
"def _std(self, data):\n var = stats.var(data)\n if var>0.0:\n sd = math.sqrt(var)\n else:\n sd = 0.0\n return sd",
"def standard_deviation(data):\n\n return np.sqrt(variance(data))",
"def standard_deviation(data):\n\n return np.sqrt(variance(data))",
"def get_std_dev(self, data):\n mean = 0\n data_arr = []\n for i in data:\n data_arr.append(i[1])\n return statistics.stdev(data_arr)",
"def test_stddev(self):\n self.assertEqual(stddev(list1, sample=False), np.std(list1))\n self.assertEqual(stddev(list1), np.std(list1, ddof=1))",
"def std(dfs):\n df_mean = mean(dfs)\n df_sq = sum([(df - df_mean)*(df - df_mean) for df in dfs])\n return df_sq / len(dfs)",
"def stddev(x: pd.Series, d: int or float) -> pd.Series:\n if isinstance(d, float):\n d = math.floor(d)\n\n if isinstance(x.index, pd.MultiIndex):\n return x.groupby(level=1).rolling(d).std()\n else:\n return x.rolling(d).std()",
"def std(values, ave):\n return math.sqrt(float(sum((value-ave)**2 for value in values))/len(values))",
"def standard_deviation(list):\n num_items = len(list)\n mean = sum(list) / num_items\n differences = [x - mean for x in list]\n sq_differences = [d ** 2 for d in differences]\n ssd = sum(sq_differences)\n\n\n variance = ssd / num_items\n\n sd = sqrt(variance)\n\n return sd",
"def std_deviation(array):\n if not array or len(array) == 1:\n return 0\n\n average = AGGREGATES['mean_arithmetic'](array)\n variance = map(lambda x: (x-average)**2,array)\n stdev = AGGREGATES['mean_arithmetic'](variance)\n return math.sqrt(stdev)",
"def stdev_from_mean(x):\r\n x = array(x)\r\n return (x - mean(x)) / std(x)",
"def stddev(self, sample=True):\n distance_squared = list(map(lambda x: (x - sum(self.data)/self.size)**2, self.data))\n\n if sample == True:\n variance = sum(distance_squared)/(self.size - 1)\n stddev = variance**(1/2)\n if sample == False:\n variance = sum(distance_squared)/(self.size)\n stddev = variance**(1/2)\n return stddev",
"def calculate_mean_stdev(self):\n sentences = [self.tokens_from_string(x) + ['.']\n for x in self.testing_set.split(\".\")]\n probabilities = []\n for sentence in sentences:\n # skip short sentences\n if len(sentence) <= self.order:\n continue\n\n prob = self.prob_calculate(sentence)\n probabilities.append(prob / (len(sentence) - self.order))\n\n self.mean = statistics.mean(probabilities)\n self.stdev = statistics.stdev(probabilities)",
"def stddev(self, num_list):\n try:\n mean = self.average(num_list)\n\n minus_mean = []\n\n for number in num_list:\n try:\n minus_mean.append((number - mean) ** 2)\n except Exception as e:\n print(\"Error: \", e)\n\n meany_mean = self.average(minus_mean)\n\n meany_mean = meany_mean ** .5\n\n except Exception as e:\n print(\"Error: \", e)\n\n return meany_mean",
"def calculate_std(self):\n # from 6:00 to 1:00\n start = (6-4)*60*60 / self.interval_length_s - self.intervals_offset\n end = (25-4)*60*60 / self.interval_length_s - self.intervals_offset\n v = [] # this is for the 24h consumptions\n v2 = [] # this is for the std of the interval consumptions\n for i in range(start,end): # i: end-time of a day-interval / everything between 6:00 and 1:00\n for i1 in range(i,len(self.intervals)-1,self.intervals_per_day): # check all possible end-times (skip the very last interval)\n if i1 >= 0:\n i0 = i1 - self.intervals_per_day # i0: start of the day-interval\n if i0 >= 0: # within measured time?\n v.append(sum(self.intervals[i0:i1])) # 24h consumption\n cmp_interval = self.cmp_interval(i1)\n d = self.interval_consumption2power(self.intervals[i1] - self.consumption_per_interval_smoothed[cmp_interval]) # in W!\n v2.append(d*d)\n if len(v) > 5:\n self.std = np.std(v)\n if len(v2) > 5:\n self.std_intervals = sqrt(np.mean(v2))"
] | [
"0.7041086",
"0.6919613",
"0.6913145",
"0.6878369",
"0.68775237",
"0.6814619",
"0.67827666",
"0.67530537",
"0.67530537",
"0.67328584",
"0.6639971",
"0.6564795",
"0.6559348",
"0.65451086",
"0.65384686",
"0.65182203",
"0.64876235",
"0.64876235",
"0.64549816",
"0.6444664",
"0.64310056",
"0.641912",
"0.6416342",
"0.63875777",
"0.6334316",
"0.6319965",
"0.63146585",
"0.6308875",
"0.63078344",
"0.6306551"
] | 0.7416995 | 0 |
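The four records above form one lazy chain: each method computes its predecessor on demand via hasattr. An eager sketch of the whole pipeline on a single array; treating detrend_data as a linear detrend along time is an assumption, since that method is not shown in these records:

import numpy as np
from scipy.signal import detrend

def preprocess(data, eps=1e-12):
    # data: channels x trials x time
    x = detrend(data, axis=-1)                     # detrend_data (assumed linear)
    x = x - x.mean(axis=-1, keepdims=True)         # remove_temporal_mean
    x = x / (x.std(axis=-1, keepdims=True) + eps)  # divide_by_temporal_std
    x = x - x.mean(axis=1, keepdims=True)          # subtract_mean_across_trials
    x = x / (x.std(axis=1, keepdims=True) + eps)   # divide_by_std_across_trials
    return x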
alpha = threshold for a single test (will be Bonferroni-corrected internally); wanted_fraction = minimum fraction of tests that should be significant (stationary) | def run_adfuller_test(preprocessed_data, alpha=0.05, wanted_fraction=0.95):
inds = list(np.ndindex(preprocessed_data.shape[:-1]))
    def return_adfuller_pval(this_ind):
        return adfuller(preprocessed_data[this_ind])[1]
pval_list = np.array(parallelize(return_adfuller_pval, inds, n_jobs=30))
    # Bonferroni correction: per-test threshold = alpha / number of tests.
    # (The original rebound alpha and wanted_fraction to hardcoded values
    # here, shadowing the keyword arguments; those lines are removed.)
    threshold = alpha / len(pval_list)
if np.sum(pval_list < threshold) > wanted_fraction * len(pval_list):
print('Data is stationary')
else:
raise ValueError('Data is not stationary') | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def pvalue_test(self, alpha=0.01):\n CL = int((1-alpha)*100) # confidence level\n \n if self.p_value < alpha:\n print(\"Null hypothesis rejected at {:d}%CL => distributions are different\".format(CL))\n else:\n print(\"Null hypothesis NOT rejected => distributions are the same\")",
"def test_compute_alphas(self):\n\t\tdetails = self.watcher.analyze(layers=[self.second_layer], pool=False, randomize=False, plot=False, mp_fit=False, pl_package=WW_POWERLAW)\n\t\t#d = self.watcher.get_details(results=results)\n\t\ta = details.alpha.to_numpy()\n\t\tself.assertAlmostEqual(a[0],1.74859, places=3)\n\t\tself.assertAlmostEqual(a[1],1.66595, places=3)\n\t\tself.assertAlmostEqual(a[3],1.43459, places=3)",
"def _get_g_test(self, data, alpha):\n n = len(data)\n significance_level = self._get_t_significance_level(alpha, n)\n t = stats.t.isf(significance_level, n-2)\n return ((n-1) / np.sqrt(n)) * (np.sqrt(t**2 / (n-2 + t**2)))",
"def test_intra_power_law_fit_no_model(self):\n\n\t\tdetails= self.watcher.analyze(model=self.model, layers=self.fc_layers, intra=True, randomize=False, vectors=False)\n\t\tactual_alpha = details.alpha[0]\n\n\t\texpected_alpha = 2.654 # not very accurate because of the sparisify transform\n\t\tself.assertAlmostEqual(actual_alpha,expected_alpha, places=1)",
"def test_compute_alphas(self):\n\t\tdetails = self.watcher.analyze(layers=[self.second_layer], pool=False, randomize=False, plot=False, mp_fit=False, \n\t\t\t\t\t\t\t\t\tsvd_method=ACCURATE_SVD, pl_package=POWERLAW_PACKAGE, xmax=XMAX_FORCE)\n\t\t#d = self.watcher.get_details(results=results)_method\n\t\ta = details.alpha.to_numpy()\n\t\tself.assertAlmostEqual(a[0],1.65014, places=3)\n\t\tself.assertAlmostEqual(a[1],1.57297, places=3)\n\t\tself.assertAlmostEqual(a[3],1.43459, places=3)\n\t\t\n\t\t# WHY DPOES THIS TEST FAIL NOW ?\n\n\n\t\n\t\tdetails2 = self.watcher.analyze(layers=[self.second_layer], pool=False, randomize=False, plot=False, mp_fit=False, \n\t\t\t\t\t\t\t\t\tpl_package=POWERLAW_PACKAGE, xmax=None)\n\t\t#d = self.watcher.get_details(results=results)WW_\n\t\ta2 = details2.alpha.to_numpy()\n\t\tself.assertAlmostEqual(a2[0],1.74859, places=3)\n\t\tself.assertAlmostEqual(a2[1],1.66595, places=3)\n\t\tself.assertAlmostEqual(a2[3],1.43459, places=3)",
"def calc_alpha(epsilon): \n return float(0.5 * np.log((1-epsilon)/epsilon))",
"def r_test(self, fraction, block=None):\n passed = random.uniform(0, 1) < fraction\n if passed:\n self.bot.log(\" r_test({f}%) passed from {b}\"\n .format(f=100*fraction, b=block))\n return passed",
"def optimal_alpha():\n\n # When I checked all of alphas, -0.01 was the best\n alpha = -0.01\n # np.random.choice([-0.06, -0.01, 0.04, 0.1])\n return alpha",
"def test_intra_power_law_fit(self):\n\n\t\tprint(type(self.fc_layers[0:2]), self.fc_layers[0:2])\n\t\tdetails= self.watcher.analyze(layers=self.fc_layers[0:2], intra=True, randomize=False, vectors=False, pl_package=POWERLAW_PACKAGE, xmax=XMAX_FORCE)\n\t\tactual_alpha = details.alpha[0]\n\t\t#actual_best_fit = details.best_fit[0]\n\t\t#print(actual_alpha,actual_best_fit)\n\n\t\texpected_alpha = 2.654 # not very accurate because of the sparisify transform\n\t\t#expected_best_fit = LOG_NORMAL\n\t\tself.assertAlmostEqual(actual_alpha,expected_alpha, places=1)\n\t\t#self.assertEqual(actual_best_fit, expected_best_fit)",
"def test_alpha_value_error(self, dmatrix_2x1_with_label, alpha):\n\n dummy_confo_model = DummyLeafNodeScaledConformalPredictor()\n\n with pytest.raises(\n ValueError, match=re.escape(\"alpha must be in range [0 ,1]\")\n ):\n\n dummy_confo_model.calibrate(\n data=dmatrix_2x1_with_label, alpha=alpha, response=np.array([0, 1])\n )",
"def test_exner_function():\n pressure = np.array([900., 500., 300., 100.]) * units.mbar\n truth = np.array([0.97034558, 0.82033536, 0.70893444, 0.51794747]) * units.dimensionless\n assert_array_almost_equal(exner_function(pressure), truth, 5)",
"def optimal_alpha(f, x0, Df, tol=1e-5, maxiter=15):\r\n #initialize alphas to check\r\n alphas = np.linspace(.001,1,100, endpoint=True)\r\n results = []\r\n for a in alphas:\r\n #run newton's method for all alphas\r\n new = newton(f,x0,Df,tol,maxiter,a)\r\n results.append(list(new))\r\n #look at just the iterations\r\n iters = np.array(results)[:,2]\r\n #plot graph\r\n plt.plot(alphas,iters)\r\n plt.xlabel('alpha')\r\n plt.ylabel('iterations')\r\n plt.title('Newton\\'s Method Comparisons')\r\n plt.show()\r\n #find index of least iterations\r\n smallest = np.argmin(iters)\r\n return alphas[smallest]",
"def test_fix_fingers_w_thresh(self):\n\t\tdetails = self.watcher.analyze(layers=[17], fix_fingers='clip_xmax', finger_thresh=10.0)\n\t\tactual_alpha = details.alpha.to_numpy()[0]\n\t\tactual_raw_alpha = details.raw_alpha.to_numpy()[0]\n\t\tactual_num_fingers = details.num_fingers.to_numpy()[0]\n\n\t\texpected_alpha = 6.883742\n\t\texpected_raw_alpha = expected_alpha\n\t\texpected_num_fingers = 0\n\t\tself.assertAlmostEqual(actual_alpha,expected_alpha, delta=0.1 )\n\t\tself.assertAlmostEqual(actual_raw_alpha,expected_raw_alpha, delta=0.01 )\n\t\tself.assertEqual(actual_num_fingers,expected_num_fingers)",
"def acceptance_fraction(self):\n return ValueError(\"acceptance_fraction function not set.\")",
"def calculate_sensitivity(n_excesses, n_background, alpha):\n significance = n_excesses / np.sqrt(n_background * alpha)\n sensitivity = 5 / significance * 100 # percentage of Crab\n\n return sensitivity",
"def test_positive_definite1(dist, alpha, divergence):\n assert divergence(dist, dist, alpha) == pytest.approx(0)\n assert hellinger_sum(dist, dist, alpha) == pytest.approx(1)",
"def obrien_fleming_test(z, K, current_k, alpha):\n c = obrien_fleming_cutoff(K, current_k, alpha)\n return c < abs(z)",
"def alpha_huber(y_true, y_pred):\n # abs_r = T.abs_(y_pred - y_true)\n # loss = 0.5 * T.sqr(abs_r)\n # epsilon = np.percentile(loss, alpha * 100)\n # idx = abs_r <= epsilon\n # loss[idx] = epsilon * abs_r[idx] - 0.5 * T.sqr(epsilon)\n #switch(cond, ift, iff)\n alpha=0.95\n abs_r = T.abs_(y_pred - y_true)\n epsilon = np.percentile(0.5 * T.sqr(abs_r), alpha * 100)\n loss =T.switch(T.le(abs_r,epsilon),epsilon * abs_r - 0.5 * T.sqr(epsilon),0.5 * T.sqr(abs_r))\n\n return loss",
"def correctalpha(desiredalpha, level):\n \n correctedalpha = 1 - (1 - desiredalpha) ** (1.0 / level)\n \n return correctedalpha",
"def test_intra_power_law_fit2(self):\n\t\tprint(type(self.fc_layers[0:2]), self.fc_layers[0:2])\n\t\tdetails= self.watcher.analyze(layers=self.fc_layers[0:2], intra=True, sparsify=False, pl_package=POWERLAW_PACKAGE, xmax=XMAX_FORCE)\n\t\tactual_alpha = details.alpha[0]\n\t\t#actual_best_fit = details.best_fit[0]\n\t\t#print(actual_alpha,actual_best_fit)\n\n\n\t\texpected_alpha = 2.719 # close to exact ?\n\t\t#expected_best_fit = LOG_NORMAL\n\t\tself.assertAlmostEqual(actual_alpha,expected_alpha, places=2)\n\t\t#self.assertEqual(actual_best_fit, expected_best_fit)",
"def flag_fraction(data):\n occ_f = np.sum(data.mask, axis=0) / float(data.shape[0])\n occ_t = np.sum(data.mask, axis=1) / float(data.shape[1])\n \n bad_f = occ_f > params.max_frac_f\n bad_t = occ_t > params.max_frac_t\n \n data.mask[bad_t, :] = True\n data.mask[:, bad_f] = True\n \n return data.mask",
"def test_prop_alpha(self):\n # reproducible arbitrariness\n np.random.seed(5001)\n\n self.conductor.out_step = np.random.randn(self.Nc)\n self.tutor.out_step = np.random.randn(self.Ns)\n\n self.rule.alpha = 1.0\n self.rule.beta = 0.0\n\n tmax = 5*self.dt\n factor = 1.3\n\n W0 = np.copy(self.syns.W)\n\n sim = simulation.Simulation(self.conductor, self.student, self.tutor,\n self.syns, self.rule, dt=self.dt)\n sim.run(tmax)\n\n change1 = self.syns.W - W0\n\n self.syns.W = np.copy(W0)\n self.rule.alpha *= factor\n sim.run(tmax)\n\n change2 = self.syns.W - W0\n\n self.assertTrue(np.allclose(change2, factor*change1))",
"def test_prop_alpha(self):\n # reproducible arbitrariness\n np.random.seed(5001)\n\n self.conductor.out_step = np.random.randn(self.Nc)\n self.tutor.out_step = np.random.randn(self.Ns)\n\n self.rule.alpha = 1.0\n self.rule.beta = 0.0\n\n tmax = 5*self.dt\n factor = 1.3\n\n W0 = np.copy(self.syns.W)\n\n sim = simulation.Simulation(self.conductor, self.student, self.tutor,\n self.syns, self.rule, dt=self.dt)\n sim.run(tmax)\n\n change1 = self.syns.W - W0\n\n self.syns.W = np.copy(W0)\n self.rule.alpha *= factor\n sim.run(tmax)\n\n change2 = self.syns.W - W0\n\n self.assertTrue(np.allclose(change2, factor*change1))",
"def test_ge(self):\n f12: Fraction = Fraction(1, 2)\n f34: Fraction = Fraction(3, 4)\n f93: Fraction = Fraction(9, 3)\n f124: Fraction = Fraction(12, 4)\n self.assertTrue(f12 >= f34)\n self.assertTrue(f93 >= f124)\n self.assertFalse(f93 >= f12)",
"def test_alpha_param(self):\n ## produce random data\n X, y = make_classification(\n n_samples=200,\n n_features=100,\n n_informative=20,\n n_redundant=10,\n n_classes=3,\n random_state=42,\n )\n\n ## Instantiate a UMAPVisualizer, provide custom alpha\n umap = UMAPVisualizer(random_state=64, alpha=0.5)\n\n # Test param gets set correctly\n assert umap.alpha == 0.5\n\n # Mock ax and fit the visualizer\n umap.ax = mock.MagicMock(autospec=True)\n umap.fit(X, y)\n\n # Test that alpha was passed to internal matplotlib scatterplot\n _, scatter_kwargs = umap.ax.scatter.call_args\n assert \"alpha\" in scatter_kwargs\n assert scatter_kwargs[\"alpha\"] == 0.5",
"def test_gt(self):\n f12: Fraction = Fraction(1, 2)\n f34: Fraction = Fraction(3, 4)\n f105: Fraction = Fraction(10, 5)\n self.assertTrue(f34 > f12)\n self.assertFalse(f12 > f105)\n self.assertFalse(f12 > f12)",
"def test_100_100(self):\n\n n_ed = 100.0\n m_ed = 100.0\n\n as_1 = 3.102643 * 10 ** -4\n as_2 = 1.80000 * 10 ** -4\n\n n_rd, m_rd = compression_diagnostic.main(h, b, a1, a2, m_ed, n_ed, as_1, as_2, eta_bet, lambda_bet, f_cd, f_ck)\n self.assertAlmostEqual(n_rd, n_ed, 0)\n self.assertAlmostEqual(m_rd, m_ed, 0)",
"def test_le(self):\n f12: Fraction = Fraction(1, 2)\n f34: Fraction = Fraction(3, 4)\n f48: Fraction = Fraction(4, 8)\n f66: Fraction = Fraction(6, 6)\n self.assertTrue(f12 <= f34)\n self.assertTrue(f12 <= f48)\n self.assertFalse(f66 <= f12)",
"def test_fraction_rich_comparisson(self):\n fract1 = source.Fraction(5, 2) # 2.5\n fract2 = source.Fraction(3, 2) # 1.5\n fract3 = source.Fraction(25, 10) # 2.5\n\n self.assertFalse(fract1 != fract3) # 2.5 != 2.5\n self.assertTrue(fract1 == fract3) # 2.5 == 2.5\n self.assertTrue(fract2 < fract3) # 1.5 < 2.5\n\n # Let's try the other way\n self.assertTrue(fract1 >= fract2) # 2.5 >= 1.5\n self.assertFalse(fract2 >= fract3) # 1.5 >= 2.5\n\n # Let's try with other types\n self.assertTrue(fract1 >= 2) # 2.5 >= 2\n self.assertTrue(fract2 == 1.5) # 1.5 == 1.5\n\n # Let's try the other way with other types\n self.assertTrue(2 <= fract1) # 2 <= 2.5\n self.assertTrue(1.5 == fract2) # 1.5 == 1.5\n\n self.assertTrue(10 > fract1) # 10 > 2.5\n self.assertFalse(10 < fract1) # 10 < 2.5\n self.assertTrue(fract1 < 10) # 2.5 < 10\n self.assertFalse(fract1 > 10) # 2.5 > 10",
"def test_extended_truncated_power_law_fit(self):\n\t\t\n\t\t#TODO: fix this; low priority\n\t\tdetails= self.watcher.analyze(layers=[self.fc1_layer], pl_package=POWERLAW_PACKAGE, fit=E_TPL)\n\t\tactual_alpha = details.alpha[0]\n\t\tactual_Lambda = details.Lambda[0]\n\n\t\tself.assertTrue(actual_Lambda > -1) #Lambda must be set for TPL\n\t\t\n\t\t# these numbers have not been independently verified yet\n\t\texpected_alpha = 2.3\n\t\texpected_Lambda = 0.006069\n\t\tself.assertAlmostEqual(actual_alpha,expected_alpha, places=2)\n\t\tself.assertAlmostEqual(actual_Lambda,expected_Lambda, places=2)"
] | [
"0.59391797",
"0.58778065",
"0.5851814",
"0.58380985",
"0.57947487",
"0.579216",
"0.57526",
"0.5734187",
"0.56922245",
"0.56904244",
"0.56162506",
"0.5595958",
"0.5584986",
"0.5574947",
"0.5539639",
"0.5525329",
"0.5498995",
"0.5490676",
"0.5487545",
"0.54838616",
"0.5480658",
"0.54753894",
"0.54753894",
"0.5470014",
"0.54632735",
"0.5453454",
"0.54344684",
"0.54313385",
"0.54216826",
"0.5380559"
] | 0.6295856 | 0 |
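run_adfuller_test applies one augmented Dickey-Fuller test per series and Bonferroni-corrects the per-test threshold, declaring the data stationary only if more than wanted_fraction of the tests reject the unit-root null. A serial sketch without the parallelize helper (statsmodels' adfuller returns the p-value at index 1); time is assumed to run along the last axis:

import numpy as np
from statsmodels.tsa.stattools import adfuller

def check_stationarity(data, alpha=0.05, wanted_fraction=0.95):
    # One ADF test per leading-index series.
    pvals = np.array([adfuller(data[idx])[1]
                      for idx in np.ndindex(data.shape[:-1])])
    threshold = alpha / len(pvals)  # Bonferroni correction
    return np.mean(pvals < threshold) > wanted_fraction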
Calculate Granger causality. time_series = time x trials x channels | def calc_granger(time_series,
time_halfbandwidth_product=1,
sampling_frequency=1000,
time_window_duration=0.3,
time_window_step=0.05,
):
m = Multitaper(
time_series,
sampling_frequency=sampling_frequency, # in Hz
time_halfbandwidth_product=time_halfbandwidth_product,
start_time=0,
time_window_duration=time_window_duration, # in seconds
time_window_step=time_window_step, # in seconds
)
c = Connectivity.from_multitaper(m)
granger = c.pairwise_spectral_granger_prediction()
return granger, c | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def graphite_cracking_rate_Ai2020(T_dim):\n k_cr = 3.9e-20\n Eac_cr = 0 # to be implemented\n arrhenius = np.exp(Eac_cr / pybamm.constants.R * (1 / T_dim - 1 / 298.15))\n return k_cr * arrhenius",
"def calc_granger_actual(self):\n if not hasattr(self, 'input_data'):\n self.preprocess_and_check_stationarity()\n # input_data shape = (n_timepoints, n_trials, n_channels)\n # Calculate as many bootstrapped samples as n_shuffles\n trial_inds = np.random.randint(\n 0, self.input_data.shape[1],\n (self.n_shuffles, self.input_data.shape[1]))\n temp_dat = [self.input_data[:, trial_inds[i]]\n for i in trange(self.n_shuffles)]\n outs_temp = parallelize(self.calc_granger, temp_dat, n_jobs=30)\n time_vec = outs_temp[0][1].time\n freq_vec = outs_temp[0][1].frequencies\n outs_temp = [x[0] for x in outs_temp]\n self.granger_actual = np.array(outs_temp)\n self.time_vec = time_vec\n self.freq_vec = freq_vec\n #self.granger_actual, self.c_actual = \\\n # self.calc_granger(self.input_data)",
"def _c2g(self,timeaxis,coft):\n \n ta = timeaxis\n rr = numpy.real(coft)\n ri = numpy.imag(coft)\n sr = scipy.interpolate.UnivariateSpline(ta.data,\n rr,s=0).antiderivative()(ta.data)\n sr = scipy.interpolate.UnivariateSpline(ta.data,\n sr,s=0).antiderivative()(ta.data)\n si = scipy.interpolate.UnivariateSpline(ta.data,\n ri,s=0).antiderivative()(ta.data)\n si = scipy.interpolate.UnivariateSpline(ta.data,\n si,s=0).antiderivative()(ta.data)\n gt = sr + 1j*si\n return gt",
"def cracking_rate_Ai2020(T_dim):\n k_cr = 3.9e-20\n Eac_cr = 0 # to be implemented\n arrhenius = np.exp(Eac_cr / pybamm.constants.R * (1 / T_dim - 1 / 298.15))\n return k_cr * arrhenius",
"def _granger_causality(self):\r\n gc = dict(frequencies={}, gc_xy={}, gc_yx={}, gc_sim={},\r\n spectral_density={})\r\n for i, j in self.ij:\r\n w, f_x2y, f_y2x, f_xy, Sw = \\\r\n alg.granger_causality_xy(self.model_coef[i, j],\r\n self.error_cov[i, j],\r\n n_freqs=self._n_freqs)\r\n\r\n # All other measures are dependent on i, j:\r\n gc['gc_xy'][i, j] = f_x2y\r\n gc['gc_yx'][i, j] = f_y2x\r\n gc['gc_sim'][i, j] = f_xy\r\n gc['spectral_density'][i, j] = Sw\r\n\r\n return gc",
"def one_transition_spectrum_cd(self,tr):\n \n\n ta = tr[\"ta\"] # TimeAxis\n rr = tr[\"rr\"] # transition dipole strength\n om = tr[\"om\"] # frequency - rwa\n gg = tr[\"gg\"] # natural broadening (constant or time dependent)\n fwhm = tr[\"fwhm\"] # Additional gaussian broadening of the spectra\n sgm = fwhm/(2*numpy.sqrt(2*numpy.log(2)))\n \n # CD and fluorescence can be calculated in this step\n # TODO if rotatory strength defined calculate also circular dichroism spectra\n # TOOD calculate fluorescence spectra (for fluorescence there should be a switch because it should be calculated only for the first transition) \n \n \n if self.system._has_system_bath_coupling:\n# ct = tr[\"ct\"] # correlation function\n \n # convert correlation function to lineshape function\n #gt = self._c2g(ta,ct.data)\n gt = tr[\"gt\"]\n # calculate time dependent response\n at = numpy.exp(-gt -1j*om*ta.data)\n else:\n # calculate time dependent response\n at = numpy.exp(-1j*om*ta.data) \n# plt.figure()\n# plt.title(\"Absorption\")\n# plt.plot(ta.data,numpy.real(at))\n# plt.plot(ta.data,numpy.imag(at))\n \n \n if len(gg) == 1:\n gam = gg[0]\n rt = numpy.exp(gam*ta.data)\n at *= rt\n #print(\"Constant: \", rt[20], len(at))\n else:\n rt = numpy.exp((gg)*ta.data) \n at *= rt\n #print(\"Time dependent: len = \", rt[20], len(rt))\n \n if fwhm!=0.0:\n gauss = numpy.exp(-2*(numpy.pi**2)*(sgm**2)*(ta.data**2))\n at *= gauss\n \n # Fourier transform the result\n ft = rr*numpy.fft.hfft(at)*ta.step\n ft = numpy.fft.fftshift(ft)\n # invert the order because hfft is a transform with -i\n ft = numpy.flipud(ft) \n # cut the center of the spectrum\n Nt = ta.length #len(ta.data) \n return ft[Nt//2:Nt+Nt//2]",
"def time_surrogate_for_cc(self, sample_range=100, tau_max=1,\n lag_mode='all'):\n\n perm = numpy.random.permutation(\n range(tau_max, self.total_time - tau_max))[:sample_range]\n\n sample_array = numpy.empty((2*tau_max + 1, self.N, sample_range),\n dtype=\"float32\")\n\n for t in range(2 * tau_max + 1):\n tau = t - tau_max\n sample_array[t] = self.dataarray[:, perm + tau]\n sample_array[t] -= sample_array[t].mean(axis=1).reshape(self.N, 1)\n sample_array[t] /= sample_array[t].std(axis=1).reshape(self.N, 1)\n sample_array[t][numpy.isnan(sample_array[t])] = 0\n\n return self._calculate_cc(sample_array, corr_range=sample_range,\n tau_max=tau_max, lag_mode=lag_mode)",
"def instrument_strength_effect(etas, n_neurons, timesteps, n_trials):\n\n # Initialize corr array\n corr_data = np.zeros((n_trials, len(etas)))\n\n # Loop over trials\n for trial in range(n_trials):\n print(f\"simulation of trial {trial + 1} of {n_trials}\")\n\n # Loop over instrument strengths\n for j, eta in enumerate(etas):\n\n # Simulate system\n A, X, Z = simulate_neurons_iv(n_neurons, timesteps, eta, trial)\n\n # Compute IV estimate\n iv_V = get_iv_estimate_network(X, Z)\n\n # Compute correlation\n corr_data[trial, j] = np.corrcoef(A.flatten(), iv_V.flatten())[1, 0]\n\n return corr_data",
"def test_GrangerAnalyzer():\r\n\r\n # Start by generating some MAR processes (according to Ding and Bressler),\r\n a1 = np.array([[0.9, 0],\r\n [0.16, 0.8]])\r\n\r\n a2 = np.array([[-0.5, 0],\r\n [-0.2, -0.5]])\r\n\r\n am = np.array([-a1, -a2])\r\n\r\n x_var = 1\r\n y_var = 0.7\r\n xy_cov = 0.4\r\n cov = np.array([[x_var, xy_cov],\r\n [xy_cov, y_var]])\r\n\r\n L = 1024\r\n z, nz = utils.generate_mar(am, cov, L)\r\n\r\n # Move on to testing the Analyzer object itself:\r\n ts1 = ts.TimeSeries(data=z, sampling_rate=np.pi)\r\n g1 = gc.GrangerAnalyzer(ts1)\r\n\r\n # Check that things have the right shapes:\r\n npt.assert_equal(g1.frequencies.shape[-1], g1._n_freqs // 2 + 1)\r\n npt.assert_equal(g1.causality_xy[0, 1].shape, g1.causality_yx[0, 1].shape)\r\n\r\n # Test inputting ij:\r\n g2 = gc.GrangerAnalyzer(ts1, ij=[(0, 1), (1, 0)])\r\n\r\n # x => y for one is like y => x for the other:\r\n npt.assert_almost_equal(g1.causality_yx[1, 0], g2.causality_xy[0, 1])",
"def thetacm(t):\n return np.array([\n 0,\n 0,\n self.wz * t\n ])",
"def compute_gain_traj(self):\n time_steps = self._Phi.shape[1]\n state_dim, action_dim = self._B.shape\n\n K_traj = np.zeros([time_steps, state_dim, state_dim])\n k_traj = np.zeros([time_steps, action_dim])\n Sigma_u_traj = np.zeros([time_steps, action_dim, action_dim])\n\n for t in range(time_steps):\n\n K_traj[t, :, :], k_traj[t, :], Sigma_u_traj[t, :, :] = self.compute_gains(t)\n\n return K_traj, k_traj, Sigma_u_traj",
"def viscous_timescale(r):\n t_viscous = (2*np.pi)*r**(3.0/2.0) / ((H/R_out)**(2.0) * alpha)\n return t_viscous",
"def granger_causality_xy(a, cov, n_freqs=1024):\r\n\r\n w, Hw = transfer_function_xy(a, n_freqs=n_freqs)\r\n\r\n sigma = cov[0, 0]\r\n upsilon = cov[0, 1]\r\n gamma = cov[1, 1]\r\n\r\n # this transformation of the transfer functions computes the\r\n # Granger causality of Y on X\r\n gamma2 = gamma - upsilon ** 2 / sigma\r\n\r\n Hxy = Hw[0, 1]\r\n Hxx_hat = Hw[0, 0] + (upsilon / sigma) * Hxy\r\n\r\n xx_auto_component = (sigma * Hxx_hat * Hxx_hat.conj()).real\r\n cross_component = gamma2 * Hxy * Hxy.conj()\r\n Sxx = xx_auto_component + cross_component\r\n f_y_on_x = np.log(Sxx.real / xx_auto_component)\r\n\r\n # this transformation computes the Granger causality of X on Y\r\n sigma2 = sigma - upsilon ** 2 / gamma\r\n\r\n Hyx = Hw[1, 0]\r\n Hyy_hat = Hw[1, 1] + (upsilon / gamma) * Hyx\r\n yy_auto_component = (gamma * Hyy_hat * Hyy_hat.conj()).real\r\n cross_component = sigma2 * Hyx * Hyx.conj()\r\n Syy = yy_auto_component + cross_component\r\n f_x_on_y = np.log(Syy.real / yy_auto_component)\r\n\r\n # now compute cross densities, using the latest transformation\r\n Hxx = Hw[0, 0]\r\n Hyx = Hw[1, 0]\r\n Hxy_hat = Hw[0, 1] + (upsilon / gamma) * Hxx\r\n Sxy = sigma2 * Hxx * Hyx.conj() + gamma * Hxy_hat * Hyy_hat.conj()\r\n Syx = sigma2 * Hyx * Hxx.conj() + gamma * Hyy_hat * Hxy_hat.conj()\r\n\r\n # can safely throw away imaginary part\r\n # since Sxx and Syy are real, and Sxy == Syx*\r\n detS = (Sxx * Syy - Sxy * Syx).real\r\n f_xy = xx_auto_component * yy_auto_component\r\n f_xy /= detS\r\n f_xy = np.log(f_xy)\r\n\r\n return w, f_x_on_y, f_y_on_x, f_xy, np.array([[Sxx, Sxy], [Syx, Syy]])",
"def gain_corr(data, header, tel=None):\n\n if get_par(set_zogy.timing,tel):\n t = time.time()\n\n gain = get_par(set_bb.gain,tel)\n # channel image sections\n chan_sec, __, __, __, __ = define_sections(np.shape(data), tel=tel)\n\n nchans = np.shape(chan_sec)[0]\n for i_chan in range(nchans):\n data[chan_sec[i_chan]] *= gain[i_chan]\n header['GAIN{}'.format(i_chan+1)] = (gain[i_chan], '[e-/ADU] gain applied to '\n 'channel {}'.format(i_chan+1))\n\n if get_par(set_zogy.timing,tel):\n log_timing_memory (t0=t, label='gain_corr')\n\n return data\n\n # check if different channels in [set_bb.gain] correspond to the\n # correct channels; currently indices of gain correspond to the\n # channels as follows:\n #\n # [ 8, 9, 10, 11, 12, 13, 14, 15]\n # [ 0, 1, 2, 3, 4, 5, 6, 7]\n\n # g = gain()\n # height,width = 5300, 1500\n # for (j,i) in [(j,i) for j in range(2) for i in range(8)]:\n # data[height*j:height*(j+1),width*i:width*(i+1)]*=g[i+(j*8)]\n #\n # height, width = 5300, 1500\n # for (j,i) in [(j,i) for j in range(2) for i in range(8)]:\n # print (height*j, height*(j+1),width*i, width*(i+1), i+(j*8))\n # 0 5300 0 1500 0\n # 0 5300 1500 3000 1\n # 0 5300 3000 4500 2\n # 0 5300 4500 6000 3\n # 0 5300 6000 7500 4\n # 0 5300 7500 9000 5\n # 0 5300 9000 10500 6\n # 0 5300 10500 12000 7\n # 5300 10600 0 1500 8\n # 5300 10600 1500 3000 9\n # 5300 10600 3000 4500 10\n # 5300 10600 4500 6000 11\n # 5300 10600 6000 7500 12\n # 5300 10600 7500 9000 13\n # 5300 10600 9000 10500 14\n # 5300 10600 10500 12000 15",
"def granger(series_a, series_b, output_fio, maxlag):\n with capture_stdout(output_fio):\n data = list(zip(series_a, series_b))\n return grangercausalitytests(data, maxlag=maxlag)",
"def compute_gains(self, t, add_noise=True):\n #get the basis funtion at a time step\n basis, Dbasis = self.get_basis(t)\n\n if t < self._time_steps-1:\n basis_t_dt, _ = self.get_basis(t+1)\n else:\n basis_t_dt = np.zeros_like(basis)\n\n\n #part 1 equation 46\n B_pseudo = np.linalg.pinv(self._B)\n\n #equation 12 for t\n Sigma_t = np.dot(np.dot(basis, self._sigma_W), basis.T)\n\n #equation 12 for t+dt\n Sigma_t_dt = np.dot(np.dot(basis_t_dt, self._sigma_W), basis_t_dt.T)\n\n #Cross correlation between t, t+dt, Equation 49\n Ct = np.dot(np.dot(basis, self._sigma_W), basis_t_dt.T)\n\n #System noise Equation 51\n Sigma_s = (1./self._dt)* ( Sigma_t_dt - np.dot( np.dot( Ct.T, np.linalg.inv(Sigma_t) ), Ct) )\n\n #control noise Equation 52\n Sigma_u = np.dot(np.dot(B_pseudo, Sigma_s), B_pseudo.T)\n\n #part 2 equation 46\n tmp1 = np.dot(np.dot(Dbasis, self._sigma_W), basis.T)\n\n #part 3 equation 46\n tmp2 = np.dot(self._A, Sigma_t) + 0.5*Sigma_s\n\n #compute feedback gain; complete equation 46\n K = np.dot( np.dot(B_pseudo, (tmp1-tmp2) ), np.linalg.inv(Sigma_t))\n\n #part 1 equation 48\n tmp3 = np.dot(Dbasis, self._mean_W)\n\n #part 2 equation 48\n tmp4 = np.dot( (self._A + np.dot(self._B, K)), np.dot(basis, self._mean_W) )\n\n #compute feedforward gain; complete equation 48\n k = np.dot(B_pseudo, (tmp3-tmp4))\n\n return K, k, Sigma_u",
"def conv_gaus(array, sigma = 1.0):\r\n arrayout = fft2(array + 0.0J)\r\n arrayout = ifft2(arrayout * gaus_Fourier(array.shape[0], sigma))\r\n arrayout = np.array(arrayout, dtype=array.dtype)\r\n return arrayout",
"def acceleration(data_array, time=1):\n speed = DataOperation.speed(data_array)\n acc_values = np.zeros(speed.size)\n count = 1\n acc_values[0] = 0\n for d in speed[1:]:\n acc_values[count] = (d - speed[count-1])/3.6/time\n count += 1\n return acc_values",
"def calEachCrossflowAllAxialNode():\n AxialNodeno = 14 # axial node number in CFD data\n Nodes = []\n base = 'Node'\n for i in range(0, AxialNodeno):\n Nodes.append(base+str(i))\n \n crossFlow = pd.read_csv('Data_crossflow.csv', index_col = 'Unnamed: 0')\n lateralFactors = []\n for node in Nodes:\n lateralFactors.append(crossFlow[node]/0.8)\n #need to judge the sign of lateral flow according to CTF rule!!\n gapsToFlip = [2,4,6,7,9,11,13,14,16,18,20,21] #gaps in y direction\n gapsToFlipIndex = [x - 1 for x in gapsToFlip]\n for factors in lateralFactors:\n for index in gapsToFlipIndex:\n factors[index] = -factors[index] \n #note: lateralFactors is a list of list\n \n #below calculate factors averaged over all subchannels\n crossFlowAveFactor = crossFlow.apply(abs).mean(axis = 0)/0.8\n lateralFactorsAvelist = []\n for i in range(0,14):\n base = []\n for j in range(0,24):\n base.append(crossFlowAveFactor[i])\n lateralFactorsAvelist.append(base)\n \n \n for i in range(0, 14):\n for j in range(0, 24):\n #note, in the original model there is only one sign for all source\n #terms in one sub-channel. therefore -- sign(crossFlow.iloc[j,2])\n lateralFactorsAvelist[i][j] = lateralFactorsAvelist[i][j] *sign(crossFlow.iloc[j,2]) \n for each in lateralFactorsAvelist:\n for index in gapsToFlipIndex:\n each[index] = -each[index] \n \n \n return lateralFactors, lateralFactorsAvelist",
"def _get_g_test(self, data, alpha):\n n = len(data)\n significance_level = self._get_t_significance_level(alpha, n)\n t = stats.t.isf(significance_level, n-2)\n return ((n-1) / np.sqrt(n)) * (np.sqrt(t**2 / (n-2 + t**2)))",
"def one_transition_spectrum_fluor(self,tr):\n \n\n ta = tr[\"ta\"] # TimeAxis\n dd = tr[\"dd\"] # transition dipole strength\n om = tr[\"om\"] # frequency - rwa\n gg = tr[\"gg\"] # natural broadening (constant or time dependent)\n fwhm = tr[\"fwhm\"] # Additional gaussian broadening of the spectra\n sgm = fwhm/(2*numpy.sqrt(2*numpy.log(2)))\n \n # CD and fluorescence can be calculated in this step\n # TODO if rotatory strength defined calculate also circular dichroism spectra\n # TOOD calculate fluorescence spectra (for fluorescence there should be a switch because it should be calculated only for the first transition) \n \n \n if self.system._has_system_bath_coupling:\n# ct = tr[\"ct\"] # correlation function\n re = tr[\"re\"] # reorganisation energy\n \n # convert correlation function to lineshape function\n #gt = self._c2g(ta,ct.data)\n gt = tr[\"gt\"]\n # calculate time dependent response\n at = numpy.exp(-numpy.conjugate(gt) -1j*om*ta.data + 2j*re*ta.data)\n else:\n # calculate time dependent response\n at = numpy.exp(-1j*om*ta.data) \n# plt.figure()\n# plt.title(\"Absorption\")\n# plt.plot(ta.data,numpy.real(at))\n# plt.plot(ta.data,numpy.imag(at))\n \n \n if len(gg) == 1:\n gam = gg[0]\n rt = numpy.exp(gam*ta.data)\n at *= rt\n #print(\"Constant: \", rt[20], len(at))\n else:\n rt = numpy.exp((gg)*ta.data) \n at *= rt\n #print(\"Time dependent: len = \", rt[20], len(rt))\n \n if fwhm!=0.0:\n gauss = numpy.exp(-2*(numpy.pi**2)*(sgm**2)*(ta.data**2))\n at *= gauss\n \n # Fourier transform the result\n ft = dd*numpy.fft.hfft(at)*ta.step\n ft = numpy.fft.fftshift(ft)\n # invert the order because hfft is a transform with -i\n ft = numpy.flipud(ft) \n # cut the center of the spectrum\n Nt = ta.length #len(ta.data) \n return ft[Nt//2:Nt+Nt//2]",
"def t_test(dataType):\n\n\t# read the data\n\tparser = ExperimentUtils()\n\tdata = parser.parse_data(dataType)\n\n\tN = len(data.keys()) # number participants\n\n\t# - for trial 1 and trial 2:\n\t# \tL2 norm over each timestep, then sum all the values together\n\t# - average over two trials for each participant \n\ttask_avgs = {}\n\n\t# participant ID can take values 0 - 9\n\tfor ID in data.keys():\n\t\tfor task in data[ID]:\n\t\t\t# dont include the familiarization task (task can take values 1,2,3)\n\t\t\tif task != 0:\n\t\t\t\tif task not in task_avgs:\n\t\t\t\t\ttask_avgs[task] = {}\n\t\t\t\t\ttask_avgs[task][\"A\"] = np.array([0.0]*N)\n\t\t\t\t\ttask_avgs[task][\"B\"] = np.array([0.0]*N)\n\n\t\t\t\ttrialAsum = [0.0,0.0]\n\t\t\t\ttrialBsum = [0.0,0.0]\n\t\t\t\t# trial can take values 1 or 2\n\t\t\t\tfor trial in data[ID][task]:\n\t\t\t\t\t# only compute metrics over data, not timestamps\n\t\t\t\t\tAdata = data[ID][task][trial]['A'][1:8]\n\t\t\t\t\tBdata = data[ID][task][trial]['B'][1:8]\n\t\t\t\n\t\t\t\t\t#print str(ID)+str(task)+str(trial)+\"A\"\n\t\t\t\t\t#print \"Adata: \" + str(Adata)\n\t\t\t\t\t#print str(ID)+str(task)+str(trial)+\"B\"\n\t\t\t\t\t#print \"Bdata: \" + str(Bdata)\n\n\t\t\t\t\t(h, w) = np.shape(Adata)\n\t\t\t\t\tfor i in range(w):\n\t\t\t\t\t\ttrialAsum[trial-1] += np.linalg.norm(Adata[:,i])\n\t\t\t\t\t(h, w) = np.shape(Bdata)\n\t\t\t\t\tfor i in range(w):\n\t\t\t\t\t\ttrialBsum[trial-1] += np.linalg.norm(Bdata[:,i])\n\t\t\t\tavg_methodA = (trialAsum[0]+trialAsum[1])/2.0\n\t\t\t\tavg_methodB = (trialBsum[0]+trialBsum[1])/2.0\n\n\t\t\t\ttask_avgs[task][\"A\"][ID] = avg_methodA\n\t\t\t\ttask_avgs[task][\"B\"][ID] = avg_methodB\n\n\t# comput independent two-sample t-test \n\t# NOTE: we can assume that the two sample sizes are the same, and \n\t#\t\tthat the two distributions have the same variance\n\tfor task in range(1,4):\n\t\ttaskA = task_avgs[task][\"A\"]\n\t\ttaskB = task_avgs[task][\"B\"]\n\n\t\tmeanA = np.mean(taskA)\n\t\tmeanB = np.mean(taskB)\n\t\tprint \"meanA: \" + str(meanA)\n\t\tprint \"meanB: \" + str(meanB)\n\t\tdiff = meanA - meanB\n\t\tprint \"diff: \" + str(diff)\n\n\t\t(statistic, pvalue) = stats.ttest_ind(a=taskA, b=taskB, equal_var=True)\n\n\t\tprint \"\\n\"\n\t\tprint \"task\"+str(task)+\" statistic: \" + str(statistic)\n\t\tprint \"task\"+str(task)+\" pvalue: \" + str(pvalue)",
"def _get_ucb_beta_th(dim, time_step):\n return np.sqrt(5 * dim * np.log(2 * dim * time_step + 1))",
"def transitions(a, costM, costS):\n #counting IS, signal and trial type transitions\n \n numA = sum(a[:,1]); a = a.astype(int); nTrials = len(a[:,0])\n transitions = pd.DataFrame(columns = ['AA', 'AD', 'DA', 'DD', 'SS', 'SN', 'NS', 'NN'])\n transitions['AA'] = np.bitwise_and(a[1:,1], a[:-1, 1]); transitions['DA'] = np.bitwise_and(a[1:,1], 1-a[:-1, 1])\n transitions['AD'] = np.bitwise_and(1-a[1:,1], a[:-1, 1]);transitions['DD'] = np.bitwise_and(1-a[1:,1], 1-a[:-1, 1])\n transitions['SS'] = np.bitwise_and(a[1:,0], a[:-1, 0]); transitions['NS'] = np.bitwise_and(a[1:,0], 1-a[:-1, 0])\n transitions['SN'] = np.bitwise_and(1-a[1:,0], a[:-1, 0]);transitions['NN'] = np.bitwise_and(1-a[1:,0], 1-a[:-1, 0])\n \n transition = np.zeros((nTrials,4)) #H,M,FA,CR\n transition[:,0] = np.bitwise_and(a[:,0], a[:, 2]); transition[:,1] = np.bitwise_and(a[:,0], 1-a[:, 2])\n transition[:,2] = np.bitwise_and(1-a[:,0], a[:, 2]);transition[:,3] = np.bitwise_and(1-a[:,0], 1-a[:, 2])\n \n \n #no. of DA, DD, AD, AA transitions on SS, SN, NS, NN trials\n numSignal = np.zeros((4,4)) #DA, DD, AA, AD numbers for SS, SN, NS, NN\n numSignal[0,:] = [sum(np.bitwise_and(transitions['DA'], transitions['SS'])),\n sum(np.bitwise_and(transitions['DA'], transitions['SN'])), \n sum(np.bitwise_and(transitions['DA'], transitions['NS'])),\n sum(np.bitwise_and(transitions['DA'], transitions['NN']))]\n numSignal[1,:] = [sum(np.bitwise_and(transitions['DD'], transitions['SS'])),\n sum(np.bitwise_and(transitions['DD'], transitions['SN'])), \n sum(np.bitwise_and(transitions['DD'], transitions['NS'])),\n sum(np.bitwise_and(transitions['DD'], transitions['NN']))]\n numSignal[2,:] = [sum(np.bitwise_and(transitions['AA'], transitions['SS'])),\n sum(np.bitwise_and(transitions['AA'], transitions['SN'])), \n sum(np.bitwise_and(transitions['AA'], transitions['NS'])),\n sum(np.bitwise_and(transitions['AA'], transitions['NN']))]\n numSignal[3,:] = [sum(np.bitwise_and(transitions['AD'], transitions['SS'])),\n sum(np.bitwise_and(transitions['AD'], transitions['SN'])), \n sum(np.bitwise_and(transitions['AD'], transitions['NS'])),\n sum(np.bitwise_and(transitions['AD'], transitions['NN']))]\n \n #no. of DA, DD, AD, AA transitions on incongruent and congruent hits and crs\n numTypes = np.zeros((4,4))\n da = np.array(transitions.loc[:,'DA'])\n transition = transition.astype(int)\n for i in range(4):\n for j in range(4): \n numTypes[i,j] = sum(np.bitwise_and(np.bitwise_and(transition[1:,j], transition[:-1, i]), da))\n\n return numSignal, numTypes",
"def apply_two_channels(t_chan):\n\n # Sort the exposure times\n isort = np.argsort(t_chan)\n\n # Find channel that requires the longest exposure time\n imax = isort[-1]\n\n # Record longest time\n t_long = t_chan[imax]\n\n # Record sum of the other two channels\n t_sum = (np.sum(t_chan) - t_chan[imax])\n\n # Logic for deciding total exposure time\n if t_long > t_sum:\n # if the longest exposure channel is longer than the other two combined\n # then they can both be observed during t_long\n t_tot = t_long\n else:\n # Otherwise the t_long can be observed while the during both the second and third\n # longest exposure time channels\n t_tot = t_sum\n\n return t_tot",
"def coherency(self):\r\n data = self.input.data\r\n tseries_length = data.shape[0]\r\n spectrum_length = self.spectrum.shape[-1]\r\n\r\n coherency = np.zeros((tseries_length,\r\n tseries_length,\r\n spectrum_length), dtype=complex)\r\n\r\n for i in range(tseries_length):\r\n for j in range(i, tseries_length):\r\n coherency[i][j] = tsa.coherency_spec(self.spectrum[i][j],\r\n self.spectrum[i][i],\r\n self.spectrum[j][j])\r\n\r\n idx = tril_indices(tseries_length, -1)\r\n coherency[idx[0], idx[1], ...] = coherency[idx[1], idx[0], ...].conj()\r\n\r\n return coherency",
"def gaus(x,A,B,C):\n return A * np.exp( -(x-B)**2 / (2*C**2))",
"def calc_change (change_amnts, rate_of_transition, from_cohort, present):\n row, col = cuda.grid(2)\n\n if row < from_cohort.shape[0] and col < from_cohort.shape[1]:\n change_amnts[row,col] = \\\n rate_of_transition[row,col] * from_cohort[row,col] \n if present[row, col] and change_amnts[row, col] > from_cohort[row, col]:\n change_amnts[row, col] = from_cohort[row,col]",
"def _get_ucb_beta_th(dim, time_step):\n return np.sqrt(0.5 * dim * np.log(2 * dim * time_step + 1))",
"def gain_opt(machine, T):\n res = (np.arange(T)+1)\n return res * np.amax(machine)"
] | [
"0.5973435",
"0.5885312",
"0.5802749",
"0.574831",
"0.56886405",
"0.55259013",
"0.54229647",
"0.5363545",
"0.53522897",
"0.5351203",
"0.53407997",
"0.53008807",
"0.5212511",
"0.51963246",
"0.51937586",
"0.51852894",
"0.5148304",
"0.5119841",
"0.51136154",
"0.5099041",
"0.50903976",
"0.5083632",
"0.50763524",
"0.5055447",
"0.50542456",
"0.504881",
"0.5042431",
"0.503639",
"0.5035462",
"0.50280946"
] | 0.61610305 | 0 |
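The t_test snippet in the list above ultimately reduces to a single call to scipy.stats.ttest_ind with equal variances assumed. A minimal self-contained sketch of that call on synthetic per-participant averages (the array names and values here are illustrative, not taken from the snippet):

import numpy as np
from scipy import stats

rng = np.random.default_rng(0)
# hypothetical per-participant averages for two methods, N = 10 participants
method_a = rng.normal(loc=5.0, scale=1.0, size=10)
method_b = rng.normal(loc=6.0, scale=1.0, size=10)

# equal_var=True gives Student's t-test, matching the snippet's equal-variance assumption
statistic, pvalue = stats.ttest_ind(a=method_a, b=method_b, equal_var=True)
print("statistic:", statistic, "pvalue:", pvalue)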
preprocessed_data = array of shape (n_channels, n_trials, n_timepoints) sampling_frequency = sampling rate in Hz n_shuffles = number of shuffles to perform wanted_window = window to calculate granger causality in alpha = significance level multitaper_time_window_duration = duration of time window for multitaper multitaper_time_window_step = step of time window for multitaper | def __init__(self,
good_lfp_data,
# preprocessed_data,
sampling_frequency=1000,
n_shuffles=500,
wanted_window=[1500, 4000],
alpha=0.05,
multitaper_time_halfbandwidth_product=1,
multitaper_time_window_duration=0.3,
multitaper_time_window_step=0.05,
preprocess=True,
):
#self.preprocessed_data = preprocessed_data
#self.input_data = preprocessed_data.T[wanted_window[0]:wanted_window[1]]
self.preprocess_flag = preprocess
self.good_lfp_data = good_lfp_data
self.sampling_frequency = sampling_frequency
self.n_shuffles = n_shuffles
self.wanted_window = wanted_window
self.alpha = alpha
self.multitaper_time_halfbandwidth_product = \
multitaper_time_halfbandwidth_product
self.multitaper_time_window_duration = multitaper_time_window_duration
self.multitaper_time_window_step = multitaper_time_window_step | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cubetest_per_topic(topic_truth, topic_result, gamma, max_height, cutoff):\n subtopic_num = topic_truth[1]\n topic_truth = topic_truth[0]\n\n subtopic_height = Counter() # current height of every subtopic\n subtopic_count = Counter() # #docs found relevant to every subtopic (nrels)\n\n weight_per_subtopic = 1.0 / subtopic_num\n\n def gain_per_doc(doc_no):\n if doc_no not in topic_truth:\n return 0\n gain = 0\n for subtopic_id, rating in topic_truth[doc_no].items():\n if subtopic_height[subtopic_id] < max_height:\n discount_height = (gamma ** (subtopic_count[subtopic_id] + 1)) * rating\n if discount_height + subtopic_height[subtopic_id] > max_height:\n discount_height = max_height - subtopic_height[subtopic_id]\n\n gain += weight_per_subtopic * discount_height\n # print(doc_no, subtopic_id,\"original_height\", rating, \"discount height\", discount_height)\n subtopic_height[subtopic_id] += discount_height\n subtopic_count[subtopic_id] += 1\n # print(doc_no, gain)\n return gain\n\n sorted_result = sorted(topic_result.items(), key=lambda x: x[0])\n time = 0.0\n total_gain = 0\n accu_gain = 0\n doc_num = 0\n for iter_num, doclist in sorted_result:\n if iter_num >= cutoff:\n break\n time += 1\n # gain_per_iteration = 0\n for doc_no in doclist:\n total_gain += gain_per_doc(doc_no)\n accu_gain += (total_gain / max_height / time)\n doc_num += 1\n\n # print(time)\n if time != 0:\n ct = total_gain / max_height / time\n else:\n ct = 0\n # print(doc_num)\n if doc_num > 0:\n act = accu_gain / doc_num\n else:\n act = 0\n # print( accu_gain , total_gain)\n return total_gain / max_height, ct, act",
"def sample(timesteps):\n\n k = 32\n # pow_alpha = -0.7\n # pow_beta = -0.5\n \n # # P(i) = (1 / rank(i)) ^ alpha / sum((1 / rank(i)) ^ alpha)\n # pdf = [x ** pow_alpha for x in range(1, k+1)]\n # pdf_sum = math.fsum(pdf)\n # distribution = [x / pdf_sum for x in pdf]\n \n # # The expected value with stochastic updates depends on the behavior\n # # distribution to be the same as the updates. Since we are introducing\n # # bias with prioritized sweeps, we need to do introduce importance\n # # sampling.\n\n # # https://arxiv.org/pdf/1511.05952.pdf\n # # Importance sampling weight \n # # w_i = (N * P(i))^-B) / max(w_j)\n # importance = [(len(timesteps) * pi) ** pow_beta for pi in distribution]\n # max_importance = max(importance)\n # importance = [i / max_importance for i in importance]\n \n # # Sample timesteps\n # # timesteps.sort(key=lambda t: t.abs_error, reverse=True)\n # samples = []\n # b = 0\n # for start, stop in batch_indexes(len(timesteps), math.floor(len(timesteps) / k)):\n # if b == k: break\n # sample = np.random.choice(timesteps[start:stop])\n # sample.adjusted_target = sample.target * importance[b]\n # samples.append(sample)\n # b += 1\n \n for t in timesteps:\n t.adjusted_target = t.target\n random.shuffle(timesteps)\n return timesteps[:k]",
"def _shuffler(inputs):\r\n\r\n (true_starts, true_ends, transient_responses, position_unsynced,\r\n behav_period, position_synced, framePeriod, frames_to_include,\r\n nROIs, n_position_bins, initial_counts) = inputs\r\n\r\n shuffle_values = np.zeros((nROIs, n_position_bins))\r\n shuffle_counts = np.zeros((nROIs, n_position_bins))\r\n for cycle_true_starts, cycle_true_ends, cycle_responses, cycle_pos, \\\r\n cycle_pos_synced, cycle_frames, cycle_counts in it.izip(\r\n true_starts, true_ends, transient_responses, position_unsynced,\r\n position_synced, frames_to_include, initial_counts):\r\n\r\n for roi_idx, roi_starts, roi_ends, roi_responses in zip(\r\n it.count(), cycle_true_starts, cycle_true_ends,\r\n cycle_responses):\r\n\r\n shuffle_starts, shuffle_ends = shuffle_transients(\r\n true_starts=roi_starts, true_ends=roi_ends,\r\n frames_to_include=cycle_frames)\r\n\r\n v, c, _ = generate_tuning_curve(\r\n start_indices=shuffle_starts, end_indices=shuffle_ends,\r\n response_magnitudes=roi_responses,\r\n position_unsynced=cycle_pos, behavior_rate=behav_period,\r\n position_synced=cycle_pos_synced, imaging_rate=framePeriod,\r\n n_position_bins=n_position_bins, return_squared=False,\r\n initial_counts=cycle_counts)\r\n\r\n shuffle_values[roi_idx] += v\r\n shuffle_counts[roi_idx] += c\r\n\r\n # shuffled counts may become zero if there's an issue with the behavior\r\n # sampling rate\r\n assert np.any(np.sum(shuffle_counts, axis=0)) != 0\r\n\r\n return shuffle_values, shuffle_counts",
"def check_sample_correctishness_channelwise(f):\n\n batch_size = 27\n pool_size = 4\n n = pool_size * 21\n\n rng = np.random.RandomState([2012, 9, 26])\n zv = rng.randn(batch_size, n).astype(config.floatX) * 3.5 - 5.\n top_down_v = rng.randn(batch_size, n / pool_size).astype(config.floatX)\n\n z_th = T.matrix()\n z_th.tag.test_value = zv\n z_th.name = 'z_th'\n\n top_down_th = T.matrix()\n top_down_th.tag.test_value = top_down_v\n top_down_th.name = 'top_down_th'\n\n theano_rng = MRG_RandomStreams(rng.randint(2147462579))\n p_th, h_th, p_sth, h_sth = f(z_th, pool_size, top_down_th, theano_rng)\n\n prob_func = function([z_th, top_down_th], [p_th, h_th])\n pv, hv = prob_func(zv, top_down_v)\n\n sample_func = function([z_th, top_down_th], [p_sth, h_sth])\n\n acc_p = 0. * pv\n acc_h = 0. * hv\n\n # make sure the test gets good coverage, ie, that it includes\n # many different activation probs for both detector and pooling layer\n buckets = 10\n bucket_width = 1. / float(buckets)\n print(pv.min(), pv.max())\n print(hv.min(), hv.max())\n for i in xrange(buckets):\n lower_lim = i * bucket_width\n upper_lim = (i+1) * bucket_width\n print(lower_lim, upper_lim)\n\n assert np.any((pv >= lower_lim) * (pv < upper_lim))\n assert np.any((hv >= lower_lim) * (hv < upper_lim))\n\n assert upper_lim == 1.\n\n for i in xrange(10000):\n ps, hs = sample_func(zv, top_down_v)\n\n assert ps.shape == pv.shape\n assert hs.shape == hv.shape\n\n acc_p += ps\n acc_h += hs\n\n est_p = acc_p / float(i+1)\n est_h = acc_h / float(i+1)\n\n pd = np.abs(est_p-pv)\n hd = np.abs(est_h-hv)\n\n \"\"\"\n # plot maps of the estimation error, this is to see if it has some\n # spatial pattern this is useful for detecting bugs like not handling\n # the border correctly, etc.\n # from pylearn2.gui.patch_viewer import PatchViewer\n\n pv = PatchViewer((pd.shape[0],pd.shape[3]),(pd.shape[1],pd.shape[2]),\n is_color = False)\n for i in xrange(pd.shape[0]):\n for j in xrange(pd.shape[3]):\n pv.add_patch( (pd[i,:,:,j] / pd.max() )* 2.0 - 1.0, rescale = False)\n pv.show()\n\n pv = PatchViewer((hd.shape[0],hd.shape[3]),(hd.shape[1],hd.shape[2]),\n is_color = False)\n for i in xrange(hd.shape[0]):\n for j in xrange(hd.shape[3]):\n pv.add_patch( (hd[i,:,:,j] / hd.max() )* 2.0 - 1.0, rescale = False)\n pv.show()\n \"\"\"\n\n \"\"\"\n plot expectation to estimate versus error in estimation\n expect bigger errors for values closer to 0.5\n\n from matplotlib import pyplot as plt\n\n #nelem = reduce( lambda x, y : x*y, pd.shape)\n #plt.scatter( pv.reshape(nelem), pd.reshape(nelem))\n #plt.show()\n\n nelem = reduce( lambda x, y : x*y, hd.shape)\n plt.scatter( hv.reshape(nelem), hd.reshape(nelem))\n plt.show()\n \"\"\"\n\n # don't really know how tight this should be\n # but you can try to pose an equivalent problem\n # and implement it in another way\n # using a numpy implementation in softmax_acc.py\n # I got a max error of .17\n assert max(pd.max(), hd.max()) < .17\n\n # Do exhaustive checks on just the last sample\n assert np.all((ps == 0) + (ps == 1))\n assert np.all((hs == 0) + (hs == 1))\n\n for k in xrange(batch_size):\n for i in xrange(ps.shape[1]):\n p = ps[k, i]\n h = hs[k, i*pool_size:(i+1)*pool_size]\n assert h.shape == (pool_size,)\n assert p == h.max()\n assert h.sum() <= 1\n\n \"\"\" If you made it to here, it's correctish\n (cant tell if samples are perfectly \"correct\") \"\"\"",
"def calc_granger_shuffle(self):\n if not hasattr(self, 'input_data'):\n self.preprocess_and_check_stationarity()\n temp_series = [np.stack([np.random.permutation(x)\n for x in self.input_data.T]).T\n for i in trange(self.n_shuffles)]\n\n outs_temp = parallelize(self.calc_granger, temp_series, n_jobs=30)\n outs_temp = [x[0] for x in outs_temp]\n self.shuffle_outs = np.array(outs_temp)",
"def get_audio_features(sample, audio_data, max_len, data_truncating, data_filling, audio_cfg):\n if len(audio_data) > max_len:\n if data_truncating == \"fusion\":\n # fusion\n mel = get_mel(audio_data, audio_cfg)\n # split to three parts\n chunk_frames = max_len // audio_cfg['hop_size']+1 # the +1 related to how the spectrogram is computed\n total_frames = mel.shape[0]\n if chunk_frames == total_frames:\n # there is a corner case where the audio length is\n # larger than max_len but smaller than max_len+hop_size.\n # In this case, we just use the whole audio.\n mel_fusion = np.stack([mel, mel, mel, mel], axis=0)\n longer = [[False]]\n else:\n ranges = np.array_split(list(range(0, total_frames-chunk_frames+1)), 3)\n # print('total_frames-chunk_frames:', total_frames-chunk_frames,\n # 'len(audio_data):', len(audio_data),\n # 'chunk_frames:', chunk_frames,\n # 'total_frames:', total_frames)\n if len(ranges[1]) == 0:\n # if the audio is too short, we just use the first chunk\n ranges[1] = [0]\n if len(ranges[2]) == 0:\n # if the audio is too short, we just use the first chunk\n ranges[2] = [0]\n # randomly choose index for each part\n idx_front = np.random.choice(ranges[0])\n idx_middle = np.random.choice(ranges[1])\n idx_back = np.random.choice(ranges[2])\n # select mel\n mel_chunk_front = mel[idx_front:idx_front+chunk_frames, :]\n mel_chunk_middle = mel[idx_middle:idx_middle+chunk_frames, :]\n mel_chunk_back = mel[idx_back:idx_back+chunk_frames, :]\n\n # shrink the mel\n # Output may differ between torchvision.transforms.Resize and numpy.resize.\n #mel_shrink_torch = torch.from_numpy(mel[None])\n #mel_shrink_torch = torchvision.transforms.Resize(size=[chunk_frames, 64])(mel_shrink_torch)[0]\n #mel_shrink_torch = mel_shrink_torch.to('cpu').detach().numpy().copy()\n mel_shrink_numpy = np.resize(mel[None], (chunk_frames, 64))\n # logging.info(f\"mel_shrink.shape: {mel_shrink.shape}\")\n\n # stack\n mel_fusion = np.stack([mel_chunk_front, mel_chunk_middle, mel_chunk_back, mel_shrink_numpy], axis=0)\n longer = [[True]]\n # random crop to max_len (for compatibility)\n overflow = len(audio_data) - max_len\n idx = np.random.randint(0, overflow + 1)\n audio_data = audio_data[idx: idx + max_len]\n\n else: # padding if too short\n if len(audio_data) < max_len: # do nothing if equal\n if data_filling == \"repeatpad\":\n n_repeat = int(max_len/len(audio_data))\n audio_data = np.tile(audio_data, n_repeat)\n # audio_data = audio_data.unsqueeze(0).unsqueeze(0).unsqueeze(0)\n # audio_data = F.interpolate(audio_data,size=max_len,mode=\"bicubic\")[0,0,0]\n audio_data = np.pad(audio_data, [(0, max_len - len(audio_data))], \"constant\")\n elif data_filling == \"pad\":\n audio_data = np.pad(audio_data, [(0, max_len - len(audio_data))], \"constant\")\n elif data_filling == \"repeat\":\n n_repeat = int(max_len/len(audio_data))\n audio_data = np.tile(audio_data, n_repeat+1)[:max_len]\n \n if data_truncating == 'fusion':\n mel = get_mel(audio_data, audio_cfg)\n mel_fusion = np.stack([mel, mel, mel, mel], axis=0)\n longer = [[False]]\n\n return longer, mel_fusion, audio_data",
"def train(trial_num, image_num, filter_num, filter_size, input_size, channel_num, pooling_rate, left_upper_padding, right_lower_padding):\n\n input_batch_num = 1\n batch_num = 2\n\n init_filters = np.array(np.random.normal(size=filter_num * channel_num *\n filter_size*filter_size), dtype=\"float32\")\n #init_filters = np.array([1.0] * filter_num * channel_num * filter_size * filter_size, dtype=\"float32\")\n init_filters = 0.01 * init_filters.reshape(filter_num, channel_num*filter_size*filter_size)\n\n init_hbias = np.array([-0.1] * filter_num, dtype=\"float32\").reshape(filter_num, 1)\n\n init_vbias = np.array([0.0] * channel_num, dtype=\"float32\").reshape(channel_num, 1)\n\n libnvcrbm = __import__(\"nvcrbm\")\n cur_filters = libnvcrbm.init(filter_num, filter_size, \n input_batch_num, input_size, channel_num,\n pooling_rate, left_upper_padding, right_lower_padding,\n init_filters, init_hbias, init_vbias)\n\n imgs = cPickle.load(open(\"../data/kyoto_large_train.pkl\", \"r\"))\n img_size = imgs[0].shape[0]\n\n for trial_idx in xrange(trial_num):\n for img_idx in xrange(image_num):\n for batch_idx in xrange(batch_num):\n row_idx = np.arange(0, input_size) + np.random.random_integers(img_size - 2 * filter_size - input_size) + filter_size - 1\n col_idx = np.arange(0, input_size) + np.random.random_integers(img_size - 2 * filter_size - input_size) + filter_size - 1\n #row_idx = np.arange(0, input_size) + 200\n #col_idx = np.arange(0, input_size) + 200\n\n batch_data = imgs[img_idx][row_idx][:,col_idx]\n batch_data = batch_data - batch_data.mean()\n batch_data = np.asarray(batch_data.reshape(1, input_size * input_size), dtype=\"float32\")\n \n libnvcrbm.run_batch(trial_idx, img_idx, batch_idx, batch_data)\n\n libnvcrbm.print_result()\n cur_filters = libnvcrbm.get_gpu_filters()\n dump_filter_image(cur_filters, \"../data/kyoto/filters/trial_%d.png\" % trial_idx)\n\n first_layer = {}\n first_layer[\"filters\"] = cur_filters\n first_layer[\"bias\"] = libnvcrbm.get_gpu_hbias()\n cPickle.dump(first_layer, open(\"../data/first_layer.dat\", \"w+\"))",
"def extract_features(audio_filename, args):\n #print(\"Extract_features\")\n spec_type = args['spec_type']\n\n if spec_type == 'cqt':\n bin_multiple = args['bin_multiple']\n max_midi = args['max_midi']\n min_midi = args['min_midi']\n note_range = max_midi - min_midi + 1\n sr = args['sr']\n hop_length = args['hop_length']\n window_size = args['window_size']\n\n bins_per_octave = 12 * bin_multiple # should be a multiple of 12\n n_bins = note_range * bin_multiple\n\n # down-sample,mono-channel\n y, _ = librosa.load(audio_filename, sr)\n # y: an np.ndarray[ shape=(n,) ] giving the audio time series. librosa.load automatically downsamples to the\n # required sample rate sr\n # doku on librosa.cqt:\n # https://librosa.github.io/librosa/generated/librosa.core.cqt.html?highlight=cqt#librosa.core.cqts\n S = librosa.cqt(y, fmin=librosa.midi_to_hz(min_midi), sr=sr, hop_length=hop_length,\n bins_per_octave=bins_per_octave, n_bins=n_bins)\n S = S.T\n S = np.abs(S)\n min_db = np.min(S)\n print(np.min(S), np.max(S), np.mean(S))\n S = np.pad(S, ((window_size // 2, window_size // 2), (0, 0)), 'constant', constant_values=min_db)\n\n windows = []\n\n # IMPORTANT NOTE:\n # Since we pad the the spectrogram frame,\n # the onset frames are actually `offset` frames.\n # To obtain a window of the center frame at each true index, we take a slice from i to i+window_size\n # starting at frame 0 of the padded spectrogram\n for i in range(S.shape[0] - window_size + 1):\n w = S[i:i + window_size, :]\n windows.append(w)\n\n # print inputs\n x = np.array(windows)\n return x\n\n else:\n print(\"WARNING: feature type \" + spec_type + \" not implemented.\")\n return 0",
"def preprocess_single_chords_list(self, window_size=5, flattened_window=True, hop_length=4410, to_skip=5, norm_to_C=False, spectrogram_generator=log_mel_spectrogram, skip_coef=1) -> tuple:\n prep_data = []\n prep_targets = []\n k = 0\n # Iterate over all audio files\n for audio, chords, keys in zip(self.DATA, self.CHORDS, self.KEYS):\n print(k)\n k = k+1\n # Get log mel spectrogram\n spectrogram = IsophonicsDataset.preprocess_audio(waveform=audio.WAVEFORM, sample_rate=audio.SAMPLE_RATE, spectrogram_generator=spectrogram_generator, nfft=self.NFFT, hop_length=hop_length, norm_to_C=norm_to_C, key=keys.get_first_key())\n spectrogram = np.array(spectrogram)\n spec_length, num_samples = spectrogram.shape\n\n # Collect data for each spectrogram sample\n j = 0 # labels index\n for i in [index for index in range(num_samples) if index%to_skip==0]:\n # Get data window with zero margin\n n_pre_zeros, window_indices, n_post_zeros = IsophonicsDataset.get_flatten_indices(i, num_samples, skip_coef, window_size)\n if flattened_window:\n prep_data.append(\n np.concatenate((\n np.zeros((n_pre_zeros, spec_length)),\n np.array(spectrogram[:, window_indices]).swapaxes(0,1),\n np.zeros((n_post_zeros, spec_length))\n ), axis = 0).flatten()\n )\n else:\n prep_data.append(\n np.concatenate((\n np.zeros((n_pre_zeros, spec_length)),\n np.array(spectrogram[:, window_indices]).swapaxes(0,1),\n np.zeros((n_post_zeros, spec_length))\n ), axis = 0)\n )\n\n\n # Get label\n second = float(i)/(float(self.SAMPLE_RATE) / float(hop_length))\n while j < len(chords.START) and second > chords.START[j] :\n j = j + 1\n if j == len(chords.START):\n prep_targets.append(Dataset.get_integered_chord(\"N\", norm_to_C, keys.get_first_key()))\n else:\n prep_targets.append(Dataset.get_integered_chord(chords.CHORD[j], norm_to_C, keys.get_first_key()))\n\n print(\"[INFO] The Isophonics Dataset was successfully preprocessed.\")\n return np.array(prep_data), np.array(prep_targets)",
"def mixup_data(self, data_ratio_produce=2, alpha=0.2):\n real_samples_idx = np.argwhere(self.data['real']).ravel()\n n_training_samples = real_samples_idx.shape[0]\n # Make random mixup samples\n n_samples = int(n_training_samples * data_ratio_produce)\n data_new = dict()\n for key in self.data:\n data_new[key] = []\n for i in range(n_samples):\n # Mixup ratio\n lam = np.random.beta(alpha, alpha)\n # Should not happen, but just in case to detect bugs\n if lam < 0 or lam > 1:\n raise ValueError('Lam not between 0 and 1')\n # Images to choose for mixup, choose only from real samples\n idxs = np.random.choice(real_samples_idx, 2, replace=False)\n idx0 = idxs[0]\n idx1 = idxs[1]\n\n # Make mixup data\n data_new['greyscale'].append(\n self.data['greyscale'][idx0] * lam + self.data['greyscale'][idx1] * (1 - lam))\n data_new['sample'].append(\n '_'.join([str(self.data['sample'][idx0]), str(lam), str(str(self.data['sample'][idx1])), str(1 - lam)]))\n data_new['lifetime'].append(\n self.data['lifetime'][idx0] * lam + self.data['lifetime'][idx1] * (1 - lam))\n data_new['magnification'].append(\n self.data['magnification'][idx0] * lam + self.data['magnification'][idx1] * (1 - lam))\n data_new['uncertainty'].append(\n self.data['uncertainty'][idx0] * lam + self.data['uncertainty'][idx1] * (1 - lam))\n data_new['image_id'].append(\n '_'.join(\n [str(self.data['image_id'][idx0]), str(lam), str(self.data['image_id'][idx1]), str(1 - lam)]))\n data_new['real'].append(0)\n\n # Add mixup to data\n for key in self.data.keys():\n if len(data_new[key]) != n_samples:\n raise ValueError('Mixup data for %s not of corect length' % key)\n # Do not use np concat as it is slow - filling an array is quicker\n # data_temp = np.empty((self.data[key].shape[0] + len(data_new[key]), *self.data[key].shape[1:]),\n # dtype=self.data[key].dtype)\n # for i in range(self.data[key].shape[0]):\n # data_temp[i] = self.data[key][i]\n # # Add new data after old one (array positions starting after positions of original data)\n # for i in range(len(data_new[key])):\n # data_temp[i+self.data[key].shape[0]] = data_new[key][i]\n # self.data[key] = data_temp\n self.data[key] = np.concatenate([self.data[key], data_new[key]])",
"def _choose_sample(self):\n\n \t #periodically generate a new reconstruction for the purposes of sampling",
"def timbral_warmth(fname, dev_output=False, phase_correction=False, clip_output=False, max_FFT_frame_size=8192,\n max_WR = 12000, fs=0):\n '''\n Read input\n '''\n audio_samples, fs = timbral_util.file_read(fname, fs, phase_correction=phase_correction)\n\n # get the weighted high frequency content\n mean_wr, _, _, weighted_hf = warm_region_cal(audio_samples, fs)\n\n # calculate the onsets\n envelope = timbral_util.sample_and_hold_envelope_calculation(audio_samples, fs, decay_time=0.1)\n envelope_time = np.arange(len(envelope)) / float(fs)\n\n # calculate the onsets\n nperseg = 4096\n original_onsets = timbral_util.calculate_onsets(audio_samples, envelope, fs, nperseg=nperseg)\n # If onsets don't exist, set it to time zero\n if not original_onsets:\n original_onsets = [0]\n # set to start of file in the case where there is only one onset\n if len(original_onsets) == 1:\n original_onsets = [0]\n '''\n Initialise lists for storing features\n '''\n # set defaults for holding\n all_rms = []\n all_ratio = []\n all_SC = []\n all_WR_Ratio = []\n all_decay_score = []\n\n\n # calculate metrics for each onset\n for idx, onset in enumerate(original_onsets):\n if onset == original_onsets[-1]:\n # this is the last onset\n segment = audio_samples[onset:]\n else:\n segment = audio_samples[onset:original_onsets[idx+1]]\n\n segment_rms = np.sqrt(np.mean(segment * segment))\n all_rms.append(segment_rms)\n\n # get FFT of signal\n segment_length = len(segment)\n if segment_length < max_FFT_frame_size:\n freq, time, spec = spectrogram(segment, fs, nperseg=segment_length, nfft=max_FFT_frame_size)\n else:\n freq, time, spec = spectrogram(segment, fs, nperseg=max_FFT_frame_size, nfft=max_FFT_frame_size)\n\n # flatten the audio to 1 dimension. Catches some strange errors that cause crashes\n if spec.shape[1] > 1:\n spec = np.sum(spec, axis=1)\n spec = spec.flatten()\n\n # normalise for this onset\n spec = np.array(list(spec)).flatten()\n this_shape = spec.shape\n spec /= max(abs(spec))\n\n '''\n Estimate of fundamental frequency\n '''\n # peak picking algorithm\n peak_idx, peak_value, peak_x = timbral_util.detect_peaks(spec, freq=freq, fs=fs)\n # find lowest peak\n fundamental = np.min(peak_x)\n fundamental_idx = np.min(peak_idx)\n\n '''\n Warmth region calculation\n '''\n # estimate the Warmth region\n WR_upper_f_limit = fundamental * 3.5\n if WR_upper_f_limit > max_WR:\n WR_upper_f_limit = 12000\n tpower = np.sum(spec)\n WR_upper_f_limit_idx = int(np.where(freq > WR_upper_f_limit)[0][0])\n\n if fundamental < 260:\n # find frequency bin closest to 260Hz\n top_level_idx = int(np.where(freq > 260)[0][0])\n # sum energy up to this bin\n low_energy = np.sum(spec[fundamental_idx:top_level_idx])\n # sum all energy\n tpower = np.sum(spec)\n # take ratio\n ratio = low_energy / float(tpower)\n else:\n # make exception where fundamental is greater than\n ratio = 0\n\n all_ratio.append(ratio)\n\n '''\n Spectral centroid of the segment\n '''\n # spectral centroid\n top = np.sum(freq * spec)\n bottom = float(np.sum(spec))\n SC = np.sum(freq * spec) / float(np.sum(spec))\n all_SC.append(SC)\n\n '''\n HF decay\n - linear regression of the values above the warmth region\n '''\n above_WR_spec = np.log10(spec[WR_upper_f_limit_idx:])\n above_WR_freq = np.log10(freq[WR_upper_f_limit_idx:])\n np.ones_like(above_WR_freq)\n metrics = np.array([above_WR_freq, np.ones_like(above_WR_freq)])\n\n # create a linear regression model\n model = linear_model.LinearRegression(fit_intercept=False)\n model.fit(metrics.transpose(), above_WR_spec)\n 
decay_score = model.score(metrics.transpose(), above_WR_spec)\n all_decay_score.append(decay_score)\n\n\n '''\n get mean values\n '''\n mean_SC = np.log10(np.mean(all_SC))\n mean_decay_score = np.mean(all_decay_score)\n weighted_mean_ratio = np.average(all_ratio, weights=all_rms)\n\n if dev_output:\n return mean_SC, weighted_hf, mean_wr, mean_decay_score, weighted_mean_ratio\n else:\n\n '''\n Apply regression model\n '''\n all_metrics = np.ones(6)\n all_metrics[0] = mean_SC\n all_metrics[1] = weighted_hf\n all_metrics[2] = mean_wr\n all_metrics[3] = mean_decay_score\n all_metrics[4] = weighted_mean_ratio\n\n coefficients = np.array([-4.464258317026696,\n -0.08819320850778556,\n 0.29156539973575546,\n 17.274733561081554,\n 8.403340066029507,\n 45.21212125085579])\n\n warmth = np.sum(all_metrics * coefficients)\n\n # clip output between 0 and 100\n if clip_output:\n warmth = timbral_util.output_clip(warmth)\n\n return warmth",
"def calc_granger(time_series,\n time_halfbandwidth_product=1,\n sampling_frequency=1000,\n time_window_duration=0.3,\n time_window_step=0.05,\n ):\n m = Multitaper(\n time_series,\n sampling_frequency=sampling_frequency, # in Hz\n time_halfbandwidth_product=time_halfbandwidth_product,\n start_time=0,\n time_window_duration=time_window_duration, # in seconds\n time_window_step=time_window_step, # in seconds\n )\n c = Connectivity.from_multitaper(m)\n granger = c.pairwise_spectral_granger_prediction()\n return granger, c",
"def spec_aug(data, num_t_mask=2, num_f_mask=2, max_t=50, max_f=10, max_w=80):\n for sample in data:\n assert \"feat\" in sample\n x = sample[\"feat\"]\n assert isinstance(x, torch.Tensor)\n y = x.clone().detach()\n max_frames = y.size(0)\n max_freq = y.size(1)\n # time mask\n for i in range(num_t_mask):\n start = random.randint(0, max_frames - 1)\n length = random.randint(1, max_t)\n end = min(max_frames, start + length)\n y[start:end, :] = 0\n # freq mask\n for i in range(num_f_mask):\n start = random.randint(0, max_freq - 1)\n length = random.randint(1, max_f)\n end = min(max_freq, start + length)\n y[:, start:end] = 0\n sample[\"feat\"] = y\n yield sample",
"def sample_cmal(model: 'BaseModel', data: Dict[str, torch.Tensor], n_samples: int,\n scaler: Dict[str, Union[pd.Series, xarray.Dataset]]) -> Dict[str, torch.Tensor]:\n setup = _SamplingSetup(model, data, \"cmal\")\n\n # force model into train mode if mc_dropout\n if setup.mc_dropout:\n model.train()\n\n # make predictions:\n pred = model(data)\n\n # sample for different frequencies:\n samples = {}\n for freq_suffix in setup.freq_suffixes:\n # get predict_last_n for the given the mode:\n frequency_last_n = setup._get_frequency_last_n(freq_suffix=freq_suffix)\n\n # CMAL has 4 parts: means (m/mu), scales (b), asymmetries (t/) and weights (p/pi):\n m = pred[f'mu{freq_suffix}']\n b = pred[f'b{freq_suffix}']\n t = pred[f'tau{freq_suffix}']\n p = pred[f'pi{freq_suffix}']\n\n sample_points = []\n for nth_target in range(setup.number_of_targets):\n # sampling presets:\n m_target = _subset_target(m[:, -frequency_last_n:, :], nth_target, setup.cfg.n_distributions)\n b_target = _subset_target(b[:, -frequency_last_n:, :], nth_target, setup.cfg.n_distributions)\n t_target = _subset_target(t[:, -frequency_last_n:, :], nth_target, setup.cfg.n_distributions)\n p_target = _subset_target(p[:, -frequency_last_n:, :], nth_target, setup.cfg.n_distributions)\n\n m_target = torch.repeat_interleave(m_target, n_samples, dim=0)\n b_target = torch.repeat_interleave(b_target, n_samples, dim=0)\n t_target = torch.repeat_interleave(t_target, n_samples, dim=0)\n p_target = torch.repeat_interleave(p_target, n_samples, dim=0)\n\n # sampling procedure:\n values = torch.zeros((setup.batch_size_data * n_samples, frequency_last_n)).to(setup.device)\n values *= torch.tensor(float('nan')) # set target sample_points to nan\n for nth_timestep in range(frequency_last_n):\n\n mask_nan = ~torch.isnan(p_target[:, nth_timestep, 0])\n if any(mask_nan): # skip if the complete mini-batch is invalid\n sub_choices = torch.multinomial(p_target[mask_nan, nth_timestep, :], num_samples=1)\n t_sub = t_target[mask_nan, nth_timestep, :].gather(1, sub_choices)\n m_sub = m_target[mask_nan, nth_timestep, :].gather(1, sub_choices)\n b_sub = b_target[mask_nan, nth_timestep, :].gather(1, sub_choices)\n\n ids = np.ones(b_sub.shape, dtype=bool)\n values_unbound = _sample_asymmetric_laplacians(ids, m_sub, b_sub, t_sub)\n values[mask_nan, nth_timestep] = _handle_negative_values(\n setup.cfg,\n values_unbound,\n sample_values=lambda ids: _sample_asymmetric_laplacians(ids, m_sub, b_sub, t_sub),\n scaler=scaler,\n nth_target=nth_target)\n\n # add the values to the sample_points:\n values = values.permute(1, 0).reshape(frequency_last_n, -1, n_samples).permute(1, 0, 2)\n values = values.detach().cpu()\n sample_points.append(values)\n\n # add sample_points to dictionary of samples:\n freq_key = f'y_hat{freq_suffix}'\n samples.update({freq_key: torch.stack(sample_points, 2)})\n return samples",
"def preprocess_single_chords_list(self, window_size=5, flattened_window=True, hop_length=4410, to_skip=5, norm_to_C=False, spectrogram_generator=log_mel_spectrogram, skip_coef=1) -> tuple:\n prep_data = []\n prep_targets = []\n k = 0\n # Iterate over all audio files\n for audio, chords, desc in zip(self.DATA, self.CHORDS, self.DESC):\n print(k)\n k = k+1\n # Get log mel spectrogram\n spectrogram = IsophonicsDataset.preprocess_audio(waveform=audio.WAVEFORM, sample_rate=audio.SAMPLE_RATE, spectrogram_generator=spectrogram_generator, nfft=self.NFFT, hop_length=hop_length, norm_to_C=norm_to_C, key=desc.TONIC)\n spectrogram = np.array(spectrogram)\n spec_length, num_samples = spectrogram.shape\n\n # Collect data for each spectrogram sample\n j = 0 # labels index\n for i in [index for index in range(num_samples) if index%to_skip==0]:\n # Get data window with zero margin\n n_pre_zeros, window_indices, n_post_zeros = IsophonicsDataset.get_flatten_indices(i, num_samples, skip_coef, window_size)\n if flattened_window:\n prep_data.append(\n np.concatenate((\n np.zeros((n_pre_zeros, spec_length)),\n np.array(spectrogram[:, window_indices]).swapaxes(0,1),\n np.zeros((n_post_zeros, spec_length))\n ), axis = 0).flatten()\n )\n else:\n prep_data.append(\n np.concatenate((\n np.zeros((n_pre_zeros, spec_length)),\n np.array(spectrogram[:, window_indices]).swapaxes(0,1),\n np.zeros((n_post_zeros, spec_length))\n ), axis = 0)\n )\n\n\n # Get label\n second = float(i)/(float(self.SAMPLE_RATE) / float(hop_length))\n while j < len(chords.START) and second > chords.START[j] :\n j = j + 1\n if j == len(chords.START):\n prep_targets.append(Dataset.get_integered_chord(\"N\", norm_to_C, desc.TONIC))\n else:\n prep_targets.append(Dataset.get_integered_chord(chords.CHORD[j], norm_to_C, desc.TONIC))\n\n print(\"[INFO] The Billboard Dataset was successfully preprocessed.\")\n return np.array(prep_data), np.array(prep_targets)",
"def sample_run(df, n_epochs = 10, window_size = 500, com = 12, p_anoms = .5):\n import numpy as np\n\n # create arrays that will hold the results of batch AD (y_true) and online AD (y_pred)\n y_true = []\n y_pred = []\n run_times = []\n \n # check which unique machines, sensors, and timestamps we have in the dataset\n machineIDs = df['machineID'].unique()\n sensors = df.columns[2:]\n timestamps = df['datetime'].unique()[window_size:]\n \n # sample n_machines_test random machines and sensors \n random_machines = np.random.choice(machineIDs, n_epochs)\n random_sensors = np.random.choice(sensors, n_epochs)\n\n # we intialize an array with that will later hold a sample of timetamps\n random_timestamps = np.random.choice(timestamps, n_epochs)\n \n for i in range(0, n_epochs):\n # take a slice of the dataframe that only contains the measures of one random machine\n df_s = df[df['machineID'] == random_machines[i]]\n \n # smooth the values of one random sensor, using our run_avg function\n smooth_values = run_avg(df_s[random_sensors[i]].values, com)\n \n # create a data frame with two columns: timestamp, and smoothed values\n df_smooth = pd.DataFrame(data={'timestamp': df_s['datetime'].values, 'value': smooth_values})\n\n # load the results of batch AD for this machine and sensor\n anoms_s = anoms_batch[((anoms_batch['machineID'] == random_machines[i]) & (anoms_batch['errorID'] == random_sensors[i]))]\n \n # find the location of the t'th random timestamp in the data frame\n if np.random.random() < p_anoms:\n anoms_timestamps = anoms_s['datetime'].values\n np.random.shuffle(anoms_timestamps)\n counter = 0\n while anoms_timestamps[0] < timestamps[0]:\n if counter > 100:\n return 0.0, 9999.0\n np.random.shuffle(anoms_timestamps)\n counter += 1\n random_timestamps[i] = anoms_timestamps[0]\n \n # select the test case\n test_case = df_smooth[df_smooth['timestamp'] == random_timestamps[i]]\n test_case_index = test_case.index.values[0]\n\n\n # check whether the batch AD found an anomaly at that time stamps and copy into y_true at idx\n y_true_i = random_timestamps[i] in anoms_s['datetime'].values\n\n # perform online AD, and write result to y_pred\n y_pred_i, run_times_i = detect_ts_online(df_smooth, window_size, test_case_index)\n \n y_true.append(y_true_i)\n y_pred.append(y_pred_i)\n run_times.append(run_times_i)\n \n return fbeta_score(y_true, y_pred, beta=2), np.mean(run_times)",
"def reduce_sample_size(data,classes,times=2):\n data=data[range(0,data.shape[0],times)]\n classes=classes[range(0,classes.shape[0],times)]\n return data,classes",
"def calculate_mixture_features(data_type):\n workspace = config.workspace\n data_dir = config.data_dir\n speech_dir = os.path.join(data_dir,'{}_speech'.format(data_type))\n noise_dir = os.path.join(data_dir,'{}_noise'.format(data_type)) \n \n fs = config.sample_rate\n \n if data_type == 'train':\n snr = config.Tr_SNR\n elif data_type == 'test':\n snr = config.Te_SNR \n else:\n raise Exception(\"data_type must be train | test!\")\n \n \n # Open mixture csv. \n mixture_csv_path = os.path.join(workspace, \"mixture_csvs\", \"%s.csv\" % data_type)\n with open(mixture_csv_path, 'r') as f:\n reader = csv.reader(f, delimiter='\\t')\n lis = list(reader)\n \n t1 = time.time()\n cnt = 0\n for i1 in range(1, len(lis)):\n [speech_na, noise_na, noise_onset, noise_offset] = lis[i1]\n noise_onset = int(noise_onset)\n noise_offset = int(noise_offset)\n \n # Read speech audio. \n speech_path = os.path.join(speech_dir, speech_na)\n (speech_audio, _) = read_audio(speech_path, target_fs=fs)\n \n # Read noise audio. \n noise_path = os.path.join(noise_dir, noise_na)\n (noise_audio, _) = read_audio(noise_path, target_fs=fs)\n \n # Repeat noise to the same length as speech. \n if len(noise_audio) < len(speech_audio):\n n_repeat = int(np.ceil(float(len(speech_audio)) / float(len(noise_audio))))\n noise_audio_ex = np.tile(noise_audio, n_repeat)\n noise_audio = noise_audio_ex[0 : len(speech_audio)]\n # Truncate noise to the same length as speech. \n else:\n noise_audio = noise_audio[noise_onset : noise_offset]\n \n # Scale speech to given snr. \n scaler = get_amplitude_scaling_factor(speech_audio, noise_audio, snr=snr)\n speech_audio *= scaler\n \n # Get normalized mixture, speech, noise. \n (mixed_audio, speech_audio, noise_audio, alpha) = additive_mixing(speech_audio, noise_audio)\n\n # Write out mixed audio. \n out_bare_na = os.path.join(\"%s.%s\" % \n (os.path.splitext(speech_na)[0], os.path.splitext(noise_na)[0]))\n out_audio_path = os.path.join(workspace, \"mixed_audios\", \"spectrogram\", \n data_type, \"%ddb\" % int(snr), \"%s.wav\" % out_bare_na)\n create_folder(os.path.dirname(out_audio_path))\n write_audio(out_audio_path, mixed_audio, fs)\n\n # Extract spectrogram. \n mixed_complx_x = calc_sp(mixed_audio, mode='complex')\n speech_x = calc_sp(speech_audio, mode='magnitude')\n noise_x = calc_sp(noise_audio, mode='magnitude')\n\n # Write out features. \n out_feat_path = os.path.join(workspace, \"features\", \"spectrogram\", \n data_type, \"%ddb\" % int(snr), \"%s.p\" % out_bare_na)\n create_folder(os.path.dirname(out_feat_path))\n data = [mixed_complx_x, speech_x, noise_x, alpha, out_bare_na]\n pickle.dump(data, open(out_feat_path, 'wb'), protocol=pickle.HIGHEST_PROTOCOL)\n \n # Print. \n if cnt % 100 == 0:\n print(cnt)\n \n cnt += 1\n\n print(\"Extracting feature time: %s\" % (time.time() - t1))",
"def make_test_data(self):\r\n\r\n \r\n\r\n print (\"Creating Test Sample:\")\r\n\r\n print (' Period, rate, reps, phases: ', self.period, self.framerate, self.nrepetitions, self.nPhases)\r\n\r\n nframes = int(self.period * self.framerate * self.nrepetitions)\r\n\r\n print (' nframes: ', nframes)\r\n\r\n if self.bkgdNoise > 0.:\r\n\r\n d = np.random.normal(size=(nframes,self.imageSize[0],self.imageSize[1]),\r\n\r\n loc=self.bkgdIntensity, scale=self.bkgdNoise).astype('float32')\r\n\r\n else:\r\n\r\n d = self.bkgdIntensity*np.ones((nframes,self.imageSize[0],self.imageSize[1])).astype('float32')\r\n\r\n \r\n\r\n ds = d.shape\r\n\r\n print (' data shape: ', ds)\r\n\r\n dx = int(ds[2]/4)\r\n\r\n xc = int(ds[2]/2)\r\n\r\n xo = [xc-dx, xc+dx]\r\n\r\n ywidth = int(ds[2]/(self.nPhases+2))\r\n\r\n framedelay = 4\r\n\r\n\r\n\r\n if not self.mode:\r\n\r\n self.phasex = []\r\n\r\n self.phasey = []\r\n\r\n for i in range(0,self.nPhases):\r\n\r\n dy = int((i+1)*ds[2]/(self.nPhases+2)) # each phase is assigned to a region\r\n\r\n self.resp = np.zeros((nframes,))\r\n\r\n self.resp = np.cos(\r\n\r\n np.linspace(0, 2.0*np.pi*nframes/(self.period*self.framerate), nframes-framedelay)+i*np.pi/8 - np.pi/2.0)\r\n\r\n self.resp = np.concatenate((np.zeros(framedelay), self.resp))\r\n\r\n d[:, xo[0]:xo[1], dy:dy+ywidth ] += self.resp[:, np.newaxis, np.newaxis]\r\n\r\n self.phasey.append( (2+(dy+int(ds[2]/self.nPhases))/2))\r\n\r\n self.phasex.append((6+int(ds[1]/2)/2)) # make the signal equivalent of digitized one (baseline 3000, signal at 1e-4 of baseline)\r\n\r\n else:\r\n\r\n self.nPhases = 4\r\n\r\n self.spotsize = 16\r\n\r\n nrpts = 20\r\n\r\n nsites = 4\r\n\r\n one_rep = int(self.period*self.framerate)\r\n\r\n isi = int(self.period*self.framerate/self.nPhases)\r\n\r\n print('period, isi: ', self.period, isi)\r\n\r\n r = np.arange(0, nrpts, 1.)\r\n\r\n alpha = 4.\r\n\r\n A = r/alpha *np.exp(-(r-alpha)/alpha) # scaled alpha function\r\n\r\n self.spot= self.gauss_spot(self.spotsize, 3.) 
# the 2d spot\r\n\r\n sigsize = np.random.normal(size=self.nPhases, loc=self.signal_size, scale=self.signal_size*2)\r\n\r\n sigsize = [np.abs(s) for s in sigsize] # restrict to positive amplitudes\r\n\r\n print ('sigsize: ', sigsize)\r\n\r\n for j in range(self.nrepetitions):\r\n\r\n for i in range(self.nPhases):\r\n\r\n self.resp = np.zeros((nrpts, self.spot.shape[0], self.spot.shape[1]))\r\n\r\n for k in range(nrpts):\r\n\r\n self.resp[k,:,:] += sigsize[i]*A[k] * self.spot # make response an alpha time course of gaussian spot\r\n\r\n start = j*one_rep + i*isi + framedelay\r\n\r\n stop = start + nrpts\r\n\r\n dy = int((i+1)*ds[2]/(self.nPhases+2)) # location for phase\r\n\r\n #dy = dy + 2*z\r\n\r\n# print ('start, stop: ', start, stop)\r\n\r\n for z in range(nsites):\r\n\r\n #self.resp = np.concatenate((np.zeros(framedelay), self.resp))\r\n\r\n xp = xo[0] + i*10 - 10*z\r\n\r\n yp = dy - i*10 + 10*z\r\n\r\n d[start:stop, xp:xp+self.spotsize, yp:yp+self.spotsize ] += self.resp\r\n\r\n self.imageData = d # reduce to a 16-bit map to match camera data type\r\n\r\n self.nFrames = self.imageData.shape[0]\r\n\r\n self.times = np.arange(0, nframes/self.framerate, 1.0/self.framerate)\r\n\r\n print( \" Test Image Created\")\r\n\r\n # imv = pg.ImageView()\r\n\r\n # imv.show()\r\n\r\n # imv.setImage(self.imageData)\r\n\r\n\r\n\r\n if self.layout is not None:\r\n\r\n self.layout.addWidget(imv, 0, 0)\r\n\r\n\r\n\r\n avgImage = np.mean(self.imageData, axis=0)\r\n\r\n ima = pg.ImageView()\r\n\r\n ima.setImage(avgImage)\r\n\r\n self.layout.addWidget(ima, 0, 1)\r\n\r\n self.adjust_image_data()\r\n\r\n self.avgimg = np.mean(self.imageData, axis=0) # get mean image for reference later: average across all time\r\n\r\n print (' Test file, original Image Info: ')\r\n\r\n self.print_image_info()\r\n\r\n self.rebin_image()\r\n\r\n #self.clean_windowerrors()\r\n\r\n # pg.image(self.imageData)\r\n\r\n # pg.show()\r\n\r\n # mpl.figure(1)\r\n\r\n # mpl.show()\r\n\r\n if not self.mode: # FFT analysis\r\n\r\n self.analysis_fourier_map(target=1, mode=0)\r\n\r\n self.plot_maps(mode=2, gfilter=self.gfilter)\r\n\r\n else:\r\n\r\n self.analysis_dFF_map()\r\n\r\n mpl.show()",
"def collect_samples(self):\n # TODO refactor this to not to duplicate collect from DDPG\n # - not so easy due to logger :(\n collected = 0\n while collected < self.acm_pre_train_samples:\n obs = self.env.reset()\n end = False\n obs = torch.tensor(obs, dtype=torch.float32, device=self.device)\n obs = obs.unsqueeze(0)\n\n prev_idx = self.replay_buffer.add_obs(obs)\n ep_len = 0\n\n while not end:\n acm_action = AcMTrainer.initial_act(self, obs)\n self.replay_buffer.add_acm_action(acm_action)\n obs, rew, done, _ = self.env.step(acm_action)\n ep_len += 1\n\n obs = torch.tensor(obs, dtype=torch.float32, device=self.device)\n obs = obs.unsqueeze(0)\n\n end = done\n done = False if ep_len == self.max_ep_len else done\n\n next_idx = self.replay_buffer.add_obs(obs)\n self.replay_buffer.add_timestep(prev_idx, next_idx, obs, rew, done, end)\n prev_idx = next_idx\n collected += 1",
"def pack_features(data_type):\n workspace = config.workspace\n\n if data_type == 'train':\n snr = config.Tr_SNR\n elif data_type == 'test':\n snr = config.Te_SNR \n else:\n raise Exception(\"data_type must be train | test!\")\n \n n_concat = config.n_concat\n n_hop = config.n_hop\n \n x_all = [] # (n_segs, n_concat, n_freq)\n y_all = [] # (n_segs, n_freq)\n \n cnt = 0\n t1 = time.time()\n \n # Load all features. \n feat_dir = os.path.join(workspace, \"features\", \"spectrogram\", data_type, \"%ddb\" % int(snr))\n names = os.listdir(feat_dir)\n for na in names:\n # Load feature. \n feat_path = os.path.join(feat_dir, na)\n data = pickle.load(open(feat_path, 'rb'))\n [mixed_complx_x, speech_x, noise_x, alpha, na] = data\n mixed_x = np.abs(mixed_complx_x)\n\n # Pad start and finish of the spectrogram with boarder values. \n n_pad = int((n_concat - 1) / 2)\n mixed_x = pad_with_border(mixed_x, n_pad)\n speech_x = pad_with_border(speech_x, n_pad)\n \n # Cut input spectrogram to 3D segments with n_concat. \n mixed_x_3d = mat_2d_to_3d(mixed_x, agg_num=n_concat, hop=n_hop)\n x_all.append(mixed_x_3d)\n \n # Cut target spectrogram and take the center frame of each 3D segment. \n speech_x_3d = mat_2d_to_3d(speech_x, agg_num=n_concat, hop=n_hop)\n y = speech_x_3d[:, int((n_concat-1)/2), :]\n y_all.append(y)\n \n # Print. \n if cnt % 100 == 0:\n print(cnt)\n \n # if cnt == 3: break\n cnt += 1\n \n x_all = np.concatenate(x_all, axis=0) # (n_segs, n_concat, n_freq)\n y_all = np.concatenate(y_all, axis=0) # (n_segs, n_freq)\n \n x_all = log_sp(x_all).astype(np.float32)\n y_all = log_sp(y_all).astype(np.float32)\n \n # Write out data to .h5 file. \n out_path = os.path.join(workspace, \"packed_features\", \"spectrogram\", data_type, \"%ddb\" % int(snr), \"data.h5\")\n create_folder(os.path.dirname(out_path))\n with h5py.File(out_path, 'w') as hf:\n hf.create_dataset('x', data=x_all)\n hf.create_dataset('y', data=y_all)\n \n print(\"Write out to %s\" % out_path)\n print(\"Pack features finished! %s s\" % (time.time() - t1,))",
"def turn_to_intermediate_data(data, data_point_size, batch_size, num_steps, hop_step):\n samples = 0 # Number of samples of interpolating\n \n #counters = [Counter() for _ in xrange(num_labels)]\n\n sample_counter = 0\n for session_data in data:\n \n # This should be the correct number of sample for each session\n # But it could be different with the number of events in the session\n # There is some difference in the way events in session is created\n # For example, when create and annotate a session having frame from 0 to 79\n # I actually create events [0,20] to [60,80] so the right hand side brace should be \n # [0,20) -> Excluding last frame \n correct_no_samples = ( len(session_data[SESSION_DATA]) - num_steps ) // hop_step + 1\n# print ('session name = %s' % session_data[SESSION_NAME])\n# print ('len %d ' % len(session_data[SESSION_DATA]))\n# print ('correct %d ' % correct_no_samples)\n \n if correct_no_samples != len(session_data[SESSION_EVENTS]):\n # A step to find session that has problem to fix\n print (session_data[SESSION_NAME])\n print (\"correct_no_samples \" + str(correct_no_samples))\n print (\"session_data_events \" + str(len(session_data[SESSION_EVENTS])))\n \n print (\"=========================PROBLEMATIC========================\")\n else:\n samples += len(session_data[SESSION_EVENTS])\n \n print('Total number of samples' + str(samples))\n \n interpolated_data = np.zeros([samples * num_steps, data_point_size], dtype=np.float32)\n interpolated_lbls = np.zeros([samples, num_labels], dtype=np.int32)\n # Use a string of maximum 16 characters to store some info about a data sample \n interpolated_info = np.zeros([samples], dtype='|S16')\n \n for session_data in data:\n session_data_vals = session_data[SESSION_DATA]\n session_data_events = session_data[SESSION_EVENTS]\n \n correct_no_samples = ( len(session_data_vals) - num_steps ) // hop_step + 1\n if correct_no_samples == len(session_data_events):\n for i in range(len(session_data_events)):\n for j in range(num_steps):\n interpolated_data[( ( sample_counter + i ) * num_steps + j)] =\\\n session_data_vals[i * hop_step + j]\n\n event_labels = session_data[SESSION_EVENTS][i]['label']\n \n interpolated_lbls[sample_counter + i] = list(event_labels)\n\n interpolated_info[sample_counter + i] = session_data[SESSION_NAME] + '_' + str(i)\n \n sample_counter += len(session_data_events)\n \n # Number of epoch, each epoch has a batch_size of data \n epoch_size = samples // batch_size\n \n # Divide the first dimension from samples * num_steps -> (samples, num_steps)\n rearranged_data = interpolated_data.reshape((samples, num_steps, data_point_size))\n # Divide first dimenstion from samples -> epoch_size * batch_size (remove remaining)\n rearranged_data = rearranged_data[:epoch_size * batch_size].\\\n reshape((epoch_size, batch_size, num_steps, data_point_size))\n \n rearranged_lbls = interpolated_lbls[:epoch_size * batch_size].\\\n reshape((epoch_size, batch_size, num_labels))\n\n rearranged_info = interpolated_info[:epoch_size * batch_size].\\\n reshape((epoch_size, batch_size))\n \n return (rearranged_data, rearranged_lbls, rearranged_info)",
"def generate_samples(self, n_samples):",
"def generate_samples(self, n_samples):",
"def get_preprocessed_dataset(self, hop_length=512, norm_to_C=False, spectrogram_generator=log_mel_spectrogram, n_frames=500, separately = True) -> tuple:\n FEATURESs = []\n CHORDs = self.CHORDS\n TIME_BINSs = []\n KEYs = []\n k = 0\n separate_data, separate_targets = [], []\n for data, desc, chords in zip(self.DATA, self.DESC, self.CHORDS):\n print(k)\n if separately:\n if isinstance(data, BillboardFeatures):\n features, time_bins = data.CHROMA, data.TIME_BINS\n elif isinstance(data, Audio):\n features = IsophonicsDataset.preprocess_audio(waveform=data.WAVEFORM, sample_rate=data.SAMPLE_RATE, spectrogram_generator=spectrogram_generator, nfft=self.NFFT, hop_length=hop_length, norm_to_C=norm_to_C, key=desc.TONIC).swapaxes(0,1)\n num_samples, _ = features.shape\n time_bins = [float(i)/(float(self.SAMPLE_RATE) / float(hop_length)) for i in range(num_samples)]\n prep_data, prep_targets = Dataset.songs_to_sequences(FEATURESs=[features], CHORDs=[chords], TIME_BINSs=[time_bins], KEYs=[desc.TONIC], n_frames=n_frames, norm_to_C=norm_to_C)\n separate_data.append(prep_data)\n separate_targets.append(prep_targets)\n else:\n if isinstance(data, BillboardFeatures):\n FEATURESs.append(data.CHROMA)\n TIME_BINSs.append(data.TIME_BINS)\n elif isinstance(data, Audio):\n FEATURESs.append((IsophonicsDataset.preprocess_audio(waveform=data.WAVEFORM, sample_rate=data.SAMPLE_RATE, spectrogram_generator=spectrogram_generator, nfft=self.NFFT, hop_length=hop_length, norm_to_C=norm_to_C, key=desc.TONIC).swapaxes(0,1)))\n num_samples, _ = FEATURESs[-1].shape\n TIME_BINSs.append([float(i)/(float(self.SAMPLE_RATE) / float(hop_length)) for i in range(num_samples)])\n KEYs.append(desc.TONIC)\n k = k + 1\n if separately:\n return separate_data, separate_targets\n else:\n return Dataset.songs_to_sequences(FEATURESs=FEATURESs, CHORDs=CHORDs, TIME_BINSs=TIME_BINSs, KEYs=KEYs, n_frames=n_frames, norm_to_C=norm_to_C)",
"def __iter__(self):\n\n batch_sp = []\n batch_noise = []\n batch_mix = []\n batch_count = 0\n\n while True:\n\n # Randomizing wav lists\n random.shuffle(self._lst_spk_files)\n random.shuffle(self._lst_noise_files)\n\n for spk_file, noise_file in zip(self._lst_spk_files, self._lst_noise_files):\n\n # Read wav files\n sig_spk, rate = self.__read_wav_file(spk_file)\n sig_noise, _ = self.__read_wav_file(noise_file)\n\n # Skip silence file\n if np.mean(sig_spk ** 2) < self.energy_silence_threshold or \\\n np.mean(sig_noise ** 2) < self.energy_silence_threshold:\n continue\n\n # Apply reverberations\n if self._enable_rir:\n rev_prob = np.random.uniform(0, 1) < self._rir_prob\n if rev_prob:\n filter_num = random.randint(0, self._rir_filters_num - 1)\n\n filter_sp_name = self.RIR_PREF + str(filter_num) + self.RIR_SP_SUF\n filter_n_name = self.RIR_PREF + str(filter_num) + self.RIR_N_SUF\n\n sig_spk = reverb_util.reverb_matlab(sig_spk, rate, os.path.join(self._rir_dir, filter_sp_name))\n sig_noise = reverb_util.reverb_matlab(sig_noise, rate, os.path.join(self._rir_dir, filter_n_name))\n\n # Align signal\n min_length = min(sig_spk.shape[0], sig_noise.shape[0])\n spk_length = sig_spk.shape[0]\n noise_length = sig_noise.shape[0]\n\n if min_length < self._fftsize:\n raise Exception(\"ERROR: Too short signals in dataset\")\n\n if spk_length > min_length:\n start_ind = random.randint(0, spk_length - min_length)\n sig_spk = sig_spk[start_ind:start_ind + min_length]\n elif noise_length > min_length:\n start_ind = random.randint(0, noise_length - min_length)\n sig_noise = sig_noise[start_ind:start_ind + min_length]\n\n # Generate need SNR\n need_snr = random.uniform(self._min_snr, self._max_snr)\n\n # Calc scaled signals\n sig_spk, sig_noise = self.__mix_with_snr(sig_spk, sig_noise, need_snr)\n\n # Normalization\n norm_const = np.max([np.max(np.abs(sig_spk)), np.max(np.abs(sig_noise))])\n sig_spk /= norm_const\n sig_noise /= norm_const\n\n # Calc STFT\n stft_spk = stft(sig_spk, fftsize=self._fftsize, overlap=self._overlap)\n stft_noise = stft(sig_noise, fftsize=self._fftsize, overlap=self._overlap)\n stft_mix = stft_spk + stft_noise\n\n # Skip small segments\n frames, bin = stft_mix.shape\n if frames <= self._context_size:\n continue\n\n # Collect batch\n i = 0\n while i + self._context_size < frames:\n\n batch_sp.append(stft_spk[i:i + self._context_size, :])\n batch_noise.append(stft_noise[i:i + self._context_size, :])\n batch_mix.append(stft_mix[i:i + self._context_size, :])\n\n i += self._context_size // 2\n batch_count += 1\n\n if batch_count == self._batch_size:\n sp = np.array(batch_sp).reshape((self._batch_size,\n self._context_size, -1))\n noise = np.array(batch_noise).reshape((self._batch_size,\n self._context_size, -1))\n mix = np.array(batch_mix).reshape((self._batch_size,\n self._context_size, -1))\n yield sp, noise, mix\n\n batch_sp = []\n batch_noise = []\n batch_mix = []\n batch_count = 0",
"def three_experiments_with_trials(family_with_trials, single_with_trials):",
"def calc_granger_actual(self):\n if not hasattr(self, 'input_data'):\n self.preprocess_and_check_stationarity()\n # input_data shape = (n_timepoints, n_trials, n_channels)\n # Calculate as many bootstrapped samples as n_shuffles\n trial_inds = np.random.randint(\n 0, self.input_data.shape[1],\n (self.n_shuffles, self.input_data.shape[1]))\n temp_dat = [self.input_data[:, trial_inds[i]]\n for i in trange(self.n_shuffles)]\n outs_temp = parallelize(self.calc_granger, temp_dat, n_jobs=30)\n time_vec = outs_temp[0][1].time\n freq_vec = outs_temp[0][1].frequencies\n outs_temp = [x[0] for x in outs_temp]\n self.granger_actual = np.array(outs_temp)\n self.time_vec = time_vec\n self.freq_vec = freq_vec\n #self.granger_actual, self.c_actual = \\\n # self.calc_granger(self.input_data)",
"def back_test(self, turnover_frequency):"
] | [
"0.57132494",
"0.5595162",
"0.5551956",
"0.5525474",
"0.5525352",
"0.55149806",
"0.5493697",
"0.54556054",
"0.5432164",
"0.5368782",
"0.53298104",
"0.53258",
"0.53194267",
"0.53181934",
"0.5303471",
"0.5290147",
"0.52515495",
"0.5232192",
"0.52276266",
"0.52183676",
"0.52091897",
"0.5202513",
"0.5170894",
"0.51703525",
"0.51703525",
"0.5165582",
"0.516265",
"0.5158803",
"0.5158464",
"0.51353294"
] | 0.68344384 | 0 |
Calculate bootstrapped actual granger causality to allow for estimation of error | def calc_granger_actual(self):
if not hasattr(self, 'input_data'):
self.preprocess_and_check_stationarity()
# input_data shape = (n_timepoints, n_trials, n_channels)
# Calculate as many bootstrapped samples as n_shuffles
trial_inds = np.random.randint(
0, self.input_data.shape[1],
(self.n_shuffles, self.input_data.shape[1]))
temp_dat = [self.input_data[:, trial_inds[i]]
for i in trange(self.n_shuffles)]
outs_temp = parallelize(self.calc_granger, temp_dat, n_jobs=30)
time_vec = outs_temp[0][1].time
freq_vec = outs_temp[0][1].frequencies
outs_temp = [x[0] for x in outs_temp]
self.granger_actual = np.array(outs_temp)
self.time_vec = time_vec
self.freq_vec = freq_vec
#self.granger_actual, self.c_actual = \
# self.calc_granger(self.input_data) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def binom_bca_bootstrap_err(k, n, B=10000, CL=[0.025, 0.975], acceleration=True, return_full=False):\n theta_MLE = k/n\n k_i = bootstrap_sample_binomial(k, n, B)\n\n # Bootstrap estimates of the parameter\n theta_i = k_i / n\n theta0_star = np.sum(theta_i) / B\n print(f'theta_MLE = {theta_MLE}, theta0_star = {theta0_star}')\n\n # -------------------------------------------------\n # Original binomial sample created as a vector\n x = np.zeros(n) \n x[0:k] = 1 # 1 == success\n\n # We are interested in the mean\n def t_func(x):\n return np.sum(x) / n\n\n # Jackknife the sample\n mu,sigma,d = jackknife_1D(x, t_func)\n print(f'mu = {mu}, sigma = {sigma}')\n\n # Calculate acceleration\n if acceleration:\n a = bootstrap_acceleration(d)\n else:\n a = 0\n print(f'a = {a}')\n # -------------------------------------------------\n\n # Empirical CDF\n xs,ys = ecdf(theta_i)\n \n # Construct CDF and inverse CDF\n G_cdf = lambda x : interp1d(xs, ys, kind='nearest', fill_value='extrapolate')(x)\n G_invcdf = lambda y : interp1d(ys, xs, kind='nearest', fill_value='extrapolate')(y)\n\n # z0 = \\Phi^{-1} \\hat{G}( \\hat{\\theta} )\n z0 = norm.ppf( G_cdf( theta_MLE ) )\n print(f'z0 = {z0}')\n \n # BCA interval estimates\n interval = np.zeros(len(CL))\n for i in range(len(CL)):\n z_alpha = norm.ppf(CL[i])\n interval[i] = G_invcdf( norm.cdf(z0 + (z0 + z_alpha)/(1 - a*(z0 + z_alpha))) )\n\n if return_full == True:\n return interval,d,a,k_i\n else:\n return interval",
"def get_total_BMA_effect_size(self):\n \n if self.total_bma_es is None:\n # clean up these long expressions on Isle 2\n log_evidences = [self.results[kernel].summary(b=self.b)['evidence']['md'] \n for kernel in self.kernel_dict.keys()] + \\\n [self.results[kernel].summary(b=self.b)['evidence']['mc'] \n for kernel in self.kernel_dict.keys()]\n \n M = len(log_evidences)\n Z = logSumExp(log_evidences)\n evidences = np.exp(log_evidences - Z)\n disc_stats = [self.results[kernel].summary(b=self.b)['es_disc_stats'] \n for kernel in self.kernel_dict.keys()]\n nsamples = 50000\n samples = list() \n for i in range(int(M/2)):\n samples += list(np.random.normal(loc=disc_stats[i][0], \n scale=disc_stats[i][1], \n size=int(nsamples*evidences[i])))\n samples += list(np.zeros(nsamples - len(samples)))\n \n if np.sum(np.abs(samples))==0:\n xrange = np.linspace(-2, 2, 500)\n ix = np.argmin((xrange-self.b)**2)\n es_bma = np.zeros((500))\n es_bma[ix] = 1.0/ (xrange[1] - xrange[0])\n else: \n kde_fit = stats.gaussian_kde(samples, bw_method='silverman')\n xrange = np.linspace(np.min(samples), np.max(samples), 500)\n es_bma = kde_fit(xrange)\n self.total_bma_es = np.sum(xrange*es_bma) * (xrange[1]-xrange[0])\n self.total_bma_pdf = (xrange, es_bma)\n return self.total_bma_es",
"def binom_ratio_bca_bootstrap_err(k1, n1, k2, n2, B=1000, CL=[0.025, 0.975], acceleration=True, return_full=False):\n theta_MLE = (k1/n1) / (k2/n2)\n\n # --------------------------------------------------\n interval,d1,a1,k1_i = binom_bca_bootstrap_err(k1, n1, B, CL=CL, return_full=True)\n interval,d2,a2,k2_i = binom_bca_bootstrap_err(k2, n2, B, CL=CL, return_full=True)\n\n # Bootstrap estimates of the parameter\n theta_i = (k1_i/n1) / (k2_i/n2)\n\n nn = np.min([len(d1),len(d2)])\n \n # Calculate acceleration\n if acceleration:\n a = bootstrap_acceleration(d1[0:nn] / d2[0:nn])\n else:\n a = 0\n print(f'a = {a}')\n # --------------------------------------------------\n\n # Empirical CDF\n xs,ys = ecdf(theta_i)\n \n # Construct CDF and inverse CDF\n G_cdf = lambda x : interp1d(xs, ys, kind='nearest', fill_value='extrapolate')(x)\n G_invcdf = lambda y : interp1d(ys, xs, kind='nearest', fill_value='extrapolate')(y)\n\n # z0 = \\Phi^{-1} \\hat{G}( \\hat{\\theta} )\n z0 = norm.ppf( G_cdf( theta_MLE ) )\n print(f'z0 = {z0}')\n\n # BCA interval estimates\n interval = np.zeros(len(CL))\n for i in range(len(CL)):\n z_alpha = norm.ppf(CL[i])\n interval[i] = G_invcdf( norm.cdf(z0 + (z0 + z_alpha)/(1 - a*(z0 + z_alpha))) )\n\n if return_full == True:\n return interval, d1,a1,k1_i, d2,a2,k2_i\n else:\n return interval",
"def compute_random_baseline(self, classes):\n\n # based on the class distribution of the data\n sum_benefit = 0\n\n # c is the actual label\n # if the label in y is unseen when training, skip it, don't include it in the error\n for i, c in enumerate(self.y_chunk):\n for j, cprime in enumerate(classes):\n\n # (1) compute the benefit matrix\n benefit_c_cprime = 0\n if cprime == self.fraud_label:\n benefit_c_cprime = self.X_chunk[i][-1] - self.cost if c == self.fraud_label else -self.cost\n\n # (2) get the probability\n probab_ic = 1 / len(classes)\n sum_benefit += probab_ic * benefit_c_cprime\n\n return sum_benefit",
"def Bates_Granger_2(df_train, df_test, nu=None):\n\n # number of individual forecasts and number of periods\n K = df_test.shape[1]\n T = df_train.shape[0]\n\n # the default length of the relevant window is equal to sample length\n if nu is None:\n nu = T\n\n if nu > T:\n raise ValueError('Parameter nu must be <= length of training sample')\n\n # check whether there is enough observations, so sigma is invertible\n if nu < K:\n raise ValueError('Parameter nu must be >= no. of individual forecasts')\n\n # forecast errors\n errors = df_train.iloc[:, 1:].subtract(df_train.iloc[:, 0], axis=0)\n\n # initialize the covariance matrix sigma\n sigma = np.full((K, K), fill_value=0, dtype=float)\n\n # fill the covariance matrix sigma\n for i in range(K):\n\n for j in range(K):\n\n sigma[i, j] = np.dot(errors.iloc[errors.shape[0]-nu:, i],\n errors.iloc[errors.shape[0]-nu:, j]) / nu\n\n # combining weights\n nominator = np.linalg.solve(sigma, np.full(K, fill_value=1))\n denominator = np.dot(np.full(K, fill_value=1), nominator)\n comb_w = nominator / denominator\n\n # censoring the combining weights\n for i in range(K):\n if comb_w[i] < 0:\n comb_w[i] = 0\n if comb_w[i] > 1:\n comb_w[i] = 1\n\n # rescale the weights so that their sum equals 1\n comb_w = comb_w/comb_w.sum()\n\n # predictions\n df_pred = pd.DataFrame({\"Bates-Granger (2)\": df_test.dot(comb_w)})\n\n return df_pred",
"def Bates_Granger_5(df_train, df_test, W):\n\n # number of individual forecasts and number of periods\n K = df_test.shape[1]\n T = df_train.shape[0]\n\n # check whether there is enough observations, so sigma is invertible\n if K > T:\n raise ValueError('No. forecasts must be <= length of training sample')\n\n # forecast errors\n errors = df_train.iloc[:, 1:].subtract(df_train.iloc[:, 0], axis=0)\n\n # initialize the covariance matrix sigma\n sigma = np.full((K, K), fill_value=0, dtype=float)\n\n # exponential error weights\n error_w = np.full(T, fill_value=W, dtype=float)**(np.arange(T)+1)\n error_w_sum = error_w.sum()\n\n # fill the covariance matrix sigma\n for i in range(K):\n\n for j in range(K):\n\n # elements in sigma matrix are weighted with W\n sigma[i, j] = np.dot(error_w*errors.iloc[:, i],\n errors.iloc[:, j]) / error_w_sum\n\n # combining weights\n nominator = np.linalg.solve(sigma, np.full(K, fill_value=1))\n denominator = np.dot(np.full(K, fill_value=1), nominator)\n comb_w = nominator / denominator\n\n # censoring the combining weights\n for i in range(K):\n if comb_w[i] < 0:\n comb_w[i] = 0\n if comb_w[i] > 1:\n comb_w[i] = 1\n\n # rescale the weights so that their sum equals 1\n comb_w = comb_w/comb_w.sum()\n\n # predictions\n df_pred = pd.DataFrame({\"Bates-Granger (5)\": df_test.dot(comb_w)})\n\n return df_pred",
"def get_bootstrap_CI(self, alpha, num_samples):\n return None",
"def get_bootstrap_CI(self, alpha, num_samples):\n return None",
"def get_bootstrap_CI(self, alpha, num_samples):\n\n # set random number generator seed\n numpy.random.seed(1)\n\n # initialize delta array\n delta = numpy.zeros(num_samples)\n\n # obtain bootstrap samples\n for i in range(num_samples):\n sample_i = numpy.random.choice(self._data, size=self._n, replace=True)\n delta[i] = sample_i.mean() - self.get_mean()\n\n # return [l, u]\n return self.get_mean() - numpy.percentile(delta, [100*(1-alpha / 2.0), 100*alpha / 2.0])",
"def eg_bootmu():\n\n a = []\n b = []\n\n for _ in range(100):\n a.append(utils.gaussian(10, 1))\n\n print(\"\", \"mu\", \"sd\", \"cliffs\", \"boot\", \"both\", sep=\"\\t\")\n print(\"\", \"--\", \"--\", \"------\", \"----\", \"----\", sep=\"\\t\")\n\n for mu in range(100, 111):\n b = []\n\n for _ in range(100):\n b.append(utils.gaussian(mu / 10, 1))\n\n cl = utils.cliffsDelta(a, b)\n bs = stats.bootstrap(a, b)\n\n print(\"\", mu / 10, 1, cl, bs, cl and bs, sep=\"\\t\")",
"def choice_C(data, L_plus, delta_min, wdw_length, scale, start=1, stop=10,\n step=1, delay=0, L_minus=None, k=None, n=36000, n_series=500, \n epsilon=0.001, block_length=None, BB_method='MBB', confusion=False, \n verbose=True): \n assert BB_method in ['MBB', 'NBB', 'CBB', 'MABB'], \"Undefined block bootstrap procedure\"\n if BB_method == 'MBB': \n blocks = bb.MBB(data, block_length) \n elif BB_method == 'NBB':\n blocks = bb.NBB(data, block_length) \n elif BB_method == 'CBB':\n blocks = bb.CBB(data, block_length) \n \n if 'blocks' in locals():\n n_blocks = int(np.ceil(n_series/blocks.shape[1]))\n \n wdw_length = int(np.ceil(wdw_length)) #should be integer\n \n delay = int(delay)\n n = int(n)\n assert n > 0, \"n must be strictly positive\"\n if n % 3 == 2: #n should be multiple of 3\n n += 1\n if n % 3 == 1: \n n += 2\n \n if L_minus is None:\n L_minus = -L_plus\n if k is None:\n k = delta_min/2\n \n sign = 1\n n_test = int(n/5) #n testing instances\n n_train = n - n_test #n training instances\n \n n_C = int(np.ceil((stop-start)/step))\n MAPE = np.zeros((n_C)); MSE = np.zeros((n_C)); accuracy = np.zeros((n_C))\n count = 0\n C_values = np.arange(start, stop, step)\n for C in np.arange(start, stop, step):\n \n ### training\n input_train = np.zeros((n_train, wdw_length))\n size_train = np.zeros((n_train))\n form_train = np.zeros((n_train))\n rnd = halfnorm(scale=scale).rvs(size=n_train) + delta_min #size of shifts\n delay_rnd = 0\n for b in range(0, n_train-2, 3):\n \n shift = rnd[b]*sign\n if BB_method == 'MABB': \n series = bb.resample_MatchedBB(data, block_length, n=n_series)\n else:\n series = resample(blocks, replace=True, n_samples=n_blocks).flatten()[:n_series]\n \n #simulate a random delay\n if delay > 0 :\n delay_rnd = np.random.randint(delay) \n\n \n for rnd_form in range(3):\n \n boot = np.copy(series)\n \n if rnd_form == 0: \n boot[wdw_length:] = boot[wdw_length:] + shift\n form_train[b] = 0\n elif rnd_form == 1:\n power = np.random.uniform(1.5,2)\n boot = shift/(n_series) * (np.arange(0,n_series)**power) + boot\n form_train[b] = 1\n else:\n eta = np.random.uniform(np.pi/(wdw_length), 3*np.pi/wdw_length)\n boot = np.sin(eta*np.pi*np.arange(n_series))*shift*boot\n form_train[b] = 2\n \n size_train[b] = shift\n \n input_plus = boot[wdw_length:wdw_length*2]\n C_plus = np.zeros((n_series, 1))\n for i in range(wdw_length + delay_rnd, n_series): #start the monitoring after random delay \n C_plus[i] = max(0, C_plus[i-1] + boot[i] - k)\n if C_plus[i] > L_plus:\n input_plus = boot[i+1-wdw_length:i+1] \n break \n \n input_minus = boot[wdw_length:wdw_length*2]\n C_minus = np.zeros((n_series, 1)) \n for j in range(wdw_length + delay_rnd, n_series):\n C_minus[j] = min(0, C_minus[j-1] + boot[j] + k)\n if C_minus[j] < L_minus:\n input_minus = boot[j+1-wdw_length:j+1] \n break\n \n if i > j: #save first alert recorded\n input_train[b,:] = input_minus\n else:\n input_train[b,:] = input_plus\n \n b += 1\n sign = -sign\n \n ### train the models\n regressor = SVR(C=C, epsilon=epsilon)\n regressor.fit(input_train, size_train)\n clf = svm.SVC(C=C)\n clf.fit(input_train, form_train)\n \n ###testing \n input_test = np.zeros((n_test, wdw_length))\n label_test = np.zeros((n_test))\n form_test = np.zeros((n_test))\n rnd = halfnorm(scale=scale).rvs(size=n_test) + delta_min\n delay_rnd = 0\n for b in range(0, n_test-2, 3):\n \n shift = rnd[b]*sign\n if BB_method == 'MABB': \n series = bb.resample_MatchedBB(data, block_length, n=n_series)\n else:\n series = resample(blocks, replace=True, 
n_samples=n_blocks).flatten()[:n_series]\n \n #simulate a random delay\n if delay > 0 :\n delay_rnd = np.random.randint(delay) \n \n for rnd_form in range(3):\n \n boot = np.copy(series)\n \n if rnd_form == 0:\n boot[wdw_length:] = boot[wdw_length:] + shift\n form_test[b] = 0\n elif rnd_form == 1:\n power = np.random.uniform(1.5,2)\n boot = shift/(n_series) * (np.arange(0,n_series)**power) + boot\n form_test[b] = 1\n else:\n eta = np.random.uniform(np.pi/(wdw_length), 3*np.pi/wdw_length)\n boot = np.sin(eta*np.pi*np.arange(n_series))*shift*boot\n form_test[b] = 2\n label_test[b] = shift\n \n input_plus = boot[wdw_length:wdw_length*2]\n C_plus = np.zeros((n_series, 1))\n for i in range(wdw_length + delay_rnd, n_series):\n C_plus[i] = max(0, C_plus[i-1] + boot[i] - k)\n if C_plus[i] > L_plus:\n input_plus = boot[i+1-wdw_length:i+1] \n break \n \n input_minus = boot[wdw_length:wdw_length*2]\n C_minus = np.zeros((n_series, 1)) \n for j in range(wdw_length + delay_rnd, n_series):\n C_minus[j] = min(0, C_minus[j-1] + boot[j] + k)\n if C_minus[j] < L_minus:\n input_minus = boot[j+1-wdw_length:j+1]\n break\n \n if i > j: #first alert recorded\n input_test[b,:] = input_minus\n else:\n input_test[b,:] = input_plus\n \n b += 1 \n sign = -sign\n \n ### compute accuracy and other precision measures \n label_pred = regressor.predict(input_test)\n label_pred_clf = clf.predict(input_test)\n \n #regressor\n MAPE[count] = (1/len(label_pred)) * sum(np.abs((np.abs(label_test) - np.abs(label_pred))/np.abs(label_test)))*100\n MSE[count] = (1/len(label_pred)) * sum((label_test - label_pred)**2)\n #classifier\n accuracy[count] = sum(label_pred_clf == form_test)*100 / len(label_pred_clf)\n \n ### compute the confusion matrix \n if confusion : \n class_names = ['jump', 'drift', 'oscill.']\n titles_options = [(\"Confusion matrix, without normalization\", None),\n (\"Normalized confusion matrix\", 'true')]\n for title, normalize in titles_options:\n disp = plot_confusion_matrix(clf, input_test, form_test,\n display_labels=class_names,\n cmap=plt.cm.Blues,\n normalize=normalize)\n disp.ax_.set_title(title)\n print(title)\n print(disp.confusion_matrix)\n plt.show()\n \n count += 1\n \n min_MAPE = C_values[np.argmin(MAPE)]\n min_MSE = C_values[np.argmin(MSE)] \n max_accuracy = C_values[np.argmax(accuracy)]\n \n if verbose:\n print('C value that minimizes the MAPE:', min_MAPE)\n print('C value that minimizes the MSE:', min_MSE)\n print('C value that maximizes the accuracy:', max_accuracy)\n \n return min_MAPE, min_MSE, max_accuracy",
"def get_bootstrap_CI(self, alpha, num_samples):\n raise NotImplementedError(\"This is an abstract method and needs to be implemented in derived classes.\")",
"def eg_pre():\n\n print(\"\\teg3\")\n\n d = 1\n\n for _ in range(10):\n t1 = []\n t2 = []\n\n for _ in range(32):\n t1.append(utils.gaussian(10, 1))\n t2.append(utils.gaussian(d * 10, 1))\n\n print(\"\", \"\", d, d < 1.1, stats.bootstrap(\n t1, t2), stats.bootstrap(t1, t1), sep=\"\\t\")\n\n d = round(d + .05, 2)",
"def _compute_bn(self, lvl):\n bn = [0] # number of samples crossing the left/right boundary\n for n in range(lvl):\n # 1. down-sampling of N samples by the factor scl gives (N-1)//scl + 1 samples\n # 2. bn[-1]+M-1 is the number of samples acrossing the left/right boundary, with M being the number of freqeuncies\n # => hence after the downsampling the number of boundary crossing samples is:\n bn.append((bn[-1]+self.nfreq-2)//self.scaling+1)\n bn.append(bn[-1]) # repeat the value of the coarsest scale for the approximation coefficient\n return bn[1:][::-1]",
"def bootstrap_errors(self, arr, k=100):\n val = np.zeros(k)\n\n for i in range(k):\n rand_selection = np.random.choice(arr, len(arr))\n val[i] = self.varience(rand_selection)\n sigma = np.sqrt(np.average(np.square(val))\n - np.square(np.average(val)))\n return sigma",
"def classical_value(self) -> float:\n (\n num_alice_outputs,\n num_bob_outputs,\n num_alice_inputs,\n num_bob_inputs,\n ) = self.pred_mat.shape\n\n for x_alice_in in range(num_alice_inputs):\n for y_bob_in in range(num_bob_inputs):\n self.pred_mat[:, :, x_alice_in, y_bob_in] = (\n self.prob_mat[x_alice_in, y_bob_in] * self.pred_mat[:, :, x_alice_in, y_bob_in]\n )\n p_win = float(\"-inf\")\n if num_alice_outputs**num_alice_inputs < num_bob_outputs**num_bob_inputs:\n self.pred_mat = np.transpose(self.pred_mat, (1, 0, 3, 2))\n (\n num_alice_outputs,\n num_bob_outputs,\n num_alice_inputs,\n num_bob_inputs,\n ) = self.pred_mat.shape\n self.pred_mat = np.transpose(self.pred_mat, (0, 2, 1, 3))\n\n # Paralleize for loop.\n # if num_bob_outputs ** num_bob_inputs <= 10 ** 6:\n # parallel_threads = 1\n # else:\n # parallel_threads = 5\n\n for i in range(num_alice_outputs**num_bob_inputs):\n # Convert :code:`number` to the base :code:`base` with digits :code:`digits`.\n number = i\n base = num_bob_outputs\n digits = num_bob_inputs\n b_ind = np.zeros(digits)\n for j in range(digits):\n b_ind[digits - j - 1] = np.mod(number, base)\n number = np.floor(number / base)\n pred_alice = np.zeros((num_alice_outputs, num_alice_inputs))\n\n for y_bob_in in range(num_bob_inputs):\n pred_alice = pred_alice + self.pred_mat[:, :, int(b_ind[y_bob_in]), y_bob_in]\n tgval = np.sum(np.amax(pred_alice, axis=0))\n p_win = max(p_win, tgval)\n return p_win",
"def get_KL_divergence(self):\n KL_loss = 0\n if(self.Bayesian):\n for i in range(self.num_layers):\n KL_loss += getattr(self, 'LSTMCell%i'%(i+1)).get_KL_divergence()\n \n return KL_loss",
"def Bates_Granger_3(df_train, df_test, alpha, nu=None):\n\n # number of individual forecasts and number of periods\n K = df_test.shape[1]\n T = df_train.shape[0]\n\n # the default length of the relevant window is equal to sample length\n if nu is None:\n nu = T\n\n if nu > T:\n raise ValueError('Parameter nu must be <= length of training sample')\n\n if nu < K:\n raise ValueError('Parameter nu must be >= no. of individual forecasts')\n\n # matrix of combination weights (t = 1,...,T, T+1), T+1 is for the final c.\n mat_comb_w = np.full((T+1, K), fill_value=0, dtype=float)\n\n # initialize with equal weights\n mat_comb_w[:nu, :] = np.full(K, fill_value=1 / K, dtype=float)\n\n # roll over the training period and calculate the combining weights\n for i in range(nu, T+1):\n\n # compute the weights using Bates-Granger method 1\n # forecast errors\n errors = df_train.iloc[:i, 1:].subtract(df_train.iloc[:i, 0], axis=0)\n sq_errors = errors**2\n\n # combining weights\n nominator = 1 / sq_errors.iloc[sq_errors.shape[0]-nu:, :].sum(axis=0)\n denominator = nominator.sum()\n method_1_comb_w = nominator / denominator\n\n # calculate and store the combined combining weights\n mat_comb_w[i, :] = alpha*mat_comb_w[i-1, :] + (1-alpha)*method_1_comb_w\n\n # final combining weights (weights for period T+1 = index T)\n comb_w = mat_comb_w[T, :]\n\n # predictions\n df_pred = pd.DataFrame({\"Bates-Granger (3)\": df_test.dot(comb_w)})\n\n return df_pred",
"def _ucb(x, gp, kappa):\n with warnings.catch_warnings():\n warnings.simplefilter(\"ignore\")\n mean, std = gp.predict(x, return_std=True)\n\n return mean + kappa * std",
"def _granger_causality(self):\r\n gc = dict(frequencies={}, gc_xy={}, gc_yx={}, gc_sim={},\r\n spectral_density={})\r\n for i, j in self.ij:\r\n w, f_x2y, f_y2x, f_xy, Sw = \\\r\n alg.granger_causality_xy(self.model_coef[i, j],\r\n self.error_cov[i, j],\r\n n_freqs=self._n_freqs)\r\n\r\n # All other measures are dependent on i, j:\r\n gc['gc_xy'][i, j] = f_x2y\r\n gc['gc_yx'][i, j] = f_y2x\r\n gc['gc_sim'][i, j] = f_xy\r\n gc['spectral_density'][i, j] = Sw\r\n\r\n return gc",
"def ComputeNrb(self):\r\n pass",
"def cf_profile(self):\n x = np.abs(self.gen_profile() / self.sam_sys_inputs['system_capacity'])\n return x",
"def _lcb(self, s, k):\n if self.confidence_method == 'ucb-standard':\n ucb_factor = self._ucb_standard_factor(s, k)\n return self.mu[k][s] - ucb_factor\n elif self.confidence_method == 'ucb-standard-relaxed':\n ucb_factor = self._ucb_standard_factor(s, k) + self.ucb_eps\n return self.mu[k][s] - ucb_factor\n elif self.confidence_method == 'clopper-pearson-ucb':\n return self._bernoulli_lower(self.mu[k][s], self.count[k][s],\n self.delta(k, s))\n elif self.confidence_method == 'clopper-pearson-ucb-relaxed':\n return self._bernoulli_lower(self.mu[k][s], self.count[k][s],\n self.delta(k, s)) + self.ucb_eps\n else:\n raise ValueError('Did not recognise confidence method {}'.format(\n self.confidence_method))",
"def bootstrap_acceleration(d):\n return np.sum(d**3) / np.sum(d**2)**(3.0/2.0) / 6.0",
"def computeB(linsys_setup):\n datamaps, ninvs, beams, freqs, power_2d, precond_2d, clumaps, g_nu, \\\n map_prop = linsys_setup\n nx, ny, pixScaleX, pixScaleY = map_prop\n nFreq = len(g_nu); nCluster = len(clumaps[0])\n ksz = False\n if len(clumaps)==2: ksz = True\n \n def computeCMBY(d0):\n \"\"\"\n For CMB, y = S^1/2 A N^-1 d, where S is CMB signal covariance matrix (Cl's)\n \"\"\"\n # N.B. Reshaping operations required to go between 2D pixel arrays and \n # 1D vector (for linear system)\n d2 = 0\n for freq in range(nFreq):\n d1 = d0[freq].data.copy().reshape((ny,nx))\n d1 *= ninvs[freq]\n a_l = fft.fft(d1,axes=[-2,-1])\n a_l *= beams[freq]*precond_2d\n d1 = numpy.real(fft.ifft(a_l,axes=[-2,-1],normalize=True))\n d1 = numpy.reshape(d1,(nx*ny))\n d2 += d1\n return d2\n \n def computeClusterY(d0):\n \"\"\"\n For cluster, y = F^T A^T N^-1 d, where F is TSZ spatial template for cluster.\n \"\"\"\n d2 = numpy.zeros(nCluster)\n for ic in range(nCluster):\n for freq in range(nFreq):\n d1 = d0[freq].data.copy().reshape((ny, nx))\n d2[ic] += numpy.sum(d1 * ninvs[freq] * clumaps[0][ic][freq] * g_nu[freq])\n return d2\n \n def computeClusterKSZY(d0):\n \"\"\"\n For cluster, y = K^T A^T N^-1 d, where K is KSZ spatial template for cluster.\n \"\"\"\n d2 = numpy.zeros(nCluster)\n for ic in range(nCluster):\n for freq in range(nFreq):\n d1 = d0[freq].data.copy().reshape((ny, nx))\n d2[ic] += numpy.sum(d1 * ninvs[freq] * clumaps[1][ic][freq])\n return d2\n \n def computeMonopoleY(d0):\n \"\"\"\n Overall monopole amplitude.\n \"\"\"\n d2 = 0\n for freq in range(nFreq):\n d1 = d0[freq].data.copy().reshape((ny, nx))\n d2 += numpy.sum(d1 * ninvs[freq])\n return(d2)\n \n \n # CMB realisation; convolve white noise map with beam and multiply by \n # signal covmat S^1/2 in harmonic space\n b0 = numpy.random.randn(ny,nx)\n a_l = numpy.fft.fft2(b0, b0.shape)\n a_l *= precond_2d * power_2d**(-0.5)\n b0 = numpy.fft.irfft2(a_l, b0.shape)\n \n # Calculate per-band noise realisation.\n # Multiply by pixel-space N^1/2, convolve with beam, and sum over \n # cluster pixels to get RHS\n b1 = 0; b4 = 0\n b2 = numpy.zeros(nCluster)\n if ksz: b3 = numpy.zeros(nCluster)\n \n for freq in range(nFreq):\n _b = numpy.random.randn(ny,nx) * ninvs[freq]**0.5\n a_l = numpy.fft.fft2(_b) * beams[freq] * precond_2d\n b1 += numpy.fft.irfft2(a_l, _b.shape)\n b4 += numpy.sum(_b)\n for ic in range(nCluster):\n b2[ic] += numpy.sum( _b * g_nu[freq] * clumaps[0][ic][freq] )\n if ksz: b3[ic] += numpy.sum( _b * clumaps[1][ic][freq] )\n\n b0 = numpy.reshape(b0,(nx*ny))\n b1 = numpy.reshape(b1,(nx*ny))\n \n\n # Compute CMB and cluster data parts of b\n b_CMB = computeCMBY(datamaps) + b0 + b1\n b_mono = computeMonopoleY(datamaps) + b4\n b_tsz = computeClusterY(datamaps) + b2\n if ksz: b_ksz = computeClusterKSZY(datamaps) + b3\n \n # Return total b vector (Ncmbpix + 1 + (1|2)*Ncluster elements in vector)\n b = numpy.append(b_CMB, b_mono)\n b = numpy.append(b, b_tsz)\n if ksz: b = numpy.append(b, b_ksz)\n return b",
"def boost(self):\n ch = self.gamma\n sh = self.gamma*self.beta\n return( np.array( [ [ch, -sh], [-sh, ch] ] ) )",
"def treat(self):\r\n if self.noiseS > 0:\r\n self.evaluations = min((self.evaluations * self.alphaevals, self.maxevals))\r\n return self.alphasigma\r\n else:\r\n self.evaluations = max((self.evaluations * self.alphaevalsdown, self.minevals))\r\n return 1.0",
"def B(self, t):\n return np.sqrt((3 * self.eta_B * self.snr.L0 *\n YEAR_TO_SEC * self.pulsar.tau_0) /\n (self.r(t) * PC_TO_CM) ** 3 *\n (1 - (1 + (t / self.pulsar.tau_0)) ** (-1)))",
"def compute_divergence(self):\n d_tr_a = []\n d_te_a = []\n for k in self.synth_keys:\n d_tr_a.append(self.divergence('tr', k))\n d_te_a.append(self.divergence('te', k))\n\n training = np.mean(np.array(d_tr_a))\n testing = np.mean(np.array(d_te_a))\n return training, testing",
"def KsCB(fiter):\n \n fiter.mean = RooRealVar(\"mean1\",\"mean1\",490,510)#5168.)\n fiter.sigma = RooRealVar(\"sigma\",\"sigma\", 2,6)#20., 12.,40.)#35.)\n \n fiter.n = RooRealVar(\"exponent\", \"exponent\",1.)#, 0., 12 )\n \n fiter.a = RooRealVar(\"transition\",\"transition\", 0.5, 3) ## Transition point, in sigmas\n fiter.sig = RooCBShape(\"Sigmodel\",\"Sigmodel\", fiter.mass, fiter.mean, fiter.sigma, fiter.a, fiter.n) \n return 1"
] | [
"0.62454176",
"0.6181481",
"0.5973252",
"0.58579695",
"0.5794351",
"0.57939696",
"0.57455033",
"0.57455033",
"0.5720694",
"0.57071906",
"0.5694225",
"0.5676966",
"0.56485546",
"0.56252354",
"0.56042624",
"0.55519134",
"0.55401033",
"0.55351764",
"0.5533983",
"0.55071896",
"0.54824746",
"0.54779726",
"0.5460376",
"0.5458895",
"0.54487455",
"0.54434085",
"0.54311806",
"0.5413616",
"0.54096025",
"0.53824437"
] | 0.63861793 | 0 |
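A minimal runnable sketch of the trial-resampling pattern that calc_granger_actual above relies on for error estimation: draw trial indices with replacement, recompute the statistic on each resample, and stack the results. The statistic below is a placeholder per-timepoint mean, not the spectral Granger estimator from the record; bootstrap_trials, stat_fn, and n_boot are illustrative names, not from the source.

import numpy as np

def bootstrap_trials(data, stat_fn, n_boot=100, seed=None):
    # data shape: (n_timepoints, n_trials, n_channels), as in the record
    rng = np.random.default_rng(seed)
    n_trials = data.shape[1]
    draws = []
    for _ in range(n_boot):
        inds = rng.integers(0, n_trials, n_trials)  # resample trials with replacement
        draws.append(stat_fn(data[:, inds]))
    return np.array(draws)  # spread over axis 0 estimates the sampling error

# usage: bootstrap error bars on a per-timepoint, per-channel mean
data = np.random.randn(200, 30, 2)
boots = bootstrap_trials(data, lambda d: d.mean(axis=1), n_boot=50, seed=0)
err = boots.std(axis=0)  # (n_timepoints, n_channels)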
Calculate shuffled granger causality | def calc_granger_shuffle(self):
if not hasattr(self, 'input_data'):
self.preprocess_and_check_stationarity()
temp_series = [np.stack([np.random.permutation(x)
for x in self.input_data.T]).T
for i in trange(self.n_shuffles)]
outs_temp = parallelize(self.calc_granger, temp_series, n_jobs=30)
outs_temp = [x[0] for x in outs_temp]
self.shuffle_outs = np.array(outs_temp) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def calc_granger_actual(self):\n if not hasattr(self, 'input_data'):\n self.preprocess_and_check_stationarity()\n # input_data shape = (n_timepoints, n_trials, n_channels)\n # Calculate as many bootstrapped samples as n_shuffles\n trial_inds = np.random.randint(\n 0, self.input_data.shape[1],\n (self.n_shuffles, self.input_data.shape[1]))\n temp_dat = [self.input_data[:, trial_inds[i]]\n for i in trange(self.n_shuffles)]\n outs_temp = parallelize(self.calc_granger, temp_dat, n_jobs=30)\n time_vec = outs_temp[0][1].time\n freq_vec = outs_temp[0][1].frequencies\n outs_temp = [x[0] for x in outs_temp]\n self.granger_actual = np.array(outs_temp)\n self.time_vec = time_vec\n self.freq_vec = freq_vec\n #self.granger_actual, self.c_actual = \\\n # self.calc_granger(self.input_data)",
"def shuffle_opacities(mutated_genome):\n mutated_genome",
"def test_case_2(self):\n print(\"-------------------gsr_shuffle-----------------------------------\")\n for _ in range(5):\n deck_size = np.random.randint(low=1, high=10000)\n deck = np.arange(deck_size)\n shuffle_deck = gsr_shuffle(deck)\n self.assertEqual(sum(shuffle_deck), deck_size * (deck_size - 1)//2)\n self.assertEqual(len(deck), len(shuffle_deck))\n self.assertSetEqual(set(shuffle_deck), set(deck))\n\n print(\"input sequence preserve ok: PASS\")\n print(\"shuffle contain unique value ok: PASS\")\n print(\"shuffle contain same set of value as deck ok: PASS\")",
"def shuffle(self) -> List[int]:",
"def sample(self):\n L = e ** (-self.lamb)\n k, p = 1, rand()\n while p > L:\n k += 1\n p *= rand()\n return k - 1",
"def shuffle(self):\n\t\t\trandom.seed(231)\n\t\t\trandom.shuffle(self.Ind)\n\t\t\tself.Ind = self.Ind[:int(len(self.Ind)/5)*5].reshape((self.cv_iters, -1))\n\t\t\t#index of valication set\n\t\t\tself.CVindex = 1\n\t\t\tself.Testindex = 0",
"def shuffle_colors(mutated_genome):\n mutated_genome",
"def test_case_3(self):\n\n print(\"-------------------------shuffle-----------------------------------\")\n\n deck_size = 10\n deck = np.arange(deck_size)\n shuffle_deck = shuffle(deck, my_seed=2)\n self.assertSequenceEqual(list(shuffle_deck), [9, 8, 7, 6, 5, 4, 3, 2, 1, 0])\n\n deck_size = 100\n deck = np.arange(deck_size)\n shuffle_deck = shuffle(deck, my_seed=39)\n self.assertSequenceEqual(list(shuffle_deck),\n [8, 7, 6, 5, 4, 3, 2, 1, 0, 99, 98, 97, 96, 95, 94, 93, 92, 91, 90, 89, 88, 87, 86, 85,\n 84, 83, 82, 81, 80, 79, 78, 77, 76, 75, 74, 73, 72, 71, 70, 69, 68, 67, 66, 65, 64,\n 63, 62, 61, 60, 59, 58, 57, 56, 55, 54, 53, 52, 51, 50, 49, 48, 47, 46, 45, 44, 43,\n 42, 41, 40, 39, 38, 37, 36, 35, 34, 33, 32, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22,\n 21, 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9])\n\n deck_size = 333\n deck = np.arange(deck_size)\n shuffle_deck = shuffle(deck, my_seed=139)\n self.assertSequenceEqual(list(shuffle_deck),\n [332, 331, 330, 329, 328, 327, 326, 325, 324, 323, 322, 321, 320, 319, 318, 317, 316,\n 315, 314, 313, 312, 311, 310, 309, 308, 307, 306, 305, 304, 303, 302, 301, 300, 299,\n 298, 297, 296, 295, 294, 293, 292, 291, 290, 289, 288, 287, 286, 285, 284, 283, 282,\n 281, 280, 279, 278, 277, 276, 275, 274, 273, 272, 271, 270, 269, 268, 267, 266, 265,\n 264, 263, 262, 261, 260, 259, 258, 257, 256, 255, 254, 253, 252, 251, 250, 249, 248,\n 247, 246, 245, 244, 243, 242, 241, 240, 239, 238, 237, 236, 235, 234, 233, 232, 231,\n 230, 229, 228, 227, 226, 225, 224, 223, 222, 221, 220, 219, 218, 217, 216, 215, 214,\n 213, 212, 211, 210, 209, 208, 207, 206, 205, 204, 203, 202, 201, 200, 199, 198, 197,\n 196, 195, 194, 193, 192, 191, 190, 189, 188, 187, 186, 185, 184, 183, 182, 181, 180,\n 179, 178, 177, 176, 175, 174, 173, 172, 171, 170, 169, 168, 167, 166, 165, 164, 163,\n 162, 161, 160, 159, 158, 157, 156, 155, 154, 153, 152, 151, 150, 149, 148, 147, 146,\n 145, 144, 143, 142, 141, 140, 139, 138, 137, 136, 135, 134, 133, 132, 131, 130, 129,\n 128, 127, 126, 125, 124, 123, 122, 121, 120, 119, 118, 117, 116, 115, 114, 113, 112,\n 111, 110, 109, 108, 107, 106, 105, 104, 103, 102, 101, 100, 99, 98, 97, 96, 95, 94,\n 93, 92, 91, 90, 89, 88, 87, 86, 85, 84, 83, 82, 81, 80, 79, 78, 77, 76, 75, 74, 73,\n 72, 71, 70, 69, 68, 67, 66, 65, 64, 63, 62, 61, 60, 59, 58, 57, 56, 55, 54, 53, 52,\n 51, 50, 49, 48, 47, 46, 45, 44, 43, 42, 41, 40, 39, 38, 37, 36, 35, 34, 33, 32, 31,\n 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, 16, 15, 14, 13, 12, 11, 10, 9,\n 8, 7, 6, 5, 4, 3, 2, 1, 0])\n\n print(\"random shuffle check for different seed ok: PASS\")",
"def compute_cramer(contingency_tb, expected_tb):\n keys_A = contingency_tb.columns[:-1].tolist()\n keys_B = contingency_tb.index[:-1].tolist()\n cont_tb = contingency_tb.drop('total',axis=1)\n cont_tb = cont_tb.drop('total',axis=0)\n cont_tb[cont_tb.isnull()] = 0\n chi_2 = sum(((cont_tb - expected_tb) * (cont_tb - expected_tb) / expected_tb).sum(axis=1))\n \n l = len(keys_A)\n c = len(keys_B)\n n = contingency_tb.ix[len(keys_B),len(keys_A)]\n chi_2_max = n*(min(l,c)-1)\n \n cramer = math.sqrt(float(chi_2)/float(chi_2_max))\n return cramer",
"def back_test(self, turnover_frequency):",
"def shuffle_chromosomes(mutated_genome):\n random.shuffle(mutated_genome)",
"def rand(self):\n self.state = (self.a * self.state + self.c)\n return self.state",
"def cubetest_per_topic(topic_truth, topic_result, gamma, max_height, cutoff):\n subtopic_num = topic_truth[1]\n topic_truth = topic_truth[0]\n\n subtopic_height = Counter() # current height of every subtopic\n subtopic_count = Counter() # #docs found relevant to every subtopic (nrels)\n\n weight_per_subtopic = 1.0 / subtopic_num\n\n def gain_per_doc(doc_no):\n if doc_no not in topic_truth:\n return 0\n gain = 0\n for subtopic_id, rating in topic_truth[doc_no].items():\n if subtopic_height[subtopic_id] < max_height:\n discount_height = (gamma ** (subtopic_count[subtopic_id] + 1)) * rating\n if discount_height + subtopic_height[subtopic_id] > max_height:\n discount_height = max_height - subtopic_height[subtopic_id]\n\n gain += weight_per_subtopic * discount_height\n # print(doc_no, subtopic_id,\"original_height\", rating, \"discount height\", discount_height)\n subtopic_height[subtopic_id] += discount_height\n subtopic_count[subtopic_id] += 1\n # print(doc_no, gain)\n return gain\n\n sorted_result = sorted(topic_result.items(), key=lambda x: x[0])\n time = 0.0\n total_gain = 0\n accu_gain = 0\n doc_num = 0\n for iter_num, doclist in sorted_result:\n if iter_num >= cutoff:\n break\n time += 1\n # gain_per_iteration = 0\n for doc_no in doclist:\n total_gain += gain_per_doc(doc_no)\n accu_gain += (total_gain / max_height / time)\n doc_num += 1\n\n # print(time)\n if time != 0:\n ct = total_gain / max_height / time\n else:\n ct = 0\n # print(doc_num)\n if doc_num > 0:\n act = accu_gain / doc_num\n else:\n act = 0\n # print( accu_gain , total_gain)\n return total_gain / max_height, ct, act",
"def random_table(self):\n rule_set = []\n for i in range(self.k ** (2 * self.r + 1) - 1):\n g = np.random.rand()\n if g > self.lambda_param:\n g = 0\n else:\n g = np.random.randint(1, self.k)\n rule_set.append(g)\n rule_set.append(0)\n return rule_set",
"def act(self):\n return np.random.randint(self.k)",
"def anneal():\n best_sol = list(range(SIZE))\n best_sum = get_sum(best_sol)\n shuffle(best_sol)\n\n temp = 10000000\n cool_rate = 0.0003\n\n counter = 0\n while temp > 1:\n new_sol = best_sol.copy()\n i, j = randint(0, SIZE - 1), randint(0, SIZE - 1)\n new_sol[i], new_sol[j] = new_sol[j], new_sol[i]\n new_energy = get_sum(new_sol)\n cur_energy = best_sum\n if calculate_probability(cur_energy, new_energy, temp) > random():\n best_sol = new_sol.copy()\n best_sum = new_energy\n temp *= 1 - cool_rate\n counter += 1\n\n print(counter)\n\n print(best_sol)\n print(best_sum)\n return best_sol, best_sum",
"def shuffle(self):\n self.__c_elem().melange()",
"def scramble(self):\n\t\tfor key in self.activations.keys():\n\t\t\tself.weights[key] = r.randint(0, 1)",
"def scramble(self):\n\t\tfor key in self.activations.keys():\n\t\t\tself.weights[key] = r.randint(0, 1)",
"def chance(dice):\n return sum(dice)",
"def test_case_1(self):\n print(\"-------------------shuffle-----------------------------------\")\n for _ in range(10):\n deck_size = np.random.randint(low=1, high=100000)\n deck = np.arange(deck_size)\n shuffle_deck = shuffle(deck)\n self.assertEqual(sum(shuffle_deck), deck_size * (deck_size - 1) // 2)\n self.assertEqual(len(deck), len(shuffle_deck))\n self.assertSetEqual(set(shuffle_deck), set(deck))\n\n print(\"input sequence preserve ok: PASS\")\n print(\"shuffle contain unique value ok: PASS\")\n print(\"shuffle contain same set of value as deck ok: PASS\")",
"def random_gains():\n # RGB gain represents brightening.\n rgb_gain = 1.0 / random.gauss(mu=0.8, sigma=0.1)\n\n # Red and blue gains represent white balance.\n red_gain = random.uniform(1.9, 2.4)\n blue_gain = random.uniform(1.5, 1.9)\n return rgb_gain, red_gain, blue_gain",
"def marcovNuc (i = random.choice(stateSpace), step = 100):\n # matrix of transition probabilities\n #matrix = [[0.25, 0.25, 0.25, 0.25], [0.25, 0.25, 0.25, 0.25], [0.25, 0.25, 0.25, 0.25], [0.25, 0.25, 0.25, 0.25]] \n matrix = [[0.4, 0.3, 0.2, 0.1], [0.4, 0.3, 0.2, 0.1], [0.4, 0.3, 0.2, 0.1], [0.4, 0.3, 0.2, 0.1]] \n step += 1 # add one to the range because we remove it at the end\n sims = [] # List to hold the results of the Marcov chain\n sims.append(i) # append the seed value to the sims list\n for x in range(step):\n \n if sims[-1] == 'A':\n w = np.random.random() # Random number generator\n # the next set of if statements determine where the random number \n # sits on the number line of probabilities\n if matrix[0][0] > w:\n sims.append('A')\n elif matrix[0][1] + matrix[0][0] > w:\n sims.append('C')\n elif matrix[0][2] + matrix[0][1] + matrix[0][0] > w:\n sims.append('G')\n else:\n sims.append('T')\n elif sims[-1] == 'C':\n x = np.random.random()\n if matrix[1][0] > x:\n sims.append('A')\n elif matrix[1][1] + matrix[1][0] > x:\n sims.append('C')\n elif matrix[1][2] + matrix[1][1] + matrix[1][0] > x:\n sims.append('G')\n else:\n sims.append('T')\n \n elif sims[-1] == 'G':\n y = np.random.random()\n if matrix[2][0] > y:\n sims.append('A')\n elif matrix[2][1] + matrix[2][0] > y:\n sims.append('C')\n elif matrix[2][2] + matrix[2][1] + matrix[2][0] > y:\n sims.append('G')\n else:\n sims.append('T')\n\n else:\n z = np.random.random()\n if matrix[3][0] > z:\n sims.append('A')\n elif matrix[3][1] + matrix[3][0] > z:\n sims.append('C')\n elif matrix[3][2] + matrix[3][1] + matrix[3][0] > z:\n sims.append('G')\n else:\n sims.append('T')\n\n return sims[1:-1] # remove the initial value (the seed)",
"def rand_init(flag):\n global aa, bb, cc, randrsl, randcnt\n aa = bb = cc = 0\n a = b = c = d = e = f = g = h = 0x9e3779b9\n\n for i in range(0, 4):\n a, b, c, d, e, f, g, h = mix(a, b, c, d, e, f, g, h)\n\n for i in range(0, 256, 8):\n if flag:\n a += randrsl[i]\n b += randrsl[i + 1]\n c += randrsl[i + 2]\n d += randrsl[i + 3]\n e += randrsl[i + 4]\n f += randrsl[i + 5]\n g += randrsl[i + 6]\n h += randrsl[i + 7]\n\n a, b, c, d, e, f, g, h = mix(a, b, c, d, e, f, g, h)\n mm[i] = a\n mm[i + 1] = b\n mm[i + 2] = c\n mm[i + 3] = d\n mm[i + 4] = e\n mm[i + 5] = f\n mm[i + 6] = g\n mm[i + 7] = h\n\n if flag:\n for i in range(0, 256, 8):\n a += mm[i]\n b += mm[i + 1]\n c += mm[i + 2]\n d += mm[i + 3]\n e += mm[i + 4]\n f += mm[i + 5]\n g += mm[i + 6]\n h += mm[i + 7]\n\n a, b, c, d, e, f, g, h = mix(a, b, c, d, e, f, g, h)\n\n mm[i] = a\n mm[i + 1] = b\n mm[i + 2] = c\n mm[i + 3] = d\n mm[i + 4] = e\n mm[i + 5] = f\n mm[i + 6] = g\n mm[i + 7] = h\n\n isaac_()\n randcnt = 0",
"def random_test(self):\r\n return 1",
"def random_test(self):\r\n return 1",
"def magic_sample(self, ys):\n\n #for each non-zero element in y\n #we want to multiply the initial state by HGate(i) SGate(i) HGate(i)\n #this turns out to be equivalent to multiplying the whole final state by\n #U H_k S_k H_k U^\\dagger\n #but H_k S_k H_k = e^{i\\pi/4} \\frac{1}{\\sqrt{2}} (I -i X_k)\n #so now we evolve identity forward by U (trivial)\n #and evolve X_k forward by U (using the AGState)\n #then we have to send the resulting Pauli through UC and UH\n #giving a third Pauli\n #then the state is of the form (we^{i\\pi/4}) UC UH (I + i^d P)/sqrt(2) |s>\n #then we apply Bravyi et al's prop. 4 to turn this into a new ch form\n \n\n chCopy = deepcopy(self.chState) #we update this copy as we go\n\n for i, y in enumerate(ys):\n if y:\n #we want to know what U_c^\\dagger U X_i U^\\dagger U_c is\n #firstly we use the A-G info\n # U X_i U^\\dagger is the i'th destabiliser\n x = self.agState.x[self.n+i]\n z = self.agState.z[self.n+i]\n r = self.agState.r[self.n+i]\n\n #print(x,z,r)\n x_col = np.array([x]).T\n z_col = np.array([z]).T\n \n #now we apply U_c to this using the CH-form info\n x_mat = chCopy.F * x_col\n z_mat = (chCopy.M * x_col + chCopy.G*z_col) % np.uint8(2)\n r = (r + util.sort_pauli_string(x_mat, z_mat)) % np.uint8(2)\n\n u = (x @ chCopy.F) % np.uint8(2)\n h = (x @ chCopy.M + z @ chCopy.G) % np.uint8(2)\n\n g = (x @ (z + chCopy.g)) % np.uint8(4)\n\n #now U_c^dag U X_i U^dag U_C = (-1)^r i^g prod_j Z_j^{h_j} X_j^{u_j}\n #we want to conjugate this by U_H\n #everywhere chCopy.v == 1 we flip a z to an x and an x to a z\n #everywhere chCopy.v == 1 and u == 1 and h == 1 we need to swap the order of our x and z so we get a minus sign\n\n u2 = u*(np.uint8(1) ^ chCopy.v) ^ (h*chCopy.v)\n h2 = (u*chCopy.v) ^ (h*(np.uint8(1) ^ chCopy.v))\n\n r = (r + (u*h*chCopy.v).sum()) % np.uint8(2)\n \n \n #now U_H^dag U_c^dag U X_i U^dag U_C U_H = (-1)^r i^g prod_j Z_j^{h2_j} X_j^{u2_j}\n\n t = u2 ^ chCopy.s\n r = (r + h2 @ t) % np.uint8(2)\n\n #now we have w UC UH |s> = w (-1)^r (i)^g UC UH |t>\n\n if all(t == chCopy.s):\n chCopy.w *= np.exp(1j*np.pi/4) * (1 + (1j)**(g+2*r -1) )/ np.sqrt(2)\n else:\n phase, VCList, v, s = util.desuperpositionise(chCopy.s, t, (g+2*r -1)%np.uint8(4), chCopy.v)\n\n chCopy.w *= phase*np.exp(1j*np.pi/4)/np.sqrt(2)\n chCopy.v = v\n chCopy.s = s\n\n for gate in VCList:\n gate.rightMultiplyC(chCopy)\n \n return chCopy",
"def random_gen(self, total: int):\n\t\tstats = self.get_stats()\n\n\t\tdict_len = len(stats)\n\t\tkeys_list = list(stats.keys())\n\t\t# Ensure no bias like the one in Monty Hall Problem. \n\t\t# The algorithm used to ensure uniform distribution as much as possible.\n\t\t# It is based on this StackOverflow link:\n\t\t# https://stackoverflow.com/questions/8064629/random-numbers-that-add-to-100-matlab/8068956#8068956\n\t\trandom.shuffle(keys_list)\n\t\tif total - dict_len < dict_len:\n\t\t\tresults = [0] + [round(random.uniform(0, total - dict_len), 2) for i in range(dict_len - 1)] + [total - dict_len]\n\t\telse:\n\t\t\tresults = [0] + [random.randint(0, total - dict_len) for i in range(dict_len - 1)] + [total - dict_len] \n\t\tresults.sort()\n\t\tfor i in range(1, len(results)):\n\t\t\tstats[list(stats.keys())[i-1]] = 1 + results[i] - results[i-1] \n\n\t\t#TODO refactor\n\t\tself.attack = stats[\"attack\"]\n\t\tself.defense = stats[\"defense\"]\n\t\tself.special_attack = stats[\"special_attack\"]\n\t\tself.special_defense = stats[\"special_defense\"]\n\t\tself.speed = stats[\"speed\"]\n\t\tself.hp = stats[\"hp\"]",
"def test_human_selection_shuffle(seed, lambd):\n iterations = 10\n array_size = 20\n group_size = 5\n\n total_deltas = iterations * array_size\n delta_threshold = 0.5\n cumulative_delta = 0\n\n total_progressions = iterations * (math.ceil(array_size / group_size) - 1)\n progression_threshold = 0.5\n cumulative_progression = 0\n\n unshuffled = range(array_size)\n\n for _ in range(iterations):\n array = list(range(array_size))\n human_selection_shuffle(array, lambd=lambd)\n shuffled = array\n assert len(shuffled) == array_size\n cumulative_delta += sum(abs(s - u) for s, u in zip(shuffled, unshuffled))\n\n shuffled_averages = average_by_group(shuffled, group_size)\n prior = None\n for value in shuffled_averages:\n if prior:\n cumulative_progression += (1 if value >= prior else -1)\n prior = value\n\n # These can randomly fail, but they should be extraordinarily rare\n assert cumulative_delta / total_deltas >= delta_threshold\n assert cumulative_progression / total_progressions >= progression_threshold",
"def test_shuffled(self):\n self.setup_flags()\n self.io_args.matches = os.path.join(\n self.io_args.output_root, \"shuffled\", \"matches.json\"\n )\n self._calibration_error_test(\"shuffled\", \"GeometricCalibration\")"
] | [
"0.62754345",
"0.62195766",
"0.60361123",
"0.5824733",
"0.58219045",
"0.563918",
"0.5584704",
"0.5575912",
"0.5573002",
"0.55611855",
"0.54603237",
"0.54584056",
"0.5441802",
"0.5441032",
"0.5434684",
"0.5428206",
"0.5410833",
"0.54057086",
"0.54057086",
"0.53993386",
"0.53783035",
"0.5367927",
"0.53663385",
"0.53541607",
"0.53501284",
"0.53501284",
"0.53474027",
"0.5327627",
"0.53221774",
"0.5314427"
] | 0.6487357 | 0 |
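A short sketch of the permutation-null pattern in calc_granger_shuffle: permute each channel along time independently to destroy temporal structure, recompute the statistic, and collect the results as a null distribution. shuffle_null and the lag-1 cross-correlation statistic are illustrative stand-ins, not the record's Granger computation.

import numpy as np

def shuffle_null(x, stat_fn, n_shuffles=100, seed=None):
    # x shape: (n_timepoints, n_channels); shuffle every channel independently
    rng = np.random.default_rng(seed)
    null = []
    for _ in range(n_shuffles):
        shuffled = np.stack([rng.permutation(col) for col in x.T]).T
        null.append(stat_fn(shuffled))
    return np.array(null)

# usage: lag-1 cross-correlation collapses toward zero under shuffling
x = np.cumsum(np.random.randn(500, 2), axis=0)
null = shuffle_null(x, lambda d: np.corrcoef(d[:-1, 0], d[1:, 1])[0, 1], seed=0)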
Mask is True when granger causality is NOT SIGNIFICANT | def get_granger_sig_mask(self):
if not hasattr(self, 'percentile_granger'):
self.calc_shuffle_threshold()
if not hasattr(self, 'granger_actual'):
self.calc_granger_actual()
mean_granger_actual = np.mean(self.granger_actual, axis=0)
self.masked_granger = np.ma.masked_where(
mean_granger_actual < self.percentile_granger, mean_granger_actual)
self.mask_array = np.ma.getmask(self.masked_granger) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def mask(self):",
"def cmask(self):\n mask = np.zeros(18)\n if 'full' in self.CONS: mask[:] = 1\n if 'f0' in self.CONS: mask[0] = 1\n if 'f1' in self.CONS: mask[1:4] = 1\n if 'f2' in self.CONS: mask[4:10] = 1\n if 'vx' in self.CONS: mask[10] = 1\n if 'vy' in self.CONS: mask[11] = 1\n if 'vz' in self.CONS: mask[12] = 1\n if 'TG' in self.CONS: mask[13:18] = 1\n return mask>0",
"def _mask(self):\n if self.__mask is None:\n # need this to be *exactly* the numpy boolean False\n return nomask\n return self.__mask",
"def no_classes(mask):\n extrema = ImageStat.Stat(mask).extrema\n r = extrema[0][1]\n g = extrema[1][1]\n b = extrema[2][1]\n\n if r == 0 and g == 0 and b == 0:\n return True\n\n return False",
"def Mask(self) -> int:",
"def get_causal_mask(model_dim):\n attn_shape = (1, model_dim, model_dim)\n mask = torch.triu(torch.ones(attn_shape, dtype=torch.uint8), diagonal=1) # 1 for subsequent positions\n return mask == 0 # True for attending positions",
"def check_masked(self):\n if self._alternate == 'N': # If our alternate allele is masked, or an 'N'\n return True # Return True\n else: # Otherwise\n return False # Return False",
"def get_sample_mask(self):",
"def get_mask(self):\n # use the feature array a to calculate which channels to include etc\n sums = np.sum(self.feature_array, 0)\n feature_mask = np.repeat(np.ones(4, dtype=int), self.n_features)\n # if there are \"missing\" channels use the older version of KK\n zero_sums = sums == 0\n if np.any(zero_sums):\n self.distribution = 1\n feature_mask[zero_sums] = 0\n self.feature_mask = feature_mask\n return feature_mask",
"def is_masked(self):\n return False",
"def build_attention_mask_3d_causal(source_mask, target_mask):\n causal_mask = make_inference_history_mask_3d(target_mask)\n mask = make_attention_mask_3d(source_mask, target_mask)\n mask = mask * causal_mask\n # invert mask for Megatron\n return mask < 0.5",
"def mod_mask(self):\n # Check the *_masq values\n self.__log.debug(\"Checking the *_masq arrays\")\n # Retrieve the kid boxes\n masq_names = np.unique([\"{}_masq\".format(item[1]) for item in self.list_detector])\n self.__check_attributes(masq_names, read_missing=False)\n # Check that they are all the same\n warnings.warn(\"Temporary fix to int8\")\n masqs = [getattr(self, masq).astype(np.int8) for masq in masq_names]\n\n if np.any(np.std(masqs, axis=0) != 0):\n self.__log.error(\"*_masq is varying -- Please check : {}\".format(pprint_list(masq_names, \"_masq\")))\n\n # AB private comm) main_flag should be the bitwise_or of all boxes\n # Well not exactly....\n # cast into 8 bit, is more than enough, only 3 bits used anyway...\n masq = np.bitwise_or.reduce(masqs, axis=0).astype(np.int8)\n\n # AB (#CONCERTO_DAQ January 11 13:02)\n # _flag_balayage_en_cours & _flag_blanking_synthe\n # Ainsi on aura la modulation en bit0 et 1 et le flag blanking en bit\n # AB (#CONCERTO_DAQ February 11 11:07)\n # bit 1 & 2 code the modulation as a signed integer -1 0 1 : 11 00 01 ie 3 0 1\n # bit 3 is a blanking bit, which does not exist for KISS, but should not be taken into account for CONCERTO\n\n # Thus as a temporary fix, let's clear the 3rd bit, actually a bad idea...\n # self.__log.warning(\"Temporary fix : clearing the 3rd bit of masq\")\n # masq = masq & ~(1 << 2)\n\n return masq",
"def set_mask_good(self, _=None):\n self.set_mask_type(\"good\")",
"def flag_fraction(data):\n occ_f = np.sum(data.mask, axis=0) / float(data.shape[0])\n occ_t = np.sum(data.mask, axis=1) / float(data.shape[1])\n \n bad_f = occ_f > params.max_frac_f\n bad_t = occ_t > params.max_frac_t\n \n data.mask[bad_t, :] = True\n data.mask[:, bad_f] = True\n \n return data.mask",
"def codage(nbr):\n\tmask=1\n\tresult=0\n\tfor index in range(len(G)):\n\t\tif ((mask<<index)&nbr) != 0:\n\t\t\tresult^=G[len(G)-index-1]\n\treturn result",
"def causal_attention_mask(nd, ns, dtype):\n i = tf.range(nd)[:, None]\n j = tf.range(ns)\n m = i >= j - ns + nd\n return tf.cast(m, dtype)",
"def invert_mask(attention_mask):\r\n assert attention_mask.dim() == 2\r\n return attention_mask.eq(0)",
"def has_mask(self):\r\n return hasattr(self, '_has_mask')",
"def mask(n):\n if n >= 0:\n return 2**n - 1\n else:\n return 0",
"def mask(n):\n if n >= 0:\n return 2**n - 1\n else:\n return 0",
"def mask(self):\n return self._mask",
"def mask(self):\n return self._mask",
"def mask(self):\n return self._mask",
"def mask(self):\n return self._mask",
"def applymask(self,mask):\n self.spec[mask==0]=np.nan",
"def _make_guided_attention_mask(ilen, olen, sigma):\n grid_x, grid_y = paddle.meshgrid(\n paddle.arange(olen), paddle.arange(ilen))\n grid_x = grid_x.cast(dtype=paddle.float32)\n grid_y = grid_y.cast(dtype=paddle.float32)\n return 1.0 - paddle.exp(-(\n (grid_y / ilen - grid_x / olen)**2) / (2 * (sigma**2)))",
"def invert_mask(attention_mask):\n assert attention_mask.dim() == 2\n return attention_mask.eq(0)",
"def mask_no_self(task:dict, mask_params:dict, qn_pair, verbose=False):\n\n # unpack quantum numbers\n (qnf,qni) = qn_pair\n (Ji,gi,ni) = qni\n (Jf,gf,nf) = qnf\n\n # calculate mask value\n allow = (qnf!=qni)\n\n return allow",
"def test_odd(self):\n actual = cm.circle_mask((5, 5), 2)\n expected = np.array([[False, False, True, False, False],\n [False, True, True, True, False],\n [True, True, True, True, True],\n [False, True, True, True, False],\n [False, False, True, False, False]])\n self.assertIsNone(np.testing.assert_array_equal(actual, expected))",
"def flag_absolute(data):\n data.mask = np.logical_or(data.mask, data > params.thr_max)\n data.mask = np.logical_or(data.mask, data < params.thr_min)\n return data.mask"
] | [
"0.6703793",
"0.63538605",
"0.63142383",
"0.6256875",
"0.61741894",
"0.6091233",
"0.60839707",
"0.6051521",
"0.5936672",
"0.59186983",
"0.5887224",
"0.58444947",
"0.57600707",
"0.5741366",
"0.57228017",
"0.5708867",
"0.5700054",
"0.5693821",
"0.56917423",
"0.56917423",
"0.5683241",
"0.5683241",
"0.5683241",
"0.5683241",
"0.5667506",
"0.5647131",
"0.56456226",
"0.5644469",
"0.56403726",
"0.5599103"
] | 0.66818357 | 1 |
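A compact sketch of the threshold-and-mask step in get_granger_sig_mask: take a high percentile of the shuffle null as a per-bin threshold, then mask everything below it, so the resulting mask is True exactly where the value is not significant. The 95th percentile and the random stand-in arrays are assumptions; the record computes its threshold in calc_shuffle_threshold, which is not shown here.

import numpy as np

shuffle_outs = np.random.rand(100, 20, 20)  # stand-in null: (n_shuffles, time, freq)
actual = np.random.rand(20, 20) * 1.2       # stand-in observed values

threshold = np.percentile(shuffle_outs, 95, axis=0)  # per-bin null threshold
masked = np.ma.masked_where(actual < threshold, actual)
mask = np.ma.getmask(masked)  # True where NOT significant, as in the record
print(masked.count(), "of", actual.size, "bins significant")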
A list of return codes of all processes launched by the pipe | def returncodes(self):
for p in self.processes:
p.wait()
codes = [p.poll() for p in self.processes]
if set(codes) == set([0]):
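            # every process exited cleanly; an empty list signals overall success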
return []
return codes | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def ListProcesses(self):\n stdout, stderr = self.RunCmdOnDevice(\n ['/bin/ps', '--no-headers', '-A', '-o', 'pid,ppid,args:4096,state'],\n quiet=True)\n assert stderr == '', stderr\n procs = []\n for l in stdout.split('\\n'):\n if l == '':\n continue\n m = re.match(r'^\\s*(\\d+)\\s+(\\d+)\\s+(.+)\\s+(.+)', l, re.DOTALL)\n assert m\n procs.append(\n (int(m.group(1)), m.group(3).rstrip(), int(m.group(2)), m.group(4)))\n logging.debug(\"ListProcesses(<predicate>)->[%i processes]\" % len(procs))\n return procs",
"def get_processes():\n yield from psutil.process_iter()",
"def ListProcesses(self):\n stdout, stderr = self.RunCmdOnDevice(\n [\n '/bin/ps', '--no-headers', '-A', '-o', 'pid,ppid,args:4096,state'\n ],\n quiet=True)\n assert stderr == '', stderr\n procs = []\n for l in stdout.split('\\n'):\n if l == '':\n continue\n m = re.match(r'^\\s*(\\d+)\\s+(\\d+)\\s+(.+)\\s+(.+)', l, re.DOTALL)\n assert m\n procs.append((int(m.group(1)), m.group(3).rstrip(), int(m.group(2)),\n m.group(4)))\n logging.debug(\"ListProcesses(<predicate>)->[%i processes]\" % len(procs))\n return procs",
"def get_all_current_processes():\n p = subprocess.Popen(['ps', '-A'], stdout=subprocess.PIPE)\n out, err = p.communicate()\n return out",
"def _get_return_codes(self):\n return self.__return_codes",
"def returncode(self):\n return self._proc.returncode",
"def get_pid_list():\r\n pids = [int(x) for x in os.listdir('/proc') if x.isdigit()]\r\n return pids",
"def subprocess_wait_all(procs, poll=True):\n # type: (list, bool) -> list\n if procs is None or len(procs) == 0:\n raise ValueError('procs is invalid')\n rcodes = [None] * len(procs)\n stdout = [None] * len(procs)\n stderr = [None] * len(procs)\n while True:\n for i in range(0, len(procs)):\n if rcodes[i] is None:\n if poll:\n if procs[i].poll() is not None:\n rcodes[i] = procs[i].returncode\n else:\n stdout[i], stderr[i] = procs[i].communicate()\n rcodes[i] = procs[i].returncode\n if all(x is not None for x in rcodes):\n break\n time.sleep(0.1)\n return rcodes, stdout, stderr",
"def get_exit_code(self):",
"def return_code(self):\n return self.__process.returncode",
"def list_java_processes():\n for line in shell_command_output('jps -l').splitlines():\n line = line.strip()\n if len(line) == 0:\n continue\n (pid, class_name) = line.split()\n yield (int(pid), class_name)",
"def process_results(process_object):\n (stdout, stderr)=process_object.communicate()\n return (process_object.returncode, stdout, stderr)",
"def returncode(self: \"ShellOutput\") -> Artefact[int]:\n self.__check_len()\n return self.returncodes[0]",
"def get_processes_running():\r\n p = [] #array of processes\r\n if platform == \"linux\" or platform == \"linux2\":\r\n for proc in psutil.process_iter():\r\n try:\r\n tmp=Process(proc.name(),int(proc.pid),proc.username(),int(0),int(0))\r\n p.append(tmp)\r\n except:\r\n continue\r\n return (p)\r\n\t\t\t\r\n tasks = check_output(['tasklist']).decode('cp866', 'ignore').split(\"\\r\\n\")\r\n for task in tasks:\r\n m = re.match(b'(.*?)\\\\s+(\\\\d+)\\\\s+(\\\\w+)\\\\s+(\\\\w+)\\\\s+(.*?)\\\\s.*', task.encode())\r\n if m is not None:\r\n tmp=Process(m.group(1).decode(),int(m.group(2).decode()),m.group(3).decode(),int(m.group(4).decode()),int(m.group(5).decode('ascii', 'ignore')))\r\n p.append(tmp)\r\n #m.group(1).decode() image name\r\n #m.group(2).decode() process id\r\n #m.group(3).decode() session_name\r\n #m.group(4).decode() session_num\r\n #m.group(5).decode('ascii', 'ignore') memory usage\r\n return(p)",
"def subprocess_wait_any(procs):\n # type: (list) -> list\n if procs is None or len(procs) == 0:\n raise ValueError('procs is invalid')\n while True:\n for i in range(0, len(procs)):\n if procs[i].poll() is not None:\n return i, procs[i].returncode\n time.sleep(0.1)",
"def check(self):\n if self.processes[-1].poll() is None:\n return None\n\n result = PipelineResult()\n for p in self.processes:\n if p.poll() is None:\n raise PipelineError(\"The last process of a pipeline has exited but an earlier process is still running. ({})\".format(p.args))\n stderr = None\n stdout = None\n if not p.stdout.closed:\n stdout = p.stdout.read()\n p.stdout.close()\n if not p.stderr.closed:\n stderr = p.stderr.read()\n p.stderr.close()\n result.returncodes.append(p.returncode)\n result.stdouts.append(stdout)\n result.stderrs.append(stderr)\n result.cmds.append(p.args)\n\n return result",
"def old_get_status_output(cmd):\r\n if sys.platform[:3] != \"win\":\r\n cmd = \"{ \" + cmd + \"; }\"\r\n pipe = os.popen(cmd + \" 2>&1\", \"r\")\r\n text = list()\r\n for item in pipe:\r\n text.append(item.rstrip())\r\n try:\r\n sts = pipe.close()\r\n except IOError:\r\n sts = 1\r\n if sts is None: sts = 0\r\n return sts, text",
"def PIDs():\n from ctypes import windll,c_ulong,byref,sizeof\n PIDs = (c_ulong*512)()\n size_of_PIDs = c_ulong()\n windll.psapi.EnumProcesses(byref(PIDs),sizeof(PIDs),byref(size_of_PIDs))\n nPIDs = size_of_PIDs.value/sizeof(c_ulong())\n pidProcess = sorted([int(i) for i in PIDs][:nPIDs])\n return pidProcess",
"def get_open_fds(self):\n #By shaunc - http://stackoverflow.com/questions/2023608/check-what-files-are-open-in-python \n import subprocess\n import os\n \n pid = os.getpid()\n procs = subprocess.check_output( \n [ \"lsof\", '-w', '-Ff', \"-p\", str( pid ) ] )\n \n fprocs = filter(\n lambda s: s and s[ 0 ] == 'f' and s[1: ].isdigit(),\n procs.split( '\\n' ) \n )\n \n return fprocs",
"def getstatusoutput(*args, **kwargs):\n p = subprocess.Popen(*args, **kwargs)\n stdout, stderr = p.communicate()\n return (p.returncode, stdout, stderr)",
"def sum_exit_status(self):\n return sum([sd.exit_status for sd in self.node.script_deployments])",
"def exit_code(self):\n return self._process.exitcode",
"def get_process_list() -> Dict:\n return {proc.pid: proc.name() for proc in psutil.process_iter()}",
"def pipe_open(commands: list):\n process = Popen(commands, stdout=PIPE, stderr=PIPE)\n output, error = process.communicate()\n return output, error",
"def get_status_output(cmd):\r\n on_win = (sys.platform[:3] == \"win\")\r\n if not on_win:\r\n cmd = \"{ \" + cmd + \"; }\"\r\n pipe = os.popen(cmd + \" 2>&1\", \"r\")\r\n text = list()\r\n for item in pipe:\r\n text.append(item.rstrip())\r\n try:\r\n sts = pipe.close()\r\n if sts is None: sts = 0\r\n if not on_win:\r\n sts = (sts >> 8)\r\n except IOError:\r\n sts = 1\r\n if sts is None: sts = 0\r\n return sts, text",
"def pid_processes(self):\n return [(process.namespec(), process.infos[self.address_name]['pid'])\n for process in self.processes.values()\n if process.pid_running_on(self.address_name)]",
"def procs_running():\n \n return __proc_stat('procs_running')",
"def pids(node, java_class):\n cmd = \"ps -C java -wwo pid,args | grep '%s' | awk -F' ' '{print $1}'\" % java_class\n\n return [int(pid) for pid in node.account.ssh_capture(cmd, allow_fail=True)]",
"def getstatusoutput(cmd):\n # pipe = os.popen('{ ' + cmd + '; } 2>&1', 'r')\n pipe = os.popen(cmd + ' 2>&1', 'r')\n text = pipe.read()\n sts = pipe.close()\n if sts is None: sts = 0\n if text[-1:] == '\\n': text = text[:-1]\n return sts, text",
"def get_status_output(cmd):\r\n on_win = (sys.platform[:3] == \"win\")\r\n if not on_win:\r\n cmd = \"{ \" + cmd + \"; }\"\r\n pipe = os.popen(cmd + \" 2>&1\", \"r\")\r\n text = list()\r\n for item in pipe:\r\n text.append(item.rstrip())\r\n try:\r\n sts = pipe.close()\r\n if sts is None: sts = 0\r\n if not on_win:\r\n sts = (sts >> 8)\r\n except IOError:\r\n sts = 1\r\n if sts is None: sts = 0\r\n if sts > 200:\r\n sts = (sts >> 8)\r\n return sts, text"
] | [
"0.6246769",
"0.6245669",
"0.6231879",
"0.6155173",
"0.61222064",
"0.6060783",
"0.5963847",
"0.5958712",
"0.59503996",
"0.5923416",
"0.59230506",
"0.5919986",
"0.59160084",
"0.58853406",
"0.58609474",
"0.58411664",
"0.5837716",
"0.5834148",
"0.5834146",
"0.58337736",
"0.5833421",
"0.5818683",
"0.5747236",
"0.57246345",
"0.5703976",
"0.5692286",
"0.56898",
"0.5686575",
"0.5682932",
"0.5672538"
] | 0.7935474 | 0 |
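The snippets above all circle one idiom: spawn a process, wait for it, and collect its exit status and output. A minimal standard-library sketch of that pattern (the helper name is illustrative, not taken from any snippet above):

```python
import subprocess

def status_output(cmd):
    # Run cmd, wait for it, and return (returncode, stdout, stderr).
    # subprocess.run(capture_output=True) is the modern one-call form of
    # the Popen(...).communicate() pattern used repeatedly above.
    result = subprocess.run(cmd, capture_output=True, text=True)
    return result.returncode, result.stdout, result.stderr
```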
combined stderr of all processes | def stderr(self):
if self._stderr is None:
stderr = [p.stderr.read() for p in self.processes if p.stderr]
output = b'\n'.join(stderr).strip()
if not isinstance(output, str):
output = output.decode(self.encoding, 'ignore')
self._stderr = output
return self._stderr | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def nostderr():\n save_stderr = sys.stderr\n sys.stderr = cStringIO.StringIO()\n yield\n sys.stderr = save_stderr",
"def stderr(self, stderr: str) -> Tuple[List[Message], List[AnnotateCode], str]:\n return [], [], stderr",
"def result_stderr(result):\n return result[1][1]",
"def get_stderr(self):\n stderr = [val.get_stderr() for val in self._args_list]\n return '\\n'.join(stderr)",
"def get_stderr(self):\n return self._get_log('stderr')",
"def stderr(self):\n return self.__stderr",
"def nostderr():\n savestderr = sys.stderr\n\n class Devnull(object):\n def write(self, _):\n pass\n\n def flush(self):\n pass\n\n sys.stderr = Devnull()\n try:\n yield\n finally:\n sys.stderr = savestderr",
"def print_err(self, *lst):\n self.print2file(self.stderr, False, True, *lst)",
"def stderr(self) -> str:\n _args: list[Arg] = []\n _ctx = self._select(\"stderr\", _args)\n return _ctx.execute_sync(str)",
"def output_on_fail(process, outputs):\n status = _maybe_use_running_output(process, outputs)\n if status is not None:\n return status\n\n def reader(handle, input_queue):\n \"\"\"Thread which reads handle, until EOF.\"\"\"\n input_queue.put(handle.read())\n\n with thread_output(target=reader, args=(outputs[0], )) as stdout_queue:\n with thread_output(target=reader,\n args=(outputs[1], )) as stderr_queue:\n stdout = stdout_queue.get()\n stderr = stderr_queue.get()\n\n status = process.wait()\n\n if status != 0:\n IndentedLogger.message(\"\\n\")\n IndentedLogger.message(stdout.decode(\"utf-8\"))\n IndentedLogger.message(stderr.decode(\"utf-8\"))\n\n return status",
"def __readStderr(self):\n if self.process is not None:\n self.errorGroup.show()\n s = str(self.process.readAllStandardError(),\n Preferences.getSystem(\"IOEncoding\"),\n 'replace')\n self.errors.insertPlainText(s)\n self.errors.ensureCursorVisible()",
"def merge_stdout_stderr(iterator):\n\n for (stdout, stderr) in iterator:\n if stdout:\n yield stdout\n else:\n yield stderr",
"def print_err(self, *args):\r\n strings = []\r\n for arg in args:\r\n strings.append(str(arg))\r\n self.stderr.write(\",\".join(strings))",
"def finish(self):\n for msg, info in self.errors.iteritems():\n hosts = [ self.job_to_str_func(job) for job in info['jobs'] ]\n\n max_jobs_num = self.max_jobs_num\n if max_jobs_num < 0 or max_jobs_num > len(hosts):\n hosts_msg = ': %s' % ' '.join(hosts)\n elif max_jobs_num == 0:\n hosts_msg = ''\n else:\n hosts_msg = ': %s (and %s more)' % (' '.join(sorted(hosts)[:self.max_jobs_num]), \\\n len(hosts) - self.max_jobs_num)\n\n ex = info['exception']\n msg = '%s.%s: %s' % (ex.__class__.__module__, \\\n ex.__class__.__name__, \\\n str(ex).split('\\n')[0])\n print >> self.outfile, \"Exception '%s' in %s jobs%s.\" % (msg, len(hosts), hosts_msg)\n print >> self.outfile, exception_description(ex).strip()\n if info['trace'] != None:\n print >> self.outfile, 'Traceback:'\n print >> self.outfile, ''.join(info['trace'])\n\n print >> self.outfile",
"def errRun( *cmd, **kwargs ):\n # By default we separate stderr, don't run in a shell, and don't echo\n stderr = kwargs.get( 'stderr', PIPE )\n shell = kwargs.get( 'shell', False )\n echo = kwargs.get( 'echo', False )\n\n if echo:\n # cmd goes to stderr, output goes to stdout\n logger.error( cmd, '\\n' )\n if len( cmd ) == 1:\n cmd = cmd[ 0 ]\n # Allow passing in a list or a string\n if isinstance( cmd, BaseString ) and not shell:\n cmd = cmd.split( ' ' )\n cmd = [ str( arg ) for arg in cmd ]\n elif isinstance( cmd, list ) and shell:\n cmd = \" \".join( arg for arg in cmd )\n\n # logger.error( '*** errRun:', str(cmd), '\\n' )\n\n popen = Popen( cmd, stdout=PIPE, stderr=stderr, shell=shell )\n # We use poll() because select() doesn't work with large fd numbers,\n # and thus communicate() doesn't work either\n out, err = '', ''\n poller = poll()\n poller.register( popen.stdout, POLLIN )\n fdToFile = { popen.stdout.fileno(): popen.stdout }\n fdToDecoder = { popen.stdout.fileno(): getincrementaldecoder() }\n outDone, errDone = False, True\n if popen.stderr:\n fdToFile[ popen.stderr.fileno() ] = popen.stderr\n fdToDecoder[ popen.stderr.fileno() ] = getincrementaldecoder()\n poller.register( popen.stderr, POLLIN )\n errDone = False\n while not outDone or not errDone:\n readable = poller.poll()\n for fd, event in readable:\n f = fdToFile[ fd ]\n decoder = fdToDecoder[ fd ]\n if event & POLLIN:\n data = decoder.decode( f.read( 1024 ) )\n if echo:\n logger.debug( data )\n if f == popen.stdout:\n out += data\n if data == '':\n outDone = True\n elif f == popen.stderr:\n err += data\n if data == '':\n errDone = True\n else: # POLLHUP or something unexpected\n if f == popen.stdout:\n outDone = True\n elif f == popen.stderr:\n errDone = True\n poller.unregister( fd )\n\n returncode = popen.wait()\n # Python 3 complains if we don't explicitly close these\n popen.stdout.close()\n if stderr == PIPE:\n popen.stderr.close()\n logger.info( out, err, returncode )\n return out, err, returncode",
"def result(x):\n\t\t\told_stderr = sys.stderr\n\t\t\tsys.stderr = x\n\t\t\tyield\n\t\t\tsys.stder = old_stderr",
"def getErrors(script):\n\tp = subprocess.Popen(['./'+script], stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n\tout, err = p.communicate()\n\treturn err",
"def capture_output():\r\n stdout, stderr = sys.stdout, sys.stderr\r\n sys.stdout, sys.stderr = StringIO(), StringIO()\r\n out, err = [], []\r\n try:\r\n yield out, err\r\n finally:\r\n out.extend(sys.stdout.getvalue().splitlines())\r\n err.extend(sys.stderr.getvalue().splitlines())\r\n sys.stdout, sys.stderr = stdout, stderr",
"def tasks_with_errors(self):\n errs = []\n while True:\n try:\n errs.append(self._errq.get_nowait())\n except Queue.Empty:\n break\n return errs",
"def redirect_stderr():\n\n class LoggerWriter:\n \"\"\"https://github.com/apache/airflow/pull/6767/files\"\"\"\n def __init__(self, target_logger, level=logging.INFO):\n self.logger = target_logger\n self.level = level\n\n def write(self, message):\n if message and not message.isspace():\n self.logger.log(self.level, message)\n\n def fileno(self):\n \"\"\"\n Returns the stdout file descriptor 1.\n For compatibility reasons e.g python subprocess module stdout redirection.\n \"\"\"\n return 1\n\n def flush(self):\n \"\"\"MUST define flush method to exit gracefully\"\"\"\n\n sys.stderr = LoggerWriter(logger, logging.ERROR)",
"def stderr(self: \"ShellOutput\") -> Artefact[bytes]:\n self.__check_len()\n return self.stderrs[0]",
"def errprint(*args):\n sys.stderr.write(' '.join(map(str,args)) + '\\n')",
"def outputs(self) -> str:\n return self.stdout + self.stderr",
"def get_stderr(self):\n _ = self.get() # force finished wait\n if self._stderr is not None:\n if wait_until_exists(self._stderr):\n with open(self._stderr) as f:\n self._err = f.read()\n return self._err",
"def error(self) -> list:\n return self.__err",
"def err(*s):\n sys.stderr.write(TERM.bold_red)\n sys.stderr.write('Error: ')\n for part in s:\n sys.stderr.write(part)\n sys.stderr.write(TERM.normal)\n sys.stderr.write('\\n')",
"def compiler_output(\n self, stdout: str, stderr: str\n ) -> Tuple[List[Message], List[AnnotateCode], str, str]:\n return [], [], stdout, stderr",
"def std_err(self):\n return self._std_err",
"def get_stderr(self) :\n\t\tif self.__stderr is not None :\n\t\t\tself.__stderr.flush()\n\t\t\treturn self.__stderr.getvalue()",
"def suppress_stdout_stderr():\n with open(devnull, 'w') as fnull:\n with redirect_stderr(fnull) as err, redirect_stdout(fnull) as out:\n yield (err, out)"
] | [
"0.6715245",
"0.66839",
"0.6574798",
"0.65431535",
"0.6496524",
"0.64380735",
"0.63869214",
"0.6328884",
"0.62826824",
"0.6277269",
"0.6198289",
"0.6133846",
"0.60173345",
"0.5962739",
"0.5962001",
"0.5930159",
"0.59266466",
"0.5872949",
"0.58673817",
"0.5847088",
"0.5786829",
"0.57839096",
"0.5754765",
"0.5733826",
"0.5722772",
"0.5706592",
"0.5651207",
"0.5617659",
"0.56161493",
"0.5614734"
] | 0.68787926 | 0 |
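As a standalone illustration of the query's aggregation idiom, here is a hedged sketch that joins the stderr of every process in a pipeline, mirroring the cached property in the document (the function name and parameters are assumptions for the example):

```python
def combined_stderr(processes, encoding="utf-8"):
    # Read stderr from each process that has one, join the byte chunks
    # with newlines, and decode leniently, as the property above does.
    chunks = [p.stderr.read() for p in processes if p.stderr]
    return b"\n".join(chunks).strip().decode(encoding, "ignore")
```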
Run processes in background. Return the last piped Popen object | def bg(self):
p = None
self.processes = []
self._stderr = None
stdin = sys.stdin
cmds = self.commands
if [c for c in cmds if c._cmd_args[:1] == ['sudo']]:
check_sudo()
for cmd in cmds:
if isinstance(cmd, Stdin):
stdin = cmd.iter_stdout
elif isinstance(cmd, PyPipe):
cmd.stdin = p.stdout
stdin = cmd.iter_stdout
p = cmd
else:
args = cmd.command_line(cmd.kwargs.get('shell', False))
kwargs = dict(
stdin=stdin, stderr=PIPE,
stdout=PIPE
)
kwargs.update(cmd.kwargs)
env_ = kwargs.pop('env', env)
log.debug('Popen(%r, **%r)', args, kwargs)
kwargs['env'] = env_
try:
p = Popen(args, **kwargs)
except OSError:
self._raise()
self.processes.append(p)
stdin = p.stdout
return p | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def call(*args, **kwargs):\n return Popen(*args, **kwargs).wait()",
"def start(self):\n last_stdout = None\n self.processes = []\n for cmd in self.cmds:\n # TODO: handle exceptions raised by Popen\n p = subprocess.Popen(cmd, stdin=last_stdout, stdout=subprocess.PIPE, stderr=subprocess.PIPE)\n if last_stdout is not None:\n last_stdout.close()\n last_stdout = p.stdout\n self.processes.append(p)",
"def main_function(self, args_list: [[str]], is_foreground=True) -> None:\n children_pids = []\n new_fds, old_fds = [], []\n\n if not is_foreground: # background support not implemented\n while True:\n _input = input('pysh: background process not implement yet. Rerun on foreground? [y/n] ')\n if _input == 'y':\n args_list[-1].pop()\n is_foreground = True\n break\n elif _input == 'n':\n return\n else:\n print('\\tenter either \"y\" or \"n\"')\n\n def _clean_up(error: OSError) -> None:\n map(lambda _pid: os.kill(_pid, signal.SIGKILL), children_pids)\n print(f'{args_list[i][0]}: {error}', file=sys.stderr)\n\n pid = -1\n\n try:\n for i in range(len(args_list)):\n if i < len(args_list) - 1: # if there is a next cmd\n new_fds = os.pipe()\n\n pid = os.fork()\n if pid == 0:\n redirect_result, args_list[i] = PyShell.redirection_handler(args_list[i])\n\n if i < len(args_list) - 1: # if there is a next cmd\n os.close(new_fds[0])\n os.dup2(new_fds[1], sys.stdout.fileno())\n os.close(new_fds[1])\n\n if redirect_result[sys.stdout.fileno()] is True:\n raise OSError('invalid usage of redirection and (or) piping')\n\n if i > 0: # if there is a previous cmd\n os.dup2(old_fds[0], sys.stdin.fileno())\n os.close(old_fds[0])\n os.close(old_fds[1])\n\n if redirect_result[sys.stdin.fileno()] is True:\n raise OSError('invalid usage of redirection and (or) piping')\n\n os.execvp(args_list[i][0], args_list[i])\n\n else:\n children_pids.append(pid)\n if i > 0:\n os.close(old_fds[0])\n os.close(old_fds[1])\n if i < len(args_list) - 1:\n old_fds = new_fds\n\n if is_foreground:\n self.jobs.append(('fg', children_pids))\n try:\n for i in children_pids:\n os.waitpid(i, 0)\n self.jobs.pop()\n except ChildProcessError:\n pass\n else:\n self.jobs.append(('bg', children_pids))\n print(f'[{len(self.jobs) - 1}] new job added')\n\n except OSError as e:\n _clean_up(e)\n if pid == 0:\n exit(1)\n else:\n return",
"def subprocess_nowait(cmd, shell=False, cwd=None, env=None):\n # type: (str, bool, str, dict) -> subprocess.Process\n return subprocess.Popen(cmd, shell=shell, cwd=cwd, env=env)",
"def popen(self, args, **kwargs):\n self.log.debug(\"popen %s\", ' '.join(args))\n return vaping.io.subprocess.Popen(args, **kwargs)",
"def non_blocking_streamlit(process: psutil.Popen) -> None:\n while process.is_running():\n process.communicate()",
"def run_commands(self):\n processes = []\n\n i = 0\n ## get list of commands\n commands = self.get_commands()\n cnum = multiprocessing.cpu_count()\n\n while len(commands)>0:\n while len(processes)<cnum-1:\n c = commands.pop()\n i+=1\n print \"command #\",i, c\n ## run commands\n processes.append((i,subprocess.Popen(c, shell=True)))\n\n for j,p in processes:\n if p.poll() is not None:\n print j, \" status: \", p.poll()\n processes.remove((j,p))\n break\n else:\n time.sleep(10)\n return",
"def Popen(self, *unargs, **kwargs):\r\n cmdline = None\r\n if 'args' in kwargs:\r\n cmdline = kwargs['args']\r\n else:\r\n cmdline = unargs[0]\r\n return PopenWrapper.WaitWrapper(subprocess_.Popen(*unargs, **kwargs), self, cmdline)",
"def popen(self, args, bufsize=0, stdin=None, stdout=None, stderr=None, cwd=None, env=None, tty=False, compress=False): \n return subprocess.Popen(args, bufsize=bufsize, cwd=cwd, env=env, stdin=stdin, stdout=stdout, stderr=stderr)",
"def execute(args):\n print '################################'\n print 'args: ', args\n p = subprocess.Popen(args, shell=True, executable='/bin/bash')\n # p = subprocess.call(args, shell=True, executable='/bin/bash')\n p.wait()\n return p\n print '################################'",
"def get_processes_running():\r\n p = [] #array of processes\r\n if platform == \"linux\" or platform == \"linux2\":\r\n for proc in psutil.process_iter():\r\n try:\r\n tmp=Process(proc.name(),int(proc.pid),proc.username(),int(0),int(0))\r\n p.append(tmp)\r\n except:\r\n continue\r\n return (p)\r\n\t\t\t\r\n tasks = check_output(['tasklist']).decode('cp866', 'ignore').split(\"\\r\\n\")\r\n for task in tasks:\r\n m = re.match(b'(.*?)\\\\s+(\\\\d+)\\\\s+(\\\\w+)\\\\s+(\\\\w+)\\\\s+(.*?)\\\\s.*', task.encode())\r\n if m is not None:\r\n tmp=Process(m.group(1).decode(),int(m.group(2).decode()),m.group(3).decode(),int(m.group(4).decode()),int(m.group(5).decode('ascii', 'ignore')))\r\n p.append(tmp)\r\n #m.group(1).decode() image name\r\n #m.group(2).decode() process id\r\n #m.group(3).decode() session_name\r\n #m.group(4).decode() session_num\r\n #m.group(5).decode('ascii', 'ignore') memory usage\r\n return(p)",
"def bash(cmd, prnt=True, wait=True):\n p = Popen(cmd, stdout=PIPE, stderr=STDOUT, shell=True)\n if wait:\n p.wait()\n while True and prnt:\n line = p.stdout.readline()\n if line:\n print(line)\n else:\n break\n\n return (p)",
"def popenAndCall(onExit, *popenArgs, **popenKWArgs):\n def runInThread(onExit, popenArgs, popenKWArgs):\n global proc\n proc = subprocess.Popen(*popenArgs, **popenKWArgs)\n print(type(proc))\n proc.wait()\n onExit()\n return\n\n thread = threading.Thread(target=runInThread,\n args=(onExit, popenArgs, popenKWArgs))\n thread.start()\n\n return thread # returns immediately after the thread starts",
"def spawn(self):\n self._proc = subprocess.Popen(\n self._args, stdout=subprocess.PIPE, stderr=subprocess.PIPE\n )",
"def _launch_command(args, out_cb, err_cb, done=None, **kwargs):\n\n def pump_stream(callback, stream):\n \"\"\"Pump the stream\"\"\"\n for line in stream:\n callback(line)\n callback(None)\n\n def joiner():\n \"\"\"Wait for streams to finish, then call done callback\"\"\"\n for th in threads:\n th.join()\n done(process)\n\n kwargs = kwargs.copy()\n in_data = kwargs.get(\"input\")\n if \"input\" in kwargs:\n del kwargs[\"input\"]\n assert kwargs.get(\"stdin\") is None, kwargs[\"stdin\"]\n kwargs[\"stdin\"] = PIPE\n elif \"stdin\" not in kwargs:\n kwargs[\"stdin\"] = DEVNULL\n kwargs.setdefault(\"stdout\", PIPE)\n kwargs.setdefault(\"stderr\", PIPE)\n kwargs[\"universal_newlines\"] = True # Text streams, not byte streams\n process = Popen(args, **kwargs)\n threads = []\n if process.stdout:\n thread = Thread(\n target=pump_stream, args=(out_cb, process.stdout), daemon=True\n )\n thread.start()\n threads.append(thread)\n if process.stderr:\n thread = Thread(\n target=pump_stream, args=(err_cb, process.stderr), daemon=True\n )\n thread.start()\n threads.append(thread)\n if done and threads:\n Thread(target=joiner, daemon=True).start()\n if in_data:\n process.stdin.write(str(in_data, \"utf-8\"))\n process.stdin.close()\n return process",
"def start_process():\n global command, process\n\n def on_data(data):\n data = data.decode().strip()\n print('{}'.format(data))\n\n cmd = command.split(' ')\n\n if process:\n process.terminate()\n\n process = MySubprocess(cmd, -1, functools.partial(on_data), None, None)",
"def create_process(self, args=[], *popenargs, **kwargs):\n try:\n startupinfo = subprocess.STARTUPINFO()\n startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW\n kwargs.setdefault('startupinfo', startupinfo)\n except:\n pass\n kwargs.setdefault('universal_newlines', True)\n kwargs.setdefault('stdin', sys.stdin)\n return subprocess.Popen(self.build_args(args), *popenargs, **kwargs)",
"def run_cmd(cmd, callback=None, watch=False, background=False, shell=False):\r\n\r\n if watch and not callback:\r\n raise RuntimeError(\r\n \"You must provide a callback when watching a process.\"\r\n )\r\n\r\n output = None\r\n\r\n if shell:\r\n proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)\r\n else:\r\n proc = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE)\r\n\r\n if background:\r\n # Let task run in background and return pmid for monitoring:\r\n return proc.pid, proc\r\n\r\n if watch:\r\n while proc.poll() is None:\r\n line = proc.stdout.readline()\r\n if line != \"\":\r\n callback(line)\r\n\r\n # Sometimes the process exits before we have all of the output, so\r\n # we need to gather the remainder of the output.\r\n remainder = proc.communicate()[0]\r\n if remainder:\r\n callback(remainder)\r\n else:\r\n output = proc.communicate()[0]\r\n\r\n if callback and output is not None:\r\n return callback(output)\r\n\r\n return output",
"def start(self):\r\n return self.start_subprocess()",
"def spawn(*args):\n # Adapted from ranger.ext.spawn\n process = Popen(args, stdout=PIPE, shell=True)\n stdout, stderr = process.communicate()\n return stdout.decode('utf-8')",
"def Spawn(proc):\n proc.start()\n return proc",
"def compute(self):\n parfile = self.create_parfile()\n self._command = [self.class_exe, parfile]\n process = subprocess.Popen(self._command)\n try:\n # process.wait(timeout=300)\n process.wait()\n # except (KeyboardInterrupt, subprocess.TimeoutExpired) as e: # TimeoutExpired only in Python >= 3.3\n except Exception as e:\n process.kill()\n raise e\n return",
"def get_all_current_processes():\n p = subprocess.Popen(['ps', '-A'], stdout=subprocess.PIPE)\n out, err = p.communicate()\n return out",
"def run_subprocess(self, *cmd_and_args):\n\n command_line = \" \".join(cmd_and_args)\n self.logger.debug(\"Running: %s\", command_line)\n\n return subprocess.Popen(command_line, shell=True, close_fds=True)",
"def spawn_subprocess(args, loop=None):\n if not _IS_XOS_ASYNC:\n return spawn_subprocess_not_xos(args, loop=loop)\n else:\n return spawn_subprocess_xos(args, loop=loop)",
"def popener(\n args: models.CommandArgs,\n *,\n stdin: Optional[int] = DEVNULL,\n stdout: Optional[int] = DEVNULL,\n stderr: Optional[int] = DEVNULL,\n shell: Optional[bool] = None,\n text: bool = False,\n) -> Popen:\n return Popen(\n args,\n stdin=stdin,\n stdout=stdout,\n stderr=stderr,\n shell=_get_shell(args, shell),\n universal_newlines=text,\n )",
"def start_processes(program_calls):\n processes = [subprocess.Popen(c,stdout=subprocess.PIPE) \\\n for c in program_calls]\n return processes",
"def _run(proc: Popen, timeout):\n try:\n return proc.wait(timeout=timeout)\n except TimeoutExpired:\n pass\n if sys.platform != 'win32':\n proc.send_signal(signal.SIGINT)\n try:\n return proc.wait(timeout=5)\n except TimeoutExpired:\n pass\n\n proc.terminate() # SIGTERM\n try:\n return proc.wait(timeout=5)\n except TimeoutExpired:\n pass\n\n proc.kill() # SIGKILL\n return proc.wait(timeout=5)",
"def run_process(cmd, out_log=None, err_log=None):\r\n return run_multi_processes([cmd], out_log=out_log, err_log=err_log)",
"def shell(args, wait=True, msg=None):\n\n # Fix Windows error if passed a string\n if isinstance(args, str):\n args = shlex.split(args, posix=(os.name != \"nt\"))\n if os.name == \"nt\":\n args = [arg.replace('/', '\\\\') for arg in args]\n\n if wait:\n proc = Popen(args, stdin=PIPE, stdout=PIPE, stderr=PIPE)\n out, err = proc.communicate(input=msg)\n exitcode = proc.returncode\n if exitcode != 0:\n debug('<<<< shell call failed; error message below >>>>')\n debug(err.decode('utf-8'))\n debug('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')\n raise IOError()\n return out\n else:\n DETACHED_PROCESS = 0x00000008\n proc = Popen(args, creationflags=DETACHED_PROCESS)"
] | [
"0.6664132",
"0.64688516",
"0.64178354",
"0.6342972",
"0.6325031",
"0.62582344",
"0.62191474",
"0.60025233",
"0.5937454",
"0.5910544",
"0.5850933",
"0.58419496",
"0.584046",
"0.5808457",
"0.5794157",
"0.5777509",
"0.5762344",
"0.5744797",
"0.572683",
"0.56833637",
"0.5670176",
"0.5657175",
"0.5646626",
"0.563818",
"0.56328934",
"0.56275785",
"0.5607536",
"0.5606036",
"0.55753917",
"0.5570612"
] | 0.7648508 | 0 |
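The bg() document chains Popen objects by feeding each process's stdout into the next one's stdin. A stripped-down sketch of just that chaining (names are illustrative):

```python
from subprocess import Popen, PIPE

def run_pipeline(*commands):
    # Build `cmd1 | cmd2 | ...`: each process reads the previous one's
    # stdout; the last Popen object is returned, like bg() above.
    procs, stdin = [], None
    for args in commands:
        p = Popen(args, stdin=stdin, stdout=PIPE, stderr=PIPE)
        procs.append(p)
        stdin = p.stdout
    return procs[-1]

# Usage: run_pipeline(["ls", "-l"], ["grep", "py"]) ~= `ls -l | grep py`
```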
Generate chut scripts contained in location | def chutifab(self, *args):
ll = logging.getLogger(posixpath.basename(sys.argv[0]))
level = ll.level
ll.setLevel(logging.WARN)
if not args:
args = ['.']
for location in args:
Generator(destination='.chutifab')(location)
ll.setLevel(level)
self.scripts = sorted(sh.ls('.chutifab'))
return self.scripts | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _add_scripts(prefix):\n mapping = {\"MAST_HOME\": prefix}\n if \"Windows\" in platform.system():\n script_dir = os.path.join(INSTALL_DIR, \"files\", \"windows\")\n files = [\n \"mast.bat\",\n \"mast-system.bat\",\n \"mast-accounts.bat\",\n \"mast-backups.bat\",\n \"mast-crypto.bat\",\n \"mast-deployment.bat\",\n \"mast-developer.bat\",\n \"mast-network.bat\",\n \"test-mast.bat\",\n \"mast-version.bat\",\n \"mast-web.bat\",\n \"mastd.bat\",\n \"mast-ssh.bat\",\n \"set-env.bat\",\n ]\n elif \"Linux\" in platform.system():\n script_dir = os.path.join(INSTALL_DIR, \"files\", \"linux\")\n files = [\n \"mast\",\n \"mast-system\",\n \"mast-accounts\",\n \"mast-backups\",\n \"mast-crypto\",\n \"mast-deployment\",\n \"mast-developer\",\n \"mast-network\",\n \"test-mast\",\n \"mast-version\",\n \"mast-web\",\n \"mast-ssh\",\n \"mastd\",\n \"set-env\",\n ]\n\n for f in files:\n dst = os.path.join(prefix, f)\n src = os.path.join(script_dir, f)\n print(\"{} -> {}\".format(src, dst))\n content = render_template_file(src, mapping)\n write_file(dst, content)\n if \"Linux\" in platform.system():\n os.chmod(dst, 0o755)\n\n if \"Windows\" in platform.system():\n # copy python27.dll to site-packages/win32 directory to get around\n # issue when starting mastd\n src = os.path.join(prefix, \"miniconda\", \"python27.dll\")\n dst = os.path.join(\n prefix,\n \"miniconda\",\n \"Lib\",\n \"site-packages\",\n \"win32\",\n \"python27.dll\"\n )\n copyfile(src, dst)\n for filename in [\"pythoncom27.dll\", \"pythoncomloader27.dll\", \"pywintypes27.dll\"]:\n src = os.path.join(\n prefix,\n \"miniconda\",\n \"Lib\",\n \"site-packages\",\n \"pywin32_system32\",\n filename,\n )\n dst = os.path.join(\n prefix,\n \"miniconda\",\n \"Lib\",\n \"site-packages\",\n \"win32\",\n filename,\n )\n copyfile(src, dst)\n copytree(\n os.path.join(INSTALL_DIR, \"files\", \"bin\"),\n os.path.join(prefix, \"bin\")\n )\n copytree(\n os.path.join(INSTALL_DIR, \"files\", \"etc\"),\n os.path.join(prefix, \"etc\")\n )\n copytree(\n os.path.join(INSTALL_DIR, \"files\", \"var\"),\n os.path.join(prefix, \"var\")\n )\n copytree(\n os.path.join(INSTALL_DIR, \"files\", \"usrbin\"),\n os.path.join(prefix, \"usrbin\")\n )\n copytree(\n os.path.join(INSTALL_DIR, \"files\", \"tmp\"),\n os.path.join(prefix, \"tmp\")\n )\n copytree(\n os.path.join(INSTALL_DIR, \"files\", \"doc\"),\n os.path.join(prefix, \"doc\")\n )\n copytree(\n os.path.join(INSTALL_DIR, \"files\", \"contrib\"),\n os.path.join(prefix, \"contrib\")\n )",
"def script_generator(self):\n py = self.global_setting.get('python', sys.executable)\n ex_options = self.global_setting.get('evaluate_options', str())\n train_py = \"/home/haihuam/Projects/RepPoints/mmdetection/tools/train.py\"\n if os.access(py, os.X_OK):\n content = \"set -e \\n\"\n content += \"export CUDA_VISIBLE_DEVICES=\" + \\\n \",\".join(self.selected_gpus)+ \" \\n\"\n content += \"cd %s \\n\"%(self.run_dir)\n \n content += \"%s %s %s --work_dir %s --validate %s &> train.log \\n\"%(py, \n train_py,\n self.setting['config_file'],\n self.run_dir,\n ex_options)\n content += \"touch evaluate.done \\n\"\n\n self.script_content = content\n else:\n print(\"Error: %s is not executable.\"%py)\n sys.exit(0)",
"def script(self):",
"def generate_all_files():\n for (name, fn) in lang_module.targets.items():\n path = of_g.options.install_dir + '/' + name\n os.system(\"mkdir -p %s\" % os.path.dirname(path))\n with open(path, \"w\") as outfile:\n fn(outfile, os.path.basename(name))\n print(\"Wrote contents for \" + name)",
"def main():\n p = Path.cwd()\n path = str(p)\n\n files = tracked_files()\n scripts = search_dir(p, path, files, '.py')\n scripts = [i for i in scripts if 'tests/' not in i[:7]]\n scripts = list(map(partial(process, p), scripts))\n\n for script in scripts:\n script['display'] = script['name'].replace('_', '\\_')\n write_readme(scripts)",
"def script_generator(self):\n analyze_tool = \"/home/haihuam/Projects/RepPoints/mmdetection/tools/analyze_logs.py\"\n ex_options = self.global_setting.get('analyze_options', str())\n py = self.global_setting.get('python', sys.executable)\n if os.access(py, os.X_OK):\n content = \"set -e \\n\" \n content += \"cd %s \\n\"%(self.run_dir)\n content += \"%s %s plot_curve *.log.json \"%(py, analyze_tool)\n content += \"--keys loss loss_cls loss_pts_init \"\n content += \"loss_pts_refine \"\n content += \"--out losses.pdf %s &> analyze.log \\n\"%(ex_options)\n\n content += \"touch analyze.done \\n\"\n self.script_content = content\n else:\n print(\"Error: %s is not executable.\"%py)\n sys.exit(0)",
"def run_commands (self):\n cwd = os.getcwd()\n data = []\n data.append('config_dir = %r' % os.path.join(cwd, \"config\"))\n data.append(\"install_data = %r\" % cwd)\n data.append(\"install_scripts = %r\" % cwd)\n self.create_conf_file(data)\n super(MyDistribution, self).run_commands()",
"def generate_js_dir():\n\n return pkg_resources.resource_filename('linkedin.mobster.har.visualization.js', None)",
"def script_generator(self):\n\n self._get_free_tcp_port()\n\n train_py = \"/home/haihuam/Projects/RepPoints/mmdetection/tools/train.py\"\n py = self.global_setting.get('python', sys.executable)\n ex_options = self.global_setting.get('train_options', str())\n\n if not os.access(py, os.X_OK):\n py = \"/home/haihuam/anaconda3/envs/RepPoints/bin/python\"\n \n if os.access(py, os.X_OK):\n content = \"set -e \\n\"\n content += \"export CUDA_VISIBLE_DEVICES=\" + \\\n \",\".join(self.selected_gpus)+ \" \\n\"\n\n content += \"cd %s \\n\"%(self.run_dir)\n content += \"%s -m torch.distributed.launch \"%(py)\n content += \"--nproc_per_node=%s \"%(self.setting['train_num_gpu'])\n content += \"--master_port %s \"%(self.dist_train_port)\n content += \"%s %s --launcher pytorch \"%(train_py, self.setting['config_file'])\n content += \"--work_dir %s \"%(self.run_dir)\n content += \"--resume_from latest.pth \"\n content += \"--validate %s &> %s.log \\n\"%(ex_options, self.stage)\n content += \"touch train.done \\n\"\n # return content\n self.script_content = content\n else:\n print(\"Error: %s is not executable.\"%py)\n sys.exit(0)",
"def main():\n\targs = getArgs()\n\tid_question = args.id_question\n\tlang = args.language\n\tdir_cp = None\n\twith open('config.json') as json_file:\n\t\tconfig_data = json.load(json_file)\n\t\tdir_cp = config_data['dir_cp']\n\n\t\n\t\n\t# sample_io = GetData(args.id_question).get_uri_io_sample()\n\ttemplate = FileUtil(id_question, dir_cp['path'], lang)\n\ttemplate.write_template()\n\t# print(sample_io)",
"def install():\n src = None\n if len(sys.argv) == 2:\n src = sys.argv[1]\n elif len(sys.argv) > 2:\n print >> sys.stderr, 'USAGE: rbco_nautilusscripts_install [SOURCE_DIR]'\n sys.exit(1)\n\n paths = (\n '~/.gnome2/nautilus-scripts',\n '~/.gnome2/nemo-scripts',\n '~/.config/caja/scripts',\n )\n\n for path in paths:\n print 'Creating in {0} ...'.format(path)\n dest = os.path.expanduser(path)\n link_scripts(dest, src_dir=src)\n print",
"def write_shell_scripts(airfoils, qsh_template, nsetup, ntype, out_dir):\n for nairfoil, sim_setup in airfoils.iteritems():\n for aoa in sim_setup['aoas']:\n # Create simulation name\n sim_name = create_sim_name(nairfoil, ntype, nsetup, aoa)\n # Create fluent journal file\n with open(qsh_template, 'r') as f:\n qtxt = f.read()\n # Start to replace parameters inside the journal\n qtxt = qtxt.replace('SIMNAME', sim_name)\n qtxt = qtxt.replace('in.jou', sim_name + '.jou')\n qtxt = qtxt.replace('fluent.out', sim_name + '.out')\n # Write new shell script to out_dir\n qout = sim_name + '.qsh'\n if not os.path.exists(out_dir):\n os.makedirs(out_dir)\n with open(os.path.join(out_dir, qout), 'w') as f:\n f.write(qtxt)\n return True",
"def setup_scripts(self, context):\n path = os.path.abspath(os.path.dirname(__file__))\n path = os.path.join(path, 'scripts')\n self.install_scripts(context, path)",
"def write_scripts(self, out, ref, file1, file2):\n for config in self.configurations:\n program_folder = os.path.join(out, self.out)\n config.write_MuTect2_script(program_folder, self.path2exe, ref, file1, file2)\n return None",
"def generate():\n local('cd doc && make clean && make html')",
"def write_scripts(self, out, ref, file1, file2):\n for config in self.configurations:\n program_folder = os.path.join(out, self.out)\n config.write_Strelka_script(program_folder, self.path2exe, ref, file1, file2, self.template_config)\n return None",
"def create_bootstrap_script(scratch_dir):\n install_script = os.path.join(os.path.dirname(os.path.abspath(__file__)), \"install\")\n shutil.copy(install_script, os.path.join(scratch_dir, \"install\"))",
"def apply_dart(self):\n shutil.copyfile(self.env['DART_JS_BOOTSTRAP'], self.outdir.make_node('dart.js').abspath())\n for filetype in ['dartfiles','jsfiles','htmlfiles','cssfiles','otherfiles']:\n files = getattr(self, filetype)\n for f in files:\n if f.is_bld():\n outf = self.outdir.make_node(f.path_from(self.path.get_bld()))\n elif f.is_src():\n outf = self.outdir.make_node(f.path_from(self.path.get_src()))\n else:\n raise Exception(\"I don't know what I'm doing anymore.\")\n self.create_task('copytask',f,outf)",
"def generate(self):\n current_path = os.getcwd()\n shell_package = ShellPackage(current_path)\n if not shell_package.is_tosca():\n click.echo(\"Code generation supported in TOSCA based shells only\", err=True)\n return\n\n shell_name = shell_package.get_name_from_definition()\n shell_filename = shell_name + \".zip\"\n package_full_path = path.join(current_path, \"dist\", shell_filename)\n destination_path = path.join(current_path, \"src\")\n\n cloudshell_config = self.cloudshell_config_reader.read()\n\n click.echo(\"Connecting to Cloudshell server ...\")\n\n self.driver_generator.generate_driver(\n cloudshell_config=cloudshell_config,\n destination_path=destination_path,\n package_full_path=package_full_path,\n shell_filename=shell_filename,\n shell_name=shell_name,\n )",
"def js():\n with lcd(BASEDIR):\n js_ext = (\n 'submodules/jquery-cookie/src/jquery.cookie.js',\n 'submodules/jquery-treegrid/js/jquery.treegrid.js',\n 'submodules/bootstrap/dist/js/bootstrap.js',\n )\n js_own = (\n 'js/variables.js',\n 'js/bmf-autocomplete.js',\n 'js/bmf-calendar.js',\n 'js/bmf-editform.js',\n 'js/bmf-inlineform.js',\n 'js/bmf-buildform.js',\n 'js/menu.js',\n )\n\n local('cp submodules/bootstrap/dist/js/bootstrap.min.js djangobmf/static/djangobmf/js/')\n local('yui-compressor --type js -o djangobmf/static/djangobmf/js/jquery.cookie.min.js submodules/jquery-cookie/src/jquery.cookie.js')\n local('yui-compressor --type js -o djangobmf/static/djangobmf/js/jquery.treegrid.min.js submodules/jquery-treegrid/js/jquery.treegrid.js')\n\n local('cat %s > djangobmf/static/djangobmf/js/djangobmf.js' % ' '.join(js_ext + js_own))\n local('yui-compressor --type js -o djangobmf/static/djangobmf/js/djangobmf.min.js djangobmf/static/djangobmf/js/djangobmf.js')\n local('cat %s > djangobmf/static/djangobmf/js/djangobmf.js' % ' '.join(js_own))",
"def gen_auto_script(preload, is_peti):\n dest = os.path.join('bee2', 'inject', 'auto_run.nut')\n if not preload and not is_peti:\n return # Don't add for hammer maps\n\n with open(dest, 'w') as file:\n if not preload:\n # Leave it empty, don't write an empty function body.\n file.write('//---\\n')\n return\n\n file.write('function Precache() {\\n')\n for entry in preload:\n if entry.startswith('precache_sound:'):\n file.write('\\tself.PrecacheSoundScript(\"{}\");\\n'.format(\n entry[15:],\n ))\n file.write('}\\n')",
"def run():\n names=[i.__name__ for i in modList]\n res,action=kcs_ui.string_select('fake vitesse generator',\n 'Please select the module you want to generate fake vitesse py.',\n 'Press option to generate for all modules.',\n names)\n if res==kcs_util.ok():\n mod=modList[action-1]\n des=kcs_ui.string_req('Where do you want to place the file?',r'C:\\temp')\n if des[0]==kcs_util.ok():\n# des = os.path.join(os.path.join(os.getcwd(), \"FakeVitesse\"))\n fname = des[1] + \"\\\\\" + mod.__name__ + \".py\"\n GenPy(mod, fname)\n elif res==kcs_util.options():\n des=kcs_ui.string_req('Where do you want to place the file?',r'C:\\temp')\n if des[0]==kcs_util.ok():\n for mod in modList:\n fname = des[1] + \"\\\\\" + mod.__name__ + \".py\"\n GenPy(mod, fname)",
"def scriptGen(self,tmpd='/tmp/jose',libRev='last',submode='qsub',\n redirect=1,PBSoptions=''):\n jobname=self.name\n outdir=self.outd\n qsubdir=scratchdir+'/qsub/'+todayDate() #subdirectory to deposit the script\n if not os.path.exists(qsubdir): pastry('/bin/mkdir -p '+qsubdir)\n script=qsubdir+'/'+jobname+'.sh' #full script file name\n\n if len(jobname) > 15:\n sys.stderr.write('Error: job name '+jobname+' cannot exceed 15 characters')\n return ''\n if not os.path.exists(outdir): os.system('/bin/mkdir -p '+outdir)\n buf=''\n ulimit=int(float(mem_limit)*1024) #maximum resident memory size (Kb) to prevent swapping\n wd=tmpd+'/${PBS_JOBID}'\n #wd=tmpd+'/'+ re.compile('\\W').sub('',self.name) +'_$$' #working directory\n logname=jobname+'.log'\n local_log=wd+'/'+logname\n remote_log=outdir+'/'+logname\n buf= '#!/bin/bash\\n\\n'\n buf+= PBSoptions+'\\n\\n'\n buf+= '#bash function to update library\\n'\n buf+= self.updateNodeLib(libRev)+'\\n\\n'\n buf+= '#bash function to import temporary libs\\n'\n buf+= self.shared_temporal_libraries()+'\\n\\n'\n buf+= '#bash function to clean exit\\n'\n buf+= self.cleanup_exit(submode=submode)+'\\n\\n'\n buf+= 'echo \"'+script+'\"\\n' #write script name withing script body\n buf+= 'hostname\\n' #node where job will be run\n buf+= 'echo $PBS_JOBID\\n'\n buf+= 'ulimit -m '+`ulimit`+' #maximum memory\\n'\n buf+= 'source ~/.bash_profile >/dev/null #environment variables\\n'\n buf+= 'wd='+wd+' #working directory\\n'\n buf+= '/bin/mkdir -p $wd\\n'\n buf+= 'export LOCAL_LOG=\"'+local_log+'\"\\n'\n buf+= '/bin/touch $LOCAL_LOG\\n'\n if submode=='sub' and redirect:\n buf+='exec &> $LOCAL_LOG #redirect STODOUT, STDERR to LOCAL_LOG\\n' \n buf+= 'export REMOTE_LOG=\"'+remote_log+'\"\\n'\n\n but+= '#clean up old log file\\n'\n buf+= 'if [ -f $REMOTE_LOG ]; then\\n' \n buf+= ' /bin/rm -f $REMOTE_LOG\\n'\n buf+= 'fi\\n\\n'\n\n buf+= 'trap \"cleanup_exit 1\" TERM #in case of killing job\\n\\n'\n\n buf+= '#update node code library && import libraries\\n'\n buf+= 'if !('\n buf+= 'updateNodeLib && ' \n buf+= 'shared_temporal_libraries _PREPARE_'\n buf+= ');then\\n'\n buf+= ' cleanup_exit 1\\n'\n buf+= 'fi\\n\\n'\n \n buf+= '/bin/cp '+' '.join(self.inpl)+' $wd #bring input files\\n' \n buf+= 'cd $wd\\n\\n'\n buf+= '#Test command success\\n'\n buf+= 'exs=0 #variable holding script exit status\\n'\n buf+= 'if !('\n buf+= self.exe\n buf+= ');then\\n'\n buf+= ' exs=1\\n'\n buf+= 'fi\\n\\n'\n buf+= '#move even partial results (exs=1)\\n'\n buf+= '/bin/mv '+' '.join(self.outl)+' '+outdir+'\\n'\n buf+= 'cleanup_exit $exs'\n\n open(script,'w').write(buf)\n pastry('chmod u+x '+script)\n\n return script",
"def convert_appendices():\n local('cd import_scripts;../bin/python import_appendices.py import_appendices')",
"def command(self):\n #Output directory is current folder unless specified vi output command.\n output_dir = os.path.join(self.options.output_dir, 'apache')\n #Input where the templates are at.\n input_dir= source_filename = os.path.join(os.path.dirname(__file__), 'templates/apache')\n #Finding directory that has egg info\n egg_info_dir = pluginlib.find_egg_info_dir(os.getcwd())\n #Name of the module\n plugins= os.path.splitext(os.path.basename(egg_info_dir))[0]\n #print os.path.splitext(os.path.basename(egg_info_dir))[0]\n dist_name= pluginlib.get_distro(plugins)\n vars={}\n #If PKG-INFO exists read it and add it to vars\n if dist_name.has_metadata('PKG-INFO'):\n data=dist_name.get_metadata('PKG-INFO')\n for add_info in pluginlib.parse_lines(data):\n (key,value) = add_info.split(':',1)\n vars[key]=value\n #Add package names\n vars['project']=plugins\n vars['package']=plugins\n vars['egg']=pluginlib.egg_name(str(dist_name))\n \n #Copy my template direcotry to destination.\n copy_dir(input_dir, output_dir, vars, verbosity=1, simulate=False, use_cheetah=True)\n print 'Thank you for using modwsgideploy!'\n print 'Please read README.txt in apache folder.'\n print 'http://lucasmanual.com/mywiki/modwsgideploy'\n print 'Made in Chicago,IL USA'",
"def main():\n\td = Hugo(\"cmd\")\n\tpass",
"def to_egg(dest_dir):\n return os.path.join('EGG-INFO', 'scripts', dest_dir)",
"def main():\n if getattr(sys, 'frozen', False):\n folderCurrent = os.path.dirname(sys.executable)\n else:\n folderCurrent = os.path.abspath(os.path.dirname(__file__))\n\n replaceAll(folderCurrent)",
"def regenerate():\n local('pelican -o {} -r -s pelicanconf.py'.format(env.deploy_path))",
"def setup(self, **kwargs):\n if self.bash_script:\n src = os.fspath(FILES / self.bash_script)\n dst = os.fspath(self.project_dir / self.bash_script)\n shutil.copy(src, dst)"
] | [
"0.6096537",
"0.6009406",
"0.59709835",
"0.5967113",
"0.5920322",
"0.59109473",
"0.58882314",
"0.5841919",
"0.57877",
"0.5781333",
"0.5743029",
"0.57190037",
"0.5708141",
"0.5695298",
"0.5688432",
"0.56231797",
"0.5622278",
"0.55802655",
"0.5565092",
"0.55514556",
"0.55338746",
"0.54904646",
"0.5490114",
"0.54860306",
"0.5477263",
"0.54384524",
"0.54238915",
"0.54143953",
"0.5400379",
"0.5397144"
] | 0.67919713 | 0 |
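chutifab() silences its logger while generating scripts and restores the previous level afterwards. That silence-then-restore dance generalises into a small context manager (a sketch, assuming only the level needs saving):

```python
import logging
from contextlib import contextmanager

@contextmanager
def quiet_logger(name, level=logging.WARN):
    # Temporarily raise a logger's threshold, always restoring the old one.
    log = logging.getLogger(name)
    old_level = log.level
    log.setLevel(level)
    try:
        yield log
    finally:
        log.setLevel(old_level)
```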
Upload a script and run it. ``args`` are used as command line arguments. ``kwargs`` are passed to `fabric`'s `run` | def run(self, script, *args, **kwargs):
return self._run('run', script, *args, **kwargs) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def script_run(ctx: click.Context, name, script_arguments):\n subcommand_script.cmd_run(ctx.obj, name, script_arguments)",
"def run(args):\n\n drive_uid = str(args[\"drive_uid\"])\n file_uid = str(args[\"file_uid\"])\n chunk_idx = int(args[\"chunk_index\"])\n secret = str(args[\"secret\"])\n data = string_to_bytes(args[\"data\"])\n checksum = str(args[\"checksum\"])\n\n drive = DriveInfo(drive_uid=drive_uid)\n\n drive.upload_chunk(file_uid=file_uid, chunk_index=chunk_idx,\n secret=secret, chunk=data, checksum=checksum)\n\n return True",
"def run(self, script_args):\n run_url = '{0}/{1}/run'.format(self.url, self.script_name)\n headers = {'Content-Type': 'text/plain'}\n payload = json.dumps(script_args)\n\n resp = False\n if self.get():\n log.debug('Running script: {0}'.format(self.script_name))\n req = requests.post(run_url, auth=(self.username, self.password), headers=headers, data=payload)\n if req.status_code == 204 or 200:\n resp = req.json()\n return resp\n log.error('Failed running script: {0}\" Reason: {1} {2}'.format(self.script_name, req.status_code, req.json()))\n\n return resp",
"def run_remote_script(self, script_file, args=None, log_error=True, additional_files=None):\n script_name = os.path.basename(script_file)\n self.__sftp_client.put(script_file, script_name)\n if not args:\n args = []\n return self.run_remote_command(\n [\"/bin/bash\", \"--login\", script_name] + args, log_error=log_error, additional_files=additional_files\n )",
"def submit(self, script, **kwargs):\n raise NotImplementedError()",
"def main():\n parser = argparse.ArgumentParser(description='Upload a zipped spreadsheet of condition-genes.')\n parser.add_argument('f', metavar='file', type=str)\n parser.add_argument(\"--check\", help=\"Check first for gene/CUI presence\",\n action=\"store_true\")\n parser.add_argument(\"--verbose\", help=\"Enable logging\",\n action=\"store_true\")\n\n args = parser.parse_args()\n\n if args.verbose:\n logging.basicConfig(level=logging.INFO)\n _LOGGER.info(\"Running script with args: verbose {} check {}\".format(args.verbose, args.check))\n\n upload_condition_genes(args.f, args.check)",
"def execute(self, task, script, **kwargs):\n locals().update(kwargs)\n exec(script)",
"def run_script(self, script_name, script_args=None, node_paths=None):\n # TODO: consider add a pants.util function to manipulate command line.\n package_manager_args = self._get_run_script_args()\n package_manager_args.append(script_name)\n if script_args:\n package_manager_args.append('--')\n package_manager_args.extend(script_args)\n return self.run_command(args=package_manager_args, node_paths=node_paths)",
"def do_exec(self, arg):\n self.run_file(arg['path'])",
"def run(self, args, **kwargs):\n raise NotImplementedError()",
"def upload(\n path: Path = typer.Argument(..., help=\"Path to your source code\"),\n entrypoint: str = typer.Argument(..., help=\"Your program entrypoint\"),\n channel: Optional[str] = typer.Option(default=None, help=help_strings.CHANNEL),\n memory: int = typer.Option(\n sdk_settings.DEFAULT_VM_MEMORY, help=\"Maximum memory allocation on vm in MiB\"\n ),\n vcpus: int = typer.Option(\n sdk_settings.DEFAULT_VM_VCPUS, help=\"Number of virtual cpus to allocate.\"\n ),\n timeout_seconds: float = typer.Option(\n sdk_settings.DEFAULT_VM_TIMEOUT,\n help=\"If vm is not called after [timeout_seconds] it will shutdown\",\n ),\n private_key: Optional[str] = typer.Option(\n sdk_settings.PRIVATE_KEY_STRING, help=help_strings.PRIVATE_KEY\n ),\n private_key_file: Optional[Path] = typer.Option(\n sdk_settings.PRIVATE_KEY_FILE, help=help_strings.PRIVATE_KEY_FILE\n ),\n print_messages: bool = typer.Option(False),\n print_code_message: bool = typer.Option(False),\n print_program_message: bool = typer.Option(False),\n runtime: str = typer.Option(\n None,\n help=\"Hash of the runtime to use for your program. Defaults to aleph debian with Python3.8 and node. You can also create your own runtime and pin it\",\n ),\n beta: bool = typer.Option(False),\n debug: bool = False,\n persistent: bool = False,\n persistent_volume: Optional[List[str]] = typer.Option(\n None,\n help=\"\"\"Takes 3 parameters \n A persistent volume is allocated on the host machine at any time \n eg: Use , to seperate the parameters and no spaces \n --persistent_volume persistence=host,name=my-volume,size=100 ./my-program main:app\n \"\"\",\n ),\n ephemeral_volume: Optional[List[str]] = typer.Option(\n None,\n help=\"\"\"Takes 1 parameter Only \n Ephemeral volumes can move and be removed by the host,Garbage collected basically, when the VM isn't running \n eg: Use , to seperate the parameters and no spaces \n --ephemeral-volume size_mib=100 ./my-program main:app \"\"\",\n ),\n immutable_volume: Optional[List[str]] = typer.Option(\n None,\n help=\"\"\"Takes 3 parameters \n Immutable volume is one whose contents do not change \n eg: Use , to seperate the parameters and no spaces \n --immutable-volume ref=25a393222692c2f73489dc6710ae87605a96742ceef7b91de4d7ec34bb688d94,use_latest=true,mount=/mnt/volume ./my-program main:app\n \"\"\",\n ),\n):\n\n setup_logging(debug)\n\n path = path.absolute()\n\n try:\n path_object, encoding = create_archive(path)\n except BadZipFile:\n typer.echo(\"Invalid zip archive\")\n raise typer.Exit(3)\n except FileNotFoundError:\n typer.echo(\"No such file or directory\")\n raise typer.Exit(4)\n\n account: AccountFromPrivateKey = _load_account(private_key, private_key_file)\n\n runtime = (\n runtime\n or input(f\"Ref of runtime ? 
[{sdk_settings.DEFAULT_RUNTIME_ID}] \")\n or sdk_settings.DEFAULT_RUNTIME_ID\n )\n\n volumes = []\n\n # Check if the volumes are empty\n if (\n persistent_volume is None\n or ephemeral_volume is None\n or immutable_volume is None\n ):\n for volume in prompt_for_volumes():\n volumes.append(volume)\n typer.echo(\"\\n\")\n\n # else Parse all the volumes that have passed as the cli parameters and put it into volume list\n else:\n if len(persistent_volume) > 0:\n persistent_volume_dict = volume_to_dict(volume=persistent_volume)\n volumes.append(persistent_volume_dict)\n if len(ephemeral_volume) > 0:\n ephemeral_volume_dict = volume_to_dict(volume=ephemeral_volume)\n volumes.append(ephemeral_volume_dict)\n if len(immutable_volume) > 0:\n immutable_volume_dict = volume_to_dict(volume=immutable_volume)\n volumes.append(immutable_volume_dict)\n\n subscriptions: Optional[List[Dict]]\n if beta and yes_no_input(\"Subscribe to messages ?\", default=False):\n content_raw = input_multiline()\n try:\n subscriptions = json.loads(content_raw)\n except json.decoder.JSONDecodeError:\n typer.echo(\"Not valid JSON\")\n raise typer.Exit(code=2)\n else:\n subscriptions = None\n\n with AuthenticatedAlephClient(\n account=account, api_server=sdk_settings.API_HOST\n ) as client:\n # Upload the source code\n with open(path_object, \"rb\") as fd:\n logger.debug(\"Reading file\")\n # TODO: Read in lazy mode instead of copying everything in memory\n file_content = fd.read()\n storage_engine = (\n StorageEnum.ipfs\n if len(file_content) > 4 * 1024 * 1024\n else StorageEnum.storage\n )\n logger.debug(\"Uploading file\")\n user_code: StoreMessage\n status: MessageStatus\n user_code, status = client.create_store(\n file_content=file_content,\n storage_engine=storage_engine,\n channel=channel,\n guess_mime_type=True,\n ref=None,\n )\n logger.debug(\"Upload finished\")\n if print_messages or print_code_message:\n typer.echo(f\"{user_code.json(indent=4)}\")\n program_ref = user_code.item_hash\n\n # Register the program\n message, status = client.create_program(\n program_ref=program_ref,\n entrypoint=entrypoint,\n runtime=runtime,\n storage_engine=StorageEnum.storage,\n channel=channel,\n memory=memory,\n vcpus=vcpus,\n timeout_seconds=timeout_seconds,\n persistent=persistent,\n encoding=encoding,\n volumes=volumes,\n subscriptions=subscriptions,\n )\n logger.debug(\"Upload finished\")\n if print_messages or print_program_message:\n typer.echo(f\"{message.json(indent=4)}\")\n\n item_hash: ItemHash = message.item_hash\n hash_base32 = (\n b32encode(b16decode(item_hash.upper())).strip(b\"=\").lower().decode()\n )\n\n typer.echo(\n f\"Your program has been uploaded on aleph.im .\\n\\n\"\n \"Available on:\\n\"\n f\" {settings.VM_URL_PATH.format(hash=item_hash)}\\n\"\n f\" {settings.VM_URL_HOST.format(hash_base32=hash_base32)}\\n\"\n \"Visualise on:\\n https://explorer.aleph.im/address/\"\n f\"{message.chain}/{message.sender}/message/PROGRAM/{item_hash}\\n\"\n )",
"def upload(ctx: click.Context, **kwargs):\n root_commands.cmd_upload(ctx.obj, **kwargs)",
"def run(args):\n write_request(args.num_tests)\n\n if args.request == 'check':\n script_files = ['modules/check_vault.py']\n elif args.request == 'create':\n script_files = ['modules/create_vault.py']\n else:\n raise AssertionError\n\n if args.version is None:\n python_bins = [python2_exec, python3_exec]\n elif args.version == 2:\n python_bins = [python2_exec]\n elif args.version == 3:\n python_bins = [python3_exec]\n else:\n raise AssertionError\n\n for script_file in script_files:\n for python_bin in python_bins:\n cmd = [python_bin, script_file]\n subprocess.check_call(cmd)\n\n os.unlink('request.txt')\n\n # This allows to call this test from another script, that runs other\n # tests as well.\n if not args.is_background:\n send_notification('regression')",
"def up(self, args):\n try:\n assert len(args) > 0\n path = args[0]\n compress = should('Compress file?')\n self.prepare_upload(path, compress)\n except AssertionError:\n print(\"I need a file name!\")",
"def execute_script(self, action, *args):\n self.host.cmd(('./%s' + len(args) * ' %s') % (action, *args))",
"def main(args):\n\n # Get additional args for the script\n if len(args) > 1:\n scriptargs = args[1:]\n else:\n scriptargs = None\n\n scriptpath = find_script(args[0])\n\n if not scriptpath:\n print('\\nCannot find that script!: {}'.format(args[0]))\n return 1\n\n # Check for extra input needed.\n forgotargs = check_input(scriptpath, scriptargs)\n if forgotargs:\n if not scriptargs:\n scriptargs = forgotargs\n else:\n scriptargs.extend(forgotargs)\n # Shell script..\n shellret = shell_script(scriptpath, scriptargs)\n if shellret:\n print('wrun: script returned non-zero!: {}'.format(shellret))\n\n return shellret",
"def execute_script(self, script, enterpreter='/bin/sh'):\n destination = '/tmp/' + ''.join(\n random.choice(string.lowercase) for i in range(16))\n\n self.upload(script, destination)\n self.execute('%s %s' % (enterpreter, destination))\n self.execute('rm %s' % destination)",
"def main():\n parser = argparse.ArgumentParser()\n parser.add_argument(\"-slices\", type=int, required=True)\n args = parser.parse_args()\n\n upload_model_data(args.slices)",
"def setup_script(self, *args, **kwargs):\n pass",
"def call_script(self, script):\n filename, callable = script.rsplit(':', 1)\n filename = os.path.abspath(filename)\n module = imp.load_source('script', filename)\n script = getattr(module, callable.strip())\n\n try:\n script(self.options, self.buildout, self.augmented_environment())\n except TypeError:\n # BBB: Support hook scripts that do not take the environment as\n # the third parameter\n script(self.options, self.buildout)",
"def quick_run(self, *args):\n self.inputs(*args)\n self.run()",
"def exec_from_args(args):\n outfolder = args.folder + '/normal/'\n check(outfolder, 'm')\n\n makeconfig(str(args.gene_names), str(args.sequences), str(args.org_included),\n len_threshold=args.len_threshold,\n its=str(args.its), query_cover=str(args.query_cover), identity=str(args.identity),\n distance=str(args.string_distance), subsp=str(args.subsp), excluded=str(args.org_excluded),\n remote=str(args.remote_blast), folder=args.folder, date=args.today, blacklist=args.blacklist,\n synonyms=args.synonyms)\n\n r = Run('n', args.folder, args.debug)\n r.start()",
"def run(self, *args, **kwargs):\n # @@@ async? (consider how args and kwargs should be serialized;\n # that may change things quite a bit)\n self.process((args, kwargs))",
"def run(self, args):\n pass",
"def run_app():\n args = get_args()\n if valid_args(args):\n transfer_ownership(args)",
"def publish(self, path, recipient, *args):\n if not os.environ.get(\"OS_PROJECT_ID\", None):\n logging.log(\n logging.ERROR,\n \"Openstack RC file hasn't been sourced in the working %s%s\",\n \"environment. Please source an Openstack RC file to enable\",\n \" the use of Openstack tools.\",\n )\n sys.exit(-1)\n\n container = \"shared-upload-\" + recipient + \"-\" + time.strftime(\"%Y%m%d-%H%M%S\")\n\n subprocess.call([\"swift\", \"upload\", container, path]) # nosec\n\n self.share(container, recipient, *args)",
"def execute():\n command_line_args = argv[1:]\n args = cli(command_line_args)\n\n callback = args.callback\n kwargs = {\n k: v\n for k, v in args.__dict__.items()\n if k != \"callback\"\n }\n\n main(callback, **kwargs)",
"def run(self, args: argparse.Namespace) -> None:\n pass",
"def call_script_on_staging(name, args=[]):\n def fn(): lib.call([os.path.join(env.staging_path, \"scripts\", name)] + args)\n time_duration(fn)",
"def execute(self, args=\"\"):\r\n return super(PythonScript, self).execute(_EXECUTABLE, args)"
] | [
"0.6333176",
"0.6069562",
"0.60669595",
"0.5997453",
"0.5979731",
"0.59490883",
"0.5912231",
"0.59050536",
"0.59018415",
"0.5843688",
"0.5803377",
"0.57960784",
"0.5723168",
"0.56878614",
"0.5683986",
"0.5648805",
"0.5643183",
"0.5575065",
"0.5519657",
"0.5519369",
"0.5487893",
"0.5481601",
"0.5475217",
"0.5465563",
"0.5444742",
"0.5437741",
"0.5434984",
"0.5431965",
"0.5428334",
"0.5420962"
] | 0.68447685 | 0 |
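run() and sudo() both delegate to an upload-then-execute helper. A hedged sketch of that pattern, assuming the Fabric 1.x API (fabric.api.put/run) rather than chut's actual internals; the helper name and remote path are illustrative:

```python
from fabric.api import put, run  # Fabric 1.x; an assumption for this sketch

def upload_and_run(local_script, *args, **kwargs):
    # Copy the script to the remote host, mark it executable, then run it
    # with the given arguments; kwargs are forwarded to fabric's run().
    remote = "/tmp/%s" % local_script.rsplit("/", 1)[-1]
    put(local_script, remote, mode=0o755)
    return run(" ".join([remote] + [str(a) for a in args]), **kwargs)
```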
Upload a script and run it using sudo. ``args`` are used as command line arguments. ``kwargs`` are passed to `fabric`'s `sudo` | def sudo(self, script, *args, **kwargs):
return self._run('sudo', script, *args, **kwargs) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _upload_template(filename, destination, **kwargs):\n user = kwargs.pop('user')\n kwargs['use_sudo'] = True\n upload_template(filename, destination, **kwargs)\n sudo('chown %(user)s:%(user)s %(dest)s' % {'user': user, 'dest': destination})",
"def run_remote_script(self, script_file, args=None, log_error=True, additional_files=None):\n script_name = os.path.basename(script_file)\n self.__sftp_client.put(script_file, script_name)\n if not args:\n args = []\n return self.run_remote_command(\n [\"/bin/bash\", \"--login\", script_name] + args, log_error=log_error, additional_files=additional_files\n )",
"def execute_script(self, script, enterpreter='/bin/sh'):\n destination = '/tmp/' + ''.join(\n random.choice(string.lowercase) for i in range(16))\n\n self.upload(script, destination)\n self.execute('%s %s' % (enterpreter, destination))\n self.execute('rm %s' % destination)",
"def upload(ctx: click.Context, **kwargs):\n root_commands.cmd_upload(ctx.obj, **kwargs)",
"def run(args):\n\n drive_uid = str(args[\"drive_uid\"])\n file_uid = str(args[\"file_uid\"])\n chunk_idx = int(args[\"chunk_index\"])\n secret = str(args[\"secret\"])\n data = string_to_bytes(args[\"data\"])\n checksum = str(args[\"checksum\"])\n\n drive = DriveInfo(drive_uid=drive_uid)\n\n drive.upload_chunk(file_uid=file_uid, chunk_index=chunk_idx,\n secret=secret, chunk=data, checksum=checksum)\n\n return True",
"def runUploads():\n os.system(\"chmod u+x uploadCommands.sh\")\n os.system(\"./uploadCommands.sh\")\n os.system(\"rm uploadCommands.sh\")",
"def sudo_from_args(command):\n return Effect(Sudo.from_args(command))",
"def upload(\n path: Path = typer.Argument(..., help=\"Path to your source code\"),\n entrypoint: str = typer.Argument(..., help=\"Your program entrypoint\"),\n channel: Optional[str] = typer.Option(default=None, help=help_strings.CHANNEL),\n memory: int = typer.Option(\n sdk_settings.DEFAULT_VM_MEMORY, help=\"Maximum memory allocation on vm in MiB\"\n ),\n vcpus: int = typer.Option(\n sdk_settings.DEFAULT_VM_VCPUS, help=\"Number of virtual cpus to allocate.\"\n ),\n timeout_seconds: float = typer.Option(\n sdk_settings.DEFAULT_VM_TIMEOUT,\n help=\"If vm is not called after [timeout_seconds] it will shutdown\",\n ),\n private_key: Optional[str] = typer.Option(\n sdk_settings.PRIVATE_KEY_STRING, help=help_strings.PRIVATE_KEY\n ),\n private_key_file: Optional[Path] = typer.Option(\n sdk_settings.PRIVATE_KEY_FILE, help=help_strings.PRIVATE_KEY_FILE\n ),\n print_messages: bool = typer.Option(False),\n print_code_message: bool = typer.Option(False),\n print_program_message: bool = typer.Option(False),\n runtime: str = typer.Option(\n None,\n help=\"Hash of the runtime to use for your program. Defaults to aleph debian with Python3.8 and node. You can also create your own runtime and pin it\",\n ),\n beta: bool = typer.Option(False),\n debug: bool = False,\n persistent: bool = False,\n persistent_volume: Optional[List[str]] = typer.Option(\n None,\n help=\"\"\"Takes 3 parameters \n A persistent volume is allocated on the host machine at any time \n eg: Use , to seperate the parameters and no spaces \n --persistent_volume persistence=host,name=my-volume,size=100 ./my-program main:app\n \"\"\",\n ),\n ephemeral_volume: Optional[List[str]] = typer.Option(\n None,\n help=\"\"\"Takes 1 parameter Only \n Ephemeral volumes can move and be removed by the host,Garbage collected basically, when the VM isn't running \n eg: Use , to seperate the parameters and no spaces \n --ephemeral-volume size_mib=100 ./my-program main:app \"\"\",\n ),\n immutable_volume: Optional[List[str]] = typer.Option(\n None,\n help=\"\"\"Takes 3 parameters \n Immutable volume is one whose contents do not change \n eg: Use , to seperate the parameters and no spaces \n --immutable-volume ref=25a393222692c2f73489dc6710ae87605a96742ceef7b91de4d7ec34bb688d94,use_latest=true,mount=/mnt/volume ./my-program main:app\n \"\"\",\n ),\n):\n\n setup_logging(debug)\n\n path = path.absolute()\n\n try:\n path_object, encoding = create_archive(path)\n except BadZipFile:\n typer.echo(\"Invalid zip archive\")\n raise typer.Exit(3)\n except FileNotFoundError:\n typer.echo(\"No such file or directory\")\n raise typer.Exit(4)\n\n account: AccountFromPrivateKey = _load_account(private_key, private_key_file)\n\n runtime = (\n runtime\n or input(f\"Ref of runtime ? 
[{sdk_settings.DEFAULT_RUNTIME_ID}] \")\n or sdk_settings.DEFAULT_RUNTIME_ID\n )\n\n volumes = []\n\n # Check if the volumes are empty\n if (\n persistent_volume is None\n or ephemeral_volume is None\n or immutable_volume is None\n ):\n for volume in prompt_for_volumes():\n volumes.append(volume)\n typer.echo(\"\\n\")\n\n # else Parse all the volumes that have passed as the cli parameters and put it into volume list\n else:\n if len(persistent_volume) > 0:\n persistent_volume_dict = volume_to_dict(volume=persistent_volume)\n volumes.append(persistent_volume_dict)\n if len(ephemeral_volume) > 0:\n ephemeral_volume_dict = volume_to_dict(volume=ephemeral_volume)\n volumes.append(ephemeral_volume_dict)\n if len(immutable_volume) > 0:\n immutable_volume_dict = volume_to_dict(volume=immutable_volume)\n volumes.append(immutable_volume_dict)\n\n subscriptions: Optional[List[Dict]]\n if beta and yes_no_input(\"Subscribe to messages ?\", default=False):\n content_raw = input_multiline()\n try:\n subscriptions = json.loads(content_raw)\n except json.decoder.JSONDecodeError:\n typer.echo(\"Not valid JSON\")\n raise typer.Exit(code=2)\n else:\n subscriptions = None\n\n with AuthenticatedAlephClient(\n account=account, api_server=sdk_settings.API_HOST\n ) as client:\n # Upload the source code\n with open(path_object, \"rb\") as fd:\n logger.debug(\"Reading file\")\n # TODO: Read in lazy mode instead of copying everything in memory\n file_content = fd.read()\n storage_engine = (\n StorageEnum.ipfs\n if len(file_content) > 4 * 1024 * 1024\n else StorageEnum.storage\n )\n logger.debug(\"Uploading file\")\n user_code: StoreMessage\n status: MessageStatus\n user_code, status = client.create_store(\n file_content=file_content,\n storage_engine=storage_engine,\n channel=channel,\n guess_mime_type=True,\n ref=None,\n )\n logger.debug(\"Upload finished\")\n if print_messages or print_code_message:\n typer.echo(f\"{user_code.json(indent=4)}\")\n program_ref = user_code.item_hash\n\n # Register the program\n message, status = client.create_program(\n program_ref=program_ref,\n entrypoint=entrypoint,\n runtime=runtime,\n storage_engine=StorageEnum.storage,\n channel=channel,\n memory=memory,\n vcpus=vcpus,\n timeout_seconds=timeout_seconds,\n persistent=persistent,\n encoding=encoding,\n volumes=volumes,\n subscriptions=subscriptions,\n )\n logger.debug(\"Upload finished\")\n if print_messages or print_program_message:\n typer.echo(f\"{message.json(indent=4)}\")\n\n item_hash: ItemHash = message.item_hash\n hash_base32 = (\n b32encode(b16decode(item_hash.upper())).strip(b\"=\").lower().decode()\n )\n\n typer.echo(\n f\"Your program has been uploaded on aleph.im .\\n\\n\"\n \"Available on:\\n\"\n f\" {settings.VM_URL_PATH.format(hash=item_hash)}\\n\"\n f\" {settings.VM_URL_HOST.format(hash_base32=hash_base32)}\\n\"\n \"Visualise on:\\n https://explorer.aleph.im/address/\"\n f\"{message.chain}/{message.sender}/message/PROGRAM/{item_hash}\\n\"\n )",
"def install_agent(self, platform_uuid, fileargs):\n rpc = 'platforms.uuid.{}.install'.format(platform_uuid)\n return self.do_rpc(rpc, files=[fileargs])",
"def run_app():\n args = get_args()\n if valid_args(args):\n transfer_ownership(args)",
"def run(self, script, *args, **kwargs):\n return self._run('run', script, *args, **kwargs)",
"def execute_script(self, action, *args):\n self.host.cmd(('./%s' + len(args) * ' %s') % (action, *args))",
"def submit(self, script, **kwargs):\n raise NotImplementedError()",
"def run_as_root(command, *args, **kwargs):\n if whoami() != 'root':\n command = 'sudo ' + command\n return local_run(command, *args, **kwargs)",
"def cli_upload(parser):\n subparser = argparse.ArgumentParser(description='Upload to vault',\n parents=[parser])\n\n subparser.add_argument('-l', '--local',\n required=True,\n type=str,\n help='Local path')\n subparser.add_argument('-f', '--force',\n action='store_true',\n dest='overwrite',\n default=False)\n subparser.add_argument('-s', '--storage',\n type=str,\n required=False,\n default='drop.jarvice.com',\n dest='storage',\n help='Vault address')\n subparser.add_argument('-d', '--drop_remote',\n type=str,\n required=False,\n dest='remote',\n help='Remote path')\n args = subparser.parse_args()\n\n local = args.local\n store = args.storage\n remote = args.remote\n overwrite = args.overwrite\n\n utils.upload(config['username'], config['apikey'],\n local, store, remote, overwrite=overwrite)",
"def upload():\n sh('python setup.py register sdist upload')",
"def publish(self, path, recipient, *args):\n if not os.environ.get(\"OS_PROJECT_ID\", None):\n logging.log(\n logging.ERROR,\n \"Openstack RC file hasn't been sourced in the working %s%s\",\n \"environment. Please source an Openstack RC file to enable\",\n \" the use of Openstack tools.\",\n )\n sys.exit(-1)\n\n container = \"shared-upload-\" + recipient + \"-\" + time.strftime(\"%Y%m%d-%H%M%S\")\n\n subprocess.call([\"swift\", \"upload\", container, path]) # nosec\n\n self.share(container, recipient, *args)",
"def script_run(ctx: click.Context, name, script_arguments):\n subcommand_script.cmd_run(ctx.obj, name, script_arguments)",
"def sudo(command):\n return Effect(Sudo(command=command))",
"def up(self, args):\n try:\n assert len(args) > 0\n path = args[0]\n compress = should('Compress file?')\n self.prepare_upload(path, compress)\n except AssertionError:\n print(\"I need a file name!\")",
"def install_script_stored_on_remote(script_dir, script_name, mode=775, owner='root'):\n full_path = os.path.join(script_dir, script_name)\n\n with cd(script_dir):\n sudo(\"chmod {} {}\".format(mode, script_name))\n sudo(\"chown {} {}\".format(owner, script_name))\n sudo(\"ln -sf {} {}\".format(full_path, env.system_script_dir))",
"def share(self, container, recipient, *args):\n logging.log(logging.INFO, \"share called\")\n logging.log(logging.INFO, args)\n tenant = os.environ.get(\"OS_PROJECT_ID\", None)\n if not tenant:\n logging.log(\n logging.ERROR,\n \"Openstack RC file hasn't been sourced in the working %s%s\",\n \"environment. Please source an Openstack RC file to enable\",\n \" the use of Openstack tools.\",\n )\n sys.exit(-1)\n command = [\"swift\", \"post\", container]\n rights = []\n # If read access is specified in arguments, grant read access.\n if \"r\" in args:\n command.append(\"--read-acl\")\n command.append(recipient + \":*\")\n rights.append(\"r\")\n rights.append(\"l\")\n # If write access is specified in arguments, grant write access.\n if \"w\" in args:\n command.append(\"--write-acl\")\n command.append(recipient + \":*\")\n rights.append(\"w\")\n\n logging.log(logging.INFO, f\"Running POST: {command}\")\n subprocess.call(command) # nosec\n\n asyncio.run(self._push_share(container, [recipient], rights))",
"def setup(self, **kwargs):\n if self.bash_script:\n src = os.fspath(FILES / self.bash_script)\n dst = os.fspath(self.project_dir / self.bash_script)\n shutil.copy(src, dst)",
"def run_script_on_vm(script, arguments=None):\n\n # Stop the program if no init has occurred.\n Vagrant.stop_if_not_init()\n\n # What is the path to the script files on the VM?\n path = \"sudo -u vagrant bash \" + Settings.scripts_on_vm + script\n\n # Are there any arguments?\n if arguments:\n path = path + ' ' + arguments\n\n # Make sure the cabal path is in the environment's PATH.\n path = \"export PATH=/home/vagrant/.cabal/bin:$PATH; \" + path\n\n # Run vagrant ssh -c \"<commands>\" from the vagrant folder.\n command = ['vagrant', 'ssh', '-c', path]\n cwd = Settings.devbox_folder\n try:\n result = subprocess.check_call(command, cwd=cwd)\n except subprocess.CalledProcessError:\n Utilities.log(\"Running \" + script + \" on the VM failed.\")\n exit(1)",
"def main(cli_args=None):\n if cli_args is None:\n cli_args = sys.argv[1:]\n args = parse_cli(cli_args)\n try:\n upload_url = get_upload_url(vcenter=args.vcenter,\n vcenter_user=args.vcenter_user,\n vcenter_password=args.vcenter_password,\n the_vm=args.the_vm,\n username=args.username,\n password=args.password,\n file=args.file,\n upload_dir=args.upload_dir)\n except vim.fault.InvalidGuestLogin:\n printerr('Invalid password for VM user {}'.format(args.username))\n sys.exit(1)\n except vim.fault.CannotAccessFile:\n printerr('VM user {} lacks permission to write to {}'.format(args.username, args.upload_dir))\n sys.exit(1)\n\n if args.no_upload:\n print('Upload URL is: {}'.format(upload_url))\n print('To upload the file with curl, the syntax would be:')\n print(\"curl -k --fail -X PUT -d @{} {}\".format(args.file, upload_url.replace('&', '\\&')))\n else:\n with open(args.file) as the_file:\n stime = time.time()\n resp = requests.put(upload_url, data=the_file, verify=False)\n delta = time.time() - stime\n if not resp.ok:\n printerr('Upload failure')\n printerr('HTTP Response: {}'.format(resp.status))\n printerr('Response body: {}'.format(resp.content))\n sys.exit(1)\n print('Uploade {} bytes in {} seconds'.format(os.stat(args.file).st_size, delta))",
"def run_upload_dir(args):\n if not settings.UPLOAD_ENABLED:\n logger.info(\"Note that uploads are disabled\")\n run_temp_dir(args)\n elif settings.UPLOAD_DIR:\n logger.info(\"Upload directory is %s\", settings.UPLOAD_DIR)\n run_temp_dir(args)\n else:\n with tempfile.TemporaryDirectory(prefix=\"scelvis.upload\") as tmpdir:\n logger.info(\"Creating upload directory %s\", tmpdir)\n settings.UPLOAD_DIR = tmpdir\n run_temp_dir(args)",
"def upload_a_file(self, package, version, file_path):\n cmd_args = [self._push_executable]\n cmd_args += [\"--user\", self._username]\n cmd_args += [\"--api_key\", self._api_key]\n cmd_args += [\"--subject\", self._subject]\n cmd_args += [\"--repo\", self._repo]\n cmd_args += [\"--package\", package]\n cmd_args += [\"--version\", version]\n cmd_args += [\"--file_path\", file_path]\n\n if self._component:\n cmd_args += [\"--component\", self._component]\n if self._distribution:\n cmd_args += [\"--distribution\", self._distribution]\n if self._architecture:\n cmd_args += [\"--architecture\", self._architecture]\n\n cmd_args += [\"--package\", package]\n cmd_args += [\"--version\", version]\n cmd_args += [\"--file_path\", file_path]\n\n try:\n proc = subprocess.Popen(cmd_args,\n stderr=subprocess.PIPE,\n stdout=subprocess.PIPE,\n shell=False)\n (out, err) = proc.communicate()\n if proc.returncode != 0:\n raise RuntimeError(err)\n except subprocess.CalledProcessError as ex:\n raise RuntimeError(\"Failed to upload file {0} due to {1}\".format(file_path, ex))\n\n return True",
"def upload(ctx, release, rebuild, version):\n\n dist_path = Path(DIST_PATH)\n if rebuild is False:\n if not dist_path.exists() or not list(dist_path.glob('*')):\n print(\"No distribution files found. Please run 'build' command first\")\n return\n else:\n ctx.invoke(build, force=True, version=version)\n\n if release:\n args = ['twine', 'upload', 'dist/*']\n else:\n repository = 'https://test.pypi.org/legacy/'\n args = ['twine', 'upload', '--repository-url', repository, 'dist/*']\n\n env = os.environ.copy()\n\n p = subprocess.Popen(args, env=env)\n p.wait()",
"def runmecall():\n os.system('sudo /home/pi/flask-video-streaming-v1.5/RunMe.sh')",
"def run(self, script_args):\n run_url = '{0}/{1}/run'.format(self.url, self.script_name)\n headers = {'Content-Type': 'text/plain'}\n payload = json.dumps(script_args)\n\n resp = False\n if self.get():\n log.debug('Running script: {0}'.format(self.script_name))\n req = requests.post(run_url, auth=(self.username, self.password), headers=headers, data=payload)\n if req.status_code == 204 or 200:\n resp = req.json()\n return resp\n log.error('Failed running script: {0}\" Reason: {1} {2}'.format(self.script_name, req.status_code, req.json()))\n\n return resp"
] | [
"0.6268761",
"0.589509",
"0.58233005",
"0.58182013",
"0.5760758",
"0.57154745",
"0.5654159",
"0.5632504",
"0.5609804",
"0.5580056",
"0.556147",
"0.5543384",
"0.5473087",
"0.54669285",
"0.5439649",
"0.5400255",
"0.5376021",
"0.5350383",
"0.5334175",
"0.5298767",
"0.5295162",
"0.52790564",
"0.5271897",
"0.5219731",
"0.51980513",
"0.5174302",
"0.5152396",
"0.51507825",
"0.51285714",
"0.51247746"
] | 0.6812293 | 0 |
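The snippets retrieved above all share one remote-execution shape: copy a script to a throwaway path on the target, invoke it with an interpreter, then remove it. Below is a minimal sketch of that pattern, assuming paramiko is available and that authentication comes from the default SSH agent or key files; the host, user, and script arguments are placeholders, not values from any record.

import secrets
import paramiko

def run_remote_script(host, user, local_script, interpreter="/bin/sh"):
    # Sketch only: host/user/local_script are hypothetical placeholders.
    # Random destination so concurrent runs cannot collide.
    dest = "/tmp/" + secrets.token_hex(8)
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(host, username=user)  # auth via ssh-agent/default keys
    try:
        sftp = client.open_sftp()
        sftp.put(local_script, dest)
        sftp.close()
        # Run the script, then delete it even if it failed.
        _, stdout, _ = client.exec_command(f"{interpreter} {dest}; rm -f {dest}")
        return stdout.read().decode()
    finally:
        client.close()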
Return a random k-tuple of unique elements selected from population. | def rand_tuple(population, k, required_inds=None):
if isinstance(population, int):
        population = range(population)
if required_inds is None:
required_inds = []
    if not isinstance(required_inds, collections.abc.Iterable):
required_inds = [required_inds]
t = set(random.sample(population, k)) - set(required_inds)
    t = list(required_inds) + list(t)
return tuple(t[:k]) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def random_sample(population, k):\r\n \r\n newpopulation = population[:]\r\n if len(population) < k:\r\n raise ValueError, \"sample larger than population\"\r\n\r\n retlist = []\r\n populationsize = len(population)-1\r\n\r\n for num in range(k):\r\n pos = random_randint(0,populationsize-num)\r\n retlist.append(newpopulation[pos])\r\n del newpopulation[pos]\r\n\r\n return retlist",
"def random_generic_vertex_set(self, k, E=None):\n if E is None:\n E = set()\n S = [None for _ in xrange(k)]\n E = list(E)\n for i in xrange(k):\n S[i] = (ifilter(lambda x: x not in E, produce(self.L.random_element)).next(),\n self.K.unit_group.random_element())\n E.append(S[i][0])\n return tuple(S)",
"def uniform_selection(random, population, args):\r\n num_selected = args.setdefault('num_selected', 1)\r\n selected = []\r\n for _ in range(num_selected):\r\n selected.append(population[random.randint(0, len(population)-1)])\r\n return selected",
"def random_vertex_set(self, k, E=None):\n if E is None:\n E = set()\n S = [None for _ in xrange(k)]\n E = list(E)\n for i in xrange(k):\n S[i] = ifilter(lambda x: x not in E, produce(self.random_vertex)).next()\n E.append(S[i][0])\n return tuple(S)",
"def tournament_selector(population, size = 5):\n while True:\n sample_ix = nprand.random_integers(0, len(population) - 1, size)\n # because of sorted-ness, best ind is in smallest ix\n yield population[sample_ix.min()]",
"def tournament(population, k=3, elitism=0, kw=None, **kwargs):\n _population = sorted(population, reverse=True, key=itemgetter('fitness'))\n\n # pop off the N best individuals where N is elitism\n pool = map(_population.pop, [0] * elitism)\n\n # update the value of k to reflect the elitism count\n _k = min(k, len(_population)) + elitism\n\n while len(pool) < _k:\n pool.append(random.choice(_population))\n pool = [dict(t) for t in set([tuple(d.items()) for d in pool])]\n return sorted(pool, key=lambda x: x['fitness'], reverse=True)",
"def get_random_population():\r\n return [ get_random_individual() for _ in range(POPULATION_COUNT) ]",
"def random_set(k,n):\n if k > n:\n raise ValueError(\"You must pick k smaller than n\")\n S= set()\n j=0\n while j<k:\n S.add(randint(n))\n j = len(S)\n return S",
"def samplePopulation(self, k) -> \"Population\":\n sampledPopulation = Population(self.population_size, self.map)\n sampledPopulation.population = random.choices(self.population, k=k)\n sampledPopulation.sortPopulation()\n return sampledPopulation",
"def population_selection(population, sack, max_weight):\n sorted_population = population_performance(population, sack, max_weight)\n new_gen = []\n \n for fit_member in range(len(sorted_population) - 2): #killing two weakest\n new_gen.append(sorted_population[fit_member][0])\n\n return new_gen",
"def sample_n_k(n, k):\n\n if not 0 <= k <= n:\n raise ValueError(\"Sample larger than population or is negative\")\n if k == 0:\n return np.empty((0,), dtype=np.int64)\n elif 3 * k >= n:\n return np.random.choice(n, k, replace=False)\n else:\n result = np.random.choice(n, 2 * k)\n selected = set()\n selected_add = selected.add\n j = k\n for i in range(k):\n x = result[i]\n while x in selected:\n x = result[i] = result[j]\n j += 1\n if j == 2 * k:\n # This is slow, but it rarely happens.\n result[k:] = np.random.choice(n, k)\n j = k\n selected_add(x)\n return result[:k]",
"def sample_without_replacement(k, xsc):\n xs = list(xsc)\n ans = []\n\n while (k > 0) and (xsc != []):\n i = randint(0, len(xsc) - 1)\n ans.append(xsc.pop(i))\n k -= 1\n\n return ans",
"def random_select(candidates, size=50):\n llist = list(candidates)\n\n np.random.shuffle(llist)\n\n return llist[:size]",
"def random_indices(n,k):\n return np.sort(np.random.permutation(n)[:k])",
"def tournament_selection(pool):\n return max(random.sample(pool, len(pool) // 5))",
"def Chose_rand():\r\n total_list=list(range(1,467681))\r\n select=13788\r\n random_selected= random.sample(total_list,select)\r\n return (random_selected)",
"def movie_subset(k):\n return np.array(movies)[np.random.permutation(len(movies))[:k]].tolist()",
"def uniform_stochastic(population, next_gen_number, random_seed=42):\n\n np.random.seed(random_seed)\n\n indices = list(range(len(population)))\n indices_array = np.array(indices)\n\n selected_indices = np.random.choice(\n indices_array, size=next_gen_number\n )\n\n selected = []\n for indx in selected_indices:\n selected.append(population[indx])\n\n return selected",
"def tournament_selection(population, tournament_size=3):\n winners = []\n while len(winners) < GENERATION_SIZE:\n competitors = random.sample(population, tournament_size)\n competitors.sort(reverse=True)\n winners.append(competitors[0])\n return winners",
"def unused(permutation, nb_elements):\n return tuple(set(range(nb_elements)) - set(permutation))",
"def rank_selector(population):\n # make a cdf of probability of being selected\n invrange = np.arange(len(population), 0, -1, dtype = np.double)\n cdf = np.r_[0., (invrange / invrange.sum()).cumsum()]\n while True:\n u = nprand.random()\n ix = np.searchsorted(cdf, u) - 1\n yield population[ix]",
"def _select(self):\n sel = []\n\n # choose randomly while favouring fit individuals\n lp = len(self.population) // 2\n for _ in range(lp):\n idx1 = idx2 = int(math.sqrt(random.randrange(lp**2+1)))\n while idx1 == idx2:\n idx2 = int(math.sqrt(random.randrange(lp**2+1)))\n\n p1, p2 = self.population[idx1], self.population[idx2]\n sel.append((p1, p2))\n\n return sel",
"def random(self, k=1000, n=100):\n a = numpy.random.randint(k, size=n)\n _, self.counts = numpy.unique(a, return_counts=1)\n self.nk, self.zk = numpy.unique(self.counts, return_counts=1)\n return self",
"def select_unique_ids(self):\n utk = self.metadata\n utk_ids = []\n for gg in set(utk['gender']):\n for rg in set(utk['race']):\n for ag in set(utk['age']):\n try:\n intersection_ids = list(utk[np.logical_and(utk['gender'] == gg,\n np.logical_and(utk['race'] == rg,\n utk['age'] == ag))]['filename'])\n if len(intersection_ids) <= CAP:\n utk_ids += intersection_ids\n else:\n x = list(np.random.choice(intersection_ids, CAP, replace=False))\n utk_ids += x\n\n except:\n continue\n self.unique_ids = utk_ids\n return utk_ids",
"def __generate_random_nodes(self,k=3):\n if k < 3:\n k = 3\n\n k = min(k,len(self.G.nodes()))\n self.__logger.info(\"RANDOM_NODES: try to generate a set of {} nodes sampled with uniform distribution\".format(k))\n \n return random.sample(self.G.nodes(),k)",
"def tournament_selection(self, population: List[IndividualType]) -> List[IndividualType]:\n survivors: List[IndividualType] = []\n for _ in range(self.configuration.n_survivors):\n # Choose participants\n rooster: List[IndividualType] = random.sample(population, self.configuration.rooster_size)\n # Select fittest of participants as survivor\n fittest_individual_of_rooster = self.get_best_individual(rooster)\n population.remove(fittest_individual_of_rooster)\n survivors.append(fittest_individual_of_rooster)\n return survivors",
"def getSample(iterator, k):\n # fill the reservoir to start\n result = [next(iterator) for _ in range(k)]\n\n n = k\n\n for item in iterator:\n n += 1\n s = random.randint(0, n)\n if s < k:\n result[s] = item\n\n return result",
"def random_subset(array, count):\n indices = np.random.permutation(len(array))[:count]\n return array[indices]",
"def _rand_subset(self, iterable, num_elems):\n\n lst = list(iterable)\n assert num_elems <= len(lst)\n\n out = []\n\n while len(out) < num_elems:\n elem = self._rand_elem(lst)\n lst.remove(elem)\n out.append(elem)\n\n return out",
"def random_combination(iterable, r):\n pool = tuple(iterable)\n n = len(pool)\n indices = sorted(random.sample(xrange(n), r))\n return tuple(pool[i] for i in indices)"
] | [
"0.7027959",
"0.6738937",
"0.67178",
"0.6524128",
"0.649793",
"0.6422931",
"0.6414868",
"0.63981503",
"0.6355274",
"0.63087744",
"0.6304614",
"0.61815244",
"0.6177249",
"0.6150533",
"0.6144496",
"0.61237365",
"0.610168",
"0.60970324",
"0.60753673",
"0.6051624",
"0.6015738",
"0.60022324",
"0.6001233",
"0.60007954",
"0.5999606",
"0.5996472",
"0.59947073",
"0.5978212",
"0.59558475",
"0.59547955"
] | 0.78772885 | 0 |
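A quick usage sketch for rand_tuple, assuming the function above is in scope along with its random and collections imports: required indices always lead the result, and the remaining slots come from the random sample.

t = rand_tuple(10, 3, required_inds=0)  # an int population becomes range(10)
assert t[0] == 0         # the required index is placed first
assert len(t) == 3       # exactly k elements are returned
assert len(set(t)) == 3  # and they are unique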
If a CNAME RR is present at a node, no other data should be present; this ensures that the data for a canonical name and its aliases cannot be different. | def check_for_cname(record):
CNAME = cydns.cname.models.CNAME
if hasattr(record, 'label'):
if CNAME.objects.filter(domain=record.domain,
label=record.label).exists():
raise ValidationError("A CNAME with this name already exists.")
else:
if CNAME.objects.filter(label='', domain=record.domain).exists():
raise ValidationError("A CNAME with this name already exists.") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_cname_response(self):\n fqdn = \"cname.github.com\"\n answer = self.resolver.query(fqdn, \"CNAME\")\n for rr in answer:\n if rr.target.to_text() != \"github.map.fastly.net.\":\n raise TestException(\"Unexpected target for {0}: {1}\"\n .format(fqdn, rr.target))\n log.debug(\"[%-15s]: CNAME query for %s succeeded\",\n self.resolver.nameservers[0],\n fqdn)",
"def test_followCanonicalName(self):\n servers = {\n ('1.1.2.3', 53): {\n ('example.com', A): {\n 'answers': [('example.com', Record_CNAME('example.net'))],\n },\n ('example.net', A): {\n 'answers': [('example.net', Record_A('10.0.0.5'))],\n },\n },\n }\n resolver = self._getResolver(servers)\n d = resolver.lookupAddress('example.com')\n d.addCallback(lambda results: results[0]) # Get the answer section\n d.addCallback(\n self.assertEqual,\n [RRHeader('example.com', CNAME, payload=Record_CNAME('example.net')),\n RRHeader('example.net', A, payload=Record_A('10.0.0.5'))])\n return d",
"def test_detectCanonicalNameLoop(self):\n servers = {\n ('1.1.2.3', 53): {\n ('example.com', A): {\n 'answers': [('example.com', Record_CNAME('example.net')),\n ('example.net', Record_CNAME('example.com'))],\n },\n },\n }\n resolver = self._getResolver(servers)\n d = resolver.lookupAddress('example.com')\n return self.assertFailure(d, ResolverError)",
"def test_returnCanonicalName(self):\n servers = {\n ('1.1.2.3', 53): {\n (b'example.com', A): {\n 'answers': [(b'example.com', Record_CNAME(b'example.net')),\n (b'example.net', Record_A('10.0.0.7'))],\n },\n },\n }\n resolver = self._getResolver(servers)\n d = resolver.lookupAddress(b'example.com')\n d.addCallback(lambda results: results[0]) # Get the answer section\n d.addCallback(\n self.assertEqual,\n [RRHeader(b'example.com', CNAME, payload=Record_CNAME(b'example.net')),\n RRHeader(b'example.net', A, payload=Record_A('10.0.0.7'))])\n return d",
"def add_cname(self, alias_name, name):\n rdataa = dns.rdata.from_text(dns.rdataclass.IN,dns.rdatatype.CNAME,str(name))\n rdataseta = dns.rdataset.from_rdata(300,rdataa)\n self.update.add(alias_name,rdataseta)\n return dns.query.tcp(self.update,self.server_address)",
"def check_origin(self):\n if self.relativize:\n name = dns.name.empty\n else:\n name = self.origin\n if self.get_rdataset(name, dns.rdatatype.SOA) is None:\n raise NoSOA\n if self.get_rdataset(name, dns.rdatatype.NS) is None:\n raise NoNS",
"def test_missingName(self):\n servers = {\n ('1.1.2.3', 53): {\n (b'foo.example.com', A): {\n 'rCode': ENAME,\n },\n },\n }\n resolver = self._getResolver(servers)\n d = resolver.lookupAddress(b'foo.example.com')\n return self.assertFailure(d, DNSNameError)",
"def test_delegationLookupError(self):\n servers = {\n ('1.1.2.3', 53): {\n ('example.com', A): {\n 'authority': [('example.com', Record_NS('ns1.example.com'))],\n },\n ('ns1.example.com', A): {\n 'rCode': ENAME,\n },\n },\n }\n resolver = self._getResolver(servers)\n d = resolver.lookupAddress('example.com')\n return self.assertFailure(d, DNSNameError)",
"def test_delegationLookupEmpty(self):\n servers = {\n ('1.1.2.3', 53): {\n ('example.com', A): {\n 'authority': [('example.com', Record_NS('ns1.example.com'))],\n },\n ('ns1.example.com', A): {\n },\n },\n }\n resolver = self._getResolver(servers)\n d = resolver.lookupAddress('example.com')\n return self.assertFailure(d, ResolverError)",
"def set_dns_cname ( route53_conn, dns_name, cname_value ) :\n r53 = boto.route53.record.ResourceRecordSets( route53_conn, route_53_hosted_zoneid )\n monitor_dns = r53.add_change( 'UPSERT', dns_name, 'CNAME', ttl=60 )\n monitor_dns.add_value( cname_value )\n r53.commit( )",
"def is_valid_cname(common_name: str) -> bool:\n return True if Band.band_range(common_name) else False",
"def get_cname(self, host):\n cname = None\n\n if self.is_local(host):\n # Don't perform DNS lookup for localhost.\n cname = host\n else:\n self.log(\"Resolving host: \" + host)\n\n try:\n ans = self._resolver.query(host, 'CNAME')\n\n if len(ans.rrset.items) == 1:\n # Remove last (blank) field from host name.\n labels = ans[0].target.labels[0:-1]\n labels = map(lambda s: str(s, 'utf-8'), labels)\n cname = '.'.join(labels)\n\n except dns.resolver.NoAnswer as e:\n self.log(\"No answer\")\n except dns.resolver.NXDOMAIN as e:\n pass\n except dns.exception.DNSException as e:\n self.log(\"Exception: \" + str(type(e)))\n\n return cname",
"def test_nslookup_resolution(self):\n assert _get_ns_records_domains_for_domain(\"google.com\")",
"def _VerifyHostName(self, hostname, certificate):\n if 'subjectAltName' in certificate:\n names = [name for (name_type, name) in certificate['subjectAltName']\n if name_type.lower() == 'dns']\n else:\n names = [value for ((key, value),) in certificate['subject']\n if key.lower() == 'commonname']\n\n for name in names:\n if re.match(name.replace('.', '\\.').replace('*', '[^.]*'), hostname,\n re.I) is not None:\n return\n\n raise Error('Host name \"' + self.host + '\" does not match any name listed '\n 'in its SSL certificate!')",
"def reverse_dns_sna(ipaddress):\n\n r = requests.get(\"http://api.statdns.com/x/%s\" % ipaddress)\n\n if r.status_code == 200:\n names = []\n\n for item in r.json()['answer']:\n name = str(item['rdata']).strip(\".\")\n names.append(name)\n\n return names\n elif r.json()['code'] == 503:\n # NXDOMAIN - no PTR record\n return None",
"def test_assigning_when_same_country(self):\n def _asset_from_country(iso3_country):\n asset = BOAssetFactory()\n hostname = asset.hostname.replace('XXX', iso3_country)\n asset.hostname = hostname\n asset.save()\n return asset\n asset = _asset_from_country(self.owner_country_name)\n old_hostname = asset.hostname\n asset._try_assign_hostname(True)\n self.assertEqual(asset.hostname, old_hostname)",
"def node_dns(self, obj):\n if obj.node.node_dns:\n return obj.node.node_dns\n return None",
"def update_dns(self):\n if self.ptr:\n which_zone = None\n zones = dns.models.Zone.objects.all()\n for zone in zones:\n if self.ptr.endswith(zone.name) or self.ptr.endswith(zone.name + '.'):\n which_zone = zone\n break\n\n if which_zone:\n zone_name = which_zone.name\n record_name = self.ptr[:-len(zone_name)] if not self.ptr.endswith('.') else self.ptr[:-len(zone_name) - 1]\n if record_name.endswith('.'):\n record_name = record_name[:-1]\n record_type = 'A' if self.family == 4 else 'AAAA'\n\n dns.models.Record.objects.get_or_create(\n name=record_name,\n record_type=record_type,\n zone=which_zone,\n address=self\n )",
"def test_domain_unchanged_new_sitename_empty_domain_matches(self):\n from django.contrib.sites.models import Site\n\n site_1 = Site.objects.create(name='site1.com', domain='site1.com')\n site_2 = Site.objects.create(name='site2.com', domain='site2.com')\n\n with self.settings(ALDRYN_SITES_DOMAINS={\n site_1.pk: {'name': '', 'domain': 'site1.com'},\n site_2.pk: {'name': None, 'domain': 'site2.com'},\n }):\n utils.set_site_names(force=True)\n\n s = Site.objects.get(id=site_1.pk)\n self.assertEquals(s.name, 'site1.com')\n self.assertEquals(s.domain, 'site1.com')\n\n s = Site.objects.get(id=site_2.pk)\n self.assertEquals(s.name, 'site2.com')\n self.assertEquals(s.domain, 'site2.com')",
"def test_verify_hostname(self):\n verify_certificate_hostname(X509_DNS_ONLY, u\"twistedmatrix.com\")",
"def verifyHostname(self, connection, x509, errnum, depth, okay):\n commonName = x509.get_subject().commonName\n logging.debug(\"Received cert at level %d: '%s'\" % (depth, commonName))\n\n # We only want to verify that the hostname matches for the level 0\n # certificate:\n if okay and (depth == 0):\n cn = commonName.replace('*', '.*')\n hostnamesMatch = re.search(cn, self.hostname)\n if not hostnamesMatch:\n logging.warn(\"Invalid certificate subject CN for '%s': '%s'\"\n % (self.hostname, commonName))\n return False\n logging.debug(\"Valid certificate subject CN for '%s': '%s'\"\n % (self.hostname, commonName))\n return True",
"def validateNCName(value, space):\n ret = libxml2mod.xmlValidateNCName(value, space)\n return ret",
"def test_domain_changes_new_sitename_empty_domain_matches(self):\n from django.contrib.sites.models import Site\n\n site_1 = Site.objects.create(name='site1.com', domain='site1.com')\n site_2 = Site.objects.create(name='site2.com', domain='site2.com')\n\n with self.settings(ALDRYN_SITES_DOMAINS={\n site_1.pk: {'name': '', 'domain': 'other-site1.com'},\n site_2.pk: {'name': None, 'domain': 'other-site2.com'},\n }):\n utils.set_site_names(force=True)\n\n s = Site.objects.get(id=site_1.pk)\n self.assertEquals(s.name, 'other-site1.com')\n self.assertEquals(s.domain, 'other-site1.com')\n\n s = Site.objects.get(id=site_2.pk)\n self.assertEquals(s.name, 'other-site2.com')\n self.assertEquals(s.domain, 'other-site2.com')",
"def test_record_fqdn(self):\n zone = Zone('test.example.com')\n record = Record(zone, 'test-record', {'type': 'A', 'ttl': 300})\n self.assertEqual(record.fqdn, 'test-record.test.example.com')",
"def test_missingGlue(self):\n servers = {\n ('1.1.2.3', 53): {\n (b'foo.example.com', A): {\n 'authority': [(b'foo.example.com', Record_NS(b'ns1.example.org'))],\n # Conspicuous lack of an additional section naming ns1.example.com\n },\n (b'ns1.example.org', A): {\n 'answers': [(b'ns1.example.org', Record_A('10.0.0.1'))],\n },\n },\n ('10.0.0.1', 53): {\n (b'foo.example.com', A): {\n 'answers': [(b'foo.example.com', Record_A('10.0.0.2'))],\n },\n },\n }\n resolver = self._getResolver(servers)\n d = resolver.lookupAddress(b'foo.example.com')\n d.addCallback(getOneAddress)\n d.addCallback(self.assertEqual, '10.0.0.2')\n return d",
"def test_returns_proper_fqdn(self):\n self.mock_getaddrinfo.return_value = (\n [0, 1, 2, 3, ['take_me', 'not_me']],)\n self.mock_gethostbyaddr.return_value = ['proper_FQDN']\n return_value = sanitize_host('erroneous_host')\n self.mock_getaddrinfo.assert_called_once_with(\n 'erroneous_host', 443, socket.AF_INET, socket.SOCK_STREAM)\n self.mock_gethostbyaddr.assert_called_once_with('take_me')\n self.assertEqual(return_value, 'proper_FQDN')",
"def test_updatednsrecord(kasserver, kasapi):\n kasserver.add_dns_record(\"test.example.com\", \"CNAME\", \"www.example2.com\")\n assert kasapi.requests_contains(\"update_dns_settings\")",
"def test_inconsistent_name(self):\n entries = {'uid=test,ou=people,dc=esmgquadrivium,dc=nl': {\n 'uid': ['test'],\n 'givenName': ['Maarten'],\n 'sn': ['Visscher'],\n 'cn': ['Wessel']}\n }\n with self.assertRaises(CloneError):\n clone(entries)",
"def test_cn_ids_are_used_as_fallback(self):\n with pytest.warns(SubjectAltNameWarning):\n rv = extract_ids(X509_CN_ONLY)\n assert [\n DNSPattern(b\"www.microsoft.com\")\n ] == rv",
"def lookupNameservers(self, name, timeout=None):\n name_is_self = name in [self.wildcard_domain, self.ns_domain]\n if name.endswith('.' + self.wildcard_domain) or name_is_self:\n # If we're responsible for this domain, return NS records\n payload = dns.Record_NS(name=self.ns_domain)\n answer = dns.RRHeader(name=name, type=dns.NS,\n payload=payload, auth=True, ttl=TTL)\n\n # Additional section: NS ip address\n additional_payload = dns.Record_A(address=self.my_ip)\n additional_answer = dns.RRHeader(name=name,\n payload=additional_payload, ttl=TTL)\n\n answers = [answer]\n authority = []\n additional = [additional_answer]\n\n return defer.succeed((answers, authority, additional))\n\n # fail for domains that are not handled by our server\n return defer.fail(failure.Failure(dns.AuthoritativeDomainError(name)))"
] | [
"0.66299236",
"0.660377",
"0.6489522",
"0.6237036",
"0.5940322",
"0.5899176",
"0.5880195",
"0.5746405",
"0.56934726",
"0.5651654",
"0.54630584",
"0.5213084",
"0.5147239",
"0.5126222",
"0.507616",
"0.507153",
"0.50488186",
"0.50211823",
"0.49776897",
"0.4961214",
"0.49337912",
"0.49281493",
"0.4925947",
"0.49083367",
"0.49021533",
"0.48789802",
"0.48636916",
"0.4856376",
"0.48533544",
"0.4836904"
] | 0.6656125 | 0 |
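The paired check above enforces this RFC 1034 rule at insert time through a Django-style ORM (cydns). The same exclusivity rule can be stated over any in-memory zone snapshot; the sketch below uses a hypothetical {(label, domain): set_of_record_types} layout that is not part of the record above.

def validate_no_cname_conflict(zone, label, domain, new_rtype):
    # Hypothetical zone layout: maps a node to the record types it holds.
    existing = zone.get((label, domain), set())
    if new_rtype == "CNAME" and existing:
        raise ValueError(f"{label}.{domain}: a CNAME cannot coexist with {sorted(existing)}")
    if "CNAME" in existing:
        raise ValueError(f"{label}.{domain}: node already holds a CNAME")

zone = {("www", "example.com"): {"CNAME"}}
validate_no_cname_conflict(zone, "api", "example.com", "A")      # fine: empty node
try:
    validate_no_cname_conflict(zone, "www", "example.com", "A")  # rejected
except ValueError as exc:
    print(exc)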
If an object's domain is delegated, the object must not be modified. Delegated domains cannot have objects created in them. | def check_for_delegation(record):
try:
if not record.domain.delegated:
return
except ObjectDoesNotExist:
return
if not record.pk: # We don't exist yet.
raise ValidationError("No objects can be created in the {0}"
"domain. It is delegated."
.format(record.domain.name)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _adddomain(self, domain: Domain):\n\n domain = copy.deepcopy(domain)\n if self.model is not None:\n # Check that model and domain are compatible\n self._validate_model_domain(self.model, domain)\n\n # Add in domain\n self.domain = domain\n\n # Setup base namelists\n self._set_base_namelists()\n else:\n self.domain = domain",
"def domain(self, domain):",
"def test_change_domain_type_assignment_rule(self):\n pass",
"def test_is_delegate_inherited_and_local(self):\n self.make_assignment(self.category, self.user_bob, self.role_delegate)\n self.make_assignment(self.project, self.user_bob, self.role_delegate)\n self.assertTrue(self.project.is_delegate(self.user_bob))",
"def test_validate_delegate_inherited(self):\n self.make_assignment(self.category, self.user_bob, self.role_delegate)\n # Limit should not be reached\n delegate_as = self.make_assignment(\n self.project, self.user_carol, self.role_delegate\n )\n self.assertIsInstance(delegate_as, RoleAssignment)",
"def test_update_domain_only(self):\n self.test_update()",
"def __has_no_dependents (self, obj, constraints):\n failed = False\n while constraints and not failed:\n c = constraints [0]\n\n if c [1] == obj:\n failed = True\n\n constraints = constraints [1:]\n\n return not failed",
"def __init__(self):\n warn_bounded_domain(self)",
"def is_still_owner(self):\n raise tooz.NotImplemented",
"def proxy_check(self, proxy):",
"def __ne__(self, obj):\r\n return assert_(self.obj != obj, '%r == %r' % (self.obj, obj))",
"def test_validate_delegate_no_limit(self):\n self.make_assignment(self.project, self.user_bob, self.role_delegate)\n try:\n self.make_assignment(\n self.project, self.user_carol, self.role_delegate\n )\n except ValidationError as e:\n self.fail(e)",
"def __ne__(self, other: 'Origin') -> bool:\n return not self == other",
"def __ne__(self, other):\n if not isinstance(other, DomainCdnConfig):\n return True\n\n return self.to_dict() != other.to_dict()",
"def proxy_other(self):\n return None",
"def __nonzero__(self):\n raise TypeError(\"PObject is a distributed object, bool(PObject) is not allowed.\")",
"def __ne__(self, obj):\n return not self.__eq__(obj)",
"def __ne__(self, other: 'GatewayChangeRequestGatewayClientGatewayCreate') -> bool:\n return not self == other",
"def _discretize(self, constraints_object):\n pass",
"def exclude_domain(self) -> None:\n self.exclude_domains.add(current_domain.get())",
"def __ne__(self, other):\n if not isinstance(other, RuntimeAntiMalwareRule):\n return True\n\n return self.to_dict() != other.to_dict()",
"def allow_relation(self, obj1, obj2, **hints):\n return True",
"def __ne__(self, other: object) -> bool:\n if self.__eq__(other):\n return False\n return True",
"def test_add_domain_type_assignment_rule(self):\n pass",
"def __ne__(self, other):\n return not isinstance(other, self.__class__)",
"def allow_relation(self, obj1, obj2, **hints):\n return None",
"def __ne__(self, other):\n if not isinstance(other, IamDomainGroupAllOf):\n return True\n\n return self.to_dict() != other.to_dict()",
"def __ne__(self, other: 'Gateway') -> bool:\n return not self == other",
"def __ne__(self, other: 'CrossConnectRouter') -> bool:\n return not self == other",
"def make_eligible(self):\n pass"
] | [
"0.58941036",
"0.5639527",
"0.5625555",
"0.5514755",
"0.54754555",
"0.54411316",
"0.53744304",
"0.5370402",
"0.5310718",
"0.5309314",
"0.52955467",
"0.5281646",
"0.5263368",
"0.5252164",
"0.5234074",
"0.5162429",
"0.51530653",
"0.5148863",
"0.51440006",
"0.5141241",
"0.5120889",
"0.51153785",
"0.5113904",
"0.5113356",
"0.5099705",
"0.50671655",
"0.5067012",
"0.50645155",
"0.50586104",
"0.5051228"
] | 0.6326696 | 0 |
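check_for_delegation above is shaped like a Django validator: it expects record.domain to possibly raise ObjectDoesNotExist, and it uses an unset record.pk to detect objects that have not been saved yet. A sketch of wiring it into a model's clean() hook follows; the AddressRecord model and its fields are hypothetical, and check_for_delegation is assumed importable from the record above.

from django.db import models

class AddressRecord(models.Model):  # hypothetical model, not from the record
    label = models.CharField(max_length=100)
    domain = models.ForeignKey("Domain", on_delete=models.CASCADE)

    def clean(self):
        # Raises ValidationError when creating under a delegated domain.
        check_for_delegation(self)

    def save(self, *args, **kwargs):
        self.full_clean()  # runs clean() before hitting the database
        super().save(*args, **kwargs)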
Return the taxicab distance between two intersections. >>> times_square = intersection(46, 7) >>> ess_a_bagel = intersection(51, 3) >>> taxicab(times_square, ess_a_bagel) 9 >>> taxicab(ess_a_bagel, times_square) 9 | def taxicab(a, b):
"*** YOUR CODE HERE ***"
return abs(street(a)-street(b)) + abs(avenue(a)-avenue(b)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def taxicab(a, b):\n street_1, street_2 = street(a), street(b)\n avenue_1, avenue_2 = avenue(a), avenue(b)\n return abs(street_1 - street_2) + abs(avenue_1 - avenue_2)",
"def taxicab(a, b):\n \"*** YOUR CODE HERE ***\"\n return abs(street(a) - street(b)) + abs(avenue(a) - avenue(b))",
"def taxicab(a, b):\n \"*** YOUR CODE HERE ***\"\n return abs(street(a) - street(b)) + abs(avenue(a) - avenue(b))",
"def taxicab(a, b):\n \"*** YOUR CODE HERE ***\"\n return abs(street(a)-street(b))+abs(avenue(a)-avenue(b))",
"def taxicab(a, b):\n \"*** YOUR CODE HERE ***\"\n return abs (street(a)-street(b))+(abs(avenue(a)-avenue(b)))",
"def karatsuba(a, b):\n if len(str(a)) == 1 or len(str(b)) == 1:\n return (a * b)\n else:\n m1 = max(len(str(a)), len(str(b)))\n m2 = m1 // 2\n\n a1, a2 = divmod(a, 10**m2)\n b1, b2 = divmod(b, 10**m2)\n\n x = karatsuba(a2, b2)\n y = karatsuba((a1 + a2), (b1 + b2))\n z = karatsuba(a1, b1)\n\n return ((z * 10**(2 * m2)) + ((y - z - x) * 10**(m2)) + (x))",
"def manhattan(a, b):\n return abs(a[0] - b[0]) + abs(a[1] - b[1])",
"def akendalltau(x,y):\r\n n1 = 0\r\n n2 = 0\r\n iss = 0\r\n for j in range(len(x)-1):\r\n for k in range(j,len(y)):\r\n a1 = x[j] - x[k]\r\n a2 = y[j] - y[k]\r\n aa = a1 * a2\r\n if (aa): # neither array has a tie\r\n n1 = n1 + 1\r\n n2 = n2 + 1\r\n if aa > 0:\r\n iss = iss + 1\r\n else:\r\n iss = iss -1\r\n else:\r\n if (a1):\r\n n1 = n1 + 1\r\n else:\r\n n2 = n2 + 1\r\n tau = iss / math.sqrt(n1*n2)\r\n svar = (4.0*len(x)+10.0) / (9.0*len(x)*(len(x)-1))\r\n z = tau / math.sqrt(svar)\r\n prob = erfcc(abs(z)/1.4142136)\r\n return tau, prob",
"def T_int(A, B, Rab2):\n return A * B / (A + B) * (3.0 - 2.0 * A * B * Rab2 / (A + B)) * (np.pi / (A + B)) ** 1.5 * np.exp(\n -A * B * Rab2 / (A + B))",
"def _histogram_intersection_distance(a, b):\n # branching version\n #return np.vstack((a, b)).min(axis=0).sum()\n\n # Non-branching version\n # noinspection PyUnresolvedReferences\n return (a + b - np.abs(a - b)).sum() * 0.5",
"def karatsuba(num1, num2):\n\n if num1 < 10 or num2 < 10:\n return num1 * num2\n\n power = max(len(str(num1)), len(str(num2)))\n\n power = int(power/2)\n\n # Numbers are split in the form,\n # `num1_1 * 10^power + num1_2` and\n # `num2_1 * 10^power + num2_2`\n\n num1_1 = int((num1) / (math.pow(10, power)))\n num2_1 = int((num2) / (math.pow(10, power)))\n num1_2 = int((num1) % (math.pow(10, power)))\n num2_2 = int((num2) % (math.pow(10, power)))\n\n # calculates the product of\n # the two split terms of the original numbers and\n # the product of hte sum of the first terms and second terms of the numbers.\n num1_12 = karatsuba(num1_1, num2_1)\n num2_12 = karatsuba(num2_2, num1_2)\n num12 = karatsuba(num1_1 + num1_2, num2_1 + num2_2)\n\n return num1_12 * pow(10, power * 2) + num2_12 + (num12 - num1_12 - num2_12) * pow(10, power)",
"def distance(self, first_tape, second_tape):\n pairs = zip(first_tape, second_tape)\n return math.sqrt(abs(sum(map((lambda n: self.subsq(*n)), pairs))))",
"def make_abba(a, b):\n return a + b * 2 + a",
"def get_minus_ab(a, b): # IN= 2'int' / OUT= 1'foat'\n return float(a-b)",
"def triarea(a, b, c):\n return 0.5 * edist(a, b) * pdist(c, a, b)",
"def overlap_coefficient(a, b):\n\ta_words, b_words = map(norm.set_clean_tokens, [a,b])\n\n\tintersection = set.intersection(a_words, b_words)\n\tlength_a_words, length_b_words = len(a_words), len(b_words)\n\n\t#empty sets\n\tif length_a_words == 0 or length_b_words == 0: return 0\n\n\t#try to compute overlap_coefficient\n\ttry: overlap_coefficient = len(intersection)/min(length_a_words,length_b_words)\n\texcept: overlap_coefficient = 0\n\n\treturn overlap_coefficient",
"def intersection(a, b):\n x = max(a[0],b[0])\n y = max(a[1],b[1])\n w = min(a[2],b[2]) - x\n h = min(a[3],b[3]) - y\n \n if h<0 or w<0 :\n return 0\n \n return h*w",
"def tau_calculation(self, det, gt):\n return np.round((self.area_of_intersection(det, gt) / self.area(det)), 2)",
"def overlap_area(a, b):\n return min(a[2] - b[0], b[2] - a[0]) * min(a[3] - b[1], b[3] - a[1])",
"def manhattan_dist(c1, c2):\n return abs(c1[0] - c2[0]) + abs(c1[1] - c2[1]) + abs(c1[2] - c2[2])",
"def trapezoid_area(first_base, second_base, distance):\n area = float(distance) * (float(first_base) + float(second_base))\n area = area / 2\n\n return area",
"def calculate_manhattan(node_a, node_b):\n return (abs(node_a.x - node_b.x) + abs(node_a.y - node_b.y))",
"def karatsuba(x, y):\n print(x, y)\n if len(str(x)) == 1 or len(str(y)) == 1:\n return x * y\n else:\n n = max(len(str(x)), len(str(y)))\n nby2 = int(n / 2)\n\n a = int(x / 10 ** (nby2))\n b = int(x % 10 ** (nby2))\n c = int(y / 10 ** (nby2))\n d = int(y % 10 ** (nby2))\n\n ac = karatsuba(a, c)\n bd = karatsuba(b, d)\n ad_plus_bc = karatsuba(a + b, c + d) - ac - bd\n\n # this little trick, writing n as 2*nby2 takes care of both even and odd n\n prod = ac * 10 ** (2 * nby2) + (ad_plus_bc * 10 ** nby2) + bd\n\n return prod",
"def manhattan(x,y):\n\tassert (isinstance(x, BayesNet) and isinstance(y, BayesNet)), 'Must pass in BayesNet objects.'\n\tassert (x==y), 'Passed-in BayesNet objects are not structurally equal.'\n\n\tdistance = np.sum( np.abs( x.flat_cpt() - y.flat_cpt() ) )\n\treturn distance",
"def intersectsAB(self, ray):\n v1 = ray.origin - self.pointA\n v2 = self.pointB - self.pointA\n v3 = Point(-ray.direction.y, ray.direction.x)\n dot = v2.dot(v3)\n if (abs(dot) < 0.000001):\n return None\n t1 = v2.cross(v1) / dot\n t2 = v1.dot(v3) / dot\n if (t1 >= 0.0 and (t2 >= 0.0 and t2 <= 1.0)):\n return t1\n return None",
"def test_get_taxon_abundance(self):\n taxon_abundance = TaxonAbundanceResult(**{\n 'by_tool': {\n 'kraken': flow_model(),\n 'metaphlan2': flow_model()\n }\n })\n self.generic_getter_test(taxon_abundance, MODULE_NAME,\n verify_fields=('by_tool',))",
"def calcAsymptote(A, B, C):\n h = -B/A/2\n a2 = C - A * h*h\n b2 = a2/A\n m0 = math.sqrt(a2/b2)\n b0 = -m0 * h\n return m0, b0, h",
"def raySegmentIntersectAB(self, ray):\n v1 = ray.origin - self.pointA\n v2 = self.pointB - self.pointA\n v3 = Point(-ray.direction.y, ray.direction.x)\n dot = v2.dot(v3)\n if (abs(dot) < 0.000001):\n return None\n t1 = v2.cross(v1) / dot\n t2 = v1.dot(v3) / dot\n if (t1 >= 0.0 and (t2 >= 0.0 and t2 <= 1.0)):\n return t1\n return None",
"def intersect_trees(tree1, tree2):\n for tree in (tree1, tree2):\n if has_duplicates(tree):\n raise ValueError('Either tree has duplicated taxa.')\n taxa1 = set([tip.name for tip in tree1.tips()])\n taxa2 = set([tip.name for tip in tree2.tips()])\n taxa_lap = taxa1.intersection(taxa2)\n if len(taxa_lap) == 0:\n raise ValueError('Trees have no overlapping taxa.')\n tree1_lap = tree1.shear(taxa_lap)\n tree2_lap = tree2.shear(taxa_lap)\n return (tree1_lap, tree2_lap)",
"def hypotenuse(a, b):\r\n return (a**2 + b**2)**0.5"
] | [
"0.7120618",
"0.67128575",
"0.67128575",
"0.65796095",
"0.65704274",
"0.5223073",
"0.50749713",
"0.5042515",
"0.5000128",
"0.48629433",
"0.48522303",
"0.4831717",
"0.47960907",
"0.4795959",
"0.478697",
"0.4773556",
"0.4723682",
"0.4699743",
"0.46614695",
"0.46481127",
"0.46472895",
"0.46434182",
"0.46233553",
"0.45847306",
"0.45681974",
"0.45653227",
"0.45601678",
"0.45499885",
"0.4535196",
"0.45334193"
] | 0.68399566 | 1 |
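Every taxicab variant above calls an intersection abstraction that none of the records define. Here is a sketch of constructors and selectors consistent with those calls; the pair representation is an assumption, since any representation honoring street() and avenue() would do.

def intersection(st, ave):
    # Assumed representation: an intersection is its (street, avenue) pair.
    return (st, ave)

def street(inter):
    return inter[0]

def avenue(inter):
    return inter[1]

times_square = intersection(46, 7)
ess_a_bagel = intersection(51, 3)
assert street(times_square) == 46 and avenue(ess_a_bagel) == 3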
Return the value of G(n), computed recursively. >>> g(1) 1 >>> g(2) 2 >>> g(3) 3 >>> g(4) 10 >>> g(5) 22 >>> from construct_check import check >>> check(HW_SOURCE_FILE, 'g', ['While', 'For']) True | def g(n):
"*** YOUR CODE HERE ***"
if n <= 3:
return n
else:
return g(n-1) + 2*g(n-2) + 3*g(n-3) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def g(n):\n \"*** YOUR CODE HERE ***\"\n if n < 4:\n return n\n else:\n return g(n-1) + 2*g(n-2) + 3*g(n-3)",
"def g(n):\n \"*** YOUR CODE HERE ***\"\n if n <=3:\n return n\n else:\n return g(n-1)+2*g(n-2)+3*g(n-3)",
"def g(n):\n \"*** YOUR CODE HERE ***\"\n if n <= 3:\n return n\n else:\n return g(n - 1) + 2 * g(n - 2) + 3 * g(n - 3)",
"def g(n):\n \"*** YOUR CODE HERE ***\"\n if n <= 3:\n return n\n else:\n return g(n - 1) + 2 * g(n - 2) + 3 * g(n - 3)",
"def g(n):\n \"*** YOUR CODE HERE ***\"\n store = {}\n if n in store:\n return store[n]\n elif n <= 3:\n store[n] = n\n return n\n else:\n store[n] = g(n-1) + 2 * g(n-2) + 3 * g(n-3)\n return store[n]",
"def g_iter(n):\n \"*** YOUR CODE HERE ***\"\n if n < 4:\n return n\n else:\n g1 = 1\n g2 = 2\n g3 = 3\n i = 3\n while(i < n):\n i += 1\n t = g3 + 2*g2 + 3*g1\n g1 = g2\n g2 = g3\n g3 = t\n return g3",
"def g_iter(n):\n\tif n <= 3:\n\t\treturn n\n\telse:\n\t\tg_n_1, g_n_2, g_n_3 = 3, 2, 1\n\t\t# always update the g_i until reach the final n\n\t\tfor i in range(4,n+1):\n\t\t\tg_i = g_n_1 + 2*g_n_2 + 3*g_n_3\n# \t\t\tupdate the g(n-1), g(n-2), g(n-3)\n\t\t\tg_n_1, g_n_2, g_n_3 = g_i, g_n_1, g_n_2\n\t\treturn g_i\n\t\"*** YOUR CODE HERE ***\"",
"def _g(X, g, n):\n if n == 3:\n n = 3.001 # for numerical stability\n xi = 1 + X**2\n hyp2f1_term = hyp2f1((n - 3) / 2, g / 2, n / 2, 1 / xi)\n beta_term_1 = beta((n - 3) / 2, (3-g)/2)\n beta_term_2 = beta((n-3)/2, 1.5)\n return 0.5 * (beta_term_1 - beta_term_2 * hyp2f1_term * xi ** ((3-n)/2))",
"def g_iter(n):\n \"*** YOUR CODE HERE ***\"\n g1, g2, g3, cur, ind = 1, 2, 3, 0, 3\n if n < 3:\n return n\n else:\n while ind < n:\n cur = g3 + 2 * g2 + 3 * g1\n ind += 1\n g1, g2, g3 = g2, g3, cur\n return g3",
"def gcd(f, g):\n lev, dom, per, F, G = f.unify(g)\n return per(dmp_gcd(F, G, lev, dom))",
"def generation(x, g):\n return int(x/g)",
"def G(k):\n return k^(k>>1)",
"def evaluate(self, g):\n pass",
"def evaluate(self, g):\n raise NotImplementedError",
"def zzx_heu_gcd(f, g, **flags):\n def interpolate(h, x):\n f = []\n\n while h:\n g = h % x\n\n if g > x // 2:\n g -= x\n\n f.insert(0, g)\n h = (h-g) // x\n\n return f\n\n def finalize(h, cff, cfg, gcd):\n h = zzx_mul_const(h, gcd)\n return h, cff, cfg\n\n if not (f or g):\n return [], [], []\n elif not f:\n return g, [], [1]\n elif not g:\n return f, [1], []\n\n df = zzx_degree(f)\n dg = zzx_degree(g)\n\n cf = zzx_content(f)\n cg = zzx_content(g)\n\n gcd = igcd(cf, cg)\n\n f = [ c // gcd for c in f ]\n g = [ c // gcd for c in g ]\n\n if df == 0 or dg == 0:\n return [gcd], f, g\n\n f_norm = zzx_max_norm(f)\n g_norm = zzx_max_norm(g)\n\n B = 2*min(f_norm, g_norm) + 29\n\n x = max(min(B, 99*INT_TYPE(isqrt(B))),\n 2*min(f_norm // abs(poly_LC(f)),\n g_norm // abs(poly_LC(g))) + 2)\n\n for i in xrange(0, 6):\n ff = zzx_eval(f, x)\n gg = zzx_eval(g, x)\n\n if ff and gg:\n h = igcd(ff, gg)\n\n cff = ff // h\n cfg = gg // h\n\n h = interpolate(h, x)\n h = zzx_primitive(h)[1]\n\n cff_, r = zzx_div(f, h)\n\n if not r:\n cfg_, r = zzx_div(g, h)\n\n if not r:\n return finalize(h, cff_, cfg_, gcd)\n\n cff = interpolate(cff, x)\n\n h, r = zzx_div(f, cff)\n\n if not r:\n cfg_, r = zzx_div(g, h)\n\n if not r:\n return finalize(h, cff, cfg_, gcd)\n\n cfg = interpolate(cfg, x)\n\n h, r = zzx_div(g, cfg)\n\n if not r:\n cff_, r = zzx_div(f, h)\n\n if not r:\n return finalize(h, cff_, cfg, gcd)\n\n x = INT_TYPE(2.7319*x*isqrt(isqrt(x)))\n\n raise HeuristicGCDFailed('no luck')",
"def solution(m,f):\r\n\r\n m = int(m)\r\n f = int(f)\r\n\r\n # number of accumulated generations\r\n n = 0 \r\n\r\n ## A tail-recursive implementation with constant memory usage\r\n ## is easily possible, but unfortunately Python cannot handle/optimize\r\n ## tail recursion, and is still limited by maximum recursion depth.\r\n ## We'll thus implement tail-recursion implicitly via a loop:\r\n while True:\r\n # The entire generation poset is symmetric,\r\n # so we can reduce the problem to m >= f\r\n # without affecting the output in any way\r\n if m < f:\r\n m, f = f, m\r\n\r\n ## Base cases: \r\n # Base case 1: Negative or zero inputs? --> impossible\r\n if m<1 or f<1:\r\n return \"impossible\"\r\n\r\n # Base case 2: (m, 1)\r\n # It takes (m-1) generations to generate the tuple (m, 1), i.e.\r\n # when always choosing the transition (m,1) -> (m+1, 1) \r\n if f==1:\r\n return str(n + m-1)\r\n\r\n ## Recursive case: go down the tree\r\n # (m,f) could have been generated from (m-f, f) or (m, f-m)\r\n # (or their symmetries) but we know that m >= f\r\n # so f-m will be <1 and we can ignore that branch.\r\n\r\n # As long as m will remain greater than f after the update,\r\n # we already know that would end up in the recursive case again\r\n # in the next step, so we can directly take multiple steps at\r\n # once in order to avoid unnecessary iterations:\r\n steps = m // f\r\n\r\n n += steps\r\n m -= steps * f",
"def main(number):\n data = [int(input()) for _ in range(number)]\n frist = data[0]\n for i in range(1, len(data)):\n frist = gcd(frist, data[i])\n print(frist)",
"def n(G):\n return G._n",
"def g():",
"def test_ggn_implementation(problem):\n problem.set_up()\n\n diag_ggn_from_full = AutogradExtensions(problem).diag_ggn_via_ggn()\n diag_ggn_from_block = AutogradExtensions(problem).diag_ggn()\n\n check_sizes_and_values(diag_ggn_from_full, diag_ggn_from_block)\n problem.tear_down()",
"def evaluate(self, g):\n return NotImplementedError",
"def evaluate(self, g):\n return NotImplementedError",
"def fG(self):\n pass",
"def rvg(g: Generator):\n # noinspection PyUnreachableCode\n try:\n return next(g)\n except StopIteration as r:\n return r.value",
"def evaluate_g(self, x, out=None, **kwargs):\n return self._base_nlp.evaluate_g(x, out=out, **kwargs)",
"def g(self, p):\n re = self._re(p)\n Le = self._Le(p)\n wf = self._wf(p)\n rf = self._rf(p)\n A = Le @ np.einsum('...ij,...j', self.Ee, re)\n B = wf @ np.einsum('...ij,...j', self.Ff, rf)\n return (B - A) * G * self.d",
"def zzx_mod_gcd(f, g, **flags):\n if not (f or g):\n return [], [], []\n elif not f:\n return g, [], [1]\n elif not g:\n return f, [1], []\n\n n = zzx_degree(f)\n m = zzx_degree(g)\n\n cf = zzx_content(f)\n cg = zzx_content(g)\n\n gcd = igcd(cf, cg)\n\n f = [ c // gcd for c in f ]\n g = [ c // gcd for c in g ]\n\n if n == 0 or m == 0:\n return [gcd], f, g\n\n A = max(zzx_abs(f) + zzx_abs(g))\n b = igcd(poly_LC(f), poly_LC(g))\n\n B = int(ceil(2**n*A*b*int(sqrt(n + 1))))\n k = int(ceil(2*b*log((n + 1)**n*A**(2*n), 2)))\n l = int(ceil(log(2*B + 1, 2)))\n\n prime_max = max(int(ceil(2*k*log(k))), 51)\n\n while True:\n while True:\n primes = set([])\n unlucky = set([])\n\n ff, gg, hh = {}, {}, {}\n\n while len(primes) < l:\n p = randprime(3, prime_max+1)\n\n if (p in primes) or (b % p == 0):\n continue\n\n F = gf_from_int_poly(f, p)\n G = gf_from_int_poly(g, p)\n\n H = gf_gcd(F, G, p)\n\n primes.add(p)\n\n ff[p] = F\n gg[p] = G\n hh[p] = H\n\n e = min([ gf_degree(h) for h in hh.itervalues() ])\n\n for p in set(primes):\n if gf_degree(hh[p]) != e:\n primes.remove(p)\n unlucky.add(p)\n\n del ff[p]\n del gg[p]\n del hh[p]\n\n if len(primes) < l // 2:\n continue\n\n while len(primes) < l:\n p = randprime(3, prime_max+1)\n\n if (p in primes) or (p in unlucky) or (b % p == 0):\n continue\n\n F = gf_from_int_poly(f, p)\n G = gf_from_int_poly(g, p)\n\n H = gf_gcd(F, G, p)\n\n if gf_degree(H) != e:\n unlucky.add(p)\n else:\n primes.add(p)\n\n ff[p] = F\n gg[p] = G\n hh[p] = H\n\n break\n\n fff, ggg = {}, {}\n\n for p in primes:\n fff[p] = gf_quo(ff[p], hh[p], p)\n ggg[p] = gf_quo(gg[p], hh[p], p)\n\n F, G, H = [], [], []\n\n crt_mm, crt_e, crt_s = crt1(primes)\n\n for i in xrange(0, e + 1):\n C = [ b * poly_nth(hh[p], i) for p in primes ]\n c = crt2(primes, C, crt_mm, crt_e, crt_s, True)\n\n H.insert(0, c)\n\n H = zzx_strip(H)\n\n for i in xrange(0, zzx_degree(f) - e + 1):\n C = [ poly_nth(fff[p], i) for p in primes ]\n c = crt2(primes, C, crt_mm, crt_e, crt_s, True)\n\n F.insert(0, c)\n\n for i in xrange(0, zzx_degree(g) - e + 1):\n C = [ poly_nth(ggg[p], i) for p in primes ]\n c = crt2(primes, C, crt_mm, crt_e, crt_s, True)\n\n G.insert(0, c)\n\n H_norm = zzx_l1_norm(H)\n\n F_norm = zzx_l1_norm(F)\n G_norm = zzx_l1_norm(G)\n\n if H_norm*F_norm <= B and H_norm*G_norm <= B:\n break\n\n return zzx_mul_const(H, gcd), F, G",
"def _calc_g(self, lambdify=True):\n g = None\n g_func = None\n # check to see if we have our gravity term saved in file\n g, g_func = self._load_from_file('g', lambdify)\n\n if g is None and g_func is None:\n # if no saved file was loaded, generate function\n print('Generating gravity compensation function')\n\n # get the Jacobians for each link's COM\n J_links = [self._calc_J('link%s' % ii, x=self.x_zeros,\n lambdify=False)\n for ii in range(self.N_LINKS)]\n J_joints = [self._calc_J('joint%s' % ii, x=self.x_zeros,\n lambdify=False)\n for ii in range(self.N_JOINTS)]\n\n # sum together the effects of each arm segment's inertia\n g = sp.zeros(self.N_JOINTS, 1)\n for ii in range(self.N_LINKS):\n # transform each inertia matrix into joint space\n g += (J_links[ii].T * self._M_LINKS[ii] * self.gravity)\n # sum together the effects of each joint's inertia on each motor\n for ii in range(self.N_JOINTS):\n # transform each inertia matrix into joint space\n g += (J_joints[ii].T * self._M_JOINTS[ii] * self.gravity)\n g = sp.Matrix(g)\n\n # save to file\n abr_control.utils.os_utils.makedirs(\n '%s/g' % self.config_folder)\n cloudpickle.dump(g, open(\n '%s/g/g' % self.config_folder, 'wb'))\n\n if lambdify is False:\n # if should return expression not function\n return g\n\n if g_func is None:\n g_func = self._generate_and_save_function(\n filename='g', expression=g,\n parameters=self.q)\n return g_func",
"def zzX_gcd(f, g, **flags):\n return zzX_cofactors(f, g, **flags)[0]",
"def zzX_heu_gcd(f, g, **flags):\n if poly_univariate_p(f):\n return zzx_heu_gcd(f, g, **flags)\n\n def interpolate(h, x):\n f = []\n\n while not zzX_zero_p(h):\n g = zzX_zz_trunc(h, x)\n f.insert(0, g)\n h = zzX_sub(h, g)\n h = zzX_quo_const(h, x)\n\n return f\n\n def finalize(h, cff, cfg, gcd):\n if zzX_zz_LC(h) > 0:\n h = zzX_mul_const(h, gcd)\n else:\n h = zzX_mul_const(h, -gcd)\n cff = zzX_neg(cff)\n cfg = zzX_neg(cfg)\n\n return h, cff, cfg\n\n zero_f = zzX_zero_p(f)\n zero_g = zzX_zero_p(g)\n\n l = poly_level(f)\n z = zzX_zero(l)\n\n if zero_f and zero_g:\n return z, z, z\n elif zero_f:\n return g, z, zzX_const(l, 1)\n elif zero_g:\n return f, zzX_const(l, 1), z\n\n df = zzX_degree(f)\n dg = zzX_degree(g)\n\n cf = zzX_zz_content(f)\n cg = zzX_zz_content(g)\n\n gcd = igcd(cf, cg)\n\n f = zzX_quo_const(f, gcd)\n g = zzX_quo_const(g, gcd)\n\n f_norm = zzX_max_norm(f)\n g_norm = zzX_max_norm(g)\n\n B = 2*min(f_norm, g_norm) + 29\n\n x = max(min(B, 99*INT_TYPE(isqrt(B))),\n 2*min(f_norm // abs(zzX_zz_LC(f)),\n g_norm // abs(zzX_zz_LC(g))) + 2)\n\n for i in xrange(0, 6):\n ff = zzX_eval(f, x)\n gg = zzX_eval(g, x)\n\n if not (zzX_zero_p(ff) or zzX_zero_p(gg)):\n h, cff, cfg = zzX_heu_gcd(ff, gg, **flags)\n\n h = interpolate(h, x)\n h = zzX_zz_primitive(h)[1]\n\n cff_, r = zzX_div(f, h)\n\n if zzX_zero_p(r):\n cfg_, r = zzX_div(g, h)\n\n if zzX_zero_p(r):\n return finalize(h, cff_, cfg_, gcd)\n\n cff = interpolate(cff, x)\n\n h, r = zzX_div(f, cff)\n\n if zzX_zero_p(r):\n cfg_, r = zzX_div(g, h)\n\n if zzX_zero_p(r):\n return finalize(h, cff, cfg_, gcd)\n\n cfg = interpolate(cfg, x)\n\n h, r = zzX_div(g, cfg)\n\n if zzX_zero_p(r):\n cff_, r = zzX_div(f, h)\n\n if zzX_zero_p(r):\n return finalize(h, cff_, cfg, gcd)\n\n x = INT_TYPE(2.7319*x*isqrt(isqrt(x)))\n\n raise HeuristicGCDFailed('no luck')"
] | [
"0.6509589",
"0.6436936",
"0.63279766",
"0.63279766",
"0.615563",
"0.6023759",
"0.5937535",
"0.55640376",
"0.5536395",
"0.55231243",
"0.54222506",
"0.5410513",
"0.539917",
"0.53932846",
"0.5342397",
"0.5292581",
"0.5288211",
"0.52790266",
"0.5253362",
"0.5252296",
"0.51862425",
"0.51862425",
"0.5178257",
"0.5168518",
"0.5158104",
"0.51114196",
"0.5103306",
"0.5100043",
"0.5092412",
"0.50871426"
] | 0.65330666 | 0 |
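Note on the modular-GCD negatives above: zzx_mod_gcd computes GCD images modulo several random primes and rebuilds signed integer coefficients with crt1/crt2. A minimal symmetric-CRT sketch in plain Python (illustrative names only, not the actual SymPy internals):

from math import prod

def crt_symmetric(moduli, residues):
    # Combine residues modulo pairwise-coprime moduli and return the
    # representative in (-M/2, M/2], since GCD coefficients may be negative.
    M = prod(moduli)
    x = 0
    for m, v in zip(moduli, residues):
        E = M // m
        x = (x + v * E * pow(E, -1, m)) % M
    return x - M if x > M // 2 else x

assert crt_symmetric([3, 5], [2, 3]) == -7   # -7 == 2 (mod 3) and 3 (mod 5)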
Return the value of G(n), computed iteratively. >>> g_iter(1) 1 >>> g_iter(2) 2 >>> g_iter(3) 3 >>> g_iter(4) 10 >>> g_iter(5) 22 >>> from construct_check import check >>> check(HW_SOURCE_FILE, 'g_iter', ['Recursion']) True | def g_iter(n):
if n <= 3:
return n
else:
g_n_1, g_n_2, g_n_3 = 3, 2, 1
        # keep updating g_i until reaching the final n
        for i in range(4, n + 1):
            g_i = g_n_1 + 2 * g_n_2 + 3 * g_n_3
            # shift the window: g(n-1), g(n-2), g(n-3)
            g_n_1, g_n_2, g_n_3 = g_i, g_n_1, g_n_2
        return g_i | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def g_iter(n):\n \"*** YOUR CODE HERE ***\"\n if n < 4:\n return n\n else:\n g1 = 1\n g2 = 2\n g3 = 3\n i = 3\n while(i < n):\n i += 1\n t = g3 + 2*g2 + 3*g1\n g1 = g2\n g2 = g3\n g3 = t\n return g3",
"def g_iter(n):\n \"*** YOUR CODE HERE ***\"\n g1, g2, g3, cur, ind = 1, 2, 3, 0, 3\n if n < 3:\n return n\n else:\n while ind < n:\n cur = g3 + 2 * g2 + 3 * g1\n ind += 1\n g1, g2, g3 = g2, g3, cur\n return g3",
"def g_iter(n):\n \"*** YOUR CODE HERE ***\"\n vals = [1, 2, 3]\n if n <= 3:\n return vals[n-1]\n for i in range(n - 3):\n new_val = 3 * vals[0] + 2 * vals[1] + 1 * vals[2]\n vals = vals[1:] + [new_val]\n return vals[-1]",
"def g_iter(n):\n \"*** YOUR CODE HERE ***\"\n if n <= 3:\n return n\n else:\n i = 3\n x, y, z = 1, 2, 3\n new = 1\n while i < n:\n new = z + (2*y) + (3*x)\n x, y, z = y, z, new \n i += 1\n return new",
"def g(n):\n\t\"*** YOUR CODE HERE ***\"\n\tif n <= 3:\n\t\treturn n\n\telse:\n\t\treturn g(n-1) + 2*g(n-2) + 3*g(n-3)",
"def _g(X, g, n):\n if n == 3:\n n = 3.001 # for numerical stability\n xi = 1 + X**2\n hyp2f1_term = hyp2f1((n - 3) / 2, g / 2, n / 2, 1 / xi)\n beta_term_1 = beta((n - 3) / 2, (3-g)/2)\n beta_term_2 = beta((n-3)/2, 1.5)\n return 0.5 * (beta_term_1 - beta_term_2 * hyp2f1_term * xi ** ((3-n)/2))",
"def rvg(g: Generator):\n # noinspection PyUnreachableCode\n try:\n return next(g)\n except StopIteration as r:\n return r.value",
"def generation(x, g):\n return int(x/g)",
"def g(n):\n \"*** YOUR CODE HERE ***\"\n if n < 4:\n return n\n else:\n return g(n-1) + 2*g(n-2) + 3*g(n-3)",
"def g(n):\n \"*** YOUR CODE HERE ***\"\n if n <=3:\n return n\n else:\n return g(n-1)+2*g(n-2)+3*g(n-3)",
"def g(n):\n \"*** YOUR CODE HERE ***\"\n if n <= 3:\n return n\n else:\n return g(n - 1) + 2 * g(n - 2) + 3 * g(n - 3)",
"def g(n):\n \"*** YOUR CODE HERE ***\"\n if n <= 3:\n return n\n else:\n return g(n - 1) + 2 * g(n - 2) + 3 * g(n - 3)",
"def gf(self, tolerance = None, maxiter = 1000):\n w0 = self.lead[0]\n w1 = self.lead[-1]\n w2 = self.lead[1]\n\n if tolerance is None:\n tolerance = 1e-10 * max(max(abs(w0.max()), abs(w1).max()), abs(w2).max())\n\n self.gf_r = greens_function(\n w0,\n w1,\n w2,\n tolerance,\n maxiter,\n )\n\n return self.gf_r",
"def evaluate(self, g):\n raise NotImplementedError",
"def g(n):\n \"*** YOUR CODE HERE ***\"\n store = {}\n if n in store:\n return store[n]\n elif n <= 3:\n store[n] = n\n return n\n else:\n store[n] = g(n-1) + 2 * g(n-2) + 3 * g(n-3)\n return store[n]",
"def evaluate(self, g):\n pass",
"def n(G):\n return G._n",
"def g_iter(n):\n \"*** YOUR CODE HERE ***\"\n counter = 0\n term1 = 3\n term2 = 2\n term3 = 1\n loop = n-3\n\n if n<=3:\n return n\n\n while counter<loop:\n term1,term2,term3=term1+2*term2+3*term3,term1,term2\n counter +=1\n return term1",
"def g(i):\n return int(np.log2(gc(i)^gc(i+1)))",
"def _g_prime(self, x):\n return self._g(x)*(1 - self._g(x))",
"def geom_iter(self, g_nums):\n # Using the custom coded pack_tups to not have to care whether the\n # input is iterable\n from .utils import pack_tups\n\n vals = pack_tups(g_nums)\n for val in vals:\n yield self.geom_single(val[0])",
"def next(self):\n temp = self.n\n try:\n self.n = next(self.g)\n except Exception as e:\n self._hasNext = False\n return temp",
"def evaluate_g(self, x, out=None, **kwargs):\n return self._base_nlp.evaluate_g(x, out=out, **kwargs)",
"def generation(self) -> int:\n return self._g",
"def Geometric(reduction_factor):\n prob = 1 # probability to continue\n while True:\n yield 1 - prob\n prob = prob * reduction_factor",
"def g_prime(z):\n return np.multiply(g(z), 1-g(z))",
"def evaluate(self, g):\n return NotImplementedError",
"def evaluate(self, g):\n return NotImplementedError",
"def cLCG(G):\n \n gens = []\n \n for g in G:\n gens.append(LCG(*g))\n \n m0 = G[0][3]-1\n \n while True:\n yield sum([(-1**j)*next(g) for j,g in enumerate(gens)]) % m0",
"def compute_g(self, i, j):\n #Compute variance and mean denominator (same denominator for both)\n g_next = 0\n if (i+1 < self.nb_days-1):\n g_next = self.g_heat[i+1,j]\n\n denominator = 2 * self.sigma2\n numerator_mean = self.sigma2 * (self.g_heat[i-1,j] + g_next)\n if (self.u_heat > self.temperatures[i]):\n denominator = denominator + self.sigma_g_star_2[0, j] * ((self.temperatures[i] - self.u_heat)**2)\n numerator_mean = numerator_mean + \\\n self.sigma_g_star_2[0, j] * (self.temperatures[i] - self.u_heat) * (self.consumptions[i] - self.s[i,j] * self.kappa[self.daytypes[i]])\n\n #Mean\n mean = numerator_mean / denominator\n\n #Compute variance numerator\n variance_numerator = (self.sigma2 * self.sigma_g_star_2[0, j])\n #Variance\n variance = variance_numerator / denominator\n\n self.g_heat[i,j] = self.truncated_norm(-inf, 0, mean, variance)"
] | [
"0.71208894",
"0.68869877",
"0.6281551",
"0.622201",
"0.6130775",
"0.6093902",
"0.60279924",
"0.5978761",
"0.59504557",
"0.59047425",
"0.5899456",
"0.5899456",
"0.5845385",
"0.58453226",
"0.58441585",
"0.583596",
"0.57956004",
"0.5788619",
"0.570271",
"0.56376046",
"0.5634359",
"0.5633822",
"0.56333196",
"0.5584421",
"0.55154586",
"0.55058455",
"0.54779094",
"0.54779094",
"0.54773027",
"0.54733497"
] | 0.73005706 | 0 |
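The iterative g_iter above can be cross-checked against a direct transcription of the recurrence from the query; a small self-contained sketch:

def g_rec(n):
    # Direct transcription: G(n) = G(n-1) + 2*G(n-2) + 3*G(n-3), G(n) = n for n <= 3
    if n <= 3:
        return n
    return g_rec(n - 1) + 2 * g_rec(n - 2) + 3 * g_rec(n - 3)

assert [g_rec(n) for n in range(1, 6)] == [1, 2, 3, 10, 22]

The iterative version keeps only the last three values, so it runs in O(n) time and O(1) space, while the naive recursion is exponential.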
Returns True if at least one of the digits of k is a 7, False otherwise. >>> has_seven(3) False >>> has_seven(7) True >>> has_seven(2734) True >>> has_seven(2634) False >>> has_seven(734) True >>> has_seven(7777) True | def has_seven(k):
if k % 10 == 7:
return True
elif k < 10:
return False
else:
return has_seven(k // 10) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def has_seven(k):\n if k == 0:\n return False\n else:\n if k%10 == 7:\n return True\n return has_seven(k//10)",
"def has_seven(k):\n \n if k % 10 == 7:\n return True\n else:\n if k<10:\n return False\n return has_seven(k//10)",
"def has_seven(k):\n if k % 10 == 7:\n return True\n elif k < 10:\n return False\n else:\n return has_seven(k // 10)",
"def has_seven(k):\n if k % 10 == 7:\n return True\n elif k < 10:\n return False\n else:\n return has_seven(k // 10)",
"def has_seven(k):\n if k % 10 == 7:\n return True\n elif k < 10:\n return False\n else:\n return has_seven(k // 10)",
"def isLucky(x):\n return x == 7",
"def is_set(x, k):\n\n return get_bit(x, k) == 1",
"def verify(n):\n\n # Take the sum of all digits.\n sum_of_digits = sum(luhn_digits(n))\n\n # The number is valid iff the sum of digits modulo 10 is equal to 0\n return sum_of_digits % 10 == 0",
"def check(self, number: int) -> bool:\n return (number in self.numbers_set)",
"def has_picked_week(self, week):\n return self.find_pick_for_week(week, key_only=True) is not None",
"def _can_do_sum_of_squares(n, k):\n if k < 1:\n return False\n if n < 0:\n return False\n if n == 0:\n return True\n if k == 1:\n return is_square(n)\n if k == 2:\n if n in (1, 2):\n return True\n if isprime(n):\n if n % 4 == 1:\n return 1 # signal that it was prime\n return False\n else:\n f = factorint(n)\n for p, m in f.items():\n # we can proceed iff no prime factor in the form 4*k + 3\n # has an odd multiplicity\n if (p % 4 == 3) and m % 2:\n return False\n return True\n if k == 3:\n if (n//4**multiplicity(4, n)) % 8 == 7:\n return False\n # every number can be written as a sum of 4 squares; for k > 4 partitions\n # can be 0\n return True",
"def isHappy(self, n):\n nxt = 0\n appeared = set()\n while True:\n nxt += (n%10)*(n%10)\n n /= 10\n if n == 0:\n if nxt == 1:\n return True\n if nxt in appeared:\n return False\n\n appeared.add(nxt)\n n = nxt\n nxt = 0",
"def is_harshad(n):\n return n % euler.sum_digits(n) == 0",
"def check_k(bigK):\n kk = bigK.flatten()\n k = kk[0]\n check = np.equal(k,kk)\n return all(check)",
"def check(self, number: int) -> bool:\n return number in self.nums",
"def is_prime(k: int) -> bool:\n if k < 2 or k % 2 == 0:\n return False\n elif k == 2:\n return True\n else:\n for x in range(3, int(math.sqrt(k) + 1), 2):\n if k % x == 0:\n return False\n return True",
"def sat(n: int, nums=[77410, 23223, 54187], lower_bound=2):\n return all(i % n == 0 for i in nums) and n >= lower_bound",
"def is_key(number):\n res = False\n if is_integer(number):\n if int(number) > 0:\n res = True\n return res",
"def is_prime(k):\n\n for i in xrange(2, int(k / 2) + 1):\n if k % i == 0:\n return False\n\n return True",
"def num_sevens(n):\n if n < 10 and n != 7:\n return 0\n else:\n return (n%10 == 7) + num_sevens(n//10)",
"def sum_n(k, lst):\n seen = set()\n for num in lst:\n if k - num in seen:\n return True\n seen.add(num)\n return False",
"def is_repetition(self, num: int = 3) -> bool:\n\n if sum(self.history_board == self.board_fen()) == num:\n return True\n return False",
"def sat(li: List[int], k=5):\n def prod(nums):\n ans = 1\n for i in nums:\n ans *= i\n return ans\n\n return min(li) > 1 and len(li) == k and all((1 + prod(li[:i] + li[i + 1:])) % li[i] == 0 for i in range(k))",
"def sat(n: int):\n return pow(2, n, n) == 3",
"def isHappy(n):\n def check_num(n):\n res = 0\n while n > 0:\n n, digits = divmod(n, 10)\n res += digits ** 2\n return res\n \n \n seen = set()\n while n != 1 and n not in seen:\n seen.add(n)\n n = check_num(n)\n \n return n == 1",
"def binary(f, k=1):\n from numpy import asarray\n f=asarray(f)\n return (f >= k)",
"def contains(self, key: int) -> bool:\n return self._find_key(key, find_empty=False) >= 0",
"def sat(nums: List[int], n=12345):\n return len(nums) <= 4 and sum(i ** 2 for i in nums) == n",
"def contains(self, key):\n if key in self.nums:\n return True\n return False",
"def sat(n: int, g=3, p=17, t=13):\n return pow(g, n, p) == t"
] | [
"0.8716564",
"0.8706798",
"0.8690393",
"0.8690393",
"0.8690393",
"0.61994755",
"0.58175665",
"0.57838863",
"0.57414204",
"0.5732691",
"0.55530447",
"0.5547129",
"0.55352366",
"0.5535023",
"0.5497758",
"0.5434485",
"0.5352723",
"0.53297573",
"0.5328972",
"0.5325144",
"0.53192306",
"0.52997285",
"0.52992195",
"0.5291894",
"0.52890116",
"0.52791435",
"0.5258899",
"0.5255543",
"0.52498204",
"0.5244709"
] | 0.8931878 | 0 |
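For has_seven, a non-recursive string-based baseline makes the doctest cases easy to re-verify (a sketch assuming k is a non-negative integer):

def has_seven_str(k):
    return '7' in str(k)

cases = {3: False, 7: True, 2734: True, 2634: False, 734: True, 7777: True}
assert all(has_seven_str(k) == expected for k, expected in cases.items())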
Return the number of ways to make change for amount. >>> count_change(7) 6 >>> count_change(10) 14 >>> count_change(20) 60 >>> count_change(100) 9828 | def count_change(amount):
    options = [2**i for i in range(amount.bit_length()) if 2**i <= amount]
    options = sorted(options, reverse=True)
    length = len(options)
def helper(remains, i, options, length):
        # ran out of coin denominations
        if i >= length:
return 0
# check the remains
if remains == 0:
return 1
elif remains < 0:
return 0
# every amount can be expressed by with_i + without_i
else:
with_i = helper(remains - options[i], i, options, length)
without_i = helper(remains, i+1, options, length)
return with_i + without_i
# use a helper function
    return helper(amount, 0, options, length) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def count_change(amount):\n def change_with_maxcoin(total, maxcoin):\n if total == 0:\n return 1\n if maxcoin == 0:\n return 0\n cnt = 0\n num_change = 0\n while cnt * maxcoin <= total:\n num_change += change_with_maxcoin(total - cnt * maxcoin, maxcoin // 2)\n cnt = cnt + 1\n return num_change\n\n maxcoin = 1\n while maxcoin < amount:\n maxcoin = maxcoin * 2\n if maxcoin > amount:\n maxcoin = maxcoin // 2\n\n return change_with_maxcoin(amount, maxcoin)",
"def count_change(amount): \n def count_partitions(cash, coins):\n if cash == 0:\n return 1\n if coins == 0:\n return 1\n elif cash < 0:\n return 0\n else:\n with_m = count_partitions(cash - 2**coins, coins)\n without_m = count_partitions(cash, coins - 1)\n return with_m + without_m\n def helper(highest, count = 0):\n if 2**count > highest:\n return count - 1\n else:\n return helper(highest, count+1)\n coins = helper(amount)\n return count_partitions(amount, coins)",
"def count_change(amount):\n \"*** YOUR CODE HERE ***\"\n def counts(amount, least_denomination):\n if amount == 0:\n return 1\n elif amount < 0:\n return 0\n elif amount == 0:\n return 0\n elif amount < 2 ** least_denomination:\n return 0\n else:\n return counts(amount - 2 ** least_denomination, least_denomination) + counts(amount, least_denomination + 1)\n\n return counts(amount, 0)",
"def count_change(amount):\n \"*** YOUR CODE HERE ***\"\n if amount < 1:\n return 0\n elif amount == 1:\n return 1\n elif amount == 2:\n return 2\n else:\n return count_change(amount - 1)",
"def count_change(amount, denominations):\n if amount == 0:\n return 1\n elif denominations == []:\n return 0\n else:\n count = 0\n n = amount/denominations[0]\n for i in range(n+1): \n change = denominations[0]*i\n count += count_change(amount-change, denominations[1:])\n return count",
"def count_change(amount, denoms = (50, 25, 10, 5, 1)):\n if amount == 0: return 1\n elif len(denoms) == 0: return 0\n elif amount >= denoms[0]:\n return count_change(amount-denoms[0], denoms) \\\n + count_change(amount, denoms[1:])\n else:\n return count_change(amount, denoms[1:])",
"def count_change(amount):\n def count_partitions(number, at_most):\n if number < 0:\n # There is no way to represent a negative number\n return 0\n elif number == 0:\n # There is only one way to represent zero\n return 1\n elif at_most == 0:\n # There is only one way to represent a number using one (2^0)\n return 1\n else:\n # The representation may contains 2^at_most or not\n contains = count_partitions(number - pow(2, at_most), at_most)\n not_contains = count_partitions(number, at_most - 1)\n return contains + not_contains\n\n def find_at_most(number, k = 0):\n if 2**k <= number:\n return find_at_most(number, k + 1)\n else:\n return k - 1\n\n at_most = find_at_most(amount, 1)\n\n return count_partitions(amount, at_most)",
"def count_change(amount):\n \"*** YOUR CODE HERE ***\"\n\n return helper(1, amount)",
"def count_change(amount):\n \"*** YOUR CODE HERE ***\"\n\n def findm(pig):\n i = 0\n a = 1\n while 2**i < pig:\n i += 1\n a = 2**(i-1)\n return a\n\n def count_partitions(n, m):\n \"\"\"Count the ways to partition n using parts up to m.\"\"\"\n # print(n, m)\n if n == 0:\n return 1\n elif n < 0:\n return 0\n elif m == 0:\n return 0\n else:\n return count_partitions(n-m, m) + count_partitions(n, m//2)\n\n \n \n c = findm(amount)\n b = count_partitions(amount, c)\n # print(b)\n return b\n # return count_partitions(amount, b)",
"def change(amount: int, coins: [int]) -> int:\n # DP array to hold the number of ways an amount i can be made up, where i is\n # the index of the array.\n # Base case, there is only 1 way to make an amount = 0,\n # i.e. select nothing.\n combinations = [1] + [0] * (amount - 1)\n\n # For all available denominations,\n for denomination in coins:\n # For all the amounts, upto the given amount,\n for amt in range(amount):\n # we check if this denomination can make up something <= amount ?\n # if yes, then the number of ways to make up (amt + denomination) =\n # number of ways to make up amt,\n # so we accumulate the total number of ways to make up any amount i.\n if amt + denomination <= amount:\n combinations[amt + denomination] += combinations[amt]\n\n # return the total number of ways to make up the amount.\n return combinations[amount]",
"def count_change(amnt, l):\n if amnt == 0:\n return 1 \n if amnt < 0 or len(l) == 0:\n return 0\n else:\n return count_change(amnt, l[1:]) + count_change(amnt - l[0], l)",
"def making_change_recursive(amt: int, denominations: list) -> int:\n # === Base case === #\n if amt == 0: # Only one way to make 0\n return 1\n if amt < 0: # No way to make negative\n return 0\n\n # Keep track of results\n count = 0\n cache = []\n\n # === Recursive case === #\n # Loop through the coins\n for i in range(len(denominations)):\n coin_val = denominations[i] # Current coin's value\n # Find remaining value after current coin value is accounted for\n # I.e. what still has to be broken up (made into change)\n remaining_val = amt - coin_val\n if remaining_val not in cache: # Check if calculation has already been done\n # Number of combinations of current is sum of current and successive coins\n count += making_change_recursive(remaining_val, denominations[i:])\n cache.append(remaining_val) # Add the current calculation to cache\n\n return count",
"def get_min_num_coins_for_change(cents):\n coins = [100, 50, 25, 10, 5, 1] # american coins\n num_of_coins = 0\n for coin in coins:\n num_of_coins += (cents / coin)\n cents %= coin\n return num_of_coins",
"def coinChange(self, coins: List[int], amount: int) -> int:\n \n self.amount_visited = dict()\n self.coins = coins\n \n return self.kernel(amount)",
"def making_change(amt: int, coins: list) -> int:\n # calc[i] represents the number of ways to get to amount i\n calc = [0] * (amt + 1)\n\n # 1 way to get zero\n calc[0] = 1\n\n # Pick all coins one by one and update calc[] values after the\n # index greater than or equal to the value of the picked coin\n for coin_val in coins:\n for j in range(coin_val, amt + 1):\n calc[j] += calc[j - coin_val]\n\n return calc[amt]",
"def makeChange(coins, total):\n\n if total <= 0:\n return 0\n\n coins.sort(reverse=True)\n stack = total\n mv = 0\n cnt = 0\n\n while (mv < len(coins)):\n if stack == 0:\n return cnt\n\n if coins[mv] > stack:\n mv += 1\n\n else:\n stack -= coins[mv]\n cnt += 1\n\n return -1",
"def coin_change(coins: List[int], change: int) -> int:\r\n matrix = [[0 for m in range(change + 1)] for m in range(len(coins) + 1)]\r\n for i in range(change + 1):\r\n matrix[0][i] = i\r\n for c in range(1, len(coins) + 1):\r\n for r in range(1, change + 1):\r\n if coins[c - 1] == r:\r\n matrix[c][r] = 1\r\n elif coins[c - 1] > r:\r\n matrix[c][r] = matrix[c - 1][r]\r\n else:\r\n matrix[c][r] = min(matrix[c - 1][r], 1 + matrix[c][r - coins[c - 1]])\r\n for c in range(0, len(coins) + 1):\r\n for r in range(0, change + 1):\r\n print(matrix[c][r], \"|\", end=\"\")\r\n print()\r\n return matrix[-1][-1]",
"def get_change(amount, coins=eur_coins): # equal sign means will default to eur_coins unless we supply argument(usd)\n \n \n change = []\n for coin in coins:\n while coin <= amount: #if coin value <= value we passed in..\n #while coin <= amount..keep adding until it isnt. then move on or rtn change\n amount -= coin #deduct amount of coin from amount we sent in\n change.append(coin) #add that onto our change\n return change #return change list",
"def final_frequency(changes: Sequence[int]) -> int:\n return sum(changes)",
"def get_pattern_count(left, coins):\r\n if len(coins) == 0:\r\n return 1\r\n # Get next coin\r\n coin = coins[0]\r\n # See how many could go into left\r\n most = left // coin\r\n # Loop through possible\r\n count = 0\r\n for i in range(0, most + 1):\r\n remaining = left - i * coin\r\n count += get_pattern_count(remaining, coins[1:])\r\n\r\n return count",
"def makeChange(coins, total):\n if total <= 0:\n return 0\n\n current_total = 0\n coin_used = 0\n coins = sorted(coins, reverse=True)\n for coin in coins:\n r = (total-current_total)//coin\n current_total += r*coin\n coin_used += r\n if current_total == total:\n return coin_used\n return -1",
"def make_count_change():\n \"*** YOUR CODE HERE ***\"",
"def find_solution_count(target, max_coin=None):\n if target == 0:\n return 1\n if max_coin is None:\n max_coin = 200\n key = '%d_%d' % (target, max_coin)\n if key in seen:\n return seen[key]\n count = 0\n for coin in DENOMINATIONS:\n if coin > max_coin:\n continue\n if coin <= target:\n count += find_solution_count(target - coin, coin)\n seen[key] = count\n return count",
"def get_num_of_shares(stock, investment):\n return int(investment // float(stock['Price']))",
"def _get_reviewer_change_count(reviewer, project_name, from_datetime):\n if project_name == PROJECT_ALL:\n # changes across all projects after from_datetime\n changes = reviewer.changes.filter(\n timestamp__gte=from_datetime).distinct()\n else:\n # changes in given project after from_datetime\n changes = reviewer.changes.filter(\n project_name=project_name,\n timestamp__gte=from_datetime).distinct()\n\n return changes.count()",
"def calculate_number_of_guesses(self, range):\r\n # Python 2.7.3: math.ceil() is a float\r\n # CodeSculptor: math.ceil() is an integer\r\n return int(math.ceil(math.log(range,2)))",
"def makeChange(coins, total):\n temp_value = 0\n coins.sort(reverse=True)\n\n if total < 0:\n return 0\n\n for coin in coins:\n if total % coin <= total:\n temp_value += total // coin\n total = total % coin\n\n return temp_value if total == 0 else -1",
"def calcul_buy_nb_action(self):\n nb_action = self.max_loss / (self.buy_price - self.stop_loss)\n invest = self.max_loss / (self.buy_price - self.stop_loss) * self.buy_price\n\n if invest > self.capital:\n return round(self.capital / self.buy_price, 9)\n else:\n return round(nb_action, 9)",
"def __numHeads(self):\n count = 1\n\n while (self.__coinFlip() == 1):\n count += 1\n return count",
"def count_total_sales(value):\n count = len(value)\n print(f\"We have found a total of {count} sale(s).\\n\")\n return count"
] | [
"0.7939031",
"0.76921844",
"0.7645819",
"0.7632281",
"0.751326",
"0.735496",
"0.7268972",
"0.69675857",
"0.6723433",
"0.6232168",
"0.6041933",
"0.59812564",
"0.59323406",
"0.58486235",
"0.5755835",
"0.56916153",
"0.5571448",
"0.5469678",
"0.54315627",
"0.5374645",
"0.53648406",
"0.52743113",
"0.5171694",
"0.5166676",
"0.51653683",
"0.5123488",
"0.50777286",
"0.5062631",
"0.5061025",
"0.5051774"
] | 0.79869974 | 0 |
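The count_change document enumerates powers-of-two coins largest-first; the same count can be obtained over ascending coins with memoization. A sketch assuming amount >= 1 (the function name is mine):

from functools import lru_cache

def count_change_memo(amount):
    @lru_cache(maxsize=None)
    def ways(remaining, coin):
        if remaining == 0:
            return 1
        if coin > remaining:
            return 0
        # Either use at least one `coin`, or move on to the next power of two.
        return ways(remaining - coin, coin) + ways(remaining, coin * 2)
    return ways(amount, 1)

assert [count_change_memo(a) for a in (7, 10, 20, 100)] == [6, 14, 60, 9828]

Memoization bounds the state space by (amount x number of denominations), whereas the plain recursion revisits the same (remaining, coin) pairs repeatedly.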
Implementation of focal loss from the paper, adapted for multi-class classification | def categorical_focal_loss(gamma=2.0, alpha=0.25):
def focal_loss(y_true, y_pred):
        # Define epsilon so that backpropagation does not produce NaN from division by zero
        epsilon = backend.epsilon()
        # Clip the prediction values so log() stays finite
        y_pred = backend.clip(y_pred, epsilon, 1.0 - epsilon)
# Calculate cross entropy
cross_entropy = -y_true*backend.log(y_pred)
# Calculate weight that consists of modulating factor and weighting factor
weight = alpha * y_true * backend.pow((1-y_pred), gamma)
# Calculate focal loss
loss = weight * cross_entropy
        # Sum over the class axis to get a per-sample loss
loss = backend.sum(loss, axis=1)
return loss
return focal_loss | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def focal_loss(self,labels, logits, gamma=2):\n y_pred = tf.nn.softmax(logits, dim=-1) # [batch_size,num_classes]\n # labels = tf.one_hot(labels, depth=y_pred.shape[1])\n L = -labels * ((1 - y_pred) ** gamma) * tf.log(y_pred)\n L = tf.reduce_sum(L, axis=1)\n return L",
"def focal_loss(labels, logits, alpha, gamma):\n BCLoss = F.binary_cross_entropy_with_logits(\n input=logits, target=labels, reduction=\"none\")\n\n if gamma == 0.0:\n modulator = 1.0\n else:\n modulator = torch.exp(-gamma * labels * logits - gamma * torch.log(1 +\n torch.exp(-1.0 * logits)))\n\n loss = modulator * BCLoss\n\n weighted_loss = alpha * loss\n focal_loss = torch.sum(weighted_loss)\n\n focal_loss /= torch.sum(labels)\n return focal_loss",
"def binary_focal_loss_fixed(y_true, y_pred):\r\n y_true = tf.cast(y_true, tf.float32)\r\n alpha_t = y_true*alpha + (K.ones_like(y_true)-y_true)*(1-alpha)\r\n\r\n p_t = y_true*y_pred + (K.ones_like(y_true)-y_true)*(K.ones_like(y_true)-y_pred) + K.epsilon()\r\n focal_loss = - alpha_t * K.pow((K.ones_like(y_true)-p_t),gamma) * K.log(p_t)\r\n return K.mean(focal_loss)",
"def focal_loss_sigmoid(labels,logits,alpha=0.25,gamma=2):\n y_pred=tf.nn.sigmoid(logits)\n labels=tf.to_float(labels)\n L=-labels*(1-alpha)*((1-y_pred)*gamma)*tf.log(y_pred)-\\\n (1-labels)*alpha*(y_pred**gamma)*tf.log(1-y_pred)\n return L",
"def focal_loss_multilabel_v2(sigmoid_p, target_tensor, weights=None, alpha=0.5, gamma=2):\n# sigmoid_p = tf.nn.sigmoid(prediction_tensor)\n zeros = array_ops.zeros_like(sigmoid_p, dtype=sigmoid_p.dtype)\n \n # For poitive prediction, only need consider front part loss, back part is 0;\n # target_tensor > zeros <=> z=1, so poitive coefficient = z - p.\n pos_p_sub = array_ops.where(target_tensor > zeros, target_tensor - sigmoid_p, zeros)\n \n # For negative prediction, only need consider back part loss, front part is 0;\n # target_tensor > zeros <=> z=1, so negative coefficient = 0.\n neg_p_sub = array_ops.where(target_tensor > zeros, zeros, sigmoid_p)\n per_entry_cross_ent = - alpha * (pos_p_sub ** gamma) * tf.log(tf.clip_by_value(sigmoid_p, 1e-8, 1.0)) \\\n - (1 - alpha) * (neg_p_sub ** gamma) * tf.log(tf.clip_by_value(1.0 - sigmoid_p, 1e-8, 1.0))\n return tf.reduce_sum(per_entry_cross_ent, axis=1)",
"def focal_loss_fixed(y_true, y_pred):\r\n epsilon = 1.e-9\r\n y_true = tf.convert_to_tensor(y_true, tf.float32)\r\n y_pred = tf.convert_to_tensor(y_pred, tf.float32)\r\n\r\n model_out = tf.add(y_pred, epsilon)\r\n ce = tf.multiply(y_true, -tf.log(model_out))\r\n weight = tf.multiply(y_true, tf.pow(tf.subtract(1., model_out), gamma))\r\n fl = tf.multiply(alpha, tf.multiply(weight, ce))\r\n reduced_fl = tf.reduce_max(fl, axis=1)\r\n return tf.reduce_mean(reduced_fl)",
"def focal_loss_sigmoid(labels, logits, alpha=0.5, gamma=2):\n\n prob = logits.sigmoid()\n labels = torch.nn.functional.one_hot(labels.squeeze().long(), num_classes=prob.shape[1])\n\n cross_ent = torch.clamp(logits, min=0) - logits * labels + torch.log(1+torch.exp(-torch.abs(logits)))\n prob_t = (labels*prob) + (1-labels) * (1-prob)\n modulating = torch.pow(1-prob_t, gamma)\n alpha_weight = (labels*alpha)+(1-labels)*(1-alpha)\n\n focal_cross_entropy = modulating * alpha_weight * cross_ent\n return focal_cross_entropy",
"def focal_loss_softmax(labels,logits , gamma=2):\n y_pred=tf.nn.softmax(logits,dim=-1) # [batch_size,num_classes]\n labels=tf.one_hot(labels,depth=y_pred.shape[1])\n L=-labels*((1-y_pred)**gamma)*tf.log(y_pred)\n L=tf.reduce_sum(L,axis=1)\n return L",
"def __init__(self, gamma=2, alpha=0.25):\n super(FocalLoss, self).__init__()\n self.gamma = gamma\n self.alpha = alpha\n self.neg_pos_ratio = 7\n self.use_CrossEntropy = False",
"def binary_focal_loss(y_true, y_pred, gamma=2., alpha=.25):\n \"\"\"\n :param y_true: A tensor of the same shape as `y_pred`\n :param y_pred: A tensor resulting from a sigmoid\n :return: Output tensor.\n \"\"\"\n pt_1 = tf.where(tf.equal(y_true, 1), y_pred, tf.ones_like(y_pred))\n pt_0 = tf.where(tf.equal(y_true, 0), y_pred, tf.zeros_like(y_pred))\n\n epsilon = K.epsilon()\n # clip to prevent NaN's and Inf's\n pt_1 = K.clip(pt_1, epsilon, 1. - epsilon)\n pt_0 = K.clip(pt_0, epsilon, 1. - epsilon)\n\n return -K.sum(alpha * K.pow(1. - pt_1, gamma) * K.log(pt_1)) \\\n -K.sum((1 - alpha) * K.pow(pt_0, gamma) * K.log(1. - pt_0))",
"def focal_conf_objectness_loss(self, conf_data, conf_t):\n\n conf_t = conf_t.view(-1) # [batch_size*num_priors]\n conf_data = conf_data.view(\n -1, conf_data.size(-1)\n ) # [batch_size*num_priors, num_classes]\n\n # Ignore neutral samples (class < 0)\n keep = (conf_t >= 0).float()\n conf_t[conf_t < 0] = 0\n\n background = (conf_t == 0).float()\n at = (1 - cfg.focal_loss_alpha) * background + cfg.focal_loss_alpha * (\n 1 - background\n )\n\n logpt = (\n F.logsigmoid(conf_data[:, 0]) * (1 - background)\n + F.logsigmoid(-conf_data[:, 0]) * background\n )\n pt = logpt.exp()\n\n obj_loss = -at * (1 - pt) ** cfg.focal_loss_gamma * logpt\n\n pos_mask = conf_t > 0\n conf_data_pos = (conf_data[:, 1:])[\n pos_mask\n ] # Now this has just 80 classes\n conf_t_pos = conf_t[pos_mask] - 1 # So subtract 1 here\n\n class_loss = F.cross_entropy(\n conf_data_pos, conf_t_pos, reduction=\"sum\"\n )\n\n return cfg.conf_alpha * (class_loss + (obj_loss * keep).sum())",
"def focal_tversky(input, target, alpha, beta, gamma):\n\n nclasses = input.shape[1]\n\n probs = torch.softmax(input, axis=1)\n target_onehot = make_one_hot(target, nclasses)\n\n #probs = probs.view(-1)\n #target_onehot = target_onehot.view(-1)\n\n smooth = 1.0\n\n dims = (0,2,3)\n TP = torch.sum(probs * target_onehot, dims)\n FN = torch.sum(target_onehot * (1.0 - probs), dims)\n FP = torch.sum((1 - target_onehot) * probs, dims)\n\n TR = ((TP + smooth) / (TP + alpha * FN + beta * FP + smooth))\n\n TR = 1.0 - TR\n TR = TR.pow(gamma)\n FTR = TR.sum()\n\n return FTR",
"def loss(self, X, y):\n\n # Initialize the loss to zero.\n loss = 0.0\n num_classes = self.W.shape[0] # C = num_classes\n num_train = X.shape[0]\n \n exp_a = np.zeros((num_classes,num_train))\n # ================================================================ #\n # YOUR CODE HERE:\n # Calculate the normalized softmax loss. Store it as the variable loss.\n # (That is, calculate the sum of the losses of all the training \n # set margins, and then normalize the loss by the number of \n # training examples.)\n # ================================================================ #\n \n \n for i in np.arange(num_train):\n \n Loss = 0.0\n\n class_scores = np.dot(self.W,X[i,:].T) # calculating class scores (C x 1 vector)\n class_scores -= np.max(class_scores) # considering the possible issue for numerical instability and account for it\n\n exp_a[:,i] = np.exp(class_scores) # turning class scores to probabilities (C x 1 vector), without normalization\n\n Loss -= np.log(exp_a[y[i],i]/np.sum(exp_a[:,i]))\n \n\n #p[:,i] = exp_a[:,i]/np.sum(exp_a[:,i]) # p now is a valid probability matrix\n #print(p[:,i])\n\n loss += Loss \n #print(Loss,i) \n \n pass\n loss /= num_train\n # ================================================================ #\n # END YOUR CODE HERE\n # ================================================================ #\n\n return loss",
"def compute_loss(self):",
"def focal_loss(input_tensor, target_tensor, alpha, gamma):\n preds = tf.nn.sigmoid(input_tensor)\n zeros = array_ops.zeros_like(preds, dtype=preds.dtype)\n ones = array_ops.ones_like(preds, dtype=preds.dtype)\n neg_pred = ones - preds\n\n zeros_active = tf.equal(target_tensor, zeros)\n zeros_active = tf.cast(zeros_active, tf.float32)\n neg_part = - zeros_active * (1.0 - alpha) * tf.pow(preds, gamma) * tf.log(tf.clip_by_value(neg_pred, 1e-8, 1.0))\n\n ones_active = tf.equal(target_tensor, ones)\n ones_active = tf.cast(ones_active, tf.float32)\n pos_part = - ones_active * alpha * tf.pow(neg_pred, gamma) * tf.log(tf.clip_by_value(preds, 1e-8, 1.0))\n\n loss = pos_part + neg_part\n loss = tf.reduce_sum(loss, 1)\n loss = tf.reduce_mean(loss)\n return loss",
"def forward(self, x):\n \n x = F.relu(self.conv1_bn(self.conv1(self.conv0_bn(x))))\n x = F.relu(self.conv2_bn(self.conv2(x)))\n x = F.relu(self.conv3_bn(self.conv3( self.maxpool2(x))))\n x = F.relu(self.conv4_bn(self.conv4( self.maxpool3(x))))\n x = self.maxpool4(x) \n x = x.view(-1, 1184)\n x = F.relu(self.fc1(x))\n x = self.dense1_bn(x)\n x = F.dropout(x, training=self.training)\n x = self.fc2(x)\n return F.log_softmax(x)",
"def forward(self, images):\n # assuming that the precomputed features are not already l2-normalized\n x = l2norm(images.view( images.size(0), -1))\n #print(images.shape, self.fc )\n x = self.relu(self.fc1(x))\n x = self.fc2(x)\n #x = F.log_softmax(x) #no need of log softmax here if we use cross entropy as loss\n #x = self.softmax(x)\n # normalize in the joint embedding space\n \n\n return x",
"def binary_focal_loss_fixed(y_true, y_pred):\n y_true = tf.cast(y_true, tf.float32)\n alpha_t = y_true * alpha + (K.ones_like(y_true) - y_true) * (1 - alpha)\n\n p_t = y_true * y_pred + (K.ones_like(y_true) - y_true) * (K.ones_like(y_true) - y_pred) + K.epsilon()\n focal_loss = - alpha_t * K.pow((K.ones_like(y_true) - p_t), gamma) * K.log(p_t)\n return K.mean(focal_loss)",
"def binary_focal_loss_fixed(y_true, y_pred):\n y_true = tf.cast(y_true, tf.float32)\n alpha_t = y_true * alpha + (K.ones_like(y_true) - y_true) * (1 - alpha)\n\n p_t = y_true * y_pred + (K.ones_like(y_true) - y_true) * (K.ones_like(y_true) - y_pred) + K.epsilon()\n focal_loss = - alpha_t * K.pow((K.ones_like(y_true) - p_t), gamma) * K.log(p_t)\n return K.mean(focal_loss)",
"def focal_loss(prediction_tensor, target_tensor, alpha=0.05, gamma=2):\r\n target_tensor = tf.one_hot(target_tensor, depth=2)\r\n\r\n softmax_p = tf.nn.softmax(prediction_tensor)\r\n zeros = array_ops.zeros_like(prediction_tensor, dtype=softmax_p.dtype)\r\n \r\n pos_p_sub = array_ops.where(target_tensor > 0.5, 1 - softmax_p, zeros)\r\n neg_p_sub = array_ops.where(target_tensor > 0.5, zeros, 1 - softmax_p)\r\n \r\n \r\n per_entry_cross_ent = - (1 - alpha) * (pos_p_sub ** gamma) * tf.log(tf.clip_by_value(softmax_p, 1e-8, 1.0)) \\\r\n - alpha * (neg_p_sub ** gamma) * tf.log(tf.clip_by_value(1.0 - softmax_p, 1e-8, 1.0))\r\n return tf.reduce_mean(per_entry_cross_ent)",
"def focal_loss(prediction_tensor, target_tensor, weights=None, alpha=0.5, gamma=2):\n sigmoid_p = tf.nn.sigmoid(prediction_tensor)\n print(sigmoid_p.get_shape())\n zeros = array_ops.zeros_like(sigmoid_p, dtype=sigmoid_p.dtype)\n pos_p_sub = array_ops.where(target_tensor >= sigmoid_p, target_tensor - sigmoid_p, zeros)\n neg_p_sub = array_ops.where(target_tensor > zeros, zeros, sigmoid_p)\n per_entry_cross_ent = - alpha * (pos_p_sub ** gamma) * tf.log(tf.clip_by_value(sigmoid_p, 1e-8, 1.0)) \\\n - (1 - alpha) * (neg_p_sub ** gamma) * tf.log(tf.clip_by_value(1.0 - sigmoid_p, 1e-8, 1.0))\n return tf.reduce_mean(per_entry_cross_ent)",
"def forward(self, fiiqa_preds, fiiqa_targets):\n fiiqa_prob = F.softmax(fiiqa_preds, dim=1)\n fiiqa_expect = torch.sum(Variable(torch.arange(0, 200)).float() * fiiqa_prob, 1)\n fiiqa_loss = F.smooth_l1_loss(fiiqa_expect, fiiqa_targets.float())\n return fiiqa_loss",
"def focal_loss_multilabel(prediction_tensor, target_tensor, weights=None, alpha=0.75, gamma=2):\n sigmoid_p = tf.nn.sigmoid(prediction_tensor)\n zeros = array_ops.zeros_like(sigmoid_p, dtype=sigmoid_p.dtype)\n \n # For poitive prediction, only need consider front part loss, back part is 0;\n # target_tensor > zeros <=> z=1, so poitive coefficient = z - p.\n pos_p_sub = array_ops.where(target_tensor > zeros, target_tensor - sigmoid_p, zeros)\n \n # For negative prediction, only need consider back part loss, front part is 0;\n # target_tensor > zeros <=> z=1, so negative coefficient = 0.\n neg_p_sub = array_ops.where(target_tensor > zeros, zeros, sigmoid_p)\n per_entry_cross_ent = - alpha * (pos_p_sub ** gamma) * tf.log(tf.clip_by_value(sigmoid_p, 1e-8, 1.0)) \\\n - (1 - alpha) * (neg_p_sub ** gamma) * tf.log(tf.clip_by_value(1.0 - sigmoid_p, 1e-8, 1.0))\n return tf.reduce_sum(per_entry_cross_ent, axis=1)",
"def focal_loss(pred, y, alpha=0.5, gamma=4):\n zeros = tf.zeros_like(pred, dtype=pred.dtype)\n\n # For positive prediction, only need consider front part loss, back part is 0;\n # target_tensor > zeros <=> z=1, so positive coefficient = z - p.\n pos_p_sub = tf.where(y > zeros, y - pred, zeros) # positive sample 寻找正样本,并进行填充\n\n # For negative prediction, only need consider back part loss, front part is 0;\n # target_tensor > zeros <=> z=1, so negative coefficient = 0.\n neg_p_sub = tf.where(y > zeros, zeros, pred) # negative sample 寻找负样本,并进行填充\n per_entry_cross_ent = - alpha * (pos_p_sub ** gamma) * tf.log(tf.clip_by_value(pred, 1e-8, 1.0)) \\\n - (1 - alpha) * (neg_p_sub ** gamma) * tf.log(tf.clip_by_value(1.0 - pred, 1e-8, 1.0))\n\n return tf.reduce_sum(per_entry_cross_ent)",
"def focal_loss_fixed(gamma=2., alpha=0.25):\r\n def focal_loss(y_true, y_pred):\r\n \"\"\"\r\n :param y_true: A tensor of the same shape as `y_pred`\r\n :param y_pred: A tensor resulting from a sigmoid\r\n :return: Output tensor.\r\n \"\"\"\r\n pt_1 = tf.where(tf.equal(y_true, 1), y_pred, tf.ones_like(y_pred))\r\n pt_0 = tf.where(tf.equal(y_true, 0), y_pred, tf.zeros_like(y_pred))\r\n\r\n epsilon = K.epsilon()\r\n # clip to prevent NaN's and Inf's\r\n pt_1 = K.clip(pt_1, epsilon, 1. - epsilon)\r\n pt_0 = K.clip(pt_0, epsilon, 1. - epsilon)\r\n\r\n return -K.mean(alpha * K.pow(1. - pt_1, gamma) * K.log(pt_1)) \\\r\n -K.mean((1 - alpha) * K.pow(pt_0, gamma) * K.log(1. - pt_0))\r\n\r\n return focal_loss",
"def focal_loss(prediction_tensor, target_tensor, weights=None, alpha=0.25, gamma=2):\n sigmoid_p = tf.nn.sigmoid(prediction_tensor)\n zeros = array_ops.zeros_like(sigmoid_p, dtype=sigmoid_p.dtype)\n pos_p_sub = array_ops.where(target_tensor >= sigmoid_p, target_tensor - sigmoid_p, zeros)\n neg_p_sub = array_ops.where(target_tensor > zeros, zeros, sigmoid_p)\n per_entry_cross_ent = - alpha * (pos_p_sub ** gamma) * tf.log(tf.clip_by_value(sigmoid_p, 1e-8, 1.0)) \\\n - (1 - alpha) * (neg_p_sub ** gamma) * tf.log(tf.clip_by_value(1.0 - sigmoid_p, 1e-8, 1.0))\n return tf.reduce_mean(per_entry_cross_ent)",
"def focal_loss(y_true: Tensor,\n y_pred: Tensor,\n gamma: float = 2.0,\n alpha: float = 0.25,\n from_logits: bool = False,\n normalize: bool = True,\n shape_reduction: str = \"sum\",\n sample_reduction: str = \"mean\") -> Tensor:\n if gamma is None or gamma < 0:\n raise ValueError(\"Value of gamma should be greater than or equal to zero.\")\n\n if alpha is None or (alpha < 0 or alpha > 1):\n raise ValueError(\"Value of alpha can either be -1 or None or within range (0, 1)\")\n\n if tf.is_tensor(y_true):\n y_true = tf.cast(y_true, dtype=y_pred.dtype)\n fl = SigmoidFocalCrossEntropy(from_logits=from_logits,\n alpha=alpha,\n gamma=gamma,\n reduction=tf.keras.losses.Reduction.NONE)(y_pred=y_pred, y_true=y_true)\n gt_shape = tf.shape(y_true)\n fl_shape = tf.shape(fl)\n elif isinstance(y_true, torch.Tensor):\n y_true = y_true.to(y_pred.dtype)\n fl = pytorch_focal_loss(y_pred=y_pred, y_true=y_true, alpha=alpha, gamma=gamma, from_logits=from_logits)\n gt_shape = y_true.shape\n fl_shape = fl.shape\n else:\n raise ValueError(\"Unsupported tensor type.\")\n\n focal_reduce_axis = [*range(1, len(fl_shape))]\n # normalize along the batch size based on number of positive classes\n if normalize:\n gt_reduce_axis = [*range(1, len(gt_shape))]\n gt_count = clip_by_value(reduce_sum(y_true, axis=gt_reduce_axis), min_value=1)\n gt_count = gt_count[(..., ) + (None, ) * len(focal_reduce_axis)]\n fl = fl / gt_count\n\n if shape_reduction == \"sum\":\n fl = reduce_sum(fl, axis=focal_reduce_axis)\n elif shape_reduction == \"mean\":\n fl = reduce_mean(fl, axis=focal_reduce_axis)\n\n if sample_reduction == \"mean\":\n fl = reduce_mean(fl)\n elif sample_reduction == \"sum\":\n fl = reduce_sum(fl)\n\n return fl",
"def loss_function(\n self, x_p, const, target, reconstructed_original, confidence, min_, max_):\n\n ## get the output of model before softmax\n x_p.requires_grad = True\n logits = self.model.get_logits(x_p).to(self.device)\n\n ## find the largest class except the target class\n targetlabel_mask = (torch.from_numpy(onehot_like(np.zeros(self.classnum), target))).double()\n secondlargest_mask = (torch.from_numpy(np.ones(self.classnum)) - targetlabel_mask).to(self.device)\n\n secondlargest = np.argmax((logits.double() * secondlargest_mask).cpu().detach().numpy(), axis = 1)\n\n is_adv_loss = logits[0][secondlargest] - logits[0][target]\n\n # is_adv is True as soon as the is_adv_loss goes below 0\n # but sometimes we want additional confidence\n is_adv_loss += confidence\n\n if is_adv_loss == 0:\n is_adv_loss_grad = 0\n else:\n is_adv_loss.backward()\n is_adv_loss_grad = x_p.grad\n\n is_adv_loss = max(0, is_adv_loss)\n\n s = max_ - min_\n squared_l2_distance = np.sum( ((x_p - reconstructed_original) ** 2).cpu().detach().numpy() ) / s ** 2\n total_loss = squared_l2_distance + const * is_adv_loss\n\n\n squared_l2_distance_grad = (2 / s ** 2) * (x_p - reconstructed_original)\n\n #print(is_adv_loss_grad)\n total_loss_grad = squared_l2_distance_grad + const * is_adv_loss_grad\n return total_loss, total_loss_grad",
"def forward(self, state):\n x = F.relu(self.fc1(state))\n x = F.relu(self.fc2(x))\n #return self.fc3(x)\n return F.softmax(self.fc3(x), dim=1)",
"def compute_loss(self, features, mode, params, precomputed):\n raise NotImplementedError(\"Model does not implement loss.\")"
] | [
"0.6775613",
"0.6576758",
"0.6491725",
"0.64844924",
"0.6475553",
"0.64732516",
"0.64350957",
"0.6420379",
"0.63795304",
"0.6373628",
"0.63676196",
"0.63623226",
"0.63240296",
"0.63087744",
"0.6286535",
"0.62419695",
"0.62229663",
"0.6222136",
"0.6222136",
"0.62179106",
"0.62144196",
"0.61757994",
"0.6172643",
"0.6168113",
"0.6162704",
"0.61368173",
"0.6133445",
"0.6118983",
"0.6115886",
"0.6105703"
] | 0.7187947 | 0 |
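The categorical_focal_loss document relies on the Keras backend; a NumPy re-derivation of the same per-sample loss is handy for sanity checking (a sketch, not the Keras API):

import numpy as np

def focal_loss_np(y_true, y_pred, gamma=2.0, alpha=0.25, eps=1e-7):
    # Mirrors the document: clip, cross-entropy, modulating weight, sum over classes.
    y_pred = np.clip(y_pred, eps, 1.0 - eps)
    cross_entropy = -y_true * np.log(y_pred)
    weight = alpha * y_true * (1.0 - y_pred) ** gamma
    return np.sum(weight * cross_entropy, axis=1)

y_true = np.array([[0.0, 1.0, 0.0]])
y_pred = np.array([[0.1, 0.8, 0.1]])
print(focal_loss_np(y_true, y_pred))  # small value: confident, correct prediction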
Perform actions on all infected members of the population in a random order | def turn(grid):
# Select infected people
rows, cols = np.where(grid == 1)
#print(f"Infected at {rows}, {cols}")
# In random order, go through each infected
idx = np.arange(len(rows))
np.random.shuffle(idx)
for i in idx:
# Chance to heal
if np.random.binomial(1, heal_rate):
grid[rows[i], cols[i]] = -1
# Chance to die
if np.random.binomial(1, kill_rate):
grid[rows[i], cols[i]] = 2
# chance to infect
else:
infect(rows[i], cols[i])
# Re-count everything
add_tally(grid)
return grid | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _mutate(self, individuals):\n for cur in individuals:\n if random.random() < self.mutation_probability:\n self.op.mutate(cur['individual'])\n cur['fitness'] = None",
"def hesitant_action(self):\n if not self.agent.done:\n if not self.opponenet.done:\n self.EV = self.opponenet.pumps - np.random.randint(1,5)\n else:\n if self.opponenet.cashed:\n self.EV = self.opponenet.pumps + 1\n elif self.opponenet.popped:\n if not self.stopCount:\n if self.agent.pumps == 0:\n self.EV = np.random.randint(1,10)\n else:\n self.EV = self.agent.pumps\n self.stopCount = True\n self.action_gating()",
"def mutate(self):\n\n if len(self.genes) < 250:\n for g in self.genes:\n\n if MUTATION_CHANCE < random.random(): # random.random() gives float in [0,1)\n g.mutate()\n\n else:\n k = int(MUTATION_CHANCE*len(self.genes))\n for g in random.sample(self.genes,int(k)): #int(k)\n g.mutate()\n\n #To add random gene\n if ADD_GENE_CHANCE < random.random():\n self.genes.append(Gene(self.size)) #Call to Gene to add to genes list\n\n #To randomly remove genes\n\n if REM_GENE_CHANCE < random.random() and len(self.genes)>0:\n self.genes.remove(random.choice(self.genes))",
"def eval_randoms(count):\n\t\tfor person in Simulation.community:\n\t\t\tSimulation.community[person].eval_random_strategy(count)",
"def get_random_population():\r\n return [ get_random_individual() for _ in range(POPULATION_COUNT) ]",
"def __step(self, G):\n new_infected_node_set = self.infected_node_set.copy()\n #look for new infections\n for node in self.infected_node_set:\n #try to infect neighbors\n for neighbor in G.neighbors(node):\n if random() < self.p:\n new_infected_node_set.add(neighbor)\n\n #look for recuperations\n for node in self.infected_node_set:\n #try to recuperate\n if random() < self.q:\n new_infected_node_set.remove(node)\n #set new infected nodes\n self.infected_node_set = new_infected_node_set",
"def all_animals_eat(self):\n for cell in itertools.chain.from_iterable(self.map):\n if type(cell).__name__ in self.allowed_cells:\n cell.gen_fodder()\n cell.eat_herbivore()\n cell.eat_carnivore()",
"def evaluate_fitness_against_random(self):\n #self.normalize() # Normalize before evaluating\n for i in tqdm(range(self.population_size)):\n self.individual.load_chromosome(self.population[i])\n self.fitness[i] = evaluate_agent(self.individual, self.evaluations_per_chromosome * 4) / (self.evaluations_per_chromosome * 4)\n print(self.fitness)",
"def populateUnborn(self):\n for action in getLegalActionsNoStop(self.gameState, self.index):\n self.unbornChildren.append(action)\n random.shuffle(self.unbornChildren)",
"def newGeneration(self):\n for i in range(0, len(self.population)):\n [ind1, ind2] = self.randomSelection()\n child = self.crossover(ind1, ind2)\n self.population[i].setGene(child)\n self.mutation(self.population[i])",
"def step(self):\n\t\tnumpy.random.shuffle(self.agents_list)\n\t\tfor agent in self.agents_list:\n\t\t\tagent.produce()\n\t\tfor agent in self.agents_list:\n\t\t\tagent.charge()\n\t\tfor agent in self.agents_list:\n\t\t\tif agent.strategy == 0: \n\t\t\t\tagent.retribute()\n\t\tfor agent in self.agents_list:\n\t\t\tif agent.strategy == 0: \n\t\t\t\tagent.give()\n\t\tfor agent in self.agents_list:\n\t\t\tagent.consume()\n\t\tfor agent in self.agents_list:\n\t\t\tagent.solve_consumption_deficit()\n\t\tfor site in self.sites:\n\t\t\tsite.recovery()\n\t\tfor agent in self.agents_list:\n\t\t\tagent.sprout()",
"def mutation(self):\n\n for r in range(self.pop_num*3, 5): # Mutation.\n for w in range(0,self.length): \n if random.random()<0.2: \n self.par_and_sons[r].A[w] = self.par_and_sons[r].A[w] + np.random.randint(-20, 20) # Offset + -20 pixels.",
"def expand_influence(self):\n user_reach = int(len(self.model.users) * self._influence)\n # print(f'User reach of {self._influence} is {user_reach} / {len(self.model.users)}')\n for user in random.sample(self.model.users, user_reach):\n self.add_friend(user)",
"def __mutate(self, chromosomes, mutation_probability):\n\n for chromosome in chromosomes:\n for i in range(self.chromosome_size):\n if random.randint(1, 100) <= mutation_probability:\n logging.getLogger().debug(\n \"---> Mutation in Chromosome \" + str(\n chromosome.chromosome_id) + \"in gene \" + str(i)\n + \" <---\")\n chromosome.genes[i] = random.choice(self.gene_pool)",
"def genPopulation(self):\r\n self.population_list = []\r\n for i in xrange(0, self.pop_size):\r\n individual = bitarray(self.indv_size)\r\n # Loop for randomizing the 'individual' string.\r\n for j in xrange(0, self.board_size):\r\n vert_pos = random.randint(0, self.board_size-1)\r\n vert_pos_bitnum = toBitArray(vert_pos, self.pos_bits_size)\r\n # print \"\\t\\t\", j, vert_pos_bitnum, vert_pos\r\n for k in range(0, self.pos_bits_size):\r\n individual[j * self.pos_bits_size + k] = vert_pos_bitnum[k]\r\n self.population_list.append(individual)\r\n # print \"\\t\", i, individual\r",
"def testrandom(self):\n for i in range(100):\n WeaponAbility()",
"def run(self, generations=1000):\n gcount = 0\n \n while gcount<=generations:\n try:\n print \"Gen: \"+str(gcount),\n self.population = zip (self.population, [self.target]*len(self.population))\n self.population = self.pool.map(f, self.population)\n except:\n pass\n for i in self.population:\n print i[0],i[1]\n self.population = [organism.Organism(x[0], x[1]) for x in self.population]\n self.population.sort()\n print \" Max fitness: \"+str(self.population[::-1][1].fitness)\n try:\n if self.population[0] <= self.ppop[0]:\n self.ppop = self.population[::-1][0:10] # The top ten organisms\n else:\n self.population = self.ppop # We got worse! go back!\n except:\n self.ppop = self.population\n self.population = self.population[::-1][0:10]\n try:\n self.breed()\n except:\n print \"Breeding error\"\n gcount+=1",
"def selection(self,parents,popSize):\n for i in range(popSize):\n idx1 = np.random.randint(0,popSize)\n idx2 = np.random.randint(0,popSize)\n if parents.individuals[idx1].violationSum < parents.individuals[idx2].violationSum:\n self.individuals[i] = parents.individuals[idx1]\n elif parents.individuals[idx1].violationSum > parents.individuals[idx2].violationSum:\n self.individuals[i] = parents.individuals[idx2]\n elif parents.individuals[idx1].objectiveFunction[0] < parents.individuals[idx2].objectiveFunction[0]:\n self.individuals[i] = parents.individuals[idx1]\n else:\n self.individuals[i] = parents.individuals[idx2]\n \"\"\"\n print(\"Offsprings(self) Impresso dentro de selection (FIM).\")\n self.printPopulation(popSize)\n print(\"Parents Impresso dentro de selection (FIM).\")\n parents.printPopulation(popSize)\n \"\"\"",
"def evolve(self, elitism='on', save='off', probability=0.05, rate=0.05):\n if self.state == 'dead':\n\n self.member_fitness = [self.members[i].fitness for i in range(self.size)]\n\n self.fittest_brain = self.members[self.member_fitness.index(max(self.member_fitness))]\n\n if save == 'on':\n self.fittest_brain.save_as('fittest_brain')\n\n self.total_population_fitness = sum(self.member_fitness)\n\n print('Total population fitness is %s' % (self.total_population_fitness))\n\n self.mating_pool = [[self.members[i]] * round(self.member_fitness[i] * 1000 / self.total_population_fitness) for i in range(self.size)]\n\n self.mating_pool = [brain for sublist in self.mating_pool for brain in sublist]\n\n self.children = []\n\n if elitism == 'on':\n\n self.children.append(self.fittest_brain)\n\n for i in range(self.size - 1):\n parent1 = random.choice(self.mating_pool)\n parent2 = random.choice(self.mating_pool)\n child = crossover(parent1, parent2)\n child.mutate(probability, rate)\n self.children.append(child)\n else:\n for i in range(self.size):\n parent1 = random.choice(self.mating_pool)\n parent2 = random.choice(self.mating_pool)\n child = crossover(parent1, parent2)\n child.mutate(probability, rate)\n self.children.append(child)\n\n self.members = self.children\n\n self.members[0].state = 'alive'\n\n self.state = 'alive'\n self.generation += 1\n\n else:\n print('Cannot evolve: some members are still alive')",
"async def randping(self, ctx):\r\n while True:\r\n memb = random.choice(ctx.guild.members)\r\n if not memb.bot:\r\n break\r\n memb = memb.mention\r\n await ctx.send(memb)",
"def makePopulation(self):\n # nHearing = int(round(float(self.nAgents) * (1-self.propDeaf)))\n #nDeaf = int(math.ceil(float(self.nAgents) * self.propDeaf))\n # create population of agents (just an array of agentEpsilon instances)\n self.pop = [agentEpsilon(self.alpha,self.nManualSigns, self.nSpokenSigns , deaf=False, sex=random.choice([0,1]), ID= i) for i in range(self.nAgents)]",
"def move(self):\n for agent in self.agents:\n if not agent.fidelity:\n options = agent.get_move_options(agent.hex, self.kernel_size, None, extend=True)\n target = random36.choices(population=options,weights=[x.quality**2 for x in options])\n agent.move(target[0])",
"def _generate_random_population(self, pop_size):\n\n random_population = []\n for agent in range(pop_size):\n random_population.append(self._generate_random_agent())\n return random_population",
"def doAllIn(self):\n self.doRaise(self.avatar.getChips())",
"async def infect(self, ctx, *, member: Infectable):\n rate = await self.config.rate()\n chance = random.randint(1, 100)\n if chance <= rate:\n result = await self.infect_user(ctx=ctx, user=member)\n await ctx.send(result)\n else:\n await ctx.send(\n f\"Luckily **{member.name}** was wearing a mask so they didn't get infected.\"\n )",
"def run(self, iterations):\n # print(f'Before:\\n {self.population}\\n')\n # self.best()\n # print(f'Best Genome before: {self.best_genome.array}, fitness={self.best_genome.fitness} ')\n\n mutator = Rand1MutationOperator(self.population, self.bounds, 0.2)\n mixer = ExponentialCrossoverOperator(self.minfun)\n replacer = ElitistReplacementOperator()\n\n for _ in range(iterations):\n candidate_population = Population(None, None, 0)\n for target in self.population.collection:\n # List with genomes who will be the donors\n mutant = mutator.apply(target)\n # Genome modified by replacing a few random positions\n candidate_genome = mixer.apply(target, mutant)\n\n candidate_population.add(candidate_genome)\n\n # Targets are replaced by candidates from the population if candidate has less fitness than target\n self.population = replacer.apply(self.population, candidate_population)\n\n # print(f'After:\\n {self.population}\\n')\n # self.best()\n # print(f'Best Genome after: {self.best_genome.array}, fitness={self.best_genome.fitness} ')",
"def generational_replacement(random, population, parents, offspring, args):\r\n num_elites = args.setdefault('num_elites', 0)\r\n population.sort(reverse=True)\r\n offspring.extend(population[:num_elites])\r\n offspring.sort(reverse=True)\r\n survivors = offspring[:len(population)]\r\n return survivors",
"def random_replacement(random, population, parents, offspring, args):\n num_elites = args.setdefault('num_elites', 0)\n population.sort(reverse=True)\n num_to_replace = min(len(offspring), len(population) - num_elites) \n valid_indices = range(num_elites, len(population))\n rep_index = random.sample(valid_indices, num_to_replace)\n for i, repind in enumerate(rep_index):\n population[repind] = offspring[i]\n return population",
"async def guild_infected(self, ctx, *, guild: discord.Guild = None):\n if not guild:\n guild = ctx.guild\n user_list = await self.config.all_users()\n infected_list = []\n for user, data in user_list.items():\n user = guild.get_member(user)\n if user:\n userState = data[\"gameState\"]\n if userState == \"infected\":\n infected_list.append(f\"{user.mention} - {user}\")\n if infected_list:\n infected_list = \"\\n\".join(infected_list)\n color = await ctx.embed_color()\n if len(infected_list) > 2000:\n embeds = []\n infected_pages = list(pagify(infected_list))\n for index, page in enumerate(infected_pages, start=1):\n embed = discord.Embed(color=color, title=\"Infected Members\", description=page)\n embed.set_footer(text=f\"{index}/{len(infected_pages)}\")\n embeds.append(embed)\n await menu(ctx, embeds, DEFAULT_CONTROLS)\n else:\n await ctx.send(\n embed=discord.Embed(\n color=color,\n title=\"Infected Members\",\n description=infected_list,\n )\n )\n else:\n await ctx.send(\"No one has been infected yet..\")",
"def test_mutate(self):\n f0 = 5 * (np.random.rand(10, 5) - 0.5)\n ga = population.Evolver(f0, eval_one_max)\n\n self.assertFalse(ga.generations[-1].new)\n\n for i in range(10):\n ga.mutate()\n\n self.assertTrue(ga.generations[-1].new)"
] | [
"0.6152134",
"0.6125817",
"0.61022955",
"0.6062612",
"0.5987989",
"0.5987563",
"0.59344083",
"0.5924505",
"0.5913674",
"0.5815777",
"0.58083546",
"0.5803261",
"0.5792966",
"0.578544",
"0.5772891",
"0.5767429",
"0.57490784",
"0.5739122",
"0.572956",
"0.57269686",
"0.57267815",
"0.57209593",
"0.57135737",
"0.5706034",
"0.5694513",
"0.56898856",
"0.5688485",
"0.5663607",
"0.5657347",
"0.56559736"
] | 0.6190832 | 0 |
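turn(grid) assumes module-level heal_rate, kill_rate and an infect() helper that are not shown. One plausible shape for the missing pieces (all names and rates here are assumptions, not the original code):

import numpy as np

heal_rate, kill_rate, infect_rate = 0.1, 0.05, 0.3   # illustrative values
grid = np.zeros((10, 10), dtype=int)  # 0 susceptible, 1 infected, -1 healed, 2 dead

def infect(r, c):
    # Try to infect each susceptible orthogonal neighbour of cell (r, c).
    for dr, dc in ((-1, 0), (1, 0), (0, -1), (0, 1)):
        rr, cc = r + dr, c + dc
        if 0 <= rr < grid.shape[0] and 0 <= cc < grid.shape[1]:
            if grid[rr, cc] == 0 and np.random.binomial(1, infect_rate):
                grid[rr, cc] = 1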
Removes the old repo on the server and clones a new one, then configures the host. | def flush_repo():
server = get_server()
run("rm -rf %(project_name)s" % env)
git.clone()
server.setup() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def update_source(self):\n cwd = None\n if os.path.exists(self.path):\n cwd = self.path\n cmd = 'git fetch && git reset --hard origin/master'\n else:\n cmd = 'git clone %s %s' % (self.repo_url, self.path)\n Command(cmd, cwd=cwd)",
"def deploy_pull_master(self, restart=True):\n self.ops.local(\"cd \"+self.local_path+\"/src && git reset --hard HEAD && git pull origin master && git submodule update\")\n PiService.deploy(self, restart)",
"def _clone_gitrepo():\n # Puts git repo in ~/.ssh/config to avoid interaction due to missing known_hosts\n git_server = urllib.splituser(urllib.splittype(env.project['git_repo'])[0])[1]\n if not files.exists('~/.ssh/config') or not files.contains('~/.ssh/config', git_server):\n files.append('~/.ssh/config', ['host %s' % git_server, ' StrictHostKeyChecking no'])\n\n branch = env.project.get('git_branch', 'master')\n if files.exists(_interpolate(DJANGO_PROJECT_DIR)):\n print _interpolate('project %(project)s already exists, updating')\n remote('git pull origin %s' % branch)\n else:\n with cd(_interpolate(VIRTUALENV_DIR)):\n run(_interpolate('git clone %(git_repo)s %(project)s'))\n if branch != 'master':\n remote('git fetch origin %s:%s' % (branch, branch))\n remote('git checkout %s' % branch)",
"def clone_repo():\n with settings(warn_only=True):\n run('git clone %(repository_url)s %(repo_path)s' % env)",
"def cloneDB():\n print(\"::cloning db\")\n filepath = confighome+\"config\"\n\n # open config to get credentials for ssh \n with open(filepath,mode='r', encoding='utf-8') as f:\n jconfig = json.load(f)\n creds=jconfig[0]\n\n # locally clone the \"db\"\n cmd_full=\"git clone \"+creds['db']['username']+\"@\"+creds['db']['host']+\":swrss_database\"\n print(\"::cmd=\",cmd_full)\n retval= os.system(cmd_full)\n if (retval==0):\n print(\"::synced successfully\")\n\n print(\"::system returned \",retval)",
"def update_repo(self):\n utils.render_template_to_file(\n self.repo_template_path,\n self.repo_config_path,\n {\n 'name': '{0}_nailgun'.format(self.version),\n 'baseurl': self.host_system_config['repo_master'],\n 'gpgcheck': 0,\n 'skip_if_unavailable': 0,\n })\n utils.exec_cmd('yum clean all')",
"def clone_repository():\n try:\n ssh = paramiko.SSHClient()\n ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n ssh.connect(hostname=exec_server_address, username=exec_server_username, password=exec_server_password)\n command = 'cd ' + exec_server_working_directory + '; rm -rf ' + robot_tests_directory\n ssh.exec_command(command=command, timeout=180)\n command = 'cd ' + exec_server_working_directory + '; git clone ' + bitbucket_repository_url\n ssh.exec_command(command=command, timeout=1800)\n ssh.close()\n except Exception as error:\n print(\"Failed to connect to execution server \" + exec_server_address)",
"def update_from_repo():\n\treturn",
"def clone_remote_theme(save_dir: str, config: dict):\r\n os.makedirs(save_dir, exist_ok=True)\r\n\r\n repo_dir = os.path.join(save_dir, config['name'])\r\n\r\n if os.path.exists(repo_dir):\r\n force_rmtree(repo_dir)\r\n\r\n repo_url = config['url']\r\n repo_branch = config.get('branch', 'master')\r\n repo_tag = config.get('tag', '')\r\n\r\n def safe_run(command, cwd):\r\n try:\r\n run(command, cwd)\r\n except Exception:\r\n raise TemplateError('Cannot fetch theme from ' + repo_url)\r\n\r\n safe_run('git clone -b %s %s %s' % (repo_branch, repo_url, repo_dir), '.')\r\n if repo_tag != '':\r\n safe_run('git checkout %s' & repo_tag, repo_dir)",
"def update_code_from_git():\n if not files.exists(REMOTE_REPO_DIR):\n with cd(HOME_DIR):\n run(\"git clone %s\" % MAIN_GITHUB_REP )\n with cd(REMOTE_REPO_DIR):\n run(\"git pull\")",
"def test_returns_cloned_repo_by_name_auto_host(self):\n # Need to set up a git repo with origin info.\n full_path = path.join(settings.REPO_ROOT, 'test')\n envoy.run('git init {0}'.format(full_path))\n fake_origin = 'git://localhost'\n envoy.run('git -C {0} remote add origin {1}'.format(full_path,\n fake_origin))\n url = reverse(\"find\", kwargs={'name': 'test'})\n\n del settings.REPO_URL\n\n response = self.client.get(url, HTTP_HOST='test-host')\n\n self.assertEqual(200, response.status_code)\n result = json.loads(response.content.decode())\n expected_url = 'git://test-host/test'\n self.assertEqual(result['url'], expected_url)\n self.assertEqual(result['name'], u'test')",
"def clone_into_project(git_repo_name):\n repo_dir = git_dir + \"/%s.git\" % git_repo_name\n with cd(remote_dir):\n run('rm -rf myproject')\n run(\"git clone %s %s\" % (repo_dir, project_name))\n run(\"echo 'MY_ENV=\\\"prod\\\"' > %s/%s/site_settings.py\" % (project_name,project_name))\n update_conf_file()",
"def test_pull_default_remote(self, repo):\n dest = os.path.join(self._tmpdir, 'cloned_repo')\n clone(['arg0', repo.path, dest])\n cloned = ComponentTestGitRepository(dest)\n self._check_repo_state(cloned, 'master', ['master'])\n eq_(pull(['argv0']), 0)\n assert len(repo.get_commits()) == 1",
"def clone_repos():\n with open(repolist_file, \"r+\") as repofile:\n repolist = repofile.readlines()\n for idx in range(0,len(repolist)):\n l = repolist[idx].strip()\n if re.match('^[^\\six#]',l):\n # clone repo\n repo = l\n if not git(\"clone\", \"--mirror\", repo, cwd = clone_dir):\n continue\n # mark as cloned\n repo = \"i {0}\\n\".format(repo)\n repolist[idx] = repo\n repofile.seek(0)\n repofile.truncate(0)\n repofile.flush()\n repofile.writelines(repolist)\n pass",
"def _ensure_remotes(self, repo):\n\n remote_names = [r.name for r in repo.remotes]\n if 'origin' not in remote_names:\n repo.create_remote('origin', REPO_FROM)\n\n if 'destiny' not in remote_names:\n repo.create_remote('destiny', REPO_TO)",
"def patch_repos(self):",
"def clone(ctx, path_base, repo_url, dir_target):\n if 'github' in repo_url:\n # Just to make sure ssh agent forwarding works well.\n ctx.run('ssh-keyscan -t rsa github.com >> ~/.ssh/known_hosts', warn=True)\n ctx.run('ssh -T [email protected]', warn=True)\n\n with ctx.cd(path_base):\n ctx.run(f'git clone -v {repo_url} {dir_target}')",
"def create_repo_clone(self, path, https):\n _, _, login, remote_dir = path.split('/', 3) # 3 x '/' before real path\n remote_dir = os.path.dirname(remote_dir) # final segment from clone\n print remote_dir\n cmd = ['ssh', login, 'mkdir', '-p', remote_dir]\n print cmd\n check_output(cmd)\n cmd = ['ssh', login, 'cd', remote_dir, ';', 'hg', 'clone', https]\n #cmd = ['ssh', login, 'cd {} ; hg clone {}'.format(remote_dir, path.replace('ssh:', 'https:'))]\n print cmd\n check_output(cmd)",
"def prepare_repository(self):\n # Check necessary settings and revert a snapshot\n if not self.custom_pkgs_mirror:\n return\n logger.info(\"Custom mirror with new packages: {0}\"\n .format(settings.CUSTOM_PKGS_MIRROR))\n\n # Modify admin resolv.conf to use local host resolver\n dns_server = self.env.router()\n new_resolv_conf = [\"nameserver {0}\".format(dns_server)]\n\n # Set the local router as nameserver that will allow\n # the admin node to access the Mirantis custom repositories.\n old_resolv_conf = self.env.modify_resolv_conf(new_resolv_conf)\n\n if settings.OPENSTACK_RELEASE_UBUNTU in settings.OPENSTACK_RELEASE:\n # Ubuntu\n master_tools = ['dpkg', 'dpkg-devel']\n self.install_tools(master_tools)\n self.get_pkgs_list_ubuntu()\n pkgs_local_path = ('{0}/pool/'\n .format(self.local_mirror_ubuntu))\n self.download_pkgs(pkgs_local_path)\n self.update_yaml(self.ubuntu_yaml_versions)\n self.regenerate_repo(self.ubuntu_script, self.local_mirror_ubuntu)\n else:\n # CentOS\n master_tools = ['createrepo']\n self.install_tools(master_tools)\n self.get_pkgs_list_centos()\n pkgs_local_path = '{0}/Packages/'.format(self.local_mirror_centos)\n self.download_pkgs(pkgs_local_path)\n self.update_yaml(self.centos_yaml_versions)\n self.regenerate_repo(self.centos_script, self.local_mirror_centos)\n\n # Restore original admin resolv.conf\n self.env.modify_resolv_conf(old_resolv_conf, merge=False)",
"def d_ploy():\n\tlocal(\"git push origin --all\")\n\twith cd(LIVE_ROOT):\n\t\trun(\"git pull\")",
"def deploy():\n remote_dir = os.path.abspath(os.path.join(REMOTE_BASE_DIR, REPO_NAME))\n \n with settings(warn_only=True):\n if run(\"test -d %s\" % (remote_dir)).failed:\n puts(red(\"[Repo %s does not exist on remote at: %s]\" % (REPO_NAME, remote_dir)))\n with cd(REMOTE_BASE_DIR):\n run(\"git clone %s %s\" % (REPO_URL, REPO_NAME))\n\n puts(yellow(\"[Write logs]\"))\n run(\"echo '-----------------------------' > %s\" % REMOTE_ERR_FILE)\n run(\"echo `date` >> %s\" % REMOTE_ERR_FILE)\n run(\"echo '-----------------------------' >> %s\" % REMOTE_ERR_FILE)\n run(\"echo '-----------------------------' > %s\" % REMOTE_LOG_FILE)\n run(\"echo `date` >> %s\" % REMOTE_LOG_FILE)\n run(\"echo '-----------------------------' >> %s\" % REMOTE_LOG_FILE)\n\n puts(yellow(\"[Update repo: %s]\" % REPO_NAME))\n with cd(remote_dir):\n run(\"git pull origin master >> %s 2>> %s\" %\n (REMOTE_LOG_FILE, REMOTE_ERR_FILE))\n\n # reminder new static files\n puts(yellow('Do not forget to run collect staticfiles on DJANGO server.'))",
"def test_pull_from_origin(tmpdir):\n gitwrapper.clone_from('git://github.com/Tinche/bower-cache', tmpdir)\n gitwrapper.pull_from_origin(tmpdir)",
"def local_remote_repository(svc_client, tmp_path, mock_redis, identity_headers, real_sync):\n from click.testing import CliRunner\n from git.config import GitConfigParser, get_config_path\n from marshmallow import pre_load\n\n from renku.cli import cli\n from renku.core.utils.contexts import chdir\n from renku.service.config import PROJECT_CLONE_NO_DEPTH\n from renku.service.serializers import cache\n\n # NOTE: prevent service from adding an auth token as it doesn't work with local repos\n def _no_auth_format(self, data, **kwargs):\n return data[\"git_url\"]\n\n orig_format_url = cache.ProjectCloneContext.format_url\n cache.ProjectCloneContext.format_url = _no_auth_format\n\n # NOTE: mock owner/project so service is happy\n def _mock_owner(self, data, **kwargs):\n data[\"owner\"] = \"dummy\"\n\n data[\"name\"] = \"project\"\n data[\"slug\"] = \"project\"\n\n return data\n\n orig_set_owner = cache.ProjectCloneContext.set_owner_name\n cache.ProjectCloneContext.set_owner_name = pre_load(_mock_owner)\n\n remote_repo_path = tmp_path / \"remote_repo\"\n remote_repo_path.mkdir()\n\n remote_repo = Repo.init(remote_repo_path, bare=True)\n remote_repo_checkout_path = tmp_path / \"remote_repo_checkout\"\n remote_repo_checkout_path.mkdir()\n\n remote_repo_checkout = remote_repo.clone(str(remote_repo_checkout_path))\n\n home = tmp_path / \"user_home\"\n home.mkdir()\n\n with modified_environ(HOME=str(home), XDG_CONFIG_HOME=str(home)):\n try:\n with GitConfigParser(get_config_path(\"global\"), read_only=False) as global_config:\n global_config.set_value(\"user\", \"name\", \"Renku @ SDSC\")\n global_config.set_value(\"user\", \"email\", \"[email protected]\")\n\n # NOTE: init \"remote\" repo\n runner = CliRunner()\n with chdir(remote_repo_checkout_path):\n\n result = runner.invoke(\n cli, [\"init\", \".\", \"--template-id\", \"python-minimal\", \"--force\"], \"\\n\", catch_exceptions=False\n )\n assert 0 == result.exit_code, format_result_exception(result)\n\n remote_name = remote_repo_checkout.active_branch.tracking_branch().remote_name\n remote = remote_repo_checkout.remotes[remote_name]\n result = remote.push()\n finally:\n try:\n shutil.rmtree(home)\n except OSError: # noqa: B014\n pass\n\n payload = {\"git_url\": f\"file://{remote_repo_path}\", \"depth\": PROJECT_CLONE_NO_DEPTH}\n response = svc_client.post(\"/cache.project_clone\", data=json.dumps(payload), headers=identity_headers)\n\n assert response\n assert {\"result\"} == set(response.json.keys()), response.json\n\n project_id = response.json[\"result\"][\"project_id\"]\n assert isinstance(uuid.UUID(project_id), uuid.UUID)\n\n try:\n yield svc_client, identity_headers, project_id, remote_repo, remote_repo_checkout\n finally:\n cache.ProjectCloneContext.format_url = orig_format_url\n cache.ProjectCloneContext.set_owner_name = orig_set_owner\n\n try:\n shutil.rmtree(remote_repo_path)\n except OSError: # noqa: B014\n pass\n\n try:\n shutil.rmtree(remote_repo_checkout_path)\n except OSError: # noqa: B014\n pass",
"def clone_repo():\n\n with cd(env.root):\n sudo('git clone %(repo)s %(code_root)s' % env, user=env.deploy_user)",
"def reset_repository(self):\n # Remove index lock just in case.\n lock_file = f\"{self.repo.working_tree_dir}/.git/index.lock\"\n try:\n os.remove(lock_file)\n logging.info(f\"removed {lock_file}\")\n except FileNotFoundError:\n logging.info(f\"{lock_file} does not exist\")\n logging.info('Syncing local, origin and upstream...')\n if 'upstream' not in self.repo.remotes:\n self.repo.create_remote('upstream', url=LLVM_GITHUB_URL)\n self.repo.remotes.upstream.fetch()\n self.repo.git.clean('-ffxdq')\n self.repo.git.reset('--hard')\n self.repo.git.fetch('--all')\n if self.find_commit('main') is None:\n origin = self.repo.remotes.origin\n self.repo.create_head('main', origin.refs.main)\n self.repo.heads.main.set_tracking_branch(origin.refs.main)\n self.repo.heads.main.checkout()\n self.repo.git.pull('origin', 'main')\n self.repo.git.pull('upstream', 'main')\n if self.push_branch:\n self.repo.git.push('origin', 'main')",
"def clone_github_repo(self):\n repository_local_destination = os.path.join(MODULES_PATH, 'github', self.username, self.repository_name)\n if not os.path.exists(repository_local_destination):\n Repo.clone_from(self.repo_url, repository_local_destination, branch='master')\n init_filename = os.path.join(repository_local_destination, '__init__.py')\n open(init_filename, 'a').close()",
"def clone():\n require('PROJECT_NAME')\n require('PROJECT_REPO')\n require('MERCURIAL_BIN')\n\n # Create the \"apps\" directory if it does not exist.\n run('mkdir -p {}'.format(utils.home('apps')))\n\n if files.exists(utils.home('apps', env.PROJECT_NAME)):\n delete()\n\n with cd(utils.home('apps')):\n run('{0} clone {1} {2}'.format(env.MERCURIAL_BIN,\n env.PROJECT_REPO,\n env.PROJECT_NAME))",
"def _mocked_repo_reset(self, project):\n repo.git.reset(\"--hard\", current_head)",
"def repository_create_hosted():\n pass",
"def fetch(self) -> None:\n try:\n repository = Repo.clone_from(\n self._parsed_url.original_url,\n self._output_dir,\n depth=1,\n no_single_branch=True,\n env={\"GIT_TERMINAL_PROMPT\": \"0\"},\n )\n except Exception:\n raise REANAFetcherError(\n \"Cannot clone the given Git repository. Please check that the provided \"\n \"URL is correct and that the repository is publicly accessible.\"\n )\n\n if self._git_ref:\n try:\n repository.remote().fetch(self._git_ref, depth=1)\n repository.git.checkout(self._git_ref)\n except Exception:\n raise REANAFetcherError(\n f'Cannot checkout the given Git reference \"{self._git_ref}\"'\n )\n\n shutil.rmtree(os.path.join(self._output_dir, \".git\"))"
] | [
"0.61914355",
"0.61613387",
"0.61292326",
"0.610431",
"0.6018636",
"0.600334",
"0.5974268",
"0.59159434",
"0.5908282",
"0.5882658",
"0.58382523",
"0.5837231",
"0.5782798",
"0.5774126",
"0.57253504",
"0.56959504",
"0.5688279",
"0.5687978",
"0.5686983",
"0.56722623",
"0.56397384",
"0.56268114",
"0.5599533",
"0.5586577",
"0.55842763",
"0.5575114",
"0.5569373",
"0.55638754",
"0.55548435",
"0.55514795"
] | 0.7116184 | 0 |
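A minimal local sketch of the same wipe-and-reclone pattern using GitPython; the path and URL are illustrative placeholders, not taken from the record above:

import os
import shutil

from git import Repo  # GitPython; assumed available


def flush_repo(path="project", url="git@example.com:org/project.git"):
    # Remove any stale checkout, then clone a fresh copy.
    if os.path.exists(path):
        shutil.rmtree(path)
    Repo.clone_from(url, path)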
return a list of the items belonging to the given student | def get_all_by_student(self, stud_id):
l = []
for item in self._items:
if item.get_student() == stud_id:
l.append(item)
return l[:] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def students(self):\n\t\treturn self.grade_set.all().distinct()",
"def find_duplicate(student_list):\r\n place_holder = student_info('null', 'null', '0', '0')\r\n current = place_holder\r\n dupe = []\r\n final = []\r\n for student in student_list:\r\n previous = current\r\n current = student\r\n if current.first == previous.first:\r\n if previous in final:\r\n dupe.append(final.pop())\r\n dupe.append(student)\r\n elif current.first != previous.first:\r\n if len(dupe) > 1:\r\n dupe.sort(key=lambda x: x[1])\r\n for student_dupe in dupe:\r\n final.append(student_dupe)\r\n final.append(student)\r\n dupe = []\r\n else:\r\n final.append(student)\r\n if len(dupe) > 1:\r\n dupe.sort(key=lambda x: x[1])\r\n for student_dupe in dupe:\r\n final.append(student_dupe)\r\n for student_final in final:\r\n print(student_format(student_final))",
"def get_students(self) -> List['Student']:\n return self.students.values()",
"def get_students_of_class(students, class_name):\n for row in students:\n class_list = [row for row in students if row[4]== class_name]\n return class_list",
"def get_list_of_students(self):\n return self._students",
"def Students_in_majors(l:list,i:list)->list:\n result=[]\n for s in l:\n if s.major in i:\n result.append(s)\n return result",
"def get_students(user):\n students = Student.query(ancestor=get_parent_key(user)).order(Student.rose_username).fetch()\n students_map = {}\n teams = []\n for student in students:\n students_map[student.key] = student\n if student.team not in teams:\n teams.append(student.team)\n return students, students_map, teams",
"def __eq__(self, student):\n return self._name == student.name and self._id == student.id",
"def find_students(self):\n from quizzer.models.attendance import Attendance\n from quizzer.models.semester import Semester\n\n semester = Semester.get_current()\n\n for attendance in Attendance.objects: # TODO: Use indexed query later.\n if attendance.semester == semester and attendance.class_ == self:\n yield attendance.student",
"def getStudents(self):\n if not self.isSorted:\n self.students.sort()\n self.isSorted = True\n return self.students[:] #return copy of list of students",
"def all_students(self):\n \n with sqlite3.connect(self.db_path) as conn:\n # conn.row_factory = self.create_student\n conn.row_factory = lambda cursor, row: Student(\n row[1], row[2], row[3], row[5]\n )\n \n \n db_cursor = conn.cursor()\n\n db_cursor.execute(\"\"\"\n select s.Id,\n s.FirstName,\n s.LastName,\n s.SlackHandle,\n s.CohortId,\n c.Name\n from Student s\n join Cohort c on s.CohortId = c.Id\n order by s.CohortId\n \"\"\")\n\n all_students = db_cursor.fetchall()\n\n # for student in all_students:\n # print(f'{student[1]} {student[2]} is in {student[5]}')\n\n # for student in all_students:\n # print(f'{student[1]} {student[2]} is in {student[5]}')\n\n for student in all_students:\n print(student)",
"def search_student(student):\n result=[]\n for name,age in alumnos.items():\n if student.lower() in name.lower():\n result.append(name)\n\n print(f\"Result {result}\")\n return result",
"def get_eval_list(assessment, current_user):\n for team in Team.objects.all():\n if current_user in team.student.all() and team.course == assessment.course: # get the team\n evaluated_list = [student for student in team.student.all() if student != current_user]\n return evaluated_list",
"def roster(self) -> list:\n return [student\n for grade in sorted(self.students)\n for student in self.students[grade]]",
"def Students_in_class(l:list,d:str,c:str)->list:\n result=[]\n for s in l:\n if Student_is_enrolled(s,d,c):\n result.append(s)\n return result",
"def func_Q1(db):\n grades_collection = db.grades\n student_list = list(grades_collection.distinct(\"student_id\", {}))\n\n return len(student_list)",
"def find_duplication(homework):\n re_id = re.compile(r'(?P<stuid>[0-9]{10,11})')\n dup_check = dict()\n with open(homework, 'r') as data:\n lines = data.readlines()\n for ln in lines:\n dt = ln.split()\n csum, right = dt[0], dt[1]\n if csum not in dup_check:\n dup_check[csum] = list()\n m = re_id.search(right)\n if m is not None:\n stu_id = m.group('stuid')\n dup_check[csum].append(stu_id)\n dup_check = filter(lambda k, v: len(v) > 1, dup_check.items())\n dup_check = [(key, sorted(val)) for key, val in dup_check]\n return dup_check",
"def get_students(self):\n if not self.is_sorted:\n self.students.sort()\n self.is_sorted = True\n for s in self.students:\n yield s",
"def set_of_courses(students_list: list) -> set:\n return set(student['course'] for student in students_list)",
"def allStudents(self):\n if not self.isSorted:\n self.students.sort()\n self.isSorted=True\n #return copy of list of students\n for s in self.students:\n yield s",
"def Student_names(l:list)->list:\n result=[]\n for s in l:\n result.append(s.name)\n return result",
"def get_all_student_courses(cls, user):\n member_record = CourseMember.objects.filter(user=user)\n member_teacher = member_record.filter(type = 3)\n student_list = []\n\n for member in member_teacher:\n if member.course.pk not in student_list:\n student_list.append(member.course.pk)\n\n return student_list",
"def get_students(self):\n self.cur = self.conn.cursor(pymysql.cursors.DictCursor)\n self.cur.execute(\"SELECT * FROM studenten\")\n self.cur.close()\n\n return self.cur.fetchall()",
"def find_result_set(result_sets, student, course):\n # find the team that the student is in\n for team in Team.objects.filter(course=course).all():\n if student in team.student.all():\n the_team = team\n break\n return result_sets.filter(student=student, team=the_team).first()",
"def get_all_failing_students(self):\n students = MyCollection()\n for student in self.__student_repository.get_all():\n failing = False\n for grade in self.__grade_repository.get_all():\n if \".\" + str(student.entity_id) in grade.entity_id and grade.grade_value < 5:\n failing = True\n if failing:\n students.append(student)\n return students",
"def get_sorted_students(self):\n results = self.__create_student_and_grade_dto()\n results.sort(self.__compare_dtos_on_grade)\n return results",
"def all_students(records):\n \n student_number_and_name = {}\n for each_tuple in records:\n all_student_information = each_tuple[1]\n student_number = int(all_student_information[0])\n student_name = (all_student_information[1:3])\n \n student_number_and_name[student_number] = student_name\n \n return student_number_and_name",
"def list(self, request):\n\n marks = Marks.objects.all()\n\n mark_id_5 = marks.filter(name='5')\n mark_id_4 = marks.filter(name='4')\n mark_id_3 = marks.filter(name='3')\n mark_id_2 = marks.filter(name='2')\n mark_id_pass = marks.filter(name='Зачтено')\n mark_id_not_pass = marks.filter(name='Незачтено')\n mark_id_not_appointed = marks.filter(name='Неявка')\n\n records = filter_against_records(request)\n \n students = Students.objects.all()\n students_to_return = []\n \n for student in students:\n student_records = records.filter(student_id=student.id)\n \n counter_5 = 0\n counter_4 = 0\n counter_3 = 0\n counter_2 = 0\n counter_all = 0\n\n for record in student_records:\n mark_dict = model_to_dict(record.mark_id) \n mark = mark_dict['id'] \n\n if str(mark) == str(mark_id_5[0].id):\n counter_5 += 1\n counter_all += 1\n\n elif str(mark) == str(mark_id_4[0].id):\n counter_4 += 1\n counter_all += 1\n\n elif str(mark) == str(mark_id_3[0].id):\n counter_3 += 1\n counter_all += 1\n\n elif str(mark) == str(mark_id_2[0].id):\n counter_2 += 1\n counter_all += 1\n\n elif str(mark) == str(mark_id_pass[0].id):\n counter_all += 1\n\n elif str(mark) == str(mark_id_not_pass[0].id):\n counter_all += 1\n\n elif str(mark) == str(mark_id_not_appointed[0].id):\n counter_all += 1\n\n if counter_all == len(student_records) and len(student_records) > 0 and counter_2 > counter_3 + counter_4 + counter_5:\n students_to_return.append(student)\n \n students_to_send = normalize_students(students_to_return)\n return Response(students_to_send)",
"def student_match(request):\n takes = Take.objects.filter(student__username=request.data[\"sid\"])\n match_dict={}\n\n student=Student.objects.get(username=request.data[\"sid\"])\n match_dict[\"sid\"]=student.username\n match_dict[\"sname\"] = student.name\n\n match_dict[\"tid\"]=[]\n match_dict[\"tname\"] = []\n match_dict[\"cid\"] = []\n match_dict[\"cname\"] = []\n for take in takes:\n if take.teacher.username not in match_dict[\"tid\"]:\n print(take.teacher.username)\n #match_dict[\"tid\"].append(take.teacher.username)\n #match_dict[\"tname\"].append(take.teacher.name)\n if take.course.course_id not in match_dict[\"cid\"]:\n print(take.course.name)\n #match_dict[\"cid\"].append(take.course.course_id)\n #match_dict[\"cname\"].append(take.course.name)\n return Response(match_dict)",
"def list(self, request):\n\n marks = Marks.objects.all()\n\n mark_id_5 = marks.filter(name='5')\n mark_id_4 = marks.filter(name='4')\n mark_id_3 = marks.filter(name='3')\n mark_id_2 = marks.filter(name='2')\n mark_id_pass = marks.filter(name='Зачтено')\n mark_id_not_pass = marks.filter(name='Незачтено')\n mark_id_not_appointed = marks.filter(name='Неявка')\n\n records = filter_against_records(request)\n \n students = Students.objects.all()\n students_to_return = []\n \n for student in students:\n student_records = records.filter(student_id=student.id)\n \n counter_5 = 0\n counter_4 = 0\n counter_3 = 0\n counter_2 = 0\n counter_all = 0\n\n for record in student_records:\n mark_dict = model_to_dict(record.mark_id) \n mark = mark_dict['id'] \n\n if str(mark) == str(mark_id_5[0].id):\n counter_5 += 1\n counter_all += 1\n\n elif str(mark) == str(mark_id_4[0].id):\n counter_4 += 1\n counter_all += 1\n\n elif str(mark) == str(mark_id_3[0].id):\n counter_3 += 1\n counter_all += 1\n\n elif str(mark) == str(mark_id_2[0].id):\n counter_2 += 1\n counter_all += 1\n\n elif str(mark) == str(mark_id_pass[0].id):\n counter_all += 1\n\n elif str(mark) == str(mark_id_not_pass[0].id):\n counter_all += 1\n\n elif str(mark) == str(mark_id_not_appointed[0].id):\n counter_all += 1\n\n if counter_all == len(student_records) and len(student_records) > 0 and counter_2 < counter_3 + counter_4 + counter_5 and counter_2 > 0:\n students_to_return.append(student)\n \n students_to_send = normalize_students(students_to_return)\n return Response(students_to_send)"
] | [
"0.6781505",
"0.65362155",
"0.6199378",
"0.60326993",
"0.601739",
"0.5969962",
"0.5895063",
"0.583807",
"0.5774767",
"0.57600665",
"0.5728087",
"0.57097805",
"0.5650832",
"0.56497097",
"0.5617477",
"0.560614",
"0.55885255",
"0.55699545",
"0.55072105",
"0.5501444",
"0.5463087",
"0.5455778",
"0.5445733",
"0.54437566",
"0.5441116",
"0.5435814",
"0.54151374",
"0.5409635",
"0.53935504",
"0.53834814"
] | 0.6604642 | 1 |
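A quick usage sketch of the filter-by-student pattern above; the Grade stand-in class and sample values are illustrative, not from the record:

class Grade:
    def __init__(self, student_id, value):
        self._student = student_id
        self.value = value

    def get_student(self):
        return self._student


items = [Grade(1, 10), Grade(2, 7), Grade(1, 9)]
# Equivalent to get_all_by_student(1): keep only items for student 1.
mine = [item for item in items if item.get_student() == 1]
assert [g.value for g in mine] == [10, 9]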
return a list of the items belonging to the given discipline | def get_all_by_discipline(self, disc_id):
l = []
for i in self._items:
if i.get_id_disciplina() == disc_id:
l.append(i)
return l[:] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def deduped(items):\n \n return list(set(items))",
"def Collection_select_cheap(C:list, n: float)->list:\r\n result = []\r\n for rest in C:\r\n if Restaurant_is_cheap(rest, n) == True:\r\n result.append(rest)\r\n return result",
"def duplicates(items):\n duplicate_items = set()\n for item in items:\n if items.count(item) > 1:\n duplicate_items.add(item)\n duplicate_list = list(duplicate_items)\n\n\n return sorted(duplicate_list)",
"def removeDup(item, seq):\n return [x for x in seq if x != item]",
"def Collection_return_selected(C:list) -> list:\r\n p = int(input(\"Please enter the maximum average price: \"))\r\n c = input(\"Please enter preferred cuisine: \")\r\n cheap = Collection_select_cheap(C, p)\r\n cuisine = Collection_select_cuisine(C, c)\r\n result = []\r\n for r in cheap:\r\n for rc in cuisine:\r\n if r == rc:\r\n result.append(r)\r\n return result",
"def Deduplicate(items):\n seen = set()\n for it in items:\n if it not in seen:\n seen.add(it)\n yield it",
"def Collection_select_cuisine(C:list, cuisine:str) -> list:\r\n return [r for r in C if r.cuisine == cuisine]",
"def similar(self) -> List[Item]:\n return self._similar",
"def remove_duplicates_badSolution( li ):\n newli=[]\n seen = set()\n for item in li:\n if item not in seen:\n seen.add( item )\n newli.append(item)\n\n return newli",
"def get_duplicates(input: List[str]) -> List[str]:\n\n output = input # Replace with your logic\n\n return output",
"def FindDuplicates(seq):\n dup = set()\n seen = set()\n\n for item in seq:\n if item in seen:\n dup.add(item)\n else:\n seen.add(item)\n\n return list(dup)",
"def returnIdDisciplina(self):\r\n self.cursor.execute(\"SELECT ID FROM DISCIPLINA ORDER BY NOME;\")\r\n self.__result = self.cursor.fetchall()\r\n self.__lista = []\r\n try:\r\n for self.__i in self.__result:\r\n self.__lista.append(self.__i[0])\r\n return self.__lista\r\n except:\r\n return []",
"def get_sorted_disciplines(self):\n results = self.__create_discipline_and_grade_dto()\n results.sort(self.__compare_dtos_on_grade)\n return results",
"def _get_dups(self, ticket_id):\n db = self.env.get_db_cnx()\n cursor = db.cursor()\n cursor.execute(\"\"\"\nSELECT t.id, c.value\nFROM ticket t, ticket_custom c\nWHERE c.name='dup_of' AND t.id=c.ticket AND c.value=%s\n\"\"\" % ticket_id)\n ids = []\n for dup_id, m_id in cursor:\n ids.append(dup_id)\n\n return ids",
"def remove_duplicates_in_items(items: list, id_key: str) -> list:\n ids = {}\n new_items = []\n for item in items:\n item_id = item.get(id_key)\n if item_id not in ids:\n ids[item_id] = True\n new_items.append(item)\n\n return new_items",
"def ordonare_cresc_dupa_pret(lst):\n\treturn sorted(lst, key=get_pret_achizitie)",
"def get_duplicates(song_list):\n duplicates = [key for key in Counter(song_list).keys()\n if Counter(song_list)[key] > 1]\n return duplicates",
"def organize(inventory, grocery_list, exists=set()):\n\tlst = sorted(inventory, key=lambda x : x.aisle) #sort by aisle - O(N*logN)\n\taisles = [[] for y in lst if not exist_test(y.aisle, exists)] #create unique aisles only - O(N)\n\t[aisles[y.aisle].append(y.grocery) for y in lst if y.grocery in grocery_list] #append groceries - O(N*G) \n\treturn aisles",
"def get_continents():\n data=country_populations.split('\\n')\n continents=[]\n unique_cont=[]\n\n for country in data:\n split_data= country.split('\\t')\n continents.append(split_data[2])\n for cont in continents:\n if cont != \"Continent\":\n if continents.count(cont)>1:\n while continents.count(cont)>1:\n continents.remove(cont)\n unique_cont.append(cont)\n return unique_cont",
"def removeduplicates(facts: List[FHIRObservationFact]) -> List[FHIRObservationFact]:\n fact_keys: List[Tuple[int, str, str]] = []\n rval: List[FHIRObservationFact] = []\n for fact in facts:\n k = (fact.instance_num, fact.concept_cd, fact.modifier_cd)\n if k not in fact_keys:\n fact_keys.append(k)\n rval.append(fact)\n return rval",
"def unique_contigs_are_unique(scaffold_list, unique_contigs_list):\n i= 0\n old_scaffold_list = copy.deepcopy(scaffold_list)\n old_scaffold_list = purge_redundancy(old_scaffold_list)\n new_scaffold_list = []\n while new_scaffold_list != old_scaffold_list and i < 20:\n \n i += 1\n if i != 1: \n old_scaffold_list = copy.deepcopy(new_scaffold_list)\n #new list is now old list\n new_scaffold_list = new_resolve_unique_contigs(old_scaffold_list, unique_contigs_list) \n new_scaffold_list = purge_redundancy(new_scaffold_list)\n\n return new_scaffold_list",
"def captain_room(room_list):\n captain_room = '' \n #store the list in total_rooms\n total_rooms = room_list \n #get the unique rooms without repetition of any room\n unique_rooms_num = set(total_rooms) \n #Remove the unique room from the list of total rooms\n for n in unique_rooms_num:\n total_rooms.remove(n) \n without_captain_room = total_rooms\n #The original total room list does not contain captain room number anymore\n #check by print(total_rooms)\n\n #Now, Compare the unique room number: that contains captain number with\n #list without_captain_room\n for i in unique_rooms_num:\n if i not in without_captain_room: \n captain_room = i\n \n return captain_room",
"def getDisciplinesList():\n return Gw2Spidy._request('disciplines')['results']",
"def remove_duplicates(my_list):\n result = []\n for item in my_list:\n if item not in result:\n result.append(item)\n return result",
"def list_duplicates(seq):\n # https://stackoverflow.com/questions/5419204\n tally = defaultdict(list)\n for i, item in enumerate(seq):\n tally[item].append(i)\n return ((key, locs) for key, locs in tally.items() if len(locs) > 1)",
"def get_duplicates(input_list):\n size = len(input_list)\n duplicates = list()\n for i in range(size):\n k = i + 1\n for j in range(k, size):\n if input_list[i] == input_list[j] and input_list[i] not in duplicates:\n duplicates.append(input_list[i])\n return duplicates",
"def _remove_dupes(recs, input, bad_movies, hist_list=[], feedback_list=[]):\n all_rated = input + bad_movies + hist_list + feedback_list\n nonlocal dupes\n dupes = [x for x in recs if x[0] in input]\n return [x for x in recs if x[0] not in all_rated]",
"def objets_uniques(self):\n objets = []\n for membre in self.membres:\n for objet in membre.equipe:\n if objet.unique:\n objets.append(objet)\n objets.extend(objet.prototype.objets_contenus(objet))\n if membre.tenu and membre.tenu.unique:\n objet = membre.tenu\n objets.append(objet)\n objets.extend(objet.prototype.objets_contenus(objet))\n\n return objets",
"def remove_duplicates(mylist):\n return list(set(mylist))",
"def list_duplicates(seq):\n tally = defaultdict(list)\n for i, item in enumerate(seq):\n try:\n if item.mask == True:\n continue\n except:\n tally[item].append(i)\n return ((key, locs) for key, locs in tally.items() if len(locs) > 1)"
] | [
"0.57249516",
"0.559361",
"0.54721725",
"0.5366635",
"0.5349736",
"0.53270197",
"0.532001",
"0.53142184",
"0.5301576",
"0.5295073",
"0.52504534",
"0.5219532",
"0.5206376",
"0.5200724",
"0.51740515",
"0.5167879",
"0.5167069",
"0.514371",
"0.51328325",
"0.51251054",
"0.51210004",
"0.5082407",
"0.5063247",
"0.5045138",
"0.5043102",
"0.5041876",
"0.50401133",
"0.50397676",
"0.5039437",
"0.50354695"
] | 0.6530238 | 0 |
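When many lookups are needed, the per-discipline scan above can be replaced by building an index once; a sketch with collections.defaultdict (illustrative alternative, not from the record, and it assumes items expose the record's get_id_disciplina method):

from collections import defaultdict


def index_by_discipline(items):
    # One pass over the items; idx[disc_id] then answers each
    # get_all_by_discipline(disc_id) lookup without rescanning.
    idx = defaultdict(list)
    for item in items:
        idx[item.get_id_disciplina()].append(item)
    return idx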
Sliding window algorithm implementation. The output 'segments' contains the start and end indexes for each step. Assumes the input data is contiguous. | def segment_sliding_window(data, winSizeMillisecond=1000, stepSizeMillisecond=100):
logger.info("Sliding window with win size %.2f second and step size %.2f second",
winSizeMillisecond, stepSizeMillisecond)
if stepSizeMillisecond <= 0:
raise ValueError("Step size must be larger than 0!")
startTime = data['Time'].iloc[0]
endTime = data['Time'].iloc[-1]
segmentStart = np.arange(startTime, endTime - winSizeMillisecond, stepSizeMillisecond)
segmentEnd = segmentStart + winSizeMillisecond
segment = pd.DataFrame({'Start': segmentStart,
'End': segmentEnd},
columns=['Start', 'End'])
return segment | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _segment(data, segment_length=200,\n seq_length=None,\n stride=None,\n input_type='trials'):\n x_out = []\n if input_type == 'trials':\n seq_length = 1\n\n if not stride:\n stride = segment_length\n\n for jj, xx in enumerate(data):\n\n n_ch, n_t = xx.shape\n last_segment_start = n_t - segment_length\n\n starts = np.arange(0, last_segment_start+1, stride)\n\n segments = [xx[..., s:s+segment_length] for s in starts]\n\n if input_type == 'seq':\n if not seq_length:\n seq_length = len(segments)\n seq_bins = np.arange(seq_length, len(segments)+1, seq_length)\n segments = np.split(segments, seq_bins, axis=0)[:-1]\n x_new = np.array(segments)\n else:\n x_new = np.stack(segments, axis=0)\n# if not events:\n# x_new = np.expand_dims(x_new, 1)\n\n x_out.append(x_new)\n if len(x_out) > 1:\n X = np.concatenate(x_out)\n else:\n X = x_out[0]\n print(\"Segmented as: {}\".format(input_type), X.shape)\n return X",
"def sliding_window_offsets(data, window_size=500, shift_size=1):\n offsets = np.asarray(_sliding_window_chunkoffsets(data, window_size, shift_size))\n return offsets",
"def sliding_window_analysis(sequence, function,\n window_size=100000, step_size=50000):\n for start in range(0, len(sequence), step_size):\n end = start + window_size\n if end > len(sequence):\n break\n yield start, end, function(sequence[start:end])",
"def segment(data):",
"def get_segments(label_file, window=5):\n labels = pd.read_csv(label_file).sort_values('start').reset_index(\n drop=True)\n wlabels = labels.copy()\n wlabels.start -= window\n wlabels.stop += window\n # union segments\n b = []\n for x in wlabels.itertuples():\n if len(b) == 0:\n b.append([x.start, x.stop])\n elif x.start > b[-1][1]:\n b.append([x.start, x.stop])\n elif x.stop > b[-1][1]:\n b[-1][1] = x.stop\n # update labels times to new chunks\n prevchunks = 0\n for j, (start, stop) in enumerate(b):\n mask = (labels.start >= start) & (labels.stop <= stop)\n offset = -start + prevchunks\n labels.loc[mask, [\"start\", \"stop\"]] += offset\n prevchunks += stop - start\n return np.array(b), labels",
"def test_sliding_window(data, x):\n\n win_dim = data.draw(st.integers(1, x.ndim), label=\"win_dim\")\n win_shape = data.draw(\n st.tuples(*(st.integers(1, s) for s in x.shape[-win_dim:])), label=\"win_shape\"\n )\n step = data.draw(\n st.tuples(*(st.integers(1, s) for s in x.shape[-win_dim:])), label=\"step\"\n )\n\n max_dilation = np.array(x.shape[-win_dim:]) // win_shape\n dilation = data.draw(\n st.one_of(st.none(), st.tuples(*(st.integers(1, s) for s in max_dilation))),\n label=\"dilation\",\n )\n y = sliding_window_view(x, window_shape=win_shape, step=step, dilation=dilation)\n\n if dilation is None:\n dilation = np.ones((len(win_shape),), dtype=int)\n\n for ind in np.ndindex(*y.shape[:win_dim]):\n slices = tuple(\n slice(i * s, i * s + w * d, d)\n for i, w, s, d in zip(ind, win_shape, step, dilation)\n )\n assert_allclose(actual=y[tuple([*ind])], desired=x[(..., *slices)])",
"def sliding_window(frame_length, step, Xsampleslist, ysampleslist):\n Xsamples = []\n ysamples = []\n for j in range(len(Xsampleslist)):\n X = Xsampleslist[j]\n ybinary = ysampleslist[j]\n for i in range(0, X.shape[0] - frame_length, step):\n xsub = X[i:i + frame_length, :]\n ysub = ybinary\n Xsamples.append(xsub)\n ysamples.append(ysub)\n return Xsamples, ysamples",
"def slidingWindow(data,binSize=50,shiftSize=10,nproc=10):\n \n tLength = data.shape[0]\n nTrials = data.shape[1]\n \n ###\n \n inputs = []\n for trial in range(nTrials):\n inputs.append(data[:,trial],binSize,shiftSize)\n \n pool = mp.Pool(processes=nproc)\n results = pool.map_async(_slide,inputs).get()\n pool.close()\n pool.join()\n \n out = []\n for result in results:\n out.append(result.T)\n \n outarray = np.zeros((result.shape[0],nTrials))\n for i in range(nTrials):\n outarray[:,i] = out[i]\n\n return outarray",
"def win_slide(stream, start_time, win_size, step_size, max_windows):\n stream_list=[]\n for i in range(max_windows):\n ts = start_time + (i*step_size)\n st = stream.slice(ts, ts+win_size)\n # skip missing data\n if len(st)!=3: continue\n if not st[0].stats.starttime == st[1].stats.starttime == st[2].stats.starttime: continue\n if not st[0].stats.endtime == st[1].stats.endtime == st[2].stats.endtime: continue\n if len(st[0])!=int(win_size*100+1): continue\n if st.max()[0]==0.0 or st.max()[1]==0.0 or st.max()[2]==0.0: continue\n # add preprocessed time window\n st = preprocess_stream(st)\n stream_list.append(st)\n return stream_list",
"def slidingWindow(sequence,winSize,step):\n \n # Verify the inputs\n try: it = iter(sequence)\n except TypeError:\n raise Exception(\"**ERROR** sequence must be iterable.\")\n if not ((type(winSize) == type(0)) and (type(step) == type(0))):\n raise Exception(\"**ERROR** type(winSize) and type(step) must be int.\")\n if step > winSize:\n raise Exception(\"**ERROR** step must not be larger than winSize.\")\n if winSize > len(sequence):\n raise Exception(\"**ERROR** winSize must not be larger than sequence\\\n length.\")\n # Pre-compute number of chunks to emit\n numOfChunks = ((len(sequence)-winSize)/step)+1\n # Do the work\n for i in range(0,int(numOfChunks)*step,step):\n yield sequence[i:i+winSize]",
"def sliding_window(top, step=10, window_size=(20,20)):\n\tfor x in range(0, top.shape[0], step):\n\t\tif x + window_size[0] > top.shape[0]:\n\t\t\tx = top.shape[0] - window_size[0]\n\t\tfor y in range(0, top.shape[1], step):\n\t\t\tif y + window_size[1] > top.shape[1]:\n\t\t\t\ty = top.shape[1] - window_size[1]\n\t\t\tyield x, y, window_size[0], window_size[1]",
"def segmenter(data_stream: numpy.ndarray) -> Sequence[int]:\n diff = numpy.median(\n numpy.convolve(\n numpy.abs(numpy.diff(data_stream)), numpy.array([1, 1, 1, 1, 1]) / 5\n )\n )\n return cast(\n Sequence[int],\n numpy.where(\n numpy.abs(numpy.diff(data_stream, prepend=data_stream[0])) > diff * 5\n )[0],\n )",
"def get_test_segments(data):\n n_channels = data.shape[0]\n n_steps = data.shape[1]\n factor = 2\n n_segments = n_steps // factor\n\n segments = []\n for i_segment in range(n_segments):\n for i_channel in range(n_channels):\n segment = {\n 'index': i_segment + i_channel * n_segments,\n 'start': i_segment,\n 'stop': i_segment + 1,\n 'weight': data[i_channel, factor * i_segment],\n }\n if n_channels > 1:\n segment['channel'] = i_channel\n segments.append(segment)\n\n return segments",
"def to_sliding_window(x, y, timesteps, predict_ahead, index=None):\n\n xnew = sliding_window(x, timesteps, predict_ahead)\n #print(xnew)\n ynew = y[timesteps+predict_ahead:]\n\n if index is not None:\n idxnew = index[timesteps+predict_ahead:]\n return xnew, ynew, idxnew\n\n return xnew, ynew, None",
"def sliding_window(self, incr):\n\n start_val = incr - self._sw_val\n end_val = incr\n\n if end_val < len((self._array_dict['RSSI'])):\n for norm_arrays, sw_arrays in zip(self._array_dict.values(), self._sw_dict.items()):\n self._sw_dict[sw_arrays[0]] = norm_arrays[start_val:incr]\n\n else:\n if self._quiet is False:\n print('Sliding window can no longer be implemented due to end of frames approaching!')",
"def segment_data(x):\n return (x.shift(1) != x).astype(int).cumsum()",
"def split_sequence(sequence, window, horizon):\n X, y = list(), list()\n for i in range(len(sequence)):\n # find the end of this pattern\n end_ix = i + window\n out_end_ix = end_ix + horizon\n # check if we are beyond the sequence\n if out_end_ix > len(sequence):\n break\n # gather input and output parts of the pattern\n seq_x, seq_y = sequence[i:end_ix], sequence[end_ix:out_end_ix]\n X.append(seq_x)\n y.append(seq_y)\n return np.array(X), np.array(y)",
"def _sliding_windows(a, N):\n a = np.asarray(a)\n p = np.zeros(N - 1, dtype=a.dtype)\n b = np.concatenate((p, a, p))\n s = b.strides[0]\n return np.lib.stride_tricks.as_strided(\n b[N - 1:],\n shape=(N, len(a) + N - 1),\n strides=(-s, s),\n )",
"def consecutive_sections(): # noqa: D416",
"def get_indices_entire_sequence(data: pd.Dataframe, window_size: int, step_size: int) -> list:\n stop_position = len(data)-1 # 1- because of 0 indexing\n\n # Start the first sub-sequence at index position 0\n subseq_first_idx = 0\n\n subseq_last_idx = subseq_first_idx + window_size\n\n indices = []\n\n while subseq_last_idx <= stop_position:\n indices.append((subseq_first_idx, subseq_last_idx))\n subseq_first_idx += step_size\n subseq_last_idx += step_size\n return indices",
"def SplitGap(data,gapsize,medwin,fluxdiff):\n \n # defining new empty lists and stuff\n pcount=0\n istamps=[]\n outData={}\n \n data['x'].mask = data['UnMasked']\n data['y'].mask = data['UnMasked']\n data['yerr'].mask = data['UnMasked']\n \n # median smoothing the lightcurve\n mvavg1 = movingMedian(data['y'],medwin)\n mvavg1 = num.append(mvavg1,mvavg1[-1])\n mvavg1 = data['y']\n # first derivative of smoothed lightcurve\n diff1 = num.diff(mvavg1)\n diff1 = num.hstack((diff1,diff1[-1]))\n \n # second derivative of smoothed lightcurve\n diff2 = num.diff(diff1)\n diff2 = num.hstack((diff2[-1],diff2))\n\n # compute ourlier resistant sigma\n sig = compute1Sigma(diff1)\n #pylab.plot(diff1,'g.')\n #pylab.plot([0,6000],[5*sig,5*sig],'k-')\n #pylab.plot([0,6000],[3*sig,3*sig],'k-')\n #pylab.plot([0,6000],[1*sig,1*sig],'k-')\n #pylab.show()\n\n # The grand master loop >=}\n # to make portion slices\n for i in range(len(data['x'])-1):\n dt = data['x'][i+1]- data['x'][i]\n j1 = max(0,i-medwin)\n j2 = i + medwin\n if pcount == 0:\n i0 = 0\n if pcount > 0:\n i0 = i1+1\n if dt > gapsize:\n i1 = i\n istamps.append([i0,i1])\n pcount += 1\n #if num.abs(diff1[i]) > 5*sig:\n #i1 = i\n #istamps.append([i0,i1])\n #pcount += 1\n #print num.abs(diff1[i]/data['y'][i]), diff1[i], data['y'][i], diff1[i+1], data['y'][i+1]\n #print i, ' test flux gap'\n i1 = i+1\n istamps.append([i0,i1])\n \n \n \n if data['bool']==False:\n # Applying slices\n for j in range(len(istamps)):\n #print istamps[j][0], istamps[j][1]\n outData['portion' + str(j+1)] = {'kid':data['kid'],'x':data['x'][istamps[j][0]:istamps[j][1]+1], 'y':data['y'][istamps[j][0]:istamps[j][1]+1], 'yerr':data['yerr'][istamps[j][0]:istamps[j][1]+1],'UnMasked':data['UnMasked'][istamps[j][0]:istamps[j][1]+1],'bool':False}\n else:\n # Applying slices\n for j in range(len(istamps)):\n #print istamps[j][0], istamps[j][1]\n outData['portion' + str(j+1)] = {'kid':data['kid'],'x':data['x'][istamps[j][0]:istamps[j][1]+1], 'y':data['y'][istamps[j][0]:istamps[j][1]+1], 'yerr':data['yerr'][istamps[j][0]:istamps[j][1]+1], 'TransitMask':data['TransitMask'][istamps[j][0]:istamps[j][1]+1],'UnMasked':data['UnMasked'][istamps[j][0]:istamps[j][1]+1],'bool':True}\n \n return outData",
"def dataset_to_windows(dataset, windowsize):\n windows = []\n row, col = dataset.shape\n for i in range(col):\n if i > 0:\n windows.append(lag(np.array(dataset)[:,i], windowsize))\n return np.array(windows)",
"def window_data(data: np.ndarray):\n\n w_len = 128\n stride = w_len // 2\n\n no_offset_windows = np.split(data, 10)\n offset_windows = np.split(data[stride:-stride], 9)\n windows = [0] * 19\n windows[::2] = no_offset_windows\n windows[1::2] = offset_windows\n windows = np.array(windows, dtype=np.float32)\n\n return windows",
"def segment_by_window(self, dt, criterion):\n data_to_segment_by = list(self.get_column(criterion.column_name))\n fitted_distr = UnivariateEmpiricalDistribution(data_to_segment_by)\n\n # day_ahead_value is the number which we wish to segment by\n day_ahead_value = self.get_dayahead_value(criterion.column_name, dt)\n\n window_size = criterion.window_size\n\n segmenter_data_cdf_val = fitted_distr.cdf(day_ahead_value) # on 0,1\n if segmenter_data_cdf_val < window_size / 2:\n # Slide up window\n lower_cdf, upper_cdf = (0, window_size)\n elif segmenter_data_cdf_val > 1 - window_size / 2:\n # Slide down window\n lower_cdf, upper_cdf = (1 - window_size, 1)\n else:\n # Window fits in data\n lower_cdf, upper_cdf = (segmenter_data_cdf_val - window_size / 2,\n segmenter_data_cdf_val + window_size / 2)\n\n lower_bound, upper_bound = (fitted_distr.cdf_inverse(lower_cdf),\n fitted_distr.cdf_inverse(upper_cdf))\n segment = self.window(criterion.column_name, lower_bound, upper_bound)\n\n return RollingWindow(self.name, segment.data, self.source_type,\n self.dayahead_data)",
"def movingWindow(rawData, n):\n data = np.array([rawData[i:i+n] for i in range(rawData.shape[0] - (n-1))])\n return data",
"def segment_by_enumerate(self, dt, criterion):\n column = criterion.column_name\n dayahead_value = self.get_dayahead_value(column, dt)\n segment = self.enumerate(column, dayahead_value)\n return RollingWindow(self.name, segment.data, self.source_type,\n self.dayahead_data)",
"def _slice_indices(self, s):\n\n if s.step is None:\n k = 1\n elif s.step == 0:\n raise ValueError(\"Slice step must be a non-zero integer.\")\n else:\n k = s.step\n\n i = self._index_normalize(s.start)\n j = self._index_normalize(s.stop)\n if k > 0:\n i = self._slice_lower(i)\n j = self._slice_upper(j)\n else:\n i = self._slice_upper(i, -1)\n j = self._slice_lower(j, -1)\n\n index = i\n indices = []\n if k > 0:\n while index < j:\n indices.append(index)\n index += k\n else:\n while index > j:\n indices.append(index)\n index += k\n return indices",
"def window_index(serieslength,windowsize,overlap):\r\n\r\n p1=0\r\n p2=p1 + windowsize\r\n pt1=[p1]\r\n pt2=[p2]\r\n while p2 < serieslength:\r\n p1 = p2 - overlap\r\n p2 = min((p1 + windowsize, serieslength))\r\n pt1.append(p1)\r\n pt2.append(p2)\r\n \r\n return pt1, pt2",
"def get_segmented_epochs(data, window_len, shift_len, sample_rate):\n\n num_classes = data.shape[0]\n num_chan = data.shape[1]\n num_trials = data.shape[3]\n\n duration = int(window_len * sample_rate)\n data_overlap = (window_len - shift_len) * sample_rate\n # 分段数目\n number_of_segments = int(math.ceil((data.shape[2] - data_overlap) /\n (duration - data_overlap)))\n\n segmented_data = np.zeros((data.shape[0], data.shape[1],\n data.shape[3], number_of_segments, duration))\n\n for target in range(0, num_classes):\n for channel in range(0, num_chan):\n for trial in range(0, num_trials):\n segmented_data[target, channel, trial, :, :] = buffer(data[target, channel, :, trial],\n duration, data_overlap)\n\n return segmented_data",
"def segment(lengths, params):\n\t\n\tlengths = lengths[:,0]\n\tS = tf.cumsum(lengths)\n\tindex = tf.range(S[-1], dtype=tf.int64)\n\tsid = tf.searchsorted(S, index, side='right', out_type=tf.dtypes.int64)\n\touts = multi_gather(sid, [lengths-S, *params])\n\tindex += outs[0] # offset\n\touts = outs[1:]\n\treturn sid, index, outs"
] | [
"0.67323786",
"0.66023433",
"0.6593062",
"0.65148705",
"0.6402961",
"0.6359427",
"0.6332763",
"0.6283209",
"0.6280443",
"0.62024176",
"0.61759365",
"0.6168342",
"0.6120425",
"0.6099909",
"0.6057716",
"0.6040668",
"0.5987201",
"0.5953549",
"0.5936035",
"0.5916517",
"0.5867777",
"0.58479744",
"0.5845947",
"0.58248246",
"0.58187383",
"0.57979465",
"0.57823426",
"0.5769124",
"0.5759304",
"0.5749454"
] | 0.7024497 | 0 |
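A worked call of segment_sliding_window above, assuming a pandas DataFrame with a millisecond 'Time' column and a module-level logger (which the record's code references but does not define); values are illustrative:

import logging

import numpy as np
import pandas as pd

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)  # used inside segment_sliding_window

data = pd.DataFrame({"Time": np.arange(0, 5000, 10)})  # samples at 0..4990 ms
segments = segment_sliding_window(data, winSizeMillisecond=1000,
                                  stepSizeMillisecond=100)
# First window spans [0, 1000); each later window starts 100 ms after the last.
print(segments.head())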
Convert Django model records to a GeoJSON FeatureCollection | def to_geojson(model, contrib_id):
feature_collection = []
for record in model.objects.filter(contributer_id=contrib_id):
try:
properties = {
"name": record.name,
"address": record.address,
"email": record.email,
"website": record.website,
"phone_number": record.phone_number,
}
my_point = Point((record.longitude, record.latitude))
my_feature = Feature(geometry=my_point, properties=properies)
feature_collection.append(my_feature)
except ValueError:
pass
return FeatureCollection(feature_collection) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def default(self, o): \n if isinstance(o, GEOSGeometry):\n dictval = json.loads(o.geojson)\n #raise Exception(o.ewkt)\n dictval['__GEOSGeometry__'] = ['__init__', [o.ewkt]] #json class hint; see http://json-rpc.org/wiki/specification\n return dictval\n else:\n super(DjangoGEOJSONEncoder, self).default(o)",
"def data_geojson(self):\n coordinates = self.value\n if not coordinates:\n return\n\n title = getattr(self.context, 'title', '') or ''\n description = getattr(self.context, 'description', '') or ''\n\n geo_json = {\n 'type': 'FeatureCollection',\n 'features': [\n {\n 'type': 'Feature',\n 'properties': {\n 'popup': u'<h3>{0}</h3><p>{1}</p>'.format(\n safe_unicode(title),\n safe_unicode(description)\n )\n },\n 'geometry': {\n 'type': 'Point',\n 'coordinates': [\n coordinates[1], # lng\n coordinates[0] # lat\n ]\n }\n },\n ]\n }\n\n if self.mode == 'input':\n properties = geo_json['features'][0]['properties']\n properties['editable'] = True\n properties['no_delete'] = True\n properties['latinput'] = u'#{0}'.format(self.id_input_lat)\n properties['lnginput'] = u'#{0}'.format(self.id_input_lng)\n\n return json.dumps(geo_json)",
"def get_geojson(self, sql, context):\n result = self.db.query(sql).getresult()\n geo_objects = []\n\n for poly in result:\n poly = poly[0]\n geo_objects.append(geojson.Feature(geometry=geojson.loads(poly)))\n\n crs = {\n \"type\": \"name\",\n \"properties\": {\n \"name\": \"EPSG:\" + str(context[\"crs\"])\n }\n }\n collection = geojson.FeatureCollection(geo_objects, crs=crs)\n\n return {\n 'type': 'result',\n 'result': geojson.dumps(collection)\n }",
"def to_representation(self, object):\n\n measurement_serializer = MeasurementSerializer(\n object.measurements.all(),\n many=True,\n context=self.context\n )\n\n return {\n 'type': 'Feature',\n 'geometry': json.loads(object.geometry.geojson),\n 'id': object.id,\n 'name': object.name,\n 'created': str(object.created),\n 'properties': object.properties,\n 'measurements': measurement_serializer.data\n }",
"def as_geojson(self):\n return _property_op(arctern.ST_AsGeoJSON, self)",
"def serialize(self):\n return {\n 'id': self.id,\n 'name': self.name,\n 'latitude': self.latitude,\n 'longitude': self.longitude\n }",
"def geojson(self):\n return {\n \"type\": \"FeatureCollection\",\n \"features\": [f.geojson(i) for i, f in self._features.items()]\n }",
"def geojson(self, feature_id):\n lat, lon = self.lat_lon\n return {\n 'type': 'Feature',\n 'id': feature_id,\n 'geometry': {\n 'type': 'Point',\n 'coordinates': (lon, lat),\n },\n }",
"def to_representation(self, instance):\n # prepare OrderedDict geojson structure\n feature = OrderedDict()\n # the list of fields that will be processed by get_properties\n # we will remove fields that have been already processed\n # to increase performance on large numbers\n fields = list(self.fields.values())\n\n # optional id attribute\n if self.Meta.id_field:\n field = self.fields[self.Meta.id_field]\n value = field.get_attribute(instance)\n feature[self.Meta.identifier] = field.to_representation(value)\n fields.remove(field)\n\n # required type attribute\n # must be \"Feature\" according to GeoJSON spec\n feature[\"type\"] = \"Feature\"\n\n # required geometry attribute\n # MUST be present in output according to GeoJSON spec\n field = self.fields[self.Meta.geo_field]\n geo_value = field.get_attribute(instance)\n feature[\"geometry\"] = field.to_representation(geo_value)\n fields.remove(field)\n # Bounding Box\n # if auto_bbox feature is enabled\n # bbox will be determined automatically automatically\n if self.Meta.auto_bbox and geo_value:\n feature[\"bbox\"] = geo_value.extent\n # otherwise it can be determined via another field\n elif self.Meta.bbox_geo_field:\n field = self.fields[self.Meta.bbox_geo_field]\n value = field.get_attribute(instance)\n feature[\"bbox\"] = value.extent if hasattr(value, 'extent') else None\n fields.remove(field)\n\n # GeoJSON properties\n feature[\"properties\"] = self.get_properties(instance, fields)\n\n return feature",
"def geo_transform(self):\n pass",
"def geojson2postgis(self, filepath, table_name, geo_type):\n map_data = gpd.GeoDataFrame.from_file(filepath)\n # Maybe you want to change link address\n link = \"postgresql://{0}:{1}@{3}:5432/{2}\".format(self.username, self.password, self.dbname, self.host)\n engine = create_engine(link, encoding='utf-8')\n map_data = self.dict_to_json(map_data)\n map_data['geometry'] = map_data['geometry'].apply(lambda x: WKTElement(x.wkt, 4326))\n # Maybe you want to change 'replace' to 'append' in the future\n map_data.to_sql(\n name=table_name,\n con=engine,\n if_exists='replace',\n dtype={'geometry': Geometry(geometry_type=geo_type, srid=4326)}\n )",
"def serialize(self):\n return {\n 'id' : self.id,\n 'description': self.description,\n 'longitude' : self.longitude,\n 'latitude' : self.latitude,\n 'created_on' : self.created_on,\n 'created_by' : self.created_by,\n 'likes' : self.likes\n }",
"def toJSON(self):\n (latitude, longitude, altitude_msl) = self.getPosition()\n data = {\n 'latitude': latitude,\n 'longitude': longitude,\n 'altitude_msl': altitude_msl,\n 'sphere_radius': self.sphere_radius\n }\n return data",
"def toJSON(self):\n if self.gps_position is None:\n latitude = 0\n longitude = 0\n else:\n latitude = self.gps_position.latitude\n longitude = self.gps_position.longitude\n data = {\n 'latitude': latitude,\n 'longitude': longitude,\n 'cylinder_radius': self.cylinder_radius,\n 'cylinder_height': self.cylinder_height\n }\n return data",
"def item_gewest_adapter(obj, request):\n return {\n 'id': obj.id,\n 'namen': obj._namen,\n 'centroid': obj.centroid,\n 'bounding_box': obj.bounding_box\n }",
"def __geo_interface__(self):\r\n if HASARCPY:\r\n template = {\r\n \"type\": \"FeatureCollection\",\r\n \"features\": []\r\n }\r\n geom_type = self.geometry_type\r\n if geom_type.lower() == \"point\":\r\n geom_type = \"Point\"\r\n elif geom_type.lower() == \"polyline\":\r\n geom_type = \"LineString\"\r\n elif geom_type.lower() == \"polygon\":\r\n geom_type = \"Polygon\"\r\n df_copy = self.copy(deep=True)\r\n df_copy['geom_json'] = self.geometry.JSON\r\n df_copy['SHAPE'] = df_copy['geom_json']\r\n del df_copy['geom_json']\r\n for index, row in df_copy.iterrows():\r\n geom = row['SHAPE']\r\n del row['SHAPE']\r\n template['features'].append(\r\n {\"type\" : geom_type,\r\n \"geometry\" : pd.io.json.loads(geom),\r\n \"attributes\":row}\r\n )\r\n return pd.io.json.dumps(template)",
"def geoJSON(self, as_json=True, using_style_template=True):\n properties_main = self.properties or {}\n properties_built = dict(id=self.id,\n status=self.status,\n analyst=self.analyst.username,\n created_at=datetime.strftime(self.created_at, '%Y-%m-%dT%H:%M:%S%Z'),\n updated_at=datetime.strftime(self.updated_at, '%Y-%m-%dT%H:%M:%S%Z'),\n )\n properties_template = self.template.properties or {}\n\n # properties_template can return a list from it's backing model, make sure we get the Dict\n if type(properties_template) == types.ListType:\n properties_template = properties_template[0]\n\n # srj: if using_style_template set, we're styling object from its feature id, else we'll\n # just use the style properties (which should already be included if defined for feature)\n # (we may want to set some defaults later on to make sure)\n if using_style_template:\n properties_built['template'] = self.template.id if hasattr(self.template, \"id\") else None\n\n properties = dict(properties_built.items() + properties_main.items() + properties_template.items())\n\n feature_type = FeatureType.objects.get(id=self.template.id)\n\n geojson = SortedDict()\n geojson[\"type\"] = \"Feature\"\n geojson[\"properties\"] = properties\n geojson[\"geometry\"] = json.loads(self.the_geom.json)\n\n if feature_type and using_style_template:\n geojson[\"style\"] = feature_type.style_to_geojson()\n else:\n geojson[\"style\"] = feature_type.style\n\n if(as_json):\n return clean_dumps(geojson)\n else:\n for key in properties:\n if isinstance(properties[key],str) or isinstance(properties[key], unicode):\n properties[key] = properties[key].replace('<', '<l').replace('>', '>').replace(\"javascript:\", \"j_script-\")\n return geojson",
"def json(self):\r\n return {\"id\": self.id, \"code\": self.code, \"description\": self.description, \"xCoor\": self.x_coor, \"yCoor\": self.y_coor, \"latitude\": self.latitude,\r\n \"longitude\": self.longitude, \"waterschapId\": self.waterschap_id, \"watertypeId\": self.watertype_id, \"watertypeKrwId\": self.watertype_krw_id}",
"def convert_to_geojson(type, coords):\n\tgeojson = {\"type\": \"FeatureCollection\", \"features\": None}\n\n\tif type == \"location_field\":\n\t\tgeojson[\"features\"] = merge_location_features_in_one(coords)\n\telif type == \"coordinates\":\n\t\tgeojson[\"features\"] = create_gps_markers(coords)\n\n\treturn geojson",
"def as_json(self):",
"def sites_geojson():\n\n with Config() as config:\n with db.Connection(config) as con:\n features = con.features()\n features = list(features)\n return flask.jsonify(features)",
"def to_dict(self):\n return {'lat': self.lat,\n 'lon': self.lon}",
"def _get_geometry(self, val):\n g = OGRGeometry(val)\n return json.loads(g.json)",
"def planned_site_geojson(request):\n feature = serialize('geojson',\n PlannedSiteStatus.objects.all(),\n fields='eez_boundary, status'\n )\n return HttpResponse(feature, content_type='application/json')",
"def ser(self):\n return {\n 'lat': self.lat,\n 'lon': self.lon,\n 'title': self.title,\n 'datasource': self.datasource,\n 'author': self.author,\n 'start_time': self.start_time\n }",
"def serialize(self):\n return {\n 'name' :self.name,\n 'points' :self.pts,\n 'id' :self.id,\n 'league_id':self.league_id,\n 'userID':self.user_id\n }",
"def geometry(self, objectId):\n\n objectId = GeometryReference(objectId, self)\n req = urllib2.Request(self.baseUri + 'geometry/%d' % objectId.id)\n r = urllib2.urlopen(req)\n\n data = json.load(r)\n r.close()\n return data",
"def get_geojson():\n\n # check the file was already downloaded\n global GEOJSON\n if GEOJSON: return GEOJSON\n\n conn = None\n cur = None\n try:\n\n conn = utils.pgconnect(**settings.DEFAULT_CONNECTION)\n cur = conn.cursor()\n cur.execute( \"\"\"SELECT row_to_json(fc) FROM \n ( SELECT 'FeatureCollection' As type, array_to_json(array_agg(f)) As features\n FROM (SELECT 'Feature' As type , ST_AsGeoJSON(lg.geom)::json As geometry, row_to_json(lp) As properties\n FROM exercise.states As lg INNER JOIN (SELECT gid,name FROM exercise.states) As lp\n ON lg.gid = lp.gid ) As f) As fc;\"\"\", (AsIs(settings.STATES_TABLE_NAME)))\n result = cur.fetchone()[0]\n\n #print(result)\n\n #make the result global\n GEOJSON = result\n return GEOJSON\n\n except Exception as e:\n raise Exception(e)\n\n finally:\n if conn: conn = None\n if cur: cur = None",
"def _to_gisdb(self):\n self._ways.to_postgis(name=\"ways\", con=self._gisdb, if_exists=\"append\")\n self._nodes.to_sql(name=\"nodes\", con=self._gisdb, if_exists=\"append\")\n gdf_nodes, gdf_edges = osmnx.utils_graph.graph_to_gdfs(self._graph, node_geometry=False,\n fill_edge_geometry=False)\n gdf_edges[['id', 'length', 'u', 'v', 'key']].to_postgis(name=\"graph_edges\", con=self._gisdb, if_exists=\"append\")\n gdf_nodes[['id']].to_postgis(name=\"graph_nodes\", con=self._gisdb, if_exists=\"append\")\n self._nodes.to_sql(name=\"nodes\", con=self._gisdb, if_exists=\"append\")",
"def get_allpoints_geojson():\n\n # check the file was already downloaded\n global GEOJSON\n if GEOJSON: return GEOJSON\n\n conn = None\n cur = None\n try:\n\n conn = utils.pgconnect(**settings.DEFAULT_CONNECTION)\n cur = conn.cursor()\n cur.execute( \"\"\"SELECT row_to_json(fc) FROM \n ( SELECT 'FeatureCollection' As type, array_to_json(array_agg(f)) As features\n FROM (SELECT 'Feature' As type , ST_AsGeoJSON(lg.geom)::json As geometry, row_to_json(lp) As properties\n FROM %s As lg INNER JOIN (SELECT id,label,size FROM %s) As lp\n ON lg.id = lp.id ) As f) As fc;\"\"\", (AsIs(settings.BOOKMARKS_TABLE_NAME),AsIs(settings.BOOKMARKS_TABLE_NAME)))\n result = cur.fetchone()[0]\n\n #print(result)\n\n #make the result global\n GEOJSON = result\n return GEOJSON\n\n except Exception as e:\n raise Exception(e)\n\n finally:\n if conn: conn = None\n if cur: cur = None"
] | [
"0.6924277",
"0.6681148",
"0.6647204",
"0.6602624",
"0.6577234",
"0.6551587",
"0.6533658",
"0.6529714",
"0.64571106",
"0.645591",
"0.6319082",
"0.6317624",
"0.62623715",
"0.6253182",
"0.62173903",
"0.6201155",
"0.6189135",
"0.61708796",
"0.6170053",
"0.61695904",
"0.6141281",
"0.61173636",
"0.61092734",
"0.6089726",
"0.6032944",
"0.6012693",
"0.60107285",
"0.60081816",
"0.59430933",
"0.58988965"
] | 0.6926797 | 0 |
returns the candidates as a simple string | def cand_str(self):
return "".join([str(x) for x in self.cands]) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_candidates(self):\n return u', '.join([c.identifier for c in self.candidates.all()])",
"def get_all_candidates(self) -> list:",
"def view_candidates(self):\n items = ['id', self.filter, 'half_light', 'separation', 'P_c']\n for add_on in ['P_O', 'P_Ox']:\n if add_on in self.candidates.keys():\n items += [add_on]\n print(self.candidates[items])",
"def displayCandidates(self, language):\n vertex = next((node for node in self.vertices if node['value'].lower() == language.strip().lower()), None)\n outputStr = '\\n\\n--------Function displayCandidates --------\\n\\nList of Candidates who can speak {language}:'.format(language = language)\n if (vertex == None):\n self.printOutput('\\n\\n--------Function displayCandidates --------\\n\\n No language found with this name')\n else :\n outputData = [self.vertices[index] for index, value in enumerate(self.edges[vertex['index']]) if value == 1]\n for outputDataItem in outputData:\n outputStr += '\\n\\n{name}'.format(name = outputDataItem['value'].title())\n self.printOutput(outputStr)\n analysisOutput = '\\n\\n--------Function displayCandidates --------\\n\\nFunction for time complexity is F(n) = (n + n)\\n\\nWorst Complexity = O(n)\\n\\n'\n analysisOutput += '\\n\\nHence, Runtime Complexity = {n}'.format(n = len(self.vertices))\n self.printAnalysis(analysisOutput)",
"def __str__(self):\n return \"{}\".format(self._matches.keys())",
"def _get_choices_str(self):\n return ', '.join(\n '\"%s\"' % choice\n for choice in self.choices\n )",
"def __str__(self):\n st=\"\"\n for g in self:\n st+=g.fasta()\n st+=\"\\n\"\n return st",
"def candidates_all():\n return jsonify(candidate.get_candidates())",
"def __str__(self):\n return str((self.code, self.fitness,))",
"def __str__(self):\r\n\t\tstrRepr = ''\r\n\t\tfor i, person in enumerate(self.people):\r\n\t\t\tassignmentsString = ','.join((self.assignments[i,:].astype(int)).astype(str))\r\n\t\t\tstrRepr += '{0},{1}\\n'.format(person.uid, assignmentsString)\r\n\t\treturn strRepr",
"def toString():",
"def _generate_base_candidates(self, target_text):\n\n result_list = []\n tagged_text = tag(target_text)\n\n for i in range(1, 5):\n temp = []\n grams = find_ngrams(tagged_text, i)\n\n for gram in grams:\n phrase = \" \".join(list(map(lambda x: x[0], gram)))\n pos = \" \".join(list(map(lambda x: x[1], gram)))\n\n if pos in self.candidate_pattern:\n temp.append(phrase)\n\n result_list.append(temp)\n\n return result_list",
"def get_answer_sets_string(self):\n return self._output",
"def __str__(self):\n L = []\n for s,e in self.normalized():\n if s == e:\n L.append(str(s))\n else:\n L.append(str(s) + \"-\" + str(e))\n return \",\".join(L)",
"def fetch_candidate_name(self):\r\n # variable to save possible matches\r\n possible_names = []\r\n\r\n # source text is input document in text format\r\n nlp_text = self.doc # := nlp(self.stringtext)\r\n\r\n # Add patterns to match proper names\r\n patterns = [[{'POS': 'PROPN'}]]\r\n self.matcher.add('NAME', patterns) \r\n matches = self.matcher(nlp_text) \r\n\r\n # fetch the matches\r\n for match_id, start, end in matches:\r\n span = nlp_text[start:end] \r\n possible_names += [span.text] \r\n if len(possible_names) >= 2: \r\n break\r\n\r\n # Extract candidates\r\n doc_entities = self.doc.ents\r\n\r\n # Subset to person type entities\r\n doc_persons = filter(lambda x: x.label_ == 'PERSON', doc_entities)\r\n doc_persons = filter(lambda x: len(\r\n x.text.strip().split()) >= 2, doc_persons)\r\n doc_persons = map(lambda x: x.text.strip(), doc_persons)\r\n doc_persons = list(doc_persons)\r\n\r\n # Assume the first Person entity with more than two tokens is the candidate's name\r\n if len(doc_persons) > 0:\r\n return possible_names + [doc_persons[0]]\r\n\r\n return \"NOT FOUND\"",
"def __str__(self):\n if not self.has_converged or self.parameters is None:\n log.warning(\"The fit has not converged. Try again!\")\n return ''\n\n result = []\n for parameter in self.parameters.keys():\n if parameter in self.fit_for:\n parameter_string = self.get_parameter_string(parameter)\n if parameter_string is not None:\n result.append(parameter_string)\n\n rms = self.get_parameter_format('kelvin') % self.rms\n result.append(f\"[{rms} K rms]\")\n return '\\n'.join(result)",
"def __str__(self):\n \n return reduce(lambda a,b : str(a)+str(b),self.list)",
"def cv_list(self):\n\n mystr = \"\"\n for p in self.mypapers:\n mystr += f\"{p.title[0]}\\n\"\n if len(p.author) > 12:\n a = f\"{p.author[0]} et al. \"\n elif len(p.author) > 2:\n a = \", \".join(p.author[:-1]) + f\" & {p.author[-1]} \"\n elif len(p.author) == 2:\n a = f\"{p.author[0]} & {p.author[1]} \"\n else:\n a = f\"{p.author[0]} \"\n\n mystr += f\"{a}\"\n mystr += f\"{p.year}, {p.pub}\"\n if p.volume is not None:\n mystr += f\", {p.volume}\"\n if p.issue is not None:\n mystr += f\", {p.issue}\"\n if p.page is not None:\n mystr += f\", {p.page[0]}\"\n mystr += \"\\n\\n\"\n return mystr",
"def __str__(self):\n return ''.join(\n f'Chromosome - {index} {chromosome} / Fitness = {chromosome.fitness}\\n'\n for index, chromosome\n in enumerate(self)\n )",
"def PrintResult(self):\n if len(self.matchList) <= 0:\n return \"Unfortunately: \" + self.name + \" did not match any applicants\"\n mystring = self.name + \" matched: \"\n isFirst = True\n for eachApplicant in self.matchList:\n if isFirst:\n isFirst = False\n else:\n mystring += \"\\n\"\n mystring += eachApplicant.name\n return mystring",
"def get_students(self):\n return u', '.join([c.student.username for c in self.candidates.all()])",
"def __str__(self):\n return ''.join(['(', ', '.join(self.filenamePatterns()), ')'])",
"def __str__(self):\n sentence_id = self.sentence_id or 'x'\n if self._reranked:\n from six.moves import cStringIO\n combined = cStringIO()\n combined.write('%d %s\\n' % (len(self.parses), sentence_id))\n for parse in self.parses:\n combined.write('%.12f %.12f\\n%s\\n' % (parse.reranker_score,\n parse.parser_score,\n parse.ptb_parse))\n combined.seek(0)\n return combined.read()\n else:\n if self._parses:\n return parser.asNBestList(self._parses, str(sentence_id))\n else:\n return '0 %s' % sentence_id",
"def ainvs_to_string(ainvs):\n return ainvs if type(ainvs)==type('') else \";\".join([NFelt(ai) for ai in ainvs])",
"def solution(self) -> str:\n\n # \"Starting after the cup labeled 1, collect the other cups' labels clockwise into a single string with no\n # extra characters.\"\n\n self.current = 1\n eight_cups = self.pick_up_cups(8) # 9 cups in the circle, so all cups except '1' is 8 cups.\n\n answer = ''\n for cup in eight_cups:\n answer += str(cup)\n return answer",
"def buildstrings(self):\n slist = []\n if 0 == len(self._pctnumbers):\n slist.append(self.buildonestring('NNNN'))\n else:\n for pctnumber in sorted(self._pctnumbers):\n slist.append(self.buildonestring(pctnumber))\n\n return slist",
"def __str__(self):\n return f'{self.text}: {self.chs}, correct answer: {self.solution}'",
"def str (self, max_len_first, max_len_following=0) :\r\n\r\n\t\tresult = [\"EQUIVALENCE \"]\r\n\t\tindent = len(result[0])\r\n\t\tmax_len_first -= indent+1 ## we assume it fits on the line\r\n\t\tif not max_len_following == 0 :\r\n\t\t\tmax_len_following -= indent\r\n\r\n\t\tnames = []\r\n\t\tfor l in self.equ_lists :\r\n\t\t\tnames += [\"(\", []]\r\n\t\t\tfor var_name in l :\r\n\t\t\t\tnames[-1] += var_name + [\",\"]\r\n\t\t\tdel names[-1][-1]\r\n\r\n\t\tparams = tokenizer.join_tokens(names, max_len_first, max_len_following)\r\n\r\n\t\tresult[0] += params[0]\r\n\t\tfor line in params[1:] :\r\n\t\t\tresult.append (indent*\" \" + line)\r\n\r\n\t\treturn [result]",
"def subsets(self):\n return ','.join(\n [subset for subset in sorted(self.in_subset, key=lambda s: s.lower())]) if self.in_subset else ''",
"def __str__(self):\n # on trie la liste de valeurs par ordre croissant\n self.vals.sort()\n return '{' + ','.join([str(e) for e in self.vals]) + '}'"
] | [
"0.7313972",
"0.634011",
"0.6055387",
"0.59636027",
"0.5877696",
"0.585959",
"0.5841727",
"0.5791971",
"0.5778096",
"0.5705545",
"0.5683101",
"0.56824327",
"0.5674035",
"0.5649382",
"0.56227845",
"0.5609205",
"0.5592801",
"0.5583754",
"0.5580004",
"0.55474",
"0.55360365",
"0.5533771",
"0.5530438",
"0.55283296",
"0.5523092",
"0.55169505",
"0.5513806",
"0.55078924",
"0.5502195",
"0.55002534"
] | 0.6516208 | 1 |
Disables provided function from one or multiple channels which are specified. A function can be any of the commands, plugins or galaxies which are allowed to be disabled. | async def disable(self, ctx, function: typing.Union[CommandConverter, PluginConverter, GalaxyConverter],
*channels: discord.TextChannel):
channels = channels or (ctx.channel, )
await ctx.guild_profile.permissions.disable_function(function, channels)
# noinspection PyUnresolvedReferences
await ctx.send_line(f"{ctx.emotes.web_emotion.galka} {function.name} has been disabled in specified channels.") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def disable(func):\n return func",
"async def disable_channel(self, ctx, *channels: discord.TextChannel):\n channels = channels or (ctx.channel, )\n await ctx.guild_profile.permissions.disable_channels(channels)\n await ctx.send_line(f\"{ctx.emotes.web_emotion.galka} Bot commands and messages has been disabled in specified channels.\")",
"def disable(*args, value: bool=True, **kwargs)->None:\n pass",
"async def enable(self, ctx, function: typing.Union[CommandConverter, PluginConverter, GalaxyConverter],\n *channels: discord.TextChannel):\n channels = channels or (ctx.channel, )\n await ctx.guild_profile.permissions.enable_function(function, channels)\n # noinspection PyUnresolvedReferences\n await ctx.send_line(f\"{ctx.emotes.web_emotion.galka} {function.name} has been enabled back in specified channels.\")",
"def disabled(func):\n\tfunc.enabled = False\n\n\tdoctext = 'This handler is disabled by default.'\n\t_addDoc(func, doctext)\n\n\treturn func",
"def not_blacklisted_channel(blacklist):\n async def predicate(ctx):\n channel = ctx.message.channel\n server = bot.get_guild(SERVER_ID)\n for c in blacklist:\n if channel == discord.utils.get(server.text_channels, name=c):\n raise CommandNotAllowedInChannel(channel, \"Command was invoked in a blacklisted channel.\")\n return True\n \n return commands.check(predicate)",
"def _canDisable(func):\n def wrapper(*args, **kwargs):\n if _DISABLE_ASSERTIONS == 0:\n return func(*args, **kwargs)\n return wrapper",
"def Disable(self):\n handler = self.get_command_object(\"Disable\")\n handler()",
"async def disable(self, ctx):\n await self.config.guild(ctx.guild).auto.set(True)\n await ctx.send(_(\"Automatic voicechannel creation disabled.\"))",
"def disable(self, name, channel):\n if channel not in self.disabled_extensions:\n self.disabled_extensions[channel] = set()\n\n if name not in self.extension_names:\n return False\n\n logger.info('Disabling %s on %s' % (name, channel))\n self.disabled_extensions[channel].add(name)\n\n return True",
"def disable():\n if _status_apf():\n return __apf_cmd(\"-f\")",
"async def ccallow(self, ctx, channel: discord.TextChannel):\n channel_list = await self.config.guild(ctx.guild).channel_deny()\n if channel.id in channel_list:\n channel_list.remove(channel.id)\n else:\n return await ctx.send(\"Channel is not on the deny list.\")\n await self.config.guild(ctx.guild).channel_deny.set(channel_list)\n await ctx.send(f\"{channel.mention} will be allowed for chatchart use.\")",
"async def tc_disable(self, ctx):\n await self.config.guild(ctx.guild).private_textchannels_enabled.set(False)\n await ctx.send(_(\"Private text channels disabled.\"))",
"def disable(self):",
"def disable(self) -> None:",
"def turn_off(self, **kwargs):\n self.enabled = self.fritz_box.set_call_forwarding(self.uid, 0)",
"def exceptcancommand(self, module, command):\n self._exceptcancommand(module, command)",
"async def blacklist_commands(self, ctx):\r\n await self.amethyst.send_command_help(ctx)",
"def device_bypass(self, device_ids, enable):\n return self._device_action(device_ids, \"BYPASS\", self._action_toggle(enable))",
"def disable_irq() -> int:",
"def remove_command(self, func):\n del self.commands[func.__name__]",
"async def unlock(ctx):\n member = ctx.message.author\n channel = ctx.message.channel\n\n if (channel.category.name in [\"beta\", \"staff\", \"Pi-Bot\"]):\n return await ctx.send(\"This command is not suitable for this channel because of its category.\")\n\n if (channel.category.name == CATEGORY_SO or channel.category.name == CATEGORY_GENERAL):\n await ctx.send(\"Synced permissions with channel category.\")\n return await channel.edit(sync_permissions=True)\n\n member_role = discord.utils.get(member.guild.roles, name=ROLE_MR)\n if (channel.category.name != CATEGORY_STATES):\n await ctx.channel.set_permissions(member_role, add_reactions=True, send_messages=True, read_messages=True)\n else:\n await ctx.channel.set_permissions(member_role, add_reactions=True, send_messages=True)\n\n wiki_role = discord.utils.get(member.guild.roles, name=ROLE_WM)\n gm_role = discord.utils.get(member.guild.roles, name=ROLE_GM)\n aRole = discord.utils.get(member.guild.roles, name=ROLE_AD)\n bRole = discord.utils.get(member.guild.roles, name=ROLE_BT)\n await ctx.channel.set_permissions(wiki_role, add_reactions=True, send_messages=True, read_messages=True)\n await ctx.channel.set_permissions(gm_role, add_reactions=True, send_messages=True, read_messages=True)\n await ctx.channel.set_permissions(aRole, add_reactions=True, send_messages=True, read_messages=True)\n await ctx.channel.set_permissions(bRole, add_reactions=True, send_messages=True, read_messages=True)\n await ctx.send(\"Unlocked the channel to Member access. Please check if permissions need to be synced.\")",
"async def blacklist(self, ctx: commands.Context):\n if ctx.invoked_subcommand is None:\n await ctx.send_help()",
"def not_random(func):\n func.random = False\n return func",
"def setEnabled(*args):",
"def setEnabled(*args):",
"def setEnabled(*args):",
"def setEnabled(*args):",
"def setEnabled(*args):",
"def setEnabled(*args):"
] | [
"0.7478053",
"0.6590839",
"0.63677406",
"0.61984825",
"0.5935865",
"0.5847337",
"0.57535195",
"0.5705342",
"0.57022613",
"0.5698396",
"0.56209135",
"0.5606587",
"0.5573692",
"0.55229694",
"0.55216694",
"0.5494032",
"0.54801023",
"0.5455171",
"0.5423171",
"0.5408966",
"0.5388431",
"0.5380561",
"0.53347087",
"0.53323776",
"0.5327827",
"0.5327827",
"0.5327827",
"0.5327827",
"0.5327827",
"0.5327827"
] | 0.8369485 | 0 |
Enables provided function in all of the specified channels. A function can be any of the commands, plugins or galaxies. | async def enable(self, ctx, function: typing.Union[CommandConverter, PluginConverter, GalaxyConverter],
*channels: discord.TextChannel):
channels = channels or (ctx.channel, )
await ctx.guild_profile.permissions.enable_function(function, channels)
# noinspection PyUnresolvedReferences
await ctx.send_line(f"{ctx.emotes.web_emotion.galka} {function.name} has been enabled back in specified channels.") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"async def enable_channel(self, ctx, *channels: discord.TextChannel):\n channels = channels or (ctx.channel, )\n await ctx.guild_profile.permissions.enable_channels(channels)\n await ctx.send_line(f\"{ctx.emotes.web_emotion.galka} Bot commands and messages has been enabled in specified channels.\")",
"def func(self):\n from evennia.comms.models import ChannelDB\n\n caller = self.caller\n if self.args not in (\"on\", \"off\"):\n return super(CmdArxAllCom, self).func()\n if self.args == \"on\":\n # get names of all channels available to listen to\n # and activate them all\n channels = [\n chan\n for chan in ChannelDB.objects.get_all_channels()\n if chan.access(caller, \"listen\")\n ]\n for channel in channels:\n unmuted = channel.unmute(caller)\n if unmuted:\n self.msg(\"You unmute channel %s.\" % channel)\n else:\n caller.execute_cmd(\"addcom %s\" % channel.key)\n return\n channels = ChannelDB.objects.get_subscriptions(caller)\n for channel in channels:\n if channel.mute(caller):\n self.msg(\"You mute channel %s.\" % channel)",
"async def disable(self, ctx, function: typing.Union[CommandConverter, PluginConverter, GalaxyConverter],\n *channels: discord.TextChannel):\n channels = channels or (ctx.channel, )\n await ctx.guild_profile.permissions.disable_function(function, channels)\n # noinspection PyUnresolvedReferences\n await ctx.send_line(f\"{ctx.emotes.web_emotion.galka} {function.name} has been disabled in specified channels.\")",
"def enable_all(self, enable):\n logging.debug(\"Called enable_all with value {}\".format(enable))\n\n if enable:\n # Loop over all quads and channels in system, adding enable command to deferred\n # executor queue\n for quad_idx in range(len(self.quad)):\n for channel in range(Quad.NUM_CHANNELS):\n self.deferred_executor.enqueue(\n self.quad_enable_channel, self.quad_enable_interval, quad_idx, channel\n )\n self.__all_enabled = True\n else:\n # Clear any pending turn-on command from the queue first, then turn off all channels\n # immediately.\n num_enables_pending = self.deferred_executor.pending()\n if num_enables_pending > 0:\n logging.debug(\"Clearing {} pending quad enable commands from queue\".format(\n num_enables_pending\n ))\n self.deferred_executor.clear()\n for quad_idx in range(len(self.quad)):\n for channel in range(Quad.NUM_CHANNELS):\n self.quad[quad_idx].set_enable(channel, False)\n self.__all_enabled = False",
"def enable(self, name, channel):\n if channel not in self.disabled_extensions:\n self.disabled_extensions[channel] = set()\n\n if name not in self.extension_names:\n return False\n\n logger.info('Enabling %s on %s' % (name, channel))\n self.disabled_extensions[channel].discard(name)\n\n return True",
"def add_channels(self, channels):\n for i in range(len(channels)):\n self.task.ai_channels.add_ai_voltage_chan(channels[i])",
"async def managechannels(self, ctx:commands.Context):",
"def i2c_activate_als_all_sensors(pi, i2c_multiplexer_handle, i2c_sensor_handle, channel_numbers):\n for channel_number in channel_numbers:\n i2c_multiplexer_select_channel(pi,\n i2c_multiplexer_handle, channel_number)\n i2c_sensor_handle.enable_ambient_light_sensor()",
"def register(self, funcs):\n for name, func in funcs.items():\n self.functions[name] = func",
"def setup_channels():\n\n # Setup channel encoders\n for c in channels:\n channels[c].setup()\n print()",
"async def _cmdf_chenable(self, substr, msg, privilege_level):\n enabled_str = None\n if utils.str_says_true(substr) or (len(substr) == 0):\n self._ch_msg_isenabled = True\n enabled_str = \"enabled.\"\n else:\n self._ch_msg_isenabled = False\n enabled_str = \"disabled.\"\n self._save_settings()\n\n buf = \"In-channel greetings is now \" + enabled_str\n await self._client.send_msg(msg, buf)\n return",
"def both_channels(func):\n\n def func_wrapper():\n channel = ClarifaiChannel.get_grpc_channel()\n func(channel)\n\n channel = ClarifaiChannel.get_json_channel()\n func(channel)\n\n return func_wrapper",
"def setEnabled(*args):",
"def setEnabled(*args):",
"def setEnabled(*args):",
"def setEnabled(*args):",
"def setEnabled(*args):",
"def setEnabled(*args):",
"def setEnabled(*args):",
"def setEnabled(*args):",
"def setEnabled(*args):",
"def setEnabled(*args):",
"def setEnabled(*args):",
"def enable(self, *args, **kwargs):\n pass",
"def setEnableCondition(*args):",
"def setEnableCondition(*args):",
"def setEnableCondition(*args):",
"def setEnableCondition(*args):",
"def setEnableCondition(*args):",
"def setEnableCondition(*args):"
] | [
"0.6668181",
"0.6548446",
"0.6338657",
"0.5989139",
"0.574073",
"0.56826746",
"0.56814885",
"0.55161613",
"0.54051787",
"0.5351172",
"0.5333117",
"0.5286296",
"0.52859515",
"0.52859515",
"0.52859515",
"0.52859515",
"0.52859515",
"0.52859515",
"0.52859515",
"0.52859515",
"0.52859515",
"0.52859515",
"0.52859515",
"0.5272507",
"0.52243316",
"0.52243316",
"0.52243316",
"0.52243316",
"0.52243316",
"0.52243316"
] | 0.78735584 | 0 |
Disables bot commands and most of its automatic messages in current or provided channels. | async def disable_channel(self, ctx, *channels: discord.TextChannel):
channels = channels or (ctx.channel, )
await ctx.guild_profile.permissions.disable_channels(channels)
await ctx.send_line(f"{ctx.emotes.web_emotion.galka} Bot commands and messages has been disabled in specified channels.") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"async def blacklist_commands(self, ctx):\r\n await self.amethyst.send_command_help(ctx)",
"async def disable(self, ctx, function: typing.Union[CommandConverter, PluginConverter, GalaxyConverter],\n *channels: discord.TextChannel):\n channels = channels or (ctx.channel, )\n await ctx.guild_profile.permissions.disable_function(function, channels)\n # noinspection PyUnresolvedReferences\n await ctx.send_line(f\"{ctx.emotes.web_emotion.galka} {function.name} has been disabled in specified channels.\")",
"async def tc_disable(self, ctx):\n await self.config.guild(ctx.guild).private_textchannels_enabled.set(False)\n await ctx.send(_(\"Private text channels disabled.\"))",
"async def disable(self, ctx):\n await self.config.guild(ctx.guild).auto.set(True)\n await ctx.send(_(\"Automatic voicechannel creation disabled.\"))",
"async def meow_disable(self, ctx: vbu.Context):\n\n try:\n self.meow_chats.remove(ctx.channel)\n except KeyError:\n return await ctx.send(\"Meow chat is already disabled in this channel.\")\n await ctx.send(f\"Meow chat has been disabled in {ctx.channel.mention} :<\")\n\n # See if there's a running task to keep it alive\n _, current_task = self.meow_disable_tasks.pop(ctx.channel.id, (None, None))\n if current_task:\n current_task.cancel()",
"async def _msgvote_off(self, ctx):\n\n channel = ctx.message.channel\n if channel.id not in self.settings[\"channels_enabled\"]:\n await self.bot.say(\"Msgvote mode is already off in this channel.\")\n else:\n self.settings[\"channels_enabled\"].remove(channel.id)\n dataIO.save_json(self.settings_path, self.settings)\n await self.bot.say(\"Msgvote mode is now off in this channel.\")",
"async def blacklist(self, ctx: commands.Context):\n if ctx.invoked_subcommand is None:\n await ctx.send_help()",
"def Disable(self):\n handler = self.get_command_object(\"Disable\")\n handler()",
"async def team_ignore(self, ctx: commands.Context):\n await self.config.user(ctx.author).do_not_message.set(True)\n await ctx.send('Okay, I won\\'t DM about this anymore.')",
"def bot_commands_only(cmd):\n @functools.wraps(cmd)\n async def bc_cmd(self, ctx, *args, **kwargs):\n if ctx.guild:\n settings = self.bot.settings[ctx.guild]\n if settings.bot_commands_channels\\\n and ctx.channel.id not in settings.bot_commands_channels\\\n and ctx.author.id not in settings.admin_ids:\n\n for channel_id in settings.bot_commands_channels:\n bc_ch = discord.utils.get(ctx.guild.channels, id=channel_id)\n if bc_ch:\n await ctx.send(f\"Please use {bc_ch.mention} for that command\")\n return\n return await cmd(self, ctx, *args, **kwargs)\n return bc_cmd",
"async def _antiadv(self, ctx):\r\n serverid = ctx.message.server.id\r\n if ctx.invoked_subcommand is None:\r\n await send_cmd_help(ctx)\r\n if serverid not in self.adkillr:\r\n self.adkillr[serverid] = {'toggle': False, 'message': '{0.mention} don\\'t send links!', 'filters': []}\r\n dataIO.save_json(\"data/adkillr/adkillr.json\", self.adkillr)",
"async def admin_disable(self, ctx: commands.Context):\n if ctx.guild.id not in self.guilds:\n await ctx.send('Team management is already disabled in this guild.')\n return\n await self._disable_guild(guild=ctx.guild)\n await ctx.send('Team management disabled.')",
"async def unmute(self, ctx, member: discord.Member):\n for channel in ctx.guild.text_channels:\n permissions = channel.permissions_for(member)\n\n if permissions.read_messages:\n # This removes the PermissionOverwrite on the channel, it\n # does not grant send_messages=True\n await channel.set_permissions(member, overwrite=None)",
"async def team_unignore(self, ctx: commands.Context):\n await self.config.user(ctx.author).do_not_message.set(False)\n await ctx.send('Okay, I\\'ll include you back in team-wide DMs.')",
"async def disable(self, ctx: Context, guild: int = None):\n\n if guild is None:\n guild = ctx.guild\n else:\n guild = self.bot.get_guild(guild)\n\n if not guild:\n return await ctx.message.add_reaction(\"⚠\")\n\n if guild.id not in self.active_guilds:\n return await ctx.message.add_reaction(\"⚠\")\n\n self._config_cache.pop(guild.id)\n self.config.delete(f\"guilds:{guild.id}\")\n\n await ctx.message.add_reaction(\"✅\")",
"async def unlock(ctx):\n member = ctx.message.author\n channel = ctx.message.channel\n\n if (channel.category.name in [\"beta\", \"staff\", \"Pi-Bot\"]):\n return await ctx.send(\"This command is not suitable for this channel because of its category.\")\n\n if (channel.category.name == CATEGORY_SO or channel.category.name == CATEGORY_GENERAL):\n await ctx.send(\"Synced permissions with channel category.\")\n return await channel.edit(sync_permissions=True)\n\n member_role = discord.utils.get(member.guild.roles, name=ROLE_MR)\n if (channel.category.name != CATEGORY_STATES):\n await ctx.channel.set_permissions(member_role, add_reactions=True, send_messages=True, read_messages=True)\n else:\n await ctx.channel.set_permissions(member_role, add_reactions=True, send_messages=True)\n\n wiki_role = discord.utils.get(member.guild.roles, name=ROLE_WM)\n gm_role = discord.utils.get(member.guild.roles, name=ROLE_GM)\n aRole = discord.utils.get(member.guild.roles, name=ROLE_AD)\n bRole = discord.utils.get(member.guild.roles, name=ROLE_BT)\n await ctx.channel.set_permissions(wiki_role, add_reactions=True, send_messages=True, read_messages=True)\n await ctx.channel.set_permissions(gm_role, add_reactions=True, send_messages=True, read_messages=True)\n await ctx.channel.set_permissions(aRole, add_reactions=True, send_messages=True, read_messages=True)\n await ctx.channel.set_permissions(bRole, add_reactions=True, send_messages=True, read_messages=True)\n await ctx.send(\"Unlocked the channel to Member access. Please check if permissions need to be synced.\")",
"def not_blacklisted_channel(blacklist):\n async def predicate(ctx):\n channel = ctx.message.channel\n server = bot.get_guild(SERVER_ID)\n for c in blacklist:\n if channel == discord.utils.get(server.text_channels, name=c):\n raise CommandNotAllowedInChannel(channel, \"Command was invoked in a blacklisted channel.\")\n return True\n \n return commands.check(predicate)",
"def disable_everything(self):\n zhinst.utils.disable_everything(self.daq, self.device_id)\n self.log.info(\"Disabled everything.\")",
"async def disable(self, ctx):\n\n server = ctx.message.server\n\n settings = self.bot.dota_ticker_settings.get(server.id)\n\n if settings is not None:\n settings['enabled'] = False\n await self.bot.dota_ticker_settings.put(server.id, settings)\n\n await self.bot.say('The match ticker has been disabled on {0.name}.'.format(server))",
"async def deny(self, ctx, user: discord.Member, *, reason: str=None):\n self.data_check(ctx)\n server = ctx.message.server\n try:\n defchannel = self.riceCog2[server.id][\"defchannel\"]\n except:\n defchannel = default_channel\n try:\n channelmute = self.riceCog2[server.id][\"channelmute\"]\n except:\n channelmute = defchannelmute \n channel = discord.utils.get(server.channels, name = defchannel)\n if channel is None:\n msg = await self.bot.say (\"I was unable to write to your log channel. Please make sure there is a channel called {} on the server!\".format(defchannel))\n return\n else:\n pass\n if reason is None:\n msg = await self.bot.say(\"Please enter a reason for the warning!\")\n await asyncio.sleep(5)\n await self.bot.delete_message(msg)\n return\n if user.id in self.norole[server.id]:\n if self.norole[server.id][user.id]['Role'] == True:\n msg = await self.bot.say(\"This user has already been denied access to the channel.\")\n await asyncio.sleep(8)\n await self.bot.delete_message(msg) \n await self.bot.delete_message(ctx.message)\n return\n else:\n nobnl = discord.utils.get(server.roles, name = \"NoBNL\")\n role = nobnl \n mod = ctx.message.author\n await self.bot.delete_message(ctx.message)\n await self.bot.add_roles(user, nobnl)\n dmuser = await self.bot.start_private_message(user)\n await self.bot.send_message(dmuser, \"Howdy!\\nThis is to let you know that you have been denied access to the channel for the reason:\\n\\n```{}``` \\nPlease speak to a member of staff if you have an issue.\".format(reason))\n user=user\n reason=reason\n ID = uuid.uuid4()\n embed=discord.Embed(title=\"User Denied:\", color=0xA00000)\n embed.add_field(name=\"Case ID:\", value=ID, inline=False)\n embed.add_field(name=\"Moderator:\", value=mod, inline=False)\n embed.add_field(name=\"User:\", value=\"{0} ({0.id})\".format(user), inline=False)\n embed.add_field(name=\"Reason:\", value=reason, inline=False)\n react = await self.bot.send_message(channel, embed=embed)\n await self.bot.add_reaction(react, \"\\U0001f44d\")\n await self.bot.add_reaction(react, \"\\U0001f44e\")\n await self.bot.add_reaction(react, \"\\U0001f937\")\n self.norole[server.id][user.id] = {\n 'Reason': reason,\n 'Mod': ctx.message.author.id,\n 'Role': True\n }\n dataIO.save_json(self.warninglist, self.norole)\n channel = discord.utils.get(server.channels, name = channelmute)\n for channel in server.channels:\n perms = discord.PermissionOverwrite()\n \n if channel.type == discord.ChannelType.text:\n perms.send_messages = False\n perms.read_messages = False\n await self.bot.edit_channel_permissions(channel, role, overwrite=perms) \n else:\n nobnl = discord.utils.get(server.roles, name = \"NoBNL\")\n role = nobnl \n mod = ctx.message.author\n await self.bot.delete_message(ctx.message)\n await self.bot.add_roles(user, nobnl)\n dmuser = await self.bot.start_private_message(user)\n await self.bot.send_message(dmuser, \"Howdy!\\nThis is to let you know that you have been denied access to the channel for the reason:\\n\\n```{}``` \\nPlease speak to a member of staff if you have an issue.\".format(reason))\n user=user\n reason=reason\n ID = uuid.uuid4()\n embed=discord.Embed(title=\"User Denied:\", color=0xA00000)\n embed.add_field(name=\"Case ID:\", value=ID, inline=False)\n embed.add_field(name=\"Moderator:\", value=mod, inline=False)\n embed.add_field(name=\"User:\", value=\"{0} ({0.id})\".format(user), inline=False)\n embed.add_field(name=\"Reason:\", value=reason, inline=False)\n react = await self.bot.send_message(channel, 
embed=embed)\n await self.bot.add_reaction(react, \"\\U0001f44d\")\n await self.bot.add_reaction(react, \"\\U0001f44e\")\n await self.bot.add_reaction(react, \"\\U0001f937\")\n self.norole[server.id][user.id] = {\n 'Reason': reason,\n 'Mod': ctx.message.author.id,\n 'Role': True\n }\n dataIO.save_json(self.warninglist, self.norole)\n channel = discord.utils.get(server.channels, name = channelmute)\n for channel in server.channels:\n perms = discord.PermissionOverwrite()\n \n if channel.type == discord.ChannelType.text:\n perms.send_messages = False\n perms.read_messages = False\n await self.bot.edit_channel_permissions(channel, role, overwrite=perms)",
"async def ccdenylist(self, ctx):\n no_channels_msg = \"Chatchart is currently allowed everywhere in this server.\"\n channel_list = await self.config.guild(ctx.guild).channel_deny()\n if not channel_list:\n msg = no_channels_msg\n else:\n msg = \"Chatchart is not allowed in:\\n\"\n remove_list = []\n for channel in channel_list:\n channel_obj = self.bot.get_channel(channel)\n if not channel_obj:\n remove_list.append(channel)\n else:\n msg += f\"{channel_obj.mention}\\n\"\n if remove_list:\n new_list = [x for x in channel_list if x not in remove_list]\n await self.config.guild(ctx.guild).channel_deny.set(new_list)\n if len(remove_list) == len(channel_list):\n msg = no_channels_msg\n await ctx.send(msg)",
"async def __remove_unused_commands(self):\r\n to_remove: List[AppCommand] = list()\r\n\r\n for api_cmd in self._api_commands:\r\n doesnt_exist = all(\r\n api_cmd.name != loc_cmd.app.name\r\n for loc_cmd in ChatCommandHandler.register.values()\r\n )\r\n\r\n if doesnt_exist:\r\n to_remove.append(api_cmd)\r\n\r\n for cmd in to_remove:\r\n await self.client.http.delete(\r\n f\"applications/{self.client.bot.id}/commands/{cmd.id}\"\r\n )\r\n\r\n self._api_commands = [\r\n cmd for cmd in self._api_commands if cmd not in to_remove\r\n ]",
"def disable_mute(self):\n self.mute = False",
"async def ccallow(self, ctx, channel: discord.TextChannel):\n channel_list = await self.config.guild(ctx.guild).channel_deny()\n if channel.id in channel_list:\n channel_list.remove(channel.id)\n else:\n return await ctx.send(\"Channel is not on the deny list.\")\n await self.config.guild(ctx.guild).channel_deny.set(channel_list)\n await ctx.send(f\"{channel.mention} will be allowed for chatchart use.\")",
"def func(self):\n from evennia.comms.models import ChannelDB\n\n caller = self.caller\n if self.args not in (\"on\", \"off\"):\n return super(CmdArxAllCom, self).func()\n if self.args == \"on\":\n # get names of all channels available to listen to\n # and activate them all\n channels = [\n chan\n for chan in ChannelDB.objects.get_all_channels()\n if chan.access(caller, \"listen\")\n ]\n for channel in channels:\n unmuted = channel.unmute(caller)\n if unmuted:\n self.msg(\"You unmute channel %s.\" % channel)\n else:\n caller.execute_cmd(\"addcom %s\" % channel.key)\n return\n channels = ChannelDB.objects.get_subscriptions(caller)\n for channel in channels:\n if channel.mute(caller):\n self.msg(\"You mute channel %s.\" % channel)",
"def disable(self):\n self.registrar.unregister_service(\"say\", namespace=__name__)",
"def turn_off(self, **kwargs):\n self.robot.pause_cleaning()\n time.sleep(1)\n self.robot.send_to_base()",
"async def managechannels(self, ctx:commands.Context):",
"async def async_turn_off(self, **kwargs: Any) -> None:\n await self.entity_description.set_command(self, False)",
"async def watchlist(self, ctx):\r\n channel_list = await self.config.guild(ctx.guild).watching()\r\n msg = \"Bad gifs will be removed in:\\n\"\r\n for channel in channel_list:\r\n channel_obj = self.bot.get_channel(channel)\r\n if channel_obj is None: # Catch deleted/unexisting channels\r\n continue\r\n msg += f\"{channel_obj.mention}\\n\"\r\n await ctx.send(msg)"
] | [
"0.7108723",
"0.7096003",
"0.69496554",
"0.67883027",
"0.6707562",
"0.6565387",
"0.6368427",
"0.63573396",
"0.6339441",
"0.6334304",
"0.63192993",
"0.62177265",
"0.61687315",
"0.61560297",
"0.6059689",
"0.6038491",
"0.5994542",
"0.59823614",
"0.5947161",
"0.59250474",
"0.5920933",
"0.59029114",
"0.5876394",
"0.5874728",
"0.584111",
"0.5821244",
"0.58049846",
"0.5801123",
"0.58006835",
"0.57917166"
] | 0.78277546 | 0 |
Enables back bot commands and its automatic messages in current or provided channels if it was disabled previously. | async def enable_channel(self, ctx, *channels: discord.TextChannel):
channels = channels or (ctx.channel, )
await ctx.guild_profile.permissions.enable_channels(channels)
await ctx.send_line(f"{ctx.emotes.web_emotion.galka} Bot commands and messages has been enabled in specified channels.") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def func(self):\n from evennia.comms.models import ChannelDB\n\n caller = self.caller\n if self.args not in (\"on\", \"off\"):\n return super(CmdArxAllCom, self).func()\n if self.args == \"on\":\n # get names of all channels available to listen to\n # and activate them all\n channels = [\n chan\n for chan in ChannelDB.objects.get_all_channels()\n if chan.access(caller, \"listen\")\n ]\n for channel in channels:\n unmuted = channel.unmute(caller)\n if unmuted:\n self.msg(\"You unmute channel %s.\" % channel)\n else:\n caller.execute_cmd(\"addcom %s\" % channel.key)\n return\n channels = ChannelDB.objects.get_subscriptions(caller)\n for channel in channels:\n if channel.mute(caller):\n self.msg(\"You mute channel %s.\" % channel)",
"async def enable(self, ctx, function: typing.Union[CommandConverter, PluginConverter, GalaxyConverter],\n *channels: discord.TextChannel):\n channels = channels or (ctx.channel, )\n await ctx.guild_profile.permissions.enable_function(function, channels)\n # noinspection PyUnresolvedReferences\n await ctx.send_line(f\"{ctx.emotes.web_emotion.galka} {function.name} has been enabled back in specified channels.\")",
"async def enable(self, ctx):\n await self.config.guild(ctx.guild).auto.set(True)\n await ctx.send(_(\"Automatic voicechannel creation enabled.\"))",
"async def disable_channel(self, ctx, *channels: discord.TextChannel):\n channels = channels or (ctx.channel, )\n await ctx.guild_profile.permissions.disable_channels(channels)\n await ctx.send_line(f\"{ctx.emotes.web_emotion.galka} Bot commands and messages has been disabled in specified channels.\")",
"async def managechannels(self, ctx:commands.Context):",
"async def disable(self, ctx):\n await self.config.guild(ctx.guild).auto.set(True)\n await ctx.send(_(\"Automatic voicechannel creation disabled.\"))",
"async def _msgvote_on(self, ctx):\n\n channel = ctx.message.channel\n if channel.id in self.settings[\"channels_enabled\"]:\n await self.bot.say(\"Msgvote mode is already on in this channel.\")\n else:\n self.settings[\"channels_enabled\"].append(channel.id)\n dataIO.save_json(self.settings_path, self.settings)\n await self.bot.say(\"Msgvote mode is now on in this channel.\")",
"async def admin_enable(self, ctx: commands.Context):\n if ctx.guild.id in self.guilds:\n await ctx.send('Team management is already enabled in this guild.')\n return\n await self._enable_guild(guild=ctx.guild)\n await ctx.send('Team management enabled.')",
"async def _cmdf_chenable(self, substr, msg, privilege_level):\n enabled_str = None\n if utils.str_says_true(substr) or (len(substr) == 0):\n self._ch_msg_isenabled = True\n enabled_str = \"enabled.\"\n else:\n self._ch_msg_isenabled = False\n enabled_str = \"disabled.\"\n self._save_settings()\n\n buf = \"In-channel greetings is now \" + enabled_str\n await self._client.send_msg(msg, buf)\n return",
"async def tc_enable(self, ctx):\n await self.config.guild(ctx.guild).private_textchannels_enabled.set(True)\n await ctx.send(_(\"Private text channels enabled.\"))",
"async def enable(self, ctx: Context, *, guild: int = None):\n\n if guild is None:\n guild = ctx.guild\n else:\n guild = self.bot.get_guild(guild)\n\n if not guild:\n return await ctx.message.add_reaction(\"⚠\")\n\n self._create_guild_config(guild)\n\n await ctx.message.add_reaction(\"✅\")",
"async def enable(self, ctx, *, channel: discord.Channel=None):\n\n server = ctx.message.server\n\n temp = self.bot.dota_ticker_settings.get(server.id)\n\n if temp is not None and temp['enabled']:\n await self.bot.say('The match ticker has already been enabled on this server.')\n return\n\n if channel is None:\n channel = server.default_channel\n\n settings = {'enabled': True, 'channel_id': channel.id}\n\n await self.bot.dota_ticker_settings.put(server.id, settings)\n await self.bot.say('The match ticker has been enabled on {0.mention}.'.format(channel))",
"async def togglegames(self, ctx, *, channel: discord.TextChannel = None):\n channel = channel or ctx.channel\n user = await self.ex.get_user(ctx.author.id)\n if not channel:\n log.console(f\"Could not find text channel. -> User: {user.id} - Moderator.togglegames\")\n msg = await self.ex.get_msg(user, \"moderator\", \"channel_not_found\")\n return await ctx.send(msg)\n\n enabled_msg = \"enabled\" if await self.ex.u_moderator.toggle_games(channel.id) else \"disabled\"\n\n msg = await self.ex.get_msg(user, \"moderator\", \"channel_toggled\", [\n [\"name\", ctx.author.display_name], [\"text_channel\", channel.name], [\"result\", enabled_msg]\n ])\n\n return await ctx.send(msg)",
"async def enable(self, ctx):\n self.bot.db.execute(\"UPDATE starboards SET enabled = 1 WHERE channel_id = ?\", (ctx.channel.id,))\n await ctx.say(\"star.enabled\")",
"async def disable(self, ctx, function: typing.Union[CommandConverter, PluginConverter, GalaxyConverter],\n *channels: discord.TextChannel):\n channels = channels or (ctx.channel, )\n await ctx.guild_profile.permissions.disable_function(function, channels)\n # noinspection PyUnresolvedReferences\n await ctx.send_line(f\"{ctx.emotes.web_emotion.galka} {function.name} has been disabled in specified channels.\")",
"def bot_commands_only(cmd):\n @functools.wraps(cmd)\n async def bc_cmd(self, ctx, *args, **kwargs):\n if ctx.guild:\n settings = self.bot.settings[ctx.guild]\n if settings.bot_commands_channels\\\n and ctx.channel.id not in settings.bot_commands_channels\\\n and ctx.author.id not in settings.admin_ids:\n\n for channel_id in settings.bot_commands_channels:\n bc_ch = discord.utils.get(ctx.guild.channels, id=channel_id)\n if bc_ch:\n await ctx.send(f\"Please use {bc_ch.mention} for that command\")\n return\n return await cmd(self, ctx, *args, **kwargs)\n return bc_cmd",
"def do_setup(bot, msg, **kwargs):\n #channel = kwargs.get('event').get('channel')\n\n bot.refresh()\n\n return True",
"def enable(self, name, channel):\n if channel not in self.disabled_extensions:\n self.disabled_extensions[channel] = set()\n\n if name not in self.extension_names:\n return False\n\n logger.info('Enabling %s on %s' % (name, channel))\n self.disabled_extensions[channel].discard(name)\n\n return True",
"async def admin_msg(self, message):\n for channel in self.admin_channels.values():\n if channel:\n await channel.send(message)",
"async def _msgvote_off(self, ctx):\n\n channel = ctx.message.channel\n if channel.id not in self.settings[\"channels_enabled\"]:\n await self.bot.say(\"Msgvote mode is already off in this channel.\")\n else:\n self.settings[\"channels_enabled\"].remove(channel.id)\n dataIO.save_json(self.settings_path, self.settings)\n await self.bot.say(\"Msgvote mode is now off in this channel.\")",
"def _fillChannels(self):\n\n # Add extra disabled channels as needed\n index = len(self.channels)\n while index < self.iface.myInfo.max_channels:\n ch = channel_pb2.Channel()\n ch.role = channel_pb2.Channel.Role.DISABLED\n ch.index = index\n self.channels.append(ch)\n index += 1",
"async def meow_enable(self, ctx: vbu.Context, duration: vbu.TimeValue = None):\n\n self.meow_chats.add(ctx.channel)\n if duration:\n await ctx.send(f\"Meow chat has been enabled in {ctx.channel.mention} for {duration.clean_full} owo\")\n else:\n await ctx.send(f\"Meow chat has been enabled in {ctx.channel.mention} owo\")\n\n # See if we want to disable meow chat after a while\n if duration:\n async def waiter():\n await asyncio.sleep(duration.delta.total_seconds())\n try:\n self.meow_chats.remove(ctx.channel)\n await ctx.send(\"Turned off meow chat as scheduled :<\")\n except KeyError:\n pass\n _, current_task = self.meow_disable_tasks.get(ctx.channel.id, (None, None))\n if current_task:\n current_task.cancel()\n self.meow_disable_tasks[ctx.channel.id] = (discord.utils.utcnow() + duration.delta, self.bot.loop.create_task(waiter()))",
"def enable_all(self, enable):\n logging.debug(\"Called enable_all with value {}\".format(enable))\n\n if enable:\n # Loop over all quads and channels in system, adding enable command to deferred\n # executor queue\n for quad_idx in range(len(self.quad)):\n for channel in range(Quad.NUM_CHANNELS):\n self.deferred_executor.enqueue(\n self.quad_enable_channel, self.quad_enable_interval, quad_idx, channel\n )\n self.__all_enabled = True\n else:\n # Clear any pending turn-on command from the queue first, then turn off all channels\n # immediately.\n num_enables_pending = self.deferred_executor.pending()\n if num_enables_pending > 0:\n logging.debug(\"Clearing {} pending quad enable commands from queue\".format(\n num_enables_pending\n ))\n self.deferred_executor.clear()\n for quad_idx in range(len(self.quad)):\n for channel in range(Quad.NUM_CHANNELS):\n self.quad[quad_idx].set_enable(channel, False)\n self.__all_enabled = False",
"async def setwelcomechannel(self, ctx, *, channel : discord.TextChannel = None):\n\n isAdmin = ctx.message.author.permissions_in(ctx.message.channel).administrator\n if not isAdmin:\n checkAdmin = self.settings.getServerStat(ctx.message.guild, \"AdminArray\")\n for role in ctx.message.author.roles:\n for aRole in checkAdmin:\n # Get the role that corresponds to the id\n if str(aRole['ID']) == str(role.id):\n isAdmin = True\n\n # Only allow admins to change server stats\n if not isAdmin:\n await ctx.channel.send('You do not have sufficient privileges to access this command.')\n return\n\n if channel == None:\n self.settings.setServerStat(ctx.message.guild, \"WelcomeChannel\", \"\")\n if self._getDefault(ctx.guild):\n msg = 'Welcome and goodbye messages will be displayed in the default channel (**{}**).'.format(self._getDefault(ctx.guild).mention)\n else:\n msg = \"Welcome and goodbye messages will **not** be displayed.\"\n await ctx.channel.send(msg)\n return\n\n # If we made it this far - then we can add it\n self.settings.setServerStat(ctx.message.guild, \"WelcomeChannel\", channel.id)\n\n msg = 'Welcome and goodbye messages will be displayed in **{}**.'.format(channel.mention)\n await ctx.channel.send(msg)",
"async def cmd_galenable(self, ctx):\n\n # ===== SET LOCAL COG VARIABLE\n self.cogset['enable']= True\n\n # ===== ADD THE FUNCTION TO THE SCHEDULER\n self.scheduler.add_job(call_schedule,\n 'date',\n id=\"_delete_gallery_messages\",\n run_date=get_next(hours=self.cogset['text_expirein']),\n kwargs={\"func\": \"_delete_gallery_messages\"}\n )\n\n # ===== SAVE SETTINGS \n await cogset.SAVE(self.cogset, cogname=self.qualified_name)\n\n await ctx.channel.send(content=\"Galleries are **enabled**.\")\n\n return",
"async def rebind(self, ctx, channel: discord.TextChannel=None):\n player = self.bot.lavalink.players.get(ctx.guild.id)\n if not player.is_connected:\n return await ctx.send(\"I'm not connected to a voice channel :no_entry:\")\n if not channel:\n channel = ctx.channel\n player.store('channel', channel.id)\n await ctx.send(\"Messages will now be sent in {} <:done:403285928233402378>\".format(channel.mention))",
"async def botmsg(self, ctx, type: bool):\n async with self.config.toggles() as toggles:\n if type:\n toggles[\"botmessages\"] = True\n await ctx.send(\"Bot message notifications have been enabled.\")\n else:\n toggles[\"botmessages\"] = False\n await ctx.send(\"Bot message notifications have been disabled.\")",
"async def applysetup(self, ctx: commands.Context):\n pred = MessagePredicate.yes_or_no(ctx)\n role = MessagePredicate.valid_role(ctx)\n\n applicant = get(ctx.guild.roles, name=\"Staff Applicant\")\n channel = get(ctx.guild.text_channels, name=\"applications\")\n\n await ctx.send(\n \"This will create required channel and role. Do you wish to continue? (yes/no)\"\n )\n try:\n await self.bot.wait_for(\"message\", timeout=30, check=pred)\n except asyncio.TimeoutError:\n return await ctx.send(\"You took too long. Try again, please.\")\n if not pred.result:\n return await ctx.send(\"Setup cancelled.\")\n if not applicant:\n try:\n applicant = await ctx.guild.create_role(\n name=\"Staff Applicant\", reason=\"Application cog setup\"\n )\n except discord.Forbidden:\n return await ctx.send(\n \"Uh oh. Looks like I don't have permissions to manage roles.\"\n )\n if not channel:\n await ctx.send(\n \"Do you want everyone to see the applications channel? (yes/no)\"\n )\n try:\n await self.bot.wait_for(\"message\", timeout=30, check=pred)\n except asyncio.TimeoutError:\n return await ctx.send(\"You took too long. Try again, please.\")\n if pred.result:\n overwrites = {\n ctx.guild.default_role: discord.PermissionOverwrite(\n send_messages=False\n ),\n ctx.guild.me: discord.PermissionOverwrite(send_messages=True),\n }\n else:\n overwrites = {\n ctx.guild.default_role: discord.PermissionOverwrite(\n read_messages=False\n ),\n ctx.guild.me: discord.PermissionOverwrite(read_messages=True),\n }\n try:\n channel = await ctx.guild.create_text_channel(\n \"applications\",\n overwrites=overwrites,\n reason=\"Application cog setup\",\n )\n except discord.Forbidden:\n return await ctx.send(\n \"Uh oh. Looks like I don't have permissions to manage channels.\"\n )\n await ctx.send(f\"What role can accept or reject applicants?\")\n try:\n await self.bot.wait_for(\"message\", timeout=30, check=role)\n except asyncio.TimeoutError:\n return await ctx.send(\"You took too long. Try again, please.\")\n accepter = role.result\n await self.config.guild(ctx.guild).applicant_id.set(applicant.id)\n await self.config.guild(ctx.guild).channel_id.set(channel.id)\n await self.config.guild(ctx.guild).accepter_id.set(accepter.id)\n await ctx.send(\n \"You have finished the setup! Please, move your new channel to the category you want it in.\"\n )",
"async def applysetup(self, ctx: commands.Context):\n pred = MessagePredicate.yes_or_no(ctx)\n role = MessagePredicate.valid_role(ctx)\n\n applicant = get(ctx.guild.roles, name=\"Staff Applicant\")\n channel = get(ctx.guild.text_channels, name=\"staff-applications\")\n\n await ctx.send(\n \"This will create required channel and role. Do you wish to continue? (yes/no)\"\n )\n try:\n await self.bot.wait_for(\"message\", timeout=30, check=pred)\n except asyncio.TimeoutError:\n return await ctx.send(\"You took too long. Try again, please.\")\n if not pred.result:\n return await ctx.send(\"Setup cancelled.\")\n if not applicant:\n try:\n applicant = await ctx.guild.create_role(\n name=\"Staff Applicant\", reason=\"Application cog setup\"\n )\n except discord.Forbidden:\n return await ctx.send(\n \"Uh oh. Looks like I don't have permissions to manage roles.\"\n )\n if not channel:\n await ctx.send(\"Do you want everyone to see the applications channel? (yes/no)\")\n try:\n await self.bot.wait_for(\"message\", timeout=30, check=pred)\n except asyncio.TimeoutError:\n return await ctx.send(\"You took too long. Try again, please.\")\n if pred.result:\n overwrites = {\n ctx.guild.default_role: discord.PermissionOverwrite(send_messages=False),\n ctx.guild.me: discord.PermissionOverwrite(send_messages=True),\n }\n else:\n overwrites = {\n ctx.guild.default_role: discord.PermissionOverwrite(read_messages=False),\n ctx.guild.me: discord.PermissionOverwrite(read_messages=True),\n }\n try:\n channel = await ctx.guild.create_text_channel(\n \"staff-applications\",\n overwrites=overwrites,\n reason=\"Application cog setup\",\n )\n except discord.Forbidden:\n return await ctx.send(\n \"Uh oh. Looks like I don't have permissions to manage channels.\"\n )\n await ctx.send(f\"What role can accept or reject applicants?\")\n try:\n await self.bot.wait_for(\"message\", timeout=30, check=role)\n except asyncio.TimeoutError:\n return await ctx.send(\"You took too long. Try again, please.\")\n accepter = role.result\n await self.config.guild(ctx.guild).applicant_id.set(applicant.id)\n await self.config.guild(ctx.guild).channel_id.set(channel.id)\n await self.config.guild(ctx.guild).accepter_id.set(accepter.id)\n await ctx.send(\n \"You have finished the setup! Please, move your new channel to the category you want it in.\"\n )",
"async def toggle(self, ctx: BBContext):\n\n self.code_enabled = not self.code_enabled\n e = 'enabled.' if self.code_enabled else 'disabled.'\n await ctx.send(f\"Bunker code auto reaction has been : **{e}**\")\n self.bot.logger.info('Bunker code listener %s by %s', e, str(ctx.author))"
] | [
"0.67751026",
"0.6625209",
"0.65478224",
"0.6417928",
"0.6308489",
"0.610151",
"0.60823476",
"0.5960483",
"0.5899421",
"0.5865763",
"0.5858996",
"0.58107245",
"0.580726",
"0.5788629",
"0.56712127",
"0.56574357",
"0.5640026",
"0.5632828",
"0.56324667",
"0.5491965",
"0.5487777",
"0.5460501",
"0.53856504",
"0.5375579",
"0.5361948",
"0.53510016",
"0.53294945",
"0.5323976",
"0.5307654",
"0.53053236"
] | 0.73707277 | 0 |
Name scope. Must be defined by implementations. | def name_scope(self):
pass | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def scope(self, name):\r\n raise NotImplementedError",
"def _set_name_scope(self):\n if self.name is None:\n self._name_scope = self.__class__.__name__\n elif self.name == '<lambda>':\n self._name_scope = 'lambda'\n else:\n # E.g. '_my_loss' => 'my_loss'\n self._name_scope = self.name.strip('_')",
"def enterScope(self, name):",
"def scope(self): # noqa: ANN201",
"def test_sets_name(self):\n scope = Scope()\n self.assertEqual(scope.__name__, None)\n\n @Scope\n def scope2(cls): pass\n\n self.assertEqual(scope2.__name__, 'scope2')",
"def name(self, name):\n pass",
"def scope_name(self):\n return self.__scope_name",
"def name(self):\n pass",
"def scope_name(self, name):\n self._scope_name = name",
"def name(self):\r\n pass",
"def name(self):\n ...",
"def _getScopeName(self):\r\n return self.name # + \"@b%d\" % self.blscope_ids[-1]\r",
"def name(self):\n raise NotImplementedError # pragma: no cover",
"def name(self) -> str: # pragma: no cover",
"def name():\n pass",
"def name():\n pass",
"def scope_name(self):\n return self._scope_name",
"def name(self) -> str:\n ...",
"def name(self) -> str:\n ...",
"def name(self):\n raise NotImplementedError",
"def name(self):\n raise NotImplementedError",
"def name(self):\n raise NotImplementedError",
"def name(self):\n raise NotImplementedError",
"def name(self):\n raise NotImplementedError",
"def name(self):\n raise NotImplementedError",
"def name(self):\n raise NotImplementedError",
"def name(self):\n raise NotImplementedError",
"def name(self):\n raise NotImplementedError",
"def name(self):\n raise NotImplementedError",
"def hasScope(self, name):"
] | [
"0.83334064",
"0.78577036",
"0.7441628",
"0.7270988",
"0.72071725",
"0.71555185",
"0.7115489",
"0.7110687",
"0.71026707",
"0.7065051",
"0.6968165",
"0.6964567",
"0.69457537",
"0.69046456",
"0.6899518",
"0.6899518",
"0.6892065",
"0.6835145",
"0.6835145",
"0.6819266",
"0.6819266",
"0.6819266",
"0.6819266",
"0.6819266",
"0.6819266",
"0.6819266",
"0.6819266",
"0.6819266",
"0.6819266",
"0.6765305"
] | 0.8675015 | 0 |
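The stub above is the classic abstract-hook shape: a base class declares the scope name and concrete implementations must supply it. A small sketch of that contract made explicit with the stdlib `abc` module; the class names here are invented for illustration.

```python
from abc import ABC, abstractmethod


class AnchorGenerator(ABC):
    @abstractmethod
    def name_scope(self) -> str:
        """Name scope. Must be defined by implementations."""


class GridAnchorGenerator(AnchorGenerator):
    def name_scope(self) -> str:
        return "GridAnchorGenerator"


print(GridAnchorGenerator().name_scope())  # -> GridAnchorGenerator
# AnchorGenerator() itself would raise TypeError: abstract method not implemented.
```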
Whether to dynamically check the number of anchors generated. Can be overridden by implementations that would like to disable this behavior. | def check_num_anchors(self):
return True | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def num_anchors_per_location(self):\n pass",
"def adjust_anchors(self):\n pass",
"def is_anchor_valid(self):\n return self.properties.get('IsAnchorValid', None)",
"def _assert_correct_number_of_anchors(self, anchors_list,\n feature_map_shape_list):\n expected_num_anchors = 0\n actual_num_anchors = 0\n for num_anchors_per_location, feature_map_shape, anchors in zip(\n self.num_anchors_per_location(), feature_map_shape_list, anchors_list):\n expected_num_anchors += (num_anchors_per_location\n * feature_map_shape[0]\n * feature_map_shape[1])\n actual_num_anchors += anchors.num_boxes()\n return tf.assert_equal(expected_num_anchors, actual_num_anchors)",
"def check_linked(self):\n\n count = 0\n for house in self.houses.values():\n if house.link:\n count += 1\n if count is 150:\n return True\n else:\n return False",
"def use_proxy(self, request):\n if self.adsl:\n return True\n if \"depth\" in request.meta and int(request.meta['depth']) <= 2:\n return False\n i = random.randint(1, 10)\n return i <= 2",
"def generate_anchors(self):\n self.anchors = np.zeros((self.anchor_num, 4), dtype=np.float32)\n size = self.stride * self.stride\n count = 0\n for r in self.ratios:\n ws = int(math.sqrt(size * 1. / r))\n hs = int(ws * r)\n\n for s in self.scales:\n w = ws * s\n h = hs * s\n self.anchors[count][:] = [-w * 0.5, -h * 0.5, w * 0.5, h * 0.5][:]\n count += 1",
"def dynamic_links(self) -> bool:\n return pulumi.get(self, \"dynamic_links\")",
"def check_hits(self):\n return 3 <= len(self.seqdata.hits) <= 10",
"def get_anchor_points(self):\n rows, cols = np.where(self.overlap_mask)\n self.anchor_points = tuple(zip(rows, cols))[:: self.sampling_int]\n print(\"# of anchors: {}\".format(len(self.anchor_points)))",
"def provides_defaults_for(self, rule):\n return not self.is_build_only and \\\n self.endpoint == rule.endpoint and self != rule and \\\n self.arguments == rule.arguments",
"def enable_all_link_aliasing(self):\n return self._enable_all_link_aliasing",
"def ExclusiveAddressUse(self) -> bool:",
"def ExclusiveAddressUse(self) -> bool:",
"def ExclusiveAddressUse(self) -> bool:",
"def ExclusiveAddressUse(self) -> bool:",
"def __len__(self) -> int:\n return len(self.links)",
"def isAnchor(node):\n # TODO What is considered an anchor needs to be subject to an option\n return bool((isinstance(node, nodes.target)\n or isinstance(node, nodes.Structural))\n and node[DuAttrIds]\n and not node.get(DuAttrRefuri, None))",
"def use_proxy(self, request):\n # if \"depth\" in request.meta and int(request.meta['depth']) <= 2:\n # return False\n # i = random.randint(1, 10)\n # return i <= 2\n return True",
"def num_adversaries(self) -> int:\n pass",
"def use_proxy(self, request):\n #if \"depth\" in request.meta and int(request.meta['depth']) <= 2:\n # return False\n #i = random.randint(1, 10)\n #return i <= 2\n return True",
"def _should_restore_default_page_size(self, params):\n params = True if params else False\n return params and not self.request_record_limit_is_reduced and self.last_api_call_is_successful",
"def num_anchors_per_localization(self):\n num_rot = len(self._rotations)\n num_size = np.array(self._sizes).reshape([-1, 3]).shape[0]\n return num_rot * num_size",
"def sectional_overwrite_check(self):\n\n for rule in self.options['sectional_overwrite']:\n if self.lineage_test(rule):\n return True\n return False",
"def onGoal(self):\n return self.index == len(self.path)",
"def isAutomaticLink(self):\n return _osgAnimation.AnimationManagerBase_isAutomaticLink(self)",
"def sectional_overwrite_no_negate_check(self):\n\n for rule in self.options[\n 'sectional_overwrite_no_negate']:\n if self.lineage_test(rule):\n return True\n return False",
"def anchors(self):\n dims = self.dims\n anchors = []\n for peak in self:\n possible_anchors = []\n for combination in combinations(range(dims), 2):\n spins = [peak[i] for i in combination]\n if any(s.res_num is None or s.atom is None for s in spins):\n continue\n res_nums = [spin.res_num for spin in spins]\n atoms = [spin.atom for spin in spins]\n elements = [atom[0] for atom in atoms]\n positions = [atom[1:] for atom in atoms]\n same_res_num = res_nums[0] == res_nums[1]\n valid_pairs = [set(('H', 'N')), set(('H', 'C'))]\n is_proton_heavy_pair = set(elements) in valid_pairs\n same_position = all(c[0] == c[1] for c in zip(*positions))\n if same_res_num and is_proton_heavy_pair and same_position:\n if '' in positions and set(elements) != set(('H', 'N')):\n # One of the atom names must have been 'H', 'N' or 'C'\n # Of these, only the amide proton anchor is valid\n continue\n if elements[0] == 'H':\n possible_anchors.append(combination)\n else:\n possible_anchors.append(combination[::-1])\n if len(possible_anchors) > 1:\n pa_sets = [set(pa) for pa in possible_anchors]\n overlap = set.intersection(*pa_sets)\n if overlap:\n # Ambiguous, overlapping anchors\n continue\n for poss_anc in possible_anchors:\n if poss_anc not in anchors:\n anchors.append(poss_anc)\n anchors = tuple(anchors)\n return anchors",
"def have_to_paginate(self):\n if self.get_max_per_page() and self.get_nb_results() > self.get_max_per_page():\n return True\n return False",
"def is_anchored(self):\n return self.anchor is not None"
] | [
"0.68195313",
"0.6357228",
"0.6135154",
"0.5779674",
"0.5483983",
"0.54224265",
"0.5373215",
"0.5297422",
"0.5296277",
"0.5272198",
"0.52359825",
"0.5194844",
"0.51695627",
"0.51695627",
"0.51695627",
"0.51695627",
"0.5161574",
"0.51068735",
"0.51042",
"0.5082342",
"0.507974",
"0.50734025",
"0.50408137",
"0.50343597",
"0.5019171",
"0.50056624",
"0.49957573",
"0.49922353",
"0.49905455",
"0.49815124"
] | 0.77952874 | 0 |
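`check_num_anchors` is a feature flag expressed as a method, so subclasses switch the behavior off simply by overriding it. A sketch of how a caller might consult the hook; the surrounding generator logic is invented for illustration.

```python
class BaseAnchorGenerator:
    def check_num_anchors(self) -> bool:
        # Enabled by default; subclasses may override to disable validation.
        return True

    def generate(self, anchors):
        if self.check_num_anchors():
            print(f"validating {len(anchors)} anchors")
        return anchors


class UncheckedAnchorGenerator(BaseAnchorGenerator):
    def check_num_anchors(self) -> bool:
        return False  # opt out of the dynamic count validation


UncheckedAnchorGenerator().generate([1, 2, 3])  # validation step is skipped
```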
Returns the number of anchors per spatial location. | def num_anchors_per_location(self):
pass | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def num_anchors_per_localization(self):\n num_rot = len(self._rotations)\n num_size = np.array(self._sizes).reshape([-1, 3]).shape[0]\n return num_rot * num_size",
"def num_locations(self):\n return len(self.locations)",
"def get_location_count(self):\n return len(self.matrix)",
"def _assert_correct_number_of_anchors(self, anchors_list,\n feature_map_shape_list):\n expected_num_anchors = 0\n actual_num_anchors = 0\n for num_anchors_per_location, feature_map_shape, anchors in zip(\n self.num_anchors_per_location(), feature_map_shape_list, anchors_list):\n expected_num_anchors += (num_anchors_per_location\n * feature_map_shape[0]\n * feature_map_shape[1])\n actual_num_anchors += anchors.num_boxes()\n return tf.assert_equal(expected_num_anchors, actual_num_anchors)",
"def location_length(self, location):\n if location == \"parent_blocks\":\n return np.sum(self.cbc.array.astype(np.bool))\n return self.num_cells",
"def location_length(self, location):\n if location == \"parent_blocks\":\n return np.sum(self.cbc.array.astype(np.bool))\n return self.num_cells",
"def location_length(self, location):\n if location == \"parent_blocks\":\n return np.sum(self.cbc.array.astype(np.bool))\n return self.num_cells",
"def location_length(self, location):\n if location == \"vertices\":\n return self.num_nodes\n return self.num_cells",
"def get_num_landmarks(self):\n return len(self.landmarks_info)",
"def location_length(self, location):\n if location == 'cells':\n return self.num_cells\n return self.num_nodes",
"def num_links(self):\n count=0.0\n for cluster in self.clusters:\n if self.clusters[cluster] == self.clusters[cluster].antecessor:\n numberofmembers=self.clusters[cluster].number_of_members\n count+=numberofmembers\n return count",
"def get_anchor_points(self):\n rows, cols = np.where(self.overlap_mask)\n self.anchor_points = tuple(zip(rows, cols))[:: self.sampling_int]\n print(\"# of anchors: {}\".format(len(self.anchor_points)))",
"def length(self):\n total_length = 0\n for location_a, location_b in zip(\n self.locations[:-1], self.locations[1:]):\n total_length += Line(location_a, location_b).length\n return total_length",
"def numCoords(self):\n return self.nCoords",
"def location_length(self, location):\n return self.num_cells",
"def check_num_anchors(self):\n return True",
"def locations_n(self):\n return self.locations[1]",
"def nspatials(self):\n return int(len(self)/2)",
"def get_position_count(self):\n return self.positions.count()",
"def Points_Counting(self):\n return len(self.__traectory_list)",
"def get_num_link_geometries(self, body, link_idx=0):\n with self.lock:\n return self.send_command('get_num_link_geometries ' + body.GetName()\n + ' ' + str(link_idx))",
"def num_neighbors(self):\n return self._num_neighbors",
"def num_arcs(self):\n return sum(len(state) for state in self)",
"def get_num_level_anchors_inside(self, num_level_anchors, inside_flags):\n split_inside_flags = torch.split(inside_flags, num_level_anchors)\n num_level_anchors_inside = [\n int(flags.sum()) for flags in split_inside_flags\n ]\n return num_level_anchors_inside",
"def size(self):\n try:\n return len(self._adjacency_list)\n except Exception as error:\n print(f'An error occurred: {error}')",
"def number_of_open_sites(self):\n return sum(sum(line) for line in self._grid)",
"def size(self):\n return len(self._adjacency_list.keys())",
"def get_num_level_anchors_inside(self, num_level_anchors, inside_flags):\n\n split_inside_flags = torch.split(inside_flags, num_level_anchors)\n num_level_anchors_inside = [\n int(flags.sum()) for flags in split_inside_flags\n ]\n return num_level_anchors_inside",
"def size(self):\n return len(self._adjacency_list)",
"def number_of_locations_by_area(area_id):\n\n if area_id == '':\n raise Exception(\"area ID could not be ''\")\n\n locations = db_access.get_locations_for_area(area_id)\n\n return len(locations)"
] | [
"0.749816",
"0.6937425",
"0.66813695",
"0.6656154",
"0.6582445",
"0.6582445",
"0.6582445",
"0.6580193",
"0.65188473",
"0.65087336",
"0.6447474",
"0.63943326",
"0.6365181",
"0.6354651",
"0.6345681",
"0.6322735",
"0.6313625",
"0.6276137",
"0.62072563",
"0.6178259",
"0.61153775",
"0.604023",
"0.59424984",
"0.59311706",
"0.5919097",
"0.5913937",
"0.5910243",
"0.5892373",
"0.58897513",
"0.58889234"
] | 0.8555305 | 0 |
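In grid-style generators (SSD, Faster R-CNN), the per-location count is typically the number of (scale, aspect-ratio) combinations, returned as a list with one entry per feature map. A minimal sketch of a concrete implementation; the class name and values are illustrative.

```python
class GridAnchorGenerator:
    def __init__(self, scales, aspect_ratios):
        self._scales = scales
        self._aspect_ratios = aspect_ratios

    def num_anchors_per_location(self):
        # One anchor for every (scale, aspect_ratio) combination,
        # wrapped in a list because this generator has one feature map.
        return [len(self._scales) * len(self._aspect_ratios)]


gen = GridAnchorGenerator(scales=[0.5, 1.0, 2.0], aspect_ratios=[0.5, 1.0, 2.0])
print(gen.num_anchors_per_location())  # -> [9]
```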
Assert that the correct number of anchors was generated. | def _assert_correct_number_of_anchors(self, anchors_list,
feature_map_shape_list):
expected_num_anchors = 0
actual_num_anchors = 0
for num_anchors_per_location, feature_map_shape, anchors in zip(
self.num_anchors_per_location(), feature_map_shape_list, anchors_list):
expected_num_anchors += (num_anchors_per_location
* feature_map_shape[0]
* feature_map_shape[1])
actual_num_anchors += anchors.num_boxes()
return tf.assert_equal(expected_num_anchors, actual_num_anchors) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def check_num_anchors(self):\n return True",
"def test_generation_length(self):\n for i in range(1, 20, 3):\n test_obj = FakeOrderBuilder(n=i).build()\n self.assertIs(len(test_obj), i)",
"def num_anchors_per_location(self):\n pass",
"def test_vote_generator(self):\n self.assertEqual(len(self.vote_ballot), 6)",
"def test_addsLinkBetweenOrderAndListing(self):\n DataManagerUnitTest.dm.reload()\n result = DataManagerUnitTest.dm.onlineStoreDatabase.getAllOrderListingLinks()\n orders = DataManagerUnitTest.dm.getAllOrders()\n # one link per item in an order, count the number of links expected\n numLinks = reduce(lambda x, y : x + y, [len(x['items']) for x in orders])\n self.assertEqual(numLinks, len(result))",
"def test_target_number_less_than_alp(self):\n alp = list(range(10))\n targets = generate_targets(alp, 5)\n self.assertEqual(len(targets), 5)\n self.assertEqual(len(targets), len(set(targets)))",
"def test_remainder(self):\n alp = list(range(5))\n targets = generate_targets(alp, 12)\n\n counts = Counter(targets)\n for item in alp:\n self.assertGreaterEqual(counts[item], 2)\n self.assertLessEqual(counts[item], 3)",
"def test_item_count(self):\n self.assertEqual(len(self.items), 2)",
"def assert_same_size(**keywords):\n keys = keywords.keys()\n n = len(keys)\n for i in xrange(n):\n for j in xrange(i + 1, n):\n assert len(keywords[keys[i]]) == len(keywords[keys[j]]), 'len({}) should eq len({})'.format(keys[i], keys[j])",
"def assertOrphanCount(self, course_key, number):\n self.assertEqual(len(self.store.get_orphans(course_key)), number)",
"def test_n_path_reactions(self):\n self.assertEqual(self.Npath, 3)",
"def testsize(self):\n for size in range(5):\n a = AmuletAbility('Skepticism', size=size+1)\n self.assert_(str(size+1) in str(a))\n self.assertEqual(a.size, size+1)\n self.assertTrue(isinstance(a.AC, int))\n self.assertTrue(isinstance(a.description(), str))",
"def test_target_greater_than_alp(self):\n alp = list(range(5))\n targets = generate_targets(alp, 10)\n self.assertEqual(len(targets), 10)\n\n counts = Counter(targets)\n\n for item in alp:\n self.assertEqual(counts[item], 2)",
"def test_length(self):\n fibonacci = list(generators.fibonacci(10))\n self.assertEqual(len(fibonacci), 10)",
"def test_len(self):\n self.assertEqual(len(self.tester), 27)",
"def test_len(self):\n self.assertEqual(len(self.tester), 27)",
"def assert_count_equal(self, result, count):\n self.assertGreater(count, 0)\n self.assertEqual(len(result['edges']), count)",
"def test_len(self):\n self.assertEqual(len(self.tester), 21)",
"def testAnchor(self):\n self.assertEqual(['http://foo.com/page.html#anchor'], grab('http://foo.com/page.html#anchor', self.needScheme))",
"def generate_anchors(self):\n self.anchors = np.zeros((self.anchor_num, 4), dtype=np.float32)\n size = self.stride * self.stride\n count = 0\n for r in self.ratios:\n ws = int(math.sqrt(size * 1. / r))\n hs = int(ws * r)\n\n for s in self.scales:\n w = ws * s\n h = hs * s\n self.anchors[count][:] = [-w * 0.5, -h * 0.5, w * 0.5, h * 0.5][:]\n count += 1",
"def test_get_all_urls_are_urls():\n # setup\n all_links = get_urls.get_all_urls(constants[\"URLS\"][\"TESTED_URL\"])\n for link in all_links:\n valid = validators.url(link)\n assert valid",
"def test_n_cells(mock_amg):\n assert mock_amg.n_cells == 12",
"def final_check(self, test_collection):\n assert True",
"def test_len(self):\n self.assertEqual(len(self.tester), 30)",
"def test_all_count(self):\n self.assertEqual(2, self.alice_inbox.all_count)\n self.assertEqual(3, self.bob_inbox.all_count)\n self.assertEqual(0, self.carol_inbox.all_count)",
"def test_generate_nb_testing(self):\n pass",
"def assert_same_size(sequences):\n seq_size = len(sequences[0])\n for seq in sequences:\n if len(seq) != seq_size:\n raise SizeError",
"def test_AlgorithmsHandler_GET100Algorithms(self):\n wrong_list = []\n right_list = []\n create_test_algorithm_list(wrong_list, 1)\n create_test_algorithm_list(right_list, 100)\n wrong_list[0]['linkURL'] = 'wrongLinkURL'\n for i in range(100):\n document = search_algorithm.create_document(right_list[i]['algorithmId'],\n right_list[i]['algorithmSummary'],\n right_list[i]['displayName'],\n right_list[i]['linkURL'])\n search.Index(name=search_algorithm._INDEX_STRING).put(document)\n response = self.testapp.get('/algorithms/')\n self.assertEqual(200, response.status_int, msg='The response was other then 200 OK')\n self.assertIsNotNone(response.charset)\n self.assertItemsEqual(right_list, json.loads(response.normal_body.decode(encoding=response.charset)),\n msg='The list of algorithms is not the same as in database')\n self.assertNotIn(wrong_list[0], json.loads(response.normal_body.decode(encoding=response.charset)),\n msg='The list of algorithms contains nonexistent data')\n self.assertEqual('application/json', response.content_type, msg='Wrong content type of an answer')",
"def get_anchor_points(self):\n rows, cols = np.where(self.overlap_mask)\n self.anchor_points = tuple(zip(rows, cols))[:: self.sampling_int]\n print(\"# of anchors: {}\".format(len(self.anchor_points)))",
"def test_new_count(self):\n self.assertEqual(2, self.alice_inbox.new_count)\n self.assertEqual(3, self.bob_inbox.new_count)\n self.assertEqual(0, self.carol_inbox.new_count)"
] | [
"0.7573695",
"0.67272305",
"0.6482859",
"0.6289629",
"0.6163478",
"0.61331123",
"0.6032624",
"0.5879324",
"0.5873571",
"0.5858247",
"0.5818788",
"0.58075005",
"0.57869184",
"0.5784455",
"0.57567227",
"0.57567227",
"0.5755385",
"0.5755148",
"0.57010067",
"0.5676276",
"0.5675537",
"0.566705",
"0.56600106",
"0.5631396",
"0.56246746",
"0.56211525",
"0.56046695",
"0.5604391",
"0.5595996",
"0.5588322"
] | 0.7683266 | 0 |
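Stripped of TensorFlow, the assertion is simple bookkeeping: the expected total is the sum over feature maps of `anchors_per_location * height * width`, and it must equal the number of boxes actually produced. The same check in plain Python, with made-up shapes.

```python
def expected_num_anchors(anchors_per_location, feature_map_shapes):
    # Each feature-map cell contributes anchors_per_location boxes.
    return sum(
        n * h * w
        for n, (h, w) in zip(anchors_per_location, feature_map_shapes)
    )


# e.g. two feature maps (19x19 and 10x10) with 6 anchors per cell each:
expected = expected_num_anchors([6, 6], [(19, 19), (10, 10)])
actual = 6 * 19 * 19 + 6 * 10 * 10  # stand-in for summing anchors.num_boxes()
assert expected == actual == 2766
```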
run cfg2json() on a predefined list of .cfg files | def batch_run_cfg2json():
cfg_path = os.environ.get("CFG_FILE_PATH")
cfg_list = ['any_n1.cfg',
'ir_grism_n2.cfg',
'ir_grism_n4.cfg',
'ir_any_n2.cfg',
'ir_any_n4.cfg',
'uvis_any_n2.cfg',
'uvis_any_n4.cfg',
'uvis_any_n6.cfg',
'uvis_any_pre2012_n2.cfg',
'uvis_any_pre2012_n4.cfg',
'uvis_any_pre2012_n6.cfg',
'wfc_any_n2.cfg',
'wfc_any_n4.cfg',
'wfc_any_n6.cfg',
'sbc_blue_n2.cfg',
'sbc_blue_n6.cfg',
'sbc_any_n2.cfg',
'sbc_any_n6.cfg',
'hrc_any_n2.cfg',
'hrc_any_n4.cfg',
'hrc_any_n6.cfg']
for cfgfile in cfg_list:
cfgfile = os.path.join(cfg_path, cfgfile)
cfg2json(cfgfile)
cfg_path = os.path.realpath(__file__).replace("devutils/pars_utils.py", "pars/")
out_path = os.path.realpath(__file__).replace("devutils/pars_utils.py", "pars/hap_pars/any/")
cfg_list = ["astrodrizzle_filter_hap.cfg", "astrodrizzle_single_hap.cfg", "astrodrizzle_total_hap.cfg"]
for cfgfile in cfg_list:
cfgfile = os.path.join(cfg_path, cfgfile)
cfg2json(cfgfile, outpath=out_path) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def cfg2json(cfgfilename, outpath=None):\n # open cfg file and load up the output dictionary\n cfg_data = teal.load(cfgfilename, strict=False)\n del cfg_data['_task_name_']\n del cfg_data['_RULES_']\n\n out_dict = {\"parameters\": cfg_data, \"default_values\": cfg_data}\n\n # build output json filename\n json_filename = cfgfilename.split(\"/\")[-1].replace(\".cfg\", \".json\")\n\n if not outpath:\n code_dir = os.path.abspath(__file__)\n base_dir = os.path.dirname(os.path.dirname(code_dir))\n out_dir = os.path.join(base_dir, \"pars/hap_pars\")\n det = json_filename.split(\"_\")[0]\n json_filename = json_filename.replace(det, det+\"_astrodrizzle\")\n if det == \"any\":\n json_filename = os.path.join(out_dir, det, json_filename)\n else:\n if det in [\"hrc\", \"sbc\", \"wfc\"]:\n inst = \"acs\"\n if det in [\"ir\", \"uvis\"]:\n inst = \"wfc3\"\n json_filename = \"{}_{}\".format(inst, json_filename)\n json_filename = os.path.join(out_dir, inst, det, json_filename)\n else:\n json_filename = os.path.join(outpath, \"any_\"+json_filename)\n json_filename = json_filename.replace(\"hap.json\", \"hap_basic.json\")\n\n # write out data.\n if os.path.exists(json_filename):\n os.remove(json_filename)\n with open(json_filename, 'w') as fp:\n json.dump(out_dict, fp, indent=4)\n print(\"Wrote {}\".format(json_filename))",
"def _get_config_map():\n path = os.path.join(os.path.dirname(__file__), \"nadamw_configs.json\")\n configs = json.loads(open(path).read())\n return configs",
"def build_configs():",
"def converter_to_JSON(config_txt_file, config_JSON_file):\n try:\n with open(config_txt_file, 'r') as txt_config:\n content_of_file = txt_config.readlines()\n copy_of_content = content_of_file\n i = 0\n content_of_file = [x.strip() for x in content_of_file]\n with open(config_JSON_file, 'w') as json_config:\n count_space = 4\n for x in content_of_file:\n if x == ')':\n copy_of_content[i - 1] = copy_of_content[i - 1][:copy_of_content[i - 1].__len__() - 3] + '\\n'\n count_space -= 4\n copy_of_content[i] = count_space * ' ' + '},' + '\\n'\n else:\n line = (x.split(\"=\"))\n if line[1] == '(':\n copy_of_content[i] = count_space * ' ' + '\"' + line[0] + '\"' + ':' + ' ' + '{' + '\\n'\n count_space += 4\n else:\n copy_of_content[i] = count_space * ' ' + '\"' + line[0] + '\"' + ':' + ' ' + line[1] + ', ' + '\\n'\n i += 1\n copy_of_content[i - 1] = copy_of_content[i - 1][:copy_of_content[i - 1].__len__() - 3] + '\\n'\n json_config.write('{\\n')\n json_config.writelines(copy_of_content)\n json_config.write('}')\n except BaseException:\n return 1\n else:\n return 0",
"def convert_ini_to_json(ini_dir: str, json_path: str):\n python_json_store = {}\n\n ini_file_paths = ['/chip_tool_config.alpha.ini', '/chip_tool_config.beta.ini', '/chip_tool_config.gamma.ini']\n counter = 1\n\n for path in ini_file_paths:\n full_path = ini_dir + path\n if (exists(full_path)):\n logging.critical(f\"Found chip tool INI file at: {full_path} - Converting...\")\n create_repl_config_from_init(ini_file=full_path,\n json_dict=python_json_store, replace_suffix=str(counter))\n counter = counter + 1\n\n json_file = open(json_path, 'w')\n json.dump(python_json_store, json_file, ensure_ascii=True, indent=4)",
"def get_cfg_files(self):\n\t\tcfg_files = []\n\t\tfor config_object, config_value in self.maincfg_values:\n\t\t\t\n\t\t\t## Add cfg_file objects to cfg file list\n\t\t\tif config_object == \"cfg_file\" and os.path.isfile(config_value):\n\t\t\t\t\tcfg_files.append(config_value)\n\n\t\t\t## Parse all files in a cfg directory\n\t\t\tif config_object == \"cfg_dir\":\n\t\t\t\tdirectories = []\n\t\t\t\traw_file_list = []\n\t\t\t\tdirectories.append( config_value )\n\t\t\t\t# Walk through every subdirectory and add to our list\n\t\t\t\twhile len(directories) > 0:\n\t\t\t\t\tcurrent_directory = directories.pop(0)\n\t\t\t\t\t# Nagios doesnt care if cfg_dir exists or not, so why should we ?\n\t\t\t\t\tif not os.path.isdir( current_directory ): continue\n\t\t\t\t\tlist = os.listdir(current_directory)\n\t\t\t\t\tfor item in list:\n\t\t\t\t\t\t# Append full path to file\n\t\t\t\t\t\titem = \"%s\" % (os.path.join(current_directory, item.strip() ) )\n\t\t\t\t\t\tif os.path.islink( item ):\n\t\t\t\t\t\t\titem = os.readlink( item )\n\t\t\t\t\t\tif os.path.isdir(item):\n\t\t\t\t\t\t\tdirectories.append( item )\n\t\t\t\t\t\tif raw_file_list.count( item ) < 1:\n\t\t\t\t\t\t\traw_file_list.append( item )\n\t\t\t\tfor raw_file in raw_file_list:\n\t\t\t\t\tif raw_file.endswith('.cfg'):\n\t\t\t\t\t\tif os.path.exists(raw_file):\n\t\t\t\t\t\t\t'Nagios doesnt care if cfg_file exists or not, so we will not throws errors'\n\t\t\t\t\t\t\tcfg_files.append(raw_file)\n\n\t\treturn cfg_files",
"def config2json(task, sha1, filename, indent=2):\n ServerManager.get()\n try:\n result = ServerManager.api.config2json(task, sha1)\n if filename is not None:\n write_config_file(result, filename)\n else:\n click.echo(json.dumps(result, indent=indent))\n except ApiException as e:\n click.echo(click.style(json.loads(e.body)['detail'], fg='red'))",
"def ParseGNArgs(gn_args):\n configs = json.loads(gn_args)\n build_configs = {}\n\n for config in configs:\n key = config[\"name\"]\n if key not in ALLOWLISTED_CONFIGS:\n continue\n if 'current' in config:\n build_configs[key] = config['current']['value']\n else:\n build_configs[key] = config['default']['value']\n\n return build_configs",
"def read_cfg(file):\n result = []\n if isfile(file):\n with open(file) as f:\n cfg = json.load(f)\n for entry in cfg:\n if \"start\" in entry:\n filter = (entry[\"start\"], entry.get(\"end\", None))\n result.append(filter)\n return result",
"def main():\n for db_csv_export in current_dir.glob(\"template*.csv\"):\n data_projects = load_projects(db_csv_export)\n json_path = db_csv_export.with_suffix(\".json\")\n with open(json_path, \"w\") as fh:\n json.dump(data_projects, fh, indent=2)",
"def _get_json(self, path):\n cur_dir = path_stroke_fix(path)\n path = f\"{cur_dir}config/config.json\"\n return json.load(open(path, 'r'))",
"def export_configurations():\n pass",
"def save_config_to_json(config, filename):\n with open(filename, 'w+') as f:\n json.dump(vars(config), f)",
"def read_json_files(config):\n with open(config) as jsn_std:\n jparams = json.load(jsn_std)\n\n # Remove empty strings and convert unicode characters to strings\n params = {}\n for key, val in jparams.iteritems():\n # Make sure all keys are strings\n _key = str(key)\n\n # ignore empty strings and comments\n if val == \"\" or _key == \"#\":\n pass\n # convert unicode values to strings\n elif isinstance(val, unicode):\n params[_key] = str(val)\n else:\n params[_key] = val\n\n return params",
"def verilog_to_json(verilog_filename, json_filename):\n system(f'yosys -p \"read_verilog {verilog_filename}\" '\n '-p \"write_json {json_filename}\"')",
"def read_custom_configs():\n current_path = Path(__file__).parent.parent.parent\n string_path = str(current_path) + '/configuracoes_programa.json'\n\n\n # Getting credentials\n with open(Path(string_path), 'r') as myfile:\n credential_json = json.load(myfile)\n\n json_inputs = {\n 'input_pasta': credential_json['PASTAS_FUNCIONAMENTO']['PASTA_DOWNLOAD_ARQUIVOS'],\n 'start_date': credential_json['BUSCA_AUTOMATIZADA']['DATA_INICIO'],\n 'end_date': credential_json['BUSCA_AUTOMATIZADA']['DATA_FIM'],\n 'abrir_auto': credential_json['CONTROLE']['ABRIR_AUTOMATICAMENTE_XLSX'],\n 'tempo_download': credential_json['CONTROLE']['TEMPO_DOWNLOAD'],\n\n }\n return json_inputs",
"def write_configs(logconf_dir):\n for name in list_logging_conf():\n conf = load_logging_conf(name)\n with io.open(os.path.join(logconf_dir, name), 'w') as f:\n f.write(json.dumps(conf))",
"def load_config():\n model_type, run_name, run_comment, epoch, verbose = get_args()\n name = run_name + '-' + run_comment\n if model_type == \"s2s\": \n run_title = \"seq2seq\"\n else:\n run_title = \"def2vec\"\n path = \"outputs/{}/logs/{}/config.json\".format(run_title, name)\n config = None\n with open(path) as f:\n config = dict(json.load(f))\n config = load_config(eval=True)\n return (config, name, model_type)",
"def annotations_to_json(eaf_dir, json_dir):\n for file in os.listdir(eaf_dir):\n if file.endswith(\".eaf\"):\n print(\"converting\", file, \"to json\")\n file_name = os.path.join(json_dir, file[:-4]) + \".json\"\n file = os.path.join(eaf_dir, file)\n file_elan = Elan.Eaf(file)\n\n # Get all the data under the engagement_tier tier\n annotation_data = file_elan.get_annotation_data_for_tier(\"engagement_tier\")\n labels_for_annotation = elan_annotation_to_binary(annotation_data)\n\n # Create a json file storing the dictionary of {\"timeslot1,timeslot2\": 0/1(engaged/disengaged)}\n j = json.dumps(labels_for_annotation)\n f = open(file_name, \"w\")\n f.write(j)\n f.close()",
"def get_config():\n\n return json.loads(CONFIG_FILE.read_text())",
"def main(args):\n\n with open(args.cfg_fn, 'r') as cfg_fd:\n config = cfg_fd.read().split(\"\\n\")\n\n with open(args.opt_fn, 'r') as opt_fd:\n for oline in opt_fd:\n option, value = oline.strip().split(\"=\")\n\n conf_addition = \"%s=%s\" % (option, value)\n added = False\n for line_nr, line in enumerate(config):\n if \"# %s is not set\" % option in line or \\\n \"%s=\" % option in line:\n config[line_nr] = conf_addition\n added = True\n break\n\n if not added:\n config.append(conf_addition)\n\n with open(args.cfg_fn, 'w') as cfg_fd:\n cfg_fd.write(\"\\n\".join(config))",
"def parse_cfg(cfgfile):\n file = open(cfgfile, \"r\")\n lines = file.read().split(\"\\n\") # store the lines in a list\n lines = [x for x in lines if len(x) > 0] # get read of the empty lines \n lines = [x for x in lines if x[0] != \"#\"] # get rid of comments\n lines = [x.rstrip().lstrip() for x in lines] # get rid of fringe whitespaces\n\n block = {}\n blocks = []\n\n for line in lines:\n if line[0] == \"[\":\n if len(block) != 0:\n blocks.append(block)\n block = {}\n block[\"type\"] = line[1:-1].rstrip()\n else:\n key, value = line.split(\"=\")\n block[key.rstrip()] = value.lstrip()\n blocks.append(block)\n return blocks",
"def config():\n with open(config_path) as config_file:\n data = json.load(config_file)\n return data",
"def process_config(json_file):\n config, _ = get_config_from_json(json_file)\n print(\" THE Configuration of your experiment ..\")\n pprint(config)\n print(\" *************************************** \")\n try:\n config.summary_dir = os.path.join(\"experiments\", config.exp_name, \"summaries/\")\n config.checkpoint_dir = os.path.join(\"experiments\", config.exp_name, \"checkpoints/\")\n config.out_dir = os.path.join(\"experiments\", config.exp_name, \"out/\")\n create_dirs([config.summary_dir, config.checkpoint_dir, config.out_dir])\n except AttributeError as e:\n print(\"ERROR!!..Please provide the exp_name in json file..\")\n exit(-1)\n return config",
"def main(filename):\n with open(filename) as json_file:\n data = json.load(json_file)\n\n course_dict = {}\n course_dict['course_id'] = str(os.path.split(filename.strip('/'))[-1])\n course_dict['blocks'] = build_course_map(data)\n\n filename = '%s' % course_dict['course_id']\n filepath = os.path.join('../input/', filename)\n\n with open(filepath, 'w') as outfile:\n json.dump(course_dict, outfile, indent=4)",
"def write_config_file(config, args):\n config_filename = 'testsuite_cfg.json'\n args.config = config_filename\n config_file = open(config_filename, 'w')\n config_file.write(str(json.dumps(config)))\n config_file.close()",
"def build():\n for root, dirs, files in os.walk(IN_PATH):\n for filename in files:\n if filename.endswith('.csv'):\n with open(os.path.join(IN_PATH, filename), encoding='utf-8') as f:\n reader = csv.reader(f)\n next(reader)\n data = nested_dict()\n web_data = nested_dict()\n for row in reader:\n if row[0].startswith('report.') or row[0].startswith('cardset.'):\n d = data\n elif row[0].startswith('web.'):\n d = web_data\n path = row[0].split('.')\n for i in range(len(path)):\n if i == len(path) - 1:\n d[path[i]] = row[1]\n else:\n d = d[path[i]]\n with open (os.path.join(OUT_PATH, filename.replace('.csv', '.json')), 'w', encoding='utf-8') as fout:\n json.dump({**data, **web_data}, fout)\n with open (os.path.join(WEB_PATH, filename.replace('.csv', '.js')), 'w', encoding='utf-8') as fout:\n fout.write('var STRINGS = {};'.format(json.dumps(web_data)))\n\n with open(os.path.join(IN_PATH, 'en_US.csv'), encoding='utf-8') as f:\n reader = csv.reader(f)\n next(reader)\n data = nested_dict()\n web_data = nested_dict()\n for row in reader:\n path = row[0].split('.')\n if row[0].startswith('report.') or row[0].startswith('cardset.'):\n d = data\n elif row[0].startswith('web.'):\n d = web_data\n\n for i in range(len(path)):\n if i == len(path) - 1:\n d[path[i]] = zz_string(row[1], row[0])\n else:\n d = d[path[i]]\n with open(os.path.join(OUT_PATH, 'zz_ZZ.json'), 'w', encoding='utf-8') as fout:\n json.dump({**data, **web_data}, fout)\n with open(os.path.join(WEB_PATH, 'zz_ZZ.js'), 'w', encoding='utf-8') as fout:\n fout.write('var STRINGS = {};'.format(json.dumps(web_data)))",
"def main(configs, outfile='Linker.json', verbose=False):\n combined = {}\n\n for config in configs:\n with open(config) as fd:\n config_dict = json.load(fd)\n combined.update(config_dict)\n\n if verbose:\n print('\\n')\n print(json.dumps(combined, indent=4, sort_keys=True))\n\n if verbose:\n print('\\n')\n print('wrote new linker file: ' + outfile)\n json.dump(combined, open(outfile, 'w'), indent=4, sort_keys=True)",
"def write_to_json(config: dict, filename: str):\n\n with open(filename, 'w', encoding='utf-8') as f:\n mmengine.dump(config, f, file_format='json')",
"def _load_repo_configs(path):\n with open(path) as f:\n return json.loads(f.read())"
] | [
"0.725863",
"0.6178261",
"0.614552",
"0.6062128",
"0.5994763",
"0.5942238",
"0.57629395",
"0.57319415",
"0.569644",
"0.5682584",
"0.55984646",
"0.5595738",
"0.5571803",
"0.55675125",
"0.5555411",
"0.55549204",
"0.5554568",
"0.5542356",
"0.5514675",
"0.55100006",
"0.54999024",
"0.54955494",
"0.54889303",
"0.54722005",
"0.54596275",
"0.54560584",
"0.5447271",
"0.5442547",
"0.5430043",
"0.542803"
] | 0.84875107 | 0 |
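`batch_run_cfg2json` is the usual resolve-directory / loop / convert-each-file shape. Below is a generic sketch of that loop using the stdlib `configparser` for flat INI-style files; drizzlepac's .cfg files are actually configobj/teal format, so this illustrates the structure of the batch conversion rather than replacing `cfg2json`. Function and path names are invented.

```python
import configparser
import json
import os


def batch_cfg_to_json(cfg_dir, cfg_names):
    for name in cfg_names:
        cfg_file = os.path.join(cfg_dir, name)
        parser = configparser.ConfigParser()
        parser.read(cfg_file)
        # Flatten each section into a plain dict of key/value strings.
        data = {section: dict(parser[section]) for section in parser.sections()}
        json_file = cfg_file.replace(".cfg", ".json")
        with open(json_file, "w") as fp:
            json.dump({"parameters": data, "default_values": data}, fp, indent=4)
        print("Wrote {}".format(json_file))
```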
Reformat user-specified input json file to use standard (indent = 4) format | def reformat_json_file(infilename, outfilename, clobber=False):
# open input json file
with open(infilename) as json_file:
json_string = json_file.read()
json_data = json.loads(json_string)
# see if output file already exists and determine course of action
if os.path.exists(outfilename):
if clobber:
os.remove(outfilename)
else:
sys.exit("Error: output file {} already exists and would be overwritten!".format(outfilename))
# write out reformatted json file
with open(outfilename, 'w') as f:
json.dump(json_data, f, indent=4) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def collapse_json(text, indent=4):\n initial = \" \" * indent\n out = [] # final json output\n sublevel = [] # accumulation list for sublevel entries\n pending = None # holder for consecutive entries at exact indent level\n for line in text.splitlines():\n if line.startswith(initial):\n if line[indent] == \" \":\n # found a line indented further than the indent level, so add\n # it to the sublevel list\n if pending:\n # the first item in the sublevel will be the pending item\n # that was the previous line in the json\n sublevel.append(pending)\n pending = None\n item = line.strip()\n sublevel.append(item)\n if item.endswith(\",\"):\n sublevel.append(\" \")\n elif sublevel:\n # found a line at the exact indent level *and* we have sublevel\n # items. This means the sublevel items have come to an end\n sublevel.append(line.strip())\n out.append(\"\".join(sublevel))\n sublevel = []\n else:\n # found a line at the exact indent level but no items indented\n # further, so possibly start a new sub-level\n if pending:\n # if there is already a pending item, it means that\n # consecutive entries in the json had the exact same\n # indentation and that last pending item was not the start\n # of a new sublevel.\n out.append(pending)\n pending = line.rstrip()\n else:\n if pending:\n # it's possible that an item will be pending but not added to\n # the output yet, so make sure it's not forgotten.\n out.append(pending)\n pending = None\n if sublevel:\n out.append(\"\".join(sublevel))\n out.append(line)\n return \"\\n\".join(out)",
"def json_format(filename, indent=DEFAULT_INDENT_SIZE, **kwargs):\n console = kwargs.get(\"console\", logging.getLogger(\"console\"))\n encoding = kwargs.get(\"encoding\", None)\n dry_run = kwargs.get(\"dry_run\", False)\n if indent is None:\n sort_keys = False\n else:\n sort_keys = True\n\n message = \"%s ...\" % filename\n# if not (os.path.exists(filename) and os.path.isfile(filename)):\n# console.error(\"%s ERROR: file not found.\", message)\n# return 0\n\n contents = open(filename, \"r\").read()\n if PYTHON_VERSION >= (3, 1):\n # -- NOTE: encoding keyword is deprecated since python 3.1\n data = json.loads(contents)\n else:\n data = json.loads(contents, encoding=encoding)\n contents2 = json.dumps(data, indent=indent, sort_keys=sort_keys)\n contents2 = contents2.strip()\n contents2 = \"%s\\n\" % contents2\n if contents == contents2:\n console.info(\"%s SKIP (already pretty)\", message)\n return 2 #< SKIPPED.\n elif not dry_run:\n outfile = open(filename, \"w\")\n outfile.write(contents2)\n outfile.close()\n console.warning(\"%s OK\", message)\n return 1 #< OK",
"def collapse_json(text, indent=12):\n initial = \" \" * indent\n out = [] # final json output\n sublevel = [] # accumulation list for sublevel entries\n pending = None # holder for consecutive entries at exact indent level\n for line in text.splitlines():\n if line.startswith(initial):\n if line[indent] == \" \":\n # found a line indented further than the indent level, so add\n # it to the sublevel list\n if pending:\n # the first item in the sublevel will be the pending item\n # that was the previous line in the json\n sublevel.append(pending)\n pending = None\n item = line.strip()\n sublevel.append(item)\n if item.endswith(\",\"):\n sublevel.append(\" \")\n elif sublevel:\n # found a line at the exact indent level *and* we have sublevel\n # items. This means the sublevel items have come to an end\n sublevel.append(line.strip())\n out.append(\"\".join(sublevel))\n sublevel = []\n else:\n # found a line at the exact indent level but no items indented\n # further, so possibly start a new sub-level\n if pending:\n # if there is already a pending item, it means that\n # consecutive entries in the json had the exact same\n # indentation and that last pending item was not the start\n # of a new sublevel.\n out.append(pending)\n pending = line.rstrip()\n else:\n if pending:\n # it's possible that an item will be pending but not added to\n # the output yet, so make sure it's not forgotten.\n out.append(pending)\n pending = None\n if sublevel:\n out.append(\"\".join(sublevel))\n out.append(line)\n return \"\\n\".join(out)",
"def beautify_json(self) -> None:\n for letter in self.data:\n for category in self.data[letter]:\n self.data[letter][category] = str(self.data[letter][category.lower()])\n self.save()\n with open(dict_path, encoding='utf-8') as x:\n data = x.read()\n with open(dict_path, 'w', encoding='utf-8') as y:\n data2 = data.replace('\"[', '[').replace(']\"', ']').replace(\"'\", '\"')\n y.write(data2)",
"def main():\n\n json_str = sys.stdin.read()\n\n try:\n object_json = json.loads(json_str)\n\n except json.JSONDecodeError:\n print(\"Error with the JSON input. Please check your JSON file.\")\n sys.exit(1)\n\n final_object = flatten_json(object_json, prefix=\"\")\n object_json_output = json.dumps(final_object, indent=4)\n\n print(object_json_output)",
"def pretty_print_json_file(set_file_path: str) -> None:\n with open(set_file_path, \"r\", encoding=\"utf8\") as set_file:\n set_data = json.load(set_file)\n\n with open(set_file_path, \"w\", encoding=\"utf8\") as set_file:\n json.dump(set_data, set_file, indent=2)",
"def main(args=None):\n if args is None:\n args = sys.argv[1:]\n\n usage_ = \"\"\"%prog [OPTIONS] JsonFile [MoreJsonFiles...]\nFormat/Beautify one or more JSON file(s).\"\"\"\n parser = OptionParser(usage=usage_, version=VERSION)\n parser.add_option(\"-i\", \"--indent\", dest=\"indent_size\",\n default=DEFAULT_INDENT_SIZE, type=\"int\",\n help=\"Indent size to use (default: %default).\")\n parser.add_option(\"-c\", \"--compact\", dest=\"compact\",\n action=\"store_true\", default=False,\n help=\"Use compact format (default: %default).\")\n parser.add_option(\"-n\", \"--dry-run\", dest=\"dry_run\",\n action=\"store_true\", default=False,\n help=\"Check only if JSON is well-formed (default: %default).\")\n options, filenames = parser.parse_args(args) #< pylint: disable=W0612\n if not filenames:\n parser.error(\"OOPS, no filenames provided.\")\n if options.compact:\n options.indent_size = None\n\n # -- STEP: Init logging subsystem.\n format_ = \"json.format: %(message)s\"\n logging.basicConfig(level=logging.WARN, format=format_)\n console = logging.getLogger(\"console\")\n\n # -- DOS-SHELL SUPPORT: Perform filename globbing w/ wildcards.\n skipped = 0\n filenames2 = []\n for filename in filenames:\n if \"*\" in filenames:\n files = glob.glob(filename)\n filenames2.extend(files)\n elif os.path.isdir(filename):\n # -- CONVENIENCE-SHORTCUT: Use DIR as shortcut for JSON files.\n files = glob.glob(os.path.join(filename, \"*.json\"))\n filenames2.extend(files)\n if not files:\n console.info(\"SKIP %s, no JSON files found in dir.\", filename)\n skipped += 1\n elif not os.path.exists(filename):\n console.warning(\"SKIP %s, file not found.\", filename)\n skipped += 1\n continue\n else:\n assert os.path.exists(filename)\n filenames2.append(filename)\n filenames = filenames2\n\n # -- NORMAL PROCESSING:\n errors = json_formatall(filenames, options.indent_size,\n dry_run=options.dry_run)\n console.error(\"Processed %d files (%d with errors, skipped=%d).\",\n len(filenames), errors, skipped)\n if not filenames:\n errors += 1\n return errors",
"def parse_and_format(file_in_path: str, file_out_path: str) -> None:\n\n with open(file_in_path) as input:\n payload = json.load(input)\n\n records = [\"# Data is in the format:\",\n \"# kem_id,kdf_id,aead_id,info,skRm,skEm,pkRm,pkEm,exporter_context,L,exported_value\"]\n\n for key in payload:\n # Skip these to test only capabilities exposed by BoringSSL\n if (key[\"mode\"] != MODE_BASE or\n key[\"kem_id\"] != KEM_DHKEM_X25519_SHA256 or\n key[\"kdf_id\"] != KDF_HKDF_SHA256 or\n key[\"aead_id\"] == AEAD_EXPORT_ONLY):\n continue\n\n for exportKey in key[\"exports\"]:\n records.append(\"{},{},{},{},{},{},{},{},{},{},{}\"\n .format(str(key[\"kem_id\"]),\n str(key[\"kdf_id\"]),\n str(key[\"aead_id\"]),\n str(key[\"info\"]),\n str(key[\"skRm\"]),\n str(key[\"skEm\"]),\n str(key[\"pkRm\"]),\n str(key[\"pkEm\"]),\n str(exportKey[\"exporter_context\"]),\n str(exportKey[\"L\"]),\n str(exportKey[\"exported_value\"])))\n\n\n with open(file_out_path, \"w\") as output:\n output.write(\"\\n\".join(records))",
"def json_dumps(data, indent=2, indent_increment=None, toplevel=True, one_line_max_width=200, object_fields_sorting_key=None):\n\n def simple(d):\n r = True\n if isinstance(d, dict):\n r = not any(isinstance(v, (list, tuple, set, dict)) for v in d.values()) and len(d) < 17\n elif isinstance(d, (tuple, list)):\n r = not any(isinstance(v, (list, tuple, set, dict)) for v in d)\n return r\n\n def end(symbol, indent):\n if indent > indent_increment:\n r = \"{:{}s}{}\".format(\"\", indent - indent_increment, symbol)\n else:\n r = symbol\n return r\n\n def make_one_line(data):\n if isinstance(data, set):\n s = json.dumps(sorted(data, key=object_fields_sorting_key), ensure_ascii=False)\n elif isinstance(data, dict):\n s = \"{\"\n for no, k in enumerate(sorted(data, key=object_fields_sorting_key), start=1):\n comma = \", \" if no < len(data) else \"\"\n s += \"{}: {}{}\".format(json.dumps(k, ensure_ascii=False), json_dumps(data[k], indent=0, indent_increment=None, toplevel=False, object_fields_sorting_key=object_fields_sorting_key), comma)\n s += \"}\"\n else:\n s = json.dumps(data, sort_keys=True, ensure_ascii=False)\n return s\n\n def make_object(data):\n if toplevel:\n r = [\"{{{:<{}s}\\\"_\\\":\\\"-*- js-indent-level: {} -*-\\\",\".format(\"\", indent_increment - 1, indent_increment)]\n else:\n r = [\"{\"]\n for no, k in enumerate(sorted(data, key=object_fields_sorting_key), start=1):\n comma = \",\" if no < len(data) else \"\"\n r.append(\"{:{}s}{}: {}{}\".format(\"\", indent, json.dumps(k, ensure_ascii=False), json_dumps(data[k], indent + indent_increment, indent_increment, toplevel=False, object_fields_sorting_key=object_fields_sorting_key), comma))\n r.append(end(\"}\", indent))\n return r\n\n # --------------------------------------------------\n\n if indent_increment is None:\n indent_increment = indent\n if indent == 0 or simple(data):\n s = make_one_line(data)\n else:\n r = []\n if isinstance(data, dict):\n r.extend(make_object(data))\n elif isinstance(data, (tuple, list)):\n r.append(\"[\")\n for no, v in enumerate(data, start=1):\n comma = \",\" if no < len(data) else \"\"\n r.append(\"{:{}s}{}{}\".format(\"\", indent, json_dumps(v, indent + indent_increment, indent_increment, toplevel=False, object_fields_sorting_key=object_fields_sorting_key), comma))\n r.append(end(\"]\", indent))\n else:\n raise ValueError(\"Cannot serialize: {!r}\".format(data))\n s = \"\\n\".join(r)\n if \"\\n\" in s and len(s) < one_line_max_width:\n s = make_one_line(data)\n return s",
"def reformat():\n toolkit.reformat()",
"def _pretty_json_dump(d):\n return json.dumps(d, sort_keys=True, indent=3)",
"def pretty_format_json(data):\n return json.dumps(data, ensure_ascii=False, indent=4, cls=RssantJSONEncoder)",
"def pretty(data, sort=True, indent=4):\n return json.dumps(data, sort_keys=sort, indent=indent,\n separators=(',', ': '))",
"def pretty_format(something):\r\n try:\r\n return pretty_format(json.loads(something))\r\n except Exception:\r\n try:\r\n return json.dumps(something, indent=5)\r\n except Exception:\r\n return str(something)",
"def dump(f, indent):\n\n j = json.load(f)\n json.dump(j, sys.stdout, indent=indent)",
"def pretty_print(data):\n print json.dumps(data, sort_keys=True, indent=4, separators=(',', ': '))",
"def format_json(json_data):\n return json_data[json_data.index('{'):json_data.rfind('}')+1]",
"def process_json(json_str):\n\tjson_str = json_str.replace(\"\\n\", \" \")\n\tjson_str = json_str.replace(\"\\t\", \" \")\n\t\n\twhile json_str.find(\" \") > -1:\n\t\tjson_str = json_str.replace(\" \", \" \")\n\t\n\treturn json_str",
"def converter_to_JSON(config_txt_file, config_JSON_file):\n try:\n with open(config_txt_file, 'r') as txt_config:\n content_of_file = txt_config.readlines()\n copy_of_content = content_of_file\n i = 0\n content_of_file = [x.strip() for x in content_of_file]\n with open(config_JSON_file, 'w') as json_config:\n count_space = 4\n for x in content_of_file:\n if x == ')':\n copy_of_content[i - 1] = copy_of_content[i - 1][:copy_of_content[i - 1].__len__() - 3] + '\\n'\n count_space -= 4\n copy_of_content[i] = count_space * ' ' + '},' + '\\n'\n else:\n line = (x.split(\"=\"))\n if line[1] == '(':\n copy_of_content[i] = count_space * ' ' + '\"' + line[0] + '\"' + ':' + ' ' + '{' + '\\n'\n count_space += 4\n else:\n copy_of_content[i] = count_space * ' ' + '\"' + line[0] + '\"' + ':' + ' ' + line[1] + ', ' + '\\n'\n i += 1\n copy_of_content[i - 1] = copy_of_content[i - 1][:copy_of_content[i - 1].__len__() - 3] + '\\n'\n json_config.write('{\\n')\n json_config.writelines(copy_of_content)\n json_config.write('}')\n except BaseException:\n return 1\n else:\n return 0",
"def _format_dict(self, dict_, indent=0):\n prefix = indent*\" \"*4\n output = \"{\\n\"\n for key, val in sorted(dict_.items()):\n if isinstance(val, dict):\n rval = self._format_dict(val, indent+1)\n else:\n rval = repr(val)\n output += prefix + \" \"*4 + repr(key) + \" : \" + rval + \",\\n\"\n output += prefix + \"}\"\n return output",
"def json_format_dict(self, data, pretty=False):\n if pretty:\n return json.dumps(data, sort_keys=True, indent=2)\n else:\n return json.dumps(data)",
"def sanitize_json_files(unsanitized_json_file):\n with open(develop_baseline_dir + unsanitized_json_file) as f:\n lines = f.readlines()\n\n with open('sanitizedJson/sanitized_{}'.format(unsanitized_json_file), 'w') as w:\n w.writelines([item for item in lines[:-1]])",
"def _jsonPretty(j):\n return json.dumps(j, sort_keys=True, indent=4, separators=(',', ': '))",
"def dump_json(self, unformatted_path: str, data: Dict):\n formatted_path = self.format_path(unformatted_path)\n with open(formatted_path, \"w\") as outfile:\n json.dump(data, outfile, indent=4)",
"def cleanJson(filename):\n saveFile = 'newFile.json'\n\n # check if file exists and exit if it does\n if os.path.exists(saveFile):\n return\n\n with open(filename) as f:\n contents = f.readlines()\n\n with open('newFile.json', 'a') as outfile:\n for line in contents:\n # convert line from JSON to Python dictionary\n dic = decode(line)\n # convert dic to a string representing a json object\n json_obj = json.dumps(dic)\n outfile.write(json_obj) # write to outfile\n outfile.write(\"\\n\")",
"def transform_file(input_file_path: str, output_file_path: str):\n\n # Open json\n with open(input_file_path, mode=\"r\") as in_file:\n input_data = json.load(in_file)\n\n # Transform and write\n with jsonlines.open(output_file_path, mode=\"w\", compact=True) as out_file:\n for item in input_data[\"items\"]:\n out_file.write(transform_item(item))",
"def test_json_with_tabs() -> None:\n temp_schema_file = tempfile.NamedTemporaryFile(mode=\"w+\", delete=False, suffix=\".json\")\n created_filename = temp_schema_file.name\n with open(os.path.abspath(os.path.join(examples_dir, \"cases\", f\"basic.json\"))) as schema_fp:\n for line in schema_fp:\n temp_schema_file.write(line.replace(\" \", \"\\t\"))\n with tempfile.NamedTemporaryFile(mode=\"w+\") as temp_html_file:\n temp_schema_file.seek(0)\n generate_from_file_object(temp_schema_file, temp_html_file, True, False, False, True)\n temp_schema_file.close()\n os.remove(created_filename)\n temp_html_file.seek(0)\n soup = BeautifulSoup(temp_html_file.read(), \"html.parser\")\n tests.html_schema_doc_asserts.assert_basic_case(soup)",
"def _pretty_print(self, json_dict):\n if self.prettyprint:\n return \"\\n\" + json.dumps(json_dict, indent=self.indent)\n return json.dumps(json_dict)",
"def printAsFormattedJSON(jsonObject):\n print(json.dumps(jsonObject, indent=2)[0:1000])",
"def pretty_print_content(content):\n try:\n parsed_content = json.loads(content)\n except ValueError:\n return '{\"error\": \"Invalid JSON\"}'\n return json.dumps(parsed_content, sort_keys=True, indent=4, separators=(',', ': '))"
] | [
"0.64213634",
"0.6361781",
"0.6326946",
"0.5934598",
"0.5909397",
"0.58976054",
"0.5836589",
"0.5784594",
"0.57603955",
"0.5724058",
"0.5709791",
"0.5688288",
"0.56765765",
"0.5670549",
"0.5666758",
"0.5661027",
"0.56540006",
"0.5612286",
"0.5568545",
"0.5535113",
"0.54417205",
"0.54180974",
"0.539917",
"0.5396961",
"0.53862125",
"0.5365857",
"0.5364785",
"0.53591514",
"0.53500456",
"0.53484964"
] | 0.6531704 | 0 |
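A short usage note for the reformatter above: the round trip is just `json.load` followed by `json.dump(..., indent=4)`, and for one-off files recent CPython (3.9+) exposes the same operation from the shell via the stdlib `json.tool` module. File names below are illustrative.

```python
import json

# The round trip reformat_json_file performs, in miniature:
ugly = '{"b": 2, "a": 1}'
print(json.dumps(json.loads(ugly), indent=4))

# Equivalent one-off from the shell (Python 3.9+):
#   python -m json.tool --indent 4 raw.json pretty.json
```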
Parse attributes buffer into a list of (type, data) tuples. | def parse_attrs(buf):
attrs = []
while buf:
t = ord(buf[0])
l = ord(buf[1])
if l < 2:
break
d, buf = buf[2:l], buf[l:]
attrs.append((t, d))
return attrs | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _read_attributes(root):\n output_list = []\n for _, value in enumerate(root[0][2]):\n attr = Attribute(value)\n output_list.append(attr)\n return output_list",
"def _parse_attr(self, attr_proto):\n attrs = {}\n for a in attr_proto:\n for f in ['f', 'i', 's']:\n if a.HasField(f):\n attrs[a.name] = getattr(a, f)\n for f in ['floats', 'ints', 'strings']:\n if list(getattr(a, f)):\n assert a.name not in attrs, \"Only one type of attr is allowed\"\n attrs[a.name] = tuple(getattr(a, f))\n for f in ['t', 'g']:\n if a.HasField(f):\n attrs[a.name] = getattr(a, f)\n for f in ['tensors', 'graphs']:\n if list(getattr(a, f)):\n raise NotImplementedError(\"Filed {} is not supported in mxnet.\".format(f))\n if a.name not in attrs:\n raise ValueError(\"Cannot parse attribute: \\n{}\\n.\".format(a))\n return attrs",
"def _get_attr(self, buf):\n fields = [\"s\", \"i\", \"f\", \"b\", \"type\", \"shape\", \"tensor\", \"func\"]\n\n x = buf\n\n ret = []\n\n try:\n from tensorflow.python.framework import dtypes\n except ImportError as e:\n raise ImportError(f\"Unable to import tensorflow which is required {e}\")\n\n # Treat an empty oneof value as an empty list.\n if not x.WhichOneof(\"value\"):\n return ret\n if x.HasField(\"list\"):\n for f in fields:\n if getattr(x.list, f):\n if f == \"type\":\n ret += [dtypes.as_dtype(x) for x in list(getattr(x.list, f))]\n else:\n ret += list(getattr(x.list, f))\n else:\n for f in fields:\n if x.HasField(f):\n if f == \"type\":\n ret = dtypes.as_dtype(getattr(x, f))\n else:\n ret = getattr(x, f)\n return ret",
"def read_attribs(self):\n\n attribs = {}\n while self.index < self.length:\n self.ignore_whitespaces()\n if self.xtext[self.index] == '>':\n break\n name = self.read_until('=')\n self.index += 1\n self.read_until('\"')\n self.index += 1\n value = self.read_until('\"')\n self.index += 1\n\n attribs[name] = value\n\n return attribs",
"def extractAttrs(data):\n\treturn [instance[1:] for instance in data]",
"def _parse_attr(self, attr_proto):\n attrs = {}\n for key, value in attr_proto.items():\n attrs[key] = self._get_attr(value)\n\n return attrs",
"def get_attribs(self, attribs: List[str]) -> Iterable[Tuple[str, str]]:\n binfo = self.build_info\n for attrib in attribs:\n try:\n val = self._get_attrib(attrib, binfo)\n except Exception as e:\n logger.error(f'could not get attribute {attrib}: {e}')\n raise e\n if self.type_strict and not isinstance(val, str):\n raise ValueError(f'wrong value found for attribute: {attrib}')\n if val is not None:\n yield ((attrib, val))\n elif self.exist_strict:\n raise ValueError(f'no such attribute: {attrib}')",
"def parseAttribute(self, attr_str):\r\n parts = []\r\n lastpos = 0\r\n while lastpos < len(attr_str):\r\n newpos = self.nextString(attr_str, lastpos)\r\n s = attr_str[lastpos:newpos-1]\r\n if (s[0] == \"(\" and s[-1] == \")\"): # list, recurse\r\n parts.append(self.parseAttribute(s[1:-1]))\r\n else:\r\n try:\r\n parts.append(float(s)) # number, any kind\r\n except ValueError:\r\n if s[0] == \"'\" and s[-1] == \"'\": # string\r\n parts.append(s[1:-1])\r\n elif s == \"$\":\r\n parts.append(None)\r\n else:\r\n parts.append(s) # ref, enum or other\r\n\r\n lastpos = newpos\r\n \r\n return parts",
"def attr_info_array_to_list(array_pointer, length):\n data_array = driver.ble_gattc_attr_info_array.frompointer(array_pointer)\n data_list = _populate_list(data_array, length)\n return data_list",
"def get_attrs(self):\n req_attrv = self._ptr.contents.attrv\n attrs = {}\n if bool(req_attrv):\n i = 0\n while 1:\n s = bytestostr(req_attrv[i])\n i += 1\n if s == None:\n break\n try:\n k, v = s.split(\"=\", 1)\n attrs[k] = v\n except:\n pass\n return attrs",
"def parseAttrs(file_name):\n\tlines = file(file_name).read().strip().split('\\n')\n\tlines = [x.strip() for x in lines if len(x.strip()) > 0]\n\treturn [parseAttrLine(x) for x in lines]",
"def _unpack_tuple(self, buff):\n cardinality = struct_L.unpack_from(buff)[0]\n _tuple = ['']*cardinality\n offset = 4 # The first 4 bytes in the response body is the <count> we have already read\n for i in xrange(cardinality):\n field_size, offset = self._unpack_int_base128(buff, offset)\n field_data = struct.unpack_from(\"<%ds\" % field_size, buff, offset)[0]\n _tuple[i] = field(field_data)\n offset += field_size\n\n return tuple(_tuple)",
"def _parse_attributes(self, attributes, node):\n for attr in attributes:\n if attr.value.ByteSize() > self.MAX_NODE_ATTRIBUTE_VALUE_BYTES:\n message = f\"The attribute value of node({node.name}) \" \\\n f\"is over {self.MAX_NODE_ATTRIBUTE_VALUE_BYTES} Bytes, will ignore.\"\n logger.warning(message)\n continue\n if attr.name in ('input_is_dynamic_shape', 'output_is_dynamic_shape') and not \\\n node.is_dynamic_shape_node and attr.value.bool_val:\n node.is_dynamic_shape_node = True\n node.add_attr({attr.name: str(attr.value)})",
"def decode_replay_attributes_events(contents):\n buffer = BitPackedBuffer(contents, 'little')\n attributes = {}\n if not buffer.done():\n attributes['source'] = buffer.read_bits(8)\n attributes['mapNamespace'] = buffer.read_bits(32)\n count = buffer.read_bits(32)\n attributes['scopes'] = {}\n while not buffer.done():\n value = {}\n value['namespace'] = buffer.read_bits(32)\n value['attrid'] = attrid = buffer.read_bits(32)\n scope = buffer.read_bits(8)\n value['value'] = buffer.read_aligned_bytes(4)[::-1].strip(b'\\x00')\n if not scope in attributes['scopes']:\n attributes['scopes'][scope] = {}\n if not attrid in attributes['scopes'][scope]:\n attributes['scopes'][scope][attrid] = []\n attributes['scopes'][scope][attrid].append(value)\n return attributes",
"def attributes(self, *args):\n kwargs = {}\n if args:\n kwargs[\"attributenames\"] = args\n\n r = self._token_id_request(urljoin(self._url, Client._attribute_resource), **kwargs)\n\n # parse contennt looking for all attributes\n attributes = []\n for line in r.text.splitlines():\n r = re.match(\"(userdetails\\.attribute\\.name)=(.*)\", line)\n if r:\n name = r.groups()[1]\n attributes.append([name, None])\n continue # next line\n\n r = re.match(\"(userdetails\\.attribute\\.value)=(.*)\", line)\n if r:\n value = r.groups()[1]\n # last name parsed is where it has to\n # be stacked\n if attributes[-1][1] == None:\n attributes[-1][1] = value\n if isinstance(attributes[-1][1], list):\n attributes[-1][1].append(value)\n else:\n # cast to list\n attributes[-1].append([attributes[-1][1], value])\n\n return dict([(item[0], item[1]) for item in attributes])",
"def process_attrs(attrs):\n if attrs.getLength() == 0:\n return {}\n tmp_dict = {}\n for name in attrs.getNames():\n tmp_dict[name] = attrs.getValue(name)\n return tmp_dict",
"def parse_event_attlog(self):\n uid = ''\n ver_type = -1\n date_str = ''\n if self.last_event_code == DEFS.EF_ATTLOG:\n uid = self.last_payload_data[0:9].decode('ascii').\\\n replace('\\x00', '')\n ver_type = struct.unpack('<H', self.last_payload_data[24:26])[0]\n date_str = \"20%i/%i/%i %i:%i:%i\" %\\\n tuple(self.last_payload_data[26:32])\n\n return [uid, ver_type, date_str]",
"def attrs_to_tuple(obj):\n return tuple(getattr(obj, a) for a in attrs)",
"def get_attributes_data(attributes_to_scrape):\n\tattributes_data = []\n\tfor attribute in attributes_to_scrape:\n\t\tdata = get_element_by_attribure(attribute)\n\t\t#time attribute's handling is different than the rest\n\t\tif attribute == 'time': \t\t\t\t\t\t\t\t\n\t\t\tdata = [x.get_attribute('title') for x in data]\n\t\telse:\n\t\t\tdata = [x.text for x in data]\n\t\t\t#tags attribute's handling is different than the rest\n\t\t\tif attribute == 'tags':\t\t\t\t\t\t\t\t\n\t\t\t\tdata = check_tags(data)\n\t\t\t#description attribute's handling is different than the rest\n\t\t\telif attribute == 'description':\n\t\t\t\tdata = check_description(data, attributes_data[0])\n\t\tattributes_data.append(data)\n\treturn attributes_data",
"def from_buffer(data):\n opcode = ustruct.unpack(ATT.struct_format, data[:ATT.struct_size])[0]\n\n # att = uctypes.struct(\n # uctypes.addressof(data[:ATT.struct_size]),\n # ATT_STRUCT,\n # uctypes.LITTLE_ENDIAN\n # )\n\n data = data[ATT.struct_size:]\n return ATT(opcode, data)",
"def _get_active_attributes(self):\n\n count = gl.glGetProgramiv(self.handle, gl.GL_ACTIVE_ATTRIBUTES)\n attributes = []\n\n # This match a name of the form \"name[size]\" (= array)\n regex = re.compile(\"\"\"(?P<name>\\w+)\\s*(\\[(?P<size>\\d+)\\])\"\"\")\n\n for i in range(count):\n name, size, gtype = gl.glGetActiveAttrib(self.handle, i)\n\n # This checks if the attribute is an array\n # Name will be something like xxx[0] instead of xxx\n m = regex.match(name)\n # When attribute is an array, size corresponds to the highest used index\n if m:\n name = m.group('name')\n if size >= 1:\n for i in range(size):\n name = '%s[%d]' % (m.group('name'),i)\n attributes.append((name, gtype))\n else:\n attributes.append((name, gtype))\n return attributes",
"def getDataAttributes(self):\n asRet = [];\n asAttrs = dir(self);\n for sAttr in asAttrs:\n if sAttr[0] == '_' or sAttr[0] == 'k':\n continue;\n if sAttr in self.kasInternalAttributes:\n continue;\n oValue = getattr(self, sAttr);\n if callable(oValue):\n continue;\n asRet.append(sAttr);\n return asRet;",
"def _parse_attributes(self, attributes):\n\n var_value_pairs = attributes.split()\n\n self.logger.debug('attributes:{} pairs:{}'.format(attributes, var_value_pairs))\n\n for var_value_pair in var_value_pairs:\n (var, separator, value) = var_value_pair.partition('=')\n value = value.replace('\"', '')\n self._current_element.add_attribute(var, value)",
"def _attrlist(self,obj, attrs):\n vlist = [obj.__getattribute__(attr) for attr in attrs]\n return vlist",
"def parse_attributes(cls):\n cls._fields = []\n cls._tables = []\n for attr_k in dir(cls):\n try:\n attr = object.__getattribute__(cls, attr_k)\n except AttributeError:\n continue\n if issubclass(attr.__class__, ReferenceManyField):\n cls._tables.append(attr_k)\n elif issubclass(attr.__class__, Field):\n cls._fields.append(attr_k)",
"def get_attrs(str):\n return _scanner.scan(str)[0]",
"def _iterattrs(self, handle=\"\"):\n if not handle:\n handle = self.handle\n attr = gv.firstattr(handle)\n while gv.ok(attr):\n yield gv.nameof(attr), decode_page(gv.getv(handle, attr))\n attr = gv.nextattr(handle, attr)",
"def thrift_attrs(obj_or_cls) -> List[str]:\n return [v[1] for v in obj_or_cls.thrift_spec.values()]",
"def parseTag(self) :\n pos = self.position\n tagtype = self.tags[ord(self._data[pos])]\n if tagtype == 'end-of-attributes-tag':\n return 0\n pos += 1\n posend = pos2 = pos + 2\n namelength = unpack(\">H\", self._data[pos:pos2])[0]\n if not namelength :\n name = self._curname\n else :\n posend += namelength\n self._curname = name = self._data[pos2:posend]\n pos2 = posend + 2\n valuelength = unpack(\">H\", self._data[posend:pos2])[0]\n posend = pos2 + valuelength\n value = self._data[pos2:posend]\n if tagtype in (\"integer\", \"enum\") :\n value = unpack(\">I\", value)[0]\n elif tagtype == \"boolean\" :\n value = ord(value)\n try :\n (oldname, oldval) = self._curattributes[-1][-1]\n if oldname == name :\n oldval.append((tagtype, value))\n else :\n raise IndexError\n except IndexError :\n self._curattributes[-1].append((name, [(tagtype, value)]))\n self.logDebug(\"%s(%s) : %s\" % (name, tagtype, value))\n return posend - self.position",
"def parseAttrs(self,attrs,date_type):\n\tattrs=copy.copy(attrs) #make sure we don't change user/group attributes\n \tattr_holders=self.getAttrHolders(attrs)\n\tmap(lambda x:x.setDateType(date_type),attr_holders)\n\tmap(lambda x:attrs.update(x.getParsedDic()),attr_holders)\n\treturn attrs"
] | [
"0.6657035",
"0.6440315",
"0.64095205",
"0.61371845",
"0.6111216",
"0.6109647",
"0.60146755",
"0.59958047",
"0.59727055",
"0.5928471",
"0.590856",
"0.5885479",
"0.5834907",
"0.5802745",
"0.57771",
"0.57714987",
"0.5754614",
"0.57352376",
"0.57304335",
"0.5678931",
"0.5669214",
"0.566849",
"0.5610122",
"0.5608643",
"0.55973095",
"0.5594626",
"0.5594198",
"0.55436814",
"0.55363935",
"0.5520676"
] | 0.7819394 | 0 |
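Editor's note: the negative examples in the block above all orbit one task — splitting a serialized attribute string into typed values. The sketch below is a self-contained illustration of that task in the spirit of the first snippet (STEP-style records: floats, 'quoted' strings, $ for null, nested parenthesized lists). It was written for this note and is not code from any of the quoted projects.

```python
def parse_attribute(attr_str):
    """Split a STEP-style attribute string into typed Python values."""
    parts, depth, token, in_str = [], 0, [], False

    def emit(raw):
        s = raw.strip()
        if not s:
            return
        if s.startswith('(') and s.endswith(')'):    # nested list -> recurse
            parts.append(parse_attribute(s[1:-1]))
        elif s.startswith("'") and s.endswith("'"):  # quoted string literal
            parts.append(s[1:-1])
        elif s == '$':                               # null marker
            parts.append(None)
        else:
            try:
                parts.append(float(s))               # number of any kind
            except ValueError:
                parts.append(s)                      # ref, enum or other

    for ch in attr_str:
        if ch == "'":
            in_str = not in_str                      # track quoted spans
        if not in_str:
            if ch == '(':
                depth += 1
            elif ch == ')':
                depth -= 1
            elif ch == ',' and depth == 0:           # top-level separator
                emit(''.join(token))
                token = []
                continue
        token.append(ch)
    emit(''.join(token))
    return parts

assert parse_attribute("'NAME',$,(1.,2.),#42") == ['NAME', None, [1.0, 2.0], '#42']
```

Unlike the index-based nextString() approach in the first snippet, this single pass tracks quote and parenthesis state explicitly, which avoids the empty-token IndexError that snippet risks on inputs such as ",,".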
Returns the current user-defined configuration from the database | def get_user_config():
config = models.Config.query.get(0)
if config is None:
config = models.Config()
config.id = 0
config.save()
return config | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def user_config(self):\n\n return self.__user_config",
"def get_config():\n CONFIG.clear() #clear config\n sql=\"SELECT * FROM config\"\n conn=sqlite3.connect(CONNECTION_STRING)\n c=conn.cursor()\n c.execute(sql)\n results=c.fetchall()\n # iterate through the results now...\n for r in results:\n CONFIG[r[1]]=r[2]\n conn.commit()\n conn.close()",
"def config_db():",
"def get_settings():\n return db.get_data()",
"def default_db_config():\n return read_json_file(db_config_file)",
"def get_config_db():\n\n datab = {'db_name': 'database_name',\n 'db_url': 'database_url'}\n\n return datab",
"def config(self):\n return self._cfg",
"def config_data(self):\n if self._key not in self._user_keys():\n raise CouldNotFindUserException(self._key, self._user_keys())\n return next(data for data in self.CONFIG_DATA if data.key == self._key)",
"def GetConfigInformation(self):\n if self.cur_uid is None:\n return",
"def config(self):\r\n return self._config",
"def database():\n return conf().database",
"def get_db_params(self):\n return self.get_section_config('db')",
"def get_config(self):\n return self.config",
"def config(self):\n return self._config",
"def read_db():\n # read config file\n config = configparser.ConfigParser()\n config.read_file(open(\"options.cfg\"))\n\n return config['DEFAULT']['DatabaseFilename']",
"def configuration(self):\n return self._config",
"def config(self):\n return CurrentProject().config.config[self.key]",
"def _get_config_data(self, cr, uid):\n\n model_conf = self.pool.get('customer.support.settings')\n args = [('selected', '=', True)] \n ids = model_conf.search(cr, uid, args)\n config = model_conf.browse(cr, uid, ids[0])\n\n return {\n 'tor_api_key': config.tor_api_key,\n 'tor_domain': config.tor_domain,\n 'company': config.company\n }",
"def get_details(self):\n return self.__config_data",
"def get(self) -> dict:\n return Config.get()",
"def config(self):\n return self.__config",
"def config(self):\n return self.__config",
"def get_configuration(self) -> dict:\n return self.config",
"def db_config(self) -> \"DBConfigType\":\n if self._db_config is None:\n raise ConfigurationError(\n \"DB configuration not initialised. Make sure to call \"\n \"Tortoise.init with a valid configuration before attempting \"\n \"to create connections.\"\n )\n return self._db_config",
"def get_config(self):\n return {\"name\": self.name, \"tunable\": self.tunable}",
"def get_full_config(self):\n return self._read_config()",
"def getSettings(self):\n return self.cfg",
"def config(self):\n return self._config",
"def config(self):\n return self._config",
"def config(self):\n return self._config"
] | [
"0.7134669",
"0.69565564",
"0.69028026",
"0.6877286",
"0.6810725",
"0.6754715",
"0.6690209",
"0.6661265",
"0.66424286",
"0.6558801",
"0.6545969",
"0.65415466",
"0.653074",
"0.65302026",
"0.649621",
"0.64914185",
"0.64679104",
"0.64291793",
"0.6418399",
"0.64072263",
"0.63998896",
"0.63998896",
"0.63728976",
"0.6372451",
"0.63581514",
"0.6347036",
"0.6338448",
"0.6322001",
"0.6322001",
"0.6322001"
] | 0.73315537 | 0 |
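The positive document above is a get-or-create read of a singleton configuration row (always id 0). A minimal, runnable sketch of the same pattern against plain sqlite3 follows; the table and column names are illustrative assumptions, not the original models module.

```python
import sqlite3

def get_user_config(conn):
    # Ensure the singleton table exists; column names are assumptions.
    conn.execute(
        "CREATE TABLE IF NOT EXISTS config ("
        "id INTEGER PRIMARY KEY, is_configured INTEGER DEFAULT 0)")
    row = conn.execute(
        "SELECT id, is_configured FROM config WHERE id = 0").fetchone()
    if row is None:
        # No row yet: create the id=0 default, mirroring config.save() above.
        conn.execute("INSERT INTO config (id) VALUES (0)")
        conn.commit()
        row = (0, 0)
    return {"id": row[0], "is_configured": bool(row[1])}

conn = sqlite3.connect(":memory:")
print(get_user_config(conn))  # -> {'id': 0, 'is_configured': False}
print(get_user_config(conn))  # second call reuses the stored row
```

Pinning the row id to a constant turns the table into a typed singleton: every caller converges on the same row, and only the first caller pays the creation cost.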
Get an internationalized (i18n) message from the appropriate JSON file for the given key. | def get_json_message(message_key):
file_path = (os.getcwd() + '/ufo/static/locales/' +
flask.session['language_prefix'] + '/messages.json')
try:
with open(file_path) as json_file:
messages = json.load(json_file)
return messages[message_key]
  except (IOError, ValueError, KeyError):
return message_key | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_localized_string(key):\n return _localized_strings[key]",
"def get(self, key, domain=None, language=None, context=None):\n\n if domain is None:\n if self.default_domain is None:\n raise ValueError('No domain given!')\n domain = self.default_domain\n messages = self.get_domain(domain, language)\n\n if not key in messages and self.update_on_missing:\n messages = self.get_domain(domain, language, force_download=True)\n\n if not key in messages:\n raise ValueError('No message for the key {0}!'.format(key))\n\n message = messages[key]\n \n if context is not None:\n for i in range(0, len(context)):\n placeholder = \"${0}\".format(i + 1)\n message = message.replace(placeholder, unicode(context[i]))\n \n return message",
"def load_message(message_id):\n pathname = \"messages/{}.json\".format(message_id)\n return _load_message(pathname)",
"def _load_message(message_filename):\n with open(message_filename) as raw_file:\n msg_data = json.load(raw_file)\n msg = {} # Because this homework makes me salty\n\n # Using os, we split the filename from its path and extension.\n msg[\"id\"] = os.path.splitext(os.path.basename(message_filename))[0]\n\n # Using datetime, we convert the str to a datetime object\n msg[\"time\"] = datetime.strptime(msg_data[\"time\"], DATE_FORMAT)\n\n # Filling in the rest of msg keys\n for k in (\"to\", \"from\", \"subject\", \"body\"):\n msg[k] = msg_data[k]\n return msg",
"def gettext(self, message):\n if self._translations.has_key(message):\n return self._translations[message]\n return super(Translations, self).gettext(message)",
"def get(self, msgid):\r\n return self.trans.get(msgid, str(msgid))",
"def get_messages(file: str, section: str = \"\") -> dict:\n if type(file) is not str:\n raise ValueError(\"file must be a string value\")\n if type(section) is not str:\n raise ValueError(\"section must be a string value\")\n else:\n return json.load(\n open(\"messages/{section}{file}.json\".format(section=section + \"/\", file=file), encoding=\"utf-8\"))",
"def _read_translation_file(language_code: str, filename: str):\n twlight_home = settings.TWLIGHT_HOME\n filepath = \"{twlight_home}/locale/{language_code}/{filename}.json\".format(\n twlight_home=twlight_home, language_code=language_code, filename=filename\n )\n if os.path.isfile(filepath):\n with open(filepath, \"r\") as translation_file:\n translation_dict = json.load(translation_file)\n\n # Remove the \"@metadata\" key from the dictionary\n if \"@metadata\" in translation_dict:\n translation_dict.pop(\"@metadata\")\n return translation_dict\n else:\n return {}",
"def localize(self, msg):\n return self.translations.get(msg, msg)",
"def localize(self, msg):\n return self.translations.get(msg, msg)",
"def _translation(basename, props_dir, languages, key_language=None):\n props_dir = os.path.abspath(props_dir)\n if os.path.isfile(props_dir):\n props_dir = os.path.dirname(props_dir)\n trans = None\n use_key_as_lang = False\n for lang in languages:\n while True:\n trans = _try_file \\\n (props_dir, basename + \"_\" + lang + \".properties\", lang, trans)\n # Use identity mapping instead (or in addition to) file?\n if lang == key_language:\n use_key_as_lang = True\n # We need no more fallbacks after identity mapping\n break;\n lang_up = lang.rsplit(\"_\", 1)[0]\n if lang_up == lang:\n break\n lang = lang_up\n # Finally try properties file without language specification\n trans = _try_file(props_dir, basename + \".properties\", None, trans)\n if trans:\n trans._add_fallback_unchecked(BaseTranslations()) # last resort\n else:\n if use_key_as_lang:\n trans = BaseTranslations(key_language)\n else:\n trans = BaseTranslations()\n return trans",
"def get_key_from_file():\n json_data = request.get_json()\n \n is_reference = json_data['is_reference']\n filename = json_data['filename']\n key_name = json_data['key_name']\n\n \n settings.setOptionsFile(get_info('uid'))\n f = ROOT.TFile(filename)\n\n d = eval(cppyy.gbl.getDictionary(f,key_name))\n \n f.Close()\n return jsonify(d)",
"def _translate(self, key):\n return self.TRANSLATE.get(key, key)",
"def json_get(name, key):\n cmpd_file = name + '.json'\n data_dirs = [dir for dir in os.listdir('.') if dir.endswith('_data')]\n dir = data_dirs[0]\n for fname in os.listdir(dir):\n if fname.endswith(name + '.json'):\n with open(os.path.join(dir,fname)) as f:\n data = json.load(f)\n return data[key]",
"def load(self, key: str) -> str:\n pass",
"def interpolate_insted_of_translate(\n self, msgid, mapping=None, *args, **kw): # pragma: no cover webdriver\n return zope.i18n.interpolate(msgid, mapping)",
"def get_translation(self, command, keyword=None, item=None):\n key = item if item is not None else \\\n keyword if keyword is not None else command\n cata = self.get_catalog(command)\n if cata is not None:\n dtr = cata.definition.get(\"translation\", {})\n trans = dtr.get(key)\n if trans is not None:\n return unicode(trans, 'utf-8')\n return GLOBAL_DICT.get(key, key)",
"def get_translation_dict_from_file(path, lang, app):\n\tjson_content = {}\n\tif os.path.exists(path):\n\t\twith open(path, 'r') as f:\n\t\t\tjson_content = json.loads(f.read())\n\n\treturn json_content",
"def load_user_msg():\n FILE = \"./usr_msg.json\"\n with open(FILE, 'r') as file:\n f = json.load(file)\n return f",
"def get_key(self, language_name, key):\n self.language_name_global = language_name\n self.key_global = key\n package_path = f\"language_manager/packages/{language_name}.langpkg\"\n if not self.__copy_check_same__(package_path):\n self.__copy__(package_path)\n\n # Import the temp Python file coming out of the package\n import temp\n self.__log__(f\"Getting key \\\"{key}\\\"\\n\")\n # Take the key (type: str)\n return_val = temp.LANGUAGE[\"contents\"][key]\n self.__log__(\"Done.\")\n # Delete the language package from the RAM to free up space\n del temp\n self.__log__(\"\\n--- Finished function call ---\\n\\n\")\n # Return the value of the key\n return return_val",
"def ugettext(self, message):\n if isinstance(message, unicode):\n msg = message.encode(\"utf-8\")\n else:\n msg = message\n if self._translations.has_key(msg):\n return unicode(self._translations[msg], \"utf-8\")\n return super(Translations, self).ugettext(message)",
"def _extract_18n_messages():\n BabelCLI().run(['', 'extract', '-F', 'babel.cfg', '-k', '_t', '--no-location', '--sort-output',\n '--omit-header', '-o', os.path.join(I18N_PATH, 'messages.pot'), 'aliquis'])",
"def get_api_key(filename: str) -> str:\n file_contents = load_json(filename)\n return file_contents[\"key\"]",
"def i18n(self) -> 'outputs.I18nConfigResponse':\n return pulumi.get(self, \"i18n\")",
"def message_for_key(self, key, context):\n raise NotImplementedError('message_for_key() should have been replaced by a metaclass')",
"def get_word(key: str, language: str):\n if key not in word_keys:\n return \"LOCALIZATION KEY {} NOT FOUND FOR LANGUAGE {}\".format(key, language)\n words = word_keys[key]\n\n # If the word doesn't exist, just show word in English\n if language not in words or words[language] == \"\":\n return words[EN]\n else:\n return words[language]",
"def get_message(request):\r\n message_key = request.GET.get('message', None)\r\n message = None\r\n message_type = None\r\n\r\n if ((not message_key or message_key == 'upload_success') and\r\n QIFParser.get_status() == 'in_progress'):\r\n message_key = 'in_progress_quicken_file'\r\n\r\n try:\r\n message = Message.MESSAGES[message_key]\r\n message_type = message['type']\r\n message = message['message']\r\n except KeyError:\r\n pass\r\n\r\n return {'message': message,\r\n 'message_key': message_key,\r\n 'message_type': message_type}",
"def get_msgid():\n fp = open(config.msgidfile,\"rb\")\n msgid = pickle.load(fp)\n return msgid",
"def read_json(self, inputfile):\n transtransfile = json.load(inputfile)\n self.language = transfile['lang']\n self.translations = transfile['strings']",
"def get_lang(context, field):\n lang = json.load(open(\"json/lang.json\", \"r\"))\n conf = json.load(open(\"json/serverconfig.json\", \"r\"))\n return lang[conf[str(context)][\"lang\"]][field]"
] | [
"0.67976195",
"0.6331842",
"0.6149216",
"0.61009747",
"0.598111",
"0.5842774",
"0.5832391",
"0.5799556",
"0.5783316",
"0.5783316",
"0.56813073",
"0.56516814",
"0.56434065",
"0.5634109",
"0.56184775",
"0.56084794",
"0.5544183",
"0.5531985",
"0.5520745",
"0.55069876",
"0.5451151",
"0.54314506",
"0.54123896",
"0.54063886",
"0.5399518",
"0.53940445",
"0.5371051",
"0.5360479",
"0.5305448",
"0.5262159"
] | 0.8176998 | 0 |
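The positive document above resolves a message key against a per-language messages.json file and deliberately falls back to the key itself on any failure, so untranslated keys degrade to visible placeholders instead of crashes. Below is a self-contained sketch of that contract with the Flask session dependency replaced by an explicit language_prefix parameter; the directory layout mirrors the document but is otherwise an assumption.

```python
import json
import os
import tempfile

def get_message(locale_dir, language_prefix, message_key):
    # Same fallback contract as get_json_message above: any failure
    # (missing file, bad JSON, unknown key) returns the key itself.
    path = os.path.join(locale_dir, language_prefix, "messages.json")
    try:
        with open(path) as json_file:
            return json.load(json_file)[message_key]
    except (OSError, ValueError, KeyError):
        return message_key

root = tempfile.mkdtemp()
os.makedirs(os.path.join(root, "en"))
with open(os.path.join(root, "en", "messages.json"), "w") as f:
    json.dump({"greeting": "Hello"}, f)

print(get_message(root, "en", "greeting"))  # -> Hello
print(get_message(root, "fr", "greeting"))  # -> greeting (fallback)
```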
Make the resources for the OAuth configuration component. | def make_oauth_configration_resources_dict():
config = get_user_config()
return {
'config': config.to_dict(),
'oauth_url': oauth.getOauthFlow().step1_get_authorize_url(),
} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def rest_api_config(self):\n with self.resource_lock:\n pass",
"def _get_resources():\n return {\n 'searchPageUrl': flask.url_for('search_page'),\n 'searchJsonUrl': flask.url_for('search'),\n 'userAddIconUrl': flask.url_for('static', filename='img/add-users.svg'),\n 'logoutUrl': flask.url_for('logout'),\n 'settingsUrl': flask.url_for('setup') + '#settingsDisplayTemplate',\n 'listAdminUrl': flask.url_for('admin_list'),\n 'addAdminUrl': flask.url_for('add_admin'),\n 'changeAdminPasswordUrl': flask.url_for('change_admin_password'),\n 'removeAdminUrl': flask.url_for('delete_admin'),\n 'loginUrl': flask.url_for('login'),\n 'recaptchaKey': ufo.app.config.get('RECAPTCHA_SITE_KEY', ''),\n 'setupUrl': flask.url_for('setup'),\n 'setupAdminUrl': flask.url_for('setup_admin'),\n 'setupOauthUrl': flask.url_for('setup_oauth'),\n 'download_chrome_policy': flask.url_for('download_chrome_policy'),\n 'policy_filename': 'chrome_policy.json',\n 'proxyServerAddUrl': flask.url_for('proxyserver_add'),\n 'proxyServerAddIconUrl': flask.url_for('static',\n filename='img/add-servers.svg'),\n 'proxyServerInverseAddIconUrl': flask.url_for(\n 'static', filename='img/add-servers-inverse.svg'),\n 'proxyServerListId': 'proxyList',\n 'proxyServerListUrl': flask.url_for('proxyserver_list'),\n 'listLimit': 10,\n 'proxyServerDetailsButtonId': 'serverDetailsButton',\n 'editButtonId': 'serverEditButton',\n 'proxyServerDetailsOverlayId': 'serverDetailsOverlay',\n 'proxyServerEditUrl': flask.url_for('proxyserver_edit'),\n 'proxyServerDeleteUrl': flask.url_for('proxyserver_delete'),\n 'proxyServerIconUrl': flask.url_for('static', filename='img/server.svg'),\n 'proxyServerAddButtonId': 'addServerButton',\n 'proxyServerModalId': 'serverModal',\n 'textAreaMaxRows': 10,\n 'ipInput': 'ipInput',\n 'nameInput': 'nameInput',\n 'sshPrivateKeyInput': 'sshPrivateKeyInput',\n 'hostPublicKeyInput': 'hostPublicKeyInput',\n 'getSettingsUrl': flask.url_for('get_settings'),\n 'settingsEditUrl': flask.url_for('edit_settings'),\n 'userAddUrl': flask.url_for('add_user'),\n 'userInverseAddIconUrl': flask.url_for(\n 'static', filename='img/add-users-inverse.svg'),\n 'userListId': 'userList',\n 'userListUrl': flask.url_for('user_list'),\n 'revokeToggleUrl': flask.url_for('user_toggle_revoked'),\n 'rotateKeysUrl': flask.url_for('user_get_new_key_pair'),\n 'inviteCodeUrl': flask.url_for('user_get_invite_code'),\n 'userDeleteUrl': flask.url_for('delete_user'),\n 'userDetailsButtonId': 'userDetailsButton',\n 'userDetailsOverlayId': 'userDetailsOverlay',\n 'userIconUrl': flask.url_for('static', filename='img/user.svg'),\n 'userAddButtonId': 'addUserButton',\n 'userModalId': 'userModal',\n 'groupAddTabId': 'groupAddTab',\n 'groupAddFormId': 'groupAdd',\n 'groupAddInputName': 'group_key',\n 'userAddTabId': 'userAddTab',\n 'userAddFormId': 'userAdd',\n 'userAddInputName': 'user_key',\n 'domainAddTabId': 'domainAddTab',\n 'domainAddFormId': 'domainAdd',\n 'manualAddTabId': 'manualAddTab',\n 'manualAddFormId': 'manualAdd',\n 'regexes': regex.REGEXES_AND_ERRORS_DICTIONARY,\n 'jsonPrefix': ufo.XSSI_PREFIX,\n 'maxFailedLoginsBeforeRecaptcha': ufo.MAX_FAILED_LOGINS_BEFORE_RECAPTCHA,\n 'userAddListFlipperId': 'userAddListFlipper',\n 'proxyServerAddListFlipperId': 'proxyServerAddListFlipper',\n 'userAddTabsId': 'userAddTabs',\n 'proxyServerAddFormId': 'serverAddFormHolder',\n }",
"def resources(self):",
"def generate_config(context):\n\n properties = context.properties\n\n base_resource = get_type(context)\n\n resources = []\n\n if 'dependsOn' in properties:\n dependson = {'metadata': {'dependsOn': properties['dependsOn']}}\n dependson_root = properties['dependsOn']\n else:\n dependson = {}\n dependson_root = []\n\n for role in properties['roles']:\n for member in role['members']:\n suffix = sha1(\n '{}-{}'.format(role['role'], member).encode('utf-8')).hexdigest()[:10]\n policy_get_name = '{}-{}'.format(context.env['name'], suffix)\n\n resource_name = '{}-{}'.format(policy_get_name,\n base_resource['postfix'])\n iam_resource = {\n 'name': resource_name,\n # TODO - Virtual type documentation needed\n 'type': base_resource['dm_type'],\n 'properties': {\n base_resource['dm_resource_property']: base_resource['id'],\n 'role': role['role'],\n 'member': member,\n }\n }\n iam_resource.update(dependson)\n resources.append(iam_resource)\n\n dependson = {'metadata': {'dependsOn': [\n resource_name] + dependson_root}}\n\n return {\"resources\": resources}",
"def oauth_config(url_base):\n return {\n \"credentials\": {\n \"auth_type\": \"oauth2_confidential_application\",\n \"client_secret\": \"test_client_secret\",\n \"client_id\": \"test_client_id\",\n \"audience\": f\"{url_base}/api/v2\",\n },\n \"base_url\": url_base,\n }",
"def generate_config(context):\n resources = []\n\n # Create an initial 'STARTED' pubsub notification.\n if 'pubsubTopic' in context.properties:\n resources.extend(\n create_pubsub_notification(\n context,\n depends_on=[],\n status_string='STARTED',\n ))\n\n # Required properties.\n billing_account_id = context.properties['billingAccountId']\n parent_organization = context.properties['parentOrganization']\n project_id = context.properties['projectId']\n\n # Optional properties, with defaults.\n high_security_network = context.properties.get('highSecurityNetwork', False)\n private_ip_google_access = context.properties.get('privateIpGoogleAccess', False)\n storage_bucket_lifecycle = context.properties.get('storageBucketLifecycle', 180)\n billing_account_friendly_name = context.properties.get('billingAccountFriendlyName', billing_account_id)\n # Use a project name if given, otherwise it's safe to fallback to use the\n # project ID as the name.\n project_name = context.properties.get('projectName', project_id)\n labels_obj = context.properties.get('labels', {})\n\n # Save this template's version number and all parameters inputs to the project metadata to keep track of what\n # operations were performed on a project.\n labels_obj.update({\n \"firecloud-project-template-version\" : str(FIRECLOUD_PROJECT_TEMPLATE_VERSION_ID)\n })\n\n for k, v in context.properties.items():\n label_k, label_v = satisfy_label_requirements('param--' + str(k), v)\n labels_obj.update({\n label_k: label_v\n })\n\n\n if high_security_network:\n labels_obj.update({\n \"vpc-network-name\" : FIRECLOUD_VPC_NETWORK_NAME,\n \"vpc-subnetwork-name\" : FIRECLOUD_VPC_SUBNETWORK_NAME\n })\n\n if 'parentFolder' in context.properties:\n parent_obj = {\n 'id': context.properties['parentFolder'],\n 'type': 'folder',\n }\n else:\n parent_obj = {\n 'id': context.properties['parentOrganization'],\n 'type': 'organization',\n }\n\n # Create the main project resource.\n resources.append({\n 'type': 'templates/project.py',\n 'name': 'fc-project',\n 'properties': {\n 'activateApis': FIRECLOUD_REQUIRED_APIS,\n 'billingAccountId': billing_account_id,\n 'billingAccountFriendlyName': billing_account_friendly_name,\n 'iamPolicies': create_iam_policies(context),\n 'labels': labels_obj,\n 'name': project_name,\n # The project parent. For FireCloud, this should refer to the\n # firecloud.org (or equivalent) GCP organization ID.\n 'parent': parent_obj,\n 'projectId': project_id,\n # If true, this would remove the default compute egine service\n # account. 
FireCloud doesn't use this SA, but we're leaving this set\n # to False to avoid changing any legacy behavior, at least initially.\n 'removeDefaultSA': False,\n # Removes the default VPC network for projects requiring stringent\n # network security configurations.\n 'removeDefaultVPC': high_security_network,\n 'createUsageExportBucket': False,\n # Always set up the storage logs and cromwell auth buckets for Firecloud\n 'storageLogsBucket': True,\n 'storageBucketLifecycle': storage_bucket_lifecycle,\n 'cromwellAuthBucket': True\n }\n })\n\n if high_security_network:\n resources.extend(create_high_security_network(context))\n resources.extend(create_firewall(context))\n if private_ip_google_access:\n resources.extend(create_private_google_access_dns_zone(context))\n else:\n resources.extend(create_default_network(context))\n\n if 'pubsubTopic' in context.properties:\n resources.extend(\n create_pubsub_notification(\n context,\n # This is somewhat hacky, but we can't simply collect the name of each\n # collected resource since template call nodes aren't \"real\" resources\n # that can be part of a dependsOn stanza. So instead, we collect the\n # names of all resources that are output by the network (which itself\n # depends on the project). It doesn't seem to be possible to concatenate\n # dependsOn arrays within the reference syntax, otherwise we could make\n # this depend explicitly on all resources from the template nodes.\n depends_on='$(ref.fc-network.resourceNames)',\n status_string='COMPLETED'))\n\n return {'resources': resources}",
"def configure(app):\n api.add_resource(Event, '/event/')\n api.add_resource(EventItem, '/event/<event_id>')\n app.register_blueprint(bp_restapi)",
"def async_request_configuration(hass, config, oauth):\n if len(_CONFIGURING) > 0:\n return\n configurator = hass.components.configurator\n global OAUTH_CLIENT_ID\n OAUTH_CLIENT_ID = oauth.client_id\n\n async def async_configuration_callback(data):\n \"\"\"Handle configuration changes.\"\"\"\n _LOGGER.info('Spotify async_configuration_callback')\n\n def success():\n \"\"\"Signal successful setup.\"\"\"\n req_config = _CONFIGURING.pop(OAUTH_CLIENT_ID)\n configurator.request_done(req_config)\n\n hass.async_add_job(success)\n async_setup_spotify(hass, config, configurator)\n\n _CONFIGURING[OAUTH_CLIENT_ID] = configurator.async_request_config(\n DEFAULT_NAME,\n async_configuration_callback,\n link_name=CONFIGURATOR_LINK_NAME,\n link_url=oauth.get_authorize_url(),\n description=CONFIGURATOR_DESCRIPTION,\n submit_caption=CONFIGURATOR_SUBMIT_CAPTION\n )\n setUrl(oauth.get_authorize_url())",
"def get_api_resources(self, **kwargs):\n\n all_params = []\n all_params.append('callback')\n\n params = locals()\n for key, val in iteritems(params['kwargs']):\n if key not in all_params:\n raise TypeError(\n \"Got an unexpected keyword argument '%s'\"\n \" to method get_api_resources\" % key\n )\n params[key] = val\n del params['kwargs']\n\n\n resource_path = '/oapi/v1'.replace('{format}', 'json')\n path_params = {}\n\n query_params = {}\n\n header_params = {}\n\n form_params = []\n local_var_files = {}\n\n body_params = None\n\n # HTTP header `Accept`\n header_params['Accept'] = self.api_client.\\\n select_header_accept(['application/json', 'application/yaml'])\n if not header_params['Accept']:\n del header_params['Accept']\n\n # HTTP header `Content-Type`\n header_params['Content-Type'] = self.api_client.\\\n select_header_content_type(['application/json', 'application/yaml'])\n\n # Authentication setting\n auth_settings = []\n\n response = self.api_client.call_api(resource_path, 'GET',\n path_params,\n query_params,\n header_params,\n body=body_params,\n post_params=form_params,\n files=local_var_files,\n response_type=None,\n auth_settings=auth_settings,\n callback=params.get('callback'))\n return response",
"def build_configs():",
"def configure(self):\n inject(self.urls, self.names_for(\"urls\"))\n inject(self.models, self.names_for(\"models\"))\n self.load_admin()",
"def urls(self):\n \n from django.conf.urls.defaults import url, include\n from tastypie.utils.urls import trailing_slash\n from client.views import checkin, checkout, login, logout, register, create_anonymous, delete_anonymous\n \n pattern_list = [\n url(r\"^(?P<api_name>%s)%s$\" % (self.api_name, trailing_slash()), self.wrap_view('top_level'), name=\"api_%s_top_level\" % self.api_name),\n ]\n\n for name in sorted(self._registry.keys()):\n self._registry[name].api_name = self.api_name\n pattern_list.append((r\"^(?P<api_name>%s)/resources/\" % self.api_name, include(self._registry[name].urls)))\n\n ## then add the actions\n pattern_list.extend([\n url(r\"^%s/actions/create_anonymous/$\" % self.api_name, create_anonymous, name=\"create_anonymous\"),\n url(r\"^%s/actions/delete_anonymous/$\" % self.api_name, delete_anonymous, name=\"delete_anonymous\"),\n url(r\"^%s/actions/register/$\" % self.api_name, register, name=\"register\"),\n url(r\"^%s/actions/login/$\" % self.api_name, login, name=\"login\"),\n url(r\"^%s/actions/logout/$\" % self.api_name, logout, name=\"logout\"),\n url(r\"^%s/actions/checkin/$\" % self.api_name, checkin, name=\"checkin\"),\n url(r\"^%s/actions/checkout/$\" % self.api_name, checkout, name=\"checkout\")\n ])\n\n urlpatterns = self.prepend_urls()\n \n urlpatterns += patterns('',\n *pattern_list\n )\n return urlpatterns",
"def _load_resources(self):\n puts = (getattr(self, 'project', None) or self).puts\n for resource_type, resource_cls in six.iteritems(AVAILABLE_RESOURCES):\n for name in self.settings.get(resource_type, {}):\n extra = {\n 'project': getattr(self, 'project', None) or self,\n 'app': self if hasattr(self, 'project') else None,\n }\n\n with indent(4 if hasattr(self, 'project') else 2):\n puts(colored.green(u\"✓ {}:{}\".format(resource_type, name)))\n\n self._resources[resource_type].append(\n resource_cls.factory(\n name=name,\n settings=self.settings.get(resource_type, {})[name],\n **extra\n )\n )",
"def config(self, request):\n config = OtterConfig(self.store, self.tenant_id, self.group_id,\n self.dispatcher)\n return config.app.resource()",
"def configure(self):\n configurations = config.Configurations()\n self.credentials = configurations.credentials\n self.config = configurations.config",
"def generate_config(context):\n\n project_id = context.env['project']\n owners_group = context.properties['owners_group']\n auditors_group = context.properties['auditors_group']\n resources = []\n\n # The GCS bucket to hold logs.\n logs_bucket = context.properties.get('logs_gcs_bucket')\n if logs_bucket:\n resources.append({\n 'name': logs_bucket['name'],\n 'type': 'storage.v1.bucket',\n 'properties': {\n 'location': logs_bucket['location'],\n 'storageClass': logs_bucket['storage_class'],\n 'lifecycle': {\n 'rule': [{\n 'action': {\n 'type': 'Delete'\n },\n 'condition': {\n 'age': logs_bucket['ttl_days'],\n 'isLive': True,\n },\n }],\n },\n },\n 'accessControl': {\n 'gcpIamPolicy': {\n 'bindings': [\n {\n 'role': 'roles/storage.admin',\n 'members': ['group:' + owners_group,],\n },\n {\n 'role': 'roles/storage.objectCreator',\n 'members': ['group:[email protected]'],\n },\n {\n 'role': 'roles/storage.objectViewer',\n 'members': ['group:' + auditors_group,],\n },\n ],\n },\n },\n })\n\n # BigQuery dataset to hold audit logs.\n logs_dataset = context.properties.get('logs_bigquery_dataset')\n if logs_dataset:\n dataset_id = logs_dataset['name']\n resources.append({\n 'name': dataset_id,\n 'type': 'bigquery.v2.dataset',\n 'properties': {\n 'datasetReference': {\n 'datasetId': dataset_id,\n },\n 'location': logs_dataset['location'],\n },\n })\n\n # Update permissions for the dataset. This also removes the deployment\n # manager service account's access.\n resources.append({\n 'name': 'update-' + dataset_id,\n 'action': 'gcp-types/bigquery-v2:bigquery.datasets.patch',\n 'properties': {\n 'projectId':\n project_id,\n 'datasetId':\n dataset_id,\n 'access': [\n {\n 'role': 'OWNER',\n 'groupByEmail': owners_group,\n },\n {\n 'role': 'READER',\n 'groupByEmail': auditors_group,\n },\n {\n 'role': 'WRITER',\n 'userByEmail': logs_dataset['log_sink_service_account'],\n },\n ],\n },\n 'metadata': {\n 'dependsOn': [dataset_id],\n },\n })\n\n return {'resources': resources}",
"def _load_api(self):\n self.app.add_url_rule('/scheduler', 'get_scheduler_info', self._apply_auth(api.get_scheduler_info))\n self.app.add_url_rule('/scheduler/jobs', 'add_job', self._apply_auth(api.add_job), methods=['POST'])\n self.app.add_url_rule('/scheduler/jobs', 'get_jobs', self._apply_auth(api.get_jobs))\n self.app.add_url_rule('/scheduler/jobs/reload_jobs', 'reload_jobs', self._apply_auth(api.reload_jobs), methods=['POST'])\n self.app.add_url_rule('/scheduler/jobs/<job_id>', 'get_job', self._apply_auth(api.get_job))\n self.app.add_url_rule('/scheduler/jobs/<job_id>', 'delete_job', self._apply_auth(api.delete_job), methods=['DELETE'])\n self.app.add_url_rule('/scheduler/jobs/<job_id>', 'update_job', self._apply_auth(api.update_job), methods=['PATCH'])\n self.app.add_url_rule('/scheduler/jobs/<id>/reschedule', 'reschedule_job', self._apply_auth(api.reschedule_job), methods=['PATCH'])\n self.app.add_url_rule('/scheduler/jobs/<id>/reschedule_once', 'reschedule_job_once', self._apply_auth(api.reschedule_job_once), methods=['PATCH'])\n self.app.add_url_rule('/scheduler/jobs/<job_id>/pause', 'pause_job', self._apply_auth(api.pause_job), methods=['POST'])\n self.app.add_url_rule('/scheduler/jobs/<job_id>/resume', 'resume_job', self._apply_auth(api.resume_job), methods=['POST'])\n self.app.add_url_rule('/scheduler/jobs/<job_id>/run', 'run_job', self._apply_auth(api.run_job), methods=['POST'])",
"def initialize_routes(app):\n # Authentification \n app.add_resource(auth.LoginApi, '/auth/login')\n app.add_resource(auth.SignupApi, '/auth/SignupApi')\n # Intialisation et activation d'un parking\n app.add_resource(parkingInit.InitilizeAParking, '/administrate/add')\n app.add_resource(parkingInit.ActivateParking, '/administrate/activate')\n app.add_resource(parkingInit.InitilizeAParking, '/administrate/getall', endpoint='getall')\n # Gestion de Clients\n app.add_resource(useresResources.GestionUstilisateurs, '/administrate/usesrs/get')\n app.add_resource(useresResources.GestionUstilisateurs, '/administrate/usesrs/getById/<int:idUser>', endpoint='get_by_id')\n # statistiques financéres\n app.add_resource(stats.Money, '/administrate/finance/monthly', endpoint='monthly')\n app.add_resource(stats.Money, '/administrate/finance/yearly', endpoint='yearly')",
"def __init__(\n self,\n title,\n description,\n base_url,\n realm,\n app_key=None,\n icon=None,\n scopes=\"openid\",\n **kwargs\n ):\n app_key = app_key or \"KEYCLOAK_APP_CREDENTIALS\"\n base_url = \"{}/\".format(base_url.rstrip(\"/\")) # add leading `/`\n\n self._realm_url = \"{}auth/realms/{}\".format(base_url, realm)\n\n access_token_url = self.make_url(self._realm_url, \"token\")\n authorize_url = self.make_url(self._realm_url, \"auth\")\n self._user_info_url = self.make_url(self._realm_url, \"userinfo\")\n\n super().__init__(\n title,\n description,\n base_url,\n app_key,\n icon=icon,\n request_token_params={\"scope\": scopes},\n access_token_url=access_token_url,\n authorize_url=authorize_url,\n **kwargs\n )\n\n self._handlers = dict(\n authorized_handler=\"invenio_oauthclient.handlers:authorized_signup_handler\",\n disconnect_handler=\"invenio_oauthclient.contrib.keycloak.handlers:disconnect_handler\",\n signup_handler=dict(\n info=\"invenio_oauthclient.contrib.keycloak.handlers:info_handler\",\n info_serializer=\"invenio_oauthclient.contrib.keycloak.handlers:info_serializer_handler\",\n setup=\"invenio_oauthclient.contrib.keycloak.handlers:setup_handler\",\n view=\"invenio_oauthclient.handlers:signup_handler\",\n ),\n )\n\n self._rest_handlers = dict(\n authorized_handler=\"invenio_oauthclient.handlers.rest:authorized_signup_handler\",\n disconnect_handler=\"invenio_oauthclient.contrib.keycloak.handlers:disconnect_rest_handler\",\n signup_handler=dict(\n info=\"invenio_oauthclient.contrib.keycloak.handlers:info_handler\",\n info_serializer=\"invenio_oauthclient.contrib.keycloak.handlers:info_serializer_handler\",\n setup=\"invenio_oauthclient.contrib.keycloak.handlers:setup_handler\",\n view=\"invenio_oauthclient.handlers.rest:signup_handler\",\n ),\n response_handler=(\n \"invenio_oauthclient.handlers.rest:default_remote_response_handler\"\n ),\n authorized_redirect_url=\"/\",\n disconnect_redirect_url=\"/\",\n signup_redirect_url=\"/\",\n error_redirect_url=\"/\",\n )",
"def generate_config(context):\n\n resources = []\n properties = context.properties\n project_id = properties.get('project', context.env['project'])\n name = properties.get('name', context.env['name'])\n\n resource = {\n 'name': context.env['name'],\n # https://cloud.google.com/filestore/docs/reference/rest/v1beta1/projects.locations.instances/create\n 'type': 'gcp-types/file-v1beta1:projects.locations.instances',\n 'properties': {\n 'parent': 'projects/{}/locations/{}'.format(project_id, properties['location']),\n 'instanceId': name,\n }\n }\n\n optional_props = [\n 'description',\n 'tier',\n 'labels',\n 'fileShares',\n 'networks',\n ]\n\n for prop in optional_props:\n if prop in properties:\n resource['properties'][prop] = properties[prop]\n\n resources.append(resource)\n\n return {\n 'resources':\n resources,\n 'outputs':\n [\n {\n 'name': 'name',\n 'value': name\n },\n {\n 'name': 'fileShares',\n 'value': '$(ref.{}.fileShares)'.format(context.env['name'])\n },\n {\n 'name': 'networks',\n 'value': '$(ref.{}.networks)'.format(context.env['name'])\n }\n ]\n }",
"def init_rest(app_):\n\n rest_api = Api(app_)\n rest_api.add_resource(views.rest_resources.AppListResource,\n ActiveConfig.REST_URL_APPS_LIST,\n ActiveConfig.REST_URL_APPS_LIST + '/')\n rest_api.add_resource(views.rest_resources.AppResource,\n ActiveConfig.REST_URL_APPS_ITEM,\n ActiveConfig.REST_URL_APPS,\n ActiveConfig.REST_URL_APPS + '/')",
"def configure_apispec(app):\n pass",
"def oauth():\n return {\"consumer_key\": \"Insert consumer key HERE\",\n \"consumer_secret\": \"Insert consumer secret HERE\",\n \"token_key\": \"Insert token key HERE\",\n \"token_secret\": \"Insert token secret HERE\"}",
"def make_oauth_client(base_url) -> requests.Session:\n config_file = os.path.join(os.environ['HOME'], '.allurarc')\n cp = ConfigParser()\n cp.read(config_file)\n\n REQUEST_TOKEN_URL = base_url + '/rest/oauth/request_token'\n AUTHORIZE_URL = base_url + '/rest/oauth/authorize'\n ACCESS_TOKEN_URL = base_url + '/rest/oauth/access_token'\n oauth_key = option(cp, base_url, 'oauth_key',\n 'Forge API OAuth Consumer Key (%s/auth/oauth/): ' % base_url)\n oauth_secret = option(cp, base_url, 'oauth_secret',\n 'Forge API Oauth Consumer Secret: ')\n\n try:\n oauth_token = cp.get(base_url, 'oauth_token')\n oauth_token_secret = cp.get(base_url, 'oauth_token_secret')\n except NoOptionError:\n oauthSess = OAuth1Session(oauth_key, client_secret=oauth_secret, callback_uri='oob')\n request_token = oauthSess.fetch_request_token(REQUEST_TOKEN_URL)\n pin_url = oauthSess.authorization_url(AUTHORIZE_URL, request_token['oauth_token'])\n if isinstance(webbrowser.get(), webbrowser.GenericBrowser):\n print(\"Go to %s\" % pin_url)\n else:\n webbrowser.open(pin_url)\n oauth_verifier = input('What is the PIN? ')\n access_token = oauthSess.fetch_access_token(ACCESS_TOKEN_URL, oauth_verifier)\n oauth_token = access_token['oauth_token']\n oauth_token_secret = access_token['oauth_token_secret']\n\n cp.set(base_url, 'oauth_token', oauth_token)\n cp.set(base_url, 'oauth_token_secret', oauth_token_secret)\n # save oauth token for later use\n cp.write(open(config_file, 'w'))\n print(f'Saving oauth tokens in {config_file} for later re-use')\n print()\n\n else:\n oauthSess = OAuth1Session(oauth_key, client_secret=oauth_secret,\n resource_owner_key=oauth_token, resource_owner_secret=oauth_token_secret)\n\n return oauthSess",
"def generate_config(self):\n\n # Change crypto-config.yaml and add organizations\n yaml = YAML()\n with open(os.path.join(self.config_path, \"crypto-config-template.yaml\"), \"r\") as crypto_config_file:\n config = yaml.load(crypto_config_file)\n\n config[\"OrdererOrgs\"][0][\"Specs\"] = []\n for orderer_index in range(1, self.num_validators + 1):\n orderer_host, _ = self.experiment.get_peer_ip_port_by_id(orderer_index)\n config[\"OrdererOrgs\"][0][\"Specs\"].append({\n \"Hostname\": \"orderer%d\" % orderer_index,\n \"SANS\": [orderer_host]\n })\n\n config[\"PeerOrgs\"] = []\n for organization_index in range(1, self.num_validators + 1):\n organization_host, _ = self.experiment.get_peer_ip_port_by_id(organization_index)\n organization_config = {\n \"Name\": \"Org%d\" % organization_index,\n \"Domain\": \"org%d.example.com\" % organization_index,\n \"EnableNodeOUs\": True,\n \"Template\": {\n \"Count\": 1,\n \"SANS\": [organization_host]\n },\n \"Users\": {\n \"Count\": 1\n }\n }\n config[\"PeerOrgs\"].append(organization_config)\n\n with open(os.path.join(self.config_path, \"crypto-config.yaml\"), \"w\") as crypto_config_file:\n yaml.dump(config, crypto_config_file)\n\n # Change configtx.yaml\n yaml = YAML()\n with open(os.path.join(self.config_path, \"configtx-template.yaml\"), \"r\") as configtx_file:\n config = yaml.load(configtx_file)\n\n config[\"Profiles\"][\"TwoOrgsChannel\"][\"Application\"][\"Organizations\"] = []\n config[\"Profiles\"][\"SampleMultiNodeEtcdRaft\"][\"Consortiums\"][\"SampleConsortium\"][\"Organizations\"] = []\n\n for organization_index in range(1, self.num_validators + 1):\n org_admin = \"Org%dMSP.admin\" % organization_index\n org_peer = \"Org%dMSP.peer\" % organization_index\n org_client = \"Org%dMSP.client\" % organization_index\n organization_host, _ = self.experiment.get_peer_ip_port_by_id(organization_index)\n\n organization_config = {\n \"Name\": \"Org%dMSP\" % organization_index,\n \"ID\": \"Org%dMSP\" % organization_index,\n \"MSPDir\": \"crypto-config/peerOrganizations/org%d.example.com/msp\" % organization_index,\n \"Policies\": {\n \"Readers\": {\n \"Type\": \"Signature\",\n \"Rule\": \"OR('%s', '%s', '%s')\" % (org_admin, org_peer, org_client)\n },\n \"Writers\": {\n \"Type\": \"Signature\",\n \"Rule\": \"OR('%s', '%s')\" % (org_admin, org_peer)\n },\n \"Admins\": {\n \"Type\": \"Signature\",\n \"Rule\": \"OR('%s')\" % (org_admin)\n }\n },\n \"AnchorPeers\": [{\n \"Host\": organization_host,\n \"Port\": 7000 + organization_index\n }]\n }\n\n commented_map = CommentedMap(organization_config)\n commented_map.yaml_set_anchor(\"Org%d\" % organization_index, always_dump=True)\n config[\"Organizations\"].append(commented_map)\n config[\"Profiles\"][\"TwoOrgsChannel\"][\"Application\"][\"Organizations\"].append(commented_map)\n config[\"Profiles\"][\"SampleMultiNodeEtcdRaft\"][\"Consortiums\"][\"SampleConsortium\"][\"Organizations\"]\\\n .append(commented_map)\n\n config[\"Profiles\"][\"SampleMultiNodeEtcdRaft\"][\"Orderer\"][\"EtcdRaft\"][\"Consenters\"] = []\n config[\"Profiles\"][\"SampleMultiNodeEtcdRaft\"][\"Orderer\"][\"Addresses\"] = []\n\n for organization_index in range(1, self.num_validators + 1):\n organization_host, _ = self.experiment.get_peer_ip_port_by_id(organization_index)\n consenter_port = 7000 + organization_index\n consenter_info = {\n \"Host\": organization_host,\n \"Port\": consenter_port,\n \"ClientTLSCert\": \"crypto-config/ordererOrganizations/example.com/orderers/\"\n \"orderer%d.example.com/tls/server.crt\" % 
organization_index,\n \"ServerTLSCert\": \"crypto-config/ordererOrganizations/example.com/orderers/\"\n \"orderer%d.example.com/tls/server.crt\" % organization_index\n }\n config[\"Profiles\"][\"SampleMultiNodeEtcdRaft\"][\"Orderer\"][\"EtcdRaft\"][\"Consenters\"].append(consenter_info)\n config[\"Profiles\"][\"SampleMultiNodeEtcdRaft\"][\"Orderer\"][\"Addresses\"].append(\n \"%s:%d\" % (organization_host, consenter_port))\n\n with open(os.path.join(self.config_path, \"configtx.yaml\"), \"w\") as configtx_file:\n round_trip_dump(config, configtx_file, Dumper=RoundTripDumper)",
"def build_routes(config):\r\n\r\n config.add_route(\"home\", \"/\")\r\n config.add_route(\"dashboard\", \"/dashboard\")\r\n\r\n # Add routes for the combo loader to match up to static file requests.\r\n config.add_route('convoy', '/combo')\r\n\r\n JS_FILES = config.get_settings()['app_root'] + '/bookie/static/js/build'\r\n application = combo_app(JS_FILES)\r\n config.add_view(\r\n wsgiapp2(application),\r\n route_name='convoy')\r\n\r\n # auth routes\r\n config.add_route(\"login\", \"login\")\r\n config.add_route(\"logout\", \"logout\")\r\n config.add_route(\"reset\", \"{username}/reset/{reset_key}\")\r\n config.add_route(\"signup\", \"signup\")\r\n config.add_route(\"signup_process\", \"signup_process\")\r\n\r\n # celery routes\r\n config.add_route(\"celery_hourly_stats\", \"jobhourly\")\r\n\r\n # bmark routes\r\n config.add_route(\"bmark_recent\", \"recent\")\r\n config.add_route(\"bmark_recent_tags\", \"recent/*tags\")\r\n\r\n config.add_route(\"bmark_recent_rss\", \"rss\")\r\n config.add_route(\"bmark_recent_rss_tags\", \"rss/*tags\")\r\n\r\n config.add_route(\"bmark_readable\", \"bmark/readable/{hash_id}\")\r\n\r\n # user based bmark routes\r\n config.add_route(\"user_bmark_recent\", \"{username}/recent\")\r\n config.add_route(\"user_bmark_recent_tags\", \"{username}/recent/*tags\")\r\n\r\n config.add_route(\"user_bmark_rss\", \"{username}/rss\")\r\n config.add_route(\"user_bmark_rss_tags\", \"{username}/rss/*tags\")\r\n\r\n config.add_route(\"user_bmark_edit\", \"{username}/edit/{hash_id}\")\r\n config.add_route(\"user_bmark_edit_error\",\r\n \"{username}/edit_error/{hash_id}\")\r\n config.add_route(\"user_bmark_new\", \"{username}/new\")\r\n config.add_route(\"user_bmark_new_error\", \"{username}/new_error\")\r\n config.add_route(\r\n \"user_delete_all_bookmarks\",\r\n \"{username}/account/delete_all_bookmarks\")\r\n\r\n # config.add_route(\"bmark_delete\", \"/bmark/delete\")\r\n # config.add_route(\"bmark_confirm_delete\", \"/bmark/confirm/delete/{bid}\")\r\n\r\n # tag related routes\r\n config.add_route(\"tag_list\", \"tags\")\r\n config.add_route(\"tag_bmarks\", \"tags/*tags\")\r\n\r\n # user tag related\r\n config.add_route(\"user_tag_list\", \"{username}/tags\")\r\n config.add_route(\"user_tag_bmarks\", \"{username}/tags/*tags\")\r\n\r\n config.add_route(\"user_import\", \"{username}/import\")\r\n config.add_route(\"search\", \"search\")\r\n config.add_route(\"user_search\", \"{username}/search\")\r\n\r\n config.add_route(\"search_results\", \"results\")\r\n config.add_route(\"user_search_results\", \"{username}/results\")\r\n\r\n # matches based on the header\r\n # HTTP_X_REQUESTED_WITH\r\n # ajax versions are used in the mobile search interface\r\n config.add_route(\"search_results_ajax\", \"results/*terms\", xhr=True)\r\n config.add_route(\"search_results_rest\", \"results/*terms\")\r\n config.add_route(\"user_search_results_ajax\",\r\n \"{username}/results*terms\",\r\n xhr=True)\r\n config.add_route(\"user_search_results_rest\", \"{username}/results*terms\")\r\n\r\n config.add_route(\"redirect\", \"redirect/{hash_id}\")\r\n config.add_route(\"user_redirect\", \"{username}/redirect/{hash_id}\")\r\n\r\n config.add_route(\"user_account\", \"{username}/account\")\r\n config.add_route(\"user_export\", \"{username}/export\")\r\n config.add_route(\"user_stats\", \"{username}/stats\")\r\n\r\n #\r\n # NEW API\r\n #\r\n\r\n # stats\r\n config.add_route('api_bookmark_stats',\r\n '/api/v1/stats/bookmarks',\r\n request_method='GET')\r\n config.add_route('api_user_stats',\r\n 
'/api/v1/stats/users',\r\n request_method='GET')\r\n\r\n # ping checks\r\n config.add_route('api_ping',\r\n '/api/v1/{username}/ping',\r\n request_method='GET')\r\n config.add_route('api_ping_missing_user',\r\n '/api/v1/ping',\r\n request_method='GET')\r\n config.add_route('api_ping_missing_api',\r\n '/ping',\r\n request_method='GET')\r\n\r\n # auth related\r\n config.add_route(\"api_user_account\",\r\n \"/api/v1/{username}/account\",\r\n request_method=\"GET\")\r\n config.add_route(\"api_user_account_update\",\r\n \"/api/v1/{username}/account\",\r\n request_method=\"POST\")\r\n config.add_route(\"api_user_api_key\",\r\n \"/api/v1/{username}/api_key\",\r\n request_method=\"GET\")\r\n config.add_route(\"api_reset_api_key\",\r\n \"/api/v1/{username}/api_key\",\r\n request_method=\"POST\")\r\n config.add_route(\"api_user_reset_password\",\r\n \"/api/v1/{username}/password\",\r\n request_method=\"POST\")\r\n\r\n config.add_route(\"api_user_suspend_remove\",\r\n \"api/v1/suspend\",\r\n request_method=\"DELETE\")\r\n config.add_route(\"api_user_suspend\",\r\n \"api/v1/suspend\",\r\n request_method=\"POST\")\r\n config.add_route(\"api_user_invite\",\r\n \"api/v1/{username}/invite\",\r\n request_method=\"POST\")\r\n\r\n # many bookmark api calls\r\n config.add_route(\"api_bmarks_export\", \"api/v1/{username}/bmarks/export\")\r\n\r\n # we have to search before we hit the bmarks keys so that it doesn't think\r\n # the tag is \"search\"\r\n config.add_route(\"api_bmark_search\", \"api/v1/bmarks/search/*terms\")\r\n config.add_route(\"api_bmark_search_user\",\r\n \"/api/v1/{username}/bmarks/search/*terms\")\r\n\r\n config.add_route('api_bmarks', 'api/v1/bmarks')\r\n config.add_route('api_bmarks_tags', 'api/v1/bmarks/*tags')\r\n config.add_route('api_bmarks_user', 'api/v1/{username}/bmarks')\r\n config.add_route('api_bmarks_user_tags', 'api/v1/{username}/bmarks/*tags')\r\n config.add_route('api_count_bmarks_user',\r\n 'api/v1/{username}/stats/bmarkcount')\r\n\r\n # user bookmark api calls\r\n config.add_route(\"api_bmark_add\",\r\n \"/api/v1/{username}/bmark\",\r\n request_method=\"POST\")\r\n config.add_route(\"api_bmark_update\",\r\n \"/api/v1/{username}/bmark/{hash_id}\",\r\n request_method=\"POST\")\r\n config.add_route(\"api_extension_sync\", \"/api/v1/{username}/extension/sync\")\r\n\r\n config.add_route(\"api_bmark_hash\",\r\n \"/api/v1/{username}/bmark/{hash_id}\",\r\n request_method=\"GET\")\r\n config.add_route(\"api_bmark_remove\",\r\n \"/api/v1/{username}/bmark/{hash_id}\",\r\n request_method=\"DELETE\")\r\n\r\n config.add_route(\"api_tag_complete_user\",\r\n \"/api/v1/{username}/tags/complete\")\r\n config.add_route(\"api_tag_complete\",\r\n \"/api/v1/tags/complete\")\r\n\r\n # admin api calls\r\n config.add_route(\"api_admin_readable_todo\", \"/api/v1/a/readable/todo\")\r\n config.add_route(\r\n \"api_admin_readable_reindex\",\r\n \"/api/v1/a/readable/reindex\")\r\n config.add_route(\r\n \"api_admin_accounts_inactive\",\r\n \"/api/v1/a/accounts/inactive\")\r\n config.add_route(\r\n \"api_admin_accounts_invites_add\",\r\n \"/api/v1/a/accounts/invites/{username}/{count}\",\r\n request_method=\"POST\")\r\n config.add_route(\r\n \"api_admin_accounts_invites\",\r\n \"/api/v1/a/accounts/invites\",\r\n request_method=\"GET\")\r\n config.add_route(\r\n \"api_admin_imports_list\",\r\n \"/api/v1/a/imports/list\",\r\n request_method=\"GET\")\r\n config.add_route(\r\n \"api_admin_imports_reset\",\r\n \"/api/v1/a/imports/reset/{id}\",\r\n request_method=\"POST\")\r\n\r\n config.add_route(\r\n 
\"api_admin_users_list\",\r\n \"/api/v1/a/users/list\",\r\n request_method=\"GET\")\r\n config.add_route(\r\n \"api_admin_new_user\",\r\n \"/api/v1/a/users/add\",\r\n request_method=\"POST\")\r\n config.add_route(\r\n \"api_admin_del_user\",\r\n \"/api/v1/a/users/delete/{username}\",\r\n request_method=\"DELETE\")\r\n config.add_route(\r\n \"api_admin_bmark_remove\",\r\n \"/api/v1/a/bmark/{username}/{hash_id}\",\r\n request_method=\"DELETE\")\r\n\r\n config.add_route(\r\n \"api_admin_applog\",\r\n \"/api/v1/a/applog/list\",\r\n request_method=\"GET\")\r\n\r\n config.add_route(\r\n \"api_admin_non_activated\",\r\n \"/api/v1/a/nonactivated\",\r\n request_method=\"GET\")\r\n\r\n config.add_route(\r\n \"api_admin_delete_non_activated\",\r\n \"/api/v1/a/nonactivated\",\r\n request_method=\"DELETE\")\r\n\r\n # these are single word matching, they must be after /recent /popular etc\r\n config.add_route(\"user_home\", \"{username}\")\r\n\r\n return config",
"def init_configs(self):\n\n # get current location\n self.script_dir = os.path.dirname(__file__)\n\n # load configuration file\n with open(os.path.join(self.script_dir, \"config.json\")) as f:\n self.configs = json.load(f)\n \n # load some configs as attributes\n self.resource_folder = os.path.join(self.script_dir, self.configs[\"resource_path\"], self.resource_type, self.language)\n self.pre_processed_folder = os.path.join(self.resource_folder, self.configs[\"pre_processed_path\"])\n self.results_folder = os.path.join(self.resource_folder, self.configs[\"results_path\"])\n self.chunk_size = self.configs[\"resources\"][self.resource_type][\"chunk_size\"]",
"def configs(self, request, *args, **kwargs):\n response = self.retrieve(request, *args, **kwargs)\n response.data = response.data['configures']\n return response",
"def create_app(config_name):\n\n app = Flask(__name__)\n api = Api(app)\n CORS(app)\n\n app.config.from_object(config.configurations[config_name])\n \"\"\"This ensures that the urls /login and /login/ are recognized as same\n without considering the trailing slash \"\"\"\n app.url_map.strict_slashes = False\n\n with app.app_context():\n from app.resources.products import MenuResource\n from app.resources.orders import OrderResource\n from app.resources.addresses import AddressResource\n from app.resources.users import LoginResource, SignUpResource\n api.add_resource(MenuResource, \"/api/v1/menu\", \"/api/v1/menu/<int:product_id>\")\n api.add_resource(OrderResource, \"/api/v1/orders\",\n \"/api/v1/orders/<int:order_id>\")\n api.add_resource(AddressResource, \"/api/v1/addresses\",\n \"/api/v1/addresses/<int:address_id>\")\n api.add_resource(LoginResource, \"/api/v1/auth/login\")\n api.add_resource(SignUpResource, \"/api/v1/auth/signup\")\n\n @app.errorhandler(404)\n def error_404(e):\n return jsonify({\"code\": \"404\", \"message\": \"Not found\"}), 200\n\n @app.errorhandler(500)\n def error_500(e):\n return jsonify(\n {\"code\": \"503\", \"message\": \"We have some trouble\"\n \"processing your request\"\n \" please try again later\"}), 500\n\n @app.errorhandler(405)\n def error_405(e):\n return jsonify({\"code\": \"405\", \"message\": \"We dont allow\"\n \" the request method\",\n \"ok\": False}), 200\n\n @app.route(\"/\")\n def home():\n return render_template(\"index.html\")\n\n return app",
"def __init__(self, auth):\n super(Socrata, self).__init__(auth)\n self.views = Views(auth)\n self.sources = Sources(auth)\n self.configs = Configs(auth)"
] | [
"0.5988237",
"0.59526646",
"0.5944135",
"0.58898485",
"0.58617234",
"0.57969236",
"0.5760492",
"0.5743792",
"0.57230693",
"0.5703035",
"0.5666833",
"0.56545556",
"0.56007504",
"0.5600133",
"0.5570918",
"0.55624896",
"0.5508373",
"0.5478721",
"0.5472452",
"0.54713786",
"0.54707664",
"0.5451364",
"0.54487616",
"0.54429924",
"0.5436186",
"0.54182506",
"0.54077214",
"0.5393685",
"0.5393182",
"0.53864574"
] | 0.7443166 | 0 |
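The positive document above bundles the stored config with a "step 1" OAuth authorization URL (its oauth.getOauthFlow().step1_get_authorize_url() call matches the oauth2client OAuth2WebServerFlow API). The sketch below rebuilds just the URL side with the standard library; the endpoint, scope, and redirect URI follow the generic OAuth 2.0 authorization-code flow and are placeholders, not the project's actual values.

```python
from urllib.parse import urlencode

def make_oauth_configuration_resources(config, client_id, redirect_uri):
    # Standard OAuth 2.0 authorization-code "step 1" URL; the endpoint
    # below is Google's classic authorize URL and the scope is a guess.
    params = {
        "response_type": "code",
        "client_id": client_id,
        "redirect_uri": redirect_uri,
        "scope": "email",
    }
    oauth_url = "https://accounts.google.com/o/oauth2/auth?" + urlencode(params)
    return {"config": dict(config), "oauth_url": oauth_url}

resources = make_oauth_configuration_resources(
    {"id": 0}, "YOUR_CLIENT_ID", "urn:ietf:wg:oauth:2.0:oob")
print(resources["oauth_url"])
```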
Determine the language prefix from the Accept-Language request header. | def determine_language_prefix():
# TODO(eholder): Figure out a more appropriate way to map the header into
# our set of prefixes. Since I don't know what those prefixes are yet, this
# is intentionally very generic. I also need to decide if this should just be
# done once as part of the login flow rather than checking every request.
# Checking every request makes this easier to test and change though in the
# meantime.
languages_string = request.headers.get('Accept-Language')
# If there is no header, use the default.
if languages_string is None:
flask.session['language_prefix'] = DEFAULT_LANGUAGE_PREFIX
return
languages = languages_string.split(',')
if languages[0] in ACCEPTABLE_LANGUAGE_PREFIXES:
flask.session['language_prefix'] = languages[0]
return
language_sections = languages[0].split(';')
if language_sections[0] in ACCEPTABLE_LANGUAGE_PREFIXES:
flask.session['language_prefix'] = language_sections[0]
return
language_subsections = language_sections[0].split('-')
if language_subsections[0] in ACCEPTABLE_LANGUAGE_PREFIXES:
flask.session['language_prefix'] = language_subsections[0]
return
flask.session['language_prefix'] = DEFAULT_LANGUAGE_PREFIX | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_full_language(self, language):\n if language:\n language = pycountry.languages.get(alpha_2=language)\n if language:\n language = language.name\n return language.title()",
"def language_name(value):\n return pycountry.languages.get(alpha_2=value)",
"def getWikiLanguageName():\r\n return \"wikidpad_mini_1_0\"",
"def get_language(lang_code) -> str:\n langs = defaultdict(lambda: \"en\", {\"ru\": \"ru\"})\n return langs[lang_code.split(\"-\")[0]] if lang_code else \"en\"",
"def get_parent_language(lang: str) -> str:\n\tis_language_variant = \"-\" in lang\n\tif is_language_variant:\n\t\treturn lang[: lang.index(\"-\")]",
"def get_language_name(self):\n return self.language_name",
"def get_locale_from_accept_header(request):\n header = request.headers.get(\"Accept-Language\", '')\n parsed = parse_accept_language_header(header)\n if parsed is None:\n return None\n locale_list_sorted_by_q = sorted(parsed.iterkeys(), reverse=True)\n locale = Locale.negotiate(locale_list_sorted_by_q, config.locales, sep='_')\n return str(locale)",
"def get_language_name(iso_code):\n if iso_code not in LANGUAGES_BY_CODE:\n try:\n lang = iso639.languages.get(part3=iso_code)\n except KeyError:\n lang = None\n\n if lang:\n # we only show up to the first semi or paren\n lang = re.split(r\";|\\(\", lang.name)[0].strip()\n\n LANGUAGES_BY_CODE[iso_code] = lang\n\n return LANGUAGES_BY_CODE[iso_code]",
"def get_locale_name(code):\n language_map = dict(django.conf.global_settings.LANGUAGES)\n\n # check for exact match\n if code in language_map:\n return language_map[code]\n\n # try for the language, fall back to just using the code\n language = code.split(\"-\")[0]\n return language_map.get(language, code)",
"def language_code(self) -> str:\n return pulumi.get(self, \"language_code\")",
"def get_language(fn):\n # FIXME - this expects the fn to be '.../XX/LC_MESSAGES/messages.po'\n return fn.split(os.sep)[-3]",
"def get_locale():\n return \"he\"",
"def GetLanguageName(*args, **kwargs):\n return _gdi_.Locale_GetLanguageName(*args, **kwargs)",
"def Locale_GetLanguageName(*args, **kwargs):\n return _gdi_.Locale_GetLanguageName(*args, **kwargs)",
"def get_locale():\n if (session.get(\"language\") is not None):\n return session.get('language')['charcode']\n return request.accept_languages.best_match(app.config['LANGUAGES'].keys())",
"def get_locale():\n localLang = request.args.get('locale')\n supportLang = app.config['LANGUAGES']\n if localLang in supportLang:\n return localLang\n userId = request.args.get('login_as')\n if userId:\n localLang = users[int(userId)]['locale']\n if localLang in supportLang:\n return localLang\n localLang = request.headers.get('locale')\n if localLang in supportLang:\n return localLang\n return request.accept_languages.best_match(app.config['LANGUAGES'])",
"def get_lang(self):\n\n path = self.get_lang_path()\n for language in self.languages:\n if language in path:\n return language",
"def language(self) -> str:\n if self.language_code in CODE_TO_LANGUAGE:\n return CODE_TO_LANGUAGE[self.language_code]\n\n return self.language_code",
"def get_native_name(locale):\n\n parts = parse_langcode(locale)\n if \"language\" not in parts:\n raise InvalidLocaleSpec(\"'%s' is not a valid locale\" % locale)\n\n name = langtable.language_name(languageId=parts[\"language\"],\n territoryId=parts.get(\"territory\", \"\"),\n scriptId=parts.get(\"script\", \"\"),\n languageIdQuery=parts[\"language\"],\n territoryIdQuery=parts.get(\"territory\", \"\"),\n scriptIdQuery=parts.get(\"script\", \"\"))\n\n return upcase_first_letter(name)",
"def language_version(language):\n return _PREFERRED_VERSIONS[language]",
"def validate_language(language):\n\n try:\n lang_code = language_dict[language]\n except KeyError:\n lang_code = None\n return lang_code",
"def __expandLanguage(self, language):\n\n # Priority Chain:\n # de_DE => de => C (default language) => code\n\n all = [language]\n if \"_\" in language:\n all.append(language[:language.index(\"_\")])\n all.append(\"C\")\n\n return all",
"def to_language(arg: str) -> Tuple[Union[str, None], str]: \n if (low:= arg.lower()) in LANGUAGES:\n return arg\n else:\n return LANGCODES.get(low, None)",
"def get_language_of_horizon_url(self) -> str:\n if 'horizons-mag' in self.url_h:\n self.language = 'en'\n elif 'horizonte-magazin' in self.url_h:\n self.language = 'de'\n elif 'revue-horizons' in self.url_h:\n self.language = 'fr'",
"def get_xpath_english_lang_label(self) -> str:\n\n return self.__xpath_english_lang_label",
"def _getLang(self, language):\n if language == None:\n language = self.getDefaultLanguage()\n\n return language",
"def get_language(self):\n return self.lang",
"def get_word(key: str, language: str):\n if key not in word_keys:\n return \"LOCALIZATION KEY {} NOT FOUND FOR LANGUAGE {}\".format(key, language)\n words = word_keys[key]\n\n # If the word doesn't exist, just show word in English\n if language not in words or words[language] == \"\":\n return words[EN]\n else:\n return words[language]",
"def language_code(self):\n return self._language_code",
"def get_language_name(self, locale: Locale | str | None = None) -> str | None:\n if locale is None:\n locale = self\n locale = Locale.parse(locale)\n return locale.languages.get(self.language)"
] | [
"0.6323368",
"0.6228192",
"0.6204164",
"0.61662155",
"0.61253226",
"0.61233664",
"0.6115906",
"0.6019953",
"0.60078716",
"0.59624434",
"0.58665967",
"0.58177227",
"0.57713544",
"0.57614154",
"0.5757509",
"0.57491803",
"0.5739453",
"0.57347256",
"0.5728561",
"0.5721372",
"0.57166797",
"0.57152325",
"0.5696862",
"0.5695589",
"0.5669754",
"0.5662105",
"0.5646542",
"0.56333417",
"0.56302714",
"0.56285924"
] | 0.7839627 | 0 |
API Wrapper object which returns stats for a specific hero | def get_heroes_stats(tag, hero, platform="pc", region="eu", mode="quickplay"):
try:
context = ssl._create_unverified_context()
hero_stats = json.load(
const.codec(
urlopen(const.URL + platform + "/" + region + "/" + tag + "/" + mode + "/hero/" + hero + "/", context=context)))
if "error" in hero_stats:
raise BattleTagNotFound(hero_stats['error'])
exit(1)
#
        if not hero_stats[hero]:
raise HeroNotFound("An error occurred when fetching stats:\nThis hero does not exist. Make sure you have input a valid hero name.")
exit(1)
#
result = h.Hero(
dc.get_dic_obj(hero_stats[hero], "Eliminations", "Elimination"),
dc.get_dic_obj(hero_stats[hero], "FinalBlows", "FinalBlow"),
dc.get_dic_obj(hero_stats[hero], "SoloKills", "SoloKill"),
dc.get_dic_obj(hero_stats[hero], "ShotsFired", "ShotFired"),
dc.get_dic_obj(hero_stats[hero], "ShotsHit", "ShotHit"),
dc.get_dic_obj(hero_stats[hero], "CriticalHits", "CriticalHit"),
dc.get_dic_obj(hero_stats[hero], "DamageDone"),
dc.get_dic_obj(hero_stats[hero], "ObjectiveKills", "ObjectiveKills"),
dc.get_dic_obj(hero_stats[hero], "Multikill", "Multikills"),
dc.get_dic_obj(hero_stats[hero], "CriticalHitsperMinute", "CriticalHitperMinute"),
dc.get_dic_obj(hero_stats[hero], "CriticalHitAccuracy"),
dc.get_dic_obj(hero_stats[hero], "EliminationsperLife", "EliminationperLife"),
dc.get_dic_obj(hero_stats[hero], "WeaponAccuracy"),
dc.get_dic_obj(hero_stats[hero], "TeleporterPadsDestroyed", "TeleporterPadDestroyed"),
dc.get_dic_obj(hero_stats[hero], "TurretsDestroyed", "TurretDestroyed"),
dc.get_dic_obj(hero_stats[hero], "SelfHealing"),
dc.get_dic_obj(hero_stats[hero], "Eliminations-MostinLife", "Elimination-MostinLife"),
dc.get_dic_obj(hero_stats[hero], "EliminationsperLife", "EliminationperLife"),
dc.get_dic_obj(hero_stats[hero], "DamageDone-MostinLife"),
dc.get_dic_obj(hero_stats[hero], "WeaponAccuracy-BestinGame"),
dc.get_dic_obj(hero_stats[hero], "KillStreak-Best"),
dc.get_dic_obj(hero_stats[hero], "DamageDone-MostinGame"),
dc.get_dic_obj(hero_stats[hero], "Eliminations-MostinGame", "Elimination-MostinGame"),
dc.get_dic_obj(hero_stats[hero], "FinalBlows-MostinGame", "FinalBlow-MostinGame"),
dc.get_dic_obj(hero_stats[hero], "ObjectiveKills-MostinGame", "ObjectiveKill-MostinGame"),
dc.get_dic_obj(hero_stats[hero], "ObjectiveTime-MostinGame"),
dc.get_dic_obj(hero_stats[hero], "SoloKills-MostinGame", "SoloKill-MostinGame"),
dc.get_dic_obj(hero_stats[hero], "CriticalHits-MostinGame", "CriticalHit-MostinGame"),
dc.get_dic_obj(hero_stats[hero], "CriticalHits-MostinLife", "CrtiticalHit-MostinLife"),
dc.get_dic_obj(hero_stats[hero], "SelfHealing-Average"),
dc.get_dic_obj(hero_stats[hero], "Deaths-Average", "Death-Average"),
dc.get_dic_obj(hero_stats[hero], "SoloKills-Average", "SoloKill-Average"),
dc.get_dic_obj(hero_stats[hero], "ObjectiveTime-Average"),
dc.get_dic_obj(hero_stats[hero], "ObjectiveKills-Average", "ObjectiveKill-Average"),
dc.get_dic_obj(hero_stats[hero], "FinalBlows-Average", "FinalBlow-Average"),
dc.get_dic_obj(hero_stats[hero], "Eliminations-Average", "Elimination-Average"),
dc.get_dic_obj(hero_stats[hero], "DamageDone-Average"),
dc.get_dic_obj(hero_stats[hero], "Deaths", "Death"),
dc.get_dic_obj(hero_stats[hero], "EnvironmentalDeaths", "EnvironmentalDeath"),
dc.get_dic_obj(hero_stats[hero], "Medals-Bronze", "Medal-Bronze"),
dc.get_dic_obj(hero_stats[hero], "Medals-Silver", "Medal-Silver"),
dc.get_dic_obj(hero_stats[hero], "Medals-Gold", "Medal-Gold"),
dc.get_dic_obj(hero_stats[hero], "Medals", "Medal"),
dc.get_dic_obj(hero_stats[hero], "Cards", "Card"),
dc.get_dic_obj(hero_stats[hero], "TimePlayed"),
dc.get_dic_obj(hero_stats[hero], "GamesWon", "GameWon"),
dc.get_dic_obj(hero_stats[hero], "ObjectiveTime"),
dc.get_dic_obj(hero_stats[hero], "TimeSpentOnFire"),
dc.get_dic_obj(hero_stats[hero], "Multikill-Best"),
)
return result
except urllib.error.URLError as e:
print("An error occurred when fetching stats\n" + str(e))
exit(1)
except Exception as e:
print("An error occurred:\n " + str(e))
exit(1) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_hero(self, uuid, hero):\n\n # I can't wait for case statements in python (3.10)\n if hero == Heroes.BULK:\n return Bulk(self.api_key, uuid)\n\n elif hero == Heroes.GENERAL_CLUCK:\n return GeneralCluck(self.api_key, uuid)\n\n elif hero == Heroes.CAKE_MONSTER:\n return CakeMonster(self.api_key, uuid)\n\n elif hero == Heroes.BOTMON:\n return Botmon(self.api_key, uuid)\n\n elif hero == Heroes.TINMAN:\n return Tinman(self.api_key, uuid)\n\n elif hero == Heroes.SERGEANT_SHIELD:\n return SergentShield(self.api_key, uuid)\n\n elif hero == Heroes.CRYOMANCER:\n return Cryomancer(self.api_key, uuid)\n\n elif hero == Heroes.SKULLFIRE:\n return Skullfire(self.api_key, uuid)\n\n elif hero == Heroes.SANIC:\n return Sanic(self.api_key, uuid)\n\n elif hero == Heroes.KARAKOT:\n return Karakot(self.api_key, uuid)\n\n elif hero == Heroes.PUG:\n return Pug(self.api_key, uuid)\n\n elif hero == Heroes.SPODERMAN:\n return Spooderman(self.api_key, uuid)\n\n elif hero == Heroes.MARAUDER:\n return Marauder(self.api_key, uuid)\n\n elif hero == Heroes.SHOOP:\n return Shoop(self.api_key, uuid)\n\n elif hero == Heroes.GREEN_HOOD:\n return GreenHood(self.api_key, uuid)\n\n elif hero == Heroes.VOID_CRAWLER:\n return VoidCrawler(self, self.api_key, uuid)\n\n else:\n return None",
"def get_heroes(self, **kwargs):\n url = self.__build_url(urls.GET_HEROES, language=self.language, **kwargs)\n req = self.executor(url)\n if self.logger:\n self.logger.info('URL: {0}'.format(url))\n if not self.__check_http_err(req.status_code):\n return response.build(req, url, self.raw_mode)",
"def get_all_heroes_stats(tag, platform=\"pc\", region=\"eu\", mode=\"quickplay\"):\n #\n try:\n context = ssl._create_unverified_context()\n all_heroes = json.load(\n const.codec(urlopen(const.URL + platform + \"/\" + region + \"/\" + tag + \"/\" + mode + \"/allHeroes/\", context=context)))\n #\n if \"error\" in all_heroes:\n raise BattleTagNotFound(all_heroes['error'])\n exit(1)\n #\n result = ah.AllHeroes(dc.get_dic_obj(all_heroes, \"MeleeFinalBlows\", \"MeleeFinalBlow\"),\n dc.get_dic_obj(all_heroes, \"SoloKills\", \"SoloKill\"),\n dc.get_dic_obj(all_heroes, \"ObjectiveKills\", \"ObjectiveKill\"),\n dc.get_dic_obj(all_heroes, \"FinalBlows\", \"FinalBlow\"),\n dc.get_dic_obj(all_heroes, \"DamageDone\"),\n dc.get_dic_obj(all_heroes, \"Eliminations\", \"Elimination\"),\n dc.get_dic_obj(all_heroes, \"EnvironmentalKills\", \"EnvironmentalKill\"),\n dc.get_dic_obj(all_heroes, \"Multikills\", \"Multikill\"),\n dc.get_dic_obj(all_heroes, \"HealingDone\"),\n dc.get_dic_obj(all_heroes, \"Eliminations-MostinGame\", \"Elimination-MostinGame\"),\n dc.get_dic_obj(all_heroes, \"FinalBlows-MostinGame\", \"FinalBlow-MostinGame\"),\n dc.get_dic_obj(all_heroes, \"DamageDone-MostinGame\"),\n dc.get_dic_obj(all_heroes, \"HealingDone-MostinGame\"),\n dc.get_dic_obj(all_heroes, \"DefensiveAssists-MostinGame\", \"DefensiveAssist-MostinGame\"),\n dc.get_dic_obj(all_heroes, \"OffensiveAssists-MostinGame\", \"OffensiveAssist-MostinGame\"),\n dc.get_dic_obj(all_heroes, \"ObjectiveKills-MostinGame\", \"ObjectiveKill-MostinGame\"),\n dc.get_dic_obj(all_heroes, \"ObjectiveTime-MostinGame\"),\n dc.get_dic_obj(all_heroes, \"Multikill-Best\"),\n dc.get_dic_obj(all_heroes, \"SoloKills-MostinGame\", \"SoloKill-MostinGame\"),\n dc.get_dic_obj(all_heroes, \"TimeSpentonFire-MostinGame\"),\n dc.get_dic_obj(all_heroes, \"MeleeFinalBlows-Average\", \"MeleeFinalBlow-Average\"),\n dc.get_dic_obj(all_heroes, \"TimeSpentonFire-Average\"),\n dc.get_dic_obj(all_heroes, \"SoloKills-Average\", \"SoloKill-Average\"),\n dc.get_dic_obj(all_heroes, \"ObjectiveTime-Average\"),\n dc.get_dic_obj(all_heroes, \"ObjectiveKills-Average\", \"ObjectiveKill-Average\"),\n dc.get_dic_obj(all_heroes, \"HealingDone-Average\"),\n dc.get_dic_obj(all_heroes, \"FinalBlows-Average\", \"FinalBlow-Average\"),\n dc.get_dic_obj(all_heroes, \"Deaths-Average\", \"Death-Average\"),\n dc.get_dic_obj(all_heroes, \"DamageDone-Average\"),\n dc.get_dic_obj(all_heroes, \"Eliminations-Average\", \"Elimination-Average\"),\n dc.get_dic_obj(all_heroes, \"Deaths\", \"Death\"),\n dc.get_dic_obj(all_heroes, \"EnvironmentalDeaths\", \"EnvironmentalDeath\"),\n dc.get_dic_obj(all_heroes, \"Cards\", \"Card\"),\n dc.get_dic_obj(all_heroes, \"Medals\", \"Medal\"),\n dc.get_dic_obj(all_heroes, \"Medals-Gold\", \"Medal-Gold\"),\n dc.get_dic_obj(all_heroes, \"Medals-Silver\", \"Medal-Silver\"),\n dc.get_dic_obj(all_heroes, \"Medals-Bronze\", \"Medal-Bronze\"),\n dc.get_dic_obj(all_heroes, \"GamesPlayed\", \"GamePlayed\"),\n dc.get_dic_obj(all_heroes, \"GamesWon\", \"GameWon\"),\n dc.get_dic_obj(all_heroes, \"TimeSpentonFire\"),\n dc.get_dic_obj(all_heroes, \"ObjectiveTime\"),\n dc.get_dic_obj(all_heroes, \"TimePlayed\"),\n dc.get_dic_obj(all_heroes, \"MeleeFinalBlows-MostinGame\", \"MeleeFinalBlow-MostinGame\"),\n dc.get_dic_obj(all_heroes, \"GamesTied\", \"GameTied\") if mode == \"competitive\" else None,\n dc.get_dic_obj(all_heroes, \"GamesLost\", \"GameLost\") if mode == \"competitive\" else None,\n dc.get_dic_obj(all_heroes, \"DefensiveAssists\", \"DefensiveAssist\"),\n 
dc.get_dic_obj(all_heroes, \"DefensiveAssists-Average\", \"DefensiveAssist-Average\"),\n dc.get_dic_obj(all_heroes, \"OffensiveAssists\", \"OffensiveAssist\"),\n dc.get_dic_obj(all_heroes, \"OffensiveAssists-Average\", \"OffensiveAssist-Average\")\n )\n return result\n except urllib.error.URLError as e:\n print(\"An error occurred when fetching stats\\n\" + str(e))\n exit(1)\n except Exception as e:\n print(\"An error occurred:\\n \" + str(e))\n exit(1)",
"def update():\r\n hero_ids = []\r\n with open('json/heroes.json', 'r') as heroes:\r\n data = heroes.read()\r\n obj = json.loads(data)\r\n for i in obj['data']['constants']['heroes']:\r\n hero_ids.append(i['id'])\r\n for i, hero in enumerate(hero_ids):\r\n url = f\"https://api.stratz.com/api/v1/Hero/{hero}?rank=8\"\r\n r1, r2, r3, r4, r5 = 0, 0, 0, 0, 0\r\n safe, mid, off, roam = 0, 0, 0, 0\r\n r = requests.get(url=url, headers={\"Authorization\": f\"Bearer {TOKEN}\"})\r\n r_obj = r.json()\r\n total_matches = r_obj['heroes'][0]['pickBan']['pick']['matchCount']\r\n for j in r_obj['heroes'][0]['heroLaneDetail']:\r\n if j['laneId'] == 1:\r\n safe = j['matchCount'] / total_matches\r\n elif j['laneId'] == 2:\r\n mid = j['matchCount'] / total_matches\r\n elif j['laneId'] == 3:\r\n off = j['matchCount'] / total_matches\r\n else:\r\n roam = j['matchCount']\r\n for k in r_obj['heroes'][0]['heroRoleDetail']:\r\n if k['roleId'] == 0:\r\n core = k['matchCount'] / total_matches\r\n elif k['roleId'] == 1:\r\n support = k['matchCount'] / total_matches\r\n # safe lane core/hard support\r\n r1 = safe * core\r\n r5 = safe * support\r\n # offlane core/soft support\r\n r3 = off * core\r\n r4 = off * support\r\n # midlane core/roamer\r\n r2 = mid * core\r\n r4 += (mid * support)\r\n obj['data']['constants']['heroes'][i]['roles'] = [r1, r2, r3, r4, r5]\r\n print(f\"Roles for hero {hero} added successfully!\")\r\n time.sleep(1)\r\n with open('json/heroes.json', 'w') as heroes:\r\n json.dump(obj, heroes)",
"def stats(self):\n\n for hero in self.heroes:\n print(\"{} has {} kills and {} deaths.\".format(hero.name, hero.kills, hero.deaths))",
"def query(self, hero, time):\n return self.heroes[hero].query(time)",
"def get_hero(root, _info, episode):\n if episode == 5:\n return luke # Luke is the hero of Episode V\n return artoo # Artoo is the hero otherwise",
"def get_hero_prestige(self, uuid, hero):\n\n return self.template(uuid, \"pg_\" + hero)",
"def get_hero_xp(self, uuid, hero):\n\n return self.template(uuid, \"xp_\" + hero)",
"def stats(self, **kwargs):\n return self.client.api.stats(self.id, **kwargs)",
"def getStats(self):\n if self.type != \"CREATURE\" and self.type != None:\n return\n self.stats = _xmlUrlToDict(serverString + \"/rest/creature/\" + self.id, float)",
"async def update_heroes(self):\n heroes = self.steam_api.get_heroes()\n\n with open(\"Dota/heroes.json\", 'w') as f:\n json.dump(heroes, f, ensure_ascii=True, indent=4)\n\n self.heroes = heroes",
"def get_hero_level(self, uuid, hero):\n\n return self.template(uuid, \"lastLevel_\" + hero)",
"def view_all_heroes(self):\n for hero in self.heroes:\n print(hero.name)",
"def get_stats(self, **kwargs):\n resp = self.get(_u.build_uri(\"stats\"), kwargs)\n return utils.handle_response(resp)",
"def get_supervisor_stats(hass):\n return hass.data.get(DATA_SUPERVISOR_STATS)",
"def find_hero(self, name):\n for hero in self.heroes:\n if hero.name == name:\n return hero\n return 0",
"def getPlayerAdvStat(self, stat, year = 2014):\r\n \r\n year_next = (year % 100) + 1\r\n season = str(year) + '-' + str(year_next)\r\n \r\n stat_call = stat.lower()\r\n stat_dict = {'touch':'Possessions', 'possession':'Possessions',\r\n 'speed':'SpeedDistance', 'distance':'SpeedDistance'}\r\n \r\n stat_url = 'http://stats.nba.com/stats/leaguedashptstats?College=&'\\\r\n 'Conference=&Country=&DateFrom=&DateTo=&Division=&'\\\r\n 'DraftPick=&DraftYear=&GameScope=&Height=&LastNGames=0&'\\\r\n 'LeagueID=00&Location=&Month=0&OpponentTeamID=0&Outcome=&'\\\r\n 'PORound=0&PerMode=PerGame&PlayerExperience=&PlayerOr'\\\r\n 'Team=Player&PlayerPosition=&PtMeasureType=' + \\\r\n stat_dict[stat_call] + '&Season=' + season + \\\r\n '&SeasonSegment=&SeasonType=Regular+Season&StarterBench=&'\\\r\n 'TeamID=0&VsConference=&VsDivision=&Weight='\r\n \r\n response = requests.get(stat_url)\r\n data = json.loads(response.text)\r\n \r\n headers = data['resultSets'][0]['headers']\r\n stat_data = data['resultSets'][0]['rowSet']\r\n advStat_df = pd.DataFrame(stat_data,columns=headers) \r\n \r\n return advStat_df",
"def get_hero_name(self, i):\n for hero in self.heroes:\n if hero['id'] == i:\n return hero['localized_name']\n return 'Unknown Hero'",
"async def view_stats(self, ctx):\n app_info = await self.bot.application_info()\n total_ram = (psutil.virtual_memory().total >> 30) + 1\n embed = discord.Embed(\n title=\"Bot Stats\",\n description=f\"Running on a dedicated server with {total_ram}GB RAM \\n provided by RandomGhost#0666.\",\n )\n\n embed.add_field(name=\"**__General Info__**\", inline=False, value=\"\\u200b\")\n embed.add_field(name=\"Latency\", value=f\"{self.bot.latency*1000:.03f}ms\")\n embed.add_field(name=\"Guild Count\", value=f\"{len(self.bot.guilds):,}\")\n embed.add_field(name=\"User Count\", value=f\"{len(self.bot.users):,}\")\n\n embed.add_field(name=\"**__Technical Info__**\", inline=False, value=\"\\u200b\")\n embed.add_field(name=\"System CPU Usage\", value=f\"{psutil.cpu_percent():.02f}%\")\n embed.add_field(name=\"System RAM Usage\", value=f\"{psutil.virtual_memory().used/1048576:.02f} MB\")\n embed.add_field(name=\"System Uptime\", value=f\"{timedelta(seconds=int(time.time() - psutil.boot_time()))}\")\n embed.add_field(name=\"Bot CPU Usage\", value=f\"{process.cpu_percent():.02f}%\")\n embed.add_field(name=\"Bot RAM Usage\", value=f\"{process.memory_info().rss / 1048576:.02f} MB\")\n embed.add_field(name=\"Bot Uptime\", value=f\"{timedelta(seconds=int(time.time() - process.create_time()))}\")\n\n embed.add_field(name=\"**__Links__**\", inline=False, value=\"\\u200b\")\n embed.add_field(name=\"Support Server\", value=\"[https://discord.swaglyrics.dev](https://discord.swaglyrics.dev)\")\n embed.add_field(name=\"Invite\", value=\"[https://invite.swaglyrics.dev](https://invite.swaglyrics.dev)\")\n embed.add_field(\n name=\"Source\",\n value=\"[https://swaglyrics.dev/SwagLyrics-Discord-Bot]\" \"(https://swaglyrics.dev/SwagLyrics-discord-bot)\",\n )\n\n embed.set_footer(\n text=f\"Made by {app_info.owner} • {self.bot.get_user(512708394994368548)}\",\n icon_url=[\n app_info.owner.avatar_url_as(size=128),\n self.bot.get_user(512708394994368548).avatar_url_as(size=128),\n ][getrandbits(1)],\n ) # randomize clash or flabbet avatar\n\n await ctx.send(embed=embed)",
"def get_hp():\n\n return character['HP']",
"def show_myhero(self):\n description = (self.name + ' Level is: ' + str(self.level) + ' Age is: ' + str(\n self.age) + ' Rank is: ' + self.rank + ' health is: ' + str(self.health)).title()\n print(description)",
"def get_character_health(character: dict):\r\n print(\"Your health is: %d\" % character['HP'])",
"def advancedStats():",
"async def stats(self, ctx: Message):\n\t\tawait self.open_account(ctx.author.id, ctx.author.username)\n\t\tuserid = ctx.author.id\n\t\tusers = await self.get_stats_data()\n\n\t\trancmds = users[str(userid)][\"rancmd\"]\n\t\txp = users[str(userid)][\"xp\"]\n\t\tmsgs = users[str(userid)][\"sentmsgs\"]\n\t\twhisperto = [ctx.author.id]\n\t\tawait self.send(message=f\"{ctx.author.mention} Here are your stats! • Ran {rancmds} DogeBoss commands • XP: {xp} • Sent {msgs} messages\", whisper=whisperto)",
"async def stats(ctx):\n pythonVersion = platform.python_version()\n dpyVersion = discord.__version__\n serverCount = len(bot.guilds)\n memberCount = len(set(bot.get_all_members()))\n\n embed = discord.Embed(\n title=f\"{bot.user.name} Stats\",\n description=\"\\uFEFF\",\n colour=ctx.author.colour,\n timestamp=ctx.message.created_at,\n )\n\n embed.add_field(name=\"Bot Version:\", value=\"0.0.1\")\n embed.add_field(name=\"Python Version:\", value=pythonVersion)\n embed.add_field(name=\"Discord.Py Version\", value=dpyVersion)\n embed.add_field(name=\"Total Guilds:\", value=serverCount)\n embed.add_field(name=\"Total Users:\", value=memberCount)\n embed.add_field(name=\"Bot Developers:\", value=\"<@271612318947868673>\")\n\n embed.set_footer(text=f\"Carpe Noctem | {bot.user.name}\")\n embed.set_author(name=bot.user.name, icon_url=bot.user.avatar_url)\n\n await ctx.send(embed=embed)",
"async def stats(self, ctx) -> None:\n stats = self.bot.numbers.statistics\n await ctx.send(\n \"\",\n embed=NumEmbed(\n title=\"Number Statistics\",\n colour=0x007E80,\n fields={\n \"Numbers Given\": stats[\"numbers_given\"],\n \"Even Numbers\": stats[\"evens\"],\n \"Odd Numbers\": stats[\"odds\"],\n\n \"Highest Number\": stats[\"highest_info\"],\n \"Lowest Positive\": stats[\"lowest_positive_info\"],\n \"Lowest Number\": stats[\"lowest_info\"],\n\n \"Sum of Numbers\": stats[\"sum\"],\n \"Mean\": stats[\"mean\"],\n \"Median\": stats[\"median\"],\n\n \"Below 500\": stats[\"below_500\"],\n \"Below 1000\": stats[\"below_1000\"],\n \"Below 2500\": stats[\"below_2500\"],\n },\n user=ctx.author,\n ),\n )",
"def get_health(self):\n return self.bot_client.send_command(_Command.GetHealth)",
"def fixture_player_stats_singel(self, fixture_id, player_id):\n fixture = load_match_data(f'https://footballapi.pulselive.com/football/stats/player/{player_id}?fixtures={fixture_id}')\n i = 0\n stats = {}\n if 'entity' in fixture:\n stats['info'] = fixture['entity']\n stats['info'].update({'f_id': fixture_id, \n 'seasonId':self.season_id,\n 'seasonLabel': self.season_label})\n else:\n print(f'Could not get info on: f_id:{fixture_id}, p_id{player_id}')\n if 'stats' in fixture:\n stats['stats'] = fixture['stats']\n stats['stats'].append({'id':fixture['entity']['id']})\n else:\n i += 1\n if stats:\n return stats",
"def show_myhero(self):\n description = (self.name + ' Level is: ' + str(self.level) + ' Age is: ' + str(\n self.age) + ' Rank is: ' + self.rank + ' health is: ' + str(self.health) + ' magic is: ' + str(self.__magic)).title()\n print(description)"
] | [
"0.7338335",
"0.62998796",
"0.60463583",
"0.5997762",
"0.58266133",
"0.58230054",
"0.5822899",
"0.5805955",
"0.57567394",
"0.5685973",
"0.56220925",
"0.55938345",
"0.55732405",
"0.55701035",
"0.5545963",
"0.5535635",
"0.55352396",
"0.5437361",
"0.54010344",
"0.53940934",
"0.538781",
"0.53848875",
"0.53634685",
"0.5351468",
"0.534988",
"0.53463626",
"0.5341485",
"0.5290724",
"0.5276363",
"0.52752864"
] | 0.72986597 | 1 |
A view to return the delivery and returns page | def delivery(request):
return render(request, 'contact/delivery.html') | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def view_delivery() -> str:\r\n #List with amount of bottles ready for delivery for each lsit\r\n delivery_amounts = []\r\n delivery_amounts.append(delivery_information[\"Organic Red Helles\"])\r\n delivery_amounts.append(delivery_information[\"Organic Pilsner\"])\r\n delivery_amounts.append(delivery_information[\"Organic Dunkel\"])\r\n return render_template(\"view_delivery.html\",\r\n delivery_displays=delivery_amounts)",
"def get(self, request, *args, **kwargs):\n\n # Access will be granted in Complete view if payment_id matches.\n payment_id = self.execute_payment()\n # Check if payment id belongs to a Catalog donation -> product_id is set\n donation = Donation.objects.confirm_by_reference(payment_id)\n\n flow_type = 'one_time'\n url = reverse('become_supporter_complete') + \\\n '?payment_id={}'.format(payment_id)\n if donation.product_id:\n flow_type ='product_support'\n url += '&flow_type={}&product_id={}'.format(flow_type, donation.product_id)\n if donation.sponsored_event_dedication:\n flow_type = 'event_sponsorship'\n url += '&flow_type={}&event_id={}'.format(flow_type, donation.sponsored_event_id)\n\n if flow_type == 'event_sponsorship':\n custom_send_receipt(receipt_type=flow_type,\n amount=donation.amount, user=donation.user,\n dedication=donation.sponsored_event_dedication,\n musician=donation.sponsored_event.leader_string(),\n event_date=donation.sponsored_event.get_date())\n else:\n custom_send_receipt(receipt_type='one_time',\n amount=donation.amount, user=donation.user)\n\n return redirect(url)",
"def delivery_page(cls, logger=None):\n if logger is None:\n logger = cls._logger\n\n database_connection = DatabaseConnection(f\"orders.csv\")\n view = database_connection.get_view()\n logger.log(view)\n\n while True:\n\n choice = input(\n \"Please choose: \"\n \"(1) refresh orders view, \"\n \"(2) next page, \"\n \"(3) previous page, \"\n \"(4) examine order, \"\n \"Enter empty to go back \"\n )\n if choice not in ('1', '2', '3', '4'):\n break\n\n if choice=='1':\n view = database_connection.get_view()\n logger.log(view)\n\n # next page\n elif choice=='2': \n database_connection.next_page()\n view = database_connection.get_view()\n logger.log(view)\n\n # previous page\n elif choice=='3':\n database_connection.prev_page()\n view = database_connection.get_view()\n logger.log(view)\n\n elif choice=='4':\n\n # get product_id\n while True:\n order_id = input(\"Enter the order id: \")\n try:\n order_id = int(order_id)\n except:\n logger.log(\"order id should be an integer\")\n break\n\n table = database_connection.table\n order = table.loc[(table['order_id']==order_id), \"order\"][0] # order_id should be unique\n logger.log(json.dumps(json.loads(order), indent=1)) # pretty logger.log the json\n\n\n else:\n break",
"def get_view():\n\n gateway = DS_CONFIG[\"gateway_account_id\"]\n gateway_ok = gateway and len(gateway) > 25\n\n return render_template(\n \"eg014_collect_payment.html\",\n title=\"Order form with payment\",\n source_file=path.basename(path.dirname(__file__)) + \"/controller.py\",\n source_url=DS_CONFIG[\"github_example_url\"] + path.basename(path.dirname(__file__)) + \"/controller.py\",\n documentation=DS_CONFIG[\"documentation\"] + eg,\n show_doc=DS_CONFIG[\"documentation\"],\n signer_name=DS_CONFIG[\"signer_name\"],\n signer_email=DS_CONFIG[\"signer_email\"],\n gateway_ok=gateway_ok\n )",
"def _deliver_test(self):\n\n gcp_cred_storage = StorageByKeyName(GcpCredentials, self.user.user_id(), 'credentials')\n gcp_creds = gcp_cred_storage.get()\n \n if not gcp_creds:\n return self.redirect(\"/printers/add\")\n \n account = Account.get_or_insert(self.user_bundle.user.user_id())\n printers = Printer.query(Printer.owner == account.key).fetch(1000)\n\n PrintJobEnqueuer.enqueue_to_printers(\n printers,\n self.request.get(\"deliver_title\"),\n self.request.get(\"deliver_url\")\n )\n\n self.template_values.update({\n \"deliver_title\": self.request.get(\"deliver_title\"),\n \"deliver_url\": self.request.get(\"deliver_url\"),\n \"printer_names\": [printer.display_name for printer in printers]\n })\n\n path = os.path.join(os.path.dirname(__file__), '../templates/admin_deliver.html')\n self.response.write(template.render(path, self.template_values))",
"def serve(self, request, *args, **kwargs):\n\n template = self.get_template(request)\n\n if request.method == 'POST':\n\n form = self.get_form(request.POST, page=self, user=request.user)\n\n if form.is_valid():\n self.process_form_submission(form)\n return HttpResponseRedirect(self.url + '?thank=you')\n\n else:\n\n thanks = request.GET.get('thank', False)\n if thanks:\n form = None\n template = self.get_landing_page_template(request)\n if self.thanks_page_title:\n self.title = self.thanks_page_title\n else:\n form = self.get_form(page=self, user=request.user)\n\n context = self.get_context(request)\n context['form'] = form\n if form:\n context['conditional_rules'] = json.dumps(form.conditional_rules)\n\n return render(\n request,\n template,\n context\n )",
"def get(self, request, *args, **kwargs):\n context = {\"token\": request.user.auth_token.key}\n return render(request, \"payment/landing_payment.html\", context=context)",
"def landing(request):\n videos = [\n {\n 'name': 'airbnb',\n 'path': 'FirefoxMarketplace-airbnb-BR-RC-SD1%20640'\n },\n {\n 'name': 'evernote',\n 'path': 'FirefoxMarketplace-Evernote_BR-RC-SD1%20640'\n },\n {\n 'name': 'uken',\n 'path': 'FirefoxMarketplace-uken-BR-RC-SD1%20640'\n },\n {\n 'name': 'soundcloud',\n 'path': 'FirefoxMarketplace-Soundcloud-BR-RC-SD1%20640'\n },\n {\n 'name': 'box',\n 'path': 'FirefoxMarketplace_box-BR-RC-SD1%20640'\n }\n ]\n\n form = DevNewsletterForm(request.LANG, request.POST or None)\n\n if request.method == 'POST' and form.is_valid():\n data = form.cleaned_data\n\n try:\n basket.subscribe(data['email'],\n 'app-dev',\n format=data['email_format'],\n source_url=settings.SITE_URL)\n messages.success(request, _('Thank you for subscribing!'))\n return redirect('ecosystem.landing')\n except basket.BasketException as e:\n log.error(\n 'Basket exception in ecosystem newsletter: %s' % e)\n messages.error(\n request, _('We apologize, but an error occurred in our '\n 'system. Please try again later.'))\n\n return render(request, 'ecosystem/landing.html',\n {'videos': videos, 'newsletter_form': form})",
"def get(self):\n self.render('view.html')",
"def order_confirmation(request):\n customer_name = 'Kai'\n order_number = '110-4158206-7371406'\n item_list = {'Filco Ninja Majestouch-2','Cherry','CM Storm QuickFire'}\n express_delivery = True\n # when using locals(), local variables' names must match those var in the template!\n #return render_to_response( 'order_confirmation.html', locals() )\n return render(request, 'order_confirmation.html', locals())",
"def post(self, request, *args, **kwargs):\n return self.render_to_response(self.get_context_data())",
"def index(request):\n\n return render(request, 'order/index.html')",
"def manage_delivery_times(request):\n try:\n delivery_time = DeliveryTime.objects.all()[0]\n url = reverse(\"lfs_manage_delivery_time\", kwargs={\"id\": delivery_time.id})\n except IndexError:\n url = reverse(\"lfs_manage_add_delivery_time\")\n\n return HttpResponseRedirect(url)",
"async def the_store_view(request): # pylint: disable=W0613\n from componentstore.view.component.the_store import view\n requester = request.headers.get('X-FORWARDED-FOR', None)\n print(\"Serving 'The Store' to\", requester)\n html = await view()\n return web.Response(body=html, content_type=\"text/html\", charset=\"utf-8\")",
"def my_dashboard_print(request):\n #Get the associated contact for our user\n user_con = request.user.contact\n qs_proj_assoc, qs_task_assoc = get_tiered_upcoming(user_con)\n\n #Get the projects associated with the user\n user_proj_table = table_assoc.ProjectAssocTable_Printable(qs_proj_assoc)\n #Get the tasks associated with the user\n user_task_table = table_assoc.TaskAssocTable_Printable(qs_task_assoc)\n\n # Render the HTML template index.html with the data in the context variable\n return render(\n request,\n 'my_dashboard_printable.html',\n context={\n 'user_con':user_con,\n 'user_proj_table':user_proj_table,\n 'user_task_table':user_task_table,\n },\n )",
"def get(self, request, *args, **kwargs):\n return render(request, self.template_name)",
"def get(self, request, *args, **kwargs):\n return render(request, self.template_name)",
"def get(self, request, *args, **kwargs):\n return render(request, self.template_name)",
"def get(self, request, *args, **kwargs):\n return render(request, self.template_name)",
"def get(self, request, *args, **kwargs):\n return render(request, self.template_name)",
"def pagemainred():\n return render_template('do_action.html')",
"def downloads(request):\n return render(request, \"downloads.html\")",
"def get(self, request):\n return render(request, self.template, self.context)",
"def post(self, request, *args, **kwargs):\n return render(request, self.template_name, self.get_context_data(**kwargs))",
"def landingpage (request):\n # Define views here\n context = {}\n return render(request, 'landingpage.html', context=context)",
"def message_page(request,page_name):\n return HttpResponse(\"This will be the {0} page.\".format(page_name))",
"def _send_return_page(self, shorthash):\n template_filename = self._get_config_template('return')\n if shorthash == '1337':\n messagetext = '<p>Hey, you are 1337!</p>'\n else:\n messagetext = ''\n text = read_template(\n template_filename,\n message=messagetext,\n title='%s - Short URL Result' % SERVER_NAME,\n header='new URL',\n path=shorthash,\n hostname=self.server.hostname)\n self._send_response(text, 200)",
"def get(self, request ):\n return render(request, \"main_display_cards.html\")",
"def partners(request):\n return render(request, 'ecosystem/partners.html', {'page': 'partners'})",
"def collect_payment():\n\n # 1. Get required arguments\n args = Eg014Controller.get_args()\n try:\n # 2. Call the worker method\n results = Eg014Controller.worker(args)\n except ApiException as err:\n return process_error(err)\n\n # 3. Render success response\n return render_template(\n \"example_done.html\",\n title=\"Envelope sent\",\n h1=\"Envelope sent\",\n message=f\"\"\"The envelope has been created and sent!<br/> Envelope ID {results[\"envelope_id\"]}.\"\"\"\n )"
] | [
"0.7374881",
"0.6889909",
"0.64783055",
"0.6372336",
"0.6370384",
"0.6247339",
"0.61653876",
"0.6148517",
"0.6122049",
"0.6092658",
"0.6068964",
"0.6024294",
"0.5941775",
"0.58365667",
"0.58189636",
"0.5810076",
"0.5810076",
"0.5810076",
"0.5810076",
"0.5810076",
"0.5793738",
"0.5770743",
"0.5751383",
"0.57344043",
"0.5717796",
"0.5713997",
"0.5711101",
"0.5689695",
"0.56670564",
"0.5666528"
] | 0.7642086 | 0 |
InvalidSegmentError should be thrown when the segment begin is greater than the segment end. | def test_validate_begin_greater_than_end():
with pytest.raises(InvalidSegmentError):
_validate([[1, 2], [5, 3]]) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_validate_begin_equals_end():\n with pytest.raises(InvalidSegmentError):\n _validate([[1, 2], [5, 5]])",
"def parse_and_validate_num_segs(segment_str):\n # try to parse numSegments\n num_segments = 0\n try:\n num_segments = int(segment_str)\n divs = math.log(num_segments, 2)\n if num_segments < 2:\n raise ValidationError(NUMSEG_ERR_SMALL_VAL)\n elif int(divs) != divs:\n raise ValidationError(NUMSEG_ERR_BAD_POW)\n except ValidationError as err:\n print USAGE_STR.format(sys.argv[0])\n print NUMSEG_ERR.format(err.args[0])\n sys.exit()\n except ValueError:\n print USAGE_STR.format(sys.argv[0])\n print NUMSEG_ERR.format(NUMSEG_ERR_BAD_PARSE)\n sys.exit()\n except BaseException as err:\n print USAGE_STR.format(sys.argv[0])\n print NUMSEG_ERR.format(\"Unexpected error\")\n print \"Error was:\\n\\t\", err\n sys.exit()\n return num_segments",
"def _invalid_section_error(self, section_name):\n msg = \"'{}' is not a subsection for the '{}' section.\".format(section_name, self._SECTION_NAME)\n raise ValueError(msg)",
"def validate(self):\n valid = (\n self.speaker != \"inter_segment_gap\"\n and self.text\n and self.text != \"ignore_time_segment_in_scoring\"\n and self.label in [\"<o,f0,male>\", \"<o,f0,female>\", \"<o,f0,mixed>\"]\n )\n\n try:\n self.start = clean_float(self.start)\n self.stop = clean_float(self.stop)\n valid = valid and float(self.start) < float(self.stop)\n except Exception as exc:\n valid = False\n print(exc)\n\n if not valid:\n LOGGER.error(\n \"\"\"Skipping segment due to validation error.\nPlease note that this invalidates WER calculations based on the entire file.\nSegment: %s\"\"\",\n json.dumps(self.__dict__),\n )\n\n if \"-\" in self.filename:\n self.filename = self.filename.replace(\"-\", \"_\")\n print(\"Please rename audio file to replace hyphens with underscores\")\n\n return valid",
"def is_span_valid(self)->bool:\n if self.get_start_offset() < 0 or self.get_end_offset() < 0:\n logger.error(\"Start and end of position of the fragment must be non-negative: %d, %d\"\n %(self.get_start_offset(), self.get_end_offset()))\n return False\n if self.get_start_offset() >= self.get_end_offset():\n logger.error(\"End position of the fragment must be greater than the starting one: start=%d, end=%d\"%(self.get_start_offset(), self.get_end_offset()))\n return False\n return True",
"def test_creating_a_new_segment(self):\n pass",
"def _validateVertex(self, v):\n if v < 0 or v >= self._V:\n raise Exception(\"vertex {} is not between 0 and {}\".format(v, (self._V-1)))",
"def test_small_invalid_bounds(self):\r\n upper_bound = 2\r\n lower_bound = 10\r\n n_value = 20\r\n ci_percent = 95\r\n\r\n with self.assertRaises(CIBoundInversionException):\r\n ci_to_sd(upper_bound, lower_bound, ci_percent, n_value)",
"def segment(self):\n try:\n safe_execute_command(self._prepare_segmentation_cmd(self.atlas_img_path, self.registered_atlas_img_path),\n self.segmentation_log_file, self.segmentation_error_file)\n except SafeExecuteCommandError as err:\n SegmentationError('Segmentation failed; {}'.format(err))",
"def _validate_interval(interval: Interval) -> None:\n origin, end = interval\n\n if end < origin:\n raise ValueError(f\"Interval [{origin}, {end}] is not a proper one.\") # pragma: no cover",
"def remove_segment(self):\n selected_segment = \\\n self.controller.shared_data.obj_track.selected_segment_idx\n\n if len(selected_segment) == 1:\n segment_idx = selected_segment[0]\n\n msg = 'Do you want to remove the selected segment?'\n proceed = tk.messagebox.askyesno(title='Remove segment',\n message=msg)\n\n if proceed:\n size = self.controller.shared_data.obj_track.remove_segment(\n segment_idx)\n\n if size > 0:\n plots.update_plots(\n self.controller.shared_data.obj_track,\n self.controller.shared_data.ax_track,\n self.controller.shared_data.ax_ele,\n self.controller.shared_data.ax_track_info,\n canvas=self.controller.shared_data.canvas)\n\n else:\n plots.initial_plots(\n self.controller.shared_data.ax_track,\n self.controller.shared_data.ax_ele,\n self.controller.shared_data.ax_track_info)\n\n tk.messagebox.showwarning(\n title='No segment',\n message='Last segment has been removed.')\n\n self.controller.shared_data.canvas.draw()\n\n elif len(selected_segment) > 1:\n messagebox.showerror('Warning',\n 'More than one segment is selected')\n elif len(selected_segment) == 0:\n messagebox.showerror('Warning',\n 'No segment is selected')",
"def test_deleting_a_segment(self):\n pass",
"def test_invalid_frag_size(self):\n p = (\n Ether(dst=self.src_if.local_mac, src=self.src_if.remote_mac)\n / IPv6(src=self.src_if.remote_ip6, dst=self.src_if.local_ip6)\n / UDP(sport=1234, dport=5678)\n / Raw()\n )\n self.extend_packet(p, 1000, self.padding)\n fragments = fragment_rfc8200(p, 1, 500)\n bad_fragment = fragments[0]\n self.extend_packet(bad_fragment, len(bad_fragment) + 5)\n self.pg_enable_capture()\n self.src_if.add_stream([bad_fragment])\n self.pg_start()\n pkts = self.src_if.get_capture(expected_count=1)\n icmp = pkts[0]\n self.assertIn(ICMPv6ParamProblem, icmp)\n self.assert_equal(icmp[ICMPv6ParamProblem].code, 0, \"ICMP code\")",
"def check_subseq_range(subseq_range):\n subseq_range_content = subseq_range.split(\"-\")\n if len(subseq_range_content) != 2:\n err_str = \"A subseq_range must have two arguments (start and stop)\"\n err_str += \" separated by a -\"\n raise ValueError(err_str)\n if int(subseq_range_content[0]) > int(subseq_range_content[1]):\n err_str = \"Start for a subseq_range must be lower than the stop\"\n raise ValueError(err_str)",
"def validateSegmentationArguments(input_file, p0, prior):\n\n # Check that input_file is sorted\n assert utilities.isSortedBEDObject(input_file), \"input file must be sorted\"\n # If prior has been provided, check that it is positive\n if prior:\n assert prior >= 0, \"--prior should be non-negative\"\n # If p0 has been provided, check that it is between 0 and 1\n if p0:\n assert 0 <= p0 <= 1, \"--p0 should be between 0 and 1, inclusive\"",
"def __validatePoint(self, point):\n # print point\n if point[0] > self.scn_width:\n raise ValueError('X coordinate: %d out of range.' % point[0])\n if point[1] > self.scn_height:\n raise ValueError('Y coordinate: %d out of range.' % point[1])\n return point",
"def test_getting_segments(self):\n pass",
"def test_uoctet_out_of_lower_range(self):\n self.failUnlessRaises(Exception, self.codec.encode_octet, -1)",
"def parse_error(self, message, exc_cls=VisualizerParseError):\n raise exc_cls(\"Error parsing %s '%s' (%s:%i): %s\" % \n (self.tag, self.ref, self.filename, self.lineno, message))",
"def segment_number(self):\n if hasattr(self, '_m_segment_number'):\n return self._m_segment_number if hasattr(self, '_m_segment_number') else None\n\n self._m_segment_number = self.segment_number_raw.value\n return self._m_segment_number if hasattr(self, '_m_segment_number') else None",
"def validate(self):\n if self._inc_begin is None:\n raise ValueError((\"TimeRange {self} missing begin point\")\n .format(self=self))\n if self._exc_end is None:\n raise ValueError((\"TimeRange {self} missing end point\")\n .format(self=self))",
"def delete_segment(self, name: str) -> None:\n self._status.check_authority_for_draft()\n\n delete_data: Dict[str, Any] = {\"segmentName\": name}\n delete_data.update(self._status.get_status_info())\n\n self._client.open_api_do(\"DELETE\", \"segments\", self.dataset_id, json=delete_data)",
"def _inrange(self, index):\n if len(index) != self.ndim:\n raise Exception('SparseN tensor has %d dimensions, and requires the same number of indices.'%self.ndim)\n for ii, ss in zip(index,self.shape):\n if ii < 0 or ii >= ss:\n raise Exception('Index is out of range: %d'%index)",
"def test_ushort_int_out_of_lower_range(self):\n self.failUnlessRaises(Exception, self.codec.encode_short, -1)",
"def test_octet_out_of_upper_range(self):\n self.failUnlessRaises(Exception, self.codec.encode_octet, 256)",
"def testSPFInvalidIPv4Range(self):\n spf_record = \"v=spf1 ip4:78.46.96.236/99 ~all\"\n domain = \"surftown.dk\"\n self.assertRaises(checkdmarc.SPFSyntaxError,\n checkdmarc.parse_spf_record, spf_record, domain)",
"def add_segment(self):\n last_seg = c.coords(self.segments[0].instance)\n x = last_seg[2] - SEG_SIZE\n y = last_seg[3] - SEG_SIZE\n self.segments.insert(0, Segment(x, y))",
"def _validate_index(self, index):\r\n\t\tvalid_index = index is int(index) and index >= 0 and index < self._size\r\n\t\tif not valid_index:\r\n\t\t\traise IndexError()",
"def error(self, message, token=None):\n raise ParseException(\n message,\n self.filename,\n line=self._line,\n line_number=self._line_number,\n token=token)",
"def get_segment_after(self, target_segment):\n index = self.segments.index(target_segment)\n last_index = len(self.segments) - 1\n if index == last_index:\n if self.loop:\n return self.segments[0] # reset to first segment\n else:\n # TODO this might be better off with an Exception\n return target_segment #return same thing\n return self.segments[index+1]"
] | [
"0.63354397",
"0.56412023",
"0.563701",
"0.53736293",
"0.5295461",
"0.52742565",
"0.5265137",
"0.52392036",
"0.5191654",
"0.5060422",
"0.5045406",
"0.5031676",
"0.49795532",
"0.49658692",
"0.49567848",
"0.49503455",
"0.4898707",
"0.4897851",
"0.489405",
"0.48883966",
"0.48655882",
"0.48570278",
"0.48351857",
"0.4829547",
"0.48238075",
"0.4819749",
"0.48054516",
"0.48046273",
"0.48031014",
"0.47983864"
] | 0.68655276 | 0 |
InvalidSegmentError should be thrown when the segment begin equals the segment end.
with pytest.raises(InvalidSegmentError):
_validate([[1, 2], [5, 5]]) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_validate_begin_greater_than_end():\n with pytest.raises(InvalidSegmentError):\n _validate([[1, 2], [5, 3]])",
"def _invalid_section_error(self, section_name):\n msg = \"'{}' is not a subsection for the '{}' section.\".format(section_name, self._SECTION_NAME)\n raise ValueError(msg)",
"def validate(self):\n valid = (\n self.speaker != \"inter_segment_gap\"\n and self.text\n and self.text != \"ignore_time_segment_in_scoring\"\n and self.label in [\"<o,f0,male>\", \"<o,f0,female>\", \"<o,f0,mixed>\"]\n )\n\n try:\n self.start = clean_float(self.start)\n self.stop = clean_float(self.stop)\n valid = valid and float(self.start) < float(self.stop)\n except Exception as exc:\n valid = False\n print(exc)\n\n if not valid:\n LOGGER.error(\n \"\"\"Skipping segment due to validation error.\nPlease note that this invalidates WER calculations based on the entire file.\nSegment: %s\"\"\",\n json.dumps(self.__dict__),\n )\n\n if \"-\" in self.filename:\n self.filename = self.filename.replace(\"-\", \"_\")\n print(\"Please rename audio file to replace hyphens with underscores\")\n\n return valid",
"def test_creating_a_new_segment(self):\n pass",
"def is_span_valid(self)->bool:\n if self.get_start_offset() < 0 or self.get_end_offset() < 0:\n logger.error(\"Start and end of position of the fragment must be non-negative: %d, %d\"\n %(self.get_start_offset(), self.get_end_offset()))\n return False\n if self.get_start_offset() >= self.get_end_offset():\n logger.error(\"End position of the fragment must be greater than the starting one: start=%d, end=%d\"%(self.get_start_offset(), self.get_end_offset()))\n return False\n return True",
"def test_deleting_a_segment(self):\n pass",
"def parse_and_validate_num_segs(segment_str):\n # try to parse numSegments\n num_segments = 0\n try:\n num_segments = int(segment_str)\n divs = math.log(num_segments, 2)\n if num_segments < 2:\n raise ValidationError(NUMSEG_ERR_SMALL_VAL)\n elif int(divs) != divs:\n raise ValidationError(NUMSEG_ERR_BAD_POW)\n except ValidationError as err:\n print USAGE_STR.format(sys.argv[0])\n print NUMSEG_ERR.format(err.args[0])\n sys.exit()\n except ValueError:\n print USAGE_STR.format(sys.argv[0])\n print NUMSEG_ERR.format(NUMSEG_ERR_BAD_PARSE)\n sys.exit()\n except BaseException as err:\n print USAGE_STR.format(sys.argv[0])\n print NUMSEG_ERR.format(\"Unexpected error\")\n print \"Error was:\\n\\t\", err\n sys.exit()\n return num_segments",
"def test_updating_a_segment(self):\n pass",
"def segment_segment(s1, s2):\n l1=s1.line()\n l2=s2.line()\n i = line_line(l1, l2)\n if isinstance(i, bool): return False\n k = s1.affine(i)\n return k >= 0 and k <= 1 and i",
"def test_small_invalid_bounds(self):\r\n upper_bound = 2\r\n lower_bound = 10\r\n n_value = 20\r\n ci_percent = 95\r\n\r\n with self.assertRaises(CIBoundInversionException):\r\n ci_to_sd(upper_bound, lower_bound, ci_percent, n_value)",
"def segment(self):\n try:\n safe_execute_command(self._prepare_segmentation_cmd(self.atlas_img_path, self.registered_atlas_img_path),\n self.segmentation_log_file, self.segmentation_error_file)\n except SafeExecuteCommandError as err:\n SegmentationError('Segmentation failed; {}'.format(err))",
"def _validate_interval(interval: Interval) -> None:\n origin, end = interval\n\n if end < origin:\n raise ValueError(f\"Interval [{origin}, {end}] is not a proper one.\") # pragma: no cover",
"def test_getting_segments(self):\n pass",
"def _filter_out_bad_segments(img1, seg1, img2, seg2):\n minval = tf.reduce_min(tf.reduce_sum(seg1, [0,1])*tf.reduce_sum(seg2, [0,1]))\n if minval < 0.5:\n warnings.warn(\"filtering bad segment\")\n return False\n else:\n return True",
"def remove_segment(self):\n selected_segment = \\\n self.controller.shared_data.obj_track.selected_segment_idx\n\n if len(selected_segment) == 1:\n segment_idx = selected_segment[0]\n\n msg = 'Do you want to remove the selected segment?'\n proceed = tk.messagebox.askyesno(title='Remove segment',\n message=msg)\n\n if proceed:\n size = self.controller.shared_data.obj_track.remove_segment(\n segment_idx)\n\n if size > 0:\n plots.update_plots(\n self.controller.shared_data.obj_track,\n self.controller.shared_data.ax_track,\n self.controller.shared_data.ax_ele,\n self.controller.shared_data.ax_track_info,\n canvas=self.controller.shared_data.canvas)\n\n else:\n plots.initial_plots(\n self.controller.shared_data.ax_track,\n self.controller.shared_data.ax_ele,\n self.controller.shared_data.ax_track_info)\n\n tk.messagebox.showwarning(\n title='No segment',\n message='Last segment has been removed.')\n\n self.controller.shared_data.canvas.draw()\n\n elif len(selected_segment) > 1:\n messagebox.showerror('Warning',\n 'More than one segment is selected')\n elif len(selected_segment) == 0:\n messagebox.showerror('Warning',\n 'No segment is selected')",
"def _validateVertex(self, v):\n if v < 0 or v >= self._V:\n raise Exception(\"vertex {} is not between 0 and {}\".format(v, (self._V-1)))",
"def validate(self):\n if self._inc_begin is None:\n raise ValueError((\"TimeRange {self} missing begin point\")\n .format(self=self))\n if self._exc_end is None:\n raise ValueError((\"TimeRange {self} missing end point\")\n .format(self=self))",
"def _assert_validation_error( # type: ignore[override]\n self, expected_error_substring: str\n ) -> None:\n with self.assertRaisesRegex(\n utils.ValidationError, expected_error_substring):\n self.collection.validate()",
"def intersects_segment(\n self, a: Tuple[float, float], b: Tuple[float, float]\n ) -> bool:\n assert len(a) == 2\n assert len(b) == 2\n return bool(lib.cpBBIntersectsSegment(self, a, b))",
"def test_invalid_time_range(event_member):\n _, member, event_id = event_member\n current = date.today() + timedelta(days=6)\n start = datetime.combine(current, time(12, 30))\n end = start - timedelta(days=1)\n expect_error(edit, InputError, member.username, event_id,\n True, start, end)",
"def test_fail_signature_fragment_address_wrong(self):\n self.bundle[5].address =\\\n Address(\n b'QHEDFWZULBZFEOMNLRNIDQKDNNIELAOXOVMYEI9P'\n b'GNFDPEEZCWVYLKZGSLCQNOFUSENIXRHWWTZFBXMPS'\n )\n\n validator = BundleValidator(self.bundle)\n\n self.assertFalse(validator.is_valid())\n\n self.assertListEqual(\n validator.errors,\n\n [\n # The bundle validator uses the address to link inputs\n # together, so if it encounters a different address, then it\n # assumes it has found a new input.\n 'Transaction 4 has invalid signature (using 1 fragments).',\n ],\n )",
"def test_getting_segment_details(self):\n pass",
"def segment(x,u1,u2):\n if not (isgoodnum(u1) and isgoodnum(u2)) or close(u1,u2) or u1<0 or u2 < 0 or u1 > 1 or u2 > 1:\n raise ValueError('bad parameter arguments passed to segment: '+str(u1)+', '+str(u2))\n if ispoint(x):\n return deepcopy(x)\n elif isline(x):\n return segmentline(x,u1,u2)\n elif isarc(x):\n return segmentarc(x,u1,u2)\n elif ispoly(x):\n return segmentpoly(x,u1,u2)\n elif isgeomlist(x):\n return segmentgeomlist(x,u1,u2)\n else:\n raise ValueError(\"inappropriate figure type for segment(): \"+str(x))",
"def testSPFInvalidIPv4Range(self):\n spf_record = \"v=spf1 ip4:78.46.96.236/99 ~all\"\n domain = \"surftown.dk\"\n self.assertRaises(checkdmarc.SPFSyntaxError,\n checkdmarc.parse_spf_record, spf_record, domain)",
"def __eq__(self, other: Segment) -> bool:\n return any(\n (\n self.start == other.start and self.end == other.end,\n self.start == other.end and self.end == other.start,\n )\n )",
"def check_subseq_range(subseq_range):\n subseq_range_content = subseq_range.split(\"-\")\n if len(subseq_range_content) != 2:\n err_str = \"A subseq_range must have two arguments (start and stop)\"\n err_str += \" separated by a -\"\n raise ValueError(err_str)\n if int(subseq_range_content[0]) > int(subseq_range_content[1]):\n err_str = \"Start for a subseq_range must be lower than the stop\"\n raise ValueError(err_str)",
"def __validatePoint(self, point):\n # print point\n if point[0] > self.scn_width:\n raise ValueError('X coordinate: %d out of range.' % point[0])\n if point[1] > self.scn_height:\n raise ValueError('Y coordinate: %d out of range.' % point[1])\n return point",
"def isIndexError(self, line_number):\n return line_number < 0 or line_number + 1 > len(self.numbers)",
"def get_segment_after(self, target_segment):\n index = self.segments.index(target_segment)\n last_index = len(self.segments) - 1\n if index == last_index:\n if self.loop:\n return self.segments[0] # reset to first segment\n else:\n # TODO this might be better off with an Exception\n return target_segment #return same thing\n return self.segments[index+1]",
"def invalid(self):\n pass"
] | [
"0.6800116",
"0.56447387",
"0.5554494",
"0.5524308",
"0.55040294",
"0.54933834",
"0.5323259",
"0.5266226",
"0.52067447",
"0.51706624",
"0.51565564",
"0.51420945",
"0.5136046",
"0.50560105",
"0.50518936",
"0.5045964",
"0.50384283",
"0.50205344",
"0.49898636",
"0.49722755",
"0.49614322",
"0.48735934",
"0.48699707",
"0.4861231",
"0.48597786",
"0.48386022",
"0.48329526",
"0.48324683",
"0.48130152",
"0.481127"
] | 0.7078179 | 0 |
Returns a Boolean value indicating whether this skill can be used to handle the given command. | def matches_command(self, skill_input: SkillInput) -> bool:
verb = (skill_input.verb or None) and skill_input.verb.lower()
return verb in self._cmd_list | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def responds_to(self, command) -> bool:\n return command == self.command and self.active is True and self.command is not None",
"def is_enabled(command):\n if command not in Controller.commands:\n return False\n return Controller.commands[command][2]",
"def command_registered(self, command: str) -> bool:\n return command in self._commands",
"def has_command(self, command):\n for pbt in self._plugins.values():\n if pbt.command == command:\n return True\n return False",
"def validate_command(command):\n return command in list(VALID_COMMANDS.keys())",
"def __commandExists(self, command, cmdtype):\n try:\n # method exists\n if hasattr(self, self.__getFullCommandName(command, cmdtype)):\n # command handler type exists\n if self.__commandHandlerTypeExists(cmdtype):\n return True\n else:\n return False\n else:\n return False\n # any key does not exist\n except KeyError:\n return False",
"def checkIfEnabled(self):\n\n # Reload the command file to check for new commands\n importlib.reload(BotSettings)\n matches = BotSettings.config['commands']\n\n # Check for the match and if it is there return the value that goes with the command\n for key in matches:\n key.strip(\"!\")\n if key == self.command:\n return matches.get(key)\n\n # If reached the command does not exist\n return False",
"def __is_active(self, command):\n return True",
"async def can_run(self, ctx: Context) -> bool:\n\n if not self.enabled:\n raise DisabledCommand(f'{self.name} command is disabled')\n\n original = ctx.command\n ctx.command = self\n\n try:\n if not await ctx.bot.can_run(ctx):\n raise CheckFailure(f'The global check functions for command {self.qualified_name} failed.')\n\n cog = self.cog\n if cog is not None:\n local_check = Cog._get_overridden_method(cog.cog_check)\n if local_check is not None:\n ret = await guilded.utils.maybe_coroutine(local_check, ctx)\n if not ret:\n return False\n\n predicates = self.checks\n if not predicates:\n # since we have no checks, then we just return True.\n return True\n\n return await guilded.utils.async_all(predicate(ctx) for predicate in predicates) # type: ignore\n finally:\n ctx.command = original",
"def can_handle(self, rsm_ctx):\n return False",
"def has_command_with_name(self, command_name):\n return command_name in self.commands",
"def do_known_command(self, cmd):\n if cmd in self.commands:\n return \"true\", True\n else:\n return \"false\", True",
"def has_commands(self) -> bool:\n return len(self.commands) > 0",
"def _iscommand(self, key):\r\n\t\tyes = False\r\n\t\tfor i in COMMAND_NAME.keys():\r\n\t\t\tif key == i: \r\n\t\t\t\tyes = True; break\r\n\t\treturn yes",
"def _has_permission(self, user, user_is_mod, command, db_session):\n\n if command[1] == 'for_all':\n return True\n if command[1] == 'for_mods' and user_is_mod:\n return True\n if type(command[1]) == db.Command:\n db_command = command[1]\n if bool(db_command.permissions) is False:\n return True\n elif user in [permission.user_entity for permission in db_command.permissions]:\n return True\n return False",
"def is_valid_command(command):\n # TODO(etscrivner): Eventually we'd like to construct this dynamically from\n # a list of all available commands\n valid_commands = [\n 'add', 'append', 'decr', 'delete', 'flush_all', 'get', 'gets', 'incr',\n 'prepend', 'quit', 'replace', 'set', 'stats', 'verbosity', 'version',\n ]\n\n if not command:\n return False\n\n parts = command.split('\\r\\n')\n command_parts = parts[0].split(' ')\n\n command = command_parts[0]\n return command.strip().lower() in valid_commands",
"def is_valid_command(command):\n return is_get(command) or is_insert(command) or is_update(command) or is_delete(command) or is_showall(command) or is_search(command)",
"def is_cmd(self, name):\n \n return name in self.cmds",
"def _validated_conf_command(self, command):\n\n if (1, command) in self.CONF_ALLOWED_COMMANDS or (0, command) in self.CONF_ALLOWED_COMMANDS:\n self._last_command = command\n self._last_command_mode = 1\n self._last_command_failure = None\n return True\n\n self._last_command_failure = 'Unrecognised Command'",
"def is_gcode_supported(self, command: Any) -> bool:\n if isinstance(command, Block):\n return_val = True\n for gcode in sorted(command.gcodes):\n return_val = return_val and self.is_gcode_supported(gcode)\n return return_val\n if isinstance(command, GCode):\n modal = str(command.word_key or command.word_letter).encode(\"utf-8\")\n return self.is_gcode_supported(modal)\n if isinstance(command, bytes):\n return command in self.SUPPORTED_GCODE\n\n raise AttributeError(\"Cannot tell if %s is valid gcode.\" % command)",
"def should_execute(self, message):\n\t\tif self.command_str is not None:\n\t\t\treturn message.content.startswith(\"{}{}\".format(cmd_prefix, self.command_str))\n\t\telse:\n\t\t\treturn False",
"def is_no_command_supported(command):\n command_type = command.get('command-type')\n if command_type:\n if command_type in ['display-table','display-rest', 'show']:\n return False\n no_supported = command.get('no-supported', True)\n if no_supported == False:\n return False\n return True",
"def _is_command(self, ext):\n try:\n return issubclass(ext, CommandExtension)\n except TypeError:\n return False",
"def check_channel_exec_request(self, channel, command):\n return False",
"def is_valid_command(args):\n if args.command is not None:\n return True\n return False",
"def is_command_ancillary(args):\n # pylint: disable=bad-continuation\n if (\n # skip the parent check and only\n # determine if the parameter is present\n is_valid_executes(args, skip=True)\n ):\n return True\n return False",
"def _known_command(self, command, do_command):\n result = self.known_commands.get(command)\n if result is not None:\n return result\n translated_command = self.gtp_aliases.get(command, command)\n try:\n response = do_command(\"known_command\", translated_command)\n except BadGtpResponse:\n known = False\n else:\n known = (response == 'true')\n self.known_commands[command] = known\n return known",
"def expects_result(self, command):\n return isinstance(command, (self.package(\"Syntax\").Operator,\n self.package(\"Syntax\").Formule))",
"def check_command(self):\n return self.process is not None and self.process.poll() is None",
"def can_execute(self, msg, command, now):\n if command not in self.user_limit:\n return True, 0\n expiry = self.user_limit[command].get(msg.author.id, 0)\n return now > expiry, expiry-now"
] | [
"0.74278367",
"0.68981487",
"0.66977847",
"0.66707885",
"0.6667942",
"0.66316724",
"0.6607451",
"0.653622",
"0.6535393",
"0.6522671",
"0.6496499",
"0.6466457",
"0.6396782",
"0.63804924",
"0.6377974",
"0.63763314",
"0.63124734",
"0.63028836",
"0.62900245",
"0.62845373",
"0.6267718",
"0.6257931",
"0.6231037",
"0.6229254",
"0.6225087",
"0.62161744",
"0.60956687",
"0.606716",
"0.6059511",
"0.60352594"
] | 0.73454964 | 1 |
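A minimal usage sketch for the matches_command record above. Only the method body is taken from the record; SkillInput and EchoSkill are hypothetical stand-ins invented here so the snippet runs on its own:

class SkillInput:
    def __init__(self, verb):
        self.verb = verb  # parsed command verb, possibly None

class EchoSkill:
    def __init__(self, commands):
        self._cmd_list = commands  # lower-case verbs this skill handles

    def matches_command(self, skill_input):
        # None-safe, case-insensitive membership test, verbatim from the record
        verb = (skill_input.verb or None) and skill_input.verb.lower()
        return verb in self._cmd_list

skill = EchoSkill(["echo", "repeat"])
assert skill.matches_command(SkillInput("Echo"))    # case-insensitive match
assert not skill.matches_command(SkillInput(None))  # missing verb yields False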
Test to verify view profile button. Uses TestStatus class to mark/assert test case results. | def test_TC_Users_200819_3(self):
self.log.info("*#" * 20)
self.log.info("test_TC_Users_200819_3 started")
self.log.info("*#" * 20)
self.us.gotoUsers()
self.us.clickViewProfile()
result = self.us.verifyViewProfile()
self.ts.markFinal("test_TC_Users_200819_3", result, "View profile button Verification") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_view_status(self):\n self.add_testuser()\n response = self.client.get(\"/profile/testuser/edit\")\n self.assertTrue(response.status_code == 301)",
"def test_view_profile(self):\n LOGGER.debug(\"Test GET /rango/view/leothelion/ for anon user\")\n anon_view_response = self.client.get('/rango/view/leothelion/')\n self.assertContains(anon_view_response, \"[email protected]\")\n \n LOGGER.debug(\"Test GET /rango/view/leothelion/ for logged in user\")\n self.client.login(username='leothelion', password='rawr')\n logged_in_view_response = self.client.get('/rango/view/leothelion/')\n self.assertContains(logged_in_view_response, \"[email protected]\")\n \n \"\"\"Test to see if profile for hungryhippo can be viewed anon and logged in\"\"\"\n LOGGER.debug(\"Test GET /rango/view/hungyhippo/ for anon user\")\n anon_view_response = self.client.get('/rango/view/hungryhippo/')\n self.assertNotContains(anon_view_response, \"[email protected]\")\n self.assertContains(anon_view_response, \"Hungry\")\n \n LOGGER.debug(\"Test GET /rango/view/hungryhippo/ for logged in user\")\n self.client.login(username='hungryhippo', password='food')\n logged_in_view_response = self.client.get('/rango/view/hungryhippo/')\n self.assertContains(logged_in_view_response, \"[email protected]\")\n self.assertContains(anon_view_response, \"Hippo\")",
"def test_TC_Users_UserProfile_200819_4(self):\n self.log.info(\"*#\" * 20)\n self.log.info(\"test_TC_Users_UserProfile_200819_4 started\")\n self.log.info(\"*#\" * 20)\n self.us.gotoUsers()\n self.us.clickViewProfile()\n self.us.clickTeam()\n self.us.clickDetails()\n result = self.us.verifyViewProfile()\n self.ts.markFinal(\"test_TC_Users_UserProfile_200819_4\", result, \"Teams working table open/close Verification\")",
"def test_view_a_user_profile(self):\n self.authorize_user(self.user_login_details)\n url = self.profiles_url + \\\n '{}'.format(self.user['user']['username']) + \"/\"\n response = self.client.get(url)\n self.assertEqual(response.status_code, status.HTTP_200_OK)",
"def test_profile(self):\n\n # login in\n url_extend = 'user_auth/login/'\n username = 'user4'\n password = 'user'\n login_button = login(self.browser, self.url + url_extend, username, password)\n try:\n login_button.click()\n except:\n raise Exception(\"Login Error!\")\n # locate the profile memu.\n try:\n profile_menu = self.browser.find_element_by_id('profile')\n profile_menu.click()\n except:\n raise Exception(\"Cannot find profile menu!\")\n\n # check if we are at the profile page after we clicked the profile menu.\n self.assertEqual(self.browser.current_url, self.url + 'user_auth/profile/')",
"def test_verification_status_visible(self):\r\n self.client.login(username=\"jack\", password=\"test\")\r\n self.check_verification_status_on('verified', 'You\\'re enrolled as a verified student')\r\n self.check_verification_status_on('honor', 'You\\'re enrolled as an honor code student')\r\n self.check_verification_status_on('audit', 'You\\'re auditing this course')",
"def test_TC_Users_UserProfile_200819_2(self):\n self.log.info(\"*#\" * 20)\n self.log.info(\"test_TC_Users_UserProfile_200819_2 started\")\n self.log.info(\"*#\" * 20)\n self.us.gotoUsers()\n self.us.clickViewProfile()\n self.us.clickTeam()\n result = self.us.verifyTeamOpenClose()\n self.ts.mark(result=result, resultMessage=\"Teams Table Open Verification\")\n self.us.clickTeam()\n result = self.us.verifyTeamOpenClose()\n result = not result\n self.ts.markFinal(\"test_TC_Users_UserProfile_200819_2\", result, \"Teams working table open/close Verification\")",
"def test_loggedin_get_userprofileview(admin_client):\n resp = admin_client.get(\"/api/record/profile/\", follow=True)\n assert resp.status_code == 200\n userdata = resp.data\n assert \"user\" in userdata.keys()\n assert \"profile\" in userdata.keys()",
"def verifyViewProfile(self):\n self.waitForElement(locator=self._viewProfileImg, locatorType=\"xpath\")\n result = self.isElementPresent(locator=self._viewProfileImg, locatorType=\"xpath\")\n self.log.info(\"Verify View Profile result: \" + str(result))\n return result",
"def clickViewProfile(self):\n self.waitForElement(locator=self._viewProfileBtn, locatorType=\"xpath\")\n element = self.getElementList(locator=self._viewProfileBtn, locatorType=\"xpath\")\n self.elementClick(element=element[0])",
"def test_view_all_users_profiles(self):\n self.authorize_user(self.user_login_details)\n response = self.client.get(self.profiles_url)\n self.assertEqual(response.status_code, status.HTTP_200_OK)",
"def test_user_profile_view_success(self):\n params = {'pk': self.user.id}\n profile_response = self.client.get(reverse('api:users-detail', kwargs=params))\n self.assertTrue(profile_response.status_code == 200)\n user_data = profile_response.data\n self.assertTrue(user_data.get('username') == self.user.username)\n self.assertTrue(user_data.get('game_nickname') == self.user.game_nickname)\n self.assertTrue(user_data.get('email') == self.user.email)\n self.assertTrue(user_data.get('description') == self.user.description)\n self.assertTrue(user_data.get('gender') == self.user.gender)\n self.assertTrue(user_data.get('coins') == self.user.coins)",
"def test_user_profile(self):\n\n with self.client:\n result = self.client.get('/users/whisky-test')\n self.assertEqual(result.status_code, 200)\n self.assertIn(b'<h1 class=\"display-4\">\\n whisky-test\\n </h1>', result.data)",
"def test_user_profile_page(self):\n result = self.client.get(\"/profile\", follow_redirects=True)\n self.assertIn(b\"User ID\", result.data)",
"def test_user_profiles(self):\n\n result = self.client.get(\"/profile/1\")\n self.assertIn(b'In house:',result.data)",
"def test_31_user_profile_progress(self, mock):\r\n self.register()\r\n self.new_application()\r\n app = db.session.query(App).first()\r\n task = Task(app_id=app.id, info={'n_answers': '10'})\r\n db.session.add(task)\r\n db.session.commit()\r\n for i in range(10):\r\n task_run = TaskRun(app_id=app.id, task_id=1, user_id=1,\r\n info={'answer': 1})\r\n db.session.add(task_run)\r\n db.session.commit()\r\n self.app.get('api/app/%s/newtask' % app.id)\r\n\r\n res = self.app.get('account/johndoe', follow_redirects=True)\r\n assert \"Sample App\" in res.data, res.data\r\n assert \"You have contributed to <strong>10</strong> tasks\" in res.data, res.data\r\n assert \"Contribute!\" in res.data, \"There should be a Contribute button\"",
"def test_profile_page_returns_correct_html(self):\n self.add_testuser()\n response = self.client.get('/profile/testuser/')\n self.assertContains(response, 'Album Count')",
"def test_user_profile(self):\n with self.client as c:\n with c.session_transaction() as sess:\n sess[CURR_USER_KEY] = self.testuser_id\n\n address = \"1215 Brookview Ave, Kettering, Ohio 45409\"\n\n resp = c.get(f\"/users/8989/\")\n html = resp.get_data(as_text=True)\n\n self.assertEqual(resp.status_code, 200)\n self.assertIn(\n '<h1 class=\"Display-4 text-center mt-3\"><b>Profile Information:</b></h1>',\n html,\n )\n self.assertIn(\"<p>testuser</p>\", html)\n self.assertIn(\"<p>[email protected]</p>\", html)\n self.assertIn(\"<p>662-996-3356</p>\", html)\n self.assertIn(\n '<a class=\"font-weight-bold btn winter-neva-gradient color-block btn-block my-4 waves-effect z-depth-0\" href=\"/users/8989/edit\">Edit Profile</a>',\n html,\n )",
"def test_user_profile_view_constraint(self):\n another_user = AnotherUserFactory()\n params = {'pk': another_user.id}\n profile_response = self.client.get(reverse('api:users-detail', kwargs=params))\n self.assertTrue(profile_response.status_code == 200)\n user_data = profile_response.data\n self.assertFalse(bool(user_data.get('coins')))\n self.assertFalse(user_data.get('email') == self.user.email)\n self.assertFalse(user_data.get('username') == self.user.username)\n self.assertFalse(user_data.get('description') == self.user.description)\n self.assertFalse(user_data.get('gender') == self.user.gender)\n self.assertFalse(user_data.get('birth_date') == self.user.birth_date)",
"def test_set_user_status(self):\n pass",
"def test_Profile(self):\n self.assertEquals(self.user_1.username, 'testuser')\n # self.assertEquals(self.user_1.password, '12345')\n self.assertEquals(self.user_1.email,\n '[email protected]')",
"def test_TC_Users_200819_1(self):\n self.log.info(\"*#\" * 20)\n self.log.info(\"test_TC_Users_200819_1 started\")\n self.log.info(\"*#\" * 20)\n self.us.gotoUsers()\n self.us.clickDashboard()\n result = self.us.verifyDashboard()\n self.ts.markFinal(\"test_TC_Users_200819_1\", result, \"Dashboard Link Verification\")",
"def test_home_view_is_status_ok(self):\n from imager_profile.views import HomeView\n req = self.request.get(\"/\")\n view = HomeView.as_view()\n response = view(req)\n self.assertTrue(response.status_code == 200)",
"def test_fundamental_view_properties(self):\n response = self.client.get(\n reverse('users:profile', kwargs={'pk' : self.u.pk})\n )\n title = BeautifulSoup(response.content, features='html.parser').find('title').getText().strip().replace('\\n', '')\n\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response.resolver_match.func.view_class, views.ProfileView)\n self.assertEqual(title, 'Profile №%d \\\\ Chattings' % self.u.id)",
"def test_profile_view_with_url(self):\n url = reverse('profiles:profile')\n response = self.client.get(url)\n self.assertEqual(response.status_code, 200)\n self.assertTemplateUsed(response, 'profiles/profile.html')",
"def test_user_edit_profile(self):\n with self.client as c:\n with c.session_transaction() as sess:\n sess[CURR_USER_KEY] = self.testuser_id\n\n address = \"1215 Brookview Ave, Kettering, Ohio 45409\"\n\n resp = c.get(f\"/users/8989/\")\n html = resp.get_data(as_text=True)\n\n self.assertEqual(resp.status_code, 200)\n self.assertIn(\n '<h1 class=\"Display-4 text-center mt-3\"><b>Profile Information:</b></h1>',\n html,\n )\n self.assertIn(\"<p>testuser</p>\", html)\n self.assertIn(\"<p>[email protected]</p>\", html)\n self.assertIn(\"<p>662-996-3356</p>\", html)\n self.assertIn(\n '<a class=\"font-weight-bold btn winter-neva-gradient color-block btn-block my-4 waves-effect z-depth-0\" href=\"/users/8989/edit\">Edit Profile</a>',\n html,\n )",
"def test_06_user_public_profile(self):\r\n # As Anonymou user\r\n url = \"/account/%s\" % self.name\r\n res = self.app.get(url, follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"Public User Profile page should be shown to anonymous users\"\r\n assert dom.find(id='enforce_privacy') is None, err_msg\r\n # As Authenticated user but NOT ADMIN\r\n self.signin()\r\n res = self.app.get(url, follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"Public User Profile page should be shown to authenticated users\"\r\n assert dom.find(id='enforce_privacy') is None, err_msg\r\n self.signout\r\n # As Authenticated user but ADMIN\r\n self.signin(email=self.root_addr, password=self.root_password)\r\n res = self.app.get(url, follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"Public User Profile page should be shown to admin users\"\r\n assert dom.find(id='enforce_privacy') is None, err_msg\r\n self.signout()",
"def test_06_user_public_profile(self):\r\n # As Anonymou user\r\n url = \"/account/%s\" % self.name\r\n res = self.app.get(url, follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"Public User Profile page should not be shown to anonymous users\"\r\n assert dom.find(id='enforce_privacy') is not None, err_msg\r\n # As Authenticated user but NOT ADMIN\r\n self.signin()\r\n res = self.app.get(url, follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"Public User Profile page should not be shown to authenticated users\"\r\n assert dom.find(id='enforce_privacy') is not None, err_msg\r\n self.signout\r\n # As Authenticated user but ADMIN\r\n self.signin(email=self.root_addr, password=self.root_password)\r\n res = self.app.get(url, follow_redirects=True)\r\n dom = BeautifulSoup(res.data)\r\n err_msg = \"Public User Profile page should be shown to admin users\"\r\n assert dom.find(id='enforce_privacy') is None, err_msg\r\n self.signout()",
"def test_profile_api_get(self):\n response = self.client.get(self.url)\n self.assertEqual(response.status_code, 200)",
"def test_functionality(self):\n self.browserObject = globalVars.browserObject\n \n #Check for current logged in user\n self.verifyCurrentUser(userRole='Administrator', loginAsUser=True)\n \n self.get_DashboardPage(\"Server Utilization\")\n \n self.get_DashboardPage(\"Total Server Utilization\")\n \n self.logout()"
] | [
"0.76108277",
"0.71860087",
"0.71120733",
"0.6946868",
"0.6848691",
"0.6758389",
"0.67111415",
"0.66714704",
"0.6667742",
"0.6665116",
"0.65942097",
"0.65556455",
"0.6552819",
"0.6529676",
"0.6528348",
"0.6513482",
"0.64930034",
"0.64195246",
"0.6392366",
"0.6319257",
"0.6274277",
"0.62259",
"0.6191061",
"0.6163852",
"0.6156567",
"0.6148096",
"0.6105131",
"0.6091881",
"0.6089553",
"0.6033799"
] | 0.74449015 | 1 |
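The test records above and below funnel their assertions through a TestStatus helper (self.ts) with a mark/markFinal soft-assert API. That class is not part of this dump, so the sketch below is a guessed minimal implementation of the pattern, not the project's actual code:

class TestStatus:
    def __init__(self):
        self.failures = []

    def mark(self, result, resultMessage):
        # soft checkpoint: record a failure but let the test continue
        if not result:
            self.failures.append(resultMessage)

    def markFinal(self, testName, result, resultMessage):
        # final checkpoint: flush everything accumulated into one assertion
        self.mark(result, resultMessage)
        failures, self.failures = self.failures, []
        assert not failures, "%s failed checkpoints: %s" % (testName, failures)

ts = TestStatus()
ts.mark(True, "Teams Table Open Verification")
ts.markFinal("test_TC_Users_200819_3", True, "View profile button Verification")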
Test for Teams working table open/close. Uses TestStatus class to mark/assert test case results. | def test_TC_Users_UserProfile_200819_2(self):
self.log.info("*#" * 20)
self.log.info("test_TC_Users_UserProfile_200819_2 started")
self.log.info("*#" * 20)
self.us.gotoUsers()
self.us.clickViewProfile()
self.us.clickTeam()
result = self.us.verifyTeamOpenClose()
self.ts.mark(result=result, resultMessage="Teams Table Open Verification")
self.us.clickTeam()
result = self.us.verifyTeamOpenClose()
result = not result
self.ts.markFinal("test_TC_Users_UserProfile_200819_2", result, "Teams working table open/close Verification") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test(self):\n\t\tx = Team.objects.get(short_name='SF')\n\t\tself.assertEqual(x.nick_name, '49ers')\n\n\t\tx = Game.objects.get(week_number=1,game_number=1)\n\t\tx.fav_score = 21\n\t\tx.udog_score = 14\n\t\tx.spread = 5\n\t\tself.assertEqual(x.favWins(), True)\n\t\tx.spread = 10\n\t\tself.assertEqual(x.favWins(), False)\n\n\t\t# Sunday game at 4pm should be open if time before 1 else closed\n\t\tx.game_date = datetime.datetime(2020,3,1,16,0,0)\n\t\tcurrent_time = datetime.datetime(2020,3,1,11,0,0)\n\t\tself.assertEqual(x.isClosed(current_time=current_time), False)\n\t\tcurrent_time = datetime.datetime(2020,3,1,14,0,0)\n\t\tself.assertEqual(x.isClosed(current_time=current_time), True)\n\n\t\t# Saturday game. Should be closed only if after kick off\n\t\tx.game_date = datetime.datetime(2020,2,29,16,0,0)\n\t\tcurrent_time = datetime.datetime(2020,2,29,15,59,59)\n\t\tself.assertEqual(x.isClosed(current_time=current_time), False)\n\t\tcurrent_time = datetime.datetime(2020,2,29,16,0,1)\n\t\tself.assertEqual(x.isClosed(current_time=current_time), True)\n\n\t\t# Monday game, should be closed as long as it's after 1pm sunday\n\t\tx.game_date = datetime.datetime(2020,3,2,21,0,0)\n\t\tcurrent_time = datetime.datetime(2020,3,1,15,59,59)\n\t\tself.assertEqual(x.isClosed(current_time=current_time), True)\n\t\tcurrent_time = datetime.datetime(2020,3,1,11,59,59)\n\t\tself.assertEqual(x.isClosed(current_time=current_time), False)\n\t\tself.assertEqual(x.isOpen(current_time=current_time), True)\n\t\tcurrent_time = datetime.datetime(2020,3,2,11,59,59)\n\t\tself.assertEqual(x.isClosed(current_time=current_time), True)\n\t\tcurrent_time = datetime.datetime(2020,3,3,11,59,59)\n\t\tself.assertEqual(x.isClosed(current_time=current_time), True)\n\n\t\t# Sunday 9a game should be open until kick off\n\t\tx.game_date = datetime.datetime(2020,3,1,9,0,0)\n\t\tcurrent_time = datetime.datetime(2020,3,1,8,0,0)\n\t\tself.assertEqual(x.isClosed(current_time=current_time), False)\n\t\tcurrent_time = datetime.datetime(2020,3,1,10,0,0)\n\t\tself.assertEqual(x.isClosed(current_time=current_time), True)\n\t\tself.assertEqual(x.isOpen(current_time=current_time), False)\n\n\t\t# Now one check with not setting current_time\n\t\tx.game_date = datetime.datetime(1971,3,1,9,0,0)\n\t\tself.assertEqual(x.isClosed(), True)\n\n\n\t\tuser = User.objects.get(username='Tammer')\n\t\tx = Pick.objects.get(player=user,week_number=1,game_number=1)\n\t\tg = Game.objects.get(week_number=1,game_number=1)\n\t\tself.assertEqual(x.picked_fav, True)\n\t\tself.assertEqual(x.isCorrect(), g.fav_score - g.udog_score > g.spread)",
"def test_meeting_status(self):\n pass",
"def test_get_team_history(self):\n pass",
"def test_TC_Users_UserProfile_200819_4(self):\n self.log.info(\"*#\" * 20)\n self.log.info(\"test_TC_Users_UserProfile_200819_4 started\")\n self.log.info(\"*#\" * 20)\n self.us.gotoUsers()\n self.us.clickViewProfile()\n self.us.clickTeam()\n self.us.clickDetails()\n result = self.us.verifyViewProfile()\n self.ts.markFinal(\"test_TC_Users_UserProfile_200819_4\", result, \"Teams working table open/close Verification\")",
"def test_update_team_state(self):\n pass",
"def test_teams_read(self):\n pass",
"def test_update_activity_occurrence_status(self):\n pass",
"def test_get_open_requests_by_team(self):\n pass",
"def internal_test_timeline(ve_test):\n\n try:\n start_time = time.time()\n ve_test.log(\n \"************************************* internal_test_timeline ****************************************************\")\n fullscreen = ve_test.screens.fullscreen\n infolayer = ve_test.screens.infolayer\n timeline = ve_test.screens.timeline\n playback = ve_test.screens.playback\n actionmenu = ve_test.screens.linear_action_menu\n\n '''\n Step 1: navigate to fullscreen if not already done\n '''\n # Navigate to fullscreen if not already done\n fullscreen.navigate()\n ve_test.wait(GENERIC_WAIT)\n unlock_program_if_locked(ve_test)\n\n if not fullscreen.is_active(timeout=3) and ve_test.milestones.get_current_screen() == 'infolayer':\n ve_test.ui.center_tap()\n ve_test.wait(GENERIC_WAIT)\n\n status1 = fullscreen.is_active(timeout=3)\n ve_test.assertmgr.addCheckPoint(\"internal_test_timeline\", 1, status1, \"Failed to go to fullscreen. Current screen: %s\" % ve_test.milestones.get_current_screen())\n tuned_channel_id = playback.get_current_tuned()\n ve_test.wait(GENERIC_WAIT)\n\n '''\n Step 2: Check live video playback on tuned channel\n '''\n ve_test.log(\"Checking live video playback on tuned channel\")\n Flag = True\n # Waiting until we get infolayer screen and if notification is found on tuned channel\n for conter in range(20):\n current_screen = ve_test.milestones.get_current_screen()\n if current_screen == \"notification\":\n message_element = ve_test.milestones.getElement([(\"name\", \"text_view\", \"==\"), (\"id\", \"back\", \"==\")])\n ve_test.log(\"Issue in zapping the current channel\")\n ve_test.log(\"Closing notification screen\")\n ve_test.appium.tap_element(message_element)\n ve_test.wait(0.3)\n ve_test.log(\"Zapping to next channel\")\n ve_test.ui.one_finger_swipe(ScreenActions.UP)\n ve_test.wait(0.3)\n\n else:\n ve_test.ui.center_tap()\n ve_test.wait(GENERIC_WAIT)\n if infolayer.is_active(timeout=0.3):\n break\n\n wait_for_streaming(ve_test)\n unlock_program_if_locked(ve_test)\n ve_test.assertmgr.addCheckPoint(\"internal_test_timeline\", 2, ve_test.milestones.getPlaybackStatus()['playbackState'] == \"PLAYING\", \"Initials Conditions: Zapping on channel id : %s, the video is not playing\" % tuned_channel_id)\n\n '''\n Step 3: navigate to timeline with Swipe \"left\" and check display\n '''\n # navigate to timeLine with Swipe \"left\" and check display\n ve_test.log(\"navigating to timeline with Swipe Left\")\n flag = True\n wait_counter = 0\n while flag:\n ve_test.log(str(ve_test.milestones.get_current_screen()))\n #timeline.navigate()\n device_details = ve_test.milestones.getDeviceDetails()\n y = device_details[\"screen-height\"]/2\n left_x = device_details[\"screen-width\"]*0.1\n right_x = device_details[\"screen-width\"]*0.75\n ve_test.appium.swipe_area(right_x, y, left_x, y)\n ve_test.wait(GENERIC_WAIT)\n ve_test.log(str(ve_test.milestones.get_current_screen()))\n if timeline.is_active(timeout=0.03) or wait_counter == 20:\n flag = False\n wait_counter = wait_counter + 1\n screenElement = ve_test.milestones.getElements()\n ve_test.assertmgr.addCheckPoint(\"internal_test_timeline\", 3, str(ve_test.milestones.get_current_screen()) == \"timeline\", \"Failed to go to timeline. 
Current screen: %s\" % ve_test.milestones.get_current_screen())\n ve_test.wait(GENERIC_WAIT)\n\n '''\n Step 4; navigate to channel 3 (or next first channel viewable) via the timeline\n '''\n # navigate to channel 3 (or next first channel viewable) via the timeline\n elements = ve_test.milestones.getElements()\n tap_label = ve_test.milestones.get_elements_if_has_key(elements, \"image_url\")\n element_to_tap = tap_label[2]\n ve_test.appium.tap_element(element_to_tap)\n #ve_test.ui.tap_element\n ve_test.wait(GENERIC_WAIT)\n elements=ve_test.milestones.getElements()\n eventIdEle = ve_test.milestones.verify_element_by_key(elements, 'event_id')\n if eventIdEle is False:\n ve_test.wait(10)\n\n elements = ve_test.milestones.getElements()\n tap_on_next_channel = ve_test.milestones.get_elements_if_has_key(elements, \"title_text\")\n #tap_on_next_channel = ve_test.milestones.getElement([(\"title_text\", \"MORE INFO\", \"==\")])\n ve_test.appium.tap_element(tap_on_next_channel[0])\n ve_test.wait(GENERIC_WAIT)\n\n '''\n Step 5: Check ACTION MENU is displayed , TAP on the PIP to access fullscreen and check live video playback\n '''\n # Check ACTION MENU is displayed , TAP on the PIP to access fullscreen and check live video playback\n status1 = actionmenu.is_active(timeout=5)\n ve_test.assertmgr.addCheckPoint(\"internal_test_timeline\", 4, status1,\n \"Action Menu is not displayed. Current screen is : %s\" % ve_test.milestones.get_current_screen())\n play_button = verify_play_button(ve_test)\n ve_test.log(\"Object for play button\"+ str(play_button))\n if play_button:\n ve_test.appium.tap_element(play_button)\n else:\n ve_test.log(\"Play button not found in the action menu\")\n\n wait_for_streaming(ve_test)\n playbackstatus = fullscreen.is_active(timeout=5)\n ve_test.wait(GENERIC_WAIT)\n ve_test.assertmgr.addCheckPoint(\"internal_test_timeline\", 5,\n (ve_test.milestones.getPlaybackStatus()[\"playbackState\"] == \"PLAYING\"),\n \"Failed to tune to channel to the next viewable channel from timeline. video playback not working (url = %s - current screen=%s ) \" % (\n ve_test.milestones.getPlaybackStatus()['sso']['sessionPlaybackUrl'],\n ve_test.milestones.get_current_screen()))\n\n '''\n Step 6: navigate to timeline with SWIPE \"left\" and check display\n '''\n # navigate to timeLine with SWIPE \" left\" and check display\n ve_test.log(\"Navigating to timeline of the viewing channel with Swipe Left\")\n # timeline.navigate()\n flag = True\n wait_counter = 0\n while flag:\n ve_test.log(str(ve_test.milestones.get_current_screen()))\n # timeline.navigate()\n device_details = ve_test.milestones.getDeviceDetails()\n y = device_details[\"screen-height\"] / 2\n left_x = device_details[\"screen-width\"] * 0.1\n right_x = device_details[\"screen-width\"] * 0.75\n ve_test.appium.swipe_area(right_x, y, left_x, y)\n ve_test.wait(GENERIC_WAIT)\n if timeline.is_active(timeout=0.03) or wait_counter == 20:\n flag = False\n wait_counter = wait_counter + 1\n ve_test.wait(GENERIC_WAIT)\n status1 = timeline.is_active(timeout=0.3)\n ve_test.assertmgr.addCheckPoint(\"internal_test_timeline\", 6, status1,\n \"Failed to go to timeline screen. 
Current screen: %s\" % ve_test.milestones.get_current_screen())\n flg = True\n eventCount = 0\n while flg:\n elements = ve_test.milestones.getElements()\n timelinelist = ve_test.milestones.get_elements_if_has_key(elements, \"event_id\")\n length = len(timelinelist)\n\n if length >= 2:\n device_details = ve_test.milestones.getDeviceDetails()\n y = device_details[\"screen-height\"] / 2\n left_x = device_details[\"screen-width\"] * 0.1\n right_x = device_details[\"screen-width\"] * 0.75\n ve_test.appium.swipe_area(right_x, y, left_x, y)\n #ve_test.appium.swipe_area(1526, 732, 494, 732)\n eventCount += length\n if eventCount >= 10:\n break\n\n flg = True\n ve_test.assertmgr.addCheckPoint(\"internal_test_timeline\", 7, (eventCount >= 10),\n \"Number of events while doing horizontal navigation is not 10\")\n '''\n Step 7: select channels in timeline 8 times\n '''\n # selecting next channel in the timeline with TAP Down\n ve_test.log(\"Performing timeline navigation in UP direction\")\n elements = ve_test.milestones.getElements()\n\n flag = True\n while (flag):\n for count in range(0, 2):\n ve_test.wait(GENERIC_WAIT)\n tap_label = ve_test.milestones.get_elements_if_has_key(elements, \"image_url\")\n element_to_tap = tap_label[2]\n ve_test.log(\"Navigating to the next channel\")\n ve_test.appium.tap_element(element_to_tap)\n status = timeline.is_active(timeout=3)\n ve_test.assertmgr.addCheckPoint(\"internal_test_timeline\", 8 + count, status,\n \"Failed to get the focus to the next channel\")\n #ve_test.wait(GENERIC_WAIT)\n\n for counter in range(20):\n current_screen = ve_test.milestones.get_current_screen()\n # dismiss notification screen\n if current_screen == \"notification\":\n message_element = ve_test.milestones.getElement([(\"name\", \"text_view\", \"==\"), (\"id\", \"back\", \"==\")])\n ve_test.log(\"Closing notification screen\")\n ve_test.appium.tap_element(message_element)\n break\n\n flag = True\n else:\n flag = False\n break\n\n '''\n Step 8: return to full screen with TAP on CLOSE button and check display\n '''\n # Returning to full screen from timeline\n fg = True\n while fg:\n for counter in range(20):\n current_screen = ve_test.milestones.get_current_screen()\n # dismiss notification screen\n if current_screen == \"notification\":\n message_element = ve_test.milestones.getElement([(\"name\", \"text_view\", \"==\"), (\"id\", \"back\", \"==\")])\n ve_test.log(\"Closing notification screen\")\n ve_test.appium.tap_element(message_element)\n break\n else:\n ve_test.log(\"Returning to full screen with TAP on CROSS top right corner \")\n action = ScreenDismiss.CLOSE_BUTTON\n timeline.dismiss(action)\n ve_test.wait(0.5)\n action = ScreenDismiss.TAP\n infolayer.dismiss(action)\n ve_test.wait(0.5)\n fg = False\n break\n\n\n status = fullscreen.is_active(timeout=5)\n ve_test.assertmgr.addCheckPoint(\"internal_test_timeline\", 16, status, \"Failed to go back to fullscreen. \"\n \"Current screen: %s\" % ve_test.milestones.get_current_screen())\n ve_test.wait(GENERIC_WAIT)\n total_time_duration['internal_test_timeline'] = str(time.time() - start_time)\n ve_test.log(\"Time taken to execute internal_test_timeline :\" + str(time.time() - start_time) + \" seconds\")\n\n except:\n traceback.print_exc()\n formatted_lines = traceback.format_exc().splitlines()\n message = formatted_lines[-1]\n ve_test.assertmgr.addCheckPoint(\"internal_test_timeline\", 17, False, message)",
"def test_load_table(self, test_info):\n\n test_results = test_info\n\n # STEP-1: get table selected for the test from the dict of conditions for this test\n # -------------------------------------------------------------------------------------\n\n full_qualified_name = test_common_cons.FULL_QUALIFIED_TABLE_NAME_PATTERN. \\\n replace(test_common_cons.SCHEMA_PATTERN, test_results['schema_name'])\n full_qualified_name = full_qualified_name.replace(test_common_cons.TABLE_NAME_PATTERN,\n test_results['gaia_table_name'])\n # execute request\n try:\n tables = self.gaia.load_table(full_qualified_name)\n\n # STEP-2: Update the info of the request sent for the test with the table selected.\n # -------------------------------------------------------------------------------------\n test_results['request_sent'] = test_results['request_sent'].replace(test_common_cons.TABLE_NAME_PATTERN,\n full_qualified_name)\n # Get current time\n time = datetime.now()\n time_str = time.strftime('%Y-%m-%d %H:%M:%S')\n test_results['test_finished'] = f'{time_str} CET'\n\n test_results['test_additional_info'] = str(tables)\n\n # STEP-3: Get num of results returned, if it is the expected then the test has been passed.\n # -------------------------------------------------------------------------------------\n log.debug(f'Num tables returned: {len(tables.columns)}')\n\n n_expected_results = test_results['test_expected_value']\n if len(tables.columns) == n_expected_results:\n # Test passed\n test_results['test_result'] = PASSED\n log.debug(\"Test PASSED\")\n else:\n test_results['test_result'] = NOT_PASSED\n error_message = f'The number of columns returned: {len(tables.columns)} differs from the expected' \\\n f' {n_expected_results}'\n log.error(error_message)\n test_results['test_additional_info'] = error_message\n raise ValueError(error_message)\n return test_results\n\n except ValueError as err:\n log.error(str(err))\n # Get current time\n time = datetime.now()\n time_str = time.strftime('%Y-%m-%d %H:%M:%S')\n # fill result object with the info from the http error\n test_results['test_finished'] = f'{time_str} CET'\n test_results['test_result'] = NOT_PASSED\n test_results['test_additional_info'] = str(err)\n return test_results\n except HTTPError as err:\n error_message = \"Error connecting TAP server\"\n log.error(error_message)\n\n # Get current time\n time = datetime.now()\n time_str = time.strftime('%Y-%m-%d %H:%M:%S')\n # fill result object with the info from the http error\n test_results['test_finished'] = f'{time_str} CET'\n test_results['test_result'] = NOT_PASSED\n test_results['test_additional_info'] = error_message + \",\" + str(err)\n return test_results",
"def test_meeting_live_stream_status_update(self):\n pass",
"def test_is_active_of_homework_positive():\n assert oop_hw.is_active()",
"def test_get_status(self):\n pass",
"def test_get_status(self):\n pass",
"def finished_tests(self):\n self.testing = 0",
"def test_fantasy_status_nov_1(league):\n nov_1 = datetime.datetime(2019,11,1)\n players = league.as_of(nov_1).all_players()\n # make sure sammy blais is not a free agent, he was picked up oct 31\n assert(players.loc[6544, 'fantasy_status'] != 'FA')",
"def test_alive():\n pass",
"def test_alive():\n pass",
"def test_alive():\n pass",
"def test_get_habits_trackings_table():\n habits_trackings_table = analytics.habits_trackings_table()\n assert habits_trackings_table == [\n (2, 'Run', 'weekly', 'Improved fitness', 'Jogging and sprinting', '2021-06-25', '18:26'),\n (3, 'Read', 'daily', '12 books in a year', 'Classics and dystopian', '2021-06-25', '19:52'),\n (1, 'Yoga', 'daily', 'Be more flexible', 'A low-impact activity', '2021-06-26', '20:09'),\n (3, 'Read', 'daily', '12 books in a year', 'Classics and dystopian', '2021-06-26', '04:40'),\n (1, 'Yoga', 'daily', 'Be more flexible', 'A low-impact activity', '2021-06-27', '10:44'),\n (3, 'Read', 'daily', '12 books in a year', 'Classics and dystopian', '2021-06-27', '20:49'),\n (3, 'Read', 'daily', '12 books in a year', 'Classics and dystopian', '2021-06-28', '01:18'),\n (3, 'Read', 'daily', '12 books in a year', 'Classics and dystopian', '2021-06-29', '19:42'),\n (1, 'Yoga', 'daily', 'Be more flexible', 'A low-impact activity', '2021-06-29', '08:25'),\n (4, 'Meditation', 'daily', 'Training awareness', '20 minutes', '2021-06-29', '08:57'),\n (2, 'Run', 'weekly', 'Improved fitness', 'Jogging and sprinting', '2021-06-29', '02:07'),\n (5, 'Learn French', 'weekly', 'Fluent in french',\n 'Practice the 4 skills', '2021-06-29', '04:46'),\n (1, 'Yoga', 'daily', 'Be more flexible', 'A low-impact activity', '2021-06-30', '05:54'),\n (4, 'Meditation', 'daily', 'Training awareness', '20 minutes', '2021-06-30', '12:04'),\n (4, 'Meditation', 'daily', 'Training awareness', '20 minutes', '2021-07-01', '05:37'),\n (1, 'Yoga', 'daily', 'Be more flexible', 'A low-impact activity', '2021-07-01', '07:17'),\n (3, 'Read', 'daily', '12 books in a year', 'Classics and dystopian', '2021-07-01', '12:16'),\n (3, 'Read', 'daily', '12 books in a year', 'Classics and dystopian', '2021-07-02', '22:04'),\n (4, 'Meditation', 'daily', 'Training awareness', '20 minutes', '2021-07-02', '23:02'),\n (3, 'Read', 'daily', '12 books in a year', 'Classics and dystopian', '2021-07-03', '07:12'),\n (3, 'Read', 'daily', '12 books in a year', 'Classics and dystopian', '2021-07-04', '08:11'),\n (3, 'Read', 'daily', '12 books in a year', 'Classics and dystopian', '2021-07-05', '10:58'),\n (2, 'Run', 'weekly', 'Improved fitness', 'Jogging and sprinting', '2021-07-05', '14:38'),\n (3, 'Read', 'daily', '12 books in a year', 'Classics and dystopian', '2021-07-06', '00:14'),\n (4, 'Meditation', 'daily', 'Training awareness', '20 minutes', '2021-07-06', '12:28'),\n (3, 'Read', 'daily', '12 books in a year', 'Classics and dystopian', '2021-07-07', '17:07'),\n (4, 'Meditation', 'daily', 'Training awareness', '20 minutes', '2021-07-07', '02:44'),\n (5, 'Learn French', 'weekly', 'Fluent in french',\n 'Practice the 4 skills', '2021-07-07', '16:09'),\n (4, 'Meditation', 'daily', 'Training awareness', '20 minutes', '2021-07-08', '18:19'),\n (1, 'Yoga', 'daily', 'Be more flexible', 'A low-impact activity', '2021-07-08', '22:35'),\n (5, 'Learn French', 'weekly', 'Fluent in french',\n 'Practice the 4 skills', '2021-07-09', '10:01'),\n (1, 'Yoga', 'daily', 'Be more flexible', 'A low-impact activity', '2021-07-09', '10:14'),\n (1, 'Yoga', 'daily', 'Be more flexible', 'A low-impact activity', '2021-07-10', '17:12'),\n (1, 'Yoga', 'daily', 'Be more flexible', 'A low-impact activity', '2021-07-11', '19:54'),\n (1, 'Yoga', 'daily', 'Be more flexible', 'A low-impact activity', '2021-07-12', '00:06'),\n (2, 'Run', 'weekly', 'Improved fitness', 'Jogging and sprinting', '2021-07-12', '16:12'),\n (3, 'Read', 'daily', '12 books in a year', 'Classics 
and dystopian', '2021-07-12', '18:43'),\n (1, 'Yoga', 'daily', 'Be more flexible', 'A low-impact activity', '2021-07-13', '08:02'),\n (1, 'Yoga', 'daily', 'Be more flexible', 'A low-impact activity', '2021-07-14', '21:32'),\n (1, 'Yoga', 'daily', 'Be more flexible', 'A low-impact activity', '2021-07-15', '08:37'),\n (1, 'Yoga', 'daily', 'Be more flexible', 'A low-impact activity', '2021-07-16', '08:32'),\n (3, 'Read', 'daily', '12 books in a year', 'Classics and dystopian', '2021-07-16', '11:59'),\n (5, 'Learn French', 'weekly', 'Fluent in french',\n 'Practice the 4 skills', '2021-07-16', '21:14'),\n (4, 'Meditation', 'daily', 'Training awareness', '20 minutes', '2021-07-18', '03:03'),\n (1, 'Yoga', 'daily', 'Be more flexible', 'A low-impact activity', '2021-07-18', '06:59'),\n (4, 'Meditation', 'daily', 'Training awareness', '20 minutes', '2021-07-19', '22:50'),\n (3, 'Read', 'daily', '12 books in a year', 'Classics and dystopian', '2021-07-19', '13:05'),\n (4, 'Meditation', 'daily', 'Training awareness', '20 minutes', '2021-07-20', '04:59'),\n (3, 'Read', 'daily', '12 books in a year', 'Classics and dystopian', '2021-07-20', '05:52'),\n (5, 'Learn French', 'weekly', 'Fluent in french',\n 'Practice the 4 skills', '2021-07-20', '09:00'),\n (3, 'Read', 'daily', '12 books in a year', 'Classics and dystopian', '2021-07-21', '18:44'),\n (4, 'Meditation', 'daily', 'Training awareness', '20 minutes', '2021-07-21', '03:55'),\n (3, 'Read', 'daily', '12 books in a year', 'Classics and dystopian', '2021-07-22', '06:52'),\n (1, 'Yoga', 'daily', 'Be more flexible', 'A low-impact activity', '2021-07-22', '18:27'),\n (4, 'Meditation', 'daily', 'Training awareness', '20 minutes', '2021-07-23', '01:28'),\n (1, 'Yoga', 'daily', 'Be more flexible', 'A low-impact activity', '2021-07-23', '12:17'),\n (4, 'Meditation', 'daily', 'Training awareness', '20 minutes', '2021-07-24', '19:10'),\n (4, 'Meditation', 'daily', 'Training awareness', '20 minutes', '2021-07-25', '02:18'),\n (4, 'Meditation', 'daily', 'Training awareness', '20 minutes', '2021-07-26', '09:53'),\n (5, 'Learn French', 'weekly', 'Fluent in french',\n 'Practice the 4 skills', '2021-07-26', '16:59')\n ]",
"def test_synchronous_on_the_fly_query(self, test_info):\n test_results = test_info\n\n # get table selected for the test from the dict of conditions for this test\n table_name = test_results['test_table']\n\n # Prepare the query for the test\n test_query = test_results['test_query'] \\\n .replace(test_common_cons.TABLE_NAME_PATTERN, table_name)\n # Update the template query with the real query that is going to be used to execute this test.\n test_results['test_query'] = test_query\n\n upload_resource = paths.path2_example_tb_4_onthefly\n\n # Update the info of the request sent for the test with the table selected.\n test_results['request_sent'] = test_results['request_sent']. \\\n replace(test_common_cons.QUERY_PATTERN, test_query)\n test_results['request_sent'] = test_results['request_sent']. \\\n replace(test_common_cons.TABLE_NAME_PATTERN, table_name)\n\n # Execute test\n try:\n job = self.gaia.launch_job(query=test_query, upload_resource=upload_resource,\n upload_table_name=table_name, verbose=True)\n results = job.get_results()\n results.pprint()\n\n # Get current time\n time = datetime.now()\n time_str = time.strftime('%Y-%m-%d %H:%M:%S')\n test_results['test_finished'] = f'{time_str} CET'\n\n # Get num of results returned, if it is the expected then the test has been passed.\n n_rows_returned = len(results)\n\n # Save the additional info\n test_results['test_additional_info'] = str(results)\n\n log.debug(f'Num rows returned: {n_rows_returned}')\n\n n_expected_results = test_results['test_expected_value']\n if n_rows_returned == n_expected_results:\n # Test passed\n test_results['test_result'] = PASSED\n log.debug(\"Test PASSED\")\n else:\n test_results['test_result'] = NOT_PASSED\n error_message = f'The number of rows returned: {n_rows_returned} differs from the expected' \\\n f' {n_expected_results}'\n test_results['test_additional_info'] = error_message\n log.error(error_message)\n raise ValueError(error_message)\n except ValueError as err:\n log.error(str(err))\n # Get current time\n time = datetime.now()\n time_str = time.strftime('%Y-%m-%d %H:%M:%S')\n # fill result object with the info from the http error\n test_results['test_finished'] = f'{time_str} CET'\n test_results['test_result'] = NOT_PASSED\n test_results['test_additional_info'] = str(err)\n return test_results\n\n except HTTPError as err:\n error_message = \"Error connecting TAP server\"\n log.error(error_message)\n\n # Get current time\n time = datetime.now()\n time_str = time.strftime('%Y-%m-%d %H:%M:%S')\n # fill result object with the info from the http error\n test_results['test_finished'] = f'{time_str} CET'\n test_results['test_result'] = NOT_PASSED\n test_results['test_additional_info'] = error_message + \",\" + str(err)\n return test_results\n\n return test_results",
"def test_toggle_cell(self):\n self.cell.toggle_living()\n self.assertEqual(self.cell.is_living(), True)\n self.cell.toggle_living()\n self.assertEqual(self.cell.is_living(), False)",
"def test_synchronous_query(self, test_info):\n\n test_results = test_info\n\n # STEP-1: get table selected for the test from the dict of conditions for this test\n # -------------------------------------------------------------------------------------\n full_qualified_name = test_common_cons.FULL_QUALIFIED_TABLE_NAME_PATTERN \\\n .replace(test_common_cons.SCHEMA_PATTERN, test_results['schema_name'])\n full_qualified_name = full_qualified_name.replace(test_common_cons.TABLE_NAME_PATTERN,\n test_results['gaia_table_name'])\n\n # execute request\n test_query = test_results['test_query'].replace(test_common_cons.FULL_QUALIFIED_PATTERN, full_qualified_name)\n # Update the template query with the real query that is going to be used to execute this test.\n test_results['test_query'] = test_query\n\n try:\n job = self.gaia.launch_job(test_query, dump_to_file=False)\n\n # Update the info of the request sent for the test with the table selected.\n test_results['request_sent'] = test_results['request_sent']. \\\n replace(test_common_cons.QUERY_PATTERN, test_query)\n # Get current time\n time = datetime.now()\n time_str = time.strftime('%Y-%m-%d %H:%M:%S.%f')\n test_results['test_finished'] = f'{time_str} CET'\n\n # Get num of results returned, if it is the expected then the test has been passed.\n r = job.get_results()\n n_rows_returned = len(job.get_results())\n\n # Save the additional info\n test_results['test_additional_info'] = str(r['solution_id'])\n\n log.debug(f'Num rows returned: {n_rows_returned}')\n\n n_expected_results = test_results['test_expected_value']\n if n_rows_returned == n_expected_results:\n # Test passed\n test_results['test_result'] = PASSED\n log.debug(\"Test PASSED\")\n else:\n test_results['test_result'] = NOT_PASSED\n error_message = f'The number of rows returned: {n_rows_returned} differs from the expected ' \\\n f'{n_expected_results}'\n test_results['test_additional_info'] = error_message\n log.error(error_message)\n raise ValueError(error_message)\n except ValueError as err:\n log.error(str(err))\n # Get current time\n time = datetime.now()\n time_str = time.strftime('%Y-%m-%d %H:%M:%S')\n # fill result object with the info from the http error\n test_results['test_finished'] = f'{time_str} CET'\n test_results['test_result'] = NOT_PASSED\n test_results['test_additional_info'] = str(err)\n return test_results\n except HTTPError as err:\n error_message = \"Error connecting TAP server\"\n log.error(error_message)\n\n # Get current time\n time = datetime.now()\n time_str = time.strftime('%Y-%m-%d %H:%M:%S')\n # fill result object with the info from the http error\n test_results['test_finished'] = f'{time_str} CET'\n test_results['test_result'] = NOT_PASSED\n test_results['test_additional_info'] = error_message + \",\" + str(err)\n return test_results\n\n return test_results",
"def test_status_done_basic(self):\n\n EXPLAIN_CONNECT = 'sqlite:///:memory:'\n FORSETI_CONNECT = 'sqlite:///{}'.format(\n get_db_file_path('forseti_1_basic.db'))\n\n self.service_config = ServiceConfig(EXPLAIN_CONNECT,\n FORSETI_CONNECT)\n self.source = 'FORSETI'\n self.model_manager = self.service_config.model_manager\n self.model_name = self.model_manager.create(name=self.source)\n\n scoped_session, data_access = self.model_manager.get(self.model_name)\n with scoped_session as session:\n\n importer_cls = importer.by_source(self.source)\n import_runner = importer_cls(\n session,\n self.model_manager.model(self.model_name, expunge=False),\n data_access,\n self.service_config)\n import_runner.run()\n\n model = self.model_manager.model(self.model_name)\n self.assertEqual(model.state,\n 'PARTIAL_SUCCESS',\n 'Model state should be set to PARTIAL_SUCCESS')",
"def run_tests():\n passed_tests = 0\n failed_tests = 0\n for case in TEST_CASES:\n start_date, end_date = [datetime.strptime(x, \"%d/%m/%Y\") for x in case[0]]\n experiment = Experiment(start_date, end_date)\n if experiment.duration() == case[1]:\n result = \"passed\"\n passed_tests += 1\n else:\n result = \"failed\"\n failed_tests += 1\n print(f\"\"\"{\"-\".join(case[0])}, {case[1]} days: Test {result}\"\"\")\n\n print(\n f\"All tests completed\\n\"\n f\"Number of tests passed: {passed_tests}\\n\"\n f\"Number of tests failed: {failed_tests}\"\n )",
"def test_teams_list(self):\n pass",
"def test_close_if_ended(self):\n today = pytz.UTC.localize(dt.datetime.now())\n self.leaderboard.closed = False\n self.leaderboard.start = today - dt.timedelta(days=15)\n self.leaderboard.end = today - dt.timedelta(days=7)\n\n self.leaderboard.close_if_ended()\n assert self.leaderboard.closed\n # print(self.leaderboard.closed)",
"def test_completed():\n assert complete == 1\n assert errorflag == 0",
"def test_updatePlayerbHist_fold_2(self):\n self.assertEqual(self.player.foldedInd, 1)",
"def test_shared_table(self, test_info):\n\n test_results = test_info\n\n # execute request\n try:\n\n # Step 1: Get the list of public tables\n tables = self.gaia.load_tables(only_names=True)\n # Now we will keep the number of results returned\n n_public_tables = len(tables)\n log.debug(f'N public tables is {n_public_tables}')\n\n # Step 2: Now we need to do login in to de Gaia Tap.\n\n # Now we can do the login()\n self.gaia.login(user=credentials.USERNAME, password=credentials.PASSWORD)\n\n # Step 3: Now we are going to execute again 'load_tables' but now specifying that we also want to list\n # the tables that we are sharing with other users from our user_schema.\n all_tables = self.gaia.load_tables(only_names=True, include_shared_tables=True)\n # Let's keep now the number of results returned\n n_shared_tables = len(all_tables)\n log.debug(f'N all tables is {n_shared_tables}')\n\n # Step 4: Finally we will do a logout from the system.\n self.gaia.logout()\n\n # Get current time to complete our result object\n time = datetime.now()\n time_str = time.strftime('%Y-%m-%d %H:%M:%S')\n test_results['test_finished'] = f'{time_str} CET'\n\n # Now let's have a look at the results for our test. If the number of tables returned as this second step\n # is equal or higher than the number of public tables the test is correct.\n if n_public_tables <= n_shared_tables:\n # Test passed\n test_results['test_result'] = PASSED\n debug_message = f'The number of shared tables: {n_shared_tables} is major or equal than the number ' \\\n f'of public tables: {n_public_tables}'\n log.debug(debug_message)\n test_results['test_additional_info'] = debug_message + \" Test PASSED!\"\n else:\n test_results['test_result'] = NOT_PASSED\n error_message = f'The number of shared tables: {n_shared_tables} is less than the number ' \\\n f'of public tables: {n_public_tables}'\n test_results['test_additional_info'] = error_message\n log.error(error_message)\n raise ValueError(error_message)\n self.gaia.logout()\n return test_results\n\n except ValueError as err:\n log.error(str(err))\n # Get current time\n time = datetime.now()\n time_str = time.strftime('%Y-%m-%d %H:%M:%S')\n # fill result object with the info from the http error\n test_results['test_finished'] = f'{time_str} CET'\n test_results['test_result'] = NOT_PASSED\n test_results['test_additional_info'] = str(err)\n self.gaia.logout()\n return test_results\n except HTTPError as err:\n\n error_message = \"Error connecting TAP server\"\n log.error(error_message)\n\n # Get current time\n time = datetime.now()\n time_str = time.strftime('%Y-%m-%d %H:%M:%S')\n # fill result object with the info from the http error\n test_results['test_finished'] = f'{time_str} CET'\n test_results['test_result'] = NOT_PASSED\n test_results['test_additional_info'] = error_message + \",\" + str(err)\n self.gaia.logout()\n return test_results"
] | [
"0.67550015",
"0.6104613",
"0.6035139",
"0.5994046",
"0.59128124",
"0.5830796",
"0.57443374",
"0.5734942",
"0.5732594",
"0.57252914",
"0.56904143",
"0.56856155",
"0.55997735",
"0.55997735",
"0.5592335",
"0.5516779",
"0.55117416",
"0.55117416",
"0.55117416",
"0.5494369",
"0.54837614",
"0.5477689",
"0.5475572",
"0.546863",
"0.54678345",
"0.5462557",
"0.54610014",
"0.5453792",
"0.5453577",
"0.54392433"
] | 0.65149 | 1 |
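The open/close record above negates the second verifyTeamOpenClose() result (result = not result) so that the table being gone after the second click is what counts as a pass. An invented in-memory page object, with no real browser behind it, illustrating that toggle pattern:

class TeamsPanel:
    def __init__(self):
        self._open = False

    def clickTeam(self):
        self._open = not self._open  # each click toggles the table

    def verifyTeamOpenClose(self):
        return self._open  # True while the table is visible

panel = TeamsPanel()
panel.clickTeam()
assert panel.verifyTeamOpenClose()        # open after the first click
panel.clickTeam()
result = not panel.verifyTeamOpenClose()  # closed again, so negation passes
assert result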
Test for team user details page. Uses TestStatus class to mark/assert test case results. | def test_TC_Users_UserProfile_200819_4(self):
self.log.info("*#" * 20)
self.log.info("test_TC_Users_UserProfile_200819_4 started")
self.log.info("*#" * 20)
self.us.gotoUsers()
self.us.clickViewProfile()
self.us.clickTeam()
self.us.clickDetails()
result = self.us.verifyViewProfile()
self.ts.markFinal("test_TC_Users_UserProfile_200819_4", result, "Teams working table open/close Verification") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_TC_Users_UserProfile_200819_2(self):\n self.log.info(\"*#\" * 20)\n self.log.info(\"test_TC_Users_UserProfile_200819_2 started\")\n self.log.info(\"*#\" * 20)\n self.us.gotoUsers()\n self.us.clickViewProfile()\n self.us.clickTeam()\n result = self.us.verifyTeamOpenClose()\n self.ts.mark(result=result, resultMessage=\"Teams Table Open Verification\")\n self.us.clickTeam()\n result = self.us.verifyTeamOpenClose()\n result = not result\n self.ts.markFinal(\"test_TC_Users_UserProfile_200819_2\", result, \"Teams working table open/close Verification\")",
"def test_retrieve_team(self):\n pass",
"def test_returns_200_if_user_team_member(self):\n # Arrange\n # Create a team and add user to it\n test_team = create_canned_team()\n add_user_to_team(\n test_team, self.test_user, TeamMemberFunctions.MEMBER.value, True\n )\n # Assign team to project\n assign_team_to_project(\n self.test_project, test_team, TeamRoles.PROJECT_MANAGER.value\n )\n # Act\n response = self.client.get(\n self.url, headers={\"Authorization\": self.user_session_token}\n )\n # Assert\n self.assertEqual(response.status_code, 200)\n TestGetProjectsRestAPI.assert_project_response(\n response.json, self.test_project, assert_type=\"notasks\"\n )",
"def test_user_details(self):\n self.tc_id = \"Ts_004\"\n self.tc_desc = \"Verify the admin is able to do operation with user\"\n self.tc_step = \"TC Start\"\n\n user = adminLoginManageuser(self.driver)\n\n self.tc_step = \"Launch the url\"\n user.launchUrl(self.url)\n\n self.tc_step = \"Enter the login details\"\n user.clickVendorLogin()\n user.enterloginDetails(self.username, self.password)\n user.clickLogin()\n user.clickadminTab()\n user.clickuser()\n user.adduser()\n user.enteruserDetails(self.userfrstname, self.userlastname, self.useremail)\n user.usersubmit()\n user.oksuccess()\n user.clickedituser()\n user.edituserDetails(self.editfrstname, self.editlastname)\n user.userupdate()\n user.userupdateok()\n user.clickdeleteuser()\n user.clickconfirmdelete()\n user.okdelete()",
"def test_user_get_team_page():\n app = create_ctfd()\n with app.app_context():\n register_user(app)\n client = login_as_user(app)\n r = client.get('/team/2')\n assert r.status_code == 200\n destroy_ctfd(app)",
"def test_get_teams(self):\n pass",
"def test_get_teams(self):\n pass",
"def test_view_status(self):\n self.add_testuser()\n response = self.client.get(\"/profile/testuser/edit\")\n self.assertTrue(response.status_code == 301)",
"def test_teams_list(self):\n pass",
"def test_teams_get_users_teams_v2(self):\n pass",
"def test_team_view(self):\n with self.app.app_context():\n u = user(save=True)\n t = team(users=[u], save=True)\n\n response = self.client.get('/team/%s' % t.slug)\n eq_(response.status_code, 200)\n\n response = self.client.get('/team/not-a-real-team')\n eq_(response.status_code, 404)",
"def test_set_user_status(self):\n pass",
"def test_assign_managing_team(self):\n pass",
"def test_get_team_history(self):\n pass",
"def test_get_teams(self):\n owner2 = AnotherUserFactory(email_confirmed=True)\n owner3 = AnotherUserFactory(username='team owner 3', email='[email protected]', email_confirmed=True,)\n TeamFactory(owner=owner2, name='second team')\n TeamFactory(owner=owner3, name='third team')\n\n usual_user = UserFactory(\n username='usualuser',\n email='[email protected]',\n email_confirmed=True,\n )\n token = Token.objects.get(user=usual_user)\n self.client.credentials(\n HTTP_AUTHORIZATION=f'Token {token.key}')\n\n response = self.client.get(reverse('api:teams-list'))\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(len(response.data.get('results')), 3)",
"def test_create_team(self):\n pass",
"def test_TC_Users_200819_3(self):\n self.log.info(\"*#\" * 20)\n self.log.info(\"test_TC_Users_200819_3 started\")\n self.log.info(\"*#\" * 20)\n self.us.gotoUsers()\n self.us.clickViewProfile()\n result = self.us.verifyViewProfile()\n self.ts.markFinal(\"test_TC_Users_200819_3\", result, \"View profile button Verification\")",
"def test_detail_user(self):\n \n for user in User.objects.filter(Q(groups__name='Administrator') | Q(\n groups__name='Viewer') | Q(groups__name='User')):\n\n token = get_token(user)\n\n url_detail_user = reverse(\n 'user_get_retrieve_destroy_update', args=[user.organization_member.pk])\n\n response = self.client.get(url_detail_user,\n HTTP_AUTHORIZATION=f\"JWT {token}\",\n format='json')\n\n # Validate status code response\n self.assertEqual(response.status_code, 200)\n\n org_comp = comparator_member(user.organization_member, response.data)\n self.assertEqual(org_comp, True)",
"def test_verification_status_visible(self):\r\n self.client.login(username=\"jack\", password=\"test\")\r\n self.check_verification_status_on('verified', 'You\\'re enrolled as a verified student')\r\n self.check_verification_status_on('honor', 'You\\'re enrolled as an honor code student')\r\n self.check_verification_status_on('audit', 'You\\'re auditing this course')",
"def test_detail(self, client, users):\n user = users[0]\n url = reverse('users:detail', args=(user.pk,))\n response = client.get(url)\n assert response.status_code == 200\n assert user.username in str(response.content)",
"def test_teams_get_users_teams_v1(self):\n pass",
"def test_basketballteams_get(self):\n pass",
"def test_TC_Users_200819_1(self):\n self.log.info(\"*#\" * 20)\n self.log.info(\"test_TC_Users_200819_1 started\")\n self.log.info(\"*#\" * 20)\n self.us.gotoUsers()\n self.us.clickDashboard()\n result = self.us.verifyDashboard()\n self.ts.markFinal(\"test_TC_Users_200819_1\", result, \"Dashboard Link Verification\")",
"def test_teams_read(self):\n pass",
"def test_update_team(self):\n pass",
"def test_success(self, data_flow_api_client):\n response = data_flow_api_client.get(self.view_url)\n\n assert response.status_code == status.HTTP_200_OK\n\n response_team = response.json()['results'][0]\n team = Team.objects.get(id=response_team['id'])\n\n assert response_team == get_expected_data_from_team(team)",
"def test_user_get_private_team_page():\n app = create_ctfd()\n with app.app_context():\n register_user(app)\n client = login_as_user(app)\n r = client.get('/team')\n assert r.status_code == 200\n destroy_ctfd(app)",
"def clickTeam(self):\n # self.webScroll(direction=\"down\")\n self.scrollIntoView(locator=self._userProfile_team, locatorType=\"xpath\")\n self.waitForElement(locator=self._userProfile_team, locatorType=\"xpath\")\n self.elementClick(locator=self._userProfile_team, locatorType=\"xpath\")\n pp.time.sleep(2)",
"def test_teams_create(self):\n pass",
"def test_user_listed(self):\n url = reverse('admin:core_user_changelist')\n res = self.client.get(url)\n #assert are django checks on http request is 200\n self.assertContains(res, self.user.name)\n self.assertContains(res, self.user.email)"
] | [
"0.7014695",
"0.7011689",
"0.68079954",
"0.67049444",
"0.6703819",
"0.6601137",
"0.6601137",
"0.6568405",
"0.6561567",
"0.64933634",
"0.6491702",
"0.64130616",
"0.63558125",
"0.63490254",
"0.63477075",
"0.6313082",
"0.63112307",
"0.6299645",
"0.6294707",
"0.62679505",
"0.62668633",
"0.62635493",
"0.6243649",
"0.6238014",
"0.6236247",
"0.6200893",
"0.6173999",
"0.6153592",
"0.61176336",
"0.61141855"
] | 0.70787454 | 0 |
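The document above (and several negatives) call `mark`/`markFinal` on a `self.ts` helper that is never shown. A hedged sketch of what such a `TestStatus` accumulator typically looks like, reconstructed from the call sites only; this is an assumption, not code from the dataset:

```python
class TestStatus:
    """Accumulates soft-assert checkpoints and raises once at the end."""

    def __init__(self):
        self.results = []

    def mark(self, result, resultMessage):
        # Record an intermediate verification without stopping the test.
        self.results.append((bool(result), resultMessage))

    def markFinal(self, testName, result, resultMessage):
        # Record the final checkpoint, then fail the test if any check failed.
        self.mark(result, resultMessage)
        failures = [msg for ok, msg in self.results if not ok]
        self.results = []  # reset for the next test case
        assert not failures, f"{testName} failed: {failures}"
```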
Build a list of files (and directories) by iterating recursively over the given path | def build_file_list(path):
dirs = []
files = []
for x in path.iterdir():
try:
if x.is_symlink():
continue
elif x.is_dir():
dirs.append(x)
new_dirs, new_files = build_file_list(x)
dirs.extend(new_dirs)
files.extend(new_files)
elif x.is_file():
files.append(x)
except PermissionError:
continue
return dirs, files | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_files(path: str) -> List[str]:\n if not isdir(path):\n return [path] # its expected to return a list each time even if its a single element\n return [file for fileOrDir in listdir(path) for file in get_files(path + '/' + fileOrDir)]\n # return list of each file returned by the recursive call getFiles(fileOrDir) on\n # each fileOrDir in listdir(path)",
"def gen_recursive_filelist(d):\n \n for root, directories, files in os.walk(d):\n for file in files:\n yield os.path.join(root, file)",
"def _listFiles(files, path):\n\n for item in os.listdir(path):\n item = os.path.join(path, item)\n if os.path.isdir(item):\n _listFiles(files, item)\n else:\n files.append(item)",
"def listFiles(path):\n outputList = []\n for root, dirs, files in os.walk(path):\n for f in files:\n outputList.append('/'.join([root, f]))\n return outputList",
"def _get_files(self, path):\n result = []\n for f in os.listdir(path):\n if os.path.isdir(os.path.join(path, f)):\n result += self._get_files(os.path.join(path, f))\n else:\n result.append(os.path.join(path, f))\n return result",
"def list_files_and_dirs(self, path=\"/\"):\n dirs = self.list_dirs(path)\n files = self.list_files(path)\n return dirs + files",
"def getfiles(path): \n global picture_list\n try:\n # dir_list has all files and directories in path\n # any directory is WITHOUT ending '/'\n dir_list = os.listdir(path)\n except:\n # path may not be a directory or permission error\n print \"ERROR: in getfiles, picture_list:\", picture_list\n picture_list = None\n return\n \n for line in dir_list:\n file = path + \"/\" + line\n if os.path.isdir(file):\n getfiles( file) # dig into subdirectory\n elif isPicture(file):\n picture_list.append(file)\n else: \n # neither picture file nor directory; ignore \n pass\n return",
"def iterate_path(path):\n fl_lst = []\n for fn in os.listdir(path):\n if fn.endswith('.jpg') or fn.endswith('.png'):\n fname, ext = os.path.splitext(fn)\n tn = fname + '.txt'\n fl_lst.append([fn, tn])\n return fl_lst",
"def list_dir(self, path):",
"def all_files_under(path):\r\n for cur_path, dirnames, filenames in os.walk(path):\r\n for filename in filenames:\r\n yield os.path.join(cur_path, filename)",
"def iter_files(path):\n if os.path.isfile(path):\n yield path\n elif os.path.isdir(path):\n for dirpath, _, filenames in os.walk(path):\n for f in filenames:\n yield os.path.join(dirpath, f)\n else:\n raise RuntimeError('Path %s is invalid' % path)",
"def walk_directory(self, path):\n files = []\n for dirpath, dirnames, filenames in os.walk(path):\n for filename in filenames:\n files.append(os.path.join(dirpath, filename))\n return files",
"def get_directories_recursive(self, path) :\n\n if path.is_dir() :\n yield path\n for child in path.iterdir():\n yield from self.get_directories_recursive(child)\n elif path.is_file() :\n yield path",
"def get_all_files_and_nested(file_path):\n stack_dirs = list()\n all_files = list()\n first_level_files = listdir(file_path)\n for f in first_level_files:\n full_f_path = join(file_path, f)\n if isdir(full_f_path):\n stack_dirs.append(full_f_path)\n else:\n all_files.append(full_f_path)\n for d in stack_dirs:\n all_files.extend(get_all_files_and_nested(d))\n return all_files",
"def get_files_in_dir(path):\n return [os.path.join(dir_name, file)\n for dir_name, subdirs, files in os.walk(path)\n for file in files]",
"def collect_files(path):\n logger.info('Building file list...')\n start_time = datetime.now()\n dirs, files = build_file_list(path)\n seconds = (datetime.now() - start_time).total_seconds()\n logger.info(\n 'Found %d files and directories in %.3fs',\n (len(dirs) + len(files)),\n seconds\n )\n return dirs, files",
"def _recurse(self, path):\n files = {}\n empty_dirs = []\n try:\n sub_paths = os.listdir(path)\n except OSError as exc:\n if exc.errno == errno.ENOENT:\n # Path does not exist\n sys.stderr.write(\"{} does not exist\\n\".format(path))\n sys.exit(42)\n elif exc.errno in (errno.EINVAL, errno.ENOTDIR):\n # Path is a file (EINVAL on Windows, ENOTDIR otherwise)\n files[path] = self._mode(path)\n else:\n if not sub_paths:\n empty_dirs.append(path)\n for fn_ in sub_paths:\n files_, empty_dirs_ = self._recurse(os.path.join(path, fn_))\n files.update(files_)\n empty_dirs.extend(empty_dirs_)\n\n return files, empty_dirs",
"def walk_deep(path):\n for root, _, filenames in os.walk(path):\n for f in filenames:\n yield os.path.join(root, f).replace('\\\\', '/')",
"def listDir(path):\n filenames = []\n for root, dirs, files in os.walk(path):\n for i in files:\n filenames.append(os.path.join(root, i))\n return filenames",
"def traverse(self, path):\n\n path_list = [s for s in path.split('/') if len(s) > 0 ]\n # print(path)\n # print('files:', self.files)\n directory = self.files\n index = 0\n while index < len(path_list) and path_list[index] in directory:\n if type(directory[path_list[index]]) is str: # directory is a file\n break\n directory = directory[path_list[index]]\n index += 1\n print('info', directory, path_list[index:])\n return directory, path_list[index:]",
"def list_files(startpath):\n for root, _, files in os.walk(startpath):\n for f in files:\n yield os.path.join(root, f)",
"def find_all_files(path):\n for root, dirs, files in os.walk(os.path.join(path)):\n for filename in files:\n yield os.path.join(root, filename)",
"def scan_tree(path):\n list_of_file_paths = []\n for file_obj in scandir(path):\n if file_obj.is_dir(follow_symlinks=False):\n # yield from scan_tree(file_obj.path)\n list_of_file_paths.extend(scan_tree(file_obj.path))\n else:\n # yield file_path\n if 'DS_Store' not in file_obj.path:\n list_of_file_paths.append(file_obj.path)\n return list_of_file_paths",
"def build_files_list(root_dir):\n return [\n os.path.join(dirpath, file_path)\n for dirpath, subdirs, files in os.walk(root_dir)\n for file_path in files\n ]",
"def build_files_list(root_dir):\n return [\n os.path.join(dirpath, file_path)\n for dirpath, subdirs, files in os.walk(root_dir)\n for file_path in files\n ]",
"def get_dir_recursive(path: str) -> List[str]:\n files = []\n for dir_entry in os.scandir(path):\n if dir_entry.is_dir(follow_symlinks=True):\n files.extend(get_dir_recursive(dir_entry))\n else:\n files.append(dir_entry.path)\n return files",
"def _get_all_files(dir_path):\n for root, _, filenames in os.walk(dir_path):\n for name in filenames:\n target = os.path.join(root, name)\n yield target",
"def scandir(path='.'):\r\n for name in os.listdir(path):\r\n yield GenericDirEntry(path, name)",
"def get_file_list(path: str) -> list:\n\treturn [f for f in listdir(path) if isfile(join(path, f))]",
"def trip_at(self, path, lazy=False):\n def iterator():\n for root, folders, filenames in os.walk(self.join(path)):\n for filename in filenames:\n yield join(root, filename)\n\n return lazy and iterator() or list(iterator())"
] | [
"0.7776247",
"0.7598177",
"0.759159",
"0.75369984",
"0.74048036",
"0.73601836",
"0.7359687",
"0.7189919",
"0.7168541",
"0.71555066",
"0.71307224",
"0.71246266",
"0.7100239",
"0.7060658",
"0.70578027",
"0.70476854",
"0.7047233",
"0.7002175",
"0.6991864",
"0.6990601",
"0.69890827",
"0.6981658",
"0.6965208",
"0.6948967",
"0.6948967",
"0.6908803",
"0.68803686",
"0.68600404",
"0.6832498",
"0.6828688"
] | 0.80638224 | 0 |
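For comparison with `build_file_list` above, a sketch of the same recursive dirs/files split using `os.scandir` instead of `pathlib`; the policy (skip symlinks, swallow `PermissionError`) is kept, though here the error is caught per directory rather than per entry:

```python
import os


def scan_tree(path):
    dirs, files = [], []
    try:
        with os.scandir(path) as it:
            for entry in it:
                if entry.is_symlink():
                    continue  # same policy as build_file_list: skip symlinks
                if entry.is_dir(follow_symlinks=False):
                    dirs.append(entry.path)
                    sub_dirs, sub_files = scan_tree(entry.path)
                    dirs.extend(sub_dirs)
                    files.extend(sub_files)
                elif entry.is_file(follow_symlinks=False):
                    files.append(entry.path)
    except PermissionError:
        pass  # unreadable directory contributes nothing, as in the original
    return dirs, files
```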
Implementing switch to buy functionality | def switch_to_buy(self):
self.switch_to_window()
self.accept_ssl_certificate() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def trade_action(self, BUY_QTY):\n BUY_QTY = 4500\n self.trade(BUY_QTY)\n #self.show()",
"def buy(self, price, volume):\r\n self.order(\"bid\", price, volume)",
"def purchase(self, item_type):",
"def doBuyIn(self):\n self.protocol.sendPacket(networkpackets.PacketPokerBuyIn(amount=self.max_buy_in, **self._serial_and_game_id))\n self.protocol.sendPacket(networkpackets.PacketPokerAutoBlindAnte(**self._serial_and_game_id))",
"def do_turn(self, price: int) -> SalesmanAction:",
"def productactivate():\n pass",
"def buy(self, price, chosen_class):\n return self.customer_classes[chosen_class - 1].buy(price)",
"def buy(self):\n\n from_symbol = self.symbol\n to_symbol = self.currency\n price = self.data[0].close\n amount = self.portfolio['buy_sell_amount'][self.currency]\n date = self.date\n\n if self.slippage:\n slip_factor = (self.data[-1].high - self.data[-1].close)*self.slippage\n price += np.abs(slip_factor)\n\n self.trade_manager.buy(from_symbol, to_symbol, price, amount, date)",
"def next(self):\r\n if self.position.size == 0:\r\n # The condition for activating BUY function --> By checking oversold condition.\r\n if self.rsi_2 < 30 and self.rsi_3 < 40:\r\n self.buyAlert = True\r\n # If BUY is activated and below conditions are met, then aa buy order would be placed.\r\n if self.rsi_1 < 50 and self.rsi_2 > 30 and self.rsi_3 > 25 and self.buyAlert:\r\n size = round((self.broker.getcash() / self.data), 3)\r\n self.order = self.buy(size=size)\r\n self.buyAlert = False\r\n print(round(self.broker.get_cash(), 1))\r\n # print(self.datas[0].low[0])\r\n\r\n if self.position.size != 0:\r\n # The condition for activating SELL_1 function --> Waiting for RSI to reach overbought zone.\r\n if self.rsi_4 > 67:\r\n self.sellAlert1 = True\r\n # If SELL_1 is activated and below conditions are met, then a sell order would be placed.\r\n if (self.rsi_1 < 70 and self.rsi_4 < 60) and self.sellAlert1:\r\n self.close()\r\n self.successNum += 1\r\n self.sellAlert1 = False\r\n\r\n # The condition for activating SELL_2 function --> Activated at overbought condition with RSI>85\r\n if self.rsi_4 > 85:\r\n self.sellAlert2 = True\r\n # If SELL_2 is activated and below conditions are met, then a sell order would be placed.\r\n if (self.rsi_4 < 80) and self.sellAlert2:\r\n self.close()\r\n self.successNum += 1\r\n self.sellAlert1 = False\r\n self.sellAlert2 = False\r\n\r\n # Setting Stop Loss for wrongly opened position.\r\n if 0.82 * self.order.executed.price > self.datas[0].close > 0.8 * self.order.executed.price:\r\n self.close()\r\n self.failureNum += 1\r\n print('Shit !!! Failed for {} times.'.format(self.failureNum))",
"def buy(self, price, active, option, direction, expiry=60):\n name = 'buyV2'\n server_timestamp = self._timesync.server_timestamp\n expiration_timestamp = self._timesync.expiration_timestamp + expiry\n data = {'price': price,\n 'act': active,\n 'type': option,\n 'direction': direction,\n 'time': server_timestamp,\n 'exp': expiration_timestamp}\n self._send_websocket_request(name, data)",
"def buy():\n\n # User reached route via GET (as by clicking a link or via redirect)\n if request.method == \"GET\":\n return render_template(\"buy.html\")\n else:\n symbol = request.form.get(\"symbol\")\n if not symbol:\n return apology(\"must provide symbol\", 400)\n\n # Check if symbol exist in lookup(symbol)\n symbol = lookup(symbol)\n\n if not symbol :\n return apology(\"symbol doesn't exist,sorry\", 400)\n else:\n name = symbol.get(\"name\")\n price = symbol.get(\"price\")\n symbol = symbol.get(\"symbol\")\n\n # Check if shares of name is a integer\n shares = request.form.get(\"shares\")\n\n # https://www.geeksforgeeks.org/program-check-input-integer-string/\n n = len(shares)\n for i in range(n) :\n if shares[i].isdigit() != True :\n return apology(\"shares need to be a number\", 400)\n\n shares = int(shares)\n\n # if positive number\n if shares > 0:\n\n # Query database for user's cash\n cash = db.execute(\"SELECT cash FROM users WHERE id = :userID\",\n userID=session[\"user_id\"])\n\n # Get cash\n cash = cash[0][\"cash\"]\n\n # Check user if have enough money\n buyNeed = shares*price\n if cash > buyNeed:\n\n # Update csah in users TABLE\n db.execute(\"UPDATE users SET cash = :cash WHERE id = :userID\", cash=cash-buyNeed, userID=session[\"user_id\"])\n\n # Check purchase time\n now = datetime.now(timezone('Asia/Shanghai'))\n\n # Add to buy table\n db.execute(\"INSERT INTO buy (date, symbol, name, price, shares, costmoney, userID) VALUES (:date, :symbol, :name, :price, :shares, :costmoney, :userID)\",\n date=now, symbol=symbol, name=name, price=price, shares=shares, costmoney=buyNeed, userID=session[\"user_id\"])\n\n # Add to buy-sell table\n db.execute(\"INSERT INTO bs (symbol, price, shares, date, userID) VALUES (:symbol, :price, :shares, :date, :userID)\", symbol=symbol, price=usd(price), shares=shares, date=now, userID=session[\"user_id\"])\n\n # Count finally cash\n endCash=cash-buyNeed\n\n # Count total shares and costmoney by buy\n sharesTotal = db.execute(\"SELECT shares FROM buy WHERE userID = :userID and name = :name\", userID=session[\"user_id\"], name=name)\n costmoneyTotal = db.execute(\"SELECT costmoney FROM buy WHERE userID = :userID and name = :name\", userID=session[\"user_id\"], name=name)\n\n # len(sharesTotal)\n st = len(sharesTotal)\n\n # Sum shares\n sumItem = 0\n for item in range(st):\n sumItem = sharesTotal[item][\"shares\"] + sumItem\n sharesTotal_2 = sumItem\n\n # Sum cost money\n sumItem2 = 0\n for item2 in range(st):\n sumItem2 = costmoneyTotal[item2][\"costmoney\"] + sumItem2\n costmoneyTotal_2 = sumItem2\n\n\n # Ensure return total number and totalGet by sell\n sharesTotalSell = db.execute(\"SELECT shares FROM sell WHERE userID = :userID and name = :name\", userID=session[\"user_id\"], name=name)\n costmoneyTotalSell = db.execute(\"SELECT totalGet FROM sell WHERE userID = :userID and name = :name\", userID=session[\"user_id\"], name=name)\n\n # Len of sharesTotalSell\n stS = len(sharesTotalSell)\n\n # Sum of sell shares\n sumItem3 = 0\n for item3 in range(stS):\n sumItem3 = sharesTotalSell[item3][\"shares\"] + sumItem3\n\n # Buy - sell shares\n sharesTotal_2 = sharesTotal_2-sumItem3\n\n # Sum of sell totalGet\n sumItem4 = 0\n for item4 in range(stS):\n sumItem4= costmoneyTotalSell[item4][\"totalGet\"] + sumItem4\n\n # Buy -sell totalGet\n costmoneyTotal_2 = costmoneyTotal_2-sumItem4\n\n # Test if can update total though shares\n total = db.execute(\"SELECT sharesTotal FROM total WHERE userID = :userID and name = :name\", userID=session[\"user_id\"], 
name=name)\n\n # Insert total TABLE\n if not total:\n db.execute(\"INSERT INTO total (name, symbol, price, sharesTotal, costmoneyTotal, userID) VALUES (:name, :symbol, :price, :sharesTotal, :costmoneyTotal, :userID)\",\n name=name, symbol=symbol, price=price, sharesTotal=sharesTotal_2, costmoneyTotal=costmoneyTotal_2, userID=session[\"user_id\"])\n # Update total TABLE\n else:\n db.execute(\"UPDATE total SET sharesTotal = :sharesTotal, costmoneyTotal = :costmoneyTotal WHERE userID = :userID and name = :name\", sharesTotal=sharesTotal_2, costmoneyTotal=costmoneyTotal_2, userID=session[\"user_id\"], name=name)\n\n # SELECT all rows from total TABLE WHERE userID = session[\"user_id\"]\n total = db.execute(\"SELECT * FROM total WHERE userID = :userID\", userID=session[\"user_id\"])\n\n # Len of total\n tlen = len(total)\n\n # Get user cash\n cash = db.execute(\"SELECT cash FROM users WHERE id = :userID\",\n userID=session[\"user_id\"])\n\n cash = usd(cash[0][\"cash\"])\n\n # Change price, costmoney to usd format\n for n in range(tlen):\n total[n][\"price\"] = usd(total[n][\"price\"])\n total[n][\"costmoneyTotal\"] = usd(total[n][\"costmoneyTotal\"])\n total = list(reversed(total))\n\n # Flash\n flash(\"buy\")\n return render_template(\"buyed.html\", total=total, tlen=tlen, cash=cash)\n\n else:\n # Else cash not enough\n return apology(\"cash not enough\", 400)\n\n else:\n # Else not positive number\n return apology(\"not positive number\", 400)",
"def buy_card(self):\n\n print(f\"Hand has buying power {self.hand_buying_power}...\")\n bought_card = None\n\n # by Platinium, if possible\n # otherwise (game stage agnostic) can buy a province or colony, always buy it\n if ((self.highest_buyable_money == cards.PLATINUM) and\n (self.game_stage == GameStage.early_game)):\n bought_card = cards.PLATINUM\n elif ((self.highest_buyable_victory_points == cards.PROVINCE) or\n (self.highest_buyable_victory_points == cards.COLONY)):\n bought_card = self.highest_buyable_victory_points\n else:\n # buy the highest buyable money by default\n if (self.highest_buyable_money != cards.COPPER):\n bought_card = self.highest_buyable_money\n\n # except if in the late game stage, in which case buy the highest\n # buyable victory points instead\n if ((self.game_stage == GameStage.late_game) and\n (self.highest_buyable_victory_points) and\n (self.highest_buyable_victory_points.victory_points > 0)):\n bought_card = self.highest_buyable_victory_points\n print(f\"Late Stage Game, so buying victory points over money\")\n\n # explain the play\n self.speak_hand()\n s = f\"for total buying power of {self.hand_buying_power}\"\n self.game.speak_str(s)\n\n # gain the card bought, if any, to the discard pile:\n if bought_card:\n s = f\"I buy {bought_card.name}\"\n self.game.speak_str(s)\n\n # gain the card to the discard pile\n self.deck.discard.append(bought_card)\n self.game.buy_card(bought_card)\n else:\n s = f\"I do not buy anything\"\n self.game.speak_str(s)\n\n # the whole hand is used up buying the card, discard the hand\n self.deck.discard_hand()",
"async def _vis_buy(self, ctx, *args):\n if has_post_permission(ctx.guild.id, ctx.channel.id):\n number, item = ch.parse_number_and_name(args)\n if item:\n await ctx.send(vis_helpers.shop_buy(ctx.user_object, item, number))",
"def buy():\n if request.method == \"GET\":\n return render_template(\"buy.html\")\n elif request.method == \"POST\":\n\n symbolInput = request.form.get(\"symbol\")\n shares = float(request.form.get(\"shares\"))\n\n symbolName = lookup(symbolInput)[\"name\"]\n symbolPrice = lookup(symbolInput)[\"price\"]\n symbolTicker = lookup(symbolInput)[\"symbol\"]\n\n if symbolInput != symbolTicker or symbolInput == \"\" or shares == \"\" or shares < 1:\n return apology(\"No buy for you senpai!\")\n\n else:\n userId = session[\"user_id\"]\n totalPrice = shares * symbolPrice\n availableCash = float(db.execute(f\"SELECT cash FROM users WHERE id={userId}\")[0][\"cash\"])\n\n if totalPrice > availableCash:\n return apology(\"Not enough available tendies\")\n else:\n now = datetime.now()\n transTime = now.strftime(\"%d/%m/%Y %H:%M:%S\")\n availableCash -= totalPrice\n\n db.execute(f\"UPDATE users SET cash = '{availableCash}' WHERE id = '{userId}'\")\n\n db.execute(f\"INSERT INTO transactions (trans_time, trans_type, user_id, symbol, price, shares, value, name, current_price) VALUES ('{transTime}','BUY','{userId}','{symbolTicker}','{symbolPrice}','{shares}','{totalPrice}','{symbolName}','{symbolPrice}')\")\n\n return redirect(\"/\")",
"def before_trading_start(context, data):\n pipe_bbands = algo.pipeline_output('pipe_bbands') \n\n # Find list of symbols to buy/sell.\n context.buy = pipe_bbands[pipe_bbands['buy']].index.tolist()\n context.sell = pipe_bbands[pipe_bbands['sell']].index.tolist()",
"def buy(self,\n currency_pair,\n rate,\n amount):\n pass",
"def buy():\n\n # User reached route via POST (as by submitting a form via POST)\n if request.method == \"POST\":\n\n # Access form data\n symbol = request.form.get(\"symbol\")\n shares = request.form.get(\"shares\")\n\n # Access user's id\n user_id = session[\"user_id\"]\n\n # Ensure symbol was submitted\n if not symbol:\n return apology(\"must provide symbol\", 400)\n\n # Ensure shares was submitted\n if not shares:\n return apology(\"must provide shares\", 400)\n\n # Check if submitted shares string is a positive integer\n if not shares.isdigit() :\n return apology(\"shares is not a number\", 400)\n # Shares is valid\n else:\n shares = int(shares)\n\n # Obtain quote using lookup function\n QUOTED = lookup(symbol)\n\n # Ensure valid symbol was submitted\n if QUOTED is None:\n return apology(\"invalid symbol\", 400)\n\n # Check if user has enough cash to buy shares\n cash = db.execute(\"SELECT cash FROM users WHERE id = ?\", user_id)[0][\"cash\"]\n cost = QUOTED[\"price\"] * shares\n if cash < cost:\n return apology(\"can't afford\", 400)\n\n # New amount of cash user has after buying shares\n new_cash_total = cash - cost\n\n # Update cash in users table for user\n db.execute(\"UPDATE users SET cash = ? WHERE id = ?\", new_cash_total, user_id)\n\n # Insert buy log into history table\n db.execute(\"INSERT INTO history (user_id, symbol, shares, price, transacted) VALUES (?, ?, ?, ?, datetime('now'))\",\n user_id, QUOTED[\"symbol\"], shares, QUOTED[\"price\"])\n\n # Keep track of shares in shares table\n current_shares = db.execute(\"SELECT shares_count FROM shares WHERE user_id = ? AND symbol = ?\", user_id, QUOTED[\"symbol\"])\n\n # If shares have not been bought before\n if not current_shares:\n db.execute(\"INSERT INTO shares VALUES (?, ?, ?, ?, ?, ?)\",\n user_id, QUOTED[\"symbol\"], QUOTED[\"name\"], shares, QUOTED[\"price\"], QUOTED[\"price\"])\n\n # If shares have been bought before\n else:\n new_shares_total = current_shares[0][\"shares_count\"] + shares\n shares_value_total = new_shares_total * QUOTED[\"price\"]\n db.execute(\"UPDATE shares SET shares_count = ?, price = ?, total = ? WHERE user_id = ? AND symbol = ?\",\n new_shares_total, QUOTED[\"price\"], shares_value_total, user_id, QUOTED[\"symbol\"])\n\n # Redirect user to home page\n flash(\"Bought!\", \"info\")\n return redirect(\"/\")\n\n # User reached route via GET (as by clicking a link or via redirect)\n else:\n return render_template(\"buy.html\")",
"def buy():\n\n def price_check(cash, price, shares):\n \"\"\"check affordability of stock vs cash on hand\"\"\"\n affordable = (cash - (price * shares)) > 0\n\n if affordable:\n return affordable\n\n else:\n return False\n\n if request.method == \"POST\":\n\n stock = lookup(request.form.get(\"symbol\"))\n\n # check symbol and share # are valid\n if not stock:\n return apology(\"Missing or Incorrect Symbol\", 400)\n\n try:\n shares = int(request.form.get(\"shares\"))\n except ValueError:\n return apology(\"Input at least 1 share\", 400)\n\n if shares < 0:\n return apology(\"Input at least 1 share\", 400)\n\n\n # cast shares to int & fetch users cash on hand\n shares = int(request.form.get(\"shares\"))\n user_cash = db.execute(\"SELECT cash FROM users WHERE id = :user_id\", user_id=session[\"user_id\"])[0][\"cash\"]\n\n if price_check(user_cash, stock[\"price\"], shares) == False:\n return apology(\"Sorry, you can't afford this purchase.\", 400)\n\n else:\n # define variables for inserting into transactions table\n purchase_date = datetime.now().strftime(\"%Y-%m-%d %H:%M:%S\")\n\n # update user cash\n user_cash = user_cash - (stock[\"price\"]*shares)\n db.execute(\"UPDATE users SET cash = :user_cash WHERE id = :user_id\", user_id=session[\"user_id\"], user_cash=user_cash)\n\n # update transactions table with most recent transaction\n db.execute(\"\"\"\n INSERT INTO transactions(user_id, date, symbol, shares, price)\n VALUES(:user_id, :date, :symbol, :shares, :price)\n \"\"\",\n user_id=session[\"user_id\"],\n date=purchase_date,\n symbol=stock[\"symbol\"],\n shares=shares,\n price=stock[\"price\"]\n )\n\n return redirect(\"/\")\n\n else:\n return render_template(\"buy.html\")",
"def buy():\n\n # if user reached route via GET return them an input form\n if request.method == \"GET\":\n return render_template(\"buy.html\")\n\n # if user reached route via POST (as by submitting a form via POST)\n elif request.method == \"POST\":\n\n # get id as it is used many times\n id = session[\"user_id\"]\n\n # get symbol as it is used many times\n symbol = request.form.get(\"symbol\")\n\n # get share volume requested\n volume = int(request.form.get(\"volume\"))\n\n # ensure stock symbol was submitted\n if not symbol:\n return apology(\"you must provide a stock symbol\")\n\n # ensure positive volume (integer rule handled elsewhere)\n elif volume <= 0:\n return apology(\"volume must be integer greater than 0\")\n\n # lookup stock on yahoo\n stock_info = lookup(symbol)\n\n # if error looking stock up\n if not stock_info:\n return apology(\"that stock symbol doesn't exist\")\n\n # query database for cash balance\n cash = db.execute(\"SELECT cash FROM users WHERE id = :id\", id=id)\n cash = cash[0]['cash']\n\n # cost of requested shares\n purchase_cost = volume * stock_info['price']\n\n # if sufficient cash, make purchase, else return apology\n if purchase_cost <= cash:\n\n # check if user already owns any stock in this company\n existing = db.execute(\"SELECT num_shares FROM portfolio WHERE id = :id AND symbol = :symbol\", id=id, symbol=symbol)\n\n # if no existing shares, add them\n if not existing:\n new = db.execute(\"INSERT INTO portfolio (id, symbol, num_shares) VALUES(:id, :symbol, :num_shares)\", id=id, symbol=symbol, num_shares=volume)\n\n # if there are existing shares, add new volume to them\n else:\n add = db.execute(\"UPDATE portfolio SET num_shares = :num_shares WHERE id = :id AND symbol = :symbol\", num_shares=existing[0]['num_shares'] + volume, id=id, symbol=symbol)\n\n # set date string\n dstring = time(str(datetime.datetime.utcnow()))\n\n # update transaction history\n result2 = db.execute(\"INSERT INTO `transaction` (id, symbol, volume, share_price, dtstamp) VALUES(:id, :symbol, :volume, :share_price, :dtstamp)\", id=id, symbol=symbol, volume=volume, share_price=stock_info['price'], dtstamp=dstring)\n\n # reduce cash balance\n result = db.execute(\"UPDATE users SET cash = :cash WHERE id = :id\", cash=cash-purchase_cost, id=id)\n\n # redirect user to home page\n return redirect(url_for(\"index\"))\n else:\n return apology(\"insufficient funds\")",
"def put_ask(curr, depth, asset, price, price2):\n #Transaction 1\n tmp_list = bitty.buy_limit(asset+\"-\"+curr, depth, price)\n time.sleep(5) #wait for network latency\n wait = 0\n while wait < 15:\n oList = bitty.get_open_orders(asset + \"-\" + curr)['result']\n if oList: #if there are orders open, wait until 15\n wait += 1\n print(\"Alt order outstanding\")\n else:#order is filled, switch liquidity assets\n break\n time.sleep(1)\n print(wait)\n if wait == 15: #if it's been 15 seconds and the order is not filled, cancel it\n\n for o in oList:\n orderId = o['OrderUuid']\n bitty.cancel(orderId)\n time.sleep(5)\n if asset == \"BTC\":\n asset = \"ETH\"\n elif asset == \"ETH\":\n asset = \"BTC\"\n bal_result = bitty.get_balance(curr)['result'] # gets exact balance of the altcoin, including dust\n depth_to_main = bal_result['Balance']\n print(\"Order canceled, submitting sell order for any quantity filled.\")\n bitty.sell_limit(asset + \"-\" + curr, depth_to_main, price2)\n return(asset) #back to searching\n\n if asset == \"BTC\":\n asset = \"ETH\"\n elif asset == \"ETH\":\n asset = \"BTC\"\n\n #Transaction 2\n bal_result = bitty.get_balance(curr)['result'] # gets exact balance of the altcoin, including dust\n depth_to_main = bal_result['Balance']\n print(depth_to_main)\n print(\"Submitting transaction 2, please wait, this may take a while.\")\n tmp_list = bitty.sell_limit(asset + \"-\" + curr, depth_to_main, price2)\n while tmp_list['success'] == False:\n print(\"Order failed.\")\n time.sleep(5)\n tmp_list = bitty.sell_limit(asset + \"-\" + curr, depth_to_main, price2)\n\n time.sleep(15)#wait for latency\n wait = 5\n oList= []\n while wait < 86400: #wait ten minutes\n oList = bitty.get_open_orders(asset + \"-\" + curr)['result']\n if oList:\n wait += 5\n if wait % 60 == 0:\n price2 = recast_lower_sell(oList, asset, curr, price2)\n #elif wait > 675:\n # price2 = recast_lower_sell(oList, asset, curr, depth_to_main, price2)\n print(\"Main order outstanding\")\n else:\n return(asset)\n time.sleep(5)\n if wait == 86400:\n return(\"timeout\")",
"async def buy(self, pair: str, detection_name: str, trigger_data: Dict[str, Any]):\n\n params = core.Detector.get_detection_params(detection_name, {\n 'rebuy': True\n })\n\n if pair in self.watch_only_pairs:\n self.log.info(\"{} Cannot open buy trade on watch-only pair.\", pair)\n await self.reporter.send_alert(pair, trigger_data, detection_name, prefix='WATCH SKIP BUY')\n return\n\n if not self.pair_states[pair]['enable_buy']:\n self.log.info(\"{} Cannot open buy trade with buys disabled.\", pair)\n await self.reporter.send_alert(pair, trigger_data, detection_name, prefix='DISABLED SKIP BUY')\n return\n\n if not self.balancer.states[pair.split('-')[0]]['enable_refill']:\n self.log.info(\"{} Cannot open buy trade with refills disabled.\", pair)\n await self.reporter.send_alert(pair, trigger_data, detection_name, prefix='REFILL SKIP BUY')\n return\n\n new_trade = await self._trade_methods['buy'](pair, 'BUY', detection_name, trigger_data)\n if new_trade is not None:\n self.trades[pair]['open'].append(new_trade)\n await self._track_num_open_trades(pair)\n self.save_attr('trades', max_depth=1, filter_items=[pair])\n self.save_attr('trade_stats', max_depth=2, filter_items=[pair], filter_keys=[self.time_prefix])\n\n self.pair_states[pair]['enable_rebuy'] = params['rebuy']",
"def Trading(Seller,Buyer):\n if Seller.has_sold == False:\n if Buyer.like_buy >= Seller.like_sell:\n Seller.has_sold = True\n Buyer.has_bought = True\n Seller.sold_objects += 1\n Buyer.bought_objects += 1\n print('A trade has been made')\n else:\n Buyer.has_bought = False\n Seller.has_sold = False\n print('There was no deal')\n else:\n Buyer.has_bought = False",
"def buy():\n \n user_id = session[\"user_id\"]\n\n if request.method == \"GET\":\n return render_template(\"buy.html\")\n \n if request.method == \"POST\":\n \n # get required symbol\n symbol = request.form.get(\"symbol\").upper()\n try:\n qty = int(request.form.get(\"qty\"))\n except ValueError:\n return apology(\"QTY is empty!\", 400)\n \n # proceed buy function\n buy_result: Tuple[float, str] = buy_share(db, user_id, symbol, qty )\n if buy_result[0] == -1:\n return apology(buy_result[1], 400)\n\n return redirect(\"/\", 200)",
"def buy():\n if request.method == \"GET\":\n return render_template(\"buy.html\")\n\n elif request.method == \"POST\":\n shares = request.form.get(\"shares\")\n symbol = request.form.get(\"symbol\")\n try:\n float(shares)\n except ValueError:\n return apology(\"please input a valid number of shares\")\n try:\n int(shares)\n except ValueError:\n return apology(\"please input a valid number of shares\")\n shares = int(shares)\n\n if not shares or not float(shares) or not float(shares).is_integer() or float(shares) <= 0:\n return apology(\"input a valid number of shares to buy\")\n\n elif not symbol or not lookup(symbol):\n return apology(\"input a valid symbol\")\n\n elif type(shares) != int:\n return apology(\"How did you even get this error?!\")\n\n else:\n quote = lookup(symbol)\n current_price = float(quote[\"price\"])\n company = quote[\"name\"]\n shares_num = int(request.form.get(\"shares\"))\n shares_tcost = float(shares_num * current_price)\n balance = db.execute(\"SELECT cash FROM users WHERE id = :id\", id=session['user_id'])\n\n # balance[0] b/c the returned value of balance is a dict of multiple lists\n flbal = [float(i) for i in list(balance[0].values())]\n for bal in flbal:\n if bal - shares_tcost < 0:\n return apology(\"Sorry, you don't have enough money\")\n else:\n newshares = bal - shares_tcost\n newbalance = db.execute(\"UPDATE users SET cash = :cash WHERE id = :id\", cash=newshares, id=session['user_id'])\n newpurchase = db.execute(\"INSERT INTO History ('symbol', 'company', 'shares', 'price', 'totalprice', 'id', 'transaction_type') VALUES (:symbol, :company, :shares, :price, :totalprice, :id, :transaction_type)\",\n symbol=symbol, company=company, shares=shares_num, price=current_price, totalprice=shares_tcost, id=session['user_id'], transaction_type=\"BUY\")\n\n return redirect('/')",
"def buy():\n if request.method == \"GET\":\n return render_template(\"buy.html\")\n else:\n stock = lookup(request.form.get(\"symbol\"))\n\n if stock == None:\n return apology(\"Symbol not found. Please re-check the symbol and try again!\")\n\n shares = int(request.form.get(\"shares\"))\n if not shares or int(shares) <= 0:\n return apology(\"Invalid shares. Please re-check and try again!\")\n\n company_name = stock[\"name\"]\n price = float(stock[\"price\"])\n symbol = stock[\"symbol\"]\n userid = session[\"user_id\"]\n available_cash = (db.execute(\"SELECT cash FROM users WHERE id=:id\", id = userid))[0].get(\"cash\")\n total = shares*price\n if total > available_cash:\n return apology(\"Sorry! You do not have sufficient balance\")\n else:\n check = (db.execute(\"SELECT symbol FROM purchase WHERE symbol=:symbol AND id=:uid\", symbol=symbol, uid=userid))\n dt = datetime.now(timezone(timedelta(hours=6)))\n dt = dt.strftime(\"%d-%m-%Y %H:%M:%S\")\n db.execute(\"INSERT INTO history (id, symbol, shares, price, time) VALUES (:userid, :symbol, :shares, :price, :time)\", userid=userid, symbol=symbol,shares=shares,price=price, time=dt)\n db.execute(\"UPDATE users SET cash=:cash WHERE id=:uid\", cash=available_cash-shares*price, uid=userid)\n\n # check = (db.execute(\"SELECT symbol FROM history WHERE symbol=:symbol\", symbol=symbol))[0].get(\"symbol\")\n print(len(check))\n if len(check) == 0:\n db.execute(\"INSERT INTO purchase (id, symbol, name, shares) VALUES (:userid, :symbol, :name, :shares)\", userid=userid, symbol=symbol, name=company_name, shares=shares)\n else:\n exshares = int((db.execute(\"SELECT shares FROM purchase WHERE symbol=:symbol AND id=:uid\", symbol=symbol,uid=userid))[0].get(\"shares\"))\n # print(exshares+\" \"+type(exshares))\n extotal = float((db.execute(\"SELECT total FROM purchase WHERE symbol=:symbol AND id=:uid\", symbol=symbol,uid=userid))[0].get(\"total\"))\n db.execute(\"UPDATE purchase SET shares=:newshares WHERE symbol=:symbol AND id=:uid\", newshares=shares+exshares, symbol=symbol, uid=userid)\n return render_template(\"bought.html\", company_name=company_name, shares=shares, symbol=symbol, usd=usd(shares*price), balance=usd(available_cash-shares*price))",
"def do_buy():\n i = 1\n while i <= CONF.trade_trials:\n buy_price = calculate_buy_price(get_current_price())\n order_size = calculate_buy_order_size(buy_price)\n if order_size is None:\n return None\n order = create_buy_order(buy_price, order_size)\n if order is None:\n LOG.error(\"Could not create buy order over %s\", order_size)\n return None\n write_action('-BUY')\n order_status = poll_order_status(order.id, 10)\n if order_status == 'open':\n cancel_order(order)\n i += 1\n daily_report()\n else:\n return order\n order_size = calculate_buy_order_size(get_current_price())\n if order_size is None:\n return None\n write_action('-BUY')\n return create_market_buy_order(order_size)",
"def sell(self, price, volume):\r\n self.order(\"ask\", price, volume)",
"def option_two():\n if ADD_PRODUCTS == {}:\n print \"\\n**No products availabe**\" #Cannot to buy\n press_enter()\n reset()\n main_menu()\n else:\n ask_if_want()",
"def trade(self, action=None):\n #print(\"Trading {}\".format(action))\n # Buy\n if action > 0.2 : self.posture = 1\n # Hold\n if action < 0.2 and action > -0.2: self.posture = 0\n # Sell\n if action < -0.2: self.posture = -1\n \n # Evaluate posture and calculare actual cost of trade\n #print(\"Posture: {}\".format(self.posture))\n if self.posture == 1:\n _amt = self.amt_buy\n _base = (_amt * self.state['price'] \\\n + (_amt * self.commission)) * -1\n \n elif self.posture == -1:\n _amt = self.amt_sell\n _base = _amt * self.state['price'] \\\n + (_amt * self.commission) \\\n + (_amt * self.gap)\n _amt = _amt * -1 \n\n # Set posture to 0 if no balance available\n if (self.posture == 1 and self.balance < abs(_base)) \\\n or (self.posture == -1 and self.bag < abs(_amt)):\n print(\"NOT enough amount!!\")\n self.stop=True\n self.posture = 0\n\n if self.posture == 0:\n _amt = 0\n _base = 0\n\n # Modify balances\n self.transaction = _base\n self.amt = _amt\n self.balance = self.balance + _base\n self.bag = self.bag + _amt\n self.value = self.calculate_value()\n #print(\"Posture : {} // Transaction: {}\".format(self.posture, self.transaction))\n\n return self",
"async def enable_buy(self, pair: str, detection_name: str, trigger_data: dict):\n\n if not self.pair_states[pair]['enable_buy']:\n await self.reporter.send_alert(pair, trigger_data, detection_name, prefix='ENABLE BUY')\n self.pair_states[pair]['enable_buy'] = True"
] | [
"0.6752437",
"0.66564465",
"0.65131027",
"0.64828473",
"0.6459072",
"0.641257",
"0.6370881",
"0.6362402",
"0.6345347",
"0.6336424",
"0.6329178",
"0.6314387",
"0.6267283",
"0.62372434",
"0.62257135",
"0.6205884",
"0.6183206",
"0.6168485",
"0.616273",
"0.61568713",
"0.61563355",
"0.6152934",
"0.61527",
"0.61505955",
"0.6144558",
"0.61298436",
"0.6122389",
"0.6106087",
"0.610556",
"0.6104547"
] | 0.69311965 | 0 |
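`switch_to_buy` above delegates to two page-object helpers that are not shown. A hypothetical sketch of what such helpers usually wrap in raw Selenium; the element IDs for the certificate interstitial are assumptions, not taken from the dataset:

```python
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait


def switch_to_window(driver, timeout=10):
    # Wait until the second window/tab exists, then focus the newest handle.
    WebDriverWait(driver, timeout).until(EC.number_of_windows_to_be(2))
    driver.switch_to.window(driver.window_handles[-1])


def accept_ssl_certificate(driver):
    # Chrome interstitial flow: expand details, then proceed (IDs assumed).
    driver.find_element(By.ID, "details-button").click()
    driver.find_element(By.ID, "proceed-link").click()
```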
Implementing get buy page title functionality | def get_buy_page_title(self):
self.wait().until(EC.visibility_of_element_located(self.default_tab_header_locator), 'default tab header not found before specified time')
return self.page_title() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_title():",
"def title(self):\n\t\treturn self.page_title",
"def get_page_title(self):\n return self.driver.get_title()",
"def page_title(self) -> str:\n return self.driver.title",
"def get_title(self) -> str:\n pass",
"def get_title(self):\n return self.title",
"def get_title(self):\n return self.title",
"def get_title(self):\n return self.title",
"def get_title(self):\n page_title = self.driver.title\n return page_title",
"def get_title(self):\n return self.run_command('get_title')[0]",
"def __str__(self):\n return self.page.get_title()",
"def get_item_title(self, soup: BeautifulSoup) -> None:\n try:\n title = soup.find(\"span\", class_=\"_bzh5lkq\").get_text()\n except AttributeError:\n title = None\n self.__collected_dic[\"title\"].append(title)",
"def page_title(self) -> str:\n xpath = r'head//title'\n return self.html.xpath(xpath)[0].text",
"def retrieve_title(self, index):\n with self.session as s:\n article_url = (\n 'https://alta.registries.gov.ab.ca/SpinII'\n '/ImmediateCheckoutPreviewHTML.aspx'\n '?ArticleTypeID=f1fdd406-26aa-45d5-9bf9-3f552c972a5c'\n '&ArticleType=CurrentTitle'\n '&ArticleID=%s&NextPage=' % index\n )\n sleep(2)\n article = s.get(article_url)\n soup = BeautifulSoup(article.content, 'html.parser')\n if soup.pre:\n payload = self.parse_title(soup.pre)\n with open('data/titles/{}.txt'.format(index), \"w\") as f:\n f.write(payload['title_text'])\n return payload",
"def getTitle(self):\n\t\treturn self.driver.title",
"def get_title(self, list_item):\n title = list_item.find('a', {'class': 'biz-name'}).find('span')\n return title.get_text()",
"def get_page_title(page):\n\n html = BeautifulSoup(page, \"html.parser\")\n return html.title.string",
"def get_title(self):\n title = self.driver.title\n return title",
"def get_title(self):\n\n return self.title",
"def get_bill_and_audit_page_title(self):\n self.wait().until(EC.presence_of_element_located(self.page_header_locator), 'top logo frame not found before specified time out')\n return self.page_title()",
"def getTitle(self):\n\n # print(self.soupObject.title.string)\n try:\n s = self.soupObject.find(\"meta\", attrs={\"name\": \"twitter:title\"})\n self.title = str(s['content'])\n self.title = self.title.replace(\"/\", \"\")\n self.title = self.title.strip()\n if not self.title:\n s = int(\"deliberateError\")\n\n # except\n except:\n self.title = \"Amazonsubtitles\"\n\n pass",
"def get_webpage_title(self, response):\n title = response.xpath('//*/title/text()').extract_first()\n if title:\n return title.strip()\n else:\n title = response.xpath('//*/meta[contains(@name,\"title\")]/@content').extract_first()\n if title:\n return title.strip()\n else:\n return \"\"",
"def get_title(self):\n return self._get_title_()",
"def get_title(article):\n title = article.find(\"div\", class_=\"col-sm-6 product_main\").h1.text\n return title",
"def getTitle(self): #$NON-NLS-1$\r",
"def getTitle(self): #$NON-NLS-1$\r",
"def get_title(portobjlist):\n #fetch_title(portobjlist)\n fetch_title(portobjlist)",
"def title(self):\n return self.get(\"title\")",
"def get_title(self):\n title_tag = self.soup.find('title').text\n title_list = string.split(sep='-')\n self.title = title_list[0].strip()",
"def get_title(self):\n return self._title"
] | [
"0.79133",
"0.73078203",
"0.72166765",
"0.7001976",
"0.68980056",
"0.6864043",
"0.6864043",
"0.6864043",
"0.68636906",
"0.6856293",
"0.68401515",
"0.6837087",
"0.6764774",
"0.6742004",
"0.6720232",
"0.6714058",
"0.6688795",
"0.66885704",
"0.66017157",
"0.66013813",
"0.6588677",
"0.6573939",
"0.6571459",
"0.6557348",
"0.6538076",
"0.6538076",
"0.65371287",
"0.65238",
"0.65148765",
"0.65018785"
] | 0.7839547 | 1 |
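`get_buy_page_title` above pairs an explicit wait with a title read. A condensed sketch of the same pattern without the page-object wrapper; the header locator is a placeholder assumption:

```python
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait

TAB_HEADER = (By.CSS_SELECTOR, ".tab-header")  # placeholder locator


def get_page_title(driver, timeout=10):
    # Only read the title once the page's header element is visible.
    WebDriverWait(driver, timeout).until(EC.visibility_of_element_located(TAB_HEADER))
    return driver.title
```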
Implementing is buy dashboard tab present functionality | def is_buy_dashboard_tab_present(self):
return self.is_element_present(self.buy_dashboard_tab_locator) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def select_buy_dashboard_tab(self):\n self.select_static_tab(self.buy_dashboard_tab_locator, True)",
"def click_buy_and_sell_deal_management_link(self):\n self.select_static_tab(self.buy_and_sell_deal_management_locator, message=\"buy and sell deal management locator not found before specified time out\")",
"def dashboard():",
"def is_target_buy_policies_tab_displayed(self):\n return self.is_element_visible(self.target_buy_policies_tab_locator)",
"def on_btnReservatool_clicked(self, widget):\n try:\n panelactual = variables.panel.get_current_page()\n if panelactual != 1:\n variables.panel.set_current_page(1)\n funcioneshab.listadonumhab(self)\n else:\n pass\n except:\n print(\"error botón cliente barra herramientas\")",
"def tabSelected(self):",
"def tabSelected(self):",
"def tabSelected(self):\r\n self.transactionMenuWidget.tabSelected()",
"def click_buy_and_sell_deal_create_button(self):\n self.click_element(self.save_vendor_profile_locator)",
"def open_accounts_page(self):\n log.info(\"In landing page: click bill view button\")\n bills_page_for_meters_link = self.driver.find_element(\n *self.link_to_accs_locator\n )\n bills_page_for_meters_link.click()\n self.driver.sleep(5)\n self.driver.switch_to.window(self.driver.window_handles[-1])",
"def test_get_tab(self):\n actions.login(ADMIN_EMAIL, is_admin=True)\n response = self.get(self.TAB_URL)\n self.assertEqual(response.status_code, 200)",
"def you_should_see_the_dashboard(driver):\n assert wait_on_element(driver, 10, xpaths.dashboard.title)\n assert wait_on_element(driver, 10, xpaths.dashboard.system_Info_Card_Title)",
"def you_should_see_the_dashboard(driver):\n rsc.Verify_The_Dashboard(driver)\n if wait_on_element(driver, 2, '//h1[contains(.,\"End User License Agreement - TrueNAS\")]'):\n try:\n assert wait_on_element(driver, 2, '//button[@ix-auto=\"button__I AGREE\"]', 'clickable')\n driver.find_element_by_xpath('//button[@ix-auto=\"button__I AGREE\"]').click()\n if wait_on_element(driver, 2, xpaths.button.close, 'clickable'):\n driver.find_element_by_xpath(xpaths.button.close).click()\n except ElementClickInterceptedException:\n assert wait_on_element(driver, 2, xpaths.button.close, 'clickable')\n driver.find_element_by_xpath(xpaths.button.close).click()\n assert wait_on_element(driver, 2, '//button[@ix-auto=\"button__I AGREE\"]', 'clickable')\n driver.find_element_by_xpath('//button[@ix-auto=\"button__I AGREE\"]').click()",
"def trade_action(self, BUY_QTY):\n BUY_QTY = 4500\n self.trade(BUY_QTY)\n #self.show()",
"def test_functionality(self):\n self.browserObject = globalVars.browserObject\n \n #Check for current logged in user\n self.verifyCurrentUser(userRole='Administrator', loginAsUser=True)\n \n self.get_DashboardPage(\"Server Utilization\")\n \n self.get_DashboardPage(\"Total Server Utilization\")\n \n self.logout()",
"def show(self):\n #print(\" ===== I am in show function ----\")\n if self.brightness > 0.99:\n global myItemTab\n for i in range(len(cfg.myItemTabHandler)):\n pen = QPen(QColor(self.stripTab[i]))\n brush = QBrush(pen.color())\n #brush = QBrush(pen.color().darker(100))\n cfg.myItemTabHandler[i].setPen(pen)\n cfg.myItemTabHandler[i].setBrush(brush)\n\n\n else:\n pass",
"def CashMode(self):\n self.cred_left = 0\n self.is_member = False\n self.cred_id = ''\n self.cred_card = ''\n self.builder.get_object('GuiMode').set_label(\"Payment in Cash\")",
"def execute_t24_tab_command(self, tab_items):\n\n self._make_sure_is_logged_in()\n\n self.home_page.run_t24_tab_command(tab_items)",
"def shopify_instances_onboarding_panel(self):\n\n current_company_id = request.httprequest.cookies.get('cids').split(',') if request.httprequest.cookies.get(\n 'cids', []) else []\n company = False\n if len(current_company_id) > 0 and current_company_id[0] and current_company_id[0].isdigit():\n company = request.env['res.company'].sudo().search([('id', '=', int(current_company_id[0]))])\n if not company:\n company = request.env.company\n hide_panel = company.shopify_onboarding_toggle_state != 'open'\n btn_value = 'Create More Shopify Instance' if hide_panel else 'Hide On boarding Panel'\n shopify_manager_group = request.env.ref(\"shopify_ept.group_shopify_manager_ept\")\n if request.env.uid not in shopify_manager_group.users.ids:\n return {}\n return {\n 'html': request.env.ref('shopify_ept.shopify_instances_onboarding_panel_ept')._render({\n 'company': company,\n 'toggle_company_id': company.id,\n 'hide_panel': hide_panel,\n 'btn_value': btn_value,\n 'state': company.get_and_update_shopify_instances_onboarding_state(),\n 'is_button_active': company.is_create_shopify_more_instance\n })\n }",
"def clickDashboard(self):\n self.waitForElement(locator=self._dashboardBtn, locatorType=\"xpath\")\n self.elementClick(locator=self._dashboardBtn, locatorType=\"xpath\")",
"def _test_display_up_button(self):\n return (self.product_displays.top_index > 0)",
"def on_Panel_select_page(self, widget):\n try:\n funcioneshab.listadonumhab()\n except:\n print(\"error botón cliente barra herramientas\")",
"def dashboard():\n return render_template('home/dashboard.html',title='SycliQ Dashboard')",
"def test_dashboards_v2_show(self):\n pass",
"def home_checkin():\n\tcheckpremenu()",
"def __init__(self):\r\n super().__init__()\r\n self._setupTab1()",
"def is_vendors_tab_present(self):\n return self.is_element_present(self.vendors_tab_locator)",
"def is_buy_and_sell_deal_details_screen_status_dropdown_visible(self):\n return self.is_element_present(self.buy_and_sell_deal_details_screen_status_dropdown_locator)",
"def select_qos_tab(self):\n self.click_element(self.qos_tab_locator, True)",
"def select_tab_of_view_price_list_detail_page(self, tab_name):\n self.wait_for_ajax_spinner_load(300)\n view_price_list_detail_page_tab_locator = (By.XPATH, self.vendor_profile_page_tab_locator_string + \"[text()='%s']\" % tab_name)\n self.select_static_tab(view_price_list_detail_page_tab_locator, 'tab locator not found')"
] | [
"0.7400998",
"0.64810336",
"0.6269928",
"0.5840348",
"0.57089645",
"0.5598408",
"0.5598408",
"0.5590817",
"0.5487762",
"0.54867595",
"0.5449592",
"0.54234475",
"0.54188967",
"0.5388239",
"0.5358356",
"0.5314111",
"0.5313204",
"0.52993983",
"0.5289306",
"0.5271922",
"0.52678883",
"0.524996",
"0.52443796",
"0.5229303",
"0.52047426",
"0.51951915",
"0.51801425",
"0.5164328",
"0.51465213",
"0.51413476"
] | 0.72802335 | 1 |
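`is_buy_dashboard_tab_present` (and `is_vendors_tab_present` below) reduce to an `is_element_present` check. The usual implementation relies on `find_elements` returning an empty list instead of raising; a minimal sketch, with a placeholder locator in the usage note:

```python
from selenium.webdriver.common.by import By


def is_element_present(driver, locator):
    by, value = locator
    # find_elements never raises NoSuchElementException; empty list means absent.
    return len(driver.find_elements(by, value)) > 0


# usage, locator value assumed:
# is_element_present(driver, (By.ID, "buy-dashboard-tab"))
```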
Implementing is vendors tab present functionality | def is_vendors_tab_present(self):
return self.is_element_present(self.vendors_tab_locator) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def select_vendors_tab(self):\n self.select_static_tab(self.vendors_tab_locator, 'vendors tab not found before specified time')",
"def is_specific_tab_on_vendor_profile_page_present(self, tab_name):\n tab_locator = (By.XPATH, \"//div[contains(@id, 'SourceProfileTabStrip')]/descendant::a[text()='%s']\" % tab_name)\n return self.is_element_present(tab_locator)",
"def is_custom_gear_tab(self):\n return self.emulator.is_ui_element_on_screen(self.ui['CUSTOM_GEAR_QUICK_UPGRADE']) or \\\n self.emulator.is_ui_element_on_screen(self.ui['CUSTOM_GEAR_ENHANCE']) or \\\n self.emulator.is_ui_element_on_screen(self.ui['CUSTOM_GEAR_CHANGE_OPTION'])",
"def click_on_tab_of_vendor_profile_page(self, tab_name):\n vendor_profile_page_tab_locator = (By.XPATH, self.vendor_profile_page_tab_locator_string + \"[text()='%s']\" % tab_name)\n self.select_static_tab(vendor_profile_page_tab_locator, 'tab locator not found')",
"def is_buy_dashboard_tab_present(self):\n return self.is_element_present(self.buy_dashboard_tab_locator)",
"def tabSelected(self):",
"def tabSelected(self):",
"def test_editVendor(self):\n self.tc_id = \"019\"\n self.tc_desc = \"Verify the admin is able to edit and update delete the vendor\"\n self.tc_step = \"TC Start\"\n\n vendoredit = vendoreditFeatures(self.driver)\n\n self.tc_step = \"Launch the url\"\n vendoredit.launchUrl(self.url)\n\n self.tc_step = \"Enter the login details\"\n vendoredit.clickVendorLogin()\n vendoredit.enterloginDetails(self.username, self.password)\n vendoredit.clickLogin()\n vendoredit.clickvendorTab()\n vendoredit.clickVendor()\n vendoredit.clickeditvendor()\n vendoredit.entereditvendor(self.editFirstName,self.editLastName)\n vendoredit.updatevendor()\n vendoredit.clickvendorTab()\n vendoredit.clickeditPrimaryEmail()\n vendoredit.entereditPrimaryEmail(self.primaryEmail)\n vendoredit.updatePrimaryEmail()\n vendoredit.updatePrimaryEmailOk()\n vendoredit.clickeditvendorlastupdatedtime()\n vendoredit.clickpicktime()\n vendoredit.picktime()\n vendoredit.picktimeapply()\n vendoredit.picktimeUpdate()\n # vendoredit.deletevendor()\n # vendoredit.confirmremovevendor()\n # vendoredit.confirmremovevendorok()\n vendoredit.viewmore()\n vendoredit.viewmoreClose()\n vendoredit.activeCheckvendor()\n vendoredit.verifiedCheckvendor()\n vendoredit.followupvendor()\n vendoredit.followupvendorok()",
"def test_post_activate_marketplace_vendor_v3(self):\n pass",
"def on_activate(self) -> None:",
"def on_activate(self):",
"def onShow(self):\n pass",
"def tabSelected(self):\r\n self.transactionMenuWidget.tabSelected()",
"def populateTabs(self):\n frameworks = self.data['frameworks']\n for fw in frameworks:\n frameworkElements = frameworks[fw]\n for element in frameworkElements:\n\n ui = self.framework2gui[fw].get(element)\n\n if isinstance(ui, QComboBox):\n ui.clear()\n ui.setView(QListView())\n for i, deviceName in enumerate(frameworks[fw][element]):\n ui.addItem(deviceName)\n if frameworks[fw]['deviceName'] == deviceName:\n ui.setCurrentIndex(i)\n\n elif isinstance(ui, QLineEdit):\n ui.setText(f'{frameworks[fw][element]}')\n\n elif isinstance(ui, QCheckBox):\n ui.setChecked(frameworks[fw][element])\n\n elif isinstance(ui, QDoubleSpinBox):\n ui.setValue(frameworks[fw][element])\n return True",
"def __init__(self):\r\n super().__init__()\r\n self._setupTab1()",
"def select_buy_dashboard_tab(self):\n self.select_static_tab(self.buy_dashboard_tab_locator, True)",
"def is_vendor_profile_present(self):\n return self.is_element_present(self.vendor_profile_locator)",
"def show(self):\n #print(\" ===== I am in show function ----\")\n if self.brightness > 0.99:\n global myItemTab\n for i in range(len(cfg.myItemTabHandler)):\n pen = QPen(QColor(self.stripTab[i]))\n brush = QBrush(pen.color())\n #brush = QBrush(pen.color().darker(100))\n cfg.myItemTabHandler[i].setPen(pen)\n cfg.myItemTabHandler[i].setBrush(brush)\n\n\n else:\n pass",
"def activated(self):",
"def currentTabChanged (self, tabId):\n if tabId == -1:\n windowTitle = None\n else:\n wdoc = self.tab.widget(tabId)\n\n if not isinstance(wdoc, WelcomePage):\n windowTitle = wdoc.getPath( absolute=True, withAsterisk = True )\n windowTitleFinal = self.addTag( repoType=wdoc.repoDest, \n txt=windowTitle, project=wdoc.project )\n\n # emit signal\n self.CurrentDocumentChanged.emit(wdoc)\n\n if isinstance(wdoc, WelcomePage):\n self.findWidget.setDisabled(True)\n self.findWidget.hide()\n # elif wdoc.extension == TestAbstract.TYPE:\n # self.findWidget.setDisabled(True)\n # self.findWidget.hide()\n elif wdoc.extension == TestUnit.TYPE:\n # self.findWidget.show()\n self.findWidget.setDisabled(False)\n self.findWidget.setEditor( editor = wdoc.srcEditor)\n elif wdoc.extension == TestData.TYPE:\n # self.findWidget.show()\n self.findWidget.setDisabled(False)\n self.findWidget.setEditor( editor = wdoc.srcEditor)\n elif wdoc.extension == TestSuite.TYPE:\n # self.findWidget.show()\n self.findWidget.setDisabled(False)\n self.findWidget.setEditor( editor = wdoc.srcEditor)\n elif wdoc.extension == TestPlan.TYPE or wdoc.extension == TestPlan.TYPE_GLOBAL:\n wdoc.reloadSelectedItem()\n self.findWidget.setDisabled(True)\n self.findWidget.hide()\n elif wdoc.extension == TestConfig.TYPE:\n self.findWidget.setDisabled(True)\n self.findWidget.hide()\n elif wdoc.extension == TestAdapter.TYPE:\n # self.findWidget.show()\n self.findWidget.setDisabled(False)\n self.findWidget.setEditor( editor = wdoc.srcEditor)\n elif wdoc.extension == TestLibrary.TYPE:\n # self.findWidget.show()\n self.findWidget.setDisabled(False)\n self.findWidget.setEditor( editor = wdoc.srcEditor)\n elif wdoc.extension == TestTxt.TYPE:\n # self.findWidget.show()\n self.findWidget.setDisabled(False)\n self.findWidget.setEditor( editor = wdoc.srcEditor)\n else:\n self.findWidget.setDisabled(True)\n self.findWidget.hide()\n\n if RCI.instance().isAuthenticated():\n self.updateActions(wdocument = wdoc)\n else:\n self.findWidget.setDisabled(True)\n \n # emit signal\n if isinstance(wdoc, WelcomePage):\n self.UpdateWindowTitle.emit(\"\")\n else:\n self.UpdateWindowTitle.emit(windowTitleFinal)",
"def is_create_vendor_present(self):\n return self.is_element_present(self.create_vendor_locator)",
"def is_target_buy_details_section_present_in_vendor_profile_page(self):\n return self.is_element_present(self.target_buy_details_section_locator)",
"def ShowWindowMenu(self):\r\n \r\n tabCtrl = self.GetActiveTabCtrl()\r\n idx = tabCtrl.GetArtProvider().ShowDropDown(tabCtrl, tabCtrl.GetPages(), tabCtrl.GetActivePage())\r\n\r\n if not self.GetEnabled(idx):\r\n return False\r\n\r\n if idx != -1:\r\n e = AuiNotebookEvent(wxEVT_COMMAND_AUINOTEBOOK_PAGE_CHANGING, tabCtrl.GetId())\r\n e.SetSelection(idx)\r\n e.SetOldSelection(tabCtrl.GetActivePage())\r\n e.SetEventObject(tabCtrl)\r\n self.GetEventHandler().ProcessEvent(e)\r\n\r\n return True\r\n \r\n else:\r\n \r\n return False",
"def is_vendor(self) -> bool:\n return self._is_vendor",
"def init_tab(self):",
"def _test_display_up_button(self):\n return (self.product_displays.top_index > 0)",
"def on_Panel_select_page(self, widget):\n try:\n funcioneshab.listadonumhab()\n except:\n print(\"error botón cliente barra herramientas\")",
"def on_menuBarPreciosServicios_activate(self,widget):\n\n try:\n variables.venPrecios.show()\n conexion.cur.execute('select * from precios')\n precios = conexion.cur.fetchall()\n variables.entPrecioDesayuno.set_text(str(precios[0][0]))\n variables.entPrecioComida.set_text(str(precios[0][1]))\n variables.entPrecioParking.set_text(str(precios[0][2]))\n except:\n print('error abrir ventana precios')",
"def on_acercade_activate(self, widget):\n try:\n variables.venacercade.show()\n except:\n print('error abrira acerca de')",
"def support(self):"
] | [
"0.6874013",
"0.6272529",
"0.5996868",
"0.5992929",
"0.5748996",
"0.5721711",
"0.5721711",
"0.55816156",
"0.5517531",
"0.5510303",
"0.54955125",
"0.54571176",
"0.54381496",
"0.5422301",
"0.53921604",
"0.53632975",
"0.5273257",
"0.5265792",
"0.52589095",
"0.52563494",
"0.525513",
"0.5241976",
"0.523327",
"0.52307343",
"0.52297986",
"0.5217657",
"0.52111816",
"0.51924044",
"0.5190423",
"0.5178564"
] | 0.72662383 | 0 |
Implementing is country groups link present functionality | def is_country_groups_link_present(self):
return self.is_element_present(self.country_groups_locator) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def click_country_group(self):\n self.click_element(self.country_groups_locator, script_executor=True)",
"def test_groups_get(self):\n pass",
"def test_groups_get(self):\n pass",
"def test_groups_group_ref_get(self):\n pass",
"def groups_en(request, group_id = 1):\n group = get_object_or_404(ResearchGroup, pk=group_id)\n groups = ResearchGroup.objects.order_by('name')\n group_list = []\n for g in groups:\n if g.id is not group.id:\n group_list.append({'name': g.name, 'id': g.id})\n # default showing group\n # chosen group info\n group_info = {}\n group_info['name'] = group.name\n personnel = list()\n for p in group.personnel.all():\n personnel.append(p.username)\n group_info['personnel'] = \" \".join(str(x) for x in personnel)\n group_info['projects'] = group.projects\n group_info['directions'] = group.directions\n group_info['papers'] = group.papers.split()\n context = {'group_list': group_list, 'group_info': group_info}\n return render(request, 'sacms/groups_en.html', context)",
"def test_get_groups(self):\n pass",
"def test_get_groups(self):\n pass",
"def test_groups_group_id_state_get(self):\n pass",
"def player_group(group):\n link = reverse('wouso.interface.profile.views.player_group', args=(group.id,))\n\n return u'<a href=\"%s%s\" title=\"%s\">%s</a>' % (link, group, group.name, group)",
"def test_get_country_states(self):\n pass",
"def test_get_group(self):\n pass",
"def _allowed(self, agent, request, country_code):\n uid = logged_in_user(request)\n if not uid:\n return False\n filterstr = (\"(&(objectClass=groupOfUniqueNames)(uniqueMember=%s))\" %\n agent._user_dn(uid))\n nfp_roles = agent.filter_roles(\"eionet-nfp-*-%s\" % country_code,\n prefix_dn=\"cn=eionet-nfp,cn=eionet\",\n filterstr=filterstr,\n attrlist=(\"description\",))\n\n if not (bool(nfp_roles) or self.checkPermissionZopeManager()):\n msg = (\n u\"You are not allowed to manage Eionet Groups members for %s\"\n % code_to_name(country_code))\n IStatusMessage(request).add(msg, type='error')\n request.RESPONSE.redirect(self.absolute_url())\n\n return False\n return True",
"def test_get_countries(self):\n pass",
"def test_get_ancestors_for_device_groups(self):\n pass",
"def has_nominations_priority_access(local_group):\n if local_group is not None and hasattr(\n local_group,\n 'organizinghubaccess',\n ):\n access = local_group.organizinghubaccess\n has_feature_access = access.has_feature_access(\n OrganizingHubFeature.nominations_priority_support\n )\n return has_feature_access\n else:\n return False",
"def test_products_ref_groups_get(self):\n pass",
"def connect_country_to_site(self, siteNode, projectNode, position_item, site, countryNodes):\n core = self.core\n for countryNode in countryNodes:\n if core.get_attribute(countryNode, \"isoCode2\") == site[\"Country code\"]:\n connection = core.create_child(projectNode, self.META[\"Site2Country\"])\n instance = core.create_child(projectNode, countryNode)\n position_item[\"x\"] += 400\n core.set_registry(instance, \"position\", position_item)\n core.set_pointer(connection, \"src\", instance)\n core.set_pointer(connection, \"dst\", siteNode)\n return instance",
"def allowed_group_access_use(user, group):\n return (user.has_perm(\"vnswww.group_use_any\")\n or (user.has_perm(\"vnswww.group_use_org\")\n and group.org == user.get_profile().org))",
"def test_country_name_in_countries(self):\n\t\tcountry_code = get_country_code('Andorra')\n\t\tself.assertEqual(country_code, 'ad')",
"def country_grouping(df): # group values by country\n country_group = date_formatting(df)\n country_group.fillna(value=0, inplace=True)\n country_group = country_group.groupby(['Country/Region'])[country_group.columns[3:]].sum().reset_index()\n return country_group",
"def findCountryCode(self):\n RecordsWithCountry = []\n for state in pycountry.subdivisions:\n #print(state.name)\n for record in self.Records: \n if state.name == record.state:\n #print(state.country, record.state)\n r = RecordCountry(date=record.date,\n country=state.country.alpha_3,\n impressions=record.impressions,\n CTR=record.CTR)\n self.Records.remove(record)\n RecordsWithCountry.append(r)\n for record in self.Records: \n r = RecordCountry(date=record.date,\n country=\"XXX\",\n impressions=record.impressions,\n CTR=record.CTR)\n RecordsWithCountry.append(r)\n self.Records = RecordsWithCountry",
"def add_country_counts(labels_df):\n counts = labels_df.groupby([\"country\"]).size().reset_index(name=\"num_country_labels\")\n labels_df = pd.merge(labels_df, counts, on='country')\n counts = labels_df.groupby([\"country\"])['article_id'].nunique().reset_index(name=\"num_country_articles\")\n return pd.merge(labels_df, counts, on='country')",
"def test_fake_group(self):\n response = self.client.get(reverse('education:demographic_detail',args=(\"XYZ\",)))\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response.context.get(\"json_rate_data\"), None)\n self.assertNotEqual(response.context.get(\"message\"), None)\n self.assertContains(response, \"Home\")\n self.assertContains(response, \"Error: No such group XYZ\")\n self.assertNotContains(response, '<svg id=\"popsvg\"')",
"def test_get_ancestors_for_device_group(self):\n pass",
"def getExpandedLinks():",
"def set_country_group_scope(self, country_group_scope):\n self.single_selection_from_kendo_dropdown(self.country_group_scope_dropdown_locator, country_group_scope)",
"def groupManager(request):\n \n return render_to_response('centers.html',context_instance=RequestContext(request))",
"def _feature_country_process(self):\n if 'Country' not in self._df_invoice_line.columns:\n return\n\n list_countries_keep = ['United Kingdom']\n rows_before = self._df_invoice_line.shape[0]\n \n df_invoice_line_new = pd.DataFrame()\n for country in list_countries_keep : \n df_invoice_line_new = df_invoice_line_new.append(\\\n self._df_invoice_line[self._df_invoice_line['Country']==country]\\\n , ignore_index=True)\n\n self.df_invoice_line = df_invoice_line_new\n del(df_invoice_line_new)\n \n rows_after = self._df_invoice_line.shape[0] \n _print_stat_rows(\"Countries filtering : \",rows_before, rows_after)\n\n \n #-------------------------------------------------------------------------\n # Due to the fact only one country is used, then this feature is dropped\n #-------------------------------------------------------------------------\n list_col_to_keep = [col for col in self._df_invoice_line.columns \\\n if col not in 'Country']\n \n self._df_invoice_line = self._df_invoice_line[list_col_to_keep] \n\n return",
"def group_adjacents(group, board, filter_by=None):\n liberties = set([])\n for location in group:\n if filter_by == \"None\":\n liberties |= xy_adjacents(location, board, filter_by=\"None\")\n elif filter_by == \"friend\":\n liberties |= xy_adjacents(location, board, filter_by=\"friend\")\n elif filter_by == \"foe\":\n liberties |= xy_adjacents(location, board, filter_by=\"foe\")\n else:\n liberties |= xy_adjacents(location, board)\n liberties -= group\n return liberties",
"def test_on_post_display_local_to_foreign(self):\n # data.get('country_flag', None)\n # address_display_version = 'foreign' if country_flag else 'local'\n # form.set_alt_data(name='country_display', field=self.fields['country_display'], value=address_display_version)\n pass"
] | [
"0.6317962",
"0.51911926",
"0.51911926",
"0.5140529",
"0.50892735",
"0.50562495",
"0.50562495",
"0.50487584",
"0.502029",
"0.49977654",
"0.49895462",
"0.49316293",
"0.49196282",
"0.49052003",
"0.49025372",
"0.48940086",
"0.48776388",
"0.48742306",
"0.48725045",
"0.48550633",
"0.4849856",
"0.47755346",
"0.4770817",
"0.4757959",
"0.47575343",
"0.4754676",
"0.47503266",
"0.47479874",
"0.47428283",
"0.47328064"
] | 0.7332791 | 0 |
Implementing is reanalysis link present functionality | def is_re_analysis_link_present(self):
return self.is_element_present(self.re_analysis_locator) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def click_re_analysis_link(self):\n self.click_element(self.re_analysis_locator, True)",
"def relink(self, link_id):",
"def getLink(self):",
"def link_residues(self) -> None:\n ...",
"def add_link():\n return True",
"def relink():\n _intro()\n from . import crosslink as cr\n\n cr.relink()",
"def link(address):",
"def url_shortner(self):",
"def fix_links():\n pass",
"def match(self, brain, getlink):\n link = getlink(brain.getId,\n getlink(normalize(brain.Title), None))\n if link:\n return True",
"def linkActivated(self, *args, **kwargs): # real signature unknown\n pass",
"def linkActivated(self, *args, **kwargs): # real signature unknown\n pass",
"def ref_analyzer(citation_informations, initialresult, initial_citationlist,\n initial_referencelist,config, updated_rec_list ):\n function = \"\"\n try:\n function = config.get(\"rank_method\", \"function\")\n except:\n register_exception(prefix=\"cfg section [rank_method] has no attr function\", alert_admin=True)\n return {}\n\n pubrefntag = \"\"\n try:\n pubrefntag = config.get(function, \"reference_via_report_number\")\n except:\n register_exception(prefix=\"cfg section \"+function+\" has no attr reference_via_report_number\", alert_admin=True)\n return {}\n\n pubreftag = \"\"\n try:\n pubreftag = config.get(function, \"reference_via_pubinfo\")\n except:\n register_exception(prefix=\"cfg section \"+function+\" has no attr reference_via_pubinfo\", alert_admin=True)\n return {}\n\n #pubrefntag is often 999C5r, pubreftag 999C5s\n if task_get_task_param('verbose') >= 9:\n write_message(\"pubrefntag \"+pubrefntag)\n write_message(\"pubreftag \"+pubreftag)\n\n citation_list = initial_citationlist\n reference_list = initial_referencelist\n result = initialresult\n d_reports_numbers = citation_informations[0] #dict of recid -> institute_give_publ_id\n d_references_report_numbers = citation_informations[1] #dict of recid -> ['astro-ph/xyz'..]\n d_references_s = citation_informations[2]\n #dict of recid -> publication_infos_in_its_bibliography\n d_records_s = citation_informations[3] #recid -> its publication inf\n t1 = os.times()[4]\n\n write_message(\"Phase 0: temporarily remove changed records from citation dictionaries; they will be filled later\")\n for somerecid in updated_rec_list:\n try:\n del citation_list[somerecid]\n except KeyError:\n pass\n try:\n del reference_list[somerecid]\n except KeyError:\n pass\n\n write_message(\"Phase 1: d_references_report_numbers\")\n #d_references_report_numbers: e.g 8 -> ([astro-ph/9889],[hep-ph/768])\n #meaning: rec 8 contains these in bibliography\n\n done = 0\n numrecs = len(d_references_report_numbers)\n for thisrecid, refnumbers in d_references_report_numbers.iteritems():\n if (done % 1000 == 0):\n mesg = \"d_references_report_numbers done \"+str(done)+\" of \"+str(numrecs)\n write_message(mesg)\n task_update_progress(mesg)\n task_sleep_now_if_required()\n done = done+1\n\n for refnumber in refnumbers:\n if refnumber:\n p = refnumber\n f = 'reportnumber'\n #sanitise p\n p.replace(\"\\n\",'')\n #search for \"hep-th/5644654 or such\" in existing records\n rec_ids = get_recids_matching_query(p, f)\n\n if len(rec_ids) > 1:\n msg = \"Whoops: record '%d' report number value '%s' \" \\\n \"matches many records; taking only the first one. 
%s\" % \\\n (thisrecid, p, repr(rec_ids))\n write_message(msg, stream=sys.stderr)\n try:\n raise ValueError(msg)\n except ValueError:\n register_exception(alert_admin=True)\n\n if rec_ids and rec_ids[0]:\n write_citer_cited(thisrecid, rec_ids[0])\n remove_from_missing(p)\n if not result.has_key(rec_ids[0]):\n result[rec_ids[0]] = 0\n # Citation list should have rec_ids[0] but check anyway\n if not citation_list.has_key(rec_ids[0]):\n citation_list[rec_ids[0]] = []\n #append unless this key already has the item\n if not thisrecid in citation_list[rec_ids[0]]:\n citation_list[rec_ids[0]].append(thisrecid)\n #and update result\n result[rec_ids[0]] += 1\n\n if not reference_list.has_key(thisrecid):\n reference_list[thisrecid] = []\n if not rec_ids[0] in reference_list[thisrecid]:\n reference_list[thisrecid].append(rec_ids[0])\n else:\n #the reference we wanted was not found among our records.\n #put the reference in the \"missing\".. however, it will look\n #bad.. gfhgf/1254312, so get the corresponding 999C5s (full ref) too\n #This should really be done in the next loop d_references_s\n #but the 999C5s fields are not yet normalized\n\n #rectext = print_record(thisrecid, format='hm', ot=pubreftag[:-1])\n rectext = \"\" # print_record() call disabled to speed things up\n lines = rectext.split(\"\\n\")\n rpart = p #to be used..\n for l in lines:\n if (l.find(p) > 0): #the gfhgf/1254312 was found.. get the s-part of it\n st = l.find('$s')\n if (st > 0):\n end = l.find('$', st)\n if (end == st):\n end = len(l)\n rpart = l[st+2:end]\n insert_into_missing(thisrecid, rpart)\n\n mesg = \"d_references_report_numbers done fully\"\n write_message(mesg)\n task_update_progress(mesg)\n\n t2 = os.times()[4]\n\n #try to find references based on 999C5s, like Phys.Rev.Lett. 53 (1986) 2285\n write_message(\"Phase 2: d_references_s\")\n done = 0\n numrecs = len(d_references_s)\n for thisrecid, refss in d_references_s.iteritems():\n if (done % 1000 == 0):\n mesg = \"d_references_s done \"+str(done)+\" of \"+str(numrecs)\n write_message(mesg)\n task_update_progress(mesg)\n task_sleep_now_if_required()\n\n done = done+1\n\n for refs in refss:\n if refs:\n p = refs\n #remove the latter page number if it is like 67-74\n matches = re.compile(\"(.*)(-\\d+$)\").findall(p)\n if matches and matches[0]:\n p = matches[0][0]\n\n # check reference value to see whether it is well formed:\n if not re_CFG_JOURNAL_PUBINFO_STANDARD_FORM_REGEXP_CHECK.match(p):\n msg = \"Whoops, record '%d' reference value '%s' \" \\\n \"is not well formed; skipping it.\" % (thisrecid, p)\n write_message(msg, stream=sys.stderr)\n try:\n raise ValueError(msg)\n except ValueError:\n register_exception(alert_admin=True)\n continue # skip this ill-formed value\n\n # look for reference value:\n rec_id = None\n try:\n rec_ids = list(search_unit(p, 'journal') - INTBITSET_OF_DELETED_RECORDS)\n except:\n rec_ids = None\n if len(rec_ids) > 1:\n msg = \"Whoops, record '%d' reference value '%s' \" \\\n \"matches many records; taking only the first one. 
%s\" % \\\n (thisrecid, p, repr(rec_ids))\n write_message(msg, stream=sys.stderr)\n try:\n raise ValueError(msg)\n except ValueError:\n register_exception(alert_admin=True)\n\n if rec_ids and rec_ids[0]:\n write_message(\"These match searching \"+p+\" in journal: \"+repr(rec_ids), verbose=9)\n #the refered publication is in our collection, remove\n #from missing\n remove_from_missing(p)\n else:\n #it was not found so add in missing\n insert_into_missing(thisrecid, p)\n #check citation and reference for this..\n if rec_ids and rec_ids[0]:\n #the above should always hold\n if not result.has_key(rec_ids[0]):\n result[rec_ids[0]] = 0\n if not citation_list.has_key(rec_ids[0]):\n citation_list[rec_ids[0]] = []\n if not thisrecid in citation_list[rec_ids[0]]:\n citation_list[rec_ids[0]].append(thisrecid) #append actual list\n result[rec_ids[0]] += 1 #add count for this..\n\n #update reference_list accordingly\n if not reference_list.has_key(thisrecid):\n reference_list[thisrecid] = []\n if not rec_ids[0] in reference_list[thisrecid]:\n reference_list[thisrecid].append(rec_ids[0])\n mesg = \"d_references_s done fully\"\n write_message(mesg)\n task_update_progress(mesg)\n\n t3 = os.times()[4]\n done = 0\n numrecs = len(d_reports_numbers)\n write_message(\"Phase 3: d_reports_numbers\")\n\n #search for stuff like CERN-TH-4859/87 in list of refs\n for thisrecid, reportcodes in d_reports_numbers.iteritems():\n if (done % 1000 == 0):\n mesg = \"d_report_numbers done \"+str(done)+\" of \"+str(numrecs)\n write_message(mesg)\n task_update_progress(mesg)\n done = done+1\n\n for reportcode in reportcodes:\n if reportcode:\n rec_ids = []\n try:\n rec_ids = get_recids_matching_query(reportcode, pubrefntag)\n except:\n rec_ids = []\n\n if rec_ids:\n for recid in rec_ids:\n #normal checks..\n if not citation_list.has_key(thisrecid):\n citation_list[thisrecid] = []\n if not reference_list.has_key(recid):\n reference_list[recid] = []\n if not result.has_key(thisrecid):\n result[thisrecid] = 0\n\n #normal updates\n if not recid in citation_list[thisrecid]:\n result[thisrecid] += 1\n citation_list[thisrecid].append(recid)\n if not thisrecid in reference_list[recid]:\n reference_list[recid].append(thisrecid)\n\n mesg = \"d_report_numbers done fully\"\n write_message(mesg)\n task_update_progress(mesg)\n\n #find this record's pubinfo in other records' bibliography\n write_message(\"Phase 4: d_records_s\")\n done = 0\n numrecs = len(d_records_s)\n t4 = os.times()[4]\n for thisrecid, recs in d_records_s.iteritems():\n if (done % 1000 == 0):\n mesg = \"d_records_s done \"+str(done)+\" of \"+str(numrecs)\n write_message(mesg)\n task_update_progress(mesg)\n done = done+1\n p = recs.replace(\"\\\"\",\"\")\n #search the publication string like Phys. 
Lett., B 482 (2000) 417 in 999C5s\n rec_ids = list(search_unit(f=pubreftag, p=p, m='a') - INTBITSET_OF_DELETED_RECORDS)\n write_message(\"These records match \"+p+\" in \"+pubreftag+\" : \"+str(rec_ids), verbose=9)\n if rec_ids:\n for rec_id in rec_ids:\n #normal checks\n if not result.has_key(thisrecid):\n result[thisrecid] = 0\n if not citation_list.has_key(thisrecid):\n citation_list[thisrecid] = []\n if not reference_list.has_key(rec_id):\n reference_list[rec_id] = []\n\n if not rec_id in citation_list[thisrecid]:\n result[thisrecid] += 1\n citation_list[thisrecid].append(rec_id)\n if not thisrecid in reference_list[rec_id]:\n reference_list[rec_id].append(thisrecid)\n\n mesg = \"d_records_s done fully\"\n write_message(mesg)\n task_update_progress(mesg)\n\n write_message(\"Phase 5: reverse lists\")\n\n #remove empty lists in citation and reference\n keys = citation_list.keys()\n for k in keys:\n if not citation_list[k]:\n del citation_list[k]\n\n keys = reference_list.keys()\n for k in keys:\n if not reference_list[k]:\n del reference_list[k]\n\n write_message(\"Phase 6: self-citations\")\n selfdic = {}\n #get the initial self citation dict\n initial_self_dict = get_cit_dict(\"selfcitdict\")\n selfdic = initial_self_dict\n #add new records to selfdic\n acit = task_get_option(\"author-citations\")\n if not acit:\n write_message(\"Self cite processing disabled. Use -A option to enable it.\")\n else:\n write_message(\"self cite and author citations enabled\")\n selfdic = get_self_citations(updated_rec_list, citation_list,\n initial_self_dict, config)\n #selfdic consists of\n #key k -> list of values [v1,v2,..]\n #where k is a record with author A and k cites v1,v2.. and A appears in v1,v2..\n\n #create a reverse \"x cited by y\" self cit dict\n selfcitedbydic = {}\n for k in selfdic.keys():\n vlist = selfdic[k]\n for v in vlist:\n if selfcitedbydic.has_key(v):\n tmplist = selfcitedbydic[v]\n if not k in tmplist:\n tmplist.append(k)\n else:\n tmplist = [k]\n selfcitedbydic[v] = tmplist\n\n write_message(\"Getting author citations\")\n\n #get author citations for records in updated_rec_list\n initial_author_dict = get_initial_author_dict()\n authorcitdic = initial_author_dict\n acit = task_get_option(\"author-citations\")\n if not acit:\n print \"Author cites disabled. 
Use -A option to enable it.\"\n else:\n write_message(\"author citations enabled\")\n authorcitdic = get_author_citations(updated_rec_list, citation_list,\n initial_author_dict, config)\n\n\n if task_get_task_param('verbose') >= 3:\n #print only X first to prevent flood\n tmpdict = {}\n tmp = citation_list.keys()[0:10]\n for t in tmp:\n tmpdict[t] = citation_list[t]\n write_message(\"citation_list (x is cited by y): \"+str(tmpdict))\n write_message(\"size: \"+str(len(citation_list.keys())))\n tmp = reference_list.keys()[0:10]\n tmpdict = {}\n for t in tmp:\n tmpdict[t] = reference_list[t]\n write_message(\"reference_list (x cites y): \"+str(tmpdict))\n write_message(\"size: \"+str(len(reference_list.keys())))\n tmp = selfcitedbydic.keys()[0:10]\n tmpdict = {}\n for t in tmp:\n tmpdict[t] = selfcitedbydic[t]\n mesg = \"selfcitedbydic (x is cited by y and one of the authors of x same as y's):\"\n mesg += str(tmpdict)\n write_message(mesg)\n write_message(\"size: \"+str(len(selfcitedbydic.keys())))\n tmp = selfdic.keys()[0:100]\n tmpdict = {}\n for t in tmp:\n tmpdict[t] = selfdic[t]\n mesg = \"selfdic (x cites y and one of the authors of x same as y's): \"+str(tmpdict)\n write_message(mesg)\n write_message(\"size: \"+str(len(selfdic.keys())))\n tmp = authorcitdic.keys()[0:10]\n tmpdict = {}\n for t in tmp:\n tmpdict[t] = authorcitdic[t]\n write_message(\"authorcitdic (author is cited in recs): \"+str(tmpdict))\n write_message(\"size: \"+str(len(authorcitdic.keys())))\n insert_cit_ref_list_intodb(citation_list, reference_list,\n selfcitedbydic, selfdic, authorcitdic)\n\n t5 = os.times()[4]\n\n write_message(\"Execution time for analyzing the citation information generating the dictionary:\")\n write_message(\"... checking ref number: %.2f sec\" % (t2-t1))\n write_message(\"... checking ref ypvt: %.2f sec\" % (t3-t2))\n write_message(\"... checking rec number: %.2f sec\" % (t4-t3))\n write_message(\"... checking rec ypvt: %.2f sec\" % (t5-t4))\n write_message(\"... total time of ref_analyze: %.2f sec\" % (t5-t1))\n\n return result",
"def requestShowLink(self, *args, **kwargs): # real signature unknown\n pass",
"def linksActivated(self, *args, **kwargs): # real signature unknown\n pass",
"def collectLinks(self, output):\n pass",
"def link_dihedra(self, verbose: bool = ...) -> None:\n ...",
"def __flagPublicationAsNeedsRefetch(search_publication_element): \r\n #description \r\n #++++++PUBLICATION LITE++++++\r\n #check publication Element\r\n if search_publication_element is None:\r\n return \r\n #publication guid\r\n if search_publication_element is not None:\r\n guid = search_publication_element.get('id','')\r\n if guid == '':\r\n return\r\n #load Publication from db if it exists, otherwise give up - DONT create it! \r\n publication_object = Publication.getPublication(guid) \r\n if publication_object is None:\r\n return\r\n #symplectic has an updated version of this publication \r\n publication_object.needs_refetch = True\r\n #save this change to the publication object\r\n publication_object.save()",
"def check_link(self, link, links_para):\n href = link['href']\n if not href.startswith('/wiki/') or href == '/wiki/Latin' or href.startswith('#'):\n return False\n if \"<i>\" in link or href in links_para:\n return False\n title = href[6:]\n if title.startswith('Help:') or title.startswith('File:') or title.endswith('.ogg') or title.startswith('Wikipedia:'):\n return False\n return True",
"def iter_links(self):",
"def getExpandedLinks():",
"def has_cycle(link):\n\n ###############\n # My Solution #\n ###############\n \n def tracker(link, seen = []):\n if link in seen:\n print('True')\n return\n if link.rest == Link.empty:\n print('False')\n return\n seen.append(link)\n tracker(link.rest)\n\n\n return tracker(link)",
"def test_with_links(self):\n self.result.figure_link = 'some_link'\n self.result.start_figure_link = 'other_link'\n figure_link, start_link = fitting_report.get_figure_paths(self.result)\n self.assertEqual(figure_link, os.path.join('figures', 'some_link'))\n self.assertEqual(start_link, os.path.join('figures', 'other_link'))",
"def islink(path):\n return get_instance(path).islink(path)",
"def test_link_is_tracked_true(self):\n self.assertTrue(link_is_tracked(\"https://test.com/testurl\"))",
"def remove_link():",
"def linktype_callback(self):\n pass",
"def test_link_is_tracked_false_archive(self):\n self.assertFalse(link_is_tracked(\"https://web.archive.org/https://test.com/\"))",
"def analyse(self):\n pass",
"def is_linked(self): \n return self.ichair_id is not None"
] | [
"0.6751954",
"0.652153",
"0.6032356",
"0.5974886",
"0.5958178",
"0.5620203",
"0.5609497",
"0.55967116",
"0.5590024",
"0.5581713",
"0.55592877",
"0.55592877",
"0.55334836",
"0.55301785",
"0.5505264",
"0.542408",
"0.54043347",
"0.53962654",
"0.5396158",
"0.5350795",
"0.5345842",
"0.5331633",
"0.5329114",
"0.52956605",
"0.5284054",
"0.526759",
"0.5260585",
"0.52532125",
"0.5248063",
"0.5220941"
] | 0.67769605 | 0 |
Implementing select vendors tab functionality | def select_vendors_tab(self):
self.select_static_tab(self.vendors_tab_locator, 'vendors tab not found before specified time') | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_vendor(self, vendor_list):\n self.multiple_items_selection_from_kendo_dropdown(self.vendor_dropdown_locator, vendor_list)\n self.wait_for_ajax_spinner_load()",
"def tabSelected(self):",
"def tabSelected(self):",
"def select_buy_dashboard_tab(self):\n self.select_static_tab(self.buy_dashboard_tab_locator, True)",
"def set_vendors(self, vendors_list):\n self.multiple_items_selection_from_kendo_dropdown(self.vendors_kendo_dropdown_locator, vendors_list)\n self.wait_for_ajax_spinner_load()",
"def click_on_tab_of_vendor_profile_page(self, tab_name):\n vendor_profile_page_tab_locator = (By.XPATH, self.vendor_profile_page_tab_locator_string + \"[text()='%s']\" % tab_name)\n self.select_static_tab(vendor_profile_page_tab_locator, 'tab locator not found')",
"def tabSelected(self):\r\n self.transactionMenuWidget.tabSelected()",
"def on_Panel_select_page(self, widget):\n try:\n funcioneshab.listadonumhab()\n except:\n print(\"error botón cliente barra herramientas\")",
"def _comboSlot(self, select):\n select = self.sender().itemText(select)\n if qt4:\n qs = str(self.sender().property(\"dom address\").toPyObject())\n else:\n qs = str(self.sender().property(\"dom address\"))\n item = QtXml.QDomElement()\n\n ind = qs.rfind('/')\n ids = qs[ind:]\n\n item = self.qhash[qs].elem.firstChildElement(\"Item\")\n while(item.isNull() is False):\n itemName = item.firstChildElement(\"Name\")\n if(str(itemName.text()).strip() != select):\n activ = item.firstChildElement(\"Activate\")\n while(activ.isNull() is False):\n s = str(activ.text()).strip() + ids\n h = self.qhash[s]\n widget_enabled = h.elem.attribute(\"Enabled\", \"True\")\n widget_visible = h.elem.attribute(\"Visible\", \"Unknown\")\n h.widget.setEnabled(False)\n if(widget_visible != \"Unknown\"):\n h.label.hide()\n h.widget.hide()\n activ = activ.nextSiblingElement(\"Activate\")\n item = item.nextSiblingElement(\"Item\")\n\n item = self.qhash[qs].elem.firstChildElement(\"Item\")\n while(item.isNull() is False):\n itemName = item.firstChildElement(\"Name\")\n if(str(itemName.text()).strip() == select):\n activ = item.firstChildElement(\"Activate\")\n while(activ.isNull() is False):\n s = str(activ.text()).strip() + ids\n h = self.qhash[s]\n h.widget.setEnabled(True)\n h.label.show()\n h.widget.show()\n activ = activ.nextSiblingElement(\"Activate\")\n item = item.nextSiblingElement(\"Item\")",
"def select_vendor_price_list_detail_dial_digits_tab(self):\n self.click_element(self.vendor_price_list_details_dial_digits_tab_locator, True)",
"def populateTabs(self):\n frameworks = self.data['frameworks']\n for fw in frameworks:\n frameworkElements = frameworks[fw]\n for element in frameworkElements:\n\n ui = self.framework2gui[fw].get(element)\n\n if isinstance(ui, QComboBox):\n ui.clear()\n ui.setView(QListView())\n for i, deviceName in enumerate(frameworks[fw][element]):\n ui.addItem(deviceName)\n if frameworks[fw]['deviceName'] == deviceName:\n ui.setCurrentIndex(i)\n\n elif isinstance(ui, QLineEdit):\n ui.setText(f'{frameworks[fw][element]}')\n\n elif isinstance(ui, QCheckBox):\n ui.setChecked(frameworks[fw][element])\n\n elif isinstance(ui, QDoubleSpinBox):\n ui.setValue(frameworks[fw][element])\n return True",
"def Root_Profile(self):\r\n self.combobox_profile.selectitem( 0 )",
"def select_schema_tab(self):\n if self.current_package_version() >= semver.VersionInfo.parse(\"3.8.0\"):\n if self.current_package_version() >= semver.VersionInfo.parse(\"3.9.100\"):\n schema = '//*[@id=\"subNavigationBar\"]/ul[2]/li[6]/a'\n select_schema_tab_sitem = self.locator_finder_by_xpath(schema)\n else:\n select_schema_tab_sitem = self.locator_finder_by_xpath(self.select_schema_tab_id)\n select_schema_tab_sitem.click()\n time.sleep(2)\n else:\n print('Schema check not supported for the current package version \\n')\n self.wait_for_ajax()",
"def select_tab_of_view_price_list_detail_page(self, tab_name):\n self.wait_for_ajax_spinner_load(300)\n view_price_list_detail_page_tab_locator = (By.XPATH, self.vendor_profile_page_tab_locator_string + \"[text()='%s']\" % tab_name)\n self.select_static_tab(view_price_list_detail_page_tab_locator, 'tab locator not found')",
"def handle_selection_cust(self):\n choice = self.get_input()\n if choice == '1':\n self.display_cust_unlock()\n elif choice == '2':\n self.display_return_car()\n elif choice == '3':\n self.display_main()",
"def select_vendor_price_list_detail_reference_rates_tab(self):\n self.click_element(self.vendor_price_list_details_reference_rates_tab_locator, True)",
"def show_table_combobox(self):\n self.table_chosen[\"values\"] = self.sql_database.show_tables(self.change_db())\n if len(self.table_chosen[\"values\"]) > 0:\n self.table_chosen.current(0)",
"def is_vendors_tab_present(self):\n return self.is_element_present(self.vendors_tab_locator)",
"def test_editVendor(self):\n self.tc_id = \"019\"\n self.tc_desc = \"Verify the admin is able to edit and update delete the vendor\"\n self.tc_step = \"TC Start\"\n\n vendoredit = vendoreditFeatures(self.driver)\n\n self.tc_step = \"Launch the url\"\n vendoredit.launchUrl(self.url)\n\n self.tc_step = \"Enter the login details\"\n vendoredit.clickVendorLogin()\n vendoredit.enterloginDetails(self.username, self.password)\n vendoredit.clickLogin()\n vendoredit.clickvendorTab()\n vendoredit.clickVendor()\n vendoredit.clickeditvendor()\n vendoredit.entereditvendor(self.editFirstName,self.editLastName)\n vendoredit.updatevendor()\n vendoredit.clickvendorTab()\n vendoredit.clickeditPrimaryEmail()\n vendoredit.entereditPrimaryEmail(self.primaryEmail)\n vendoredit.updatePrimaryEmail()\n vendoredit.updatePrimaryEmailOk()\n vendoredit.clickeditvendorlastupdatedtime()\n vendoredit.clickpicktime()\n vendoredit.picktime()\n vendoredit.picktimeapply()\n vendoredit.picktimeUpdate()\n # vendoredit.deletevendor()\n # vendoredit.confirmremovevendor()\n # vendoredit.confirmremovevendorok()\n vendoredit.viewmore()\n vendoredit.viewmoreClose()\n vendoredit.activeCheckvendor()\n vendoredit.verifiedCheckvendor()\n vendoredit.followupvendor()\n vendoredit.followupvendorok()",
"def on_pushButton_view_clicked(self):\n content = unicode(self.comboBox.currentText())\n if content == \"职称表\":\n data = self.sql_client.get_zc_info()\n self.fill_tableview(data)\n elif content == \"文化表\":\n data = self.sql_client.get_wh_info()\n self.fill_tableview(data)\n elif content == \"部门表\":\n data = self.sql_client.get_bm_info()\n self.fill_tableview(data)",
"def onVendor(self, action):\n\n if not self.graphicsView.hasImage():\n self.actionVendor.setChecked(False)\n self.showImageSelectionMessageBox()\n return\n\n self.actionVendor.setChecked(True)\n if not hasattr(self.actionVendor, 'tag'):\n self.actionVendor.tag = PlacePolygonCommand.PlacePolygonCommand(self.graphicsView)\n self.actionVendor.tag.onSuccess.connect(self.onVendorCreated)\n self.actionVendor.tag.onRejected.connect(self.onCommandRejected)\n\n self.graphicsView.command = self.actionVendor.tag",
"def get_airplane_suggestions(self):\n if not self.airplanes_search_bar.focus: # If the function gets called after the user has chosen option,\n self.suggestions_dropdown.dismiss() # hide drop-down.\n return\n else: # else, if the function is called to select option,\n self.suggestions_dropdown.dismiss() # hide previous drop-down.\n\n self.suggestions_dropdown = DropDown()\n\n airplanes_data = self.app.data_manager.airplanes_tree_manager.get_in_order_list(self.app.data_manager.airplanes_tree, self.airplanes_search_bar.text.upper())\n\n airplane_id_index = self.app.data_manager.airplanes_tree_manager.index\n\n if airplanes_data is None:\n btn_suggestion = Button(text='NOT FOUND', size_hint_y=None, height=44)\n self.suggestions_dropdown.add_widget(btn_suggestion)\n else:\n for airplane_data in airplanes_data:\n btn_suggestion = DataButton(data=airplane_data, text=airplane_data[airplane_id_index], size_hint_y=None, height=44)\n btn_suggestion.bind(on_release=lambda btn_suggestion_ref: self.focus_on_airplane(btn_suggestion_ref))\n self.suggestions_dropdown.add_widget(btn_suggestion)\n\n self.suggestions_dropdown.bind(on_select=lambda instance, btn_suggestion_ref: setattr(self.airplanes_search_bar, 'text', btn_suggestion_ref))\n self.suggestions_dropdown.open(self.airplanes_search_bar)\n self.airplanes_search_bar.bind(on_parent=self.suggestions_dropdown.dismiss)",
"def select_transactions_tab(self):\n self.click_element(self.transactions_tab_locator)",
"def show_selected_option(self, item):\n\n if item:\n if item.text() == \"ALL\":\n self.dlg.uStackedWidget.setCurrentIndex(0)\n self.curr_list_wid_index = self.dlg.uListOptions.findItems(\n item.text(), Qt.MatchExactly\n )[0]\n self.proxy_model.set_service_type((\"WMTS\", \"WFS\"))\n elif item.text() == \"WFS\":\n self.proxy_model.set_service_type((item.text()))\n self.curr_list_wid_index = self.dlg.uListOptions.findItems(\n item.text(), Qt.MatchExactly\n )[0]\n self.dlg.uStackedWidget.setCurrentIndex(0)\n elif item.text() == \"WMTS\":\n self.proxy_model.set_service_type((item.text()))\n self.curr_list_wid_index = self.dlg.uListOptions.findItems(\n item.text(), Qt.MatchExactly\n )[0]\n self.dlg.uStackedWidget.setCurrentIndex(0)\n elif item.text() == \"Settings\":\n self.dlg.uStackedWidget.setCurrentIndex(1)\n elif item.text() == \"Help\":\n self.dlg.uStackedWidget.setCurrentIndex(2)\n elif item.text() == \"About\":\n self.dlg.uStackedWidget.setCurrentIndex(3)",
"def on_menuBarPreciosServicios_activate(self,widget):\n\n try:\n variables.venPrecios.show()\n conexion.cur.execute('select * from precios')\n precios = conexion.cur.fetchall()\n variables.entPrecioDesayuno.set_text(str(precios[0][0]))\n variables.entPrecioComida.set_text(str(precios[0][1]))\n variables.entPrecioParking.set_text(str(precios[0][2]))\n except:\n print('error abrir ventana precios')",
"def set_vendor_profile_status(self, item_name):\n self.single_selection_from_kendo_dropdown(self.vendor_profile_status_kendo_dropdown_locator, item_name)\n self.wait_for_ajax_spinner_load()",
"def select(self):\r\n pass",
"def for_type_select_link_vlan_for_name_enter_vlan1043(driver):\n driver.find_element_by_xpath('//mat-select[@ix-auto=\"select__Type\"]').click()\n wait_on_element(driver, 0.5, 5, '//mat-option[@ix-auto=\"option__Type_VLAN\"]')\n driver.find_element_by_xpath('//mat-option[@ix-auto=\"option__Type_VLAN\"]').click()\n driver.find_element_by_xpath('//input[@ix-auto=\"input__Name\"]').clear()\n driver.find_element_by_xpath('//input[@ix-auto=\"input__Name\"]').send_keys('vlan1043')",
"def edit_tools(self, e):\n #GETTING SELECTION\n\n self.selected_item = self.user_inventory.selection()\n self.select_name = self.user_inventory.item([i for i in self.selected_item], \"values\")[0]\n self.select_entdate = self.user_inventory.item([i for i in self.selected_item], \"values\")[3]\n\n self.df_same_name = self.df_user.query(\"title == @self.select_name\")\n #this is the selected one for sure\n self.df_the_selected_item = self.df_same_name.loc[self.df_same_name[\"entry date\"] == self.select_entdate]\n\n #GETTING THE INDEX NUMBER OF THE SELECTION IN .CSV FILE\n self.index_select = self.df_the_selected_item.index\n self.index_select_number = self.index_select.tolist()\n\n #bottom buttons appear:\n self.changing_item_label.config(text=\"Now editing \"+self.select_name+\" that added on \"+self.select_entdate+\":\")\n\n self.delete_but = Button (self.bottom_frame, text=\"DELETE\", command=self.delete_button)\n self.delete_but.place(relx=0.1, rely=0.7, relwidth=0.28, anchor=\"w\")\n\n self.servings_list = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\n self.serv_drop = Combobox(self.bottom_frame, value=self.servings_list, state=\"readonly\")\n self.serv_drop.place(relx=0.5, rely=0.7, relwidth=0.2, anchor=CENTER)\n\n \n self.serv_but = Button(self.bottom_frame, text=\"CHANGE AMOUNT\", command=self.change_amount_button, state=\"disabled\")\n self.serv_but.place(relx=0.9, rely=0.7, relwidth=0.28, anchor=\"e\")\n\n self.serv_drop.bind(\"<<ComboboxSelected>>\", self.activate_button)",
"def handle_selection_main(self):\n choice = self.get_input()\n if choice == '1':\n self.display_cust()\n elif choice == '2':\n self.is_user = False\n self.display_eng()"
] | [
"0.6188047",
"0.6105495",
"0.6105495",
"0.5952342",
"0.59404254",
"0.59331214",
"0.59069777",
"0.5803761",
"0.56433725",
"0.56020993",
"0.5578502",
"0.55331236",
"0.55300546",
"0.5517784",
"0.5484401",
"0.5458557",
"0.545534",
"0.54479146",
"0.54111296",
"0.5366512",
"0.53527933",
"0.5351647",
"0.5350627",
"0.5347504",
"0.52985054",
"0.52950764",
"0.5293737",
"0.5263094",
"0.5247543",
"0.522395"
] | 0.73975277 | 0 |
Implementing is create vendor present functionality | def is_create_vendor_present(self):
return self.is_element_present(self.create_vendor_locator) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def onVendorCreated(self):\n\n try:\n count = len(self.actionVendor.tag._polyline._vertices)\n if count > 2:\n points = []\n for point in self.actionVendor.tag._polyline._vertices:\n points.append(QPoint(round(point[0]), round(point[1])))\n polygon = QPolygonF(points)\n item = QEngineeringVendorItem(polygon, pack_type=self.packageComboBox.currentText())\n item.area = 'Drawing'\n item.transfer.onRemoved.connect(self.itemRemoved)\n self.graphicsView.scene().addItem(item)\n finally:\n self.graphicsView.scene().removeItem(self.actionVendor.tag._polyline)\n self.actionVendor.tag.reset()",
"def test_post_activate_marketplace_vendor_v3(self):\n pass",
"def do_command(self, args):\n vendorops = dbops.Vendors()\n vendorops.add(args)",
"def click_create_vendor_button(self):\n create_vendor_element = self.wait().until(EC.element_to_be_clickable(self.create_vendor_locator), \"create vendor locator not found before specified time out\")\n create_vendor_element.click()\n self.wait_for_ajax_spinner_load()",
"def create(self):",
"def get_created_vendor_name(self):\n global vendor_name\n return vendor_name",
"def onVendor(self, action):\n\n if not self.graphicsView.hasImage():\n self.actionVendor.setChecked(False)\n self.showImageSelectionMessageBox()\n return\n\n self.actionVendor.setChecked(True)\n if not hasattr(self.actionVendor, 'tag'):\n self.actionVendor.tag = PlacePolygonCommand.PlacePolygonCommand(self.graphicsView)\n self.actionVendor.tag.onSuccess.connect(self.onVendorCreated)\n self.actionVendor.tag.onRejected.connect(self.onCommandRejected)\n\n self.graphicsView.command = self.actionVendor.tag",
"def test_post_authorize_seller_vendor_v3(self):\n pass",
"def create(self):\n ...",
"def create():",
"def create():",
"def create(self):\n pass",
"def create(self):\n pass",
"def create(self):\n pass",
"def create(self):\n\n pass",
"def test_create_system_entire(self):\n pass",
"def is_vendor(self) -> bool:\n return self._is_vendor",
"def test_editVendor(self):\n self.tc_id = \"019\"\n self.tc_desc = \"Verify the admin is able to edit and update delete the vendor\"\n self.tc_step = \"TC Start\"\n\n vendoredit = vendoreditFeatures(self.driver)\n\n self.tc_step = \"Launch the url\"\n vendoredit.launchUrl(self.url)\n\n self.tc_step = \"Enter the login details\"\n vendoredit.clickVendorLogin()\n vendoredit.enterloginDetails(self.username, self.password)\n vendoredit.clickLogin()\n vendoredit.clickvendorTab()\n vendoredit.clickVendor()\n vendoredit.clickeditvendor()\n vendoredit.entereditvendor(self.editFirstName,self.editLastName)\n vendoredit.updatevendor()\n vendoredit.clickvendorTab()\n vendoredit.clickeditPrimaryEmail()\n vendoredit.entereditPrimaryEmail(self.primaryEmail)\n vendoredit.updatePrimaryEmail()\n vendoredit.updatePrimaryEmailOk()\n vendoredit.clickeditvendorlastupdatedtime()\n vendoredit.clickpicktime()\n vendoredit.picktime()\n vendoredit.picktimeapply()\n vendoredit.picktimeUpdate()\n # vendoredit.deletevendor()\n # vendoredit.confirmremovevendor()\n # vendoredit.confirmremovevendorok()\n vendoredit.viewmore()\n vendoredit.viewmoreClose()\n vendoredit.activeCheckvendor()\n vendoredit.verifiedCheckvendor()\n vendoredit.followupvendor()\n vendoredit.followupvendorok()",
"def __init__(self, vendor_id, product_id):\n self.vendor_id = vendor_id\n self.product_id = product_id",
"def vendor(self, vendor):\n\n self._vendor = vendor",
"def _Create(self):\n pass",
"def generate(v, vendors):\n return vendors[v].new_card()",
"def create():\n pass",
"def click_buy_and_sell_deal_create_button(self):\n self.click_element(self.save_vendor_profile_locator)",
"def insert_vendor(vendor_name):\r\n sql = \"\"\"INSERT INTO vendors(vendor_name)\r\n VALUES(%s) RETURNING vendor_id;\"\"\"\r\n conn = None\r\n vendor_id = None\r\n try:\r\n # leer configuracion de la base de datos\r\n params = config()\r\n # conectarse a la Base de Datos PostgreSQL\r\n conn = psycopg2.connect(**params)\r\n # Crear un nuevo Cursor\r\n cur = conn.cursor()\r\n # ejecutar la instruccion INSERT\r\n cur.execute(sql, (vendor_name,))\r\n # recuperar el id generado\r\n vendor_id = cur.fetchone()[0]\r\n # commit cambios a la Base de Datos\r\n conn.commit()\r\n # cerra la comunicacion con l Base de Datos\r\n cur.close()\r\n except (Exception, psycopg2.DatabaseError) as error:\r\n print(error)\r\n finally:\r\n if conn is not None:\r\n conn.close()\r\n\r\n return vendor_id",
"def test_create_device(self):\n pass",
"def test_create_device(self):\n pass",
"def productactivate():\n pass",
"def Create(self):\n raise NotImplementedError()",
"def vendor_list():\n return ['nxos', 'eos', 'cumulus']"
] | [
"0.655659",
"0.6182998",
"0.61777186",
"0.6160204",
"0.61143076",
"0.6047475",
"0.59969324",
"0.5987495",
"0.59184116",
"0.5901415",
"0.5901415",
"0.5879579",
"0.5879579",
"0.5879579",
"0.5856006",
"0.58531314",
"0.57469994",
"0.57304674",
"0.57168674",
"0.56997466",
"0.5679161",
"0.56738174",
"0.5661757",
"0.5635141",
"0.5588703",
"0.5573424",
"0.5573424",
"0.5547465",
"0.5521003",
"0.5500587"
] | 0.6788756 | 0 |
Implementing is vendor price lists present functionality | def is_vendor_price_lists_present(self):
return self.is_element_present(self.vendor_price_lists_locator) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_vendor_price_lists_details(self):\n try:\n self.vendor_price_lists_dict = self.get_grid_row_details(self.customer_price_list_grid_div_id, self.vendor_price_lists_dict)\n return True\n except:\n return False",
"def verify_vendor_price_lists_details(self, row_data):\n return self.verify_grid_row_details(self.customer_price_list_grid_div_id, row_data)",
"def get_prices(self):\n pass",
"def is_upload_vendor_price_list_present(self):\n return self.is_element_present(self.upload_vendor_price_list_locator)",
"def vendor_list():\n return ['nxos', 'eos', 'cumulus']",
"def click_on_vendor_price_lists(self):\n vendor_price_lists_element = self.wait().until(EC.element_to_be_clickable(self.vendor_price_lists_locator), 'vendor price lists locator not found before specified time')\n self.script_executor_click(vendor_price_lists_element)\n self.wait_for_ajax_spinner_load()",
"def show_vendor_product():\n vendor = input(\"Enter the Vendor: \")\n product = input(\"Enter the product: \")\n filter_string = input(\"Enter Optional Search string (i.e. HTTP): \")\n logger.debug(\"Searching: {} from {} -- Filter = {}\".format(product, vendor, filter_string))\n search_url = \"http://cve.circl.lu/api/search/{}/{}\".format(vendor, product)\n req = call_api(search_url)\n if not req:\n logger.debug(\"something no workie with the vendor product call\")\n else:\n print(\"Searching: {} from {} -- Filter = {}\".format(product, vendor, filter_string))\n for item in req:\n if filter_string != '' or not filter_string:\n if filter_string in item['summary']:\n print(\"\\nSummary: \" + item['summary'])\n print(\"CVE: \" + item['id'])\n print(\"CVSS: \" + str(item['cvss']))\n else:\n print(\"\\nSummary: \" + item['summary'])\n print(\"CVE: \" + item['id'])\n print(\"CVSS: \" + str(item['cvss']))\n menu()",
"def get_prices(uuid, card_format, price_source, price_list, card_type_order, price_data_json):\n if price_source not in price_data_json[uuid][card_format]:\n pass\n #print(f'Price source value of {price_source} is not available for {card_format} and {uuid}')\n else:\n source = price_data_json[uuid][card_format][price_source]\n if price_list not in source:\n pass\n #print(f'Price list value of {price_list} is not available for {price_source} and {uuid}')\n else:\n retail = source[price_list]\n for type in card_type_order:\n if type in retail:\n return retail[type]",
"def test_get_small_and_light_eligibility_by_seller_sku(self):\n pass",
"def test_query_product_list_by_price(self):\n products = self._create_products(10)\n test_price_low = 30\n test_price_high = 100\n price_products = [product for product in products if product.price >= test_price_low and product.price <= test_price_high]\n resp = self.app.get(\n \"/products\", query_string=(\"low={}&high={}\".format(test_price_low,test_price_high))\n )\n self.assertEqual(resp.status_code, status.HTTP_200_OK)\n data = resp.get_json()\n self.assertEqual(len(data), len(price_products))\n # check the data just to be sure\n for product in data:\n self.assertTrue(product[\"price\"] >= test_price_low)\n self.assertTrue(product[\"price\"] <= test_price_high)",
"def set_vendor(self, vendor_list):\n self.multiple_items_selection_from_kendo_dropdown(self.vendor_dropdown_locator, vendor_list)\n self.wait_for_ajax_spinner_load()",
"def query_part_info(parts, distributors, currency):\n solved = set()\n # Loop through the parts looking for those sourced by local distributors\n # that won't be found online. Place any user-added info for these parts\n # (such as pricing) into the part dictionary.\n for p in parts:\n # Find the manufacturer's part number if it exists.\n pn = p.fields.get('manf#') # Returns None if no manf# field.\n\n # Now look for catalog number, price list and webpage link for this part.\n for dist in distributors:\n cat_num = p.fields.get(dist + ':cat#')\n pricing = p.fields.get(dist + ':pricing')\n link = p.fields.get(dist + ':link')\n avail = p.fields.get(dist + ':avail')\n if cat_num is None and pricing is None and link is None:\n continue\n\n cat_num = cat_num or pn or make_unique_catalog_number(p, dist)\n p.fields[dist + ':cat#'] = cat_num # Store generated cat#.\n # Get the DistData for this distributor\n dd = p.dd.get(dist, DistData())\n dd.part_num = cat_num\n\n if link:\n url_parts = list(urlsplit(link))\n if url_parts[0] == '':\n url_parts[0] = u'http'\n link = urlunsplit(url_parts)\n else:\n # This happens when no part URL is found.\n debug_obsessive('No part URL found for local \\'{}\\' distributor!'.format(dist))\n dd.url = link\n\n price_tiers = {}\n try:\n local_currency = re.findall('[a-zA-Z]{3}', pricing)[0].upper()\n except Exception:\n local_currency = currency\n old_pricing = pricing\n pricing = re.sub('[^0-9.;:]', '', pricing) # Keep only digits, decimals, delimiters.\n for qty_price in pricing.split(';'):\n splitted = qty_price.split(SEPRTR)\n if len(splitted) == 2:\n qty, price = splitted\n if local_currency:\n dd.currency = local_currency\n try:\n price_tiers[int(qty)] = float(price)\n except ValueError:\n warning(W_BADPRICE, 'Malformed pricing number: `{}` at {}'.format(old_pricing, p.refs))\n else:\n warning(W_BADPRICE, 'Malformed pricing entry: `{}` at {}'.format(qty_price, p.refs))\n # dd.moq = min(price_tiers.keys())\n if not price_tiers:\n # This happens when no pricing info is found.\n debug_obsessive('No pricing information found for local \\'{}\\' distributor!'.format(dist))\n dd.price_tiers = price_tiers\n\n # Availability\n if avail is not None:\n dd.qty_avail = avail\n\n # Update the DistData for this distributor\n p.dd[dist] = dd\n # We have data for this distributor. Avoid marking normal distributors.\n if dist in dist_local_template.api_distributors:\n solved.add(dist)\n return solved",
"def is_vendor_price_list_not_displayed_in_the_grid(self):\n is_displayed = True\n current_number_of_rows = int(self.get_number_of_rows_from_grid(self.vendors_price_lists_grid_div_bar_id))\n if (current_number_of_rows > self.number_of_rows):\n is_displayed = False\n return is_displayed",
"def get_basket_items_pricedrop(self, offer_info, actual_volume, product_prices):\n prod_code = offer_info.base_prod_code\n base_prod_vol = actual_volume.get(prod_code.lower())\n\n pricedrop_basket = []\n\n if base_prod_vol >= offer_info.min_vol:\n offer_on_prod = offer_info.offer_on\n if actual_volume.get(offer_on_prod.lower()):\n print(\n f\"Base product volume is greater than minimum required volume & product on offer is also available \"\n f\"in cart..\")\n if offer_info.is_limited:\n print(f\"Limited offer..\")\n if prod_code == offer_on_prod:\n # total_allowed_items_on_offer = Limit Volume of base product * (Offer Product Max Volume/Minimum volume of base product)\n total_allowed_items_on_offer = offer_info.limit_vol * (\n offer_info.offer_prod_volume / offer_info.min_vol)\n max_limit = 1\n for i in range(0, base_prod_vol):\n base_prod_actual_price = product_prices.get(prod_code.lower()).get('price')\n pricedrop_basket.append((prod_code, base_prod_actual_price))\n while max_limit <= total_allowed_items_on_offer:\n new_price = (base_prod_actual_price - (offer_info.new_price)) * -1\n pricedrop_basket.append((offer_info.offer_code, new_price))\n max_limit += 1\n else:\n total_allowed_items_on_offer = offer_info.limit_vol * (\n offer_info.offer_prod_volume / offer_info.min_vol)\n for i in range(0, base_prod_vol):\n base_prod_actual_price = product_prices.get(prod_code.lower()).get('price')\n pricedrop_basket.append((prod_code, base_prod_actual_price))\n max_limit = 1\n while max_limit <= total_allowed_items_on_offer:\n offer_onprod_actual_price = product_prices.get(offer_on_prod.lower()).get('price')\n new_price = (base_prod_actual_price - (offer_info.new_price)) * -1\n for j in range(0, actual_volume.get(offer_on_prod).lower()):\n pricedrop_basket.append((offer_on_prod, offer_onprod_actual_price))\n pricedrop_basket.append((offer_info.offer_code, new_price))\n max_limit += 1\n else:\n print(f\"Unlimited offer..\")\n if prod_code == offer_on_prod:\n for i in range(0, base_prod_vol):\n base_prod_actual_price = product_prices.get(prod_code.lower()).get('price')\n pricedrop_basket.append((prod_code, base_prod_actual_price))\n new_price = (base_prod_actual_price - (offer_info.new_price))*-1\n pricedrop_basket.append((offer_info.offer_code, new_price))\n else:\n for i in range(0, base_prod_vol):\n base_prod_actual_price = product_prices.get(prod_code.lower()).get('price')\n pricedrop_basket.append((prod_code, base_prod_actual_price))\n\n offer_onprod_actual_price = product_prices.get(offer_on_prod.lower()).get('price')\n new_price = (offer_onprod_actual_price - (offer_info.new_price)) * -1\n\n for j in range(0, actual_volume.get(offer_on_prod).lower()):\n pricedrop_basket.append((offer_on_prod, offer_onprod_actual_price))\n pricedrop_basket.append((offer_info.offer_code, new_price))\n\n return pricedrop_basket",
"def get_vendor_price_lists_from_date_value(self):\n return self.get_text_from_element(self.set_from_date_locator, is_a_input_field=True)",
"def test_visualize_price_breakdown(self):\n pass",
"def is_upload_vendor_price_list_pop_up_available(self):\n return self.is_element_present(self.upload_vendor_price_list_pop_up_locator)",
"def get_basket_items_discount(self, offer_info, actual_volume, product_prices):\n prod_code = offer_info.base_prod_code\n base_prod_vol = actual_volume.get(prod_code.lower())\n\n discount_basket = []\n\n if base_prod_vol >= offer_info.min_vol:\n offer_on_prod = offer_info.offer_on\n if actual_volume.get(offer_on_prod.lower()):\n print(f\"Base product volume is greater than minimum required volume & product on offer is also available \"\n f\"in cart..\")\n if offer_info.is_limited:\n print(f\"Limited offer..\")\n if prod_code == offer_on_prod:\n # total_allowed_items_on_offer = Limit Volume of base product * (Offer Product Max Volume/Minimum volume of base product)\n total_allowed_items_on_offer = offer_info.limit_vol * (offer_info.offer_prod_volume/offer_info.min_vol)\n max_limit = 1\n for i in range(0, base_prod_vol):\n base_prod_actual_price = product_prices.get(prod_code.lower()).get('price')\n discount_basket.append((prod_code, base_prod_actual_price))\n while max_limit <= total_allowed_items_on_offer:\n discounted_price = (base_prod_actual_price *(offer_info.discount_perc/100))*-1\n discount_basket.append((offer_info.offer_code, discounted_price))\n max_limit += 1\n else:\n total_allowed_items_on_offer = offer_info.limit_vol * (offer_info.offer_prod_volume / offer_info.min_vol)\n for i in range(0, base_prod_vol):\n base_prod_actual_price = product_prices.get(prod_code.lower()).get('price')\n discount_basket.append((prod_code, base_prod_actual_price))\n max_limit = 1\n while max_limit <= total_allowed_items_on_offer:\n offer_onprod_actual_price = product_prices.get(offer_on_prod.lower()).get('price')\n discounted_price = (offer_onprod_actual_price *(offer_info.discount_perc/100))*-1\n for j in range(0, actual_volume.get(offer_on_prod.lower())):\n discount_basket.append((offer_on_prod, offer_onprod_actual_price))\n discount_basket.append((offer_info.offer_code, discounted_price))\n max_limit += 1\n else:\n print(f\"Unlimited offer..\")\n if prod_code == offer_on_prod:\n if base_prod_vol > offer_info.min_vol:\n for i in range(0, base_prod_vol):\n base_prod_actual_price = product_prices.get(prod_code.lower()).get('price')\n discount_basket.append((prod_code, base_prod_actual_price))\n if i%2 != 0:\n discounted_price = (base_prod_actual_price *(offer_info.discount_perc/100))*-1\n discount_basket.append((offer_info.offer_code, discounted_price))\n else:\n for i in range(0, base_prod_vol):\n base_prod_actual_price = product_prices.get(prod_code.lower()).get('price')\n discount_basket.append((prod_code, base_prod_actual_price))\n else:\n for i in range(0, base_prod_vol):\n base_prod_actual_price = product_prices.get(prod_code.lower()).get('price')\n discount_basket.append((prod_code, base_prod_actual_price))\n\n offer_onprod_actual_price = product_prices.get(offer_on_prod.lower()).get('price')\n discounted_price = (offer_onprod_actual_price * (offer_info.discount_perc / 100))*-1\n\n for j in range(0, actual_volume.get(offer_on_prod.lower())):\n discount_basket.append((offer_on_prod, offer_onprod_actual_price))\n discount_basket.append((offer_info.offer_code, discounted_price))\n\n\n return discount_basket",
"def get_prices(self, grab, subject):\n prices = []\n try:\n extras = grab.doc.rex_text(\n '<h3 class\\=\"h6 copy-sp-m\">.*?%s.*?</h3>(.+?)</ul>' % subject,\n flags=re.S\n )\n except DataNotFound:\n logging.debug(\n \"Price %s is not found on %s\"\n % (subject, grab.doc.url)\n )\n return None\n\n sel = XpathSelector(fromstring(extras))\n prices = []\n for li in sel.select('//li[@class=\"list__item u-cf\"]'):\n obligatory = OrderedDict()\n obligatory['name'] = li.select('node()').text()\n money = li.select('node()/strong').text()\n obligatory['value'] = money[1:].replace(',', '')\n\n # Find perweek or perday\n if li.select(\n 'span[@class=\"boatview__extras-amount\"' +\n ' and contains(text(),\"per week\")]'\n ).exists():\n obligatory['perweek'] = True\n elif li.select(\n 'span[@class=\"boatview__extras-amount\"' +\n ' and contains(text(),\"per day\")]'\n ).exists():\n obligatory['perday'] = True\n obligatory['currency'] = money[0]\n prices.append(obligatory)\n\n if len(prices) < 1:\n logging.debug(\n \"Price %s contains less than one element on: %s\"\n % (subject, grab.doc.url)\n )\n return None\n\n return prices",
"def extract_listing_price_from_result(soup, prices):\r\n for description in soup.find_all(name='div', class_='descr'):\r\n price = description.find(name='div', class_='price')\r\n if price == None:\r\n prices.append('No Price')\r\n else:\r\n prices.append(price.get_text())\r\n # print(prices)\r\n return prices",
"def set_vendor_price_list_status(self, status_items):\n self.multiple_items_selection_from_kendo_dropdown(self.vendor_price_list_status_kendo_dropdown_locator, status_items)\n self.wait_for_ajax_spinner_load()",
"def get_prices(self):\n price = self.get_price()\n if price:\n return [price]\n return []",
"def price(self, irc, msg, args, optlist, typeName):\n\n try:\n typeID = self._get_typeID(typeName)\n itemType = self._get_type(typeID)\n except:\n irc.error('Unknown type')\n return\n\n if len(optlist) == 1:\n location = optlist[0][1]\n else:\n location = 'Jita'\n\n try:\n locationID = self._get_locationID(location)\n location = self._get_location(locationID)\n except:\n irc.error('Unknown location')\n return\n\n market = self._sql(\"\"\"\n SELECT * FROM evecentral_market\n WHERE \"locationID\"=%s\"\"\", [locationID])\n if not market:\n irc.reply('No data for that market location')\n return\n\n marketitem = self._sql(\"\"\"\n SELECT * FROM evecentral_marketitem\n WHERE \"locationID\"=%s AND \"typeID\"=%s\"\"\", [locationID, typeID])\n if marketitem:\n irc.reply('{0} in {1}: buy max: {2} (volume: {3:,d}). sell min: {4} (volume: {5:,d}).'.format(\n ircutils.bold(itemType['typeName']),\n self._colorize_system(location),\n ircutils.mircColor(\n '{:,.2f}'.format(marketitem['buy_max']),\n fg='green'),\n int(marketitem['buy_volume']),\n ircutils.mircColor(\n '{:,.2f}'.format(marketitem['sell_min']),\n fg='green'),\n int(marketitem['sell_volume']),\n ), prefixNick=False)\n else:\n irc.reply(\"Prices for {0} in {1} isn't updated yet.\".format(\n itemType['typeName'],\n location['itemName']\n ))",
"def base_offers_list():\n offers_list = Offers()\n offers_list.add_new_offer('Offer1', 'Baked Beans', 3, 1)\n offers_list.add_new_offer('Offer2', 'Sardines', 1, 0.25)\n return offers_list",
"def test_get_offers(self):\n pass",
"def get_viewed_products(list):\n \n #initialize cart with random ASIN\n params = {\"Item.1.ASIN\":'B000DLB2FI', 'Item.1.Quantity':1}\n cart = amazon.CartCreate(**params)\n root = objectify.fromstring(cart)\n cartid = _safe_get_element_text('Cart.CartId', root)\n hmac = _safe_get_element_text('Cart.HMAC', root)\n\n #create empty list of similar products\n svlist = []\n\n count = 0 #testing\n\n #iterate through list of original ASINs and retrieve also viewed products\n print 'Retrieving \\\"Also Viewed\\\" Products!' #testing\n for item in list:\n #add to cart\n amazon.CartClear(CartId=cartid, HMAC=hmac)\n params = {\"Item.1.ASIN\":item, 'Item.1.Quantity':1, 'CartId':cartid, 'HMAC':hmac, 'ResponseGroup':'Cart,CartSimilarities'}\n cart = amazon.CartAdd(**params)\n root = objectify.fromstring(cart)\n \n count +=1 #testing\n print count #testing\n\n #iterate through each similar product and add to list\n #issue with ASIN = B004NK6DFE <- fixed\n if \"SimilarViewedProduct\" in cart:\n for item2 in root.Cart.SimilarViewedProducts.SimilarViewedProduct:\n if _safe_get_element_text('Title', item2) is not None:\n svlist.append({'Original ASIN':item,\n 'Associated ASIN':item2.ASIN,\n 'Title':item2.Title,\n 'Price': None,\n 'Currency Code':None,\n 'Relationship':\"Also Viewed\"})\n\n print 'Total # of \\\"Also Viewed\\\" Products: ' + str(len(svlist))\n count = 0 #testing\n\n #iterate through each also viewed prodcut and obtain lowest price\n print 'Retrieving prices!' #testing\n for item in svlist:\n if item['Title'] is not None:\n title = filter(lambda x: x in string.printable, item['Title'].text) #remove non-ascii\n item['Title'] = title\n\n count+=1 #testing\n print count #testing\n\n pricelist = amazon.ItemLookup(ItemId=item['Associated ASIN'],ResponseGroup=\"OfferSummary,VariationSummary\")\n priceroot = objectify.fromstring(pricelist)\n #conditionals to check if parent or child ASIN or OOS, Variation pricing can only be called on parent\n if _safe_get_element_text(\"Items.Item.OfferSummary.LowestNewPrice.FormattedPrice\", priceroot) is not None: #Child ASIN\n item['Price'] = _safe_get_element_text('Items.Item.OfferSummary.LowestNewPrice.FormattedPrice', priceroot)\n item['Currency Code'] = _safe_get_element_text('Items.Item.OfferSummary.LowestNewPrice.CurrencyCode', priceroot)\n else:\n item['Price'] = _safe_get_element_text('Items.Item.VariationSummary.LowestPrice.FormattedPrice', priceroot)\n item['Currency Code'] = _safe_get_element_text('Items.Item.VariationSummary.LowestPrice.CurrencyCode', priceroot)\n return svlist",
"def test_list_products_filtered_by_selling_status(self):\n self._require_login(self.user1)\n response = self.client.get('/api/1.0/products/?selling=3')\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(response.data.__len__(), 1)\n self.assertEqual(response.data[0]['name'], 'Producto 2')\n self.assertEqual(response.data[0]['description'], 'Descripcion producto 2')",
"def see_products_for_rent_handler():\n\n products = ShowProductsAndCustomers()\n my_list = products.see_products_for_rent()\n my_result_list = []\n for product in my_list:\n my_result_list.append(product)\n print(product)\n return my_result_list",
"def set_vendor_price_lists_grid_settings(self, grid_settings):\n self.set_grid_settings(self.view_price_list_div_id, grid_settings)",
"def show_products_html(self, ul=False, br=True):\n output = \"\"\n if ul:\n output += \"<ul>\"\n for sp in SubscriptionProduct.objects.filter(\n subscription=self, product__offerable=True).order_by('product_id'):\n count = self.products.filter(offerable=True).count()\n if ul:\n if sp.label_contact:\n output += \"<li>{} ({})</li>\".format(sp.product.name, sp.label_contact.name)\n else:\n output += \"<li>{}</li>\".format(sp.product.name)\n else:\n if sp.label_contact:\n output += \"{} ({})\".format(sp.product.name, sp.label_contact.name)\n else:\n output += \"{}\".format(sp.product.name)\n if count > 1:\n if br:\n output += \"<br>\"\n else:\n output += \"\\n\"\n if ul:\n output += \"</ul>\"\n return output"
] | [
"0.7492419",
"0.63614565",
"0.62272936",
"0.619145",
"0.6164832",
"0.614131",
"0.5918902",
"0.58867675",
"0.58189636",
"0.58065224",
"0.5774671",
"0.5773653",
"0.5753038",
"0.57513326",
"0.5735464",
"0.5723339",
"0.5710892",
"0.5680941",
"0.5598808",
"0.55697834",
"0.5557698",
"0.5546517",
"0.55237764",
"0.55052954",
"0.54855365",
"0.54763407",
"0.54421496",
"0.5430783",
"0.5423544",
"0.535806"
] | 0.6822345 | 1 |
Implementing click buy page inline action button functionality | def click_buy_page_inline_action_button(self, vendor):
self.click_inline_action_button(self.vendors_div_id, vendor, self.grid_column_number) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def click_buy_and_sell_deal_management_grid_first_row_inline_action_button(self):\n self.click_inline_action_button(self.buy_and_sell_management_grid_div_id, None, self.buy_and_sell_management_grid_inline_action_column_number, True)",
"def click_buy_and_sell_deal_bulk_edit_button(self):\n self.click_element(self.buy_and_sell_deal_bulk_edit_button_lcoator, True)",
"def click(self):\r\n pass",
"def click_view_price_list_detail_page_inline_action_button(self, price_list_item):\n self.click_inline_action_button(self.view_price_list_div_id, price_list_item, self.view_price_list_column_number)\n self.wait_for_ajax_spinner_load()",
"def click_target_buy_policy_grid_first_row_inline_action_button(self):\n self.click_inline_action_button(self.target_buy_policies_grid_div_id, None, column_number=2, first_row=True)\n self.wait_for_ajax_spinner_load()",
"def click_buy_and_sell_deal_create_button(self):\n self.click_element(self.save_vendor_profile_locator)",
"def trade_action(self, BUY_QTY):\n BUY_QTY = 4500\n self.trade(BUY_QTY)\n #self.show()",
"def _ClickPrimaryActionButton(self):\n self._ExecuteOobeApi('Oobe.clickGaiaPrimaryButtonForTesting')",
"def click_target_buy_policies_grid_first_row_inline_action_button(self):\n self.click_inline_action_button(self.target_buy_policies_grid_div_id, None, self.target_buy_policies_grid_inline_action_column_number, first_row=True)",
"def onClick(*args):",
"def onClick(*args):",
"def onClick(*args):",
"def onClick(*args):",
"def action(self,item):\r\n pass",
"def checkout_btn(self):\n self._checkout_btn.click()",
"def click_download_button(self):\n self._basket.click_download_button()",
"def on_click(self) -> None:\n pass",
"def click_button(self):\n self.q(css='div#fixture button').first.click()",
"def action(self):\n pass",
"def action(self):\n pass",
"def click_edit_target_buy_policy_button(self):\n self.click_element(self.edit_target_buy_policy_button_locator)",
"def click_view_price_list_detail_first_row_inline_action_button(self):\n self.click_inline_action_button(self.view_price_list_div_id, None, self.view_price_list_column_number, True)\n self.wait_for_ajax_spinner_load()",
"def custom_actions(self, form_wizard_entry, request=None):",
"def click_request_new_deal_button(self):\n self.click_element(self.request_new_deal_button_locator)",
"async def _vis_buy(self, ctx, *args):\n if has_post_permission(ctx.guild.id, ctx.channel.id):\n number, item = ch.parse_number_and_name(args)\n if item:\n await ctx.send(vis_helpers.shop_buy(ctx.user_object, item, number))",
"def click_vendor_price_list_grid_first_row_inline_action_button(self):\n self.click_inline_action_button(self.vendor_price_list_grid_div_id, None, self.view_price_list_column_number, True)",
"def click(self):\n self.dispatch['elementClick'] = self.clickJsFnc",
"def click_button(self):\n self.widgets.get('button').click()",
"def _action(self):\n pass",
"def Button(request):\n params = {\n 'mimetype': 'text/javascript',\n 'fn': request.GET.get('fn', '_bRunTest'),\n 'btn_text': request.GET.get('btn_text', 'Run the test'),\n 'cb_text': request.GET.get('cb_text',\n 'and send my results to Browserscope (anonymously)'),\n }\n return util.Render(request, 'user_test_button.js', params)"
] | [
"0.65620035",
"0.63311285",
"0.63300854",
"0.6286846",
"0.62395346",
"0.6238853",
"0.6130139",
"0.60976636",
"0.6089991",
"0.60558015",
"0.60558015",
"0.60558015",
"0.60558015",
"0.6051222",
"0.5989819",
"0.5983532",
"0.59315854",
"0.58962005",
"0.5894016",
"0.5894016",
"0.5867228",
"0.5843865",
"0.5797544",
"0.5782904",
"0.5776837",
"0.5758437",
"0.57547474",
"0.5727603",
"0.571988",
"0.5715726"
] | 0.7795137 | 0 |
Implementing is vendor profile present functionality | def is_vendor_profile_present(self):
return self.is_element_present(self.vendor_profile_locator) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def is_vendor_profile_page_loaded_properly(self):\n return self.is_element_present(self.save_vendor_profile_locator)",
"def is_vendor(self) -> bool:\n return self._is_vendor",
"def is_specific_tab_on_vendor_profile_page_present(self, tab_name):\n tab_locator = (By.XPATH, \"//div[contains(@id, 'SourceProfileTabStrip')]/descendant::a[text()='%s']\" % tab_name)\n return self.is_element_present(tab_locator)",
"def is_create_vendor_present(self):\n return self.is_element_present(self.create_vendor_locator)",
"def detect_vendor(self, task):\n if (getattr(task.node, 'power_interface') == 'ipmitool'\n or task.node.driver_internal_info.get('irmc_ipmi_succeed')):\n return super(IRMCManagement, self).detect_vendor(task)\n else:\n return super(ipmitool.IPMIManagement, self).detect_vendor(task)",
"def _should_profile(self) -> bool:\n if \"profile\" in self._allowed_plugins:\n if not self._one_shot:\n raise ValueError(\n \"Profile plugin currently only supported for one shot.\"\n )\n logger.info(\"Profile plugin is enalbed.\")\n return True\n return False",
"def is_target_buy_details_section_present_in_vendor_profile_page(self):\n return self.is_element_present(self.target_buy_details_section_locator)",
"def is_vendor(schema_obj):\n\n return isinstance(schema_obj, schema.Vendor)",
"def test_get_authorization_status_vendor_v3(self):\n pass",
"def should_profile():\n if util.dev_server:\n return _config.should_profile_development()\n else:\n return _config.should_profile_production()",
"def is_vendors_tab_present(self):\n return self.is_element_present(self.vendors_tab_locator)",
"def usefulFunction():\n print(platform.uname()) #displayed this computer's specifications",
"def support(self):",
"def test_post_activate_marketplace_vendor_v3(self):\n pass",
"def on_dedicated(self):\n\n return self.is_valid_platform() and self['MODE'] == 'enterprise'",
"def usefulFunction():\n# I think the uname platform is a func. for findout out the information of the computer\n print(platform.uname())",
"def _should_profile_production_default():\n return False",
"def get_vendor_price_lists_details(self):\n try:\n self.vendor_price_lists_dict = self.get_grid_row_details(self.customer_price_list_grid_div_id, self.vendor_price_lists_dict)\n return True\n except:\n return False",
"def getprofile(): # real signature unknown; restored from __doc__\n pass",
"def test_get_profile(self):\n self.cim.get_profile(customer_profile_id=u\"123\")",
"def get_vendor(self, result, host, mac):\n if \"vendor\" in result['scan'][host] and mac in result['scan'][host]['vendor']:\n return result['scan'][host]['vendor'][mac]\n else:\n return \"\"",
"def is_available(self, product_url):\n\t\tpass",
"def supported_vendor_interfaces(self):\n return [\n fake.FakeVendorB, fake.FakeVendorA\n ] + super().supported_vendor_interfaces",
"def is_aprentice(self):\n return self.user_profile_status == self.APPRENTICE",
"def is_profile_device(cls, device: UpnpDevice) -> bool:\n try:\n profile_device = find_device_of_type(device, cls.DEVICE_TYPES)\n except UpnpError:\n return False\n\n # Check that every service required by the subclass is declared by the device\n device_service_ids = {\n service.service_id for service in profile_device.services.values()\n }\n\n if not cls.SERVICE_IDS.issubset(device_service_ids):\n return False\n\n return True",
"def provider(provider):\n if provider in (\"alditalk\", \"netzclub\", \"congstar\"):\n return True\n else:\n return False",
"def _check_required_if_provider(self):\n return",
"def is_vendor_destination_present(self):\n return self.is_element_present(self.vendor_destination_locator)",
"def bios_vendor(self):\n\t\treturn self.__info_dict['info']['bios_vendor']['value']",
"def vendor(self):\n return self._vendor"
] | [
"0.6353229",
"0.6263275",
"0.60498637",
"0.58901536",
"0.5854708",
"0.5785455",
"0.5775481",
"0.56977445",
"0.560553",
"0.5575215",
"0.5489019",
"0.548175",
"0.54198354",
"0.54048556",
"0.54040384",
"0.53983",
"0.5386571",
"0.5337491",
"0.5300845",
"0.5284285",
"0.5280889",
"0.525742",
"0.5240133",
"0.52294344",
"0.52276605",
"0.5223554",
"0.5220753",
"0.5220585",
"0.521957",
"0.5217971"
] | 0.71709037 | 0 |
Implementing is vendor digits present functionality | def is_vendor_digits_present(self):
return self.is_element_present(self.vendor_digits_locator) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def vendor_list():\n return ['nxos', 'eos', 'cumulus']",
"def get_vendor(mac):\r\n return p.get_manuf(mac) or 'None'",
"def get_vendor(self, result, host, mac):\n if \"vendor\" in result['scan'][host] and mac in result['scan'][host]['vendor']:\n return result['scan'][host]['vendor'][mac]\n else:\n return \"\"",
"def get_manufacturer_bytes(self):\n manufacturer = self._manufacturer.upper()\n id = ((ord(manufacturer[0]) - 64) * 32 * 32 +\n (ord(manufacturer[1]) - 64) * 32 +\n (ord(manufacturer[2]) - 64))\n if 0x0421 <= id <= 0x6b5a:\n return self.manufacturer_encode(id, 2)\n return False",
"def _get_vendor_id(device_dict):\n return device_dict['vendor_id'].split()[0].split('x')[-1]",
"def usefulFunction():\n print(platform.uname()) #displayed this computer's specifications",
"def test_generate_barcode_ean13(self):\n pass",
"def test_generate_barcode_upca(self):\n pass",
"def vendor(n, vendors_from_inn):\n inns = list(map(str, vendors_from_inn.keys()))\n\n for i in inns:\n if str(n).startswith(i):\n return vendors_from_inn[int(i)]",
"def is_valid_pci_device_vendor_id(id):\n val = id.replace('0x', '').strip()\n if not is_valid_hex(id):\n return False\n if (len(val) > 4):\n return False\n return True",
"def bios_vendor(self):\n\t\treturn self.__info_dict['info']['bios_vendor']['value']",
"def test_avp_vendor(self):\n # Vendor specific flags means you need a non default vendor ID\n with self.assertRaises(CodecException):\n avp_val = avp.UnknownAVP(\n 0, b'',\n flags=avp.FLAG_VENDOR,\n vendor=avp.VendorId.DEFAULT,\n )\n out_buf = bytearray(avp_val.length)\n avp_val.encode(out_buf, 0)\n\n avp_val = avp.UnknownAVP(\n 0, b'',\n flags=avp.FLAG_VENDOR,\n vendor=1,\n )\n out_buf = bytearray(avp_val.length)\n avp_val.encode(out_buf, 0)\n self._compare_avp(avp_val, out_buf)\n\n avp_val = avp.UnknownAVP(\n 0, b'',\n flags=avp.FLAG_VENDOR,\n vendor=0x00FFFFFF,\n )\n out_buf = bytearray(avp_val.length)\n avp_val.encode(out_buf, 0)\n self._compare_avp(avp_val, out_buf)\n\n # Avp vendor in range\n with self.assertRaises(CodecException):\n avp_val = avp.UnknownAVP(\n 0, b'',\n flags=avp.FLAG_VENDOR,\n vendor=-1,\n )\n out_buf = bytearray(avp_val.length)\n avp_val.encode(out_buf, 0)\n\n # Avp vendor in range\n with self.assertRaises(CodecException):\n avp_val = avp.UnknownAVP(\n 0, b'',\n flags=avp.FLAG_VENDOR,\n vendor=0xFFFFFFFF + 1,\n )\n out_buf = bytearray(avp_val.length)\n avp_val.encode(out_buf, 0)",
"def request_identifier(self):\n question = jbus.jbus_generator_read(self.node, 0x1000, 12)\n answer = self.send_request(question)\n #print(\"Question: [\", question, \"]\")\n print(\"Answer: [\",answer,\"] LEN: \",len(answer))\n result = self.verify_response(question, answer)\n if (result == \"OK\"):\n result = {\n \"UPS_type\" : self.extract_word(answer,0),\n \"Power_KVA\" : self.extract_word(answer,1)/10,\n \"SN\" : chr(answer[10])+\n chr(answer[9])+\n chr(answer[12])+\n chr(answer[11])+\n chr(answer[14])+\n chr(answer[13])+\n chr(answer[16])+\n chr(answer[15])+\n chr(answer[18])+\n chr(answer[17])\n }\n return result\n else:\n self.error=result\n return False",
"def detect(self):\n # Get PCI devices\n lines = subprocess.check_output([\"lspci\", \"-n\"]).decode().split(\"\\n\")\n for line in lines:\n if len(line) > 0:\n class_id = \"0x{0}\".format(line.split()[1].rstrip(\":\")[0:2])\n if class_id == self.class_id:\n dev = line.split()[2].split(\":\")\n vendor_id = \"0x{0}\".format(dev[0])\n product_id = \"0x{0}\".format(dev[1])\n if vendor_id == self.vendor_id and product_id in self.devices:\n return True\n return False",
"def bm_and_dvr_supported(self):",
"def fix_vendor_id(font):\n if font['OS/2'].achVendID != 'GOOG':\n font['OS/2'].achVendID = 'GOOG'\n print('Changed font vendor ID to GOOG')\n return True\n return False",
"def is_vendor(self) -> bool:\n return self._is_vendor",
"def test_DELL_VOSTRO3460_FINGERPRINT(self):\n devices = self.parse(\"DELL_VOSTRO3460_FINGERPRINT\")\n self.assertEqual(len(devices), 76)\n self.assertEqual(devices[35].category, \"OTHER\")\n self.assertEqual(devices[35].vendor_id, 0x0138a)\n self.assertEqual(devices[35].product_id, 0x0011)\n self.assertEqual(self.count(devices, \"VIDEO\"), 1)\n self.assertEqual(self.count(devices, \"AUDIO\"), 2)\n self.assertEqual(self.count(devices, \"KEYBOARD\"), 1)\n self.assertEqual(self.count(devices, \"TOUCHPAD\"), 0)\n self.assertEqual(self.count(devices, \"CARDREADER\"), 1)\n self.assertEqual(self.count(devices, \"CDROM\"), 1)\n self.assertEqual(self.count(devices, \"FIREWIRE\"), 0)\n self.assertEqual(self.count(devices, \"MOUSE\"), 1)\n self.assertEqual(self.count(devices, \"ACCELEROMETER\"), 0)\n self.assertEqual(self.count(devices, \"TOUCHSCREEN\"), 0)\n self.assertEqual(self.count(devices, \"DISK\"), 1)\n self.assertEqual(self.count(devices, \"RAID\"), 0)\n self.assertEqual(self.count(devices, \"BLUETOOTH\"), 1)\n self.assertEqual(self.count(devices, \"NETWORK\"), 2)\n self.assertEqual(self.count(devices, \"WIRELESS\"), 1)\n self.assertEqual(self.count(devices, \"CAPTURE\"), 1)",
"def test_generate_barcode_upce(self):\n pass",
"def test_get_small_and_light_eligibility_by_seller_sku(self):\n pass",
"def issuer(number):\n res = \"unknown\"\n num = str(number)\n if num[:1]==\"4\":\n res = \"Visa\"\n elif num[:2] in (\"34\",\"37\"):\n res = \"American Express\"\n elif num[:2] in (\"51\",\"55\"):\n res = \"MasterCard\"\n elif num[:4]==\"6011\":\n res = \"Discover/Novus\"\n return res",
"def usefulFunction():\n# I think the uname platform is a func. for findout out the information of the computer\n print(platform.uname())",
"def test_generate_barcode_ean8(self):\n pass",
"def _get_vendor_product_id(device_dict):\n return f'{_get_vendor_id(device_dict)}/{_get_product_id(device_dict)}'",
"def detect_vendor(self, task):\n if (getattr(task.node, 'power_interface') == 'ipmitool'\n or task.node.driver_internal_info.get('irmc_ipmi_succeed')):\n return super(IRMCManagement, self).detect_vendor(task)\n else:\n return super(ipmitool.IPMIManagement, self).detect_vendor(task)",
"def get_vendor_price_lists_details(self):\n try:\n self.vendor_price_lists_dict = self.get_grid_row_details(self.customer_price_list_grid_div_id, self.vendor_price_lists_dict)\n return True\n except:\n return False",
"def gather_system_versions(self):\n # Get Mac model ID\n self.hw_version = str(\n IORegistryEntryCreateCFProperty(\n IOServiceGetMatchingService(\n 0,\n IOServiceMatching(\"IOPlatformExpertDevice\")),\n \"model\",\n None,\n 0)).replace(\n \"\\x00\",\n \"\")\n\n if \"imacpro\" in self.hw_version.lower():\n # iMac Pro stores it's EFI data different due it's new architecture\n # so grab the EFI & SMC ROM versions appropriately\n raw_efi_list = []\n raw_rom_info = str(\n IORegistryEntryCreateCFProperty(\n IORegistryEntryFromPath(\n 0,\n \"IODeviceTree:/rom\"),\n \"apple-rom-info\",\n None,\n 0))\n for data in raw_rom_info.split(\"\\n\"):\n if data.strip().startswith(\"BIOS ID\"):\n raw_efi_list = data.split(\":\")[1].strip().split(\".\")\n break\n else:\n self.message(\n \"[-] Could not find raw EFI data to determine EFI versions. Exiting....\")\n return False\n\n self.efi_version = \"%s.%s.%s\" % (\n raw_efi_list[0], raw_efi_list[2], raw_efi_list[3])\n # Can't currently find the SMC version like this on imac pros ....\n # self.smc_version = str(IORegistryEntryCreateCFProperty(IOServiceGetMatchingService(0, IOServiceMatching(\"AppleSMC\")), \"smc-version\", None, 0))\n self.smc_version = \"\"\n else:\n # EFI & SMC ROM versions\n self.smc_version = str(\n IORegistryEntryCreateCFProperty(\n IOServiceGetMatchingService(\n 0,\n IOServiceMatching(\"AppleSMC\")),\n \"smc-version\",\n None,\n 0))\n raw_efi = str(\n IORegistryEntryCreateCFProperty(\n IORegistryEntryFromPath(\n 0,\n \"IODeviceTree:/rom\"),\n \"version\",\n None,\n 0)).replace(\n \"\\x00\",\n \"\").split(\".\")\n self.efi_version = \"%s.%s.%s\" % (\n raw_efi[0], raw_efi[2], raw_efi[3])\n\n # Set the salt to be the MAC address of the system, using the MAC as a salt in this manner\n # helps ensure that the hashed sysuuid is pseudonymous. We don't want to know the sysuuid's\n # value, but we do want it to be unique however. The Salt value is\n # never submitted to the API\n salt = hex(getnode())\n sys_uuid = str(\n IORegistryEntryCreateCFProperty(\n IOServiceGetMatchingService(\n 0,\n IOServiceMatching(\"IOPlatformExpertDevice\")),\n \"IOPlatformUUID\",\n None,\n 0)).replace(\n \"\\x00\",\n \"\")\n self.h_sys_uuid = hashlib.sha256(salt + sys_uuid).hexdigest()\n\n # Get the Board-ID, this is how EFI files are matched to running\n # hardware - Nastee\n self.board_id = str(\n IORegistryEntryCreateCFProperty(\n IOServiceGetMatchingService(\n 0,\n IOServiceMatching(\"IOPlatformExpertDevice\")),\n \"board-id\",\n None,\n 0)).replace(\n \"\\x00\",\n \"\")\n\n # Get OS version\n self.os_version = commands.getoutput(\"sw_vers -productVersion\")\n\n # Get build number\n self.build_num = commands.getoutput(\"sw_vers -buildVersion\")\n\n # Carve out the major version as we use this a bunch\n # self.os_maj_ver = \".\".join(self.os_version.split(\".\")[:2])\n\n # Add gathered info to the dictionary to query the API with\n self.endpoints_to_check[\"127.0.0.1\"] = {\n \"hashed_uuid\": self.h_sys_uuid,\n \"hw_ver\": self.hw_version,\n \"rom_ver\": self.efi_version,\n \"smc_ver\": self.smc_version,\n \"board_id\": self.board_id,\n \"os_ver\": self.os_version,\n \"build_num\": self.build_num}\n\n return True",
"def get_device_sn(self):\n summary = self.get_version_summary()\n pattern = '\\$.*? .*? .*? (.*?) .*? .*? .*? .*? .*? \\r\\n' \n mcu_sn = int(re.findall(pattern,summary).pop())\n return mcu_sn",
"def _get_onu_info(self, serial_number):\n try:\n from flow.demo_data import get_tconts, get_gem_ports, get_onu_id\n \n if self.activation_method == \"autoactivate\":\n onu_id = get_onu_id(serial_number)\n if onu_id is None:\n onu_id = self.get_next_onu_id()\n enabled = True\n channel_speed = 0\n tconts = get_tconts(serial_number, onu_id)\n gem_ports = get_gem_ports(serial_number, onu_id)\n vont_ani = None\n\n elif self.activation_method == \"autodiscovery\":\n if self.authentication_method == 'serial-number':\n gpon_info = self.olt.get_xpon_info(self.pon_id)\n\n try:\n # TODO: Change iteration to itervalues below\n vont_info = next(info for _, info in gpon_info['v-ont-anis'].items()\n if info.get('expected-serial-number') == serial_number)\n vont_ani = vont_info['data']\n\n onu_id = vont_info['onu-id']\n enabled = vont_info['enabled']\n channel_speed = vont_info['upstream-channel-speed']\n\n tconts = {key: val for key, val in gpon_info['tconts'].iteritems()\n if val.vont_ani == vont_info['name']}\n tcont_names = set(tconts.keys())\n\n gem_ports = {key: val for key, val in gpon_info['gem-ports'].iteritems()\n if val.tconf_ref in tcont_names}\n\n except StopIteration:\n self.log.debug('no-vont-ony')\n return None # Can happen if vont-ani/serial-number has not yet been configured\n else:\n self.log.debug('not-serial-number-authentication')\n return None\n else:\n self.log.debug('not-auto-discovery')\n return None\n\n onu_info = {\n 'device-id': self.olt.device_id,\n 'serial-number': serial_number,\n 'xpon-name': None,\n 'pon': self,\n 'onu-id': onu_id,\n 'enabled': enabled,\n 'upstream-channel-speed': channel_speed,\n 'password': Onu.DEFAULT_PASSWORD,\n 't-conts': tconts,\n 'gem-ports': gem_ports,\n 'onu-vid': self.olt.get_channel_id(self._pon_id, onu_id),\n 'channel-id': self.olt.get_channel_id(self._pon_id, onu_id),\n 'vont-ani': vont_ani\n }\n # Hold off ONU activation until at least one GEM Port is defined.\n self.log.debug('onu-info', gem_ports=gem_ports)\n\n return onu_info\n # return onu_info if len(gem_ports) > 0 else None\n\n except Exception as e:\n self.log.exception('get-onu-info', e=e)\n return None",
"def _validate_details_of_charges_71A(self, val):\n return val"
] | [
"0.6032878",
"0.5946633",
"0.57252645",
"0.5716937",
"0.5711166",
"0.56264675",
"0.5623872",
"0.5562387",
"0.5555216",
"0.55518043",
"0.5550491",
"0.55362904",
"0.5514499",
"0.54909575",
"0.5461236",
"0.5437278",
"0.54319304",
"0.53986394",
"0.53688043",
"0.5318572",
"0.5318368",
"0.53045565",
"0.5283984",
"0.5283307",
"0.5282218",
"0.521775",
"0.520296",
"0.5183439",
"0.5179832",
"0.51792985"
] | 0.68530744 | 0 |
Implementing is vendor destination present functionality | def is_vendor_destination_present(self):
return self.is_element_present(self.vendor_destination_locator) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def is_vendor(self) -> bool:\n return self._is_vendor",
"def is_create_vendor_present(self):\n return self.is_element_present(self.create_vendor_locator)",
"def is_country_column_present_in_vendor_profile_destinations_page(self):\n return self.is_specific_column_present(self.destinations_grid_div_id, self.column_name_country)",
"def isProvisioned(self, dest):\n # If destination dir does not exists then its safe to assume that IM is not installed\n if not os.path.exists(dest):\n print (\"Path does not exist: '%s'\" % (dest))\n return False\n else:\n resultDict = self.getVersion(dest)\n print (\"ResultDict is: '%s'\" % (resultDict))\n if \"installed\" in resultDict[\"im_header\"]:\n return True\n print (\"installed not found in ReturnDict\")\n return False",
"def is_destinations_page_loaded_properly(self):\n return self.is_element_present(self.search_destination_locator)",
"def is_vendor(schema_obj):\n\n return isinstance(schema_obj, schema.Vendor)",
"def __checkDestination(self):\n return os.path.exists(self.__targetPath)",
"def test_post_activate_marketplace_vendor_v3(self):\n pass",
"def is_available(self, product_url):\n\t\tpass",
"def vendor_url(self, vendor_id, type):\n mapper = {\n 'soft': 'vendor:vendor-detail',\n 'hard': 'vendor:vendor-hard-delete',\n 'restore': 'vendor:vendor-restore'\n }\n return reverse(mapper[type], args=[vendor_id])",
"def click_vendor_grid_add_destination_button(self):\n self.click_element(self.vendor_grid_add_destination_locator)",
"def getDest(): #status: Done, Tested\r\n pass",
"def on_dedicated(self):\n\n return self.is_valid_platform() and self['MODE'] == 'enterprise'",
"def is_target_buy_details_section_present_in_vendor_profile_page(self):\n return self.is_element_present(self.target_buy_details_section_locator)",
"def is_country_selection_criteria_field_present_in_vendor_profile_destinations_page(self):\n return self.is_specific_selection_criteria_filter_present(self.vendor_profile_destinations_page_div_id, self.country_label_name)",
"def onVendor(self, action):\n\n if not self.graphicsView.hasImage():\n self.actionVendor.setChecked(False)\n self.showImageSelectionMessageBox()\n return\n\n self.actionVendor.setChecked(True)\n if not hasattr(self.actionVendor, 'tag'):\n self.actionVendor.tag = PlacePolygonCommand.PlacePolygonCommand(self.graphicsView)\n self.actionVendor.tag.onSuccess.connect(self.onVendorCreated)\n self.actionVendor.tag.onRejected.connect(self.onCommandRejected)\n\n self.graphicsView.command = self.actionVendor.tag",
"def Destination(self) -> _n_0_t_1:",
"def compare_vendor_price_list_detail_dial_digits_grid_destination(self):\n self.buy_page_excel_data_dictionary = self.get_excel_data_dictionary()\n is_compared = False\n dial_digits_grid_destination = self.get_specific_column_value_from_grid(self.vendor_price_list_detail_dial_digits_grid_div_id, 1, self.destination_column_name)\n if self.buy_page_excel_data_dictionary[\"Destination\"] == dial_digits_grid_destination:\n is_compared = True\n return is_compared",
"def compare_vendor_price_list_detail_rates_grid_destination(self):\n self.buy_page_excel_data_dictionary = self.get_excel_data_dictionary()\n is_compared = False\n rates_grid_destination = self.get_specific_column_value_from_grid(self.vendor_price_list_detail_rates_grid_div_id, 1, self.destination_column_name)\n if self.buy_page_excel_data_dictionary[\"Destination\"] == rates_grid_destination:\n is_compared = True\n return is_compared",
"def suppresses(self, other_describer):\n return False",
"def connect_vendor_bundle_to_site(self, siteNode, projectNode, position_item, site, vendor):\n\n core = self.core\n vendorNodes = core.load_children(self.META[\"Vendors\"])\n if vendorNodes:\n for vendorNode in vendorNodes:\n if core.get_attribute(vendorNode, \"name\") == vendor:\n bundleNodes = core.load_children(vendorNode)\n if bundleNodes:\n for bundleNode in bundleNodes:\n if core.get_attribute(bundleNode, \"name\") == site[\"Device type\"]:\n connection = core.create_child(projectNode, self.META[\"Bundle2Site\"])\n instance = core.create_child(projectNode, bundleNode)\n position_item[\"x\"] -= 200\n core.set_registry(instance, \"position\", position_item)\n core.set_pointer(connection, \"src\", instance)\n core.set_pointer(connection, \"dst\", siteNode)\n return instance\n logger.info(\"There is no bundle named: \" + site[\"Device type\"])\n else:\n logger.info(\"There are no bundles in \" + core.get_attribute(vendorNode, \"name\") + \" vendor\")\n\n logger.info(\"There is no vendor named \" + core.get_attribute(vendorNode, \"name\"))\n else:\n logger.info(\"There are no Vendors in the database\")",
"def can_pickup(self):\n return False",
"def add_destination(self):\n pass",
"def is_vendor_profile_present(self):\n return self.is_element_present(self.vendor_profile_locator)",
"def present(self):",
"def detect_vendor(self, task):\n if (getattr(task.node, 'power_interface') == 'ipmitool'\n or task.node.driver_internal_info.get('irmc_ipmi_succeed')):\n return super(IRMCManagement, self).detect_vendor(task)\n else:\n return super(ipmitool.IPMIManagement, self).detect_vendor(task)",
"def test_gen_destination_for_alias_is_destination(self):\n destination = db.gen_destination_for_alias(self.dbm, \"reddit\")\n self.assertIsInstance(destination, db.Destination)\n self.assertEqual(\"https://www.reddit.com/r/{}\", destination.url)",
"def support(self):",
"def verify_selected_vendor(self, vendor_name):\n is_present = None\n vendor_locator = (By.XPATH, self.selected_vendor_locator_string + \"[text()='%s']\" % vendor_name)\n try:\n self.wait().until(EC.presence_of_element_located(vendor_locator))\n is_present = True\n except:\n is_present = False\n finally:\n return is_present",
"def get_vendor(self, result, host, mac):\n if \"vendor\" in result['scan'][host] and mac in result['scan'][host]['vendor']:\n return result['scan'][host]['vendor'][mac]\n else:\n return \"\""
] | [
"0.60589635",
"0.55775374",
"0.5568114",
"0.5536477",
"0.5501193",
"0.5362999",
"0.5362337",
"0.53581727",
"0.5344341",
"0.5316964",
"0.52319616",
"0.5189973",
"0.5180029",
"0.516376",
"0.5160059",
"0.5142233",
"0.50607926",
"0.50488794",
"0.50478595",
"0.5024202",
"0.50194156",
"0.5007768",
"0.49803725",
"0.49549124",
"0.49297345",
"0.48758245",
"0.48727643",
"0.48660064",
"0.4833928",
"0.48195365"
] | 0.74863005 | 0 |
Implementing is upload vendor price list present functionality | def is_upload_vendor_price_list_present(self):
return self.is_element_present(self.upload_vendor_price_list_locator) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def upload_products_view(request):\n curr_vendor = get_object_or_404(Vendor, user=request.user)\n if request.method == 'POST':\n form = UploadFileForm(request.POST, request.FILES)\n if form.is_valid():\n folderpath = settings.UPLOAD_DIR + \"vendor/\"\n filepath = save_file(request.FILES['file'], folderpath,\n request.user.username)\n #this line is where products are added to the db\n num_added, failed_lines = upload_products(filepath, curr_vendor)\n results_dict = {'num_added':num_added,\n 'failed_lines':failed_lines,\n 'user':request.user}\n return render_to_response('base/store/upload_results.html',\n results_dict)\n else:\n form = UploadFileForm()\n form_dict = {'form': form,'user':request.user}\n return render_to_response('base/store/upload.html', form_dict)",
"def is_upload_vendor_price_list_pop_up_available(self):\n return self.is_element_present(self.upload_vendor_price_list_pop_up_locator)",
"def click_on_vendor_price_list_upload_search_button(self):\n vendor_price_list_upload_search_button_element = self.wait().until(EC.element_to_be_clickable(self.vendor_price_list_upload_search_button_locator), 'vendor price list upload search button locator not found before specified time')\n vendor_price_list_upload_search_button_element.click()\n self.wait_for_ajax_spinner_load()",
"def get_vendor_price_lists_details(self):\n try:\n self.vendor_price_lists_dict = self.get_grid_row_details(self.customer_price_list_grid_div_id, self.vendor_price_lists_dict)\n return True\n except:\n return False",
"def set_vendor(self, vendor_list):\n self.multiple_items_selection_from_kendo_dropdown(self.vendor_dropdown_locator, vendor_list)\n self.wait_for_ajax_spinner_load()",
"def verify_vendor_price_lists_details(self, row_data):\n return self.verify_grid_row_details(self.customer_price_list_grid_div_id, row_data)",
"def click_on_vendor_price_lists(self):\n vendor_price_lists_element = self.wait().until(EC.element_to_be_clickable(self.vendor_price_lists_locator), 'vendor price lists locator not found before specified time')\n self.script_executor_click(vendor_price_lists_element)\n self.wait_for_ajax_spinner_load()",
"def set_vendor_price_list_status(self, status_items):\n self.multiple_items_selection_from_kendo_dropdown(self.vendor_price_list_status_kendo_dropdown_locator, status_items)\n self.wait_for_ajax_spinner_load()",
"def do_submit(self, price_float, volume_float):\r\n raise NotImplementedError()",
"def upload(self, cr, ads_manager):\n if self.data['order']['articles']:\n res = super(ads_sales_order, self).upload(cr, ads_manager)\n if self.browse_record and self.file_name:\n self.browse_record.write({'ads_file_name': self.file_name})\n return res\n else:\n return False",
"def is_vendor_price_lists_present(self):\n return self.is_element_present(self.vendor_price_lists_locator)",
"def set_vendors(self, vendors_list):\n self.multiple_items_selection_from_kendo_dropdown(self.vendors_kendo_dropdown_locator, vendors_list)\n self.wait_for_ajax_spinner_load()",
"def vendor_list():\n return ['nxos', 'eos', 'cumulus']",
"def get_basket_items_pricedrop(self, offer_info, actual_volume, product_prices):\n prod_code = offer_info.base_prod_code\n base_prod_vol = actual_volume.get(prod_code.lower())\n\n pricedrop_basket = []\n\n if base_prod_vol >= offer_info.min_vol:\n offer_on_prod = offer_info.offer_on\n if actual_volume.get(offer_on_prod.lower()):\n print(\n f\"Base product volume is greater than minimum required volume & product on offer is also available \"\n f\"in cart..\")\n if offer_info.is_limited:\n print(f\"Limited offer..\")\n if prod_code == offer_on_prod:\n # total_allowed_items_on_offer = Limit Volume of base product * (Offer Product Max Volume/Minimum volume of base product)\n total_allowed_items_on_offer = offer_info.limit_vol * (\n offer_info.offer_prod_volume / offer_info.min_vol)\n max_limit = 1\n for i in range(0, base_prod_vol):\n base_prod_actual_price = product_prices.get(prod_code.lower()).get('price')\n pricedrop_basket.append((prod_code, base_prod_actual_price))\n while max_limit <= total_allowed_items_on_offer:\n new_price = (base_prod_actual_price - (offer_info.new_price)) * -1\n pricedrop_basket.append((offer_info.offer_code, new_price))\n max_limit += 1\n else:\n total_allowed_items_on_offer = offer_info.limit_vol * (\n offer_info.offer_prod_volume / offer_info.min_vol)\n for i in range(0, base_prod_vol):\n base_prod_actual_price = product_prices.get(prod_code.lower()).get('price')\n pricedrop_basket.append((prod_code, base_prod_actual_price))\n max_limit = 1\n while max_limit <= total_allowed_items_on_offer:\n offer_onprod_actual_price = product_prices.get(offer_on_prod.lower()).get('price')\n new_price = (base_prod_actual_price - (offer_info.new_price)) * -1\n for j in range(0, actual_volume.get(offer_on_prod).lower()):\n pricedrop_basket.append((offer_on_prod, offer_onprod_actual_price))\n pricedrop_basket.append((offer_info.offer_code, new_price))\n max_limit += 1\n else:\n print(f\"Unlimited offer..\")\n if prod_code == offer_on_prod:\n for i in range(0, base_prod_vol):\n base_prod_actual_price = product_prices.get(prod_code.lower()).get('price')\n pricedrop_basket.append((prod_code, base_prod_actual_price))\n new_price = (base_prod_actual_price - (offer_info.new_price))*-1\n pricedrop_basket.append((offer_info.offer_code, new_price))\n else:\n for i in range(0, base_prod_vol):\n base_prod_actual_price = product_prices.get(prod_code.lower()).get('price')\n pricedrop_basket.append((prod_code, base_prod_actual_price))\n\n offer_onprod_actual_price = product_prices.get(offer_on_prod.lower()).get('price')\n new_price = (offer_onprod_actual_price - (offer_info.new_price)) * -1\n\n for j in range(0, actual_volume.get(offer_on_prod).lower()):\n pricedrop_basket.append((offer_on_prod, offer_onprod_actual_price))\n pricedrop_basket.append((offer_info.offer_code, new_price))\n\n return pricedrop_basket",
"def pricing_import(request, simulation):\n try:\n # Get all pricing policies for this usertype.\n policies = get_query('policy', simulation)\n tolls = policies.filter(type='PRICING')\n # Get all links of the network.\n links = get_query('link', simulation)\n # Get all LinkSelection of the network.\n locations = LinkSelection.objects.filter(\n network=simulation.scenario.supply.network\n )\n # Get all usertypes.\n usertypes = get_query('usertype', simulation)\n # Get an empty Vector or create one if there is none.\n if Vector.objects.filter(data='').exists():\n empty_vector = Vector.objects.filter(data='')[0]\n else:\n empty_vector = Vector(data='')\n empty_vector.save()\n # Convert the imported file to a csv DictReader.\n encoded_file = request.FILES['import_file']\n tsv_file = StringIO(encoded_file.read().decode())\n if encoded_file.name.split(\".\")[-1] == 'tsv':\n reader = csv.DictReader(tsv_file, delimiter='\\t')\n else:\n reader = csv.DictReader(tsv_file, delimiter=',')\n # For each imported OD pair, if the pair already exists in t\n if 'traveler_type' in reader.fieldnames:\n has_type = True\n else:\n has_type = False\n if 'times' in reader.fieldnames:\n has_times = True\n else:\n has_times = False\n # For each imported link, if a Policy exists for the link, baseValue is\n # updated, else a new Policy is created.\n for row in reader:\n # Get link of current row.\n link = links.get(user_id=row['link'])\n # Get or create a LinkSelection associated with the link.\n if locations.filter(link=link).exists():\n # Take first matching LinkSelection.\n location = locations.filter(link=link)[0]\n else:\n # Create a LinkSelection for the current link.\n # Name and user_id of the Link Selection are set to the name\n # and user_id of the link.\n location = LinkSelection(\n network=simulation.scenario.supply.network,\n name=link.name,\n user_id=link.user_id,\n )\n location.save()\n location.link.add(link)\n # Get or create a pricing Policy with the corret LinkSelection\n # object.\n try:\n toll = tolls.get(location=location)\n except Policy.DoesNotExist:\n # Create a new toll with default values.\n toll = Policy(location=location, type='PRICING', usertype=None,\n valueVector=empty_vector,\n timeVector=empty_vector)\n toll.save()\n toll.scenario.add(simulation.scenario)\n # Update affected traveler type.\n toll.usertype = None\n if has_type:\n try:\n toll.usertype = usertypes.get(user_id=row['traveler_type'])\n except (UserType.DoesNotExist, ValueError):\n pass\n # Update values.\n values = row['values'].split(',')\n # First value is baseValue.\n toll.baseValue = float(values[0])\n if len(values) > 1:\n # Remaining values are stored in valueVector (as a string of\n # comma separated values).\n values = [str(float(x)) for x in values]\n v = Vector(data=','.join(values[1:]))\n v.save()\n toll.valueVector = v\n else:\n toll.valueVector = empty_vector\n # Update times.\n toll.timeVector = empty_vector\n if has_times:\n times = row['times'].split(',')\n if times[0] != ' ' and times[0]:\n # There is at least one value, store it in timeVector.\n times = [str(int(x)) for x in times]\n v = Vector(data=','.join(times))\n v.save()\n toll.timeVector = v\n toll.save()\n return HttpResponseRedirect(reverse(\n 'metro:pricing_main', args=(simulation.id,)\n ))\n except Exception as e:\n # Catch any exception while importing the file and return an error page\n # if there is any.\n print(e)\n context = {\n 'simulation': simulation,\n 'object': 'pricing',\n }\n return render(request, 'metro_app/import_error.html', 
context)",
"def scan_item(request):\n result = {'products':[]}\n u = request.user\n\n p = Product.objects.get_by_sku(request.POST['sku'])\n if p is None:\n p = Product.objects.get_by_upc(request.POST['sku'])\n \n if p is not None:\n result['products'] = [p.details(u)]\n\n return JSONHttpResponse(result)",
"def add_products():\n result = order_obj.add_products(request.forms) \n return result",
"def click_on_vendor_price_list_upload_colored_icon(self):\n vendor_price_list_upload_colored_icon_element = self.wait().until(EC.visibility_of_element_located(self.vendor_price_list_upload_colored_status_completed_icon_locator), 'vendor price list upload colored icon locator not found before specified time')\n vendor_price_list_upload_colored_icon_element.click()\n self.wait_for_ajax_spinner_load()",
"def shopify_create_product_data_queue(self, instance, template_ids=''):\n instance.connect_in_shopify()\n only_alphabets = []\n if template_ids:\n # Below one line is used to find only character values from template ids.\n only_alphabets = re.findall(\"[a-zA-Z]+\", template_ids)\n if len(template_ids.split(',')) <= 50:\n # template_ids is a list of all template ids which response did not given by\n # shopify.\n template_ids = list(set(re.findall(re.compile(r\"(\\d+)\"),template_ids)))\n results = shopify.Product().find(ids=','.join(template_ids))\n if results:\n _logger.info('Length of Shopify Products %s import from instance name: %s' % (\n len(results), instance.name))\n template_ids = [template_id.strip() for template_id in template_ids]\n # Below process to identify which id response did not give by Shopify.\n [template_ids.remove(str(result.id)) for result in results if str(result.id) in template_ids]\n else:\n raise Warning(_('Please enter the product template ids 50 or less'))\n else:\n if not instance.shopify_last_date_product_import:\n results = shopify.Product().find(status='active', limit=250)\n if len(results) >= 250:\n results = self.shopify_list_all_products(results)\n #results = self.get_product(results)\n else:\n # updated_at_min =datetime.strptime(pytz.utc.localize(instance.shopify_last_date_product_import).astimezone(\n # pytz.timezone(instance.shopify_store_time_zone[12:] or 'UTC')).strftime(\n # '%Y-%m-%d %H:%M:%S'), \"%Y-%m-%d %H:%M:%S\")\n results = shopify.Product().find(status='active',\n updated_at_min=instance.shopify_last_date_product_import,limit=250) # Change by bhavesh jadav 13/12/2019 limit=250\n if len(results) >= 250:\n results=self.shopify_list_all_products(results)\n if results:\n instance.shopify_last_date_product_import = datetime.now()\n without_gift_card_products = []\n for result in results:\n if result.to_dict().get('variants')[0].get('fulfillment_service') != 'gift_card':\n without_gift_card_products.append(result)\n results = without_gift_card_products\n if not results:\n _logger.info(\n 'No Products found to be imported from Shopify.')\n return False\n _logger.info('Total synced products - {}'.format(len(results)))\n count = 0\n one_time_create = True\n product_queue_list = []\n for result in results:\n if one_time_create:\n product_queue_id = self.shopify_create_product_queue(instance)\n product_queue_list.append(product_queue_id.id)\n _logger.info('Shopify Product Queue created. Queue name is {}'.format(\n product_queue_id.name))\n one_time_create = False\n if template_ids or only_alphabets:\n product_queue_id.message_post(body=\"%s products are not imported\" %(','.join(template_ids+only_alphabets)))\n self.shopify_create_product_data_queue_line(result, instance, product_queue_id)\n count = count + 1\n if count == 100:\n count = 0\n one_time_create = True\n return product_queue_list",
"def _check_product(self):\n\n self.importable = False\n abcde = string.ascii_uppercase[:5]\n product_infos = self.retrieve_product_infos()\n\n if product_infos['product_code'] is not None:\n try:\n Products.objects.get(\n code=product_infos['product_code']\n )\n except Products.DoesNotExist:\n if (\n product_infos['product_name'] is not None\n and product_infos['product_code'] not in ProductImportation.codes\n and product_infos['product_code'] is not None\n and product_infos['product_url'] is not None\n and product_infos['image_url'] is not None\n and product_infos['quantity'] is not None\n and product_infos['ingredients'] is not None\n and product_infos['brands'] != []\n and product_infos['stores'] != []\n and product_infos['countries'] is not None\n and product_infos['compare_to'] is not None\n and product_infos['categories_hierarchy'] is not None\n and product_infos['nutriscore'] in abcde\n and all([product_infos[nutriment] >= 0 for nutriment in self.list_nutriments])\n and Categories.objects.filter(name=product_infos['compare_to']).count() > 0\n ):\n self.name = product_infos['product_name']\n self.product_infos = product_infos\n self.code = product_infos['product_code']\n ProductImportation.codes.append(self.code)\n self.importable = True\n\n return self.importable",
"def m_ts_OrderAdded(self, sender, e):\r\n print(\"Order was added with price of {0}.\".format(e.Order.LimitPrice))",
"def _get_prix_tarif(self,cout,pricelist):\n cr = self._cr\n product=cout.name\n prix_tarif=0\n date=time.strftime('%Y-%m-%d') # Date du jour\n if pricelist:\n #Convertion du lot_mini de US vers UA\n min_quantity = self.env['product.uom']._compute_qty(cout.name.uom_id.id, cout.name.lot_mini, cout.name.uom_po_id.id)\n #TODO : Pour contourner un bug d'arrondi (le 31/01/2017)\n min_quantity=min_quantity+0.00000000001\n #TODO en utilisant la fonction repr à la place de str, cela ne tronque pas les décimales\n SQL=\"\"\"\n select ppi.price_surcharge\n from product_pricelist_version ppv inner join product_pricelist_item ppi on ppv.id=ppi.price_version_id\n where ppv.pricelist_id=\"\"\"+str(pricelist.id)+ \"\"\" \n and min_quantity<=\"\"\"+repr(min_quantity)+\"\"\"\n and (ppv.date_start <= '\"\"\"+date+\"\"\"' or ppv.date_start is null)\n and (ppv.date_end >= '\"\"\"+date+\"\"\"' or ppv.date_end is null)\n\n and ppi.product_id=\"\"\"+str(product.id)+ \"\"\" \n and (ppi.date_start <= '\"\"\"+date+\"\"\"' or ppi.date_start is null)\n and (ppi.date_end >= '\"\"\"+date+\"\"\"' or ppi.date_end is null)\n order by ppi.sequence\n limit 1\n \"\"\"\n cr.execute(SQL)\n result = cr.fetchall()\n for row in result:\n coef=1\n if min_quantity:\n coef=cout.name.lot_mini/min_quantity\n prix_tarif=row[0]/coef\n\n\n\n return prix_tarif",
"def getTransferListSummary(self):\n p_ids_and_prices = {}\n players = self.getAllPlayerInfoTransferlist()\n\n # Get IDs of all players\n log_event(self.queue, \"Gathering player prices... \")\n for p in players:\n p_bidstatus = p[1]\n p_id = p[8]\n # removed Filter for unlisted / expired players\n if p_id not in p_ids_and_prices:\n p_sellprice = self.getPlayerSellPrice(p_id)\n # If sell price returns 0, need to fetch from Futbin\n if p_sellprice == 0:\n p_sellprice = self.getFutbinPrice_opentab(p_id)\n self.sleep_approx(5) # Delay iteration to not anger futbin\n # Add player ID and price to dict\n p_ids_and_prices[p_id] = p_sellprice\n\n for p_id in p_ids_and_prices:\n p_price = p_ids_and_prices[p_id]\n p_name = self.getPlayerCardName(p_id)\n log_event(self.queue, str(p_name) + \" - #\" +\n str(p_id) + \" Price \" + str(p_price))\n\n num_p_sold = 0\n num_p_expired = 0\n num_p_unlisted = 0\n num_p_listed = 0\n\n sold_p_value = 0\n expired_p_value = 0\n unlisted_p_value = 0\n listed_p_value = 0\n\n for p in players:\n p_bidstatus = p[1]\n p_id = p[8]\n p_soldprice = p[5] # is 0 if unlisted\n p_sellprice = int(p_ids_and_prices[p_id])\n\n if \"won\" in p_bidstatus:\n num_p_sold += 1\n sold_p_value += p_soldprice\n if \"expired\" in p_bidstatus:\n num_p_expired += 1\n expired_p_value += p_sellprice\n if (p_bidstatus == \"listFUTItem\"):\n num_p_unlisted += 1\n unlisted_p_value += p_sellprice\n if (p_bidstatus == \"listFUTItem has-auction-data\"):\n num_p_listed += 1\n listed_p_value += p_sellprice\n\n log_event(self.queue, \"Players sold: \" + str(num_p_sold))\n log_event(self.queue, \"Players expired: \" + str(num_p_expired))\n log_event(self.queue, \"Players listed: \" + str(num_p_listed))\n log_event(self.queue, \"Players unlisted: \" + str(num_p_unlisted))\n log_event(self.queue, \" - - - \")\n log_event(self.queue, \"Sold players value: \" + str(sold_p_value))\n log_event(self.queue, \"Expired players value: \" +\n str(expired_p_value))\n log_event(self.queue, \"Unlisted players value: \" +\n str(unlisted_p_value))\n log_event(self.queue, \"Listed players value: \" + str(listed_p_value))\n\n # TODO subtract bought price\n self.user_players_won += int(num_p_unlisted)\n self.p_ids_and_prices = p_ids_and_prices\n intel = [p_ids_and_prices, num_p_sold, num_p_expired, num_p_unlisted,\n num_p_listed, sold_p_value, expired_p_value, unlisted_p_value, listed_p_value]\n return intel",
"def show_vendor_product():\n vendor = input(\"Enter the Vendor: \")\n product = input(\"Enter the product: \")\n filter_string = input(\"Enter Optional Search string (i.e. HTTP): \")\n logger.debug(\"Searching: {} from {} -- Filter = {}\".format(product, vendor, filter_string))\n search_url = \"http://cve.circl.lu/api/search/{}/{}\".format(vendor, product)\n req = call_api(search_url)\n if not req:\n logger.debug(\"something no workie with the vendor product call\")\n else:\n print(\"Searching: {} from {} -- Filter = {}\".format(product, vendor, filter_string))\n for item in req:\n if filter_string != '' or not filter_string:\n if filter_string in item['summary']:\n print(\"\\nSummary: \" + item['summary'])\n print(\"CVE: \" + item['id'])\n print(\"CVSS: \" + str(item['cvss']))\n else:\n print(\"\\nSummary: \" + item['summary'])\n print(\"CVE: \" + item['id'])\n print(\"CVSS: \" + str(item['cvss']))\n menu()",
"def show_uploadbox(self):\n\n manager = getMultiAdapter((self.context, self.context.REQUEST),\n ICheckinCheckoutManager)\n\n return manager.is_file_upload_allowed()",
"def validate(cls, prices):\n super(GiftCardPrice, cls).validate(prices)\n\n for price in prices:\n price.check_price()",
"def get_basket_items_discount(self, offer_info, actual_volume, product_prices):\n prod_code = offer_info.base_prod_code\n base_prod_vol = actual_volume.get(prod_code.lower())\n\n discount_basket = []\n\n if base_prod_vol >= offer_info.min_vol:\n offer_on_prod = offer_info.offer_on\n if actual_volume.get(offer_on_prod.lower()):\n print(f\"Base product volume is greater than minimum required volume & product on offer is also available \"\n f\"in cart..\")\n if offer_info.is_limited:\n print(f\"Limited offer..\")\n if prod_code == offer_on_prod:\n # total_allowed_items_on_offer = Limit Volume of base product * (Offer Product Max Volume/Minimum volume of base product)\n total_allowed_items_on_offer = offer_info.limit_vol * (offer_info.offer_prod_volume/offer_info.min_vol)\n max_limit = 1\n for i in range(0, base_prod_vol):\n base_prod_actual_price = product_prices.get(prod_code.lower()).get('price')\n discount_basket.append((prod_code, base_prod_actual_price))\n while max_limit <= total_allowed_items_on_offer:\n discounted_price = (base_prod_actual_price *(offer_info.discount_perc/100))*-1\n discount_basket.append((offer_info.offer_code, discounted_price))\n max_limit += 1\n else:\n total_allowed_items_on_offer = offer_info.limit_vol * (offer_info.offer_prod_volume / offer_info.min_vol)\n for i in range(0, base_prod_vol):\n base_prod_actual_price = product_prices.get(prod_code.lower()).get('price')\n discount_basket.append((prod_code, base_prod_actual_price))\n max_limit = 1\n while max_limit <= total_allowed_items_on_offer:\n offer_onprod_actual_price = product_prices.get(offer_on_prod.lower()).get('price')\n discounted_price = (offer_onprod_actual_price *(offer_info.discount_perc/100))*-1\n for j in range(0, actual_volume.get(offer_on_prod.lower())):\n discount_basket.append((offer_on_prod, offer_onprod_actual_price))\n discount_basket.append((offer_info.offer_code, discounted_price))\n max_limit += 1\n else:\n print(f\"Unlimited offer..\")\n if prod_code == offer_on_prod:\n if base_prod_vol > offer_info.min_vol:\n for i in range(0, base_prod_vol):\n base_prod_actual_price = product_prices.get(prod_code.lower()).get('price')\n discount_basket.append((prod_code, base_prod_actual_price))\n if i%2 != 0:\n discounted_price = (base_prod_actual_price *(offer_info.discount_perc/100))*-1\n discount_basket.append((offer_info.offer_code, discounted_price))\n else:\n for i in range(0, base_prod_vol):\n base_prod_actual_price = product_prices.get(prod_code.lower()).get('price')\n discount_basket.append((prod_code, base_prod_actual_price))\n else:\n for i in range(0, base_prod_vol):\n base_prod_actual_price = product_prices.get(prod_code.lower()).get('price')\n discount_basket.append((prod_code, base_prod_actual_price))\n\n offer_onprod_actual_price = product_prices.get(offer_on_prod.lower()).get('price')\n discounted_price = (offer_onprod_actual_price * (offer_info.discount_perc / 100))*-1\n\n for j in range(0, actual_volume.get(offer_on_prod.lower())):\n discount_basket.append((offer_on_prod, offer_onprod_actual_price))\n discount_basket.append((offer_info.offer_code, discounted_price))\n\n\n return discount_basket",
"def _onchange_price(self):\n self.price_subtotal = self.price",
"def handle_list(self, detail, *args, **kwargs):\n for product_type in models.ProductType.objects.all():\n print(product_type.name)\n if detail:\n for coverage_type in product_type.allowed_coverage_types.all():\n print(\"\\t%s\" % coverage_type.name)",
"def test_get_additional_seller_inputs(self):\n pass"
] | [
"0.6263099",
"0.6141632",
"0.6066002",
"0.5928832",
"0.5877911",
"0.5603189",
"0.55999833",
"0.5502769",
"0.5484664",
"0.5477019",
"0.5378392",
"0.5263404",
"0.5179111",
"0.5104403",
"0.5092336",
"0.5042064",
"0.5026895",
"0.5016881",
"0.5006495",
"0.49896625",
"0.4987563",
"0.49769837",
"0.49402457",
"0.49228323",
"0.49150458",
"0.48818728",
"0.48779583",
"0.48707014",
"0.4854793",
"0.4853625"
] | 0.67144847 | 0 |
Implementing the 'is inline action popup loaded properly' functionality | def is_inline_action_popup_loaded_properly(self):
return self.is_element_present(self.vendor_profile_inline_item_locator) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def verify_popup(self, type):",
"def onShowed(self):\n self.parent.actionTagTwo=\"\"\n pass",
"def populating_popup(self, *args):\n return _ida_hexrays.Hexrays_Hooks_populating_popup(self, *args)",
"def show_popup(self, view, docstring, location=None):",
"def on_actions_list(self, e):\n self.PopupMenu(self.popup_menu())",
"def click_on_analyze_and_complete_inline_action(self, inline_item):\n self.select_inline_action_item(inline_item)\n self.wait_for_ajax_spinner_load(300)\n try:\n self.wait().until(EC.presence_of_element_located(self.analyze_and_complete_confirmation_popup_locator), 'analyze and complete confirmation popup locator not found before specified time out')\n self.wait_for_ajax_spinner_load()\n ok_button_element = self.wait().until(EC.element_to_be_clickable(self.ok_button_locator), 'ok button locator not found before specified time')\n ok_button_element.click()\n except:\n raise\n self.wait_for_ajax_spinner_load()",
"def popup(self):\r\n return self.exec_() == QDialog.Accepted",
"def onShow(self):\n pass",
"def custom_field_popup_action(self):\n if self.popup.get_option():\n custom_options = self.controller.get_minefield_options()[\"custom\"]\n self.controller.set_difficulty(custom_options)\n return Action(\"goto generating view\", [])\n return None",
"def popup():\n menu = _get_menu()\n cursor = QtGui.QCursor()\n point = cursor.pos()\n menu.exec_(point)",
"def action(self):\n pass",
"def action(self):\n pass",
"def is_shown(self, request):\n return True",
"def _action(self):\n pass",
"def is_inline_action_item_present(self, item_name):\n inline_action_item_locator = (By.XPATH, \"//ul[@id='Actions_listbox']/li[text()='%s']\" % item_name)\n return self.is_element_present(inline_action_item_locator)",
"def click(self):\r\n pass",
"def onSearch(self):\n self.mainGrid.showSearchPopup()\n self.popupActive = True",
"def custom_actions(self, form_entry, request=None):",
"def on_click(self) -> None:\n pass",
"def open_keyboard(self, instance):\n self.popup.open()",
"def onOpen(self):",
"def opm_popup(opmvers, text, nrow):\n\n layout1 = [[sg.Multiline(text, size=(80, nrow), background_color='white', text_color='darkgreen')],\n [sg.CloseButton('OK')]]\n window1 = sg.Window('OPMRUN - Flow Job Scheduler ' + opmvers, layout=layout1)\n window1.Read()\n return ()",
"def is_ime_popup(self,ignore_error_handle =False):\n message = {};\n step = 'is ime popup'\n try:\n isPopup = self.driver.is_ime_active();\n message = self.feedback.feedback_action_ok(step);\n message['is_popup'] = isPopup;\n except BaseException,e:\n message = self.feedback.feedback_action_fail(step,str(e),ignore_error_handle);\n finally:\n return message;",
"def click_inbound_statement_grid_inline_action_button(self, row_identifier_text):\n self.click_inline_action_button(self.inbound_statement_grid_div_id, row_identifier_text, self.inbound_statement_grid_inline_action_column_number)",
"def _show_popup(self) -> None:\n\n top = tk.Toplevel()\n email_list_len = len(self.get_recipients())\n msg = tk.messagebox.askquestion('Confirm send emails', 'Are you sure you want to email {} client{}?'\n .format(email_list_len, \"s\" if email_list_len > 1 else \"\"),\n icon='warning')\n if msg == \"yes\":\n self._disable_buttons()\n email_process(self.get_recipients())\n top.destroy()\n else:\n top.destroy()",
"def _confirm_action(self, action):\n\t\treturn True",
"def __on_click(self):\n if self.enable:\n self.__function_to_activate()",
"def _ClickPrimaryActionButton(self):\n self._ExecuteOobeApi('Oobe.clickGaiaPrimaryButtonForTesting')",
"def show_popup(self, data):\r\n store = get_store()\r\n self.ids.inlayout.rows = 1\r\n self.ids.inlayout.add_widget(CEToolBoxLabel(text=add_color(\"Viscosity :\", \"FFFFFF\")))\r\n value = round(store.get('Viscosity')[\"value\"], 2)\r\n viscotext = str(value)+\" \"+store.get('Viscosity')[\"unit\"]\r\n self.ids.inlayout.add_widget(CEToolBoxLabel(text=add_color(viscotext, \"FFFFFF\")))\r\n self.open()",
"def _do_action(self):\n pass"
] | [
"0.6597915",
"0.648272",
"0.6258592",
"0.6248072",
"0.5986996",
"0.59293145",
"0.5862947",
"0.5807013",
"0.56972724",
"0.55615556",
"0.5549271",
"0.5549271",
"0.5548313",
"0.55236566",
"0.5493427",
"0.54465926",
"0.5439738",
"0.5421852",
"0.54136634",
"0.5403886",
"0.53963846",
"0.53908134",
"0.5370457",
"0.53640336",
"0.53550285",
"0.5350323",
"0.53408813",
"0.53398645",
"0.5331898",
"0.5317723"
] | 0.71407616 | 0 |
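The record above reduces its page check to a single `is_element_present` call, but that helper is not part of the dump. As a point of reference, here is a minimal sketch of how such a page-object helper is commonly written with Selenium WebDriver; the free-function form, the driver argument, and the locator handling are assumptions, not code recovered from the dataset.

```python
# Minimal sketch of the presence-check helper the page objects above rely on.
# Assumes a Selenium WebDriver instance; in the dataset's page objects this
# would be a method using self.driver rather than a free function.
from selenium.common.exceptions import NoSuchElementException

def is_element_present(driver, locator):
    """Return True if the element described by the (By.<strategy>, value)
    locator tuple currently exists in the DOM."""
    try:
        driver.find_element(*locator)
        return True
    except NoSuchElementException:
        return False
```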
Implementing the 'click on tab of vendor profile page' functionality | def click_on_tab_of_vendor_profile_page(self, tab_name):
vendor_profile_page_tab_locator = (By.XPATH, self.vendor_profile_page_tab_locator_string + "[text()='%s']" % tab_name)
self.select_static_tab(vendor_profile_page_tab_locator, 'tab locator not found') | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def clickViewProfile(self):\n self.waitForElement(locator=self._viewProfileBtn, locatorType=\"xpath\")\n element = self.getElementList(locator=self._viewProfileBtn, locatorType=\"xpath\")\n self.elementClick(element=element[0])",
"def tabSelected(self):",
"def tabSelected(self):",
"def clickDetails(self):\n self.waitForElement(locator=self._userProfile_detailsBtn, locatorType=\"xpath\")\n element = self.getElementList(locator=self._userProfile_detailsBtn, locatorType=\"xpath\")\n self.elementClick(element=element[0])\n pp.time.sleep(2)",
"def select_tab_of_view_price_list_detail_page(self, tab_name):\n self.wait_for_ajax_spinner_load(300)\n view_price_list_detail_page_tab_locator = (By.XPATH, self.vendor_profile_page_tab_locator_string + \"[text()='%s']\" % tab_name)\n self.select_static_tab(view_price_list_detail_page_tab_locator, 'tab locator not found')",
"def tabSelected(self):\r\n self.transactionMenuWidget.tabSelected()",
"def select_buy_dashboard_tab(self):\n self.select_static_tab(self.buy_dashboard_tab_locator, True)",
"def clickTeam(self):\n # self.webScroll(direction=\"down\")\n self.scrollIntoView(locator=self._userProfile_team, locatorType=\"xpath\")\n self.waitForElement(locator=self._userProfile_team, locatorType=\"xpath\")\n self.elementClick(locator=self._userProfile_team, locatorType=\"xpath\")\n pp.time.sleep(2)",
"def is_specific_tab_on_vendor_profile_page_present(self, tab_name):\n tab_locator = (By.XPATH, \"//div[contains(@id, 'SourceProfileTabStrip')]/descendant::a[text()='%s']\" % tab_name)\n return self.is_element_present(tab_locator)",
"def profileToolClicked(self):\n self.openDock()\n # Set the profile map tool\n self.profile_tool.setActive()",
"def select_vendors_tab(self):\n self.select_static_tab(self.vendors_tab_locator, 'vendors tab not found before specified time')",
"def link_click(_):\r\n\r\n tag_name = about_content.tag_names(tkinter.CURRENT)[0]\r\n about_content.tag_config(tag_name, foreground=\"#551A8B\")\r\n if tag_name == 'hyper':\r\n webbrowser.open(\"https://www.facebook.com/nihal.agarwal.14\")\r\n else:\r\n webbrowser.open(\"https://github.com/NihalAgarwal/Windows-Wi-Fi-Manager\")",
"def click(cls, user, link):\r\n pass",
"def tab_url(self) -> str:",
"def on_OpenExplorerAccount_clicked(self):\n # TODO: not implemented yet\n #raise NotImplementedError\n url = f\"http://kfc.matrix.io/{self.a0_Address}\"\n\n self.browser.openurl(url)\n self.OnlyDisplay(f\"start {url}\")",
"def _ClickPrimaryActionButton(self):\n self._ExecuteOobeApi('Oobe.clickGaiaPrimaryButtonForTesting')",
"def click_buy_page_inline_action_button(self, vendor):\n self.click_inline_action_button(self.vendors_div_id, vendor, self.grid_column_number)",
"def click(cls, user, link):\n pass",
"def gotoUsers(self):\n self.elementClick(locator=self._navBar_users, locatorType=\"xpath\")",
"def click_on_phones_tab(self: object) -> object:\n phones = self.driver.find_element(*BasePageLocators.PHONES)\n phones.click()\n return self",
"def go_to_tab(self, tab_name):\r\n\r\n if tab_name not in ['Courseware', 'Course Info', 'Discussion', 'Wiki', 'Progress']:\r\n self.warning(\"'{0}' is not a valid tab name\".format(tab_name))\r\n\r\n # The only identifier for individual tabs is the link href\r\n # so we find the tab with `tab_name` in its text.\r\n tab_css = self._tab_css(tab_name)\r\n\r\n if tab_css is not None:\r\n self.q(css=tab_css).first.click()\r\n else:\r\n self.warning(\"No tabs found for '{0}'\".format(tab_name))\r\n\r\n self._is_on_tab_promise(tab_name).fulfill()",
"def back_click(self):\n self.controller.show_account_display_screen(self.us)",
"def click(self):\r\n pass",
"def handle_tab(self, index):\n self.current_tab = index\n self.views[index].activate()",
"def OnTabDClick(self, event):\r\n\r\n # notify owner that the tabbar background has been double-clicked\r\n e = AuiNotebookEvent(wxEVT_COMMAND_AUINOTEBOOK_TAB_DCLICK, self.GetId())\r\n e.SetEventObject(self)\r\n self.GetEventHandler().ProcessEvent(e)\r\n\r\n tabs = event.GetEventObject()\r\n if not tabs.GetEnabled(event.GetSelection()):\r\n return\r\n\r\n if not self.IsRenamable(event.GetSelection()):\r\n return\r\n\r\n self.EditTab(event.GetSelection())",
"def select_info_tab(self):\n self.click_submenu_entry(\"Info\")\n time.sleep(2)\n self.wait_for_ajax()",
"def switch_tab(self, tab):\n\n self.driver.switch_to.window(self.driver.window_handles[tab])",
"def open_user_page(self):\n self.switch_main_menu(\"Admin\")\n self.wait_unit_el_present(self.user_management_menu)\n self.click_menu(\"User Management\")\n self.click_menu(\"Users\")",
"def click_login_button(self):",
"def select_transactions_tab(self):\n self.click_element(self.transactions_tab_locator)"
] | [
"0.6522478",
"0.64506716",
"0.64506716",
"0.6187155",
"0.6104637",
"0.608604",
"0.6002209",
"0.59290266",
"0.5869084",
"0.5767032",
"0.5752777",
"0.5728779",
"0.5690089",
"0.56537795",
"0.5647347",
"0.5619931",
"0.5612905",
"0.5586425",
"0.5546058",
"0.5527586",
"0.55254775",
"0.5509131",
"0.54918194",
"0.5480424",
"0.54770863",
"0.5472137",
"0.54530025",
"0.54287124",
"0.53925735",
"0.5380203"
] | 0.7509712 | 0 |
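The tab-click document above interpolates the tab name into an XPath template before delegating to a `select_static_tab` helper. A self-contained sketch of that parameterized-locator pattern follows, assuming Selenium with an explicit wait; the XPath template and timeout are illustrative, not taken from the dataset.

```python
# Sketch of the dynamic-locator pattern used by click_on_tab_of_vendor_profile_page:
# the tab name is substituted into an XPath template, then the element is
# waited on and clicked. The template string here is a hypothetical example.
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

TAB_XPATH_TEMPLATE = "//div[contains(@id, 'TabStrip')]/descendant::a[text()='%s']"

def click_named_tab(driver, tab_name, timeout=10):
    locator = (By.XPATH, TAB_XPATH_TEMPLATE % tab_name)
    tab = WebDriverWait(driver, timeout).until(
        EC.element_to_be_clickable(locator),
        message="tab '%s' not clickable before timeout" % tab_name,
    )
    tab.click()
```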
Implementing the 'is vendor profile page loaded properly' functionality | def is_vendor_profile_page_loaded_properly(self):
return self.is_element_present(self.save_vendor_profile_locator) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def is_vendor_profile_present(self):\n return self.is_element_present(self.vendor_profile_locator)",
"def is_specific_tab_on_vendor_profile_page_present(self, tab_name):\n tab_locator = (By.XPATH, \"//div[contains(@id, 'SourceProfileTabStrip')]/descendant::a[text()='%s']\" % tab_name)\n return self.is_element_present(tab_locator)",
"def is_browser_on_page(self):",
"def is_target_buy_details_section_present_in_vendor_profile_page(self):\n return self.is_element_present(self.target_buy_details_section_locator)",
"def is_target_buy_list_overrides_screen_loaded(self):\n return self.is_element_visible(self.target_buy_list_overrides_page_header_locator)",
"def prePresent(self, request):",
"def _should_profile(self) -> bool:\n if \"profile\" in self._allowed_plugins:\n if not self._one_shot:\n raise ValueError(\n \"Profile plugin currently only supported for one shot.\"\n )\n logger.info(\"Profile plugin is enalbed.\")\n return True\n return False",
"def is_inline_action_popup_loaded_properly(self):\n return self.is_element_present(self.vendor_profile_inline_item_locator)",
"def _verify_page(self):",
"def click_on_tab_of_vendor_profile_page(self, tab_name):\n vendor_profile_page_tab_locator = (By.XPATH, self.vendor_profile_page_tab_locator_string + \"[text()='%s']\" % tab_name)\n self.select_static_tab(vendor_profile_page_tab_locator, 'tab locator not found')",
"def should_profile():\n if util.dev_server:\n return _config.should_profile_development()\n else:\n return _config.should_profile_production()",
"def is_vendors_tab_present(self):\n return self.is_element_present(self.vendors_tab_locator)",
"def wait_for_page_load(self):\n pass",
"def test_functionality(self):\n self.browserObject = globalVars.browserObject\n \n #Check for current logged in user\n self.verifyCurrentUser(userRole='Administrator', loginAsUser=True) \n \n self.get_ServicesPage(\"\",\"Firmware_update_Template\")\n \n self.logout()",
"def on_load(self):\n pass",
"def on_load(self):\n pass",
"def not_a_product(self):\n\n try:\n if 'var PAGE_NAME = \"ProductPage\";' not in (\" \" . join(self.tree_html.xpath(\"//script/text()\"))):\n raise Exception\n except Exception:\n return True\n\n self.fv.setupCH(self.tree_html)\n\n return False",
"def is_request_in_microsite():\r\n return get_configuration()",
"def setup_page(self):\r\n raise NotImplementedError",
"def show(self):\n #show the viewlet if we are not using Firefox\n user_agent = self.request.get('HTTP_USER_AGENT', '')\n display = not ('Firefox' in user_agent or 'Chrome' in user_agent)\n return display",
"def on_load(self):",
"def test_page_existence(self):\r\n # Log in\r\n self.auth_page.visit()\r\n\r\n # Verify that each page is available\r\n for page in self.pages:\r\n page.visit()",
"def set_vendor_profile_status(self, item_name):\n self.single_selection_from_kendo_dropdown(self.vendor_profile_status_kendo_dropdown_locator, item_name)\n self.wait_for_ajax_spinner_load()",
"def _check_ready(self, _widget, __event=None, __page=0):\r\n\r\n if self.cmbHardware.get_active() > 0:\r\n self.assistant.set_page_complete(self.fxdPageGeneral, True)\r\n else:\r\n self.assistant.set_page_complete(self.fxdPageGeneral, False)\r\n\r\n return False",
"def _should_profile_production_default():\n return False",
"def request_plugins(self):",
"def _auth_plugin_available(ext):\n return ext.obj.available",
"def getprofile(): # real signature unknown; restored from __doc__\n pass",
"def is_on_home_page(self):\n current_url_path = urlparse(self.driver.current_url).path\n if current_url_path == \"/opencart.com/\":\n return True\n return False",
"def LaunchChooser(self):\n print 'calling wired profile chooser'\n self.SetNeedWiredProfileChooser(True)"
] | [
"0.66580355",
"0.6311346",
"0.5826427",
"0.56982046",
"0.55703735",
"0.5551309",
"0.55296487",
"0.5511999",
"0.5439052",
"0.54025126",
"0.54020023",
"0.5387055",
"0.5378316",
"0.5353664",
"0.53376067",
"0.53376067",
"0.52425605",
"0.5241384",
"0.52211964",
"0.5217143",
"0.51908296",
"0.51893115",
"0.5188328",
"0.5185359",
"0.51688516",
"0.51647717",
"0.51635873",
"0.51566595",
"0.5153374",
"0.51530373"
] | 0.74304616 | 0 |
Implementing the 'is rates page loaded properly' functionality | def is_rates_page_loaded_properly(self):
return self.is_element_present(self.rate_catalog_locator) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def is_rates_tab_loaded_properly(self):\n return self.is_element_present(self.search_rates_locator)",
"def acquire_rates_data(self):\n prinf('%s params: %s', self.base_url, self.my_params)\n g_start()\n try:\n self.response_data = requests.get(self.base_url, params=self.my_params, timeout=self.timeout)\n except OSError:\n prinw('%s host not available', self.name)\n return False\n g_end('request responded')\n\n if not self.response_data:\n return False\n else:\n status_code = self.response_data.status_code\n prinf(status_code )\n if status_code > 400 :\n prinw('%s currency converter site response not found. %s', self.nam, status_code)\n return False\n elif status_code == 200:\n prinf('%s response ok', self.name)\n\n self.update_rates_valid_data()\n self.in_ccode = self.response_data.json()[self.strs[jpn.key_in_ccode]]\n\n self.rates = self.response_data.json()[self.strs[jpn.key_output]]\n\n # as requested ccode is not in the request respond\n # we add it => e.g 1 EUR = 1 EUR => needed for further pandas extrapolation\n self.rates.update({self.in_ccode: float(1)})\n return True",
"def rates(self):\n raise NotImplementedError(\"Must be implemented by subclass.\")",
"def is_reference_rates_tab_loaded_properly(self):\n return self.is_element_present(self.search_reference_rates_locator)",
"def handle_rates_response(self, rates):\n\n if rates.rates_id not in self.modules:\n return\n\n counter = self.modules[rates.rates_id]\n\n # update cache\n lvap = RUNTIME.lvaps[counter.lvap]\n lvap.rates = {x[0]: x[1] for x in rates.rates}\n\n # update this object\n counter.rates = {x[0]: x[1] for x in rates.rates}\n\n # call callback\n handle_callback(counter, counter)",
"def select_rates_tab(self):\n self.select_static_tab(self.rates_tab_locator, True)",
"def getActiveCurrencies():",
"def country(request):\n class Results(object):\n\n def __init__(self, cc):\n self.cc = cc\n self.registered = 0\n self.dns = 0\n self.dnf = 0\n \n def add_rider(self, rider):\n self.registered += 1\n\n if rider.dns:\n self.dns += 1\n\n if rider.dnf:\n self.dnf += 1\n\n def finish_rate(self):\n \n rate = 100*(self.registered-self.dns-self.dnf)/(self.registered-self.dns)\n return rate\n\n results = {}\n for rider in models.Rider.objects.all():\n cc = rider.country.code\n results[cc] = results.get(cc, Results(cc))\n results[cc].add_rider(rider)\n\n results = results.values()\n sort = request.GET.get('sort', 'country')\n\n if sort == \"country\":\n results.sort(key=lambda x: x.cc)\n elif sort == \"registered\":\n results.sort(key=lambda x: x.registered, reverse=True)\n elif sort == \"rate\":\n results.sort(key=lambda x: x.registered, reverse=True)\n results.sort(key=lambda x: x.finish_rate(), reverse=True)\n\n total_registered = sum([r.registered for r in results])\n total_dns = sum([r.dns for r in results])\n total_dnf = sum([r.dnf for r in results])\n overall_finish_rate = 100 * (total_registered-total_dns-total_dnf)/(total_registered-total_dns)\n\n template = env.get_template(\"country.html\")\n rendered = template.render(dict(results=results,\n country_names=countries.OFFICIAL_COUNTRIES,\n registered=total_registered,\n total_dns=total_dns,\n total_dnf=total_dnf,\n overall_finish_rate=overall_finish_rate,\n ))\n\n return HttpResponse(rendered)",
"def get_current_rate(self):\n pass",
"def __init__(self):\n self._init_site_specifications_()\n\n self.my_params = None # parameters for site requests\n self.rates = None # exchange rates from the site\n self.timeout = 1 # url response timeout in seconds\n\n # retrieved rates validity\n self.valid_from_utc = None\n self.valid_to_utc = None\n\n self.in_ccode = None\n self.response_success = False",
"def getActiveCurrency():",
"def curr_list(request):\n if request.method == 'GET':\n all_rates = Currencies.objects.all()\n serializer = CurrenciesSerializer(all_rates, many=True)\n return Response(serializer.data)",
"def exchange_rate(self):\n res = r.get(self.url + self.current_rate)\n return self.execute(res)",
"def is_vendor_rates_present(self):\n return self.is_element_present(self.vendor_rates_locator)",
"def refreshSwapRates(self):\r\n self.firstPass()",
"def scrape(self):\n pass",
"def test_get_rate_article(self):\n self.client.credentials(HTTP_AUTHORIZATION='Bearer ' + self.token)\n self.client.post(\n self.rate_url,\n self.rate_details,\n format='json')\n self.client.credentials(HTTP_AUTHORIZATION='Bearer ' + self.token_2)\n self.rate_details[\"user\"]['rate'] = 4\n self.client.post(\n self.rate_url,\n self.rate_details,\n format='json')\n response = self.client.get(\n self.view_rates_url + str(1) + \"/\",\n format='json')\n\n self.assertEqual(response.status_code, status.HTTP_200_OK)",
"def rates(self):\n return self._rates",
"def pricing_main(request, simulation):\n # Get number of tolls.\n policies = get_query('policy', simulation)\n tolls = policies.filter(type='PRICING')\n count = tolls.count()\n # Get links.\n links = get_query('link', simulation)\n has_link = links.count() >= 1\n # Get an import form.\n import_form = ImportForm()\n # Check ownership.\n owner = can_edit(request.user, simulation)\n context = {\n 'simulation': simulation,\n 'count': count,\n 'has_link': has_link,\n 'import_form': import_form,\n 'owner': owner,\n }\n return render(request, 'metro_app/pricing_main.html', context)",
"def test_retire_rate_plan(self):\n pass",
"def test_get_all_rate_plans(self):\n pass",
"def test_get_rate_article_not_found(self):\n self.client.credentials(HTTP_AUTHORIZATION='Bearer ' + self.token)\n response = self.client.get(\n self.view_rates_url + str(2) + \"/\",\n format='json')\n self.assertEqual(\n 0,\n response.data[\"rates\"])\n self.assertEqual(204, status.HTTP_204_NO_CONTENT)",
"def initRateType(self):\n cnt = MSTXRT.query.filter(None).count()\n if cnt == 0:\n obj = MSTXRT(\n CMRTTPCD = 'DAI',\n CMRTTPNM = 'Daily rate',\n CMRTTPDS = 'Daily rate'\n )\n self.setAuditFields(obj, 'ADMIN')\n session.add(obj)\n session.commit()\n session.close()",
"def on_init(self):\n self.write_log(\"策略初始化\")\n self.load_bar(1) # 具体加载多少天的数据, 1表示1天的数据,如果是2表示过去2天的数据",
"def preprocess_rates(self):\n # the rates from fixar.io are almost exactly in the required common format\n # as requested ccode is not in the request respond\n # we add it => e.g 1 EUR = 1 EUR => needed for further pandas extrapolation\n self.rates.update({self.in_ccode: float(1)})",
"def select_reference_rates_tab(self):\n self.select_static_tab(self.reference_rates_tab_locator, True)",
"def __call__(self):\r\n self.init_data = td.import_data(self.__module__)\r\n self.page1() # GET navigation (requests 101-153)\r\n\r\n grinder.sleep(20)\r\n self.page2() # GET case (requests 201-252)\r\n\r\n grinder.sleep(20)\r\n self.page3() # GET view (requests 301-365)\r\n\r\n grinder.sleep(20)\r\n self.page4() # POST view (requests 401-452)\r",
"def compare_vendor_price_list_detail_rates_grid_rate(self):\n self.buy_page_excel_data_dictionary = self.get_excel_data_dictionary()\n is_compared = False\n rates_grid_rate_column_element = self.wait().until(EC.presence_of_element_located(self.rates_grid_rate_column_locator))\n if self.buy_page_excel_data_dictionary[\"Rate1\"] == (rates_grid_rate_column_element.text).strip():\n is_compared = True\n return is_compared",
"def on_init(self):\n self.write_log(\"策略初始化\")\n self.exchange_load_bar(self.exchange)",
"def siterequestsrate(self) :\n\t\ttry :\n\t\t\treturn self._siterequestsrate\n\t\texcept Exception as e:\n\t\t\traise e"
] | [
"0.6668916",
"0.6402403",
"0.594141",
"0.5868841",
"0.5804853",
"0.57478184",
"0.5625256",
"0.5612651",
"0.54204524",
"0.539927",
"0.53936803",
"0.53860724",
"0.5364493",
"0.53571963",
"0.5312584",
"0.5246278",
"0.5223548",
"0.52033126",
"0.5198016",
"0.51972485",
"0.51922804",
"0.51881766",
"0.51508677",
"0.5142743",
"0.5106671",
"0.5087717",
"0.50737625",
"0.50550747",
"0.5049248",
"0.50315803"
] | 0.7343132 | 0 |
Implementing the 'is dial digits page loaded properly' functionality | def is_dial_digits_page_loaded_properly(self):
return self.is_element_present(self.dialed_digits_locator) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def is_dial_digits_tab_loaded_properly(self):\n return self.is_element_present(self.search_dial_digits_locator)",
"def phone_start(self) -> None:",
"def select_dial_digits_tab(self):\n self.click_element(self.dial_digits_tab_locator, True, True)",
"def is_incall_dialing(self) -> bool:",
"def is_dialing(self) -> bool:",
"def callview(request):\n return render(request, \"calls/dial_screen.html\", {})",
"def dial_numbers():\n for number in DIAL_NUMBERS:\n print(\"Dialing \" + number)\n # set the method to \"GET\" from default POST because Amazon S3 only\n # serves GET requests on files. Typically POST would be used for apps\n client.calls.create(to=number, from_=TWILIO_PHONE_NUMBER,\n url=TWIML_INSTRUCTIONS_URL, method=\"GET\")",
"def __init__(self, gv_login):\n self.opener = gv_login.opener\n self.phone_numbers_url = 'https://www.google.com/voice/settings/tab/phones'\n phone_numbers_page_content = self.opener.open(self.phone_numbers_url).read()\n phone_data_match = re.search(r\"<json><!\\[CDATA\\[(.*?)\\]\\]></json>\", phone_numbers_page_content)\n phone_data = json.loads(phone_data_match.group(1))\n \n\t\t# Build list of all numbers and their aliases\n self.phone_number_items = [(phone_data['phones'][phone_id]['type'],\n phone_data['phones'][phone_id]['name'],\n phone_data['phones'][phone_id]['phoneNumber']) \n for phone_id in phone_data['phones']]",
"def detect_dialtone(self) -> bool:",
"def select_vendor_price_list_detail_dial_digits_tab(self):\n self.click_element(self.vendor_price_list_details_dial_digits_tab_locator, True)",
"def init_home_page(self):\n rps = self.session.get(home_url, headers = BROWSER_HEADERS)\n # with open('first_get.html', 'w') as f: f.write(rps.text)\n if CAPTCHA_ELEMENT_ID in rps.text:\n # print(\"CAPTCHA ELEMENT DETECTED!\")\n return self.bypass_captcha(rps.text)\n else:\n print(\"NO CAPTCHA\")\n return True",
"def add_contact_to_phone(self, i):\n\n click_textview_by_id('account_type')\n click_textview_by_text('PHONE')\n\n\n # fist time , input mothod is not show . show\n entertext_edittext_by_index(index = 0, value = 't')\n clear_edittext_by_index(0)\n #click_textview_by_text('Name')\n\n self.ime.IME_input_english(1, SC.PRIVATE_JACOB_NAME)\n self.ime.IME_input_english(1, SC.PRIVATE_JACOB_NAME)\n self.ime.IME_input_number(1, SC.PRIVATE_JACOB_NUMBER, 'n')\n self.ime.IME_input(1, SC.PRIVATE_JACOB_EMAIL)\n self.ime.IME_input_english(1, SC.PRIVATE_JACOB_ADDRESS)\n\n self.add_photo()\n\n #sometime overlap ok button when after tims run,so next skip the pop diag\n\n '''\n #add another field\n scroll_to_bottom()\n click_button_by_id('button_add_field')\n if search_text('Group'):\n click_textview_by_text('Group')\n click_button_by_index(0)\n click_in_list_by_index(0)\n else:\n goback()\n\n scroll_to_bottom()\n click_button_by_id('button_add_field')\n if search_text('Website'):\n click_textview_by_text('Website')\n entertext_edittext_on_focused('www.qualcomm.com')\n else:\n goback()\n\n scroll_to_bottom()\n click_button_by_id('button_add_field')\n if search_text('Notes'):\n click_textview_by_text('Notes')\n entertext_edittext_on_focused('Notes')\n else:\n goback()\n\n scroll_to_bottom()\n click_button_by_id('button_add_field')\n if search_text('Nickname'):\n click_textview_by_text('Nickname')\n entertext_edittext_on_focused('Nickname')\n else:\n goback()\n\n scroll_to_bottom()\n click_button_by_id('button_add_field')\n if search_text('Internet call'):\n click_textview_by_text('Internet call')\n entertext_edittext_on_focused('Internet call')\n else:\n goback()\n\n scroll_to_bottom()\n click_button_by_id('button_add_field')\n if search_text('IM'):\n click_textview_by_text('IM')\n entertext_edittext_on_focused('Instant message num')\n else:\n goback()\n return\n '''",
"def call_from_contact(self):\n\n log_test_case(self.name, 'call_from_contact')\n #lick_textview_by_text(SC.PRIVATE_CONTACT_NUMBER)\n click_textview_by_id('primary_action_view')\n sleep(1)\n goback()\n sleep(3)\n return",
"def is_incall_playing_dialtone(self) -> bool:",
"def select_dialed_digits_tab(self):\n self.select_static_tab(self.dialed_digits_tab_locator, True)",
"def switch3():\n print(f\"Your credit card number is: {id_class.credit_card}\")\n main()",
"def wm_dial(self):\n return self.get_par(\"dial_readback\")",
"def on_pageNumber_activate(self, widget, data=None):\n self.page = int(self.pageNumber.get_text())\n self.part = 1\n self.refresh()",
"def setup_page(self):\r\n raise NotImplementedError",
"def _verify_page(self):",
"def start(self, is_calibrating=False):\r\n self.q(css='input.calibration-interstitial-page-button'\r\n if is_calibrating else 'input.interstitial-page-button'\r\n ).first.click()",
"def _check_ready(self, _widget, __event=None, __page=0):\r\n\r\n if self.cmbHardware.get_active() > 0:\r\n self.assistant.set_page_complete(self.fxdPageGeneral, True)\r\n else:\r\n self.assistant.set_page_complete(self.fxdPageGeneral, False)\r\n\r\n return False",
"def index():\n if 'number' in request.form:\n phone_numbers.append(request.form['number'])\n return \"Cool Thanks!!!!\"\n else:\n return render_template('roulette.html', number=HOTLINE_NUMBER)",
"def open_browser(x, y):\n global barcode_digits # using the barcode_digits variable as global\n read = webbrowser.open('https://www.barcodelookup.com/' + barcode_digits) # opening the website that results from adding the digits to the barcode look up webpage",
"def voice_four(request):\n call_sid = None\n choice = None\n call_from = None\n if request.method == 'POST':\n call_sid = request.POST.get('CallSid', None)\n choice = request.POST.get('Digits', None)\n call_from = request.POST.get('From', None)\n if request.method == 'GET':\n call_sid = request.GET.get('CallSid', None)\n choice = request.GET.get('Digits', None)\n call_from = request.GET.get('From', None)\n twiml = VoiceResponse()\n if choice:\n call_detail = CallDetail.objects.get(call_sid=call_sid)\n call_detail.went_conference = True\n call_detail.save()\n if int(choice) == 1:\n client.calls.create(to=num_sendCallTo, from_=num_sendCallTo, url=BASE_URL + '/Conference/' + call_sid,\n status_callback=BASE_URL+'/ConferenceStatus/' + call_sid,\n status_callback_method='POST', status_callback_event=[\"completed\", \"no-answer\", \"busy\",\n \"failed\"])\n dial = Dial()\n dial.conference(call_sid, wait_url='http://roelofvandijk.com/mp33/IVR/CallingInformation.mp3',\n status_callback=BASE_URL+'/AddDrop?CallSid=' + call_sid + '&From='+call_from,\n status_callback_method='POST', status_callback_event=['start', 'join', 'end'],\n end_conference_on_exit=True, max_participants=2, start_conference_on_enter=True)\n twiml.append(dial)\n return HttpResponse(str(twiml))\n twiml.hangup()\n return HttpResponse(str(twiml))\n return HttpResponse(str(twiml))",
"def handle_key():\n \n digit_pressed = request.args.get('Digits', None)\n\n print \"handle-key. key: \" + str(digit_pressed)\n\n if digit_pressed == \"2\":\n resp = twilio.twiml.Response()\n # Dial (310) 555-1212 - connect that number to the incoming caller.\n resp.dial(\"12345678\")\n # If the dial fails:\n resp.say(\"The call failed, or the remote party hung up. Goodbye.\")\n \n return str(resp)\n \n elif digit_pressed == \"1\":\n resp = twilio.twiml.Response()\n resp.say(\"Record your shout after the tone. You have 3 seconds.\")\n resp.record(maxLength=\"3\", action=\"/handle-recording\")\n return str(resp)\n \n # If the caller pressed anything but 1, redirect them to the homepage.\n else:\n return redirect(\"/service\")",
"def place_call(self, number):\n call_params = urllib.urlencode({\n 'outgoingNumber' : number,\n 'forwardingNumber' : self.forwarding_number,\n 'subscriberNumber' : 'undefined',\n 'remember' : '0',\n 'phoneType' : self.phone_type,\n '_rnr_se': self.key\n })\n\n # Send the text, display status message \n self.response = self.opener.open(self.call_url, call_params).read()",
"def phonecall():\n phone_number = choice(phone_numbers)\n r = twiml.Response()\n r.dial(phone_number)\n return str(r)",
"def set_vendor_price_list_detail_dial_digits_grid_settings(self, grid_settings):\n self.wait_for_ajax_spinner_load(300)\n self.set_grid_settings(self.vendor_price_list_detail_dial_digits_grid_div_id, grid_settings)",
"def addNativeDigits(self):\r\n nativeDigits = None\r\n result = self.currentStateFull.findall('//*[@native-digits=\"false\" and string-length(@text)!=0]')\r\n\r\n if len(result):\r\n try:\r\n nativeDigits = self.getNativeDigitsList().decode(\"utf-8\")\r\n except Exception, e:\r\n debug.err(\"Error while decoding native digit list: %s\"%str(e))\r\n else:\r\n arabicDigits = \"\".join([str(d).decode(\"utf-8\") for d in range(10)])\r\n\r\n native_regexp = u\"(%s)\" % u\"|\".join(nativeDigits)\r\n arabic_regexp = u\"(%s)\" % u\"|\".join(arabicDigits)\r\n\r\n def _sub_arabic_to_native(match_object):\r\n return nativeDigits[arabicDigits.find(match_object.group(0))]\r\n\r\n def _sub_native_to_arabic(match_object):\r\n return arabicDigits[nativeDigits.find(match_object.group(0))]\r\n\r\n for node in result:\r\n text = node.getAttribute(\"text\")#.decode(\"utf-8\")\r\n if u'\\u206f' not in text:\r\n #nativeText = text.translate(maketrans(arabicDigits,nativeDigits))\r\n nativeText = re.sub(arabic_regexp, _sub_arabic_to_native, text)\r\n if nativeText != text:\r\n node.setAttribute(\"original-text\",text)\r\n node.setAttribute(\"text\",nativeText)\r\n else:\r\n newText = []\r\n translate = False\r\n for c in text:\r\n if c == u'\\u206f':\r\n translate = False\r\n continue\r\n elif c == u'\\u206e':\r\n translate = True\r\n continue\r\n if translate:\r\n newText.append(nativeText = re.sub(arabic_regexp, _sub_arabic_to_native, c))\r\n else:\r\n newText.append(c)\r\n\r\n result = self.currentStateFull.find('//evo-dynamic-keyboard')\r\n if result and not self.getPreventNativeDigitsInField():\r\n if not nativeDigits:\r\n try:\r\n nativeDigits = self.getNativeDigitsList().decode(\"utf-8\")\r\n except Exception, e:\r\n debug.err(\"Error while decoding native digit list: %s\"%str(e))\r\n else:\r\n arabicDigits = [str(d) for d in range(10)]\r\n\r\n for node in result.findall('//key'):\r\n text = node.getAttribute(\"text\")\r\n nativeText = text.translate(maketrans(arabicDigits,nativeDigits))\r\n if nativeText != text:\r\n node.setAttribute(\"original-text\",text)\r\n node.setAttribute(\"text\",nativeText)"
] | [
"0.6351673",
"0.6051149",
"0.58823335",
"0.58278865",
"0.5729101",
"0.569002",
"0.53910416",
"0.5289853",
"0.51889604",
"0.5184374",
"0.51738495",
"0.51713043",
"0.516416",
"0.5133655",
"0.50922155",
"0.50697666",
"0.5032302",
"0.49997136",
"0.49176535",
"0.4916321",
"0.48991993",
"0.48697874",
"0.48596555",
"0.4848056",
"0.48449945",
"0.48383397",
"0.48201048",
"0.47894192",
"0.47686535",
"0.47648722"
] | 0.7028279 | 0 |
Implementing the 'is destinations page loaded properly' functionality | def is_destinations_page_loaded_properly(self):
return self.is_element_present(self.search_destination_locator) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_link_registered(self):\n response = self.client.get(reverse('misago:admin:users:accounts:index'))\n\n response = self.client.get(response['location'])\n self.assertContains(response, reverse('misago:admin:users:bans:index'))",
"def is_served_area(self, location):\n\t\tis_served = False\n\t\tcode = 500\n\n\t\turl = self.base_url\n\n\t\thtml, code = self.crawler.get(url)\n\n\t\tif code == 200:\n\t\t\t# Getting form data \n\t\t\tself.parser.set_html(html)\n\t\t\tform_data = self.parser.get_postal_code_form_data()\n\t\t\tdata = form_data['data']\n\t\t\turl = self.properurl(form_data['url'])\n\n\t\t\tdata['enteredZipCode'] = location['postal_code']\n\n\t\t\thtml, code = self.crawler.post(url, data)\n\t\t\tself.parser.set_html(html)\n\t\t\tdata_delivery = self.parser.get_form_delivery_zone()\n\n\t\t\tif data_delivery['type'] == 'address':\n\t\t\t\thtml, code = self.crawler.search_adress('%s, %s %s'%(location['address'].encode('utf8', 'replace'),location['postal_code'].encode('utf8', 'replace'), location['city_name'].encode('utf8', 'replace')))\n\t\t\t\tsuggetions = self.parser.extract_suggested_addresses(html)\n\t\t\t\t[s.update({'url': self.properurl(s['url'])} )for s in suggetions]\n\n\t\t\t\tif len(suggetions) > 0:\n\t\t\t\t\t# There is at least one suggestion, select the first\n\t\t\t\t\taddress = suggetions[0]\n\n\t\t\t\t\t# Now set this address\n\t\t\t\t\thtml, code = self.crawler.set_address(address)\n\t\t\t\t\tself.parser.set_html(html)\n\t\t\t\t\tform_data = self.parser.get_form_delivery_zone()\n\t\t\t\t\tform_data['form']['url'] = self.properurl(form_data['form']['url'])\n\t\t\t\t\thtml, code = self.crawler.set_delivery(form_data)\n\t\t\t\t\tif code == 200:\n\t\t\t\t\t\tis_served = True\n\n\t\t\telif data_delivery['type'] == 'select':\n\t\t\t\tdata_delivery['form']['url'] = self.properurl(data_delivery['form']['url'])\n\t\t\t\tif 'radiogroup' in data_delivery['form']['data'] and 'LAD' in data_delivery['form']['data']['radiogroup']:\n\t\t\t\t\thtml, code = self.crawler.set_delivery(data_delivery)\n\t\t\t\t\tif code == 200:\n\t\t\t\t\t\tis_served = True\n\t\t\t\telse:\n\t\t\t\t\tis_served = False\n\n\t\telse:\n\t\t\tprint 'Error while fetching base url of Monoprix (code = %d)'%(code)\n\n\t\treturn is_served, code",
"def test_public_pages_load(self):\r\n pages = (\r\n reverse('login'),\r\n reverse('signup'),\r\n )\r\n for page in pages:\r\n print(\"Checking '{0}'\".format(page))\r\n self.check_page_get(page, 200)",
"def test_get_dealer_landing_page(self):\n pass",
"def route(self):\n # TODO: wenn keine url, herausfinden, welche ????\n # TODO: wenn url = hostname (fqdn), dann -> google.ch\n if not (self.META.has_key('REMOTE_ADDR') and \n self.GET.has_key('provider')):\n #self.GET.has_key('url')):\n #return HttpResponseRedirect('/index.php')\n # TODO: Auf die Fehlerseite Link zu back.php\n return render_to_response('error.htm', {\n 'error': \"Falsche Parameter auf route.php\",\n })\n src_ip = self.META['REMOTE_ADDR']\n prov = self.GET['provider']\n url = \"http://www.google.ch\"\n if self.GET.has_key('url'):\n url = self.GET['url']\n # Add and save new route\n add_active_route(src_ip = src_ip, prov = prov)\n return HttpResponseRedirect(url)",
"def select_destinations_tab(self):\n self.select_static_tab(self.destinations_tab_locator, True)",
"def process_IN_MOVED_TO(self, event):",
"def requires_route(self) -> bool:\n return self.goal.is_specific()",
"def _is_current_page(self, **kwargs):\n if kwargs:\n # do a lookup to get the object i\n object_id = self._get_object(**kwargs)[\"Id\"]\n pattern = r\"/lightning/r/{}/{}/view$\".format(self.object_name, object_id)\n else:\n # no kwargs means we should just verify we are on a detail\n # page without regard to which object\n pattern = r\"/lightning/r/{}/.*/view$\".format(self.object_name)\n\n location = self.selenium.get_location()\n if not re.search(pattern, location):\n raise Exception(\n \"Location '{}' didn't match pattern {}\".format(location, pattern)\n )",
"def is_browser_on_page(self):",
"def setup_page(self):\r\n raise NotImplementedError",
"def test_page_existence(self):\r\n for page in self.pages:\r\n page.visit()",
"def _is_current_page(self):\n location = \"/lightning/n/{}{}\".format(self.eda.get_eda_namespace_prefix(), self._object_name)\n self.selenium.location_should_contain(location)\n\n locator_tab = eda_lex_locators[\"eda_settings\"][\"tab\"].format(\"Relationships\")\n self.selenium.wait_until_page_contains_element(\n locator_tab,\n error=f\"Relationships tab with locator '{locator_tab}' is not available on the page\"\n )",
"def _is_current_page(self):\n self.selenium.wait_until_location_contains(\"/list\",timeout=60, message=\"Records list view did not load in 1 min\")\n self.selenium.location_should_contain(\"General_Accounting_Unit__c\",message=\"Current page is not a DataImport List view\")",
"def test_view_url_exists(self):\n response = self.client.get('/details/' + str(self.s.id))\n response2 = self.client.get(reverse('details', args=(self.s.id,)))\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response2.status_code, 200)\n self.assertTemplateUsed(response2, 'notifications/details.html')",
"def setup_page(self):\n raise NotImplementedError",
"def navigation_hook(self):\r\n pass",
"def backButtonAvailable(self):\n referer = self.request.get('HTTP_REFERER')\n if not referer:\n return False\n portalUrl = getToolByName(self.context, 'portal_url')()\n if referer and referer.startswith(portalUrl):\n return True\n return False",
"def test_page_existence(self):\r\n # Log in\r\n self.auth_page.visit()\r\n\r\n # Verify that each page is available\r\n for page in self.pages:\r\n page.visit()",
"def _check_ready(self, _widget, __event=None, __page=0):\r\n\r\n if self.cmbHardware.get_active() > 0:\r\n self.assistant.set_page_complete(self.fxdPageGeneral, True)\r\n else:\r\n self.assistant.set_page_complete(self.fxdPageGeneral, False)\r\n\r\n return False",
"def test_view_url_exists_at_desired_location(self):\r\n response = self.client.get(reverse('search_results'),\r\n {'query': '', 'name': 'nutella'})\r\n self.assertEqual(response.status_code, 200)",
"def available(self):\n existing_url = self.context.get_personal_fundraising_campaign_url()\n same = existing_url == self.context.absolute_url()\n creating = 'create-personal-campaign-page' in self.request.URL\n return not same and not creating",
"def get_url_parts(self, *args, **kwargs):\n url_parts = super().get_url_parts(*args, **kwargs)\n # NOTE evidently this can sometimes be None; unclear why – perhaps it\n # gets called in a context where the request is unavailable? Only\n # happens in QA, not locally.\n if url_parts:\n site_id, root_url, _ = url_parts\n page_path = reverse(\n \"events:detail\",\n kwargs={\n \"year\": self.start_time.year,\n # force two-digit month\n \"month\": \"%02d\" % self.start_time.month,\n \"slug\": self.slug,\n },\n )\n return site_id, root_url, page_path",
"def process_IN_MOVED_FROM(self, event):",
"def show_landing(self):\n print(\"Hooray, the Eagle has landed!\")",
"def available(self) -> bool:\n return self._router.available",
"def is_store_page(entry):\n pattern = re.compile(\"^/view\\d*/.*$\")\n return entry[\"method\"] == \"GET\" and pattern.match(entry[\"uri\"]) != None",
"def route(self):\n pass",
"async def _landing_url(self, responses: SourceResponses) -> URL:\n landing_url = await super()._landing_url(responses)\n return (\n URL(f\"{landing_url}/ViewerMain.aspx?scanId={self._scan_id}&ProjectID={self.__project_id}\")\n if responses\n else landing_url\n )",
"def on_connect():\n articleList()\n #test_location()\n get_state_colors()\n ip = request.environ[\"HTTP_X_FORWARDED_FOR\"]\n loc = get_location(ip)\n push_stat_data(loc.state)\n return True"
] | [
"0.56053495",
"0.55677485",
"0.55168056",
"0.547717",
"0.5470642",
"0.54690707",
"0.54483056",
"0.5434261",
"0.53937316",
"0.5334329",
"0.5315497",
"0.52834636",
"0.52634895",
"0.5261351",
"0.5257627",
"0.5223185",
"0.5205327",
"0.5196006",
"0.5182019",
"0.51654166",
"0.5157402",
"0.51573026",
"0.5156812",
"0.51487786",
"0.5145621",
"0.5143163",
"0.5142551",
"0.51414824",
"0.51028985",
"0.50999635"
] | 0.73504025 | 0 |
Implementing the 'click on vendor price lists' functionality | def click_on_vendor_price_lists(self):
vendor_price_lists_element = self.wait().until(EC.element_to_be_clickable(self.vendor_price_lists_locator), 'vendor price lists locator not found before specified time')
self.script_executor_click(vendor_price_lists_element)
self.wait_for_ajax_spinner_load() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def click_buy_page_inline_action_button(self, vendor):\n self.click_inline_action_button(self.vendors_div_id, vendor, self.grid_column_number)",
"def click_vendor_price_lists_search_button(self):\n search_button_element = self.wait().until(EC.element_to_be_clickable(self.search_button_locator), 'search button not found before specified time')\n self.script_executor_click(search_button_element)\n self.wait_for_ajax_spinner_load(300)",
"def click_compare_price_lists_button(self):\n self.click_element(self.compare_price_lists_button_locator, True)",
"def select_vendor_price_list_detail_dial_digits_tab(self):\n self.click_element(self.vendor_price_list_details_dial_digits_tab_locator, True)",
"def click_buy_and_sell_deal_bulk_edit_button(self):\n self.click_element(self.buy_and_sell_deal_bulk_edit_button_lcoator, True)",
"def click_vendor_price_list_grid_first_row_inline_action_button(self):\n self.click_inline_action_button(self.vendor_price_list_grid_div_id, None, self.view_price_list_column_number, True)",
"def click_view_price_list_detail_page_inline_action_button(self, price_list_item):\n self.click_inline_action_button(self.view_price_list_div_id, price_list_item, self.view_price_list_column_number)\n self.wait_for_ajax_spinner_load()",
"def select_vendor_price_list_detail_reference_rates_tab(self):\n self.click_element(self.vendor_price_list_details_reference_rates_tab_locator, True)",
"def click_on_vendor_price_list_upload_search_button(self):\n vendor_price_list_upload_search_button_element = self.wait().until(EC.element_to_be_clickable(self.vendor_price_list_upload_search_button_locator), 'vendor price list upload search button locator not found before specified time')\n vendor_price_list_upload_search_button_element.click()\n self.wait_for_ajax_spinner_load()",
"def click_buy_and_sell_deal_create_button(self):\n self.click_element(self.save_vendor_profile_locator)",
"def click_view_price_list_detail_first_row_inline_action_button(self):\n self.click_inline_action_button(self.view_price_list_div_id, None, self.view_price_list_column_number, True)\n self.wait_for_ajax_spinner_load()",
"def OnMidClick(self, event):\n\n # note bdaqmid is a string so we need to convert to int here\n bdaqmid = int(event.GetEventObject().GetURL())\n\n bdaqname = self.mstore.get_name_from_BDAQmid(bdaqmid)\n\n # show the price panel for the market selected\n self.app.frame.GoToPricePanel(bdaqname, bdaqmid)",
"def click_vendor_price_list_detail_dial_digits_grid_export_to_excel_button(self):\n self.click_grid_export_to_excel_button(self.vendor_price_list_detail_dial_digits_grid_div_id)",
"def click_volver(self):\n self.button.click(liquidaciones_historicas_catalog.BOTON_VOLVER)",
"def click(self):\r\n pass",
"def click_edit_target_buy_policy_button(self):\n self.click_element(self.edit_target_buy_policy_button_locator)",
"def select_sort_by_price_descendant(self):\n msg = \"The new order of the items is by descendant price\"\n with self.allure.step(msg):\n self.__product_sort.select_by_text('Price (high to low)')\n self.allure.attach_image(self.driver, msg)",
"def verify_price_list_item(self, price_list_item):\n self.single_selection_from_kendo_dropdown(self.price_list_kendo_dropdown_locator, price_list_item)",
"def DoAction(self,event):\r\n selections = self.list.GetSelections()\r\n if not selections: return bell()\r\n itemDex = selections[0]\r\n item = self.items[itemDex]\r\n self.data.action(item)",
"def buySingleProduct(url):\n #parsed_url = urlparse(url)\n assert \"http\" and \"://\" in url, \"Bitte die URL komplett kopieren, inklusive \\\"http://\\\" bzw. \\\"https://\\\" am Anfang.\"\n assert \"amazon\" in url, \"Die aufzurufende Seite ist nicht die Amazon-Seite oder konnte nicht erkannt werden.\"\n print(\"Open page '\"+url+\"'\")\n driver.get(url)\n print(\"Find add-to-cart element\")\n try:\n print(\"actually find element\")\n #add_to_cart_button = driver.find_element_by_css_selector(amazon_add_to_cart)\n\n print(\"scroll element into view using native js\")\n driver.execute_script(\"window.scrollTo(0, document.GetElementById(\"+amazon_add_to_cart+\"));\")\n print(\"Send 'click' to element\")\n add_to_cart_button.click()\n print(\"Success.\")\n except Exception, e:\n print(\"Element could not be found. General exception: \"+str(e))\n #driver.close()",
"def open_products_page(catalog_menu):\n catalog_menu.open_products_page()",
"def select_sort_by_price_ascendant(self):\n msg = \"The new order of the items is by ascendant price\"\n with self.allure.step(msg):\n self.__product_sort.select_by_text('Price (low to high)')\n self.allure.attach_image(self.driver, msg)",
"def set_vendor(self, vendor_list):\n self.multiple_items_selection_from_kendo_dropdown(self.vendor_dropdown_locator, vendor_list)\n self.wait_for_ajax_spinner_load()",
"def _click_function( self, event ):\n if self.click_function is None:\n print( \"ListboxScroll -- click_function not set\" )\n else:\n # sending the selection get, but perhaps should\n # send the event and let click function ....!!!\n # a_key = event.widget.selection_get()\n #rint( a_key )\n # self.click_function( a_key )\n self.click_function( event )",
"def productactivate():\n pass",
"def on_click(self) -> None:\n pass",
"def test_search_shoes_item_to_buy(self):\n self.driver.find_element_by_id(\"search_query_top\").send_keys(\"shoes\")\n self.driver.find_element_by_name(\"submit_search\").click()\n self.driver.find_element_by_xpath(\n \"/html/body/div[1]/div[2]/div/div[3]/div[2]/ul/li[2]/div/div[1]/div/a[1]/img\").click()\n self.driver.find_element_by_name(\"Submit\").click()\n time.sleep(5)",
"def onClick(self, *value):\n self.dbgprint(\"[CLASS CB]item clicked w/ value: %r\"%(value))",
"def get_vendor_price_lists_details(self):\n try:\n self.vendor_price_lists_dict = self.get_grid_row_details(self.customer_price_list_grid_div_id, self.vendor_price_lists_dict)\n return True\n except:\n return False",
"def handle_view(self, controller):\n \n order = controller.customer.my_order ## make a reference to the order of customer\n \n for i in range(len(order.items)):\n if not order.items[i]:\n continue\n \n label0 = Label(self, text=order.items[i])\n label0.grid(row=i+2, column=0, columnspan=2, padx=10)\n \n label1 = Label(self, text=\"QTY:\")\n label1.grid(row=i+2, column=2)\n \n qty = order.items[i].quantity\n var = IntVar()\n self.vars[i] = var\n self.vars[i].set(qty)\n combobox0 = ttk.Combobox(self, textvariable=self.vars[i], state=\"readonly\", values=[j+1 for j in range(self.max_range)], width='3')\n combobox0.bind(\"<<ComboboxSelected>>\", lambda event, c=controller.customer, p=i:self.onChange(c, p)) ## change pizza quantity\n combobox0.focus_set()\n combobox0.grid(row=i+2, column=3)\n\n button3 = Button(self, text=\"Remove\", command=lambda p=i:self.onRemove(controller, p))\n button3.grid(row=i+2, column=4)\n\n button4 = Button(self, text=\"CHECKOUT\", command=lambda:self.onCheckout(controller))\n button4.grid(row=1, column=4, columnspan=2, sticky='e')\n \n self.showOrderPrice(order)"
] | [
"0.66060024",
"0.6558755",
"0.62382877",
"0.61915034",
"0.6082665",
"0.6070102",
"0.6055624",
"0.59267545",
"0.5718903",
"0.5711302",
"0.5507519",
"0.5475869",
"0.5474474",
"0.54237473",
"0.5406867",
"0.5405167",
"0.53965765",
"0.53871065",
"0.53813577",
"0.53805524",
"0.53282565",
"0.53103137",
"0.52707356",
"0.52689326",
"0.5268691",
"0.52499086",
"0.5242977",
"0.5242458",
"0.521132",
"0.52103275"
] | 0.74880445 | 0 |
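The `click_on_vendor_price_lists` document waits for clickability and then clicks through a script executor instead of calling `.click()` directly, a common workaround when an overlay or animation intercepts native clicks. A hedged sketch of that wait-then-JavaScript-click pattern, with the helper name and timeout chosen for illustration:

```python
# Sketch of the "wait, then click via JavaScript" pattern from the record above.
# execute_script bypasses pointer-interception issues that break native .click().
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

def script_executor_click(driver, locator, timeout=10):
    element = WebDriverWait(driver, timeout).until(
        EC.element_to_be_clickable(locator)
    )
    # arguments[0] is the Python-side element handle passed into the script
    driver.execute_script("arguments[0].click();", element)
```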
Implementing the 'set to date' functionality | def set_to_date(self):
self.set_value_into_input_field(self.set_to_date_locator, self.get_current_date()) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def setDate(self, p_int, p_int_1, p_int_2): # real signature unknown; restored from __doc__\r\n return False",
"def set_date(self, date):\n self.date = date",
"def date(self, value):\n self.date_value = value",
"def _date(self, _date):\n\n self.__date = _date",
"def _date(self, _date):\n\n self.__date = _date",
"def setSelectedDate(self, data):\n # print('setSelectedDate ', data)\n self.currentDate = data",
"def set_date(self, date):\n self.date = date\n return",
"def settlement_date(self, value):\n if value:\n self._settlement_date = (\n parse(value).date() if isinstance(value, type_check) else value\n )",
"def date(self, new_date):\n self._date.date = new_date",
"def set_datetime(self, date):\n self.date = date",
"def set_date(self, date):\n self.date = self.date_to_local(date)\n # ephem deals only in UTC\n self.site.date = ephem.Date(self.date_to_utc(self.date))",
"def set_date(self, date):\n self.data['date'] = date",
"def update(self, date):\r\n self.date = date",
"def date(self, date):\n self.value = date.strftime(\"%Y-%m-%d\") if date else \"\"",
"def date(self, date):\n self._date = date",
"def set_from_date(self, date):\n self.set_value_into_input_field(self.set_from_date_locator, date)",
"def _setVals(self, datetime=0):\n self.datetime = datetime",
"def date(self):",
"def _fill_date(self):\n if not self.date['year']:\n self.date['year'] = self.DEFAULT_DATE['year']\n if not self.date['month']:\n self.date['month'] = self.DEFAULT_DATE['month']\n if not self.date['day']:\n self.date['day'] = self.DEFAULT_DATE['day']",
"def to_date(self, value: date):\n self._to_date = value\n self._dao.to_date = value",
"def set_start_date(self, date):\n pass",
"def set_document_date(self, date):\n self.set_value_into_input_field(self.document_date_text_field_locator, date)",
"def set_end_date(self, date):\n pass",
"def date(self, date):\n\n self._date = date",
"def date(self, date):\n\n self._date = date",
"def date(self, date):\n\n self._date = date",
"def date(self, date):\n\n self._date = date",
"def date(self, date):\n\n self._date = date",
"def i_see_the_set_dates(_step):\r\n verify_date_or_time(COURSE_START_DATE_CSS, '12/20/2013')\r\n verify_date_or_time(COURSE_END_DATE_CSS, '12/26/2013')\r\n verify_date_or_time(ENROLLMENT_START_DATE_CSS, '12/01/2013')\r\n verify_date_or_time(ENROLLMENT_END_DATE_CSS, '12/10/2013')\r\n\r\n verify_date_or_time(COURSE_START_TIME_CSS, DUMMY_TIME)\r\n # Unset times get set to 12 AM once the corresponding date has been set.\r\n verify_date_or_time(COURSE_END_TIME_CSS, DEFAULT_TIME)\r\n verify_date_or_time(ENROLLMENT_START_TIME_CSS, DEFAULT_TIME)\r\n verify_date_or_time(ENROLLMENT_END_TIME_CSS, DUMMY_TIME)",
"def ts_setter(func):\n\n @wraps(func)\n def inner(self, value):\n \"\"\" Parse input value as ISO8601 date \"\"\"\n if value is None:\n return func(self, None)\n elif isinstance(value, datetime.datetime):\n return func(self, value)\n else:\n value = TS_SETTER_TRANSFORM_RE.sub(TS_SETTER_TRANSFORM_REPL, value)\n return func(self, iso8601.parse_date(value))\n\n return inner"
] | [
"0.7597517",
"0.7108432",
"0.70938134",
"0.7081148",
"0.7081148",
"0.7057913",
"0.7012738",
"0.6949188",
"0.6927733",
"0.6875525",
"0.6873057",
"0.6810564",
"0.67987144",
"0.6746936",
"0.6733423",
"0.67172575",
"0.6710233",
"0.6599115",
"0.6569132",
"0.6557032",
"0.6553031",
"0.650199",
"0.64934194",
"0.64648974",
"0.64648974",
"0.64648974",
"0.64648974",
"0.64648974",
"0.6455069",
"0.64132774"
] | 0.76180005 | 0 |
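The `set_to_date` document combines a `get_current_date` helper with a generic field setter, neither of which appears in the dump. A minimal sketch of both follows; the MM/DD/YYYY format is an assumption, since the record does not reveal what format the application's date input expects.

```python
# Sketch of the current-date field setter implied by set_to_date above.
# The date format string is an assumption; adjust it to the input's format.
from datetime import date
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

def get_current_date(fmt="%m/%d/%Y"):
    return date.today().strftime(fmt)

def set_value_into_input_field(driver, locator, value, timeout=10):
    field = WebDriverWait(driver, timeout).until(
        EC.presence_of_element_located(locator)
    )
    field.clear()        # drop any pre-filled value before typing
    field.send_keys(value)
```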
Implementing the 'click vendor price lists search button' functionality | def click_vendor_price_lists_search_button(self):
search_button_element = self.wait().until(EC.element_to_be_clickable(self.search_button_locator), 'search button not found before specified time')
self.script_executor_click(search_button_element)
self.wait_for_ajax_spinner_load(300) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def click_on_vendor_price_list_upload_search_button(self):\n vendor_price_list_upload_search_button_element = self.wait().until(EC.element_to_be_clickable(self.vendor_price_list_upload_search_button_locator), 'vendor price list upload search button locator not found before specified time')\n vendor_price_list_upload_search_button_element.click()\n self.wait_for_ajax_spinner_load()",
"def click_on_vendor_price_lists(self):\n vendor_price_lists_element = self.wait().until(EC.element_to_be_clickable(self.vendor_price_lists_locator), 'vendor price lists locator not found before specified time')\n self.script_executor_click(vendor_price_lists_element)\n self.wait_for_ajax_spinner_load()",
"def test_search_shoes_item_to_buy(self):\n self.driver.find_element_by_id(\"search_query_top\").send_keys(\"shoes\")\n self.driver.find_element_by_name(\"submit_search\").click()\n self.driver.find_element_by_xpath(\n \"/html/body/div[1]/div[2]/div/div[3]/div[2]/ul/li[2]/div/div[1]/div/a[1]/img\").click()\n self.driver.find_element_by_name(\"Submit\").click()\n time.sleep(5)",
"def on_searchButton_clicked(self):\n self.__search()",
"def search_items(self):\n urls = []\n prices = []\n names = []\n for item in self.items:\n print(f\"Searching for {item}...\")\n\n self.driver.get(self.amazon_url)\n #select = Select(self.driver.find_element_by_id(\"searchDropdownDescription\"))\n #select.select_by_visible_text('All Departments')\n\n search_input = self.driver.find_element_by_id(\"twotabsearchtextbox\")\n search_input.send_keys(item)\n\n time.sleep(2)\n #wait = WebDriverWait(self.driver, self.explicit_wait)\n #wait.until(EC.presence_of_all_elements_located((By.ID, \"twotabsearchtextbox\")))\n\n search_button = self.driver.find_element_by_xpath('//*[@id=\"nav-search\"]/form/div[2]/div/input')\n search_button.click()\n\n time.sleep(2)\n\n t = self.driver.find_element_by_id(\"result_0\")\n asin = t.get_attribute(\"data-asin\")\n url = \"https://www.amazon.ca/dp/\" + asin\n price = self.get_product_price(url)\n name = self.get_product_name(url)\n \n prices.append(price)\n urls.append(url)\n names.append(name)\n\n print(name)\n print(price)\n print(url)\n\n time.sleep(2)\n\n return prices, urls, names",
"def show_vendor_product():\n vendor = input(\"Enter the Vendor: \")\n product = input(\"Enter the product: \")\n filter_string = input(\"Enter Optional Search string (i.e. HTTP): \")\n logger.debug(\"Searching: {} from {} -- Filter = {}\".format(product, vendor, filter_string))\n search_url = \"http://cve.circl.lu/api/search/{}/{}\".format(vendor, product)\n req = call_api(search_url)\n if not req:\n logger.debug(\"something no workie with the vendor product call\")\n else:\n print(\"Searching: {} from {} -- Filter = {}\".format(product, vendor, filter_string))\n for item in req:\n if filter_string != '' or not filter_string:\n if filter_string in item['summary']:\n print(\"\\nSummary: \" + item['summary'])\n print(\"CVE: \" + item['id'])\n print(\"CVSS: \" + str(item['cvss']))\n else:\n print(\"\\nSummary: \" + item['summary'])\n print(\"CVE: \" + item['id'])\n print(\"CVSS: \" + str(item['cvss']))\n menu()",
"def search(self):\n premium = self.config.get('premium', False)\n\n self.params[self.opts['keyword']['query_key']] = self.config[self.opts['keyword']['config_key']] # keyword\n # Selection params\n self.append_param('tag_mode', 'selection')\n if premium:\n self.append_param('order_premium', 'selection')\n else:\n self.append_param('order_not_premium', 'selection')\n\n self.append_param('type', 'selection')\n self.append_param('tool', 'selection')\n self.append_param('ratio', 'selection')\n self.append_param('mode', 'selection')\n\n # Number params\n self.append_param('min_width', 'number')\n self.append_param('max_width', 'number')\n self.append_param('min_height', 'number')\n self.append_param('max_height', 'number')\n if premium:\n self.append_param('min_bookmark', 'number')\n self.append_param('max_bookmark', 'number')\n else:\n self.set_bookmark_filter()\n\n # Date params\n self.append_param('start_time', 'date')\n self.append_param('end_time', 'date')\n\n # multi work filter\n self.filters['multi'] = self.config.get('download_multi', False)\n\n for i in range(self.config['start_page'], self.config['end_page'] + 1):\n self.params['p'] = i\n self.headers['Referer'] = 'https://www.pixiv.net/'\n url ='https://www.pixiv.net/search.php'\n html = self.session.get(url, headers = self.headers, params = self.params, timeout = 10, proxies = self.proxies)\n\n soup = BeautifulSoup(html.text, 'lxml')\n data_items = json.loads(soup.find('input', id = 'js-mount-point-search-result-list')['data-items'])\n\n return self.extract_work_info(data_items)",
"def click_search_button(self):\n self.click_element(self.generic_search_button_locator)",
"def search_sales():\r\n\r\n elem = driver.find_element_by_id(\"SearchSaleDate\")\r\n elem.send_keys('1/1/2000')\r\n go = driver.find_element_by_id(\"cmdGo\")\r\n go.click()\r\n\r\n return",
"def enable_search(self):\n html_element = self.find_element_by_xpath(\n '/html/body').get_attribute('outerHTML')\n soup = Scraper(html_element)\n\n elms_obj = soup.find_search_enable_btn()\n\n for tag, target in elms_obj.items():\n if len(target) > 0:\n for elem in target:\n for attr, value in elem.items():\n try:\n if str(attr) == 'class':\n for element in value:\n btn = self.find_elements_by_class_name(\n f'{element}')\n for e in btn:\n try:\n e.click()\n print(\n colored(':: The Searching is able ::', 'green'))\n return\n except:\n print(\n 'The searching isn\\'t able yet =(')\n except:\n pass\n btn = self.find_elements_by_css_selector(\n f'{tag}[{attr}=\"{value}\"]'\n )\n for element in btn:\n try:\n element.click()\n print(\n colored(':: The Searching is able ::', 'green'))\n return\n except:\n print('The searching isn\\'t able yet =(')",
"def search_btn_clicked(self, widget, data=None):\n # Method to handle search here.\n search_text = self.get_text(\"txt_search\")\n print search_text",
"def search_product(self):\n cat = []\n product = open_products()\n radio = self.radiobutton_check()\n search = self.lineEdit_search.text()\n _translate = QtCore.QCoreApplication.translate\n __sortingEnabled = self.tableWidget.isSortingEnabled()\n self.tableWidget.setSortingEnabled(False)\n o=0\n if len(self.lineEdit_search.text()) == 0:\n self.show_product()\n else:\n for r in range(0, len(product)):\n if search.upper() in str(product[r][radio]).upper():\n cat.append(product[r])\n for i in range(0, len(cat)):\n for c in range(0, 5):\n item = self.tableWidget.item(i, c)\n item.setText(_translate(\"MainWindow\", str(cat[i][c])))\n o+=1\n else:\n for c in range(0, 5):\n item = self.tableWidget.item(r, c)\n item.setText(_translate(\"MainWindow\", \"\"))\n if o == 0:\n self.frame_3.show()\n self.label_16.setText('PRODUCT NOT FOUND!')",
"def click_buy_page_inline_action_button(self, vendor):\n self.click_inline_action_button(self.vendors_div_id, vendor, self.grid_column_number)",
"def search_product(query, *args, **kwargs):\n search_query_link = WalTracker.construct_search_query(query)\n soup = Base.get_soup(search_query_link)\n response_links = []\n\n # Search Multiple Classes\n for a in soup.findAll('a', class_='card'):\n response_links.append(WalTracker.URL + a['href'])\n return response_links",
"def click_compare_price_lists_button(self):\n self.click_element(self.compare_price_lists_button_locator, True)",
"def search(self, *args, **kwargs):",
"def do_search(self):\n # Call the website\n self.driver.get(self.BASE_URL)\n\n # Request the proper historical data\n self.select_proper_version()\n self.save_image()\n\n # If the entity exists in this historical version, extract the data\n if self.select_proper_region() is True:\n # Do the search\n self.fill_search_parameters()\n # Scrap the results page\n self.scrap_results()\n # Export the data to .csv\n self.search_results.export()",
"def search_convo_ask_price(update, context):\n user_data = context.user_data\n query = update.message.text\n user_data['query'] = query\n update.message.reply_text(\n f'Ja, {query} er også godt. Og til hvilken pris (i kr.)?')\n return SEARCH_SHOW_RESULT",
"def linkSearch(self):\n self.identificationParams = []\n try:\n url = 'https://shopee.sg/api/v2/search_items/?by=relevancy&keyword=' + self.searchParameters + '&limit=' + str(\n self.itemQuantity) + '&newest=' + str(\n self.items_per_page) + '&order=desc&page_type=search' # Base URL\n print(url)\n r = requests.get(url, headers=self.HEADERS).json()\n for item in r['items']: # Store name, price, stocks left and amount sold in respective lists\n self.identificationParams.append((item['shopid'], item['itemid']))\n except AttributeError:\n self.identificationParams = []",
"def search_market_gather_players(self, name, max_price_to_pay, bids_allowed, bids_made, futbindata, min_bid, max_bid):\n if (int(max_bid) < 400):\n max_bid = 400\n # Ensure bid box is visible, then clear previous params\n self.sleep_approx(2)\n input = self.driver.find_element(\n By.XPATH, \"/html/body/main/section/section/div[2]/div/div[2]/div/div[1]/div[2]/div[6]/div[2]/input\")\n self.driver.execute_script(\"arguments[0].scrollIntoView(true);\", input)\n WebDriverWait(self.driver, 20).until(EC.element_to_be_clickable(\n (By.XPATH, \"/html/body/main/section/section/div[2]/div/div[2]/div/div[1]/div[2]/div[6]/div[2]/input\"))).click()\n self.sleep_approx(1)\n input.send_keys(0)\n self.sleep_approx(1)\n\n clear = \"/html/body/main/section/section/div[2]/div/div[2]/div/div[1]/div[2]/div[1]/button\"\n maxbidbox = self.driver.find_element(\n By.XPATH, \"/html/body/main/section/section/div[2]/div/div[2]/div/div[1]/div[2]/div[3]/div[2]/input\")\n minbidbox = self.driver.find_element(\n By.XPATH, \"/html/body/main/section/section/div[2]/div/div[2]/div/div[1]/div[2]/div[2]/div[2]/input\")\n\n # CLEAR RESULTS BOX\n self.driver.find_element(By.XPATH, clear).click()\n self.sleep_approx(1)\n\n # insert max_bid here\n maxbidbox.click()\n self.sleep_approx(1)\n maxbidbox.send_keys(max_bid)\n self.sleep_approx(1)\n\n # insert min_bid here\n minbidbox.click()\n self.sleep_approx(1)\n minbidbox.send_keys(min_bid)\n self.sleep_approx(1)\n\n # search the pages, and bid on players under bid price\n self.clickSearch()\n sleep(3)\n\n keepgoing = True\n while keepgoing:\n # Each page, get user config\n self.getUserConfig()\n status = self.checkState(\"transfermarket\")\n if status:\n max_price_to_pay = int(max_price_to_pay)\n self.sleep_approx(4)\n\n # TODO understand why some eligible players fail to receive bids...\n players_on_page = self.getAllPlayerInfo()\n for card in players_on_page:\n playernumber = card[0]\n bidStatus = card[1]\n curbid = card[5]\n timeremainingseconds = card[7]\n timeremainingmins = timeremainingseconds/60\n playerid = card[8]\n buynow = card[6]\n\n if bids_made < bids_allowed-1:\n if \"highest-bid\" not in bidStatus:\n stopbidTime = int(self.bidexpiration_ceiling)\n if timeremainingmins < stopbidTime:\n if timeremainingmins >= 2:\n # Check if bid to make falls under ceiling\n if (curbid < 1000):\n curbidprice_afterbidding = curbid+50\n else:\n curbidprice_afterbidding = curbid+100\n if curbidprice_afterbidding < max_price_to_pay:\n if ((curbid*2)<self.user_num_coins):\n self.makebid_individualplayer(\n playernumber, max_price_to_pay)\n self.sleep_approx(2)\n bids_made += 1\n log_event(self.queue, \"Bids made on \" + str(name) +\n \": \" + str(bids_made) + \"/\" + str(bids_allowed))\n else:\n log_event(self.queue, \"not enough coins\")\n else:\n keepgoing = False\n else:\n keepgoing = False\n\n self.sleep_approx(3)\n log_event(self.queue, \"Going to next page\")\n try:\n self.driver.find_element_by_xpath(\n '/html/body/main/section/section/div[2]/div/div/section[1]/div/div/button[2]')\n self.driver.find_element_by_xpath(\n '/html/body/main/section/section/div[2]/div/div/section[1]/div/div/button[2]').click()\n self.user_requests_made += 1\n except:\n log_event(self.queue, \"No next page found, returning\")\n keepgoing = False\n self.clickBack()\n self.sleep_approx(1)\n return bids_made",
"def filterPrice(self, minPrice = 5000):\n\n # Check and select if price button is displayed\n if commonFunctionsUI.isElementDisplayedByXPath(selector = self.locators.price):\n commonFunctionsUI.clickByXPath(selector = self.locators.price)\n else:\n LOGGER.error(\"Could not click price button\")\n raise Exception(\"could not click price button\")\n\n time.sleep(3)\n\n\n try:\n commonFunctionsUI.clickByXPath(selector = self.locators.minPrice)\n commonFunctionsUI.sendBackspace(selector = self.locators.priceSave, numOfBackspace = 5)\n\n commonFunctionsUI.enterTextByXPath(selector = self.locators.minPrice, text = minPrice)\n except:\n try:\n commonFunctionsUI.clickByXPath(selector = self.locators.searchButton)\n except:\n commonFunctionsUI.clickByXPath(selector = self.locators.priceSave)\n LOGGER.error(\"Could not find input field to enter min price\")\n raise Exception(\"Could not find input field to enter min price\")\n\n\n if commonFunctionsUI.isElementDisplayedByXPath(selector = self.locators.priceSave):\n commonFunctionsUI.clickByXPath(selector = self.locators.priceSave)\n else:\n raise Exception(\"Could not click on save price button\")",
"def onSearch(self):\n self.mainGrid.showSearchPopup()\n self.popupActive = True",
"def shop_items(request):\n\n items = Item.objects.all()\n\n query = None\n\n \"\"\" Used Code Institute Search logic from Tutorial \"\"\"\n if 'query' in request.GET:\n query = request.GET['query']\n if not query:\n messages.error(request, \"Please enter your search\")\n return redirect(reverse('items'))\n \n queries = Q(name__icontains=query) | Q(item_description__icontains=query)\n items = items.filter(queries)\n\n context = {\n 'items': items,\n 'search_term': query,\n }\n\n return render(request, 'items/items.html', context)",
"def apply_search(self, queryset):\n self.form = self.form_class(self.request.GET)\n\n if not self.form.is_valid():\n return queryset\n\n data = self.form.cleaned_data\n\n if data.get('upc'):\n # If there's an exact UPC match, it returns just the matched\n # product. Otherwise does a broader icontains search.\n qs_match = queryset.filter(upc=data['upc'])\n if qs_match.exists():\n queryset = qs_match\n else:\n queryset = queryset.filter(upc__icontains=data['upc'])\n\n if data.get('title'):\n queryset = queryset.filter(title__icontains=data['title'])\n\n if data.get('product_class'):\n queryset = queryset.filter(product_class=data['product_class'])\n\n return queryset",
"def click_vendor_price_list_grid_first_row_inline_action_button(self):\n self.click_inline_action_button(self.vendor_price_list_grid_div_id, None, self.view_price_list_column_number, True)",
"def search_convo_show_result(update, context):\n chat = Chat.get(update.message.chat_id)\n user_data = context.user_data\n query = user_data['query']\n price = float(update.message.text)\n user_data['price'] = price\n\n ses = Session()\n offers = ses.search_all(query, chat.lat, chat.lon, chat.radius)\n too_expensive = 0\n total_offers = 0\n for offer in offers:\n total_offers += 1\n if offer.price > price:\n too_expensive += 1\n continue\n\n update.message.reply_text(offer_text(offer))\n\n if total_offers == 0:\n update.message.reply_text(\n f'Der blev ikke fundet nogen tilbud lige nu.')\n if too_expensive > 0:\n update.message.reply_text(f'{too_expensive} tilbud blev frasorteret, '\n 'fordi de var for dyre.')\n\n keyboard = [[\n InlineKeyboardButton(text='💾 Gem søgning', callback_data='save'),\n InlineKeyboardButton(text='🌟 Ny søgning', callback_data='new'),\n InlineKeyboardButton(text='🚪️ Færdig', callback_data='done')\n ]]\n markup = InlineKeyboardMarkup(keyboard)\n\n update.message.reply_text('❓ Vil du gemme søgningen?', reply_markup=markup)\n\n return SEARCH_DONE",
"def test_product_search(self):\n\n flag = \"user\"\n api = \"product.product.add\"\n current_page = 1\n search_info = json.dumps({\n 'name': \"可爱的小蓝牙呀\"\n })\n print('start------------------------>add')\n result = self.access_api(flag = flag, api = api, current_page = current_page, product_info = search_info)",
"def perform_search(self):\n\n self.implicitly_wait(5)\n html_element = self.find_element_by_xpath(\n '/html/body').get_attribute('outerHTML')\n soup = Scraper(html_element)\n target = soup.find_search_field()\n\n for elem in target:\n for attr, value in elem.items():\n placeholder = self.find_elements_by_css_selector(\n f'input[{attr}=\"{value}\"]'\n )\n for element in placeholder:\n try:\n element.send_keys(self.keywords)\n element.send_keys(Keys.RETURN)\n print(colored(':: Placeholder fullfilled ::', 'green'))\n return\n except:\n print(\n colored('Can\\'t type inside the search input', 'yellow'))",
"def search_input(self):\n self.driver.get(\"https://streeteasy.com/\")\n try:\n rental_button = self.driver.find_element_by_tag_name('Rentals')\n\n except NoSuchElementException:\n sleep(2)\n self.captcha()\n\n else:\n sleep(5)\n # rental button #\n rental_button.click()\n # location #\n neighborhood = self.driver.find_element_by_id(\"search-areas-dropdown-input\")\n neighborhood.click()\n neighborhood.send_keys(\"All Midtown\")\n self.driver.find_element_by_xpath('//*[@id=\"application\"]').click()\n\n # close pop up #\n sleep(2)\n try:\n self.driver.find_element_by_xpath('//*[@id=\"content\"]/main/section[1]/div/form/div[1]/div/div[3]'\n '/div/div/div[1]/div[2]/button').click()\n\n except NoSuchElementException:\n pass\n\n else:\n # price - giving it only a maximum range, not minimum (price selected $3k) #\n sleep(2)\n self.driver.find_element_by_xpath('//*[@id=\"price_to\"]/option[24]').click()\n\n # number of rooms #\n sleep(2)\n self.driver.find_element_by_xpath('//*[@id=\"content\"]/main/section[1]/'\n 'div/form/div[1]/div/div[3]/fieldset/div/label[3]/span').click()\n\n # advanced options button #\n try:\n sleep(2)\n self.driver.find_element_by_css_selector(\"button.Home-advancedSearchLink\").click()\n\n except NoSuchElementException:\n pass\n\n else:\n # amenities search (doorman and dog) #\n sleep(2)\n self.driver.find_element_by_name(\"amenities[doorman]\").click()\n sleep(2)\n self.driver.find_element_by_name(\"amenities[pets]\").click()\n\n # click the search button #\n try:\n sleep(3)\n search_button = self.driver.find_element_by_xpath('//*[@id=\"content\"]/main/section[1]/div/'\n 'form/div[1]'\n '/div/div[4]/button')\n search_button.click()\n\n except ElementClickInterceptedException:\n sleep(2)\n self.driver.find_element_by_xpath('//*[@id=\"content\"]/main/section[1]/div/'\n 'form/div[1]/div/div[4]'\n '/button').click()",
"def test_product_search(self):\n\n flag = \"user\"\n api = \"product.product.search\"\n current_page = 1\n search_info = json.dumps({\n })\n\n result = self.access_api(flag = flag, api = api, current_page = current_page, search_info = search_info)\n self.assertTrue('data_list' in result)"
] | [
"0.67439646",
"0.66573006",
"0.6485732",
"0.634402",
"0.6308638",
"0.6296258",
"0.61310154",
"0.6101929",
"0.60976154",
"0.6091547",
"0.60725826",
"0.5917563",
"0.58863914",
"0.58090204",
"0.57779515",
"0.57762134",
"0.57505846",
"0.57471293",
"0.5746761",
"0.57103413",
"0.5709403",
"0.5688981",
"0.56732935",
"0.5639366",
"0.56310254",
"0.5611367",
"0.55851394",
"0.5576493",
"0.55750495",
"0.5569336"
] | 0.79111916 | 0 |
Implementing verify price list item functionality | def verify_price_list_item(self, price_list_item):
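    # Verify the item by selecting it from the price-list Kendo dropdown (single selection).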
self.single_selection_from_kendo_dropdown(self.price_list_kendo_dropdown_locator, price_list_item) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_LinkedPriceCheck(self):\n # Basic price check\n self.log.info(\"Price checking Linked Item 1 via PLU\")\n pos.click(\"Price Check\")\n pos.enter_keypad(\"014\", after=\"enter\")\n \n # Confirm the right item, at the right price\n self.read_price_check(\"Linked Item 1\", \"$1.00\")\n # Add the item\n pos.click(\"Sell Item\")\n \n # Confirm we are in a transaction\n if not self.in_transaction():\n self.tc_fail(\"POS did not start a transaction; can not confirm item was added\")\n else:\n self.log.info(\"Confirmed we are in a transaction\")\n \n # Confirm we added the item\n ret = self.confirm_line(-2, \"Linked Item 1\", \"$1.00\")\n if ret == True:\n self.log.info(\"Confirmed item added\")\n else:\n self.tc_fail(ret)\n \n # Confirm we added the linked item\n ret = self.confirm_line(-1, \"Linked Item 2\", \"$1.00\")\n if ret == True:\n self.log.info(\"Confirmed item added\")\n else:\n self.tc_fail(ret)\n \n # Setup for next test\n self.recover()",
"def test_PriceCheckPLU(self):\n # Basic price check\n self.log.info(\"Price checking Generic Item via PLU\")\n pos.click(\"Price Check\")\n pos.enter_keypad(\"1\", after=\"enter\")\n \n # Confirm the right item, at the right price\n self.read_price_check(\"Generic Item\", \"$0.01\")\n # Don't add the item\n pos.click(\"Ok\")\n \n # Confirm we aren't in a transaction\n if self.in_transaction():\n self.tc_fail(\"Unintentionally In Transaction\")\n else:\n self.log.info(\"Confirmed we are not in a transaction\")\n \n # Setup for next test\n self.recover()",
"def verify_selected_price_list(self, price_list_item):\n is_present = None\n price_list_locator = (By.XPATH, self.selected_price_list_locator_string + \"[text()='%s']\" % price_list_item)\n try:\n self.wait().until(EC.presence_of_element_located(price_list_locator))\n is_present = True\n except:\n is_present = False\n finally:\n return is_present",
"def test_basicSalePC(self):\n # Basic price check\n self.log.info(\"Price checking Generic Item via speedkey\")\n pos.click(\"Price Check\")\n pos.click_speed_key(\"Generic Item\")\n \n # Confirm the right item, at the right price\n self.read_price_check(\"Generic Item\", \"$0.01\")\n # Add the item\n pos.click(\"Sell Item\")\n \n # Confirm we added the item\n ret = self.confirm_line(-1, \"Generic Item\", \"$0.01\")\n if ret == True:\n self.log.info(\"Confirmed item added\")\n else:\n self.tc_fail(ret)\n \n # Setup for next test\n self.recover()",
"def verify_vendor_price_lists_details(self, row_data):\n return self.verify_grid_row_details(self.customer_price_list_grid_div_id, row_data)",
"def validate(cls, prices):\n super(GiftCardPrice, cls).validate(prices)\n\n for price in prices:\n price.check_price()",
"def verify_selected_price_lists_details(self):\n status = True\n row_data_length = len(self.compare_price_lists_list)\n for i in range(row_data_length):\n price_list_locator = (By.XPATH, \"//div[@id='divSelectedPriceLists']/descendant::span[text()='%s']\" % (str(self.compare_price_lists_list[i])))\n price_list_present = self.is_element_present(price_list_locator)\n if price_list_present is not True:\n status = False\n break\n return status",
"def read_price_check(self, item_name, item_price):\n msg = pos.read_message_box()\n self.log.info(f\"Message received: [{msg}]\")\n if not msg:\n self.tc_fail(\"Did not receive price check prompt\")\n if not item_name.lower() in msg.lower():\n self.tc_fail(f\"Did not find correct item [{item_name}] in message\")\n if not item_price.lower() in msg.lower():\n self.tc_fail(f\"Did not find correct price [{item_price}] in message\")\n return True",
"def qualified_item(self):\n # Price check with base item\n self.log.info(\"Price checking Qual 1 via PLU\")\n pos.click(\"Price Check\")\n pos.enter_keypad(\"030\", after='enter')\n if self.selection_list_visible():\n pos.select_list_item(\"Qual 1 ($5.00)\")\n pos.click(\"enter\")\n else:\n tc_fail(\"Selection list didn't appear.\")\n\n # Confirm the right item, at the right price\n self.read_price_check(\"Qual 1\", \"$5.00\")\n # Add the item\n pos.click(\"Sell Item\")\n\n # Price check with qualifier\n self.log.info(\"Price checking Qual 1 via PLU\")\n pos.click(\"Price Check\")\n pos.enter_keypad(\"030\", after='enter')\n if self.selection_list_visible():\n pos.select_list_item(\"Test Type ($10.00)\")\n pos.click(\"enter\")\n else:\n tc_fail(\"Selection list didn't appear.\")\n\n # Confirm the right item, at the right price\n self.read_price_check(\"Qualifier 1\", \"$10.00\")\n # Add the item\n pos.click(\"Sell Item\")\n\n # Confirm we are in a transaction\n if not self.in_transaction():\n self.tc_fail(\"POS did not start a transaction; can not confirm item was added\")\n else:\n self.log.info(\"Confirmed we are in a transaction\")\n \n # Confirm we added the item\n ret1 = self.confirm_line(-2, \"Qual 1\", \"$5.00\")\n if ret1:\n self.log.info(\"Confirmed Qual 1 item added\")\n else:\n self.tc_fail(ret1)\n \n # Confirm we added the linked item\n ret2 = self.confirm_line(-1, \"Qualifier 1\", \"$10.00\")\n if ret2:\n self.log.info(\"Confirmed Qualifier 1 item added\")\n else:\n self.tc_fail(ret2)\n \n # Setup for next test\n self.recover()",
"def test_NegativePriceCheck(self):\n # Basic price check\n self.log.info(\"Price checking Negative Item via speedkey\")\n pos.click(\"Price Check\")\n pos.click_speed_key(\"Negative Item\")\n \n # Confirm the right item, at the right price\n # NOTE: Price check returns negative prices as possitive. Legacy defect deemed 'Will Not Fix'\n self.read_price_check(\"Negative Item\", \"$5.00\")\n # Add the item\n pos.click(\"Sell Item\")\n \n # Confirm we are in a transaction\n if not self.in_transaction():\n self.tc_fail(\"POS did not start a transaction; can not confirm item was added\")\n else:\n self.log.info(\"Confirmed we are in a transaction\")\n \n # Confirm we added the item, and that it was negative\n ret = self.confirm_line(-1, \"Negative Item\", \"-$5.00\")\n if ret == True:\n self.log.info(\"Confirmed item added\")\n else:\n self.tc_fail(ret)\n \n # Setup for next test\n self.recover()",
"def process_verify_item(args):\n return do_process_verify_item(*args)",
"def test_basicNoSalePC(self):\n # Basic price check\n self.log.info(\"Price checking Generic Item via speedkey\")\n pos.click(\"Price Check\")\n pos.click_speed_key(\"Generic Item\")\n \n # Confirm the right item, at the right price\n self.read_price_check(\"Generic Item\", \"$0.01\")\n # Don't add the item\n pos.click(\"Ok\")\n \n # Confirm we aren't in a transaction\n if self.in_transaction():\n self.tc_fail(\"Unintentionally In Transaction\")\n else:\n self.log.info(\"Confirmed we are not in a transaction\")\n \n # Setup for next test\n self.recover()",
"def test_check_price_ok() -> None:\n data = check_price(min_price=1, data={'p': 2.0})\n assert data == {'p': 2.0}",
"def check_restrictions(self):\n from .signals import determine_availability\n\n responses = determine_availability.send(\n self.item.event, item=self.item,\n variations=[self.to_variation_dict()], context=None,\n cache=self.item.event.get_cache()\n )\n price = self.default_price if self.default_price is not None else self.item.default_price\n for receiver, response in responses:\n if 'available' in response[0] and not response[0]['available']:\n return False\n elif 'price' in response[0] and response[0]['price'] is not None and response[0]['price'] < price:\n price = response[0]['price']\n return price",
"def test_shopping_cart_has_items(self):\n list_items = self.get_list_of_items()\n\n self.assertTrue(len(self.expected_contents) == len(list_items))\n\n for expected_item, list_item in zip(\n self.expected_contents, list_items):\n item_dict = self.get_item_dict(list_item)\n for key in expected_item:\n try:\n list_value = item_dict[key].text\n except AttributeError:\n list_value = item_dict[key]\n self.assertEqual(str(expected_item[key]), list_value)\n self.assertEqual(\n str(self.client.session['cart_cost']),\n self.browser.find_element_by_id('food-cost').text\n )",
"def check(self,item):\r\n raise AbstractError\r\n return False",
"def test_positive_price_details(self):\n with self.client:\n response = self.add_meal(\"beef\", -15000)\n data = json.loads(response.data.decode())\n self.assertEqual(data.get('message'),\n \"Price must be a positive number\")\n self.assertEqual(response.status_code, 400)",
"def item_call(data):\n print('-' * 80)\n print(\"\")\n print(\"This is the Item Review.\")\n items_tally = get_new_list(data, 4)\n create_unique_value_list(items_tally)\n print(\"Here is the total sales for the advisors.\\n\")\n item_sale_count = create_dict_count(items_tally)\n find_max_key_val(item_sale_count)\n exit_call = continue_exit(data)\n if exit_call:\n return True\n else:\n return False",
"def test_product_buy_more_then_have(self):\n result_buy = self.info_list.product_buy(\"соль 1 кг\", 50)\n self.assertFalse(result_buy)",
"def test_sell_ticket_valid_quantity(self, *_):\n # logout to invalidate any logged in session\n self.open(base_url + '/logout')\n # login a user\n self.open(base_url + '/login')\n # fill email and password\n self.type(\"#email\", \"[email protected]\")\n self.type(\"#password\", \"Test_frontend@\")\n # click enter button\n self.click('input[type=\"submit\"]')\n # open the /sell route\n self.open(base_url)\n # Enter an invalid ticket name\n self.type('#name_sell', \"ticketname\")\n self.type('#quantity_sell', \"-1\")\n self.type(\"#price_sell\", \"15\")\n self.type(\"#exp_date_sell\", \"20200921\")\n self.click('#submit-sell')\n # Assert that the valid error message is shown\n self.assert_text(\"Invalid quantity of tickets\", \"#message\")\n\n # logout to invalidate any logged in session\n self.open(base_url + '/logout')\n # login a user\n self.open(base_url + '/login')\n # fill email and password\n self.type(\"#email\", \"[email protected]\")\n self.type(\"#password\", \"Test_frontend@\")\n # click enter button\n self.click('input[type=\"submit\"]')\n # open the /sell route\n self.open(base_url)\n # Enter an invalid ticket name\n self.type('#name_sell', \"ticketname\")\n self.type('#quantity_sell', \"101\")\n self.type(\"#price_sell\", \"15\")\n self.type(\"#exp_date_sell\", \"20200921\")\n self.click('#submit-sell')\n # Assert that the valid error message is shown\n self.assert_text(\"Invalid quantity of tickets\", \"#message\")",
"def test_sell_ticket_price_range(self, *_):\n # logout to invalidate any logged in session\n self.open(base_url + '/logout')\n # login a user\n self.open(base_url + '/login')\n # fill email and password\n self.type(\"#email\", \"[email protected]\")\n self.type(\"#password\", \"Test_frontend@\")\n # click enter button\n self.click('input[type=\"submit\"]')\n # open the /sell route\n self.open(base_url)\n # Enter an invalid ticket name\n self.type('#name_sell', \"testticket\")\n self.type(\"#quantity_sell\", \"1\")\n self.type(\"#price_sell\", \"101\")\n self.click('#submit-sell')\n # Assert that the valid error message is shown.\n self.assert_text(\"Ticket price outside of valid range\", \"#message\")\n\n # logout to invalidate any logged in session\n self.open(base_url + '/logout')\n # login a user\n self.open(base_url + '/login')\n # fill email and password\n self.type(\"#email\", \"[email protected]\")\n self.type(\"#password\", \"Test_frontend@\")\n # click enter button\n self.click('input[type=\"submit\"]')\n # open the /sell route\n self.open(base_url)\n # Enter an invalid ticket name\n self.type('#name_sell', \"testticket\")\n self.type(\"#quantity_sell\", \"1\")\n self.type(\"#price_sell\", \"9\")\n self.click('#submit-sell')\n # Assert that the valid error message is shown.\n self.assert_text(\"Ticket price outside of valid range\", \"#message\")",
"def validate_product_quantity(item, qty):\n return True",
"def check_price(self):\n if self.price < 0:\n self.raise_user_error(\"negative_amount\")",
"def checker(self, product):\n for item in self.instock:\n if item == product:\n return True\n return False",
"def pay_for_item(self, item):\n while self.amount < item.price:\n paid_amount = float(input(f\"Pay €{round((item.price - self.amount), 2)} : \"))\n if paid_amount <= 0:\n custom_log(\"Invalid amount entered.\", MSG_ERROR)\n continue\n self.amount = self.amount + paid_amount",
"def purchase(self, item_type):",
"def check_symbol_price(self, data):\n if self.input_price < float(data.get(\"price\")):\n logging.info(\"Symbol price is higher than the input provided by the user.\")\n logging.info(\"Input Price :- \")\n logging.info(str(self.input_price))\n logging.info(\"Symbol Price :- \")\n logging.info(str(data.get(\"price\")))\n logging.info(\"+++++++++++++++++++++++++++++\")",
"def __call__(self, data):\n if sum(item_data['amount'] for item_data in data) < self.order.total_cost:\n raise ValidationError({\n api_settings.NON_FIELD_ERRORS_KEY: self.message,\n })",
"def test_price_details_number(self):\n with self.client:\n response = self.add_meal(\"beef\", \"jasmine\")\n data = json.loads(response.data.decode())\n self.assertEqual(data.get('message'),\n \"Price must be a number\")\n self.assertEqual(response.status_code, 400)",
"def is_satisfied(self, item: Any) -> bool:"
] | [
"0.69587696",
"0.69523215",
"0.6614124",
"0.65662354",
"0.6559915",
"0.65482956",
"0.63033843",
"0.6295743",
"0.61983514",
"0.61921096",
"0.6160937",
"0.6149853",
"0.6114186",
"0.58709925",
"0.57739615",
"0.5763249",
"0.57321095",
"0.5725347",
"0.57172257",
"0.57046336",
"0.5696734",
"0.5691517",
"0.5659365",
"0.56344044",
"0.56273633",
"0.56091374",
"0.55864257",
"0.5585893",
"0.5583613",
"0.55564004"
] | 0.7701968 | 0 |
Implementing click view price list detail page inline action button functionality | def click_view_price_list_detail_page_inline_action_button(self, price_list_item):
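    # Click the inline action button in the given price-list row of the view-price-list grid, then wait for the AJAX spinner.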
self.click_inline_action_button(self.view_price_list_div_id, price_list_item, self.view_price_list_column_number)
self.wait_for_ajax_spinner_load() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def click_view_price_list_detail_first_row_inline_action_button(self):\n self.click_inline_action_button(self.view_price_list_div_id, None, self.view_price_list_column_number, True)\n self.wait_for_ajax_spinner_load()",
"def click_vendor_price_list_grid_first_row_inline_action_button(self):\n self.click_inline_action_button(self.vendor_price_list_grid_div_id, None, self.view_price_list_column_number, True)",
"def click_buy_page_inline_action_button(self, vendor):\n self.click_inline_action_button(self.vendors_div_id, vendor, self.grid_column_number)",
"def action(self,item):\r\n pass",
"def click_vendor_price_list_detail_dial_digits_grid_export_to_excel_button(self):\n self.click_grid_export_to_excel_button(self.vendor_price_list_detail_dial_digits_grid_div_id)",
"def click_vendor_price_list_detail_rates_grid_export_to_excel_button(self):\n self.click_grid_export_to_excel_button(self.vendor_price_list_detail_rates_grid_div_id)",
"def click_buy_and_sell_deal_bulk_edit_button(self):\n self.click_element(self.buy_and_sell_deal_bulk_edit_button_lcoator, True)",
"def click_compare_price_lists_button(self):\n self.click_element(self.compare_price_lists_button_locator, True)",
"def click(self):\r\n pass",
"def click_vendor_price_list_detail_reference_rates_grid_export_to_excel_button(self):\n self.click_grid_export_to_excel_button(self.vendor_price_list_detail_reference_rates_grid_div_id)",
"def click_buy_and_sell_deal_management_grid_first_row_inline_action_button(self):\n self.click_inline_action_button(self.buy_and_sell_management_grid_div_id, None, self.buy_and_sell_management_grid_inline_action_column_number, True)",
"def on_click ( self, object ):\n pass",
"def DoAction(self,event):\r\n selections = self.list.GetSelections()\r\n if not selections: return bell()\r\n itemDex = selections[0]\r\n item = self.items[itemDex]\r\n self.data.action(item)",
"def onClick(*args):",
"def onClick(*args):",
"def onClick(*args):",
"def onClick(*args):",
"def on_click(self) -> None:\n pass",
"def click_target_buy_policy_grid_first_row_inline_action_button(self):\n self.click_inline_action_button(self.target_buy_policies_grid_div_id, None, column_number=2, first_row=True)\n self.wait_for_ajax_spinner_load()",
"def action(self):\n pass",
"def action(self):\n pass",
"def OnMidClick(self, event):\n\n # note bdaqmid is a string so we need to convert to int here\n bdaqmid = int(event.GetEventObject().GetURL())\n\n bdaqname = self.mstore.get_name_from_BDAQmid(bdaqmid)\n\n # show the price panel for the market selected\n self.app.frame.GoToPricePanel(bdaqname, bdaqmid)",
"def __actions__(self, obj):\n\t\t\tprimary_fields \t= self.__provider__.get_primary_fields(self.__entity__)\n\t\t\tpklist \t\t= '/'.join(map(lambda x: str(getattr(obj, x)), primary_fields))\n\n\t\t\tvalue \t\t= '<div>'\n\t\t\tif has_permission('editar_LB'):\n\t\t\t\tvalue = value + '<div><a class=\"edit_link\" href=\"'+pklist+'/edit\" style=\"text-decoration:none\">edit</a></div>'\n\t\t\tif has_permission('eliminar_LB'):\n\t\t\t\tvalue = value + '<div><form method=\"POST\" action=\"'+pklist+'\" class=\"button-to\"><input type=\"hidden\" name=\"_method\" value=\"DELETE\" /><input class=\"delete-button\" onclick=\"return confirm(\\'Está seguro que desea eliminar?\\');\" value=\"delete\" type=\"submit\" style=\"background-color: transparent; float:left; border:0; color: #286571; display: inline; margin: 0; padding: 0;\"/></form></div>'\n\t\t\tvalue = value + '</div>'\n\t\t\treturn value",
"def click_target_buy_policies_grid_first_row_inline_action_button(self):\n self.click_inline_action_button(self.target_buy_policies_grid_div_id, None, self.target_buy_policies_grid_inline_action_column_number, first_row=True)",
"def select_vendor_price_list_detail_reference_rates_tab(self):\n self.click_element(self.vendor_price_list_details_reference_rates_tab_locator, True)",
"def onClick(self, *value):\n self.dbgprint(\"[CLASS CB]item clicked w/ value: %r\"%(value))",
"def click_edit_target_buy_policy_button(self):\n self.click_element(self.edit_target_buy_policy_button_locator)",
"def is_view_price_list_detail_present(self):\n return self.is_element_present(self.view_price_list_detail_locator)",
"def select_vendor_price_list_detail_dial_digits_tab(self):\n self.click_element(self.vendor_price_list_details_dial_digits_tab_locator, True)",
"def pricing_view(request, simulation):\n return TollListView.as_view()(request, simulation=simulation, )"
] | [
"0.7218018",
"0.6463628",
"0.6188844",
"0.6057789",
"0.58169675",
"0.57019943",
"0.56886107",
"0.5678829",
"0.55927885",
"0.55755067",
"0.55638826",
"0.5536148",
"0.5473998",
"0.5421182",
"0.5421182",
"0.5421182",
"0.5421182",
"0.5415894",
"0.5382531",
"0.5321642",
"0.5321642",
"0.53206956",
"0.5306234",
"0.52605015",
"0.52579117",
"0.5251199",
"0.5248551",
"0.5240074",
"0.5232683",
"0.52307135"
] | 0.79761547 | 0 |
Implementing click create vendor button functionality | def click_create_vendor_button(self):
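    # Wait for the create-vendor button to become clickable, click it, then wait for the AJAX spinner.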
create_vendor_element = self.wait().until(EC.element_to_be_clickable(self.create_vendor_locator), "create vendor locator not found before specified time out")
create_vendor_element.click()
self.wait_for_ajax_spinner_load() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def click_buy_and_sell_deal_create_button(self):\n self.click_element(self.save_vendor_profile_locator)",
"def test_create_custom_button(self):\n pass",
"def goto_create(self):\n\n self.create.click()",
"def generate_buttons(self):\n raise Exception('Implement me!')",
"def create_button(self) -> MyButton:\n pass",
"def click_buy_page_inline_action_button(self, vendor):\n self.click_inline_action_button(self.vendors_div_id, vendor, self.grid_column_number)",
"def test_get_custom_button(self):\n pass",
"def click_vendor_grid_add_destination_button(self):\n self.click_element(self.vendor_grid_add_destination_locator)",
"def make_chose_btn(self):\n self.chose_btn_accept = Button(text='Accept', pos_hint={'x': 0.31, 'y': 0.201}, size_hint=(0.33, 0.098),\n background_color=(0.81640625, 0.3125, 0.43359375, 1), background_normal='')\n self.chose_btn_accept.bind(on_release=self.download_chosen)\n self.add_widget(self.chose_btn_accept)\n self.chose_btn_canel = Button(text='Canel', pos_hint={'x': 0.66, 'y': 0.201}, size_hint=(0.33, 0.098),\n background_color=(0.81640625, 0.3125, 0.43359375, 1), background_normal='')\n self.chose_btn_canel.bind(on_release=self.canel_chose)\n self.add_widget(self.chose_btn_canel)",
"def create_buttons(self):\r\n return []",
"def setECVButton(self):\n self.ECVButton = qt.QPushButton(\"Create ECV Map\")\n self.ECVButton.toolTip = \"Create the ECV map with the volumes selected as Native and Enhanced LL\"\n self.ECVButton.enabled = False\n self.ECVcollButton_Layout.addRow(self.ECVButton)",
"def click_create_an_app(self):\r\n self.log.info('clicking on create app button')\r\n button = self.driver.find_element_by_xpath(\"//a[contains(text(),'Create an App')]\")\r\n button.click()\r\n time.sleep(5)",
"def click(self):\r\n pass",
"def click_request_new_deal_button(self):\n self.click_element(self.request_new_deal_button_locator)",
"def set_button_to_create(self):\n self.configuration.disable_validations = False\n self.create_tool_button.removeAction(self.set_button_to_create_action)\n self.create_tool_button.removeAction(self.edit_command_action)\n self.create_tool_button.addAction(\n self.set_button_to_create_without_constraints_action\n )\n self.create_tool_button.addAction(self.edit_command_action)\n self.create_tool_button.setText(self.create_text)",
"def __create_button(self, parent, flag):\n btns = {\n wx.ID_OK: (MSG_ACTION_OK, \"ok\"),\n wx.ID_CANCEL: (MSG_ACTION_CANCEL, \"cancel\"),\n wx.ID_YES: (MSG_ACTION_YES, \"yes\"),\n wx.ID_NO: (MSG_ACTION_NO, \"no\"),\n wx.ID_APPLY: (MSG_ACTION_APPLY, \"apply\"),\n wx.ID_CLOSE: (MSG_ACTION_CLOSE, \"close-window\"),\n wx.ID_SAVE: (MSG_ACTION_SAVE, \"save\"),\n }\n btn = sppasBitmapTextButton(parent, label=btns[flag][0], name=btns[flag][1])\n btn.SetId(flag)\n\n if flag == wx.CANCEL:\n self.SetAffirmativeId(wx.ID_CANCEL)\n\n elif flag in (wx.CLOSE, wx.OK):\n btn.SetDefault()\n btn.SetFocus()\n self.SetAffirmativeId(flag)\n\n elif flag == wx.YES:\n self.SetAffirmativeId(wx.ID_YES)\n\n elif flag == wx.OK:\n btn.SetDefault()\n\n return btn",
"def test_update_custom_button(self):\n pass",
"def __createButton(self):\r\n self.button = QPushButton(\"Plot\") # text diplayed on the button\r\n self.button.setShortcut(\"Ctrl+P\") # adding a shortcut \r\n self.button.clicked.connect(self.__onClick) # connect it to the __onClick function\r",
"def newDragBtn(self, color, selected, name, parent, width, height, tabIndex):\n btn = drag.DragButton(color, selected, self, name ) #create new draggable button\n btn.setParent(parent)\n btn.resize(width, height)\n btn.show() #show button\n logger.info(\"new button: %s\" % name)\n\n #add to objects dictionary\n if selected != None:\n for i in selected:\n if(i in self.objects[tabIndex]):\n self.objects[tabIndex][str(i)].append(btn) #add to array of buttons\n else:\n self.objects[tabIndex][str(i)]=[btn] #create array of buttons\n logger.debug(self.objects)\n else:\n logger.error(\"nothing is being connected to button\")\n\n return btn",
"def create_buttons(self):\n\t\t\n\t\tbutton_add = Gtk.ToolButton()\n\t\tbutton_add.set_icon_name(\"gtk-add\")\n\t\tbutton_add.set_sensitive(False)\n\t\tbutton_add.set_tooltip_text(_(\"Create new device\"))\n\t\tself.toolbar.insert(button_add, 0)\n\t\tself.buttons[\"add\"] = button_add\n\t\tbutton_add.connect(\"clicked\", self.on_add_clicked)\n\t\t\n\t\tbutton_delete = Gtk.ToolButton()\n\t\tbutton_delete.set_icon_name(\"gtk-delete\")\n\t\tbutton_delete.set_sensitive(False)\n\t\tbutton_delete.set_tooltip_text(_(\"Delete selected device\"))\t\t\n\t\tself.toolbar.insert(button_delete, 1)\n\t\tself.buttons[\"delete\"] = button_delete\n\t\tbutton_delete.connect(\"clicked\", self.on_delete_clicked)\n\t\t\n\t\tself.toolbar.insert(Gtk.SeparatorToolItem(), 2)\n\t\t\n\t\tbutton_edit = Gtk.ToolButton()\n\t\tbutton_edit.set_icon_name(\"gtk-edit\")\n\t\tbutton_edit.set_sensitive(False)\n\t\tbutton_edit.set_tooltip_text(_(\"Edit or resize device\"))\n\t\tself.toolbar.insert(button_edit, 3)\n\t\tself.buttons[\"edit\"] = button_edit\n\t\tbutton_edit.connect(\"clicked\", self.on_edit_clicked)\n\t\t\n\t\t\n\t\tbutton_umount = Gtk.ToolButton()\n\t\tbutton_umount.set_icon_name(\"emblem-readonly\")\n\t\tbutton_umount.set_sensitive(False)\n\t\tbutton_umount.set_tooltip_text(_(\"Unmount selected device\"))\n\t\tself.toolbar.insert(button_umount, 4)\n\t\tself.buttons[\"umount\"] = button_umount\n\t\tbutton_umount.connect(\"clicked\", self.on_umount_clicked)\n\t\t\n\t\tself.toolbar.insert(Gtk.SeparatorToolItem(), 5)\n\t\t\n\t\tbutton_apply = Gtk.ToolButton()\n\t\tbutton_apply.set_icon_name(\"gtk-apply\")\n\t\tbutton_apply.set_sensitive(False)\n\t\tbutton_apply.set_tooltip_text(_(\"Apply queued actions\"))\n\t\tself.toolbar.insert(button_apply, 6)\n\t\tself.buttons[\"apply\"] = button_apply\n\t\tbutton_apply.connect(\"clicked\", self.on_apply_clicked)\n\t\t\n\t\tbutton_clear = Gtk.ToolButton()\n\t\tbutton_clear.set_icon_name(\"gtk-clear\")\n\t\tbutton_clear.set_sensitive(False)\n\t\tbutton_clear.set_tooltip_text(_(\"Clear queued actions\"))\n\t\tself.toolbar.insert(button_clear, 7)\n\t\tself.buttons[\"clear\"] = button_clear\n\t\tbutton_clear.connect(\"clicked\", self.on_clear_clicked)",
"def onVendor(self, action):\n\n if not self.graphicsView.hasImage():\n self.actionVendor.setChecked(False)\n self.showImageSelectionMessageBox()\n return\n\n self.actionVendor.setChecked(True)\n if not hasattr(self.actionVendor, 'tag'):\n self.actionVendor.tag = PlacePolygonCommand.PlacePolygonCommand(self.graphicsView)\n self.actionVendor.tag.onSuccess.connect(self.onVendorCreated)\n self.actionVendor.tag.onRejected.connect(self.onCommandRejected)\n\n self.graphicsView.command = self.actionVendor.tag",
"def onVendorCreated(self):\n\n try:\n count = len(self.actionVendor.tag._polyline._vertices)\n if count > 2:\n points = []\n for point in self.actionVendor.tag._polyline._vertices:\n points.append(QPoint(round(point[0]), round(point[1])))\n polygon = QPolygonF(points)\n item = QEngineeringVendorItem(polygon, pack_type=self.packageComboBox.currentText())\n item.area = 'Drawing'\n item.transfer.onRemoved.connect(self.itemRemoved)\n self.graphicsView.scene().addItem(item)\n finally:\n self.graphicsView.scene().removeItem(self.actionVendor.tag._polyline)\n self.actionVendor.tag.reset()",
"def create_new_collection_btn(self):\n create_new_collection_btn_sitem = self.locator_finder_by_id(self.create_new_collection_btn_id)\n create_new_collection_btn_sitem.click()\n time.sleep(3)",
"def click_create_new_statement_button(self):\n self.click_element(self.create_new_statement_button_locator)",
"def on_toolButton_clicked(self):\n # TODO: not implemented yet\n raise NotImplementedError",
"def click_bulk_add_button(self):\n self.click_element(self.bulk_add_button_locator, True)",
"def createButtonsOnInterface(self, dlg):\n #reclassification dictionary made from the field setup file\n self.reclassificationDict = dlg.makeReclassificationDict()\n #button size defined by the user\n self.size = dlg.slider.value()\n #check if the button must be grouped by category\n withTabs = dlg.checkBox.isChecked()\n #actual button creation step\n self.createButtons(self.reclassificationDict, withTabs)",
"def add_create_pl_btn(self):\n self.create_pl = QPushButton(\"Add to playlist\")\n self.create_pl.clicked.connect(self.pl_btn_push)\n self.hbtnbox.addWidget(self.create_pl)",
"def test_delete_custom_button(self):\n pass",
"def click_button(self):\n self.q(css='div#fixture button').first.click()"
] | [
"0.71414775",
"0.6954892",
"0.6602717",
"0.64814246",
"0.6473455",
"0.628929",
"0.6244608",
"0.6196933",
"0.61187863",
"0.60596347",
"0.6059178",
"0.5956047",
"0.59116757",
"0.5863656",
"0.5848215",
"0.5848059",
"0.58261234",
"0.5824615",
"0.58196187",
"0.58189887",
"0.57997817",
"0.5773015",
"0.57690907",
"0.5762258",
"0.57491225",
"0.5727492",
"0.57250226",
"0.5723119",
"0.57184935",
"0.5713874"
] | 0.7178269 | 0 |