query (string, 9-9.05k chars) | document (string, 10-222k chars) | metadata (dict) | negatives (list of 30) | negative_scores (list of 30) | document_score (string, 4-10 chars) | document_rank (string, 2 classes) |
---|---|---|---|---|---|---|
Sets up the regexp for parsing out IP addresses from the 'ip neighbor' command and passes it along to the parser function. | def _parse_ip_table_neigh(self, ip_output):
    ip_regex = re.compile(r"(.*?)\s+.*lladdr\s+(.*?)\s+")
    return self._parse_mac_addr_table(ip_output, ip_regex) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __call__(self, parser, namespace, values, option_string=None):\n ip_split = values.split(\",\")\n [ip_address(ip) for ip in ip_split]\n setattr(namespace, self.dest, ip_split)",
"def parse_ip(self, ip):\n if not ip in self.ip_list:\n try:\n ip_address = ipaddress.ip_address(ip)\n use = not (\n ip_address.is_multicast or ip_address.is_unspecified or ip_address.is_reserved or ip_address.is_loopback or ip_address.is_link_local)\n if use and (self.include_public or ip_address.is_private):\n self.new_ip(ip)\n network = ipaddress.IPv4Network(\"{}/{}\".format(ip,\n self.netmask), strict=False)\n self.new_range(str(network))\n except ValueError:\n pass",
"def ip(self, mess, args):\n return '%s\\n' % urlgrabber.urlread('http://whatismyip.org')",
"def prepare_regexps(self):\r\n print(\"Preparing regular expressions for this session.\")\r\n privmsg_parse = re.compile(\"\")",
"def _update_ips(self):\n self.ip_others = []\n ips = self.mesh.ipaddr()\n self.rloc16 = self.mesh.rloc()\n for line in ips:\n if line.startswith('fd'):\n # Mesh-Local unicast IPv6\n try:\n addr = int(line.split(':')[-1], 16)\n except Exception:\n continue\n if addr == self.rloc16:\n # found RLOC\n # RLOC IPv6 has x:x:x:x:0:ff:fe00:RLOC16\n self.rloc = line\n elif ':0:ff:fe00:' not in line:\n # found Mesh-Local EID\n self.ip_eid = line\n elif line.startswith('fe80'):\n # Link-Local\n self.ip_link = line\n else:\n self.ip_others.append(line)",
"def parseHostList( ipstring ):\r\n\r\n # ideally, we should be able to handle these cases:\r\n # w.x.y.z, .x.y.z, .y.z, .z\r\n # w.x.y.a-b, .x.y.a-b, .x.a-b, .a-b\r\n # w.x.y.z-a.b.c.d, w.x.y-a.b.c, w.x-a.b, w-a\r\n # we also need to be able to parse CIDR ranges. Urgh. w.x.y.z/0\r\n \r\n # ...but for the sake of simplicity we'll implement a subset, consisting of these cases:\r\n # 1. w.x.y.z\r\n # 2. w.x.y.z1-zN\r\n # 3. .z1-.zN\r\n\r\n currentNetwork = '0.0.0'\r\n groups = ipstring.split(',') \r\n iplist = []\r\n for i in groups:\r\n\r\n octets = i.split('.')\r\n if len(octets) == 4: # cases 1 and 2\r\n currentNetwork = \"%s.%s.%s\" % (octets[0],octets[1],octets[2])\r\n iprange = getRange(octets[3])\r\n ips = [\"%s.%s\" % (currentNetwork,i) for i in iprange]\r\n\r\n elif len(octets) == 2: # case 3\r\n network = currentNetwork\r\n iprange = getRange(octets[1])\r\n ips = [\"%s.%s\" % (currentNetwork,i) for i in iprange]\r\n \r\n else:\r\n print 'syntax error in specifying host list!'\r\n sys.exit(1)\r\n \r\n iplist += ips\r\n\r\n return uniq(iplist) # get rid of repeats\r",
"def address_regex(self) -> Any:",
"def _search_regx(self, regx_pattern):\n matches = re.finditer(regx_pattern, self._logfile, re.MULTILINE)\n\n for match in matches:\n self._ip_adresses.append(match.group())",
"def integrated_address_regex(self) -> Any:",
"def _init_ipaddress_ops(self):\n\n # retrieve local and external IPs\n all_ips_str = set(self.statistics.process_db_query(\"all(ipAddress)\", print_results=False))\n # external_ips_str = set(self.statistics.process_db_query(\"ipAddress(macAddress=%s)\" % self.get_probable_router_mac(), print_results=False)) # including router\n # local_ips_str = all_ips_str - external_ips_str\n external_ips = set()\n local_ips = set()\n all_ips = set()\n\n self.contains_priv_ips = False\n self.priv_ip_segment = None\n\n # convert IP strings to IPv4.IPAddress representation\n for ip in all_ips_str:\n if is_ipv4(ip):\n ip = IPAddress.parse(ip)\n # exclude local broadcast address and other special addresses\n if (not str(ip) == \"255.255.255.255\") and (not ip.is_localhost()) and (not ip.is_multicast()) and (\n not ip.is_reserved()) and (not ip.is_zero_conf()):\n all_ips.add(ip)\n\n for ip in all_ips:\n if ip.is_private():\n local_ips.add(ip)\n\n external_ips = all_ips - local_ips\n\n # save the certain unused local IPs of the network\n # to do that, divide the unused local Addressspace into chunks of (chunks_size) Addresses\n # initally only the first chunk will be used, but more chunks can be added to the pool of unused_local_ips if needed\n self.min_local_ip, self.max_local_ip = min(local_ips), max(local_ips)\n local_ip_range = (self.max_local_ip.to_int()) - (self.min_local_ip.to_int() + 1)\n if local_ip_range < 0:\n # for min,max pairs like (1,1), (1,2) there is no free address in between, but for (1,1) local_ip_range may be -1, because 1-(1+1)=-1\n local_ip_range = 0\n\n # chunk size can be adjusted if needed\n self.chunk_size = 200\n\n self.current_chunk = 1\n if local_ip_range < self.chunk_size:\n # there are not more than chunk_size unused IP Addresses to begin with\n self.chunks = 0\n self.chunk_remainder = local_ip_range\n else:\n # determine how many chunks of (chunk_size) Addresses there are and the save the remainder\n self.chunks = local_ip_range // self.chunk_size\n self.chunk_remainder = local_ip_range % self.chunk_size\n\n # add the first chunk of IP Addresses\n self.unused_local_ips = set()\n self.expand_unused_local_ips()\n\n # save the gathered information for efficient later use\n self.external_ips = frozenset(external_ips)\n self.remaining_external_ips = external_ips\n self.max_uncertain_local_ip = self.max_local_ip\n self.local_ips = frozenset(local_ips)\n # print(\"External IPS: \" + str(external_ips))\n # print(\"LOCAL IPS: \" + str(local_ips))\n self.remaining_local_ips = local_ips\n self.uncertain_local_ips = set()",
"def get_ip_pattern(ip):\n return re.compile(ip.replace('.', '[.]'))",
"def __init__(self):\n self.networks = [\n ipaddress.ip_network(address)\n for address in self.addresses\n ]",
"def recompose_ip(self, wl):\n ips = []\n r = re.search('([\\d\\.]+)\\(([\\d\\|]+)\\)', wl)\n base = r.group(1)\n items = re.split(r'[|]',r.group(2))\n for i in items:\n ips.append(\"%s%s\" % (base, i))\n return ips",
"def extract_ipaddress(string):\n pattern = r\"((([01]?[0-9]?[0-9]|2[0-4][0-9]|25[0-5])[ (\\[]?(\\.|dot)[ )\\]]?){3}([01]?[0-9]?[0-9]|2[0-4][0-9]|25[0-5]))\"\n ips = list()\n h_map = list()\n for match in re.finditer(pattern, string):\n if match.group(0) not in h_map:\n ips.append(wrap_value_with_context(match.group(0),match.start(),match.end()))\n h_map.append(match.group(0))\n\n return ips",
"def callback(self, pkt):\n if ARP in pkt:\n self.parse_ip(pkt.sprintf(\"%ARP.psrc%\"))\n if TCP in pkt or UDP in pkt:\n self.parse_ip(pkt.sprintf(\"%IP.src%\"))\n self.parse_ip(pkt.sprintf(\"%IP.dst%\"))",
"def __init__(self, ip, mask):\n self.vip = ip\n self.mask = mask",
"def handle_ip(bot, ievent):\n try:\n item = ievent.args[0]\n except IndexError:\n ievent.missing('<hostname>')\n return\n try:\n ipnr = socket.gethostbyname(item)\n ievent.reply(ipnr)\n except:\n ievent.reply(\"can't match \" + str(item))",
"def _parse_ip_addr_show(raw_result):\n # does link exist?\n show_re = (\n r'\"(?P<dev>\\S+)\"\\s+does not exist'\n )\n re_result = search(show_re, raw_result)\n result = None\n\n if not (re_result):\n # match top two lines for serveral 'always there' variables\n show_re = (\n r'\\s*(?P<os_index>\\d+):\\s+(?P<dev>\\S+):\\s+<(?P<falgs_str>.*)?>.*?'\n r'mtu\\s+(?P<mtu>\\d+).+?state\\s+(?P<state>\\w+).*'\n r'\\s*link/(?P<link_type>\\w+)\\s+(?P<mac_address>\\S+)'\n )\n\n re_result = search(show_re, raw_result, DOTALL)\n result = re_result.groupdict()\n\n # seek inet if its there\n show_re = (\n r'((inet )\\s*(?P<inet>[^/]+)/(?P<inet_mask>\\d{1,2}))'\n )\n re_result = search(show_re, raw_result)\n if (re_result):\n result.update(re_result.groupdict())\n\n # seek inet6 if its there\n show_re = (\n r'((?<=inet6 )(?P<inet6>[^/]+)/(?P<inet6_mask>\\d{1,2}))'\n )\n re_result = search(show_re, raw_result)\n if (re_result):\n result.update(re_result.groupdict())\n\n # cleanup dictionary before returning\n for key, value in result.items():\n if value is not None:\n if value.isdigit():\n result[key] = int(value)\n\n return result",
"def parse_ip_addr(data):\n # 2: eth0: <BROADCAST,MULTICAST,UP,LOWER_UP> mtu 1500 qdisc pfifo_fast state UP qlen 1000\n # link/ether 52:54:00:a0:b9:b6 brd ff:ff:ff:ff:ff:ff\n # inet 10.133.58.56/20 brd 10.133.63.255 scope global eth0\n # valid_lft 85266sec preferred_lft 85266sec\n # inet6 2001:1bc8:100:6::f301/64 scope global\n # valid_lft forever preferred_lft forever\n for iface in _IP_ADDR_SPLIT_RE.split(data.strip()):\n if not iface:\n continue\n lines = [l.strip() for l in iface.splitlines()]\n info = {\n \"name\": lines.pop(0).partition(\":\")[0],\n \"ip-addresses\": [],\n \"hardware-address\": None,\n }\n for line in lines:\n words = line.split()\n if words[0].startswith(\"link/\") and len(words) >= 2:\n info[\"hardware-address\"] = words[1]\n elif words[0] in (\"inet\", \"inet6\"):\n addrtype = \"ipv6\" if words[0] == \"inet6\" else \"ipv4\"\n addr, _, prefix = words[1].partition(\"/\")\n if prefix == '':\n prefix = 128 if addrtype == \"ipv6\" else 32\n info[\"ip-addresses\"].append({\"ip-address-type\": addrtype, \"ip-address\": addr, \"prefix\": int(prefix)})\n yield info",
"def _parse(x, cliargs=CliArg(), heap=HeapGate()):\n country = re.findall(r'/images/flags/(..)\\.png', x)\n if not country:\n return 0\n country = country[0]\n ip = re.findall(r'\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}', x)\n if not ip:\n return 0\n ip = ip[0]\n users = re.findall(r'Total.*?(\\d{1,3}.*?)users', x)\n if not users:\n return 0\n users = int(users[0].replace(',',''))\n mbps = re.findall(r'(\\d{1,3}\\.\\d{1,2})\\sMbps', x)\n if not mbps:\n return 0\n mbps = float(mbps[0])\n ms = re.findall(r'(\\d{1,10})\\sms', x)\n if not ms:\n return 0\n ms = int(ms[0])\n vpn = re.findall(r'(do_openvpn[^\\'\" >]+)', x)\n if not vpn:\n return 0\n vpn = cliargs._site+vpn[0]\n node = OpenNode(string=x,country=country,ip=ip,total=users,mbps=mbps,ms=ms,vpn=vpn)\n \"check if vpn fits wanted cli arguments\"\n if cliargs._parse_cliargs(node, heap):\n heap.insert_node(node)\n return 1\n return 0",
"def adjust_ip (self, ip=None):\n if ip != None and ip.haslayer(IP):\n if (self.type == 0x11):\n if (self.gaddr == \"0.0.0.0\"):\n ip.dst = \"224.0.0.1\" # IP rule 1\n retCode = True \n elif isValidMCAddr(self.gaddr):\n ip.dst = self.gaddr # IP rule 3a\n retCode = True\n else:\n print \"Warning: Using invalid Group Address\"\n retCode = False\n elif ((self.type == 0x17) and isValidMCAddr(self.gaddr)):\n ip.dst = \"224.0.0.2\" # IP rule 2\n retCode = True\n elif ((self.type == 0x12) or (self.type == 0x16)) and (isValidMCAddr(self.gaddr)):\n ip.dst = self.gaddr # IP rule 3b\n retCode = True\n else:\n print \"Warning: Using invalid IGMP Type\"\n retCode = False\n else:\n print \"Warning: No IGMP Group Address set\"\n retCode = False\n if retCode == True:\n ip.ttl=1 # IP Rule 4\n ip.options=[IPOption_Router_Alert()] # IP rule 5\n return retCode",
"def handle_filter(packets, arg, arguments):\r\n matched_packets = []\r\n if arg == \"host\":\r\n if len(arguments) == 0:\r\n print(\"A host IP address should be followed by the host command.\")\r\n sys.exit()\r\n else:\r\n # ip address here\r\n arg = arguments.popleft()\r\n for pkt in packets:\r\n dest_ip = pkt[1][10]\r\n src_ip = pkt[1][9]\r\n if arg == dest_ip or arg == src_ip:\r\n matched_packets.append(pkt)\r\n elif arg == \"ip\":\r\n for pkt in packets:\r\n if str(pkt[0][3]) == \"0800\":\r\n matched_packets.append(pkt)\r\n elif arg == \"port\":\r\n if len(arguments) == 0:\r\n print(\"\\\"port\\\" cannot be the last argument.\")\r\n sys.exit()\r\n else:\r\n # port number\r\n arg = arguments.popleft()\r\n\r\n for pkt in packets:\r\n if pkt[1][7] == 6 or pkt[1][7] == 17:\r\n if str(pkt[2][0]) == arg or str(pkt[2][1]) == arg:\r\n matched_packets.append(pkt)\r\n\r\n elif arg == \"tcp\":\r\n for pkt in packets:\r\n if pkt[1][7] == 6:\r\n matched_packets.append(pkt)\r\n elif arg == \"udp\":\r\n for pkt in packets:\r\n if pkt[1][7] == 17:\r\n matched_packets.append(pkt)\r\n elif arg == \"icmp\":\r\n for pkt in packets:\r\n if pkt[1][7] == 1:\r\n matched_packets.append(pkt)\r\n elif arg == \"net\":\r\n if len(arguments) == 0:\r\n print(\"\\\"net net\\\" is required. \")\r\n sys.exit()\r\n else:\r\n # ip prefix\r\n arg = arguments.popleft()\r\n if len(arg.split(\".\")) != 4:\r\n print(\"Please enter a valid ip address format. (x.x.x.x)\")\r\n sys.exit()\r\n prefix_length = 0\r\n length = len(arg)\r\n if arg == \"0.0.0.0\":\r\n prefix_length = 0\r\n elif arg[length - 6:length] == \".0.0.0\":\r\n prefix_length = length - 6\r\n elif arg[length - 4:length] == \".0.0\":\r\n prefix_length = length - 4\r\n elif arg[length - 2:length] == \".0\":\r\n prefix_length = length - 2\r\n else:\r\n prefix_length = length\r\n\r\n for pkt in packets:\r\n if pkt[1][9][0:prefix_length] == arg[0:prefix_length] or pkt[1][10][0:prefix_length] == \\\r\n arg[0:prefix_length]:\r\n matched_packets.append(pkt)\r\n\r\n elif arg == \"not\":\r\n if len(arguments) == 0:\r\n print(\"\\\"not\\\" cannot be the last argument.\")\r\n sys.exit()\r\n else:\r\n arg = arguments.popleft()\r\n if arg == \"host\":\r\n if len(arguments) == 0:\r\n print(\"A host IP address should be followed by the host command.\")\r\n sys.exit()\r\n else:\r\n # ip address here\r\n arg = arguments.popleft()\r\n for pkt in packets:\r\n dest_ip = pkt[1][10]\r\n src_ip = pkt[1][9]\r\n if arg != dest_ip and arg != src_ip:\r\n matched_packets.append(pkt)\r\n elif arg == \"ip\":\r\n for pkt in packets:\r\n if str(pkt[0][3]) != \"0800\":\r\n matched_packets.append(pkt)\r\n elif arg == \"port\":\r\n if len(arguments) == 0:\r\n print(\"\\\"port\\\" cannot be the last argument.\")\r\n sys.exit()\r\n else:\r\n # port number\r\n arg = arguments.popleft()\r\n for pkt in packets:\r\n if pkt[1][7] == 6 or pkt[1][7] == 17:\r\n if str(pkt[2][0]) != arg and str(pkt[2][1]) != arg:\r\n matched_packets.append(pkt)\r\n elif arg == \"tcp\":\r\n for pkt in packets:\r\n if pkt[1][7] != 6:\r\n matched_packets.append(pkt)\r\n elif arg == \"udp\":\r\n for pkt in packets:\r\n if pkt[1][7] != 17:\r\n matched_packets.append(pkt)\r\n elif arg == \"icmp\":\r\n for pkt in packets:\r\n if pkt[1][7] != 1:\r\n matched_packets.append(pkt)\r\n elif arg == \"net\":\r\n if len(arguments) == 0:\r\n print(\"\\\"net net\\\" is required. \")\r\n sys.exit()\r\n else:\r\n # ip prefix\r\n arg = arguments.popleft()\r\n if len(arg.split(\".\")) != 4:\r\n print(\"Please enter a valid ip address format. 
(x.x.x.x)\")\r\n sys.exit()\r\n prefix_length = 0\r\n\r\n length = len(arg)\r\n if arg == \"0.0.0.0\":\r\n prefix_length = 0\r\n\r\n elif arg[length - 6:length] == \".0.0.0\":\r\n\r\n prefix_length = length - 6\r\n elif arg[length - 4:length] == \".0.0\":\r\n prefix_length = length - 4\r\n elif arg[length - 2:length] == \".0\":\r\n prefix_length = length - 2\r\n else:\r\n prefix_length = length\r\n for pkt in packets:\r\n if pkt[1][9][0:prefix_length] != arg[0:prefix_length] and pkt[1][10][0:prefix_length] != \\\r\n arg[0:prefix_length]:\r\n matched_packets.append(pkt)\r\n\r\n return matched_packets, arg",
"def _parse_ip(self, ip_html, style):\n def is_int(val):\n try:\n int(val.text.strip())\n return True\n except:\n return False\n\n blocks = ip_html.findAll('span')\n\n # Clear display none\n blocks = filter(lambda x: 'none' not in x.get('style', ''), blocks)\n\n # Filter non integer\n blocks = filter(is_int, blocks)\n\n # Filter by class\n hidden = map(lambda x: x[0], filter(lambda x: 'none' in x[1], self.CLASS_MATCH.findall(style)))\n items = []\n for block in blocks:\n if block.get('class') in hidden:\n continue\n items.append(block.text)\n\n if len(items) != 4:\n return None\n\n return '.'.join(items)",
"def __init__(self, *args, **kwargs):\n self._directives = []\n self.ip = kwargs.get(\"ip\", \"0.0.0.0\")\n self.port = kwargs.get(\"port\", 80)\n self._server_names = {}\n\n if not isinstance(self.port, str) and not isinstance(self.port, int):\n raise TypeError(\"The port is expected either as a string or an integer, not %s.\" % (type(self.port)))\n if not self.__is_valid_ipv4_address(**{\"ip\" : self.ip}):\n raise ValueError(\"%s is not a valid IPv4 address.\" % (self.ip))\n if int(self.port) < 1 or int(self.port) > 65535:\n raise ValueError(\"%s is not a valid port.\" % (self.port))\n if not isinstance(self.ip, str):\n raise TypeError(\"The IP address is expected as a string, not %s.\" % (type(self.ip)))",
"def _check_ip_port_split(self):\n if self._type == \"A\":\n formatted_value = self._value.split(':')\n self._ip = formatted_value[0]\n self._port = int(formatted_value[1])",
"def testIP(self):\n self.assertEqual([\"http://234.234.234.234\"], grab('http://234.234.234.234', self.needScheme))",
"def test_ipam_ip_addresses_list(self):\n pass",
"def is_ip(self,inputs):\n format = '((?:(?:25[0-5]|2[0-4]\\\\d|[01]?\\\\d?\\\\d)\\\\.){3}(?:25[0-5]|2[0-4]\\\\d|[01]?\\\\d?\\\\d))'\n pattern = re.match(format, inputs)\n if pattern is not None:\n return True\n else:\n return False",
"def __init__(self, ips, ports):\n self._ips = ips\n self._ports = ports\n self._scanner = nmap.PortScanner()",
"def handle_hexip(bot, ievent):\n if not ievent.args:\n return ievent.missing('<ip | hex ip>')\n is_a = None\n if _re_hexip.match(ievent.args[0]):\n is_a = 'hexip'\n else:\n try:\n socket.inet_pton(socket.AF_INET, ievent.args[0])\n is_a = 'defip'\n except socket.error:\n pass\n if not is_a:\n ievent.missing('<ip | hex ip>')\n return\n if is_a == 'hexip':\n ip = []\n for i in range(4):\n ip.append(str(int(ievent.args[0][i*2:i*2+2], 16)))\n ip = '.'.join(ip)\n nevent = copy.copy(ievent)\n nevent.args = [ip]\n handle_dns(bot, nevent)\n else:\n test = ievent.args[0].split('.')\n ip = 16777216 * int(test[0]) + 65536 * int(test[1]) + 256 * \\\nint(test[2]) + int(test[3])\n ievent.reply('ip %s = %08x' % (ievent.args[0], ip))"
] | [
"0.59346735",
"0.5507849",
"0.5420339",
"0.540353",
"0.53581053",
"0.5341133",
"0.53306186",
"0.53253126",
"0.52746946",
"0.5222205",
"0.5208916",
"0.5197301",
"0.5188588",
"0.51674026",
"0.51580864",
"0.5143807",
"0.5061729",
"0.49728638",
"0.49534848",
"0.49212003",
"0.4916837",
"0.48985004",
"0.4888551",
"0.48804715",
"0.48774093",
"0.48609003",
"0.48591605",
"0.48360217",
"0.48251158",
"0.48157403"
] | 0.55468833 | 1 |
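To make the row above concrete, here is a minimal, self-contained sketch (not part of the dataset) that runs the positive document's `lladdr` regex against one hypothetical line of `ip neighbor` output; the sample line and variable names are invented for illustration.

```python
import re

# Same pattern as the positive document above; the neighbor line is a made-up sample.
ip_regex = re.compile(r"(.*?)\s+.*lladdr\s+(.*?)\s+")

sample_line = "10.0.0.1 dev eth0 lladdr aa:bb:cc:dd:ee:ff REACHABLE"
match = ip_regex.match(sample_line)
if match:
    ip_address, mac_address = match.groups()
    print(ip_address, mac_address)  # 10.0.0.1 aa:bb:cc:dd:ee:ff
```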
Parse the command output and return a dictionary which maps each MAC address to a list of IP addresses. | def _parse_mac_addr_table(self, cmd_output, mac_regex):
    lines = ensure_string(cmd_output).split("\n")
    arp_table = defaultdict(list)
    for line in lines:
        match = mac_regex.match(line)
        if not match:
            continue
        groups = match.groups()
        ip_address = groups[0]
        mac_address = groups[1]
        arp_table[mac_address].append(ip_address)
    return arp_table | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_mac_address_table(self):\n\n mac_address_table = []\n command = '/interface bridge host print terse'\n\n output = self._send_command(command)\n\n for host in parse_terse_output(output):\n mac_address_table.append({\n 'mac': cast_mac(host.get('mac-address')),\n 'interface': host.get('interface'),\n 'vlan': -1,\n 'static': True if 'D' not in host.get('_flags') else False,\n 'active': True if 'X' not in host.get('_flags') else False,\n 'moves': -1,\n 'last_move': -1.0\n })\n\n return mac_address_table",
"def find_ip_dns_pair(command_output):\n ip_dns_found = IP_REXP.findall(command_output)\n return [ip_dns_found[0][0], ip_dns_found[1][0]]",
"def parse_output(output):\n lines = output.splitlines()[3:-1]\n r = {}\n for line in lines:\n kv = filter(None, line.split('|'))\n kv = [x.strip() for x in kv]\n r.update({kv[0]: kv[1]})\n return r",
"def get_mac_address(self, result, host):\n if \"mac\" in result['scan'][host][\"addresses\"]:\n return result['scan'][host][\"addresses\"][\"mac\"]\n else:\n return \"\"",
"def _result_to_dict(line):\n f = line.split(':;')\n return {'server': f[0], 'os_name': f[1], 'status': f[2], 'ipv4': f[3]}",
"def _parse_ip_table_arp(self, arp_output):\n arp_regex = re.compile(r\".*?\\((.*?)\\) at (.*?)\\s+\")\n return self._parse_mac_addr_table(arp_output, arp_regex)",
"def parse_config_resp(self, data):\n\n macaddr = f'{data[8]:x}:{data[9]:x}:{data[10]:x}'\\\n f':{data[11]:x}:{data[12]:x}:{data[13]:x}'\n\n pump_array = [0, 0, 0, 0, 0, 0]\n pump_array[0] = int((data[5] & 0x03))\n pump_array[1] = int((data[5] & 0x0c) >> 2)\n pump_array[2] = int((data[5] & 0x30) >> 4)\n pump_array[3] = int((data[5] & 0xc0) >> 6)\n pump_array[4] = int((data[6] & 0x03))\n pump_array[5] = int((data[6] & 0xc0) >> 6)\n\n light_array = [0, 0]\n # not a typo\n light_array[1] = int((data[7] & 0x03) != 0)\n light_array[0] = int((data[7] & 0xc0) != 0)\n\n return (macaddr, pump_array, light_array)",
"def get_address_to_usb_info_dict():\n with open(os.devnull, 'w') as devnull:\n output = subprocess.check_output(MAC_COMMAND, stderr=devnull)\n output_dict = json.loads(output)\n usb_connections = output_dict[PROFILER_USB_DATA_TYPE]\n usb_info_dict = {}\n _convert_system_profiler_dict(usb_connections, usb_info_dict)\n return usb_info_dict",
"def getmacaddrs():\n # Unpack just for the sake of being meaningful.\n ifaddrs, sockaddr_dl, sockaddr = PLATFORM_LOOKUP[PLATFORM]\n ptr = c_void_p(None)\n result = LIBC.getifaddrs(pointer(ptr))\n if result != 0:\n return {}\n ifa = ifaddrs.from_address(ptr.value)\n result = {}\n\n while True:\n name = ifa.ifa_name\n if name not in result:\n result[name] = []\n # Some interface (such as a TUN virtual network) doesn't give us\n # ifa_addr at all and we can usually skip them because they're hardly\n # relevant for our usage case.\n if ifa.ifa_addr:\n sa = sockaddr.from_address(ifa.ifa_addr)\n if sa.sa_family == AF_LINK:\n si = sockaddr_dl.from_address(ifa.ifa_addr)\n addr = \"%s\" % si\n if addr:\n result[name].append(addr)\n if ifa.ifa_next:\n ifa = ifaddrs.from_address(ifa.ifa_next)\n else:\n break\n\n LIBC.freeifaddrs(ptr)\n return result",
"def get_server_ip_mac(self, server_id):\n port_list = self.list_port()\n if not isinstance(port_list, list):\n return\n interface_list = []\n for port in port_list:\n if port[\"device_id\"] == server_id:\n port_info = {}\n port_info['mac'] = port['mac_address']\n port_info['ip_address'] = port['fixed_ips'][0]['ip_address']\n interface_list.append(port_info)\n\n LOG_OBJ.info(\"VM Interface Info : %s \" % interface_list)\n return interface_list",
"def get_ipmac_bind(self) -> dict:\n return self._get_json(self._URLS['GetIpMacBind'])",
"def __process_address(self, address: Tuple[int, int, int, int, int]) -> Dict[str, int]:\n return {\n 'interface': address[0],\n 'protocol': address[1],\n 'type': address[2],\n 'hardware_type': address[3],\n 'address': address[4],\n }",
"def parse_dhcp_stats(input: str) -> dict:\n found_pool = False\n found_dash = False\n\n stats = {}\n\n for line in input.split(\"\\n\"):\n line = line.strip().replace(\"\\x1b[m\", \"\")\n\n if not line:\n continue\n\n if not found_pool and line.startswith(\"pool\"):\n found_pool = True\n continue\n elif found_pool and found_dash is False and line.startswith(\"----\"):\n found_dash = True\n continue\n elif found_pool and found_dash:\n dhcp_stats = line.split()\n if len(dhcp_stats) != 4:\n continue\n stats[dhcp_stats[0]] = {\n \"size\": int(dhcp_stats[1]),\n \"used\": int(dhcp_stats[2]),\n \"avail\": int(dhcp_stats[3])\n }\n return stats",
"def parsePing(self,stdoutputdata):\n\t\tprint(stdoutputdata)\n\t\tres = {}\n\t\t# hostname = re.search(\"\\b(([a-zA-Z0-9]\\w{0,61}?[a-zA-Z0-9]|[a-zA-Z0-9])\\.){0,1}?([a-zA-Z0-9]\\w{0,61}?[a-zA-Z0-9]|[a-zA-Z0-9])\\.(com|edu|gov|int|mil|net|org|biz|info|name|museum|coop|aero|[a-z][a-z])(\\.[a-z][a-z]){0,1}\\b\", stdoutputdata, re.M|re.I)\n\t\thostname = re.split(\" \", re.split(r\"---\", stdoutputdata)[1])[1]\n\t\tprint hostname\n\t\tres[\"hostname\"] = hostname\n\t\tre_loss_rate = re.search(\"\\d{1,3}\\.\\d{1,2}\\%\", stdoutputdata)\n\t\tif re_loss_rate:\n\t\t\tprint re_loss_rate.group(0)\n\t\t\tres[\"loss_rate\"] = re_loss_rate.group(0)\n\n\t\tre_min_avg = re.search(\"\\d{1,3}\\.\\d{1,3}/\\d{1,3}\\.\\d{1,3}\", stdoutputdata)\n\t\tif re_min_avg:\n\t\t\tprint re_min_avg.group(0)\n\t\t\tmin_avg = re_min_avg.group(0).split(\"/\")\n\t\t\tres[\"min\"] = min_avg[0]\n\t\t\tres[\"avg\"] = min_avg[1]\n\t\treturn res",
"def _ParseScanResult(self, output):\n # Split access points into a list. Since we split on a string encountered\n # at the very beginning of the output, the first element is blank (thus\n # we skip the first element). Remaining elements are in groups of three,\n # in groups of: (BSSID, associated, other).\n bssid_ap_list = self._ACCESS_POINT_RE.split(output)[1:]\n bssid_ap_tuples = [bssid_ap_list[x:x+3]\n for x in range(0, len(bssid_ap_list), 3)]\n\n # Parse each AP.\n aps = []\n for bssid, associated, ap_data in bssid_ap_tuples:\n active = bool(associated)\n aps.append(self._ParseScanAccessPoint(bssid, active, ap_data))\n\n # Return AP list.\n return aps",
"def arptable(inputDict):\n if inputDict['interface'] not in getInterfaces():\n return [], \"Interface is not available on the node\", 3\n command = \"ip neigh\"\n cmdOut = externalCommand(command, False)\n out, err = cmdOut.communicate()\n retOut = []\n for line in out.decode(\"utf-8\").split('\\n'):\n splLine = line.split(' ')\n if len(splLine) > 4 and splLine[2] == inputDict['interface']:\n retOut.append(line)\n return retOut, err.decode(\"utf-8\"), cmdOut.returncode",
"def _parse_ip_addr_show(raw_result):\n # does link exist?\n show_re = (\n r'\"(?P<dev>\\S+)\"\\s+does not exist'\n )\n re_result = search(show_re, raw_result)\n result = None\n\n if not (re_result):\n # match top two lines for serveral 'always there' variables\n show_re = (\n r'\\s*(?P<os_index>\\d+):\\s+(?P<dev>\\S+):\\s+<(?P<falgs_str>.*)?>.*?'\n r'mtu\\s+(?P<mtu>\\d+).+?state\\s+(?P<state>\\w+).*'\n r'\\s*link/(?P<link_type>\\w+)\\s+(?P<mac_address>\\S+)'\n )\n\n re_result = search(show_re, raw_result, DOTALL)\n result = re_result.groupdict()\n\n # seek inet if its there\n show_re = (\n r'((inet )\\s*(?P<inet>[^/]+)/(?P<inet_mask>\\d{1,2}))'\n )\n re_result = search(show_re, raw_result)\n if (re_result):\n result.update(re_result.groupdict())\n\n # seek inet6 if its there\n show_re = (\n r'((?<=inet6 )(?P<inet6>[^/]+)/(?P<inet6_mask>\\d{1,2}))'\n )\n re_result = search(show_re, raw_result)\n if (re_result):\n result.update(re_result.groupdict())\n\n # cleanup dictionary before returning\n for key, value in result.items():\n if value is not None:\n if value.isdigit():\n result[key] = int(value)\n\n return result",
"def _get_ipv6_addresses(self, host: str) -> Dict[str, List[IPv6Address]]:\n if host == \"self\":\n command = \"show ipv6 interface\"\n elif host == \"peer\":\n command = \"failover exec mate show ipv6 interface\"\n\n show_ipv6_interface = self.show(command)\n show_ipv6_interface_lines: List[str] = show_ipv6_interface.strip().splitlines()\n first_line = show_ipv6_interface_lines.pop(0)\n interface: str = first_line.split()[0]\n ipv6_addresses: List[IPv6Interface] = []\n results: Dict[str, List] = {}\n for line in show_ipv6_interface_lines:\n # match IPv6 addresses under interface line\n if line[0].isspace():\n match = RE_IPV6_INTERFACE_MATCH.match(line)\n if match:\n ipv6_addresses.append(IPv6Interface(f\"{match.group(1)}{match.group(2)}\"))\n # update results mapping interface to matched IPv6 addresses and generate the next interface name\n else:\n if ipv6_addresses:\n results[interface] = ipv6_addresses\n ipv6_addresses = []\n interface = line.split()[0]\n\n # Add final interface in iteration if it has IPv6 addresses\n if ipv6_addresses:\n results[interface] = ipv6_addresses\n\n log.debug(\"Host %s: ip interfaces %s\", self.host, results)\n return results",
"def parseMountOutput(output):\n\t\n\t# none on /proc/sys/fs/binfmt_misc type binfmt_misc (rw)\n\t\n\tparsedOutput = {}\n\tregex = \"(\\S+)\\s+on\\s+(\\S+)\\s+type\\s+(\\S+)\\s+\\((\\S+)\\)\"\n\tfor l in output:\n\t\tif re.search(regex,l):\n\t\t\tm = re.search(regex,l)\n\t\t\tdev = m.group(1)\n\t\t\tmntpoint = m.group(2)\n\t\t\tfs = m.group(3)\n\t\t\tperm = m.group(4)\n\t\t\tparsedOutput[dev] = {}\n\t\t\tparsedOutput[dev]['mntpoint'] = mntpoint\n\t\t\tparsedOutput[dev]['filesys'] = fs\n\t\t\tparsedOutput[dev]['perm'] = perm\n\t\t\t\n\t\t\t\n\treturn parsedOutput",
"def parse_cli_output(self, output, operation, name, index):\n state = None\n regex = None\n error = self.__cli_connection_error(output)\n if(not error):\n if index is not None:\n regex = re.search( r'RuntimeCmd:\\s*(' + re.escape(name) + r'\\[' + index + r'\\])=\\s*(\\d+)', output, re.M|re.I)\n else:\n regex = re.search( r'RuntimeCmd:\\s*(' + re.escape(name) + r')=\\s*([\\d+,\\s]+)', output, re.M|re.I)\n\n if regex:\n state = regex.group(2)\n else:\n # regex = re.search( r'RuntimeCmd:(\\sError:\\s|\\s)(.*)\\((.*)\\)\\n', output, re.M|re.I)\n regex = re.search( r'RuntimeCmd:(\\sError:\\s|\\s)(.*)\\n', output, re.M|re.I)\n if regex:\n # self.logger.error(\"CLI Error: %s - %s\", regex.group(2), regex.group(3))\n self.logger.error(\"CLI Error [%s %s]: %s\", operation, name, regex.group(2))\n return state",
"def sysinfo_scrape(output):\r\n # Create a dictionary\r\n return_dict = dict()\r\n for idx, line in enumerate(output.strip().split(\"\\n\")):\r\n tokens = re.split(r\"\\s{2,}\", line)\r\n if idx == 0:\r\n return_dict[\"Name\"] = tokens[-1]\r\n else:\r\n try:\r\n name, value = tokens[-1].split(\":\")\r\n return_dict[name.strip()] = value.strip()\r\n except ValueError:\r\n # Handle extra lines in the logo\r\n pass\r\n\r\n return return_dict",
"def parse_input(self, input_string):\n ip_addresses = {}\n\n tree = ET.parse(StringIO(input_string))\n root = tree.getroot()\n\n for e1 in root:\n if e1.tag == \"host\":\n host = e1\n\n ports = None\n address = None\n\n for e2 in host:\n if e2.tag == \"ports\":\n ports = e2\n if e2.tag == \"address\" and e2.attrib['addrtype'] != \"mac\":\n address = e2\n\n if ports is None:\n continue\n\n ip_address = address.attrib['addr']\n if ip_address not in ip_addresses:\n ip_addresses[ip_address] = []\n\n for port in ports:\n\n if port.tag != \"port\":\n continue\n\n port_protocol = port.attrib[\"protocol\"]\n port_number = str(port.attrib['portid'])\n port_state = port.find(\"state\").attrib['state']\n\n # lets only pay attention to open ports\n if port_state in [\"open\"]:\n s = dumps([port_protocol, port_number, port_state])\n\n ip_addresses[ip_address].append(s)\n\n service = port.find(\"service\")\n\n if \"name\" in service.attrib:\n s = dumps([port_protocol, port_number, port_state, service.attrib[\"name\"]])\n ip_addresses[ip_address].append(s)\n\n #s = dumps([port_protocol, port_state, service.attrib[\"name\"]])\n #ip_addresses[ip_address].append(s)\n\n s_list = [port_protocol, port_number, port_state, service.attrib[\"name\"]]\n #s_no_port_list = [port_protocol, port_state, service.attrib[\"name\"]]\n\n for sid in [\"product\", \"version\", \"extrainfo\", \"servicefp\"]:\n if sid in service.attrib:\n s_list.append(service.attrib[sid])\n s = dumps(s_list)\n ip_addresses[ip_address].append(s)\n\n #s_no_port_list.append(service.attrib[sid])\n #s = dumps(s_no_port_list)\n #ip_addresses[ip_address].append(s)\n\n for script_element in port:\n if script_element.tag != \"script\":\n continue\n # todo parse script tag from xml\n script_id = script_element.attrib[\"id\"]\n\n for table in script_element:\n if table.tag == \"table\":\n for elem in table:\n key = \"\"\n if \"key\" in elem.attrib:\n key = elem.attrib[\"key\"]\n\n if elem.text is not None:\n s = dumps([port_protocol, port_number, port_state, service.attrib[\"name\"],\n script_id, key, elem.text])\n else:\n s = dumps(\n [port_protocol, port_number, port_state, service.attrib[\"name\"],\n script_id, key])\n ip_addresses[ip_address].append(s)\n\n if table.tag == \"elem\":\n elem = table\n key = \"\"\n if \"key\" in elem.attrib:\n key = elem.attrib[\"key\"]\n\n if elem.text is not None:\n s = dumps([port_protocol, port_number, port_state, service.attrib[\"name\"],\n script_id, key, elem.text])\n else:\n s = dumps(\n [port_protocol, port_number, port_state, service.attrib[\"name\"],\n script_id, key])\n ip_addresses[ip_address].append(s)\n\n print \"no of IP's taken from NMAP: \" + str(len(ip_addresses.viewkeys()))\n return ip_addresses",
"def parse_device_info(self, info_string):\n device = {}\n block_list = [\"[\\x1b[0;\", \"removed\"]\n if not any(keyword in info_string for keyword in block_list):\n try:\n device_position = info_string.index(\"Device\")\n except ValueError:\n pass\n else:\n if device_position > -1:\n attribute_list = info_string[device_position:].split(\" \", 2)\n device = {\n \"mac_address\": attribute_list[1],\n \"name\": attribute_list[2],\n }\n return device",
"def _parse_head(line):\n retval = {}\n m = re.match(\n '[0-9]+: (?P<if>\\w+\\d{1,3}): <(?P<flags>[^>]+)> mtu (?P<mtu>[0-9]+)',\n line\n )\n if m:\n retval['ifname'] = m.group('if')\n retval['mtu'] = int(m.group('mtu'))\n retval['flags'] = m.group('flags').split(',')\n return retval",
"def get_interfaces_ip(self):\n\n interfaces_ip = dict()\n command = '/ip address print terse'\n\n ip_address_output_v4 = self._send_command(command)\n\n ip_addresses = parse_terse_output(ip_address_output_v4)\n\n for ip_address in ip_addresses:\n interface = ip_address.get('interface')\n address, mask = ip_address.get('address').split('/')\n\n interfaces_ip.setdefault(interface, {}) \\\n .setdefault('ipv4', {}) \\\n .setdefault(cast_ip(address), {}) \\\n .setdefault('prefix_length', int(mask))\n\n return interfaces_ip",
"def summary(self):\n mac_dict = {}\n for eth in self.get_members():\n if eth.mac_address is not None and eth.status is not None:\n if eth.status.health == res_cons.HEALTH_OK:\n mac_dict[eth.mac_address] = eth.status.state\n return mac_dict",
"def pull_info(task):\n\n interface_result = task.run(task=send_command, command=\"show interfaces\")\n task.host[\"facts\"] = interface_result.scrapli_response.genie_parse_output()\n interfaces = task.host[\"facts\"]\n for interface in interfaces:\n try:\n mac_addr = interfaces[interface][\"mac_address\"]\n if target == mac_addr:\n target_list.append(mac_addr)\n intf = interface\n print_info(task, intf)\n except KeyError:\n pass",
"def _get_ipv4_addresses(self, host: str) -> Dict[str, List[IPv4Address]]:\n if host == \"self\":\n command = \"show ip address\"\n elif host == \"peer\":\n command = \"failover exec mate show ip address\"\n\n show_ip_address = self.show(command)\n re_ip_addresses = RE_SHOW_IP_ADDRESS.findall(show_ip_address)\n\n results = {\n interface: [IPv4Interface(f\"{address}/{netmask}\")] for interface, address, netmask in re_ip_addresses\n }\n log.debug(\"Host %s: ip interfaces %s\", self.host)\n return results",
"def get_commands_dict() -> dict:\n commands_dict = {}\n f = open(f\"data/metadata/commands.dict.txt\", \"r\", encoding=\"utf-8\").read()\n for command in f.split(\"\\n\"):\n commands_dict[command.split(\":\")[0]] = command.split(\":\")[1]\n return commands_dict",
"def parse_ping(stdout):\n parsed_lines = []\n for line in stdout:\n # 64 bytes from 100.0.0.1: icmp_seq=1 ttl=63 time=1.32 ms\n parsed = {}\n match = re.search(r\"icmp_seq=(\\d+)\", line)\n if match:\n parsed['icmp_seq'] = match.group(1)\n else:\n continue\n\n match = re.search(r\"(\\d+) bytes\", line)\n if match:\n parsed['bytes'] = match.group(1)\n\n match = re.search(r\"ttl=(\\d+)\", line)\n if match:\n parsed['ttl'] = match.group(1)\n\n match = re.search(r\"time=([\\.\\d]+)\", line)\n if match:\n parsed['time'] = match.group(1)\n\n match = re.search(r\"Time[\\w\\s]+exceeded\", line)\n if match:\n parsed['ttl_exceed'] = True\n else:\n parsed['ttl_exceed'] = False\n\n if parsed != {}:\n parsed_lines.append(parsed)\n\n return parsed_lines"
] | [
"0.6518071",
"0.6207442",
"0.6205289",
"0.6143003",
"0.6007293",
"0.59642804",
"0.59295213",
"0.5820728",
"0.5768313",
"0.5717903",
"0.56790775",
"0.5641952",
"0.56189096",
"0.56146896",
"0.55993295",
"0.5571131",
"0.556581",
"0.5563868",
"0.55427015",
"0.55418855",
"0.55296886",
"0.55291694",
"0.549782",
"0.548888",
"0.5479451",
"0.54660934",
"0.5462794",
"0.54551333",
"0.54373723",
"0.543401"
] | 0.69882375 | 0 |
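Likewise, a runnable sketch of this row's positive document (not part of the dataset): a standalone version of `_parse_mac_addr_table` with no `self` and plain `str.split` in place of `ensure_string`, applied to two invented `arp -a`-style lines using the regex from the `_parse_ip_table_arp` negative above. It shows why the result maps each MAC address to a list of IP addresses: one MAC can answer for several IPs.

```python
import re
from collections import defaultdict

def parse_mac_addr_table(cmd_output, mac_regex):
    # Standalone version of the method above.
    arp_table = defaultdict(list)
    for line in cmd_output.split("\n"):
        match = mac_regex.match(line)
        if not match:
            continue
        ip_address, mac_address = match.groups()
        arp_table[mac_address].append(ip_address)
    return arp_table

# Regex from the _parse_ip_table_arp negative above; the sample output is invented.
arp_regex = re.compile(r".*?\((.*?)\) at (.*?)\s+")
sample = (
    "gateway (192.168.1.1) at aa:bb:cc:dd:ee:ff [ether] on eth0\n"
    "host2 (192.168.1.7) at aa:bb:cc:dd:ee:ff [ether] on eth0\n"
)
print(dict(parse_mac_addr_table(sample, arp_regex)))
# {'aa:bb:cc:dd:ee:ff': ['192.168.1.1', '192.168.1.7']}
```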
Use BFS to find the shortest path. Use distance = {} to keep track of the distance of each node and parent = {} to backtrack and trace the path out. | def find_shortest_path(self, start, end):
    if start is None:
        return
    distance = {start: 0}
    parent = {start: None}
    queue = deque()
    queue.append(start)
    while queue:
        cn = queue.popleft()
        for n in self.adjacencylist[cn]:
            # mark nodes as discovered on enqueue so each is queued only once
            if n not in distance:
                queue.append(n)
                parent[n] = cn
                distance[n] = distance[cn] + 1
    print('BFS done')
    print("Finding shortest path")
    if end not in parent:
        print("No path found from start to end")
        return
    path = []
    cn = end
    path.append(cn)
    while cn != start:
        cn = parent[cn]
        path.append(cn)
    print(path[::-1]) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def bfs(self, queue, target, targetx,\n targety): # finds BFS path to the finish. if there is no path, will return nothing\n\n '''\n 1. So we have a parent matrix\n 2. This records the parent\n 3. We have a dictionary of cell: parents'''\n if self.map1[queue[0][0]][queue[0][1]] == target:\n return [1]\n\n thisset = {queue[0]}\n traceSet = {queue[0]: None}\n\n flag = False # variable to see if it is possible to reach the goal\n while queue:\n fringe = queue.pop(0) # gets 0, 0 first\n adjs = self.getAdj(fringe[0], fringe[1])\n\n if self.map1[fringe[0]][fringe[1]] == 2:\n print(\"Our attempt has started\")\n\n if self.map1[fringe[0]][fringe[1]] == target:\n print(\"Goal reached\")\n print(\"This is how you go about it\")\n # print(traceSet)\n ans = self.trace(traceSet, targetx, targety)\n path = self.savePath(ans)\n flag = True\n # print(ans.pop())\n break\n\n if self.map1[fringe[0]][fringe[1]] == 0 or self.map1[fringe[0]][fringe[1]] == 3 or self.map1[fringe[0]][fringe[1]] == 4:\n continue\n\n for i in range(len(adjs)):\n if self.legal(adjs[i][0], adjs[i][1]):\n if adjs[i] in thisset:\n continue\n\n thisset.add(adjs[i])\n traceSet[adjs[i]] = fringe\n queue.append(adjs[i])\n if flag is False:\n print(\"No way to goal\")\n return []\n return path",
"def bfs(self, vertex_s):\r\n nd_list = list(self.vertices())\r\n visited = dict((node, 0) for node in nd_list)\r\n\r\n nq = deque()\r\n pre_dict, dist = {}, {}\r\n nq.append(vertex_s)\r\n visited[vertex_s]=1\r\n dist[vertex_s] = 0\r\n\r\n loop_counts = 0\r\n while nq:\r\n s = nq.popleft()\r\n for node in self.__graph_dict[s]: # for each child/neighbour of current node 's'\r\n loop_counts += 1\r\n \r\n #if not node in visited:\r\n if not visited[node]:\r\n nq.append(node) # let 'node' in queue\r\n pre_dict[node] = [s] # the 'parent' (in terms of shortest path from 'root') of 'node' is 's'\r\n dist[node] = dist[s] + 1 # shortest path to 'root'\r\n visited[node]=1 # 'node' is visted\r\n #if node in visited and dist[node] == dist[s] + 1: # still within the shortest path\r\n if visited[node] and dist[node] == dist[s] + 1: # still within the shortest path\r\n if s not in pre_dict[node]: # if this path have NOT been recorded, let's do that now\r\n pre_dict[node].append(s) \r\n \r\n if visited[node] and dist[node] > dist[s] + 1: # the previous 'recorded' path is longer than our current path (via node 's'); let's update that path and distance\r\n pre_dict[node] = [s]\r\n dist[node] = dist[s] + 1\r\n #print(\" #loops: %d\" %loop_counts)\r\n #current_bfs[vertex_s] = pre_dict\r\n \r\n return pre_dict",
"def shortest_path_tree__bfs(self, start):\r\n from queue import deque\r\n\r\n assert start in self.graph\r\n\r\n distance = {vertex: None for vertex in self.vertices()}\r\n distance[start] = 0\r\n\r\n previous = {vertex: None for vertex in self.vertices()}\r\n\r\n queue = deque()\r\n queue.append(start)\r\n\r\n while queue:\r\n current_vertex = queue.pop()\r\n for neighbour in self.neighbours(current_vertex):\r\n if distance[neighbour] is None:\r\n queue.append(neighbour)\r\n distance[neighbour] = distance[current_vertex] + 1\r\n previous[neighbour] = current_vertex\r\n\r\n return previous",
"def bft(self, starting_vertex):\n # Create a q and enqueue starting vertex\n qq = Queue()\n qq.enqueue([starting_vertex])\n # Create a set of traversed vertices\n visited = set()\n # eldest = []\n depth_counter = {} \n starter = 0 \n # visited = []\n # While queue is not empty:\n while qq.size() > 0:\n # dequeue/pop the first vertex\n path = qq.dequeue()\n # if not visited\n # print(visited)\n starter += 1\n if path[-1] not in visited:\n # DO THE THING!!!!!!!\n # print(path[-1])\n depth_counter[starter] = path[-1]\n # mark as visited\n visited.add(path[-1])\n # visited.append(path[-1])\n # enqueue all neightbors\n \n if not self.get_neighbors(path[-1]):\n \n if starting_vertex == path[-1]:\n return -1\n else:\n # print(\"eldest ancestor:\",path[-1])\n depth_counter[starter] = path[-1]\n # print(depth_counter)\n # eldest.append(path[-1])\n else:\n # starter += 1\n for next_vert in self.get_neighbors(path[-1]): \n new_path = list(path)\n new_path.append(next_vert)\n qq.enqueue(new_path)\n\n\n return depth_counter[starter]",
"def bfs_level(level):\n queue.append(seed_url)\n visited.add(seed_url)\n bfs(level)",
"def bfs(self, start, end):\n\n queue = [start]\n parent = dict()\n\n # Initialize parent dictionary\n for v in iter(self._reachable): parent[v] = None\n parent[start] = start\n\n while len(queue) > 0:\n (x, y) = queue.pop(0)\n if (x, y) == end: break\n\n for v in self.get_reachables(x, y):\n if parent[v] is not None: \n # Vertex v already visited\n continue\n parent[v] = (x, y)\n queue.append(v)\n\n # Reconstruct path\n path = [end]\n vertex = end\n\n while parent[vertex] != vertex:\n if parent[vertex] is None: return []\n path.append(parent[vertex])\n vertex = parent[vertex]\n\n path.reverse()\n return path",
"def breadthFirstSearch(problem):\n \"*** YOUR CODE HERE ***\"\n from util import Queue\n q = Queue()\n mapper = {} #child_point : (parent_point, direction_to_child)\n q.push(problem.getStartState())\n mapper[problem.getStartState()] = None #root\n\n while (not q.isEmpty()):\n point = q.pop()\n\n if (problem.isGoalState(point)):\n c = point\n l = []\n while mapper[c] != None:\n tup = mapper[c]\n l.append(tup[1])\n c = tup[0]\n l.reverse()\n print l\n return l\n\n else:\n for child in problem.getSuccessors(point):\n if (child[0] not in mapper):\n q.push(child[0])\n mapper[child[0]] = (point, child[1])\n\n # util.raiseNotDefined()",
"def bfs(self, starting_vertex, destination_vertex): # great if you know to result is somewhere close to the root/start\n visited = set() # create an empty 'set' to store visisted vertex, set sorts \n\n q = Queue() # create an empty Queue\n q.enqueue([starting_vertex]) # set the starting_vertex with enqueue \n\n while q.size() > 0:\n path = q.dequeue() # dequeue and store first path\n v = path[-1] # store the vertex from the end of path \n\n if v == destination_vertex: # if v is equal to the destination_vertex\n return path # return the path \n\n if v not in visited: # if v has not been visited yet \n visited.add(v) # add v to the vistied set \n\n for neighbor in self.vertices[v]: # loop through the neighbors \n path_copy = list(path) # make a copy of the path \n path_copy.append(neighbor) # append each neighbor to the back of the path copy \n q.enqueue(path_copy) # enqueue the path copy to the queue ",
"def BFS(graph,root):\r\n \r\n nodes, edges = graph\r\n \r\n assert root in nodes\r\n adjList = adjacencyList(edges)\r\n \r\n distance = {root:0}\r\n parent = {root:None}\r\n queue = [root]\r\n \r\n while queue:\r\n current = queue.pop()\r\n \r\n for node in adjList.get(current,array('L')):\r\n if node not in distance:\r\n queue = [node] + queue\r\n distance[node] = distance[current] + 1\r\n parent[node] = current\r\n \r\n for node in nodes:\r\n if node not in distance:\r\n distance[node] = 'Inf'\r\n \r\n return distance, parent",
"def bfs(vertex, graph, distances, shortest_ways, queue=deque()):\n if vertex not in distances:\n distances[vertex] = 0\n shortest_ways[vertex] = vertex\n for neighbour in graph[vertex]:\n if neighbour not in distances:\n queue.append(neighbour)\n distances[neighbour] = distances[vertex] + 1\n shortest_ways[neighbour] = shortest_ways[vertex] + ' ' + vertex + neighbour\n while len(queue) > 0:\n vertex = queue.popleft()\n bfs(vertex, graph, distances, shortest_ways, queue)",
"def bfs(self, starting_vertex, destination_vertex):\n \"\"\" FIFO ir LILO\n Create a queue\n Enqueue PATH to starting Vertex\n Create a set top store visited vertices\n While the queue is NOT empty: e.g. > 0\n Dequeue the first PATH Vertex\n Get Vertex from END of PATH\n Check IF NOT visited:\n Mark as visited\n check if vertex is destination_vertex\n If TRUE, return path\n enqueue PATH to ALL of neighbors \n make COPY of current path\n add neighbor to path copy\n enqueue copy \n \"\"\"\n\n q = Queue() # Create a queue\n q.enqueue([starting_vertex]) # Enqueue starting at vertex into Queue (list)\n visited = set() # Create a set to store visited \n \n while q.size() > 0: # While the queue is NOT empty: \n path = q.dequeue() # Dequeue the first PATH Vertices\n v = path[-1] # Get Vertex from END of PATH\n\n if v not in visited: # Check IF NOT visited:\n visited.add(v) # Mark as visited\n\n if v == destination_vertex: # check if vertex is destination_vertex\n return path # If TRUE, return path, DONE\n\n for n in self.get_neighbors(v): # enqueue PATH to ALL of neighbors\n path_c = path [:] # make COPY of current path\n path_c.append(n) # add neighbor to path copy\n q.enqueue(path_c) # enqueue copy",
"def breadthFirstSearch(problem):\n \"*** YOUR CODE HERE ***\"\n visited=[]\n \n node=dict()\n start=problem.getStartState()\n node['parent']=None\n node['direction']=None\n node['state']=start\n \n \n qu.push(node)\n lis.append(node)\n \n print qu.list\n while qu.isEmpty()!=True:\n node=qu.pop()\n pos=node['state']\n visited.append(pos)\n print visited\n if problem.isGoalState(pos):\n print \"found\"\n return getPath(problem,node)\n #break\n suc=problem.getSuccessors(pos)\n if suc ==None:\n continue \n \n print suc\n for step in suc:\n #if step not in dic :\n if step[0] not in visited:\n childnode={}\n childnode['parent']=pos\n childnode['direction']=step[1]\n childnode['state']=step[0]\n qu.push(childnode)\n lis.append(childnode)\n \n\n #util.raiseNotDefined()",
"def BFS(self,s,t,parent):\n #mark all vertices as not visited\n visited = [False]*(self.ROWS);\n # initialize a queue\n queue = []\n # add source to q and mark it visited\n queue.append(s)\n visited[s] = True\n #Breadth-first-search\n while queue:\n n = queue.pop(0)\n for index,val in enumerate(self.graph[n]):\n if visited[index] == False and val>0:\n queue.append(index)\n visited[index] = True\n parent[index] = n\n #return True if sink was visted\n if visited[t]:\n return True\n else:\n return False",
"def bfs_path(graph, s, goals=[]):\n visited = []\n parents = [None]*len(graph)\n boundary = [s]\n while len(boundary) > 0:\n v = boundary.pop(0)\n visited += [v]\n for w in neighbours(v, graph):\n if w not in visited and w not in boundary:\n boundary.append(w)\n parents[w] = v\n #w's parent\n # put w's parent in the partent list\n return get_path(parents, goals[0], goals[1])",
"def bfs(start_node, goal_node, max_depth) -> \"solution path\":\n\td = deque([start_node,[]])\n\texplored = {}\n\tlevel = 0\n\n\t# Return empty path if start is equal to goal\n\tif start_node == goal_node:\n\t\treturn []\n\n\t# Keep exploring while the deque has nodes\n\twhile len(d) > 0:\n\t\tpath = d.popleft()\n\n\t\tif level == 0:\n\t\t\tnode = path\n\t\telse:\n\t\t\t# To keep track of levels an empty node gets popped between levels which will cause an exception\n\t\t\ttry:\n\t\t\t\tnode = path[-1]\n\t\t\texcept Exception:\n\t\t\t\tnode = []\n\t\t\t\tpass\n\n\t\tif len(node) == 0:\n\t\t\tlevel += 1\n\t\t\t# Return empty list if max depth was reached\n\t\t\tif max_depth == level:\n\t\t\t\treturn []\n\t\t\td.append(node)\n\n\t\telse:\n\t\t\tval = getNodeVal(node)\n\t\t\tif val not in explored:\n\n\t\t\t\t# Mark node as explored\n\t\t\t\texplored[val] = True\n\n\t\t\t\tfor row in range(len(node)):\n\t\t\t\t\tfor col in range(len(node)):\n\t\t\t\t\t\tchild = toggle(node, row, col)\n\t\t\t\t\t\tnew_path = list(path)\n\t\t\t\t\t\tif level == 0:\n\t\t\t\t\t\t\tnew_path = [new_path]\n\t\t\t\t\t\tnew_path.append(child)\n\t\t\t\t\t\td.append(new_path)\n\t\t\t\t\t\tif child == goal_node:\n\t\t\t\t\t\t\tlevel+=1\n\t\t\t\t\t\t\treturn new_path\n\t# No solution found\n\treturn []",
"def depthFirstSearch(problem):\r\n \"*** YOUR CODE HERE ***\"\r\n node = problem.getStartState()\r\n if (problem.isGoalState(node)):\r\n return [] # no need to make any moves of the start state is goal\r\n start = (node, 'NoDirection',0)\r\n\r\n frontier_queue = Stack() # queue for frontier\r\n frontier_queue.push(start) # frontier consists of only the start state\r\n\r\n explored_nodes = set()\r\n explored_track = {start:None} # keep a track of parent, parent of root node is None\r\n\r\n while not frontier_queue.isEmpty():\r\n state = frontier_queue.pop() # pop the top element from the queue \r\n explored_nodes.add(state)\r\n\r\n if problem.isGoalState(state[0]):\r\n return get_track(explored_track, state)\r\n\r\n neighbors_state = problem.getSuccessors(state[0])\r\n for neighbor in neighbors_state: # neighbor will be something like this ((34, 15), 'South', 1)\r\n if neighbor not in frontier_queue.list and neighbor not in explored_nodes:\r\n frontier_queue.push(neighbor)\r\n explored_track[neighbor] = state\r\n\r\n\r\n def get_track(explored_track, state):\r\n from game import Directions\r\n track_history = [state]\r\n track_history_direction = []\r\n leaf = state\r\n while (explored_track[leaf]) != start:\r\n track_history.append(explored_track[leaf])\r\n leaf = explored_track[leaf]\r\n\r\n for j in range (len(track_history),-1,-1):\r\n this_step = track_history[j-1]\r\n this_step = this_step[1]\r\n track_history_direction.append(this_step)\r\n return track_history_direction[:-1]",
"def shortestPathBFS(start):\n if start is None:\n return None\n\n # keep track of nodes to be checked\n queue = [start]\n start.curr_dist = 0\n\n while queue:\n curr = queue.pop()\n for neighbor in curr.neighbors:\n next_distance = curr.curr_dist + curr.getDistance(neighbor)\n if neighbor.curr_dist == math.inf or neighbor.curr_dist > next_distance:\n neighbor.curr_dist = next_distance\n neighbor.previous = curr\n queue.insert(0, neighbor)",
"def bfs(self, starting_vertex, destination_vertex):\n # create an empty queue and enqueue A-PATH-TO the starting vertex ID\n # create a Set to store the visited vertices\n # while the queue is not empty ..\n # dequeue the first PATH\n # grab the last vertex from the PATH\n # if that vertex has not been visited ..\n # check if its the target\n #if yes, return path\n #mark it as visited\n # add A PATH TO its neighbots to the back of the queue\n # copt the path\n # append the neighbor to the back\n \n \n # create an empty Queue \n queue = Queue()\n #push the starting vertex ID as list\n queue.enqueue([starting_vertex])\n # create an empty Set to store the visited vertices\n visited = set()\n # while the queue is not empty ...\n while queue.size() > 0:\n # dequeue the first vertex\n path = queue.dequeue()\n vert = path[-1]\n # if that vertex has not been visited ..\n if vert not in visited:\n #check for target\n if vert == destination_vertex:\n return path\n # mark it is visited\n visited.add(vert)\n # then add all of its neighbors to the top of the stack\n for neighbor in self.vertices[vert]: #self.get_neighbors(vert)\n #copy path to avoid pass by reference\n new_path = list(path) # make a copy\n new_path.append(neighbor)\n queue.enqueue(new_path)",
"def breadthFirstSearch(problem):\r\n \"*** YOUR CODE HERE ***\"\r\n node = problem.getStartState()\r\n if (problem.isGoalState(node)):\r\n return [] # no need to make any moves of the start state is goal\r\n start = (node, 'NoDirection',0)\r\n\r\n frontier_queue = Queue() # queue for frontier\r\n frontier_queue.push(start) # frontier consists of only the start state\r\n\r\n explored_nodes = set()\r\n explored_track = {start:None} # keep a track of parent, parent of root node is None\r\n\r\n while not frontier_queue.isEmpty():\r\n state = frontier_queue.pop() # pop the top element from the queue \r\n explored_nodes.add(state)\r\n\r\n if problem.isGoalState(state[0]):\r\n return get_track(explored_track, state)\r\n\r\n neighbors_state = problem.getSuccessors(state[0])\r\n for neighbor in neighbors_state: # neighbor will be something like this ((34, 15), 'South', 1)\r\n if neighbor not in frontier_queue.list and neighbor not in explored_nodes:\r\n frontier_queue.push(neighbor)\r\n explored_track[neighbor] = state\r\n\r\n\r\n def get_track(explored_track, state):\r\n from game import Directions\r\n track_history = [state]\r\n track_history_direction = []\r\n leaf = state\r\n while (explored_track[leaf]) != start:\r\n track_history.append(explored_track[leaf])\r\n leaf = explored_track[leaf]\r\n\r\n for j in range (len(track_history),-1,-1):\r\n this_step = track_history[j-1]\r\n this_step = this_step[1]\r\n track_history_direction.append(this_step)\r\n return track_history_direction[:-1]",
"def depthFirstSearch(problem):\n \"*** YOUR CODE HERE ***\"\n from util import Stack\n \n st = Stack()\n mapper = {}\n mapper[problem.getStartState()] = None\n\n st.push(problem.getStartState())\n while not(st.isEmpty()):\n vertex = st.pop()\n \n if (problem.isGoalState(vertex)):\n c = vertex\n l = []\n while mapper[c] != None:\n tup = mapper[c]\n l.append(tup[1])\n c = tup[0]\n l.reverse()\n print l\n return l\n\n else:\n neigh = problem.getSuccessors(vertex)\n # neigh.reverse()\n # neigh.sort()\n for child in neigh:\n if child[0] not in mapper:\n st.push(child[0])\n mapper[child[0]] = (vertex, child[1])\n # print mapper\n \n # visited = []\n # p = dfsRecursive(problem, problem.getStartState(), st, visited, [])\n # return p\n \n # pathfind = {}\n # st.push(problem.getStartState())\n # iterative approach:\n # while (not st.isEmpty()):\n # point = st.pop() # (x,y)\n # if problem.isGoalState(point):\n # # print point\n # print pathfind\n # # print visited\n # elif (not (point in visited)):\n # visited.append(point)\n # # print pathfind, '\\n'\n # print visited, '\\n'\n # for child in problem.getSuccessors(point):\n # st.push(child[0])\n # pathfind[child[0]] = point #this preemptively adds!\n # util.raiseNotDefined()",
"def bfs_shortest_path(graph, start, end):\n assert not graph.weighted, 'This method will not work for weighted graphs.'\n\n parents = {}\n distances = {start: 0}\n\n queue = deque([start])\n while queue:\n node = queue.popleft()\n for next_node in (graph.adj[node] - distances.keys()):\n parents[next_node] = node\n distances[next_node] = distances[node] + 1\n if next_node == end:\n return backtrace_path(start, end, parents)\n queue.append(next_node)\n\n return None",
"def dfs(G,u,parent,ap,depth,low,bridges):\r\n\tchildren = 0\r\n\tfor v in G[u]:\r\n\t\tif depth[v] ==-1:\r\n\t\t\tdepth[v] = low[v] = depth[u]+1\r\n\t\t\tparent[v] = u\r\n\t\t\tchildren+=1\r\n\t\t\tdfs(G,v,parent,ap,depth,low,bridges)\r\n\t\t\tlow[u] = min(low[u],low[v])\r\n\t\t\tif parent[u] == -1 and children > 1:\r\n\t\t\t\tap[u] = 1\r\n\t\t\tif parent[u] != -1 and low[v] >= depth[u]:\r\n\t\t\t\tap[u] = 1\r\n\t\t\tif low[v] > depth[u]:\r\n\t\t\t\tbridges.append((u,v))\r\n\t\telif depth[v] < depth[u] and parent[u]!=v:\r\n\t\t\tlow[u] = min(low[u],depth[v])\r\n\treturn",
"def breadthFirstSearch(problem):\n \"*** YOUR CODE HERE ***\"\n class Node:\n def __init__(self, state, parent, action, pathCost):\n self.state = state\n self.parent = parent\n self.action = action\n self.pathCost = pathCost\n\n def solution(self):\n path = list()\n tempNode = self\n while tempNode.state != problem.getStartState():\n path.insert(0, tempNode.action)\n tempNode = tempNode.parent\n return path\n\n\n\n\n def childNode(successor, parent, action, stepCost):\n pathCost = parent.pathCost + stepCost\n child = Node(successor, parent, action, pathCost)\n return child\n\n initialNode = Node(problem.getStartState(), None, None, 0)\n if problem.isGoalState(initialNode.state):\n return initialNode.solution()\n\n frontier = util.Queue() #bfs uses a queue\n frontier.push(initialNode)\n explored = set()\n\n while not frontier.isEmpty() :\n nextNode = frontier.pop() #extract from the start of the queue\n explored.add(nextNode.state)\n for successor, action, stepCost in problem.getSuccessors(nextNode.state):\n child = childNode(successor, nextNode, action, stepCost)\n if child.state not in explored and child not in frontier.list:\n if problem.isGoalState(child.state):\n return child.solution()\n frontier.push(child)\n return []\n util.raiseNotDefined()",
"def bfs_shortest_path(graph: dict=g2, start: str = \"1\", goal: str = \"4\") -> list:\n visited = []\n queue = [[start]]\n\n while queue:\n path = queue.pop(0)\n node = path[-1]\n if node not in visited:\n neighbours = graph[node]\n for neighbour in neighbours:\n new_path = path[:]\n new_path.append(neighbour)\n queue.append(new_path)\n if neighbour == goal:\n return new_path\n visited.append(node)\n # No path\n return [\"No Path\"]",
"def bfs(self, starting_vertex, destination_vertex):\n \n def populate_parents():\n parents = {\n # '1': [],\n # '2': [],\n # '3': [],\n }\n\n for index, (k, v) in enumerate(self.vertices.items()):\n parents[k] = []\n\n queue = Q()\n visited = []\n\n queue.add(starting_vertex)\n visited.append(starting_vertex)\n \n while len(queue):\n node = queue.pop()\n\n for child in self.vertices[node]:\n if child not in visited:\n queue.add(child)\n visited.append(child)\n parents[child].append(node)\n\n return parents\n\n parents = populate_parents()\n path = []\n current = destination_vertex\n path.append(destination_vertex)\n\n while len(parents[current]):\n parent = parents[current][0]\n path.append(parent)\n current = parent\n\n path.reverse()\n\n return path",
"def bfs_iterative(graph,start):\n\tvisited = set()\n\twatched = set()\n\tnodes_queue = [start] # List that helps as queue\n\twatched.add(start)\n\t\n\twhile nodes_queue:\n\t\tcurrent_node = nodes_queue.pop(0)\n\n\t\tprint(\"visiting\",current_node)\n\t\tvisited.add(current_node)\n\t\t\n\t\tfor adjacent_node in graph[current_node]:\n\t\t\tif (adjacent_node not in watched) and (adjacent_node not in visited):\n\t\t\t\tnodes_queue.append(adjacent_node)\n\t\t\t\t#path.add(adjacent_node)",
"def dfs(self, starting_vertex, destination_vertex):\n \"\"\" LIFO\n Create a stack\n Create a set to store visited\n PUSH starting vertex into an array (STACK)\n While the STACK is NOT empty \n get((pop) first PATH vertex\n get Vertex from END of PATH\n check if NOT visited\n mark as visited\n check if vertex is destination_vertex\n If TRUE, return path \n PUSH path to ALL of neighbors\n make copy of current path\n add neighbor to path copy\n PUSH path copy\n \"\"\" \n s = Stack() # Create a stack\n s.push([starting_vertex]) # PUSH starting vertex into an array (STACK)\n visited = set() # Create a set to store visited\n\n while s.size() > 0: # While the STACK is NOT empty\n path = s.pop() # get(pop) first PATH vertex)\n v = path[-1] # get Vertex from END of PATH \n\n while v not in visited: # check if NOT visited\n visited.add(v) # mark as visited\n\n if v == destination_vertex: # check if vertex is destination_vertex\n return path # If TRUE, return path \n\n for n in self.get_neighbors(v): # PUSH path to ALL of neighbors\n path_c = path[:] # make copy of current path\n # path_c.extend([n]) # add neighbor to path copy\n path_c.append(n) # add neighbor to path copy\n s.push(path_c) # PUSH path copy",
"def bfs(graph, root, max_depth):\n ###TODO\n pass",
"def dfs(self, starting_vertex, destination_vertex): # great for if you know the start and end, like a maze with 1 entry/1 exit\n visited = set() # create an empty 'set' to store visisted vertex, set sorts \n\n s = Stack() # create an empty Stack\n s.push([starting_vertex]) # push the starting vertex to the top of the stack \n\n while s.size() > 0: # loop if the size is greater than 0\n path = s.pop() # pop off the top element of the stack and store \n v = path[-1] # store the vertex from the end of path\n\n if v == destination_vertex: # if v is equal to the destination_vertex\n return path # return the path \n if v not in visited: # if v has not been visited yet \n visited.add(v) # add v to the vistied set \n\n for neighbor in self.vertices[v]: # loop through the neighbors\n path_copy = list(path) # make a copy of the path \n path_copy.append(neighbor) # append each neighbor to the back of the path copy \n s.push(path_copy) # push the path copy to the Stack",
"def shortest_path(start, end):\n\n\tmoves = rubik.quarter_twists\n\n\t# Parent nodes: (Parent_State, move)\n\tstartParents = {}\n\tstartParents[start] = None # Start state has no parent\n\n\t# Parent nodes: (Parent_State, move)\n\tendParents = {}\n\tendParents[end] = None # End state has no parent\n\n\tstartFrontier = [] # Current frontier in start BFS\n\tendFrontier = [] # Current frontier in end BFS\n\n\tstartFrontier.append(start) # Add start state as first and only node to generate next frontier\n\tendFrontier.append(end) # Add end state as first and only node to generate next frontier\n\n\tif end in startParents:\n\t\treturn [] # Start == End : No moves required\n\n\t# We only have to search at most 14 levels in BFS\n\t# Two-way BFS therefore requires 7 concurrent levels from both states\n\tfor i in range(7):\n\n\t\tstartNextFrontier = [] # New empty set for new frontier to be discovered\n\t\tfor state in startFrontier: # Iterate through each rubiks state in this frontier\n\t\t\tfor move in moves: # Apply each move to this state\n\t\t\t\tnextState = rubik.perm_apply(move, state)\n\n\t\t\t\t# Makes sure this new state is not already in the Graph\n\t\t\t\t# This skips nodes that were already permuted in another path,\n\t\t\t\t# essentially trimming the Graph's leaves\n\t\t\t\tif nextState not in startParents:\n\t\t\t\t\tstartParents[nextState] = (state, move) # Store this state's parent + move\n\t\t\t\t\tstartNextFrontier.append(nextState) # Create a node in the next frontier\n\t\t\t\t\n\t\t\t\t# Intersect of both Graphs, Intermediate state of path found\n\t\t\t\tif nextState in endParents:\n\t\t\t\t\treturn solution(startParents, endParents, nextState)\n\n\t\tstartFrontier = startNextFrontier # Make the next frontier the current one\n\n\t\tendNextFrontier = [] # New empty set for new frontier to be discovered\n\t\tfor state in endFrontier: # Iterate through each rubiks state in this frontier\n\t\t\tfor move in moves: # Apply each move to this state\n\t\t\t\tnextState = rubik.perm_apply(move, state)\n\n\t\t\t\t# Makes sure this new state is not already in the Graph\n\t\t\t\t# This skips nodes that were already permuted in another path,\n\t\t\t\t# essentially trimming the Graph's leaves\n\t\t\t\tif nextState not in endParents:\n\t\t\t\t\tendParents[nextState] = (state, move) # Store this state's parent + move\n\t\t\t\t\tendNextFrontier.append(nextState) # Create a node in the next frontier\n\t\t\t\t\n\t\t\t\t# Intersect of both Graphs, Intermediate state of path found\n\t\t\t\tif nextState in startParents:\n\t\t\t\t\treturn solution(startParents, endParents, nextState)\n\n\t\tendFrontier = endNextFrontier # Make the next frontier the current one\n\n\treturn None"
] | [
"0.69503415",
"0.6943903",
"0.6853803",
"0.68397886",
"0.6801403",
"0.675973",
"0.6747544",
"0.6738333",
"0.6723769",
"0.6721417",
"0.6716232",
"0.67153084",
"0.67074156",
"0.6693994",
"0.66901886",
"0.6654073",
"0.66488194",
"0.66420025",
"0.66287",
"0.6627181",
"0.6569387",
"0.65582085",
"0.65582085",
"0.65557945",
"0.6531472",
"0.6512358",
"0.64923424",
"0.6488557",
"0.6482157",
"0.6453584"
] | 0.6980737 | 0 |
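All of the search variants in the list above reconstruct their answer the same way: record a parent (or parent, action) link when a state is first discovered, then walk the links backwards from the goal. A minimal self-contained sketch of that pattern over a plain dict-of-lists graph; the Stack/Queue classes and the SearchProblem interface used by the snippets are assumptions of those snippets, not defined here.

from collections import deque

def bfs_path(graph, start, goal):
    parents = {start: None}          # node -> parent; doubles as the visited set
    queue = deque([start])
    while queue:
        node = queue.popleft()
        if node == goal:
            path = []
            while node is not None:  # walk the parent links back to the start
                path.append(node)
                node = parents[node]
            return path[::-1]
        for child in graph.get(node, []):
            if child not in parents:
                parents[child] = node
                queue.append(child)
    return None

# bfs_path({'a': ['b', 'c'], 'b': ['d'], 'c': ['d'], 'd': []}, 'a', 'd')
# returns ['a', 'b', 'd'].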
Read the next expression from src, a Buffer of tokens. >>> lines = ['(+ 1', '(+ 23 4)) ('] >>> src = Buffer(tokenize_lines(lines)) >>> print(scheme_read(src)) (+ 1 (+ 23 4)) | def scheme_read(src):
    if src.current() is None:
        raise EOFError
    val = src.pop()  # consume the next token; 'val' was used below but never assigned
    if val == 'nil':
        return nil
    elif val not in DELIMITERS:  # ( ) ' .
        return val
    elif val == '(':
        return read_tail(src)
    else:
        raise SyntaxError('unexpected token: {0}'.format(val))
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def parse(source_code):\n tokens = tokenize(source_code)\n return read(tokens)",
"def read_from_tokens(tokens):\n if len(tokens) == 0:\n raise SyntaxError(\"unexpected EOF while reading\")\n token = tokens.pop(0)\n if \"(\" == token:\n res = []\n while tokens[0] != \")\":\n res.append(read_from_tokens(tokens))\n tokens.pop(0) # pop off ')'\n return res\n elif \")\" == token:\n raise SyntaxError(\"unexpected )\")\n else:\n return atom(token)",
"def read(tokens):\n if len(tokens) == 0:\n raise UnexpectedEndOfInput()\n token = tokens.pop(0)\n\n if token == '(':\n parsed = []\n if len(tokens) == 0:\n raise UnexpectedEndOfInput()\n while tokens[0] != ')':\n parsed.append(read(tokens))\n if len(tokens) == 0:\n raise UnexpectedEndOfInput()\n tokens.pop(0) # pop off ')'\n return parsed\n elif token == ')':\n raise UnexpectedRightParen()\n else:\n return atom(token)",
"def read_tail(src):\n if src.current() is None:\n raise SyntaxError('unexpected end of file')\n if src.current() == ')':\n src.pop()\n return nil\n first = scheme_read(src)\n rest = read_tail(src)\n return Pair(first, rest)",
"def SimpleEval(source):\n itertokens = generate_tokens(StringIO(source).readline)\n next = (token[1] for token in itertokens if token[0] is not NL).next\n res = atom(next, next())\n if next():\n raise SyntaxError('bogus data after expression')\n return res",
"def get_stream(ast):\n while True:\n yield evaluate_program(ast)",
"def parse(program):\n return read_from_tokens(tokenize(program))",
"def readin(pythonfilename):\n with open(pythonfilename) as f:\n code = f.read()\n FuncLister().visit(ast.parse(code))",
"def parse(path):\n data = gzip.open(path, 'rb')\n for byte_line in data:\n yield eval(byte_line) # return generator instance to save memory",
"def parse_file(self, path):\r\n return self._parse(antlr3.ANTLRFileStream(path))",
"def read_token(stream):\n strip_whitespace(stream)\n\n if stream.eof():\n raise VeryUnexpectedEndException(stream, \"Encountered EOF while scanning for token\")\n\n pos = stream.pos()\n while not stream.eof() and stream.peek() in VALID_TOKEN_CHARS:\n stream.consume()\n\n return stream.slice(pos)",
"def evaluate_file(filename, env=None):\n if not env:\n env = Environment(builtin_env)\n with open(filename, 'r') as f:\n source = \"\\n\".join(f.readlines())\n tokens = tokenize(source)\n tree = None\n if MULTIEXP_ENABLED:\n trees = []\n while tokens:\n trees.append(parse(tokens, False))\n tree = [\"begin\"] + trees\n else:\n tree = parse(tokens)\n return evaluate(tree, env)",
"def read_next_code_chunk(self) -> List[str]:\n with open(self._filepath) as f:\n for line in f:\n yield [line.strip()]",
"def parse_input(self, input):\r\n return self._parse(antlr3.ANTLRInputStream(input))",
"def read(in_file):\n require_type(is_input(in_file), 'the parameter of read must be an input file')\n txt = in_file.readline().lower()\n while txt == '\\n':\n txt = in_file.readline().lower()\n return txt.strip() if txt else Symbol('#!eof')",
"def _read_grammar(filename):\r\n with open(filename, 'r') as file:\r\n data = file.read()\r\n\r\n return data",
"def __parse_next(self, buffer):\n\t\ttoken = buffer.read(1)\n\t\t\n\t\t_tell = buffer.tell()\n\t\t# Is it an operator?\n\t\tif token == \"/\":\n\t\t\tnum, var = self.__parse_operator(buffer)\n\t\t\tif num is None:\n\t\t\t\tbuffer.seek(_tell - 1)\n\t\t\t\treturn \"$\"\n\t\t\t\n\t\t\tif isinstance(var, str):\n\t\t\t\treturn var\n\t\t\t\n\t\t\tret = (var / num)\n\t\t\tif isinstance(ret, Range):\n\t\t\t\tret = ret.min # XXX is this right?\n\t\t\tif int(ret) != ret:\n\t\t\t\treturn \"%.1f\" % ret\n\t\t\treturn str(int(ret))\n\t\t\n\t\tif token == \"*\":\n\t\t\tnum, var = self.__parse_operator(buffer)\n\t\t\tret = var * num\n\t\t\tif isinstance(ret, float):\n\t\t\t\tret = int(round(ret))\n\t\t\treturn str(ret)\n\t\t\n\t\t# Is it a conditional?\n\t\tif token == \"?\":\n\t\t\tbuffer.seek(-1, SEEK_CUR)\n\t\t\tblocks = self.__parse_conditional(buffer)\n\t\t\t\n\t\t\t# Prepare the condition cache\n\t\t\t# This shouldn't be done here, but anyway...\n\t\t\tfor condition, value in blocks:\n\t\t\t\tcondition.evaluate({})\n\t\t\t\tself.conditions.extend(condition.identifiers)\n\t\t\t\n\t\t\t# blocks is a list of (condition, value) tuples\n\t\t\t# We evaluate the paperdoll against each of them\n\t\t\t# and return when we get a hit\n\t\t\t\n\t\t\tfor condition, value in blocks:\n\t\t\t\tif condition.evaluate(self.paperdoll):\n\t\t\t\t\treturn value\n\t\t\t\n\t\t\treturn\n\t\t\n\t\tif token == \"<\":\n\t\t\tbuffer.seek(-1, SEEK_CUR)\n\t\t\tidentifier = self.__read_block(buffer, startchr=\"<\", endchr=\">\")\n\t\t\ttry:\n\t\t\t\tvalue = self.get_variable(identifier)\n\t\t\t\treturn SpellString(value).format(self.obj, proxy=self.proxy)\n\t\t\texcept VariableNotFound:\n\t\t\t\treturn \"<%s>\" % (identifier)\n\t\t\n\t\tif token == \"{\":\n\t\t\tbuffer.seek(-1, SEEK_CUR)\n\t\t\tblock = self.__read_block(buffer, startchr=\"{\", endchr=\"}\")\n\t\t\t\n\t\t\t# Attempt to read decimals formatting\n\t\t\tdecimals = 0\n\t\t\ttoken = buffer.read(1)\n\t\t\tif token == \".\":\n\t\t\t\tdecimals = self.__read_number(buffer)\n\t\t\telif token:\n\t\t\t\t# Step one char back, only if we are not at the end\n\t\t\t\tbuffer.seek(-1, SEEK_CUR)\n\t\t\t\n\t\t\tblock = SpellString(block).format(self.obj, proxy=self.proxy, braced=True)\n\t\t\ttry: # FIXME\n\t\t\t\tblock = eval(block)\n\t\t\t\tif decimals:\n\t\t\t\t\tblock = round(block, decimals)\n\t\t\t\treturn \"%g\" % (block)\n\t\t\texcept Exception:\n\t\t\t\treturn \"[%s]\" % (block)\n\t\t\n\t\t# At this point, we need to check for functions and variables\n\t\t# but only if we don't already have a digit\n\t\tif not token.isdigit():\n\t\t\t_tell = buffer.tell()\n\t\t\tbuffer.seek(-1, SEEK_CUR)\n\t\t\tidentifier = self.__read_alpha(buffer)\n\t\t\t\n\t\t\tif identifier.lower() in FUNCTIONS:\n\t\t\t\targs = self.__parse_function_args(buffer)\n\t\t\t\treturn self.formatter.format_function(identifier, args)\n\t\t\t\n\t\t\tif identifier.lower() in PAPERDOLL_VALUES:\n\t\t\t\treturn self.formatter.format_paperdoll(identifier)\n\t\t\t\n\t\t\t\n\t\t\t# We didn't find any valid identifier\n\t\t\tif not identifier:\n\t\t\t\treturn \"$\"\n\t\t\t\n\t\t\t# Nothing left to check for but booleans\n\t\t\t# The values get messed with the identifier however, so we need to\n\t\t\t# look at only the first char\n\t\t\tif identifier[0] in BOOLEANS:\n\t\t\t\tidentifier = identifier[0]\n\t\t\t\tbuffer.seek(_tell)\n\t\t\t\tvalues = self.__parse_boolean(buffer)\n\t\t\t\treturn self.formatter.format_boolean(token, values)\n\t\t\n\t\t# It's probably a variable then\n\t\tbuffer.seek(-1, SEEK_CUR)\n\t\tspell, 
identifier, effect = self.__parse_macro(buffer)\n\t\t\n\t\tif identifier:\n\t\t\tspell = int(spell or 0)\n\t\t\teffect = int(effect or 1)\n\t\t\t\n\t\t\tvalue = self.formatter.format_macro(spell, identifier, effect)\n\t\t\tself.formatter.last_value = value\n\t\t\treturn str(value)\n\t\telse:\n\t\t\treturn \"$\"\n\t\t\n\t\tif not token or token.isspace():\n\t\t\treturn token\n\t\t\n\t\treturn token",
"def job_reader(path) -> Generator[ParsedActionType, None, None]:\n try:\n with open(path, \"r\") as f:\n parser = Parser()\n for line in f:\n result = parser.process_line(line)\n if result is not None:\n yield result\n except IOError as err:\n print(\"Error opening/reading from file '{0}': {1}\"\n .format(err.filename, err.strerror))",
"def read(filepath_or_buffer: FilePathOrBuffer) -> Grid:\n with _handle_buf(filepath_or_buffer) as buf:\n return ZincParser(ZincTokenizer(buf)).parse()",
"def parse(expr, filename='<unknown>', mode='exec'):\r\n return compile(expr, filename, mode, PyCF_ONLY_AST)",
"def load(f):\n while True:\n c = f.read(1)\n if len(c) == 1:\n msg_len = _read_int(f, already_read=c)\n msg_str = f.read(msg_len)\n if len(msg_str) < msg_len:\n raise ValueError(\"Unexpected EOF while parsing message\")\n yield javascript.loads(msg_str.decode())\n else:\n break",
"def raw_tokenize(src: str) -> Iterable[RawToken]:\n # Raw token handling; there is a later semantic mapping stage which\n # annotates atoms for the special handling of keywords and numbers.\n # We treat tokenization as an explicit state machine.\n # State transitions emit the previous block along with the previous state.\n state, start = None, 0\n\n for index, character in enumerate(src):\n next_state = None\n major_category = unicodedata.category(character) + character\n\n for (from_state, category_match), to_state in STATE_MACHINE.items():\n if (\n from_state == state and\n major_category.startswith(category_match)\n ):\n next_state = to_state\n break\n\n if next_state is None:\n raise ParseError(\n \"Unexpected '{0!r}'\".format(character),\n (index, index + 1),\n )\n\n if next_state != state:\n if start != index:\n assert state is not None\n\n yield RawToken(\n kind=state,\n value=src[start:index],\n location=(start, index),\n )\n start = index\n state = next_state\n\n if start != len(src):\n assert state is not None\n\n yield RawToken(\n kind=state,\n value=src[start:],\n location=(start, index + 1),\n )",
"def read_from_readline_interface(self, readline, filename=None, compat_mode=False):\n # Todo: Compat mode arg handling could be cleaned up in this method and class.\n if compat_mode:\n self.compat_mode = compat_mode\n tok_generator = call_tokenize(readline)\n\n self.token_list = []\n nesting_level = 0\n lower_nest_level = False\n for tok in tok_generator:\n if lower_nest_level:\n nesting_level -= 1\n lower_nest_level = False\n if tok[1] in self.nest_open:\n nesting_level += 1\n elif tok[1] in self.nest_close:\n lower_nest_level = True # Lower for next token.\n\n self.token_list.append(Token(tok, nesting_level=nesting_level,\n filename=filename, compat_mode=self.compat_mode))",
"def _parse_input(text, cwd=None):\n lexer = blackbirdLexer(antlr4.InputStream(text))\n stream = antlr4.CommonTokenStream(lexer)\n parser = blackbirdParser(stream)\n\n tree = parser.start()\n\n bb = BlackbirdListener(cwd=cwd)\n walker = antlr4.ParseTreeWalker()\n walker.walk(bb, tree)\n return bb.program",
"def readline(self): \n\t\tif not self._input: raise PlumberExceptions.PipeTypeException(self)\n\t\tif self.eof(): return None\n\t\tret = \"\"\n\t\twhile not self.eof():\n\t\t\tbuf = self.read()\n\t\t\tif not buf:\n\t\t\t\tif not self.eof():\n\t\t\t\t\tself._state.unread(buf)\n\t\t\t\t\treturn \"\"\n\t\t\t\telse:\n\t\t\t\t\treturn None\n\t\t\tnl = self._nl_pattern.search(buf)\n\t\t\tif nl:\n\t\t\t\tret = ret + buf[:nl.span()[1]]\n\t\t\t\tself.unread(buf[nl.span()[1]:])\n\t\t\t\treturn ret\n\t\t\telse:\n\t\t\t\tret = ret + buf\n\t\treturn ret",
"def tokenize(exp: str) -> List[tokens.Token]:\r\n\r\n def is_name_char(char: str) -> bool:\r\n # only check after number token, safe to use isnumeric\r\n return char.isalpha() or char.isnumeric() or (char in NAME_CHAR_SPECIALS)\r\n\r\n def is_op(char: str) -> bool:\r\n return (char in BINOP_TABLE) or (char in UNARYOP_TABLE)\r\n\r\n def read_number(start: int, token_list: list) -> int:\r\n state_map = {\r\n 'start': ['int', 'fraction'],\r\n 'int': ['fraction', 'exponent', 'end'],\r\n 'fraction': ['exponent', 'end'],\r\n 'exponent': ['exponent_value'],\r\n 'exponent_value': ['end']\r\n }\r\n\r\n state = 'start'\r\n\r\n is_end = False\r\n i = start\r\n while not is_end and i < len(exp):\r\n char = exp[i]\r\n if char.isnumeric():\r\n if state == 'start':\r\n state = 'int'\r\n elif state == 'exponent':\r\n state = 'exponent_value'\r\n\r\n i += 1\r\n elif char == Separators.SEP_DOT:\r\n if 'fraction' in state_map[state]:\r\n state = 'fraction'\r\n i += 1\r\n else:\r\n raise ParsingException(\"invalid '.'\", i)\r\n elif char.isalpha():\r\n if char == 'e' or char == 'E':\r\n if 'exponent' in state_map[state]:\r\n state = 'exponent'\r\n i += 1\r\n else:\r\n raise ParsingException(\"invalid 'e'\", i)\r\n else:\r\n raise ParsingException(f\"invalid character '{char}'\", i)\r\n elif char == UnaryOperators.OP_NEGATIVE or char == UnaryOperators.OP_POSITIVE:\r\n if 'exponent_value' in state_map[state]:\r\n state = 'exponent_value'\r\n i += 1\r\n else:\r\n is_end = True\r\n else:\r\n is_end = True\r\n\r\n if 'end' in state_map[state]:\r\n content = exp[start : i]\r\n if state == 'int':\r\n num = int(content)\r\n else:\r\n num = float(content)\r\n token_list.append(tokens.TokenNumber(num, pos=start))\r\n\r\n return i\r\n else:\r\n if is_end:\r\n raise ParsingException(f\"invalid character '{exp[i - 1]}'\", i - 1)\r\n else:\r\n raise ParsingException(f\"invalid character '{exp[i]}'\", i)\r\n\r\n def read_name(start: int, token_list: list) -> int:\r\n i = start\r\n while i < len(exp) and is_name_char(exp[i]):\r\n i += 1\r\n\r\n token = tokens.TokenName(exp[start : i], pos=start)\r\n token_list.append(token)\r\n\r\n return i\r\n\r\n if not isinstance(exp, str):\r\n raise TypeError(\"invalid exp\")\r\n\r\n if exp == '':\r\n raise ValueError(\"exp is empty\")\r\n\r\n token_list: List[tokens.Token] = []\r\n\r\n i = 0\r\n while i < len(exp):\r\n char = exp[i]\r\n\r\n if is_op(char) or char in VALID_SEPARATORS:\r\n token_list.append(tokens.TokenSymbol(char, pos=i))\r\n i += 1\r\n elif char == Separators.SEP_LEFT_BRACKET:\r\n token_list.append(tokens.TokenOpenBracket(pos=i))\r\n i += 1\r\n elif char == Separators.SEP_RIGHT_BRACKET:\r\n token_list.append(tokens.TokenCloseBracket(pos=i))\r\n i += 1\r\n elif char == Separators.SEP_DOT or char.isnumeric():\r\n i = read_number(i, token_list)\r\n elif char == ' ':\r\n i += 1\r\n elif is_name_char(char):\r\n i = read_name(i, token_list)\r\n else:\r\n raise ParsingException(f\"invalid character '{char}'\", i)\r\n\r\n return token_list",
"def test_input_stream(self):\n file = StringIO(\"\"\"\n @simple-var: 1;\n \"\"\")\n\n self.lexer.input(file)\n\n token = self.lexer.token()\n self.assertEqual('@simple-var', token.value)",
"def test_evaluate():\n # Create a lexer instance with rules and text loaded\n lexer = lex._lexer(\n [lex_bases.rule(\"JUMP_LINE\", r\"\\n\"), lex_bases.rule(\"TEST\", r\"test\")], []\n )._load_text(\"test\")\n\n # Evalueate the loaded text and compare\n assert lexer.evaluate() == [lex_bases.token(\"TEST\", \"test\")]",
"def lexf(path) -> \"(lib.Token,)\":\n with open(path, 'r') as source:\n for line in source:\n yield from lexs(line)",
"def read_file(path: str) -> Iterator[Problem]:\n with open(path) as f:\n txt = f.read()\n\n for encoded_problem in txt.split('\\n\\n'):\n yield parse_alpha_encoding(encoded_problem)"
] | [
"0.6343479",
"0.6047994",
"0.5913599",
"0.5844269",
"0.5784189",
"0.5618982",
"0.54318804",
"0.5407249",
"0.53160536",
"0.525312",
"0.52294385",
"0.521524",
"0.5187013",
"0.51560414",
"0.5153102",
"0.51475793",
"0.51185143",
"0.5095528",
"0.50852764",
"0.5074779",
"0.5058327",
"0.5027548",
"0.49977908",
"0.4989564",
"0.49565807",
"0.4951618",
"0.49478278",
"0.4939583",
"0.49335927",
"0.49289212"
] | 0.71613556 | 0 |
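scheme_read above consumes one token per call and recurses into read_tail at every '('. A minimal sketch of the same two-function shape over a plain Python list of tokens; Buffer, tokenize_lines, nil and Pair belong to the record's own reader and are assumed there, with plain Python lists standing in for the Pair chain here.

def read_expr(tokens):
    tok = tokens.pop(0)              # consume exactly one token
    if tok == '(':
        return read_list_tail(tokens)
    if tok == ')':
        raise SyntaxError('unexpected )')
    return tok                       # an atom

def read_list_tail(tokens):
    if not tokens:
        raise SyntaxError('unexpected end of input')
    if tokens[0] == ')':
        tokens.pop(0)                # consume the closing paren
        return []
    first = read_expr(tokens)        # may itself recurse into a nested list
    return [first] + read_list_tail(tokens)

# read_expr(['(', '+', '1', '(', '+', '23', '4', ')', ')'])
# returns ['+', '1', ['+', '23', '4']].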
Return the remainder of a list in src, starting before an element or ). >>> read_tail(Buffer(tokenize_lines([')']))) nil >>> read_tail(Buffer(tokenize_lines(['2 3)']))) Pair(2, Pair(3, nil)) >>> read_tail(Buffer(tokenize_lines(['2 (3 4))']))) Pair(2, Pair(Pair(3, Pair(4, nil)), nil)) | def read_tail(src):
if src.current() is None:
raise SyntaxError('unexpected end of file')
if src.current() == ')':
src.pop()
return nil
first = scheme_read(src)
rest = read_tail(src)
return Pair(first, rest) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def take(self, line, head, tail):\n data = None\n rest = line\n begin = line.find(head)\n if begin != -1:\n line = line[begin + len(head):]\n end = line.find(tail)\n if end != -1:\n data = line[:end]\n rest = line[end + len(tail):]\n return (data, rest)",
"def deconstruct_tail(self):\n ret = []\n for ii in range(len(self.__data)):\n op = self.__data[-ii - 1].deconstruct()\n if not op:\n return (self.__data[:len(self.__data) - ii], ret)\n ret = op + ret\n return ([], ret)",
"def tailParser(inLoc):\n f = open(inLoc, 'r')\n tails = f.readlines()\n f.close()\n\n tailList = []\n\n for i in range(len(tails)):\n if i==0: continue #skips the header\n line = tails[i].rstrip().split(',')\n tailList.append(line)\n return tailList",
"def strip_tail(sequence, values):\n return list(reversed(list(strip_head(reversed(sequence), values))))",
"def list_tail(term):\n tail = term\n while _is_list_maybe(tail):\n tail = tail.args[1]\n return tail",
"def _trim_end(\n self, templated_str: str, target_end: str = \"head\"\n ) -> Tuple[\"IntermediateFileSlice\", List[TemplatedFileSlice]]:\n target_idx = 0 if target_end == \"head\" else -1\n terminator_types = (\"block_start\") if target_end == \"head\" else (\"block_end\")\n main_source_slice = self.source_slice\n main_templated_slice = self.templated_slice\n slice_buffer = self.slice_buffer\n\n end_buffer = []\n\n # Yield any leading literals, comments or blocks.\n while len(slice_buffer) > 0 and slice_buffer[target_idx].slice_type in (\n \"literal\",\n \"block_start\",\n \"block_end\",\n \"comment\",\n ):\n focus = slice_buffer[target_idx]\n templater_logger.debug(\" %s Focus: %s\", target_end, focus)\n # Is it a zero length item?\n if focus.slice_type in (\"block_start\", \"block_end\", \"comment\"):\n # Only add the length in the source space.\n templated_len = 0\n else:\n # Assume it's a literal, check the literal actually matches.\n templated_len = len(focus.raw)\n if target_end == \"head\":\n check_slice = offset_slice(\n main_templated_slice.start,\n templated_len,\n )\n else:\n check_slice = slice(\n main_templated_slice.stop - templated_len,\n main_templated_slice.stop,\n )\n\n if templated_str[check_slice] != focus.raw:\n # It doesn't match, we can't use it. break\n templater_logger.debug(\" Nope\")\n break\n\n # If it does match, set up the new slices\n if target_end == \"head\":\n division = (\n main_source_slice.start + len(focus.raw),\n main_templated_slice.start + templated_len,\n )\n new_slice = TemplatedFileSlice(\n focus.slice_type,\n slice(main_source_slice.start, division[0]),\n slice(main_templated_slice.start, division[1]),\n )\n end_buffer.append(new_slice)\n main_source_slice = slice(division[0], main_source_slice.stop)\n main_templated_slice = slice(division[1], main_templated_slice.stop)\n else:\n division = (\n main_source_slice.stop - len(focus.raw),\n main_templated_slice.stop - templated_len,\n )\n new_slice = TemplatedFileSlice(\n focus.slice_type,\n slice(division[0], main_source_slice.stop),\n slice(division[1], main_templated_slice.stop),\n )\n end_buffer.insert(0, new_slice)\n main_source_slice = slice(main_source_slice.start, division[0])\n main_templated_slice = slice(main_templated_slice.start, division[1])\n\n slice_buffer.pop(target_idx)\n if focus.slice_type in terminator_types:\n break\n # Return a new Intermediate slice and the buffer.\n # NB: Don't check size of slice buffer here. We can do that later.\n new_intermediate = self.__class__(\n \"compound\", main_source_slice, main_templated_slice, slice_buffer\n )\n return new_intermediate, end_buffer",
"def scheme_read(src):\n if src.current() is None:\n raise EOFError\n if val == 'nil':\n return nil\n elif val not in DELIMITERS: # ( ) ' .\n return val\n elif val == '(':\n return read_tail(src)\n else:\n raise SyntaxError('unexpected token: {0}'.format(val))",
"def take_rest():\n def run(chunks, chunk, last):\n if last:\n return ParserResult.from_done(_chunks_merge((chunk, chunks)), chunk[:0], last)\n else:\n return ParserResult.from_partial(Parser(run, (chunk, chunks)))\n return Parser(run, tuple())",
"def line_end(self):\n curr = self\n while curr.next:\n curr = curr.next\n return curr",
"def split_pop(lines, pattern, count):\n if (len(parts := re.split(pattern, lines[0])) - 2) == count:\n _ = lines.pop(0)\n return parts[1:-1]\n return [None] * count",
"def __rd_tpl_tail(self, fp):\n tlist = []\n tail = \"\"\n while True:\n line = fp.readline()\n if line == \"\":\n break\n tlist.append(line)\n for line in reversed(tlist):\n if line.strip() == \"\":\n tlist.pop()\n else:\n break\n for line in tlist:\n tail += line\n self.template['tail'] = tail\n return",
"def tail(self, xes):\n return xes[1:]",
"def trim_ends(\n self, templated_str: str\n ) -> Tuple[\n List[TemplatedFileSlice], \"IntermediateFileSlice\", List[TemplatedFileSlice]\n ]:\n # Trim start:\n new_slice, head_buffer = self._trim_end(\n templated_str=templated_str, target_end=\"head\"\n )\n # Trim end:\n new_slice, tail_buffer = new_slice._trim_end(\n templated_str=templated_str, target_end=\"tail\"\n )\n # Return\n return head_buffer, new_slice, tail_buffer",
"def scrap(consensus, end_of_field):\n if b'\\n' not in consensus:\n return consensus, None\n\n line, remaining = consensus.split(b'\\n', 1)\n if end_of_field(line):\n return consensus, None\n return remaining, line",
"def _first_right_hand_split(\n line: Line,\n omit: Collection[LeafID] = (),\n) -> RHSResult:\n tail_leaves: List[Leaf] = []\n body_leaves: List[Leaf] = []\n head_leaves: List[Leaf] = []\n current_leaves = tail_leaves\n opening_bracket: Optional[Leaf] = None\n closing_bracket: Optional[Leaf] = None\n for leaf in reversed(line.leaves):\n if current_leaves is body_leaves:\n if leaf is opening_bracket:\n current_leaves = head_leaves if body_leaves else tail_leaves\n current_leaves.append(leaf)\n if current_leaves is tail_leaves:\n if leaf.type in CLOSING_BRACKETS and id(leaf) not in omit:\n opening_bracket = leaf.opening_bracket\n closing_bracket = leaf\n current_leaves = body_leaves\n if not (opening_bracket and closing_bracket and head_leaves):\n # If there is no opening or closing_bracket that means the split failed and\n # all content is in the tail. Otherwise, if `head_leaves` are empty, it means\n # the matching `opening_bracket` wasn't available on `line` anymore.\n raise CannotSplit(\"No brackets found\")\n\n tail_leaves.reverse()\n body_leaves.reverse()\n head_leaves.reverse()\n head = bracket_split_build_line(\n head_leaves, line, opening_bracket, component=_BracketSplitComponent.head\n )\n body = bracket_split_build_line(\n body_leaves, line, opening_bracket, component=_BracketSplitComponent.body\n )\n tail = bracket_split_build_line(\n tail_leaves, line, opening_bracket, component=_BracketSplitComponent.tail\n )\n bracket_split_succeeded_or_raise(head, body, tail)\n return RHSResult(head, body, tail, opening_bracket, closing_bracket)",
"def _chunk_end(c):\n end = None\n if isinstance(c, list):\n for e in c:\n if end is None or e.offset + e.length > end:\n end = e.offset + e.length\n else:\n end = c.offset + c.length\n return end",
"def get_list_after_paired(line):\n if find_pair(line):\n index = get_index(line)\n return line[index + 2: ]\n return []",
"def get_tail(self):\n return self._readahead.getvalue()",
"def tail(real_iter, n_th):\n if n_th <= 0:\n return []\n\n real_list = list(real_iter)\n start = len(real_list)-n_th if n_th < len(real_list) else 0\n return real_list[start:]",
"def remove_tail(col_lines):\n while len(col_lines[-1]) < 2:\n col_lines.pop()",
"def _last_directive(lang: LangDef, lines):\n directive = deque()\n expected_indent = float(\"+inf\")\n while lines:\n line = lines.pop()\n directive.appendleft(line)\n indent = measure_indentation(line)\n m = line.match(lang.directive)\n if m:\n if indent <= expected_indent:\n return lines, m, directive\n break # pragma: no cover # coverage.py bug\n if not line.isspace():\n expected_indent = min(expected_indent, indent - 3)\n if expected_indent < 0:\n break # No need to keep looping\n lines.extend(directive)\n return lines, None, None",
"def tl(lst):\n return lst[1:] if len(lst) > 1 else None",
"def testNotNullTail(self):\n myVar=Var()\n myList=PList([1,2,3],tail=Atom(\"test\"))\n self.assertEqual(str(myList),'[1,2,3|test]',\"Failed conversion to str\")\n myList=PList([1,2,3],tail=myVar)\n self.assertEqual(str(myList), '[1,2,3|_]',\"Failed conversion to str\")\n myCompound=Compound('f',\"ciao\",1)\n Compound('=',myList,PList([1,2,3],tail=myCompound)).post_goal()\n resume()\n self.assertEqual(str(myVar.value()), 'f(\"ciao\",1)',\"Failed unit test\")\n #test getListTail\n self.assertEqual(myList.getListTail(), ([1,2,3],myCompound), \"Failed test of getListTail\")\n #test get index\n self.assertEqual(myList[-1], 3, \"Failed getitem with negative index\")\n self.assertEqual(myList[-3], 1, \"Failed getitem with negative index\")\n #test len see that tail is not considered\n self.assertEqual(len(myList),3,\"Failed len\")\n #Test iterHeadTail\n headList=[]\n tailList=[]\n for head,tail in myList.iterHeadTail():\n headList.append(head)\n tailList.append(tail)\n self.assertEqual(headList, [1,2,3], \"Incorrect head conversion generated by iterHeadTail\")\n referenceTailList=[PList([2,3],tail=myCompound),PList([3],tail=myCompound),myCompound]\n self.assertEqual(tailList,referenceTailList, \"Incorrect tail conversion generated by iterHeadTail\")",
"def feed(self, inline, endchar=None):\n # preserve the original line\n # for error messages\n if endchar is None:\n self.origline = inline\n inline = inline.lstrip()\n #\n outlist = []\n comma_needed = False\n found_comma = False\n while inline:\n # NOTE: this sort of operation would be quicker\n # with lists - but then can't use regexes\n thischar = inline[0]\n if thischar == '#':\n # reached a comment\n # end of the line...\n break\n #\n if thischar == endchar:\n return outlist, inline[1:]\n #\n if comma_needed:\n if thischar == ',':\n inline = inline[1:].lstrip()\n comma_needed = False\n found_comma = True\n continue\n raise BadLineError('Line is badly built :\\n%s' % self.origline)\n #\n try:\n # the character that marks the end of the list\n listend = self.liststart[thischar]\n except KeyError:\n pass\n else:\n if not self.recursive and endchar is not None:\n raise BadLineError('Line is badly built :\\n%s' % self.origline)\n newlist, inline = self.feed(inline[1:], endchar=listend)\n outlist.append(newlist)\n inline = inline.lstrip()\n comma_needed = True\n continue\n #\n if thischar in self.quotes:\n # this might raise an error\n # FIXME: trap the error and raise a more appropriate one ?\n element, inline = unquote(inline, fullquote=False, \n retain=self.retain)\n inline = inline.lstrip()\n outlist.append(element)\n comma_needed = True\n continue\n #\n # must be an unquoted element\n mat = unquoted.match(inline)\n if mat is not None:\n # FIXME: if the regex was better we wouldn't need an rstrip\n element = mat.group(1).rstrip()\n # group 2 will be ``None`` if we reach the end of the line\n inline = mat.group(2) or ''\n outlist.append(element)\n comma_needed = True\n continue\n # or it's a badly built line\n raise BadLineError('Line is badly built :\\n%s' % self.origline)\n #\n # if we've been called recursively\n # we shouldn't have got this far\n if endchar is not None:\n raise BadLineError('Line is badly built :\\n%s' % self.origline)\n #\n if not found_comma:\n # if we didn't find a comma\n # the value could be a nested list\n if outlist:\n outlist = outlist[0]\n else:\n outlist = ''\n if self.force_list and not isinstance(outlist, list):\n if outlist:\n outlist = [outlist]\n else:\n outlist = []\n if not self.comment:\n if inline:\n raise CommentError('Comment not allowed :\\n%s' % self.origline)\n return outlist\n return outlist, inline",
"def tailLines(filename,linesback):\r\n\tavgcharsperline=150\r\n\t\r\n\tfile = open(filename,'r')\r\n\twhile 1:\r\n\t\ttry: file.seek(-1 * avgcharsperline * linesback,2)\r\n\t\texcept IOError: file.seek(0) \r\n\t\tif file.tell() == 0: atstart=1 \r\n\t\telse: atstart=0\r\n\t\tlines=file.read().split(\"\\n\")\r\n\t\tif (len(lines) > (linesback+1)) or atstart: break\r\n\t\t#The lines are bigger than we thought\r\n\t\tavgcharsperline=avgcharsperline * 1.3 #Inc avg for retry\r\n\tfile.close()\r\n\t\r\n\tif len(lines) > linesback: start=len(lines)-linesback -1\r\n\telse: start=0\r\n\treturn lines[start:len(lines)-1]",
"def tail_lines(fd, linesback=10):\n avgcharsperline = 75\n\n while True:\n try:\n fd.seek(-1 * avgcharsperline * linesback, 2)\n except IOError:\n fd.seek(0)\n\n if fd.tell() == 0:\n atstart = 1\n else:\n atstart = 0\n\n lines = fd.read().split(\"\\n\")\n if (len(lines) > (linesback+1)) or atstart:\n break\n\n avgcharsperline=avgcharsperline * 1.3\n\n if len(lines) > linesback:\n start = len(lines) - linesback - 1\n else:\n start = 0\n\n return lines[start:len(lines)-1]",
"def _trim_end(self, tokens: list[Token]) -> Block:\n i = last_token = self.end - 1\n while tokens[i].name in NON_CODING_TOKENS | {'DEDENT', 'NEWLINE'}:\n # if we find an indented comment inside our block, keep it\n if (\n tokens[i].name in {'NL', 'NEWLINE'} and\n tokens[i + 1].name == UNIMPORTANT_WS and\n len(tokens[i + 1].src) > self._initial_indent(tokens)\n ):\n break\n # otherwise we've found another line to remove\n elif tokens[i].name in {'NL', 'NEWLINE'}:\n last_token = i\n i -= 1\n return self._replace(end=last_token + 1)",
"def get_end_of_part(config_lines, start_line, indent=None):\n i = start_line\n end = len(config_lines)\n if i >= end:\n return end\n\n if is_blank(config_lines[i]):\n return None\n\n if indent is None:\n indent = get_indent(config_lines[i])\n\n i += 1\n has_seen_data = False\n while i < end:\n if is_blank(config_lines[i]):\n i += 1\n elif is_at_least_indented(config_lines[i], indent + 1):\n i += 1\n has_seen_data = True\n else:\n end = i\n\n if not has_seen_data:\n return None\n\n return end",
"def scanwhile(first, p):\n lines = [first]\n while True:\n line = lr.readline()\n if not line:\n break\n if p(line):\n lines.append(line)\n else:\n lr.push(line)\n break\n return lines",
"def get_next_line(self):\n return self.storage.pop(0).split(\"#\")[0].strip()"
] | [
"0.62023",
"0.5961643",
"0.5870388",
"0.5625115",
"0.559991",
"0.55696696",
"0.55203354",
"0.5368489",
"0.5332481",
"0.5304666",
"0.5293615",
"0.5290877",
"0.5275637",
"0.5255011",
"0.52535564",
"0.51826555",
"0.5160872",
"0.5157932",
"0.51445717",
"0.5142943",
"0.51237637",
"0.51137",
"0.51094615",
"0.5107866",
"0.5083085",
"0.5055589",
"0.50555557",
"0.5023807",
"0.50234544",
"0.49909768"
] | 0.7980613 | 0 |
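What read_tail returns is a singly linked list built from one Pair cell per element, terminated by nil. A toy sketch of just that structure, with stand-ins for the record's Pair and nil (the real definitions are assumed to come from the surrounding scheme reader):

class Pair:
    # toy stand-in for the reader's Pair cell: a first element plus the rest
    def __init__(self, first, rest):
        self.first, self.rest = first, rest
    def __repr__(self):
        return 'Pair({0!r}, {1!r})'.format(self.first, self.rest)

nil = 'nil'  # stand-in sentinel that terminates every well-formed list

# read_tail over the tokens "2 3)" yields the Scheme list (2 3):
flat = Pair(2, Pair(3, nil))
# and over "2 (3 4))" the nested list becomes a Pair stored inside a cell:
nested = Pair(2, Pair(Pair(3, Pair(4, nil)), nil))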
Query FS_IMMUTABLE_FL This queries the `FS_IMMUTABLE_FL` flag on a specified file. Arguments fd File descriptor to operate on. Returns bool Whether the `FS_IMMUTABLE_FL` flag is set or not. Raises OSError If the underlying ioctl fails, a matching `OSError` will be raised. | def ioctl_get_immutable(fd: int):
if not isinstance(fd, int) or fd < 0:
raise ValueError()
flags = array.array('L', [0])
fcntl.ioctl(fd, FS_IOC_GETFLAGS, flags, True)
return bool(flags[0] & FS_IMMUTABLE_FL) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def ioctl_toggle_immutable(fd: int, set_to: bool):\n\n if not isinstance(fd, int) or fd < 0:\n raise ValueError()\n\n flags = array.array('L', [0])\n fcntl.ioctl(fd, FS_IOC_GETFLAGS, flags, True)\n if set_to:\n flags[0] |= FS_IMMUTABLE_FL\n else:\n flags[0] &= ~FS_IMMUTABLE_FL\n fcntl.ioctl(fd, FS_IOC_SETFLAGS, flags, False)",
"def Poll (self, fd, mask, cancel = None):\n if self.Disposed:\n return RaisedFuture (FutureCanceled ('Core is stopped'))\n\n assert fd >= 0, 'Invalid file descriptor: {}'.format (fd)\n\n file = self.files.get (fd)\n if file is None:\n file = PollAwaiter (fd, self.poller)\n self.files [fd] = file\n\n return file.Await (mask, cancel)",
"def check_fd(fd):\n try:\n os.fstat(fd)\n return True\n except OSError as error:\n if error.errno != errno.EBADF:\n raise\n return False",
"def is_file_immutable_unlink(path):\n\n x = vserver.get_file_attr(path)\n return x.has_key('iunlink') and x.has_key('immutable') and x['iunlink'] and x['immutable']",
"def dev_open(self):\n return (self._dev_open & self.um) != 0",
"def flags(self) -> Optional[int]:\n return self.get(\"/Ff\")",
"def seekable(self):\n # Not seekable, but we do support tell...\n return False",
"def f_supports_fast_access(self):\n return not self.f_is_empty()",
"def check_file_flag(file):\n return process_file_flag(file, None)",
"def flags(self):\n if self._flags is None:\n raise ValueError('Flags are not available since dataset '\n 'was opened with metadata only')\n return self._flags",
"def test_command_edit_info_boolean_flags():\n def f(inputfile):\n with tempfile.NamedTemporaryFile() as tmp:\n shutil.copy(inputfile, tmp.name)\n\n for flag in (\"write_protected\", \"synchronized\", \"cleaned\"):\n for true_value, false_value in ((\"1\", \"0\"),\n (\"yes\", \"no\"),\n (\"YES\", \"No\"),\n (\"true\", \"false\"),\n (\"tRuE\", \"FaLsE\")):\n wozardry.parse_args([\"edit\", \"-i\", \"%s:%s\" % (flag, true_value), tmp.name])\n with open(tmp.name, \"rb\") as tmpstream:\n woz = wozardry.WozDiskImage(tmpstream)\n assert woz.info[flag] == True\n wozardry.parse_args([\"edit\", \"-i\", \"%s:%s\" % (flag, false_value), tmp.name])\n with open(tmp.name, \"rb\") as tmpstream:\n woz = wozardry.WozDiskImage(tmpstream)\n assert woz.info[flag] == False\n f(kValid1)\n f(kValid2)",
"def test_io_flags(pyocf_ctx, cache_mode):\n\n flags = 0x239482\n block_size = 4096\n\n data = bytes(block_size)\n\n pyocf_ctx.register_volume_type(FlagsValVolume)\n\n cache_device = FlagsValVolume(Size.from_MiB(50), flags)\n core_device = FlagsValVolume(Size.from_MiB(50), flags)\n\n cache = Cache.start_on_device(cache_device, cache_mode=cache_mode)\n core = Core.using_device(core_device)\n\n cache.add_core(core)\n vol = CoreVolume(core)\n\n cache_device.set_check(True)\n core_device.set_check(True)\n\n # write miss\n io_to_exp_obj(vol, block_size * 0, block_size, data, 0, IoDir.WRITE, flags)\n assert not cache_device.fail\n assert not core_device.fail\n\n # read miss\n io_to_exp_obj(vol, block_size * 1, block_size, data, 0, IoDir.READ, flags)\n assert not cache_device.fail\n assert not core_device.fail\n\n # \"dirty\" read hit\n io_to_exp_obj(vol, block_size * 0, block_size, data, 0, IoDir.READ, flags)\n assert not cache_device.fail\n assert not core_device.fail\n\n # \"clean\" read hit\n io_to_exp_obj(vol, block_size * 1, block_size, data, 0, IoDir.READ, flags)\n assert not cache_device.fail\n assert not core_device.fail\n\n # \"dirty\" write hit\n io_to_exp_obj(vol, block_size * 0, block_size, data, 0, IoDir.WRITE, flags)\n assert not cache_device.fail\n assert not core_device.fail\n\n # \"clean\" write hit\n io_to_exp_obj(vol, block_size * 1, block_size, data, 0, IoDir.WRITE, flags)\n assert not cache_device.fail\n assert not core_device.fail",
"def mount(f, mountpoint=DMG_MOUNT, read_only=False, dry_run=ARGS.dry_run):\n result = None\n cmd = ['/usr/bin/hdiutil', 'attach', '-mountpoint', str(mountpoint), '-plist', f]\n\n # Insert read only option in the correct spot\n if read_only:\n cmd.insert(2, '-readonly')\n\n if not dry_run:\n _p = subprocess.run(cmd, capture_output=True)\n LOG.debug('{cmd} ({returncode})'.format(cmd=' '.join([str(x) for x in cmd]), returncode=_p.returncode))\n\n if _p.returncode == 0:\n _entities = plist.read_string(_p.stdout).get('system-entities')\n\n if _entities:\n result = mount_device(_entities)\n LOG.warning('Mounted {dmg} to {mountpoint}'.format(dmg=f, mountpoint=mountpoint))\n else:\n LOG.info(_p.stderr.decode('utf-8').strip())\n else:\n LOG.warning('Mount {dmg} to {mountpoint}'.format(dmg=f, mountpoint=mountpoint))\n\n return result",
"def test_flags(self):\n d = self._examineOrSelect()\n self._response(\n b'* FLAGS (\\\\Answered \\\\Flagged \\\\Deleted \\\\Seen \\\\Draft)')\n self.assertEqual(\n self.successResultOf(d), {\n 'READ-WRITE': False,\n 'FLAGS': ('\\\\Answered', '\\\\Flagged', '\\\\Deleted', '\\\\Seen',\n '\\\\Draft')})",
"def seekable(self):\n self._check_not_closed()\n return False",
"def _CheckFileDescriptor(self):\n if self._file_desc is None:\n raise RuntimeError(\"%s has not been locked.\" % self._file_path)",
"def _IsDevice(self, file_attribute_flags):\n if file_attribute_flags is None:\n return False\n return bool(file_attribute_flags & pyfsntfs.file_attribute_flags.DEVICE)",
"def setblocking(fd, flag):\n\n # get the file's current flag settings\n fl = fcntl.fcntl(fd, fcntl.F_GETFL)\n if flag:\n # clear non-blocking mode from flags\n fl = fl & ~os.O_NONBLOCK\n else:\n # set non-blocking mode from flags\n fl = fl | os.O_NONBLOCK\n # update the file's flags\n fcntl.fcntl(fd, fcntl.F_SETFL, fl)",
"def lfs_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"lfs_enabled\")",
"def lfs_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"lfs_enabled\")",
"def is_filesystem_supported(fs, personality):\n\n if personality in constants.FILESYSTEM_HOSTS_SUPPORTED_LIST_DICT:\n if fs in constants.FILESYSTEM_HOSTS_SUPPORTED_LIST_DICT[personality]:\n return True\n return False",
"def opened(self, *args) -> \"bool\":\n return _ida_fpro.qfile_t_opened(self, *args)",
"def test_managed_file_mode(file, tmp_path, grail_scene33_file):\n desired_mode = \"0o770\" # 0770 octal\n name = tmp_path / \"grail_scene33\"\n ret = file.managed(name=str(name), mode=\"0770\", source=\"salt://grail/scene33\")\n\n if IS_WINDOWS:\n assert ret.result is False\n assert ret.comment == \"The 'mode' option is not supported on Windows\"\n else:\n assert ret.result is True\n resulting_mode = stat.S_IMODE(name.stat().st_mode)\n assert oct(resulting_mode) == desired_mode",
"def has_immutability_policy(self) -> bool:\n return pulumi.get(self, \"has_immutability_policy\")",
"async def test_fan_read_state(hass: HomeAssistant, utcnow) -> None:\n helper = await setup_test_component(hass, create_fan_service)\n\n state = await helper.async_update(\n ServicesTypes.FAN, {CharacteristicsTypes.ON: False}\n )\n assert state.state == \"off\"\n\n state = await helper.async_update(\n ServicesTypes.FAN, {CharacteristicsTypes.ON: True}\n )\n assert state.state == \"on\"",
"def setNonBlocking(fd):\n\n import fcntl\n\n flags = fcntl.fcntl(fd, fcntl.F_GETFL)\n flags = flags | os.O_NONBLOCK\n fcntl.fcntl(fd, fcntl.F_SETFL, flags)",
"def setNonBlocking(fd):\n flags = fcntl.fcntl(fd, fcntl.F_GETFL)\n flags = flags | os.O_NONBLOCK\n fcntl.fcntl(fd, fcntl.F_SETFL, flags)",
"def lfs_enabled(self) -> pulumi.Output[bool]:\n return pulumi.get(self, \"lfs_enabled\")",
"def __setNonBlocking(fd):\n flags = fcntl.fcntl(fd, fcntl.F_GETFL)\n flags = flags | os.O_NONBLOCK\n fcntl.fcntl(fd, fcntl.F_SETFL, flags)",
"def getFlags(path, iface):\n\n nulls = '\\0'*256\n\n SIOCGIFFLAGS = 0x8913\n\n s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n # copied from https://www.safaribooksonline.com/library/view/python-cookbook/0596001673/ch07s05.html # noqa\n result = fcntl.ioctl(s.fileno(), SIOCGIFFLAGS, iface + nulls)\n flags, = struct.unpack('h', result[16:18])\n\n if_stat_bit = int(\"{0:b}\".format(flags)) & (1 << 0)\n promisc_bit = int(\"{0:b}\".format(flags)) & (1 << 8)\n\n if promisc_bit:\n return {'promisc': 1, 'iface_stat': if_stat_bit}\n else:\n return {'promisc': 0, 'iface_stat': if_stat_bit}"
] | [
"0.6428742",
"0.47341985",
"0.47216454",
"0.46669763",
"0.45207182",
"0.4378003",
"0.43503478",
"0.43097138",
"0.4301548",
"0.42695826",
"0.4249345",
"0.42367932",
"0.42250556",
"0.42015633",
"0.4192277",
"0.41917893",
"0.41803315",
"0.4150465",
"0.41474935",
"0.41474935",
"0.41361678",
"0.41321456",
"0.4129778",
"0.4120411",
"0.41061494",
"0.41029274",
"0.41028562",
"0.40923396",
"0.40880832",
"0.40878567"
] | 0.77494645 | 0 |
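A sketch of calling ioctl_get_immutable from the record above. The flag lives on the inode, so a read-only descriptor is enough to query it; this assumes a Linux system and that the FS_IOC_GETFLAGS/FS_IMMUTABLE_FL constants are defined alongside the function, as the record implies.

import os

def path_is_immutable(path: str) -> bool:
    # Open read-only: querying FS_IMMUTABLE_FL needs no write access.
    fd = os.open(path, os.O_RDONLY)
    try:
        return ioctl_get_immutable(fd)
    finally:
        os.close(fd)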
Toggle FS_IMMUTABLE_FL This toggles the `FS_IMMUTABLE_FL` flag on a specified file. It can both set and clear the flag. Arguments fd File descriptor to operate on. set_to Whether to set the `FS_IMMUTABLE_FL` flag or not. Raises OSError If the underlying ioctl fails, a matching `OSError` will be raised. | def ioctl_toggle_immutable(fd: int, set_to: bool):
if not isinstance(fd, int) or fd < 0:
raise ValueError()
flags = array.array('L', [0])
fcntl.ioctl(fd, FS_IOC_GETFLAGS, flags, True)
if set_to:
flags[0] |= FS_IMMUTABLE_FL
else:
flags[0] &= ~FS_IMMUTABLE_FL
fcntl.ioctl(fd, FS_IOC_SETFLAGS, flags, False) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def ioctl_get_immutable(fd: int):\n\n if not isinstance(fd, int) or fd < 0:\n raise ValueError()\n\n flags = array.array('L', [0])\n fcntl.ioctl(fd, FS_IOC_GETFLAGS, flags, True)\n return bool(flags[0] & FS_IMMUTABLE_FL)",
"def setblocking(fd, flag):\n\n # get the file's current flag settings\n fl = fcntl.fcntl(fd, fcntl.F_GETFL)\n if flag:\n # clear non-blocking mode from flags\n fl = fl & ~os.O_NONBLOCK\n else:\n # set non-blocking mode from flags\n fl = fl | os.O_NONBLOCK\n # update the file's flags\n fcntl.fcntl(fd, fcntl.F_SETFL, fl)",
"def set_file_immutable_unlink(path):\n\n return vserver.set_file_attr(path, {'immutable':True, 'iunlink':True})",
"def setNonBlocking(fd):\n flags = fcntl.fcntl(fd, fcntl.F_GETFL)\n flags = flags | os.O_NONBLOCK\n fcntl.fcntl(fd, fcntl.F_SETFL, flags)",
"def setNonBlocking(fd):\n\n import fcntl\n\n flags = fcntl.fcntl(fd, fcntl.F_GETFL)\n flags = flags | os.O_NONBLOCK\n fcntl.fcntl(fd, fcntl.F_SETFL, flags)",
"def toggle_flag(self, bit):\n\n self.fl = self.fl ^ (1 << bit)",
"def __setNonBlocking(fd):\n flags = fcntl.fcntl(fd, fcntl.F_GETFL)\n flags = flags | os.O_NONBLOCK\n fcntl.fcntl(fd, fcntl.F_SETFL, flags)",
"def unbuffer_fd(fileno: int):\n fcntl.fcntl(fileno, fcntl.F_SETFL, fcntl.fcntl(fileno, fcntl.F_GETFL) | os.O_NONBLOCK)",
"def toggle(self):\n self._state.is_on = not self._state.is_on\n self.send_command(Command.TOGGLE, [])",
"def _set_nonblocking(fd):\n oflags = fcntl.fcntl(fd, fcntl.F_GETFL)\n nflags = oflags | os.O_NONBLOCK\n fcntl.fcntl(fd, fcntl.F_SETFL, nflags)",
"def set_wakeup_fd(fd): # real signature unknown; restored from __doc__\n pass",
"def set_writable(self, wrapper, writable):\n self.__validate_wrapper(wrapper)\n fd = self.__wrap_function(wrapper, \"fileno\")\n if type(fd) is type(0):\n self.__wakeup()\n with self.__poll_lock:\n try:\n self.__poll.modify(fd, True, writable, True)\n except IOError as e:\n if e.errno == errno.EBADF:\n self.__logger.warning(\"Invalid File Descriptor %d in \" \\\n \"%s. Closing IOWrapper.\" \\\n % (fd, str(wrapper)))\n self.__wrap_function(wrapper, \"close\")\n else:\n raise\n return True\n else:\n self.__logger.error(\"Cannot modify IOWrapper with file descriptor %s\" % fd)\n return False",
"def set_flammable(self, f):\n self.flammable = f",
"def set_blocking(fildes, blocking):\n import fcntl\n\n flags = fcntl.fcntl(fildes, fcntl.F_GETFL)\n flags = flags | (os.O_NONBLOCK * (blocking == False))\n fcntl.fcntl(fildes, fcntl.F_SETFL, flags)",
"def make_readonly(path):\n mode = Path.stat(path).st_mode\n Path.chmod(path, mode & ~stat.S_IWRITE)",
"def set_mode(gpio, mode):\n return _u2i(_pigpio_command(_control, _PI_CMD_MODES, gpio, mode))",
"def set_immutable(self):\n self._mutable = False",
"def fl_remove_io_callback(fd, fmask, pyfn_IoCallback):\n #FL_IO_CALLBACK = cty.CFUNCTYPE(None, cty.c_int, cty.c_void_p)\n _fl_remove_io_callback = library.cfuncproto(\n library.load_so_libforms(), \"fl_remove_io_callback\", \\\n None, [cty.c_int, cty.c_uint, xfdata.FL_IO_CALLBACK], \\\n \"\"\"void fl_remove_io_callback(int fd, unsigned int mask,\n FL_IO_CALLBACK cb) \"\"\")\n library.check_if_flinitialized()\n i_fd = library.convert_to_intc(fd)\n library.checkfatal_allowed_value_in_list(fmask, xfdata.ASYNCIO_list)\n ui_fmask = library.convert_to_uintc(fmask)\n library.verify_function_type(pyfn_IoCallback)\n cfn_IoCallback = xfdata.FL_IO_CALLBACK(pyfn_IoCallback)\n library.keep_cfunc_refs(cfn_IoCallback, pyfn_IoCallback)\n library.keep_elem_refs(fd, i_fd, fmask, ui_fmask)\n _fl_remove_io_callback(i_fd, ui_fmask, cfn_IoCallback)",
"def toggle_mute(cls) -> bool:\n raise NotImplementedError",
"def toggle(self, *, state: str = None, opposite_of: str = None) -> None:\n if not state and not opposite_of:\n self.error(\"No state value provided\")\n return\n\n if state:\n _state = state\n elif opposite_of == \"off\":\n _state = \"on\"\n else:\n _state = \"off\"\n\n if self.state == \"off\" and _state == \"on\":\n self.log(\"Turning on: {0}\".format(self.entity_ids[\"switch\"]))\n\n self.turn_on(self.entity_ids[\"switch\"])\n elif self.state == \"on\" and _state == \"off\":\n self.log(\"Turning off: {0}\".format(self.entity_ids[\"switch\"]))\n\n self.turn_off(self.entity_ids[\"switch\"])",
"def toggle(initial=False):\n def wrapgen(f):\n f.state = initial\n def wrapper(*args, **kw):\n ret = f(*args, **kw)\n f.state = not f.state\n return ret\n return wrapgen",
"def set_file_nonblock(f, non_blocking=True):\n flags = fcntl.fcntl(f.fileno(), fcntl.F_GETFL)\n if bool(flags & os.O_NONBLOCK) != non_blocking:\n flags ^= os.O_NONBLOCK\n fcntl.fcntl(f.fileno(), fcntl.F_SETFL, flags)",
"def unlock(fd):\n fcntl.lockf(fd, fcntl.LOCK_UN)\n os.close(fd)",
"async def async_toggle(self):\n await self.async_mute_volume(not self._muted)",
"def umasker(func):\n def inner(*args, **kwargs):\n orig_umask = os.umask(0o0002)\n try:\n retval = func(*args, **kwargs)\n finally:\n os.umask(orig_umask)\n return retval\n return inner",
"def toggle_bit(bit) -> int:\n\treturn 1 if bit == 0 else 0",
"def issue_ioctl_to_device(device):\n\n try:\n fd = open(device, \"wb\")\n except IOError as e:\n u.error(\"unable to open device %s: \"\n \"%s\" % (device, e.strerror))\n u.verbose(1, \"issuing USBDEVFS_RESET ioctl() to %s\" % device)\n fcntl.ioctl(fd, USBDEVFS_RESET, 0)\n fd.close()",
"def chmod(self, path, mode):\n str_mode = (\"%o\" % mode)[-4:]\n if str_mode not in [\"0755\", \"0644\"]:\n raise FuseOSError(errno.EINVAL)\n\n result = super(CurrentView, self).chmod(path, mode)\n\n if os.path.isdir(self.repo._full_path(path)):\n return result\n\n message = \"Chmod to {} on {}\".format(str_mode, path)\n self._stage(add=path, message=message)\n\n log.debug(\"CurrentView: Change %s mode to %s\", path, (\"0%o\" % mode)[-4:])\n return result",
"def toggle(self, *options: str) -> int:\n self.flags ^= self.mask(*options)\n return self.flags",
"def test_set_nonblocking():\n\n f = tempfile.TemporaryFile()\n flags = fcntl.fcntl(f, fcntl.F_GETFL, os.O_NONBLOCK)\n assert (flags | os.O_NONBLOCK) != flags\n altered_f = prefork.set_nonblocking(f)\n flags = fcntl.fcntl(f, fcntl.F_GETFL, os.O_NONBLOCK)\n assert (flags | os.O_NONBLOCK) == flags\n\n # Destroy the file, even though GC will do that anyway.\n f.close()"
] | [
"0.6070557",
"0.52018124",
"0.5024385",
"0.49306548",
"0.4926993",
"0.48649842",
"0.48337775",
"0.47418475",
"0.46019533",
"0.45977533",
"0.4591028",
"0.44767058",
"0.44018012",
"0.43646082",
"0.43338102",
"0.43089062",
"0.4275479",
"0.42734283",
"0.42591506",
"0.4255107",
"0.42509606",
"0.42377996",
"0.42357096",
"0.42352378",
"0.42325923",
"0.42257312",
"0.42112693",
"0.42048344",
"0.41905287",
"0.4177717"
] | 0.84549505 | 0 |
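A sketch of using ioctl_toggle_immutable as a guard around an edit: clear the flag, perform the write, restore the flag even if the write fails. Changing FS_IMMUTABLE_FL requires CAP_LINUX_IMMUTABLE (in practice, root), otherwise the ioctl raises OSError with EPERM; the record's function is assumed to be in scope.

import contextlib
import os

@contextlib.contextmanager
def writable_window(path: str):
    # Temporarily drop FS_IMMUTABLE_FL so the file can be modified.
    fd = os.open(path, os.O_RDONLY)
    try:
        ioctl_toggle_immutable(fd, False)      # clear the flag
        try:
            yield
        finally:
            ioctl_toggle_immutable(fd, True)   # restore it even on error
    finally:
        os.close(fd)

# Usage (needs CAP_LINUX_IMMUTABLE):
# with writable_window('/etc/resolv.conf'):
#     with open('/etc/resolv.conf', 'a') as f:
#         f.write('# edited\n')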
Add a handler to an existing logging.Logger object | def _add_handler(logger, handler=None, loglevel=None):
handler.setLevel(loglevel or DEFAULT_LOGLEVEL)
if handler.level <= logging.DEBUG:
_fmt = '%(asctime)s| %(levelname)-4.3s|%(threadName)10.9s/' \
'%(lineno)04d@%(module)-10.9s| %(message)s'
handler.setFormatter(logging.Formatter(_fmt))
else:
handler.setFormatter(logging.Formatter(
'%(asctime)s| %(levelname)-8s| %(message)s'
))
logger.addHandler(handler) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def add_file_handler_to_logger(logger):\n # This makes \n if AppState().log_file is None:\n return\n\n # Create file handler which logs even DEBUG messages.\n fh = logging.FileHandler(AppState().log_file)\n\n # Set logging level for this file.\n fh.setLevel(logging.DEBUG)\n\n # Create formatter and add it to the handlers.\n formatter = logging.Formatter(fmt='[%(asctime)s] - %(levelname)s - %(name)s >>> %(message)s',\n datefmt='%Y-%m-%d %H:%M:%S')\n fh.setFormatter(formatter)\n\n # Add the handler to the logger.\n logger.addHandler(fh)",
"def add(self, handler, on_error=None):\n self.handlers.append(handler)",
"def install_handler(self, app):\n # Check if directory exists.\n basedir = dirname(app.config[\"LOGGING_FS_LOGFILE\"])\n if not exists(basedir):\n raise ValueError(\"Log directory {0} does not exist.\".format(basedir))\n\n handler = RotatingFileHandler(\n app.config[\"LOGGING_FS_LOGFILE\"],\n backupCount=app.config[\"LOGGING_FS_BACKUPCOUNT\"],\n maxBytes=app.config[\"LOGGING_FS_MAXBYTES\"],\n delay=True,\n )\n handler.setFormatter(\n logging.Formatter(\n \"%(asctime)s %(levelname)s: %(message)s \" \"[in %(pathname)s:%(lineno)d]\"\n )\n )\n handler.setLevel(app.config[\"LOGGING_FS_LEVEL\"])\n\n # Add handler to application logger\n app.logger.addHandler(handler)\n\n if app.config[\"LOGGING_FS_PYWARNINGS\"]:\n self.capture_pywarnings(handler)\n\n # Add request_id to log record\n app.logger.addFilter(add_request_id_filter)",
"def add_handler(self, handler):\n pass",
"def addhandler(self, txt, handler):\n self.handlers[txt] = handler\n rlog(0, 'webserver', '%s handler added' % txt)",
"def addHandler(self, fn):\n self.handlers.append(fn)",
"def add_logger_filehandler(logger=set_default_logger(), logger_level=logging.INFO, filename='default.log', format=None):\n if format is None:\n _format = logging.Formatter(u\"%(asctime)s - %(name)s - %(levelname)s - %(message)s\")\n else:\n _format = logging.Formatter(format)\n try:\n fh = logging.FileHandler(filename)\n fh.setLevel(logger_level)\n fh.setFormatter(_format)\n logger.addHandler(fh)\n except Exception as e:\n logger.error(\"Failed to set %s as log file handler. Error: %s\" % (filename, e))\n finally:\n return logger",
"def configure(logger, level):\n logger.setLevel(level)\n for d_handler in DEFAULT_HANDLERS:\n logger.addHandler(d_handler)\n return logger",
"def add_logger_splunkhandler(logger=set_default_logger(), **kwargs):\n try:\n from splunk_hec_handler import SplunkHecHandler\n import urllib3\n urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)\n except Exception as err:\n logger.warning(\"Failed to add Splunk log handler. Error: %s\" % err)\n return logger\n else:\n try:\n host = kwargs.pop('host')\n token = kwargs.pop('token')\n level = kwargs.pop('level') if 'level' in kwargs.keys() else 'INFO'\n sh = SplunkHecHandler(host, token, **kwargs)\n except Exception as err:\n logger.warning(\"Failed to add Splunk log handler. Error: %s\" % err)\n raise err\n else:\n sh.setLevel(level)\n logger.addHandler(sh)\n return logger",
"def set_added_handler(self, handler):\n self._added_handler = handler",
"def setup_logger(log_comp, handler, level):\n\n name = LOGGER_NAMES[log_comp]\n logger = logging.getLogger(name)\n\n for h in logger.handlers:\n if isinstance(h, logging.NullHandler):\n logger.removeHandler(h)\n\n handler.setLevel(level)\n logger.addHandler(handler)\n logger.setLevel(level)",
"def add_logger(self, logger):\n self.comp('logmanager').add_logger(logger)\n return logger",
"def test_add_file_handler():\n change_cwd()\n logger = logging.getLogger(__name__)\n file_handler = logging.FileHandler('logging.log')\n file_handler.setLevel(logging.DEBUG)\n logger.addHandler(file_handler)\n log_all_levels(logger)\n log_all_levels_decorated(logger)\n log_all_levels_loop(logger)\n return logger",
"def add_logger(log, request):\n request.cls.log = log",
"def fileHandlers(self, handlers):\n for handler in handlers:\n self.logger.addHandler(handler)",
"def log_handler(self):\n return self._log_handler",
"def add_handler(self, handler, backtrack = False):\n\n # Add Handler\n self._handlers.append(handler)\n logger.debug(\"%s: handler %s added.\" % \\\n (self.__class__.__name__, handler.__name__))\n \n # Backtrack\n if backtrack:\n for message in self.get_waiting(): handler(message)\n logger.debug(\"%s: handler %s backtracked.\" % \\\n (self.__class__.__name__, handler.__name__))",
"def log_handler(self, handler):\n if not self.opened():\n handler = handler or util.noop\n self._log_handler = enums.JLinkFunctions.LOG_PROTOTYPE(handler)\n self._dll.JLINKARM_EnableLog(self._log_handler)",
"def register_handler(self, handler):\r\n self.handler = handler",
"def add_handler(self, handler):\n self.register(abcs.AHandler, handler, handler)",
"def extra_logging(self, level, formatter):\r\n\r\n # verifies if the logging attribute of the current instance is\r\n # defined and in case it's not returns immediately, otherwise\r\n # starts by converting the currently defined set of handlers into\r\n # a list so that it may be correctly manipulated (add handlers)\r\n if not self.logging: return\r\n self.handlers = list(self.handlers)\r\n\r\n # iterates over the complete set of handler configuration in the\r\n # logging to create the associated handler instances\r\n for config in self.logging:\r\n # gathers the base information on the current handler configuration\r\n # running also the appropriate transformation on the level\r\n name = config.get(\"name\", None)\r\n _level = config.get(\"level\", level)\r\n _level = self._level(_level)\r\n\r\n # \"clones\" the configuration dictionary and then removes the base\r\n # values so that they do not interfere with the building\r\n config = dict(config)\r\n if \"level\" in config: del config[\"level\"]\r\n if \"name\" in config: del config[\"name\"]\r\n\r\n # retrieves the proper building, skipping the current loop in case\r\n # it does not exits and then builds the new handler instance, setting\r\n # the proper level and formatter and then adding it to the set\r\n if not hasattr(log, name + \"_handler\"): continue\r\n builder = getattr(log, name + \"_handler\")\r\n handler = builder(**config)\r\n handler.setLevel(_level)\r\n handler.setFormatter(formatter)\r\n self.handlers.append(handler)\r\n self._extra_handlers.append(handler)\r\n\r\n # restores the handlers structure back to the \"original\" tuple form\r\n # so that no expected data types are violated\r\n self.handlers = tuple(self.handlers)",
"def register(self, handler):\n self.handlers.add(handler)\n return self",
"def add_log_handler(\n min_level: LogLevel,\n src_starts_with: str,\n handler: LogHandler\n) -> LogHandlerId:\n global _HANDLER_COUNT # pylint: disable=global-statement\n global _HANDLERS # pylint: disable=global-statement\n _HANDLER_LOCK.acquire_write()\n try:\n handler_id = LogHandlerId(_HANDLER_COUNT)\n _HANDLER_COUNT += 1\n _HANDLERS[handler_id] = (min_level, src_starts_with, handler,)\n return handler_id\n finally:\n _HANDLER_LOCK.release()",
"def __add_logger(self):\n #FIXME: adapt to the settings that are proper for you\n self.__logger = logging.getLogger('lib-autopilot')\n self.__logger.setLevel(logging.INFO)\n ch = logging.StreamHandler()\n ch.setLevel(logging.INFO)\n formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n ch.setFormatter(formatter)\n self.__logger.addHandler(ch) \n # TODO: CHANGE from Console to file handler\n # fh = logging.FileHandler('lib-autopilot.log')\n # fh.setLevel(logging.DEBUG)\n #fh.setFormatter(formatter)\n #self.__logger.addHandler(fh)",
"def _handler_file():\n handler = logging.FileHandler(LOG_FILENAME)\n handler.setFormatter(LOG_ENTRY_FORMAT)\n return handler",
"def add_handler(self, path, handler):\n if path: # guard against Null path, we assume handler could be Null\n path_list = self.split_path(path)\n self.trie.insert(step_list=path_list, handler=handler)",
"def logger(name=None):\r\n\r\n log = logging.getLogger(name or 'logging')\r\n if HANDLER and HANDLER not in log.handlers:\r\n log.addHandler(HANDLER)\r\n\r\n return log",
"def register_logger(self, logger):\n\n self._logger = logger",
"def setup_logger(level, name, use_rotating_handler=True):\r\n \r\n logger = logging.getLogger(name)\r\n logger.propagate = False # Prevent the log messages from being duplicated in the python.log file\r\n logger.setLevel(level)\r\n \r\n log_file_path = os.path.join( os.environ['SPLUNK_HOME'], 'var', 'log', 'splunk', 'radius_auth_rest_handler.log' )\r\n \r\n if use_rotating_handler:\r\n file_handler = logging.handlers.RotatingFileHandler(log_file_path, maxBytes=25000000, backupCount=5)\r\n else:\r\n file_handler = logging.FileHandler(log_file_path)\r\n \r\n formatter = logging.Formatter('%(asctime)s %(levelname)s ' + name + ' - %(message)s')\r\n file_handler.setFormatter(formatter)\r\n \r\n logger.addHandler(file_handler)\r\n \r\n return logger",
"def __CreateLog(self, log_name, log_level=NOTSET, log_handler=FILE,\n stream=sys.stderr):\n logger = logging.getLogger(log_name)\n\n # Update log level to reflect changes. If a higher log level is given\n # the logger should raise it's boundary.\n if log_level < logger.level or logger.level == logging.NOTSET:\n logger.setLevel(log_level)\n\n if (log_name in self.__log_table and\n self.__log_table[log_name] == Logger.FILE_AND_CONSOLE):\n # Don't add any more handlers.\n return\n\n # Create an entry for log name.\n if log_name not in self.__log_table:\n self.__log_table[log_name] = Logger.NONE\n\n if log_handler != Logger.NONE:\n fmt = ('[%(asctime)s::%(levelname)s::' + self.__lib_sig +\n '] %(message)s')\n # Add FILE handler if needed.\n if (log_handler == Logger.FILE or\n log_handler == Logger.FILE_AND_CONSOLE and\n self.__log_table[log_name] != Logger.FILE):\n if not os.path.exists(self.__log_path):\n os.makedirs(self.__log_path)\n fh = logging.FileHandler(os.path.join(self.__log_path,\n '%s.log' % log_name))\n fh.setLevel(log_level)\n fh.setFormatter(logging.Formatter(fmt))\n logger.addHandler(fh)\n # Binary arithmetic to yield updated handler.\n self.__log_table[log_name] = self.__log_table[log_name] + Logger.FILE\n\n # Add CONSOLE handler if needed.\n if (log_handler == Logger.CONSOLE or\n log_handler == Logger.FILE_AND_CONSOLE and\n self.__log_table[log_name] != Logger.CONSOLE):\n ch = logging.StreamHandler(stream)\n ch.setLevel(log_level)\n ch.setFormatter(logging.Formatter(fmt))\n logger.addHandler(ch)\n # Binary arithmetic to yield updated handler.\n self.__log_table[log_name] = self.__log_table[log_name] + Logger.CONSOLE"
] | [
"0.730158",
"0.7083873",
"0.70768595",
"0.69950664",
"0.67779076",
"0.67307496",
"0.6639868",
"0.6612778",
"0.6585183",
"0.65658885",
"0.6468931",
"0.64650214",
"0.6443481",
"0.64255625",
"0.63551295",
"0.63521326",
"0.634116",
"0.63212717",
"0.63015157",
"0.62598276",
"0.6255844",
"0.62425405",
"0.62234664",
"0.6215817",
"0.6186613",
"0.6176456",
"0.616096",
"0.6142454",
"0.6136743",
"0.6125811"
] | 0.74555445 | 0 |
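A short usage sketch for the _add_handler record above. Two assumptions are made: DEFAULT_LOGLEVEL is defined at module level, and the caller always passes a handler (despite the handler=None default, the helper would raise AttributeError if handler stayed None):

import logging

DEFAULT_LOGLEVEL = logging.INFO   # assumed module-level constant

logger = logging.getLogger('sshtunnel.SSHTunnelForwarder')
logger.setLevel(logging.DEBUG)

# DEBUG selects the verbose thread/line/module format in the helper.
_add_handler(logger, handler=logging.StreamHandler(), loglevel=logging.DEBUG)
logger.debug('tunnel opened')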
Add a console handler for paramiko.transport's logger if not present | def _check_paramiko_handlers(logger=None):
paramiko_logger = logging.getLogger('paramiko.transport')
if not paramiko_logger.handlers:
if logger:
paramiko_logger.handlers = logger.handlers
else:
console_handler = logging.StreamHandler()
console_handler.setFormatter(
logging.Formatter('%(asctime)s | %(levelname)-8s| PARAMIKO: '
'%(lineno)03d@%(module)-10s| %(message)s')
)
paramiko_logger.addHandler(console_handler) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _setup_cmd_logger():\n logger.setLevel(logging.DEBUG)\n ch = logging.StreamHandler()\n formatter = ColoredFormatter('%(log_color)s[%(levelname)8s] %(message)s%(reset)s')\n ch.setLevel(level=logging.DEBUG)\n ch.setFormatter(formatter)\n logger.addHandler(ch)",
"def setup_logger_console(log_level='info'):\n # Configureer threshold log level DEBUG voor de root logger (i.p.v. WARNING).\n setup_logger()\n\n # Configureer de console handler.\n console_handler = customize_handler(logging.StreamHandler(), log_level)\n # Koppel console handler aan de root logger.\n logging.getLogger('').addHandler(console_handler)\n\n return console_handler",
"def __logger_console(self):\n console_handler = logging.StreamHandler()\n console_handler.setFormatter(self.__formatter)\n console_handler.setLevel(logging.INFO)\n self.__logger.addHandler(console_handler)",
"def setup_logger(log=None, level='INFO'):\n if not log:\n log = logging.getLogger()\n if not log.handlers:\n channel = logging.StreamHandler()\n channel.setFormatter(DebugLogFormatter())\n\n log.setLevel(level)\n log.addHandler(channel)\n\n # setup styling for repo loggers\n repo_logger = logging.getLogger('libvcs')\n channel = logging.StreamHandler()\n channel.setFormatter(RepoLogFormatter())\n channel.addFilter(RepoFilter())\n repo_logger.setLevel(level)\n repo_logger.addHandler(channel)",
"def create_logger(logger=None,\n loglevel=None,\n capture_warnings=True,\n add_paramiko_handler=True):\n logger = logger or logging.getLogger(\n 'sshtunnel.SSHTunnelForwarder'\n )\n if not any(isinstance(x, logging.Handler) for x in logger.handlers):\n logger.setLevel(loglevel or DEFAULT_LOGLEVEL)\n console_handler = logging.StreamHandler()\n _add_handler(logger,\n handler=console_handler,\n loglevel=loglevel or DEFAULT_LOGLEVEL)\n if loglevel: # override if loglevel was set\n logger.setLevel(loglevel)\n for handler in logger.handlers:\n handler.setLevel(loglevel)\n\n if add_paramiko_handler:\n _check_paramiko_handlers(logger=logger)\n\n if capture_warnings and sys.version_info >= (2, 7):\n logging.captureWarnings(True)\n pywarnings = logging.getLogger('py.warnings')\n pywarnings.handlers.extend(logger.handlers)\n return logger",
"def add_terminal_logging(log_format, level=logging.DEBUG):\n logger = logging.getLogger()\n terminal_logger = logging.StreamHandler(sys.stdout)\n terminal_logger.setLevel(level)\n formatter = logging.Formatter(log_format)\n terminal_logger.setFormatter(formatter)\n logger.addHandler(terminal_logger)",
"def configure_console_logger ():\n\t\tconsole = logging.StreamHandler()\n\t\tconsole.setLevel(logging.INFO) # Change level for console logger in development mode\n\t\tformatter = logging.Formatter('%(levelname)-8s %(message)s')\n\t\tconsole.setFormatter(formatter)\n\t\tlogging.getLogger('').addHandler(console)",
"def setup_logger():\n root = logging.getLogger()\n root.setLevel(LOGGING_LEVEL)\n formatter = logging.Formatter('%(asctime)s - %(message)s')\n ch = logging.StreamHandler(sys.stdout)\n ch.setLevel(LOGGING_LEVEL)\n ch.setFormatter(formatter)\n root.addHandler(ch)",
"def setup_logging(log=None, level=logging.INFO\n , program=\"\", syslog_address=\"\", tcp_address=\"\", udp_address=\"\"\n , only_message=False, json_to_console=False, logging_handler_file=sys.stderr\n ):\n log = log if log is not None else logging.getLogger(log)\n\n if syslog_address:\n address = syslog_address\n if not syslog_address.startswith(\"/\") and \":\" in syslog_address:\n split = address.split(\":\", 2)\n address = (split[0], int(split[1]))\n handler = SyslogHandler(address = address)\n elif udp_address:\n handler = JsonOverUDPHandler(program, udp_address.split(\":\")[0], int(udp_address.split(\":\")[1]))\n elif tcp_address:\n handler = JsonOverTCPHandler(program, tcp_address.split(\":\")[0], int(tcp_address.split(\":\")[1]))\n else:\n if json_to_console:\n handler = JsonToConsoleHandler(program, logging_handler_file)\n else:\n handler = RainbowHandler(logging_handler_file)\n\n # Protect against this being called multiple times\n handler.delfick_logging = True\n if any(getattr(h, \"delfick_logging\", False) for h in log.handlers):\n return\n\n if syslog_address:\n handler.setFormatter(SimpleFormatter(\"{0}[{1}]: %(message)s\".format(program, os.getpid()), ignore_extra=True))\n elif udp_address or tcp_address or json_to_console:\n handler.setFormatter(SimpleFormatter(\"%(message)s\"))\n else:\n base_format = \"%(name)-15s %(message)s\"\n if only_message:\n base_format = \"%(message)s\"\n\n handler._column_color['%(asctime)s'] = ('cyan', None, False)\n handler._column_color['%(levelname)-7s'] = ('green', None, False)\n handler._column_color['%(message)s'][logging.INFO] = ('blue', None, False)\n if only_message:\n handler.setFormatter(SimpleFormatter(base_format))\n else:\n handler.setFormatter(SimpleFormatter(\"{0} {1}\".format(\"%(asctime)s %(levelname)-7s\", base_format)))\n\n log.addHandler(handler)\n log.setLevel(level)\n return handler",
"def add_logger_splunkhandler(logger=set_default_logger(), **kwargs):\n try:\n from splunk_hec_handler import SplunkHecHandler\n import urllib3\n urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)\n except Exception as err:\n logger.warning(\"Failed to add Splunk log handler. Error: %s\" % err)\n return logger\n else:\n try:\n host = kwargs.pop('host')\n token = kwargs.pop('token')\n level = kwargs.pop('level') if 'level' in kwargs.keys() else 'INFO'\n sh = SplunkHecHandler(host, token, **kwargs)\n except Exception as err:\n logger.warning(\"Failed to add Splunk log handler. Error: %s\" % err)\n raise err\n else:\n sh.setLevel(level)\n logger.addHandler(sh)\n return logger",
"def _debug_logging_handler(\n name: str, plugin_invoker: PluginInvoker, stderr: StreamReader\n) -> Task:\n if not plugin_invoker.context or not plugin_invoker.context.base_output_logger:\n return asyncio.ensure_future(\n _stream_redirect(stderr, sys.stderr, write_str=True)\n )\n\n out = plugin_invoker.context.base_output_logger.out(\n name, color=\"yellow\", subtask_name=\"discovery\"\n )\n with out.line_writer() as outerr:\n return asyncio.ensure_future(_stream_redirect(stderr, outerr, write_str=True))",
"def enable_console_logging(log):\n\n log.setLevel(logging.DEBUG)\n # create console handler and set level to debug\n ch = logging.StreamHandler()\n ch.setLevel(logging.DEBUG)\n # create formatter\n formatter = logging.Formatter(\"%(asctime)s - %(name)s - %(levelname)s - %(message)s\")\n # add formatter to ch\n ch.setFormatter(formatter)\n # add ch to logger\n log.addHandler(ch)",
"def get_console_handler():\r\n formatter = logging.Formatter(\"%(name)s - %(levelname)s - %(funcName)s:%(lineno)d - %(message)s\")\r\n console_handler = logging.StreamHandler(sys.stdout)\r\n console_handler.setFormatter(formatter)\r\n return console_handler",
"def init_stdio():\n _logger.stdio()",
"def enable_console():\n global CONSOLE\n if CONSOLE is None:\n # define a Handler which writes messages to sys.stderr\n CONSOLE = logging.StreamHandler()\n CONSOLE.setLevel(logging.DEBUG)\n # set a format which is simpler for console use\n formatter = logging.Formatter('%(levelname)s %(name)s: %(message)s')\n # tell the handler to use this format\n CONSOLE.setFormatter(formatter)\n logger.addHandler(CONSOLE)",
"def setup_logger(logger, argv):\n log_level = get_log_level(argv)\n logger.setLevel(log_level)\n stream_handler = logging.StreamHandler()\n stream_handler.setLevel(log_level)\n logger.addHandler(stream_handler)",
"def _setup_logger(self, level, log_file):\n level = getattr(logging, level.upper())\n logger.setLevel(level)\n formatter = logging.Formatter(\n '[%(levelname)s] %(asctime)s - %(module)s.%(funcName)s() - %(message)s')\n handler = logging.StreamHandler()\n logger.addHandler(handler)\n handler.setFormatter(formatter)\n if not log_file:\n return\n try:\n handler = TimedRotatingFileHandler(log_file)\n except IOError:\n logger.error(\"Could not write to %s, falling back to stdout\",\n log_file)\n else:\n logger.addHandler(handler)\n handler.setFormatter(formatter)",
"def setup_logging():\n formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n console = logging.StreamHandler(sys.stdout)\n console.setLevel(logging.DEBUG)\n console.setFormatter(formatter)\n root = logging.getLogger()\n root.addHandler(console)\n root.setLevel(logging.DEBUG)",
"def add_sock_handler(self, host, port):\n hand = MySocketHandler(host, port, get_machine_id())\n # hand.setFormatter(self.formatter)\n self.log.addHandler(hand)",
"def setup_logging():\n logger = logging.getLogger()\n logger.level = logging.DEBUG\n stream_handler = logging.StreamHandler(sys.stdout)\n logger.addHandler(stream_handler)",
"def logToConsole(level=logging.INFO):\n logger = logging.getLogger()\n stdHandlers = [\n h for h in logger.handlers\n if type(h) is logging.StreamHandler and h.stream is sys.stderr]\n if stdHandlers:\n # if a standard stream handler already exists, use it and\n # set the log level for the ib_insync namespace only\n logging.getLogger('ib_insync').setLevel(level)\n else:\n # else create a new handler\n logger.setLevel(level)\n formatter = logging.Formatter(\n '%(asctime)s %(name)s %(levelname)s %(message)s')\n handler = logging.StreamHandler()\n handler.setFormatter(formatter)\n logger.addHandler(handler)",
"def setup_logging(use_syslog=False):\n\n LOG.setLevel(logging.INFO)\n if use_syslog:\n ch = SysLogHandler()\n else:\n ch = logging.StreamHandler(sys.stdout)\n ch.setFormatter(logging.Formatter('%(asctime)s %(name)s[%(process)d] '\n '%(levelname)s: %(message)s'))\n LOG.addHandler(ch)",
"def _setup_logger():\n root = logging.getLogger()\n root.setLevel(logging.INFO)\n\n log_handle = logging.StreamHandler(sys.stdout)\n formatter = logging.Formatter(\n \"[%(levelname)s] (%(asctime)s) - %(message)s\", datefmt=\"%Y-%m-%d %H:%M:%S\"\n )\n log_handle.setFormatter(formatter)\n root.addHandler(log_handle)\n\n logging.info(\"Initializing snakes\")",
"def setup_logger():\n mc_logger = logging.getLogger('chess_logger')\n mc_logger.setLevel(logging.DEBUG)\n console_handler = logging.StreamHandler()\n formatter = logging.Formatter(fmt='%(asctime)s %(levelname)-8s %(message)s',\n datefmt='%Y-%m-%d %H:%M:%S')\n console_handler.setFormatter(formatter)\n mc_logger.addHandler(console_handler)",
"def set_logger( logger_fn: Callable[[str,str],any] = lambda llvl, msg: sys.stdout.write( \"[%s]: %s\\n\" % (llvl, msg) ) ):\n global LOGGER\n\n LOGGER = logger_fn",
"def _add_handler(logger, handler=None, loglevel=None):\n handler.setLevel(loglevel or DEFAULT_LOGLEVEL)\n if handler.level <= logging.DEBUG:\n _fmt = '%(asctime)s| %(levelname)-4.3s|%(threadName)10.9s/' \\\n '%(lineno)04d@%(module)-10.9s| %(message)s'\n handler.setFormatter(logging.Formatter(_fmt))\n else:\n handler.setFormatter(logging.Formatter(\n '%(asctime)s| %(levelname)-8s| %(message)s'\n ))\n logger.addHandler(handler)",
"def configure_logging():\n # console_handler = TTSHandler()\n root = logging.getLogger('node_' + __name__)\n root.setLevel(logging.INFO)\n\n console_handler = logging.StreamHandler()\n console_handler.setLevel(logging.DEBUG)\n\n formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n console_handler.setFormatter(formatter)\n root.addHandler(console_handler)\n\n root = logging.getLogger()\n root.addHandler(console_handler)\n # the default formatter just returns the message\n root.setLevel(logging.DEBUG)",
"def init_logger(verbosity):\n # Register our logging handler\n handler = logging.StreamHandler(sys.stdout)\n handler.setLevel(verbosity)\n rootLogger = logging.getLogger('')\n rootLogger.addHandler(handler)\n\n # Decrease the log level of the root logger if needed\n if verbosity < rootLogger.level:\n rootLogger.setLevel(verbosity)",
"def setup_logger(logLevel=\"DEBUG\"):\n logmoduleconsole = logging.getLogger(f\"{__name__}.console\")\n logmoduleconsole.propagate = False\n logmoduleconsole.setLevel(logLevel)\n\n module_console_handler = logging.StreamHandler()\n\n # log_format_module = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'\n log_format_module = \"%(name)s - %(levelname)s: %(message)s\"\n # log_format_module = '%(levelname)s: %(message)s'\n formatter = logging.Formatter(log_format_module)\n module_console_handler.setFormatter(formatter)\n\n logmoduleconsole.addHandler(module_console_handler)\n\n logging.addLevelName(5, \"TRACE\")\n # use it like this\n # logmoduleconsole.log(5, 'Exceedingly verbose debug')\n\n return logmoduleconsole",
"def __add_logger(self):\n #FIXME: adapt to the settings that are proper for you\n self.__logger = logging.getLogger('lib-autopilot')\n self.__logger.setLevel(logging.INFO)\n ch = logging.StreamHandler()\n ch.setLevel(logging.INFO)\n formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n ch.setFormatter(formatter)\n self.__logger.addHandler(ch) \n # TODO: CHANGE from Console to file handler\n # fh = logging.FileHandler('lib-autopilot.log')\n # fh.setLevel(logging.DEBUG)\n #fh.setFormatter(formatter)\n #self.__logger.addHandler(fh)"
] | [
"0.62692356",
"0.61782694",
"0.61622053",
"0.60665613",
"0.601746",
"0.59057075",
"0.58986324",
"0.58926237",
"0.5885338",
"0.58700985",
"0.58597547",
"0.58551115",
"0.58316165",
"0.57992226",
"0.5792013",
"0.57112014",
"0.56644404",
"0.56620884",
"0.56540704",
"0.5643124",
"0.56293535",
"0.56252986",
"0.5623947",
"0.5592303",
"0.5577431",
"0.5572614",
"0.55533415",
"0.5544728",
"0.5535247",
"0.55262256"
] | 0.72201777 | 0 |
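A usage sketch for the _check_paramiko_handlers record above; app_logger is a hypothetical caller-side logger used to show both calling modes:

import logging

app_logger = logging.getLogger('myapp')            # hypothetical application logger
app_logger.addHandler(logging.StreamHandler())

# Share the application's handlers with paramiko.transport.
_check_paramiko_handlers(logger=app_logger)

# A later call is a no-op: paramiko.transport already has handlers.
_check_paramiko_handlers()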
Check that all tunnels are established and populate self.tunnel_is_up | def check_tunnels(self):
skip_tunnel_checkup = self.skip_tunnel_checkup
try:
# force tunnel check at this point
self.skip_tunnel_checkup = False
for _srv in self._server_list:
self._check_tunnel(_srv)
finally:
self.skip_tunnel_checkup = skip_tunnel_checkup # roll it back | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _check_tunnel(self, _srv):\n if self.skip_tunnel_checkup:\n self.tunnel_is_up[_srv.local_address] = True\n return\n self.logger.info('Checking tunnel to: {0}'.format(_srv.remote_address))\n if isinstance(_srv.local_address, string_types): # UNIX stream\n s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)\n else:\n s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n s.settimeout(TUNNEL_TIMEOUT)\n try:\n # Windows raises WinError 10049 if trying to connect to 0.0.0.0\n connect_to = ('127.0.0.1', _srv.local_port) \\\n if _srv.local_host == '0.0.0.0' else _srv.local_address\n s.connect(connect_to)\n self.tunnel_is_up[_srv.local_address] = _srv.tunnel_ok.get(\n timeout=TUNNEL_TIMEOUT * 1.1\n )\n self.logger.debug(\n 'Tunnel to {0} is DOWN'.format(_srv.remote_address)\n )\n except socket.error:\n self.logger.debug(\n 'Tunnel to {0} is DOWN'.format(_srv.remote_address)\n )\n self.tunnel_is_up[_srv.local_address] = False\n\n except queue.Empty:\n self.logger.debug(\n 'Tunnel to {0} is UP'.format(_srv.remote_address)\n )\n self.tunnel_is_up[_srv.local_address] = True\n finally:\n s.close()",
"def tunnel_up(self):\n return self._ssh_host != None and self._ssh_port != None",
"def _create_tunnels(self):\n if not self.is_active:\n try:\n self._connect_to_gateway()\n except socket.gaierror: # raised by paramiko.Transport\n msg = 'Could not resolve IP address for {0}, aborting!' \\\n .format(self.ssh_host)\n self.logger.error(msg)\n return\n except (paramiko.SSHException, socket.error) as e:\n template = 'Could not connect to gateway {0}:{1} : {2}'\n msg = template.format(self.ssh_host, self.ssh_port, e.args[0])\n self.logger.error(msg)\n return\n for (rem, loc) in zip(self._remote_binds, self._local_binds):\n try:\n self._make_ssh_forward_server(rem, loc)\n except BaseSSHTunnelForwarderError as e:\n msg = 'Problem setting SSH Forwarder up: {0}'.format(e.value)\n self.logger.error(msg)",
"def connect_all(self):\n if not all(p.is_connected() for p in self._process_managers):\n print 'Connecting...'\n self.async_run_all(ProcMgrProxy.connect)\n # Check failure.\n success = all(p.is_connected() for p in self._process_managers)\n if not success:\n print \"Perhaps you should run 'setup' first\"\n return success",
"def validate_connection(self):\n for hostInfo in self.client.transport.hosts:\n host = hostInfo.get('host')\n port = hostInfo.get('port')\n self.validate_server_connection(host, port)",
"def verify_tunnels_state(device, tunnels,\n prot=\"up\", state=\"up\",\n max_time=15, check_interval=5,\n parsed_output=None):\n timeout = Timeout(max_time, check_interval)\n while timeout.iterate():\n if not parsed_output:\n try:\n parsed_output1 = device.parse(\n \"show mpls traffic-eng tunnels brief\"\n )\n except SchemaEmptyParserError as se:\n pass\n \n tunnel_state=[]\n port_state=[]\n for tunnel in tunnels:\n res=1\n if tunnel in parsed_output1.q.get_values('tunnel_id'):\n tunnel_state1=parsed_output1.q.contains(tunnel)\\\n .get_values('state')[0]\n if tunnel_state1 == state:\n tunnel_state.append(tunnel)\n else:\n log.error(\"state of the tunnel {tunnel} is {state}\"\\\n .format(tunnel=tunnel, state=tunnel_state1))\n res=0\n \n port_state1=parsed_output1.q.contains(tunnel)\\\n .get_values('prot')[0]\n if port_state1 == prot:\n port_state.append(tunnel)\n else:\n log.error(\n \"protocol state of the tunnel {tunnel} is \"\"{prot}\"\\\n .format(tunnel=tunnel, prot=port_state1))\n res=0\n else:\n log.error(\n \"Tunnel id {tunnel} not found in the output\"\\\n .format(tunnel=tunnel))\n return False\n if res:\n log.info(\"State of the tunnel {tunnel} is {state}\".format(\n tunnel=(','.join(tunnel_state)), state=state))\n log.info(\"Protocol state of the tunnel {tunnel} is {state}\"\\\n .format(tunnel=(','.join(port_state)), state=state))\n\n return True\n timeout.sleep()\n return False",
"def test_connection_is_established(self):\n for conn in self.connections:\n assert conn.is_connected is True",
"def check_port_connections(self):\n all_ports = crest.get_all_ports(self.model)\n influences_to_target = {p: [] for p in all_ports}\n updates_to_target = {p: [] for p in all_ports}\n actions_to_target = {p: [] for p in all_ports}\n\n # fill data stores\n for inf in crest.get_all_influences(self.model):\n influences_to_target[inf.target].append(inf)\n\n for up in crest.get_all_updates(self.model):\n updates_to_target[up.target].append(up)\n\n for action in crest.get_all_actions(self.model):\n actions_to_target[action.target].append(action)\n\n for port in all_ports:\n assert not (len(influences_to_target[port]) > 0 and (\n len(updates_to_target[port]) > 0 or len(actions_to_target[port]) > 0)\n ), f\"There are [influences and (updates or actions)] writing to port {port._name} (entity: {port._parent._name})\"\n\n assert len(influences_to_target[port]) < 2, f\"There are two influences writing to {port._name}\"\n\n states = [update.state for update in updates_to_target[port]]\n assert len(states) == len(set(states)), f\"Port {port._name} (entity: {port._parent._name}) is written by multiple updates linked to the same state\"\n\n transitions = [action.transition for action in actions_to_target[port]]\n assert len(transitions) == len(set(transitions)), f\"Port {port._name} (entity: {port._parent._name}) is written by multiple actions linked to the same transition\"",
"def verify_lag_host_connectivity(self):\n # Find all LACP hosts\n for lacp_id, host_options in self.host_options.items():\n if 'lacp' in host_options:\n # Found LACP host\n for dst_id in self.host_information:\n if lacp_id == dst_id:\n continue\n # Test connectivity to any other host (might be another LAG host)\n self.check_host_connectivity_by_id(lacp_id, dst_id)",
"def test_tunneling(self):\n self.assertEqual(self.kineticsjob.reaction.transition_state.tunneling, None)",
"def verify_intervlan_routing(self):\n for src in self.host_information:\n for dst in self.host_information:\n if dst > src:\n self.check_host_connectivity_by_id(src, dst)",
"def connection_test():\n connections_result_passed = []\n connections_result_failed = []\n with open(f'endpoints/{dc_number}/endpoints.yaml', 'r') as ep_file:\n try:\n yaml_object = yaml.safe_load(ep_file)\n for components in yaml_object.values():\n for host_info in components.values():\n if host_info is None:\n pass\n else:\n for hostname, port in host_info.items():\n if ',' in str(port):\n port_list = str(port).split(',')\n for items in port_list:\n connections_result_passed, connections_result_failed = connect(hostname, items, connections_result_passed, connections_result_failed)\n else:\n connections_result_passed, connections_result_failed = connect(hostname, port, connections_result_passed, connections_result_failed)\n message, color = message_content(connections_result_passed, connections_result_failed)\n if str2bool(slack_enabled) is True and str2bool(email_enabled) is True:\n send_message_slack(message, color)\n send_email(message)\n elif str2bool(slack_enabled) is True and str2bool(email_enabled) is False:\n send_message_slack(message, color)\n elif str2bool(slack_enabled) is False and str2bool(email_enabled) is True:\n send_email(message)\n else:\n pass\n except yaml.YAMLError as exc:\n print(exc)",
"def tunnels(self, tunnels):\n\n self._tunnels = tunnels",
"def checkonly(self):\n OTHER_WSREP.append(socket.gethostbyname(socket.gethostname()))\n for hostitem in ALL_NODES:\n checkhost(hostitem)\n if OTHER_WSREP:\n for wsrepitem in OTHER_WSREP:\n REMAINING_NODES.append(wsrepitem)\n if REMAINING_NODES:\n for wsrephost in OTHER_WSREP:\n checkwsrep(wsrephost)\n print ''",
"def __ssh_tunnel(self):\n\n host = self.sshTunnelDict[\"ssh_ip\"]\n user = self.sshTunnelDict[\"ssh_user\"]\n password = self.sshTunnelDict[\"ssh_password\"]\n sfcs = self.sshTunnelDict[\"target_ip\"]\n\n tunnel_command = 'ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -C -N -f -L 3306:{0} {1}@{2}'.format(sfcs, user, host)\n retry = 5\n while retry:\n if not self.__check_ssh():\n try:\n ssh_tunnel = pexpect.spawn(tunnel_command)\n ssh_tunnel.expect('password:')\n time.sleep(0.1)\n ssh_tunnel.sendline(password)\n ssh_tunnel.expect(pexpect.EOF)\n retry -= 1\n except:\n raise Exception(\"Create SSH Tunnel Failed: retry 5\")\n else: break",
"def isConnected():",
"def check_all_neighbors_present(duthosts, nbrhosts, all_cfg_facts, nbr_macs, check_nbr_state=True):\n for per_host in duthosts.frontend_nodes:\n for asic in per_host.asics:\n logger.info(\"Checking local neighbors on host: %s, asic: %s\", per_host.hostname, asic.asic_index)\n cfg_facts = all_cfg_facts[per_host.hostname][asic.asic_index]['ansible_facts']\n if 'BGP_NEIGHBOR' in cfg_facts:\n neighs = cfg_facts['BGP_NEIGHBOR']\n else:\n logger.info(\"No local neighbors for host: %s/%s, skipping\", per_host.hostname, asic.asic_index)\n continue\n\n dump_and_verify_neighbors_on_asic(duthosts, per_host, asic, list(neighs.keys()),\n nbrhosts, all_cfg_facts, nbr_macs, check_nbr_state=check_nbr_state)",
"async def check_connection_status(self):\n while True:\n if not self.connected:\n self.log.error(\"Lost connection to spa, attempting reconnect.\")\n await self.connect()\n await asyncio.sleep(10)\n continue\n if (self.lastupd + 5 * self.sleep_time) < time.time():\n self.log.error(\"Spa stopped responding, requesting panel config.\")\n await self.send_panel_req(0, 1)\n await asyncio.sleep(self.sleep_time)",
"def tunnel_bindings(self):\n return dict((_server.remote_address, _server.local_address) for\n _server in self._server_list if\n self.tunnel_is_up[_server.local_address])",
"def _check_all_replicas_connected(num_replicas, gateway_port, protocol):\n exec_ids = set()\n exec_id_list = []\n for i in range(num_replicas + 1):\n id_ = _send_request(gateway_port, protocol, request_size=2)[0].text\n exec_ids.add(id_)\n exec_id_list.append(id_)\n print(exec_id_list)\n assert len(exec_ids) == num_replicas",
"def test_mgre(self):\n\n for itf in self.pg_interfaces[3:]:\n #\n # one underlay nh for each overlay/tunnel peer\n #\n itf.generate_remote_hosts(4)\n itf.configure_ipv4_neighbors()\n\n #\n # Create an L3 GRE tunnel.\n # - set it admin up\n # - assign an IP Addres\n # - Add a route via the tunnel\n #\n gre_if = VppGreInterface(\n self,\n itf.local_ip4,\n \"0.0.0.0\",\n mode=(VppEnum.vl_api_tunnel_mode_t.TUNNEL_API_MODE_MP),\n )\n gre_if.add_vpp_config()\n gre_if.admin_up()\n gre_if.config_ip4()\n gre_if.generate_remote_hosts(4)\n\n self.logger.info(self.vapi.cli(\"sh adj\"))\n self.logger.info(self.vapi.cli(\"sh ip fib\"))\n\n #\n # ensure we don't match to the tunnel if the source address\n # is all zeros\n #\n tx = self.create_tunnel_stream_4o4(\n self.pg0,\n \"0.0.0.0\",\n itf.local_ip4,\n self.pg0.local_ip4,\n self.pg0.remote_ip4,\n )\n self.send_and_assert_no_replies(self.pg0, tx)\n\n #\n # for-each peer\n #\n for ii in range(1, 4):\n route_addr = \"4.4.4.%d\" % ii\n tx_e = self.create_stream_ip4(self.pg0, \"5.5.5.5\", route_addr)\n\n #\n # route traffic via the peer\n #\n route_via_tun = VppIpRoute(\n self,\n route_addr,\n 32,\n [VppRoutePath(gre_if._remote_hosts[ii].ip4, gre_if.sw_if_index)],\n )\n route_via_tun.add_vpp_config()\n\n # all packets dropped at this point\n rx = self.send_and_assert_no_replies(self.pg0, tx_e)\n\n gre_if.admin_down()\n gre_if.admin_up()\n rx = self.send_and_assert_no_replies(self.pg0, tx_e)\n\n #\n # Add a TEIB entry resolves the peer\n #\n teib = VppTeib(\n self,\n gre_if,\n gre_if._remote_hosts[ii].ip4,\n itf._remote_hosts[ii].ip4,\n )\n teib.add_vpp_config()\n\n #\n # Send a packet stream that is routed into the tunnel\n # - packets are GRE encapped\n #\n rx = self.send_and_expect(self.pg0, tx_e, itf)\n self.verify_tunneled_4o4(\n self.pg0, rx, tx_e, itf.local_ip4, itf._remote_hosts[ii].ip4\n )\n\n tx_i = self.create_tunnel_stream_4o4(\n self.pg0,\n itf._remote_hosts[ii].ip4,\n itf.local_ip4,\n self.pg0.local_ip4,\n self.pg0.remote_ip4,\n )\n rx = self.send_and_expect(self.pg0, tx_i, self.pg0)\n self.verify_decapped_4o4(self.pg0, rx, tx_i)\n\n #\n # delete and re-add the TEIB\n #\n teib.remove_vpp_config()\n self.send_and_assert_no_replies(self.pg0, tx_e)\n self.send_and_assert_no_replies(self.pg0, tx_i)\n\n teib.add_vpp_config()\n rx = self.send_and_expect(self.pg0, tx_e, itf)\n self.verify_tunneled_4o4(\n self.pg0, rx, tx_e, itf.local_ip4, itf._remote_hosts[ii].ip4\n )\n rx = self.send_and_expect(self.pg0, tx_i, self.pg0)\n self.verify_decapped_4o4(self.pg0, rx, tx_i)\n\n #\n # bounce the interface state and try packets again\n #\n gre_if.admin_down()\n gre_if.admin_up()\n rx = self.send_and_expect(self.pg0, tx_e, itf)\n self.verify_tunneled_4o4(\n self.pg0, rx, tx_e, itf.local_ip4, itf._remote_hosts[ii].ip4\n )\n rx = self.send_and_expect(self.pg0, tx_i, self.pg0)\n self.verify_decapped_4o4(self.pg0, rx, tx_i)\n\n gre_if.admin_down()\n gre_if.unconfig_ip4()",
"def verify_all_stack_hosts(self):\n for _ in range(2):\n self.verify_stack_up()\n self.verify_no_cable_errors()\n self.verify_stack_hosts()\n self.verify_traveling_dhcp_mac()\n self.verify_unicast_not_looped()\n self.verify_no_bcast_to_self()\n self.verify_stack_has_no_loop()\n self.flap_all_switch_ports()",
"def _check_connection(self):\n for _ in range(3):\n try:\n r = get(f\"http://{self.ip}/student/{self.user}\")\n if r.ok:\n break \n except OSError as e:\n print(f\"Connection error:\\n{e}\")\n sleep(2)\n else:\n raise ConnectionError(f\"Can not connect to server with params ip: {self.ip}, user: {self.user}\")",
"def Connected(self) -> bool:",
"def Connected(self) -> bool:",
"def _check_publishers_connection(self):\n rate = rospy.Rate(10) # 1hz\n while self._vel_pub.get_num_connections() == 0 and not rospy.is_shutdown():\n rospy.logdebug(\"No susbribers to _vel_pub yet so we wait and try again\")\n try:\n rate.sleep()\n except rospy.ROSInterruptException:\n # This is to avoid error when world is rested, time when backwards.\n pass\n rospy.logdebug(\"_vel_pub Publisher Connected\")\n\n rospy.logdebug(\"All Publishers READY\")",
"def check_ip_fwd(duthosts, all_cfg_facts, nbrhosts, tbinfo):\n for porttype in [\"ethernet\", \"portchannel\"]:\n for version in [4, 6]:\n\n ports = pick_ports(duthosts, all_cfg_facts, nbrhosts, tbinfo, port_type_a=porttype, version=version)\n\n for ttl, size in [(2, 64), (1, 1450)]:\n # local interfaces\n check_packet(sonic_ping, ports, 'portB', 'portA', size=size, ttl=ttl, ttl_change=0)\n\n # local neighbors\n check_packet(sonic_ping, ports, 'portA', 'portA',\n dst_ip_fld='nbr_ip', size=size, ttl=ttl, ttl_change=0)\n\n vm_host_to_A = nbrhosts[ports['portA']['nbr_vm']]['host']\n\n check_packet(eos_ping, ports, 'portD', 'portA', dst_ip_fld='my_lb4096_ip', src_ip_fld='nbr_lb',\n dev=vm_host_to_A, size=size, ttl=ttl)\n\n # loopbacks\n check_packet(sonic_ping, ports, 'portA', 'portA', dst_ip_fld='nbr_lb', size=size, ttl=ttl, ttl_change=0)\n\n # inband\n check_packet(sonic_ping, ports, 'portA', 'portA', src_ip_fld='inband', size=size, ttl=ttl, ttl_change=0)\n\n # DUT loopback\n # these don't decrement ttl\n check_packet(sonic_ping, ports, 'portA', 'portA', src_ip_fld='my_lb_ip', dst_ip_fld='my_ip', size=size,\n ttl=ttl, ttl_change=0)\n check_packet(sonic_ping, ports, 'portA', 'portA', src_ip_fld='my_lb_ip', dst_ip_fld='nbr_ip', size=size,\n ttl=ttl, ttl_change=0)\n check_packet(sonic_ping, ports, 'portA', 'portA', src_ip_fld='my_lb_ip', dst_ip_fld='nbr_lb', size=size,\n ttl=ttl, ttl_change=0)\n\n vm_host_to_A = nbrhosts[ports['portA']['nbr_vm']]['host']\n check_packet(eos_ping, ports, 'portA', 'portA', dst_ip_fld='my_lb4096_ip', src_ip_fld='nbr_lb',\n dev=vm_host_to_A, size=size, ttl=ttl, ttl_change=0)\n\n # end to end\n vm_host_to_A = nbrhosts[ports['portA']['nbr_vm']]['host']\n check_packet(eos_ping, ports, 'portB', 'portA', dst_ip_fld='nbr_lb', src_ip_fld='nbr_lb',\n dev=vm_host_to_A, size=size, ttl=ttl)\n check_packet(eos_ping, ports, 'portC', 'portA', dst_ip_fld='nbr_lb', src_ip_fld='nbr_lb',\n dev=vm_host_to_A, size=size, ttl=ttl)\n check_packet(eos_ping, ports, 'portD', 'portA', dst_ip_fld='nbr_lb', src_ip_fld='nbr_lb',\n dev=vm_host_to_A, size=size, ttl=ttl)",
"def check_all_neighbors_present_local(duthosts, per_host, asic, neighbors, all_cfg_facts,\n nbrhosts, nbr_macs, check_nbr_state=True):\n cfg_facts = all_cfg_facts[per_host.hostname][asic.asic_index]['ansible_facts']\n neighs = cfg_facts['BGP_NEIGHBOR']\n\n fail_cnt = 0\n\n # Grab dumps of the asicdb, appdb, voqdb, and arp table\n asicdb = AsicDbCli(asic)\n asic_dump = asicdb.dump_neighbor_table()\n\n appdb = AppDbCli(asic)\n app_dump = appdb.dump_neighbor_table()\n\n encaps = {}\n\n if per_host.is_multi_asic:\n arptable = per_host.switch_arptable(namespace=asic.namespace)['ansible_facts']\n else:\n arptable = per_host.switch_arptable()['ansible_facts']\n\n if len(duthosts.supervisor_nodes) == 1:\n voqdb = VoqDbCli(duthosts.supervisor_nodes[0])\n voq_dump = voqdb.dump_neighbor_table()\n elif per_host.is_multi_asic:\n # look on linecard for pizzabox multiasic\n voqdb = VoqDbCli(per_host)\n voq_dump = voqdb.dump_neighbor_table()\n else:\n voq_dump = {}\n\n for neighbor in neighbors:\n nbr_vm = get_vm_with_ip(neighbor, nbrhosts)\n neigh_mac = nbr_macs[nbr_vm['vm']][nbr_vm['port']]\n local_ip = neighs[neighbor]['local_addr']\n local_port = get_port_by_ip(cfg_facts, local_ip)\n\n sysport_info = {'slot': cfg_facts['DEVICE_METADATA']['localhost']['hostname'],\n 'asic': cfg_facts['DEVICE_METADATA']['localhost']['asic_name']}\n\n # Validate the asic db entries\n for entry in asic_dump:\n matchstr = '\"%s\",' % neighbor\n if matchstr in entry:\n\n if neigh_mac.lower() != asic_dump[entry]['value']['SAI_NEIGHBOR_ENTRY_ATTR_DST_MAC_ADDRESS'].lower():\n logger.error(\"Asic neighbor macs for %s do not match: %s != %s\", neighbor, neigh_mac.lower(),\n asic_dump[entry]['value']['SAI_NEIGHBOR_ENTRY_ATTR_DST_MAC_ADDRESS'].lower())\n fail_cnt += 1\n else:\n logger.debug(\"Asic neighbor macs for %s match: %s == %s\", neighbor, neigh_mac.lower(),\n asic_dump[entry]['value']['SAI_NEIGHBOR_ENTRY_ATTR_DST_MAC_ADDRESS'].lower())\n encaps[neighbor] = asic_dump[entry]['value']['SAI_NEIGHBOR_ENTRY_ATTR_ENCAP_INDEX']\n break\n else:\n logger.error(\"Did not find neighbor in asictable for IP: %s\" % neighbor)\n fail_cnt += 1\n\n # Validate the APP db entries\n for entry in app_dump:\n matchstr = ':%s' % neighbor\n if entry.endswith(matchstr):\n if neigh_mac.lower() != app_dump[entry]['value']['neigh'].lower():\n logger.error(\"App neighbor macs for %s do not match: %s != %s\", neighbor, neigh_mac.lower(),\n app_dump[entry]['value']['neigh'].lower())\n fail_cnt += 1\n else:\n logger.debug(\"App neighbor macs for %s match: %s == %s\", neighbor, neigh_mac.lower(),\n app_dump[entry]['value']['neigh'].lower())\n\n pytest_assert(\":{}:\".format(local_port) in entry, \"Port for %s does not match\" % entry)\n break\n else:\n logger.error(\"Did not find neighbor in app for IP: %s\" % neighbor)\n fail_cnt += 1\n\n # Validate the arp table entries\n if check_nbr_state:\n check_host_arp_table(per_host, asic, neighbor, neigh_mac, local_port, 'REACHABLE', arptable=arptable)\n else:\n check_host_arp_table(per_host, asic, neighbor, neigh_mac, local_port, None, arptable=arptable)\n\n # supervisor checks\n for entry in voq_dump:\n if entry.endswith('|%s' % neighbor) or entry.endswith(':%s' % neighbor):\n\n if \"portchannel\" in local_port.lower():\n slotname = cfg_facts['DEVICE_METADATA']['localhost']['hostname']\n asicname = cfg_facts['DEVICE_METADATA']['localhost']['asic_name']\n else:\n slotname = sysport_info['slot']\n asicname = sysport_info['asic']\n\n logger.debug(\"Neigh key: %s, slotnum: %s\", entry, slotname)\n 
pytest_assert(\"|%s|\" % slotname in entry,\n \"Slot for %s does not match %s\" % (entry, slotname))\n pytest_assert(\"|%s:\" % local_port in entry or \"|%s|\" % local_port in entry,\n \"Port for %s does not match %s\" % (entry, local_port))\n pytest_assert(\"|%s|\" % asicname in entry,\n \"Asic for %s does not match %s\" % (entry, asicname))\n\n pytest_assert(voq_dump[entry]['value']['neigh'].lower() == neigh_mac.lower(),\n \"Voq: neighbor: %s mac does not match: %s\" %\n (neighbor, voq_dump[entry]['value']['neigh'].lower()))\n pytest_assert(voq_dump[entry]['value']['encap_index'].lower() == encaps[neighbor],\n \"Voq: encap: %s mac does not match: %s\" %\n (neighbor, voq_dump[entry]['value']['encap_index'].lower()))\n break\n else:\n logger.error(\"Neighbor: %s on slot: %s, asic: %s not present in voq\",\n neighbor, sysport_info['slot'], sysport_info['asic'])\n fail_cnt += 1\n\n logger.info(\"Local %s/%s and chassisdb neighbor validation of %s is successful (mac: %s, idx: %s)\",\n per_host.hostname, asic.asic_index, neighbor, neigh_mac, encaps[neighbor])\n\n return {'encaps': encaps, 'fail_cnt': fail_cnt}",
"def run():\n\n assert SSH_HOST is not None, 'SSH_HOST not set. Please configure.'\n\n\n def connect():\n port = find_open_port(SSH_HOST)\n if init_tunnel(SSH_HOST, port):\n print 'Tunnel initialized, pid:', PID\n return {'ssh tunnel entry': 'ssh://{}:{}'.format(SSH_HOST, port)}\n return {}\n\n def is_pid_alive(pid):\n processes = subprocess.check_output(['ps', '-fx'])\n for line in processes.splitlines():\n lpid = line.split()[0]\n if lpid == pid:\n return True\n return False\n\n def find_open_port(host, start_port=22222):\n i = 0\n while i < 1000:\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n result = sock.connect_ex((host, start_port + i))\n if result == 0:\n print \"Port is already used: \", start_port + i\n i += 1\n else:\n return start_port + i\n \n\n \n\n if PID is None:\n return connect()\n else:\n # check if process is still alive\n if is_pid_alive(PID):\n print 'Tunnel still active. Not doing anything.'\n else:\n return connect()",
"def initialize_network_los() -> bool:\n return True"
] | [
"0.6775253",
"0.66485655",
"0.64152706",
"0.638992",
"0.62913436",
"0.62176776",
"0.6206469",
"0.61653435",
"0.6158915",
"0.5879251",
"0.58663124",
"0.57571006",
"0.57403094",
"0.5739101",
"0.57283133",
"0.57218915",
"0.56847924",
"0.5672672",
"0.5664348",
"0.5653754",
"0.5640217",
"0.563764",
"0.56279385",
"0.5620742",
"0.5620742",
"0.56171596",
"0.5617154",
"0.55967313",
"0.5581065",
"0.55793065"
] | 0.767064 | 0 |
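The check_tunnels record above matches the public API of the sshtunnel library, so a hedged usage sketch (host and credentials are placeholders) would look roughly like this:

from sshtunnel import SSHTunnelForwarder

server = SSHTunnelForwarder(
    'gateway.example.com',                        # placeholder gateway host
    ssh_username='user',
    remote_bind_address=('127.0.0.1', 5432),
)
server.start()
server.check_tunnels()        # force a probe of every forwarded tunnel
print(server.tunnel_is_up)    # maps each local bind address to True/False
server.stop()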
Check if tunnel is already established | def _check_tunnel(self, _srv):
if self.skip_tunnel_checkup:
self.tunnel_is_up[_srv.local_address] = True
return
self.logger.info('Checking tunnel to: {0}'.format(_srv.remote_address))
if isinstance(_srv.local_address, string_types): # UNIX stream
s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
else:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(TUNNEL_TIMEOUT)
try:
# Windows raises WinError 10049 if trying to connect to 0.0.0.0
connect_to = ('127.0.0.1', _srv.local_port) \
if _srv.local_host == '0.0.0.0' else _srv.local_address
s.connect(connect_to)
self.tunnel_is_up[_srv.local_address] = _srv.tunnel_ok.get(
timeout=TUNNEL_TIMEOUT * 1.1
)
self.logger.debug(
'Tunnel to {0} is DOWN'.format(_srv.remote_address)
)
except socket.error:
self.logger.debug(
'Tunnel to {0} is DOWN'.format(_srv.remote_address)
)
self.tunnel_is_up[_srv.local_address] = False
except queue.Empty:
self.logger.debug(
'Tunnel to {0} is UP'.format(_srv.remote_address)
)
self.tunnel_is_up[_srv.local_address] = True
finally:
s.close() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def tunnel_up(self):\n return self._ssh_host != None and self._ssh_port != None",
"def check_tunnels(self):\n skip_tunnel_checkup = self.skip_tunnel_checkup\n try:\n # force tunnel check at this point\n self.skip_tunnel_checkup = False\n for _srv in self._server_list:\n self._check_tunnel(_srv)\n finally:\n self.skip_tunnel_checkup = skip_tunnel_checkup # roll it back",
"def isConnected():",
"def is_connected():\n \n try:\n socket.create_connection((\"www.google.com\", 80))\n return True\n except OSError:\n pass\n return False",
"def local_is_up(self, target):\n try:\n check_address(target)\n except ValueError:\n self.logger.warning('Target must be a tuple (IP, port), where IP '\n 'is a string (i.e. \"192.168.0.1\") and port is '\n 'an integer (i.e. 40000). Alternatively '\n 'target can be a valid UNIX domain socket.')\n return False\n\n self.check_tunnels()\n return self.tunnel_is_up.get(target, True)",
"def check_up(addr, p):\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n result = sock.connect_ex((addr, p))\n sock.close()\n if result == 0:\n ans = True\n else:\n ans = False\n return ans",
"def is_connected():\n import socket\n try:\n host = socket.gethostbyname(\"www.gov.uk\")\n socket.create_connection((host, 80), 2)\n return True\n except:\n pass\n return False",
"def alive(opts):\n\n dev = conn()\n\n # check if SessionListener sets a TransportError if there is a RpcTimeoutError\n thisproxy[\"conn\"].connected = ping()\n\n local_connected = dev.connected\n if not local_connected:\n __salt__[\"event.fire_master\"](\n {}, \"junos/proxy/{}/stop\".format(opts[\"proxy\"][\"host\"])\n )\n return local_connected",
"def status_check(self):\n try:\n client = self.connect()\n client.sys.is_initialized() # make an actual network connection\n return True\n except:\n return False",
"def __CheckConnectStatus(self):\r\n if not self.tn:\r\n print \"Connection is down!\"\r\n return False\r\n else:\r\n print \"Connection is alive!\"\r\n return True",
"def CheckIfConnecting(self):\n if self.CheckIfWiredConnecting() or self.CheckIfWirelessConnecting():\n return True\n else:\n return False",
"def nat_waitforconn_alive():\r\n return NAT_STATE_DATA[\"mux\"] != None and NAT_STATE_DATA[\"mux\"].isAlive()",
"def isAlive(self):\r\n # Just use connectionInit, that is our internal variable\r\n return self.connectionInit",
"def __check_ssh(self):\n sfcs = self.sshTunnelDict[\"target_ip\"]\n\n cmd = \"ps aux | grep ssh | awk '{print $20}'\"\n result = subprocess.Popen(cmd,\n shell= True,\n stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n stdout, stderr = result.communicate()\n if sfcs not in stdout.decode():\n return False\n else: return True",
"def port_is_alive(target, port):\n a_socket = socket(AF_INET, SOCK_STREAM)\n a_socket.settimeout(5)\n\n location = (target, port)\n try:\n result_of_check = a_socket.connect_ex(location)\n except gaierror:\n return False\n a_socket.close()\n\n if result_of_check == 0:\n return True\n else:\n return False",
"def is_connected(self):\n return \"_connection\" in self.__dict__",
"def isconnected(self) -> bool:",
"def check_port(self):\r\n\t\treturn(self.connect.is_open)",
"def get_on_tunnel(self):\n return self._is_on_tunnel",
"def _is_sshd_server_running(self, timeout=1):\n try:\n self.ssh_client.connect(timeout=timeout)\n self.ssh_client.close()\n return True\n except Exception:\n return False",
"def is_connected(self) -> bool:",
"def check_connection(self):\n pass",
"def checkServerThread(self):\r\n\r\n # check if the server is alive\r\n sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\r\n\r\n result = 1\r\n try:\r\n result = sock.connect_ex((\"dealookup.com\", 80))\r\n except:\r\n result = 1 \r\n\r\n # server is not live \r\n if result != 0:\r\n result = 1\r\n\r\n self.checkResultSignal.emit(result)",
"def tunnel_up(self, org_cb):\n _log.info(\"storage proxy up\")\n if not self.tunnel:\n return True\n _log.analyze(self.node.id, \"+ CLIENT\", {'tunnel_id': self.tunnel.id})\n # FIXME assumes that the org_cb is the callback given by storage when starting, can only be called once\n # not future up/down\n if org_cb:\n org_cb(True)\n # We should always return True which sends an ACK on the destruction of the tunnel\n return True",
"def isconnected(self) -> bool:\n ...",
"def check_connection(self):\n return False",
"def is_active(self):\n if not self._relaypid:\n return False\n\n self._lock.acquire()\n relaypid = None\n portoffset = None\n try:\n relaypid, portoffset = self._check_tcprelay()\n except AttributeError:\n logger.debug(\n \"No active TCPRELAY tunnel on locationid - {0}\"\n \"\".format(self.locationid_param))\n finally:\n self._lock.release()\n\n return (\n self._relaypid == relaypid and\n self._portoffset == portoffset\n )",
"def is_connected():\r\n global connection\r\n if connection is None:\r\n return False\r\n else:\r\n return True",
"def host_is_up(self, conn_type, hostname):\n try:\n socket_host = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n socket_host.settimeout(1)\n if conn_type == CONN_SSH:\n if ':' in hostname:\n LIBVIRT_HOST, PORT = (hostname).split(\":\")\n PORT = int(PORT)\n else:\n PORT = SSH_PORT\n LIBVIRT_HOST = hostname\n socket_host.connect((LIBVIRT_HOST, PORT))\n if conn_type == CONN_TCP:\n socket_host.connect((hostname, TCP_PORT))\n if conn_type == CONN_TLS:\n socket_host.connect((hostname, TLS_PORT))\n socket_host.close()\n return True\n except Exception as err:\n return err",
"def reconnecting(self) -> bool:"
] | [
"0.76572174",
"0.68320966",
"0.6712594",
"0.66211444",
"0.65298826",
"0.6516994",
"0.6469484",
"0.64455706",
"0.6440732",
"0.642409",
"0.64158744",
"0.64121675",
"0.640858",
"0.6397321",
"0.6368237",
"0.63668215",
"0.63547385",
"0.6343381",
"0.6332009",
"0.62437207",
"0.6231109",
"0.623042",
"0.6226915",
"0.6221889",
"0.6217247",
"0.6210239",
"0.6200677",
"0.6193047",
"0.6189303",
"0.61852276"
] | 0.7658537 | 0 |
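The probe in the _check_tunnel record above can be distilled into a standalone helper: connect to the local end with a short timeout and treat a socket error as a dead tunnel. TUNNEL_TIMEOUT stands in for the module constant the record assumes:

import socket

TUNNEL_TIMEOUT = 1.0   # assumed value for the record's module-level constant

def local_end_is_reachable(host, port, timeout=TUNNEL_TIMEOUT):
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.settimeout(timeout)
    try:
        s.connect((host, port))
        return True
    except socket.error:
        return False
    finally:
        s.close()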
Make SSH Handler class | def _make_ssh_forward_handler_class(self, remote_address_):
class Handler(_ForwardHandler):
remote_address = remote_address_
ssh_transport = self._transport
logger = self.logger
return Handler | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __init__(self, settings, server=None):\n print(\"SSH Action Handler Started\")\n self.server = server\n self.active_ssh_tasks = {}\n self.key_location = settings[\"ssh_key_location\"]\n self.server_addr = settings[\"ssh_server_addr\"]\n self.server_username = settings[\"ssh_server_username\"]",
"def SSH(*args, **kwargs):\n method = import_class(settings.ORCHESTRATION_SSH_METHOD_BACKEND)\n return method(*args, **kwargs)",
"def __init__(self, **kwargs):\n self.host = kwargs.get(\"host\")\n self.user = kwargs.get(\"user\")\n self.passwd = kwargs.get(\"passwd\")\n self.key_filename = kwargs.get(\"ssh_key\")\n self._sock = None\n self._chan = None\n self._session = None",
"def ssh_cmd(ctx):\n pass",
"def __init__(__self__, *,\n auth_type: pulumi.Input[str],\n host_key: Optional[pulumi.Input[str]] = None,\n host_key_algorithm: Optional[pulumi.Input[str]] = None,\n private_key: Optional[pulumi.Input[str]] = None):\n pulumi.set(__self__, \"auth_type\", 'SSH')\n if host_key is not None:\n pulumi.set(__self__, \"host_key\", host_key)\n if host_key_algorithm is not None:\n pulumi.set(__self__, \"host_key_algorithm\", host_key_algorithm)\n if private_key is not None:\n pulumi.set(__self__, \"private_key\", private_key)",
"def __init__(self, args, shell, userns):\n super(SSHMgr, self).__init__(args, shell, userns)\n parser = MagicArgumentParser()\n parser.add_argument('--host', type=str, default='localhost',\n help='Machine to reach (default = localhost)')\n parser.add_argument('--pid', type=str,\n help='Variable to store SSH process pid')\n _args, cmd = parser.parse_known_args(args)\n self.cmd = self._wlbin + [_args.host, ] + cmd\n # SSH Cannot fork into background without a command to execute.\n # Popen instance is created in submit",
"def make_new_handler(self, *args, **kwargs):",
"def create_ssh_handle(xcnode):\n client = paramiko.SSHClient()\n client.load_system_host_keys()\n client.set_missing_host_key_policy(paramiko.WarningPolicy)\n\n try:\n client.connect(\n hostname=xcnode.hostname,\n username=xcnode.username,\n password=xcnode.password,\n port=int(xcnode.port)\n )\n xcnode.fd.write('ssh\\'ed to {} @ {}\\n'.format(\n xcnode.hostname, datetime.now()))\n except Exception as e:\n print e\n client = None\n\n xcnode.client = client\n\n return xcnode",
"def main():\n # Set these to your own details.\n myssh = connect('example.com')\n myssh.put('ssh.py')\n myssh.close()",
"def __init__(self, host, username, password, port=22):\n self.sftp = None\n self.sftp_open = False\n \n # open SSH Transport stream\n self.transport = paramiko.Transport((host, port))\n \n self.transport.connect(username=username, password=password)",
"def __init__(self, command_handler_name):\n\n # Set the command handler attributes\n self.name = command_handler_name",
"def __init__(self, host, port, **kwargs):\n\n SocketHandler.__init__(self, host, port)\n BaseHandler.__init__(self, **kwargs)",
"def __enter__(self):\n self.ssh = paramiko.SSHClient()\n self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n self.ssh.connect(self.host, username=self.user, port=self.port, password=self.password)\n return self",
"async def client_ssh_handler(process):\n log.debug(f\"clients.py:client_ssh_handler - SSH details are: {dir(process)}\")\n reader = process.stdin\n writer = process.stdout\n client_details = process.get_extra_info(\"peername\")\n addr, port, *rest = client_details\n\n connection = PlayerConnection(addr, port, \"ssh\")\n\n await register_client(connection)\n\n tasks = [\n asyncio.create_task(client_read(reader, connection), name=f\"{connection.uuid} read\"),\n asyncio.create_task(client_write(writer, connection), name=f\"{connection.uuid} write\"),\n ]\n\n asyncio.current_task().set_name(f\"{connection.uuid} handler\")\n\n # We want to .wait until the first task is completed. Completed could be an actual finishing\n # of execution or an exception. If either the read or writer \"completes\", we want to ensure\n # we move beyond this point and cleanup the tasks associated with this client.\n _, rest = await asyncio.wait(tasks, return_when=\"FIRST_COMPLETED\")\n\n await unregister_client(connection)\n\n process.close()\n process.exit(0)\n\n for task in rest:\n task.cancel()",
"def __init__(self, handler):\n self.__handler = handler",
"def salt_ssh_cli(\n self,\n factory_class=cli.ssh.SaltSsh,\n roster_file=None,\n target_host=None,\n client_key=None,\n ssh_user=None,\n **factory_class_kwargs,\n ):\n script_path = self.factories_manager.get_salt_script_path(\"salt-ssh\")\n return factory_class(\n script_name=script_path,\n config=self.config.copy(),\n roster_file=roster_file,\n target_host=target_host,\n client_key=client_key,\n ssh_user=ssh_user or running_username(),\n system_service=self.factories_manager.system_service,\n python_executable=self.python_executable,\n **factory_class_kwargs,\n )",
"def open_ssh():\n print('Opening SSH...')",
"def _start_ssh(self):\n try:\n message = '\\nEnter number you want to connect: '\n num = raw_input(message)\n while not int(num) in self.instance_list:\n num = raw_input(message)\n\n message_user = 'Enter username for ssh_login(blank = %s): ' % DEFAULT_USER \n user = raw_input(message_user)\n if not user:\n user = DEFAULT_USER\n \n target = self.instance_list[int(num)]\n ssh_key_path = os.path.join(SSH_DIR, target['key'])\n if not os.path.exists(ssh_key_path):\n print 'SSH key not found! KEY_PATH[ %s ]' % ssh_key_path\n return\n\n command = COMMAND % {'sshkey' : ssh_key_path, 'user' : user, 'server' : target['dns'], 'port' : self.port}\n\n print 'Connecting to \"%s\"... [SSH COMMAND: %s ]' % (target['name'], command)\n os.system(command)\n except KeyboardInterrupt:\n print '\\nAborted!'\n finally:\n sys.exit()",
"def handle_message(self, message):\n payload = json.loads(message.payload)\n action_type = payload[\"action_type\"]\n parameters = json.loads(payload[\"parameters\"])\n\n command = payload[\"command\"]\n if command == \"SSH-Start\":\n return self.start_ssh_task(parameters)\n elif command == \"SSH-Stop\":\n return self.stop_ssh_task(parameters)\n else:\n print(\"Message Error SSH Action Handler \" + str(message))\n return False",
"def _ssh(ip, *, user=None, key=None, port=8888):\n # Need to replace \".\", because I don't want \n # `ip` to be a keyword argument\n if ip == \".\" or ip == \"...\": ip = None \n func_args = locals()\n conf = Bunch(**func_args)\n \n # Loads default config if there is one\n # and update the conf object with data\n # from it, but function args have precedence\n fname = os.path.expanduser(\"~/.nbx/aws.json\")\n fname = Path(fname)\n if fname.is_file(): \n stored = load(fname)\n for k,v in stored.items():\n # Function args have precedence\n if conf[k] is None: conf[k] = v\n \n # Check if we got everything we need to\n # connect to instance\n fail = False\n for k in [\"ip\", \"user\", \"key\", \"port\"]:\n if conf[k] is None:\n fail = True\n print(f\"Please provide --{k}\")\n \n # Save what we already got, and\n # proceed if we got everything or return\n dump(conf, fname)\n if fail: return\n \n config_str = SSH_CONFIG_TEMPLATE.format(\n host=\"aws\", \n user=conf.user, \n ip=conf.ip, \n key=conf.key\n )\n print(config_str)\n dump(config_str, os.path.expanduser(\"~/.ssh/ec2_config\"), format=\".txt\")\n \n # We could write some environment vars\n # but we can't source them from here\n #\n # fname = os.path.expanduser(\"~/.nbx/.bash_aws\")\n # string = f\"export xaws={conf.user}@{conf.ip};\\n\"\n # dump(string, fname, format=\".txt\")\n\n # Connect to server and forward local port 8888 to remote port 8888\n # We can now connect to a remote jupyter notebook server via `http://localhost:8888/`\n cmd = f\"ssh -i {conf.key} -L {conf.port}:localhost:{conf.port} {conf.user}@{conf.ip}\"\n os.system(f'bash -c \\\"{cmd}\\\"')",
"def make_priv(self, handler):\n self.handler = handler",
"def __init__(self, config):\n\n self.umodes = \"+iko\"\n\n self.config = config\n\n ServerConn.__init__(self, self.config[\"host\"], self.config[\"port\"])\n\n self.last_uid = 0",
"def __ssh_tunnel(self):\n\n host = self.sshTunnelDict[\"ssh_ip\"]\n user = self.sshTunnelDict[\"ssh_user\"]\n password = self.sshTunnelDict[\"ssh_password\"]\n sfcs = self.sshTunnelDict[\"target_ip\"]\n\n tunnel_command = 'ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -C -N -f -L 3306:{0} {1}@{2}'.format(sfcs, user, host)\n retry = 5\n while retry:\n if not self.__check_ssh():\n try:\n ssh_tunnel = pexpect.spawn(tunnel_command)\n ssh_tunnel.expect('password:')\n time.sleep(0.1)\n ssh_tunnel.sendline(password)\n ssh_tunnel.expect(pexpect.EOF)\n retry -= 1\n except:\n raise Exception(\"Create SSH Tunnel Failed: retry 5\")\n else: break",
"def __init__(self, config):\n self.config = config\n\n # setup logging\n if config.get('ssh_log_path'):\n ssh.util.log_to_file(config['ssh_log_path'])\n\n hostname = config.get('remote_host', 'localhost')\n port = config.get('remote_port', 22)\n username = config.get('username') or getpass.getuser()\n password = config.get('password')\n private_key = config.get('private_key')\n hostkey = self._load_host_key(hostname)\n\n log.debug(\"Connecting to %s, port %s...\", hostname, port)\n self._transport = tpt = ssh.Transport((hostname, port))\n tpt.use_compression(compress=config.get('compress', False))\n self._authenticate(tpt, username, password, hostkey, private_key)\n\n if tpt.is_authenticated():\n log.debug(\"SSH transport authenticated. Creating SFTP client.\")\n # create SFTP client from SSHClient\n self._client = ssh.SFTPClient.from_transport(tpt)\n else:\n raise tpt.get_exception()",
"def __init__(self, *args, **kvargs):\n self.proxy_host = kvargs.get('proxy_host')\n self.proxy_user = kvargs.get('proxy_user')\n self.proxy_password = kvargs.get('proxy_password')\n self.proxy_port = kvargs.get('proxy_port')\n self.proxy_ssh_key_file = kvargs.get('proxy_ssh_key')\n self.proxy_connection = False\n self.host = kvargs.get('host')\n self.user = kvargs.get('user')\n self.password = kvargs.get('password')\n self.port = kvargs.get('port')\n self.dest_connection = False\n\n try:\n # Add host key policy\n if self.proxy_port is None:\n self.proxy_port = 22\n self.transport = paramiko.Transport((self.proxy_host, self.proxy_port))\n self.transport.start_client()\n if self.proxy_ssh_key_file:\n self.proxy_ssh_key = paramiko.RSAKey.from_private_key_file(self.proxy_ssh_key_file)\n conn_result = self.transport.auth_publickey(username=self.proxy_user, key=self.proxy_ssh_key)\n else:\n conn_result = self.transport.auth_password(username=self.proxy_user, password=self.proxy_password)\n if len(conn_result) == 0:\n self.proxy_connection = True\n else:\n logging.error('Unable to connect to proxy host. Authentication failed.')\n raise TobyException('Unable to connect to proxy host. Authentication failed.')\n except Exception as exp:\n logging.error('Unable to connect to proxy host: %s' % exp)\n raise TobyException('Unable to connect to proxy host: %s' % exp)\n\n try:\n if self.port is None:\n self.port = 22\n self.tunnel = paramiko.Transport(self.transport.open_channel(\n kind='direct-tcpip',\n dest_addr=(self.host, self.port),\n src_addr=('127.0.0.1', 0)))\n self.tunnel.start_client()\n conn_result = self.tunnel.auth_password(username=self.user, password=self.password)\n if len(conn_result) == 0:\n self.dest_connection = True\n else:\n logging.error('Unable to connect to destination host. Authentication failed.')\n raise TobyException('Unable to connect to destination host. Authentication failed.')\n except Exception as exp:\n logging.error('Unable to connect to destination host: %s' % exp)\n raise TobyException('Unable to connect to destination host: %s' % exp)\n\n try:\n self.handle = self.tunnel.open_session(20)\n self.handle.get_pty(width=160, height=0)\n self.handle.invoke_shell()\n self.handle.set_combine_stderr(True)\n self.handle.settimeout(60)\n tnh = self.handle\n got = []\n while True:\n _rd, _wr, _err = select([tnh], [], [], 10)\n if _rd:\n data = tnh.recv(1024)\n data = data.decode(\"utf-8\")\n got.append(data)\n if re.search('> ', data):\n tnh.send(b' start shell\\n')\n data = tnh.recv(1024)\n data = data.decode(\"utf-8\")\n if re.search(r'(\\$|>|#|%)[\\s\\t]?', data):\n break\n except Exception as exp:\n logging.error(\n 'Unable to fetch the prompt on destination host: %s' % exp)\n raise TobyException(\n 'Unable to fetch the prompt on destination host: %s' % exp)",
"def __init__(self, cmd_handler: Callable[[IRCClient], CommandHandler], *args, **kwargs):\n IRCClient.__init__(self, *args, **kwargs)\n self.command_handler: CommandHandler = cmd_handler(self)",
"def msftp(\n instance: Instance,\n) -> None:\n host = instance_host(instance)\n spawn.runv([*SFTP_COMMAND, host])",
"def __init__(self, host, username= \"\", password= \"\", stdprompt=ixia_prompt_regex):\n\n Host.__init__(self, host, username, password, stdprompt)\n log.output(\"Ixia object for host %s created.\" % host)\n\t#self.cmd(\"\")",
"def __initHandlersUser(self):\n handlers = {}\n handlers['WRITE_FILE'] = self.write_file\n handlers['READU_FILE'] = self.read_file\n handlers['DELET_FILE'] = self.delete_file\n handlers['STATUS_SRV'] = self.status_server\n handlers['RSYNC_FILE'] = self.rsync_file\n handlers['WSYNC_FILE'] = self.wsync_file\n return handlers",
"def __init__(self, host, username, port, key_file=None, debug=False):\n\n self.log = logger.getLogger(name=\"directord\", debug_logging=debug)\n self.key_file = key_file\n self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.sock.connect((host, port))\n\n self.session = Session()\n self.session.handshake(self.sock)\n self.log.debug(\n \"Handshake with [ %s ] on port [ %s ] complete.\", host, port\n )\n\n self.known_hosts = self.session.knownhost_init()\n\n if key_file:\n self.session.userauth_publickey_fromfile(username, key_file)\n self.log.debug(\"Key file [ %s ] added\", key_file)\n else:\n try:\n self.session.agent_auth(username)\n self.log.debug(\"User agent based authentication enabled\")\n except ssh2.exceptions.AgentConnectionError as e:\n self.log.warning(\n \"SSH Agent connection has failed: %s.\"\n \" Attempting to connect with the user's implicit ssh key.\",\n str(e),\n )\n home = os.path.abspath(os.path.expanduser(\"~\"))\n default_keyfile = os.path.join(home, \".ssh/id_rsa\")\n if os.path.exists(default_keyfile):\n self.session.userauth_publickey_fromfile(\n username, default_keyfile\n )\n self.log.debug(\"Key file [ %s ] added\", key_file)\n\n self.channel = None"
] | [
"0.6786921",
"0.664092",
"0.5964434",
"0.59032935",
"0.5826504",
"0.5811605",
"0.5802258",
"0.57916",
"0.57681113",
"0.57595307",
"0.56702554",
"0.5632079",
"0.5591133",
"0.5555422",
"0.5547671",
"0.5513227",
"0.54987305",
"0.547998",
"0.54228693",
"0.5409541",
"0.54088503",
"0.5382245",
"0.5358509",
"0.5351307",
"0.5267029",
"0.52557606",
"0.5241743",
"0.52376693",
"0.5233335",
"0.52186835"
] | 0.7127526 | 0 |
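Several negatives above repeat the same paramiko connect-then-exec pattern (create_ssh_handle, the __enter__ context manager, the instance connect helper); a minimal runnable sketch of that shared pattern follows, where the hostname, credentials, and command are placeholder assumptions rather than values taken from any entry.

import paramiko

def run_remote_command(hostname, username, password, command, port=22):
    # open the connection, run one command, and always close the client
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        client.connect(hostname=hostname, username=username,
                       password=password, port=port)
        # exec_command returns (stdin, stdout, stderr) file-like objects
        _, stdout, stderr = client.exec_command(command)
        return stdout.read().decode(), stderr.read().decode()
    finally:
        client.close()

# usage with placeholder values:
# out, err = run_remote_command('example.com', 'admin', 'secret', 'uname -a')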
Read ssh_config_file and try to look up user (ssh_username), identityfile (ssh_pkey), port (ssh_port), and proxycommand (ssh_proxy) entries for ssh_host | def _read_ssh_config(ssh_host,
ssh_config_file,
ssh_username=None,
ssh_pkey=None,
ssh_port=None,
ssh_proxy=None,
compression=None,
logger=None):
ssh_config = paramiko.SSHConfig()
if not ssh_config_file: # handle case where it's an empty string
ssh_config_file = None
# Try to read SSH_CONFIG_FILE
try:
# open the ssh config file
with open(os.path.expanduser(ssh_config_file), 'r') as f:
ssh_config.parse(f)
        # look up information for the destination system
hostname_info = ssh_config.lookup(ssh_host)
# gather settings for user, port and identity file
# last resort: use the 'login name' of the user
ssh_username = (
ssh_username or
hostname_info.get('user')
)
ssh_pkey = (
ssh_pkey or
hostname_info.get('identityfile', [None])[0]
)
ssh_host = hostname_info.get('hostname')
ssh_port = ssh_port or hostname_info.get('port')
proxycommand = hostname_info.get('proxycommand')
ssh_proxy = ssh_proxy or (paramiko.ProxyCommand(proxycommand) if
proxycommand else None)
if compression is None:
compression = hostname_info.get('compression', '')
compression = True if compression.upper() == 'YES' else False
except IOError:
if logger:
logger.warning(
'Could not read SSH configuration file: {0}'
.format(ssh_config_file)
)
except (AttributeError, TypeError): # ssh_config_file is None
if logger:
logger.info('Skipping loading of ssh configuration file')
finally:
return (ssh_host,
ssh_username or getpass.getuser(),
ssh_pkey,
int(ssh_port) if ssh_port else 22, # fallback value
ssh_proxy,
compression) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _load_config(self, sshconfig=\"~/.ssh/config\"):\n rpath = os.path.realpath(os.path.expanduser(sshconfig))\n try:\n os.stat(rpath)\n except OSError:\n return\n\n try:\n with codecs.open(rpath, \"rb\", \"utf-8\") as f:\n clines = f.readlines()\n except:\n print(\"!! Failed to parse %s\" % (rpath))\n return\n\n self._config.parse(clines)\n print(\"** Loaded ssh config %s\" % (rpath))",
"def _load_config(self, sshconfig=\"~/.ssh/config\"):\n rpath = os.path.realpath(os.path.expanduser(sshconfig))\n try:\n os.stat(rpath)\n except OSError:\n return\n\n try:\n with codecs.open(rpath, \"rb\", \"utf-8\") as f:\n clines = f.readlines()\n except:\n print(\"!! Failed to parse %s\" % (rpath))\n return\n\n self._config.parse(clines)\n if self.verbose: print(\"** Loaded ssh config %s\" % (rpath))",
"def load_ssh_config_for_plumbum(filename, host):\n\n conf = paramiko.config.SSHConfig()\n expanded_path = os.path.expanduser(filename)\n\n username_from_host = None\n m = re.search(\"([^@]+)@(.*)\", host)\n if m:\n username_from_host = m.group(1)\n host = m.group(2)\n\n if os.path.exists(expanded_path):\n with open(expanded_path) as ssh_config:\n conf.parse(ssh_config)\n\n lookup = conf.lookup(host)\n\n plumbum_kwargs = dict(\n user=username_from_host,\n port=None,\n keyfile=None,\n load_system_ssh_config=False,\n # TODO: Drop WarningPolicy\n # This is need in current plumbum and wrapper implementation\n # in case proxycommand is set.\n missing_host_policy=paramiko.WarningPolicy(),\n )\n\n plumbum_host = host\n if \"hostname\" in lookup:\n plumbum_host = lookup.get(\"hostname\")\n\n if \"port\" in lookup:\n plumbum_kwargs[\"port\"] = int(lookup[\"port\"])\n\n if not username_from_host:\n plumbum_kwargs[\"user\"] = lookup.get(\"user\")\n\n plumbum_kwargs[\"keyfile\"] = lookup.get(\"identityfile\")\n\n if \"proxycommand\" in lookup:\n plumbum_kwargs[\"load_system_ssh_config\"] = True\n # load_system_ssh_config: read system SSH config for ProxyCommand configuration.\n # https://plumbum.readthedocs.io/en/latest/_modules/plumbum/machines/paramiko_machine.html\n\n if lookup.get(\"hostname\") != host:\n msg = (\n \"can't handle both ProxyCommand and HostName at once, \"\n \"please drop either\"\n )\n raise ValueError(msg)\n plumbum_host = host\n # When load_system_ssh_config is True, plumbum_host must be Host\n # instead of HostName.\n # Otherwise parsing SSH config will fail in Plumbum.\n\n # Plumbum doesn't support agent-forwarding\n forward_agent = lookup.get(\"forwardagent\")\n\n return (plumbum_host, plumbum_kwargs, forward_agent)",
"def get_ssh_config(hostname):\n\n ssh_config_file = path.abspath(path.expanduser('~/.ssh/config'))\n if path.exists(ssh_config_file):\n ssh_config = SSHConfig()\n with open(ssh_config_file) as f:\n ssh_config.parse(f)\n return ssh_config.lookup(hostname)\n\n return dict()",
"def get_ssh_config(hostname):\n\n ssh_config_file = path.abspath(path.expanduser('~/.ssh/config'))\n if path.exists(ssh_config_file):\n ssh_config = SSHConfig()\n with open(ssh_config_file) as f:\n ssh_config.parse(f)\n return ssh_config.lookup(hostname)\n\n return dict()",
"def read_config(self, path):\n if not os.path.isfile(path):\n raise SSHKeyError('No such file: {}'.format(path))\n\n try:\n for l in [l.rstrip() for l in open(path, 'r').readlines()]:\n sshkey = SSHKeyFile(self, os.path.expandvars(os.path.expanduser(l)))\n\n if sshkey.path not in self.keys():\n self[sshkey.path] = sshkey\n\n self[sshkey.path].autoload = True\n\n except IOError as e:\n raise SSHKeyError('Error loading {}: {}'.format(path, e))\n except OSError as e:\n raise SSHKeyError('Error loading {}: {}'.format(path, e))",
"def __parse_user_keyfiles(self):\n\n user_sshdir = os.path.expanduser('~/.ssh')\n if not os.path.isdir(user_sshdir):\n return\n\n paths = []\n for filename in os.listdir(user_sshdir):\n if filename in SSH_CONFIG_FILES or os.path.splitext(filename)[1] != '.pub':\n continue\n path = os.path.join(user_sshdir, filename)\n if os.path.isfile(path):\n paths.append(path)\n for path in paths:\n try:\n sshkey = SSHKeyFile(self, path)\n except SSHKeyError as e:\n self.log.debug(e)\n continue\n\n self[sshkey.path] = sshkey",
"def vagrant_ssh_config():\n proc = sp.Popen(\"vagrant ssh-config\", stdout=sp.PIPE, stderr=sp.PIPE, shell=True)\n out, _ = proc.communicate()\n\n hosts = {}\n new_line = True # new block\n for line in out.decode().split(\"\\n\"):\n\n if new_line is True:\n hostname = line.replace(\"Host \", \"\")\n new_line = False\n elif len(line) == 0:\n new_line = True\n else:\n data = line[2:].split(\" \")\n\n host = hosts.get(hostname, {})\n host.update({data[0]: \" \".join(data[1:])})\n hosts.update({hostname: host})\n\n return hosts",
"def session_open(self):\n logger.debug(\"entering session_open()\")\n kwargs = {\"hostname\": self.host, \"username\": self.user}\n ssh_client = paramiko.SSHClient()\n ssh_client.load_system_host_keys()\n ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n ssh_config = os.path.expanduser(\"~/.ssh/config\")\n ask_pass = False\n key_found = False\n if (\n os.path.isfile(os.path.expanduser(\"~/.ssh/id_rsa\"))\n or os.path.isfile(os.path.expanduser(\"~/.ssh/id_dsa\"))\n or os.path.isfile(os.path.expanduser(\"~/.ssh/id_ecdsa\"))\n ):\n key_found = True\n\n if os.path.isfile(ssh_config):\n config = paramiko.SSHConfig()\n with open(ssh_config) as open_ssh_config:\n config.parse(open_ssh_config)\n config = config.lookup(self.host)\n if config.get(\"proxycommand\"):\n self._sock = paramiko.proxy.ProxyCommand(config.get(\"proxycommand\"))\n kwargs.update({\"sock\": self._sock})\n\n agent = paramiko.Agent()\n agent_keys = agent.get_keys()\n logger.debug(\"ssh agent has {} keys\".format(len(agent_keys)))\n\n if self.passwd is not None:\n kwargs.update(\n {\"password\": self.passwd, \"allow_agent\": False, \"look_for_keys\": False}\n )\n elif self.user != getpass.getuser():\n print(\n \"skipping publickey ssh auth as {} != {}\".format(\n self.user, getpass.getuser()\n )\n )\n kwargs.update({\"allow_agent\": False, \"look_for_keys\": False})\n ask_pass = True\n elif self.key_filename is not None:\n kwargs.update(\n {\n \"key_filename\": self.key_filename,\n \"allow_agent\": False,\n \"look_for_keys\": False,\n \"password\": None,\n }\n )\n # paramiko is a little broken (see github issue #1664) \n # work around by always asking for passphrase here\n # else \"SSHException: encountered RSA key, expected OPENSSH key\" error\n # when key has passphrase\n passphrase = getpass.getpass(\n prompt=\"ssh key passphrase (Enter for None): \", stream=None\n )\n if passphrase != \"\":\n kwargs.update({\"passphrase\": passphrase})\n elif len(agent_keys) == 0 and not key_found:\n print(\"no ssh keys found, nor ssh agent running, skipping publickey ssh auth\")\n kwargs.update({\"allow_agent\": False, \"look_for_keys\": False})\n ask_pass = True\n\n if ask_pass:\n self.passwd = getpass.getpass(\n prompt=\"{}@{}'s password: \".format(self.user, self.host), stream=None\n )\n kwargs[\"password\"] = self.passwd\n\n try:\n ssh_client.connect(**kwargs)\n except PasswordRequiredException:\n passphrase = getpass.getpass(\n prompt=\"ssh key passphrase (Enter for None): \", stream=None\n )\n if passphrase != \"\":\n kwargs.update({\"passphrase\": passphrase})\n ssh_client.connect(**kwargs)\n return ssh_client",
"def test_use_ssh_file():\n connection = FakeBaseConnection(\n host=\"localhost\",\n port=22,\n username=\"\",\n password=\"secret\",\n use_keys=True,\n allow_agent=False,\n key_file=\"/home/user/.ssh/id_rsa\",\n timeout=60,\n pkey=None,\n passphrase=None,\n disabled_algorithms=None,\n auth_timeout=None,\n banner_timeout=10,\n conn_timeout=5,\n ssh_config_file=join(RESOURCE_FOLDER, \"ssh_config\"),\n sock=None,\n )\n\n connect_dict = connection._connect_params_dict()\n\n expected = {\n \"hostname\": \"10.10.10.70\",\n \"port\": 8022,\n \"username\": \"admin\",\n \"password\": \"secret\",\n \"look_for_keys\": True,\n \"allow_agent\": False,\n \"key_filename\": \"/home/user/.ssh/id_rsa\",\n \"timeout\": 5,\n \"pkey\": None,\n \"passphrase\": None,\n \"disabled_algorithms\": None,\n \"auth_timeout\": None,\n \"banner_timeout\": 10,\n }\n\n result = connection._use_ssh_config(connect_dict)\n assert \"sock\" in result\n assert len(result[\"sock\"].cmd) == 5\n assert \"nc\" in result[\"sock\"].cmd\n del result[\"sock\"]\n assert result == expected",
"def __ssh_tunnel(self):\n\n host = self.sshTunnelDict[\"ssh_ip\"]\n user = self.sshTunnelDict[\"ssh_user\"]\n password = self.sshTunnelDict[\"ssh_password\"]\n sfcs = self.sshTunnelDict[\"target_ip\"]\n\n tunnel_command = 'ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -C -N -f -L 3306:{0} {1}@{2}'.format(sfcs, user, host)\n retry = 5\n while retry:\n if not self.__check_ssh():\n try:\n ssh_tunnel = pexpect.spawn(tunnel_command)\n ssh_tunnel.expect('password:')\n time.sleep(0.1)\n ssh_tunnel.sendline(password)\n ssh_tunnel.expect(pexpect.EOF)\n retry -= 1\n except:\n raise Exception(\"Create SSH Tunnel Failed: retry 5\")\n else: break",
"def ssh_config(name=''):\n with settings(hide('running')):\n output = local('vagrant ssh-config %s' % name, capture=True)\n\n config = {}\n for line in output.splitlines()[1:]:\n key, value = line.strip().split(' ', 2)\n config[key] = value\n return config",
"def test_use_ssh_file_proxyjump():\n connection = FakeBaseConnection(\n host=\"10.10.10.70\",\n port=22,\n username=\"\",\n password=\"secret\",\n use_keys=True,\n allow_agent=False,\n key_file=\"/home/user/.ssh/id_rsa\",\n timeout=60,\n pkey=None,\n passphrase=None,\n disabled_algorithms=None,\n auth_timeout=None,\n conn_timeout=5,\n banner_timeout=10,\n ssh_config_file=join(RESOURCE_FOLDER, \"ssh_config_proxyjump\"),\n sock=None,\n )\n\n connect_dict = connection._connect_params_dict()\n\n expected = {\n \"hostname\": \"10.10.10.70\",\n \"port\": 8022,\n \"username\": \"admin\",\n \"password\": \"secret\",\n \"look_for_keys\": True,\n \"allow_agent\": False,\n \"key_filename\": \"/home/user/.ssh/id_rsa\",\n \"timeout\": 5,\n \"pkey\": None,\n \"passphrase\": None,\n \"disabled_algorithms\": None,\n \"auth_timeout\": None,\n \"banner_timeout\": 10,\n }\n\n result = connection._use_ssh_config(connect_dict)\n assert \"sock\" in result\n assert \"-W\" in result[\"sock\"].cmd\n del result[\"sock\"]\n assert result == expected",
"def ssh_config(name=''):\n name = _name_or_host_string(name)\n with settings(hide('running')):\n output = local('vagrant ssh-config %s' % name, capture=True)\n\n config = {}\n for line in output.splitlines()[1:]:\n key, value = line.strip().split(' ', 1)\n config[key] = value\n return config",
"def check_config(self, host, path, settings):\n for key in settings.keys():\n cmd = 'grep {1} {0} | grep -i \"{2}\"'.format(path, key,\n settings[key])\n logger.debug('CMD: {}'.format(cmd))\n SSHManager().check_call(host, cmd)",
"def write_ssh_config(bastion_ip, os_user, keyfile):\n with open('ssh_config-metrics', 'w') as config_file:\n config_file.write('host *\\n')\n config_file.write(' User %s\\n' % os_user)\n config_file.write(' IdentityFile %s\\n' % keyfile)\n config_file.write(' StrictHostKeyChecking no\\n')\n config_file.write(' UserKnownHostsFile /dev/null\\n')\n if bastion_ip:\n config_file.write(' ProxyCommand ssh -i %s -o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null %s@%s exec nc %%h %%p\\n'\n % (keyfile, os_user, bastion_ip))\n if not bastion_ip:\n return False",
"def find_juju_ssh_keys():\n\n paths = []\n paths.append(os.path.expanduser(\"~/.local/share/juju/ssh\"))\n\n for path in paths:\n if os.path.exists(path):\n private = os.path.expanduser(\"{}/juju_id_rsa\".format(path))\n public = os.path.expanduser(\"{}/juju_id_rsa.pub\".format(path))\n if os.path.exists(private) and os.path.exists(public):\n return (private, public)\n return (None, None)",
"def read_ssh_config_file(path):\n with open(path, \"r\") as fh_:\n lines = fh_.read().splitlines()\n return SshConfigFile(lines)",
"def test_ssh(self):\n assert self.rc_conf.has_key('sshd_enable')\n assert self.rc_conf['sshd_enable'] == '\"YES\"'\n sshd_conf = open('/etc/ssh/sshd_config').read()\n assert re.search('[^#]PermitRootLogin yes', sshd_conf)",
"def read_ssh_config(master_path):\n base_path = os.path.dirname(master_path)\n master_config = read_ssh_config_file(master_path)\n configs = []\n queue = [(master_path, master_config)]\n while len(queue) > 0:\n cur_path, cur_config = queue.pop()\n cur_includes = [ x.value for x in cur_config.lines_ if x.key is not None and x.key.lower() == \"include\" ]\n configs.append((cur_path, cur_config))\n for cur_include in cur_includes:\n for new_path in _resolve_includes(base_path, cur_include):\n new_config = read_ssh_config_file(new_path)\n queue.append((new_path, new_config))\n\n return SshConfig(configs)",
"def ssh_config(self, arguments):\n instance_name = arguments['<instance>']\n instance_name = self.activate(instance_name)\n\n print(utils.config_ssh_string(self.config_ssh))",
"def files(self):\n log.info(\"starting file iteration\")\n ssh = paramiko.SSHClient()\n\n if self.load_system_host_keys:\n log.debug('loading system host keys')\n ssh.load_system_host_keys()\n if self.host_key_auto_add:\n log.debug('setting host key policy to auto add')\n ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n\n sshconf = paramiko.SSHConfig()\n # paramiko uses 'USER' environment var to parsing %u, %r\n # when nrpe daemon run the check, that var is not set and results in\n # 'None' user, set it before parsing config file.\n local_user = pwd.getpwuid(os.getuid()).pw_name\n os.environ['USER'] = os.environ.get('USER', local_user)\n with open('/etc/ssh/ssh_config') as f:\n sshconf.parse(f)\n\n # paramiko wrongly parses %u/%r@%h as it use same value for %u and %r\n # replace %r with the configured username\n self.kwargs['key_filename'] = [\n path for path in sshconf.lookup(self.hostname)['identityfile']\n ]\n\n log.info(\"connecting to %s\", self.hostname)\n log.debug(\"kwargs: %s\", str(self.kwargs))\n for key_file in self.kwargs['key_filename'][:]:\n try:\n ssh.connect(**self.kwargs)\n break\n except IOError as e:\n log.info(\"Key %s does not exist, trying another\", key_file)\n try:\n self.kwargs['key_filename'].pop(0)\n except IndexError:\n raise Exception('No more ssh private key to try.'\n 'Make sure good ssh key exist.')\n log.debug(\"opening sftp\")\n ftp = ssh.open_sftp()\n log.debug(\"chdir %s\", self.pwd)\n try:\n ftp.chdir(self.pwd)\n except IOError, e:\n log.error(\"Error going to directory %s: %s\", self.pwd, e)\n return\n\n # optimization. To avoid running fstat for every backup file, I filter\n # out to only test the newest backup for each facility\n files = {}\n log.debug(\"running ls\")\n for fattr in ftp.listdir_attr():\n # a longname looks like:\n # -rw-r--r-- 1 radvd quagga 5586928 Jun 22 06:35\n # postgresql-roundcube-2016-06-22-06_34_47.sql.xz\n if fattr.longname.startswith('d'): # is a directory\n log.debug(\"Skipping directory %s\", fattr.longname)\n continue\n filename = fattr.longname.split()[-1]\n log.debug('processing %s', filename)\n\n f = self.make_file(filename, None)\n if not f:\n log.debug('skipping')\n continue\n key, value = f.items()[0]\n # we may want to run fstat on this filename later on\n f[key]['filename'] = filename\n # keeps only the newest file for each facility\n if (key not in files) or (value['date'] > files[key]['date']):\n log.debug('first or newer.')\n files.update(f)\n else:\n log.debug('was old')\n\n # now fetch fstat for each file, and yield them\n for k, f in files.items():\n log.debug('getting fstat for %s', f['filename'])\n filestat = ftp.stat(f['filename'])\n f['size'] = filestat.st_size\n yield {k: f}",
"def _consolidate_auth(ssh_password=None,\n ssh_pkey=None,\n ssh_pkey_password=None,\n allow_agent=True,\n host_pkey_directories=None,\n logger=None):\n ssh_loaded_pkeys = SSHTunnelForwarder.get_keys(\n logger=logger,\n host_pkey_directories=host_pkey_directories,\n allow_agent=allow_agent\n )\n\n if isinstance(ssh_pkey, string_types):\n ssh_pkey_expanded = os.path.expanduser(ssh_pkey)\n if os.path.exists(ssh_pkey_expanded):\n ssh_pkey = SSHTunnelForwarder.read_private_key_file(\n pkey_file=ssh_pkey_expanded,\n pkey_password=ssh_pkey_password or ssh_password,\n logger=logger\n )\n elif logger:\n logger.warning('Private key file not found: {0}'\n .format(ssh_pkey))\n if isinstance(ssh_pkey, paramiko.pkey.PKey):\n ssh_loaded_pkeys.insert(0, ssh_pkey)\n\n if not ssh_password and not ssh_loaded_pkeys:\n raise ValueError('No password or public key available!')\n return (ssh_password, ssh_loaded_pkeys)",
"def set_ssh_keys(self, ssh_keys):\n self.ssh_keys = {}\n self.ssh_keys_private = {}\n for user_name in ssh_keys:\n key = ssh_keys[user_name]\n if key.startswith('file:'):\n public_key_file = key.split('file:')[1]\n with open(public_key_file) as fd:\n key = fd.read()\n # try to open private key\n private_key_file = public_key_file.split('.pub')[0]\n try:\n with open(private_key_file) as fd:\n self.ssh_keys_private[user_name] = private_key_file\n except FileNotFoundError:\n pass\n\n self.ssh_keys[user_name] = key.strip()\n if user_name == 'root':\n # check if the private key is available:\n # (1) check ssh-agent\n # (2) check for private key file\n command = \"echo {} | ssh-keygen -l -f - | awk '{{ print $2 }}'\"\n finger = check_output(command.format(self.ssh_keys[user_name]),\n shell=True, encoding='ascii')\n try:\n command = 'ssh-add -l | grep -q {}'\n check_call(command.format(finger), shell=True)\n return\n except CalledProcessError:\n if user_name not in self.ssh_keys_private:\n fatal('Could not find matching ssh key for root -',\n 'neither in ssh-agent nor on disk.')",
"def cfg_credentials(context):\n arguments = {\n '--config': context.config_file,\n 'authorize': False,\n 'account_summary': False\n }\n pychex_cli = PychexCli(arguments)\n pychex_cli.read_config()\n # Check that the values pulled from the read_config method match what we\n # know\n print(pychex_cli.username)\n assert pychex_cli.username == context.username\n assert pychex_cli.security_image_path == context.security_image_path\n assert pychex_cli.password == context.password\n # Check that the unencrypted values are not present\n with open(arguments['--config']) as cfg:\n cfg_txt = cfg.read()\n assert cfg_txt.find(context.username) == -1\n assert cfg_txt.find(context.security_image_path) == -1\n assert cfg_txt.find(context.password) == -1",
"def readConfig():\n hosts = []\n domains = []\n with open(\"./host.conf\", \"r\") as fd:\n for line in fd.readlines():\n line = line.strip().split()\n if line != []:\n # Parse config for zone files and hosts\n if line[0] == \"ZONE_FILE:\":\n zoneFile = line[1]\n if line[0] == \"REVERSE_ZONE_FILE:\":\n reverseZoneFile = line[1]\n if line[0] == \"HOST:\":\n hosts.append((line[1], line[2], line[3]))\n if line[0] == \"DOMAIN:\":\n domains.append((line[1], line[2], line[3]))\n\n return zoneFile, reverseZoneFile, hosts, domains",
"def read_config(file, destination=None, user=None, host=None, cmd_host=None, copy_protocol=None):\n with open(file) as config_yaml:\n base_yaml = yaml.safe_load(config_yaml)\n\n # with config loaded, make sure we have the keys that we need\n\n base_config = {\n 'keys': [],\n 'map': [],\n 'default': [],\n 'required_files': [],\n 'path': None,\n 'destination': destination,\n 'command': {\n 'exts': [],\n 'run': None\n }\n }\n\n router_config = {\n 'key': [],\n 'files': [],\n 'filter': None,\n 'invert': None,\n 'lowercase': None,\n 'exact': None\n }\n\n remote_config = {\n 'user': user,\n 'host': host,\n 'copy_protocol': copy_protocol,\n 'cmd_host': cmd_host,\n }\n\n if 'dassort' in base_yaml.keys() and 'remote' in base_yaml.keys():\n tree_yaml = base_yaml['dassort']\n map_json = tree_yaml['json']\n base_config = merge_dicts(base_config, map_json)\n base_config = merge_dicts(base_config, tree_yaml)\n remote_yaml = base_yaml['remote']\n remote_config = merge_dicts(remote_config, remote_yaml)\n router_config = None\n elif 'dassort' in base_yaml.keys():\n tree_yaml = base_yaml['dassort']\n map_json = tree_yaml['json']\n base_config = merge_dicts(base_config, map_json)\n base_config = merge_dicts(base_config, tree_yaml)\n remote_config = None\n router_config = None\n elif 'router' in base_yaml.keys():\n tree_yaml = base_yaml['router']\n router_config = merge_dicts(router_config, tree_yaml)\n # all router items should be iterables\n for k, v in router_config.items():\n if type(v) is not list:\n router_config[k] = [v]\n base_config = None\n remote_config = None\n else:\n base_config = None\n remote_config = None\n router_config = None\n\n # reformat base configuration\n if base_config is not None:\n base_config = {\n 'keys': base_config['keys'],\n 'map': base_config['map'],\n 'default': base_config['default'],\n 'required_files': base_config['required_files'],\n 'value': [],\n 'path': {\n 'path_string': base_config['path'],\n 're': {'root': base_config['destination']}\n },\n 'command': base_config['command'],\n }\n\n return base_config, remote_config, router_config",
"def read_config(config_file):\n config_dict = {\n \"port\": \"22\",\n \"persist_remote_files\": False\n }\n with open(config_file) as fin:\n for config_line in fin:\n config_line = config_line.strip()\n # check for commented out lines\n if config_line.startswith(\"#\") or len(config_line) == 0:\n continue\n key, value = config_line.split(\"=\")\n config_dict[key.rstrip()] = value.lstrip()\n\n return config_dict",
"def _ssh(ip, *, user=None, key=None, port=8888):\n # Need to replace \".\", because I don't want \n # `ip` to be a keyword argument\n if ip == \".\" or ip == \"...\": ip = None \n func_args = locals()\n conf = Bunch(**func_args)\n \n # Loads default config if there is one\n # and update the conf object with data\n # from it, but function args have precedence\n fname = os.path.expanduser(\"~/.nbx/aws.json\")\n fname = Path(fname)\n if fname.is_file(): \n stored = load(fname)\n for k,v in stored.items():\n # Function args have precedence\n if conf[k] is None: conf[k] = v\n \n # Check if we got everything we need to\n # connect to instance\n fail = False\n for k in [\"ip\", \"user\", \"key\", \"port\"]:\n if conf[k] is None:\n fail = True\n print(f\"Please provide --{k}\")\n \n # Save what we already got, and\n # proceed if we got everything or return\n dump(conf, fname)\n if fail: return\n \n config_str = SSH_CONFIG_TEMPLATE.format(\n host=\"aws\", \n user=conf.user, \n ip=conf.ip, \n key=conf.key\n )\n print(config_str)\n dump(config_str, os.path.expanduser(\"~/.ssh/ec2_config\"), format=\".txt\")\n \n # We could write some environment vars\n # but we can't source them from here\n #\n # fname = os.path.expanduser(\"~/.nbx/.bash_aws\")\n # string = f\"export xaws={conf.user}@{conf.ip};\\n\"\n # dump(string, fname, format=\".txt\")\n\n # Connect to server and forward local port 8888 to remote port 8888\n # We can now connect to a remote jupyter notebook server via `http://localhost:8888/`\n cmd = f\"ssh -i {conf.key} -L {conf.port}:localhost:{conf.port} {conf.user}@{conf.ip}\"\n os.system(f'bash -c \\\"{cmd}\\\"')",
"def connectSsh(self):\n connect_handle = pexpect.spawn(\"ssh -q -o StrictHostKeyChecking=no root@%s\" % self.ip)\n connect_handle.setwinsize(800,800)\n connect_handle.logfile_read = sys.stdout\n #connect_handle.logfile_send = sys.stdout\n i = 0\n ssh_newkey = r'(?i)Are you sure you want to continue connecting'\n remote_key_changed = r\"REMOTE HOST IDENTIFICATION HAS CHANGED\"\n\n perm_denied = r\"(?i)Permission denied\"\n while True:\n i = connect_handle.expect([ssh_newkey, 'assword:',self.promptshell,\n pexpect.EOF, pexpect.TIMEOUT,\n remote_key_changed, perm_denied])\n if i==0:\n connect_handle.sendline('yes')\n continue\n elif i==1:\n logger.info(\"Password supplied\")\n connect_handle.sendline(self.password)\n continue\n\t elif i==2:\n self._mode = CLI_MODES.shell\n self._prompt = self.promptshell\n break\n elif i==3:\n logger.info(\"Connection closed: %s\" % self)\n logger.info(connect_handle.before) # print out the result\n raise ValueError(\"Connection Closed: %s\" % self)\n elif i==4:\n logger.warning(\"Timeout while waiting for connection\")\n logger.info(connect_handle.before) # print out the result\n raise ValueError(\"Unable to establish connection %s\" % self)\n elif i==5:\n logger.warn(\"Removing offending key from .known_hosts..\")\n known_hosts_file = os.path.expanduser(\"~/.ssh/known_hosts\")\n\n if \"darwin\" in sys.platform.lower():\n # MAC OS\n utils.run_cmd(\"sed -i 1 's/%s.*//' %s\" % (self.ip, known_hosts_file))\n elif \"linux\" in sys.platform.lower():\n # Linux\n utils.run_cmd(\"sed -i 's/%s.*//' %s\" % (self.ip, known_hosts_file))\n\n connect_handle = pexpect.spawn(\"ssh root@%s\" % self.ip)\n connect_handle.setwinsize(800,800)\n connect_handle.logfile_read = sys.stdout\n\n continue\n elif i==6:\n logger.warning(\"Permission denied: %s\" % self)\n logger.info(connect_handle.before) # print out the result\n raise ValueError(\"Permission denied: %s.\" % self)\n return connect_handle"
] | [
"0.6473108",
"0.64405686",
"0.6379368",
"0.5951086",
"0.5951086",
"0.58582616",
"0.58304334",
"0.57917017",
"0.5772919",
"0.57614845",
"0.57045346",
"0.56750584",
"0.563401",
"0.5628457",
"0.5615216",
"0.5609538",
"0.5607589",
"0.5602402",
"0.55837905",
"0.557609",
"0.5556947",
"0.5481369",
"0.5468911",
"0.54628205",
"0.54528916",
"0.5432132",
"0.5431283",
"0.5392161",
"0.5369242",
"0.5335231"
] | 0.7810559 | 0 |
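The positive document above delegates all file parsing to paramiko.SSHConfig; a minimal sketch of that lookup in isolation, assuming the default config path and a hypothetical host alias 'myserver'.

import os
import paramiko

ssh_config = paramiko.SSHConfig()
with open(os.path.expanduser('~/.ssh/config')) as f:
    ssh_config.parse(f)

info = ssh_config.lookup('myserver')         # dict-like result, keys lowercased
print(info.get('hostname'))                  # resolved HostName
print(info.get('user'))                      # User entry, if present
print(info.get('identityfile', [None])[0])   # IdentityFile is a list; take the first
print(int(info.get('port', 22)))             # Port entry, 22 as the fallback

This mirrors the fallback chain in _read_ssh_config: explicit arguments win, then config-file entries, then getpass.getuser() and port 22.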
Fill local_binds with defaults when no values were specified, leaving paramiko to decide on which local port the tunnel will be opened | def _consolidate_binds(local_binds, remote_binds):
count = len(remote_binds) - len(local_binds)
if count < 0:
raise ValueError('Too many local bind addresses '
'(local_bind_addresses > remote_bind_addresses)')
local_binds.extend([('0.0.0.0', 0) for x in range(count)])
return local_binds | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def local_forward(\n self, remote_host, remote_port, local_host=\"0.0.0.0\", local_port=44556\n ):\n tunnel = SSHTunnelForwarder(\n (self.hostname, self.port),\n ssh_username=self.user,\n ssh_pkey=get_pkey(self.issho_conf[\"ID_RSA\"]),\n remote_bind_address=(remote_host, remote_port),\n local_bind_address=(local_host, local_port),\n )\n tunnel.start()\n return tunnel",
"def local_bind_ports(self):\n self._check_is_started()\n return [_server.local_port for _server in self._server_list if\n _server.local_port is not None]",
"def tunnel(local_port, remote_port):\n env.tunnel_local_port = local_port\n env.tunnel_remote_port = remote_port\n local(' ssh -i %(key_filename)s -L %(tunnel_local_port)s:localhost:%(tunnel_remote_port)s %(user)s@%(host_string)s -N' % env)",
"def _create_tunnels(self):\n if not self.is_active:\n try:\n self._connect_to_gateway()\n except socket.gaierror: # raised by paramiko.Transport\n msg = 'Could not resolve IP address for {0}, aborting!' \\\n .format(self.ssh_host)\n self.logger.error(msg)\n return\n except (paramiko.SSHException, socket.error) as e:\n template = 'Could not connect to gateway {0}:{1} : {2}'\n msg = template.format(self.ssh_host, self.ssh_port, e.args[0])\n self.logger.error(msg)\n return\n for (rem, loc) in zip(self._remote_binds, self._local_binds):\n try:\n self._make_ssh_forward_server(rem, loc)\n except BaseSSHTunnelForwarderError as e:\n msg = 'Problem setting SSH Forwarder up: {0}'.format(e.value)\n self.logger.error(msg)",
"def test_init_correct_transport_bindaddr(self):\n os.environ = BASE_ENVIRON\n self.plugin.init([\"dummy\", \"boom\"])\n bindaddr = self.plugin.getBindAddresses()\n self.assertEqual(bindaddr[\"dummy\"], ('127.0.0.1', 5556))\n self.assertEqual(bindaddr[\"boom\"], ('127.0.0.1', 6666))\n self.assertOutputLinesStartWith(\"VERSION \")",
"def local_bind_addresses(self):\n self._check_is_started()\n return [_server.local_address for _server in self._server_list]",
"def _allBindAddresses(self):\n if not config.BindAddresses:\n if getattr(socket, \"has_ipv6\", False):\n if conflictBetweenIPv4AndIPv6():\n # If there's a conflict between v4 and v6, then almost by\n # definition, v4 is mapped into the v6 space, so we will\n # listen \"only\" on v6.\n config.BindAddresses = [\"::\"]\n else:\n config.BindAddresses = [\"\", \"::\"]\n else:\n config.BindAddresses = [\"\"]\n return config.BindAddresses",
"def tunnel_bindings(self):\n return dict((_server.remote_address, _server.local_address) for\n _server in self._server_list if\n self.tunnel_is_up[_server.local_address])",
"def local_bind_hosts(self):\n self._check_is_started()\n return [_server.local_host for _server in self._server_list if\n _server.local_host is not None]",
"def get_default_kwargs(self, **defaults):\n return super().get_default_kwargs(**{\"port\": 22})",
"def init_tunnel(server_address, remote_port, local_port=22):\n def get_pid(appstring):\n # print 'determining PID'\n processes = subprocess.check_output(['ps', '-fx'])\n for line in processes.splitlines():\n if appstring in line:\n pid = line.split()[0]\n return pid\n\n global PID\n cmd = ['ssh', '-f', '-N', '-T', '-R{}:127.0.0.1:{}'.format(remote_port,local_port), server_address]\n try:\n proc = Popen(cmd, preexec_fn=os.setsid)\n proc.wait() # otherwise the PID won't be correct. proc.pid is of no help.\n PID = get_pid(' '.join(cmd))\n return True\n except Exception as e:\n print e\n return False",
"def __bind(self, args = []):\n \n try: \n\n # Start the local chat server and be ready to receive incoming requests\n localServerPort = self.__agent.startLocalServer()\n\n # Sleep a little bit to allow the new thread to open the listening port\n sleep(0.3)\n \n serverIp, serverPort = self.__cm.getConnectionInfo()\n\n self.__cm.send(p.T_BIND, [serverIp, localServerPort])\n reply = self.__cm.receive()\n \n if (reply.type == p.T_ERR):\n raise Exception, \"Port binding was not succussful!\"\n\n except Exception,e:\n self.__handleError('Bind', e)",
"def set_ip_routing(self):\n os_type = os.getenv('server_os_type', None)\n if self.remote is not True and os_type not in ['Linux']:\n return\n\n self.log_output('Setting IP forwarding and iptables rules on {} host'.format(\n os_type))\n\n command = (\n \"echo '{0}' | sudo -S sysctl -w net.ipv4.ip_forward=1 && \"\n \"echo '{0}' | sudo -S sysctl -w net.ipv6.conf.all.forwarding=1 && \"\n \"echo '{0}' | sudo -S sysctl -w net.ipv4.conf.all.send_redirects=0 \"\n \"&& echo '{0}' | sudo -S iptables -t nat -A PREROUTING -i {1} -p \"\n \"tcp --dport 80 -j REDIRECT --to-port {2} && \"\n \"echo '{0}' | sudo -S ip6tables -t nat -A PREROUTING -i {1} -p tcp \"\n \"--dport 80 -j REDIRECT --to-port {2}\"\n )\n self.run_command(command.format(\n self.ssh_password, self.interface, self.proxy_port))",
"def apply_default_port(nodes):\n nodes = nodes.split(',')\n\n def append_port(node):\n if re.match(r'.*:\\d+$', node):\n return node\n return f'{node}:8091'\n return [append_port(x) for x in nodes]",
"def _spawn_ssh_tunnel(local_port: int, remote_port: int, remote_ip: str,\n server: str, port: int, key: Optional[str] = None):\n if sys.platform == 'win32':\n ssh_server = server + \":\" + str(port)\n return tunnel.paramiko_tunnel(local_port, remote_port, ssh_server, remote_ip, key)\n else:\n ssh = \"ssh -p %s -o ServerAliveInterval=%i\" % (port, max_keep_alive_interval)\n cmd = \"%s -S none -L 127.0.0.1:%i:%s:%i %s\" % (ssh, local_port, remote_ip, remote_port, server)\n return pexpect.spawn(cmd, env=os.environ.copy().pop('SSH_ASKPASS', None))",
"def init():\r\n if not env.hosts:\r\n _init_local()\r\n else:\r\n _init_remote()",
"def get_default_config(self):\n config = super(BindCollector, self).get_default_config()\n config.update({\n 'host': 'localhost',\n 'port': 8080,\n 'path': 'bind',\n # Available stats:\n # - resolver (Per-view resolver and cache statistics)\n # - server (Incoming requests and their answers)\n # - zonemgmt (Requests/responses related to zone management)\n # - sockets (Socket statistics)\n # - memory (Global memory usage)\n 'publish': [\n 'resolver',\n 'server',\n 'zonemgmt',\n 'sockets',\n 'memory',\n ],\n # By default we don't publish these special views\n 'publish_view_bind': False,\n 'publish_view_meta': False,\n })\n return config",
"def _tunnel(port):\n func_args = locals()\n conf = Bunch(**func_args)\n \n # Loads default config if there is one\n # and update the conf object with data\n # from it, but function args have precedence\n fname = os.path.expanduser(\"~/.nbx/aws.json\")\n fname = Path(fname)\n if fname.is_file(): \n stored = load(fname)\n for k,v in stored.items():\n if k not in conf: conf[k] = v\n \n # Check if we got everything we need to\n # connect to instance\n fail = False\n for k in [\"ip\", \"user\", \"key\", \"port\"]:\n if conf[k] is None:\n fail = True\n print(f\"Please provide --{k}\")\n \n if fail: return\n \n # We could write some environment vars\n # but we can't source them from here\n #\n # fname = os.path.expanduser(\"~/.nbx/.bash_aws\")\n # string = f\"export xaws={conf.user}@{conf.ip};\\n\"\n # dump(string, fname, format=\".txt\")\n\n # Connect to server and forward local port 8888 to remote port 8888\n # We can now connect to a remote jupyter notebook server via `http://localhost:8888/`\n cmd = f\"ssh -i {conf.key} -L {conf.port}:localhost:{conf.port} {conf.user}@{conf.ip}\"\n os.system(f'bash -c \\\"{cmd}\\\"')",
"def bind(self,cluster_name,ip_address='',bind_details={},project_id=''):\n project_id = project_id if project_id != '' else self.__project_id\n if ip_address == '':\n headers = { 'User-Agent': 'curl/7.61.0'} # spoof for simple response\n ip = requests.get('http://ifconfig.co', headers)\n ip_address = ip.text.rstrip()\n logger.info(f'bind: looked up ip address: {ip_address}')\n #key = self.create_programatic_apikey(description=description,project_id=project_id)\n db_user = { 'username' : 'foo'\n ,'password' : 'changeme'\n ,'databaseName' : 'admin'\n ,'roles' : [ {'databaseName' : 'admin', 'roleName' : 'dbAdminAnyDatabase'} ] \n }\n user = self.create_database_user(db_user,project_id=project_id) \n cluster = self.get_cluster(cluster_name)\n cs = cluster['mongoURIWithOptions'].split('/',1)\n #conn_str = f'{cs[0]//{key['publicKey']}:{key['privateKey']}@{cs[1]}'\n return conn_str",
"def open(self):\n self._server = socketserver.ThreadingTCPServer(\n server_address=('localhost', self._requested_local_port),\n RequestHandlerClass=self._create_handler(self._ssh_client, self._remote_host, self._remote_port),\n )\n\n threading.Thread(target=self.serve_forever).start()\n\n print('Forwarding local port {} to remote {}:{}'.format(self.local_port, self.remote_host, self.remote_port))",
"def _bindlist(input_str):\n try:\n ip_port = input_str.split(':')\n if len(ip_port) == 1:\n _ip = ip_port[0]\n _port = None\n else:\n (_ip, _port) = ip_port\n if not _ip and not _port:\n raise AssertionError\n elif not _port:\n _port = '22' # default port if not given\n return _ip, int(_port)\n except ValueError:\n raise argparse.ArgumentTypeError(\n 'Address tuple must be of type IP_ADDRESS:PORT'\n )\n except AssertionError:\n raise argparse.ArgumentTypeError(\"Both IP:PORT can't be missing!\")",
"async def bindip_choices(self):\n return {\n d['address']: d['address'] for d in await self.middleware.call(\n 'interface.ip_in_use', {'static': True, 'any': True}\n )\n }",
"def insert_conf(src_ip):\n from settings import GRE_TUNNEL_CONF\n return insert_route(src_ip, GRE_TUNNEL_CONF)",
"def bind_acl_to_ports(self, acl_name=None, ports=None):\n pass",
"def bind_server(self):\n self.MAIN_CONNECTION.bind((self.HOST, self.PORT))",
"def _parse_arguments(args=None):\n parser = argparse.ArgumentParser(\n description='Pure python ssh tunnel utils\\n'\n 'Version {0}'.format(__version__),\n formatter_class=argparse.RawTextHelpFormatter\n )\n\n parser.add_argument(\n 'ssh_address',\n type=str,\n help='SSH server IP address (GW for SSH tunnels)\\n'\n 'set with \"-- ssh_address\" if immediately after '\n '-R or -L'\n )\n\n parser.add_argument(\n '-U', '--username',\n type=str,\n dest='ssh_username',\n help='SSH server account username'\n )\n\n parser.add_argument(\n '-p', '--server_port',\n type=int,\n dest='ssh_port',\n default=22,\n help='SSH server TCP port (default: 22)'\n )\n\n parser.add_argument(\n '-P', '--password',\n type=str,\n dest='ssh_password',\n help='SSH server account password'\n )\n\n parser.add_argument(\n '-R', '--remote_bind_address',\n type=_bindlist,\n nargs='+',\n default=[],\n metavar='IP:PORT',\n required=True,\n dest='remote_bind_addresses',\n help='Remote bind address sequence: '\n 'ip_1:port_1 ip_2:port_2 ... ip_n:port_n\\n'\n 'Equivalent to ssh -Lxxxx:IP_ADDRESS:PORT\\n'\n 'If port is omitted, defaults to 22.\\n'\n 'Example: -R 10.10.10.10: 10.10.10.10:5900'\n )\n\n parser.add_argument(\n '-L', '--local_bind_address',\n type=_bindlist,\n nargs='*',\n dest='local_bind_addresses',\n metavar='IP:PORT',\n help='Local bind address sequence: '\n 'ip_1:port_1 ip_2:port_2 ... ip_n:port_n\\n'\n 'Elements may also be valid UNIX socket domains: \\n'\n '/tmp/foo.sock /tmp/bar.sock ... /tmp/baz.sock\\n'\n 'Equivalent to ssh -LPORT:xxxxxxxxx:xxxx, '\n 'being the local IP address optional.\\n'\n 'By default it will listen in all interfaces '\n '(0.0.0.0) and choose a random port.\\n'\n 'Example: -L :40000'\n )\n\n parser.add_argument(\n '-k', '--ssh_host_key',\n type=str,\n help=\"Gateway's host key\"\n )\n\n parser.add_argument(\n '-K', '--private_key_file',\n dest='ssh_private_key',\n metavar='KEY_FILE',\n type=str,\n help='RSA/DSS/ECDSA private key file'\n )\n\n parser.add_argument(\n '-S', '--private_key_password',\n dest='ssh_private_key_password',\n metavar='KEY_PASSWORD',\n type=str,\n help='RSA/DSS/ECDSA private key password'\n )\n\n parser.add_argument(\n '-t', '--threaded',\n action='store_true',\n help='Allow concurrent connections to each tunnel'\n )\n\n parser.add_argument(\n '-v', '--verbose',\n action='count',\n default=0,\n help='Increase output verbosity (default: {0})'.format(\n logging.getLevelName(DEFAULT_LOGLEVEL)\n )\n )\n\n parser.add_argument(\n '-V', '--version',\n action='version',\n version='%(prog)s {version}'.format(version=__version__),\n help='Show version number and quit'\n )\n\n parser.add_argument(\n '-x', '--proxy',\n type=_bindlist,\n dest='ssh_proxy',\n metavar='IP:PORT',\n help='IP and port of SSH proxy to destination'\n )\n\n parser.add_argument(\n '-c', '--config',\n type=str,\n default=SSH_CONFIG_FILE,\n dest='ssh_config_file',\n help='SSH configuration file, defaults to {0}'.format(SSH_CONFIG_FILE)\n )\n\n parser.add_argument(\n '-z', '--compress',\n action='store_true',\n dest='compression',\n help='Request server for compression over SSH transport'\n )\n\n parser.add_argument(\n '-n', '--noagent',\n action='store_false',\n dest='allow_agent',\n help='Disable looking for keys from an SSH agent'\n )\n\n parser.add_argument(\n '-d', '--host_pkey_directories',\n nargs='*',\n dest='host_pkey_directories',\n metavar='FOLDER',\n help='List of directories where SSH pkeys (in the format `id_*`) '\n 'may be found'\n )\n return vars(parser.parse_args(args))",
"def do_local(self, host=\"localhost\", port=8000):\n port = int(port)\n if host == \"off\":\n self._local_endpoint = None\n else:\n self._local_endpoint = (host, port)\n self.onecmd(\"use %s\" % self.engine.region)",
"def default (no_flow = False,\n network = \"192.168.0.0/24\", # Address range\n first = 100, last = 199, count = None, # Address range\n ip = \"192.168.0.254\",\n router = (), # Auto\n dns = ()): # Auto\n launch(no_flow, network, first, last, count, ip, router, dns)",
"def test_init_defaults(self):\n self._set_args(log_path=None,\n state='present',\n username='myBindAcct',\n password='myBindPass',\n server='ldap://example.com:384',\n search_base='OU=Users,DC=example,DC=com',\n role_mappings={'.*': ['storage.monitor']},\n )\n\n ldap = Ldap()",
"def __ssh_tunnel(self):\n\n host = self.sshTunnelDict[\"ssh_ip\"]\n user = self.sshTunnelDict[\"ssh_user\"]\n password = self.sshTunnelDict[\"ssh_password\"]\n sfcs = self.sshTunnelDict[\"target_ip\"]\n\n tunnel_command = 'ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -C -N -f -L 3306:{0} {1}@{2}'.format(sfcs, user, host)\n retry = 5\n while retry:\n if not self.__check_ssh():\n try:\n ssh_tunnel = pexpect.spawn(tunnel_command)\n ssh_tunnel.expect('password:')\n time.sleep(0.1)\n ssh_tunnel.sendline(password)\n ssh_tunnel.expect(pexpect.EOF)\n retry -= 1\n except:\n raise Exception(\"Create SSH Tunnel Failed: retry 5\")\n else: break"
] | [
"0.579129",
"0.5754728",
"0.55586314",
"0.5536749",
"0.5503512",
"0.54927737",
"0.54771763",
"0.54581785",
"0.54327863",
"0.5351175",
"0.5337139",
"0.5322381",
"0.5304873",
"0.5271184",
"0.52708703",
"0.5213023",
"0.5150957",
"0.512968",
"0.5124702",
"0.5117299",
"0.5079624",
"0.5067241",
"0.50609",
"0.5053566",
"0.50492114",
"0.5041157",
"0.50359297",
"0.50136733",
"0.499794",
"0.49508786"
] | 0.610802 | 0 |
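A minimal sketch of the padding behaviour _consolidate_binds implements: with two remote binds but only one explicit local bind, the missing local bind defaults to ('0.0.0.0', 0), so the OS picks a free local port. The addresses below are made up for illustration.

remote_binds = [('10.0.0.5', 5432), ('10.0.0.5', 6379)]
local_binds = [('127.0.0.1', 15432)]
count = len(remote_binds) - len(local_binds)
local_binds.extend([('0.0.0.0', 0) for _ in range(count)])
print(local_binds)  # [('127.0.0.1', 15432), ('0.0.0.0', 0)]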
Return the SSH transport to the remote gateway | def _get_transport(self):
if self.ssh_proxy:
if isinstance(self.ssh_proxy, paramiko.proxy.ProxyCommand):
proxy_repr = repr(self.ssh_proxy.cmd[1])
else:
proxy_repr = repr(self.ssh_proxy)
self.logger.debug('Connecting via proxy: {0}'.format(proxy_repr))
_socket = self.ssh_proxy
else:
_socket = (self.ssh_host, self.ssh_port)
if isinstance(_socket, socket.socket):
_socket.settimeout(SSH_TIMEOUT)
_socket.connect((self.ssh_host, self.ssh_port))
transport = paramiko.Transport(_socket)
sock = transport.sock
if isinstance(sock, socket.socket):
sock.settimeout(SSH_TIMEOUT)
transport.set_keepalive(self.set_keepalive)
transport.use_compression(compress=self.compression)
transport.daemon = self.daemon_transport
# try to solve https://github.com/paramiko/paramiko/issues/1181
# transport.banner_timeout = 200
if isinstance(sock, socket.socket):
sock_timeout = sock.gettimeout()
sock_info = repr((sock.family, sock.type, sock.proto))
self.logger.debug('Transport socket info: {0}, timeout={1}'
.format(sock_info, sock_timeout))
return transport | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def connect():\n paramiko.util.log_to_file(LOG)\n trans = paramiko.Transport((HOST, 22))\n rsa_key = paramiko.RSAKey.from_private_key_file(KEY)\n trans.connect(username=USER, pkey=rsa_key)\n sftp = paramiko.SFTPClient.from_transport(trans)\n \n return trans, sftp",
"def ssh_tunnel(self):\n return self._ssh_tunnel",
"def get_transport(host):\n # If the host name is not valid, paramiko is unable to find the host\n # and throws an error.\n try:\n transport = paramiko.Transport(host)\n except:\n transport = None\n print(\"Unable to find server: \" + host)\n return transport",
"def authentication_protocol(transport_type):\n return \"ssh -o 'UserKnownHostsFile=/dev/null' -o 'StrictHostKeyChecking no'\"",
"def _connect_to_gateway(self):\n for key in self.ssh_pkeys:\n self.logger.debug('Trying to log in with key: {0}'\n .format(hexlify(key.get_fingerprint())))\n try:\n self._transport = self._get_transport()\n self._transport.connect(hostkey=self.ssh_host_key,\n username=self.ssh_username,\n pkey=key)\n if self._transport.is_alive:\n return\n except paramiko.AuthenticationException:\n self.logger.debug('Authentication error')\n self._stop_transport()\n\n if self.ssh_password: # avoid conflict using both pass and pkey\n self.logger.debug('Trying to log in with password: {0}'\n .format('*' * len(self.ssh_password)))\n try:\n self._transport = self._get_transport()\n self._transport.connect(hostkey=self.ssh_host_key,\n username=self.ssh_username,\n password=self.ssh_password)\n if self._transport.is_alive:\n return\n except paramiko.AuthenticationException:\n self.logger.debug('Authentication error')\n self._stop_transport()\n\n self.logger.error('Could not open connection to gateway')",
"def ssh():\n env['remote_port'] = env['port_map']['22']\n\n sys.stdout.write('Connecting to SSH session on remote port %(remote_port)s\\n' % env)\n\n run('chmod 600 %(pair_private_key)s' % env)\n\n client = paramiko.SSHClient()\n client.load_system_host_keys()\n client.connect(\n hostname=env['relay_server'],\n port=int(env['remote_port']),\n username=env['pair_user'],\n key_filename=env['pair_private_key']\n )\n\n channel = client.invoke_shell()\n posix_shell(channel)",
"def __ssh_tunnel(self):\n\n host = self.sshTunnelDict[\"ssh_ip\"]\n user = self.sshTunnelDict[\"ssh_user\"]\n password = self.sshTunnelDict[\"ssh_password\"]\n sfcs = self.sshTunnelDict[\"target_ip\"]\n\n tunnel_command = 'ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -C -N -f -L 3306:{0} {1}@{2}'.format(sfcs, user, host)\n retry = 5\n while retry:\n if not self.__check_ssh():\n try:\n ssh_tunnel = pexpect.spawn(tunnel_command)\n ssh_tunnel.expect('password:')\n time.sleep(0.1)\n ssh_tunnel.sendline(password)\n ssh_tunnel.expect(pexpect.EOF)\n retry -= 1\n except:\n raise Exception(\"Create SSH Tunnel Failed: retry 5\")\n else: break",
"def openSSH(target, user):\r\n ssh = paramiko.SSHClient()\r\n ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\r\n ssh.connect(target, username=user)\r\n return ssh",
"def _connect(self):\n ssh = paramiko.SSHClient()\n ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n ssh.connect(\n self.hostname,\n username=self.user,\n port=self.port,\n pkey=get_pkey(self.issho_conf[\"RSA_ID_PATH\"]),\n )\n return ssh",
"def getSSHConnection(host):\n try:\n ssh = SSHWrapper()\n ssh.connect(host.getID())\n return ssh\n except:\n return None",
"def getRemoteHost():",
"def get_ssh():\n\n ip = str(sc.sticky[\"SSH\"]['ip'])\n port = str(sc.sticky[\"SSH\"]['port'])\n user = str(sc.sticky[\"SSH\"]['user'])\n pw = str(sc.sticky[\"SSH\"]['password'])\n\n ssh_dict = {'ip': ip, 'port': port, 'user': user, 'password': pw}\n\n return ssh_dict",
"def transport(self) -> pulumi.Output['outputs.TransportResponse']:\n return pulumi.get(self, \"transport\")",
"def get_sshhost(self):\n return self._sshhost.gethost()",
"def open_connection_ssh():\n\tssh_server = config_basic.config_ssh_server()\n\tssh_username = config_basic.config_ssh_username()\n\tssh_password = config_basic.config_ssh_password()\n\tconnection = SSH(ssh_server, ssh_username, ssh_password)\n\treturn connection",
"def create_ssh_tunnel():\n \n # Reference link: https://sshtunnel.readthedocs.io/en/latest/\n tunnel = SSHTunnelForwarder(\n (config['ip'], 22),\n ssh_username=config['username'],\n ssh_password=config[\"ssh-password\"],\n remote_bind_address=('localhost', 3306),\n )\n\n tunnel.start() \n print(\"SSH Connected\") \n return tunnel",
"def ssh_tunnel():\n tunnel = SSHTunnelForwarder(\n config.dbreddit['ssh_host_ip'],\n remote_bind_address=('localhost', 5432),\n # local_bind_address=('localhost', 5432),\n ssh_username=config.dbreddit['user'],\n ssh_password=config.dbreddit['password'],\n )\n # Start the SSH tunnel\n print(tunnel)\n tunnel.start()\n return tunnel",
"def ssh(self):\n should_attempt = time.time() - SSHDataAccess._last_attempt > self._retry_after\n\n if self._ssh is None:\n if should_attempt:\n try:\n self._setup_server_connection()\n return self._ssh\n except: # noqa\n SSHDataAccess._last_attempt = time.time()\n msg = f\"Could not connect to server, will try again after {self._retry_after} seconds\"\n raise IOError(msg)\n\n return self._ssh",
"def connect(self, instance):\n client = sshclient.SSHClient()\n client.set_missing_host_key_policy(sshclient.AutoAddPolicy())\n client.connect(instance.ip_address, username=\"core\",\n key_filename=self._ssh_keyfile)\n return client",
"def __init__(self, host, username, password, port=22):\n self.sftp = None\n self.sftp_open = False\n \n # open SSH Transport stream\n self.transport = paramiko.Transport((host, port))\n \n self.transport.connect(username=username, password=password)",
"def _get_ssh_connection(cls, host, user=None):\n if not user:\n user = cls.user\n\n ssh_opts = ()\n ssh_opts += ('-oPasswordAuthentication=no',\n '-oStrictHostKeyChecking=no',\n '-oPort=22',\n '-oConnectTimeout=10')\n\n keyfile = None\n if 'ssh_keyfile' in cls.config:\n keyfile = cls.config['ssh_keyfile']\n\n ssh_opts += ('-o', 'IdentityFile=%s' % keyfile)\n\n if cls.use_controlpersist:\n ssh_opts += ('-oControlMaster=auto',\n '-oControlPersist=4h',\n '-oControlPath=~/.ssh/glusto-ssh-%r@%h:%p')\n\n scp_opts = ssh_opts\n\n ssh_opts += ('-T',)\n\n conn_name = \"%s@%s\" % (user, host)\n # if no existing connection, create one\n if conn_name not in cls._ssh_connections:\n cls.log.debug(\"Creating connection: %s\" % conn_name)\n try:\n ssh = SshMachine(host, user,\n ssh_opts=ssh_opts, scp_opts=scp_opts)\n except:\n cls.log.error(\"Exception trying to establish SshMachine\")\n return None\n cls._ssh_connections[conn_name] = ssh\n else:\n cls.log.debug(\"Retrieved connection from cache: %s\" % conn_name)\n ssh = cls._ssh_connections[conn_name]\n\n if ssh:\n return ssh\n\n print(\"oops. did not get ssh for %s\", conn_name)\n return None",
"def transport(self):\n return self._transport",
"def transport(self) -> Debugger2Transport:\n return self._client.transport",
"def SSH(*args, **kwargs):\n method = import_class(settings.ORCHESTRATION_SSH_METHOD_BACKEND)\n return method(*args, **kwargs)",
"def pull_file():\n try:\n global IP_OR_HOST\n paramiko.util.log_to_file(BASE_DIR + '/logs/amazon_os.log')\n config = _get_hvm_config()\n key = paramiko.RSAKey.from_private_key_file(config.get('amazon_hvm').get('private_key_path'))\n transport = paramiko.Transport(IP_OR_HOST, 22)\n transport.connect(username=config.get('amazon_hvm').get('username'), pkey=key)\n sftp = paramiko.SFTPClient.from_transport(transport)\n p = sftp.put(BASE_DIR + '/logs/amazon_os.log', '/etc/test/amazon_os.log')\n # sftp.get('remove_path', 'local_path')\n transport.close()\n except Exception as e:\n transport.close()\n raise e\n else:\n return transport",
"def connect(self):\n source_address = (socket.gethostname(), 0)\n dest_address = (self.host, self.port)\n\n self.sock = self.ssh_transport._open_ssh_channel(dest_address, source_address)\n\n # hack to work around issue in using paraminko channels as a \"socket\". See http://bugs.python.org/issue7806\n # for details. The gist is socket.close() doesn't actually close a socket in python. It only removes a reference\n # and allows the gc reference counting to perform the actual close.\n original_close = self.sock.close\n\n def monkey_patched_close():\n pass\n\n def real_close():\n original_close()\n\n self.ssh_transport.clean_up_callbacks.append(real_close)\n self.sock.close = monkey_patched_close\n\n if self._tunnel_host:\n self._tunnel()",
"def transport(self) -> Optional[pulumi.Input['TransportArgs']]:\n return pulumi.get(self, \"transport\")",
"def _spawn_ssh_tunnel(local_port: int, remote_port: int, remote_ip: str,\n server: str, port: int, key: Optional[str] = None):\n if sys.platform == 'win32':\n ssh_server = server + \":\" + str(port)\n return tunnel.paramiko_tunnel(local_port, remote_port, ssh_server, remote_ip, key)\n else:\n ssh = \"ssh -p %s -o ServerAliveInterval=%i\" % (port, max_keep_alive_interval)\n cmd = \"%s -S none -L 127.0.0.1:%i:%s:%i %s\" % (ssh, local_port, remote_ip, remote_port, server)\n return pexpect.spawn(cmd, env=os.environ.copy().pop('SSH_ASKPASS', None))",
"def get_transport(hostname=None, username=None, key_filename=None, timeout=None, port=22):\n client = get_client(hostname, username, key_filename, timeout, port)\n transport = client.get_transport()\n try:\n logger.debug(f\"Instantiated Paramiko client {client._id}\")\n logger.debug(f\"Instantiated Paramiko transport {transport.native_id}\")\n logger.info(\"Connected to [%s]\", hostname)\n yield transport\n finally:\n transport.close()\n logger.debug(f\"Destroyed Paramiko transport {transport.native_id}\")\n client.close()\n logger.debug(f\"Destroyed Paramiko client {client._id}\")",
"def _get_connect(self, port, username, password):\n paramiko.util.log_to_file(PARAMIKO_LOG)\n ssh_obj = paramiko.SSHClient()\n ssh_obj.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n\n try:\n ssh_obj.connect(self.host, port, username=username,\n password=password,\n allow_agent=True)\n except Exception as err:\n print \"Can't access node %s, detailed error as %s\" % (\n host,\n six.text_type(err))\n return ssh_obj"
] | [
"0.67360616",
"0.6717303",
"0.66640174",
"0.65196717",
"0.65074384",
"0.6271527",
"0.6258517",
"0.6214405",
"0.61691695",
"0.60874236",
"0.6065305",
"0.6048453",
"0.60281336",
"0.6024088",
"0.5985701",
"0.59793967",
"0.5929255",
"0.5899957",
"0.5889407",
"0.5888757",
"0.58854175",
"0.58634084",
"0.5854368",
"0.5800453",
"0.5793606",
"0.5787681",
"0.57370543",
"0.5727865",
"0.57043177",
"0.56932026"
] | 0.7293791 | 0 |
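The `_get_transport` record above builds a paramiko `Transport` over either a proxy or a plain TCP socket, bounding the connect with a timeout and turning on keepalive and compression. Below is a minimal sketch of that pattern for the plain-socket case, assuming only that paramiko is installed; the host name, port, keepalive interval, and `SSH_TIMEOUT` value are placeholders rather than values taken from the record.

```python
# Minimal sketch of building a paramiko Transport the way _get_transport()
# does for the plain-socket case; all concrete values here are placeholders.
import socket

import paramiko

SSH_TIMEOUT = 10.0  # assumed value; the real module defines its own constant


def open_transport(host="gateway.example.com", port=22):
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.settimeout(SSH_TIMEOUT)          # bound the TCP connect and reads
    sock.connect((host, port))
    transport = paramiko.Transport(sock)  # wrap the socket in an SSH transport
    transport.set_keepalive(5)            # seconds between keepalive packets
    transport.use_compression(compress=True)
    transport.daemon = True               # don't block interpreter exit
    return transport
```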
Shut the tunnel down. By default, wait until all open connections have closed; pass `force=True` to close them immediately | def stop(self, force=False):
self.logger.info('Closing all open connections...')
opened_address_text = ', '.join(
(address_to_str(k.local_address) for k in self._server_list)
) or 'None'
self.logger.debug('Listening tunnels: ' + opened_address_text)
self._stop_transport(force=force)
self._server_list = [] # reset server list
self.tunnel_is_up = {} # reset tunnel status | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _stop_transport(self, force=False):\n try:\n self._check_is_started()\n except (BaseSSHTunnelForwarderError,\n HandlerSSHTunnelForwarderError) as e:\n self.logger.warning(e)\n if force and self.is_active:\n # don't wait connections\n self.logger.info('Closing ssh transport')\n self._transport.close()\n self._transport.stop_thread()\n for _srv in self._server_list:\n status = 'up' if self.tunnel_is_up[_srv.local_address] else 'down'\n self.logger.info('Shutting down tunnel: {0} <> {1} ({2})'.format(\n address_to_str(_srv.local_address),\n address_to_str(_srv.remote_address),\n status\n ))\n _srv.shutdown()\n _srv.server_close()\n # clean up the UNIX domain socket if we're using one\n if isinstance(_srv, _StreamForwardServer):\n try:\n os.unlink(_srv.local_address)\n except Exception as e:\n self.logger.error('Unable to unlink socket {0}: {1}'\n .format(_srv.local_address, repr(e)))\n self.is_alive = False\n if self.is_active:\n self.logger.info('Closing ssh transport')\n self._transport.close()\n self._transport.stop_thread()\n self.logger.debug('Transport is closed')",
"def shutDown(self):\n self.host = None\n self.port = None\n if(self.loop is not None):\n test = asyncio.run_coroutine_threadsafe(self.stopLoop(), self.loop)\n self.thread.join()\n if(self.loop.is_running()):\n self.loop.stop()\n else:\n self.loop.close()\n self.pool.shutDown()\n self.pool = None\n self.loop = None\n self.thread = None",
"def shutdown(opts):\n log.debug(\"Proxy module %s shutting down!!\", opts[\"id\"])\n try:\n thisproxy[\"conn\"].close()\n\n except Exception: # pylint: disable=broad-except\n pass",
"def shutdown(self):\n ts.client.transport.close()",
"def Shutdown(self):\n self.conn.send(False)\n self.process.join()",
"def shutdown(self):\n self.channel.close()\n self.conn.close()",
"def shutdown(self):\n\n self.active = False\n\n try:\n self.listen_socket.shutdown(socket.SHUT_RDWR)\n except:\n self.logger.info(\"Ignoring listen soc shutdown error\")\n self.listen_socket = None\n\n with self.connect_cv:\n self.connect_cv.notifyAll()\n\n self.wakeup()\n self.dbg_state = \"down\"",
"def shutdown(self):\n if self.alive:\n libplasma.disconnect(self.conn)\n self.alive = False",
"def shutdown(self):\r\n self.socket.close()\r\n # self.socket_video.close()\r\n self.socket_state.close()",
"def shutdown(self):\n self.connected = False\n self.protocol.send_message(self.sock, '__!shutdown__')\n data = self.protocol.recover_message(self.sock)\n self.sock.close()\n self.sock = None",
"def shutdown(self):\n self._send_command('shutdown')\n self.sock.close()\n self.disconnected = True",
"def shutdown(self):\t\r\n\t\tself.is_running = False\r\n\t\tfor connection in self.established_connection_list:\r\n\t\t\tconnection.send('The server has been shutdown adruptly by the server owner.\\n')\r\n\t\t\tconnection.socket_send()",
"def shutdown():\n\n cmd = dict()\n cmd[\"type_\"] = \"shutdown\"\n cmd[\"name_\"] = \"all\"\n\n ## In case of the shutdown there will be no returned message to\n ## check the success.\n s = comm.send_and_receive_socket(cmd)\n\n s.close()",
"def db_close_conn(tunnel, engine):\n engine.dispose()\n tunnel.close()",
"def shutdown(self):\n try:\n self._request(\"POST /shutdown\")\n time.sleep(0.300)\n except requests.exceptions.ConnectionError:\n pass\n if self._process and self._process.poll() is None:\n self._process.kill()\n if self._session:\n self._session.close()",
"def shutdown(self):\n self._msg_disp.abort()\n self._conn_mgr.shutdown_connections()",
"def close(self) -> None:\n _LOGGER.info('Shutting down connections to deCONZ.')\n if self.websocket:\n self.websocket.stop()",
"def shutdown(self):\n self.sock.close()",
"def _close(self, force=False):\n if not force:\n self.__closed = True\n self.pingval = False\n # decrement the reference counter\n MockConnection.__refcount -= 1\n # if there are no mor active connections then close up\n if MockConnection.__refcount == 0 or force:\n MockConnection.destroy()",
"def stop(self):\n\n net_tuple = self.read_nodestate(0)\n\n # 1. Kill localhost client\n try:\n localhost_socket = self.lookup_socket(\"127.0.0.1\")\n localhost_connection = (localhost_socket, \"127.0.0.1\")\n self.send(localhost_connection, \"stop\")\n\n except ConnectionRefusedError:\n pass # Localhost is already disconnected\n\n log_msg = \"Attempting to gracefully disconnect and disassociate from all clients...\"\n Primitives.log(log_msg, in_log_level=\"Info\")\n\n # 2. Disconnect from all clients\n for connection in net_tuple:\n log_msg = str(\"Trying to disconnect from socket: \" + str(connection[0]))\n Primitives.log(log_msg, in_log_level=\"Debug\")\n\n try:\n self.disconnect(connection, disallow_local_disconnect=True)\n\n except OSError:\n another_log_msg = str(\"Failed to disconnect from socket: \"+str(connection[0]))\n Primitives.log(another_log_msg, in_log_level=\"Warning\")\n\n finally:\n Primitives.log(\"Successfully disconnected\", in_log_level=\"Debug\")\n\n # Forcefully close localhost socket\n localhost_sock_name = localhost.getsockname()\n localhost.close()\n\n Primitives.log(\"Exiting gracefully;\", in_log_level=\"Info\")\n\n # 3. Kill the network injector and terminate the Server.\n\n self.write_nodestate(nodeState, 2, True) # set terminated=True\n self.write_nodestate(nodeState, 4, True) # set injector_terminated = True\n\n # Hack the socket.listen() loop in the init() function by connecting to it(localhost),\n # which will force it to terminate.\n\n temp = socket.socket()\n temp.connect(localhost_sock_name) # This will kill the localhost socket\n temp.close()\n\n # noinspection PyProtectedMember\n os._exit(0)",
"def disconnect(self):\r\n self._manual_disconnect = True\r\n self.transport.close()",
"async def shutdown(self):\n\n if self.log_output:\n logging.info('Shutting down ...')\n else:\n print('Shutting down ...')\n\n await self.send_reset()\n\n try:\n self.loop.stop()\n except:\n pass\n try:\n self.loop.close()\n except:\n pass\n sys.exit(0)",
"def close_connection(self):\n if self.ssh_params == False:\n self.cursor.close()\n self.connection.close()\n if self.autoconnection == False:\n print(\"py2edw: Connection Closed Successfully\")\n else:\n self.cursor.close()\n self.connection.close()\n self.server.stop()\n if self.autoconnection == False:\n print(\"py2edw: Connection Closed Successfully\")",
"def stop(self):\n with self.active_lock:\n self.active = False\n if self.thread:\n self.thread.join()\n for conn in self.connections:\n conn.close()\n for srv_sock in self.srv_socks:\n srv_sock.close()\n for client_sock in self.client_socks:\n client_sock.close()\n self.client_socks = []\n self.srv_socks = []\n self.connections = []\n self.scenario = None",
"async def shutdown_listener(self):\n # If a comm port has been established, instruct the listener to shutdown so that proper\n # kernel termination can occur. If not done, the listener keeps the launcher process\n # active, even after the kernel has terminated, leading to less than graceful terminations.\n\n if self.comm_port > 0:\n shutdown_request = dict()\n shutdown_request['shutdown'] = 1\n\n try:\n await self._send_listener_request(shutdown_request, shutdown_socket=True)\n self.log.debug(\"Shutdown request sent to listener via gateway communication port.\")\n except Exception as e:\n if not isinstance(e, OSError) or e.errno != errno.ECONNREFUSED:\n self.log.warning(\"An unexpected exception occurred sending listener shutdown to {}:{} for \"\n \"KernelID '{}': {}\"\n .format(self.comm_ip, self.comm_port, self.kernel_id, str(e)))\n\n # Also terminate the tunnel process for the communication port - if in play. Failure to terminate\n # this process results in the kernel (launcher) appearing to remain alive following the shutdown\n # request, which triggers the \"forced kill\" termination logic.\n\n comm_port_name = KernelChannel.COMMUNICATION.value\n comm_port_tunnel = self.tunnel_processes.get(comm_port_name, None)\n if comm_port_tunnel:\n self.log.debug(\"shutdown_listener: terminating {} tunnel process.\".format(comm_port_name))\n comm_port_tunnel.terminate()\n del self.tunnel_processes[comm_port_name]",
"def shutdown(self):\n asyncio.cancel(self._server_coro)\n for hid, coro in self.conns.items():\n asyncio.cancel(coro)",
"async def shutdown_gracefully(self) -> None:",
"async def shutdown_gracefully(self) -> None:",
"def close(self):\n self._server.shutdown()\n self._server = None",
"def shutdown(self):\n self.socket_thread.stop()"
] | [
"0.77367306",
"0.6694203",
"0.65290046",
"0.64633465",
"0.6440187",
"0.64118755",
"0.63934726",
"0.6372723",
"0.63686204",
"0.6336779",
"0.6279869",
"0.62692505",
"0.62551904",
"0.6253709",
"0.622172",
"0.62026864",
"0.6190491",
"0.6169963",
"0.61657774",
"0.6155323",
"0.6127215",
"0.61197746",
"0.611585",
"0.611415",
"0.60800076",
"0.603593",
"0.60157984",
"0.60157984",
"0.6015126",
"0.59951407"
] | 0.77172184 | 1 |
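The `stop` record above separates a graceful shutdown, which waits for open forwarded connections, from a forced one. A hedged usage sketch via sshtunnel's public `SSHTunnelForwarder` API follows; the gateway name and credentials are placeholders, and `stop(force=True)` is only available in recent sshtunnel releases.

```python
# Usage sketch: graceful vs. forced shutdown of an SSH tunnel, assuming the
# sshtunnel package; host and credentials below are placeholders.
from sshtunnel import SSHTunnelForwarder

server = SSHTunnelForwarder(
    "gateway.example.com",
    ssh_username="user",
    ssh_password="secret",
    remote_bind_address=("127.0.0.1", 5432),
)
server.start()
try:
    pass  # ... talk to ("127.0.0.1", server.local_bind_port) here ...
finally:
    server.stop()  # graceful: waits for open connections to close
    # server.stop(force=True)  # forced: tears connections down immediately
```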
Open connection to SSH gateway. First try with all keys loaded from an SSH agent (if allowed); then with those passed directly or read from ~/.ssh/config; as a last resort, try with a provided password | def _connect_to_gateway(self):
for key in self.ssh_pkeys:
self.logger.debug('Trying to log in with key: {0}'
.format(hexlify(key.get_fingerprint())))
try:
self._transport = self._get_transport()
self._transport.connect(hostkey=self.ssh_host_key,
username=self.ssh_username,
pkey=key)
                if self._transport.is_alive():  # is_alive is a method; the bare attribute is always truthy
return
except paramiko.AuthenticationException:
self.logger.debug('Authentication error')
self._stop_transport()
if self.ssh_password: # avoid conflict using both pass and pkey
self.logger.debug('Trying to log in with password: {0}'
.format('*' * len(self.ssh_password)))
try:
self._transport = self._get_transport()
self._transport.connect(hostkey=self.ssh_host_key,
username=self.ssh_username,
password=self.ssh_password)
                if self._transport.is_alive():  # is_alive is a method; the bare attribute is always truthy
return
except paramiko.AuthenticationException:
self.logger.debug('Authentication error')
self._stop_transport()
self.logger.error('Could not open connection to gateway') | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def session_open(self):\n logger.debug(\"entering session_open()\")\n kwargs = {\"hostname\": self.host, \"username\": self.user}\n ssh_client = paramiko.SSHClient()\n ssh_client.load_system_host_keys()\n ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n ssh_config = os.path.expanduser(\"~/.ssh/config\")\n ask_pass = False\n key_found = False\n if (\n os.path.isfile(os.path.expanduser(\"~/.ssh/id_rsa\"))\n or os.path.isfile(os.path.expanduser(\"~/.ssh/id_dsa\"))\n or os.path.isfile(os.path.expanduser(\"~/.ssh/id_ecdsa\"))\n ):\n key_found = True\n\n if os.path.isfile(ssh_config):\n config = paramiko.SSHConfig()\n with open(ssh_config) as open_ssh_config:\n config.parse(open_ssh_config)\n config = config.lookup(self.host)\n if config.get(\"proxycommand\"):\n self._sock = paramiko.proxy.ProxyCommand(config.get(\"proxycommand\"))\n kwargs.update({\"sock\": self._sock})\n\n agent = paramiko.Agent()\n agent_keys = agent.get_keys()\n logger.debug(\"ssh agent has {} keys\".format(len(agent_keys)))\n\n if self.passwd is not None:\n kwargs.update(\n {\"password\": self.passwd, \"allow_agent\": False, \"look_for_keys\": False}\n )\n elif self.user != getpass.getuser():\n print(\n \"skipping publickey ssh auth as {} != {}\".format(\n self.user, getpass.getuser()\n )\n )\n kwargs.update({\"allow_agent\": False, \"look_for_keys\": False})\n ask_pass = True\n elif self.key_filename is not None:\n kwargs.update(\n {\n \"key_filename\": self.key_filename,\n \"allow_agent\": False,\n \"look_for_keys\": False,\n \"password\": None,\n }\n )\n # paramiko is a little broken (see github issue #1664) \n # work around by always asking for passphrase here\n # else \"SSHException: encountered RSA key, expected OPENSSH key\" error\n # when key has passphrase\n passphrase = getpass.getpass(\n prompt=\"ssh key passphrase (Enter for None): \", stream=None\n )\n if passphrase != \"\":\n kwargs.update({\"passphrase\": passphrase})\n elif len(agent_keys) == 0 and not key_found:\n print(\"no ssh keys found, nor ssh agent running, skipping publickey ssh auth\")\n kwargs.update({\"allow_agent\": False, \"look_for_keys\": False})\n ask_pass = True\n\n if ask_pass:\n self.passwd = getpass.getpass(\n prompt=\"{}@{}'s password: \".format(self.user, self.host), stream=None\n )\n kwargs[\"password\"] = self.passwd\n\n try:\n ssh_client.connect(**kwargs)\n except PasswordRequiredException:\n passphrase = getpass.getpass(\n prompt=\"ssh key passphrase (Enter for None): \", stream=None\n )\n if passphrase != \"\":\n kwargs.update({\"passphrase\": passphrase})\n ssh_client.connect(**kwargs)\n return ssh_client",
"def __ssh_tunnel(self):\n\n host = self.sshTunnelDict[\"ssh_ip\"]\n user = self.sshTunnelDict[\"ssh_user\"]\n password = self.sshTunnelDict[\"ssh_password\"]\n sfcs = self.sshTunnelDict[\"target_ip\"]\n\n tunnel_command = 'ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -C -N -f -L 3306:{0} {1}@{2}'.format(sfcs, user, host)\n retry = 5\n while retry:\n if not self.__check_ssh():\n try:\n ssh_tunnel = pexpect.spawn(tunnel_command)\n ssh_tunnel.expect('password:')\n time.sleep(0.1)\n ssh_tunnel.sendline(password)\n ssh_tunnel.expect(pexpect.EOF)\n retry -= 1\n except:\n raise Exception(\"Create SSH Tunnel Failed: retry 5\")\n else: break",
"def _consolidate_auth(ssh_password=None,\n ssh_pkey=None,\n ssh_pkey_password=None,\n allow_agent=True,\n host_pkey_directories=None,\n logger=None):\n ssh_loaded_pkeys = SSHTunnelForwarder.get_keys(\n logger=logger,\n host_pkey_directories=host_pkey_directories,\n allow_agent=allow_agent\n )\n\n if isinstance(ssh_pkey, string_types):\n ssh_pkey_expanded = os.path.expanduser(ssh_pkey)\n if os.path.exists(ssh_pkey_expanded):\n ssh_pkey = SSHTunnelForwarder.read_private_key_file(\n pkey_file=ssh_pkey_expanded,\n pkey_password=ssh_pkey_password or ssh_password,\n logger=logger\n )\n elif logger:\n logger.warning('Private key file not found: {0}'\n .format(ssh_pkey))\n if isinstance(ssh_pkey, paramiko.pkey.PKey):\n ssh_loaded_pkeys.insert(0, ssh_pkey)\n\n if not ssh_password and not ssh_loaded_pkeys:\n raise ValueError('No password or public key available!')\n return (ssh_password, ssh_loaded_pkeys)",
"def login(host):\n\n\n \"\"\" change this settings to make use. \"\"\"\n gateway_user = \"lonli\"\n gateway_ip = \"127.0.0.1\"\n gateway_port = \"22\"\n gateway_key = \"/home/lonli/.ssh/id_rsa\"\n\n \"\"\" change abbove settings to make use. \"\"\"\n\n\n if host:\n try:\n subprocess.check_output([\"ssh\", \"-p\", gateway_port, \"-i\", gateway_key,\n \"{0}@{1}\".format(gateway_user, gateway_ip), \"grep {0} ~/.ssh/config\".format(host)])\n except subprocess.CalledProcessError as e:\n print(\"'{0}' does not exists in the configuratian of the gateway!\".format(host), file=sys.stderr)\n return\n\n to_gateway = \"ssh -p {0} -i {1} {2}@{3}\".format(gateway_port, gateway_key, gateway_user, gateway_ip)\n ssh = pexpect.spawn(to_gateway)\n if host:\n\n \n \"\"\" change this settings to make use. \"\"\"\n exps = [\n (\"lonli@arch\", 'echo -n \"Enter diretory : \" && read && [ -d \"${REPLY}\" ] && cd ${REPLY}'),\n (\"Enter diretory : \", \"/tmp\"),\n (\"/tmp\", \"pwd\"),\n ]\n \"\"\" change abbove session to make use. \"\"\"\n\n\n for p, s in exps:\n # print(\"expect : {0}, then send : {1}\".format(p, s))\n ssh.expect(p)\n ssh.sendline(s)\n winch_handler = sigwinch_handler(ssh)\n signal.signal(signal.SIGWINCH, winch_handler)\n winch_handler(None, None)\n ssh.interact()",
"def connectSsh(self):\n connect_handle = pexpect.spawn(\"ssh -q -o StrictHostKeyChecking=no root@%s\" % self.ip)\n connect_handle.setwinsize(800,800)\n connect_handle.logfile_read = sys.stdout\n #connect_handle.logfile_send = sys.stdout\n i = 0\n ssh_newkey = r'(?i)Are you sure you want to continue connecting'\n remote_key_changed = r\"REMOTE HOST IDENTIFICATION HAS CHANGED\"\n\n perm_denied = r\"(?i)Permission denied\"\n while True:\n i = connect_handle.expect([ssh_newkey, 'assword:',self.promptshell,\n pexpect.EOF, pexpect.TIMEOUT,\n remote_key_changed, perm_denied])\n if i==0:\n connect_handle.sendline('yes')\n continue\n elif i==1:\n logger.info(\"Password supplied\")\n connect_handle.sendline(self.password)\n continue\n\t elif i==2:\n self._mode = CLI_MODES.shell\n self._prompt = self.promptshell\n break\n elif i==3:\n logger.info(\"Connection closed: %s\" % self)\n logger.info(connect_handle.before) # print out the result\n raise ValueError(\"Connection Closed: %s\" % self)\n elif i==4:\n logger.warning(\"Timeout while waiting for connection\")\n logger.info(connect_handle.before) # print out the result\n raise ValueError(\"Unable to establish connection %s\" % self)\n elif i==5:\n logger.warn(\"Removing offending key from .known_hosts..\")\n known_hosts_file = os.path.expanduser(\"~/.ssh/known_hosts\")\n\n if \"darwin\" in sys.platform.lower():\n # MAC OS\n utils.run_cmd(\"sed -i 1 's/%s.*//' %s\" % (self.ip, known_hosts_file))\n elif \"linux\" in sys.platform.lower():\n # Linux\n utils.run_cmd(\"sed -i 's/%s.*//' %s\" % (self.ip, known_hosts_file))\n\n connect_handle = pexpect.spawn(\"ssh root@%s\" % self.ip)\n connect_handle.setwinsize(800,800)\n connect_handle.logfile_read = sys.stdout\n\n continue\n elif i==6:\n logger.warning(\"Permission denied: %s\" % self)\n logger.info(connect_handle.before) # print out the result\n raise ValueError(\"Permission denied: %s.\" % self)\n return connect_handle",
"def open(self):\n class IgnorePolicy(paramiko.MissingHostKeyPolicy):\n \"\"\"\n Policy for ignoring missing host keys.\n\n TODO: It would be better to know and confirm the host key.\n \"\"\"\n\n def missing_host_key(self, client, hostname, key):\n return\n\n client = paramiko.SSHClient()\n client.set_missing_host_key_policy(IgnorePolicy())\n client.connect(\n hostname=self._ssh_config.ip,\n username=self._ssh_config.user,\n pkey=paramiko.rsakey.RSAKey.from_private_key(io.StringIO(self._ssh_config.key))\n )\n\n self._paramiko_ssh_client = client",
"def _ssh_connect():\n client = paramiko.SSHClient()\n client.load_system_host_keys()\n client.set_missing_host_key_policy(paramiko.WarningPolicy)\n\n client.connect(**SSH_CONFIG)\n yield client\n\n client.close()",
"def ssh():\n env['remote_port'] = env['port_map']['22']\n\n sys.stdout.write('Connecting to SSH session on remote port %(remote_port)s\\n' % env)\n\n run('chmod 600 %(pair_private_key)s' % env)\n\n client = paramiko.SSHClient()\n client.load_system_host_keys()\n client.connect(\n hostname=env['relay_server'],\n port=int(env['remote_port']),\n username=env['pair_user'],\n key_filename=env['pair_private_key']\n )\n\n channel = client.invoke_shell()\n posix_shell(channel)",
"def setupSSH(key_rsa_path, key_append_path, key_gen_cmd, HostList):\n # Generate SSH key on localhost\n LocalKey = getLocalKey(key_gen_cmd, key_rsa_path)\n\n # Setup passwordless SSH with each of the specified machines\n for i in HostList:\n if i[0] != 'localhost':\n\n box_ip = i[1]\n user = i[2]\n pwd = i[3]\n\n out = subprocess.Popen(\"echo $\" + user, shell=True,\n stdout=subprocess.PIPE)\n box_user = out.stdout.read().rstrip('\\n')\n out = subprocess.Popen(\"echo $\" + pwd, shell=True,\n stdout=subprocess.PIPE)\n box_pwd = out.stdout.read().rstrip('\\n')\n try:\n\n RemoteKey = getRemoteKey(key_gen_cmd, key_rsa_path, box_ip,\n box_user, box_pwd)\n appendLocalKeyInRemote(LocalKey, key_append_path, box_ip,\n box_user, box_pwd)\n appendRemoteKeyInLocal(RemoteKey, key_append_path, box_ip)\n logging.info(\"Passwordless SSH has been setup b/w \\\n localhost & %s\", box_ip)\n\n except (paramiko.SSHException, paramiko.BadHostKeyException,\n paramiko.AuthenticationException, socket.error) as e:\n logging.info(\"Passwordless SSH setup failed b/w localhost & %s \\\n with %s, please verify host connectivity\", box_ip, e)",
"def _start_ssh(self):\n try:\n message = '\\nEnter number you want to connect: '\n num = raw_input(message)\n while not int(num) in self.instance_list:\n num = raw_input(message)\n\n message_user = 'Enter username for ssh_login(blank = %s): ' % DEFAULT_USER \n user = raw_input(message_user)\n if not user:\n user = DEFAULT_USER\n \n target = self.instance_list[int(num)]\n ssh_key_path = os.path.join(SSH_DIR, target['key'])\n if not os.path.exists(ssh_key_path):\n print 'SSH key not found! KEY_PATH[ %s ]' % ssh_key_path\n return\n\n command = COMMAND % {'sshkey' : ssh_key_path, 'user' : user, 'server' : target['dns'], 'port' : self.port}\n\n print 'Connecting to \"%s\"... [SSH COMMAND: %s ]' % (target['name'], command)\n os.system(command)\n except KeyboardInterrupt:\n print '\\nAborted!'\n finally:\n sys.exit()",
"def login (self,server,username,password='',terminal_type='ansi',original_prompts=r\"][#$]|~[#$]|bash.*?[#$]|[#$] \",login_timeout=10):\r\n cmd = \"ssh -l %s %s\" % (username, server)\r\n spawn.__init__(self, cmd, timeout=login_timeout)\r\n #, \"(?i)no route to host\"])\r\n i = self.expect([\"(?i)are you sure you want to continue connecting\", original_prompts, \"(?i)password\", \"(?i)permission denied\", \"(?i)terminal type\", TIMEOUT, \"(?i)connection closed by remote host\"])\r\n if i==0: # New certificate -- always accept it. This is what you if SSH does not have the remote host's public key stored in the cache.\r\n self.sendline(\"yes\")\r\n i = self.expect([\"(?i)are you sure you want to continue connecting\", original_prompts, \"(?i)password\", \"(?i)permission denied\", \"(?i)terminal type\", TIMEOUT])\r\n if i==2: # password\r\n self.sendline(password)\r\n i = self.expect([\"(?i)are you sure you want to continue connecting\", original_prompts, \"(?i)password\", \"(?i)permission denied\", \"(?i)terminal type\", TIMEOUT])\r\n if i==4:\r\n self.sendline(terminal_type)\r\n i = self.expect([\"(?i)are you sure you want to continue connecting\", original_prompts, \"(?i)password\", \"(?i)permission denied\", \"(?i)terminal type\", TIMEOUT])\r\n\r\n if i==0:\r\n # This is weird. This should not happen twice in a row.\r\n self.close()\r\n return False\r\n elif i==1: # can occur if you have a public key pair set to authenticate. \r\n ### TODO: May NOT be OK if expect() matched a false prompt.\r\n pass\r\n elif i==2: # password prompt again\r\n # For incorrect passwords, some ssh servers will\r\n # ask for the password again, others return 'denied' right away.\r\n # If we get the password prompt again then this means\r\n # we didn't get the password right the first time. \r\n self.close()\r\n return False\r\n elif i==3: # permission denied -- password was bad.\r\n self.close()\r\n return False\r\n elif i==4: # terminal type again? WTF?\r\n self.close()\r\n return False\r\n elif i==5: # Timeout\r\n # This is tricky... presume that we are at the command-line prompt.\r\n # It may be that the prompt was so weird that we couldn't match it.\r\n pass\r\n elif i==6: # Connection closed by remote host\r\n self.close()\r\n return False\r\n else: # Unexpected \r\n self.close()\r\n return False\r\n # We appear to be in -- reset prompt to something more unique.\r\n if not self.set_unique_prompt():\r\n self.close()\r\n return False\r\n return True",
"def _connect(self):\n ssh = paramiko.SSHClient()\n ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n ssh.connect(\n self.hostname,\n username=self.user,\n port=self.port,\n pkey=get_pkey(self.issho_conf[\"RSA_ID_PATH\"]),\n )\n return ssh",
"def establish_connection(self):\r\n\r\n #creates SSH connection and adds SSH key to .known_hosts\r\n self.ssh_conn = paramiko.SSHClient()\r\n self.ssh_conn.set_missing_host_key_policy(paramiko.AutoAddPolicy())\r\n\r\n try:\r\n self.ssh_conn.connect(**self.conn_parm)\r\n print \"Connected to %s\" % self.conn_parm['hostname']\r\n #testing: self.ssh_conn.close()\r\n except socket.error:\r\n print \"Connection Failed on device %s\" % self.conn_parm['hostname']\r\n\r\n #find prompt\r\n open_session = self.ssh_conn.invoke_shell()\r\n output = open_session.recv(1000)\r\n\r\n #testing: print output\r\n\r\n #go into Enable-Mode if not already in it\r\n if '#' not in output:\r\n open_session.send('enable\\n')\r\n time.sleep(1)\r\n open_session.send(self.password)\r\n open_session.send('\\n')\r\n else:\r\n print \"In Enable-Mode\"\r\n\r\n #turn off paging\r\n open_session.send('terminal length 0\\n')\r\n time.sleep(3)\r\n \r\n return open_session",
"def agent_auth(transport, username):\n agent = paramiko.Agent()\n agent_keys = agent.get_keys()\n if len(agent_keys) == 0:\n return\n\n for key in agent_keys:\n print('Trying ssh-agent key %s' % hexlify(key.get_fingerprint()))\n try:\n transport.auth_publickey(username, key)\n print('... success!')\n return\n except paramiko.SSHException:\n print('... nope.')",
"def connect():\n paramiko.util.log_to_file(LOG)\n trans = paramiko.Transport((HOST, 22))\n rsa_key = paramiko.RSAKey.from_private_key_file(KEY)\n trans.connect(username=USER, pkey=rsa_key)\n sftp = paramiko.SFTPClient.from_transport(trans)\n \n return trans, sftp",
"def connecting(self):\r\n \r\n ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) \r\n try:\r\n ssh.connect(self.hostname,self.port,self.identifier,self.password)\r\n feedback = '***Connection Established***'\r\n return feedback\r\n \r\n except Exception as e:\r\n feedback= '***Connection failed : '+str(e)+'***'\r\n return feedback\r\n sys.exit(1)",
"def _authenticate(self, transport, username, password=None, hostkey=None,\n pkey=None):\n if not password:\n if pkey:\n if not isinstance(pkey, (tuple, list)):\n pkey = [pkey]\n else:\n log.debug(\"Fetching keys from SSH agent...\")\n agent = ssh.Agent()\n agent_keys = agent.get_keys()\n log.debug(\"Agent keys: %r\", agent_keys)\n key_files = [join(self.config.get('ssh_dir',\n expanduser('~/.ssh')), 'id_%s' % keytype)\n for keytype in ('dsa', 'rsa')]\n pkey = itertools.chain(agent_keys, key_files)\n \n saved_exception = None\n for key in pkey:\n if not isinstance(key, ssh.PKey):\n if not exists(key):\n continue\n \n log.debug(\"Loading key file: %s\", key)\n key = self._load_private_key(key)\n \n try:\n transport.connect(username=username, hostkey=hostkey,\n pkey=key)\n if transport.is_authenticated():\n log.info(\"Authentication (pubkey) successful. \"\n \"Key: '%s'.\", key.get_name())\n return\n except ssh.SSHException as exc:\n log.info(\"Authenticating using key '%s' failed.\",\n key.get_name())\n saved_exception = exc\n\n try:\n transport.connect(username=username, password=password,\n hostkey=hostkey)\n log.info(\"Authentication (password) successful.\")\n if transport.is_authenticated():\n return\n except ssh.SSHException as exc:\n saved_exception = exc\n\n if saved_exception:\n raise saved_exception",
"def open_connection_ssh():\n\tssh_server = config_basic.config_ssh_server()\n\tssh_username = config_basic.config_ssh_username()\n\tssh_password = config_basic.config_ssh_password()\n\tconnection = SSH(ssh_server, ssh_username, ssh_password)\n\treturn connection",
"def open_ssh():\n print('Opening SSH...')",
"def _read_ssh_config(ssh_host,\n ssh_config_file,\n ssh_username=None,\n ssh_pkey=None,\n ssh_port=None,\n ssh_proxy=None,\n compression=None,\n logger=None):\n ssh_config = paramiko.SSHConfig()\n if not ssh_config_file: # handle case where it's an empty string\n ssh_config_file = None\n\n # Try to read SSH_CONFIG_FILE\n try:\n # open the ssh config file\n with open(os.path.expanduser(ssh_config_file), 'r') as f:\n ssh_config.parse(f)\n # looks for information for the destination system\n hostname_info = ssh_config.lookup(ssh_host)\n # gather settings for user, port and identity file\n # last resort: use the 'login name' of the user\n ssh_username = (\n ssh_username or\n hostname_info.get('user')\n )\n ssh_pkey = (\n ssh_pkey or\n hostname_info.get('identityfile', [None])[0]\n )\n ssh_host = hostname_info.get('hostname')\n ssh_port = ssh_port or hostname_info.get('port')\n\n proxycommand = hostname_info.get('proxycommand')\n ssh_proxy = ssh_proxy or (paramiko.ProxyCommand(proxycommand) if\n proxycommand else None)\n if compression is None:\n compression = hostname_info.get('compression', '')\n compression = True if compression.upper() == 'YES' else False\n except IOError:\n if logger:\n logger.warning(\n 'Could not read SSH configuration file: {0}'\n .format(ssh_config_file)\n )\n except (AttributeError, TypeError): # ssh_config_file is None\n if logger:\n logger.info('Skipping loading of ssh configuration file')\n finally:\n return (ssh_host,\n ssh_username or getpass.getuser(),\n ssh_pkey,\n int(ssh_port) if ssh_port else 22, # fallback value\n ssh_proxy,\n compression)",
"def open(self):\n logging.debug('Connecting to device %s' % self.paramiko_cfg.get('hostname'))\n self.ssh = paramiko.SSHClient()\n self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n self.ssh.connect(**self.paramiko_cfg)",
"def manual_auth(t, username, hostname, keyfile_path):\n if keyfile_path:\n try:\n if not os.path.exists(keyfile_path):\n print(\"{} doesn't exist\".format(keyfile_path))\n sys.exit(2)\n key = paramiko.RSAKey.from_private_key_file(keyfile_path)\n except paramiko.PasswordRequiredException:\n password = getpass.getpass('RSA key password: ')\n key = paramiko.RSAKey.from_private_key_file(keyfile_path, password)\n t.auth_publickey(username, key)\n else:\n pw = getpass.getpass('Password for %s@%s: ' % (username, hostname))\n t.auth_password(username, pw)",
"def _connect(self):\n self.ssh_conn = paramiko.SSHClient()\n if self.debug:\n self.ssh_conn.log = paramiko.common.logging.basicConfig(\n level=paramiko.common.DEBUG)\n # \"known_hosts\" is ignored, so there's no potential for mismatched keys\n self.ssh_conn.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n # The default for allow_agent (False) breaks SSH to some devices\n self.ssh_conn.connect(self.device, username=self.username,\n password=self.passwd, allow_agent=False)\n self.ssh_shell = self.ssh_conn.invoke_shell()\n self.ssh_shell.set_combine_stderr(True)\n self.ssh_shell.setblocking(True)",
"def _connect(self):\n self.client = SSHClient()\n self.client.load_system_host_keys()\n self.client.set_missing_host_key_policy(AutoAddPolicy())\n self.client.connect(self.host,\n username=self.user,\n key_filename=self.filepath,\n look_for_keys=True,\n timeout=5000)\n self.scp = SCPClient(self.client.get_transport())",
"def openSSH(target, user):\r\n ssh = paramiko.SSHClient()\r\n ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\r\n ssh.connect(target, username=user)\r\n return ssh",
"def _setup_server_connection(self):\n client = paramiko.SSHClient()\n client.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n try:\n client.load_system_host_keys()\n except IOError:\n print(\"Could not find ssh host keys.\")\n ssh_known_hosts = input(\"Provide ssh known_hosts key file =\")\n while True:\n try:\n client.load_system_host_keys(str(ssh_known_hosts))\n break\n except IOError:\n print(\"Cannot read file, try again\")\n ssh_known_hosts = input(\"Provide ssh known_hosts key file =\")\n\n server_user = server_setup.get_server_user()\n client.connect(\n server_setup.SERVER_ADDRESS,\n username=server_user,\n port=server_setup.SERVER_SSH_PORT,\n timeout=10,\n )\n\n self._ssh = client",
"def agent_auth(self, transport, username):\n\n # ki = paramiko.RSAKey.from_private_key_file(os.path.abspath(os.path.expanduser(self.key)))\n ki = paramiko.RSAKey.from_private_key_file(os.path.abspath(os.path.expanduser(self.key)))\n\n '''\n try:\n log.debug(\"In Agent_Auth...\")\n log.debug(\"self.key: %s\" % os.path.abspath(os.path.expanduser(self.key)))\n log.debug(\"self.key: %s\" % os.path.split(self.key)[1])\n ki = paramiko.RSAKey.from_private_key_file(os.path.split(self.key)[1])\n log.debug(\"Where the hell am I now?\") \n except Exception, e:\n print 'Failed loading' % (self.key, e)\n '''\n agent = paramiko.Agent()\n log.debug(\"Where the hell am I now?\")\n agent_keys = agent.get_keys() + (ki,)\n if len(agent_keys) == 0:\n return\n log.debug(\"About to attempt all keys in agent_keys\")\n for key in agent_keys:\n print ('Trying ssh-agent key %s' % str(key.get_fingerprint().encode('hex'),))\n try:\n transport.auth_publickey(username, key)\n print '... success!'\n return\n except paramiko.SSHException, e:\n print '... failed!', e",
"def connect(self, driver):\n # 0 1 2\n events = [driver.password_re, self.device.prompt_re, driver.unable_to_connect_re,\n # 3 4 5 6 7\n NEWSSHKEY, KNOWN_HOSTS, HOST_KEY_FAILED, MODULUS_TOO_SMALL, PROTOCOL_DIFFER,\n # 8 9\n driver.timeout_re, pexpect.TIMEOUT]\n\n transitions = [\n (driver.password_re, [0, 1, 4, 5], -1, partial(a_save_last_pattern, self), 0),\n (self.device.prompt_re, [0], -1, partial(a_save_last_pattern, self), 0),\n # cover all messages indicating that connection was not set up\n (driver.unable_to_connect_re, [0], -1, a_unable_to_connect, 0),\n (NEWSSHKEY, [0], 1, partial(a_send_line, \"yes\"), 10),\n (KNOWN_HOSTS, [0, 1], 0, None, 0),\n (HOST_KEY_FAILED, [0], -1, ConnectionError(\"Host key failed\", self.hostname), 0),\n (MODULUS_TOO_SMALL, [0], 0, self.fallback_to_sshv1, 0),\n (PROTOCOL_DIFFER, [0], 4, self.fallback_to_sshv1, 0),\n (PROTOCOL_DIFFER, [4], -1, ConnectionError(\"Protocol version differs\", self.hostname), 0),\n (pexpect.TIMEOUT, [0], 5, partial(a_send, \"\\r\\n\"), 10),\n (pexpect.TIMEOUT, [5], -1, ConnectionTimeoutError(\"Connection timeout\", self.hostname), 0),\n (driver.timeout_re, [0], -1, ConnectionTimeoutError(\"Connection timeout\", self.hostname), 0),\n ]\n\n logger.debug(\"EXPECTED_PROMPT={}\".format(pattern_to_str(self.device.prompt_re)))\n fsm = FSM(\"SSH-CONNECT\", self.device, events, transitions, timeout=_C['connect_timeout'],\n searchwindowsize=160)\n return fsm.run()",
"def connect(self):\n try:\n self.ssh = paramiko.SSHClient()\n self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n credentials = self.auth.to_dict()\n credentials.update({'hostname': self.host})\n self.ssh.connect(**credentials)\n self.conn_status = self.CONN_OPEN\n except paramiko.AuthenticationException:\n self.conn_status = self.CONN_FAILED\n except paramiko.ssh_exception.SSHException:\n if self.loop_counter < 3:\n time.sleep(2)\n self.loop_counter += 1\n self.connect()",
"def connect_new_ssh(child, password):\n child.sendline('yes');\n index = child.expect('password: ');\n if index == 0:\n child.sendline(password);"
] | [
"0.75185007",
"0.69287705",
"0.67997575",
"0.6714912",
"0.6696348",
"0.66660726",
"0.66601133",
"0.65854514",
"0.65743357",
"0.65551376",
"0.65543604",
"0.65050286",
"0.6448243",
"0.6445054",
"0.6422562",
"0.6422422",
"0.6381381",
"0.63182485",
"0.628655",
"0.62543863",
"0.62532955",
"0.62446153",
"0.62302935",
"0.6225046",
"0.6212445",
"0.618652",
"0.61438787",
"0.6138825",
"0.6124088",
"0.6102782"
] | 0.73891294 | 1 |
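The `_connect_to_gateway` record above tries the loaded keys first and falls back to a password. The sketch below reproduces that fallback with plain paramiko; unlike the record, it reuses one started transport for brevity, and `authenticate` is a hypothetical helper name, not part of any library.

```python
# Sketch of the key-then-password fallback, assuming a freshly created (not
# yet authenticated) paramiko.Transport; names here are illustrative only.
import paramiko


def authenticate(transport, username, password=None):
    transport.start_client()                 # negotiate before any auth call
    for key in paramiko.Agent().get_keys():  # keys held by a running SSH agent
        try:
            transport.auth_publickey(username, key)
            return True
        except paramiko.AuthenticationException:
            continue                         # wrong key, try the next one
    if password is not None:                 # last resort: password auth
        try:
            transport.auth_password(username, password)
            return True
        except paramiko.AuthenticationException:
            pass
    return False
```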
Return a list containing the ports on the local side of the TCP tunnels | def local_bind_ports(self):
self._check_is_started()
return [_server.local_port for _server in self._server_list if
_server.local_port is not None] | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_ports(self):\n return self._ports",
"def ports(self) -> List[int]:\n if self.head_port:\n return [self.head_port]\n else:\n ports = []\n for replica in self.pod_args['pods'][0]:\n if isinstance(replica.port, list):\n ports.extend(replica.port)\n else:\n ports.append(replica.port)\n return ports",
"def port_list(self):\n return self._port_list",
"def _GetPorts(self):\n ports = []\n for start, end in self.term.destination_port:\n if start == end:\n ports.append(str(start))\n else:\n ports.append('%d-%d' % (start, end))\n return ports",
"def _get_oam_dc_tcp_ports(self):\n port_list = list(firewall.OAM_DC[\"tcp\"])\n port_list.sort()\n return port_list",
"def incoming_connections_ports(self) -> Sequence[str]:\n return pulumi.get(self, \"incoming_connections_ports\")",
"def get_ports(cls):\n return cls._open_ports.copy()",
"def determine_ports():\n ports = [config('admin-port'), config('service-port')]\n return list(set(ports))",
"def list_ports(self):\n return self.ironic_client.port.list()",
"def _get_oam_common_tcp_ports(self):\n port_list = list(firewall.OAM_COMMON[\"tcp\"])\n port_list.sort()\n return port_list",
"def _get_subcloud_tcp_ports(self):\n port_list = list(firewall.SUBCLOUD[\"tcp\"].keys())\n http_service_port = self._get_http_service_port()\n if (http_service_port):\n port_list.append(http_service_port)\n port_list.sort()\n return port_list",
"def _ports(self):\n try:\n return self._graph.node[self.node_id][\"_ports\"]\n except KeyError:\n log.debug(\"No interfaces initialised for %s\" % self)\n return",
"def get_ports(self) -> tuple:\n raise NotImplementedError",
"def trafficInboundPorts(self):\n #\n # TODO: Reimplement this if possible\n #\n return client.trafficInboundPorts(self)",
"def list_ports(bridge):\n cp = _run('ovs-vsctl', 'list-ports', bridge)\n return cp.stdout.splitlines()",
"def list_occupied_adb_ports():\n out = AdbProxy().forward('--list')\n clean_lines = str(out, 'utf-8').strip().split('\\n')\n used_ports = []\n for line in clean_lines:\n tokens = line.split(' tcp:')\n if len(tokens) != 3:\n continue\n used_ports.append(int(tokens[1]))\n return used_ports",
"def open_ports(host):\n tcp = nm[host].get(\"tcp\", {})\n\n def ok_port(port):\n return tcp[port][\"state\"] == \"open\" and port not in IGNORE_PORTS\n\n return list(filter(ok_port, tcp))",
"def list_port(self):\n _url = \"http://\" + self.host_ip + \":9696/v2.0/ports.json\"\n _headers = {'Content-type': 'application/json',\n 'x-auth-token': self.project_info[\"token_project\"]}\n _body = None\n\n response = self.request(\"GET\", _url, _headers, _body)\n\n if response is None:\n LOG_OBJ.error(\"No response from Server, while listing ports.\")\n return response\n\n if response.status not in [200, 201, 202, 203, 204]:\n LOG_OBJ.error(\"Get port list Failed with status %s\"\n % response.status)\n return response.status\n\n output = json.loads(response.data)\n LOG_OBJ.info(\"Port List : %s \" % output)\n return output[\"ports\"]",
"def get_ports(self) -> tuple:\n return self._current_dev_manager.get_ports()",
"def make_port_list(ssh, https, port):\n\n ports = []\n\n if ssh:\n ports.append(22)\n if https:\n ports.append(443)\n ports.append(80)\n if port is not None:\n ports.append(port)\n\n return ports",
"def findOccupiedPorts():\n netstatOutput = cactus_call(parameters=[\"netstat\", \"-tuplen\"], check_output=True)\n ports = set()\n for line in netstatOutput.split(\"\\n\"):\n fields = line.split()\n if len(fields) != 9:\n # Header or other garbage line\n continue\n port = int(fields[3].split(':')[-1])\n ports.add(port)\n logger.debug('Detected ports in use: %s' % repr(ports))\n return ports",
"def list_ports(state):\n\tstate.report()",
"def ports(self): # type: () -> t.Dict[str, t.List[t.Dict[str, str]]]\n return self.network_settings['Ports']",
"def _get_systemcontroller_tcp_ports(self):\n port_list = list(firewall.SYSTEMCONTROLLER[\"tcp\"].keys())\n http_service_port = self._get_http_service_port()\n if (http_service_port):\n port_list.append(http_service_port)\n port_list.sort()\n return port_list",
"def net_get_connections ():\n\tdata = commands.getoutput(\"netstat -n | grep -c tcp\")\n\t\n\treturn data",
"def exposed_ports(self) -> list[\"Port\"]:\n _args: list[Arg] = []\n _ctx = self._select(\"exposedPorts\", _args)\n _ctx = Port(_ctx)._select_multiple(\n _description=\"description\",\n _port=\"port\",\n _protocol=\"protocol\",\n )\n return _ctx.execute_sync(list[Port])",
"def netstat(self):\n \n command = 'netstat -utn'\n lines = subprocess.check_output(command, shell=True).split('\\n')[2:]\n \n\tports = {'tcp':[], 'udp':[]}\n\tfor line in lines:\n\t if len(line) < 4:\n\t continue\n\t\t\n\t words = line.split()\n\t port = int(words[3].split(':')[-1])\n\t lst = ports[words[0]]\n\t if port in lst:\n\t continue\n\t lst.append(port)\n\t \n\tports['tcp'].sort()\n\tports['udp'].sort()\n\t\n\treturn ports",
"def remote_getPort(self):\r\n return int(self._fwdPort)",
"def get_vulnerable_ports(self):\n self.__get_vulnerable_ports(modules.__path__[0])\n return self.__vulnerable_ports",
"def get_ports():\r\n ports = serial.tools.list_ports.comports()\r\n return ports"
] | [
"0.71080494",
"0.70984805",
"0.70850456",
"0.706947",
"0.69653106",
"0.6954283",
"0.68551344",
"0.68414927",
"0.6839134",
"0.6836259",
"0.67883843",
"0.67638737",
"0.6759709",
"0.6700629",
"0.66362476",
"0.6629589",
"0.66145587",
"0.659666",
"0.65552",
"0.6541116",
"0.6525449",
"0.6517429",
"0.65093553",
"0.6498638",
"0.6487645",
"0.6476328",
"0.64664656",
"0.6459621",
"0.644355",
"0.6313198"
] | 0.7267724 | 0 |
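`local_bind_ports` is most useful when the local side is bound to port 0, letting the OS pick free ports that are only known after `start()`. A usage sketch follows, assuming the sshtunnel package; the gateway, key path, and remote host names are placeholders.

```python
# Usage sketch for local_bind_ports with OS-assigned local ports; all host
# names and the key path are placeholders.
from sshtunnel import SSHTunnelForwarder

server = SSHTunnelForwarder(
    "gateway.example.com",
    ssh_username="user",
    ssh_pkey="~/.ssh/id_rsa",
    remote_bind_addresses=[("db1", 5432), ("db2", 5432)],
    local_bind_addresses=[("127.0.0.1", 0), ("127.0.0.1", 0)],  # port 0 = any
)
server.start()
print(server.local_bind_ports)  # e.g. [53817, 53818], one per tunnel
server.stop()
```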
Return a dictionary containing the active local-to-remote tunnel_bindings | def tunnel_bindings(self):
return dict((_server.remote_address, _server.local_address) for
_server in self._server_list if
self.tunnel_is_up[_server.local_address]) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def remote_connections(self):\r\n\r\n self.remote = self.newest_connections[~((self.newest_connections['remote_address'] == '0.0.0.0') | (self.newest_connections['remote_address'] == '127.0.0.1'))]\r\n return self.remote",
"def bindings(self):\n return self.__bindings",
"def tunnel(self):\n return self._tunnel",
"def getConnections():\n\n c = psutil.net_connections()\n connects = {}\n\n count = 0\n for connection in c:\n conn = {}\n status = connection.status\n if status == 'ESTABLISHED' or connection.status == 'CLOSE_WAIT':\n conn['status'] = status\n conn['local'] = connection.laddr[0] + ':' + str(connection.laddr[1])\n conn['remote'] = connection.raddr[0] + ':' + str(connection.raddr[1])\n connects[count] = conn\n count += 1\n elif status == 'LISTEN':\n conn['status'] = status\n conn['local'] = connection.laddr[0] + ':' + str(connection.laddr[1])\n connects[count] = conn\n count += 1\n else:\n pass\n\n return connects",
"def get_active_bindings(self):\n\n return list(self._active_bindings.values())",
"def ssh_tunnel(self):\n return self._ssh_tunnel",
"def get_bindable_vars(self):\n return (self.linkable_vars + self.local_vars.keys() +\n self.external_vars.keys())",
"def get_bindable_vars(self):\n return self.local_vars.keys() + self.parent.get_bindable_vars()",
"def local_bind_hosts(self):\n self._check_is_started()\n return [_server.local_host for _server in self._server_list if\n _server.local_host is not None]",
"def local_bind_ports(self):\n self._check_is_started()\n return [_server.local_port for _server in self._server_list if\n _server.local_port is not None]",
"def remotes(self) -> GitSyncRemoteDict:\n remotes = {}\n\n cmd = self.cmd.remote.run()\n ret: filter[str] = filter(None, cmd.split(\"\\n\"))\n\n for remote_name in ret:\n remote = self.remote(remote_name)\n if remote is not None:\n remotes[remote_name] = remote\n return remotes",
"def __getLocalAndRemoteMachineNames(self):\n hostNameMapping = {}\n ## collect the qualified hostnames for each remote node\n for nodeId in list(set(self.runInfoDict['Nodes'])):\n hostNameMapping[nodeId.strip()] = socket.gethostbyname(nodeId.strip())\n self.raiseADebug('Host \"'+nodeId.strip()+'\" identified with IP: ', hostNameMapping[nodeId.strip()])\n\n return hostNameMapping",
"def local_bind_addresses(self):\n self._check_is_started()\n return [_server.local_address for _server in self._server_list]",
"def list(self):\n path = 'orgProvisioning/ipGreTunnelInfo'\n return self._session.get(path)",
"def ssh_list_connections(cls):\n for name in cls._ssh_connections.keys():\n print (name)",
"def list_network_profile_bindings(self, **params):\r\n return self.get(self.network_profile_bindings_path, params=params)",
"def connecting_vars(self):\n return irdvars_for_ext(self.ext)",
"def ssh_get_connections(cls):\n return cls._ssh_connections",
"def bokeh_tunnel(self) -> TunnelInternal:\n return self._bokeh_tunnel",
"def connected_network_devices(self):\n connected = {'ip': self.ip, 'port': self.port}\n return connected",
"async def bindip_choices(self):\n return {\n d['address']: d['address'] for d in await self.middleware.call(\n 'interface.ip_in_use', {'static': True, 'any': True}\n )\n }",
"def connecting_vars(self):\n vars = {}\n for prod in self.products:\n vars.update(prod.connecting_vars())\n\n return vars",
"def get_binding_dict(self):\n dict_format = {\"interface_name\": self.name, \"connection\": []}\n for binding in self.bindings:\n _, dname = binding.get_name()\n midx = binding.get_owner_idx()\n dict_format[\"connection\"].append({\"mod_idx\": midx, \"interface_name\": dname})\n\n self.check_binding_dict(dict_format)\n return dict_format",
"def _find_relationship_env_vars(self) -> dict:\n env = {}\n for rel in self.connections.by_type(): # only one per connection type\n env.update(rel.merge_props(find_env_vars, True))\n\n return env",
"def get_on_tunnel(self):\n return self._is_on_tunnel",
"def remote_route(self):\r\n proxy = self.environ.get('HTTP_X_FORWARDED_FOR')\r\n if proxy: return [ip.strip() for ip in proxy.split(',')]\r\n remote = self.environ.get('REMOTE_ADDR')\r\n return [remote] if remote else []",
"def remote_properties(self):\n return dat2obj(pn_connection_remote_properties(self._impl))",
"def neighbors(self, *args, **kwargs):\n return {\n 'neighbors': [\n {'ip': ip, 'port': port}\n for ip, port in self.neighbors\n ],\n }",
"def ports(self): # type: () -> t.Dict[str, t.List[t.Dict[str, str]]]\n return self.network_settings['Ports']",
"def _create_tunnels(self):\n if not self.is_active:\n try:\n self._connect_to_gateway()\n except socket.gaierror: # raised by paramiko.Transport\n msg = 'Could not resolve IP address for {0}, aborting!' \\\n .format(self.ssh_host)\n self.logger.error(msg)\n return\n except (paramiko.SSHException, socket.error) as e:\n template = 'Could not connect to gateway {0}:{1} : {2}'\n msg = template.format(self.ssh_host, self.ssh_port, e.args[0])\n self.logger.error(msg)\n return\n for (rem, loc) in zip(self._remote_binds, self._local_binds):\n try:\n self._make_ssh_forward_server(rem, loc)\n except BaseSSHTunnelForwarderError as e:\n msg = 'Problem setting SSH Forwarder up: {0}'.format(e.value)\n self.logger.error(msg)"
] | [
"0.62615836",
"0.6211017",
"0.61059785",
"0.60165507",
"0.59712064",
"0.5943763",
"0.5931612",
"0.5906434",
"0.5779625",
"0.57694745",
"0.5747807",
"0.56731176",
"0.5661168",
"0.5645949",
"0.5590057",
"0.5546492",
"0.55099237",
"0.5508082",
"0.5494753",
"0.5438664",
"0.5422935",
"0.54019153",
"0.5401885",
"0.5379651",
"0.53527534",
"0.5332484",
"0.5314523",
"0.5302066",
"0.52998906",
"0.5283875"
] | 0.84435135 | 0 |
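Because `tunnel_bindings` only includes tunnels whose status check passed, it is a convenient post-`start()` summary of what is actually reachable. A usage sketch, again assuming the sshtunnel package with placeholder host names:

```python
# Usage sketch for tunnel_bindings: map each remote (host, port) to the
# local address serving it; gateway and credentials are placeholders.
from sshtunnel import SSHTunnelForwarder

server = SSHTunnelForwarder(
    "gateway.example.com",
    ssh_username="user",
    ssh_password="secret",
    remote_bind_address=("intranet-db", 5432),
)
server.start()
for remote, local in server.tunnel_bindings.items():
    print("{0}:{1} is reachable at {2}:{3}".format(*(remote + local)))
server.stop()
```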
Define the type of data expected for remote and local bind address lists. Returns a tuple (ip_address, port) whose elements are (str, int) | def _bindlist(input_str):
try:
ip_port = input_str.split(':')
if len(ip_port) == 1:
_ip = ip_port[0]
_port = None
else:
(_ip, _port) = ip_port
if not _ip and not _port:
raise AssertionError
elif not _port:
_port = '22' # default port if not given
return _ip, int(_port)
except ValueError:
raise argparse.ArgumentTypeError(
'Address tuple must be of type IP_ADDRESS:PORT'
)
except AssertionError:
raise argparse.ArgumentTypeError("Both IP:PORT can't be missing!") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def address_tuple(self):\n\n return (self.address, int(self.port))",
"def localhost_address_tuple(self):\n\n return (\"127.0.0.1\", int(self.port))",
"def address(self) -> tuple[str, int]:",
"def get_ip_port_tshark(str_data):\n separator = str_data.rindex(\":\")\n ip = str_data[:separator]\n port = str_data[separator + 1:]\n return ip, port",
"def get_address(self):\n \n return tuple('localhost',self._port)",
"def get_address(self):\n \n return tuple('localhost',self._port)",
"def get_network_info() -> tuple:\n # Getting LAN IP adress\n # A big part of the code here has been extracted from the question of this man.\n # https://stackoverflow.com/questions/41625274/extracting-subnet-mask-from-my-computer-python\n s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)\n s.connect((\"8.8.8.8\", 80))\n lan_ip = s.getsockname()[0]\n s.close()\n\n # Checking network interfaces for a convincing Gateway and Mask\n for i in netifaces.interfaces():\n try:\n\n pc_ip = netifaces.ifaddresses(i)[netifaces.AF_INET][0]['addr']\n mask = netifaces.ifaddresses(i)[netifaces.AF_INET][0]['netmask']\n gateway = netifaces.gateways()['default'][netifaces.AF_INET][0]\n\n if pc_ip == lan_ip:\n break\n except:\n pass\n\n else:\n # mask and gateway not found, using default values\n mask = DEFAULT_NETMASK\n gateway = str(lan_ip)\n\n # If invalid netmask we put the default netmask\n if mask == '255.255.255.255': mask = DEFAULT_NETMASK\n\n # Now we need to set to zero the host ports.\n splitted_ip = gateway.split('.')\n splitted_mask = mask.split('.')\n\n for i in range(4):\n if splitted_mask[i] == '0':\n splitted_ip[i] = '0'\n elif splitted_mask[i] != '255':\n num = bin(int(splitted_ip[i]))[2:]\n pat = bin(int(splitted_mask[i]))[2:]\n\n # Adding 0s if needed\n while len(num) < 8:\n num = '0' + num\n while len(pat) < 8:\n pat = '0' + pat\n\n for i in range(8):\n if pat[i] == '0':\n num = num[:i] + '0' + num[i+1:]\n\n splitted_ip[i] = str(int(num, 2))\n\n\n correct_ip = '.'.join(splitted_ip)\n return correct_ip, mask",
"def getData(self):\n if len(self.data)==2+4:\n return (socket.AF_INET,socket.inet_ntop(socket.AF_INET,self.data[2:]))\n elif len(self.data)==2+16:\n return (socket.AF_INET6,socket.inet_ntop(socket.AF_INET6,self.data[2:]))\n else:\n raise InvalidAddressTypeException(self)",
"def discovery_address_tuple(self):\n\n return (self.discovery_address, int(self.port))",
"def getaddrinfo(host: str, port: int) -> List:\n ...",
"def get_ports(self) -> tuple:\n raise NotImplementedError",
"def getAddress(self):\n return self.hostname, self.port",
"def get_endpoints(self):\n if self.type not in ['IPv4', 'IPv6']:\n return (None, None)\n\n local = None\n remote = None\n\n split_name = self.name.split('->')\n local = split_name[0]\n\n # Turn \"localhost:ipp (LISTEN)\" into \"ipp\" and nothing else\n local = local.split(' ')[0]\n if '*' in local:\n # We can't match against this endpoint\n local = None\n\n if len(split_name) == 2:\n remote = split_name[1]\n\n return (local, remote)",
"def _check_ip_port_split(self):\n if self._type == \"A\":\n formatted_value = self._value.split(':')\n self._ip = formatted_value[0]\n self._port = int(formatted_value[1])",
"def get_all_ip():\n sql = sqlite3.connect('data.db')\n\n cursor = sql.cursor()\n\n get_ip = \"\"\"SELECT ip FROM Status\"\"\"\n\n ip = cursor.execute(get_ip).fetchall()\n\n get_protocol = \"\"\"SELECT protocol FROM Status\"\"\"\n\n protocol = cursor.execute(get_protocol).fetchall()\n\n get_port = \"\"\"SELECT port FROM Status\"\"\"\n\n port = cursor.execute(get_port).fetchall()\n\n cursor.close()\n\n return zip(ip, protocol, port)",
"def ipaddr_to_tuple_of_bytes(value):\n if not isinstance(value, basestring):\n Convert.logger.error(\n \"Failed - expect string value '%s', received type '%s'\",\n value, type(value))\n return None\n\n error_v4, error_v6 = '', ''\n\n # check IPv4\n try:\n new_value = tuple([int(item) for item in value.split('.')])\n if len(new_value) != DataDescription.B_SEQ_IPv4_LEN:\n Convert.logger.error(\n \"Failed, length error, string value is '{}', \"\n \"length: '{}'\".format(value, len(new_value)))\n raise ValueError('IPv4 invalid value')\n except Exception, error_v4:\n new_value = None # could be IPv6\n\n # check IPv6\n if None is new_value:\n try:\n ipv6_str = hexlify(socket.inet_pton(socket.AF_INET6, value))\n new_value = tuple([int(ipv6_str[i:i + 2], 16)\n for i in range(0, len(ipv6_str), 2)])\n except Exception, error_v6:\n new_value = None # neither IPv6\n\n if None is new_value:\n Convert.logger.warn(\n \"Failed to covert value '%s', ipv4[%s] ipv6[%s]\",\n value, error_v4, error_v6)\n return None\n for item in new_value:\n if not (0 <= item <= 255):\n Convert.logger.warn(\"Failed - tuple item '%s' expect range 0 \"\n \"<= item <= 255 in value '%s' \",\n str, value)\n return None\n\n return new_value",
"def patch_ports(cls, pair):\n if pair[0] in ('remote_port', 'local_port'):\n return pair[0], pair[1] and int(pair[1]) or None\n return pair",
"def get_ips_and_ports(eth, ip, tcp):\n # For IP addresses, need to convert the packet IP address to the standard one\n if type(eth.data) == dpkt.ip.IP:\n daddr = socket.inet_ntop(socket.AF_INET, ip.dst)\n saddr = socket.inet_ntop(socket.AF_INET, ip.src)\n else: # dpkt.ip6.IP6\n daddr = socket.inet_ntop(socket.AF_INET6, ip.dst)\n saddr = socket.inet_ntop(socket.AF_INET6, ip.src)\n\n # Ports encoded as strings in connections, so let convert those integers\n dport = str(tcp.dport)\n sport = str(tcp.sport)\n\n return saddr, daddr, sport, dport",
"def get_unused_port_and_socket():\n s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n s.bind(('localhost', 0))\n addr, port = s.getsockname()\n return (port, s)",
"def netstat(self):\n \n command = 'netstat -utn'\n lines = subprocess.check_output(command, shell=True).split('\\n')[2:]\n \n\tports = {'tcp':[], 'udp':[]}\n\tfor line in lines:\n\t if len(line) < 4:\n\t continue\n\t\t\n\t words = line.split()\n\t port = int(words[3].split(':')[-1])\n\t lst = ports[words[0]]\n\t if port in lst:\n\t continue\n\t lst.append(port)\n\t \n\tports['tcp'].sort()\n\tports['udp'].sort()\n\t\n\treturn ports",
"async def bindip_choices(self):\n return {\n d['address']: d['address'] for d in await self.middleware.call(\n 'interface.ip_in_use', {'static': True, 'any': True}\n )\n }",
"def _get_remote_socket_descriptions(self, host, port):\n return self._socket_api.getaddrinfo(host, port,\n self._socket_api.AF_UNSPEC,\n self._socket_api.SOCK_STREAM)",
"def get_network_interfaces_info(endpoint: dict) -> tuple[list, list]:\n ips_list = []\n mac_address_list = []\n for data in endpoint.get('networkInterfaces', []):\n ips_list.append(data.get('ipv4'))\n mac_address_list.append(data.get('macAddress'))\n\n return ips_list, mac_address_list",
"def validate(net_string):\n\n host, port = net_string.split(':')\n validate_host(host)\n validate_port(port)\n return (host, port)",
"def ipaddrs( host ):\n return socket.gethostbyaddr(host)[2][0]",
"def parse_address(address, sanity=True):\n address = address.split(':')\n address, port = ':'.join(address[:-1]), address[-1]\n\n guessed_type = 4\n if address.startswith('['):\n address = address[1:]\n guessed_type = 6\n if address.endswith(']') or (sanity and guessed_type == 6):\n if sanity:\n assert address.endswith(']')\n address = address[:-1]\n guessed_type = 6\n if address.count(':') > 3:\n if sanity:\n assert guessed_type == 6\n guessed_type = 6\n\n return address, int(port), guessed_type",
"def get_L4_info(self, tcp_pkt, udp_pkt):\r\n ip_proto = None\r\n L4_port = []\r\n Flag = None\r\n if tcp_pkt:\r\n ip_proto = 6\r\n if tcp_pkt.src_port and tcp_pkt.dst_port:\r\n L4_port = [tcp_pkt.src_port,tcp_pkt.dst_port]\r\n Flag = True\r\n else:\r\n Flag=False\r\n\r\n elif udp_pkt:\r\n ip_proto = 17\r\n if udp_pkt.src_port and udp_pkt.dst_port:\r\n L4_port = [udp_pkt.src_port,udp_pkt.dst_port]\r\n Flag = True\r\n else:\r\n Flag=False\r\n\r\n else:\r\n pass\r\n return (ip_proto, L4_port, Flag)",
"def bind(self, address: Tuple[str, int]) -> None:\n ...",
"def make_data_port(self):\n err = None\n sock = None\n for res in socket.getaddrinfo(None, 0, socket.AF_INET, socket.SOCK_STREAM, 0, socket.AI_PASSIVE):\n af, socktype, proto, canonname, sa = res\n try:\n sock = socket.socket(af, socktype, proto)\n sock.bind(sa)\n except OSError as _:\n err = _\n if sock:\n sock.close()\n sock = None\n continue\n break\n if sock is None:\n if err is not None:\n raise err\n else:\n raise OSError(\"getaddrinfo returns an empty list\")\n sock.listen(1)\n port = sock.getsockname()[1]\n host = self.sock.getsockname()[0]\n response = self._send_port_command(host, port)\n return sock, response",
"def parse_port_req(self, sock):\n try:\n host_ip = self.s.getsockname()[0] # Get local IPv4 addr of client.\n host_port = sock.getsockname()[1] # Get opened port of socket.\n # PORT requires parameters split up as:\n # octet1,octet2,octet3,octet4,p1,p2\n list_csv_ip = host_ip.split('.') # Split octets into a list.\n port_params = \"\"\n for octet in list_csv_ip:\n port_params += octet + \",\"\n # Parse port into PORT command's expected parameter.\n p1 = str((host_port - (host_port % 256)) / 256)\n p2 = str(host_port % 256)\n port_params += p1 + \",\" + p2\n except:\n return \"\", \"\", \"\"\n return port_params, host_ip, host_port"
] | [
"0.65942574",
"0.6388307",
"0.62693864",
"0.6202907",
"0.61671025",
"0.61671025",
"0.6098779",
"0.60970616",
"0.60473275",
"0.6044083",
"0.5978591",
"0.59327024",
"0.59112835",
"0.5846814",
"0.5834561",
"0.5809299",
"0.57478064",
"0.56925285",
"0.56547385",
"0.5647703",
"0.5636716",
"0.56300384",
"0.5618286",
"0.5607802",
"0.5529383",
"0.55198467",
"0.551543",
"0.55028284",
"0.54980886",
"0.5476816"
] | 0.7255948 | 0 |
Pass input arguments to open_tunnel | def _cli_main(args=None, **extras):
arguments = _parse_arguments(args)
# Remove all "None" input values
_remove_none_values(arguments)
verbosity = min(arguments.pop('verbose'), 4)
levels = [logging.ERROR,
logging.WARNING,
logging.INFO,
logging.DEBUG,
TRACE_LEVEL]
arguments.setdefault('debug_level', levels[verbosity])
# do this while supporting py27/py34 instead of merging dicts
for (extra, value) in extras.items():
arguments.setdefault(extra, value)
with open_tunnel(**arguments) as tunnel:
if tunnel.is_alive:
input_('''
Press <Ctrl-C> or <Enter> to stop!
''') | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def testExtraArgsSSHTunnel(self):\n fake_ip_addr = \"1.1.1.1\"\n fake_rsa_key_file = \"/tmp/rsa_file\"\n fake_target_vnc_port = 8888\n target_adb_port = 9999\n ssh_user = \"fake_user\"\n fake_port = 12345\n self.Patch(utils, \"PickFreePort\", return_value=fake_port)\n self.Patch(utils, \"_ExecuteCommand\")\n self.Patch(subprocess, \"check_call\", return_value=True)\n extra_args_ssh_tunnel = \"-o command='shell %s %h' -o command1='ls -la'\"\n utils.AutoConnect(ip_addr=fake_ip_addr,\n rsa_key_file=fake_rsa_key_file,\n target_vnc_port=fake_target_vnc_port,\n target_adb_port=target_adb_port,\n ssh_user=ssh_user,\n client_adb_port=fake_port,\n extra_args_ssh_tunnel=extra_args_ssh_tunnel)\n args_list = [\"-i\", \"/tmp/rsa_file\",\n \"-o\", \"UserKnownHostsFile=/dev/null\",\n \"-o\", \"StrictHostKeyChecking=no\",\n \"-L\", \"12345:127.0.0.1:9999\",\n \"-L\", \"12345:127.0.0.1:8888\",\n \"-N\", \"-f\", \"-l\", \"fake_user\", \"1.1.1.1\",\n \"-o\", \"command=shell %s %h\",\n \"-o\", \"command1=ls -la\"]\n first_call_args = utils._ExecuteCommand.call_args_list[0][0]\n self.assertEqual(first_call_args[1], args_list)",
"def cmd_port(args):",
"def ssh_reverse_tunnel(args, base_url, api_key, prefix=None):\n local_port = 8000\n remote_port = local_port\n remote_host = 'git@'\n shell_command(['ssh', '-fnNT',\n '-R', '*:%(remote_port)d:localhost:%(local_port)d' % {\n 'local_port': local_port, 'remote_port': remote_port},\n remote_host])",
"def _parse_arguments(args=None):\n parser = argparse.ArgumentParser(\n description='Pure python ssh tunnel utils\\n'\n 'Version {0}'.format(__version__),\n formatter_class=argparse.RawTextHelpFormatter\n )\n\n parser.add_argument(\n 'ssh_address',\n type=str,\n help='SSH server IP address (GW for SSH tunnels)\\n'\n 'set with \"-- ssh_address\" if immediately after '\n '-R or -L'\n )\n\n parser.add_argument(\n '-U', '--username',\n type=str,\n dest='ssh_username',\n help='SSH server account username'\n )\n\n parser.add_argument(\n '-p', '--server_port',\n type=int,\n dest='ssh_port',\n default=22,\n help='SSH server TCP port (default: 22)'\n )\n\n parser.add_argument(\n '-P', '--password',\n type=str,\n dest='ssh_password',\n help='SSH server account password'\n )\n\n parser.add_argument(\n '-R', '--remote_bind_address',\n type=_bindlist,\n nargs='+',\n default=[],\n metavar='IP:PORT',\n required=True,\n dest='remote_bind_addresses',\n help='Remote bind address sequence: '\n 'ip_1:port_1 ip_2:port_2 ... ip_n:port_n\\n'\n 'Equivalent to ssh -Lxxxx:IP_ADDRESS:PORT\\n'\n 'If port is omitted, defaults to 22.\\n'\n 'Example: -R 10.10.10.10: 10.10.10.10:5900'\n )\n\n parser.add_argument(\n '-L', '--local_bind_address',\n type=_bindlist,\n nargs='*',\n dest='local_bind_addresses',\n metavar='IP:PORT',\n help='Local bind address sequence: '\n 'ip_1:port_1 ip_2:port_2 ... ip_n:port_n\\n'\n 'Elements may also be valid UNIX socket domains: \\n'\n '/tmp/foo.sock /tmp/bar.sock ... /tmp/baz.sock\\n'\n 'Equivalent to ssh -LPORT:xxxxxxxxx:xxxx, '\n 'being the local IP address optional.\\n'\n 'By default it will listen in all interfaces '\n '(0.0.0.0) and choose a random port.\\n'\n 'Example: -L :40000'\n )\n\n parser.add_argument(\n '-k', '--ssh_host_key',\n type=str,\n help=\"Gateway's host key\"\n )\n\n parser.add_argument(\n '-K', '--private_key_file',\n dest='ssh_private_key',\n metavar='KEY_FILE',\n type=str,\n help='RSA/DSS/ECDSA private key file'\n )\n\n parser.add_argument(\n '-S', '--private_key_password',\n dest='ssh_private_key_password',\n metavar='KEY_PASSWORD',\n type=str,\n help='RSA/DSS/ECDSA private key password'\n )\n\n parser.add_argument(\n '-t', '--threaded',\n action='store_true',\n help='Allow concurrent connections to each tunnel'\n )\n\n parser.add_argument(\n '-v', '--verbose',\n action='count',\n default=0,\n help='Increase output verbosity (default: {0})'.format(\n logging.getLevelName(DEFAULT_LOGLEVEL)\n )\n )\n\n parser.add_argument(\n '-V', '--version',\n action='version',\n version='%(prog)s {version}'.format(version=__version__),\n help='Show version number and quit'\n )\n\n parser.add_argument(\n '-x', '--proxy',\n type=_bindlist,\n dest='ssh_proxy',\n metavar='IP:PORT',\n help='IP and port of SSH proxy to destination'\n )\n\n parser.add_argument(\n '-c', '--config',\n type=str,\n default=SSH_CONFIG_FILE,\n dest='ssh_config_file',\n help='SSH configuration file, defaults to {0}'.format(SSH_CONFIG_FILE)\n )\n\n parser.add_argument(\n '-z', '--compress',\n action='store_true',\n dest='compression',\n help='Request server for compression over SSH transport'\n )\n\n parser.add_argument(\n '-n', '--noagent',\n action='store_false',\n dest='allow_agent',\n help='Disable looking for keys from an SSH agent'\n )\n\n parser.add_argument(\n '-d', '--host_pkey_directories',\n nargs='*',\n dest='host_pkey_directories',\n metavar='FOLDER',\n help='List of directories where SSH pkeys (in the format `id_*`) '\n 'may be found'\n )\n return vars(parser.parse_args(args))",
"def handle_arguments(cls, *args, **kwargs):\n connection_info = misc_utils.ConnectionInfo(\n kwargs[\"dictionary\"], kwargs[\"ip_address\"], kwargs[\"port\"]\n )\n search_info = misc_utils.SearchInfo(\n kwargs[\"is_printing_list\"],\n kwargs[\"ids\"],\n kwargs[\"components\"],\n kwargs[\"search\"],\n kwargs[\"json\"],\n )\n\n cls._execute_command(\n connection_info, search_info, kwargs[\"command_name\"], kwargs[\"arguments\"]\n )",
"def testEstablishWebRTCSshTunnel(self):\n fake_ip_addr = \"1.1.1.1\"\n fake_rsa_key_file = \"/tmp/rsa_file\"\n ssh_user = \"fake_user\"\n self.Patch(utils, \"ReleasePort\")\n self.Patch(utils, \"_ExecuteCommand\")\n self.Patch(subprocess, \"check_call\", return_value=True)\n extra_args_ssh_tunnel = \"-o command='shell %s %h' -o command1='ls -la'\"\n utils.EstablishWebRTCSshTunnel(\n ip_addr=fake_ip_addr, rsa_key_file=fake_rsa_key_file,\n ssh_user=ssh_user, extra_args_ssh_tunnel=None)\n args_list = [\"-i\", \"/tmp/rsa_file\",\n \"-o\", \"UserKnownHostsFile=/dev/null\",\n \"-o\", \"StrictHostKeyChecking=no\",\n \"-L\", \"8443:127.0.0.1:8443\",\n \"-L\", \"15550:127.0.0.1:15550\",\n \"-L\", \"15551:127.0.0.1:15551\",\n \"-N\", \"-f\", \"-l\", \"fake_user\", \"1.1.1.1\"]\n first_call_args = utils._ExecuteCommand.call_args_list[0][0]\n self.assertEqual(first_call_args[1], args_list)\n\n extra_args_ssh_tunnel = \"-o command='shell %s %h'\"\n utils.EstablishWebRTCSshTunnel(\n ip_addr=fake_ip_addr, rsa_key_file=fake_rsa_key_file,\n ssh_user=ssh_user, extra_args_ssh_tunnel=extra_args_ssh_tunnel)\n args_list_with_extra_args = [\"-i\", \"/tmp/rsa_file\",\n \"-o\", \"UserKnownHostsFile=/dev/null\",\n \"-o\", \"StrictHostKeyChecking=no\",\n \"-L\", \"8443:127.0.0.1:8443\",\n \"-L\", \"15550:127.0.0.1:15550\",\n \"-L\", \"15551:127.0.0.1:15551\",\n \"-N\", \"-f\", \"-l\", \"fake_user\", \"1.1.1.1\",\n \"-o\", \"command=shell %s %h\"]\n first_call_args = utils._ExecuteCommand.call_args_list[1][0]\n self.assertEqual(first_call_args[1], args_list_with_extra_args)",
"def _tunnel(port):\n func_args = locals()\n conf = Bunch(**func_args)\n \n # Loads default config if there is one\n # and update the conf object with data\n # from it, but function args have precedence\n fname = os.path.expanduser(\"~/.nbx/aws.json\")\n fname = Path(fname)\n if fname.is_file(): \n stored = load(fname)\n for k,v in stored.items():\n if k not in conf: conf[k] = v\n \n # Check if we got everything we need to\n # connect to instance\n fail = False\n for k in [\"ip\", \"user\", \"key\", \"port\"]:\n if conf[k] is None:\n fail = True\n print(f\"Please provide --{k}\")\n \n if fail: return\n \n # We could write some environment vars\n # but we can't source them from here\n #\n # fname = os.path.expanduser(\"~/.nbx/.bash_aws\")\n # string = f\"export xaws={conf.user}@{conf.ip};\\n\"\n # dump(string, fname, format=\".txt\")\n\n # Connect to server and forward local port 8888 to remote port 8888\n # We can now connect to a remote jupyter notebook server via `http://localhost:8888/`\n cmd = f\"ssh -i {conf.key} -L {conf.port}:localhost:{conf.port} {conf.user}@{conf.ip}\"\n os.system(f'bash -c \\\"{cmd}\\\"')",
"def pub_tunnel(args, project=\"\", base_url=\"\", api_key=\"\"):\n project, base_url, api_key, updated = get_project_config(\n project=project, base_url=base_url, api_key=api_key)\n if updated:\n save_config()\n ssh_reverse_tunnel(args, base_url, api_key, prefix=project)",
"def main():\n parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter, description=\"ICMP Tunnel, send TCP over ICMP\")\n\n # Add server or client\n parser.add_argument(\"type\", choices=[\"client\", \"server\"], help=\"client - Run the client proxy (All flags needed)\\nserver - Run the server proxy (No flags needed)\")\n parser.add_argument(\"-p\", \"--proxy-host\", help=\"IP of the server tunnel\")\n parser.add_argument(\"-lh\", \"--local-host\", help=\"Local IP for incoming TCP connections\")\n parser.add_argument(\"-lp\", \"--local-port\", type=int, help=\"Local port for incoming TCP connections\")\n parser.add_argument(\"-dh\", \"--destination-host\", help=\"Remote IP to send TCP connection to\")\n parser.add_argument(\"-dp\", \"--destination-port\", type=int, help=\"Remote port to send TCP connection to\")\n parser.add_argument(\"-v\", \"--verbose\", default=False, action=\"store_true\", help=\"Print debug messages\")\n args = parser.parse_args()\n\n # Set the logger verbosity\n logger.SetVerbosity(args.verbose)\n\n if args.type == \"server\":\n logger.Log(\"INFO\", \"Starting server\")\n Server().Run()\n\n else:\n # Make sure we have all params\n if args.proxy_host is None or \\\n args.local_host is None or \\\n args.local_port is None or \\\n args.destination_host is None or \\\n args.destination_port is None:\n parser.error(\"client requires proxy,local and destination flags\")\n\n logger.Log(\"INFO\", \"Starting client\")\n ClientProxy(args.proxy_host, args.local_host, args.local_port, args.destination_host, args.destination_port).Run()",
"def tunnel(local_port, remote_port):\n env.tunnel_local_port = local_port\n env.tunnel_remote_port = remote_port\n local(' ssh -i %(key_filename)s -L %(tunnel_local_port)s:localhost:%(tunnel_remote_port)s %(user)s@%(host_string)s -N' % env)",
"def __init__(self, *args, **kvargs):\n self.proxy_host = kvargs.get('proxy_host')\n self.proxy_user = kvargs.get('proxy_user')\n self.proxy_password = kvargs.get('proxy_password')\n self.proxy_port = kvargs.get('proxy_port')\n self.proxy_ssh_key_file = kvargs.get('proxy_ssh_key')\n self.proxy_connection = False\n self.host = kvargs.get('host')\n self.user = kvargs.get('user')\n self.password = kvargs.get('password')\n self.port = kvargs.get('port')\n self.dest_connection = False\n\n try:\n # Add host key policy\n if self.proxy_port is None:\n self.proxy_port = 22\n self.transport = paramiko.Transport((self.proxy_host, self.proxy_port))\n self.transport.start_client()\n if self.proxy_ssh_key_file:\n self.proxy_ssh_key = paramiko.RSAKey.from_private_key_file(self.proxy_ssh_key_file)\n conn_result = self.transport.auth_publickey(username=self.proxy_user, key=self.proxy_ssh_key)\n else:\n conn_result = self.transport.auth_password(username=self.proxy_user, password=self.proxy_password)\n if len(conn_result) == 0:\n self.proxy_connection = True\n else:\n logging.error('Unable to connect to proxy host. Authentication failed.')\n raise TobyException('Unable to connect to proxy host. Authentication failed.')\n except Exception as exp:\n logging.error('Unable to connect to proxy host: %s' % exp)\n raise TobyException('Unable to connect to proxy host: %s' % exp)\n\n try:\n if self.port is None:\n self.port = 22\n self.tunnel = paramiko.Transport(self.transport.open_channel(\n kind='direct-tcpip',\n dest_addr=(self.host, self.port),\n src_addr=('127.0.0.1', 0)))\n self.tunnel.start_client()\n conn_result = self.tunnel.auth_password(username=self.user, password=self.password)\n if len(conn_result) == 0:\n self.dest_connection = True\n else:\n logging.error('Unable to connect to destination host. Authentication failed.')\n raise TobyException('Unable to connect to destination host. Authentication failed.')\n except Exception as exp:\n logging.error('Unable to connect to destination host: %s' % exp)\n raise TobyException('Unable to connect to destination host: %s' % exp)\n\n try:\n self.handle = self.tunnel.open_session(20)\n self.handle.get_pty(width=160, height=0)\n self.handle.invoke_shell()\n self.handle.set_combine_stderr(True)\n self.handle.settimeout(60)\n tnh = self.handle\n got = []\n while True:\n _rd, _wr, _err = select([tnh], [], [], 10)\n if _rd:\n data = tnh.recv(1024)\n data = data.decode(\"utf-8\")\n got.append(data)\n if re.search('> ', data):\n tnh.send(b' start shell\\n')\n data = tnh.recv(1024)\n data = data.decode(\"utf-8\")\n if re.search(r'(\\$|>|#|%)[\\s\\t]?', data):\n break\n except Exception as exp:\n logging.error(\n 'Unable to fetch the prompt on destination host: %s' % exp)\n raise TobyException(\n 'Unable to fetch the prompt on destination host: %s' % exp)",
"def option():\n parser = argparse.ArgumentParser(description='ssh with screen')\n parser.add_argument('host', type=str, default=sys.stdin)\n\n return parser.parse_args()",
"def parse_args():\n argparser = argparse.ArgumentParser()\n argparser.add_argument('--server-uri', required=True, help='Server URI to proxy')\n argparser.add_argument('--host', help='Proxy address')\n argparser.add_argument('--port', type=int, help='Proxy port', default=8888)\n return argparser.parse_args()",
"def cli(ctx, host, port):\n if ctx.obj is None:\n ctx.obj = {}\n ctx.obj[\"HOST\"] = host\n ctx.obj[\"PORT\"] = port",
"def main_tunnel():\r\n print 'yay you beat the boss'",
"def set_launch_args_proxy(self, launch_options: Dict[str, Any]) -> None:\r\n launch_options['args'] = [\r\n a for a in launch_options.get('args', []) if not a.startswith('--proxy-server=')] \\\r\n + [f'--proxy-server=\"{launch_options[\"proxy\"]}\"']",
"def remote_arg(fn):\n click_util.append_params(\n fn, [click.Argument((\"remote\",), shell_complete=_ac_remote)]\n )\n return fn",
"def tunnel_putty_link(name, password, ip):\r\n try:\r\n subprocess.call(\"plink -D %s -pw %s %s@%s\" % (str(PROXY_PORT_NUMBER), password, name, ip))\r\n except Exception as error:\r\n print \"An error occured: \" + str(error)",
"def _map_arguments(self, args):\n data = args.get('data')\n comp = args.get('comp')\n library = args.get('library')\n dry_run = args.get('dry_run', False)\n\n self._set_link('srcmaps-catalog', SrcmapsCatalog_SG,\n comp=comp, data=data,\n library=library,\n nsrc=args.get('nsrc', 500),\n dry_run=dry_run)\n\n self._set_link('gather-srcmaps', GatherSrcmaps_SG,\n comp=comp, data=data,\n library=library,\n dry_run=dry_run)\n \n self._set_link('merge-srcmaps', MergeSrcmaps_SG,\n comp=comp, data=data,\n library=library,\n dry_run=dry_run)",
"def parse_arguments(args):",
"def connect(self, *args, **kwargs):",
"def ConnectPort(self, *args, **kwargs):\n # type: (*Any, **Any) -> None\n payload = { \"Arg1\": self }\n for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]\n for item in kwargs.items(): payload[item[0]] = item[1]\n return self._execute('connectPort', payload=payload, response_object=None)",
"def command_line_arguments():\n _parser.add_argument('-l', '--list', nargs='+',\n help='<Required> Set flag', required=True)\n _parser.add_argument(\"-A\", \"--access\", required=True,\n help=\"access to host => grant/revoke\")",
"def fill_args(args):\n args.agent_module = 'dstar_sgolam_walker'\n args.checkpoint_path = None\n args.exp_config = 'configs/baselines/dstar_proto_sgolam.yaml'\n args.num_episodes = 25\n \n return args",
"def connect(**connectparams):",
"def set_arguments(parser):\n args = parser.parse_args()\n\n if not args.ip:\n parser.print_help()\n show_error(\"The IP address of web server is required\")\n\n if not args.udp and not args.syn and not args.fudp and not args.ipsec and not args.icmp and not args.ficmp and not args.http and not args.slow:\n parser.print_help()\n show_error(\"At least one type of attack is required\")\n\n if args.port:\n if args.port > 0 and args.port <= 65535:\n global destination_port\n destination_port = args.port\n else:\n parser.print_help()\n show_error(\"Wrong port number\")\n\n if (args.udp or args.syn or args.fudp or args.ipsec or args.slow or args.http) and not args.port:\n parser.print_help()\n show_error(\"Port number for IPSEC, UDP, TCP, HTTP, protocols is required\")\n\n if (args.icmp or args.ficmp) and args.port and (not args.udp and not args.syn and not args.fudp and not args.ipsec and not args.http and not args.slow):\n print(\"WARNING: port number only for UDP, TCP, IPSEC protocols is required.\")\n time.sleep(3)\n\n if args.icmp:\n if args.icmp > 0 and args.icmp <= 300:\n global processes_icmp\n processes_icmp = args.icmp\n else:\n parser.print_help()\n show_error(\"Wrong processes count\") \n\n if args.ficmp:\n if args.ficmp > 0 and args.ficmp <= 300:\n global processes_ficmp\n processes_ficmp = args.ficmp\n else:\n parser.print_help()\n show_error(\"Wrong processes count\")\n\n if args.ipsec:\n if args.ipsec > 0 and args.ipsec <= 300:\n global processes_ipsec\n processes_ipsec = args.ipsec\n else:\n parser.print_help()\n show_error(\"Wrong processes count\")\n\n if args.syn:\n if args.syn > 0 and args.syn <= 300:\n global processes_syn\n processes_syn = args.syn\n else:\n parser.print_help()\n show_error(\"Wrong processes count 100\")\n\n if args.udp:\n if args.udp > 0 and args.udp <= 300:\n global processes_udp\n processes_udp = args.udp\n else:\n parser.print_help()\n show_error(\"Wrong processes count 100\")\n\n if args.fudp:\n if args.fudp > 0 and args.fudp <= 300:\n global processes_fudp\n processes_fudp = args.fudp\n else:\n parser.print_help()\n show_error(\"Wrong processes count\")\n\n if args.http:\n if args.http > 0 and args.http <= 300:\n global processes_http\n processes_http = args.http\n else:\n parser.print_help()\n show_error(\"Wrong processes count\")\n\n if args.slow:\n if args.slow > 0 and args.slow <= 300:\n if not args.sockets:\n parser.print_help()\n show_error(\"Sockets count is required\")\n if args.sockets >= 1 and args.sockets <= 1000:\n global socket_count\n socket_count = args.sockets\n else:\n parser.print_help()\n show_error(\"Wrong sockets count\")\n\n global processes_slowloris\n processes_slowloris = args.slow\n else:\n parser.print_help()\n show_error(\"Wrong processes count\")\n\n if not args.slow and args.sockets:\n print(\"WARNING: sockets only for Slowloris are required.\")\n time.sleep(3)\n\n global destination_ip\n destination_ip = args.ip",
"def open_tunnel(self, serial_no, port=19020):\n return self.open(ip_addr='tunnel:' + str(serial_no) + ':' + str(port))",
"def ssh_tunnel(log, mode, ltransport, lport, rtransport, rport, server, user,\n ssh_port, check_hosts=False, sleep_duration=30, silent=True):\n if rtransport == \"tcp\":\n forwarding_args = \"%s:%s:%s\" % (lport, \"localhost\", rport)\n else:\n forwarding_args = \"%s:%s\" % (lport, rport)\n\n # remove the local ipc socket if it exists, might be\n # better to pick a unique name\n if ltransport == \"ipc\" and os.path.exists(lport):\n os.remove(lport)\n\n host_check_opt = use_host_check(check_hosts)\n\n if mode == \"ssh\":\n cmd = ssh_cmd.format(fwd_flg=\"L\", fwd_args=forwarding_args, host_check_opt=host_check_opt,\n server=server, ssh_port=ssh_port, sleep=sleep_duration)\n elif mode == \"mrsh\":\n cmd = mrsh_cmd.format(server=server, ssh_cmd=ssh_cmd.format(fwd_flg=\"R\", fwd_args=forwarding_args,\n host_check_opt=host_check_opt, server=localhost, ssh_port=ssh_port, sleep=sleep_duration))\n else:\n raise TunnelError(\"Unknown mode %s\" % mode)\n\n if silent:\n args = dict(stdout=DEVNULL, stderr=DEVNULL, stdin=DEVNULL)\n else:\n args = {}\n\n log(\"starting ssh tunnel> %s\" % cmd)\n # TODO: this can fail\n Popen(cmd.split(), close_fds=True, preexec_fn=os.setpgrp, **args)",
"def ssh(args, config):\n print('{}'.format(ssh.__doc__))",
"def __common_args_handler(parser):\n parser.add_argument(\"-netloc\", help=\"<host>:<port>\", default=\"[::]:50051\", type=str)\n parser.add_argument(\"-debug\", help=\"Print debug messages.\", action=\"store_true\")\n args = parser.parse_args(sys.argv[2:])\n logging.basicConfig(level=logging.DEBUG if args.debug else logging.INFO)\n return args"
] | [
"0.6474638",
"0.5975455",
"0.5935769",
"0.59085613",
"0.5812909",
"0.57935566",
"0.57738376",
"0.5760542",
"0.5744616",
"0.57356274",
"0.5582266",
"0.5520039",
"0.55166715",
"0.55127615",
"0.54224026",
"0.5387747",
"0.53810596",
"0.533782",
"0.532005",
"0.53157586",
"0.5314493",
"0.5311208",
"0.5285863",
"0.5281834",
"0.5281002",
"0.5261332",
"0.52580005",
"0.52536535",
"0.52346355",
"0.5233311"
] | 0.6211992 | 1 |
The reference number of the bottle listed in the reading | def ref(self):
return self.bottle.ref | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_ref_index(self):\n total_pol = self.get_compound_pol()\n molar_volume = self.get_molar_volume()\n if not total_pol:\n return None\n ref_index = np.sqrt((4 * np.pi * total_pol) / ((2.26 - 4 * np.pi / 3) * total_pol + molar_volume) + 1)\n return ref_index",
"def getNumber():",
"def latestShotNumber():\n\tconn = _mds.Connection(_pref._HBT_SERVER_ADDRESS+':8003');\n\tshot_num = conn.get('current_shot(\"hbtep2\")')\n\treturn int(shot_num)",
"def referencecount(self) :\n\t\ttry :\n\t\t\treturn self._referencecount\n\t\texcept Exception as e:\n\t\t\traise e",
"def ring_idx(self) -> int:\n return self._ring_idx",
"def get_read_number(self):\n\t\tnode = self.find_read_number_block()\n\t\tif node:\n\t\t\ttry:\n\t\t\t\treturn int(node.attrs['read_number'])\n\t\t\texcept:\n\t\t\t\treturn None\n\t\treturn None",
"def get_tid(self, reference):\n\n tid = self.ref2tid.get(reference, -1)\n if tid == -1:\n raise KeyError('{} was not found in the file header'.format(reference))\n return tid",
"def get_list_index(self):\r\n return self.n",
"def getNum(self):\r\n return self.blockNum",
"def number(self) -> int:\n return self._id",
"def get_reynolds_number(self, velocity, refLength):\n\t\tre_num = self.Density * velocity * refLength / self.Dynamic_viscosity\n\t\treturn re_num",
"def tracenb(self):\n trace_nb = self._pna.query('CALC{}:PAR:MNUM?'.format(self._channel))\n if trace_nb:\n return int(trace_nb)\n else:\n raise InstrIOError(cleandoc('''Agilent PNA did not return the\n trace number on channel {} '''.format(self._channel)))",
"def ref(element):\n return int(element['attrs']['ref'])",
"def rn(self):\n return self._rn",
"def getNumber(self):\n return self.number",
"def get_update_number( self ):",
"def getFrameNumber(fileName, jointNumber):\n with open(fileName) as f:\n for i, l in enumerate(f):\n pass\n return (i+1)//jointNumber",
"def _num(self):\n try:\n num = int(self.__rId[3:])\n except ValueError:\n num = 9999\n return num",
"def getOccurence(self) -> int:\n ...",
"def get_reference_id(reference):\n ref_id = -1\n match = re.search('\\[[0-9]+\\]', reference)\n if match:\n ref_id = int(match.group(0).strip('[]'))\n return ref_id",
"def atomic_number(self) -> int:\n return elements.index(self.label) + 1",
"def get_number(self):\n return self.__number",
"def getNumber(self):\n return self.__number",
"def get_fret_num(self):\n low_note = Note(self.guitar.tuning[self.string], self.string,\n self.guitar, False)\n self.fret = (ALL_NOTES.index(self.name) -\n ALL_NOTES.index(low_note.name))\n return self.fret",
"def fget(self):\n if not hasattr(self, \"_n\"):\n self._n = 0\n self._n += 1\n return self._n",
"def number(cls, tileName):\n return TILENAMEMAP[tileName]['Number'] if tileName in TILENAMEMAP else None",
"def get_sensor_number(self):\n return int(self.data[1][-1])",
"def getAddressOfIndex(self) -> long:\n ...",
"def getReferenceId(self):\n return _libsbml.ReferenceGlyph_getReferenceId(self)",
"def ball_num(self):\n counter = 0\n for i in range(0, 100):\n if self.cells[i].is_ball:\n counter += 1\n return int(counter)"
] | [
"0.6187323",
"0.61432976",
"0.6112604",
"0.60540026",
"0.59829843",
"0.5976102",
"0.5951885",
"0.5948475",
"0.59262407",
"0.5922211",
"0.59036016",
"0.5899132",
"0.5871037",
"0.58575326",
"0.5835584",
"0.583141",
"0.5823899",
"0.58146167",
"0.58122456",
"0.5795709",
"0.57921714",
"0.5786016",
"0.57839453",
"0.57686806",
"0.57609284",
"0.5758762",
"0.573168",
"0.5717912",
"0.5694538",
"0.5693799"
] | 0.6673594 | 0 |
The gas mix of the associated bottle | def mix(self):
return self.bottle.mix | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def compute_mixing_coefficients_bot(self):\n [Ly,N] = self.b.shape\n z_u_w = self.grid_dict['z_u_w']\n\n v_upts = TTTW_func.v2u(self.v)\n\n self.sigma_bot = []\n self.Kv0 = np.zeros([Ly,N+1])\n self.Kt0 = np.zeros([Ly,N+1])\n for j in range(Ly):\n # turbulent velocity sclaes with buoyancy effects neglected\n ustar2 = self.r_D[j] * np.sqrt(self.u[j,0]**2 + v_upts[j,0]**2)\n wm = self.vonKar * np.sqrt(ustar2)\n ws = wm\n \n for k in range(1,N):\n k_w = k\n k_r = k - 1\n\n if k_w < self.kbl[j]: # NEED Zob\n sigma = np.min( [ ((z_u_w[j,k_w] - z_u_w[j,0] + self.Zob) / (self.hbbl[j] + self.Zob)),1.])\n if j ==1:\n self.sigma_bot.append(sigma)\n a1 = sigma - 2.\n a2 = 3. - 2.*sigma\n a3 = sigma - 1.\n\n self.Kv0[j,k_w] = wm * self.hbbl[j] * ( sigma * (1. + sigma * ( a1 + a2*self.Gm1_bot[j]+a3*self.dGm1_dS_bot[j]))) \n self.Kt0[j,k_w] = ws * self.hbbl[j] * ( sigma * (1. + sigma * ( a1 + a2*self.Gt1_bot[j]+a3*self.dGt1_dS_bot[j])))",
"def isGasBoiler(self):\n if self.getTER1() == 0 and self.getTER2() == 0:\n return 1 #gas boiler\n else:\n return 0",
"def _add_bal(self):\n\n c = self.components\n p = self.pipes\n\n # TODO No mass flow reversal yet\n if self.temperature_driven:\n\n lines = self.params['lines'].v()\n\n self.block.mix_temp = Var(self.TIME, lines)\n\n def _temp_bal_incoming(b, t, l):\n\n incoming_comps = collections.defaultdict(list)\n incoming_pipes = collections.defaultdict(list)\n\n for name, comp in c.items():\n if value(comp.get_mflo(t)) >= 0:\n incoming_comps['supply'].append(name)\n else:\n incoming_comps['return'].append(name)\n\n for name, pipe in p.items():\n if value(pipe.get_edge_mflo(self.name, t)) >= 0:\n incoming_pipes['supply'].append(name)\n else:\n incoming_pipes['return'].append(name)\n # Zero mass flow rate:\n if value(\n sum(c[comp].get_mflo(t) for comp in incoming_comps[l]) + \\\n sum(p[pipe].get_edge_mflo(self.name, t) for pipe in\n incoming_pipes[l])) == 0:\n # mixed temperature is average of all joined pipes, actual value should not matter,\n # because packages in pipes of this time step will have zero size and components do not take over\n # mixed temperature in case there is no mass flow\n\n return b.mix_temp[t, l] == (\n sum(c[comp].get_temperature(t, l) for comp in c) +\n sum(p[pipe].get_temperature(self.name, t, l) for\n pipe in p)) / (\n len(p) + len(c))\n\n\n else: # mass flow rate through the node\n return (sum(\n c[comp].get_mflo(t) for comp in incoming_comps[l]) +\n sum(p[pipe].get_edge_mflo(self.name, t) for pipe in\n incoming_pipes[l])) * b.mix_temp[t, l] == \\\n sum(c[comp].get_mflo(t) * c[comp].get_temperature(t,\n l)\n for comp in incoming_comps[l]) + \\\n sum(p[pipe].get_edge_mflo(self.name, t) * p[\n pipe].get_edge_temperature(self.name, t, l)\n for pipe in incoming_pipes[l])\n\n self.block.def_mixed_temp = Constraint(self.TIME,\n lines,\n rule=_temp_bal_incoming)\n\n def _temp_bal_outgoing(b, t, l, comp):\n\n outgoing_comps = collections.defaultdict(list)\n outgoing_pipes = collections.defaultdict(list)\n\n for name, comp_obj in c.items():\n if comp_obj.get_mflo(t) >= 0:\n outgoing_comps['return'].append(name)\n else:\n outgoing_comps['supply'].append(name)\n\n for name, pipe_obj in p.items():\n if pipe_obj.get_edge_mflo(self.name, t) >= 0:\n outgoing_pipes['return'].append(name)\n else:\n outgoing_pipes['supply'].append(name)\n\n if t == 0:\n return Constraint.Skip\n if comp in outgoing_pipes[l]:\n return p[comp].get_edge_temperature(self.name, t, l) == \\\n b.mix_temp[t, l]\n elif comp in outgoing_comps[l]:\n return c[comp].get_temperature(t, l) == b.mix_temp[t, l]\n else:\n return Constraint.Skip\n\n self.block.outgoing_temp_comps = Constraint(self.TIME,\n lines,\n c.keys(),\n rule=_temp_bal_outgoing)\n self.block.outgoing_temp_pipes = Constraint(self.TIME,\n lines,\n p.keys(),\n rule=_temp_bal_outgoing)\n\n elif self.repr_days is None:\n\n def _heat_bal(b, t):\n return 0 == sum(\n self.components[i].get_heat(t) for i in self.components) \\\n + sum(\n pipe.get_edge_heat(self.name, t) for pipe in p.values())\n\n self.block.ineq_heat_bal = Constraint(self.TIME,\n rule=_heat_bal)\n\n def _mass_bal(b, t):\n return 0 == sum(\n self.components[i].get_mflo(t) for i in self.components) \\\n + sum(\n pipe.get_edge_mflo(self.name, t) for pipe in p.values())\n\n self.block.ineq_mass_bal = Constraint(self.TIME,\n rule=_mass_bal)\n\n else:\n def _heat_bal(b, t, c):\n return 0 == sum(\n self.components[i].get_heat(t, c) for i in\n self.components) \\\n + sum(\n pipe.get_edge_heat(self.name, t, c) for pipe in p.values())\n\n self.block.ineq_heat_bal = Constraint(self.TIME, self.REPR_DAYS,\n 
rule=_heat_bal)\n\n def _mass_bal(b, t, c):\n return 0 == sum(\n self.components[i].get_mflo(t, c) for i in\n self.components) \\\n + sum(\n pipe.get_edge_mflo(self.name, t, c) for pipe in p.values())\n\n self.block.ineq_mass_bal = Constraint(self.TIME, self.REPR_DAYS,\n rule=_mass_bal)",
"def additional_equations(self, k):\n ######################################################################\n # equation for saturated gas at hot side outlet\n o1 = self.outl[0].to_flow()\n self.residual[k] = o1[2] - h_mix_pQ(o1, 1)",
"def merge_bowl(self):\n self.B += self.Bowl_bS\n self.Bc += self.Bowl_bC\n self.W += self.Bowl_WS\n self.Wc += self.Bowl_WS",
"def mix(a, b, amount):\n return ((1.0 - amount) * a) + (amount * b)",
"def calc_ti(self):\n m = 0\n for i in self.inl:\n m += i.m.val_SI * i.fluid.val[self.fuel_alias.val]\n\n for o in self.outl:\n m -= o.m.val_SI * o.fluid.val[self.fuel_alias.val]\n\n return m * self.lhv",
"def mix(self, well, volume=\"50:microliter\", speed=\"100:microliter/second\",\n repetitions=10):\n if isinstance(well, Well) or isinstance(well, str):\n well = WellGroup([well])\n for w in well.wells:\n opts = {\n \"well\": w,\n \"volume\": volume,\n \"speed\": speed,\n \"repetitions\": repetitions\n }\n self.pipette([{\"mix\": [opts]}])",
"def calculate_mix(self, recipe_name, totalvol=10, nic=3, vg=70, mix='juice_from_ingredients'):\n try:\n #pdb.set_trace()\n recipe = self._recipes[recipe_name]\n except KeyError:\n print(\"Error: recipe %s not found!\"%recipe_name)\n return None\n\n # assumes all flavors are PG, will fix this later\n totalflav = sum([1.0*totalvol*recipe[f] for f in recipe.keys()])\n totalflav_part = totalflav / float(totalvol)\n message = None\n\n if mix != 'concentrate':\n if vg > 1:\n vg = vg / 100.0\n\n message = None\n nic = 1.0*nic*totalvol / self._nic_strength\n\n # make sure we're within Max VG/Min VG range\n max_vg = 1.0 - self.get_total_flavor(recipe_name) - (nic if self._nic_base=='pg' else 0)\n if vg > max_vg:\n vg = max_vg\n message = 'Using Max VG: %.1f%%'%(max_vg*100.0)\n else:\n min_vg = nic / totalvol if self._nic_base=='vg' else 0.0\n if vg < min_vg:\n vg = min_vg\n message = 'Using Max PG: %.1f%%'%(100.0*(1.0-min_vg))\n\n totalvg = totalvol * vg\n totalpg = totalvol - totalvg\n\n if self._nic_base == 'vg':\n addpg = totalpg - totalflav\n addvg = totalvg - nic\n else:\n addpg = totalpg - totalflav - nic\n addvg = totalvg\n\n # force nonnegative numbers in case rounding makes -.0000000001 or whatever\n if addpg < 0:\n addpg = 0\n if addvg < 0:\n addvg = 0\n\n # generate string to return for the output box\n max_flavor_name_len = max(len(f) for f in (list(recipe.keys()) + ['Nicotine']))\n ret = ''\n\n if mix != 'from_concentrate':\n for f in sorted(recipe.keys()):\n if mix == 'from_ingredients':\n f_vol = 1.0*totalvol*recipe[f]\n else: # 'concentrate'\n f_vol = (totalvol * recipe[f]) / totalflav_part\n\n spaces = max_flavor_name_len - len(f)\n ret += '\\n%s: %5.2f mL'%(' '*spaces + f, f_vol)\n # the first character will always be a newline from the loop above, which we don't want, so kill it\n ret = ret[1:]\n\n elif mix == 'from_concentrate':\n # \"Concentrate\" is longer than Nicotine, so it gets the max\n max_flavor_name_len = len('Concentrate')\n ret = 'Concentrate: %5.2f mL'%totalflav\n\n if mix != 'concentrate':\n # add nic/VG/PG\n ret += '\\n\\n' + ' '*(max_flavor_name_len-8) + 'Nicotine: %5.2f mL'%nic\n ret += '\\n' + ' '*(max_flavor_name_len-2) + 'VG: %5.2f mL'%addvg\n ret += '\\n' + ' '*(max_flavor_name_len-2) + 'PG: %5.2f mL'%addpg\n else:\n ctotal = totalvol / totalflav_part\n message = ' '*(max_flavor_name_len-5) + 'Makes: %5.2f mL'%ctotal\n\n if message is not None:\n ret += '\\n\\n' + message\n\n return ret",
"def cargo_gas(self):\n return self._cargo_gas",
"def totalValue(self):\n\n\t\tvalue = 0\n\t\tfor bottle in self.bottles:\n\t\t\tvalue += bottle.inflatedCost\n\n\t\treturn value",
"def TNG_net(self):\n import h5py as h5\n filename = localpath+'input/yields/TNG/AGB.hdf5'\n # Read H5 file\n f = h5.File(filename, \"r\")\n\n indexing = {}\n indexing['H'] = 'Hydrogen'\n indexing['He'] = 'Helium'\n indexing['C'] = 'Carbon'\n indexing['N']= 'Nitrogen'\n indexing['O'] = 'Oxygen'\n indexing['Ne'] = 'Neon'\n indexing['Mg'] = 'Magnesium'\n indexing['Si'] = 'Silicon'\n indexing['S'] = 'Sulphur' # Not used by TNG simulation\n indexing['Ca'] = 'Calcium' # Not used by TNG simulation\n indexing['Fe'] = 'Iron'\n\n self.elements = list(indexing.keys())\n \n self.table = {}\n \n self.metallicities = list(f['Metallicities'].value)\n self.masses = f['Masses'].value\n \n\n for z_index,z in enumerate(self.metallicities):\n\n yield_subtable = {}\n \n z_name = f['Yield_names'].value[z_index].decode('utf-8')\n z_data = f['Yields/'+z_name+'/Yield']\n \n ejecta_mass = f['Yields/'+z_name+'/Ejected_mass'].value\n \n yield_subtable['Mass'] = list(reversed(self.masses))\n remnants = self.masses-ejecta_mass\n yield_subtable['mass_in_remnants'] = np.divide(list(reversed(remnants)),yield_subtable['Mass'])\n for el in list(indexing.keys()):\n yield_subtable[el] = np.zeros(len(self.masses))\n \n summed_yields = np.zeros(len(self.masses))\n \n for m_index,mass in enumerate(yield_subtable['Mass']):\n for el_index,el in enumerate(self.elements):\n el_yield = z_data[el_index][len(self.masses)-m_index-1]\n el_yield_fraction = el_yield/mass\n yield_subtable[el][m_index] = el_yield_fraction\n summed_yields[m_index]+=el_yield_fraction\n \n yield_subtable['unprocessed_mass_in_winds'] = 1.-summed_yields-yield_subtable['mass_in_remnants']\n \n self.table[z.astype(float)] = yield_subtable\n \n # Restructure table\n all_keys = ['Mass','mass_in_remnants','unprocessed_mass_in_winds']+self.elements\n \n list_of_arrays = [yield_subtable[key] for key in all_keys]\n restructure_subtable = np.core.records.fromarrays(list_of_arrays,names=all_keys)\n \n self.table[z] = restructure_subtable",
"def stoich_flue_gas(self, nw):\n lamb = 1\n n_fuel = 1\n m_fuel = 1 / molar_mass_flow(self.fuel.val) * n_fuel\n m_fuel_fg = m_fuel\n m_co2 = 0\n m_h2o = 0\n molar_masses[self.h2o] = CP.PropsSI('M', self.h2o)\n molar_masses[self.co2] = CP.PropsSI('M', self.co2)\n molar_masses[self.o2] = CP.PropsSI('M', self.o2)\n\n self.fg = {}\n self.fg[self.co2] = 0\n self.fg[self.h2o] = 0\n\n for f, x in self.fuel.val.items():\n fl = set(list(self.fuels())).intersection(\n set([a.replace(' ', '') for a in CP.get_aliases(f)]))\n\n if len(fl) == 0:\n if f in self.fg:\n self.fg[f] += x * m_fuel\n else:\n self.fg[f] = x * m_fuel\n else:\n n_fluid = x * m_fuel / molar_masses[f]\n m_fuel_fg -= n_fluid * molar_masses[f]\n structure = fluid_structure(f)\n n = {}\n for el in ['C', 'H', 'O']:\n if el in structure:\n n[el] = structure[el]\n else:\n n[el] = 0\n\n m_co2 += n_fluid * n['C'] * molar_masses[self.co2]\n m_h2o += n_fluid * n['H'] / 2 * molar_masses[self.h2o]\n\n self.fg[self.co2] += m_co2\n self.fg[self.h2o] += m_h2o\n\n n_o2 = (m_co2 / molar_masses[self.co2] +\n 0.5 * m_h2o / molar_masses[self.h2o]) * lamb\n m_air = n_o2 * molar_masses[self.o2] / self.air.val[self.o2]\n\n self.air_min = m_air / m_fuel\n\n for f, x in self.air.val.items():\n if f != self.o2:\n if f in self.fg:\n self.fg[f] += m_air * x\n else:\n self.fg[f] = m_air * x\n\n m_fg = m_fuel + m_air\n\n for f in self.fg.keys():\n self.fg[f] /= m_fg\n\n if not self.path.is_set:\n self.path.val = None\n\n TESPyFluid(\n self.fuel_alias.val, self.fuel.val, [1000, nw.p_range_SI[1]],\n path=self.path.val)\n TESPyFluid(\n self.fuel_alias.val + '_fg', self.fg, [1000, nw.p_range_SI[1]],\n path=self.path.val)\n msg = (\n 'Generated lookup table for ' + self.fuel_alias.val + ' and for '\n 'stoichiometric flue gas at component ' + self.label + '.')\n logging.debug(msg)\n\n if self.air_alias.val not in ['Air', 'air']:\n TESPyFluid(\n self.air_alias.val, self.air.val, [1000, nw.p_range_SI[1]],\n path=self.path.val)\n msg = ('Generated lookup table for ' + self.air_alias.val +\n ' at stoichiometric combustion chamber ' + self.label + '.')\n else:\n msg = ('Using CoolProp air at stoichiometric combustion chamber ' +\n self.label + '.')\n logging.debug(msg)",
"def tot_pot_energy (self):\n\n for planet_a in self.planets: #this loop takes a 'planet_a' in 'self.planets'.\n pot_energy = 0.\n for planet_b in self.planets: #this loop takes a 'planet_b' in 'self.planets'.\n if planet_a == planet_b: #This is a condition to avoid to find the potential energy of a body shared with itself, a physical nonsense\".\n continue\n G=6.67408e-11 #gravitational constant\n energy = ((-G) * (planet_a.mass*planet_b.mass))/(np.linalg.norm((planet_a.position-planet_b.position))) #potential energy of planet_a with each other body.\n pot_energy += energy #all the potential energies acting on planet_a summed together.\n\n return(pot_energy)",
"def b_mix(s, p, phase='x'): # (Validated)\n b_mix = 0.0\n for i in range(1, p.m['n']+1):\n b_mix += s.c[i][phase]*s.c[i]['b']\n return b_mix",
"def calculate_sh(self):\n if self.data.get('Specific_Humidity') is None:\n if self.data.get('Mixing_Ratio') is None:\n raise KeyError('Calculate mixing ratio first!')\n else:\n w_kg = self.data['Mixing_Ratio'] / 1000\n self.data['Specific_Humidity'] = (w_kg / (w_kg + 1)) * 1000",
"def GetSpeculated(self):\n return self.money + sum([self.share[i] * self.price[i][0] * (1 + self.taxe) for i in self.price])",
"def one_variation(self):\n\n globals_ = dict(\n # Physical constants\n g=9.81, # Gravitational acceleration [m/s^2]\n c=3e8, # Speed of Light [m/s]\n h=6.6262e-34, # Planck [Js]\n k=1.38e-23, # Boltzmann [J/K]\n R=8.31441, # k*NA [J/(mol*kg)]\n NA=6.0225e23, # Avogadro [1/mol]\n gamma=6.67e11, # Gravitational Constant [Nm^2/kg^2]num\n qe=1.60219e-19, # Elementary charge [C]\n # (e is not free unfortunately)\n e0=8.854187816e-12, # Permittivity of Vacuum [As/(Vm)]\n epsilon0=8.854187816e-12, # Permittivity of Vacuum [As/(Vm)]\n mu0=4e-7*pi, # Permeability of Vacuum [Vs/(Am)]\n K=9e9, # 1/(4*pi*epsilon0) [Vm/(As)]\n me=9.1095e-31, # The mass of electron [kg]\n mu=1.66056e-27, # Atomic mass unit [kg]\n sigma=5.67e-8, # Stefan-Boltzmann Constant\n )\n exec(function_import, globals_)\n for i in ('pi', 'e', 'sin', 'sind', 'asin'):\n assert i in globals_\n\n values = {}\n\n # For example there is a variable k, it is not equal to k (Planck const)\n for variable in self.variable_list:\n exec('%s = None' % variable, globals_, values)\n\n for const in self.const_list:\n exec('%(name)s = %(value)g' % const, values)\n\n for intv in self.interval_list:\n value = interval_.random(intv['interval'])\n if intv['name']:\n name = intv['name']\n exec('%s = float(%g)' % (name, value), globals_, values)\n\n compute_list = self.compute_list[:]\n number_of_uncomputable_formulas = 0\n # The number of the failed computation after\n # a successful computation.\n\n while compute_list:\n compute = compute_list[0]\n try:\n exec(compute['formula'], globals_, values)\n\n except (NameError, TypeError):\n compute_list.append(compute_list.pop(0))\n # It writes the first item to the end\n\n number_of_uncomputable_formulas += 1\n if number_of_uncomputable_formulas == len(compute_list):\n raise UncomputableError(self.code)\n self.is_computable = False\n return\n continue\n except ValueError:\n print('Value Error. Formula is:')\n print(compute['formula'])\n return\n\n compute_list.pop(0)\n number_of_uncomputable_formulas = 0\n command = '%(name)s = %(right)s' % compute\n exec(command, globals_, values)\n\n possibilities = next(self.possibilities_cycle)\n erased_elements = set(self.variable_list) - possibilities\n self.list.append((values, erased_elements))",
"def add_computed_gas_concentrations(self):\n # Extract the z-coordinate and T, S, P profile\n zs = self.interp_ds.coords[self.ztsp[0]].values\n Ts = self.interp_ds[self.ztsp[1]].values\n Ss = self.interp_ds[self.ztsp[2]].values\n Ps = self.interp_ds[self.ztsp[3]].values\n \n # Create an air object\n air_names = ['nitrogen', 'oxygen', 'argon', 'carbon_dioxide']\n yk = np.array([0.78084, 0.20946, 0.009340, 0.00036])\n from tamoc import dbm\n air = dbm.FluidMixture(air_names)\n m = air.masses(yk)\n \n # Compute the concentrations adjusted for depth\n Cs = np.zeros((len(zs), len(air_names)))\n for i in range(len(zs)):\n Cs[i,:] = air.solubility(m, Ts[i], 101325., Ss[i])[0,:] * \\\n seawater.density(Ts[i], Ss[i], Ps[i]) / \\\n seawater.density(Ts[i], Ss[i], 101325.)\n \n # Make sure none of these gases are already in the measured profile\n for name in air_names:\n if name in self.interp_ds:\n air_names[air_names.index(name)] = 'computed_' + name\n \n # Add these data to the Profile object\n data = np.hstack((np.atleast_2d(zs).transpose(), Cs))\n names = [self.ztsp[0]] + air_names \n units = [self.ztsp_units[0]] + 4*['kg/m^3']\n self.append(data, names, units)\n \n # Rebuild the interpolator\n self._build_interpolator()",
"def computeRazor(l0, l1, met):\n metlv = met\n l0 = l0\n l1 = l1\n # lab frame\n vBETA_z = (l0+l1).Vect()*r.Double(1./(l0.E()+l1.E()))\n vBETA_z.SetX(0.0)\n vBETA_z.SetY(0.0)\n l0.Boost(-vBETA_z)\n l1.Boost(-vBETA_z)\n pT_CM = (l0+l1).Vect() + metlv.Vect()\n pT_CM.SetZ(0.0)\n ll = l0+l1\n SHATR = sqrt( 2.*(ll.E()*ll.E() - ll.Vect().Dot(pT_CM)\n + ll.E()*sqrt( ll.E()*ll.E() + pT_CM.Mag2() - 2.*ll.Vect().Dot(pT_CM) )))\n vBETA_T_CMtoR = pT_CM * r.Double(1./sqrt(pT_CM.Mag2() + SHATR*SHATR))\n l0.Boost(-vBETA_T_CMtoR)\n l1.Boost(-vBETA_T_CMtoR)\n ll.Boost(-vBETA_T_CMtoR)\n # R-frame\n dphi_LL_vBETA_T = fabs((ll.Vect()).DeltaPhi(vBETA_T_CMtoR))\n dphi_L1_L2 = fabs(l0.Vect().DeltaPhi(l1.Vect()))\n vBETA_R = (l0.Vect() - l1.Vect())*r.Double(1./(l0.E()+l1.E()))\n try:\n gamma_R = 1./sqrt(1.-vBETA_R.Mag2())\n except ValueError:\n print 1.-vBETA_R.Mag2()\n dphi_vBETA_R_vBETA_T = fabs(vBETA_R.DeltaPhi(vBETA_T_CMtoR))\n l0.Boost(-vBETA_R)\n l1.Boost(vBETA_R)\n # R+1 frame\n MDELTAR = 2.*l0.E()\n costhetaRp1 = l0.Vect().Dot(vBETA_R)/(l0.Vect().Mag()*vBETA_R.Mag())\n return dphi_LL_vBETA_T, MDELTAR",
"def variations():",
"def get_brutto_formula(self):\n\n def m_ind(i):\n return str(i) if i > 1 else \"\"\n\n if self.modified:\n c_count = 0\n h_count = 0\n other_counts = OrderedDict()\n for a in self.get_atoms():\n if a.element == E.C:\n c_count += 1\n connected = len(self._graph.get_connected_vertices(a))\n h_count += 4 - connected\n elif a.element == E.H:\n h_count += 1\n else:\n if a.element.symbol in other_counts:\n other_counts[a.element.symbol] += 1\n else:\n other_counts[a.element.symbol] = 1\n c_part = \"C\" + m_ind(c_count) if c_count > 0 else \"\"\n h_part = \"H\" + m_ind(h_count) if h_count > 0 else \"\"\n others = reduce(lambda b, (e, c): b + e + m_ind(c),\n other_counts.items(),\n \"\")\n self._brutto_formula = c_part + h_part + others\n self.modified = False\n return self._brutto_formula",
"def funky_sum(a, b, mix):\n if mix <= 0:\n return a\n elif mix >= 1:\n return b\n else:\n return (1 - mix) * a + mix * b",
"def main():\n print('Calculating gas compositions.')\n resf = 'results/cell_gas' # results folder\n if not os.path.isdir(resf):\n os.makedirs(resf)\n sizex = 0.03 # sample size\n sizey = 0.02 # sample size\n sizez = 0.02 # sample size\n volume = sizex**3 # sample volume\n volume = sizex * sizey * sizez # sample volume\n # polymer density\n rhop = INPUTS['polymer_density']\n # molecular weight\n mw_ba = INPUTS['molar_mass']\n # foam density\n rhof = INPUTS['foam_density']\n # cell size for Kelvin effect on saturated vapour pressure\n dcell = INPUTS['cell_size']\n # initial weight fraction of BA\n w_ba_ini = INPUTS['initial_weight_fraction']\n names = w_ba_ini.keys()\n if 'H2O' in w_ba_ini:\n if 'CO2' in w_ba_ini:\n print(\"WARNING: H2O and CO2 are both in initial_weight_fraction.\",\n \"We will sum these contributions.\")\n else:\n w_ba_ini['CO2'] = 0\n w_ba_ini['CO2'] += w_ba_ini['H2O'] * mw_ba['CO2'] / mw_ba['H2O']\n names.append('CO2')\n names.remove('H2O')\n temps = linspace(\n INPUTS['temperature']['min'],\n INPUTS['temperature']['max'],\n INPUTS['temperature']['points']\n )\n por = 1 - rhof / rhop # porosity\n m_foam = rhof * volume # foam sample weight\n m_pol = m_foam * (1 - sum(w_ba_ini.values())) # weight of polymer\n if ARGS['--verbose']:\n print('Foam weight {0:.3f} g'.format(m_foam * 1e3))\n args = [por, mw_ba, m_foam, m_pol, volume, dcell]\n for name in names:\n with open(os.path.join(resf, 'cell_gas_{0}.csv'.format(name)),\n 'w') as csvfile:\n fieldnames = ['temp', 'pres_ba', 'w_ba_g', 'w_ba_d', 'w_ba_c']\n writer = csv.DictWriter(csvfile, fieldnames=fieldnames)\n writer.writeheader()\n for temp in temps:\n pres_ba, w_ba_g, w_ba_d, w_ba_c = initial_pressure(\n name, w_ba_ini[name], temp, args)\n writer.writerow(\n {'temp': temp, 'pres_ba': pres_ba, 'w_ba_g': w_ba_g,\n 'w_ba_d': w_ba_d, 'w_ba_c': w_ba_c})\n print('End.')",
"def get_total(self):\n\n self.base_price = self.get_base_price()\n\n if self.species == \"christmas melon\":\n self.base_price = self.base_price * 1.5\n\n total = (1 + self.tax) * self.qty * self.base_price\n return total",
"def netasset(self,coef):\n net_total = sum([share.sharetotal() for share in self.shares])*(1-coef)\n return net_total",
"def doCalculation(self, E1, E2, muL, muR, T, pot, C, TCalc, Density, E0, L):\n NEcut = len(E1) #we determine the number of single-particle states that we use\n VG=np.diag(pot)\n E= int(0.5*np.size(VG))\n V = VG[0:E] #since the potential of both barriers is symmetric and we only tunnel through one barrier. Therefore we only use one half of the potential.\n dx= L/(np.size(pot))\n\n #Following prints are for debugging purposes:\n #print(\"---------------------------------------------------------------------\")\n #print(\"---------------------------------------------------------------------\")\n #print(\"Hier beginnt die Ausgabe von Rates:\")\n #print(\"---------------------------------------------------------------------\")\n #print(\"V:\", V)\n #print(\"E1:\", E1)\n #print(\"E2:\", E2)\n #print(\"C:\", C)\n\n kB=0.08629 #Boltzmann constant in meV/K\n \n \n def fermi(E,mu,T):\n \"\"\"This fermi-function tells us with which likelyhood a state with an E is occupied on the lead.\n E(float): energy difference between the initial and the final state that the tunneling electron has to carry.\n mu(float): chemical potential of either drain(muR) or source(muL).\n T(float): temperature.\n \"\"\"\n if (E-mu)/T > 600:\n f=0\n\t\t\t\t\n else:\n f=1/(math.exp((E-mu)/(kB*T) )+1)\n return(f)\n \n\n\t#This function is called by the Gamma_ij-equations and includes the transmission-coefficient for each tunnelling-event\n #and the density of state function of the source and drain. \n def Gamma(Ea,Eb,V):\n \"\"\":math:`\\\\Gamma` includes the transmission coefficient and DOS: :math:`\\Gamma = | t |^2 * DOS`\n\n Ea(float): energy of initial state\n Eb(float): energy of final state\n V(np.array): barrier potential\n \"\"\"\n #print(Ea)\n #print(V)\n return (np.absolute(TCalc.calculate_transmission(Ea,V,dx))**2*Density.calculate_DensityofStates(np.absolute(Ea-Eb)))\n \n #These next four functions are used to calculate the transition rates.Each function for a different kind of transition:\n #We distinguish between transitions, in which the number of electrons on the dot changes from one to two(Gamma_12) and reverse(Gamma_21).\n #And between transitions in which the number of electrons on the dot change from zero to one(Gamma_01) and reverse(Gamma_10).\n\n def Gamma_12(Ea,Eb,mu,T):\n \"\"\"Calculates the rate of a transition from a one body state to a two body state.\n\n Ea(float): energy of initial state\n Eb(float): energy of final state\n mu(float): chemical potential of either drain(muR) or source(muL)\n T(float): temperature\n \"\"\"\n summe=0\n j=0\n Cb=C[np.where(E2==Eb)[0][0]]\n while j< NEcut:\n summe=Cb[np.where(E1==Ea)[0][0]][j]+summe\n j=j+1\n return(Gamma(Ea,Eb,V)*(np.absolute(summe))**2*fermi((Eb-Ea),mu,T))\n\n\n def Gamma_01(Eb,mu,T):\n \"\"\"Calculates the transition rate from the vacuum state to a one-body state.\n\n Eb(float): energy of final state\n mu(float): chemical potential of either drain(muR) or source(muL)\n T(float): temperature\n \"\"\"\n return(Gamma(E0,Eb,V)*fermi((Eb-E0),mu,T))\n\n def Gamma_21(Ea,Eb,mu,T):\n \"\"\"Calculates the rate of a transition from a two body state to a one body state.\n\n Ea(float): energy of initial state\n Eb(float): energy of final state\n mu(float): chemical potential of either drain(muR) or source(muL)\n T(float): temperature\n \"\"\"\n summe=0\n nu=0\n Ca=C[np.where(E2==Ea)[0][0]]\n while nu < NEcut:\n summe=summe+Ca[np.where(E1==Eb)[0][0]][nu]\n nu=nu+1\n return(Gamma(Ea,Eb,V)*(np.absolute(summe))**2*(1-fermi((Ea-Eb),mu,T)))\n\n def Gamma_10(Ea,mu,T):\n 
\"\"\"Calculates the rate of a transition from a one body state to the vacuum state.\n\n Ea(float): energy of initial state \n mu(float): chemical potential of either drain(muR) or source(muL)\n T(float): temperature\n \"\"\"\n return(Gamma(Ea,E0,V)*(1-fermi((Ea-E0),mu,T)))\n\n #creating the output matrices that later contain all the transition rates through either\n #the left or the right barrier\n Gamma_R=np.zeros((1+np.size(E1)+np.size(E2),1+np.size(E1)+np.size(E2)))\n Gamma_L=np.zeros((1+np.size(E1)+np.size(E2),1+np.size(E1)+np.size(E2)))\n\n #using a loop to fill the output matrices with transition rates.\n i_=0\n for i in E1:\n j_=0\n for j in E2:\n Gamma_L[i_+1][j_+1+np.size(E1)]=Gamma_12(i,j,muL,T)\n Gamma_L[j_+1+np.size(E1)][i_+1]=Gamma_21(j,i,muL,T)\n Gamma_R[i_+1][j_+1+np.size(E1)]=Gamma_12(i,j,muR,T)\n Gamma_R[j_+1+np.size(E1)][i_+1]=Gamma_21(j,i,muR,T)\n j_=j_+1\n Gamma_L[0][i_+1]=Gamma_10(i,muL,T)\n Gamma_R[0][i_+1]=Gamma_10(i,muR,T)\n Gamma_L[i_+1][0]=Gamma_01(i,muL,T)\n Gamma_R[i_+1][0]=Gamma_01(i,muR,T)\n i_=1+i_\n\n #print(\"Gamma_L und Gamma_R:\")\n #print(Gamma_L,Gamma_R)\n #print(\"-----------------------------------------------------------------------\")\n #print(\"---------------------------------------------------------------------\")\n return(Gamma_L,Gamma_R)",
"def calculate_useful_stuff(self):\n\n self.pos = self.pair[1].position - self.pair[0].position\n self.vel = self.pair[1].velocity - self.pair[0].velocity\n\n self.r = self.pos.length()\n self.v = self.vel.length()\n self.inv_r = 1.0 / self.r\n\n self.rvec = self.pos * self.inv_r\n self.rdot = (self.rvec * self.vel).sum() * self.rvec\n self.rdot_mag = self.rdot.length()\n\n self.vth = self.vel - self.rdot\n self.vth_mag = self.vth.length()\n self.vth_vec = self.vth / self.vth_mag",
"def _Dedup(self):\n kegg_id_to_index = {}\n for i, c in enumerate(self.reactants):\n first_i = kegg_id_to_index.setdefault(c.compound.kegg_id, i)\n if i != first_i:\n self.reactants[first_i].coeff += c.coeff\n c.coeff = 0\n \n self.reactants = filter(lambda x: x.coeff != 0, self.reactants)\n \n # always make sure that H2O is the last reactant (so that it will\n # appear last in the chemical formula)\n i_h2o = self._FindCompoundIndex('C00001')\n if i_h2o is not None:\n self.reactants = self.reactants[:i_h2o] + \\\n self.reactants[(i_h2o + 1):] + \\\n [self.reactants[i_h2o]]",
"def abbott_steam():\n per_klb = 20 # dollars per klb of steam\n kwh_eq = to_kwh(1) # kwh equivalent of steam\n per_kwh = per_klb / kwh_eq\n return per_kwh"
] | [
"0.61490744",
"0.5648351",
"0.5496578",
"0.5425663",
"0.5341396",
"0.531968",
"0.53028226",
"0.5288497",
"0.5263487",
"0.52349967",
"0.5207807",
"0.52044094",
"0.5183879",
"0.51709795",
"0.51653236",
"0.5154777",
"0.5135477",
"0.5124198",
"0.51158786",
"0.5088113",
"0.5088013",
"0.50727564",
"0.50690687",
"0.50682586",
"0.50669575",
"0.50587493",
"0.50541466",
"0.504065",
"0.50384605",
"0.5038332"
] | 0.5975279 | 1 |
Load all cogs from the 'cogs' directory | def load_cogs(self):
path = "cogs/" # Should always have a trailing slash
import_path = path.replace("/", ".")
extensions: list[str] = [
import_path + file.replace(".py", "")
for file in os.listdir(path)
if os.path.isfile(f"{path}{file}")
]
for extension in extensions:
try:
self.load_extension(extension)
except errors.ExtensionAlreadyLoaded:
pass
log.info(f"Loaded {len(self.commands)} commands from {len(self.cogs)} cogs") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def __load_cogs(self):\n for cog in self.__cogs.get():\n logging.info('loading %s', cog)\n self.load_extension(cog)",
"def reload_cogs(self):\n\n for extension in list(self.extensions):\n try:\n self.reload_extension(extension)\n except errors.NoEntryPointError:\n log.info(\"The extension {extension} has no setup function\")\n pass\n except errors.ExtensionAlreadyLoaded:\n pass\n\n log.info(f\"Reloaded {len(self.extensions)} cogs\")",
"async def load(ctx, cog):\n\tos.chdir(os.path.dirname(os.path.realpath(__file__)))\n\tif path.exists(\"cogs/\"+cog+\".py\"):\n\t\tprelen = len(bot.commands)\n\t\t\n\t\tbot.load_extension('cogs.'+cog)\n\n\t\t#checking if the extension loaded\n\t\tif len(bot.commands) > prelen:\n\t\t\tawait ctx.send('Loaded extension.')\n\t\telse:\n\t\t\tawait ctx.send('Failed to load extension.')\n\telse:\n\t\tawait ctx.send('No such extension.')",
"def reload(self, subdirs: list):\n self.__cogs = [f'cogs.{cog.replace(\".py\",\"\")}'\n for cog in listdir(self.__COG_PATH) if self.__is_cog(cog)]\n\n for sub in subdirs:\n if not sub:\n continue\n sub_path = path.join(self.__COG_PATH, sub)\n if path.isdir(sub_path):\n self.__cogs += [f'cogs.{sub_path}.{cog.replace(\".py\",\"\")}'\n for cog in listdir(sub_path) if self.__is_cog(cog)]",
"def load_extensions(self, *exts):\n for ext in exts:\n try:\n self.load_extension(ext)\n logger.info(f\"Successfully loaded cog {ext}.\")\n except Exception:\n logger.error(f\"Failed to load cog: {ext}: {format_exc()}\")\n\n logger.info(\"Cog loading complete.\")",
"def load_modules(bot, config):\n for item in MODULES:\n importlib.import_module(\"cogs.\" + item).setup(bot, config)",
"async def load_cog(self, ctx, *, cog: str):\n\n try:\n await self.bot.load_extension(f'cogs.{cog}')\n except Exception as e:\n await ctx.send(f'**`ERROR:`** {type(e).__name__} - {e}')\n else:\n await ctx.send('**`SUCCESS`**')",
"async def load_all_extensions(self):\n await self.wait_until_ready()\n await asyncio.sleep(1)\n\n cogs = [\"cogs.member\",\n \"cogs.officer\",\n \"cogs.rolemanager\",\n \"cogs.database\",\n \"cogs.everyone\",\n \"cogs.nodewar\",\n \"cogs.twitch\"]\n\n for extension in cogs:\n try:\n self.load_extension(extension)\n print(f'loaded {extension}')\n except Exception as e:\n error = f'{extension}\\n {type(e).__name__} : {e}'\n print(f'failed to load extension {error}')\n print('-' * 10)\n\n for guild in self.guilds:\n if not discord.utils.get(guild.roles, name=self.manager_role):\n await self.create_bot_manager(guild)\n\n print(f\"\\nUsername: {self.user}\\nID: {self.user.id}\")",
"def load_cogs(client):\n client.load_extension(\"cogs.message_listener\")",
"def loadall(bot) :\n for feature in features :\n load(bot, feature)",
"async def reload(self, ctx:utils.Context, *cog_name:str):\n\n cog_name = 'cogs.' + '_'.join([i for i in cog_name])\n\n try:\n self.bot.load_extension(cog_name)\n except commands.ExtensionAlreadyLoaded:\n try:\n self.bot.unload_extension(cog_name)\n self.bot.load_extension(cog_name)\n except Exception as e:\n await ctx.send('```py\\n' + format_exc() + '```')\n return\n except Exception as e:\n await ctx.send('```py\\n' + format_exc() + '```')\n return\n await ctx.send('Cog reloaded.')",
"async def load_cog(self, ctx, *, cog: str):\n\n try:\n self.bot.load_extension(cog)\n except Exception as e:\n await ctx.send(f'**`ERROR:`** {type(e).__name__} - {e}')\n else:\n await ctx.send('**`SUCCESS`**')",
"def load_coco_ann_files(self):\n if self.type == 'train':\n datasets = [\n (os.path.join(self.dataset_root, 'coco', 'train2014'),\n COCO(os.path.join(self.dataset_root, 'coco',\n 'annotations_trainval2014', 'person_keypoints_train2014.json'))),\n (os.path.join(self.dataset_root, 'coco', 'train2017'),\n COCO(os.path.join(self.dataset_root, 'coco',\n 'annotations_trainval2017', 'person_keypoints_train2017.json'))),\n # (os.path.join(self.dataset_root, 'mpii', 'images'),\n # COCO(os.path.join(self.dataset_root, 'mpii',\n # 'annotations', 'train.json')))\n ]\n else:\n datasets = [\n (os.path.join(self.dataset_root, 'coco', 'val2014'),\n COCO(os.path.join(self.dataset_root, 'coco',\n 'annotations_trainval2014', 'person_keypoints_val2014.json'))),\n (os.path.join(self.dataset_root, 'coco', 'val2017'),\n COCO(os.path.join(self.dataset_root, 'coco',\n 'annotations_trainval2017', 'person_keypoints_val2017.json')))\n ]\n\n dict_list = []\n for dataset_path, dataset in datasets:\n img_ids = dataset.getImgIds()\n\n for idx in img_ids:\n try:\n img = dataset.loadImgs([idx])[0]\n ann_ids = dataset.getAnnIds([idx])\n anns = dataset.loadAnns(ann_ids)\n\n if [ann['keypoints'] for ann in anns] and not all([ann['keypoints'] == [0]*51 for ann in anns]):\n keypoints = [ann['keypoints'] for ann in anns if ann['keypoints'] != [0]*51]\n for i in range(len(keypoints)):\n if 'coco' in dataset_path:\n keypoints[i] = keypoints[i] + ([0, 0, 0] if not (keypoints[i][17] and keypoints[i][20])\n else [(keypoints[i][15] + keypoints[i][18]) // 2, (keypoints[i][16] + keypoints[i][19]) // 2, 1])\n else:\n keypoints[i] = keypoints[i] + ([0, 0, 0] if not (keypoints[i][41] and keypoints[i][38])\n else [(keypoints[i][39] + keypoints[i][36]) // 2, (keypoints[i][40] + keypoints[i][37]) // 2, 1])\n\n if len([kp for kp in keypoints if kp != [0]*54]) <= 4:\n dict_list.append({'path': os.path.join(dataset_path, img[\"file_name\"]),\n 'keypoints': [kp for kp in keypoints if kp != [0]*54]})\n except:\n print(f'Skipped: {idx}')\n\n final_dataset = pd.DataFrame.from_dict(dict_list)\n\n return final_dataset",
"def load_crops(directory=\".\"):\n import os\n import re\n\n folders = next(os.walk(directory))[1]\n crop_rgx = re.compile(r\"^\\.xyz-(.+)\")\n\n names = []\n for folder in folders:\n match = crop_rgx.match(folder)\n if match:\n names.append(match.groups(1)[0])\n\n return {name: Crop(name=name) for name in names}",
"def get(self) -> list:\n return self.__cogs",
"def gci(path):\n parents = os.listdir(path)\n for parent in parents:\n if parent == \"forgifs\" or parent == \"hilariousgifs\":\n pass\n else:\n child = os.path.join(path,parent)\n #print(child)\n if os.path.isdir(child):\n gci(child)\n else:\n filepath.append(child)\n #print(child)",
"def _load_objects(self):\n self._get_package()\n\n object_names = [name for name in dir(self._sdk) if name != \"GATDLSession\" and name != \"SDKInfo\" and name.startswith(\"GA\") and not name.endswith(\"Fetcher\")]\n\n for object_name in object_names:\n obj = getattr(self._sdk, object_name)\n self._objects_mapping[obj.rest_name] = object_name",
"def preload_all_configs(self):\n for _, _, filenames in os.walk(self.configDir):\n for filename in filenames:\n if filename[-3:] == \".py\" and filename != \"__init__.py\":\n configID = filename[0:-3]\n self.load_config(configID)",
"def searchforcog(sfc_cogs, sfc_twogs, sfc_prots):\n cogupdate(sfc_cogs, sfc_twogs, sfc_prots)\n newcogfind(sfc_cogs, sfc_twogs, sfc_prots)\n newtwogfile(sfc_twogs)",
"def load_classes(self):\n\t\t\t# Load class names (name -> label).\n\t\t\tcategories = self.coco.loadCats(self.coco.getCatIds())\n\t\t\tcategories.sort(key=lambda x: x['id'])\n\n\t\t\tself.classes = {}\n\t\t\tself.coco_labels = {}\n\t\t\tself.coco_labels_inverse = {}\n\t\t\tfor c in categories:\n\t\t\t\tself.coco_labels[len(self.classes)] = c['id']\n\t\t\t\tself.coco_labels_inverse[c['id']] = len(self.classes)\n\t\t\t\tself.classes[c['name']] = len(self.classes)\n\n\t\t\t# Also load the reverse (label -> name).\n\t\t\tself.labels = {}\n\t\t\tfor key, value in self.classes.items():\n\t\t\t\tself.labels[value] = key",
"def _load_jsons(self):\n items = []\n labels = []\n segms = []\n for split in self._splits:\n anno = os.path.join(self._root, 'annotations', split) + '.json'\n _coco = COCO(anno)\n self._coco.append(_coco)\n classes = [c['name'] for c in _coco.loadCats(_coco.getCatIds())]\n if not classes == self.classes:\n raise ValueError(\"Incompatible category names with COCO: \")\n assert classes == self.classes\n json_id_to_contiguous = {\n v: k for k, v in enumerate(_coco.getCatIds())}\n if self.json_id_to_contiguous is None:\n self.json_id_to_contiguous = json_id_to_contiguous\n self.contiguous_id_to_json = {\n v: k for k, v in self.json_id_to_contiguous.items()}\n else:\n assert self.json_id_to_contiguous == json_id_to_contiguous\n\n # iterate through the annotations\n image_ids = sorted(_coco.getImgIds())\n for entry in _coco.loadImgs(image_ids):\n filename = entry['file_name']\n dirname = split.split('_')[-1] # \"train\" or \"val\"\n abs_path = os.path.join(self._root, dirname, filename)\n if not os.path.exists(abs_path):\n raise IOError('Image: {} not exists.'.format(abs_path))\n label, segm = self._check_load_bbox(_coco, entry)\n # skip images without objects\n if self._skip_empty and label is None:\n continue\n items.append(abs_path)\n labels.append(label)\n segms.append(segm)\n return items, labels, segms",
"def _load_classes(self):\n\t\t# load class names (name -> label)\n\t\tcategories = self.coco.loadCats(self.coco.getCatIds())\n\t\tcategories.sort(key=lambda x: x['id'])\n\n\t\tself.classes \t\t\t\t= {}\n\t\tself.coco_labels \t\t\t= {}\n\t\tself.coco_labels_inverse \t= {}\n\t\tfor c in categories:\n\t\t\tself.coco_labels[len(self.classes)] = c['id']\n\t\t\tself.coco_labels_inverse[c['id']] = len(self.classes)\n\t\t\tself.classes[c['name']] = len(self.classes)\n\t\tself.labels = {}\n\t\tfor key, value in self.classes.items():\n\t\t\tself.labels[value] = key\n\n\t\tprint(self.coco_labels)\n\t\tprint(self.coco_labels_inverse)\n\t\tprint(self.classes)\n\t\tprint(self.labels)",
"def load_extensions(self):\n extension_module_name = f\"{utils.get_project_name()}.cogs\"\n for extension in CONF.LOADED_EXTENSIONS:\n try:\n self.load_extension(extension_module_name + \".\" + extension)\n LOG.debug(f\"The extension '{extension.split('.')[0]}' has been successfully loaded\")\n except Exception as e:\n message = f\"Failed to load extension '{extension.split('.')[0]}'\"\n LOG.exception(log.get_log_exception_message(message, e))",
"def load_all_files(self):\n\t\tself.get_rankings()\n\t\tself.get_partition()\n\t\tself.__load_factors()\n\t\tself.get_document_associations()\n\t\tself.get_term_associations()",
"def load_images(self):\n for image in self.gltf.images:\n self.images.append(image.load(self.path.parent))",
"def cog_load(self) -> None:\n\n self._check_to_send_cookie.start()",
"def _load_recipes():\n from django.conf import settings\n\n global _RECIPE_CLASSES\n if not _RECIPE_CLASSES:\n paths = [\"evennia.contrib.game_systems.crafting.example_recipes\"]\n if hasattr(settings, \"CRAFT_RECIPE_MODULES\"):\n paths += make_iter(settings.CRAFT_RECIPE_MODULES)\n for path in paths:\n for cls in callables_from_module(path).values():\n if inherits_from(cls, CraftingRecipeBase):\n _RECIPE_CLASSES[cls.name] = cls",
"def load_models(self):\n self.models = {}\n for code in self.soi_codes:\n print(f\"> Loading CNN for species code {code:02d}.\")\n self.models[code] = load_model(self.cnn_locations[code])\n print(\"> Complete.\")",
"def load_images(self):\r\n self.standing_frame = [load_image(\"cat1.png\")]\r\n self.walk_frames_r = [load_image(\"cat2.png\"), load_image(\"cat3.png\"),\r\n load_image(\"cat4.png\")]",
"def load(self):\n for name, item in itertools.chain(\n self._cal_objs.items(),\n self._noise_objs.items()):\n logger.debug(\"load {}\".format(item))\n item.load()"
] | [
"0.81047094",
"0.6737576",
"0.6547387",
"0.64138633",
"0.627226",
"0.61398363",
"0.6122156",
"0.6012189",
"0.58965945",
"0.5682925",
"0.56692845",
"0.5621133",
"0.55831224",
"0.55609417",
"0.5515546",
"0.5498867",
"0.54932314",
"0.5485387",
"0.5458083",
"0.5417369",
"0.54132634",
"0.53835523",
"0.53722394",
"0.53428406",
"0.53315645",
"0.53053653",
"0.5284387",
"0.5272692",
"0.52641946",
"0.5260062"
] | 0.85657376 | 0 |
Reload all loaded cogs | def reload_cogs(self):
for extension in list(self.extensions):
try:
self.reload_extension(extension)
except errors.NoEntryPointError:
log.info("The extension {extension} has no setup function")
pass
except errors.ExtensionAlreadyLoaded:
pass
log.info(f"Reloaded {len(self.extensions)} cogs") | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"async def reload(self, ctx:utils.Context, *cog_name:str):\n\n cog_name = 'cogs.' + '_'.join([i for i in cog_name])\n\n try:\n self.bot.load_extension(cog_name)\n except commands.ExtensionAlreadyLoaded:\n try:\n self.bot.unload_extension(cog_name)\n self.bot.load_extension(cog_name)\n except Exception as e:\n await ctx.send('```py\\n' + format_exc() + '```')\n return\n except Exception as e:\n await ctx.send('```py\\n' + format_exc() + '```')\n return\n await ctx.send('Cog reloaded.')",
"def __load_cogs(self):\n for cog in self.__cogs.get():\n logging.info('loading %s', cog)\n self.load_extension(cog)",
"def reload(self):",
"def reload(self):",
"def load_cogs(self):\n\n path = \"cogs/\" # Should always have a trailing slash\n import_path = path.replace(\"/\", \".\")\n extensions: list[str] = [\n import_path + file.replace(\".py\", \"\")\n for file in os.listdir(path)\n if os.path.isfile(f\"{path}{file}\")\n ]\n\n for extension in extensions:\n try:\n self.load_extension(extension)\n except errors.ExtensionAlreadyLoaded:\n pass\n\n log.info(f\"Loaded {len(self.commands)} commands from {len(self.cogs)} cogs\")",
"def reload(self, subdirs: list):\n self.__cogs = [f'cogs.{cog.replace(\".py\",\"\")}'\n for cog in listdir(self.__COG_PATH) if self.__is_cog(cog)]\n\n for sub in subdirs:\n if not sub:\n continue\n sub_path = path.join(self.__COG_PATH, sub)\n if path.isdir(sub_path):\n self.__cogs += [f'cogs.{sub_path}.{cog.replace(\".py\",\"\")}'\n for cog in listdir(sub_path) if self.__is_cog(cog)]",
"async def reload_cog(self, ctx, *, cog: str):\n\n try:\n self.bot.unload_extension(cog)\n self.bot.load_extension(cog)\n except Exception as e:\n await ctx.send(f'**`ERROR:`** {type(e).__name__} - {e}')\n else:\n await ctx.send('**`SUCCESS`**')",
"def reload(self):\n\n pass",
"async def reload_all(ctx):\n await ext_manager.reload_all()\n await ctx.send(\"Successfully reloaded.\")",
"async def tool_reload(self, ctx, *, cog: str):\n\n try:\n self.bot.unload_extension(cog)\n self.bot.load_extension(cog)\n except Exception as e:\n await zb.bot_errors(ctx,sp.format(e))\n else:\n await ctx.send('**`SUCCESS`**')",
"def reload( self ):\n\t\tCORE.info( 'Reloading resources: modules, categories' )\n\t\tmoduleManager.load()\n\t\tcategoryManager.load()\n\t\tRESOURCES.info( 'Reloading UCR variables' )\n\t\tucr.load()",
"async def giveaway_reload(self, ctx):\n self._load_games()\n await ctx.send(\n f\"Reloaded list of games ({len(self.steam_keys)} games)\")",
"def reload_configurations(self) -> None:\n ...",
"def reload(bot, event, *args):\n bot.config.load()\n bot.memory.load()",
"def reload(self):\n\n dict_of_dicts = {}\n classes = {\n \"BaseModel\": BaseModel,\n \"User\": User,\n \"Amenity\": Amenity,\n \"City\": City,\n \"Place\": Place,\n \"Review\": Review,\n \"State\": State}\n\n try:\n temp_dict = {}\n with open(self.__file_path, \"r\") as r:\n dict_of_dicts = json.load(r)\n for k, v in dict_of_dicts.items():\n if v['__class__'] in classes:\n temp_dict[k] = classes[v['__class__']](**v)\n self.__objects = temp_dict\n except Exception:\n pass",
"def reload(self):\n try:\n with open(self.__file_path, 'r') as f:\n dicts = json.load(f)\n for key, value in dicts.items():\n obj1 = eval(value['__class__'])(**value)\n self.__objects[key] = obj1\n except FileNotFoundError:\n pass",
"def reload(self):\n from models.base_model import BaseModel\n from models.user import User\n from models.amenity import Amenity\n from models.city import City\n from models.place import Place\n from models.review import Review\n from models.state import State\n dict_reload = {}\n try:\n with open(FileStorage.__file_path) as file:\n dict_reload = json.load(file)\n for key, value in dict_reload.items():\n obj = value[\"__class__\"]\n self.__objects[key] = locals()[obj](**value)\n except:\n pass",
"def reload(self):\n self.rpc.call(MsfRpcMethod.CoreReloadModules)",
"def reload_config(self):\n pass",
"def reload(self):\r\n self.unload()\r\n for name in self.strategy_name_list:\r\n name = name.replace(\".py\", \"\").strip()\r\n\r\n try:\r\n strategy_module = __import__(name)\r\n try:\r\n reload(strategy_module)\r\n strategy_object = strategy_module.Strategy(self.gox)\r\n self.strategy_object_list.append(strategy_object)\r\n if hasattr(strategy_object, \"name\"):\r\n self.gox.strategies[strategy_object.name] = strategy_object\r\n\r\n except Exception:\r\n self.gox.debug(\"### error while loading strategy %s.py, traceback follows:\" % name)\r\n self.gox.debug(traceback.format_exc())\r\n\r\n except ImportError:\r\n self.gox.debug(\"### could not import %s.py, traceback follows:\" % name)\r\n self.gox.debug(traceback.format_exc())",
"def reload(self):\n\t\toldlayers = self.layers\n\t\tself.layers = []\n\t\tfor cp, filename, fp in oldlayers:\n\t\t\tcp = cp # pylint\n\t\t\tif fp is None:\n\t\t\t\tself.read(filename)\n\t\t\telse:\n\t\t\t\tself.readfp(fp, filename)",
"def reload():\n import cubegame\n importlib.reload(cubegame)\n exec(\"from cubegame import *\")",
"def reload(self):\n if len(self.files) > 0:\n self.load(self.files, regfiles=self.regions)",
"def refresh(self):\n self.update_from_file()\n self.update_from_env()",
"async def reload(self, ctx, name: str):\n try:\n self.bot.reload_extension(f\"cogs.{name}\")\n except Exception as e:\n return await ctx.send(default.traceback_maker(e))\n await ctx.send(f\"{emote.check} | Reloaded extension **{name}**\")",
"async def admin_reload_cog(self, ctx, cog_name: str):\n embed = discord.Embed(color=admin_color)\n embed.set_author(name=\"🛠️ Admin\")\n embed.add_field(name=\"🔁 Reloading cog\", value=f\"{ctx.author.mention}, le cog **{cog_name}** va bientôt se redémarrer\")\n embed = set_footer(embed, ctx)\n await ctx.send(embed=embed)\n reload_cog(self.bot, cog_name)",
"def reload(self):\n try:\n with open(FileStorage.__file_path) as f:\n objs = json.load(f)\n for obj in objs.values():\n name = obj['__class__']\n del obj['__class__']\n self.new(eval(name)(**obj))\n except FileNotFoundError:\n return",
"def reload(self):\n self.containers = list(filter(_check_alive_container, self.containers))",
"def clear_data_cache():\n load_glove.cache_clear()",
"def refresh(self):\n self.config.read(self.filename)\n self.loadRecentFiles()"
] | [
"0.7033478",
"0.69451255",
"0.6755757",
"0.6755757",
"0.67295516",
"0.6648899",
"0.664729",
"0.6645348",
"0.65898687",
"0.64225334",
"0.6355888",
"0.6350144",
"0.62714565",
"0.6174131",
"0.6154524",
"0.6139652",
"0.61350954",
"0.60747707",
"0.60646397",
"0.6044675",
"0.6041968",
"0.6028552",
"0.6002928",
"0.5964006",
"0.59155643",
"0.5881067",
"0.5878617",
"0.5875209",
"0.5872719",
"0.5867981"
] | 0.773994 | 0 |
Test AnnualLeaveForm with decimal days. | def test_annual_leave_form_decimals(self):
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
data = {
"staff": staffprofile.id,
"year": 2018,
"leave_type": Leave.REGULAR,
"allowed_days": 16.5,
"carried_over_days": 8.5,
}
form = AnnualLeaveForm(data=data)
self.assertTrue(form.is_valid())
annual_leave = form.save()
self.assertEqual(staffprofile, annual_leave.staff)
self.assertEqual(2018, annual_leave.year)
self.assertEqual(16.5, annual_leave.allowed_days)
self.assertEqual(8.5, annual_leave.carried_over_days)
self.assertEqual(Leave.REGULAR, annual_leave.leave_type)
data2 = {
"staff": staffprofile.id,
"year": 2017,
"leave_type": Leave.REGULAR,
"allowed_days": 21,
"carried_over_days": 5,
}
form = AnnualLeaveForm(data=data2, instance=annual_leave)
self.assertTrue(form.is_valid())
form.save()
annual_leave.refresh_from_db()
self.assertEqual(staffprofile, annual_leave.staff)
self.assertEqual(2017, annual_leave.year)
self.assertEqual(21, annual_leave.allowed_days)
self.assertEqual(5, annual_leave.carried_over_days)
self.assertEqual(Leave.REGULAR, annual_leave.leave_type) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_leaveform_max_days(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 7, 10, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n allowed_days=21,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = LeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"start\"][0],\n )\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"end\"][0],\n )",
"def test_leaveform_max_sick_days(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 20, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=0,\n allowed_days=10,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = LeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"Not enough sick days. Available sick days are 10.00\",\n form.errors[\"start\"][0],\n )\n self.assertEqual(\n \"Not enough sick days. Available sick days are 10.00\", form.errors[\"end\"][0]\n )",
"def test_one_day_leave(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 1 day of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=0).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.PENDING, leave.review_status)\n self.assertEqual(\n 1,\n get_taken_leave_days(\n staffprofile, Leave.PENDING, Leave.REGULAR, 2017, 2017\n ),\n )",
"def test_leaveform_apply(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.PENDING, leave.review_status)",
"def test_holidays_validate(self):\n self.request.sudo(self.user_1.id).holidays_validate()\n\n accrual = self.employee.get_leave_accrual(self.leave_type.id)\n self.assertEqual(accrual.total_hours, 22.5)",
"def test_leaveform_admin(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n \"review_status\": Leave.APPROVED,\n }\n\n form = LeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.APPROVED, leave.review_status)",
"def test_sickleave_apply(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.SICK, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.PENDING, leave.review_status)",
"def test_leaveform_start_end(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 1, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = LeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(1, len(form.errors.keys()))\n self.assertEqual(\"end must be greater than start\", form.errors[\"end\"][0])\n\n # end year and start year must be the same\n\n end = datetime(2018, 6, 1, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n data2 = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form2 = LeaveForm(data=data2)\n self.assertFalse(form2.is_valid())\n self.assertEqual(2, len(form2.errors.keys()))\n self.assertEqual(\n \"start and end must be from the same year\", form2.errors[\"start\"][0]\n )\n self.assertEqual(\n \"start and end must be from the same year\", form2.errors[\"end\"][0]\n )",
"def test_leave_oversubscribe_off(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 40 days of leave\n start = datetime(2017, 6, 1, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 7, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=0,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Mini retirement\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"start\"][0],\n )\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"end\"][0],\n )",
"def test_leaveform_no_overlap(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n mommy.make(\n \"small_small_hr.Leave\",\n leave_type=Leave.REGULAR,\n start=start,\n end=end,\n review_status=Leave.APPROVED,\n staff=staffprofile,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"you cannot have overlapping leave days\", form.errors[\"start\"][0]\n )\n self.assertEqual(\n \"you cannot have overlapping leave days\", form.errors[\"end\"][0]\n )",
"def test_leaveform_process_with_overlap(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n # make sure leave obj already exists for said dates\n mommy.make(\n \"small_small_hr.Leave\",\n staff=staffprofile,\n start=start,\n end=end,\n leave_type=Leave.REGULAR,\n review_status=Leave.APPROVED,\n )\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n \"review_status\": Leave.REJECTED,\n }\n\n form = LeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.REJECTED, leave.review_status)",
"def test_sickleave_process(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n \"review_status\": Leave.REJECTED,\n }\n\n form = LeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.SICK, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.REJECTED, leave.review_status)",
"def test_leave_oversubscribe(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 40 days of leave\n start = datetime(2017, 6, 1, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 7, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=0,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Mini retirement\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n\n # make it approved\n obj_type = ContentType.objects.get_for_model(leave)\n review = ModelReview.objects.get(content_type=obj_type, object_id=leave.id)\n review.review_status = ModelReview.APPROVED\n review.save()\n leave.refresh_from_db()\n\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=39).days, (leave.end - leave.start).days)\n self.assertEqual(\"Mini retirement\", leave.review_reason)\n self.assertEqual(Leave.APPROVED, leave.review_status)\n self.assertEqual(\n 40,\n get_taken_leave_days(\n staffprofile, Leave.APPROVED, Leave.REGULAR, 2017, 2017\n ),\n )\n self.assertEqual(-19, staffprofile.get_available_leave_days(year=2017))",
"def Daysleftverification():\n pass",
"def test_day_percentage(self):\n day_percentage = utils.day_percentage()\n self.assertIsInstance(day_percentage, float)\n self.assertGreaterEqual(day_percentage, 0)\n self.assertLess(day_percentage, 1)",
"def test_calculate_retention_fee():\n assert calculate_retention_fee(2578) == Decimal('128.91')",
"def test_centenary_positive():\n assert is_leap_year(2400) is True",
"def test_calculate_working_days():\n assert (\n calculate_working_days(parse('2020-01-01'), parse('2020-03-31')) == 64\n )",
"def test_period(self):\r\n with self.assertRaises(ParseException):\r\n calc.evaluator({}, {}, '.')\r\n with self.assertRaises(ParseException):\r\n calc.evaluator({}, {}, '1+.')",
"def test_initialization_of_homework_deadline():\n expected = datetime.timedelta(days=1)\n assert oop_hw.deadline_days == expected",
"def test_trailing_period(self):\r\n self.assertEqual(4.0, calc.evaluator({}, {}, '4.'))",
"def test_centenary_negative():\n assert is_leap_year(2100) is False",
"def test_date_interval(self, init_date, end_date):\n self.calc_earning(self.security[(self.security['Date'] > init_date) &\n (self.security['Date'] < end_date)])",
"def test_date_valid_data(self):\n selected_menu_form = SelectedMenuForm(self.possible_meals_choices)\n\n self.assertTrue(selected_menu_form)",
"def test_days():\n assert_equal(datetime.timedelta(days=1), convert_delta(\"1d\"))",
"def test_end_of_year(self):\n input_ = [\n self.indicator_record(date=datetime.date(2005, 12, 29),\n end_date=datetime.date(2006, 1, 29),\n value=0.2276),\n ]\n output = self.expander._daily_three_field_indicator_expander(input_)\n\n expected = [(datetime.date(2005, 12, 29), datetime.date(2006, 1, 29)),\n (datetime.date(2005, 12, 30), datetime.date(2006, 1, 30)),\n (datetime.date(2005, 12, 31), datetime.date(2006, 1, 31)),\n (datetime.date(2006, 1, 1), datetime.date(2006, 2, 1)),\n (datetime.date(2006, 1, 2), datetime.date(2006, 2, 2)),\n ]\n\n actual = [(record.date, record.end_date) for record in output[:5]]\n\n self.assertEqual(expected, actual)",
"def testMonthlyPayrollTotals(self):\n self._setupMonthlyTotals()\n totals = self.rows[-1]\n\n work_total = Decimal('110.00')\n self.assertEquals(totals['work_total'], work_total)\n\n self.assertEquals(len(totals['billable']), 1 + 1)\n for entry in totals['billable']:\n self.assertEquals(entry['hours'], Decimal('90.00'))\n self.assertEquals(entry['percent'],\n Decimal('90.00') / work_total * 100)\n\n self.assertEquals(len(totals['nonbillable']), 1 + 1)\n for entry in totals['nonbillable']:\n self.assertEquals(entry['hours'], Decimal('20.00'))\n self.assertEquals(entry['percent'],\n Decimal('20.00') / work_total * 100)\n\n self.assertEquals(len(totals['leave']), 2 + 1)\n sick_index = self.labels['leave'].index(self.sick.name)\n vacation_index = self.labels['leave'].index(self.vacation.name)\n self.assertEquals(totals['leave'][sick_index]['hours'],\n Decimal('80.00'))\n self.assertEquals(totals['leave'][sick_index]['percent'],\n Decimal('80.00') / Decimal('120.00') * 100)\n self.assertEquals(totals['leave'][vacation_index]['hours'],\n Decimal('40.00'))\n self.assertEquals(totals['leave'][vacation_index]['percent'],\n Decimal('40.00') / Decimal('120.00') * 100)\n self.assertEquals(totals['leave'][-1]['hours'], Decimal('120.00'))\n self.assertEquals(totals['leave'][-1]['percent'], Decimal('100.00'))\n\n self.assertEquals(totals['grand_total'], Decimal('230.00'))",
"def test_positive():\n assert is_leap_year(2016) is True",
"def test_one_date(self):\n result = beautiful_days_at_the_movies(10, 10, 6)\n self.assertEquals(result, 0)",
"def test_calculate_contract_duration():\n assert calculate_contract_duration(\n parse('2020-01-01'), parse('2020-03-31')\n ) == relativedelta(months=+2, days=+30)"
] | [
"0.7281258",
"0.68907803",
"0.6889661",
"0.6779469",
"0.6674891",
"0.64672923",
"0.6454044",
"0.64290416",
"0.63169855",
"0.627206",
"0.6265622",
"0.62043196",
"0.5948398",
"0.5836956",
"0.57466686",
"0.5738188",
"0.57262295",
"0.5616313",
"0.5578795",
"0.5547616",
"0.5536841",
"0.55309606",
"0.54699963",
"0.5452832",
"0.54478097",
"0.5440764",
"0.5430863",
"0.54198056",
"0.5418377",
"0.5381144"
] | 0.7893113 | 0 |
Test OverTimeForm with overlap for existing objects. | def test_overtime_form_process_with_overlap(self):
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
# 1 hour of overtime
start = datetime(2017, 6, 5, 18, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
end = datetime(2017, 6, 5, 19, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
# make sure object already exists
mommy.make(
"small_small_hr.OverTime",
start=start.time(),
end=end.time(),
review_status=OverTime.APPROVED,
date=start.date(),
staff=staffprofile,
)
data = {
"staff": staffprofile.id,
"date": start.date(),
"start": start.time(),
"end": end.time(),
"review_reason": "Extra work",
"review_status": OverTime.REJECTED,
}
form = OverTimeForm(data=data)
self.assertTrue(form.is_valid())
overtime = form.save()
self.assertEqual(staffprofile, overtime.staff)
self.assertEqual(start.date(), overtime.date)
self.assertEqual(start.time(), overtime.start)
self.assertEqual(end.time(), overtime.end)
self.assertEqual(
timedelta(seconds=3600).seconds, overtime.get_duration().seconds
)
self.assertEqual("Extra work", overtime.review_reason)
self.assertEqual(OverTime.REJECTED, overtime.review_status) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_overtime_form_apply_no_overlap(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 hours of overtime\n start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 5, 6, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.OverTime\",\n start=start.time(),\n end=end.time(),\n review_status=OverTime.APPROVED,\n date=start.date,\n staff=staffprofile,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"date\": start.date(),\n \"start\": start.time(),\n \"end\": end.time(),\n \"review_reason\": \"Extra work\",\n }\n\n form = ApplyOverTimeForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(3, len(form.errors.keys()))\n self.assertEqual(\n \"you cannot have overlapping overtime hours on the same day\",\n form.errors[\"start\"][0],\n )\n self.assertEqual(\n \"you cannot have overlapping overtime hours on the same day\",\n form.errors[\"date\"][0],\n )\n self.assertEqual(\n \"you cannot have overlapping overtime hours on the same day\",\n form.errors[\"end\"][0],\n )",
"def overlap_with(self, other):",
"def overlaps(self, other): # -> bool:\n ...",
"def can_overlap(self):\n return False",
"def check_overlaps(self, verbose = False):\n if hasattr(self.phot, \"data\") and hasattr(self, 'spec'):\n for i, spectrum in enumerate(self.spec):\n if verbose:print(i, spectrum)\n for j, filtername in enumerate(self.phot.data_filters):\n if verbose:print(j, filtername)\n\n if hasattr(self.phot.data_filters[filtername], \"_lower_edge\") and \\\n hasattr(self.phot.data_filters[filtername], \"_upper_edge\") and \\\n hasattr(self.spec[spectrum], \"data\"):\n blue_bool = self.phot.data_filters[filtername]._lower_edge > self.spec[spectrum].min_wavelength\n red_bool = self.phot.data_filters[filtername]._upper_edge < self.spec[spectrum].max_wavelength\n\n if blue_bool and red_bool:\n within = True\n else:\n within = False\n\n if verbose:print(within)\n if within:\n self.spec[spectrum]._add_to_overlapping_filters(filtername, verbose=verbose)\n else:\n warnings.warn(\"SNClass.check_overlaps - something went wrong... no data?\")\n pass",
"def overlaps(self, other):\n pass",
"def check_overlaps(self, filter_objects, verbose = False):\n if isinstance(FilterClass, type(filter_objects)):\n ## if only one filter is given\n filter_objects = [filter_objects, ]\n\n\n for i, filter_name in enumerate(filter_objects):\n if isinstance(FilterClass, type(filter_name)):\n filter_obj = filter_name\n elif isinstance(filter_objects, dict):\n filter_obj = filter_objects[filter_name]\n else:\n filter_obj = filter_objects[i]\n\n if verbose:print(i, filter_obj)\n\n if hasattr(filter_obj, \"_lower_edge\") and \\\n hasattr(filter_obj, \"_upper_edge\") and \\\n hasattr(self, \"data\"):\n blue_bool = filter_obj._lower_edge > self.min_wavelength\n red_bool = filter_obj._upper_edge < self.max_wavelength\n\n if blue_bool and red_bool:\n within = True\n else:\n within = False\n if verbose: print(within)\n if within:\n self._add_to_overlapping_filters(filter_name, verbose=verbose)\n else:\n warnings.warn(\"SpectrumClass.check_overlaps - something went wrong... no overlaps or data?\")\n if self._n_overlapping_filters == 1:\n self._overlapping_filter_list = [self._overlapping_filter_list,] ## added to fix issue #27\n pass",
"def test_overtime_form_start_end(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n start = datetime(2017, 6, 5, 6, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 5, 5, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n data = {\n \"staff\": staffprofile.id,\n \"date\": start.date(),\n \"start\": start.time(),\n \"end\": end.time(),\n \"review_reason\": \"Extra work\",\n }\n\n form = OverTimeForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(1, len(form.errors.keys()))\n self.assertEqual(\"end must be greater than start\", form.errors[\"end\"][0])",
"def time_overlap(d1, d2):\n gt1, gt2, vt1, vt2 = parse_date(d1[\"t1\"]), parse_date(d1[\"t2\"]), parse_date(d2[\"t1\"]), parse_date(d2[\"t2\"])\n return (gt1 != vt2) and (vt1 != gt2) and (gt1 <= vt2) and (vt1 <= gt2)",
"def strong_overlapping(time_1, time_2):\n\n if (time_1[0] <= time_2[0] < time_1[1]) or (time_2[0] <= time_1[0] < time_2[1]):\n return True\n\n return False",
"def check_sim_overlaps(self, verbose = False):\n if hasattr(self.phot, \"data\") and hasattr(self, 'spec'):\n for i, spectrum in enumerate(self.sim_spec):\n if verbose:print(i, spectrum)\n for j, filtername in enumerate(self.phot.data_filters):\n if verbose:print(j, filtername)\n\n if hasattr(self.phot.data_filters[filtername], \"_lower_edge\") and \\\n hasattr(self.phot.data_filters[filtername], \"_upper_edge\") and \\\n hasattr(self.sim_spec[spectrum], \"data\"):\n blue_bool = self.phot.data_filters[filtername]._lower_edge > self.sim_spec[spectrum].min_wavelength\n red_bool = self.phot.data_filters[filtername]._upper_edge < self.sim_spec[spectrum].max_wavelength\n\n if blue_bool and red_bool:\n within = True\n else:\n within = False\n\n if verbose:print(within)\n if within:\n self.sim_spec[spectrum]._add_to_overlapping_filters(filtername, verbose=verbose)\n else:\n warnings.warn(\"SNClass.check_sim_overlaps - something went wrong... no data?\")\n pass",
"def test_overlapping_events(sample_events, woodshop, caplog):\n caplog.set_level(logging.INFO)\n event1, event2 = sample_events.make_overlapping_events()\n overlap_events(event1, event2, woodshop, woodshop, [woodshop])\n assert len(caplog.messages) == 1\n message = caplog.messages[0]\n assert \"Schedule conflict: place='Woodshop'\" in message\n expected_conflict_times = \"Conflict(start_time='{}', end_time='{}',\".format(\n event2.start_time, event1.end_time)\n assert expected_conflict_times in message\n assert event1.meetup_id in message\n assert event2.meetup_id in message",
"def overlap(component1, component2):\n if component1[0].start <= component2[0].stop and component2[0].start <= component1[0].stop:\n if component1[1].start <= component2[1].stop and component2[1].start <= component1[1].stop:\n return True\n return False",
"def test_overlapping_events_containing_place(sample_events, shops, woodshop, metalshop, caplog):\n caplog.set_level(logging.INFO)\n places = [shops, woodshop, metalshop]\n event1, event2 = sample_events.make_overlapping_events()\n overlap_events(event1, event2, shops, shops, places)\n assert len(caplog.messages) == 1\n message = caplog.messages[0]\n assert \"Schedule conflict: place='Shops'\" in message\n expected_conflict_times = \"Conflict(start_time='{}', end_time='{}',\".format(\n event2.start_time, event1.end_time)\n assert expected_conflict_times in message\n assert event1.meetup_id in message\n assert event2.meetup_id in message",
"def can_overlap(self):\n return self.is_open",
"def can_overlap(self):\n return self.is_open",
"def test_overlap(self):\r\n rect1 = Rectangle(10, 20, 30, 40)\r\n rect2 = Rectangle(50, 60, 70, 80)\r\n\r\n # overlap should be commutative\r\n assert not rect1.overlap_with(rect2)\r\n assert not rect2.overlap_with(rect1)\r\n assert not Rectangle.overlap(rect1, rect2)\r\n assert not Rectangle.overlap(rect2, rect1)\r\n\r\n rect1 = Rectangle(-10, -20, 10, 60)\r\n rect2 = Rectangle(0, 50, 100, 200)\r\n assert rect1.overlap_with(rect2)\r\n assert rect2.overlap_with(rect1)\r\n assert Rectangle.overlap(rect1, rect2)\r\n assert Rectangle.overlap(rect2, rect1)\r\n\r\n # rectangles with only same boarder are not considered overlapped\r\n rect1 = Rectangle(-30, -10, -20, 0)\r\n rect2 = Rectangle(-20, -5, 30, 20)\r\n rect3 = Rectangle(-40, 0, 30, 20)\r\n assert not rect1.overlap_with(rect2)\r\n assert not rect1.overlap_with(rect3)\r\n assert not Rectangle.overlap(rect2, rect1)\r\n assert not Rectangle.overlap(rect3, rect1)",
"def test_overlap(self):\r\n t = Expense(name = \"fake lunch\",\r\n amount = 1.,\r\n on = (WeeklyRecurring(FR,\r\n fromdt = self.fromdt,\r\n todt = self.todt),\r\n DailyRecurring(fromdt = self.fromdt, \r\n todt = self.todt)))\r\n\r\n self.m.addTransaction(t)\r\n self.assertEqual(self.m.totalSaved(self.fromdt, self.todt), -365.)",
"def _is_overlapping(last_tf_end, cur_tf_start, tolerance_months=0):\n return last_tf_end <= _substract_months(cur_tf_start, months=tolerance_months)",
"def overlap(\n state: State, # pylint: disable=unused-argument\n action: Action, # pylint: disable=unused-argument\n next_state: State,\n *,\n object_type: Type[GridObject],\n) -> bool:\n return isinstance(next_state.grid[next_state.agent.position], object_type)",
"def has_overlap(vevent, start, end):\n event_start = vevent.dtstart.value\n event_end = vevent.dtend.value\n\n assert not is_naive(start), 'start dt is naive'\n assert not is_naive(end), 'end dt is naive'\n assert not is_naive(event_start), 'event_start dt is naive'\n assert not is_naive(event_end), 'event_end dt is naive'\n\n if start <= event_start <= end: # starts today\n return True\n if start <= event_end <= end: # ends today\n return True\n if event_start <= start and end <= event_end: # spans over today\n return True\n return False",
"def test_overlap():\n events = [['Event', '2017-11-21T10:00:00-08:00', '2017-11-21T11:00:00-08:00'],\n ['Event', '2017-11-21T10:30:00-08:00', '2017-11-21T11:20:00-08:00']]\n freetimes, _ = free(events, 9, 0, 17, 0, day_range, 30)\n fmt_freetime = output_format(freetimes)\n for i in fmt_freetime:\n print(i)\n assert fmt_freetime == ['Tue, Nov 21, 9:00 am to Tue, Nov 21, 10:00 am.',\n 'Tue, Nov 21, 11:20 am to Tue, Nov 21, 5:00 pm.',\n 'Wed, Nov 22, 9:00 am to Wed, Nov 22, 5:00 pm.',\n 'Thu, Nov 23, 9:00 am to Thu, Nov 23, 5:00 pm.',\n 'Fri, Nov 24, 9:00 am to Fri, Nov 24, 5:00 pm.',\n 'Sat, Nov 25, 9:00 am to Sat, Nov 25, 5:00 pm.',\n 'Sun, Nov 26, 9:00 am to Sun, Nov 26, 5:00 pm.',\n 'Mon, Nov 27, 9:00 am to Mon, Nov 27, 5:00 pm.']",
"def check_recon_overlaps(self, verbose = False):\n if hasattr(self.phot, \"data\") and hasattr(self, 'recon_spec'):\n for i, spectrum in enumerate(self.recon_spec):\n if verbose:print(i, spectrum)\n for j, filtername in enumerate(self.phot.data_filters):\n if verbose:print(j, filtername)\n\n if hasattr(self.phot.data_filters[filtername], \"_lower_edge\") and \\\n hasattr(self.phot.data_filters[filtername], \"_upper_edge\") and \\\n hasattr(self.recon_spec[spectrum], \"data\"):\n blue_bool = self.phot.data_filters[filtername]._lower_edge > self.recon_spec[spectrum].min_wavelength\n red_bool = self.phot.data_filters[filtername]._upper_edge < self.recon_spec[spectrum].max_wavelength\n\n if blue_bool and red_bool:\n within = True\n else:\n within = False\n\n if verbose:print(within)\n if within:\n self.recon_spec[spectrum]._add_to_overlapping_filters(filtername)\n else:\n warnings.warn(\"SNClass.check_sim_overlaps - something went wrong... no data?\")\n pass",
"def compute_time_overlap(appointment1, appointment2):\n assert appointment1.date_ == appointment2.date_\n print(\"Checking for time overlap on \\\"{}\\\"...\".\n format(appointment1.date_))\n print(\"Times to check: {}, {}\".\n format(appointment1.time_range_, appointment2.time_range_))\n\n latest_start = max(appointment1.start_time_, appointment2.start_time_)\n earliest_end = min(appointment1.end_time_, appointment2.end_time_)\n\n delta = (earliest_end - latest_start).seconds\n overlap = max(0, delta)\n if overlap == 0:\n print(\"No time overlap.\")\n return False\n\n print(\"\\033[93mFound time overlap.\\033[0m\")\n return True",
"def check_overtime(self, cr, uid, att, context=None):\n if att:\n overtime_obj = self.pool.get('hr.overtime')\n orertime_ids = overtime_obj.search(cr, uid, [('employee_id', '=', att.employee_id.id),\n ('mode', '=', 'by_employee'),\n ('name', '=', att.day_tz),\n ('datetime_start', '<=', att.name),\n ('datetime_stop', '>=', att.name),\n ('state', 'not in', ['cancel', 'confirmed', 'done'])\n ])\n if orertime_ids:\n return True\n return False",
"def __is_position_overlapped(self, position, exon):\n start, end = self.__get_exon_coordinates(exon)\n return position >= start and position <= end",
"def CheckOverlap(self, via):\r\n\r\n for item in self.overlappings:\r\n if type(item) is pcbnew.PAD:\r\n if item.GetBoundingBox().Intersects(via.GetBoundingBox()):\r\n return True\r\n elif type(item) is pcbnew.PCB_VIA:\r\n # Overlapping with vias work best if checking is performed by intersection\r\n if item.GetBoundingBox().Intersects(via.GetBoundingBox()):\r\n return True\r\n elif type(item) in [pcbnew.ZONE, pcbnew.FP_ZONE]:\r\n if item.GetBoundingBox().Intersects(via.GetBoundingBox()):\r\n return True\r\n elif type(item) is pcbnew.PCB_TRACK:\r\n if item.GetBoundingBox().Intersects(via.GetBoundingBox()):\r\n width = item.GetWidth()\r\n dist, _ = pnt2line(via.GetPosition(), item.GetStart(), item.GetEnd())\r\n if dist <= self.clearance + width // 2 + via.GetWidth() / 2:\r\n return True\r\n return False",
"def overlap(t1start, t1end, t2start, t2end):\n\n return (t1start <= t2start <= t1end) or (t2start <= t1start <= t2end)",
"def test_overlapping_events_places_contained_in_unchecked(\n sample_events, blacksmithing, forge, metalshop, caplog):\n caplog.set_level(logging.INFO)\n places = [blacksmithing, forge, metalshop]\n event1, event2 = sample_events.make_overlapping_events()\n overlap_events(event1, event2, blacksmithing, blacksmithing, places)\n assert len(caplog.messages) == 2\n message = caplog.messages[0]\n assert \"Schedule conflict: place='Forge'\" in message \\\n or \"Schedule conflict: place='Metal Shop'\" in message\n expected_conflict_times = \"Conflict(start_time='{}', end_time='{}',\".format(\n event2.start_time, event1.end_time)\n assert expected_conflict_times in message\n assert event1.meetup_id in message\n assert event2.meetup_id in message",
"def test_leaveform_no_overlap(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n mommy.make(\n \"small_small_hr.Leave\",\n leave_type=Leave.REGULAR,\n start=start,\n end=end,\n review_status=Leave.APPROVED,\n staff=staffprofile,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"you cannot have overlapping leave days\", form.errors[\"start\"][0]\n )\n self.assertEqual(\n \"you cannot have overlapping leave days\", form.errors[\"end\"][0]\n )"
] | [
"0.76187646",
"0.6542121",
"0.6452166",
"0.62871176",
"0.62107134",
"0.6148731",
"0.6093373",
"0.6084772",
"0.60275173",
"0.59073406",
"0.59055",
"0.58857536",
"0.5885605",
"0.58551836",
"0.58335024",
"0.58335024",
"0.5771665",
"0.5764608",
"0.57556933",
"0.57429683",
"0.5738605",
"0.5718679",
"0.5709321",
"0.57056177",
"0.57035595",
"0.5681182",
"0.5680853",
"0.56693244",
"0.5666226",
"0.56542647"
] | 0.75105304 | 1 |
Test OverTimeForm start and end fields. | def test_overtime_form_start_end(self):
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
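# end time is deliberately one hour before the start time, so validation should fail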
start = datetime(2017, 6, 5, 6, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
end = datetime(2017, 6, 5, 5, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
data = {
"staff": staffprofile.id,
"date": start.date(),
"start": start.time(),
"end": end.time(),
"review_reason": "Extra work",
}
form = OverTimeForm(data=data)
self.assertFalse(form.is_valid())
self.assertEqual(1, len(form.errors.keys()))
self.assertEqual("end must be greater than start", form.errors["end"][0]) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_field_start_time_not_bigger_than_end_time(self):\n form = self.make_FieldForm_validated(start_time=\"10:40\", end_time=\"9:00\")\n self.assertListEqual([\"__all__\"], list(form.errors))",
"def test_overtime_form_process_with_overlap(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 hours of overtime\n start = datetime(2017, 6, 5, 18, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 5, 19, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n # make sure object already exists\n mommy.make(\n \"small_small_hr.OverTime\",\n start=start.time(),\n end=end.time(),\n review_status=OverTime.APPROVED,\n date=start.date,\n staff=staffprofile,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"date\": start.date(),\n \"start\": start.time(),\n \"end\": end.time(),\n \"review_reason\": \"Extra work\",\n \"review_status\": OverTime.REJECTED,\n }\n\n form = OverTimeForm(data=data)\n self.assertTrue(form.is_valid())\n overtime = form.save()\n self.assertEqual(staffprofile, overtime.staff)\n self.assertEqual(start.date(), overtime.date)\n self.assertEqual(start.time(), overtime.start)\n self.assertEqual(end.time(), overtime.end)\n self.assertEqual(\n timedelta(seconds=3600).seconds, overtime.get_duration().seconds\n )\n self.assertEqual(\"Extra work\", overtime.review_reason)\n self.assertEqual(OverTime.REJECTED, overtime.review_status)",
"def test_start_and_end_equal(self):\n start = timezone.now()\n end = start\n with six.assertRaisesRegex(self, ValidationError, self.msg):\n validate_timeframe(start, end)",
"def check_time(self,data,data_orginal):\n if data['start_time'] > data['end_time']:\n raise ValidationError('event end time should be greater than start time.')",
"def test_overtime_form_apply_no_overlap(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 hours of overtime\n start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 5, 6, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.OverTime\",\n start=start.time(),\n end=end.time(),\n review_status=OverTime.APPROVED,\n date=start.date,\n staff=staffprofile,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"date\": start.date(),\n \"start\": start.time(),\n \"end\": end.time(),\n \"review_reason\": \"Extra work\",\n }\n\n form = ApplyOverTimeForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(3, len(form.errors.keys()))\n self.assertEqual(\n \"you cannot have overlapping overtime hours on the same day\",\n form.errors[\"start\"][0],\n )\n self.assertEqual(\n \"you cannot have overlapping overtime hours on the same day\",\n form.errors[\"date\"][0],\n )\n self.assertEqual(\n \"you cannot have overlapping overtime hours on the same day\",\n form.errors[\"end\"][0],\n )",
"def EndAfterStart(obj):\n if obj.end_date is None: return\n if obj.end_date <= obj.start_date:\n raise interface.Invalid(\n _(\"End Date must be after Start Date\"), \n \"start_date\", \n \"end_date\")",
"def test_leaveform_start_end(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 1, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = LeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(1, len(form.errors.keys()))\n self.assertEqual(\"end must be greater than start\", form.errors[\"end\"][0])\n\n # end year and start year must be the same\n\n end = datetime(2018, 6, 1, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n data2 = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form2 = LeaveForm(data=data2)\n self.assertFalse(form2.is_valid())\n self.assertEqual(2, len(form2.errors.keys()))\n self.assertEqual(\n \"start and end must be from the same year\", form2.errors[\"start\"][0]\n )\n self.assertEqual(\n \"start and end must be from the same year\", form2.errors[\"end\"][0]\n )",
"def test_charter_form_end_date(self):\n\n data = {\n \"language\": 1,\n \"countries\": 1,\n \"start_date_month\": \"1\",\n \"start_date_day\": \"1\",\n \"start_date_year\": \"2015\",\n \"end_date_month\": \"1\",\n \"end_date_day\": \"1\",\n \"end_date_year\": \"2015\",\n \"number\": \"12345\",\n \"lead_dept\": 1,\n \"contact_person\": \"Vicky Leong\",\n \"created_by\": \"Vicky Leong\"\n }\n charter_form = CharterForm(data=data)\n result = charter_form.is_valid()\n self.assertFalse(result)",
"def test_start_before_end(self):\n start = timezone.now()\n end = start + timedelta(seconds=1)\n actual = validate_timeframe(start, end)\n expected = None\n self.assertEqual(actual, expected)",
"def clean(self):\n super(Event, self).clean()\n\n if self.start and self.end and self.end < self.start:\n raise ValidationError({'start': \"Start time must be before end time\"})",
"def test_start_end_hour():\n # sh = None\n # eh = None\n # data = None\n # result = makesky.start_end_hour(sh, eh, data)\n pass",
"def test_end_before_start(self):\n start = timezone.now()\n end = start - timedelta(seconds=1)\n with six.assertRaisesRegex(self, ValidationError, self.msg):\n validate_timeframe(start, end)",
"def validateTiming(obj):\n if obj.start_time:\n if obj.end_time:\n return obj.start_time <= timezone.now() and obj.end_time > timezone.now()\n else:\n return obj.start_time <= timezone.now()\n else:\n if obj.end_time:\n return obj.end_time > timezone.now()\n else:\n return True",
"def test_time_field():",
"def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n set_fields_to_required(self, ['end_date'])",
"def test_end_date(self):\n self.assertEqual(self.active.end_date, self.active.start_date + timedelta(3))",
"def test_start_older_than_last_contact(self):\n\n # When\n form = PartnershipModelForm(\n data={'start_date': self.partnership.start_date,\n 'last_contact_date': self.partnership.last_contact_date,\n 'status': self.partnership.status})\n\n # Then\n self.assertTrue(form.is_valid())",
"def validate(self, data):\n l = len(data[\"start_times\"])\n for i in range(l):\n if data[\"start_times\"][i]>=data['end_times'][i]:\n raise serializers.ValidationError(\"Start times should come before end times\") \n return data",
"def test_modify_slot_end_early(self):\n coupon = COUPON_FACTORY.create_coupon()\n slot = Slot.objects.create(site_id=2,\n business_id=coupon.offer.business.id,\n start_date=datetime.date.today(),\n end_date=datetime.date.today() + datetime.timedelta(2))\n SlotTimeFrame.objects.create(slot=slot, coupon_id=coupon.id,\n start_datetime=datetime.datetime.today(),\n end_datetime=datetime.datetime.today() + datetime.timedelta(1))\n slot.end_date = datetime.date.today() + datetime.timedelta(1)\n with self.assertRaises(ValidationError) as context_manager:\n slot.save()\n self.fail('Invalid slot saved.')\n LOG.debug(context_manager.exception)",
"def test_parse_valid_time_range(self):\n from azure.servicefabric.models.time_range import (\n TimeRange\n )\n from azure.servicefabric.models.time_of_day import (\n TimeOfDay\n )\n\n res = sf_c.parse_time_range({\n 'StartTime': {\n 'Hour': 0,\n 'Minute': 0\n },\n 'EndTime': {\n 'Hour': 23,\n 'Minute': 59,\n }\n })\n\n self.assertIsInstance(res, TimeRange)\n\n self.assertIsInstance(res.start_time, TimeOfDay)\n self.assertEqual(res.start_time.hour, 0)\n self.assertEqual(res.start_time.minute, 0)\n\n self.assertIsInstance(res.end_time, TimeOfDay)\n self.assertEqual(res.end_time.hour, 23)\n self.assertEqual(res.end_time.minute, 59)",
"def test_Fieldform_has_fields(self):\n self.assertSequenceEqual(\n [\n \"date\",\n \"start_time\",\n \"end_time\",\n \"temperature\",\n \"humidity\",\n \"coordinator\",\n \"staff\",\n \"parcel_id\",\n ],\n list(self.Fieldform.fields),\n )",
"def _check_hours(self):\n for record in self:\n if record.start_hour or record.end_hour:\n if int(record.start_hour) == int(record.end_hour):\n raise ValidationError(_('Please enter different Start Hours and End Hours!'))\n if int(record.start_hour) > int(record.end_hour):\n raise ValidationError(_('Start hours can not be greater than end hours for the day.'))",
"def clean(self):\n cleaned_data = self.cleaned_data\n startTime = cleaned_data['start_time']\n endTime = cleaned_data['end_time']\n if startTime > endTime:\n msg = _(u'The start time must occur before the end time of the event')\n self._errors['start_time'] = self.error_class([msg])\n\n return cleaned_data",
"def assert_end_time_with_price(self, data, **kwargs):\n if \"price\" in data and \"end_time\" not in data:\n raise ValidationError(\"If the price is included, you must also include the end time.\")\n elif \"price\" not in data and \"end_time\" in data:\n raise ValidationError(\"If the end time is included, you must also include the price.\")\n if \"price\" in data and \"estimated_price\" in data:\n raise ValidationError(\"Rental should have one of either price or estimated_price.\")",
"def test_range__no_end_date(self):\n data = self._data()\n data.pop('end_date')\n response = self._get(get_kwargs=data)\n self._check_response(response, 104)",
"def test_start_and_last_contact_less_than_current(self):\n\n # When\n form = PartnershipModelForm(\n data={'start_date': self.partnership.start_date - dt.timedelta(days=10),\n 'last_contact_date': self.partnership.last_contact_date - dt.timedelta(days=10),\n 'status': self.partnership.status})\n # Then\n self.assertTrue(form.is_valid())",
"def simulatedate_checkinput(start, end):\n start_year, start_month, start_day = parse_string_datetime(start)\n end_year, end_month, end_day = parse_string_datetime(end)\n if datetime_checkinput(start_year, start_month, start_day) == 0 and datetime_checkinput(end_year, end_month, end_day) == 0:\n start_time = datetime.datetime(start_year, start_month, start_day)\n end_time = datetime.datetime(end_year, end_month, end_day)\n if start_time < end_time:\n return 0\n else:\n raise Invaliddatetimeinput",
"def __init__(__self__, *,\n end_time: pulumi.Input[str],\n start_time: pulumi.Input[str]):\n pulumi.set(__self__, \"end_time\", end_time)\n pulumi.set(__self__, \"start_time\", start_time)",
"def clean(self):\n cleaned_data = super(EventForm, self).clean()\n\n # Validate that the event ends AFTER it starts\n start_hour = cleaned_data.get(\"start_hour\")\n end_hour = cleaned_data.get(\"end_hour\")\n\n if (start_hour is not None and end_hour is not None):\n # Report an error if end_hour is < start_hour\n if end_hour < start_hour:\n # Report an error against end_hour. Because we have an end_hour\n # value we know that it's currently valid.\n msg = (u\"Ensure the end hour is greater than or equal to the \"\n u\"start hour.\")\n self._errors[\"end_hour\"] = self.error_class([msg])\n\n # end_hour is no longer valid, so remove it from cleaned data\n del cleaned_data[\"end_hour\"]\n\n start_minute = cleaned_data.get(\"start_minute\")\n end_minute = cleaned_data.get(\"end_minute\")\n\n # Report an error if the event ends when it starts, or ends before\n # it starts.\n if (start_minute is not None and end_minute is not None\n and start_hour == end_hour):\n if end_minute <= start_minute:\n msg = u\"Ensure the event ends after it starts.\"\n self._errors[\"end_minute\"] = self.error_class([msg])\n del cleaned_data[\"end_minute\"]\n return cleaned_data",
"def validate(self):\n if self._inc_begin is None:\n raise ValueError((\"TimeRange {self} missing begin point\")\n .format(self=self))\n if self._exc_end is None:\n raise ValueError((\"TimeRange {self} missing end point\")\n .format(self=self))"
] | [
"0.69037473",
"0.65858287",
"0.6472992",
"0.6455287",
"0.6415512",
"0.61734456",
"0.6171806",
"0.61608535",
"0.6140685",
"0.61234224",
"0.5972017",
"0.5946175",
"0.59197986",
"0.59122825",
"0.59088945",
"0.58745897",
"0.58695656",
"0.5765348",
"0.5743106",
"0.5724506",
"0.56768155",
"0.5660184",
"0.56544185",
"0.56329346",
"0.5625015",
"0.5621336",
"0.5594799",
"0.5574968",
"0.55602044",
"0.5559859"
] | 0.80720454 | 0 |
Test LeaveForm apply for leave. | def test_leaveform_apply(self):
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
staffprofile.leave_days = 21
staffprofile.sick_days = 10
staffprofile.save()
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
# 6 days of leave
start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
mommy.make(
"small_small_hr.AnnualLeave",
staff=staffprofile,
year=2017,
leave_type=Leave.REGULAR,
carried_over_days=12,
)
data = {
"staff": staffprofile.id,
"leave_type": Leave.REGULAR,
"start": start,
"end": end,
"review_reason": "Need a break",
}
form = ApplyLeaveForm(data=data)
self.assertTrue(form.is_valid())
leave = form.save()
self.assertEqual(staffprofile, leave.staff)
self.assertEqual(Leave.REGULAR, leave.leave_type)
self.assertEqual(start, leave.start)
self.assertEqual(end, leave.end)
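# start and end are 5 days apart; the leave itself spans 6 calendar days inclusive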
self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)
self.assertEqual("Need a break", leave.review_reason)
self.assertEqual(Leave.PENDING, leave.review_status) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_leaveform_no_overlap(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n mommy.make(\n \"small_small_hr.Leave\",\n leave_type=Leave.REGULAR,\n start=start,\n end=end,\n review_status=Leave.APPROVED,\n staff=staffprofile,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"you cannot have overlapping leave days\", form.errors[\"start\"][0]\n )\n self.assertEqual(\n \"you cannot have overlapping leave days\", form.errors[\"end\"][0]\n )",
"def test_leaveform_process_with_overlap(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n # make sure leave obj already exists for said dates\n mommy.make(\n \"small_small_hr.Leave\",\n staff=staffprofile,\n start=start,\n end=end,\n leave_type=Leave.REGULAR,\n review_status=Leave.APPROVED,\n )\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n \"review_status\": Leave.REJECTED,\n }\n\n form = LeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.REJECTED, leave.review_status)",
"def test_leaveform_admin(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n \"review_status\": Leave.APPROVED,\n }\n\n form = LeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.APPROVED, leave.review_status)",
"def test_leave_oversubscribe_off(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 40 days of leave\n start = datetime(2017, 6, 1, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 7, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=0,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Mini retirement\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"start\"][0],\n )\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"end\"][0],\n )",
"def test_sickleave_apply(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.SICK, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.PENDING, leave.review_status)",
"def test_one_day_leave(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 1 day of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=0).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.PENDING, leave.review_status)\n self.assertEqual(\n 1,\n get_taken_leave_days(\n staffprofile, Leave.PENDING, Leave.REGULAR, 2017, 2017\n ),\n )",
"def test_leaveform_max_days(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 7, 10, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n allowed_days=21,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = LeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"start\"][0],\n )\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"end\"][0],\n )",
"def test_leaveform_start_end(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 1, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = LeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(1, len(form.errors.keys()))\n self.assertEqual(\"end must be greater than start\", form.errors[\"end\"][0])\n\n # end year and start year must be the same\n\n end = datetime(2018, 6, 1, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n data2 = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form2 = LeaveForm(data=data2)\n self.assertFalse(form2.is_valid())\n self.assertEqual(2, len(form2.errors.keys()))\n self.assertEqual(\n \"start and end must be from the same year\", form2.errors[\"start\"][0]\n )\n self.assertEqual(\n \"start and end must be from the same year\", form2.errors[\"end\"][0]\n )",
"def test_leave_oversubscribe(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 40 days of leave\n start = datetime(2017, 6, 1, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 7, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=0,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Mini retirement\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n\n # make it approved\n obj_type = ContentType.objects.get_for_model(leave)\n review = ModelReview.objects.get(content_type=obj_type, object_id=leave.id)\n review.review_status = ModelReview.APPROVED\n review.save()\n leave.refresh_from_db()\n\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=39).days, (leave.end - leave.start).days)\n self.assertEqual(\"Mini retirement\", leave.review_reason)\n self.assertEqual(Leave.APPROVED, leave.review_status)\n self.assertEqual(\n 40,\n get_taken_leave_days(\n staffprofile, Leave.APPROVED, Leave.REGULAR, 2017, 2017\n ),\n )\n self.assertEqual(-19, staffprofile.get_available_leave_days(year=2017))",
"def test_sickleave_process(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n \"review_status\": Leave.REJECTED,\n }\n\n form = LeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.SICK, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.REJECTED, leave.review_status)",
"def test_leaveform_max_sick_days(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 20, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=0,\n allowed_days=10,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = LeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"Not enough sick days. Available sick days are 10.00\",\n form.errors[\"start\"][0],\n )\n self.assertEqual(\n \"Not enough sick days. Available sick days are 10.00\", form.errors[\"end\"][0]\n )",
"def test_annual_leave_form_decimals(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n data = {\n \"staff\": staffprofile.id,\n \"year\": 2018,\n \"leave_type\": Leave.REGULAR,\n \"allowed_days\": 16.5,\n \"carried_over_days\": 8.5,\n }\n\n form = AnnualLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n annual_leave = form.save()\n self.assertEqual(staffprofile, annual_leave.staff)\n self.assertEqual(2018, annual_leave.year)\n self.assertEqual(16.5, annual_leave.allowed_days)\n self.assertEqual(8.5, annual_leave.carried_over_days)\n self.assertEqual(Leave.REGULAR, annual_leave.leave_type)\n\n data2 = {\n \"staff\": staffprofile.id,\n \"year\": 2017,\n \"leave_type\": Leave.REGULAR,\n \"allowed_days\": 21,\n \"carried_over_days\": 5,\n }\n\n form = AnnualLeaveForm(data=data2, instance=annual_leave)\n self.assertTrue(form.is_valid())\n form.save()\n annual_leave.refresh_from_db()\n self.assertEqual(staffprofile, annual_leave.staff)\n self.assertEqual(2017, annual_leave.year)\n self.assertEqual(21, annual_leave.allowed_days)\n self.assertEqual(5, annual_leave.carried_over_days)\n self.assertEqual(Leave.REGULAR, annual_leave.leave_type)",
"def onchange_leave_date(self):\n warning = {}\n if self.date_of_leave and self.date_of_leave < self.date_of_join:\n warning.update({\n 'title': _('Information'),\n 'message': _(\"Leaving Date Must Be Greater Than Joining Date.\")})\n self.date_of_leave = False\n return {'warning': warning}",
"def _validate_leave_request(self):\n\t\tfor holiday in self.filtered (lambda request: request.type == 'remove' and request.holiday_type == 'employee'):\n\t\t\tmeeting_values = holiday._prepare_holidays_meeting_values ()\n\t\t\tmeeting = self.env['calendar.event'].with_context (no_mail_to_attendees=True).create (meeting_values)\n\t\t\tholiday.write ({'meeting_id': meeting.id})\n\t\t\tholiday._create_resource_leave ()",
"def leave(self, fsm):\n pass",
"def leave(self):\n p = GameOverPopup(self)\n p.open()",
"async def leave(ctx, *, check=\"\"):\r\n # if botv.isAdmin(ctx.message.author) and check == \"now, bot\":\r\n # if necessary, save checks can go here; check presently commented out because botv can\r\n # fail to initialize in testing\r\n await bot.say(\"Allan, please add dialogue!\")\r\n quit()",
"def _check_leave_request(self, cr, uid, request, token, context=None):\n holidays_obj = request.registry['hr.holidays']\n holidays_ids = holidays_obj.search(cr, uid, [\n ('token', '=', token)\n ])\n\n if len(holidays_ids) == 0:\n return request.website.render(\n \"tk_hr_approve_request.leave_request_not_found\"\n )\n\n _id = holidays_ids[0] if len(holidays_ids) else None\n if _id:\n leave_request = holidays_obj.browse(\n cr, uid, _id, context=context\n )\n return leave_request",
"def ev_windowleave(self, event: tcod.event.WindowEvent) -> T | None:",
"def leave_notify_event(self, widget, event):\n self.logger.debug(\"leaving widget...\")\n return self.make_callback('leave')",
"def leave(self):\n self.pleaseQuit=1",
"def ev_windowleave(self, event: WindowEvent) -> None:",
"def leave(event):\n if tooltip.event is not None:\n widget.after_cancel(tooltip.event)\n tooltip.event = None\n tooltip.hidetip()",
"def leave_request_accept(self, token, **kwargs):\n cr, uid, context = self._get_cr_uid_context()\n res = self._check_leave_request(\n cr, uid, request, token, context=context\n )\n if isinstance(res, http.Response):\n return res\n if res:\n res.signal_workflow('validate')\n if res.state == 'validate':\n return request.website.render(\n \"tk_hr_approve_request.leave_request_accepted\"\n )",
"def isLeaveLeft(self,leave_type,days):\n if leave_type == 1 :\n return days<=self.earned_balance\n elif leave_type == 2 :\n return days<=self.hp_balance\n elif leave_type == 3 :\n return days*2<=self.hp_balance \n else :\n return False",
"def leave_group(self):\n\t\tself.sendMessage(ID_CTRL + \"LEAVE\", True)\n\t\tself.joinstate = 0\n\t\tself.createstate = 0\n\t\tself.__key = None",
"def on_leave(self, *args):\n self.ids.edit_area.content.clear_widgets()\n return super().on_leave(*args)",
"def test_if_fires_on_zone_leave(self):\n self.hass.states.set('geo_location.entity', 'hello', {\n 'latitude': 32.880586,\n 'longitude': -117.237564,\n 'source': 'test_source'\n })\n self.hass.block_till_done()\n\n assert setup_component(self.hass, automation.DOMAIN, {\n automation.DOMAIN: {\n 'trigger': {\n 'platform': 'geo_location',\n 'source': 'test_source',\n 'zone': 'zone.test',\n 'event': 'leave',\n },\n 'action': {\n 'service': 'test.automation',\n }\n }\n })\n\n self.hass.states.set('geo_location.entity', 'hello', {\n 'latitude': 32.881011,\n 'longitude': -117.234758,\n 'source': 'test_source'\n })\n self.hass.block_till_done()\n\n self.assertEqual(1, len(self.calls))",
"def OnLeaveEpisode(self):\n pass",
"def test_not_logged_user_cannot_leave(self):\n\n utils.test_not_logged_cannot_access(self, self.url)"
] | [
"0.71491563",
"0.7073332",
"0.68779004",
"0.6833441",
"0.6643837",
"0.65629363",
"0.6559908",
"0.63612664",
"0.6309911",
"0.6163169",
"0.6080903",
"0.59029245",
"0.58312833",
"0.56878823",
"0.5650338",
"0.56020325",
"0.5566159",
"0.55186003",
"0.54910886",
"0.54739606",
"0.54640734",
"0.543446",
"0.5414914",
"0.5316205",
"0.52944565",
"0.52933013",
"0.5269195",
"0.52370244",
"0.52325654",
"0.52165926"
] | 0.75476915 | 0 |
Test application for one day of leave. | def test_one_day_leave(self):
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
staffprofile.leave_days = 21
staffprofile.sick_days = 10
staffprofile.save()
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
# 1 day of leave
start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
end = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
mommy.make(
"small_small_hr.AnnualLeave",
staff=staffprofile,
year=2017,
leave_type=Leave.REGULAR,
carried_over_days=12,
)
data = {
"staff": staffprofile.id,
"leave_type": Leave.REGULAR,
"start": start,
"end": end,
"review_reason": "Need a break",
}
form = ApplyLeaveForm(data=data)
self.assertTrue(form.is_valid())
leave = form.save()
self.assertEqual(staffprofile, leave.staff)
self.assertEqual(Leave.REGULAR, leave.leave_type)
self.assertEqual(start, leave.start)
self.assertEqual(end, leave.end)
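# start and end are the same day, so the delta is 0 days but one leave day is still taken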
self.assertEqual(timedelta(days=0).days, (leave.end - leave.start).days)
self.assertEqual("Need a break", leave.review_reason)
self.assertEqual(Leave.PENDING, leave.review_status)
self.assertEqual(
1,
get_taken_leave_days(
staffprofile, Leave.PENDING, Leave.REGULAR, 2017, 2017
),
) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_leave_oversubscribe_off(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 40 days of leave\n start = datetime(2017, 6, 1, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 7, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=0,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Mini retirement\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"start\"][0],\n )\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"end\"][0],\n )",
"def Daysleftverification():\n pass",
"def test_leave_oversubscribe(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 40 days of leave\n start = datetime(2017, 6, 1, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 7, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=0,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Mini retirement\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n\n # make it approved\n obj_type = ContentType.objects.get_for_model(leave)\n review = ModelReview.objects.get(content_type=obj_type, object_id=leave.id)\n review.review_status = ModelReview.APPROVED\n review.save()\n leave.refresh_from_db()\n\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=39).days, (leave.end - leave.start).days)\n self.assertEqual(\"Mini retirement\", leave.review_reason)\n self.assertEqual(Leave.APPROVED, leave.review_status)\n self.assertEqual(\n 40,\n get_taken_leave_days(\n staffprofile, Leave.APPROVED, Leave.REGULAR, 2017, 2017\n ),\n )\n self.assertEqual(-19, staffprofile.get_available_leave_days(year=2017))",
"def test_interview_applications_expired():\n test_username = \"test_user\"\n\n user = UserFactory.create(username=test_username, is_active=True)\n UserSocialAuthFactory.create(user=user, provider=\"edX\")\n\n assert user.is_active is True\n assert \"retired_email\" not in user.email\n assert UserSocialAuth.objects.filter(user=user).count() == 1\n\n now = now_in_utc()\n run = BootcampRunFactory.create(start_date=(now + timedelta(days=10)))\n\n bootcamp_app = BootcampApplicationFactory.create(\n state=\"AWAITING_USER_SUBMISSIONS\", bootcamp_run=run\n )\n interview = InterviewFactory.create(\n job=JobFactory.create(run=bootcamp_app.bootcamp_run), applicant=user\n )\n\n COMMAND.handle(\"retire_users\", users=[test_username])\n\n user.refresh_from_db()\n interview.refresh_from_db()\n assert user.is_active is False\n assert \"retired_email\" in user.email\n assert UserSocialAuth.objects.filter(user=user).count() == 0\n assert interview.status == \"expired\"",
"def test_sickleave_process(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n \"review_status\": Leave.REJECTED,\n }\n\n form = LeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.SICK, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.REJECTED, leave.review_status)",
"def test_sickleave_apply(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.SICK, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.PENDING, leave.review_status)",
"def test_leaveform_max_days(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 7, 10, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n allowed_days=21,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = LeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"start\"][0],\n )\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"end\"][0],\n )",
"def OnLeaveEpisode(self):\n pass",
"def exitAppOnExpired(self)->bool:\n return self._lic.params['exitAppOnExpire'].value",
"def test_leaveform_process_with_overlap(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n # make sure leave obj already exists for said dates\n mommy.make(\n \"small_small_hr.Leave\",\n staff=staffprofile,\n start=start,\n end=end,\n leave_type=Leave.REGULAR,\n review_status=Leave.APPROVED,\n )\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n \"review_status\": Leave.REJECTED,\n }\n\n form = LeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.REJECTED, leave.review_status)",
"def test_out_of_date(self):\n self.assertTrue(update_available(0.0))",
"def test_leaveform_apply(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.PENDING, leave.review_status)",
"def test_validate_expiration1(client):\n with client.application.app_context():\n pytest.skip(\"Not implemented\")",
"def test_past_event(self):\n pass",
"def test_validate_expiration2(client):\n with client.application.app_context():\n pytest.skip(\"Not implemented\")",
"def test_program_first_unexpired_run():\n program = ProgramFactory()\n course = CourseFactory.create(program=program)\n now = now_in_utc()\n end_date = now + timedelta(days=100)\n enr_end_date = now + timedelta(days=100)\n first_run = CourseRunFactory.create(\n start_date=now,\n course=course,\n end_date=end_date,\n enrollment_end=enr_end_date,\n live=True,\n )\n\n # create another course and course run in program\n another_course = CourseFactory.create(program=program)\n second_run = CourseRunFactory.create(\n start_date=now + timedelta(days=50),\n course=another_course,\n end_date=end_date,\n enrollment_end=enr_end_date,\n )\n\n assert first_run.start_date < second_run.start_date\n assert program.first_unexpired_run == first_run",
"def test_leaveform_admin(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n \"review_status\": Leave.APPROVED,\n }\n\n form = LeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.APPROVED, leave.review_status)",
"def test_check_yesterday(self):\n \n # the service calendar has two weekdays, back to back\n sc = ServiceCalendar()\n sc.add_period( 0, 3600*24, [\"WKDY\"] )\n sc.add_period( 3600*24, 2*3600*24, [\"WKDY\"] )\n \n # the timezone lasts for two days and has no offset\n # this is just boilerplate\n tz = Timezone()\n tz.add_period( TimezonePeriod(0, 2*3600*24, 0) )\n \n # tripboard runs on weekdays for agency 0\n al = TripAlight( \"WKDY\", sc, tz, 0 )\n \n # one alighting - one second before midnight\n al.add_alighting( \"1\", 86400-1, 0 )\n \n # our starting state is midnight between the two days\n s0 = State(1, 86400)\n \n # it should be one second after the last alighting \n s1 = al.walk_back( s0, WalkOptions() )\n self.assertEquals( s1.time, 86399 )",
"def test_holidays_validate(self):\n self.request.sudo(self.user_1.id).holidays_validate()\n\n accrual = self.employee.get_leave_accrual(self.leave_type.id)\n self.assertEqual(accrual.total_hours, 22.5)",
"def test_past_meeting_details(self):\n pass",
"def test_leaveform_no_overlap(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n mommy.make(\n \"small_small_hr.Leave\",\n leave_type=Leave.REGULAR,\n start=start,\n end=end,\n review_status=Leave.APPROVED,\n staff=staffprofile,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"you cannot have overlapping leave days\", form.errors[\"start\"][0]\n )\n self.assertEqual(\n \"you cannot have overlapping leave days\", form.errors[\"end\"][0]\n )",
"def test_negative_is_active_of_homework():\n assert not expired_hw.is_active()",
"def test_get_remaining_event_dates(self):\n date = EventDate.objects.create(\n event=self.event_show2,\n date=(timezone.now() - timedelta(days=10))\n )\n dates = list(get_remaining_event_dates(self.event_show2))\n self.assertFalse(date in dates)\n self.assertTrue(self.future_date in dates)\n self.assertFalse(self.past_date in dates)",
"def test_expired_course_in_holdback(self):\n CourseDurationLimitConfig.objects.create(enabled=True, enabled_as_of=datetime(2010, 1, 1, tzinfo=UTC))\n\n course = CourseFactory.create(start=THREE_YEARS_AGO)\n url = course_home_url(course)\n\n for mode in [CourseMode.AUDIT, CourseMode.VERIFIED]:\n CourseModeFactory.create(course_id=course.id, mode_slug=mode)\n\n # assert that an if an expired audit user in the holdback tries to access the course\n # they are not redirected to the dashboard\n audit_user = UserFactory(password=self.TEST_PASSWORD)\n self.client.login(username=audit_user.username, password=self.TEST_PASSWORD)\n audit_enrollment = CourseEnrollment.enroll(audit_user, course.id, mode=CourseMode.AUDIT)\n Schedule.objects.update(start_date=THREE_YEARS_AGO)\n FBEEnrollmentExclusion.objects.create(\n enrollment=audit_enrollment\n )\n\n response = self.client.get(url)\n\n assert response.status_code == 200",
"def test_with_now_minus_1_day(self):\n self.assertEqual(ageid(self.now - timedelta(1)), 'age2')",
"def test_if_fires_on_zone_leave(self):\n self.hass.states.set('geo_location.entity', 'hello', {\n 'latitude': 32.880586,\n 'longitude': -117.237564,\n 'source': 'test_source'\n })\n self.hass.block_till_done()\n\n assert setup_component(self.hass, automation.DOMAIN, {\n automation.DOMAIN: {\n 'trigger': {\n 'platform': 'geo_location',\n 'source': 'test_source',\n 'zone': 'zone.test',\n 'event': 'leave',\n },\n 'action': {\n 'service': 'test.automation',\n }\n }\n })\n\n self.hass.states.set('geo_location.entity', 'hello', {\n 'latitude': 32.881011,\n 'longitude': -117.234758,\n 'source': 'test_source'\n })\n self.hass.block_till_done()\n\n self.assertEqual(1, len(self.calls))",
"def test_past_meetings(self):\n pass",
"def test_leaveform_max_sick_days(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 20, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=0,\n allowed_days=10,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = LeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"Not enough sick days. Available sick days are 10.00\",\n form.errors[\"start\"][0],\n )\n self.assertEqual(\n \"Not enough sick days. Available sick days are 10.00\", form.errors[\"end\"][0]\n )",
"def test_leaveform_start_end(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 1, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = LeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(1, len(form.errors.keys()))\n self.assertEqual(\"end must be greater than start\", form.errors[\"end\"][0])\n\n # end year and start year must be the same\n\n end = datetime(2018, 6, 1, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n data2 = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form2 = LeaveForm(data=data2)\n self.assertFalse(form2.is_valid())\n self.assertEqual(2, len(form2.errors.keys()))\n self.assertEqual(\n \"start and end must be from the same year\", form2.errors[\"start\"][0]\n )\n self.assertEqual(\n \"start and end must be from the same year\", form2.errors[\"end\"][0]\n )",
"def test_issue_reset_time(self):\n pass"
] | [
"0.644819",
"0.6436864",
"0.63061",
"0.61098903",
"0.60137403",
"0.5964021",
"0.59452546",
"0.5901124",
"0.58988696",
"0.5884434",
"0.5868431",
"0.5864215",
"0.5861571",
"0.5769738",
"0.5756127",
"0.5754291",
"0.573856",
"0.5724665",
"0.5720167",
"0.57131606",
"0.5687971",
"0.56767",
"0.5675918",
"0.56691974",
"0.566758",
"0.56279117",
"0.5622029",
"0.5611456",
"0.5610286",
"0.5605333"
] | 0.671469 | 0 |
Test LeaveForm no overlap. | def test_leaveform_no_overlap(self):
    user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
    staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
    staffprofile.leave_days = 21
    staffprofile.sick_days = 10
    staffprofile.save()

    request = self.factory.get("/")
    request.session = {}
    request.user = AnonymousUser()

    # 6 days of leave
    start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
    end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))

    mommy.make(
        "small_small_hr.AnnualLeave",
        staff=staffprofile,
        year=2017,
        leave_type=Leave.REGULAR,
        carried_over_days=12,
    )

    mommy.make(
        "small_small_hr.Leave",
        leave_type=Leave.REGULAR,
        start=start,
        end=end,
        review_status=Leave.APPROVED,
        staff=staffprofile,
    )

    data = {
        "staff": staffprofile.id,
        "leave_type": Leave.REGULAR,
        "start": start,
        "end": end,
        "review_reason": "Need a break",
    }

    form = ApplyLeaveForm(data=data)
    self.assertFalse(form.is_valid())
    self.assertEqual(2, len(form.errors.keys()))
    self.assertEqual(
        "you cannot have overlapping leave days", form.errors["start"][0]
    )
    self.assertEqual(
        "you cannot have overlapping leave days", form.errors["end"][0]
    ) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_leaveform_process_with_overlap(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n # make sure leave obj already exists for said dates\n mommy.make(\n \"small_small_hr.Leave\",\n staff=staffprofile,\n start=start,\n end=end,\n leave_type=Leave.REGULAR,\n review_status=Leave.APPROVED,\n )\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n \"review_status\": Leave.REJECTED,\n }\n\n form = LeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.REJECTED, leave.review_status)",
"def can_overlap(self):\n return False",
"def doesNotOverlap( self, other):\n return not self.overlaps( other)",
"def can_overlap(self):\n return self.is_open",
"def can_overlap(self):\n return self.is_open",
"def test_overtime_form_apply_no_overlap(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 hours of overtime\n start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 5, 6, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.OverTime\",\n start=start.time(),\n end=end.time(),\n review_status=OverTime.APPROVED,\n date=start.date,\n staff=staffprofile,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"date\": start.date(),\n \"start\": start.time(),\n \"end\": end.time(),\n \"review_reason\": \"Extra work\",\n }\n\n form = ApplyOverTimeForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(3, len(form.errors.keys()))\n self.assertEqual(\n \"you cannot have overlapping overtime hours on the same day\",\n form.errors[\"start\"][0],\n )\n self.assertEqual(\n \"you cannot have overlapping overtime hours on the same day\",\n form.errors[\"date\"][0],\n )\n self.assertEqual(\n \"you cannot have overlapping overtime hours on the same day\",\n form.errors[\"end\"][0],\n )",
"def test_leaveform_start_end(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 1, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = LeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(1, len(form.errors.keys()))\n self.assertEqual(\"end must be greater than start\", form.errors[\"end\"][0])\n\n # end year and start year must be the same\n\n end = datetime(2018, 6, 1, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n data2 = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form2 = LeaveForm(data=data2)\n self.assertFalse(form2.is_valid())\n self.assertEqual(2, len(form2.errors.keys()))\n self.assertEqual(\n \"start and end must be from the same year\", form2.errors[\"start\"][0]\n )\n self.assertEqual(\n \"start and end must be from the same year\", form2.errors[\"end\"][0]\n )",
"def test_leave_oversubscribe_off(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 40 days of leave\n start = datetime(2017, 6, 1, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 7, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=0,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Mini retirement\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"start\"][0],\n )\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"end\"][0],\n )",
"def overlaps(self, other): # -> bool:\n ...",
"def is_overlapping(self, region):\n if self.x2 < region.x1:\n return False # this box is left the other\n if self.x1 > region.x2:\n return False # this box is right the other\n if self.y2 < region.y1:\n return False # this box is above the other\n if self.y1 > region.y2:\n return False # this box is below the other\n return True",
"def test_leaveform_max_days(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 7, 10, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n allowed_days=21,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = LeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"start\"][0],\n )\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"end\"][0],\n )",
"def clean(self):\r\n query = MidcourseReverificationWindow.objects.filter(\r\n course_id=self.course_id,\r\n end_date__gte=self.start_date,\r\n start_date__lte=self.end_date\r\n )\r\n if query.count() > 0:\r\n raise ValidationError('Reverification windows cannot overlap for a given course.')",
"def __is_position_overlapped(self, position, exon):\n start, end = self.__get_exon_coordinates(exon)\n return position >= start and position <= end",
"def _validate_leave_request(self):\n\t\tfor holiday in self.filtered (lambda request: request.type == 'remove' and request.holiday_type == 'employee'):\n\t\t\tmeeting_values = holiday._prepare_holidays_meeting_values ()\n\t\t\tmeeting = self.env['calendar.event'].with_context (no_mail_to_attendees=True).create (meeting_values)\n\t\t\tholiday.write ({'meeting_id': meeting.id})\n\t\t\tholiday._create_resource_leave ()",
"def test_leaveform_admin(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n \"review_status\": Leave.APPROVED,\n }\n\n form = LeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.APPROVED, leave.review_status)",
"def test_leaveform_apply(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.PENDING, leave.review_status)",
"def _validate_no_overlap(params, error_callback):\n dhcp_set = netaddr.IPSet(netaddr.IPRange(params['dhcp_start'],\n params['dhcp_end']))\n inspection_set = netaddr.IPSet(netaddr.IPRange(params['inspection_start'],\n params['inspection_end']))\n # If there is any intersection of the two sets then we have a problem\n if dhcp_set & inspection_set:\n message = ('Inspection DHCP range \"%s-%s\" overlaps provisioning '\n 'DHCP range \"%s-%s\".' %\n (params['inspection_start'], params['inspection_end'],\n params['dhcp_start'], params['dhcp_end']))\n error_callback(message)",
"def is_overlapping(box1, box2):\n if box1[2] <= box2[0]: # If box1 is to the left of box2\n return False\n elif box1[0] >= box2[2]: # If box1 is to the right of box2\n return False\n elif box1[3] <= box2[1]: # If box1 is below box2\n return False\n elif box1[1] >= box2[3]: # If box1 is above box2\n return False\n else:\n return True",
"def overlap(component1, component2):\n if component1[0].start <= component2[0].stop and component2[0].start <= component1[0].stop:\n if component1[1].start <= component2[1].stop and component2[1].start <= component1[1].stop:\n return True\n return False",
"def validate_collision(self):\n pass",
"def _bbox_overlap(self, other):\n reg0 = self.bbox\n reg1 = other.bbox\n return (reg0[0] <= reg1[2] and reg1[0] <= reg0[2] and\n reg0[1] <= reg1[3] and reg1[1] <= reg0[3])",
"def if_overlap(self, x, y) -> bool:\n if self.pos[y][x] != '-':\n print('此坐标已有棋子,请仔细观察棋盘')\n return True\n return False",
"def out_of_bounds(self):\n return self.rect.right <= 0",
"def test_merge_overlap(self):\n self.open_url('/group/list')\n \n el = self.wd.find_element(By.LINK_TEXT, \"First Group\")\n el.click()\n time.sleep(0.5)\n \n self.assert_num_rows(6)\n \n self.open_url('/group/list')\n self.wd.find_element(By.ID, \"subnav-merge\").click()\n time.sleep(0.5) # FIXME: Need to figure out how to wait on page loads; this is supposed to happen automatically ...\n self.assertEquals('Merge Group', self.wd.title)\n \n sel = Select(self.wd.find_element(By.ID, \"from_group_id\"))\n sel.select_by_visible_text(\"6th group\")\n \n sel = Select(self.wd.find_element(By.ID, \"to_group_id\"))\n sel.select_by_visible_text(\"First Group\")\n \n self.submit_form(\"merge_form\")\n \n self.open_url('/group/list')\n self.assert_not_in_list_table(\"6th group\")\n \n el = self.wd.find_element(By.LINK_TEXT, \"First Group\")\n el.click()\n \n self.assert_num_rows(6)",
"def all_leave(self):\n return self.num_leaves == self.num_workers",
"def overlap(a, b):\n return not(a[2]<=b[0] or a[3]<=b[1] or a[0]>=b[2] or a[1]>=b[3])",
"def out_of_bounds(self):\n return self.rect.right <= 0 or self.rect.left >= self.screen_rect.width",
"def overlap(start1, end1, start2, end2):\n return not (end1 < start2 or end2 < start1)",
"def boundary_checker(stage, player_new):\n # Go through each possible direction a player can travel\n if player_new[0] == 0:\n valid = False\n color.write(\"You can't leave the map!\\n\",\"ERROR\")\n elif player_new[1] == 0:\n valid = False\n color.write(\"You can't leave the map!\\n\",\"ERROR\")\n elif player_new[0] > stage[0]:\n valid = False\n color.write(\"You can't leave the map!\\n\",\"ERROR\")\n elif player_new[1] > stage[1]:\n valid = False\n color.write(\"You can't leave the map!\\n\",\"ERROR\")\n # Flag validity if player still within bounds of map\n else:\n valid = True\n\n return valid",
"def test_if_not_fires_for_leave_on_zone_enter(self):\n self.hass.states.set('geo_location.entity', 'hello', {\n 'latitude': 32.881011,\n 'longitude': -117.234758,\n 'source': 'test_source'\n })\n self.hass.block_till_done()\n\n assert setup_component(self.hass, automation.DOMAIN, {\n automation.DOMAIN: {\n 'trigger': {\n 'platform': 'geo_location',\n 'source': 'test_source',\n 'zone': 'zone.test',\n 'event': 'leave',\n },\n 'action': {\n 'service': 'test.automation',\n }\n }\n })\n\n self.hass.states.set('geo_location.entity', 'hello', {\n 'latitude': 32.880586,\n 'longitude': -117.237564\n })\n self.hass.block_till_done()\n\n self.assertEqual(0, len(self.calls))"
] | [
"0.67735326",
"0.63922834",
"0.59517926",
"0.5882709",
"0.5882709",
"0.58206546",
"0.5817428",
"0.5760936",
"0.57062036",
"0.56520534",
"0.5598502",
"0.554994",
"0.5541704",
"0.5528245",
"0.552553",
"0.5517636",
"0.55156803",
"0.5504129",
"0.54915005",
"0.54858375",
"0.5478036",
"0.54713553",
"0.5452617",
"0.5442781",
"0.54059476",
"0.5405888",
"0.5400143",
"0.53996474",
"0.5367586",
"0.5350447"
] | 0.7283469 | 0 |
Test LeaveForm process works even if leave object exists. | def test_leaveform_process_with_overlap(self):
    user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
    staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
    staffprofile.leave_days = 21
    staffprofile.sick_days = 10
    staffprofile.save()

    request = self.factory.get("/")
    request.session = {}
    request.user = AnonymousUser()

    # 6 days of leave
    start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
    end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))

    # make sure leave obj already exists for said dates
    mommy.make(
        "small_small_hr.Leave",
        staff=staffprofile,
        start=start,
        end=end,
        leave_type=Leave.REGULAR,
        review_status=Leave.APPROVED,
    )

    mommy.make(
        "small_small_hr.AnnualLeave",
        staff=staffprofile,
        year=2017,
        leave_type=Leave.REGULAR,
        carried_over_days=4,
    )

    data = {
        "staff": staffprofile.id,
        "leave_type": Leave.REGULAR,
        "start": start,
        "end": end,
        "review_reason": "Need a break",
        "review_status": Leave.REJECTED,
    }

    form = LeaveForm(data=data)
    self.assertTrue(form.is_valid())
    leave = form.save()
    self.assertEqual(staffprofile, leave.staff)
    self.assertEqual(Leave.REGULAR, leave.leave_type)
    self.assertEqual(start, leave.start)
    self.assertEqual(end, leave.end)
    self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)
    self.assertEqual("Need a break", leave.review_reason)
    self.assertEqual(Leave.REJECTED, leave.review_status) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_leaveform_admin(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n \"review_status\": Leave.APPROVED,\n }\n\n form = LeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.APPROVED, leave.review_status)",
"def test_leaveform_no_overlap(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n mommy.make(\n \"small_small_hr.Leave\",\n leave_type=Leave.REGULAR,\n start=start,\n end=end,\n review_status=Leave.APPROVED,\n staff=staffprofile,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"you cannot have overlapping leave days\", form.errors[\"start\"][0]\n )\n self.assertEqual(\n \"you cannot have overlapping leave days\", form.errors[\"end\"][0]\n )",
"def test_leaveform_apply(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.PENDING, leave.review_status)",
"def test_sickleave_process(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n \"review_status\": Leave.REJECTED,\n }\n\n form = LeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.SICK, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.REJECTED, leave.review_status)",
"def test_leave_oversubscribe_off(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 40 days of leave\n start = datetime(2017, 6, 1, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 7, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=0,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Mini retirement\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"start\"][0],\n )\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"end\"][0],\n )",
"def test_leaveform_max_days(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 7, 10, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n allowed_days=21,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = LeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"start\"][0],\n )\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"end\"][0],\n )",
"def test_leaveform_start_end(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 1, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = LeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(1, len(form.errors.keys()))\n self.assertEqual(\"end must be greater than start\", form.errors[\"end\"][0])\n\n # end year and start year must be the same\n\n end = datetime(2018, 6, 1, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n data2 = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form2 = LeaveForm(data=data2)\n self.assertFalse(form2.is_valid())\n self.assertEqual(2, len(form2.errors.keys()))\n self.assertEqual(\n \"start and end must be from the same year\", form2.errors[\"start\"][0]\n )\n self.assertEqual(\n \"start and end must be from the same year\", form2.errors[\"end\"][0]\n )",
"def test_one_day_leave(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 1 day of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=0).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.PENDING, leave.review_status)\n self.assertEqual(\n 1,\n get_taken_leave_days(\n staffprofile, Leave.PENDING, Leave.REGULAR, 2017, 2017\n ),\n )",
"def test_leave_oversubscribe(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 40 days of leave\n start = datetime(2017, 6, 1, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 7, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=0,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Mini retirement\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n\n # make it approved\n obj_type = ContentType.objects.get_for_model(leave)\n review = ModelReview.objects.get(content_type=obj_type, object_id=leave.id)\n review.review_status = ModelReview.APPROVED\n review.save()\n leave.refresh_from_db()\n\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=39).days, (leave.end - leave.start).days)\n self.assertEqual(\"Mini retirement\", leave.review_reason)\n self.assertEqual(Leave.APPROVED, leave.review_status)\n self.assertEqual(\n 40,\n get_taken_leave_days(\n staffprofile, Leave.APPROVED, Leave.REGULAR, 2017, 2017\n ),\n )\n self.assertEqual(-19, staffprofile.get_available_leave_days(year=2017))",
"def test_leaveform_max_sick_days(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 20, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=0,\n allowed_days=10,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = LeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"Not enough sick days. Available sick days are 10.00\",\n form.errors[\"start\"][0],\n )\n self.assertEqual(\n \"Not enough sick days. Available sick days are 10.00\", form.errors[\"end\"][0]\n )",
"def test_sickleave_apply(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.SICK, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.PENDING, leave.review_status)",
"def test_annual_leave_form_decimals(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n data = {\n \"staff\": staffprofile.id,\n \"year\": 2018,\n \"leave_type\": Leave.REGULAR,\n \"allowed_days\": 16.5,\n \"carried_over_days\": 8.5,\n }\n\n form = AnnualLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n annual_leave = form.save()\n self.assertEqual(staffprofile, annual_leave.staff)\n self.assertEqual(2018, annual_leave.year)\n self.assertEqual(16.5, annual_leave.allowed_days)\n self.assertEqual(8.5, annual_leave.carried_over_days)\n self.assertEqual(Leave.REGULAR, annual_leave.leave_type)\n\n data2 = {\n \"staff\": staffprofile.id,\n \"year\": 2017,\n \"leave_type\": Leave.REGULAR,\n \"allowed_days\": 21,\n \"carried_over_days\": 5,\n }\n\n form = AnnualLeaveForm(data=data2, instance=annual_leave)\n self.assertTrue(form.is_valid())\n form.save()\n annual_leave.refresh_from_db()\n self.assertEqual(staffprofile, annual_leave.staff)\n self.assertEqual(2017, annual_leave.year)\n self.assertEqual(21, annual_leave.allowed_days)\n self.assertEqual(5, annual_leave.carried_over_days)\n self.assertEqual(Leave.REGULAR, annual_leave.leave_type)",
"def leave(self):\n p = GameOverPopup(self)\n p.open()",
"def leave(self):\n self.pleaseQuit=1",
"def test_not_logged_user_cannot_leave(self):\n\n utils.test_not_logged_cannot_access(self, self.url)",
"def leaveEvent(self, event):\n self.destroy()",
"def leave_group(self):\n\t\tself.sendMessage(ID_CTRL + \"LEAVE\", True)\n\t\tself.joinstate = 0\n\t\tself.createstate = 0\n\t\tself.__key = None",
"def leave(self, fsm):\n pass",
"def test_leave(self):\n client = self.mock_client(\n [\n defer.succeed(Mock(error_code=0)),\n ]\n )\n coord = self.make_coordinator(client)\n coord.coordinator_broker = Mock()\n coord.member_id = \"m1\"\n coord.generation_id = \"g1\"\n de = coord.send_leave_group_request()\n self.successResultOf(de)\n self.assertEqual(coord.member_id, \"\")\n self.assertIsNone(coord.generation_id)",
"def ev_windowleave(self, event: WindowEvent) -> None:",
"def ev_windowleave(self, event: tcod.event.WindowEvent) -> T | None:",
"def _validate_leave_request(self):\n\t\tfor holiday in self.filtered (lambda request: request.type == 'remove' and request.holiday_type == 'employee'):\n\t\t\tmeeting_values = holiday._prepare_holidays_meeting_values ()\n\t\t\tmeeting = self.env['calendar.event'].with_context (no_mail_to_attendees=True).create (meeting_values)\n\t\t\tholiday.write ({'meeting_id': meeting.id})\n\t\t\tholiday._create_resource_leave ()",
"def _check_leave_request(self, cr, uid, request, token, context=None):\n holidays_obj = request.registry['hr.holidays']\n holidays_ids = holidays_obj.search(cr, uid, [\n ('token', '=', token)\n ])\n\n if len(holidays_ids) == 0:\n return request.website.render(\n \"tk_hr_approve_request.leave_request_not_found\"\n )\n\n _id = holidays_ids[0] if len(holidays_ids) else None\n if _id:\n leave_request = holidays_obj.browse(\n cr, uid, _id, context=context\n )\n return leave_request",
"def test_if_not_fires_for_leave_on_zone_enter(self):\n self.hass.states.set('geo_location.entity', 'hello', {\n 'latitude': 32.881011,\n 'longitude': -117.234758,\n 'source': 'test_source'\n })\n self.hass.block_till_done()\n\n assert setup_component(self.hass, automation.DOMAIN, {\n automation.DOMAIN: {\n 'trigger': {\n 'platform': 'geo_location',\n 'source': 'test_source',\n 'zone': 'zone.test',\n 'event': 'leave',\n },\n 'action': {\n 'service': 'test.automation',\n }\n }\n })\n\n self.hass.states.set('geo_location.entity', 'hello', {\n 'latitude': 32.880586,\n 'longitude': -117.237564\n })\n self.hass.block_till_done()\n\n self.assertEqual(0, len(self.calls))",
"def test_if_fires_on_zone_leave(self):\n self.hass.states.set('geo_location.entity', 'hello', {\n 'latitude': 32.880586,\n 'longitude': -117.237564,\n 'source': 'test_source'\n })\n self.hass.block_till_done()\n\n assert setup_component(self.hass, automation.DOMAIN, {\n automation.DOMAIN: {\n 'trigger': {\n 'platform': 'geo_location',\n 'source': 'test_source',\n 'zone': 'zone.test',\n 'event': 'leave',\n },\n 'action': {\n 'service': 'test.automation',\n }\n }\n })\n\n self.hass.states.set('geo_location.entity', 'hello', {\n 'latitude': 32.881011,\n 'longitude': -117.234758,\n 'source': 'test_source'\n })\n self.hass.block_till_done()\n\n self.assertEqual(1, len(self.calls))",
"async def leave(ctx, *, check=\"\"):\r\n # if botv.isAdmin(ctx.message.author) and check == \"now, bot\":\r\n # if necessary, save checks can go here; check presently commented out because botv can\r\n # fail to initialize in testing\r\n await bot.say(\"Allan, please add dialogue!\")\r\n quit()",
"def test_format_leave_output(self):\n # Setup params and mock result for car successfully leave parking lot\n leave_cmd = \"leave\"\n slot_id = 1\n\n # Mock success resp and verify\n success, output = self.controller.format_leave_output(slot_id)\n self.assertTrue(success)\n self.assertEqual(output, \"Slot number {} is free\".format(slot_id))",
"def test_logged_user_in_group_can_leave(self):\n\n logged_user = utils.create_user_and_authenticate(self)\n self.group.users.add(logged_user)\n expected_url = reverse('my_groups_view')\n\n utils.test_can_access(self, self.url,\n post_redirect_url=expected_url)\n\n self.assertNotIn(logged_user, self.group.users.all())\n self.assertNotIn(self.group, logged_user.joined_groups.all())",
"def frameLeave(self):\n try:\n self.contentFrame.currFrame.leave()\n except AttributeError:\n pass",
"def test_focus_not_on_disabled(self):\n target = 'disable_field'\n field = self.form.fields.get(target, None)\n result_name = self.form.assign_focus_field(target)\n focused = self.find_focus_field()\n\n self.assertTrue(field.disabled)\n self.assertIn(target, self.form.fields)\n self.assertEqual(1, len(focused))\n self.assertNotEqual(target, focused[0])\n self.assertNotEqual(target, result_name)"
] | [
"0.7076453",
"0.7010631",
"0.6957248",
"0.6599136",
"0.6582377",
"0.6566351",
"0.65413404",
"0.6510575",
"0.62859243",
"0.6242199",
"0.61983263",
"0.59142435",
"0.5827286",
"0.57848144",
"0.5703488",
"0.5654972",
"0.5579377",
"0.5565576",
"0.5562733",
"0.5536838",
"0.5487293",
"0.54811144",
"0.5447539",
"0.5405641",
"0.5399108",
"0.5375197",
"0.53188646",
"0.5289898",
"0.52829355",
"0.52784705"
] | 0.7320913 | 0 |
Test LeaveForm apply for sick leave. | def test_sickleave_apply(self):
    user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
    staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
    staffprofile.leave_days = 21
    staffprofile.sick_days = 10
    staffprofile.save()

    request = self.factory.get("/")
    request.session = {}
    request.user = AnonymousUser()

    # 6 days of leave
    start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
    end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))

    mommy.make(
        "small_small_hr.AnnualLeave",
        staff=staffprofile,
        year=2017,
        leave_type=Leave.SICK,
        carried_over_days=4,
    )

    data = {
        "staff": staffprofile.id,
        "leave_type": Leave.SICK,
        "start": start,
        "end": end,
        "review_reason": "Need a break",
    }

    form = ApplyLeaveForm(data=data)
    self.assertTrue(form.is_valid())
    leave = form.save()
    self.assertEqual(staffprofile, leave.staff)
    self.assertEqual(Leave.SICK, leave.leave_type)
    self.assertEqual(start, leave.start)
    self.assertEqual(end, leave.end)
    self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)
    self.assertEqual("Need a break", leave.review_reason)
    self.assertEqual(Leave.PENDING, leave.review_status) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_leaveform_apply(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.PENDING, leave.review_status)",
"def test_sickleave_process(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n \"review_status\": Leave.REJECTED,\n }\n\n form = LeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.SICK, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.REJECTED, leave.review_status)",
"def test_leaveform_process_with_overlap(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n # make sure leave obj already exists for said dates\n mommy.make(\n \"small_small_hr.Leave\",\n staff=staffprofile,\n start=start,\n end=end,\n leave_type=Leave.REGULAR,\n review_status=Leave.APPROVED,\n )\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n \"review_status\": Leave.REJECTED,\n }\n\n form = LeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.REJECTED, leave.review_status)",
"def test_leaveform_no_overlap(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n mommy.make(\n \"small_small_hr.Leave\",\n leave_type=Leave.REGULAR,\n start=start,\n end=end,\n review_status=Leave.APPROVED,\n staff=staffprofile,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"you cannot have overlapping leave days\", form.errors[\"start\"][0]\n )\n self.assertEqual(\n \"you cannot have overlapping leave days\", form.errors[\"end\"][0]\n )",
"def test_leaveform_max_sick_days(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 20, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=0,\n allowed_days=10,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = LeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"Not enough sick days. Available sick days are 10.00\",\n form.errors[\"start\"][0],\n )\n self.assertEqual(\n \"Not enough sick days. Available sick days are 10.00\", form.errors[\"end\"][0]\n )",
"def test_leaveform_admin(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n \"review_status\": Leave.APPROVED,\n }\n\n form = LeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.APPROVED, leave.review_status)",
"def test_leave_oversubscribe_off(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 40 days of leave\n start = datetime(2017, 6, 1, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 7, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=0,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Mini retirement\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"start\"][0],\n )\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"end\"][0],\n )",
"def test_one_day_leave(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 1 day of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=0).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.PENDING, leave.review_status)\n self.assertEqual(\n 1,\n get_taken_leave_days(\n staffprofile, Leave.PENDING, Leave.REGULAR, 2017, 2017\n ),\n )",
"def test_leaveform_max_days(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 7, 10, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n allowed_days=21,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = LeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"start\"][0],\n )\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"end\"][0],\n )",
"def test_leaveform_start_end(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 1, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = LeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(1, len(form.errors.keys()))\n self.assertEqual(\"end must be greater than start\", form.errors[\"end\"][0])\n\n # end year and start year must be the same\n\n end = datetime(2018, 6, 1, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n data2 = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form2 = LeaveForm(data=data2)\n self.assertFalse(form2.is_valid())\n self.assertEqual(2, len(form2.errors.keys()))\n self.assertEqual(\n \"start and end must be from the same year\", form2.errors[\"start\"][0]\n )\n self.assertEqual(\n \"start and end must be from the same year\", form2.errors[\"end\"][0]\n )",
"def test_leave_oversubscribe(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 40 days of leave\n start = datetime(2017, 6, 1, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 7, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=0,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Mini retirement\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n\n # make it approved\n obj_type = ContentType.objects.get_for_model(leave)\n review = ModelReview.objects.get(content_type=obj_type, object_id=leave.id)\n review.review_status = ModelReview.APPROVED\n review.save()\n leave.refresh_from_db()\n\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=39).days, (leave.end - leave.start).days)\n self.assertEqual(\"Mini retirement\", leave.review_reason)\n self.assertEqual(Leave.APPROVED, leave.review_status)\n self.assertEqual(\n 40,\n get_taken_leave_days(\n staffprofile, Leave.APPROVED, Leave.REGULAR, 2017, 2017\n ),\n )\n self.assertEqual(-19, staffprofile.get_available_leave_days(year=2017))",
"def test_annual_leave_form_decimals(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n data = {\n \"staff\": staffprofile.id,\n \"year\": 2018,\n \"leave_type\": Leave.REGULAR,\n \"allowed_days\": 16.5,\n \"carried_over_days\": 8.5,\n }\n\n form = AnnualLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n annual_leave = form.save()\n self.assertEqual(staffprofile, annual_leave.staff)\n self.assertEqual(2018, annual_leave.year)\n self.assertEqual(16.5, annual_leave.allowed_days)\n self.assertEqual(8.5, annual_leave.carried_over_days)\n self.assertEqual(Leave.REGULAR, annual_leave.leave_type)\n\n data2 = {\n \"staff\": staffprofile.id,\n \"year\": 2017,\n \"leave_type\": Leave.REGULAR,\n \"allowed_days\": 21,\n \"carried_over_days\": 5,\n }\n\n form = AnnualLeaveForm(data=data2, instance=annual_leave)\n self.assertTrue(form.is_valid())\n form.save()\n annual_leave.refresh_from_db()\n self.assertEqual(staffprofile, annual_leave.staff)\n self.assertEqual(2017, annual_leave.year)\n self.assertEqual(21, annual_leave.allowed_days)\n self.assertEqual(5, annual_leave.carried_over_days)\n self.assertEqual(Leave.REGULAR, annual_leave.leave_type)",
"def leave(self):\n p = GameOverPopup(self)\n p.open()",
"async def leave(ctx, *, check=\"\"):\r\n # if botv.isAdmin(ctx.message.author) and check == \"now, bot\":\r\n # if necessary, save checks can go here; check presently commented out because botv can\r\n # fail to initialize in testing\r\n await bot.say(\"Allan, please add dialogue!\")\r\n quit()",
"def leave(self):\n self.pleaseQuit=1",
"def leave(self, fsm):\n pass",
"def _validate_leave_request(self):\n\t\tfor holiday in self.filtered (lambda request: request.type == 'remove' and request.holiday_type == 'employee'):\n\t\t\tmeeting_values = holiday._prepare_holidays_meeting_values ()\n\t\t\tmeeting = self.env['calendar.event'].with_context (no_mail_to_attendees=True).create (meeting_values)\n\t\t\tholiday.write ({'meeting_id': meeting.id})\n\t\t\tholiday._create_resource_leave ()",
"def checkCloseDo (self, event, byMenu):\r\n \r\n if (self.dirty):\r\n bits = os.path.splitext(self.saveDestination)\r\n title = '\"' + os.path.basename(bits[0]) + '\"'\r\n if title == '\"\"': title = 'your story' \r\n\r\n message = 'Are you sure you want to close ' + title + ' without saving changes?'\r\n dialog = wx.MessageDialog(self, message, 'Unsaved Changes', \\\r\n wx.ICON_WARNING | wx.YES_NO | wx.NO_DEFAULT)\r\n if (dialog.ShowModal() == wx.ID_NO):\r\n event.Veto()\r\n return\r\n else:\r\n self.dirty = False\r\n \r\n # ask all our widgets to close any editor windows\r\n \r\n for w in list(self.storyPanel.widgets):\r\n if isinstance(w, PassageWidget):\r\n w.closeEditor()\r\n\r\n self.app.removeStory(self, byMenu)\r\n if event != None:\r\n event.Skip()\r\n self.Destroy()",
"def leave_group(self):\n\t\tself.sendMessage(ID_CTRL + \"LEAVE\", True)\n\t\tself.joinstate = 0\n\t\tself.createstate = 0\n\t\tself.__key = None",
"def onchange_leave_date(self):\n warning = {}\n if self.date_of_leave and self.date_of_leave < self.date_of_join:\n warning.update({\n 'title': _('Information'),\n 'message': _(\"Leaving Date Must Be Greater Than Joining Date.\")})\n self.date_of_leave = False\n return {'warning': warning}",
"def test_not_logged_user_cannot_leave(self):\n\n utils.test_not_logged_cannot_access(self, self.url)",
"def ev_windowleave(self, event: WindowEvent) -> None:",
"def leave(event):\n if tooltip.event is not None:\n widget.after_cancel(tooltip.event)\n tooltip.event = None\n tooltip.hidetip()",
"def isLeaveLeft(self,leave_type,days):\n if leave_type == 1 :\n return days<=self.earned_balance\n elif leave_type == 2 :\n return days<=self.hp_balance\n elif leave_type == 3 :\n return days*2<=self.hp_balance \n else :\n return False",
"def leaveEvent (self, event):\n if not self.isEnabled():\n event.accept()\n else:\n # background: #EAEAEA;\n self.setStyleSheet( \"\"\"\n QWidget {\n margin-left: 10px;\n padding-top: 6px;\n } \"\"\")\n event.accept()",
"def ev_windowleave(self, event: tcod.event.WindowEvent) -> T | None:",
"def leave_notify_event(self, widget, event):\n self.logger.debug(\"leaving widget...\")\n return self.make_callback('leave')",
"def can_leave_team(uid):\n current_user = get_user(uid=uid)\n current_team = api.team.get_team(current_user[\"tid\"])\n if current_team[\"team_name\"] == current_user[\"username\"]:\n return False\n if current_team[\"creator\"] == uid and current_team[\"size\"] != 1:\n return False\n if len(api.submissions.get_submissions(uid=uid)) > 0:\n return False\n return True",
"def _check_leave_request(self, cr, uid, request, token, context=None):\n holidays_obj = request.registry['hr.holidays']\n holidays_ids = holidays_obj.search(cr, uid, [\n ('token', '=', token)\n ])\n\n if len(holidays_ids) == 0:\n return request.website.render(\n \"tk_hr_approve_request.leave_request_not_found\"\n )\n\n _id = holidays_ids[0] if len(holidays_ids) else None\n if _id:\n leave_request = holidays_obj.browse(\n cr, uid, _id, context=context\n )\n return leave_request",
"def test_logged_user_in_group_can_leave(self):\n\n logged_user = utils.create_user_and_authenticate(self)\n self.group.users.add(logged_user)\n expected_url = reverse('my_groups_view')\n\n utils.test_can_access(self, self.url,\n post_redirect_url=expected_url)\n\n self.assertNotIn(logged_user, self.group.users.all())\n self.assertNotIn(self.group, logged_user.joined_groups.all())"
] | [
"0.71577907",
"0.6866498",
"0.68400884",
"0.6765718",
"0.660228",
"0.65838856",
"0.65016943",
"0.6318992",
"0.62988096",
"0.6177277",
"0.60716784",
"0.57531554",
"0.56196386",
"0.5402689",
"0.5401456",
"0.5372556",
"0.53161985",
"0.5214679",
"0.51996654",
"0.51943445",
"0.514657",
"0.50625813",
"0.5048542",
"0.50440204",
"0.50154537",
"0.50099415",
"0.49982277",
"0.49793795",
"0.49774083",
"0.49732193"
] | 0.7177349 | 0 |
Test LeaveForm process sick leave. | def test_sickleave_process(self):
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
staffprofile.leave_days = 21
staffprofile.sick_days = 10
staffprofile.save()
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
# 6 days of leave
start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
mommy.make(
"small_small_hr.AnnualLeave",
staff=staffprofile,
year=2017,
leave_type=Leave.SICK,
carried_over_days=4,
)
data = {
"staff": staffprofile.id,
"leave_type": Leave.SICK,
"start": start,
"end": end,
"review_reason": "Need a break",
"review_status": Leave.REJECTED,
}
form = LeaveForm(data=data)
self.assertTrue(form.is_valid())
leave = form.save()
self.assertEqual(staffprofile, leave.staff)
self.assertEqual(Leave.SICK, leave.leave_type)
self.assertEqual(start, leave.start)
self.assertEqual(end, leave.end)
self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)
self.assertEqual("Need a break", leave.review_reason)
self.assertEqual(Leave.REJECTED, leave.review_status) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def leave(self):\n self.pleaseQuit=1",
"def test_leaveform_process_with_overlap(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n # make sure leave obj already exists for said dates\n mommy.make(\n \"small_small_hr.Leave\",\n staff=staffprofile,\n start=start,\n end=end,\n leave_type=Leave.REGULAR,\n review_status=Leave.APPROVED,\n )\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n \"review_status\": Leave.REJECTED,\n }\n\n form = LeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.REJECTED, leave.review_status)",
"def leave(self, fsm):\n pass",
"def leave(self):\n p = GameOverPopup(self)\n p.open()",
"def test_leaveform_admin(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n \"review_status\": Leave.APPROVED,\n }\n\n form = LeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.APPROVED, leave.review_status)",
"def leaveEvent(self, event):\n self.destroy()",
"def test_leaveform_apply(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.PENDING, leave.review_status)",
"def test_leaveform_max_sick_days(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 20, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=0,\n allowed_days=10,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = LeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"Not enough sick days. Available sick days are 10.00\",\n form.errors[\"start\"][0],\n )\n self.assertEqual(\n \"Not enough sick days. Available sick days are 10.00\", form.errors[\"end\"][0]\n )",
"def test_leaveform_no_overlap(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n mommy.make(\n \"small_small_hr.Leave\",\n leave_type=Leave.REGULAR,\n start=start,\n end=end,\n review_status=Leave.APPROVED,\n staff=staffprofile,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"you cannot have overlapping leave days\", form.errors[\"start\"][0]\n )\n self.assertEqual(\n \"you cannot have overlapping leave days\", form.errors[\"end\"][0]\n )",
"def leave_group(self):\n\t\tself.sendMessage(ID_CTRL + \"LEAVE\", True)\n\t\tself.joinstate = 0\n\t\tself.createstate = 0\n\t\tself.__key = None",
"def ev_windowleave(self, event: WindowEvent) -> None:",
"def test_leaveform_max_days(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 7, 10, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n allowed_days=21,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = LeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"start\"][0],\n )\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"end\"][0],\n )",
"def test_sickleave_apply(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.SICK, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.PENDING, leave.review_status)",
"def test_leave_oversubscribe_off(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 40 days of leave\n start = datetime(2017, 6, 1, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 7, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=0,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Mini retirement\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"start\"][0],\n )\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"end\"][0],\n )",
"def ev_windowleave(self, event: tcod.event.WindowEvent) -> T | None:",
"def test_one_day_leave(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 1 day of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=0).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.PENDING, leave.review_status)\n self.assertEqual(\n 1,\n get_taken_leave_days(\n staffprofile, Leave.PENDING, Leave.REGULAR, 2017, 2017\n ),\n )",
"def test_leaveform_start_end(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 1, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = LeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(1, len(form.errors.keys()))\n self.assertEqual(\"end must be greater than start\", form.errors[\"end\"][0])\n\n # end year and start year must be the same\n\n end = datetime(2018, 6, 1, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n data2 = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form2 = LeaveForm(data=data2)\n self.assertFalse(form2.is_valid())\n self.assertEqual(2, len(form2.errors.keys()))\n self.assertEqual(\n \"start and end must be from the same year\", form2.errors[\"start\"][0]\n )\n self.assertEqual(\n \"start and end must be from the same year\", form2.errors[\"end\"][0]\n )",
"def leaveEvent(self, event):\n if self.responsive:\n self.in_focus = False\n self.set_background(self.default_background)\n event.accept()",
"def frameLeave(self):\n try:\n self.contentFrame.currFrame.leave()\n except AttributeError:\n pass",
"def leave_notify_event(self, widget, event):\n self.logger.debug(\"leaving widget...\")\n return self.make_callback('leave')",
"def leave_win_game(self):\n self.end = True\n self.canevas.config(bg='black')\n self.canevas.itemconfig(self.ball.ball, fill='black')\n self.canevas.itemconfig(self.paddle.paddle, fill='black')\n self.canevas.update()\n time.sleep(2)\n self.canevas.config(bg='light blue')\n self.canevas.itemconfig(self.ball.ball, fill='red')\n self.canevas.itemconfig(self.paddle.paddle, fill='grey')\n self.brick.next_level()",
"def leave(event):\n if tooltip.event is not None:\n widget.after_cancel(tooltip.event)\n tooltip.event = None\n tooltip.hidetip()",
"def on_mouse_leave(self, event):\n global controller\n if self == controller:\n self.set_help_text(None)\n if self.task:\n self.task.stop()\n self.task = None\n controller = None",
"def _leave(self, *args):\n if not self.game:\n raise ServerException('not playing a game')\n self.game.leave(self)\n self.game = self.player = None",
"async def leave(ctx, *, check=\"\"):\r\n # if botv.isAdmin(ctx.message.author) and check == \"now, bot\":\r\n # if necessary, save checks can go here; check presently commented out because botv can\r\n # fail to initialize in testing\r\n await bot.say(\"Allan, please add dialogue!\")\r\n quit()",
"def on_leave(self, event):\n self.pre_check(event)\n self.remove_player(event.guild.id)",
"def leave(self):\n print('%r: leaving', self)\n self.telepathy_text_chan.Close()",
"def test_not_logged_user_cannot_leave(self):\n\n utils.test_not_logged_cannot_access(self, self.url)",
"def leave_page(self):\n self.master.destroy()",
"def test_leave_oversubscribe(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 40 days of leave\n start = datetime(2017, 6, 1, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 7, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=0,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Mini retirement\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n\n # make it approved\n obj_type = ContentType.objects.get_for_model(leave)\n review = ModelReview.objects.get(content_type=obj_type, object_id=leave.id)\n review.review_status = ModelReview.APPROVED\n review.save()\n leave.refresh_from_db()\n\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=39).days, (leave.end - leave.start).days)\n self.assertEqual(\"Mini retirement\", leave.review_reason)\n self.assertEqual(Leave.APPROVED, leave.review_status)\n self.assertEqual(\n 40,\n get_taken_leave_days(\n staffprofile, Leave.APPROVED, Leave.REGULAR, 2017, 2017\n ),\n )\n self.assertEqual(-19, staffprofile.get_available_leave_days(year=2017))"
] | [
"0.67501426",
"0.6595614",
"0.653262",
"0.65064394",
"0.64996254",
"0.64383876",
"0.640696",
"0.6336153",
"0.6308009",
"0.6307785",
"0.62981766",
"0.6237008",
"0.62151253",
"0.61743563",
"0.60905206",
"0.60466564",
"0.6023712",
"0.5977555",
"0.5957751",
"0.59405535",
"0.5850377",
"0.58178145",
"0.58175945",
"0.5817201",
"0.5756601",
"0.5737176",
"0.5733691",
"0.5713244",
"0.5701572",
"0.5679359"
] | 0.666937 | 1 |
Test leave days sufficient. | def test_leaveform_max_days(self):
user = mommy.make("auth.User", first_name="Bob", last_name="Ndoe")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
staffprofile.leave_days = 21
staffprofile.sick_days = 10
staffprofile.save()
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
# 6 days of leave
start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
end = datetime(2017, 7, 10, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))
mommy.make(
"small_small_hr.AnnualLeave",
staff=staffprofile,
year=2017,
leave_type=Leave.REGULAR,
allowed_days=21,
)
data = {
"staff": staffprofile.id,
"leave_type": Leave.REGULAR,
"start": start,
"end": end,
"review_reason": "Need a break",
}
form = LeaveForm(data=data)
self.assertFalse(form.is_valid())
self.assertEqual(2, len(form.errors.keys()))
self.assertEqual(
"Not enough leave days. Available leave days are 21.00",
form.errors["start"][0],
)
self.assertEqual(
"Not enough leave days. Available leave days are 21.00",
form.errors["end"][0],
) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def Daysleftverification():\n pass",
"def isLeaveLeft(self,leave_type,days):\n if leave_type == 1 :\n return days<=self.earned_balance\n elif leave_type == 2 :\n return days<=self.hp_balance\n elif leave_type == 3 :\n return days*2<=self.hp_balance \n else :\n return False",
"def test_holidays_validate(self):\n self.request.sudo(self.user_1.id).holidays_validate()\n\n accrual = self.employee.get_leave_accrual(self.leave_type.id)\n self.assertEqual(accrual.total_hours, 22.5)",
"def test_one_day_leave(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 1 day of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=0).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.PENDING, leave.review_status)\n self.assertEqual(\n 1,\n get_taken_leave_days(\n staffprofile, Leave.PENDING, Leave.REGULAR, 2017, 2017\n ),\n )",
"def test_get_remaining_event_dates(self):\n date = EventDate.objects.create(\n event=self.event_show2,\n date=(timezone.now() - timedelta(days=10))\n )\n dates = list(get_remaining_event_dates(self.event_show2))\n self.assertFalse(date in dates)\n self.assertTrue(self.future_date in dates)\n self.assertFalse(self.past_date in dates)",
"def test_leave_oversubscribe_off(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 40 days of leave\n start = datetime(2017, 6, 1, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 7, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=0,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Mini retirement\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"start\"][0],\n )\n self.assertEqual(\n \"Not enough leave days. Available leave days are 21.00\",\n form.errors[\"end\"][0],\n )",
"def test_leaveform_max_sick_days(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 20, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=0,\n allowed_days=10,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = LeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"Not enough sick days. Available sick days are 10.00\",\n form.errors[\"start\"][0],\n )\n self.assertEqual(\n \"Not enough sick days. Available sick days are 10.00\", form.errors[\"end\"][0]\n )",
"def test_leaveform_process_with_overlap(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n # make sure leave obj already exists for said dates\n mommy.make(\n \"small_small_hr.Leave\",\n staff=staffprofile,\n start=start,\n end=end,\n leave_type=Leave.REGULAR,\n review_status=Leave.APPROVED,\n )\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n \"review_status\": Leave.REJECTED,\n }\n\n form = LeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.REJECTED, leave.review_status)",
"def onchange_leave_date(self):\n warning = {}\n if self.date_of_leave and self.date_of_leave < self.date_of_join:\n warning.update({\n 'title': _('Information'),\n 'message': _(\"Leaving Date Must Be Greater Than Joining Date.\")})\n self.date_of_leave = False\n return {'warning': warning}",
"def test_holidays():\n\n assert not datetime.datetime(2003, 12, 25) in TRADING_DATES\n assert not datetime.datetime(2003, 5, 26) in TRADING_DATES # memorial day",
"def test_course_run_unexpired(end_days, enroll_days, expected):\n now = now_in_utc()\n end_date = now + timedelta(days=end_days)\n enr_end_date = now + timedelta(days=enroll_days)\n assert (\n CourseRunFactory.create(\n end_date=end_date, enrollment_end=enr_end_date\n ).is_unexpired\n is expected\n )",
"def test_leaveform_no_overlap(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n mommy.make(\n \"small_small_hr.Leave\",\n leave_type=Leave.REGULAR,\n start=start,\n end=end,\n review_status=Leave.APPROVED,\n staff=staffprofile,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(2, len(form.errors.keys()))\n self.assertEqual(\n \"you cannot have overlapping leave days\", form.errors[\"start\"][0]\n )\n self.assertEqual(\n \"you cannot have overlapping leave days\", form.errors[\"end\"][0]\n )",
"def enough_days(self, cur, username, start_date, end_date):\n cur.execute('SELECT days_free FROM users WHERE username = ?', (username,))\n days_free = cur.fetchone()[0]\n days_between = abs(self.days_difference(start_date, end_date))\n return days_free >= days_between",
"def test_date_interval(self, init_date, end_date):\n self.calc_earning(self.security[(self.security['Date'] > init_date) &\n (self.security['Date'] < end_date)])",
"def test_leave_oversubscribe(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 40 days of leave\n start = datetime(2017, 6, 1, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 7, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=0,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Mini retirement\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n\n # make it approved\n obj_type = ContentType.objects.get_for_model(leave)\n review = ModelReview.objects.get(content_type=obj_type, object_id=leave.id)\n review.review_status = ModelReview.APPROVED\n review.save()\n leave.refresh_from_db()\n\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=39).days, (leave.end - leave.start).days)\n self.assertEqual(\"Mini retirement\", leave.review_reason)\n self.assertEqual(Leave.APPROVED, leave.review_status)\n self.assertEqual(\n 40,\n get_taken_leave_days(\n staffprofile, Leave.APPROVED, Leave.REGULAR, 2017, 2017\n ),\n )\n self.assertEqual(-19, staffprofile.get_available_leave_days(year=2017))",
"def test_course_run_not_beyond_enrollment(\n end_days, enroll_start_days, enroll_end_days, expected\n):\n now = now_in_utc()\n end_date = None if end_days is None else now + timedelta(days=end_days)\n enr_end_date = (\n None if enroll_end_days is None else now + timedelta(days=enroll_end_days)\n )\n enr_start_date = (\n None if enroll_start_days is None else now + timedelta(days=enroll_start_days)\n )\n\n assert (\n CourseRunFactory.create(\n end_date=end_date,\n enrollment_end=enr_end_date,\n enrollment_start=enr_start_date,\n ).is_not_beyond_enrollment\n is expected\n )",
"def test_sickleave_process(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n \"review_status\": Leave.REJECTED,\n }\n\n form = LeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.SICK, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.REJECTED, leave.review_status)",
"def test_non_holidays(self):\n # January 2nd was not public holiday between 2012 and 2017\n self.assertNotIn(date(2013, 1, 2), self.holidays)\n self.assertNotIn(date(2014, 1, 2), self.holidays)\n self.assertNotIn(date(2015, 1, 2), self.holidays)\n self.assertNotIn(date(2016, 1, 2), self.holidays)",
"def test_check_args_weekend(self):\n test_date = dt.datetime(2021, 6, 20, 11, 0, 0)\n with self.assertRaises(ValueError) as context:\n self.duedate.check_args(test_date, self.test_turn_time)\n self.assertTrue(\n \"You can submit requests during weekdays only.\" in str(context.exception))",
"def test_sickleave_apply(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.SICK, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.PENDING, leave.review_status)",
"def _check_dates_tarea_end(self, cr, uid, ids, context=None):\n for leave in self.read(cr, uid, ids, ['date_end_tarea', 'date_end_proyecto'], context=context):\n if leave['date_end_tarea'] and leave['date_end_proyecto']:\n if leave['date_end_tarea'] > leave['date_end_proyecto']:\n return False\n return True",
"def check_leave_request_holiday(self, cr, uid, att, context=None):\n if att:\n # check have overtime yet?\n att_name = datetime.strptime(att.name, DEFAULT_SERVER_DATETIME_FORMAT)\n param_obj = self.pool.get('ir.config_parameter') \n max_early = param_obj.get_param(cr, uid, 'maximum_early_minutes', default=60)\n max_late = param_obj.get_param(cr, uid, 'maximum_late_minutes', default=60)\n try:\n max_early = int (max_early)\n max_late = int (max_late)\n except:\n raise except_osv(_(\"Warning !\"),_(\"maximum_early_minutes or maximum_late_minutes in config parameter is incorrect\"))\n \n time_early = att_name + timedelta(minutes = max_early)\n time_late = att_name - timedelta(minutes = max_late)\n \n overtime_obj = self.pool.get('hr.overtime')\n overtime_confirmed_ids = overtime_obj.search(cr, uid, [('employee_id', '=', att.employee_id.id),\n ('mode', '=', 'by_employee'),\n ('name', '=', att.day_tz),\n ('datetime_start', '<=', time_early.strftime(DEFAULT_SERVER_DATETIME_FORMAT)),\n ('datetime_stop', '>=', time_late.strftime(DEFAULT_SERVER_DATETIME_FORMAT)),\n ('state', 'in', ['confirmed'])\n ])\n if overtime_confirmed_ids:\n return False\n \n public_holiday_obj = self.pool.get('trobz.hr.public.holidays')\n public_holiday_ids = public_holiday_obj.search(cr, uid, [('date', '=', att.day_tz), ('state', '=', 'approved')], context=context)\n if public_holiday_ids:\n return True\n sql = '''\n SELECT line.first_date_type, line.first_date, line.last_date_type, line.last_date\n FROM hr_holidays_line line JOIN hr_holidays h ON line.holiday_id = h.id\n WHERE h.employee_id = %d\n AND line.first_date <= '%s' AND line.last_date >= '%s'\n AND h.state = 'validate'\n '''% (att.employee_id.id, att.day_tz, att.day_tz)\n cr.execute(sql)\n for leave in cr.fetchall():\n if att.action == 'sign_out':\n afternoon = datetime.strptime(att.name_tz, DEFAULT_SERVER_DATETIME_FORMAT).hour >= 13\n else:\n afternoon = datetime.strptime(att.name_tz, DEFAULT_SERVER_DATETIME_FORMAT).hour >= 12\n if att.day_tz == leave[1]:\n if leave[0] == 'afternoon' and afternoon:\n return True\n if leave[0] == 'morning' and not afternoon:\n return True\n if leave[0] == 'full':\n return True\n if att.day_tz == leave[3]:\n if leave[2] == 'afternoon' and afternoon:\n return True\n if leave[2] == 'morning' and not afternoon:\n return True\n if leave[2] == 'full':\n return True\n if datetime.strptime(att.day_tz, '%Y-%m-%d') > datetime.strptime(leave[1], '%Y-%m-%d')\\\n and datetime.strptime(att.day_tz, '%Y-%m-%d') < datetime.strptime(leave[3], '%Y-%m-%d'):\n return True\n return False",
"def _validate_leave_request(self):\n\t\tfor holiday in self.filtered (lambda request: request.type == 'remove' and request.holiday_type == 'employee'):\n\t\t\tmeeting_values = holiday._prepare_holidays_meeting_values ()\n\t\t\tmeeting = self.env['calendar.event'].with_context (no_mail_to_attendees=True).create (meeting_values)\n\t\t\tholiday.write ({'meeting_id': meeting.id})\n\t\t\tholiday._create_resource_leave ()",
"def test_leaveform_apply(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = ApplyLeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.PENDING, leave.review_status)",
"def test_leaveform_admin(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 10, 7, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.REGULAR,\n carried_over_days=12,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.REGULAR,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n \"review_status\": Leave.APPROVED,\n }\n\n form = LeaveForm(data=data)\n self.assertTrue(form.is_valid())\n leave = form.save()\n self.assertEqual(staffprofile, leave.staff)\n self.assertEqual(Leave.REGULAR, leave.leave_type)\n self.assertEqual(start, leave.start)\n self.assertEqual(end, leave.end)\n self.assertEqual(timedelta(days=5).days, (leave.end - leave.start).days)\n self.assertEqual(\"Need a break\", leave.review_reason)\n self.assertEqual(Leave.APPROVED, leave.review_status)",
"def test_is_payday_negative2(self):\n date_to_check = date_class(2018,11,23)\n is_payday = self.pay_cycle.is_payday(date_to_check)\n assert is_payday == False\n\n date_to_check = date_class(2019,1,18)\n is_payday = self.pay_cycle.is_payday(date_to_check)\n assert is_payday == False\n\n date_to_check = date_class(2021,12,17)\n is_payday = self.pay_cycle.is_payday(date_to_check)\n assert is_payday == False",
"def _check_dates_tareas(self, cr, uid, ids, context=None):\n for leave in self.read(cr, uid, ids, ['date_start_tarea', 'date_end_tarea'], context=context):\n if leave['date_start_tarea'] and leave['date_end_tarea']:\n if leave['date_start_tarea'] > leave['date_end_tarea']:\n return False\n return True",
"def test_leaveform_start_end(self):\n user = mommy.make(\"auth.User\", first_name=\"Bob\", last_name=\"Ndoe\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n staffprofile.leave_days = 21\n staffprofile.sick_days = 10\n staffprofile.save()\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n # 6 days of leave\n start = datetime(2017, 6, 5, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n end = datetime(2017, 6, 1, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n mommy.make(\n \"small_small_hr.AnnualLeave\",\n staff=staffprofile,\n year=2017,\n leave_type=Leave.SICK,\n carried_over_days=4,\n )\n\n data = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form = LeaveForm(data=data)\n self.assertFalse(form.is_valid())\n self.assertEqual(1, len(form.errors.keys()))\n self.assertEqual(\"end must be greater than start\", form.errors[\"end\"][0])\n\n # end year and start year must be the same\n\n end = datetime(2018, 6, 1, 0, 0, 0, tzinfo=pytz.timezone(settings.TIME_ZONE))\n\n data2 = {\n \"staff\": staffprofile.id,\n \"leave_type\": Leave.SICK,\n \"start\": start,\n \"end\": end,\n \"review_reason\": \"Need a break\",\n }\n\n form2 = LeaveForm(data=data2)\n self.assertFalse(form2.is_valid())\n self.assertEqual(2, len(form2.errors.keys()))\n self.assertEqual(\n \"start and end must be from the same year\", form2.errors[\"start\"][0]\n )\n self.assertEqual(\n \"start and end must be from the same year\", form2.errors[\"end\"][0]\n )",
"def test_holidays(self):\n # New Year\n self.assertIn(date(2017, 1, 1), self.holidays)\n self.assertIn(date(2017, 1, 2), self.holidays)\n # Prešeren's day\n self.assertIn(date(2017, 2, 8), self.holidays)\n # Easter monday - 2016 and 2017\n self.assertIn(date(2016, 3, 28), self.holidays)\n self.assertIn(date(2017, 4, 17), self.holidays)\n # Day of uprising against occupation\n self.assertIn(date(2017, 4, 27), self.holidays)\n # Labour day\n self.assertIn(date(2017, 5, 1), self.holidays)\n # Labour day\n self.assertIn(date(2017, 5, 2), self.holidays)\n # Statehood day\n self.assertIn(date(2017, 6, 25), self.holidays)\n # Assumption day\n self.assertIn(date(2017, 8, 15), self.holidays)\n # Reformation day\n self.assertIn(date(2017, 10, 31), self.holidays)\n # Remembrance day\n self.assertIn(date(2017, 11, 1), self.holidays)\n # Christmas\n self.assertIn(date(2017, 12, 25), self.holidays)\n # Day of independence and unity\n self.assertIn(date(2017, 12, 26), self.holidays)",
"def test_is_payday_negative1(self):\n date_to_check = date_class(2020,12,25)\n is_payday = self.pay_cycle.is_payday(date_to_check)\n assert is_payday == False\n\n date_to_check = date_class(2021,12,24)\n is_payday = self.pay_cycle.is_payday(date_to_check)\n assert is_payday == False\n\n date_to_check = date_class(2022,11,11)\n is_payday = self.pay_cycle.is_payday(date_to_check)\n assert is_payday == False"
] | [
"0.7283778",
"0.69937253",
"0.69766676",
"0.6856982",
"0.6750773",
"0.66546655",
"0.6651582",
"0.6467872",
"0.6376243",
"0.6373224",
"0.6355564",
"0.635506",
"0.6275747",
"0.6274649",
"0.62272155",
"0.6193317",
"0.6190739",
"0.6174433",
"0.6147189",
"0.6145103",
"0.6110374",
"0.60788035",
"0.6075848",
"0.6072947",
"0.60652715",
"0.605758",
"0.60558826",
"0.60288835",
"0.5973555",
"0.5964119"
] | 0.7107806 | 1 |
Test StaffProfileUserForm image not required on update. | def test_staffprofile_user_form_no_image(self):
user = mommy.make("auth.User")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
path = os.path.join(BASE_DIR, "tests", "fixtures", "profile.png")
with open(path, "r+b") as image_file:
data = {
"first_name": "Bob",
"last_name": "Mbugua",
"id_number": "123456789",
"sex": StaffProfile.MALE,
"nhif": "111111",
"nssf": "222222",
"pin_number": "A0000000Y",
"emergency_contact_name": "Bob Father",
"emergency_contact_relationship": "Father",
"emergency_contact_number": "+254722111111",
"phone": "+254722111111",
"address": "This is the address.",
"birthday": "1996-01-27",
"image": image_file,
}
file_dict = {
"image": SimpleUploadedFile(
name=image_file.name,
content=image_file.read(),
content_type="image/png",
)
}
form = StaffProfileUserForm(
data=data, instance=staffprofile, request=request, files=file_dict
)
self.assertTrue(form.is_valid())
form.save()
staffprofile.refresh_from_db()
data2 = {
"first_name": "Bobbie",
"last_name": "B",
"id_number": 6666,
}
form2 = StaffProfileUserForm(data=data2, instance=staffprofile, request=request)
self.assertTrue(form2.is_valid())
form2.save()
staffprofile.refresh_from_db()
self.assertEqual("Bobbie B", user.staffprofile.get_name()) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_staffprofile_admin_form_no_image(self):\n user = mommy.make(\"auth.User\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n path = os.path.join(BASE_DIR, \"tests\", \"fixtures\", \"profile.png\")\n\n with open(path, \"r+b\") as image_file:\n data = {\n \"user\": user.id,\n \"first_name\": \"Bob\",\n \"last_name\": \"Mbugua\",\n \"id_number\": \"123456789\",\n \"sex\": StaffProfile.MALE,\n \"nhif\": \"111111\",\n \"nssf\": \"222222\",\n \"pin_number\": \"A0000000Y\",\n \"emergency_contact_name\": \"Bob Father\",\n \"emergency_contact_number\": \"+254722111111\",\n \"phone\": \"+254722111111\",\n \"address\": \"This is the address.\",\n \"birthday\": \"1996-01-27\",\n \"leave_days\": 21,\n \"sick_days\": 9,\n \"overtime_allowed\": True,\n \"start_date\": \"2017-09-25\",\n \"end_date\": \"2018-12-31\",\n \"image\": image_file,\n }\n\n file_dict = {\n \"image\": SimpleUploadedFile(\n name=image_file.name,\n content=image_file.read(),\n content_type=\"image/png\",\n )\n }\n\n form = StaffProfileAdminForm(\n data=data, instance=staffprofile, request=request, files=file_dict\n )\n self.assertTrue(form.is_valid())\n form.save()\n\n staffprofile.refresh_from_db()\n data2 = {\n \"user\": user.id,\n \"first_name\": \"Bobbie\",\n \"last_name\": \"B\",\n \"id_number\": 6666,\n }\n\n form2 = StaffProfileAdminForm(\n data=data2, instance=staffprofile, request=request\n )\n self.assertTrue(form2.is_valid())\n form2.save()\n staffprofile.refresh_from_db()\n self.assertEqual(\"Bobbie B\", user.staffprofile.get_name())",
"def test_profile_image_requested_field(self):\n self.register_get_user_response(self.user)\n cs_thread = make_minimal_cs_thread({\n \"id\": self.thread_id,\n \"course_id\": str(self.course.id),\n \"username\": self.user.username,\n \"user_id\": str(self.user.id),\n })\n self.register_get_thread_response(cs_thread)\n self.create_profile_image(self.user, get_profile_image_storage())\n response = self.client.get(self.url, {\"requested_fields\": \"profile_image\"})\n assert response.status_code == 200\n expected_profile_data = self.get_expected_user_profile(self.user.username)\n response_users = json.loads(response.content.decode('utf-8'))['users']\n assert expected_profile_data == response_users[self.user.username]",
"def test_save_profile_with_existing_photo(self):\n # Set a user with a photo\n user = UserFactory.create()\n file_path = os.path.join(os.path.dirname(__file__), \"normal_photo.jpg\")\n self._upload_photo(user, file_path)\n\n # Re-save profile without uploading a new photo.\n data = {\n \"full_name\": user.userprofile.full_name,\n \"email\": user.email,\n \"username\": user.username,\n \"lat\": 40.005814,\n \"lng\": -3.42071,\n \"externalaccount_set-MAX_NUM_FORMS\": \"1000\",\n \"externalaccount_set-INITIAL_FORMS\": \"0\",\n \"externalaccount_set-TOTAL_FORMS\": \"0\",\n \"language_set-MAX_NUM_FORMS\": \"1000\",\n \"language_set-INITIAL_FORMS\": \"0\",\n \"language_set-TOTAL_FORMS\": \"0\",\n \"basic_section\": \"\",\n }\n\n for field in UserProfilePrivacyModel._meta.fields:\n data[field.name] = MOZILLIANS\n data[\"privacy_tshirt\"] = PRIVATE\n\n with override_script_prefix(\"/en-US/\"):\n url = reverse(\"phonebook:profile_edit\")\n with self.login(user) as client:\n response = client.post(url, data=data, follow=True)\n eq_(response.status_code, 200)",
"def test_resource_user_resource_change_user_avatar_patch(self):\n pass",
"def test_invalid_update_post_form_with_image_missing(\n self, proto_post, proto_user\n ):\n\n data = {\n \"title\": \"This is the modified title\",\n \"categories\": [51, 52],\n \"overview\": \"This is the modified overview\",\n \"content\": \"This is the modified content\",\n \"featured\": True,\n \"status\": 1,\n }\n form = EditForm(data)\n assert not form.is_valid()\n assert len(form.errors) == 1\n assert \"thumbnail\" in form.errors",
"def test_upload_profile_pic(self):\n url = 'https://cdn.business2community.com/wp-content/uploads/2017/08/blank-profile-picture-973460_640.png'\n\n details = self.new_user.upload_profile_pic(url)\n\n self.assertEqual(self.new_user.profile_pic, details.get('url'))\n destroy(details.get('public_id'))\n\n # Test if invalid image path is inserted\n with self.assertRaises(Exception):\n details = self.new_user.upload_profile_pic('Random path')\n self.assertEqual(self.new_user.profile_pic, details.get('url'))",
"def test_profile_image_requested_field(self):\n user_2 = UserFactory.create(password=self.password)\n # Ensure that parental controls don't apply to this user\n user_2.profile.year_of_birth = 1970\n user_2.profile.save()\n source_threads = [\n self.create_source_thread(),\n self.create_source_thread({\"user_id\": str(user_2.id), \"username\": user_2.username}),\n ]\n\n self.register_get_user_response(self.user, upvoted_ids=[\"test_thread\"])\n self.register_get_threads_response(source_threads, page=1, num_pages=1)\n self.create_profile_image(self.user, get_profile_image_storage())\n self.create_profile_image(user_2, get_profile_image_storage())\n\n response = self.client.get(\n self.url,\n {\"course_id\": str(self.course.id), \"requested_fields\": \"profile_image\"},\n )\n assert response.status_code == 200\n response_threads = json.loads(response.content.decode('utf-8'))['results']\n\n for response_thread in response_threads:\n expected_profile_data = self.get_expected_user_profile(response_thread['author'])\n response_users = response_thread['users']\n assert expected_profile_data == response_users[response_thread['author']]",
"def makeProfile(request):\n upr = UserProfile()\n upr.user = request.user\n upr.image = \"images/no-pic.png\"\n upr.save()",
"def add_profile_photo():\n pass",
"def form_valid(self, form):\n User.objects.filter(username=self.object).update(\n user_image =form.cleaned_data['user_image'],\n )\n myfile = self.request.FILES['user_image']\n fs = FileSystemStorage()\n filename = fs.save(myfile.name, myfile)\n messages.success(self.request, 'Image uploaded successfully')\n return super().form_valid(form)",
"def change_profile_img(self):\n get_photo = reddit_scrapper()\n get_photo.get_image()\n # Send image to instagram profile picture on the hidden input tag\n profile_pic_button = self.driver.find_elements_by_xpath(\n '//*[@id=\"react-root\"]/section/main/section/div[3]/div[1]/div[2]/form/input')[0].send_keys(os.getcwd() + '/daily_image/daily.jpg')\n\n time.sleep(1)\n save_profile_pic = self.driver.find_elements_by_xpath(\n '//button[contains(text(), \"Save\")]')[0].click()\n time.sleep(1)\n self.driver.get(base_url)",
"def test_image(self):\n browser = self.layer.get_web_browser(smi_settings)\n\n image = self.layer.get_fixture('torvald.jpg')\n browser.login(self.username, self.username)\n self.assertEqual(browser.open('/root/edit'), 200)\n browser.macros.create(\n 'Silva Image', id='image', title='Torvald', file=image)\n self.assertEqual(\n browser.inspect.folder_listing, ['index', 'image'])\n\n # The user should by the last author on the content and container.\n self.assertEqual(\n self.root.sec_get_last_author_info().userid(),\n self.username)\n self.assertEqual(\n self.root.image.sec_get_last_author_info().userid(),\n self.username)\n\n # Visit the edit page\n self.assertEqual(\n browser.inspect.folder_listing['image'].click(),\n 200)\n self.assertEqual(browser.location, '/root/image/edit/tab_edit')\n\n # Change title\n form = browser.get_form('silvaObjects')\n self.assertEqual(\n form.get_control('field_image_title').value,\n 'Torvald')\n form.get_control('field_image_title').value = u'Picture of Torvald'\n form.get_control('submit:method').click()\n self.assertEqual(browser.inspect.feedback, ['Changes saved.'])\n\n # Change format\n form = browser.get_form('editform.scaling')\n self.assertEqual(form.get_control('field_web_format').value, 'JPEG')\n form.get_control('field_web_format').value = 'PNG'\n form.get_control('scale_submit:method').click()\n self.assertEqual(\n browser.inspect.feedback,\n ['Scaling and/or format changed.'])\n\n # Change scaling\n form = browser.get_form('editform.scaling')\n form.get_control('field_web_scaling').value = '100x200'\n form.get_control('scale_submit:method').click()\n self.assertEqual(\n browser.inspect.feedback,\n ['Scaling and/or format changed.'])\n\n # Change image\n form = browser.get_form('editform.upload')\n form.get_control('field_file').value = image\n form.get_control('upload_submit:method').click()\n self.assertEqual(\n browser.inspect.feedback,\n ['Image updated.'])\n\n self.assertEqual(\n browser.inspect.breadcrumbs,\n ['root', 'Picture of Torvald'])\n browser.inspect.breadcrumbs['root'].click()\n browser.macros.delete('image')",
"def user_profile(request):\n instance = Profile.objects.get(pk=request.user.pk)\n if request.method == \"POST\":\n\n form = ProfileForm(request.POST, request.FILES, instance=instance)\n form.save()\n return redirect(reverse('index'))\n messages.error(request, \"Profile Updated\")\n\n profile = ProfileForm(instance=instance)\n return render(request, 'profile.html', {'profile': profile, 'instance': instance})",
"def test_edit_image_instance(self):\n self.client.force_authenticate(self.user1)\n data = {\n \"img_name\": \"photo_user1\",\n \"img_description\": \"photo of user1\",\n \"favourite\": True,\n \"width\": 700,\n \"height\": 500,\n \"share_user\": [],\n }\n url = reverse(self.url_name_one, args=(1,))\n response = self.client.put(url, data, format=\"multipart\")\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n\n # Get edited object, convert to dict and compare with inputs\n obj = model_to_dict(Images.objects.get(id=1))\n for field, edited_data in data.items():\n self.assertEqual(edited_data, obj[field])\n # Check if image was edited to a new input\n edited_img = Image.open(self.test_pic_folder + \"/test.png\")\n self.assertEqual(edited_img.size, (700, 500))",
"def test_album_image_user(self):\n self.assertEqual(self.album.user, self.photo.user)",
"def test_user_get_registered(self):\n img = BytesIO(b'images/Screen_Shot_2019-12-24_at_12.33.34.png')\n img.name = \"myimage.png\"\n url = reverse(\"register_user\")\n response = self.client.post(url, { \"username\": \"janedoe\", \"email\":\"[email protected]\", \"password\":\"123\", \"photo\": img})\n self.assertEqual(response.status_code, 302)\n self.assertRedirects(response, reverse(\"login_user\"), 302)",
"def account():\n\n form = UpdateUserForm()\n\n if form.validate_on_submit():\n print(form)\n if form.picture.data:\n username = current_user.username\n pic = add_profile_pic(form.picture.data,username)\n current_user.profile_image = pic\n\n current_user.username = form.username.data\n current_user.email = form.email.data\n db.session.commit()\n flash('User Account Updated')\n return redirect(url_for('users.account'))\n\n elif request.method == 'GET':\n form.username.data = current_user.username\n form.email.data = current_user.email\n\n profile_image = url_for('static', filename='profile_pics/' + current_user.profile_image)\n return render_template('account.html', profile_image=profile_image, form=form)",
"def test_profile_image_requested_field(self):\n self.register_get_user_response(self.user)\n cs_comment_child = self.make_comment_data('test_child_comment', self.comment_id, children=[])\n cs_comment = self.make_comment_data(self.comment_id, None, [cs_comment_child])\n cs_thread = make_minimal_cs_thread({\n 'id': self.thread_id,\n 'course_id': str(self.course.id),\n 'children': [cs_comment],\n })\n self.register_get_thread_response(cs_thread)\n self.register_get_comment_response(cs_comment)\n self.create_profile_image(self.user, get_profile_image_storage())\n\n response = self.client.get(self.url, {'requested_fields': 'profile_image'})\n assert response.status_code == 200\n response_comments = json.loads(response.content.decode('utf-8'))['results']\n\n for response_comment in response_comments:\n expected_profile_data = self.get_expected_user_profile(response_comment['author'])\n response_users = response_comment['users']\n assert expected_profile_data == response_users[response_comment['author']]",
"def select_default_picture(sender, instance, **kwargs):\n if not instance.id:\n instance.picture = \"/static/user%s.png\"%(\"F\" if instance.female else \"M\")",
"def test_profile_image_requested_field(self):\n source_comments = [self.create_source_comment()]\n self.register_get_thread_response({\n \"id\": self.thread_id,\n \"course_id\": str(self.course.id),\n \"thread_type\": \"discussion\",\n \"children\": source_comments,\n \"resp_total\": 100,\n })\n self.register_get_user_response(self.user, upvoted_ids=[\"test_comment\"])\n self.create_profile_image(self.user, get_profile_image_storage())\n\n response = self.client.get(self.url, {\"thread_id\": self.thread_id, \"requested_fields\": \"profile_image\"})\n assert response.status_code == 200\n response_comments = json.loads(response.content.decode('utf-8'))['results']\n for response_comment in response_comments:\n expected_profile_data = self.get_expected_user_profile(response_comment['author'])\n response_users = response_comment['users']\n assert expected_profile_data == response_users[response_comment['author']]",
"def test_user_avatar_serving(self):\n User = get_user_model()\n test_user = User.objects.create_user('Bob', '[email protected]', 'pass123',\n set_default_avatar=True)\n\n avatar_url = reverse('misago:user-avatar', kwargs={\n 'pk': test_user.pk,\n 'hash': test_user.avatar_hash,\n 'size': 150,\n })\n response = self.client.get(avatar_url)\n\n self.assertEqual(response.status_code, 200)\n self.assertEqual(response['Content-Type'], 'image/png')",
"def profile():\n\n if not g.user:\n flash(\"Access unauthorized.\", \"danger\")\n return redirect(\"/\")\n\n form = UserEditForm(obj=g.user)\n\n if form.validate_on_submit():\n if not User.authenticate(g.user.username, form.data[\"password\"]):\n flash(\"Invalid password.\", \"danger\")\n return render_template('/users/edit.html', form=form) \n # data = {k:v for k,v in form.data.items() if k != \"csrf_token\"}\n # data[\"image_url\"] = data[\"image_url\"] or None\n # data[\"header_image_url\"] = data[\"header_image_url\"] or None\n\n g.user.username = form.data[\"username\"]\n g.user.email = form.data[\"email\"]\n g.user.image_url = form.data[\"image_url\"] or None\n g.user.header_image_url = form.data[\"header_image_url\"] or None\n g.user.bio = form.data[\"bio\"]\n\n db.session.commit()\n\n flash(\"Profile edited!\", \"success\")\n return redirect(f'/users/{g.user.id}')\n\n return render_template('/users/edit.html', form=form)",
"def edit_user_profile(request):\n user = request.user\n user_profile = UserProfile.objects.filter(user=user)[0]\n if request.method == 'POST':\n form = MemberProfileForm(request.POST)\n additional_form = MemberAdditionalProfileForm(request.POST)\n if form.is_valid() and additional_form.is_valid():\n cd = form.cleaned_data\n user.first_name = cd['first_name']\n user.last_name = cd['last_name']\n user.email = cd['email']\n user.save()\n if 'picture' in request.FILES:\n file = request.FILES['picture']\n user_profile.picture.save(file.name, file, save=True)\n user_profile.gravatar = additional_form.cleaned_data['gravatar']\n user_profile.save()\n return HttpResponseRedirect('/')\n else:\n form = MemberProfileForm(instance=request.user)\n additional_form = MemberAdditionalProfileForm(instance=user_profile)\n return render_to_response('edit_profile.html', locals())",
"def test_user_update(self):\n update_data = {\n \"username\": \"testnotUser\",\n \"email\": \"[email protected]\",\n \"first_name\": \"Test\",\n \"last_name\": \"User\",\n \"profile\": {\n \"user\": 1,\n \"contact_number\": \"9860476499\",\n \"address\": \"kapan\",\n \"education\": self.education,\n },\n }\n # files = {'media': open('accounts/tests/1.png', 'rb')}\n self.client.credentials(HTTP_AUTHORIZATION='Token ' + self.get_token())\n response = self.client.put(reverse(\"account:user-update\"), update_data, format=\"json\")\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(response.data['username'], \"testnotUser\")\n self.assertNotEqual(response.data['username'], \"testUser\")",
"def test_signup_photo(self, mocked_sendy):\n url = '/0/chefs'\n data = {\n 'email': '[email protected]',\n 'password': 'secret',\n 'name': 'John',\n 'surname': 'Doe',\n 'language': 'es',\n 'photo': IMAGES['png'],\n }\n resp = self.client.post(url, data=data)\n self.assertEqual(resp.status_code, 200)\n self.assertIn('auth', resp.data)\n self.assertIn('token', resp.data['auth'])\n # Check that the photo exists\n self.assertTrue(Chefs.objects.last().avatar_photos.all())",
"def edit_user():\n if CURR_USER_KEY in session:\n user = g.user\n form = ProfileEditForm(obj=user)\n\n if form.validate_on_submit():\n user.first_name = form.first_name.data\n user.last_name = form.last_name.data\n user.description = form.description.data\n user.email = form.email.data\n user.image_url = form.image_url.data or \"/static/images/default-pic.png\"\n\n db.session.commit()\n\n flash(\"Profile edited.\")\n return redirect(\"/profile\")\n\n return render_template('/profile/edit-form.html', form=form)\n else:\n return redirect('/login')",
"def setUp(self):\n\t\tself.username = fake.user_name()\n\t\tself.password = fake.password()\n\n\t\tself.image_name = 'test.png'\n\t\tself.img_url = 'static/img/test.png'\t\n\n\t\tself.user = User.objects.create_user(\n\t\t\tusername=self.username, password=self.password)\n\t\tself.user = authenticate(username=self.username, password=self.password)\n\t\tself.client.login(username=self.username, password=self.password)\n\n\t\tself.image = Image.frombytes('L', (100, 100), \"\\x00\" * 100 * 100)\n\t\tself.image = pil_to_django(self.image, 'png')\n\n\t\tself.created_image = UserPhoto(image=self.image, name=self.image_name, created_by=self.user).save()",
"def show_profile(request, profile_uuid):\n context = context_assign_user(request.user)\n context['profile_form'] = ProfileForm(instance=context['current_user'])\n if request.method == 'POST':\n form = ProfileForm(request.POST)\n if form.is_valid():\n Profile.objects.filter(pk=context['current_user'].id).update(bio=request.POST['bio'], palette=request.POST['palette'], iex_api_key=request.POST['iex_api_key'])\n messages.success(request, 'Your settings have been saved.')\n return redirect('dash:dashboard')\n errors = form.errors\n form = ProfileForm(request, request.POST)\n messages.warning(request, f\"There's a problem with the form: {errors}\")\n return render(request, 'dash/users/show_profile.html', context)",
"def test_valid_update_post_form_with_featured_not_checked(\n self, proto_post, proto_user\n ):\n\n testfile = (\n b\"\\x47\\x49\\x46\\x38\\x39\\x61\\x01\\x00\\x01\\x00\\x00\\x00\\x00\\x21\\xf9\\x04\"\n b\"\\x01\\x0a\\x00\\x01\\x00\\x2c\\x00\\x00\\x00\\x00\\x01\\x00\\x01\\x00\\x00\\x02\"\n b\"\\x02\\x4c\\x01\\x00\\x3b\"\n )\n data = {\n \"title\": \"This is the modified title\",\n \"categories\": [56, 57],\n \"overview\": \"This is the modified overview\",\n \"content\": \"This is the modified content\",\n \"featured\": False,\n \"status\": 1,\n }\n form = EditForm(\n data,\n {\n \"thumbnail\": SimpleUploadedFile(\n \"small.gif\",\n testfile,\n content_type=\"image/gif\",\n )\n },\n )\n assert form.is_valid()",
"def test_update_user(self):\n pass"
] | [
"0.7652241",
"0.6728502",
"0.66071117",
"0.6606162",
"0.6591402",
"0.6577869",
"0.6530034",
"0.64637095",
"0.640589",
"0.6283228",
"0.61998254",
"0.61802197",
"0.6131706",
"0.6127896",
"0.612147",
"0.6116664",
"0.6115772",
"0.6107535",
"0.60589457",
"0.60509855",
"0.6047661",
"0.6007653",
"0.5986846",
"0.59859794",
"0.59711367",
"0.5932106",
"0.59307075",
"0.5930277",
"0.5908457",
"0.5899105"
] | 0.78006655 | 0 |
Test StaffProfileAdminForm image not required when editing. | def test_staffprofile_admin_form_no_image(self):
user = mommy.make("auth.User")
staffprofile = mommy.make("small_small_hr.StaffProfile", user=user)
request = self.factory.get("/")
request.session = {}
request.user = AnonymousUser()
path = os.path.join(BASE_DIR, "tests", "fixtures", "profile.png")
with open(path, "r+b") as image_file:
data = {
"user": user.id,
"first_name": "Bob",
"last_name": "Mbugua",
"id_number": "123456789",
"sex": StaffProfile.MALE,
"nhif": "111111",
"nssf": "222222",
"pin_number": "A0000000Y",
"emergency_contact_name": "Bob Father",
"emergency_contact_number": "+254722111111",
"phone": "+254722111111",
"address": "This is the address.",
"birthday": "1996-01-27",
"leave_days": 21,
"sick_days": 9,
"overtime_allowed": True,
"start_date": "2017-09-25",
"end_date": "2018-12-31",
"image": image_file,
}
file_dict = {
"image": SimpleUploadedFile(
name=image_file.name,
content=image_file.read(),
content_type="image/png",
)
}
form = StaffProfileAdminForm(
data=data, instance=staffprofile, request=request, files=file_dict
)
self.assertTrue(form.is_valid())
form.save()
staffprofile.refresh_from_db()
data2 = {
"user": user.id,
"first_name": "Bobbie",
"last_name": "B",
"id_number": 6666,
}
form2 = StaffProfileAdminForm(
data=data2, instance=staffprofile, request=request
)
self.assertTrue(form2.is_valid())
form2.save()
staffprofile.refresh_from_db()
self.assertEqual("Bobbie B", user.staffprofile.get_name()) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_staffprofile_user_form_no_image(self):\n user = mommy.make(\"auth.User\")\n staffprofile = mommy.make(\"small_small_hr.StaffProfile\", user=user)\n\n request = self.factory.get(\"/\")\n request.session = {}\n request.user = AnonymousUser()\n\n path = os.path.join(BASE_DIR, \"tests\", \"fixtures\", \"profile.png\")\n\n with open(path, \"r+b\") as image_file:\n data = {\n \"first_name\": \"Bob\",\n \"last_name\": \"Mbugua\",\n \"id_number\": \"123456789\",\n \"sex\": StaffProfile.MALE,\n \"nhif\": \"111111\",\n \"nssf\": \"222222\",\n \"pin_number\": \"A0000000Y\",\n \"emergency_contact_name\": \"Bob Father\",\n \"emergency_contact_relationship\": \"Father\",\n \"emergency_contact_number\": \"+254722111111\",\n \"phone\": \"+254722111111\",\n \"address\": \"This is the address.\",\n \"birthday\": \"1996-01-27\",\n \"image\": image_file,\n }\n\n file_dict = {\n \"image\": SimpleUploadedFile(\n name=image_file.name,\n content=image_file.read(),\n content_type=\"image/png\",\n )\n }\n\n form = StaffProfileUserForm(\n data=data, instance=staffprofile, request=request, files=file_dict\n )\n self.assertTrue(form.is_valid())\n form.save()\n\n staffprofile.refresh_from_db()\n data2 = {\n \"first_name\": \"Bobbie\",\n \"last_name\": \"B\",\n \"id_number\": 6666,\n }\n\n form2 = StaffProfileUserForm(data=data2, instance=staffprofile, request=request)\n self.assertTrue(form2.is_valid())\n form2.save()\n staffprofile.refresh_from_db()\n self.assertEqual(\"Bobbie B\", user.staffprofile.get_name())",
"def test_invalid_update_post_form_with_image_missing(\n self, proto_post, proto_user\n ):\n\n data = {\n \"title\": \"This is the modified title\",\n \"categories\": [51, 52],\n \"overview\": \"This is the modified overview\",\n \"content\": \"This is the modified content\",\n \"featured\": True,\n \"status\": 1,\n }\n form = EditForm(data)\n assert not form.is_valid()\n assert len(form.errors) == 1\n assert \"thumbnail\" in form.errors",
"def add_profile_photo():\n pass",
"def test_upload_profile_pic(self):\n url = 'https://cdn.business2community.com/wp-content/uploads/2017/08/blank-profile-picture-973460_640.png'\n\n details = self.new_user.upload_profile_pic(url)\n\n self.assertEqual(self.new_user.profile_pic, details.get('url'))\n destroy(details.get('public_id'))\n\n # Test if invalid image path is inserted\n with self.assertRaises(Exception):\n details = self.new_user.upload_profile_pic('Random path')\n self.assertEqual(self.new_user.profile_pic, details.get('url'))",
"def edit_profile(request, pk=None):\n profiledetails = UserProfile.objects.filter(user=request.user).first()\n if UserProfile.objects.filter(user=request.user or request.user.is_superuser):\n\n if request.method == \"POST\":\n profile_details_form = UserProfileForm(request.POST, request.FILES, instance=profiledetails)\n if profile_details_form.is_valid():\n profiledetails = profile_details_form.save()\n messages.success(request, 'Your profile has been updated!')\n return redirect(user_profile)\n else:\n profile_details_form = UserProfileForm(instance=profiledetails)\n else:\n return HttpResponseForbidden()\n \n return render(request, 'newprofiledetails.html', {'profile_details_form': profile_details_form})",
"def select_default_picture(sender, instance, **kwargs):\n if not instance.id:\n instance.picture = \"/static/user%s.png\"%(\"F\" if instance.female else \"M\")",
"def test_image(self):\n browser = self.layer.get_web_browser(smi_settings)\n\n image = self.layer.get_fixture('torvald.jpg')\n browser.login(self.username, self.username)\n self.assertEqual(browser.open('/root/edit'), 200)\n browser.macros.create(\n 'Silva Image', id='image', title='Torvald', file=image)\n self.assertEqual(\n browser.inspect.folder_listing, ['index', 'image'])\n\n # The user should by the last author on the content and container.\n self.assertEqual(\n self.root.sec_get_last_author_info().userid(),\n self.username)\n self.assertEqual(\n self.root.image.sec_get_last_author_info().userid(),\n self.username)\n\n # Visit the edit page\n self.assertEqual(\n browser.inspect.folder_listing['image'].click(),\n 200)\n self.assertEqual(browser.location, '/root/image/edit/tab_edit')\n\n # Change title\n form = browser.get_form('silvaObjects')\n self.assertEqual(\n form.get_control('field_image_title').value,\n 'Torvald')\n form.get_control('field_image_title').value = u'Picture of Torvald'\n form.get_control('submit:method').click()\n self.assertEqual(browser.inspect.feedback, ['Changes saved.'])\n\n # Change format\n form = browser.get_form('editform.scaling')\n self.assertEqual(form.get_control('field_web_format').value, 'JPEG')\n form.get_control('field_web_format').value = 'PNG'\n form.get_control('scale_submit:method').click()\n self.assertEqual(\n browser.inspect.feedback,\n ['Scaling and/or format changed.'])\n\n # Change scaling\n form = browser.get_form('editform.scaling')\n form.get_control('field_web_scaling').value = '100x200'\n form.get_control('scale_submit:method').click()\n self.assertEqual(\n browser.inspect.feedback,\n ['Scaling and/or format changed.'])\n\n # Change image\n form = browser.get_form('editform.upload')\n form.get_control('field_file').value = image\n form.get_control('upload_submit:method').click()\n self.assertEqual(\n browser.inspect.feedback,\n ['Image updated.'])\n\n self.assertEqual(\n browser.inspect.breadcrumbs,\n ['root', 'Picture of Torvald'])\n browser.inspect.breadcrumbs['root'].click()\n browser.macros.delete('image')",
"def makeProfile(request):\n upr = UserProfile()\n upr.user = request.user\n upr.image = \"images/no-pic.png\"\n upr.save()",
"def test_edit_image_instance(self):\n self.client.force_authenticate(self.user1)\n data = {\n \"img_name\": \"photo_user1\",\n \"img_description\": \"photo of user1\",\n \"favourite\": True,\n \"width\": 700,\n \"height\": 500,\n \"share_user\": [],\n }\n url = reverse(self.url_name_one, args=(1,))\n response = self.client.put(url, data, format=\"multipart\")\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n\n # Get edited object, convert to dict and compare with inputs\n obj = model_to_dict(Images.objects.get(id=1))\n for field, edited_data in data.items():\n self.assertEqual(edited_data, obj[field])\n # Check if image was edited to a new input\n edited_img = Image.open(self.test_pic_folder + \"/test.png\")\n self.assertEqual(edited_img.size, (700, 500))",
"def test_save_profile_with_existing_photo(self):\n # Set a user with a photo\n user = UserFactory.create()\n file_path = os.path.join(os.path.dirname(__file__), \"normal_photo.jpg\")\n self._upload_photo(user, file_path)\n\n # Re-save profile without uploading a new photo.\n data = {\n \"full_name\": user.userprofile.full_name,\n \"email\": user.email,\n \"username\": user.username,\n \"lat\": 40.005814,\n \"lng\": -3.42071,\n \"externalaccount_set-MAX_NUM_FORMS\": \"1000\",\n \"externalaccount_set-INITIAL_FORMS\": \"0\",\n \"externalaccount_set-TOTAL_FORMS\": \"0\",\n \"language_set-MAX_NUM_FORMS\": \"1000\",\n \"language_set-INITIAL_FORMS\": \"0\",\n \"language_set-TOTAL_FORMS\": \"0\",\n \"basic_section\": \"\",\n }\n\n for field in UserProfilePrivacyModel._meta.fields:\n data[field.name] = MOZILLIANS\n data[\"privacy_tshirt\"] = PRIVATE\n\n with override_script_prefix(\"/en-US/\"):\n url = reverse(\"phonebook:profile_edit\")\n with self.login(user) as client:\n response = client.post(url, data=data, follow=True)\n eq_(response.status_code, 200)",
"def test_should_file_field(self):\n self.assertIn(\"image\", self.fields)",
"def test_valid_update_post_form_with_featured_not_checked(\n self, proto_post, proto_user\n ):\n\n testfile = (\n b\"\\x47\\x49\\x46\\x38\\x39\\x61\\x01\\x00\\x01\\x00\\x00\\x00\\x00\\x21\\xf9\\x04\"\n b\"\\x01\\x0a\\x00\\x01\\x00\\x2c\\x00\\x00\\x00\\x00\\x01\\x00\\x01\\x00\\x00\\x02\"\n b\"\\x02\\x4c\\x01\\x00\\x3b\"\n )\n data = {\n \"title\": \"This is the modified title\",\n \"categories\": [56, 57],\n \"overview\": \"This is the modified overview\",\n \"content\": \"This is the modified content\",\n \"featured\": False,\n \"status\": 1,\n }\n form = EditForm(\n data,\n {\n \"thumbnail\": SimpleUploadedFile(\n \"small.gif\",\n testfile,\n content_type=\"image/gif\",\n )\n },\n )\n assert form.is_valid()",
"def test_profile_image_requested_field(self):\n self.register_get_user_response(self.user)\n cs_thread = make_minimal_cs_thread({\n \"id\": self.thread_id,\n \"course_id\": str(self.course.id),\n \"username\": self.user.username,\n \"user_id\": str(self.user.id),\n })\n self.register_get_thread_response(cs_thread)\n self.create_profile_image(self.user, get_profile_image_storage())\n response = self.client.get(self.url, {\"requested_fields\": \"profile_image\"})\n assert response.status_code == 200\n expected_profile_data = self.get_expected_user_profile(self.user.username)\n response_users = json.loads(response.content.decode('utf-8'))['users']\n assert expected_profile_data == response_users[self.user.username]",
"def test_resource_user_resource_change_user_avatar_patch(self):\n pass",
"def edit_user_profile(request):\n user = request.user\n user_profile = UserProfile.objects.filter(user=user)[0]\n if request.method == 'POST':\n form = MemberProfileForm(request.POST)\n additional_form = MemberAdditionalProfileForm(request.POST)\n if form.is_valid() and additional_form.is_valid():\n cd = form.cleaned_data\n user.first_name = cd['first_name']\n user.last_name = cd['last_name']\n user.email = cd['email']\n user.save()\n if 'picture' in request.FILES:\n file = request.FILES['picture']\n user_profile.picture.save(file.name, file, save=True)\n user_profile.gravatar = additional_form.cleaned_data['gravatar']\n user_profile.save()\n return HttpResponseRedirect('/')\n else:\n form = MemberProfileForm(instance=request.user)\n additional_form = MemberAdditionalProfileForm(instance=user_profile)\n return render_to_response('edit_profile.html', locals())",
"def test04_add_photo_first_student_with_admin(self):\n actual_name_file = self.students_page.\\\n click_edit_students_list_button().\\\n click_add_new_student_button().\\\n add_photo(data['path_file_photo']).\\\n get_name_photo_file()\n self.assertEqual(actual_name_file,\n data['expected_name_file_photo'])",
"def edit_profile(request):\n form = ProfileForm(instance=request.user.profile)\n if request.method == \"POST\":\n form = ProfileForm(data=request.POST, files=request.FILES,\n instance=request.user.profile)\n if form.is_valid():\n form.save()\n return redirect('profile')\n return render(request, 'accounts/forms.html', {'form': form})",
"def test_edit(self):\n # Test using the Trovebox class\n html = self.client.photo.edit(self.photos[0])\n self.assertIn(\"<form\", html.lower())\n\n # And the Photo object directly\n html = self.photos[0].edit()\n self.assertIn(\"<form\", html.lower())",
"def test_invalid_add_post_form_with_image_missing(\n self, proto_post, proto_user\n ):\n\n data = {\n \"title\": \"This is the added title\",\n \"author\": proto_user,\n \"categories\": [21, 22],\n \"overview\": \"This is the added overview\",\n \"content\": \"This is the added content\",\n \"featured\": True,\n \"status\": 1,\n }\n form = PostForm(data)\n assert not form.is_valid()\n assert len(form.errors) == 1\n assert \"thumbnail\" in form.errors",
"def edit_profile(request):\n profile = request.user.profile\n form = forms.ProfileForm(instance=profile)\n\n if request.method == 'POST':\n if settings.SYSTEM_MAINTENANCE_NO_UPLOAD:\n # Allow submitting the form, but do not allow the photo to\n # be modified.\n if 'delete_photo' in request.POST or request.FILES:\n raise ServiceUnavailable()\n\n if 'edit_profile' in request.POST:\n # Update the profile and return to the same page. Place a message\n # at the top of the page: 'your profile has been updated'\n form = forms.ProfileForm(data=request.POST, files=request.FILES,\n instance=profile)\n if form.is_valid():\n form.save()\n messages.success(request, 'Your profile has been updated.')\n elif 'delete_photo' in request.POST:\n profile.delete_photo()\n messages.success(request, 'Your profile photo has been deleted.')\n\n if not form.errors:\n form = forms.ProfileForm(instance=profile)\n\n return render(request, 'user/edit_profile.html', {'form':form})",
"def formfield(self, **kwargs):\n kwargs['widget'] = DelAdminFileWidget\n kwargs['form_class'] = StdImageFormField\n return super(StdImageField, self).formfield(**kwargs)",
"def display_form(self):\n\n result = self.client.get(\"/submit_image\")\n self.assertIn(b\"multipart/form-data\", result.data)",
"def manage_myprofile(request):\n profile = request.user.get_profile()\n users_image = profile.users_image\n if not profile:\n raise Http404\n if request.method == 'POST':\n profile_form = MyProfileForm(request.POST, instance = profile)\n address_contact_form = AddressForm(request.POST,\n instance = profile.address_contact, prefix = 'contact')\n address_permanent_form = AddressForm(request.POST,\n instance = profile.address_permanent, prefix = 'permanent')\n\n if profile_form.is_valid() and address_contact_form.is_valid() \\\n and address_permanent_form.is_valid():\n address_contact = address_contact_form.save()\n address_permanent = address_permanent_form.save()\n\n profile_form.save(address_contact = address_contact,\n address_permanent = address_permanent)\n messages.success(request,\n _('your profile details saved sucessfully'))\n else:\n profile_form = MyProfileForm(instance = profile)\n address_contact_form = AddressForm(instance = profile.address_contact,\n prefix = 'contact')\n address_permanent_form = AddressForm(instance\n = profile.address_permanent, prefix = 'permanent')\n\n return render(request, 'myprofile.html', {\n 'profile_form': profile_form,\n 'address_contact_form': address_contact_form,\n 'address_permanent_form': address_permanent_form,\n 'users_image': users_image\n },\n )# Create your views here.",
"def test_user_profile_picture_invalid_image_fails(self):\n image_upload_url = PROCEDURE_URL\n\n payload = {\n 'name': 'temp',\n 'speciality': [self.speciality.pk],\n 'image': 'invalid image',\n 'overview': 'bla bla bla'\n }\n\n res = self.client.post(\n image_upload_url,\n payload,\n format=\"multipart\"\n )\n\n self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)",
"def test_image_display(self):\n\n result = self.client.get(\"/select_image\")\n\n self.assertIn(b\"/static/uploads/girl-glowing-skin-blue-eyes.jpg\", result.data)",
"def form_valid(self, form):\n User.objects.filter(username=self.object).update(\n user_image =form.cleaned_data['user_image'],\n )\n myfile = self.request.FILES['user_image']\n fs = FileSystemStorage()\n filename = fs.save(myfile.name, myfile)\n messages.success(self.request, 'Image uploaded successfully')\n return super().form_valid(form)",
"def user_profile(request):\n instance = Profile.objects.get(pk=request.user.pk)\n if request.method == \"POST\":\n\n form = ProfileForm(request.POST, request.FILES, instance=instance)\n form.save()\n return redirect(reverse('index'))\n messages.error(request, \"Profile Updated\")\n\n profile = ProfileForm(instance=instance)\n return render(request, 'profile.html', {'profile': profile, 'instance': instance})",
"def display_image(obj):\n\n # Hard code 30x30 due to Django admin template list size.\n return format_html(\n '<img src=%s alt=\"Profile picture\" width=\"30\" height=\"30\" />' %\n (obj.image.url if obj.image else static(\"images/users/default-profile.jpg\"))\n )",
"def delete_profile_pic(sender, instance, **kwargs):\n if instance.profile_picture:\n if instance.profile_picture.name != \"default.png\":\n path = instance.profile_picture.path\n os.remove(path)",
"def test_profile_image_requested_field(self):\n user_2 = UserFactory.create(password=self.password)\n # Ensure that parental controls don't apply to this user\n user_2.profile.year_of_birth = 1970\n user_2.profile.save()\n source_threads = [\n self.create_source_thread(),\n self.create_source_thread({\"user_id\": str(user_2.id), \"username\": user_2.username}),\n ]\n\n self.register_get_user_response(self.user, upvoted_ids=[\"test_thread\"])\n self.register_get_threads_response(source_threads, page=1, num_pages=1)\n self.create_profile_image(self.user, get_profile_image_storage())\n self.create_profile_image(user_2, get_profile_image_storage())\n\n response = self.client.get(\n self.url,\n {\"course_id\": str(self.course.id), \"requested_fields\": \"profile_image\"},\n )\n assert response.status_code == 200\n response_threads = json.loads(response.content.decode('utf-8'))['results']\n\n for response_thread in response_threads:\n expected_profile_data = self.get_expected_user_profile(response_thread['author'])\n response_users = response_thread['users']\n assert expected_profile_data == response_users[response_thread['author']]"
] | [
"0.7230931",
"0.65301496",
"0.60166913",
"0.6003747",
"0.5991661",
"0.59901386",
"0.5923907",
"0.5909916",
"0.58569574",
"0.58297867",
"0.57913953",
"0.5783393",
"0.5762155",
"0.57606184",
"0.5758113",
"0.569092",
"0.5689196",
"0.56693393",
"0.56252676",
"0.5616211",
"0.5583912",
"0.5574496",
"0.556061",
"0.5559172",
"0.55388206",
"0.5525463",
"0.5523979",
"0.5514353",
"0.5508305",
"0.5499645"
] | 0.7694595 | 0 |
convert a TSV row to a dict | def tsvRowToDict(row):
return {col: getattr(row, col) for col in row._columns_} | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def sv_to_dict(sv_data, cell_delimiter=\"\\t\"):\n result = {}\n rows = [row.split(cell_delimiter) for row in sv_data.splitlines()]\n\n if rows:\n header = rows.pop(0)\n header_len = len(header)\n\n for idx, header_col in enumerate(header):\n result[header_col] = []\n\n for row in rows:\n # Makes sure all rows size equals header's size\n if len(row) < header_len:\n [row.append(\"\") for x in range(0, (header_len - len(row)))]\n\n row_val = int(row[idx]) if row[idx].isdigit() else row[idx]\n result[header_col].append(row_val)\n\n return result",
"def make_dict(row):\n return dict((key[0], value) for key, value in zip(colnames, row))",
"def _row_to_dict(row, fields):\n dict_row = {}\n for i, value in enumerate(row):\n key = fields[i]\n if value and str(value).lower() == 'nan':\n value = None\n dict_row[key] = value\n return dict_row",
"def parse_line(line):\n parts = line.strip().split('\\t')\n\n output = {}\n\n if len(parts) != len(COLUMNS):\n raise Exception('Incorrect number of columns in line.', parts, COLUMNS)\n\n for key, value in zip(COLUMNS, parts):\n if key == 'attributes':\n output[key] = parse_attributes(value)\n elif key == 'start' or key == 'stop':\n output[key] = int(value)\n else:\n output[key] = value\n\n return output",
"def chunk_to_dict(chunk):\n csv_cols = chunk.keys()\n return [dict(zip(csv_cols, v)) for v in chunk.values]",
"def table_row_to_dict(row, make_quantity=True):\n data = {}\n for name, col in row.columns.items():\n val = row[name]\n\n if make_quantity and col.unit:\n val = Quantity(val, unit=col.unit)\n data[name] = val\n return data",
"def csv_to_dict(fp):\n import pandas as pd\n df = pd.read_csv(fp, index_col=0, header=None)\n d = df.to_dict(orient='index')\n d = {k: v.values() for k, v in d.iteritems()}\n return d",
"def make_row(row: TRowResult,\n include_ts: bool = False) -> Union[Dict[bytes, bytes],\n Dict[bytes, Tuple[bytes, int]]]:\n cell_map = _get_cell_map(row).items()\n if include_ts:\n return {name: (cell.value, cell.timestamp) for name, cell in cell_map}\n else:\n return {name: cell.value for name, cell in cell_map}",
"def read_table_to_dict(fname, typedict=None, row_processor=None, default_type=None, **kwargs):\n if isinstance(fname, basestring):\n data = read_table(fname, **kwargs)\n else:\n data = fname\n colkeys = data[0]\n datadict = collections.OrderedDict()\n for i in xrange(1, len(data)):\n row = data[i]\n if row_processor is not None:\n row = row_processor(row)\n if not row:\n continue\n rowkey = row[0]\n valdict = {}\n for j in xrange(1, len(colkeys)):\n key = colkeys[j]\n x = row[j]\n if typedict is not None and key in typedict:\n try:\n x = typedict[key](x)\n except:\n print >>sys.stderr, \"col key:\", key\n print >>sys.stderr, \"field value:\", x\n print >>sys.stderr, \"type / conversion function:\", typedict[key]\n raise\n elif default_type is not None:\n x = default_type(x)\n valdict[key] = x\n datadict[rowkey] = valdict\n return datadict",
"def read_table(data, coerce_type, transpose=False):\n lines = data.splitlines()\n headings = lines[1].split()\n result = {}\n for row in lines[2:]:\n items = row.split()\n for i, item in enumerate(items[1:]):\n if transpose:\n key = (headings[i], items[0])\n else:\n key = (items[0], headings[i])\n result[key] = coerce_type(item)\n return result",
"def get_row_dict(self, row):\n return self.get_dict(self.possibles[row], \"R\", row)",
"def tsv_to_json(tsv_file, json_file):\n import csv\n import json\n\n try:\n with open(tsv_file, 'r') as tsvFile:\n file_reader = csv.DictReader(tsvFile, dialect='excel-tab')\n row_list = list(file_reader)\n with open(json_file, 'w+') as jsonFile:\n jsonFile.write(json.dumps(row_list, indent=4))\n return 1\n except (ValueError, FileNotFoundError):\n return 0",
"def read_features_dict(path):\n # type_dict specifies the type conversion to be applied. Each key denotes\n # a column name and the value is the conversion. Columns not included are\n # converted to floats.\n type_dict = {'source': str, 'target': str, 'status': int}\n with open(path) as feature_file:\n reader = csv.DictReader(feature_file, delimiter='\\t')\n for row in reader:\n yield {key: type_dict.get(key, float)(value) for key, value in row.items()}",
"def fields_to_dict(lines, delim='\\t', strip_f=strip):\r\n result = {}\r\n for line in lines:\r\n # skip empty lines\r\n if strip_f:\r\n fields = map(strip_f, line.split(delim))\r\n else:\r\n fields = line.split(delim)\r\n if not fields[0]: # empty string in first field implies problem\r\n continue\r\n result[fields[0]] = fields[1:]\r\n return result",
"def _parse_table(value):\n lines = value.split('\\n')\n header = None\n rows = []\n\n for l in lines:\n if l.startswith('+-'):\n pass\n elif l.startswith('|'):\n columns = [c.strip() for c in l.split('|')[1:-1]]\n if header is None:\n header = columns\n else:\n row = {}\n for i, c in enumerate(columns):\n if len(header)-1 <= i:\n row[i] = c\n else:\n row[header[i]] = c\n rows.append(row)\n return rows",
"def make_dicts(cursor, row):\n return dict((cursor.description[idx][0], value)\n for idx, value in enumerate(row))",
"def make_dicts(cursor, row):\n return dict((cursor.description[idx][0], value)\n for idx, value in enumerate(row))",
"def to_python(self):\r\n mapping = {}\r\n for row in self.rows:\r\n mapping[row[0]] = _format_python_value(row[1])\r\n return mapping",
"def map_tsv(tsv_file: IO) -> Dict[int, Tuple[int, int]]:\n sys.stderr.write(\"Mapping articles to lines in the de tsv file...\\n\")\n last_id = None\n document_start = 0\n current_line = 0\n mapping_dict = dict()\n article_length = 0\n mapped_articles = 0\n\n line = tsv_file.readline()\n while line:\n article_id = int(line.split(\"\\t\")[0])\n # new article begins\n if article_id != last_id:\n if last_id is None:\n mapping_dict[article_id] = (document_start, article_length)\n else:\n mapping_dict[last_id] = (document_start, article_length)\n document_start = current_line\n article_length = 0\n last_id = article_id\n mapped_articles += 1\n\n if mapped_articles % 100000 == 0:\n sys.stderr.write(f\"Mapped {mapped_articles} de articles...\\n\")\n\n article_length += 1\n current_line = tsv_file.tell()\n line = tsv_file.readline()\n\n mapping_dict[last_id] = (document_start, article_length)\n\n sys.stderr.write(f\"Done, mapped {len(mapping_dict)} unique articles to lines.\\n\")\n return mapping_dict",
"def row_to_dict(keys):\n return lambda row: dict(izip(keys, row))",
"def table_to_dict(self, tab):\n dict = {}\n for colname in tab.colnames:\n dict[colname] = tab[colname].data\n return dict",
"def dict_factory(cursor, row):\n rowdict = {}\n for idx, col in enumerate(cursor.description):\n rowdict[col[0]] = row[idx]\n return rowdict",
"def read_changes_tsv(tsv_file):\r\n changes = {}\r\n with open(tsv_file, 'r') as info_file:\r\n for info in info_file:\r\n split_info = info.strip().split('/t')\r\n changes[split_info[0]] = split_info[1]\r\n return changes",
"def csvToDict(filepath):\n data = []\n with open(getcwd() + filepath, 'r') as dataset:\n assert csv.Sniffer().has_header(dataset.read(9999)), 'No headers'\n dataset.seek(0)\n dialect = csv.Sniffer().sniff(dataset.read(99999))\n dataset.seek(0)\n reader = csv.DictReader(dataset, dialect=dialect)\n headers = reader.fieldnames\n for row in reader:\n data.append(row)\n\n data = assert_data_format(data)[0]\n\n return data, headers",
"def parse_distmat_to_dict(table):\r\n\r\n col_headers, row_headers, data = parse_matrix(table)\r\n assert(col_headers == row_headers)\r\n\r\n result = defaultdict(dict)\r\n for (sample_id_x, row) in zip(col_headers, data):\r\n for (sample_id_y, value) in zip(row_headers, row):\r\n result[sample_id_x][sample_id_y] = value\r\n return result",
"def read_merged_file(self, tsv_filename):\n d = {}\n for index, line in enumerate(open(tsv_filename, 'rb')):\n chunks = line.replace('\\n', '').split('\\t')\n if index == 0:\n n1_pos = chunks.index('n1')\n n2_pos = chunks.index('n2')\n n3_pos = chunks.index('n3')\n \n s1_pos = chunks.index('s1')\n s2_pos = chunks.index('s2')\n s3_pos = chunks.index('s3')\n s12_pos = chunks.index('s12')\n s13_pos = chunks.index('s13')\n s23_pos = chunks.index('s23')\n s123_pos = chunks.index('s123')\n\n maxent_pos = chunks.index('est')\n ext_pos = chunks.index('ext')\n obs_pos = chunks.index('obs')\n ratio_pos = chunks.index('pair_trip_ratio')\n else:\n triangle = (int(chunks[s1_pos]), int(chunks[s2_pos]), int(chunks[s3_pos]), int(chunks[s12_pos]), int(chunks[s13_pos]), int(chunks[s23_pos]), int(chunks[s123_pos]))\n d[((chunks[n1_pos], chunks[n2_pos], chunks[n3_pos]), index)] = (float(chunks[maxent_pos]), float(chunks[ext_pos]), float(chunks[obs_pos]), float(chunks[ratio_pos]), triangle)\n return d",
"def parse_row(row):\n station_id = parse_substation_code(row)\n date_and_time = parse_date_and_time(row)\n depth = parse_depth(row)\n quality_code = parse_quality_code(row)\n value = parse_value(row)\n return [station_id, date_and_time, depth, quality_code, value]",
"def open_tsv(path, multi=False, encoding='utf-8'):\n xx = 0\n DRtn = {}\n for line in file_iter(path, encoding=encoding):\n line = line.strip('\\r\\n')\n LSplit = line.split('\\t')\n \n if xx == 0:\n LKeys = LSplit\n else: \n yy = 0\n DItem = {}\n for key in LKeys:\n DItem[key] = LSplit[yy]\n yy += 1\n \n key = DItem[LKeys[0]]\n if not multi:\n # A single primary key\n assert not key in DRtn, key\n DRtn[key] = DItem\n elif multi == -1:\n # Country codes HACK!\n if key in DRtn: \n continue\n DRtn[key] = DItem\n else: \n # Can have multiple primary keys\n # (e.g. language index info)\n if not key in DRtn: \n DRtn[key] = []\n DRtn[key].append(DItem)\n del DItem[LKeys[0]]\n xx += 1\n return DRtn",
"def readData(filename):\r\n data_d = {}\r\n with open(filename) as f:\r\n df = pd.read_csv(f, header=0, dtype='str',sep=';')\r\n df = df.loc[:, ~df.columns.str.contains('^Unnamed')]\r\n df_dict = df.to_dict(orient='index')\r\n for i,val in df_dict.iteritems(): \r\n clean_row = [(k, p.proc(v)) for (k, v) in val.iteritems()]\r\n row_id = val['line_nr']\r\n data_d[row_id] = dict(clean_row)\r\n return data_d\r\n return df",
"def parse_csv_row(self, row):\n\n for key in self.field_map:\n if self.field_map[key] is not None:\n if key == 'marking':\n self.obstacle_data[key] = self.get_marking_value(row[self.field_map[key]].strip())\n elif key == 'lighting':\n self.obstacle_data[key] = self.get_lighting_value(row[self.field_map[key]].strip())\n elif key == 'obst_type':\n self.obstacle_data['obst_type_id'] = self.get_obstacle_type_id(row[self.field_map[key]].strip())\n else:\n self.obstacle_data[key] = row[self.field_map[key]].strip()"
] | [
"0.6821006",
"0.6447306",
"0.63589346",
"0.6260246",
"0.60955495",
"0.6078354",
"0.5975733",
"0.58779573",
"0.58404744",
"0.5821709",
"0.58196867",
"0.58150125",
"0.58036083",
"0.5798473",
"0.5777305",
"0.57152206",
"0.57152206",
"0.5710342",
"0.5708072",
"0.57009596",
"0.56814593",
"0.56658614",
"0.5659527",
"0.5634537",
"0.56218624",
"0.5612922",
"0.56067383",
"0.56040496",
"0.55921423",
"0.55841416"
] | 0.83762723 | 0 |
Testing whether the clusters are correctly created and whether the old and new dataframes are exactly the same aside from the Topic column | def test_cluster_embeddings(base_bertopic, samples, features, centers):
embeddings, _ = make_blobs(n_samples=samples, centers=centers, n_features=features, random_state=42)
documents = [str(i + 1) for i in range(embeddings.shape[0])]
old_df = pd.DataFrame({"Document": documents,
"ID": range(len(documents)),
"Topic": None})
new_df, _ = base_bertopic._cluster_embeddings(embeddings, old_df)
assert len(new_df.Topic.unique()) == centers
assert "Topic" in new_df.columns
pd.testing.assert_frame_equal(old_df.drop("Topic", 1), new_df.drop("Topic", 1)) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_assign_clusters_nonsparse(self, new_data, filename):\n\n sqlalchemy_conn_str = open('../conf/sqlalchemy_conn_str.txt', 'r').read()\n engine = create_engine(sqlalchemy_conn_str)\n if self.split_type == 'random':\n averages_seg = pd.read_sql('SELECT * FROM clust_nonsparse_avebysegment_random',con=engine)\n averages_rt = pd.read_sql('SELECT * FROM clust_nonsparse_avebyrt_random',con=engine)\n elif self.split_type == 'date':\n averages_seg = pd.read_sql('SELECT * FROM clust_nonsparse_avebysegment_date',con=engine)\n averages_rt = pd.read_sql('SELECT * FROM clust_nonsparse_avebyrt_date',con=engine)\n \n averages_seg['exists'] = 1\n test_data_exists = pd.merge(new_data, averages_seg[['segment_id', 'day_of_week', 'time_idx', 'exists']], on=['segment_id', 'day_of_week', 'time_idx'])\n test_exists = test_data_exists[test_data_exists['exists']==1]\n test_notexists = test_data_exists[test_data_exists['exists']!=1]\n \n test_exists_tmp = test_exists[['date','time','date_idx', 'time_idx', 'day_of_week', 'segment_id', 'road_type', 'lat1', 'lat2', 'lon1', 'lon2']]\n test_notexists_tmp = test_notexists[['date','time','date_idx', 'time_idx', 'day_of_week', 'segment_id', 'road_type', 'lat1', 'lat2', 'lon1', 'lon2']]\n test_matrix_exists = pd.merge(test_exists_tmp, averages_seg, how='left', on=['segment_id', 'day_of_week', 'time_idx'])\n test_matrix_notexists = pd.merge(test_notexists_tmp, averages_rt, how='left', on=['road_type', 'day_of_week', 'time_idx'])\n test_matrix = pd.concat([test_matrix_exists, test_matrix_notexists])\n test_matrix = test_matrix.fillna(0)\n \n test_nonsparse_matrix = test_matrix[['segment_id','date','time','date_idx', 'time_idx', 'day_of_week', 'road_type', 'lat1', 'lat2', 'lon1', 'lon2', 'level_binary', 'level_min', 'level_max', 'level_mean', 'level_count']]\n test_nonsparse_matrix = self.scale_matrix(test_nonsparse_matrix)\n\n print('clustering new data...')\n cluster_model = joblib.load(filename)\n cluster_predictions = cluster_model.predict(test_nonsparse_matrix.drop(columns = ['segment_id','date','time']))\n \n clusterdf = pd.DataFrame(cluster_predictions,columns = ['cluster_nonsparse']).reset_index()\n keydf = test_matrix[['segment_id','date','time']].reset_index()\n test_cluster_df_sparse = pd.merge(clusterdf, keydf, on=['index'])\n \n return test_cluster_df_sparse[['segment_id','date','time','cluster_nonsparse']]",
"def test_cluster_embeddings(samples, features, centers):\n embeddings, _ = make_blobs(n_samples=samples, centers=centers, n_features=features, random_state=42)\n documents = [str(i + 1) for i in range(embeddings.shape[0])]\n old_df = pd.DataFrame({\"Document\": documents,\n \"ID\": range(len(documents)),\n \"Topic\": None})\n model = BERTopic()\n new_df, _ = model._cluster_embeddings(embeddings, old_df)\n\n assert len(new_df.Topic.unique()) == centers\n assert \"Topic\" in new_df.columns\n pd.testing.assert_frame_equal(old_df.drop(\"Topic\", 1), new_df.drop(\"Topic\", 1))",
"def test_assign_clusters_sparse(self, new_data, filename):\n\n sqlalchemy_conn_str = open('../conf/sqlalchemy_conn_str.txt', 'r').read()\n engine = create_engine(sqlalchemy_conn_str)\n \n print('creating test sparse matrix...')\n if self.split_type == 'random':\n averages_seg = pd.read_sql('SELECT * FROM clust_sparse_avebysegment_random',con=engine)\n averages_rt = pd.read_sql('SELECT * FROM clust_sparse_avebyrt_random',con=engine)\n if self.split_type == 'date':\n averages_seg = pd.read_sql('SELECT * FROM clust_sparse_avebysegment_date',con=engine)\n averages_rt = pd.read_sql('SELECT * FROM clust_sparse_avebyrt_date',con=engine)\n\n averages_seg['exists'] = 1\n test_data_exists = pd.merge(new_data, averages_seg[['segment_id', 'exists']], on=['segment_id'])\n test_exists = test_data_exists[test_data_exists['exists']==1]\n test_notexists = test_data_exists[test_data_exists['exists']!=1] \n \n test_matrix_exists = pd.merge(test_exists[['segment_id', 'road_type']], averages_seg, how='left', on=['segment_id'])\n test_matrix_notexists = pd.merge(test_notexists[['segment_id', 'road_type']], averages_rt, how='left', on=['road_type'])\n test_matrix = pd.concat([test_matrix_exists, test_matrix_notexists])\n test_matrix = test_matrix.fillna(0) \n \n test_sparse_matrix = test_matrix.drop(columns = ['segment_id', 'road_type', 'exists', 'index', 'roadtypekey', 'segmentskey'])\n num = list(range(len(list(averages_seg))-4))\n columns = [str(item) for item in num]\n test_sparse_matrix = test_sparse_matrix[columns] \n \n print('clustering new data...')\n cluster_model = joblib.load(filename)\n cluster_predictions = cluster_model.predict(test_sparse_matrix)\n \n clusterdf = pd.DataFrame(cluster_predictions,columns = ['cluster_sparse'])\n clusterdf['index'] = clusterdf.index\n segmentdf = test_matrix['segment_id'].to_frame()\n segmentdf['index'] = segmentdf.index\n test_cluster_df_sparse = pd.merge(clusterdf, segmentdf, on=['index'])\n test_cluster_df_sparse = test_cluster_df_sparse[['segment_id','cluster_sparse']].groupby(['segment_id','cluster_sparse']).count()\n \n return test_cluster_df_sparse.reset_index()",
"def validate_new_curriculum_topics(self, curriculum_topics):\n\n for cur in curriculum_topics:\n # check to make sure its in the general topics table\n self.db_cursor.execute(\"\"\"SELECT COUNT(*) FROM Topic WHERE name = %s\"\"\", (cur,))\n ct = self.db_cursor.fetchone()\n ct = ct[0]\n if ct == 0:\n print(\"topic does not exist, we must create new one or cancel\") # todo\n\n return True",
"def check_if_cluster_was_upgraded():\n return True if \"replaces\" in get_ocs_csv().get().get(\"spec\") else False",
"def test_assign_clusters_sparse_long(self, new_data, filename):\n\n sqlalchemy_conn_str = open('../conf/sqlalchemy_conn_str.txt', 'r').read()\n engine = create_engine(sqlalchemy_conn_str)\n \n print('creating test sparse matrix...')\n if self.split_type == 'random':\n averages_seg = pd.read_sql('SELECT * FROM clust_sparse_long_avebysegment_random',con=engine)\n if self.split_type == 'date':\n averages_seg = pd.read_sql('SELECT * FROM clust_sparse_long_avebysegment_date',con=engine)\n \n test_matrix = pd.merge(new_data['segment_id'].to_frame(), averages_seg, how='inner', on=['segment_id'])\n test_sparse_matrix = test_matrix.drop(columns = ['segment_id','segmentskey','index'])\n \n print('clustering new data...')\n cluster_model = joblib.load(filename)\n cluster_predictions = cluster_model.predict(test_sparse_matrix)\n \n clusterdf = pd.DataFrame(cluster_predictions,columns = ['cluster_sparse_long'])\n clusterdf['index'] = clusterdf.index\n segmentdf = test_matrix['segment_id'].to_frame()\n segmentdf['index'] = segmentdf.index\n test_cluster_df_sparse = pd.merge(clusterdf, segmentdf, on=['index'])\n test_cluster_df_sparse = test_cluster_df_sparse[['segment_id', 'cluster_sparse_long']].groupby(['segment_id', 'cluster_sparse_long']).count()\n \n return test_cluster_df_sparse.reset_index()",
"def test_topic_reduction_edge_cases():\n model = BERTopic()\n nr_topics = 5\n model.nr_topics = 100\n old_documents = pd.DataFrame({\"Document\": newsgroup_docs,\n \"ID\": range(len(newsgroup_docs)),\n \"Topic\": np.random.randint(-1, nr_topics-1, len(newsgroup_docs))})\n model._update_topic_size(old_documents)\n model._extract_topics(old_documents)\n old_freq = model.get_topic_freq()\n\n new_documents = model._reduce_topics(old_documents)\n new_freq = model.get_topic_freq()\n\n assert not set(old_documents.Topic).difference(set(new_documents.Topic))\n pd.testing.assert_frame_equal(old_documents, new_documents)\n pd.testing.assert_frame_equal(old_freq, new_freq)",
"def compare(status, old_pred):\n num_cl = len(status)\n new_clusters = 0\n merged_clusters = 0\n\n for i in range(num_cl):\n group = status[i]\n # print('Group: ', group)\n\n were_noise = True\n has_merged = False\n\n for k in range(len(group)):\n sample = group[k]\n old_tag = old_pred[sample]\n # print('Sample {:d}, old tag: {:d}'.format(sample, int(old_tag)))\n if old_tag != -1:\n were_noise = False\n if len(np.unique(old_pred[group])) > 1:\n has_merged = True\n\n if were_noise:\n new_clusters += 1\n if has_merged:\n merged_clusters += len(np.unique(old_pred[group])) - 1\n\n return new_clusters, merged_clusters",
"def generate_clusters_n(df, tweet_deleted):\n # cluster_labels, n_clusters = dbscan(normalised_df, true_labels, 0.25, 30)\n # print(\"normalised_df.head()\", normalised_df.head())\n clusterer = hdbscan.HDBSCAN(min_cluster_size=10)\n clusterer.fit(df)\n labels = clusterer.labels_\n cluster_groups = {}\n for i in labels:\n if cluster_groups.get(i):\n cluster_groups[i] = cluster_groups[i] + 1\n else:\n cluster_groups[i] = 1\n print(\"cluster_groups\", cluster_groups)\n df[\"cluster\"] = labels\n df[\"tweet_deleted\"] = tweet_deleted\n cluster_results = list()\n for cluster_no in cluster_groups.keys():\n print(\"++++++++++\")\n print(\"cluster_no\", cluster_no)\n cluster_result = list()\n cluster_result.append(cluster_no)\n\n cluster = df.mask('cluster', cluster_no)\n print(cluster_no, \" :\")\n tweet_deleted = cluster.mask('tweet_deleted', True).shape[0]\n not_tweet_deleted = cluster.mask('tweet_deleted', False).shape[0]\n print(\"deleted_df len:\", tweet_deleted)\n print(\"not_deleted_df len:\", not_tweet_deleted)",
"def cluster(self):\n assert False",
"def test_other_clusters(self):\n url = \"?filter[time_scope_units]=month&filter[time_scope_value]=-1&filter[resolution]=monthly&filter[limit]=1&group_by[cluster]=*\" # noqa: E501\n query_params = self.mocked_query_params(url, OCPCpuView)\n handler = OCPReportQueryHandler(query_params)\n\n query_data = handler.execute_query()\n for data in query_data.get(\"data\"):\n for cluster_data in data.get(\"clusters\"):\n cluster_name = cluster_data.get(\"cluster\", \"\")\n if cluster_name == \"Other\":\n for cluster_value in cluster_data.get(\"values\"):\n self.assertTrue(len(cluster_value.get(\"clusters\", [])) == 1)\n self.assertTrue(len(cluster_value.get(\"source_uuid\", [])) == 1)\n elif cluster_name == \"Others\":\n for cluster_value in cluster_data.get(\"values\"):\n self.assertTrue(len(cluster_value.get(\"clusters\", [])) > 1)\n self.assertTrue(len(cluster_value.get(\"source_uuid\", [])) > 1)",
"def __create_cluster_profiles(self,\n clustered_dataframes,\n shrunken_df,\n numerical_features,\n le_map,\n output_path,\n find_nearest_on_cols=False,\n show=True):\n\n def find_nearest(numbers, target):\n \"\"\"\n Find the closest fitting number to the target number\n \"\"\"\n numbers = np.asarray(numbers)\n idx = (np.abs(numbers - target)).argmin()\n return numbers[idx]\n\n cluster_profiles_df = pd.DataFrame(columns=shrunken_df.columns).drop(\n 'Cluster_Name', axis=1)\n rows_count = 0\n for cluster_identfier, cluster_dataframe in \\\n clustered_dataframes.items():\n df = pd.DataFrame(columns=cluster_dataframe.columns)\n df = df.append(cluster_dataframe.mean(), ignore_index=True)\n df.index = [cluster_identfier]\n\n if cluster_dataframe.shape[0] <= 1:\n continue\n\n # Attempt to convert numbers found within the full set of data\n for col in cluster_dataframe.columns:\n if col not in numerical_features or find_nearest_on_cols:\n df[col] = find_nearest(numbers=shrunken_df[\n col].value_counts().index.tolist(),\n target=df[col].values[0])\n\n # Evaluate cluster dataframe by dataframe\n eval_df = pd.DataFrame(columns=cluster_dataframe.columns)\n eval_df = eval_df.append(\n cluster_dataframe.mean(), ignore_index=True)\n eval_df = eval_df.append(\n cluster_dataframe.min(), ignore_index=True)\n eval_df = eval_df.append(\n cluster_dataframe.median(),\n ignore_index=True)\n eval_df = eval_df.append(\n cluster_dataframe.max(), ignore_index=True)\n eval_df = eval_df.append(\n cluster_dataframe.std(), ignore_index=True)\n eval_df = eval_df.append(\n cluster_dataframe.var(), ignore_index=True)\n eval_df.index = [\"Mean\", \"Min\", \"Median\",\n \"Max\", \"Standard Deviation\", \"Variance\"]\n\n if show:\n print(\"Total found in {0} is {1}\".format(\n cluster_identfier, cluster_dataframe.shape[0]))\n self.__render_mpl_table(\n df,\n sub_dir=output_path,\n filename=cluster_identfier +\n \"_Means_Rounded_To_Nearest_Real_Numbers\",\n header_columns=0,\n col_width=4.0)\n\n self.__render_mpl_table(\n eval_df,\n sub_dir=output_path,\n filename=cluster_identfier +\n \"_Eval_Df\",\n header_columns=0,\n col_width=4.0)\n display(df)\n display(eval_df)\n self.__vertical_spacing(7)\n\n cluster_profiles_df = cluster_profiles_df.append(\n self.__decode_df(df, le_map))\n\n rows_count += cluster_dataframe.shape[0]\n\n return rows_count, cluster_profiles_df",
"def test_example(self): \n num_clusters = 2\n iterations = 5\n ts1 = 2.5 * np.random.randn(100,) + 3\n X_1 = pd.Series(ts1)\n ts2 = 2 * np.random.randn(100,) + 5\n X_2 = pd.Series(ts2)\n ts3 = -2.5 * np.random.randn(100,) + 3\n X_3 = pd.Series(ts3)\n list_of_series = [X_1, X_2, X_3]\n from pynuTS.clustering import DTWKmeans\n clts = DTWKmeans(num_clust = num_clusters, num_iter = iterations)\n clts.fit(list_of_series)\n ts4 = 3.5 * np.random.randn(100,) + 2\n ts5 = -3.5 * np.random.randn(100,) + 2\n X_4 = pd.Series(ts4)\n X_5 = pd.Series(ts5)\n list_new = [X_4, X_5]\n clustering_dict = clts.predict(list_new)\n\n assert type(clustering_dict) is dict\n assert len(clustering_dict) == num_clusters",
"def test_store_cluster(self):\r\n\r\n self.tmpdir = mkdtemp(dir=\"./\", suffix=\"_store_clusters/\")\r\n\r\n self.files_to_remove.append(self.tmpdir + \"singletons.fasta\")\r\n self.files_to_remove.append(self.tmpdir + \"centroids.fasta\")\r\n\r\n # empty map results in empty files\r\n store_clusters({}, self.tiny_test, self.tmpdir)\r\n actual_centroids = list(\r\n parse_fasta(open(self.tmpdir + \"centroids.fasta\")))\r\n self.assertEqual(actual_centroids, [])\r\n actual_singletons = list(\r\n parse_fasta(open(self.tmpdir + \"singletons.fasta\")))\r\n self.assertEqual(actual_singletons, [])\r\n\r\n # non-empty map creates non-empty files, centroids sorted by size\r\n mapping = {'FZTHQMS01B8T1H': [],\r\n 'FZTHQMS01DE1KN': ['FZTHQMS01EHAJG'],\r\n 'FZTHQMS01EHAJG': [1, 2, 3]} # content doesn't really matter\r\n\r\n centroids = [(\r\n 'FZTHQMS01EHAJG | cluster size: 4', 'CATGCTGCCTCCCGTAGGAGTTTGGACCGTGTCTCAGTTCCAATGTGGGGGACCTTCCTCTCAGAACCCCTATCCATCGAAGGTTTGGTGAGCCGTTACCTCACCAACTGCCTAATGGAACGCATCCCCATCGATAACCGAAATTCTTTAATAACAAGACCATGCGGTCTGATTATACCATCGGGTATTAATCTTTCTTTCGAAAGGCTATCCCCGAGTTATCGGCAGGTTGGATACGTGTTACTCACCCGTGCGCCGGTCGCCA'),\r\n ('FZTHQMS01DE1KN | cluster size: 2', 'CATGCTGCCTCCCGTAGGAGTTTGGACCGTGTCTCAGTTCCAATGTGGGGGACCTTCCTCTCAGAACCCCTATCCATCGAAGGTTTGGTGAGCCGTTACCTCACCAACTGCCTAATGGAACGCATCCCCATCGATAACCGAAATTCTTTAATAACAAGACCATGCGGTCTGATTATACCATCGGGTATTAATCTTTCTTTCGAAAGGCTATCCCCGAGTTATCGGCAGGTTGGATACGTGTTACTCACCCGTGCGCCGGTCGCCA')]\r\n\r\n singletons = [(\r\n 'FZTHQMS01B8T1H',\r\n 'CATGCTGCCTCCCGTAGGAGTTTGGACCGTGTCTCAGTTCCAATGTGGGGGACCTTCCTCTCAGAACCCCTATCCATCGAAGGTTTGGTGAGCCGTTACCTCACCAACTGCCTAATGGAACGCATCCCCATCGATAACCGAAATTCTTTAATAATTAAACCATGCGGTTTTATTATACCATCGGGTATTAATCTTTCTTTCGAAAGGCTATCCCCGAGTTATCGGCAGGTTGGATACGTGTTACTCACCCGTGCGCCGGTCGCCATCACTTA')]\r\n\r\n store_clusters(mapping, self.tiny_test, self.tmpdir)\r\n actual_centroids = list(\r\n parse_fasta(open(self.tmpdir + \"centroids.fasta\")))\r\n self.assertEqual(actual_centroids, centroids)\r\n actual_singletons = list(\r\n parse_fasta(open(self.tmpdir + \"singletons.fasta\")))\r\n self.assertEqual(actual_singletons, singletons)",
"def compare_three_members(df_dict_new, df_dict_old) -> bool:\n return df_dict_new[\"DM_three_members\"].iloc[:, :-1].equals(\n df_dict_old[\"DM_three_members\"].iloc[:, :-1]\n )",
"def check_consistency(self, es):",
"def _create_topic_if_not_exists(self, topic):\n if topic in self.environments['cluster'].kafka.consumer().topics():\n return True\n\n new_topic = NewTopic(name=topic, num_partitions=MAX_CONCURRENCY*2, replication_factor=1)\n admin_client = KafkaAdminClient(bootstrap_servers=self.environments['cluster'].kafka.brokers,\n request_timeout_ms=180000)\n admin_client.create_topics(new_topics=[new_topic], timeout_ms=180000)\n return False",
"def is_converged(clusters1, clusters2, k, num_of_cords):\r\n for i in range(k):\r\n for j in range(num_of_cords):\r\n if clusters1[i][j] != clusters2[i][j]:\r\n return False\r\n return True",
"def test_topic_reduction_edge_cases(base_bertopic):\n\n nr_topics = 5\n base_bertopic.nr_topics = 100\n old_documents = pd.DataFrame({\"Document\": newsgroup_docs,\n \"ID\": range(len(newsgroup_docs)),\n \"Topic\": np.random.randint(-1, nr_topics-1, len(newsgroup_docs))})\n base_bertopic._update_topic_size(old_documents)\n c_tf_idf = base_bertopic._extract_topics(old_documents, topic_reduction=True)\n old_freq = base_bertopic.get_topics_freq()\n\n new_documents = base_bertopic._reduce_topics(old_documents, c_tf_idf)\n new_freq = base_bertopic.get_topics_freq()\n\n assert not set(old_documents.Topic).difference(set(new_documents.Topic))\n pd.testing.assert_frame_equal(old_documents, new_documents)\n pd.testing.assert_frame_equal(old_freq, new_freq)",
"def evaulate_clusters(self, pred_dict, model_dir):\n\t\tclustering_dict = {\"Topic\":[], \"Text\":[], \"Keywords\": []}\n\t\tfor cluster_num, sents_list in pred_dict.items():\n\t\t\tprint(\"\\n cluster number : \", cluster_num)\n\t\t\tprint(\"\\n number of sents : \", len(sents_list))\n\t\t\ttfidf_vec = TfidfVectorizer(use_idf=True, sublinear_tf=True, max_df=0.8, max_features=20, ngram_range=(1,5), min_df=1)\n\t\t\tX_tfidf = tfidf_vec.fit_transform(sents_list).toarray()\n\t\t\ttotal_tfidf = tfidf_vec.get_feature_names()\n\t\t\tfor sent in sents_list:\n\t\t\t\tclustering_dict[\"Topic\"].append(cluster_num)\n\t\t\t\tclustering_dict[\"Text\"].append(sent)\n\t\t\t\tclustering_dict[\"Keywords\"].append(\",\".join(total_tfidf))\n\t\t\"\"\" save the clusters to csv file \"\"\"\n\t\tdf_dominant_topic = defaultdict(list) \n\t\tdf_dominant_topic[\"Topic\"] = clustering_dict[\"Topic\"]\n\t\tdf_dominant_topic[\"Text\"] = clustering_dict[\"Text\"]\n\t\tdf_dominant_topic[\"Keywords\"] = clustering_dict[\"Keywords\"]\n\t\tdf_dominant_topic = pd.DataFrame(df_dominant_topic)\n\t\tdf_dominant_topic.to_csv(os.path.join(model_dir, \"cluster_sentence_topic_mapping.csv\"))\n\t\treturn df_dominant_topic",
"def check_for_drift(self):\n old_clusters = self.active_core_clusters.copy()\n groups = self.denstream.generate_clusters()\n for group in groups[0]:\n for cluster in group:\n if cluster.id not in self.active_core_clusters:\n self.active_core_clusters.add(cluster.id)\n self.drift_indexes.append(self.event_index)\n print(\"DRIFT ALERT\")\n print(\"Stream position:\", self.event_index)\n print(\"New core behavior detected: cluster\", cluster.id)\n print(\"Cluster weight:\", cluster.weight)\n print(\"Cluster radius:\", cluster.radius)\n print(\"Cases in cluster:\", cluster.case_ids)\n print()\n else:\n old_clusters.remove(cluster.id)\n\n # for cluster in old_clusters:\n # self.active_core_clusters.remove(cluster)\n # self.drift_indexes.append(event_index)\n # print(\"DRIFT ALERT\")\n # print(\"Cluster\", cluster, \"ceased to exist\")\n # print()",
"def check_components(self, data, _cluster, _linked_clusters):\n\n do_not_merge = []\n clustercoords = data[0:2,_cluster.cluster_members]\n _linked_clusters = [_link.antecessor for _link in _linked_clusters]\n\n if _cluster.number_of_members > 50:\n # This is faster for large numbers of cluster_members but slower when\n # number_of_members is small. A value of 50 is arbitrary but selected\n # empirically.\n for _link in _linked_clusters:\n linkcoords = data[0:2,_link.cluster_members]\n concatcoords = np.concatenate([linkcoords.T, clustercoords.T])\n concatcoords = concatcoords.T\n vals, idx, count = np.unique(concatcoords, return_index=True, return_counts=True, axis = 1)\n idx_vals_repeated = np.where(count > 1)[0]\n if np.size(idx_vals_repeated) > 0:\n do_not_merge.append(True)\n else:\n do_not_merge.append(False)\n\n else:\n for _link in _linked_clusters:\n boolval = []\n for j in range(_cluster.number_of_members):\n # Check all cluster components against those belonging to another cluster\n multiple_components = (data[0,_cluster.cluster_members[j]] == data[0,_link.cluster_members]) & \\\n (data[1,_cluster.cluster_members[j]] == data[1,_link.cluster_members])\n if np.any(multiple_components):\n boolval.append(True)\n else:\n boolval.append(False)\n if np.any(boolval):\n do_not_merge.append(True)\n else:\n do_not_merge.append(False)\n boolval = None\n\n return do_not_merge",
"def _predefined_cluster_topics(self):\n\n self.clusters = ['Doesnt fit',\n 'Weight changes', \n 'Mood and behavioral changes', \n 'Vision changes',\n 'Headaches',\n 'Body aches and pain',\n 'Memory and concentration issues',\n 'Menstrual changes',\n 'Sleep issues and drowsiness',\n 'Balance, coordination, and muscle control',\n 'Dizziness and fainting',\n 'Stomach issues',\n 'Intestinal issues',\n 'Skin issues',\n 'Dry mouth and changes in taste',\n 'Blood sugar changes',\n 'Hair loss and abnormal growth',\n 'Changes in libido and sexual performance',\n 'Changes in energy',\n 'Sweating and temperature control issues',\n 'Eye itching or sensitivity changes',\n 'Blood pressure and heart rate changes',\n 'Changes in appetite',\n 'Urinary changes',\n 'Kidney issues',\n 'Hearing issues',\n 'Respiratory issues and coughing',\n 'Salivary issues',\n 'Breast growth and swelling (all genders)',\n 'Dental issues']",
"def validate_new_topic(self, topic_id):\n\n self.db_cursor.execute(\"\"\"SELECT COUNT(*) FROM Topic WHERE id == %s\"\"\", (topic_id,))\n ct = self.db_cursor.fetchone()\n ct = ct[0]\n if ct == 0:\n return False\n return True",
"def are_in_same_cluster(self, id_emitter, id_cluster, ei, clusters):\n\n list_of_data, step_nb = create_comparison_one_to_one(\n id_emitter, ei, clusters[id_cluster], 50)\n\n to_predict = np.array(list_of_data)\n\n prediction_on_sequence = self.model.predict(to_predict)\n final_prediction = one_prediction(\n prediction_on_sequence, step_nb, self.threshold)\n return(final_prediction)",
"def compare(self):\n len0 = len(self.cluster_lists[0])\n len1 = len(self.cluster_lists[1])\n longer_index = 0 if len0 >= len1 else 1\n shorter_index = 1 if len1 <= len0 else 0\n self.stars_length = len(self.cluster_lists[shorter_index]) \n self.starlets_length = len(self.cluster_lists[longer_index]) \n # build the noeds for shorter cluster list, and get the\n # distribution of cluster size.\n for cluster in self.cluster_lists[shorter_index]:\n len_spectra = len(cluster.get_spectra())\n star = ClusterNode(cluster.id, len_spectra) \n self.stars[cluster.id] = star\n\n self.cluster_spectra_num[shorter_index] += len_spectra\n self.cluster_size_dist[shorter_index][len_spectra] = self.cluster_size_dist[shorter_index].get(len_spectra,0) + 1\n # build the noeds for longer cluster list, and get the\n # distribution of cluster size.\n for cluster in self.cluster_lists[longer_index]:\n len_spectra = len(cluster.get_spectra())\n starlet = ClusterNode(cluster.id, len_spectra) \n self.starlets[cluster.id] = starlet\n\n self.cluster_spectra_num[longer_index] += len_spectra\n self.cluster_size_dist[longer_index][len_spectra] = self.cluster_size_dist[longer_index].get(len_spectra,0) + 1\n # do the comparing, and network building\n for i in range (0, len(self.cluster_lists[shorter_index])):\n cluster0 = self.cluster_lists[shorter_index][i] \n for j in range (i, len(self.cluster_lists[longer_index])):\n cluster1 = self.cluster_lists[longer_index][j] \n (shared_spec_num, similarity) = self.calculate_similarity(cluster0, cluster1)\n if similarity == 0:\n continue\n self.similarity_dist[int(similarity*10)] = self.similarity_dist.get(int(similarity*10),0) + 1\n self.shared_spec_num += shared_spec_num\n\n self.stars[cluster0.id].add_nb_node(cluster1.id, similarity, shared_spec_num)\n self.starlets[cluster1.id].add_nb_node(cluster0.id, similarity, shared_spec_num)\n\n self.ave_star_size = self.cluster_spectra_num[shorter_index]/self.stars_length\n self.ave_starlet_size = self.cluster_spectra_num[longer_index]/self.starlets_length",
"def test_topic_reduction(reduced_topics):\n model = BERTopic()\n nr_topics = reduced_topics + 2\n model.nr_topics = reduced_topics\n old_documents = pd.DataFrame({\"Document\": newsgroup_docs,\n \"ID\": range(len(newsgroup_docs)),\n \"Topic\": np.random.randint(-1, nr_topics-1, len(newsgroup_docs))})\n model._update_topic_size(old_documents)\n model._extract_topics(old_documents.copy())\n old_freq = model.get_topic_freq()\n\n new_documents = model._reduce_topics(old_documents.copy())\n new_freq = model.get_topic_freq()\n\n assert old_freq.Count.sum() == new_freq.Count.sum()\n assert len(old_freq.Topic.unique()) == len(old_freq)\n assert len(new_freq.Topic.unique()) == len(new_freq)\n assert isinstance(model.mapped_topics, dict)\n assert not set(model.get_topic_freq().Topic).difference(set(new_documents.Topic))\n assert model.mapped_topics",
"def check_ensemble_similarities(new_ensemble_name, new_ensemble_datasets):\n\n\texisting_ensembles = db.get_engine(current_app, 'methylation_data').execute(\"SELECT * FROM ensembles\").fetchall()\n\texisting_ensembles_list = [ dict(d) for d in existing_ensembles ]\n\texisting_ensembles_names_list = [ d['ensemble_name'] for d in existing_ensembles ]\n\n\t# New ensemble_name must be unique.\n\tif new_ensemble_name in existing_ensembles_names_list:\n\t\treturn json.dumps({\"result\": \"failure\", \"reason\": \"The name {} is already in use, please choose a different name\".format(new_ensemble_name)})\n\n\tnew_ensemble_datasets = new_ensemble_datasets.split('+')\n\n\tquery = \"SELECT cell_id FROM cells WHERE dataset IN (\" + \",\".join(('%s',)*len(new_ensemble_datasets)) + \")\"\n\tcells_in_new_ensemble = db.get_engine(current_app, 'methylation_data').execute(query, tuple(new_ensemble_datasets)).fetchall()\n\tcells_in_new_ensemble_set = set([ cell['cell_id'] for cell in cells_in_new_ensemble ])\n\n\tif len(cells_in_new_ensemble_set) <= 200:\n\t\treturn json.dumps({\"result\": \"failure\", \"reason\": \"Ensembles must contain more than 200 cells.\"})\n\n\tsame_datasets_in_both = []\n\tnew_ensemble_datasets_set = set(new_ensemble_datasets)\n\tfor existing_ensemble in existing_ensembles_list:\n\t\texisting_ensemble_datasets = set(existing_ensemble['datasets'].split(','))\n\t\tdatasets_difference = new_ensemble_datasets_set ^ existing_ensemble_datasets # datasets in new or existing but not both (difference).\n\t\tif len(datasets_difference) == 0:\n\t\t\tsame_datasets_in_both.append(existing_ensemble)\n\n\tfor similar_ensemble in same_datasets_in_both:\n\t\tquery = \"SELECT cell_id FROM Ens{}\".format(similar_ensemble['ensemble_id'])\n\t\tcells_in_similar_ensemble = db.get_engine(current_app, 'methylation_data').execute(query).fetchall()\n\t\tcells_in_similar_ensemble_set = set([ cell['cell_id'] for cell in cells_in_similar_ensemble ])\n\t\tdifferent_cells = cells_in_new_ensemble_set ^ cells_in_similar_ensemble_set\n\n\t\t# If a pre-existing ensemble with the same datasets also has the same exact cells as the new ensemble, tell user a duplicate ensemble exists\n\t\tif len(different_cells) == 0:\n\t\t\treturn json.dumps({\"result\": \"failure\", \"reason\": \"Another ensemble with the same cells already exists: {}.\".format(similar_ensemble['ensemble_name'])})\n\n\t# If none of the pre-existing ensembles with the same datasets has the same exact cells as the new ensemble, warn user that similar ensembles exist.\n\tif len(same_datasets_in_both) > 0:\n\t\treturn json.dumps({\"result\": \"warning\", \"reason\": \"The following pre-existing ensembles are similar: \"+\", \".join((\"%s\",)*len(same_datasets_in_both)) %(tuple([ ensemble['ensemble_name'] for ensemble in same_datasets_in_both]))+\". Are you sure you want to request the new ensemble?\"})\n\n\t# Success\n\treturn json.dumps({\"result\": \"success\",\n\t\t\t\t\t \"reason\": \"Click submit to finalize request.\",\n\t\t\t\t\t \"new_ensemble_name\": new_ensemble_name,\n\t\t\t\t\t \"new_ensemble_datasets\": new_ensemble_datasets,\n\t\t\t\t\t \"new_ensemble_cells\": list(cells_in_new_ensemble_set)})",
"def cluster(self, n_clusters, filename=None, df=None, tofile=False):\r\n path = '/mnt/disk2/data/YuanHAO/对应关系应用/files/{}'.format(filename)\r\n if not os.path.exists(path) and not isinstance(df, pd.DataFrame):\r\n raise Exception(\"Please upload cluster file to ./files folder OR specify the df\")\r\n if not filename:\r\n # ws = df[\"组合需求\"].apply(lambda x: x.split('.')[-1]).tolist()\r\n ws = df[\"问题期许\"].tolist()\r\n else:\r\n ws = pd.read_excel(path, header=None)[0].tolist()\r\n\r\n with codecs.open('/mnt/disk2/data/YuanHAO/对应关系应用/tongyong.pickle', \"rb\") as f:\r\n VECTOR = pickle.load(f)\r\n W_norm = VECTOR['W_norm']\r\n vocab = VECTOR['vocab']\r\n wsvec = []\r\n newws = []\r\n print('****不在词典里的词有****')\r\n\r\n _fix = '的|了|有点|多|没有|长了|不|的|特别|不错|好|好处|不|没有|无|不会|可以|一直|不用|超级|慢慢|会'.split('|')\r\n for x in ws:\r\n cut_x = x\r\n for _ in _fix:\r\n cut_x = cut_x.strip(_)\r\n\r\n if cut_x in vocab:\r\n vec = W_norm[vocab[cut_x]]\r\n if sum(np.isnan(vec)) != 0 or sum(pd.DataFrame(vec)[0].apply(math.isinf)) != 0:\r\n print(x, ': nan or inf')\r\n continue\r\n wsvec.append(vec)\r\n newws.append(x)\r\n else:\r\n cut_x = list(jieba.cut(cut_x))\r\n tmp = [W_norm[vocab[w]] for w in cut_x if w in vocab]\r\n if len(tmp) == 0:\r\n print(x)\r\n else:\r\n wsvec.append(sum(tmp) / len(tmp))\r\n newws.append(x)\r\n\r\n n_clusters = n_clusters\r\n kmeans_model = KMeans(n_clusters=n_clusters, random_state=1).fit(wsvec)\r\n labels = kmeans_model.labels_\r\n\r\n outputwords = []\r\n outputvecs = []\r\n for i in range(n_clusters):\r\n tmpword, tmpvec = [], []\r\n if not isinstance(df, pd.DataFrame):\r\n print('\\nClass {}'.format(i))\r\n for j in range(len(wsvec)):\r\n if labels[j] == i:\r\n if not isinstance(df, pd.DataFrame):\r\n print(newws[j], end=',')\r\n tmpword.append(newws[j])\r\n tmpvec.append(wsvec[j])\r\n outputvecs.append(tmpvec)\r\n outputwords.append(tmpword)\r\n respath = None\r\n if tofile and filename:\r\n respath = '/mnt/disk2/data/YuanHAO/对应关系应用/files/{}聚类结果.txt'.format(filename)\r\n elif tofile and isinstance(df, pd.DataFrame):\r\n respath = '/mnt/disk2/data/YuanHAO/对应关系应用/files/聚类结果.txt'\r\n if respath:\r\n with open(respath, 'w', encoding='utf-8') as f:\r\n for cls, wl in enumerate(outputwords):\r\n f.write(\"--------------class: {}---------------\".format(cls) + '\\n')\r\n for word in wl:\r\n f.write(word + '\\n')",
"def createcluster(self):\n for hostitem in OTHER_NODES:\n checkhost(hostitem)\n if OTHER_WSREP:\n for wsrepitem in OTHER_WSREP:\n REMAINING_NODES.append(wsrepitem)\n if REMAINING_NODES:\n alive = str(REMAINING_NODES)[1:-1]\n print \"{}\\nThe following nodes are alive in cluster:{}\\n {}\".format(\n RED, WHITE, alive)\n print \"\\n\\nTo boostrap a new cluster you need to switch them off\\n\"\n os.sys.exit(1)\n else:\n if self.mode == \"new\" and not self.force:\n ask('\\nThis operation will destroy the local data')\n clean_dir(self.datadir)\n initialize_mysql(self.datadir)\n bootstrap_mysql(self.mode)\n if self.mode == \"new\":\n create_monitor_table()\n ALL_NODES.append(\"localhost\")\n for creditem in CREDENTIALS:\n create_users(creditem)\n print \"\"\n drop_anonymous()"
] | [
"0.6708922",
"0.63803506",
"0.619212",
"0.6143325",
"0.6065571",
"0.6004695",
"0.595611",
"0.5744816",
"0.57216233",
"0.567206",
"0.5561389",
"0.55570865",
"0.55501693",
"0.5544099",
"0.55346334",
"0.55069065",
"0.54677224",
"0.5462687",
"0.5441739",
"0.5440946",
"0.5409589",
"0.5408735",
"0.5397156",
"0.5376138",
"0.53672755",
"0.5358134",
"0.5344247",
"0.53437275",
"0.5340764",
"0.5331473"
] | 0.6381417 | 1 |
Test whether the topics are correctly extracted using cTFIDF | def test_extract_topics(base_bertopic):
nr_topics = 5
documents = pd.DataFrame({"Document": newsgroup_docs,
"ID": range(len(newsgroup_docs)),
"Topic": np.random.randint(-1, nr_topics-1, len(newsgroup_docs))})
base_bertopic._update_topic_size(documents)
c_tf_idf = base_bertopic._extract_topics(documents, topic_reduction=False)
freq = base_bertopic.get_topics_freq()
assert c_tf_idf.shape[0] == 5
assert c_tf_idf.shape[1] > 100
assert isinstance(freq, pd.DataFrame)
assert nr_topics == len(freq.Topic.unique())
assert freq.Count.sum() == len(documents)
assert len(freq.Topic.unique()) == len(freq) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def test_extract_topics():\n nr_topics = 5\n documents = pd.DataFrame({\"Document\": newsgroup_docs,\n \"ID\": range(len(newsgroup_docs)),\n \"Topic\": np.random.randint(-1, nr_topics-1, len(newsgroup_docs))})\n model = BERTopic()\n model._update_topic_size(documents)\n model._extract_topics(documents)\n freq = model.get_topic_freq()\n\n assert model.c_tf_idf.shape[0] == 5\n assert model.c_tf_idf.shape[1] > 100\n assert isinstance(freq, pd.DataFrame)\n assert nr_topics == len(freq.Topic.unique())\n assert freq.Count.sum() == len(documents)\n assert len(freq.Topic.unique()) == len(freq)",
"def test_extract_topics_custom_cv(base_bertopic_custom_cv):\n nr_topics = 5\n documents = pd.DataFrame({\"Document\": newsgroup_docs,\n \"ID\": range(len(newsgroup_docs)),\n \"Topic\": np.random.randint(-1, nr_topics-1, len(newsgroup_docs))})\n base_bertopic_custom_cv._update_topic_size(documents)\n c_tf_idf = base_bertopic_custom_cv._extract_topics(documents, topic_reduction=False)\n freq = base_bertopic_custom_cv.get_topics_freq()\n\n assert c_tf_idf.shape[0] == 5\n assert c_tf_idf.shape[1] > 100\n assert isinstance(freq, pd.DataFrame)\n assert nr_topics == len(freq.Topic.unique())\n assert freq.Count.sum() == len(documents)\n assert len(freq.Topic.unique()) == len(freq)",
"def test_extract_topics_custom_cv():\n nr_topics = 5\n documents = pd.DataFrame({\"Document\": newsgroup_docs,\n \"ID\": range(len(newsgroup_docs)),\n \"Topic\": np.random.randint(-1, nr_topics-1, len(newsgroup_docs))})\n\n cv = CountVectorizer(ngram_range=(1, 2))\n model = BERTopic(vectorizer=cv)\n model._update_topic_size(documents)\n model._extract_topics(documents)\n freq = model.get_topic_freq()\n\n assert model.c_tf_idf.shape[0] == 5\n assert model.c_tf_idf.shape[1] > 100\n assert isinstance(freq, pd.DataFrame)\n assert nr_topics == len(freq.Topic.unique())\n assert freq.Count.sum() == len(documents)\n assert len(freq.Topic.unique()) == len(freq)",
"def test_get_topics(self):\n\n for m in self.models:\n\n topics = m.topics\n self.assertTrue(isinstance(topics, turicreate.SFrame))\n self.assertEqual(topics.num_rows(), 25)\n self.assertEqual(topics.num_columns(), 2)\n z = m.topics[\"topic_probabilities\"]\n for k in range(m.num_topics):\n self.assertTrue(\n abs(sum(z.vector_slice(k)) - 1) < DELTA,\n \"Returned probabilities do not sum to 1.\",\n )\n\n # Make sure returned object is an SFrame of the right size\n topics = m.get_topics()\n self.assertTrue(isinstance(topics, turicreate.SFrame))\n self.assertTrue(\n topics.num_columns() == 3,\n \"Returned SFrame should have a topic, word, and probs.\",\n )\n\n # Make sure that requesting a single topic returns only that topic\n num_words = 8\n topics = m.get_topics([5], num_words=num_words)\n self.assertTrue(\n all(topics[\"topic\"] == 5), \"Returned topics do not have the right id.\"\n )\n self.assertEqual(topics.num_rows(), num_words)\n topics = m.get_topics([2, 4], num_words=num_words)\n self.assertEqual(set(list(topics[\"topic\"])), set([2, 4]))\n self.assertEqual(topics.num_rows(), num_words + num_words)\n\n # Make sure the cumulative probability of the returned words is\n # is less than the cutoff we provided.\n # A cutoff of 1.0 should return num_words for every topic.\n cutoff = 1.0\n topics = m.get_topics(cdf_cutoff=cutoff, num_words=len(m.vocabulary))\n totals = topics.groupby(\n \"topic\", {\"total_score\": turicreate.aggregate.SUM(\"score\")}\n )\n self.assertTrue(\n all(totals[\"total_score\"] <= (cutoff + DELTA)),\n \"More words were returned than expected for this cutoff.\",\n )\n\n # Make sure we raise errors for bad input\n with self.assertRaises(ValueError):\n m.get_topics([-1])\n with self.assertRaises(ValueError):\n m.get_topics([10000])\n with self.assertRaises(ToolkitError):\n topics = m.get_topics(output_type=\"other\")\n\n # Test getting topic_words\n topic_words = m.get_topics(output_type=\"topic_words\", num_words=5)\n self.assertEqual(type(topic_words), turicreate.SFrame)\n\n # Test words are sorted correctly for the first topic\n # TODO: Make this more deterministic.\n\n # topic_probs = m.get_topics(num_words=5)\n # expected = [w for w in topic_probs['word'][:5]]\n # observed = topic_words['words'][0]\n # self.assertEqual(observed[0], expected[0])",
"def validate_new_curriculum_topics(self, curriculum_topics):\n\n for cur in curriculum_topics:\n # check to make sure its in the general topics table\n self.db_cursor.execute(\"\"\"SELECT COUNT(*) FROM Topic WHERE name = %s\"\"\", (cur,))\n ct = self.db_cursor.fetchone()\n ct = ct[0]\n if ct == 0:\n print(\"topic does not exist, we must create new one or cancel\") # todo\n\n return True",
"def is_valid_topic(index):\n return all_topics[index][1] == \"1\"",
"def test_wiki_topics(self):\n t1 = TopicFactory(slug='doesnotexist')\n t2 = TopicFactory(slug='extant')\n t3 = TopicFactory(slug='tagged')\n\n doc = DocumentFactory(locale=u'en-US', category=10)\n doc.topics.add(t2)\n RevisionFactory(document=doc, is_approved=True)\n\n doc = DocumentFactory(locale=u'en-US', category=10)\n doc.topics.add(t2)\n doc.topics.add(t3)\n RevisionFactory(document=doc, is_approved=True)\n\n self.refresh()\n\n topic_vals = (\n (t1.slug, 0),\n (t2.slug, 2),\n (t3.slug, 1),\n ([t2.slug, t3.slug], 1),\n )\n\n qs = {'a': 1, 'w': 1, 'format': 'json'}\n for topics, number in topic_vals:\n qs.update({'topics': topics})\n response = self.client.get(reverse('search.advanced'), qs)\n eq_(number, json.loads(response.content)['total'])",
"def test_tfidf_scorer(self):\n\n \"\"\"\n Create the test data.\n \"\"\"\n tokenizer = Tokenizer(stem=False)\n posts = [\n \"Erdogan with threats to attack regime forces 'everywhere' in Syria\",\n \"Damascus says Erdogan 'disconnected from reality' after threats\",\n ]\n\n corpus = [ Document(post, tokenizer.tokenize(post)) for post in posts ]\n\n extractor = TokenExtractor(tokenizer=tokenizer)\n scorer = TFIDFScorer({ 'erdogan': 1, 'threats': 2 }, 10)\n candidates = extractor.extract(corpus)\n scores = scorer.score(candidates)\n self.assertGreater(scores.get('erdogan'), scores.get('damascus'))\n self.assertEqual(scores.get('everywhere'), scores.get('disconnected')) # they appear the same number of times\n self.assertGreater(scores.get('erdogan'), scores.get('threats')) # 'threats' and 'erdogan' appear with the same frequency, but 'threats' has a higher DF",
"def test_topic_reduction_edge_cases():\n model = BERTopic()\n nr_topics = 5\n model.nr_topics = 100\n old_documents = pd.DataFrame({\"Document\": newsgroup_docs,\n \"ID\": range(len(newsgroup_docs)),\n \"Topic\": np.random.randint(-1, nr_topics-1, len(newsgroup_docs))})\n model._update_topic_size(old_documents)\n model._extract_topics(old_documents)\n old_freq = model.get_topic_freq()\n\n new_documents = model._reduce_topics(old_documents)\n new_freq = model.get_topic_freq()\n\n assert not set(old_documents.Topic).difference(set(new_documents.Topic))\n pd.testing.assert_frame_equal(old_documents, new_documents)\n pd.testing.assert_frame_equal(old_freq, new_freq)",
"def test_full_model(model, documents, request):\n topic_model = copy.deepcopy(request.getfixturevalue(model))\n if model == \"base_topic_model\":\n topic_model.save(\"model_dir\", serialization=\"pytorch\", save_ctfidf=True, save_embedding_model=\"sentence-transformers/all-MiniLM-L6-v2\")\n topic_model = BERTopic.load(\"model_dir\")\n topics = topic_model.topics_\n\n for topic in set(topics):\n words = topic_model.get_topic(topic)[:10]\n assert len(words) == 10\n\n for topic in topic_model.get_topic_freq().Topic:\n words = topic_model.get_topic(topic)[:10]\n assert len(words) == 10\n\n assert len(topic_model.get_topic_freq()) > 2\n assert len(topic_model.get_topics()) == len(topic_model.get_topic_freq())\n\n # Test extraction of document info\n document_info = topic_model.get_document_info(documents)\n assert len(document_info) == len(documents)\n\n # Test transform\n doc = \"This is a new document to predict.\"\n topics_test, probs_test = topic_model.transform([doc, doc])\n\n assert len(topics_test) == 2\n\n # Test topics over time\n timestamps = [i % 10 for i in range(len(documents))]\n topics_over_time = topic_model.topics_over_time(documents, timestamps)\n\n assert topics_over_time.Frequency.sum() == len(documents)\n assert len(topics_over_time.Topic.unique()) == len(set(topics))\n\n # Test hierarchical topics\n hier_topics = topic_model.hierarchical_topics(documents)\n\n assert len(hier_topics) > 0\n assert hier_topics.Parent_ID.astype(int).min() > max(topics)\n\n # Test creation of topic tree\n tree = topic_model.get_topic_tree(hier_topics, tight_layout=False)\n assert isinstance(tree, str)\n assert len(tree) > 10\n\n # Test find topic\n similar_topics, similarity = topic_model.find_topics(\"query\", top_n=2)\n assert len(similar_topics) == 2\n assert len(similarity) == 2\n assert max(similarity) <= 1\n\n # Test topic reduction\n nr_topics = len(set(topics))\n nr_topics = 2 if nr_topics < 2 else nr_topics - 1\n topic_model.reduce_topics(documents, nr_topics=nr_topics)\n\n assert len(topic_model.get_topic_freq()) == nr_topics\n assert len(topic_model.topics_) == len(topics)\n\n # Test update topics\n topic = topic_model.get_topic(1)[:10]\n vectorizer_model = topic_model.vectorizer_model\n topic_model.update_topics(documents, n_gram_range=(2, 2))\n\n updated_topic = topic_model.get_topic(1)[:10]\n\n topic_model.update_topics(documents, vectorizer_model=vectorizer_model)\n original_topic = topic_model.get_topic(1)[:10]\n\n assert topic != updated_topic\n if topic_model.representation_model is not None:\n assert topic != original_topic\n\n # Test updating topic labels\n topic_labels = topic_model.generate_topic_labels(nr_words=3, topic_prefix=False, word_length=10, separator=\", \")\n assert len(topic_labels) == len(set(topic_model.topics_))\n\n # Test setting topic labels\n topic_model.set_topic_labels(topic_labels)\n assert topic_model.custom_labels_ == topic_labels\n\n # Test merging topics\n freq = topic_model.get_topic_freq(0)\n topics_to_merge = [0, 1]\n topic_model.merge_topics(documents, topics_to_merge)\n assert freq < topic_model.get_topic_freq(0)\n\n # Test reduction of outliers\n if -1 in topics:\n new_topics = topic_model.reduce_outliers(documents, topics, threshold=0.0)\n nr_outliers_topic_model = sum([1 for topic in topic_model.topics_ if topic == -1])\n nr_outliers_new_topics = sum([1 for topic in new_topics if topic == -1])\n\n if topic_model._outliers == 1:\n assert nr_outliers_topic_model > nr_outliers_new_topics\n\n # # Save and load model\n # if 
topic_model.topic_embeddings_ is not None:\n # topic_model.save(\"model_dir\", serialization=\"pytorch\", save_ctfidf=True)\n # loaded_model = BERTopic.load(\"model_dir\")",
"def test_topic_reduction(reduced_topics):\n model = BERTopic()\n nr_topics = reduced_topics + 2\n model.nr_topics = reduced_topics\n old_documents = pd.DataFrame({\"Document\": newsgroup_docs,\n \"ID\": range(len(newsgroup_docs)),\n \"Topic\": np.random.randint(-1, nr_topics-1, len(newsgroup_docs))})\n model._update_topic_size(old_documents)\n model._extract_topics(old_documents.copy())\n old_freq = model.get_topic_freq()\n\n new_documents = model._reduce_topics(old_documents.copy())\n new_freq = model.get_topic_freq()\n\n assert old_freq.Count.sum() == new_freq.Count.sum()\n assert len(old_freq.Topic.unique()) == len(old_freq)\n assert len(new_freq.Topic.unique()) == len(new_freq)\n assert isinstance(model.mapped_topics, dict)\n assert not set(model.get_topic_freq().Topic).difference(set(new_documents.Topic))\n assert model.mapped_topics",
"def check_topic(t):\n empty_string_check(t['@id'])\n filesystem_exists_check(t['@filename'])\n filesystem_exists_check(t['@qrelsFilename'])\n \n if '@backgroundFilename' in t: # A background file was specified.\n filesystem_exists_check(t['@backgroundFilename'])\n else:\n t['@backgroundFilename'] = None # No background file was specified.",
"def test_topic_reduction(reduced_topics):\n base_bertopic = BERTopic(bert_model='distilbert-base-nli-mean-tokens', verbose=False)\n nr_topics = reduced_topics + 2\n base_bertopic.nr_topics = reduced_topics\n old_documents = pd.DataFrame({\"Document\": newsgroup_docs,\n \"ID\": range(len(newsgroup_docs)),\n \"Topic\": np.random.randint(-1, nr_topics-1, len(newsgroup_docs))})\n base_bertopic._update_topic_size(old_documents)\n c_tf_idf = base_bertopic._extract_topics(old_documents.copy(), topic_reduction=True)\n old_freq = base_bertopic.get_topics_freq()\n\n new_documents = base_bertopic._reduce_topics(old_documents.copy(), c_tf_idf)\n new_freq = base_bertopic.get_topics_freq()\n\n assert old_freq.Count.sum() == new_freq.Count.sum()\n assert len(old_freq.Topic.unique()) == len(old_freq)\n assert len(new_freq.Topic.unique()) == len(new_freq)\n assert isinstance(base_bertopic.mapped_topics, dict)\n assert not set(base_bertopic.get_topics_freq().Topic).difference(set(new_documents.Topic))\n assert base_bertopic.mapped_topics",
"def test_topic_retrieval_authenticated(self):\n self.client.credentials(HTTP_AUTHORIZATION='Token ' + self.free_token.key)\n response = self.client.get('/topics/', format='json')\n data = json.loads(response.content)\n self.assertEqual(response.status_code, status.HTTP_200_OK)\n self.assertEqual(data['count'],3)\n self.assertTrue({'name': 'Topic 1', 'description': 'The first topic.'} in data['results'])\n self.assertTrue({'name': 'Topic 2', 'description': 'The second topic.'} in data['results'])",
"def test_text_classifier_tsne_get(self):\n pass",
"def test_question_topics(self):\n p = ProductFactory()\n t1 = TopicFactory(slug='doesnotexist', product=p)\n t2 = TopicFactory(slug='cookies', product=p)\n t3 = TopicFactory(slug='sync', product=p)\n\n QuestionFactory(topic=t2)\n QuestionFactory(topic=t2)\n QuestionFactory(topic=t3)\n\n self.refresh()\n\n topic_vals = (\n (t1.slug, 0),\n (t2.slug, 2),\n (t3.slug, 1),\n )\n\n qs = {'a': 1, 'w': 2, 'format': 'json'}\n for topics, number in topic_vals:\n qs.update({'topics': topics})\n response = self.client.get(reverse('search.advanced'), qs)\n eq_(number, json.loads(response.content)['total'])",
"def test_text_classifier_retrieve(self):\n pass",
"def test_topics_for_products(self):\n desktop_topics = topics_for(product=self.desktop)\n eq_(len(desktop_topics), 3)\n\n mobile_topics = topics_for(product=self.mobile)\n eq_(len(mobile_topics), 2)",
"def __find_topics(self, concepts):\n\n # Set up\n found_topics = dict() # to store the matched topics\n explanation = dict()\n\n # finding matches\n for concept in concepts:\n evgrams = everygrams(concept.split(), 1, 3) # list of unigrams, bigrams, trigrams\n for grams in evgrams:\n gram = \"_\".join(grams)\n gram_without_underscore = \" \".join(grams)\n #### Finding similar words contained in the model\n\n list_of_matched_topics = []\n\n if self.fast_classification:\n list_of_matched_topics = self.__get_similar_words_from_cached_model(gram,grams)\n else:\n list_of_matched_topics = self.__get_similar_words_from_full_model(gram, grams)\n\n\n for topic_item in list_of_matched_topics:\n\n topic = topic_item[\"topic\"]\n str_sim = topic_item[\"sim_t\"]\n wet = topic_item[\"wet\"]\n sim = topic_item[\"sim_w\"]\n\n\n if str_sim >= self.min_similarity and topic in self.cso.topics_wu:\n\n\n if topic in found_topics:\n #tracking this match\n found_topics[topic][\"times\"] += 1\n\n found_topics[topic][\"gram_similarity\"].append(sim)\n\n #tracking the matched gram\n if gram in found_topics[topic][\"grams\"]:\n found_topics[topic][\"grams\"][gram] += 1\n else:\n found_topics[topic][\"grams\"][gram] = 1\n\n #tracking the most similar gram to the topic\n if str_sim > found_topics[topic][\"embedding_similarity\"]:\n found_topics[topic][\"embedding_similarity\"] = str_sim\n found_topics[topic][\"embedding_matched\"] = wet\n\n else:\n #creating new topic in the result set\n found_topics[topic] = {'grams': {gram:1},\n 'embedding_matched': wet,\n 'embedding_similarity': str_sim,\n 'gram_similarity':[sim],\n 'times': 1,\n 'topic':topic}\n\n\n\n if sim == 1:\n found_topics[topic][\"syntactic\"] = True\n\n\n\n primary_label_topic = self.cso.get_primary_label_wu(topic)\n if primary_label_topic not in explanation:\n explanation[primary_label_topic] = set()\n\n explanation[primary_label_topic].add(gram_without_underscore)\n\n return found_topics, explanation",
"def is_topic(cls, topic: str) -> bool:\n return re.match(AsrTrainSuccess.TOPIC_PATTERN, topic) is not None",
"def test_classifiers(train_docs, train_target, test_docs, test_target, min_docs, K, K2, removeStopWords):\n # test_classifiers(train_docs, train_target, test_docs, test_targets, i, 3)\n X_train_counts, X_train_tfidf, X_test_counts, X_test_tfidf = extract_text_features(train_docs, test_docs, min_docs, removeStopWords)\n \n \n num_docs, vocab_size = X_train_counts.shape\n print('Number of (training) documents =',num_docs)\n print('Vocabulary size =',vocab_size)\n \n\n # Now evaluate the classifiers on the test data\n # Print out the accuracy as a percentage for each classifier.\n # np.mean() can be used to calculate the accuracy. Round the accuracy to 2 decimal places.\n\n #predict according to different classifier--evaluate results \n predicted_multNB = fit_and_predict_multinomialNB(X_train_tfidf, train_target, X_test_tfidf)\n predicted_bernNB = fit_and_predict_BernoulliNB(X_train_tfidf, train_target, X_test_tfidf)\n predicted_LR = fit_and_predict_LR(X_train_tfidf, train_target, X_test_tfidf)\n predicted_LR = fit_and_predict_LR(X_train_counts, train_target, X_test_counts)\n predicted_KNN = fit_and_predict_KNN(X_train_tfidf, train_target, X_test_tfidf, K)\n predicted_KNN2 = fit_and_predict_KNN(X_train_tfidf, train_target, X_test_tfidf, K2)\n \n predicted_base = np.array([FreqDist(test_target).most_common(1)[0][0]]*len(test_target))\n\n # count num of correct predictions / total\n np_test_target = np.array(test_target)\n base = np.sum(predicted_base == np_test_target)/len(np_test_target)*100\n multNB = np.sum(predicted_multNB == np_test_target)/len(np_test_target)*100\n bernNB = np.sum(predicted_bernNB == np_test_target)/len(np_test_target)*100\n LR = np.sum(predicted_LR == np_test_target)/len(np_test_target)*100\n KN = np.sum(predicted_KNN == np_test_target)/len(np_test_target)*100\n KN2 = np.sum(predicted_KNN2 == np_test_target)/len(np_test_target)*100\n\n \n print('\\tBase Accuracy: {:.3f}'.format(base))\n print('\\tAccuracy with multinomial naive Bayes: {:.2f}'.format(multNB))\n print('\\tAccuracy with Bernoulli naive Bayes: {:.2f}'.format(bernNB))\n print('\\tAccuracy with logistic regression: {:.2f}'.format(LR))\n print('\\tAccuracy with kNN, k={} classifier: {:2f}'.format(K, KN))\n print('\\tAccuracy with kNN, k={} classifier: {:.2f}'.format(K2, KN2))",
"def test__validate_topic__0():\n for input_value, expected_output in (\n (None, None),\n ('', None),\n ('a', 'a'),\n ):\n output = validate_topic(input_value)\n vampytest.assert_eq(output, expected_output)",
"def test_text_classifier_get_details(self):\n pass",
"def test__extract_features(self):\n text_sample = \"I really really love this movie\"\n feature_sample = ['really','love','good']\n feature_score_type = \"presence\"\n model_sample = Model(feature_sample,feature_score_type)\n result_features = model_sample.extract_features(text_sample)\n assert_equal(result_features,{'really':1,'love':1,'good':0})\n feature_score_type = \"term_frequency\"\n model_sample = Model(feature_sample,feature_score_type)\n result_features = model_sample.extract_features(text_sample)\n assert_equal(result_features,{'really':2,'love':1,'good':0})",
"def check_classifier():\n content = []\n labels = []\n file = 'COMP3074-CW1-Dataset.csv'\n content, labels = get_tag(file, \"question_book\", content, labels)\n file = 'name.csv'\n content, labels = get_tag(file, \"question_book\", content, labels)\n file = 'Small_talk.csv'\n content, labels = get_tag(file, \"small_talk\", content, labels, )\n x_train, x_test, y_train, y_test = train_test_split(content, # Sample feature set to be divided\n labels, # The sample result to be divided (label)\n stratify=labels, # Keep the category proportions\n # the same in training and testing\n test_size=0.25, # Refers to the proportion of\n # samples reserved for testing\n random_state=22) # Random seed\n count_vect = CountVectorizer(stop_words=stopwords.words('english'))\n x_train_counts = count_vect.fit_transform(x_train)\n tfidf_transformer = TfidfTransformer(use_idf=True, # Tf_idf\n sublinear_tf=True).fit(x_train_counts)\n x_train_tf = tfidf_transformer.transform(x_train_counts) # Standardize the inherent attributes of the training set,\n # reduce dimensionality and normalize\n classify = LogisticRegression(random_state=0).fit(x_train_tf, y_train) # Logistic regression\n return classify, tfidf_transformer, count_vect",
"def test_topic_reduction_edge_cases(base_bertopic):\n\n nr_topics = 5\n base_bertopic.nr_topics = 100\n old_documents = pd.DataFrame({\"Document\": newsgroup_docs,\n \"ID\": range(len(newsgroup_docs)),\n \"Topic\": np.random.randint(-1, nr_topics-1, len(newsgroup_docs))})\n base_bertopic._update_topic_size(old_documents)\n c_tf_idf = base_bertopic._extract_topics(old_documents, topic_reduction=True)\n old_freq = base_bertopic.get_topics_freq()\n\n new_documents = base_bertopic._reduce_topics(old_documents, c_tf_idf)\n new_freq = base_bertopic.get_topics_freq()\n\n assert not set(old_documents.Topic).difference(set(new_documents.Topic))\n pd.testing.assert_frame_equal(old_documents, new_documents)\n pd.testing.assert_frame_equal(old_freq, new_freq)",
"def is_topic(cls, topic: str) -> bool:\n return re.match(AsrTrain.TOPIC_PATTERN, topic) is not None",
"def test_text_classifier_tsne_post(self):\n pass",
"def has_training_docs(self):\n pass",
"def validate_new_topic(self, topic_id):\n\n self.db_cursor.execute(\"\"\"SELECT COUNT(*) FROM Topic WHERE id == %s\"\"\", (topic_id,))\n ct = self.db_cursor.fetchone()\n ct = ct[0]\n if ct == 0:\n return False\n return True"
] | [
"0.7501682",
"0.71820104",
"0.7172594",
"0.65129846",
"0.6319828",
"0.6298175",
"0.6200613",
"0.61970544",
"0.6028055",
"0.6013558",
"0.5967163",
"0.59517914",
"0.5941613",
"0.5909699",
"0.5895177",
"0.5893227",
"0.58905315",
"0.58895624",
"0.5875359",
"0.58233863",
"0.5777374",
"0.5771724",
"0.57558054",
"0.5703887",
"0.5687981",
"0.5679008",
"0.56692326",
"0.5663802",
"0.56560016",
"0.56407756"
] | 0.72383255 | 1 |
Replace terminator with given operator. | def replaceTerminator(self, op):
self._children[0].replaceTerminator(op) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def replaceTerminator(self, op):\n if not (op in (',', ';')):\n raise RuntimeError(\"invalid replacement terminator for GlslBlockStatement: '%s'\" % (op))\n self.__terminator = op",
"def set_terminator (self, term):\r\n self.terminator = term",
"def _remove_operator(self, operator):",
"def change_operator(self, text):\n self.operator = text\n if self.current_num:\n self.prev_num = self.current_num\n self.current_num = \"\"",
"def operator(self):\n col = self.pos\n operators = [\"||\", \"&&\", \">>\", \"<<\", \"!=\", \">=\", \"<=\", \"==\", \"##\"] + \\\n [\"-\", \"+\", \"!\", \"*\", \"/\", \"|\", \"&\", \"^\", \"<\", \">\", \"?\", \":\", \"~\", \"#\", \"=\", \"%\"]\n try:\n index = self.match_any(operators)\n\n op = Operator(self.line, col, self.prev_white, operators[index])\n return op\n except TokenError:\n self.pos = col\n raise TokenError(\"Invalid operator.\")",
"def _append_operator(self, operator):",
"def replace_op(self, op, replacement):\n self.replacement_list.append((op, replacement))",
"def operator(self, operator: str):\n\n self._operator = operator",
"def set_operator(self, op):\n self.operator = op",
"def apply_rule(operator, pattern, replacement):\n new_op = operator.match_first(pattern)\n if new_op is None:\n return None\n return new_op.replace_first(\"generic\", replacement)",
"def RewriteTerm(self, key, op, operand, key_type):\n if key not in self._keys or op != '=':\n return None\n return operand",
"def set_operator(self, operator):\n\n self['dimensionFilterClauses']['operator'] = operator.upper()\n\n return self",
"def prefix_to_postfix(input_str): # prefix requires that all operators precede the two operands that they work on\n\n \"\"\"Input argument: a string containing a prefix expression where tokens are \n space separated. Tokens are either operators + - * / ** << >> or numbers (integers or floats)\n Returns a String containing a postfix expression(tokens are space separated)\"\"\"\n if input_str is None: raise ValueError\n # split input string into list\n term_list = input_str.split()\n #print(\"TERM LIST \",term_list) \n # initialize output list\n output_list = []\n #print(\"OUT SIZE \", len(output_list))\n # initialize operator stack\n operator_stack = Stack(len(term_list)//3+1)\n for i in range(len(term_list)):\n term = term_list[i]\n # prefix should begin with an operator otherwise raise Exception\n if i == 0:\n if operator_present(term) is True: operator_stack.push(term)\n else: raise PostfixFormatException()\n # Check for operator\n elif operator_present(term): \n operator_stack.push(term)\n # check for operand\n elif operand_present(term):\n output_list.append(term)\n # if previous two terms in output list were operands, pop operator stack to output list once\n if operand_present(term_list[i-1]):\n output_list.append(operator_stack.pop())\n # for every three operands there should be an additional operator\n if operand_present(term_list[i-3]) and operator_stack.size() != 0:\n output_list.append(operator_stack.pop())\n while operator_stack.size() != 0:\n output_list.append(operator_stack.pop())\n new_str = (\" \".join(output_list))\n #print(\"NEW STR \", new_str)\n return new_str",
"def postfix(self):\n return self.leftOperand.postfix() + \" \" + self.rightOperand.postfix() + \" \" + str(self.operator)",
"def literal_symbol(literal):\n if literal.op == '~':\n return literal.args[0]\n else:\n return literal",
"def operator(self, operator):\n\n self._operator = operator",
"def SetOperator(self, op):\n return _hypre.HypreParaSails_SetOperator(self, op)",
"def is_operator(obj):\n return isinstance(obj, Token) and obj[0] not in '/01234567890+-.<[('",
"def applyOperator(self, operator, operand):\n if self.currentTotal == None:\n self.currentTotal = operand\n elif operator == \"=\":\n self.equalsOp(operand)\n elif self.previousOperand:\n self.previousOperand = None\n else:\n self.computeTotal(operator, operand)\n if operator != \"=\":\n self.previousOperator = operator",
"def output_op(self, op):\n self.output['text'] += ' ' + op + ' ' \n self.seen_op = True",
"def parse_single_op_string(opstring) :\n ops = {'+' : \"plus\",\n '?' : \"opt\" , \n '*' : \"star\"}\n return '('.join(ops[c] for c in reversed(opstring)) + '('",
"def replace_operators(self, instr):\n # change ++, -- to add(1), sub(1)\n instr = re.sub(r\"\\+\\+\", \".add(1)\", instr)\n instr = re.sub(r\"--\", \".sub(1)\", instr)\n\n m1 = re.search(r\"[+\\-*/]=\", instr)\n result = \"\"\n if m1:\n # handle the string with +=, -=, *=. /=\n v = instr[: m1.start()].rstrip(\" \")\n v1 = v.strip(\" \")\n expressions = [v1, m1.group()[: 1], \"(\", instr[m1.end():].strip().strip(\";\"), \");\"]\n instr = v + \"= \" + \" \".join(expressions)\n\n # split by !, &&, ||\n equations = re.split(r\"(!|&&|\\|\\||)\", instr)\n for equation in equations:\n # split by <=, >=, ==, !=, =\n expressions = re.split(r\"([<>=!]*=)\", equation)\n if len(expressions) == 1:\n result += equation\n else:\n for expression in expressions:\n if re.search(r\"[+\\-*/]\", expression):\n # with math operators\n # 0.exclude ;\n rc = \"\"\n pos = expression.find(';')\n if pos != -1:\n rc = expression[pos:]\n expression = expression[:pos]\n\n # 1.exclude independent ( or )\n lbc = expression.count(\"(\")\n rbc = expression.count(\")\")\n lc = \"\"\n if lbc > rbc:\n # ( is more than )\n pos = expression.replace('(', 'X', lbc - rbc - 1).find('(')\n lc = expression[: pos + 1]\n expression = expression[pos + 1:]\n else:\n if lbc < rbc:\n # ( is less than )\n pos = 'X'.join(expression.rsplit(')', rbc - lbc - 1)).rfind(')')\n rc = expression[pos:] + rc\n expression = expression[:pos]\n\n # 2.change normal notation to RPN, in order to change math operators to SafeMath operators\n # 3.change RPN to normal notation\n result += lc + self.rpn_to_nn(self.nn_to_rpn(expression)) + rc\n else:\n result += expression\n\n return result",
"def _ExpectSpaceBeforeOperator(self, token):\n if token.string == ',' or token.metadata.IsUnaryPostOperator():\n return False\n\n # Colons should appear in labels, object literals, the case of a switch\n # statement, and ternary operator. Only want a space in the case of the\n # ternary operator.\n if (token.string == ':' and\n token.metadata.context.type in (Context.LITERAL_ELEMENT,\n Context.CASE_BLOCK,\n Context.STATEMENT)):\n return False\n\n if token.metadata.IsUnaryOperator() and token.IsFirstInLine():\n return False\n\n return True",
"def found_terminator(self):\n self.signal_filter.send((self, \"\".join(self.buffer)))\n del(self.buffer[:])",
"def replace_entry_point(self, op):\n lst = self.want_entry_point()\n if lst:\n self.__content[lst[0]] = \"%s:\\n\" % op",
"def __rxor__(self, other):\n return whitespaces.CURRENT.normalize(other) ^ self",
"def add_repair_operator(\n self, op: _OperatorType, name: Optional[str] = None\n ):\n logger.debug(f\"Adding repair operator {op.__name__}.\")\n self._r_ops[name if name else op.__name__] = op",
"def prefix_to_postfix(input_str: str) -> Any:\n \"\"\"Input argument: a string containing a prefix expression where tokens are \n space separated. Tokens are either operators + - * / ** << >> or numbers (integers or floats)\n Returns a String containing a postfix expression(tokens are space separated)\"\"\"\n stack = Stack(30)\n if input_str == \"\":\n return (\"\")\n op_list = [\"+\", \"-\", \"*\", \"/\", \"<<\", \">>\", \"**\"]\n split_list = input_str.split()\n track = len(split_list) - 1\n while track >= 0:\n new_val = split_list[track].lstrip(\"-\")\n new_val = new_val.replace(\".\", \"\", 1)\n if new_val.isdigit():\n stack.push(split_list[track])\n track = track - 1\n elif split_list[track] in op_list:\n first = stack.pop()\n second = stack.pop()\n stack.push(first + \" \" + second + \" \" + split_list[track])\n track = track - 1\n else:\n break\n postfix = stack.pop()\n return postfix",
"def mutate_single_node(self, node, operator):\n if node.__class__ is operator[0] or (operator[1] is StatementDeletion and node.__class__ is ast.Pass):\n mutated_node = operator[1].mutate(node)\n node = mutated_node\n\n return node",
"def replace_logical_ops(line, direction):\n\n # Find list indexes for to and from conversions\n index_a = 0 if direction == \"to\" else 1\n index_b = (index_a + 1) % 2\n\n # Create replacement maps\n replacement_list = [[\"and\", \"&&\"], [\"or\", \"||\"], [\"not\", \"!\"]]\n\n # Split line to words list\n words = re.split(r\"([^&|!\\w+])\", line)\n\n # Replace logical operators\n i, words_count = 0, len(words)\n while i != words_count:\n\n # Catch exception if index error due to last word\n try:\n # Remove space after 'not'\n if words[i] == \"not\" and words[i + 1] == \" \" and not index_a:\n del words[i + 1]\n words_count -= 1\n\n except IndexError:\n pass\n\n # Replace words\n for opr in replacement_list:\n if words[i] == opr[index_a]:\n words[i] = opr[index_b]\n\n # If '!' is attached with words, replace it with 'not '\n if words[i].startswith(\"!\") and index_a:\n words.insert(i + 1, words[i][1:])\n words[i] = \"not \"\n\n # Increment count\n i += 1\n\n # Return modified line\n return \"\".join(words)"
] | [
"0.6887956",
"0.64663404",
"0.6182046",
"0.6009709",
"0.5759515",
"0.5676872",
"0.5563232",
"0.55417585",
"0.5515796",
"0.5486866",
"0.54747343",
"0.5472199",
"0.5358069",
"0.5353344",
"0.530891",
"0.5292029",
"0.52111715",
"0.5193843",
"0.51775455",
"0.5096455",
"0.5080006",
"0.5079052",
"0.50395435",
"0.50318974",
"0.5011657",
"0.50059515",
"0.4989215",
"0.49824184",
"0.4952105",
"0.49359202"
] | 0.79558337 | 0 |
Tell if given object is GlslBlockUnary. | def is_glsl_block_unary(op):
return isinstance(op, GlslBlockUnary) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def is_glsl_block_function(op):\n return isinstance(op, GlslBlockFunction)",
"def is_unary(*args):\n return _ida_hexrays.is_unary(*args)",
"def isLux(self):\n return _libsbml.Unit_isLux(self)",
"def is_block(modules):\n if isinstance(modules, (BasicBlock, Bottleneck)):\n return True\n return False",
"def _is_unary_op(op):\n if op.type == TokenType.BitwiseNot:\n return True\n return False",
"def isMergableWith(self, op):\n if not is_glsl_block_function(op):\n return False\n if (self.getName() != op.getName()) or (self.getType() != op.getType()):\n return False\n return True",
"def isLitre(self):\n return _libsbml.Unit_isLitre(self)",
"def is_block(self):\n return self.v & 1 == 0",
"def is_vertex(self): \n return False",
"def isUMinus(self):\n return _libsbml.ASTNode_isUMinus(self)",
"def is_block(self):\n\t\treturn self.name in get_elements_collection(self.__res, 'block_level')",
"def is_unary(s):\n return s == '~'",
"def representsUnaryFunction(self, *args):\n return _libsbml.ASTBasePlugin_representsUnaryFunction(self, *args)",
"def is_unit(xblock):\r\n if xblock.category == 'vertical':\r\n parent_xblock = get_parent_xblock(xblock)\r\n parent_category = parent_xblock.category if parent_xblock else None\r\n return parent_category == 'sequential'\r\n return False",
"def _filter_if(node):\n return (\n isinstance(node.expression, UnaryOperation)\n and node.expression.type == UnaryOperationType.BANG\n )",
"def isinstance_blender_object(self, b_obj):\n # lame and slow, but functional\n return b_obj in Blender.Object.Get()",
"def is_vertex(self):\n return False",
"def is_unary_operator(oper):\n # definition:\n # memeber in class\n # ret-type operator symbol()\n # ret-type operator [++ --](int)\n # globally\n # ret-type operator symbol( arg )\n # ret-type operator [++ --](X&, int)\n symbols = ['!', '&', '~', '*', '+', '++', '-', '--']\n if not isinstance(oper, calldef.operator_t):\n return False\n if oper.symbol not in symbols:\n return False\n if isinstance(oper, calldef.member_operator_t):\n if 0 == len(oper.arguments):\n return True\n elif oper.symbol in ['++', '--'] and \\\n isinstance(oper.arguments[0].type, cpptypes.int_t):\n return True\n else:\n return False\n else:\n if 1 == len(oper.arguments):\n return True\n elif oper.symbol in ['++', '--'] \\\n and 2 == len(oper.arguments) \\\n and isinstance(oper.arguments[1].type, cpptypes.int_t):\n # may be I need to add additional check whether first argument is\n # reference or not?\n return True\n else:\n return False",
"def isUPlus(self):\n return _libsbml.ASTNode_isUPlus(self)",
"def has_nlu(self):\n return self.metadata.has_been_coded_for(\"nlu\")",
"def _target_filter(self, obj):\r\n return type(obj).__name__ in ['Cube'] and not obj.is_grasped # List because may be extended to other objects.\r",
"def is_icecube_class(obj: Any) -> bool:\n classname = str(type(obj))\n return \"icecube.\" in classname",
"def __bool__(self):\n return _osgAnimation.mapVertexInfluence___bool__(self)",
"def check(self, grain=50):\r\n opengles.glDisable(GL_SCISSOR_TEST)\r\n self.s_flg = False\r\n opengles.glReadPixels(0, self.y0, self.ix, 1,\r\n GL_RGB, GL_UNSIGNED_BYTE,\r\n ctypes.byref(self.img))\r\n r0 = self.img[0:3]\r\n for i in xrange(0, self.img_sz, self.step):\r\n if self.img[i:(i+3)] != r0:\r\n return True\r\n\r\n return False",
"def _has_unary_negation(self, child_types, expression):\n \n return len(expression) in [0,2] and child_types[0] == '-'",
"def has_node(self, u: Hashable) -> bool:\n return u in self._names",
"def is_vertex(self):\n return True",
"def is_vertex(self):\n return True",
"def is_ray(self): \n return False",
"def check_block(self, block):\n pass"
] | [
"0.6930309",
"0.575511",
"0.57203484",
"0.55796754",
"0.55182594",
"0.540804",
"0.53220403",
"0.52651286",
"0.52540934",
"0.5241585",
"0.5217723",
"0.51737624",
"0.51331514",
"0.5100314",
"0.5092402",
"0.50843877",
"0.50426793",
"0.5042636",
"0.49764892",
"0.4973854",
"0.49662828",
"0.49597505",
"0.4957318",
"0.49524304",
"0.49483353",
"0.49281487",
"0.49258995",
"0.49258995",
"0.4916681",
"0.48950088"
] | 0.8295164 | 0 |
Delete all user channel (AdminDeleteAllUserChannels) | def admin_delete_all_user_channels(
user_id: str,
namespace: Optional[str] = None,
x_additional_headers: Optional[Dict[str, str]] = None,
**kwargs
):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = AdminDeleteAllUserChannels.create(
user_id=user_id,
namespace=namespace,
)
return run_request(request, additional_headers=x_additional_headers, **kwargs) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"async def admin_delete_all_user_channels_async(\n user_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = AdminDeleteAllUserChannels.create(\n user_id=user_id,\n namespace=namespace,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )",
"async def channeldelete(ctx):\r\n await ctx.send(\"🉐Deleting all channels...\")\r\n for channel in ctx.guild.channels:\r\n try:\r\n await channel.delete()\r\n except:\r\n print(f\"{Fore.RED}[-]CHANNEL => {Fore.RESET}Failed to delete: {channel}\")",
"async def delete_all_user_channel_async(\n user_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = DeleteAllUserChannel.create(\n user_id=user_id,\n namespace=namespace,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )",
"def delete_all_user_channel(\n user_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = DeleteAllUserChannel.create(\n user_id=user_id,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)",
"def clear_all():\n viewer = connect_to_ginga()\n shell = viewer.shell()\n chnames = shell.get_channel_names()\n for ch in chnames:\n shell.delete_channel(ch)",
"async def fulldelete(ctx):\r\n await ctx.message.delete()\r\n roles = ctx.guild.roles\r\n roles.pop(0)\r\n for role in roles:\r\n if ctx.guild.roles[-1] > role:\r\n try:\r\n await role.delete()\r\n except:\r\n print(\r\n f\"{Fore.RED}[-]ROLE => {Fore.RESET}Failed to delete role: {role}\"\r\n )\r\n for channel in ctx.guild.channels:\r\n try:\r\n await channel.delete()\r\n except:\r\n print(f\"{Fore.RED}[-]CHANNEL => {Fore.RESET}Failed to delete: {channel}\")",
"def delete_all_users(self):\n\n User.query.delete()",
"def delete_all_users():\n\tUser.drop_collection()",
"def hard_delete_user_related_data(self):\n from contentcuration.viewsets.common import SQCount\n\n # Hard delete invitations associated to this account.\n self.sent_to.all().delete()\n self.sent_by.all().delete()\n\n editable_channels_user_query = (\n User.objects.filter(editable_channels__id=OuterRef('id'))\n .values_list('id', flat=True)\n .distinct()\n )\n non_public_channels_sole_editor = self.editable_channels.annotate(num_editors=SQCount(\n editable_channels_user_query, field=\"id\")).filter(num_editors=1, public=False)\n\n # Point sole editor non-public channels' contentnodes to orphan tree to let\n # our garbage collection delete the nodes and underlying files.\n ContentNode._annotate_channel_id(ContentNode.objects).filter(channel_id__in=list(\n non_public_channels_sole_editor.values_list(\"id\", flat=True))).update(parent_id=settings.ORPHANAGE_ROOT_ID)\n\n # Hard delete non-public channels associated with this user (if user is the only editor).\n non_public_channels_sole_editor.delete()\n\n # Hard delete non-public channel collections associated with this user (if user is the only editor).\n user_query = (\n User.objects.filter(channel_sets__id=OuterRef('id'))\n .values_list('id', flat=True)\n .distinct()\n )\n self.channel_sets.annotate(num_editors=SQCount(user_query, field=\"id\")).filter(num_editors=1, public=False).delete()\n\n # Create history!\n self.history.create(user_id=self.pk, action=user_history.RELATED_DATA_HARD_DELETION)",
"async def clear(self, ctx):\n await self.config.guild(ctx.guild).channels.clear()\n await ctx.send(\"Spoiler channel list cleared.\")",
"def reset_all_users():\n for user in User.objects.all():\n user.delete()",
"async def channel_(self, ctx, number=10):\n number = number if number <= 100 else 100\n question = await ctx.send(f\"this will delete the last {number} messages from ALL users. Continue?\")\n await question.add_reaction(self.reactions[0])\n await question.add_reaction(self.reactions[1])\n\n def check_is_author(reaction, user):\n return reaction.message.id == question.id and user.id == ctx.author.id and \\\n reaction.emoji in self.reactions\n try:\n reaction, user = await self.bot.wait_for(\"reaction_add\", check=check_is_author, timeout=20)\n if reaction.emoji == self.reactions[1]:\n await question.delete()\n return\n except asyncio.TimeoutError:\n await question.delete()\n return\n\n try:\n messages = await ctx.channel.purge(limit=number+1)\n await ctx.send(f\"deleted the last {len(messages)-1} messages from this channel\")\n except (discord.ClientException, discord.Forbidden, discord.HTTPException) as e:\n await ctx.send(str(e))\n except Exception as ex:\n import traceback\n owner = ctx.guild.get_member(self.bot.owner_id)\n if owner:\n await owner.send(traceback.print_exc())\n self.error_log.error(traceback.print_exc())",
"async def delete_bot_msg(self, channel):\n await channel.purge(limit=100, check=self.is_me)",
"async def remove_bot_channels(self, guild_id):\n api_cog = self.bot.get_cog('RR_API')\n channelInfo = await api_cog.get_channel_info(guild_id)\n\n if not channelInfo:\n print(\"Server Name Not in DB, Can't delete channels. Server: \" + str(guild_id))\n return\n if channelInfo['futurechannelid']:\n await self.bot.get_channel(int(channelInfo['futurechannelid'])).delete()\n if channelInfo['pastchannelid']:\n await self.bot.get_channel(int(channelInfo['pastchannelid'])).delete()\n if channelInfo['lootchannelid']:\n await self.bot.get_channel(int(channelInfo['lootchannelid'])).delete()\n if channelInfo['commandschannelid']:\n await self.bot.get_channel(int(channelInfo['commandschannelid'])).delete()\n if channelInfo['categoryid']:\n await self.bot.get_channel(int(channelInfo['categoryid'])).delete()",
"async def channel(self, ctx, limit: int=100, channel: discord.TextChannel=None):\n\n if channel is None:\n channel = ctx.channel\n\n # noinspection PyUnresolvedReferences\n messages = await channel.purge(limit=limit)\n messages = len(messages)\n\n plural = '' if messages == 1 else 's'\n\n await ctx.send('Purged {} message{}.'.format(messages, plural), delete_after=10)",
"def delete_from_all(self, user_id):\n self.execute(TABELLE['id_users']['delete'], (user_id,))\n self.execute(TABELLE['users']['delete'], (user_id,))\n self.execute(TABELLE['punteggio']['delete'], (user_id,))\n self.execute(TABELLE['items']['delete'], (user_id,))",
"async def clear(self, ctx, amount: int, user: discord.Member = None):\n amount += 1\n\n def clear_x(m):\n return m.author == user\n if not user:\n everyone = True\n else:\n everyone = False\n if amount <= 101:\n if not everyone:\n await ctx.channel.purge(limit=amount, check=clear_x, bulk=True)\n elif everyone:\n await ctx.channel.purge(limit=amount, bulk=True)\n log.console(f\"Pruned {amount} messages from {ctx.channel.id}\")\n if amount >= 102:\n if amount > 1000:\n amount = 1000\n number = (amount // 100)\n await ctx.send(\n f\"> **{amount}** messages will be deleted in 5 seconds and will be split in intervals of 100.\")\n for _ in range(number):\n await asyncio.sleep(0)\n if not everyone:\n await ctx.channel.purge(limit=100, check=clear_x, bulk=True)\n elif everyone:\n await ctx.channel.purge(limit=100, bulk=True)\n log.console(f\"Pruned 100 messages from {ctx.channel.id}\")\n await ctx.send(f\"> **{amount}** messages have been pruned from {ctx.channel.id}.\")",
"def db_delete_user_data(self):\n util.log(\"Clearing all user data\", util.LogLevel.Info)\n self.db.db_clear_data_user()\n util.log(\"Done\", util.LogLevel.Info)",
"async def clear(ctx, amount=100):\r\n channel = ctx.message.channel\r\n messages = []\r\n amount = int(amount) + 1\r\n async for message in cleintt.logs_from(channel, limit=amount):\r\n messages.append(message)\r\n await bot.delete_messages(messages)",
"def cleanup(self, channel=None):\n # falls `channel` angegeben wurden, werden nur diese bereinigt,\n # ansonsten wird alles bereinigt\n if channel:\n # ueberpruefe, ob `channel` eine Zahl ist und erstelle eventuell eine Liste nur mit dieser Zahl\n # dies ist wichtig, weil eine For-Schleife nicht ueber eine Zahl,\n # sondern in meinem Fall nur ueber eine Liste, iterieren kann\n if type(channel) == int:\n channel = [channel]\n for c in channel:\n # loesche den channel `c` aus dem dictionary `self.channels`\n del self.channels[c]\n print(f\"cleanup von channel {c}\")\n else:\n print(\"cleanup\")\n self.channels = {}",
"def delete_all_teachers(connection):\r\n with connection:\r\n return connection.execute(DELETE_ALL_TEACHERS)",
"async def clear(ctx, messages=5):\n await ctx.channel.purge(\n limit=messages + 1\n ) # the clear command counts as a message, so be sure to remove it too",
"async def clear(ctx, amount=10):\n\tawait ctx.channel.purge(limit=amount + 1)",
"async def deleteCategory(self, ctx, reason=\"No reason available\"):\n for category in ctx.guild.categories:\n if category.name == self.categoryName:\n try:\n for chan in category.channels:\n await chan.delete()\n await category.delete(reason=reason)\n except discord.errors.Forbidden:\n self.msgToDelete.append(await ctx.message.channel.send(\n \"Erreur, permission non accordée, la suppression des catégories n'est pas complète.\"))\n print(\"Deleted all category.\")",
"async def forceclear(self, ctx):\n guild = ctx.message.guild\n await self.config.clear_all_members(guild)\n await ctx.send(\"Force cleared all members data\")",
"async def vote_clear(ctx: commands.Context):\n session = session_maker()\n old_channel = session.query(Channel).filter_by(channel_id=ctx.channel.id).one_or_none()\n if old_channel is None:\n await ctx.send('This channel was never setup for votes.')\n return\n old_votes = session.query(Vote).filter_by(channel_id=ctx.channel.id).all()\n for old_vote in old_votes:\n session.delete(old_vote)\n session.commit()\n await ctx.send(f'Votes for {ctx.channel} cleared!')",
"async def clearall(self, ctx):\n await self.config.guild(ctx.guild).clear_raw('Cookies')\n await ctx.send(\n f\"**Fortune cookies supprimés** • La liste est désormais vide pour ce serveur.\")",
"def list_cheque(channel, user_id):\n return dba.get_user_valid_cheques(user_id)",
"def admin_delete_all_user_contents(\n user_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = AdminDeleteAllUserContents.create(\n user_id=user_id,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)",
"async def user_(self, ctx, users: commands.Greedy[SnowflakeUserConverter], number=10):\n number = number if number <= 100 else 100\n if not users:\n await ctx.send(\"provide at least one user who's messages will be deleted\")\n return\n try:\n history_mes = await ctx.channel.history(limit=100).flatten()\n messages_to_delete = [mes for mes in history_mes if mes.author.id in [u.id for u in users]]\n await ctx.channel.delete_messages(messages_to_delete[:number])\n await ctx.send(f\"deleted {len(messages_to_delete[0:number])} messages\")\n except (discord.ClientException, discord.HTTPException, discord.Forbidden) as e:\n raise\n except Exception as ex:\n import traceback\n owner = ctx.guild.get_member(self.bot.owner_id)\n if owner:\n await owner.send(traceback.format_exc())\n self.error_log.error(traceback.format_exc())"
] | [
"0.7898083",
"0.72674114",
"0.7254808",
"0.7245049",
"0.6908353",
"0.6609588",
"0.6604705",
"0.6420626",
"0.62551856",
"0.6188927",
"0.6140078",
"0.6094711",
"0.6078365",
"0.6064797",
"0.6021782",
"0.60032964",
"0.60025203",
"0.59636873",
"0.5854956",
"0.58364797",
"0.57957107",
"0.5783181",
"0.5781861",
"0.5773041",
"0.57707965",
"0.5744029",
"0.574097",
"0.5725988",
"0.5713846",
"0.5706109"
] | 0.80059516 | 0 |
Delete all user channel (AdminDeleteAllUserChannels) | async def admin_delete_all_user_channels_async(
user_id: str,
namespace: Optional[str] = None,
x_additional_headers: Optional[Dict[str, str]] = None,
**kwargs
):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = AdminDeleteAllUserChannels.create(
user_id=user_id,
namespace=namespace,
)
return await run_request_async(
request, additional_headers=x_additional_headers, **kwargs
) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def admin_delete_all_user_channels(\n user_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = AdminDeleteAllUserChannels.create(\n user_id=user_id,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)",
"async def channeldelete(ctx):\r\n await ctx.send(\"🉐Deleting all channels...\")\r\n for channel in ctx.guild.channels:\r\n try:\r\n await channel.delete()\r\n except:\r\n print(f\"{Fore.RED}[-]CHANNEL => {Fore.RESET}Failed to delete: {channel}\")",
"async def delete_all_user_channel_async(\n user_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = DeleteAllUserChannel.create(\n user_id=user_id,\n namespace=namespace,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )",
"def delete_all_user_channel(\n user_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = DeleteAllUserChannel.create(\n user_id=user_id,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)",
"def clear_all():\n viewer = connect_to_ginga()\n shell = viewer.shell()\n chnames = shell.get_channel_names()\n for ch in chnames:\n shell.delete_channel(ch)",
"async def fulldelete(ctx):\r\n await ctx.message.delete()\r\n roles = ctx.guild.roles\r\n roles.pop(0)\r\n for role in roles:\r\n if ctx.guild.roles[-1] > role:\r\n try:\r\n await role.delete()\r\n except:\r\n print(\r\n f\"{Fore.RED}[-]ROLE => {Fore.RESET}Failed to delete role: {role}\"\r\n )\r\n for channel in ctx.guild.channels:\r\n try:\r\n await channel.delete()\r\n except:\r\n print(f\"{Fore.RED}[-]CHANNEL => {Fore.RESET}Failed to delete: {channel}\")",
"def delete_all_users(self):\n\n User.query.delete()",
"def delete_all_users():\n\tUser.drop_collection()",
"def hard_delete_user_related_data(self):\n from contentcuration.viewsets.common import SQCount\n\n # Hard delete invitations associated to this account.\n self.sent_to.all().delete()\n self.sent_by.all().delete()\n\n editable_channels_user_query = (\n User.objects.filter(editable_channels__id=OuterRef('id'))\n .values_list('id', flat=True)\n .distinct()\n )\n non_public_channels_sole_editor = self.editable_channels.annotate(num_editors=SQCount(\n editable_channels_user_query, field=\"id\")).filter(num_editors=1, public=False)\n\n # Point sole editor non-public channels' contentnodes to orphan tree to let\n # our garbage collection delete the nodes and underlying files.\n ContentNode._annotate_channel_id(ContentNode.objects).filter(channel_id__in=list(\n non_public_channels_sole_editor.values_list(\"id\", flat=True))).update(parent_id=settings.ORPHANAGE_ROOT_ID)\n\n # Hard delete non-public channels associated with this user (if user is the only editor).\n non_public_channels_sole_editor.delete()\n\n # Hard delete non-public channel collections associated with this user (if user is the only editor).\n user_query = (\n User.objects.filter(channel_sets__id=OuterRef('id'))\n .values_list('id', flat=True)\n .distinct()\n )\n self.channel_sets.annotate(num_editors=SQCount(user_query, field=\"id\")).filter(num_editors=1, public=False).delete()\n\n # Create history!\n self.history.create(user_id=self.pk, action=user_history.RELATED_DATA_HARD_DELETION)",
"async def clear(self, ctx):\n await self.config.guild(ctx.guild).channels.clear()\n await ctx.send(\"Spoiler channel list cleared.\")",
"def reset_all_users():\n for user in User.objects.all():\n user.delete()",
"async def channel_(self, ctx, number=10):\n number = number if number <= 100 else 100\n question = await ctx.send(f\"this will delete the last {number} messages from ALL users. Continue?\")\n await question.add_reaction(self.reactions[0])\n await question.add_reaction(self.reactions[1])\n\n def check_is_author(reaction, user):\n return reaction.message.id == question.id and user.id == ctx.author.id and \\\n reaction.emoji in self.reactions\n try:\n reaction, user = await self.bot.wait_for(\"reaction_add\", check=check_is_author, timeout=20)\n if reaction.emoji == self.reactions[1]:\n await question.delete()\n return\n except asyncio.TimeoutError:\n await question.delete()\n return\n\n try:\n messages = await ctx.channel.purge(limit=number+1)\n await ctx.send(f\"deleted the last {len(messages)-1} messages from this channel\")\n except (discord.ClientException, discord.Forbidden, discord.HTTPException) as e:\n await ctx.send(str(e))\n except Exception as ex:\n import traceback\n owner = ctx.guild.get_member(self.bot.owner_id)\n if owner:\n await owner.send(traceback.print_exc())\n self.error_log.error(traceback.print_exc())",
"async def delete_bot_msg(self, channel):\n await channel.purge(limit=100, check=self.is_me)",
"async def remove_bot_channels(self, guild_id):\n api_cog = self.bot.get_cog('RR_API')\n channelInfo = await api_cog.get_channel_info(guild_id)\n\n if not channelInfo:\n print(\"Server Name Not in DB, Can't delete channels. Server: \" + str(guild_id))\n return\n if channelInfo['futurechannelid']:\n await self.bot.get_channel(int(channelInfo['futurechannelid'])).delete()\n if channelInfo['pastchannelid']:\n await self.bot.get_channel(int(channelInfo['pastchannelid'])).delete()\n if channelInfo['lootchannelid']:\n await self.bot.get_channel(int(channelInfo['lootchannelid'])).delete()\n if channelInfo['commandschannelid']:\n await self.bot.get_channel(int(channelInfo['commandschannelid'])).delete()\n if channelInfo['categoryid']:\n await self.bot.get_channel(int(channelInfo['categoryid'])).delete()",
"async def channel(self, ctx, limit: int=100, channel: discord.TextChannel=None):\n\n if channel is None:\n channel = ctx.channel\n\n # noinspection PyUnresolvedReferences\n messages = await channel.purge(limit=limit)\n messages = len(messages)\n\n plural = '' if messages == 1 else 's'\n\n await ctx.send('Purged {} message{}.'.format(messages, plural), delete_after=10)",
"def delete_from_all(self, user_id):\n self.execute(TABELLE['id_users']['delete'], (user_id,))\n self.execute(TABELLE['users']['delete'], (user_id,))\n self.execute(TABELLE['punteggio']['delete'], (user_id,))\n self.execute(TABELLE['items']['delete'], (user_id,))",
"async def clear(self, ctx, amount: int, user: discord.Member = None):\n amount += 1\n\n def clear_x(m):\n return m.author == user\n if not user:\n everyone = True\n else:\n everyone = False\n if amount <= 101:\n if not everyone:\n await ctx.channel.purge(limit=amount, check=clear_x, bulk=True)\n elif everyone:\n await ctx.channel.purge(limit=amount, bulk=True)\n log.console(f\"Pruned {amount} messages from {ctx.channel.id}\")\n if amount >= 102:\n if amount > 1000:\n amount = 1000\n number = (amount // 100)\n await ctx.send(\n f\"> **{amount}** messages will be deleted in 5 seconds and will be split in intervals of 100.\")\n for _ in range(number):\n await asyncio.sleep(0)\n if not everyone:\n await ctx.channel.purge(limit=100, check=clear_x, bulk=True)\n elif everyone:\n await ctx.channel.purge(limit=100, bulk=True)\n log.console(f\"Pruned 100 messages from {ctx.channel.id}\")\n await ctx.send(f\"> **{amount}** messages have been pruned from {ctx.channel.id}.\")",
"def db_delete_user_data(self):\n util.log(\"Clearing all user data\", util.LogLevel.Info)\n self.db.db_clear_data_user()\n util.log(\"Done\", util.LogLevel.Info)",
"async def clear(ctx, amount=100):\r\n channel = ctx.message.channel\r\n messages = []\r\n amount = int(amount) + 1\r\n async for message in cleintt.logs_from(channel, limit=amount):\r\n messages.append(message)\r\n await bot.delete_messages(messages)",
"def cleanup(self, channel=None):\n # falls `channel` angegeben wurden, werden nur diese bereinigt,\n # ansonsten wird alles bereinigt\n if channel:\n # ueberpruefe, ob `channel` eine Zahl ist und erstelle eventuell eine Liste nur mit dieser Zahl\n # dies ist wichtig, weil eine For-Schleife nicht ueber eine Zahl,\n # sondern in meinem Fall nur ueber eine Liste, iterieren kann\n if type(channel) == int:\n channel = [channel]\n for c in channel:\n # loesche den channel `c` aus dem dictionary `self.channels`\n del self.channels[c]\n print(f\"cleanup von channel {c}\")\n else:\n print(\"cleanup\")\n self.channels = {}",
"def delete_all_teachers(connection):\r\n with connection:\r\n return connection.execute(DELETE_ALL_TEACHERS)",
"async def clear(ctx, messages=5):\n await ctx.channel.purge(\n limit=messages + 1\n ) # the clear command counts as a message, so be sure to remove it too",
"async def clear(ctx, amount=10):\n\tawait ctx.channel.purge(limit=amount + 1)",
"async def deleteCategory(self, ctx, reason=\"No reason available\"):\n for category in ctx.guild.categories:\n if category.name == self.categoryName:\n try:\n for chan in category.channels:\n await chan.delete()\n await category.delete(reason=reason)\n except discord.errors.Forbidden:\n self.msgToDelete.append(await ctx.message.channel.send(\n \"Erreur, permission non accordée, la suppression des catégories n'est pas complète.\"))\n print(\"Deleted all category.\")",
"async def forceclear(self, ctx):\n guild = ctx.message.guild\n await self.config.clear_all_members(guild)\n await ctx.send(\"Force cleared all members data\")",
"async def vote_clear(ctx: commands.Context):\n session = session_maker()\n old_channel = session.query(Channel).filter_by(channel_id=ctx.channel.id).one_or_none()\n if old_channel is None:\n await ctx.send('This channel was never setup for votes.')\n return\n old_votes = session.query(Vote).filter_by(channel_id=ctx.channel.id).all()\n for old_vote in old_votes:\n session.delete(old_vote)\n session.commit()\n await ctx.send(f'Votes for {ctx.channel} cleared!')",
"async def clearall(self, ctx):\n await self.config.guild(ctx.guild).clear_raw('Cookies')\n await ctx.send(\n f\"**Fortune cookies supprimés** • La liste est désormais vide pour ce serveur.\")",
"def list_cheque(channel, user_id):\n return dba.get_user_valid_cheques(user_id)",
"def admin_delete_all_user_contents(\n user_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = AdminDeleteAllUserContents.create(\n user_id=user_id,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)",
"async def user_(self, ctx, users: commands.Greedy[SnowflakeUserConverter], number=10):\n number = number if number <= 100 else 100\n if not users:\n await ctx.send(\"provide at least one user who's messages will be deleted\")\n return\n try:\n history_mes = await ctx.channel.history(limit=100).flatten()\n messages_to_delete = [mes for mes in history_mes if mes.author.id in [u.id for u in users]]\n await ctx.channel.delete_messages(messages_to_delete[:number])\n await ctx.send(f\"deleted {len(messages_to_delete[0:number])} messages\")\n except (discord.ClientException, discord.HTTPException, discord.Forbidden) as e:\n raise\n except Exception as ex:\n import traceback\n owner = ctx.guild.get_member(self.bot.owner_id)\n if owner:\n await owner.send(traceback.format_exc())\n self.error_log.error(traceback.format_exc())"
] | [
"0.80059516",
"0.72674114",
"0.7254808",
"0.7245049",
"0.6908353",
"0.6609588",
"0.6604705",
"0.6420626",
"0.62551856",
"0.6188927",
"0.6140078",
"0.6094711",
"0.6078365",
"0.6064797",
"0.6021782",
"0.60032964",
"0.60025203",
"0.59636873",
"0.5854956",
"0.58364797",
"0.57957107",
"0.5783181",
"0.5781861",
"0.5773041",
"0.57707965",
"0.5744029",
"0.574097",
"0.5725988",
"0.5713846",
"0.5706109"
] | 0.7898083 | 1 |
Delete all user content (AdminDeleteAllUserContents) | def admin_delete_all_user_contents(
user_id: str,
namespace: Optional[str] = None,
x_additional_headers: Optional[Dict[str, str]] = None,
**kwargs
):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = AdminDeleteAllUserContents.create(
user_id=user_id,
namespace=namespace,
)
return run_request(request, additional_headers=x_additional_headers, **kwargs) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"async def admin_delete_all_user_contents_async(\n user_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = AdminDeleteAllUserContents.create(\n user_id=user_id,\n namespace=namespace,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )",
"def delete_all_user_contents(\n user_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = DeleteAllUserContents.create(\n user_id=user_id,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)",
"async def delete_all_user_contents_async(\n user_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = DeleteAllUserContents.create(\n user_id=user_id,\n namespace=namespace,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )",
"def db_delete_user_data(self):\n util.log(\"Clearing all user data\", util.LogLevel.Info)\n self.db.db_clear_data_user()\n util.log(\"Done\", util.LogLevel.Info)",
"def delete_all_users(self):\n\n User.query.delete()",
"def delete_from_all(self, user_id):\n self.execute(TABELLE['id_users']['delete'], (user_id,))\n self.execute(TABELLE['users']['delete'], (user_id,))\n self.execute(TABELLE['punteggio']['delete'], (user_id,))\n self.execute(TABELLE['items']['delete'], (user_id,))",
"def delete_all_users():\n\tUser.drop_collection()",
"def reset_all_users():\n for user in User.objects.all():\n user.delete()",
"def delete_user():\n #TODO user delete\n pass",
"def delete_user():",
"def hard_delete_user_related_data(self):\n from contentcuration.viewsets.common import SQCount\n\n # Hard delete invitations associated to this account.\n self.sent_to.all().delete()\n self.sent_by.all().delete()\n\n editable_channels_user_query = (\n User.objects.filter(editable_channels__id=OuterRef('id'))\n .values_list('id', flat=True)\n .distinct()\n )\n non_public_channels_sole_editor = self.editable_channels.annotate(num_editors=SQCount(\n editable_channels_user_query, field=\"id\")).filter(num_editors=1, public=False)\n\n # Point sole editor non-public channels' contentnodes to orphan tree to let\n # our garbage collection delete the nodes and underlying files.\n ContentNode._annotate_channel_id(ContentNode.objects).filter(channel_id__in=list(\n non_public_channels_sole_editor.values_list(\"id\", flat=True))).update(parent_id=settings.ORPHANAGE_ROOT_ID)\n\n # Hard delete non-public channels associated with this user (if user is the only editor).\n non_public_channels_sole_editor.delete()\n\n # Hard delete non-public channel collections associated with this user (if user is the only editor).\n user_query = (\n User.objects.filter(channel_sets__id=OuterRef('id'))\n .values_list('id', flat=True)\n .distinct()\n )\n self.channel_sets.annotate(num_editors=SQCount(user_query, field=\"id\")).filter(num_editors=1, public=False).delete()\n\n # Create history!\n self.history.create(user_id=self.pk, action=user_history.RELATED_DATA_HARD_DELETION)",
"def delete_all(self):\n # delete everything\n shutil.rmtree(self.location)",
"def DeleteUser(self, delusercount, deluser):\n for i in range(delusercount):\n login = string.replace(deluser[i]['Login'], ' ', '')\n action = 'userman -D ' + login\n output = commands.getstatusoutput(action)\n print output\n updatecount, update = self.__sqlData[\"UPDATE AccUser SET ToDo = 0 WHERE Login = '%s'\" % (login)]",
"def remove_all_users(request):\n id_project = request.POST.get(\"project_id\")\n project = UtilsData.get_object_by_type_and_id(\"project\", id_project)\n if request.user.can_delete(project):\n roles = project.affecteds_set.all()\n for role in roles:\n if role.role not in (Affecteds.ROLE.Manager, Affecteds.ROLE.Admin):\n role.role = Affecteds.ROLE.Nill\n role.save()\n return HttpResponse(json.dumps(\"Ok\"),\n content_type=\"application/json\")\n else:\n logger.error(\"user %s try to remove all users to project %d \" % (request.user.username, id_project))\n return HttpResponse(json.dumps(\"error\"),\n content_type=\"application/json\")",
"def delete_user():\n del globalopts.appdata[request.user]\n del globalopts.users[request.user]\n return \"\", 200",
"def reset(self):\n Show.objects.all().delete()\n User.objects.exclude(is_superuser=True).delete()",
"def delete_user(self):\n User.user_list.remove(self)",
"def delete_user(self):\n User.user_list.remove(self)",
"def delete_user(self):\n User.user_list.remove(self)",
"def delete_all(conn, user_id) -> None:\n with conn.cursor() as cursor:\n cursor.execute(f\"\"\"delete from cart \n where id_user = {user_id}\"\"\")\n conn.commit()",
"def delete_all_bookmarks(username):\r\n trans = transaction.begin()\r\n BmarkMgr.delete_all_bookmarks(username)\r\n trans.commit()",
"def remove_all(self):\n # Post a delete all notice to the manager\n self._remove_all()",
"def remove_all(self):\n # Post a delete all notice to the manager\n self._remove_all()",
"def DeleteAllItems(self):\r\n\r\n self.DeleteRoot()",
"def tearDown(self):\n User.objects.all().delete()",
"def delete_all(self):\n raise NotImplementedError()",
"def delete_account(user):\n\n # first delete all owned categories and all the items in those\n # categories, including items that other users added to the category.\n for category in user.categories:\n for item in category.items:\n db.session.delete(item)\n db.session.delete(category)\n db.session.commit()\n\n # then delete all remaining owned items\n for item in user.items:\n db.session.delete(item)\n db.session.commit()\n\n # finally, delete the user\n db.session.delete(user)\n db.session.commit()",
"def tearDown(self):\n account_models.User.objects.all().delete()",
"def delete_user(self):\n\n User.user_list.remove(self)",
"def clear_subs_content(self):\r\n try:\r\n content = contentstore().find(self.content_location)\r\n contentstore().delete(content.get_id())\r\n except NotFoundError:\r\n pass"
] | [
"0.7695932",
"0.70614326",
"0.69029784",
"0.6857792",
"0.67886686",
"0.6783617",
"0.6705377",
"0.6401076",
"0.6337609",
"0.63262594",
"0.60780257",
"0.60508883",
"0.6026724",
"0.60263366",
"0.6008929",
"0.59786737",
"0.58621305",
"0.58621305",
"0.58621305",
"0.5858343",
"0.5858259",
"0.58557016",
"0.58557016",
"0.5851919",
"0.5835302",
"0.58027124",
"0.57613504",
"0.57604903",
"0.57319015",
"0.5728758"
] | 0.7901165 | 0 |
Delete all user content (AdminDeleteAllUserContents) | async def admin_delete_all_user_contents_async(
user_id: str,
namespace: Optional[str] = None,
x_additional_headers: Optional[Dict[str, str]] = None,
**kwargs
):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = AdminDeleteAllUserContents.create(
user_id=user_id,
namespace=namespace,
)
return await run_request_async(
request, additional_headers=x_additional_headers, **kwargs
) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def admin_delete_all_user_contents(\n user_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = AdminDeleteAllUserContents.create(\n user_id=user_id,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)",
"def delete_all_user_contents(\n user_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = DeleteAllUserContents.create(\n user_id=user_id,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)",
"async def delete_all_user_contents_async(\n user_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = DeleteAllUserContents.create(\n user_id=user_id,\n namespace=namespace,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )",
"def db_delete_user_data(self):\n util.log(\"Clearing all user data\", util.LogLevel.Info)\n self.db.db_clear_data_user()\n util.log(\"Done\", util.LogLevel.Info)",
"def delete_all_users(self):\n\n User.query.delete()",
"def delete_from_all(self, user_id):\n self.execute(TABELLE['id_users']['delete'], (user_id,))\n self.execute(TABELLE['users']['delete'], (user_id,))\n self.execute(TABELLE['punteggio']['delete'], (user_id,))\n self.execute(TABELLE['items']['delete'], (user_id,))",
"def delete_all_users():\n\tUser.drop_collection()",
"def reset_all_users():\n for user in User.objects.all():\n user.delete()",
"def delete_user():\n #TODO user delete\n pass",
"def delete_user():",
"def hard_delete_user_related_data(self):\n from contentcuration.viewsets.common import SQCount\n\n # Hard delete invitations associated to this account.\n self.sent_to.all().delete()\n self.sent_by.all().delete()\n\n editable_channels_user_query = (\n User.objects.filter(editable_channels__id=OuterRef('id'))\n .values_list('id', flat=True)\n .distinct()\n )\n non_public_channels_sole_editor = self.editable_channels.annotate(num_editors=SQCount(\n editable_channels_user_query, field=\"id\")).filter(num_editors=1, public=False)\n\n # Point sole editor non-public channels' contentnodes to orphan tree to let\n # our garbage collection delete the nodes and underlying files.\n ContentNode._annotate_channel_id(ContentNode.objects).filter(channel_id__in=list(\n non_public_channels_sole_editor.values_list(\"id\", flat=True))).update(parent_id=settings.ORPHANAGE_ROOT_ID)\n\n # Hard delete non-public channels associated with this user (if user is the only editor).\n non_public_channels_sole_editor.delete()\n\n # Hard delete non-public channel collections associated with this user (if user is the only editor).\n user_query = (\n User.objects.filter(channel_sets__id=OuterRef('id'))\n .values_list('id', flat=True)\n .distinct()\n )\n self.channel_sets.annotate(num_editors=SQCount(user_query, field=\"id\")).filter(num_editors=1, public=False).delete()\n\n # Create history!\n self.history.create(user_id=self.pk, action=user_history.RELATED_DATA_HARD_DELETION)",
"def delete_all(self):\n # delete everything\n shutil.rmtree(self.location)",
"def DeleteUser(self, delusercount, deluser):\n for i in range(delusercount):\n login = string.replace(deluser[i]['Login'], ' ', '')\n action = 'userman -D ' + login\n output = commands.getstatusoutput(action)\n print output\n updatecount, update = self.__sqlData[\"UPDATE AccUser SET ToDo = 0 WHERE Login = '%s'\" % (login)]",
"def remove_all_users(request):\n id_project = request.POST.get(\"project_id\")\n project = UtilsData.get_object_by_type_and_id(\"project\", id_project)\n if request.user.can_delete(project):\n roles = project.affecteds_set.all()\n for role in roles:\n if role.role not in (Affecteds.ROLE.Manager, Affecteds.ROLE.Admin):\n role.role = Affecteds.ROLE.Nill\n role.save()\n return HttpResponse(json.dumps(\"Ok\"),\n content_type=\"application/json\")\n else:\n logger.error(\"user %s try to remove all users to project %d \" % (request.user.username, id_project))\n return HttpResponse(json.dumps(\"error\"),\n content_type=\"application/json\")",
"def delete_user():\n del globalopts.appdata[request.user]\n del globalopts.users[request.user]\n return \"\", 200",
"def reset(self):\n Show.objects.all().delete()\n User.objects.exclude(is_superuser=True).delete()",
"def delete_user(self):\n User.user_list.remove(self)",
"def delete_user(self):\n User.user_list.remove(self)",
"def delete_user(self):\n User.user_list.remove(self)",
"def delete_all(conn, user_id) -> None:\n with conn.cursor() as cursor:\n cursor.execute(f\"\"\"delete from cart \n where id_user = {user_id}\"\"\")\n conn.commit()",
"def delete_all_bookmarks(username):\r\n trans = transaction.begin()\r\n BmarkMgr.delete_all_bookmarks(username)\r\n trans.commit()",
"def remove_all(self):\n # Post a delete all notice to the manager\n self._remove_all()",
"def remove_all(self):\n # Post a delete all notice to the manager\n self._remove_all()",
"def DeleteAllItems(self):\r\n\r\n self.DeleteRoot()",
"def tearDown(self):\n User.objects.all().delete()",
"def delete_all(self):\n raise NotImplementedError()",
"def delete_account(user):\n\n # first delete all owned categories and all the items in those\n # categories, including items that other users added to the category.\n for category in user.categories:\n for item in category.items:\n db.session.delete(item)\n db.session.delete(category)\n db.session.commit()\n\n # then delete all remaining owned items\n for item in user.items:\n db.session.delete(item)\n db.session.commit()\n\n # finally, delete the user\n db.session.delete(user)\n db.session.commit()",
"def tearDown(self):\n account_models.User.objects.all().delete()",
"def delete_user(self):\n\n User.user_list.remove(self)",
"def clear_subs_content(self):\r\n try:\r\n content = contentstore().find(self.content_location)\r\n contentstore().delete(content.get_id())\r\n except NotFoundError:\r\n pass"
] | [
"0.7901165",
"0.70614326",
"0.69029784",
"0.6857792",
"0.67886686",
"0.6783617",
"0.6705377",
"0.6401076",
"0.6337609",
"0.63262594",
"0.60780257",
"0.60508883",
"0.6026724",
"0.60263366",
"0.6008929",
"0.59786737",
"0.58621305",
"0.58621305",
"0.58621305",
"0.5858343",
"0.5858259",
"0.58557016",
"0.58557016",
"0.5851919",
"0.5835302",
"0.58027124",
"0.57613504",
"0.57604903",
"0.57319015",
"0.5728758"
] | 0.7695932 | 1 |
Delete all user group (AdminDeleteAllUserGroup) | def admin_delete_all_user_group(
user_id: str,
namespace: Optional[str] = None,
x_additional_headers: Optional[Dict[str, str]] = None,
**kwargs
):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = AdminDeleteAllUserGroup.create(
user_id=user_id,
namespace=namespace,
)
return run_request(request, additional_headers=x_additional_headers, **kwargs) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"async def admin_delete_all_user_group_async(\n user_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = AdminDeleteAllUserGroup.create(\n user_id=user_id,\n namespace=namespace,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )",
"def delete_all_group_member(self, group_id):\n url = self.groups_url + \"/%s/members\" % group_id\n return requests.delete(url, headers=self.headers)",
"async def delete_all_user_group_async(\n user_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = DeleteAllUserGroup.create(\n user_id=user_id,\n namespace=namespace,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )",
"def delete_all_user_group(\n user_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = DeleteAllUserGroup.create(\n user_id=user_id,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)",
"def delete_all_users(self):\n\n User.query.delete()",
"def delete_all_users():\n\tUser.drop_collection()",
"def delete_user_group_values(self, id_user:int, id_group:int) -> None:\n try:\n self.cursor.execute(f\"DELETE FROM {table_user_group_connect} WHERE id_user={id_user} AND id_group={id_group};\")\n self.connection.commit()\n except Exception as e:\n msg = f\"We faced problems ith deletion from {table_user_group_connect} table, Mistake: {e}\"\n self.proceed_error(msg)",
"def test_groups_group_users_delete(self):\n pass",
"def test_groups_group_users_delete(self):\n pass",
"def delete_group(user):\n return 'do some magic!'",
"def cleanup_user_groups(event):\n name = event.object.name\n\n if name.startswith(\"group:\"):\n principals = get_principals()\n users_groups = [p for p in principals if name in principals[p].groups]\n for user_or_group in users_groups:\n principals[user_or_group].groups.remove(name)\n\n DBSession.query(LocalGroup).filter(\n LocalGroup.principal_name == name).delete()",
"def test_groups_group_users_user_delete(self):\n pass",
"def test_groups_group_users_user_delete(self):\n pass",
"def test_cannot_remove_all_admins(self):\n r = self.app.get('/admin/groups/')\n admin_holder = r.html.find(\n 'table', {'id': 'usergroup_admin'}).findAll('tr')[1]\n admin_id = admin_holder['data-group']\n users = admin_holder.find('ul', {'class': 'users'}).findAll(\n 'li', {'class': 'deleter'})\n assert len(users) == 1\n r = self.app.post('/admin/groups/remove_user', params={\n 'role_id': admin_id,\n 'username': 'admin1'})\n assert r.json[\n 'error'] == 'You must have at least one user with the Admin role.'\n r = self.app.get('/admin/groups/')\n admin_holder = r.html.find(\n 'table', {'id': 'usergroup_admin'}).findAll('tr')[1]\n users = admin_holder.find('ul', {'class': 'users'}).findAll(\n 'li', {'class': 'deleter'})\n assert len(users) == 1",
"def delete_all_groups(self):\n DELETED = 204\n for group in self.get_list_groups():\n codes = [\n self.delete_all_group_member(group[\"id\"]).status_code,\n self.delete_group(group[\"id\"]).status_code\n ]\n\n res = filter(lambda a: a != DELETED, codes)\n if res:\n return res[0]\n\n return DELETED",
"def reset_all_users():\n for user in User.objects.all():\n user.delete()",
"def delete_user_group(self, token, userGroup):\n requestUser = self.get_username_from_token(token)\n if self.check_user_has_owner_clearance(requestUser, userGroup):\n dataBase = self.read_database()\n if userGroup in dataBase['userGroups']:\n del dataBase['userGroups'][userGroup]\n self.write_database(dataBase)\n return\n else:\n raise GroupDoesNotExistException(\"Group does not exist\")\n else:\n raise UserPermissionException(\"User does not have write access\")",
"def delete_group(self, group_o):\n class_query = ClassQuery('fvTenant')\n class_query.propFilter = 'eq(fvTenant.name, \"' + group_o.name + '\")'\n tenant_list = self.moDir.query(class_query)\n if len(tenant_list) > 0:\n tenant_list[0].delete()\n self.commit(tenant_list[0])",
"def del_from_groups(self, username, groups):\n pass",
"def delete_group(_request, group_id):\n group = models.UserGroup.get_by_id(int(group_id))\n group.delete()\n\n url = urlresolvers.reverse('views.admin.list_groups')\n return http.HttpResponseRedirect(url)",
"def test_delete_groups(self):\n pass",
"def db_delete_user_data(self):\n util.log(\"Clearing all user data\", util.LogLevel.Info)\n self.db.db_clear_data_user()\n util.log(\"Done\", util.LogLevel.Info)",
"def delete(self, id):\r\n return UserGroupService.removeUserGroup(self, id)",
"def delete_from_all(self, user_id):\n self.execute(TABELLE['id_users']['delete'], (user_id,))\n self.execute(TABELLE['users']['delete'], (user_id,))\n self.execute(TABELLE['punteggio']['delete'], (user_id,))\n self.execute(TABELLE['items']['delete'], (user_id,))",
"def test_user_group_controller_delete(self):\n pass",
"def delete_all(self):\n raise NotImplementedError()",
"def admin_delete_all_user_contents(\n user_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = AdminDeleteAllUserContents.create(\n user_id=user_id,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)",
"def deleteGroup(request):\n \n if request.method == 'POST':\n \n form = DeleteGroupForm(request.POST)\n \n if form.is_valid():\n \n cd = form.cleaned_data\n \n try:\n \n #Delete records from m2m of Users & Groups for selected groups\n for eachGroup in cd['group_id']:\n Group_User.objects.filter(group = eachGroup.id).delete()\n \n #Delete Group(s)\n for eachGroup in cd['group_id']:\n Group.objects.filter(id = eachGroup.id).delete()\n \n except:\n \n error = 'Unable to Delete Groups!'\n return render_to_response('deletegroup.html', \n {'form': form, 'error': error},\n context_instance=RequestContext(request))\n \n return HttpResponseRedirect('/deletegroup/success/')\n \n else:\n \n return render_to_response('deletegroup.html',\n {'form': form}, \n context_instance=RequestContext(request)) \n \n else:\n \n form = DeleteGroupForm()\n \n return render_to_response('deletegroup.html', \n {'form': form}, \n context_instance=RequestContext(request))",
"def del_user_from_group(self,username,groupname):\n\n if not self.check_prereqs():\n raise StopIteration\n\n db = self.env.get_db_cnx()\n cursor = db.cursor()\n\n query=self.create_query(self.sql_del_user_from_group_query,{'username':username,'groupname':groupname,'username_field':self.sql_username_field,'groupname_field':self.sql_groupname_field})\n self.log.debug(\"sqlflexibleauthstore: del_user_from_group: %s\" % (query,))\n\n cursor.execute(query)\n db.commit()\n return True",
"def delete_group(groupname):\n response = jsonify(admin.delete_group(current_app.scoped_session(), groupname))\n return response"
] | [
"0.77317256",
"0.70222354",
"0.69721305",
"0.68975395",
"0.6837586",
"0.6703791",
"0.6695783",
"0.65806043",
"0.65806043",
"0.6563135",
"0.65359503",
"0.64795494",
"0.64795494",
"0.64432955",
"0.6440339",
"0.6369642",
"0.6265606",
"0.6232593",
"0.61661345",
"0.6165809",
"0.61367804",
"0.6136129",
"0.6127698",
"0.61260796",
"0.61248875",
"0.6101617",
"0.6073573",
"0.60620505",
"0.60159534",
"0.5997192"
] | 0.7842726 | 0 |
Delete all user group (AdminDeleteAllUserGroup) | async def admin_delete_all_user_group_async(
user_id: str,
namespace: Optional[str] = None,
x_additional_headers: Optional[Dict[str, str]] = None,
**kwargs
):
if namespace is None:
namespace, error = get_services_namespace()
if error:
return None, error
request = AdminDeleteAllUserGroup.create(
user_id=user_id,
namespace=namespace,
)
return await run_request_async(
request, additional_headers=x_additional_headers, **kwargs
) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def admin_delete_all_user_group(\n user_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = AdminDeleteAllUserGroup.create(\n user_id=user_id,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)",
"def delete_all_group_member(self, group_id):\n url = self.groups_url + \"/%s/members\" % group_id\n return requests.delete(url, headers=self.headers)",
"async def delete_all_user_group_async(\n user_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = DeleteAllUserGroup.create(\n user_id=user_id,\n namespace=namespace,\n )\n return await run_request_async(\n request, additional_headers=x_additional_headers, **kwargs\n )",
"def delete_all_user_group(\n user_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = DeleteAllUserGroup.create(\n user_id=user_id,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)",
"def delete_all_users(self):\n\n User.query.delete()",
"def delete_all_users():\n\tUser.drop_collection()",
"def delete_user_group_values(self, id_user:int, id_group:int) -> None:\n try:\n self.cursor.execute(f\"DELETE FROM {table_user_group_connect} WHERE id_user={id_user} AND id_group={id_group};\")\n self.connection.commit()\n except Exception as e:\n msg = f\"We faced problems ith deletion from {table_user_group_connect} table, Mistake: {e}\"\n self.proceed_error(msg)",
"def test_groups_group_users_delete(self):\n pass",
"def test_groups_group_users_delete(self):\n pass",
"def delete_group(user):\n return 'do some magic!'",
"def cleanup_user_groups(event):\n name = event.object.name\n\n if name.startswith(\"group:\"):\n principals = get_principals()\n users_groups = [p for p in principals if name in principals[p].groups]\n for user_or_group in users_groups:\n principals[user_or_group].groups.remove(name)\n\n DBSession.query(LocalGroup).filter(\n LocalGroup.principal_name == name).delete()",
"def test_groups_group_users_user_delete(self):\n pass",
"def test_groups_group_users_user_delete(self):\n pass",
"def test_cannot_remove_all_admins(self):\n r = self.app.get('/admin/groups/')\n admin_holder = r.html.find(\n 'table', {'id': 'usergroup_admin'}).findAll('tr')[1]\n admin_id = admin_holder['data-group']\n users = admin_holder.find('ul', {'class': 'users'}).findAll(\n 'li', {'class': 'deleter'})\n assert len(users) == 1\n r = self.app.post('/admin/groups/remove_user', params={\n 'role_id': admin_id,\n 'username': 'admin1'})\n assert r.json[\n 'error'] == 'You must have at least one user with the Admin role.'\n r = self.app.get('/admin/groups/')\n admin_holder = r.html.find(\n 'table', {'id': 'usergroup_admin'}).findAll('tr')[1]\n users = admin_holder.find('ul', {'class': 'users'}).findAll(\n 'li', {'class': 'deleter'})\n assert len(users) == 1",
"def delete_all_groups(self):\n DELETED = 204\n for group in self.get_list_groups():\n codes = [\n self.delete_all_group_member(group[\"id\"]).status_code,\n self.delete_group(group[\"id\"]).status_code\n ]\n\n res = filter(lambda a: a != DELETED, codes)\n if res:\n return res[0]\n\n return DELETED",
"def reset_all_users():\n for user in User.objects.all():\n user.delete()",
"def delete_user_group(self, token, userGroup):\n requestUser = self.get_username_from_token(token)\n if self.check_user_has_owner_clearance(requestUser, userGroup):\n dataBase = self.read_database()\n if userGroup in dataBase['userGroups']:\n del dataBase['userGroups'][userGroup]\n self.write_database(dataBase)\n return\n else:\n raise GroupDoesNotExistException(\"Group does not exist\")\n else:\n raise UserPermissionException(\"User does not have write access\")",
"def delete_group(self, group_o):\n class_query = ClassQuery('fvTenant')\n class_query.propFilter = 'eq(fvTenant.name, \"' + group_o.name + '\")'\n tenant_list = self.moDir.query(class_query)\n if len(tenant_list) > 0:\n tenant_list[0].delete()\n self.commit(tenant_list[0])",
"def del_from_groups(self, username, groups):\n pass",
"def delete_group(_request, group_id):\n group = models.UserGroup.get_by_id(int(group_id))\n group.delete()\n\n url = urlresolvers.reverse('views.admin.list_groups')\n return http.HttpResponseRedirect(url)",
"def test_delete_groups(self):\n pass",
"def db_delete_user_data(self):\n util.log(\"Clearing all user data\", util.LogLevel.Info)\n self.db.db_clear_data_user()\n util.log(\"Done\", util.LogLevel.Info)",
"def delete(self, id):\r\n return UserGroupService.removeUserGroup(self, id)",
"def delete_from_all(self, user_id):\n self.execute(TABELLE['id_users']['delete'], (user_id,))\n self.execute(TABELLE['users']['delete'], (user_id,))\n self.execute(TABELLE['punteggio']['delete'], (user_id,))\n self.execute(TABELLE['items']['delete'], (user_id,))",
"def test_user_group_controller_delete(self):\n pass",
"def delete_all(self):\n raise NotImplementedError()",
"def admin_delete_all_user_contents(\n user_id: str,\n namespace: Optional[str] = None,\n x_additional_headers: Optional[Dict[str, str]] = None,\n **kwargs\n):\n if namespace is None:\n namespace, error = get_services_namespace()\n if error:\n return None, error\n request = AdminDeleteAllUserContents.create(\n user_id=user_id,\n namespace=namespace,\n )\n return run_request(request, additional_headers=x_additional_headers, **kwargs)",
"def deleteGroup(request):\n \n if request.method == 'POST':\n \n form = DeleteGroupForm(request.POST)\n \n if form.is_valid():\n \n cd = form.cleaned_data\n \n try:\n \n #Delete records from m2m of Users & Groups for selected groups\n for eachGroup in cd['group_id']:\n Group_User.objects.filter(group = eachGroup.id).delete()\n \n #Delete Group(s)\n for eachGroup in cd['group_id']:\n Group.objects.filter(id = eachGroup.id).delete()\n \n except:\n \n error = 'Unable to Delete Groups!'\n return render_to_response('deletegroup.html', \n {'form': form, 'error': error},\n context_instance=RequestContext(request))\n \n return HttpResponseRedirect('/deletegroup/success/')\n \n else:\n \n return render_to_response('deletegroup.html',\n {'form': form}, \n context_instance=RequestContext(request)) \n \n else:\n \n form = DeleteGroupForm()\n \n return render_to_response('deletegroup.html', \n {'form': form}, \n context_instance=RequestContext(request))",
"def del_user_from_group(self,username,groupname):\n\n if not self.check_prereqs():\n raise StopIteration\n\n db = self.env.get_db_cnx()\n cursor = db.cursor()\n\n query=self.create_query(self.sql_del_user_from_group_query,{'username':username,'groupname':groupname,'username_field':self.sql_username_field,'groupname_field':self.sql_groupname_field})\n self.log.debug(\"sqlflexibleauthstore: del_user_from_group: %s\" % (query,))\n\n cursor.execute(query)\n db.commit()\n return True",
"def delete_group(groupname):\n response = jsonify(admin.delete_group(current_app.scoped_session(), groupname))\n return response"
] | [
"0.7842726",
"0.70222354",
"0.69721305",
"0.68975395",
"0.6837586",
"0.6703791",
"0.6695783",
"0.65806043",
"0.65806043",
"0.6563135",
"0.65359503",
"0.64795494",
"0.64795494",
"0.64432955",
"0.6440339",
"0.6369642",
"0.6265606",
"0.6232593",
"0.61661345",
"0.6165809",
"0.61367804",
"0.6136129",
"0.6127698",
"0.61260796",
"0.61248875",
"0.6101617",
"0.6073573",
"0.60620505",
"0.60159534",
"0.5997192"
] | 0.77317256 | 1 |
Connects to the source and target databases, then migrates a list of defined schemas. | def main():
msg = """
----------------------------------------------------- \n
Running this script will delete the target database! \n
And it will close connections on the target database. \n
Are you sure you wish to continue? (y/n) \n
----------------------------------------------------- \n
\n"""
if input(msg).lower() != "y":
sys.exit()
# create the logfile
oracle2postgres.create_logfile()
# get settings for migration
migration_config = oracle2postgres.get_migration_config()
source_config = oracle2postgres.get_source_config()
target_config = oracle2postgres.get_target_config()
# check the schema exist on the source database
source_engine = oracle2postgres.connect_to_source(source_config)
oracle2postgres.check_schema_exist(source_engine,source_config['schema_list'])
# check and remove null characters in strings
oracle2postgres.check_for_nulls(source_engine,source_config['schema_list'],remove=True)
# create a new database on the target
# WARNING: deletes target database before creation!
target_engine = oracle2postgres.connect_to_target(target_config)
oracle2postgres.drop_connections(target_config['database'],target_engine)
oracle2postgres.drop_database(target_config['database'],target_engine)
oracle2postgres.create_database(target_config['database'],target_engine)
# create the schema on the target database
target_engine = oracle2postgres.connect_to_target(target_config,target_config['database'])
oracle2postgres.create_target_schema(source_config['schema_list'],source_engine,target_engine)
# run the migration
oracle2postgres.migrate(source_config,target_config,migration_config) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def migrate(ctx):\n connecter = ScalingoInterface(ctx.obj)\n connecter.manage_py(\"migrate\")",
"def migrateTables(self):\n tables = self.client_from.tables.list(['columns'])\n if len(tables) > 0:\n for table in tables:\n self.client_to.tables.update(table['tableId'], json.dumps(table))\n else:\n print(\"No tables to migrate!\")\n return\n print(len(tables) + \" Tables migrated!\")",
"def _sync_databases(self):\n host, port = self._src.client().address\n log.info('sync databases from %s:%d' % (host, port))\n for dbname in self._src.client().database_names():\n if dbname in self._ignore_dbs:\n log.info(\"skip database '%s'\" % dbname)\n continue\n if not self._conf.data_filter.valid_db(dbname):\n log.info(\"skip database '%s'\" % dbname)\n continue\n self._sync_database(dbname)\n log.info('all databases done')",
"def setupDatabases(con, options, dbList):\n currentDatabases = dbGetFirstColumnAsMap(con, \"select datname from pg_database where datistemplate = false\")\n currentRolenames = dbGetFirstColumnAsMap(con, \"select rolname from pg_roles\")\n trace(\"currentDatabases = \" + str(currentDatabases))\n for dbName in dbList:\n trace(\"dbName='%s'\" % str(dbName))\n setupDatabase(con, options, currentDatabases, currentRolenames, dbName, dbList[dbName])",
"def migrate_database(self):\n\n self.db.migrate_database()",
"def run_migrations_online():\n configuration = config.get_section(config.config_ini_section)\n configuration[\"sqlalchemy.url\"] = get_url()\n connectable = engine_from_config(\n configuration, prefix=\"sqlalchemy.\", poolclass=pool.NullPool\n )\n\n with connectable.connect() as connection:\n context.configure(\n connection=connection,\n target_metadata=target_metadata,\n compare_type=True,\n include_schemas=True, # schemas,\n version_table_schema=POSTGRES_SCHEMA,\n include_object=include_schemas([None, POSTGRES_SCHEMA])\n )\n with context.begin_transaction():\n\n context.execute(f\"CREATE SCHEMA IF NOT EXISTS {POSTGRES_SCHEMA};\")\n context.execute(f\"SET search_path TO {POSTGRES_SCHEMA}\")\n context.run_migrations()",
"def migrate(self):\n\tpass",
"def fill_sql_to_mysql():\n for server in servers:\n stop_server_cmd = \"sshpass -p %s ssh root@%s 'sh /data0/update_locate_server.sh'\" % (password, server.target_lan_ip)\n os.system(stop_server_cmd)\n find_target_init_sql_cmd = \"sshpass -p %s ssh root@%s 'ls /data0/src/%s_*.sql'\" % (password, server.target_lan_ip, server.server_name_pre)\n sql_file_full_path = os.popen(find_target_init_sql_cmd).readline()\n input_sql_cmd = \"sshpass -p %s ssh root@%s '/usr/local/mysql/bin/mysql -uroot -p%s -h127.0.0.1 -P%s wg_lj < %s' \" \\\n % (password, server.target_lan_ip, mysql_pw, server.target_mysql_port, sql_file_full_path)\n logger.info(\"Begin to dump sql to the mysql! cmd is = \" + input_sql_cmd)\n os.system(input_sql_cmd)",
"def run_migrations_online():\n db_host = context.get_x_argument(as_dictionary=True).get('DB_HOST')\n db_port = context.get_x_argument(as_dictionary=True).get('DB_PORT')\n db_user = context.get_x_argument(as_dictionary=True).get('DB_USER')\n db_password = context.get_x_argument(as_dictionary=True).get('DB_PASSWORD')\n db_name = context.get_x_argument(as_dictionary=True).get('DB_NAME')\n\n try_to_create_database(db_host, db_port, db_user, db_password, db_name)\n\n connectable = get_connectable(db_host, db_port, db_user, db_password, db_name)\n with connectable.connect() as connection:\n context.configure(\n connection=connection,\n target_metadata=target_metadata,\n compare_type=True,\n render_item=render_item\n )\n with context.begin_transaction():\n context.run_migrations()",
"def run_migrations(self, migrations):\n for migration in migrations:\n name = migration[\"name\"]\n migration[\"script\"] = self.get_sql_script(name)\n\n if self.dry_run:\n for migration in migrations:\n print(f'---------------- {migration[\"name\"]} ----------------')\n print(migration[\"script\"])\n return\n\n if not self.accept_all and not self.prompt_for_migrations(migrations):\n return\n\n applied_migrations = []\n with self.target_db.begin() as conn:\n for migration in migrations:\n name = migration[\"name\"]\n script = migration[\"script\"]\n if self.apply_migrations:\n print(f\"Applying {name}\")\n conn.execute(script)\n applied_migrations.append(name)\n if self.register:\n self.register_migrations(applied_migrations)",
"def migrate_database():\n log('Migrating the keystone database.', level=INFO)\n service_stop(keystone_service())\n # NOTE(jamespage) > icehouse creates a log file as root so use\n # sudo to execute as keystone otherwise keystone won't start\n # afterwards.\n cmd = ['sudo', '-u', 'keystone', 'keystone-manage', 'db_sync']\n subprocess.check_output(cmd)\n service_start(keystone_service())\n time.sleep(10)\n peer_store('db-initialised', 'True')",
"def structure_and_repopulate_db() -> None:\n with open('db.sql', encoding=\"utf-8\") as f:\n commands = f.read().strip().split(';')\n commands = [command.strip() for command in commands]\n for command in commands:\n my_cursor.execute(command)\n my_db.commit()\n print('Source structure created, data repopulated')",
"def main():\n cur, conn = connect('dwh.cfg')\n \n set_schema = schema_queries[1]\n cur.execute(set_schema)\n \n print('Loading Staging Tables.')\n load_staging_tables(cur, conn)\n \n print('Inserting Rows.')\n insert_tables(cur, conn)\n\n \n conn.close()",
"def main():\n config = configparser.ConfigParser()\n config.read('dwh.cfg')\n\n conn = psycopg2.connect(\"host={} dbname={} user={} password={} port={}\".format(*config['CLUSTER'].values()))\n cur = conn.cursor()\n \n load_staging_tables(cur, conn)\n insert_tables(cur, conn)\n\n conn.close()",
"def main():\n config = configparser.ConfigParser()\n config.read('dwh.cfg')\n\n conn = psycopg2.connect(\"host={} dbname={} user={} password={} port={}\".format(*config['CLUSTER'].values()))\n cur = conn.cursor()\n \n load_staging_tables(cur, conn)\n insert_tables(cur, conn)\n\n conn.close()",
"def migrate_db():\n Base.metadata.create_all(ENGINE)",
"def migration():",
"def main():\n \n config = configparser.ConfigParser()\n config.read('dwh.cfg')\n\n conn = psycopg2.connect(\"host={} dbname={} user={} password={} port={}\".format(*config['CLUSTER'].values()))\n cur = conn.cursor()\n \n load_staging_tables(cur, conn)\n insert_tables(cur, conn)\n\n conn.close()",
"def main():\n config = configparser.ConfigParser()\n config.read('dwh.cfg')\n\n conn = psycopg2.connect(\"host={} dbname={} user={} password={} \\\n port={}\".format(*config['CLUSTER'].values()))\n cur = conn.cursor()\n \n load_staging_tables(cur, conn)\n insert_tables(cur, conn)\n\n conn.close()",
"def add_schema_copying_to_pipeline(pipeline: Pipeline, schema_name,\n source_db_alias: str, target_db_alias: str,\n max_number_of_parallel_tasks: int = 4):\n task_id = \"copy_schema\"\n description = f\"Copies the {schema_name} schema to the {target_db_alias} db\"\n commands = []\n if pipeline.final_node:\n assert (isinstance(pipeline.final_node, Task))\n description = pipeline.final_node.description + ' + ' + description\n task_id = pipeline.final_node.id + '_and_' + task_id\n commands = pipeline.final_node.commands\n pipeline.remove(pipeline.final_node)\n\n pipeline.add_final(\n ParallelCopySchema(id=task_id, description=description, schema_name=schema_name,\n source_db_alias=source_db_alias, target_db_alias=target_db_alias,\n max_number_of_parallel_tasks=max_number_of_parallel_tasks,\n commands_before=commands[:-1], commands_after=commands[-1:]))",
"def mergeDatabases(self, toDB, fromDB):\n for aTable in fromDB.getAllTableNames():\n # first copy table structure\n firstCreation = toDB.createTableIfNotExists(aTable, fromDB.getTableInfo(aTable))\n if firstCreation:\n # just copy\n toDB.insertIntoTable(aTable, fromDB.selectFromTable(aTable))\n else: # treatment depends on table type\n if \"lookup\" in aTable: continue # if it's a lookup table, nothing to be done\n # not a lookup table: shift up event_id by the current existing max\n currentEventIdMax = toDB.selectFromTable(aTable, \"max(event_id)\")[0][0]\n def shiftEID(row):\n newRow = list(row)\n newRow[0] += currentEventIdMax\n return newRow\n toDB.insertIntoTable(aTable, list(map(shiftEID, fromDB.selectFromTable(aTable))))\n toDB.closeConnection() # commit",
"def load_staging_tables(cur, conn):\n for query in copy_table_queries:\n print('staging', query)\n cur.execute(query)\n conn.commit()",
"def migrate_external_courseware(apps, schema_editor):\n\n migrate_external_courses(apps, schema_editor)\n migrate_external_programs(apps, schema_editor)",
"def main():\n config = configparser.ConfigParser()\n config.read('dwh.cfg')\n\n conn = psycopg2.connect(\"host={} dbname={} user={} password={} port={}\".format(config['CLUSTER']['HOST'], config['CLUSTER']['DB_NAME'], config['CLUSTER']['DB_USER'], config['CLUSTER']['DB_PASSWORD'], config['CLUSTER']['DB_PORT']))\n cur = conn.cursor()\n \n load_staging_tables(cur, conn)\n insert_tables(cur, conn)\n\n conn.close()",
"def load_staging_tables(cur, conn):\n for query in copy_table_queries:\n try:\n cur.execute(query)\n conn.commit()\n except Exception as e:\n print(e)",
"def databaseConnect():\n connection = None\n try:\n params = config()\n connection = psycopg2.connect(**params)\n cursor = connection.cursor()\n\n \"\"\"for query in createTables():\n cursor.execute(query)\"\"\"\n [cursor.execute(query) for query in createTables()]\n\n cursor.close()\n connection.commit()\n\n except (Exception, psycopg2.DatabaseError) as errorDescription:\n print('Error description:\\n', errorDescription)\n finally:\n if connection is not None:\n connection.close()",
"def migratedb(rollback=False):\n\n require(\"virtualenv_path\", \"project_path\", \"sudo_user\")\n\n #\n # Some things need to be done first (i.e. if they need a different\n # database connection or some custom args)\n #\n if \"migratedb_first\" in env:\n\n for app, args in env.migratedb_first.iteritems():\n\n version = get_south_migrate_version(app, rollback)\n\n migrate_app_db(app, version, args)\n\n #\n # Do the rest afterwards\n #\n if has_version_info():\n\n apps = env.south_migrations.keys()\n\n for app in apps:\n\n print app\n\n version = get_south_migrate_version(app, rollback)\n\n migrate_app_db(app, version)\n\n #\n # If we know nothing, just migrate everything\n #\n else:\n migrate_app_db()",
"def load_staging_tables(cur, conn):\n for query in copy_table_queries:\n try:\n cur.execute(query)\n conn.commit()\n \n except psycopg2.Error as e:\n print(e)",
"def run_migrations_online():\n connectable = engine_from_config(\n config.get_section(config.config_ini_section),\n prefix=\"sqlalchemy.\",\n poolclass=pool.NullPool,\n )\n\n with connectable.connect() as connection:\n context.configure(\n connection=connection,\n target_metadata=target_metadata,\n include_object=include_object,\n include_schemas=True,\n )\n\n with context.begin_transaction():\n context.run_migrations()",
"def main(dest_dir, db_host, db_port, db_name, db_schema, db_username, db_password, ssl_mode,\n force, cores, memory_per_core, default_partition_col, partition_col,\n nr_partitions):\n partition_col_dict = {k: v for k, v in partition_col}\n nr_partitions_dict = {k: v for k, v in nr_partitions}\n\n dest_dir_path = Path(dest_dir)\n dest_dir_path.mkdir(exist_ok=True, parents=True)\n\n db_params = PostgresDBParams(user=db_username, host=db_host, password=db_password,\n port=db_port, db=db_name, schema=db_schema, ssl_mode=ssl_mode)\n\n with PostgresDBConnectionWrapper(db_params) as db_wrapper:\n tables = db_wrapper.list_tables()\n\n spark_cfg = spark_wrapper.default_spark_config(cores, memory_per_core, use_utc=True)\n with spark_wrapper.create_spark_session_from_config(spark_cfg) as spark:\n dumper = PostgresTableDumper(db_params, spark)\n for t in tables:\n logging.info('Dumping table %s', t)\n\n tbl_path = Path(dest_dir_path, t)\n\n if not tbl_path.exists() and not force:\n default_col = None\n\n if default_partition_col:\n cols = db_wrapper.list_columns(t)\n if default_partition_col in cols:\n default_col = default_partition_col\n else:\n logging.warning(\n \"Default partition column %s not found among columns [%s]\",\n default_partition_col, ','.join(cols))\n\n p_col = partition_col_dict.get(t, default_col)\n nr_part = nr_partitions_dict.get(t, None)\n\n dumper.dump_table(t, tbl_path, p_col, nr_part)\n else:\n logging.info('Path %s already exists, not dumping table %s',\n tbl_path, t)\n\n counts_match = row_counts_match(tbl_path, t, db_wrapper, spark)\n\n if counts_match:\n logging.info(\"Counts for %s match\", t)\n else:\n logging.error(\"Counts for %s don't match\", t)"
] | [
"0.66270685",
"0.65551245",
"0.63657236",
"0.6327135",
"0.62920964",
"0.6264679",
"0.6139678",
"0.59504956",
"0.5932942",
"0.5913813",
"0.58875227",
"0.58672184",
"0.5828043",
"0.57921326",
"0.57921326",
"0.5790024",
"0.57847106",
"0.57833004",
"0.577031",
"0.57422334",
"0.57337934",
"0.5729654",
"0.57262105",
"0.5706353",
"0.56936765",
"0.5688843",
"0.5671195",
"0.5661009",
"0.5658899",
"0.56495243"
] | 0.6691668 | 0 |
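One step in the migration document above is worth expanding: PostgreSQL refuses to drop a database while other sessions are connected to it, which is why drop_connections runs before drop_database. A minimal sketch of what such a helper typically does, assuming plain SQLAlchemy and standard PostgreSQL catalog functions (the helper name, engine wiring, and DSN below are illustrative, not taken from oracle2postgres):

# Sketch only: terminates every other session on `database` so a
# subsequent DROP DATABASE can succeed. pg_terminate_backend and
# pg_stat_activity are standard PostgreSQL; everything else is assumed.
from sqlalchemy import create_engine, text

def drop_connections_sketch(database, engine):
    query = text(
        "SELECT pg_terminate_backend(pid) "
        "FROM pg_stat_activity "
        "WHERE datname = :db AND pid <> pg_backend_pid()"
    )
    with engine.connect() as conn:
        conn.execute(query, {"db": database})

# Hypothetical usage; the DSN is a placeholder:
# engine = create_engine("postgresql://user:pass@localhost/postgres")
# drop_connections_sketch("target_db", engine)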
Getter method for hop_id, mapped from YANG variable /input/LocatorRecord/rloc/explicit_locator_path/hop/hop_id (string) | def _get_hop_id(self):
return self.__hop_id | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _set_hop_id(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"hop-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"hop_id must be of a type compatible with base=unicode, is_leaf=True, yang_name=\"hop-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True\"\"\")\n self.__hop_id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_hop(self, v, load=False):\n try:\n t = YANGDynClass(v,base=YANGListType(\"hop_id\",yc_hop_pyangbind_example__input_LocatorRecord_rloc_explicit_locator_path_hop, yang_name=\"hop\", parent=self, is_container='list', user_ordered=True, path_helper=self._path_helper), is_container='list', yang_name=\"hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"hop must be of a type compatible with base=YANGListType(\"hop_id\",yc_hop_pyangbind_example__input_LocatorRecord_rloc_explicit_locator_path_hop, yang_name=\"hop\", parent=self, is_container='list', user_ordered=True, path_helper=self._path_helper), is_container='list', yang_name=\"hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__hop = t\n if hasattr(self, '_set'):\n self._set()",
"def _get_hop(self):\n return self.__hop",
"def ipv6_gateway_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ipv6_gateway_id\")",
"def router_id(obj, data):\n router_id_configuration = \"set protocols ospf parameters router-id %s\"\n try:\n # Configure router id\n obj.execute(router_id_configuration % data['config']['id'])\n return {\"Result\": \"Configured successfully\"}\n except Exception, e:\n return {\"Error\": e}",
"def get_t1_logical_router_path_by_id(self, router_id=None):\n t1_info = self.get_t1_logical_router(router_id=router_id)\n t1_path = t1_info.get(\"path\")\n return t1_path",
"def transit_gateway_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"transit_gateway_id\")",
"def transit_gateway_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"transit_gateway_id\")",
"def get_pathway(identifier, organism):\n pass",
"def transit_router_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"transit_router_id\")",
"def transit_router_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"transit_router_id\")",
"def ipv6_gateway_id(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"ipv6_gateway_id\")",
"def to_id(self, relation, project_id=None):\n if relation == ConnectorRelation.other:\n return -1\n return self._get_dict(True, project_id)[relation]",
"def find_path(self):\n \n if self.line_num != -1:\n return self.line_num\n\n max_line = self.graph.gps_length - 1\n min_line = 0\n #last_id = dg.normalize(self.graph.lines[-1])[0]\n last_id = normalize_simple(self.graph.lines[-1])[0]\n pivot = int((self.trip_id-1)/float(last_id)*self.graph.gps_length)\n #cur_id = dg.normalize(self.graph.lines[pivot])[0]\n cur_id = normalize_simple(self.graph.lines[pivot])[0]\n while cur_id != self.trip_id:\n if cur_id < self.trip_id:\n min_line = pivot\n else:\n max_line = pivot\n #TODO: could make this run in essentially constant time by hopping predetermined distance\n pivot = (min_line + max_line) / 2\n #cur_id = dg.normalize(self.graph.lines[pivot])[0]\n cur_id = normalize_simple(self.graph.lines[pivot])[0]\n\n #while dg.normalize(self.graph.lines[pivot])[0] == self.trip_id:\n while normalize_simple(self.graph.lines[pivot])[0] == self.trip_id:\n pivot -= 1\n\n pivot += 1\n self.line_num = pivot\n return pivot",
"def internet_gateway_rule_id(self) -> Optional[str]:\n return pulumi.get(self, \"internet_gateway_rule_id\")",
"def transit_gateway_id(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"transit_gateway_id\")",
"def unique_id(self):\n return self.heater.id + \"_switch\"",
"def railwaytunnels_id_get(id): # noqa: E501\n\n\n return query_manager.get_resource(id=id,\n rdf_type_uri=RAILWAYTUNNEL_TYPE_URI,\n rdf_type_name=RAILWAYTUNNEL_TYPE_NAME, \n kls=RailwayTunnel)",
"def country_id(self):\n return self._country_id",
"def country_id(self):\n return self._country_id",
"def get_route_id(self):\n\n return self.route_id",
"def transit_router_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"transit_router_id\")",
"def transit_router_id(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"transit_router_id\")",
"def test_routerid(self):\n self.assertTrue(\n self.ospf.parse_state(\n pattern='routerid',\n cmd_key='sh_ospf_ints') == '192.168.45.1', 'OSPF Interface: router ID not found')",
"def id2int(go_id):\n return int(go_id.replace(\"GO:\", \"\", 1))",
"def get_id_shortlink(link = None):\n choppedLink = legacy_check(link)\n id = None\n try:\n id = choppedLink[3] # or -1 instead of 3\n except:\n pass #dont care bout issues here\n return id",
"def id(self) -> Optional[str]:\n return self.elem.get('id')",
"def getid(data):\n return int(data.split('/')[-1])",
"def step_id(self):\n return self._step_id",
"def customer_gateway_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"customer_gateway_id\")"
] | [
"0.68267083",
"0.627477",
"0.48852605",
"0.46058208",
"0.452317",
"0.45115888",
"0.4469593",
"0.4469593",
"0.44516543",
"0.4372103",
"0.4372103",
"0.43642756",
"0.43634617",
"0.43230346",
"0.4271133",
"0.42687374",
"0.42391634",
"0.42179748",
"0.42127243",
"0.42127243",
"0.42023867",
"0.41817752",
"0.41817752",
"0.41667217",
"0.41570127",
"0.4154949",
"0.4146506",
"0.41420528",
"0.41417775",
"0.41403663"
] | 0.6507531 | 1 |
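The _get_hop_id accessor in the record above is one half of a pyangbind-style getter/setter pair; the generated class binds such pairs into Python properties so that plain attribute access still runs the YANG type check. A stripped-down, stand-alone sketch of that pattern (Python 3; the isinstance check stands in for the real YANGDynClass validation):

class Hop(object):
    # Sketch of the generated accessor pattern, minus the YANG machinery.
    def __init__(self, hop_id=""):
        self.__hop_id = None
        self._set_hop_id(hop_id)

    def _get_hop_id(self):
        return self.__hop_id

    def _set_hop_id(self, value):
        if not isinstance(value, str):  # stands in for the YANGDynClass check
            raise ValueError("hop_id must be a string")
        self.__hop_id = value

    hop_id = property(_get_hop_id, _set_hop_id)

hop = Hop("hop-1")
hop.hop_id = "hop-2"  # routed through _set_hop_id, so validation still runs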
Setter method for hop_id, mapped from YANG variable /input/LocatorRecord/rloc/explicit_locator_path/hop/hop_id (string) | def _set_hop_id(self, v, load=False):
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
try:
t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="hop-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True)
except (TypeError, ValueError):
raise ValueError("""hop_id must be of a type compatible with base=unicode, is_leaf=True, yang_name="hop-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True""")
self.__hop_id = t
if hasattr(self, '_set'):
self._set() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _set_hop(self, v, load=False):\n try:\n t = YANGDynClass(v,base=YANGListType(\"hop_id\",yc_hop_pyangbind_example__input_LocatorRecord_rloc_explicit_locator_path_hop, yang_name=\"hop\", parent=self, is_container='list', user_ordered=True, path_helper=self._path_helper), is_container='list', yang_name=\"hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"hop must be of a type compatible with base=YANGListType(\"hop_id\",yc_hop_pyangbind_example__input_LocatorRecord_rloc_explicit_locator_path_hop, yang_name=\"hop\", parent=self, is_container='list', user_ordered=True, path_helper=self._path_helper), is_container='list', yang_name=\"hop\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__hop = t\n if hasattr(self, '_set'):\n self._set()",
"def _get_hop_id(self):\n return self.__hop_id",
"def router_id(obj, data):\n router_id_configuration = \"set protocols ospf parameters router-id %s\"\n try:\n # Configure router id\n obj.execute(router_id_configuration % data['config']['id'])\n return {\"Result\": \"Configured successfully\"}\n except Exception, e:\n return {\"Error\": e}",
"def _set_interface_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"interface-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface_id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"interface-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\"\"\",\n })\n\n self.__interface_id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"interface-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface_id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"interface-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\"\"\",\n })\n\n self.__interface_id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"interface-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface_id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"interface-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\"\"\",\n })\n\n self.__interface_id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"interface-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface_id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"interface-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__interface_id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"interface-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface_id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"interface-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__interface_id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_interface_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"interface-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"interface_id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"interface-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__interface_id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def prepend_protocol_id(self, id_to_prepend):\n property_name, protocol_ids = ProtocolPath.to_components(self._full_path)\n\n if len(protocol_ids) == 0 or (len(protocol_ids) > 0 and protocol_ids[0] != id_to_prepend):\n protocol_ids.insert(0, id_to_prepend)\n\n self._from_components(property_name, *protocol_ids)",
"def set_id(self, value: str) -> None:\n if not isinstance(value, str):\n raise TypeError('id must be a string, not {0}'.format(type(value)))\n self._id = value",
"def set_element(self, rel_unit_cell, element, hop):\n self.dict[element[0]][rel_unit_cell + (element[1],)] = hop",
"def _set_locator_id(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"locator-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"locator_id must be of a type compatible with base=unicode, is_leaf=True, yang_name=\"locator-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True\"\"\")\n self.__locator_id = t\n if hasattr(self, '_set'):\n self._set()",
"def gateway_id(self, gateway_id):\n\n self._gateway_id = gateway_id",
"def id(self, id):\n if self._configuration.client_side_validation and id is None:\n raise ValueError(\"Invalid value for `id`, must not be `None`\") # noqa: E501\n\n self._id = id",
"def id(self, id):\n if id is None:\n raise ValueError(\"Invalid value for `id`, must not be `None`\")\n if id is not None and len(id) > 36:\n raise ValueError(\"Invalid value for `id`, length must be less than or equal to `36`\")\n if id is not None and len(id) < 36:\n raise ValueError(\"Invalid value for `id`, length must be greater than or equal to `36`\")\n\n self._id = id",
"def ipv6_gateway_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ipv6_gateway_id\")",
"def protocol_id(self, protocol_id):\n self._protocol_id = protocol_id"
] | [
"0.6690221",
"0.5993064",
"0.4821764",
"0.4768249",
"0.4768249",
"0.4768249",
"0.47625202",
"0.47625202",
"0.47625202",
"0.4690694",
"0.4690694",
"0.4690694",
"0.4690694",
"0.4690694",
"0.4690694",
"0.46848753",
"0.46848753",
"0.46848753",
"0.46848753",
"0.46848753",
"0.46848753",
"0.445044",
"0.44297078",
"0.44244203",
"0.4420751",
"0.44197664",
"0.4378517",
"0.43760735",
"0.4361578",
"0.43588594"
] | 0.7772029 | 0 |
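Note the guard at the top of the setter above: once a hop lives inside an instantiated keyed list, its key leaf may only be written during load, never assigned directly, hence the check on _parent and the load flag. A rough stand-alone sketch of that guard (the parent wiring here is illustrative):

class KeyedEntry(object):
    # Sketch of the key-leaf guard used by generated list entries.
    def __init__(self, key=None, parent=None):
        self._parent = parent
        self.__key = None
        self._set_key(key, load=True)  # the initial load is always allowed

    def _set_key(self, value, load=False):
        if self._parent is not None and load is False:
            raise AttributeError(
                "Cannot set keys directly when within an instantiated list"
            )
        self.__key = value

entry = KeyedEntry(key="hop-1", parent=object())
# entry._set_key("hop-2")  # would raise AttributeError: key is list-bound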
Setter method for address, mapped from YANG variable /input/LocatorRecord/rloc/explicit_locator_path/hop/address (simpleaddress) | def _set_address(self, v, load=False):
try:
t = YANGDynClass(v,base=[unicode,unicode,unicode,unicode,unicode,], is_leaf=True, yang_name="address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)
except (TypeError, ValueError):
raise ValueError("""address must be of a type compatible with base=[unicode,unicode,unicode,unicode,unicode,], is_leaf=True, yang_name="address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True""")
self.__address = t
if hasattr(self, '_set'):
self._set() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_address(self, address):\n if address == \"\":\n self.address = Address(\"\", \"\", \"\")\n else:\n self.address = address",
"def address(self, address: str):\n if address is None:\n raise ValueError(\"Invalid value for `address`, must not be `None`\") # noqa: E501\n\n self._address = address",
"def create_address(self, address: str) -> Optional[Address]:\n raise NotImplemented",
"def set_address(self, address):\n pass",
"def address(self, address):\n if address is None:\n raise ValueError(\"Invalid value for `address`, must not be `None`\") # noqa: E501\n\n self._address = address",
"def address(self, address):\n if address is None:\n raise ValueError(\"Invalid value for `address`, must not be `None`\")\n\n self._address = address",
"def address_1(self, address_1):\n\n self._address_1 = address_1",
"def address1(self, address1):\n\n self._address1 = address1",
"def address(self, address):\n if self.local_vars_configuration.client_side_validation and address is None: # noqa: E501\n raise ValueError(\"Invalid value for `address`, must not be `None`\") # noqa: E501\n\n self._address = address",
"def street_address1(self) -> str:\n return pulumi.get(self, \"street_address1\")",
"def address_street(self, address_street):\n if self.local_vars_configuration.client_side_validation and address_street is None: # noqa: E501\n raise ValueError(\"Invalid value for `address_street`, must not be `None`\") # noqa: E501\n if (self.local_vars_configuration.client_side_validation and\n address_street is not None and len(address_street) > 128):\n raise ValueError(\"Invalid value for `address_street`, length must be less than or equal to `128`\") # noqa: E501\n\n self._address_street = address_street",
"def address(self, address: str):\n\n self._address = address",
"def toAddr(self, addressString: unicode) -> ghidra.program.model.address.Address:\n ...",
"def address(self, address: object):\n\n self._address = address",
"def parse_address(self, address: str) -> Optional[Address]:\n raise NotImplemented",
"def address(self, new_address):\n house_num, street_name, apt_num = new_address\n self._address.house_num = house_num\n self._address.street_name = street_name\n self._address.apt_num = apt_num",
"def toAddr(self, offset: long) -> ghidra.program.model.address.Address:\n ...",
"def address_line1(self, address_line1):\n\n self._address_line1 = address_line1",
"def address_line1(self, address_line1):\n\n self._address_line1 = address_line1",
"def set_address(self, address):\n self._java_ref.setAddress(address)",
"def address_line1(self, address_line1):\n if address_line1 is None:\n raise ValueError(\n \"Invalid value for `address_line1`, must not be `None`\"\n ) # noqa: E501\n\n self._address_line1 = address_line1",
"def format_single_address(address: Address | str) -> str:\n address = coerce_address(address)\n name = address.display_name\n if not name:\n return address.addr_spec\n\n if not needs_qp_encode(name):\n if specials_regex.search(name):\n # simple quoting works here, since we disallow\n # backslash escaping double quotes.\n name = f'\"{name}\"'\n return f'{name} <{address.addr_spec}>'\n\n name = qp_encode_display_name(name)\n return f'{name} <{address.addr_spec}>'",
"def address(self):\n return str(self.street) + str(self.city) + str(self.state) + str(self.zipcode)",
"def street_address(self):\n if \"streetAddress\" in self._prop_dict:\n return self._prop_dict[\"streetAddress\"]\n else:\n return None",
"def address(self, address):\n\n self._address = address",
"def address(self, address):\n\n self._address = address",
"def address(self, address):\n\n self._address = address",
"def address(self, address):\n\n self._address = address",
"def address(self, address):\n\n self._address = address",
"def address(self, address):\n\n self._address = address"
] | [
"0.62030184",
"0.6122669",
"0.60337484",
"0.5985457",
"0.59457284",
"0.59440863",
"0.58759737",
"0.58455026",
"0.58099014",
"0.5727717",
"0.5699049",
"0.56339836",
"0.5617944",
"0.55898726",
"0.55279684",
"0.5515963",
"0.5509817",
"0.5505075",
"0.5505075",
"0.55014354",
"0.5489912",
"0.5469146",
"0.5444923",
"0.54410446",
"0.54262245",
"0.54262245",
"0.54262245",
"0.54262245",
"0.54262245",
"0.54262245"
] | 0.6426262 | 0 |
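In the address setter above, base is a list of types because the YANG simple-address type is a union, and the generated code accepts the first member type that validates. A rough stand-alone analogue of union-style validation, with illustrative member validators rather than the real union members:

import ipaddress

def validate_simple_address(value):
    # Try each union member in order, as a YANG union check would (sketch).
    members = (ipaddress.IPv4Address, ipaddress.IPv6Address)
    for member in members:
        try:
            member(value)
            return value
        except ValueError:
            continue
    raise ValueError("address matches no union member")

validate_simple_address("192.0.2.1")    # ok, matches the IPv4 member
validate_simple_address("2001:db8::1")  # ok, falls through to the IPv6 member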
Getter method for lrs_bits, mapped from YANG variable /input/LocatorRecord/rloc/explicit_locator_path/hop/lrs_bits (string) | def _get_lrs_bits(self):
return self.__lrs_bits | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _set_lrs_bits(self, v, load=False):\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"lrs-bits\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"lrs_bits must be of a type compatible with base=unicode, is_leaf=True, yang_name=\"lrs-bits\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__lrs_bits = t\n if hasattr(self, '_set'):\n self._set()",
"def test_bit_lscan_across_bytes(self):\n value = False\n ops = [bitwise_operations.bit_lscan(self.test_bin_ones, 7, 8, value)]\n\n expected_value = 1\n _, _, result = self.as_connection.operate(self.test_key, ops)\n assert result[self.test_bin_ones] == expected_value",
"def test_bit_lshift(self):\n ops = [bitwise_operations.bit_lshift(self.test_bin_ones, 0, 8, 3, None)]\n\n self.as_connection.operate(self.test_key, ops)\n\n _, _, bins = self.as_connection.get(self.test_key)\n expected_result = bytearray([8] * 1 + [1] * 4)\n assert bins[self.test_bin_ones] == expected_result",
"def test_bit_lshift_wrap(self):\n ops = [bitwise_operations.bit_lshift(self.test_bin_ones, 0, 40, 8, None)]\n\n self.as_connection.operate(self.test_key, ops)\n\n _, _, bins = self.as_connection.get(self.test_key)\n expected_result = bytearray([1] * 4 + [0])\n assert bins[self.test_bin_ones] == expected_result",
"def test_bit_lscan(self):\n value = True\n ops = [bitwise_operations.bit_lscan(self.count_bin, 32, 8, value)]\n\n expected_value = 6\n _, _, result = self.as_connection.operate(self.test_key, ops)\n assert result[self.count_bin] == expected_value",
"def SrlbFlags(self):\r\n\t\treturn self._get_attribute('srlbFlags')",
"def test_bit_lshift_across_bytes(self):\n ops = [bitwise_operations.bit_lshift(self.test_bin_ones, 4, 12, 3, None)]\n\n self.as_connection.operate(self.test_key, ops)\n\n _, _, bins = self.as_connection.get(self.test_key)\n expected_result = bytearray([8] * 2 + [1] * 3)\n assert bins[self.test_bin_ones] == expected_result",
"def magic_ll(self, parameter_s=''):\n self.magic_lc(parameter_s+' | grep ^l')",
"def lrs(st):\n\n length, shifts = __lrs(st.root, 0)\n result = [length, []]\n for shift in shifts:\n lrs_string = st.text[shift[0]-length:shift[0]]\n result[1].append((lrs_string, [x-length for x in shift]))\n return result",
"def bits(self):\n return list(range(self.lsb, self.msb + 1))",
"def test_bit_lscan_bad_bin_name(self):\n value = True\n ops = [bitwise_operations.bit_lscan(\"bad_name\", 0, 8, value)]\n\n expected_value = None\n _, _, result = self.as_connection.operate(self.test_key, ops)\n assert result[\"bad_name\"] == expected_value",
"def get_level(raw_data, bits):\n level = 0\n for i in range(13, -1, -1):\n level <<= 1\n b, o = (bits[i] / 8) + 1, bits[i] % 8\n level |= (ord(raw_data[b]) >> o) & 1\n return level",
"def bit_to_long(bits: str) -> Decimal:\n ints = int(bits, 2)\n result = Decimal(ints) / Decimal(_max_32bit)\n return result",
"def slb_lists(self) -> Sequence['outputs.GetGatewaysGatewaySlbListResult']:\n return pulumi.get(self, \"slb_lists\")",
"def compute_lsb(n_bits, fsr_min, fsr_max, half_bit=None):\n n_bits, half_bit = parse_bits(n_bits, half_bit)\n n_codes = compute_n_codes(n_bits, half_bit)\n\n diff = fsr_max - fsr_min\n\n if half_bit:\n lsb = diff/(n_codes + 1)\n else:\n lsb = diff/n_codes\n\n return lsb",
"def getBL(self):\r\n return self.bL;",
"def bitrange(self):\n return self._bitrange",
"def get_bitmask ( self, rwx_bits ):\n ret = 0\n if self.readable:\n ret |= rwx_bits[0]\n\n if self.writable:\n ret |= rwx_bits[1]\n\n if self.executable:\n ret |= rwx_bits[2]\n\n return ret",
"def radecs_to_lb(ras, decs):\n obj = coord.SkyCoord(ras, decs, unit = \"deg\", frame = \"icrs\")\n obj = obj.galactic\n \n ls = obj.l.degree\n bs = obj.b.degree\n \n return ls, bs",
"def lcBin(lcMat0, lcErrMat0, wavelength0, wavelength):\n lcLength = lcMat0.shape[1]\n nBin = len(wavelength) - 1 # -1 because wavelegnth represent the edges\n lcBinned = np.zeros((nBin, lcLength))\n errBinned = np.zeros((nBin, lcLength))\n binID = np.digitize(wavelength0, wavelength)\n for i in range(1, 1 + nBin):\n indexBin = np.where(binID == i)[0]\n lcBinned[i-1, :] = lcMat0[indexBin, :].mean(axis=0)\n errBinned[i-1, :] = np.sqrt(np.sum(lcErrMat0[indexBin, :]**2, axis=0)) / len(indexBin)\n return lcBinned, errBinned",
"def test_bit_lscan_bit_size_too_large(self):\n value = True\n ops = [bitwise_operations.bit_lscan(self.test_bin_ones, 0, 41, value)]\n\n with pytest.raises(e.OpNotApplicable):\n self.as_connection.operate(self.test_key, ops)",
"def gateway_slb_status(self) -> str:\n return pulumi.get(self, \"gateway_slb_status\")",
"def lps(mask):\n if not mask: return 0\n if not mask & (mask-1): return 1\n lo = int(log2(mask & ~(mask-1))) # least significant set bi\n hi = int(log2(mask)) # most significant set bit \n if s[lo] == s[hi]: return 2 + lps(mask^(1<<lo)^(1<<hi))\n return max(lps(mask^(1<<lo)), lps(mask^(1<<hi)))",
"def test_bit_lscan_value_not_found(self):\n value = False\n ops = [bitwise_operations.bit_lscan(self.five_255_bin, 0, 40, value)]\n\n expected_value = -1\n _, _, result = self.as_connection.operate(self.test_key, ops)\n assert result[self.five_255_bin] == expected_value",
"def LFlag(self):\n return self._get_attribute('lFlag')",
"def siglml(self, s):\n try:\n s = float(s) # raises TypeError for arrays of length != 1\n llike, nt, err = slmlike(s, self.on_cts, self.on_intvl,\n self.off_cts, self.off_intvl, self.offset, self.cutoff)\n if err != 0:\n raise ValueError('Underflow/overflow in likelihood calculation!')\n return llike\n except TypeError:\n if len(s.shape) != 1:\n raise ValueError('sigll handles only 1-D arrays!')\n llvals = zeros_like(s)\n for i, sval in enumerate(s):\n llvals[i], nt, err = slmlike(sval, self.on_cts, self.on_intvl,\n self.off_cts, self.off_intvl, self.offset, self.cutoff)\n if err != 0:\n raise ValueError('Underflow/overflow in likelihood calculation!')\n return llvals",
"def __init__(self, *args, **kwargs):\n super(LinlLis, self).__init__(\n ('linl', Bits(maxlen=4)),\n ('lis', Bits(maxlen=4)),\n *args, **kwargs\n )",
"def _get_lsp_config_frr_bandwidth_configured(self):\n return self.__lsp_config_frr_bandwidth_configured",
"def gateway_slb_status(self) -> Optional[str]:\n return pulumi.get(self, \"gateway_slb_status\")",
"def bsr(value, bits):\n minint = -2147483648\n if bits == 0:\n return value\n elif bits == 31:\n if value & minint:\n return 1\n else:\n return 0\n elif bits < 0 or bits > 31:\n raise ValueError('bad shift count')\n tmp = (value & 0x7FFFFFFE) // 2**bits\n if (value & minint):\n return (tmp | (0x40000000 // 2**(bits-1)))\n else:\n return tmp"
] | [
"0.76941806",
"0.51797223",
"0.5079757",
"0.50214094",
"0.49667132",
"0.49497706",
"0.47777623",
"0.47258523",
"0.46695194",
"0.46509945",
"0.45755452",
"0.4493895",
"0.44842264",
"0.44806886",
"0.44159406",
"0.4404732",
"0.4398201",
"0.4387897",
"0.43735254",
"0.4366293",
"0.43628094",
"0.43566367",
"0.4356033",
"0.43468946",
"0.43436992",
"0.43419632",
"0.4338168",
"0.4337688",
"0.43358576",
"0.43204656"
] | 0.7207082 | 1 |
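The bsr snippet in the negatives above emulates a 32-bit logical (unsigned) right shift on signed inputs; for bits = 0 it returns the signed value unchanged. A minimal sanity check — a hypothetical harness assuming bsr is in scope, not part of the dataset — is that for shift counts 1..31 it agrees with masking the value to 32 bits and shifting natively:

    def bsr_reference(value, bits):
        # Reinterpret the signed value as unsigned 32-bit, then shift natively.
        return (value & 0xFFFFFFFF) >> bits

    for value in (-1, -2147483648, 0, 1, 0x7FFFFFFF):
        for bits in range(1, 32):
            assert bsr(value, bits) == bsr_reference(value, bits)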
Setter method for lrs_bits, mapped from YANG variable /input/LocatorRecord/rloc/explicit_locator_path/hop/lrs_bits (string) | def _set_lrs_bits(self, v, load=False):
try:
t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="lrs-bits", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)
except (TypeError, ValueError):
raise ValueError("""lrs_bits must be of a type compatible with base=unicode, is_leaf=True, yang_name="lrs-bits", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True""")
self.__lrs_bits = t
if hasattr(self, '_set'):
self._set() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _get_lrs_bits(self):\n return self.__lrs_bits",
"def test_bit_lscan_across_bytes(self):\n value = False\n ops = [bitwise_operations.bit_lscan(self.test_bin_ones, 7, 8, value)]\n\n expected_value = 1\n _, _, result = self.as_connection.operate(self.test_key, ops)\n assert result[self.test_bin_ones] == expected_value",
"def test_bit_lshift(self):\n ops = [bitwise_operations.bit_lshift(self.test_bin_ones, 0, 8, 3, None)]\n\n self.as_connection.operate(self.test_key, ops)\n\n _, _, bins = self.as_connection.get(self.test_key)\n expected_result = bytearray([8] * 1 + [1] * 4)\n assert bins[self.test_bin_ones] == expected_result",
"def SrlbFlags(self):\r\n\t\treturn self._get_attribute('srlbFlags')",
"def test_bit_lscan(self):\n value = True\n ops = [bitwise_operations.bit_lscan(self.count_bin, 32, 8, value)]\n\n expected_value = 6\n _, _, result = self.as_connection.operate(self.test_key, ops)\n assert result[self.count_bin] == expected_value",
"def test_bit_lshift_wrap(self):\n ops = [bitwise_operations.bit_lshift(self.test_bin_ones, 0, 40, 8, None)]\n\n self.as_connection.operate(self.test_key, ops)\n\n _, _, bins = self.as_connection.get(self.test_key)\n expected_result = bytearray([1] * 4 + [0])\n assert bins[self.test_bin_ones] == expected_result",
"def __init__(self, *args, **kwargs):\n super(LinlLis, self).__init__(\n ('linl', Bits(maxlen=4)),\n ('lis', Bits(maxlen=4)),\n *args, **kwargs\n )",
"def magic_ll(self, parameter_s=''):\n self.magic_lc(parameter_s+' | grep ^l')",
"def test_bit_lshift_across_bytes(self):\n ops = [bitwise_operations.bit_lshift(self.test_bin_ones, 4, 12, 3, None)]\n\n self.as_connection.operate(self.test_key, ops)\n\n _, _, bins = self.as_connection.get(self.test_key)\n expected_result = bytearray([8] * 2 + [1] * 3)\n assert bins[self.test_bin_ones] == expected_result",
"def set_plyrbolts(self, plyrbolts):\n self._plyrbolts = plyrbolts",
"def bits(self):\n return list(range(self.lsb, self.msb + 1))",
"def bit_to_long(bits: str) -> Decimal:\n ints = int(bits, 2)\n result = Decimal(ints) / Decimal(_max_32bit)\n return result",
"def compute_lsb(n_bits, fsr_min, fsr_max, half_bit=None):\n n_bits, half_bit = parse_bits(n_bits, half_bit)\n n_codes = compute_n_codes(n_bits, half_bit)\n\n diff = fsr_max - fsr_min\n\n if half_bit:\n lsb = diff/(n_codes + 1)\n else:\n lsb = diff/n_codes\n\n return lsb",
"def get_bitmask ( self, rwx_bits ):\n ret = 0\n if self.readable:\n ret |= rwx_bits[0]\n\n if self.writable:\n ret |= rwx_bits[1]\n\n if self.executable:\n ret |= rwx_bits[2]\n\n return ret",
"def test_bit_lscan_bad_bin_name(self):\n value = True\n ops = [bitwise_operations.bit_lscan(\"bad_name\", 0, 8, value)]\n\n expected_value = None\n _, _, result = self.as_connection.operate(self.test_key, ops)\n assert result[\"bad_name\"] == expected_value",
"def test_bit_lscan_bit_size_too_large(self):\n value = True\n ops = [bitwise_operations.bit_lscan(self.test_bin_ones, 0, 41, value)]\n\n with pytest.raises(e.OpNotApplicable):\n self.as_connection.operate(self.test_key, ops)",
"def lrs(st):\n\n length, shifts = __lrs(st.root, 0)\n result = [length, []]\n for shift in shifts:\n lrs_string = st.text[shift[0]-length:shift[0]]\n result[1].append((lrs_string, [x-length for x in shift]))\n return result",
"def _set_lsp_config_frr_bandwidth(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name=\"lsp-config-frr-bandwidth\", rest_name=\"lsp-config-frr-bandwidth\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"lsp_config_frr_bandwidth must be of a type compatible with uint32\"\"\",\n 'defined-type': \"uint32\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name=\"lsp-config-frr-bandwidth\", rest_name=\"lsp-config-frr-bandwidth\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint32', is_config=True)\"\"\",\n })\n\n self.__lsp_config_frr_bandwidth = t\n if hasattr(self, '_set'):\n self._set()",
"def lsits(self, lsits: List[LsitsParam]):\n\n self._lsits = lsits",
"def bsr(value, bits):\n minint = -2147483648\n if bits == 0:\n return value\n elif bits == 31:\n if value & minint:\n return 1\n else:\n return 0\n elif bits < 0 or bits > 31:\n raise ValueError('bad shift count')\n tmp = (value & 0x7FFFFFFE) // 2**bits\n if (value & minint):\n return (tmp | (0x40000000 // 2**(bits-1)))\n else:\n return tmp",
"def test_bit_lscan_value_not_found(self):\n value = False\n ops = [bitwise_operations.bit_lscan(self.five_255_bin, 0, 40, value)]\n\n expected_value = -1\n _, _, result = self.as_connection.operate(self.test_key, ops)\n assert result[self.five_255_bin] == expected_value",
"def bitrange(self):\n return self._bitrange",
"def create_llrs_combinations_to_edges(self):\n\n generator_polys = self.code_gm[0, :self.rate_inverse * (int(np.log2(self.n_states)) + 1)]\n generator_polys = generator_polys.reshape(int(np.log2(self.n_states)) + 1, -1).T\n generator_polys = np.fliplr(generator_polys)\n states_binary_combinations = np.array(\n list(itertools.product(range(2), repeat=int(np.log2(self.n_states))))).repeat(2, axis=0)\n input_bits = np.tile(np.array([1, 0]), self.n_states).reshape(-1, 1)\n\n binary_combinations = np.concatenate([input_bits, states_binary_combinations], axis=1)\n bits_outputs_on_edges = np.matmul(binary_combinations, generator_polys.T) % 2\n llr_outputs_on_edges = (-1) ** bits_outputs_on_edges\n llrs_combinations_to_edges_mat = np.zeros([2 ** self.rate_inverse, 2 * self.n_states])\n\n for row_ind in range(llrs_combinations_to_edges_mat.shape[0]):\n llrs_combinations_to_edges_mat[row_ind] = np.equal(llr_outputs_on_edges,\n self.all_llrs_combinations_mat[row_ind]).all(1)\n\n self.llrs_combinations_to_edges = torch.Tensor(llrs_combinations_to_edges_mat)",
"def ls_sr_band_correction(self,\n img):\n return \\\n ee.Algorithms.If(\n ee.String(img.get('SATELLITE')).compareTo('LANDSAT_8'),\n ee.Algorithms.If(ee.String(img.get('SATELLITE')).compareTo('LANDSAT_5'),\n ee.Image(img.select(['B1', 'B2', 'B3', 'B4', 'B5', 'B7', 'pixel_qa', 'radsat_qa'],\n ['BLUE', 'GREEN', 'RED', 'NIR', 'SWIR1', 'SWIR2', 'PIXEL_QA',\n 'RADSAT_QA'])\n .int16()\n .copyProperties(img)\n .copyProperties(img,\n ['system:time_start',\n 'system:time_end',\n 'system:index',\n 'system:footprint'])),\n ee.Algorithms.If(ee.Number(int(self.auto_ls5_correction)),\n ee.Image(EEHelper.ls5_sr_corr(img)),\n ee.Image(img.select(\n ['B1', 'B2', 'B3', 'B4', 'B5', 'B7', 'pixel_qa', 'radsat_qa'],\n ['BLUE', 'GREEN', 'RED', 'NIR', 'SWIR1', 'SWIR2', 'PIXEL_QA',\n 'RADSAT_QA'])\n .int16()\n .copyProperties(img)\n .copyProperties(img,\n ['system:time_start',\n 'system:time_end',\n 'system:index',\n 'system:footprint']))\n )\n ),\n ee.Algorithms.If(ee.Number(int(self.auto_ls8_correction)),\n ee.Image(EEHelper.ls8_sr_corr(img)),\n ee.Image(img.select(['B2', 'B3', 'B4', 'B5', 'B6', 'B7', 'pixel_qa', 'radsat_qa'],\n ['BLUE', 'GREEN', 'RED', 'NIR', 'SWIR1', 'SWIR2', 'PIXEL_QA',\n 'RADSAT_QA'])\n .int16()\n .copyProperties(img)\n .copyProperties(img,\n ['system:time_start',\n 'system:time_end',\n 'system:index',\n 'system:footprint']))\n )\n )",
"def siglml(self, s):\n try:\n s = float(s) # raises TypeError for arrays of length != 1\n llike, nt, err = slmlike(s, self.on_cts, self.on_intvl,\n self.off_cts, self.off_intvl, self.offset, self.cutoff)\n if err != 0:\n raise ValueError('Underflow/overflow in likelihood calculation!')\n return llike\n except TypeError:\n if len(s.shape) != 1:\n raise ValueError('sigll handles only 1-D arrays!')\n llvals = zeros_like(s)\n for i, sval in enumerate(s):\n llvals[i], nt, err = slmlike(sval, self.on_cts, self.on_intvl,\n self.off_cts, self.off_intvl, self.offset, self.cutoff)\n if err != 0:\n raise ValueError('Underflow/overflow in likelihood calculation!')\n return llvals",
"def set_number_of_bits(self, number_of_bits):\n self.number_of_bits = number_of_bits",
"def _set_lsp_config_frr_bandwidth_configured(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name=\"lsp-config-frr-bandwidth-configured\", rest_name=\"lsp-config-frr-bandwidth-configured\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"lsp_config_frr_bandwidth_configured must be of a type compatible with boolean\"\"\",\n 'defined-type': \"boolean\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGBool, is_leaf=True, yang_name=\"lsp-config-frr-bandwidth-configured\", rest_name=\"lsp-config-frr-bandwidth-configured\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)\"\"\",\n })\n\n self.__lsp_config_frr_bandwidth_configured = t\n if hasattr(self, '_set'):\n self._set()",
"def __init__(self, bits=8):\n\n self.map = {}\n self.bits = bits\n for r, g, b in itertools.product(range(2 ** bits), repeat=3):\n self.map[(r << 2 * bits) + (g << bits) + b] = rgb2lab(r << (8 - bits), g << (8 - bits), b << (8 - bits))",
"def has_lvar_bit(self):\n try:\n return (self.parts[1] & self.EXTENSION_BIT_MASK) > 0\n except IndexError:\n return False",
"def modulate(self, input_bits):\n\n index_list = map(lambda i: self.table[tuple((input_bits[i:i+self.num_bits_symbol]))], \\\n xrange(0, len(input_bits), self.num_bits_symbol))\n baseband_symbols = self.constellation[index_list]\n\n return baseband_symbols"
] | [
"0.70125633",
"0.5115934",
"0.49813104",
"0.49572697",
"0.49218857",
"0.49136877",
"0.48856583",
"0.48110783",
"0.47007787",
"0.46906585",
"0.4647708",
"0.45777962",
"0.45075318",
"0.4459902",
"0.44569954",
"0.4419056",
"0.4411302",
"0.44077265",
"0.44067883",
"0.44016144",
"0.43967265",
"0.43758392",
"0.43559077",
"0.43465385",
"0.43279567",
"0.43036157",
"0.43035915",
"0.42949075",
"0.42919108",
"0.42912006"
] | 0.83665675 | 0 |
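The document above follows the pyangbind-generated pattern that recurs throughout this file: wrap the incoming value in YANGDynClass and convert any TypeError/ValueError into a ValueError that names the leaf. A stripped-down sketch of that pattern — plain Python with a hypothetical class, no pyangbind dependency — looks like:

    class LeafHolder(object):
        """Minimal stand-in for a generated binding with one string leaf."""

        def __init__(self):
            self.__lrs_bits = None

        def _get_lrs_bits(self):
            return self.__lrs_bits

        def _set_lrs_bits(self, v):
            # Mirror the generated setter: coerce the value, and re-raise
            # coercion failures as a ValueError naming the leaf.
            try:
                t = str(v)  # the Python 2 bindings use unicode here
            except (TypeError, ValueError):
                raise ValueError("lrs_bits must be of a string-compatible type")
            self.__lrs_bits = t

        lrs_bits = property(_get_lrs_bits, _set_lrs_bits)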
Setter method for hop, mapped from YANG variable /input/LocatorRecord/rloc/explicit_locator_path/hop (list) | def _set_hop(self, v, load=False):
try:
t = YANGDynClass(v,base=YANGListType("hop_id",yc_hop_pyangbind_example__input_LocatorRecord_rloc_explicit_locator_path_hop, yang_name="hop", parent=self, is_container='list', user_ordered=True, path_helper=self._path_helper), is_container='list', yang_name="hop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)
except (TypeError, ValueError):
raise ValueError("""hop must be of a type compatible with base=YANGListType("hop_id",yc_hop_pyangbind_example__input_LocatorRecord_rloc_explicit_locator_path_hop, yang_name="hop", parent=self, is_container='list', user_ordered=True, path_helper=self._path_helper), is_container='list', yang_name="hop", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True""")
self.__hop = t
if hasattr(self, '_set'):
self._set() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _set_explicit_locator_path(self, v, load=False):\n try:\n t = YANGDynClass(v,base=yc_explicit_locator_path_pyangbind_example__input_LocatorRecord_rloc_explicit_locator_path, is_container='container', yang_name=\"explicit-locator-path\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"explicit_locator_path must be of a type compatible with base=yc_explicit_locator_path_pyangbind_example__input_LocatorRecord_rloc_explicit_locator_path, is_container='container', yang_name=\"explicit-locator-path\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__explicit_locator_path = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_hop_id(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"hop-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"hop_id must be of a type compatible with base=unicode, is_leaf=True, yang_name=\"hop-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True\"\"\")\n self.__hop_id = t\n if hasattr(self, '_set'):\n self._set()",
"def set_element(self, rel_unit_cell, element, hop):\n self.dict[element[0]][rel_unit_cell + (element[1],)] = hop",
"def _get_hop(self):\n return self.__hop",
"def _set_next_hop_unchanged(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name=\"next-hop-unchanged\", rest_name=\"next-hop-unchanged\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Next hop unchanged', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"next_hop_unchanged must be of a type compatible with empty\"\"\",\n 'defined-type': \"empty\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGBool, is_leaf=True, yang_name=\"next-hop-unchanged\", rest_name=\"next-hop-unchanged\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Next hop unchanged', u'cli-full-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)\"\"\",\n })\n\n self.__next_hop_unchanged = t\n if hasattr(self, '_set'):\n self._set()",
"def pathways(self) -> str:\n return self._pathways",
"def pathways(self, pathways: str):\n if pathways is None:\n raise ValueError(\"Invalid value for `pathways`, must not be `None`\") # noqa: E501\n\n self._pathways = pathways",
"def _get_explicit_locator_path(self):\n return self.__explicit_locator_path",
"def hopping(h,name=\"HOPPING.OUT\",reps=0):\n if h.has_eh: raise\n if h.has_spin: (ii,jj,ts) = extract.hopping_spinful(h.intra)\n else: (ii,jj,ts) = extract.hopping_spinless(h.intra)\n f = open(name,\"w\") # write file\n for (i,j,t) in zip(ii,jj,ts):\n f.write(str(h.geometry.r[i][0])+\" \")\n f.write(str(h.geometry.r[i][1])+\" \")\n f.write(str(h.geometry.r[j][0])+\" \")\n f.write(str(h.geometry.r[j][1])+\" \")\n f.write(str(t)+\"\\n\")\n f.close()",
"def route_accepted(self, prefix, next_hop, as_path):",
"def prepend_as_paths(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"prepend_as_paths\")",
"def prepend_as_paths(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"prepend_as_paths\")",
"def shortest_path(self, source, destination, parameter=None):\n paths = []\n for path in self.graph.shortest_paths(source, destination, parameter):\n paths.append({'hops': path})\n return jsonify({'paths': paths})",
"def _set_neighs_list_list(self, key):\n if self._constant_neighs:\n key = np.array(key)\n if self.staticneighs:\n self.idxs = key\n self.ks = range(1) if self.ks is None else self.ks\n else:\n self.ks = range(1) if self.ks is None else self.ks\n len_ks = len(self.ks)\n self.idxs = [key for k in range(len_ks)]\n if type(key) == np.ndarray:\n self.idxs = np.array(self.idxs)\n if len(self.iss) != len(key):\n if len(self.iss) != len(key):\n self.iss = range(len(key))\n# if len(self.idxs[0]) > 0:\n# self.iss = list(range(len(self.idxs)))\n self._setted = True",
"def shortest_path_to_hypernym(self, hypernym):\n if self == hypernym:\n return [[self]]\n assert hypernym in self.all_hypernyms(), \"given hypernym is not a hypernym of this synset\"\n shortest_path = []\n shortest = math.inf\n for path in self.hypernym_paths():\n if hypernym in path:\n index = path.index(hypernym)\n current_path = path[index:]\n path_len = len(current_path)\n if path_len <= shortest:\n shortest = path_len\n current_path.reverse()\n shortest_path.append(current_path)\n shortest_dist = min([len(p) for p in shortest_path])\n shortest_path = [p for p in shortest_path if len(p) == shortest_dist]\n return shortest_path",
"def _set_lsp_config_shortcut_ospf(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name=\"lsp-config-shortcut-ospf\", rest_name=\"lsp-config-shortcut-ospf\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"lsp_config_shortcut_ospf must be of a type compatible with boolean\"\"\",\n 'defined-type': \"boolean\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGBool, is_leaf=True, yang_name=\"lsp-config-shortcut-ospf\", rest_name=\"lsp-config-shortcut-ospf\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)\"\"\",\n })\n\n self.__lsp_config_shortcut_ospf = t\n if hasattr(self, '_set'):\n self._set()",
"def __init__(self, hop_interval=0):\n\n self.hop_interval = hop_interval",
"def _set_neighs_list_list_list(self, key):\n self.ks = list(range(len(key))) if self.ks is None else self.ks\n if self._constant_neighs:\n self.idxs = np.array(key)\n else:\n self.idxs = key\n if len(self.idxs[0]) != len(self.iss):\n self.iss = list(range(len(self.idxs[0])))\n if self.staticneighs:\n self.idxs = self.idxs[0]\n self._setted = True",
"def path(self, path: List[Path]):\n\n self._path = path",
"def hyponym_paths(self):\n paths = []\n hyponyms = self._direct_hyponyms\n if self.is_leaf():\n paths = [[self]]\n for hyponym in hyponyms:\n for ancestor_list in hyponym.hyponym_paths():\n ancestor_list.append(self)\n paths.append(ancestor_list)\n return paths",
"def append_step(path, neighbours_list):\n index = neighbours_list.index(8)\n directions = ['L', 'U', 'R', 'D']\n return path + directions[index]",
"def set_ip_opt(self, opt, value):\r\n if isinstance(opt, str):\r\n o = globals()[self.ip_opt_prefix+opt]\r\n elif isinstance(opt, list) or isinstance(opt, tuple):\r\n o = globals()[self.ip_opt_prefix+opt[self.v6]]\r\n else:\r\n raise TypeError('opt argument is of wrong type: '+repr(opt))\r\n self.setsockopt(self.ip_proto, o, value)",
"def get_pathways_list(org='hsa'):\r\n\r\n resp = requests.get(''.join([Kegg.BASE_URL, 'list/pathway/', org]))\r\n if resp.status_code == 200:\r\n d = csv.DictReader(resp.text.split('\\n'),\r\n delimiter='\\t',\r\n fieldnames=('id', 'name'))\r\n return [row for row in d]\r\n return {}",
"def prepend_as_paths(self) -> pulumi.Output[Optional[Sequence[str]]]:\n return pulumi.get(self, \"prepend_as_paths\")",
"def ex_ridme1(param): \r\n param = _parsargs(param, npar=2) \r\n \r\n # Dipolar pathways\r\n lam = param.copy()\r\n pathways = [[] for _ in lam]\r\n pathways[0] = [lam[0]]\r\n pathways[1] = [lam[1], 0, 1]\r\n return pathways",
"def test_hop_params():\n sp, v = sphinx_path_from_test_vector(\n 'tests/vectors/onion-test-multi-frame.json'\n )\n\n params = sp.get_hop_params()\n\n expected = [(\n '02eec7245d6b7d2ccb30380bfbe2a3648cd7a942653f5aa340edcea1f283686619',\n '53eb63ea8a3fec3b3cd433b85cd62a4b145e1dda09391b348c4e1cd36a03ea66',\n '2ec2e5da605776054187180343287683aa6a51b4b1c04d6dd49c45d8cffb3c36'\n ), (\n '028f9438bfbf7feac2e108d677e3a82da596be706cc1cf342b75c7b7e22bf4e6e2',\n 'a6519e98832a0b179f62123b3567c106db99ee37bef036e783263602f3488fae',\n 'bf66c28bc22e598cfd574a1931a2bafbca09163df2261e6d0056b2610dab938f'\n ), (\n '03bfd8225241ea71cd0843db7709f4c222f62ff2d4516fd38b39914ab6b83e0da0',\n '3a6b412548762f0dbccce5c7ae7bb8147d1caf9b5471c34120b30bc9c04891cc',\n 'a1f2dadd184eb1627049673f18c6325814384facdee5bfd935d9cb031a1698a5'\n ), (\n '031dde6926381289671300239ea8e57ffaf9bebd05b9a5b95beaf07af05cd43595',\n '21e13c2d7cfe7e18836df50872466117a295783ab8aab0e7ecc8c725503ad02d',\n '7cfe0b699f35525029ae0fa437c69d0f20f7ed4e3916133f9cacbb13c82ff262'\n ), (\n '03a214ebd875aab6ddfd77f22c5e7311d7f77f17a169e599f157bbcdae8bf071f4',\n 'b5756b9b542727dbafc6765a49488b023a725d631af688fc031217e90770c328',\n 'c96e00dddaf57e7edcd4fb5954be5b65b09f17cb6d20651b4e90315be5779205'\n )]\n assert(len(params) == len(sp.hops))\n\n for a, b in zip(expected, params):\n assert(a[0] == bytes.hex(b.ephemeralkey.to_bytes()))\n assert(a[1] == bytes.hex(b.secret.to_bytes()))\n assert(a[2] == bytes.hex(b.blind.to_bytes()))",
"def __get_hops(self, traceroute):\n # This breaks up the line into hop num => host data\n #hop_pattern = '^(?P<hop_num>\\w+)\\s+(?P<hosts>.*)'\n hop_pattern = '^(?P<hop_num>[0-9]+)\\s+(?P<hosts>.*)'\n # This matches hosts which are ip or dns mapped \n host_pattern = '([\\d\\w.-]+\\s+\\(\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\)\\s+\\d+\\.\\d+ ms)'\n # This is essentially the same as the previous pattern but breaks into usable chunks\n hop_element_pattern = '([\\d\\w.-]+)\\s+\\((\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3})\\)\\s+(\\d+\\.\\d+ ms)'\n hp = re.compile(hop_element_pattern)\n\n alertTriggered = False\n for entry in traceroute.split('\\n'):\n entry = entry.strip()\n result = re.match(hop_pattern,entry)\n\n if result is None: # should only fail on first line\n continue\n hop = result.groupdict()\n hop_num = int(hop['hop_num'])\n \n hop_hosts = re.findall(host_pattern, hop['hosts'])\n\n self.hops[hop_num] = []\n \n for host in hop_hosts:\n m = hp.search(host)\n (hostname, ip, ping_time) = m.groups()\n \n # Check ping time to see if it exceeds threshold. Once one is found, don't need any more info from other hops\n if alertTriggered is False:\n if self._exceeds_hop_latency(ping_time):\n self.latency_exceeded = True\n alertTriggered = True\n\n if self.no_geo:\n self.hops[hop_num].append(\n { \n 'hostname' : hostname,\n 'ip_address' : ip,\n 'rtt' : ping_time\n }\n )\n else:\n location = self.__get_geocoded_data(ip)\n if location:\n self.hops[hop_num].append(\n { \n 'hostname' : hostname,\n 'ip_address' : ip,\n 'rtt' : ping_time,\n 'latitude' : location['latitude'],\n 'longitude' : location['longitude']\n }\n )\n else:\n self.hops[hop_num].append(\n { \n 'hostname' : hostname,\n 'ip_address' : ip,\n 'rtt' : ping_time\n }\n )",
"def _set_LocatorRecord(self, v, load=False):\n try:\n t = YANGDynClass(v,base=YANGListType(\"locator_id\",yc_LocatorRecord_pyangbind_example__input_LocatorRecord, yang_name=\"LocatorRecord\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper), is_container='list', yang_name=\"LocatorRecord\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"LocatorRecord must be of a type compatible with base=YANGListType(\"locator_id\",yc_LocatorRecord_pyangbind_example__input_LocatorRecord, yang_name=\"LocatorRecord\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper), is_container='list', yang_name=\"LocatorRecord\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__LocatorRecord = t\n if hasattr(self, '_set'):\n self._set()",
"def get_ha_path():\n query = {\n \"type\": \"op\",\n \"cmd\": \"<show><high-availability><path-monitoring></path-monitoring></high-availability></show>\",\n }\n\n return __proxy__[\"panos.call\"](query)",
"def InitWayR(session):\n global way_r\n q = session.query(melt.StreetAssoc)\n way_r = set([it.osm_way for it in q.all()])"
] | [
"0.53979826",
"0.5190791",
"0.48206985",
"0.4552641",
"0.44636524",
"0.44104028",
"0.43401894",
"0.4337924",
"0.43294084",
"0.43070313",
"0.42820817",
"0.42820817",
"0.4262065",
"0.42046434",
"0.42014894",
"0.4173866",
"0.4170466",
"0.41347787",
"0.41344467",
"0.41000643",
"0.409633",
"0.40885445",
"0.40705317",
"0.40631062",
"0.404512",
"0.40353605",
"0.40281928",
"0.4008345",
"0.39698055",
"0.3969012"
] | 0.7980384 | 0 |
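The hop list above is declared with YANGListType("hop_id", ..., user_ordered=True): entries are keyed by hop-id and user insertion order is preserved. A rough stand-in for that behaviour — hypothetical, not the pyangbind implementation — is an ordered mapping whose keys double as the entry's key leaf:

    from collections import OrderedDict

    class HopList(object):
        """Rough stand-in for a user-ordered, keyed YANG list."""

        def __init__(self):
            self._entries = OrderedDict()

        def add(self, hop_id):
            # Keys are assigned exactly once, when the entry is created.
            if hop_id in self._entries:
                raise KeyError("hop %r already exists" % (hop_id,))
            entry = {"hop_id": hop_id, "lrs_bits": None}
            self._entries[hop_id] = entry
            return entry

        def __getitem__(self, hop_id):
            return self._entries[hop_id]

        def __iter__(self):
            return iter(self._entries)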
Getter method for address_type, mapped from YANG variable /input/LocatorRecord/rloc/address_type (string) | def _get_address_type(self):
return self.__address_type | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def address_type(self) -> str:\n return pulumi.get(self, \"address_type\")",
"def _set_address_type(self, v, load=False):\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"address-type\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"address_type must be of a type compatible with base=unicode, is_leaf=True, yang_name=\"address-type\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__address_type = t\n if hasattr(self, '_set'):\n self._set()",
"def address_type(self, address_type):\n\n self._address_type = address_type",
"def get_type(self):\n types = dict(ADDRESS_TYPE_CHOICES)\n return types.get(self.address_type, \"N/A\")",
"def type_address(self, address):\n\n\t\twith allure.step(\"Type payee address\"):\n\t\t\telement = Element(driver=self.driver,\n\t\t\t explicit_wait_time=self.explicit_wait_time,\n\t\t\t locator=BillPayPageLocator.ADDRESS_INPUT)\n\t\t\telement.write(address)\n\t\t\treturn None",
"def _address_type(self, address):\n parsed_type = None\n parsed = urlparse.urlparse(address)\n if parsed.scheme not in ('http', 'https', 'ipc', 'tcp'):\n raise ValueError('Invalid volttron central address.')\n\n return parsed.scheme",
"def AddrType(self) -> AddrTypes:\n return self.m_addr_type",
"def get_type_and_rel_addr(cls, addr):\n\t\ttype = abs(addr // 1000) # integer division\n\t\trelative_address = abs(addr) - (type * 1000)\n\t\treturn (type, relative_address)",
"def get_record_type(rr, offset=0):\n (generic_type, _) = RR.fromData(rr,offset)\n return {\n RR.TYPE_A : RR_A,\n RR.TYPE_AAAA : RR_AAAA,\n RR.TYPE_NS : RR_NS,\n RR.TYPE_CNAME : RR_CNAME\n }[generic_type._type]",
"def address_type(self):\n return addresser.AddressSpace.PROPOSALS",
"def get_address_file(addresses_path, address_type, name):\n return get_address_key_file(addresses_path, address_type, 'address', name)",
"def address_code(self, address_code):\n if self.local_vars_configuration.client_side_validation and address_code is None: # noqa: E501\n raise ValueError(\"Invalid value for `address_code`, must not be `None`\") # noqa: E501\n if (self.local_vars_configuration.client_side_validation and\n address_code is not None and len(address_code) > 10):\n raise ValueError(\"Invalid value for `address_code`, length must be less than or equal to `10`\") # noqa: E501\n\n self._address_code = address_code",
"def address_str(self):\n return self._plrevgeoloc.addressString",
"def type_name(attr_type: AttrType) -> str:\n return attr_type.native_name or class_name(attr_type.name)",
"def location_type(self, location_type):\n\n self._location_type = location_type",
"def type(self) -> Optional[pulumi.Input[Union[str, 'ExtendedLocationTypes']]]:\n return pulumi.get(self, \"type\")",
"def type(self) -> Optional[pulumi.Input[Union[str, 'ExtendedLocationTypes']]]:\n return pulumi.get(self, \"type\")",
"def get_str_address(address):\n return \\\n get_ob_value_primitive(address, 'AddrLine1', exception_return_value='') + ' ' + \\\n get_ob_value_primitive(address, 'AddrLine2', exception_return_value='') + ' ' + \\\n get_ob_value_primitive(address, 'AddrLine3', exception_return_value='') + ', ' + \\\n get_ob_value_primitive(address, 'City', exception_return_value='') + ' ' + \\\n get_ob_value_primitive(address, 'County', exception_return_value='') + ' ' + \\\n get_ob_value_primitive(address, 'StateProvince', exception_return_value='') + ' ' + \\\n get_ob_value_primitive(address, 'ZipPostalCode', exception_return_value='')",
"def outside_ip_address_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"outside_ip_address_type\")",
"def outside_ip_address_type(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"outside_ip_address_type\")",
"def get_addressbook_entry_name(\n self,\n book_type: AddressbookType,\n chain_address: OptionalChainAddress,\n ) -> Optional[str]:\n with self.read_ctx(book_type) as read_cursor:\n query = read_cursor.execute(\n 'SELECT name FROM address_book WHERE address=? AND blockchain IS ?',\n (chain_address.address, chain_address.blockchain.value if chain_address.blockchain is not None else None), # noqa: E501\n )\n result = query.fetchone()\n\n return None if result is None else result[0]",
"def address_nr(self, address_nr):\n if self.local_vars_configuration.client_side_validation and address_nr is None: # noqa: E501\n raise ValueError(\"Invalid value for `address_nr`, must not be `None`\") # noqa: E501\n if (self.local_vars_configuration.client_side_validation and\n address_nr is not None and len(address_nr) > 10):\n raise ValueError(\"Invalid value for `address_nr`, length must be less than or equal to `10`\") # noqa: E501\n\n self._address_nr = address_nr",
"def outside_ip_address_type(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"outside_ip_address_type\")",
"def _get_type_string(attr_type):\n if isinstance(attr_type, (list, tuple)):\n if len(attr_type) > 1:\n return (\n \", \".join([x.__name__ for x in attr_type[:-1]])\n + \" or \"\n + attr_type[-1].__name__\n )\n return attr_type[0].__name__\n return attr_type.__name__",
"def get_reb_type(component_type=None):\n reb_types = {'LCA-13574': 'REB5',\n 'LCA-13537': 'WREB',\n 'LCA-13540': 'GREB'}\n if component_type is None:\n component_type = os.environ['LCATR_UNIT_TYPE']\n return reb_types[component_type]",
"def address(self):\n return f\"{self._type}.{self._id}\"",
"def _get_type_name(self, st_type):\n if st_type <= 2045: return 'str' + str(st_type)\n return self._type_names[st_type]",
"def read_type(adr, tyname):\n data = Rsp.read_mem(adr,tyname2size(tyname))\n data = bytes.fromhex(data)\n res, = struct.unpack(tyname2fmt(tyname), data)\n return(res)",
"def get_reverse_geocode_result(userlat_long, result_type=None, location_type=None):\n\n gmaps = googlemaps.Client(key=GooglemapsService.api_key)\n json_response = gmaps.reverse_geocode(userlat_long, result_type, location_type)\n return json_response[0]['formatted_address']",
"def geocode(addr_str):\n\n\tbase_url = 'http://gis.oregonmetro.gov/rlisapi2/locate/'\n\turl_template = '{0}?token={1}&input={2}&form=json'\n\turl = url_template.format(base_url, token, addr_str)\n\tresponse = requests.get(url)\n\n\tif response.status_code != 200:\n\t\tprint 'unable to establish connection with rlis api'\n\t\tprint 'status code is: {0}'.format(response.status_code)\n\t\treturn response.status_code\n\t\n\tjson_rsp = response.json()\n\tif json_rsp['error']:\n\t\tprint 'the following address could not be geocoded:'\n\t\tprint '\\'{0}\\''.format(addr_str)\n\t\tprint 'the following error message was returned:'\n\t\tprint '\\'{0}\\''.format(json_rsp['error']), '\\n'\n\telse:\n\t\treturn json_rsp['data'][0]"
] | [
"0.733212",
"0.6835204",
"0.6450108",
"0.633493",
"0.6064406",
"0.55583787",
"0.54384947",
"0.5180128",
"0.5110294",
"0.50746757",
"0.5072969",
"0.48838633",
"0.48105177",
"0.48083943",
"0.47836375",
"0.47796622",
"0.47796622",
"0.47796443",
"0.47665113",
"0.47665113",
"0.4750135",
"0.47009155",
"0.46786383",
"0.46656466",
"0.46646804",
"0.46359542",
"0.46130237",
"0.46102512",
"0.46083352",
"0.46078256"
] | 0.69393504 | 1 |
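Generated bindings pair each _get_*/_set_* method with a Python property so that plain attribute access goes through the validators. Assuming the usual pyangbind convention (not quoted from the generator), the pairing for this leaf is a single line in the class body:

    address_type = property(_get_address_type, _set_address_type)

Reading obj.address_type then dispatches to the getter above, and assignment dispatches to the matching setter shown in the next record.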
Setter method for address_type, mapped from YANG variable /input/LocatorRecord/rloc/address_type (string) | def _set_address_type(self, v, load=False):
try:
t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="address-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)
except (TypeError, ValueError):
raise ValueError("""address_type must be of a type compatible with base=unicode, is_leaf=True, yang_name="address-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True""")
self.__address_type = t
if hasattr(self, '_set'):
self._set() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def address_type(self, address_type):\n\n self._address_type = address_type",
"def address_type(self) -> str:\n return pulumi.get(self, \"address_type\")",
"def _get_address_type(self):\n return self.__address_type",
"def type_address(self, address):\n\n\t\twith allure.step(\"Type payee address\"):\n\t\t\telement = Element(driver=self.driver,\n\t\t\t explicit_wait_time=self.explicit_wait_time,\n\t\t\t locator=BillPayPageLocator.ADDRESS_INPUT)\n\t\t\telement.write(address)\n\t\t\treturn None",
"def get_type(self):\n types = dict(ADDRESS_TYPE_CHOICES)\n return types.get(self.address_type, \"N/A\")",
"def location_type(self, location_type):\n\n self._location_type = location_type",
"def set_type(self, type, asset=None):\n self._set_property('pc:type', type, asset)",
"def _address_type(self, address):\n parsed_type = None\n parsed = urlparse.urlparse(address)\n if parsed.scheme not in ('http', 'https', 'ipc', 'tcp'):\n raise ValueError('Invalid volttron central address.')\n\n return parsed.scheme",
"def set_address(self, address):\n pass",
"def _set_address(self, v, load=False):\n try:\n t = YANGDynClass(v,base=[unicode,unicode,unicode,unicode,unicode,], is_leaf=True, yang_name=\"address\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"address must be of a type compatible with base=[unicode,unicode,unicode,unicode,unicode,], is_leaf=True, yang_name=\"address\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__address = t\n if hasattr(self, '_set'):\n self._set()",
"def address_code(self, address_code):\n if self.local_vars_configuration.client_side_validation and address_code is None: # noqa: E501\n raise ValueError(\"Invalid value for `address_code`, must not be `None`\") # noqa: E501\n if (self.local_vars_configuration.client_side_validation and\n address_code is not None and len(address_code) > 10):\n raise ValueError(\"Invalid value for `address_code`, length must be less than or equal to `10`\") # noqa: E501\n\n self._address_code = address_code",
"def address(self, address: str):\n if address is None:\n raise ValueError(\"Invalid value for `address`, must not be `None`\") # noqa: E501\n\n self._address = address",
"def address_nr(self, address_nr):\n if self.local_vars_configuration.client_side_validation and address_nr is None: # noqa: E501\n raise ValueError(\"Invalid value for `address_nr`, must not be `None`\") # noqa: E501\n if (self.local_vars_configuration.client_side_validation and\n address_nr is not None and len(address_nr) > 10):\n raise ValueError(\"Invalid value for `address_nr`, length must be less than or equal to `10`\") # noqa: E501\n\n self._address_nr = address_nr",
"def AddrType(self) -> AddrTypes:\n return self.m_addr_type",
"def set_type(self, type):\n self._type = type",
"def set_type(self, type):\n self.type = type",
"def set_type(self, type):\n self.type = type",
"def __init__(self, address, type,):\n self.address = address\n self.type = type",
"def type(self, type: str):\n\n self._type = type",
"def address(self, address):\n if address is None:\n raise ValueError(\"Invalid value for `address`, must not be `None`\") # noqa: E501\n\n self._address = address",
"def type(self, type):\n if type is not None and len(type) < 1:\n raise ValueError(\"Invalid value for `type`, length must be greater than or equal to `1`\") # noqa: E501\n\n self._type = type",
"def account_type(self, account_type):\n\n self._account_type = account_type",
"def account_type(self, account_type):\n\n self._account_type = account_type",
"def account_type(self, account_type):\n\n self._account_type = account_type",
"def account_type(self, account_type):\n\n self._account_type = account_type",
"def type(self, type):\n if type is None:\n raise ValueError(\"Invalid value for `type`, must not be `None`\") # noqa: E501\n if type is not None and len(type) < 1:\n raise ValueError(\"Invalid value for `type`, length must be greater than or equal to `1`\") # noqa: E501\n\n self._type = type",
"def type(self, type):\n if type is None:\n raise ValueError(\"Invalid value for `type`, must not be `None`\") # noqa: E501\n\n self._type = type",
"def type(self, type):\n if type is None:\n raise ValueError(\"Invalid value for `type`, must not be `None`\") # noqa: E501\n\n self._type = type",
"def type(self, type):\n if type is None:\n raise ValueError(\"Invalid value for `type`, must not be `None`\") # noqa: E501\n\n self._type = type",
"def address(self, address):\n if self.local_vars_configuration.client_side_validation and address is None: # noqa: E501\n raise ValueError(\"Invalid value for `address`, must not be `None`\") # noqa: E501\n\n self._address = address"
] | [
"0.7591355",
"0.6947403",
"0.631784",
"0.6311072",
"0.580061",
"0.55720127",
"0.5236602",
"0.520395",
"0.5185082",
"0.5172828",
"0.51605636",
"0.5091175",
"0.50769246",
"0.5018744",
"0.50039303",
"0.49844176",
"0.49844176",
"0.49465698",
"0.4937902",
"0.4934913",
"0.49161026",
"0.49069035",
"0.49069035",
"0.49069035",
"0.49069035",
"0.4890311",
"0.48453522",
"0.48453522",
"0.48453522",
"0.48256755"
] | 0.79477996 | 0 |
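Because the setter re-raises coercion failures as ValueError, callers can guard assignments. A hypothetical usage sketch (locator_record is an assumed instantiated binding, not defined in this file):

    try:
        locator_record.rloc.address_type = u"ipv4-address"
    except ValueError as err:
        # Raised when the value cannot be coerced to the unicode leaf type.
        print("rejected address-type: %s" % err)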
Getter method for explicit_locator_path, mapped from YANG variable /input/LocatorRecord/rloc/explicit_locator_path (container) | def _get_explicit_locator_path(self):
return self.__explicit_locator_path | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _set_explicit_locator_path(self, v, load=False):\n try:\n t = YANGDynClass(v,base=yc_explicit_locator_path_pyangbind_example__input_LocatorRecord_rloc_explicit_locator_path, is_container='container', yang_name=\"explicit-locator-path\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"explicit_locator_path must be of a type compatible with base=yc_explicit_locator_path_pyangbind_example__input_LocatorRecord_rloc_explicit_locator_path, is_container='container', yang_name=\"explicit-locator-path\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__explicit_locator_path = t\n if hasattr(self, '_set'):\n self._set()",
"def _get_localLocator(self):\n return self.__localLocator",
"def prepend_as_paths(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"prepend_as_paths\")",
"def prepend_as_paths(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"prepend_as_paths\")",
"def _set_localLocator(self, v, load=False):\n try:\n t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name=\"localLocator\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"localLocator must be of a type compatible with base=YANGBool, is_leaf=True, yang_name=\"localLocator\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__localLocator = t\n if hasattr(self, '_set'):\n self._set()",
"def _get_LocatorRecord(self):\n return self.__LocatorRecord",
"def get_locator(self, path, *args, **kwargs):\n locator = lex_locators\n for key in path.split(\".\"):\n locator = locator[key]\n return locator.format(*args, **kwargs)",
"def prepend_as_paths(self) -> pulumi.Output[Optional[Sequence[str]]]:\n return pulumi.get(self, \"prepend_as_paths\")",
"def record_locator(self):\n return self._record_locator",
"def parse(locator: LocatorType, path: Optional[str] = None) -> Locator:\n if isinstance(locator, Locator):\n return locator\n\n try:\n typename, _, value = str(locator).partition(\":\")\n except ValueError as err:\n raise ValueError(f\"Invalid locator format: {locator}\") from err\n\n if not value:\n typename, value = \"alias\", typename\n\n typename = typename.strip().lower()\n if typename == \"alias\":\n return LocatorsDatabase.load_by_name(_unquote(value), path)\n else:\n klass = TYPES.get(typename)\n if not klass:\n raise ValueError(f\"Unknown locator type: {typename}\")\n\n args = [_unquote(arg) for arg in value.split(\",\")]\n return klass(*args)",
"def _set_LocatorRecord(self, v, load=False):\n try:\n t = YANGDynClass(v,base=YANGListType(\"locator_id\",yc_LocatorRecord_pyangbind_example__input_LocatorRecord, yang_name=\"LocatorRecord\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper), is_container='list', yang_name=\"LocatorRecord\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"LocatorRecord must be of a type compatible with base=YANGListType(\"locator_id\",yc_LocatorRecord_pyangbind_example__input_LocatorRecord, yang_name=\"LocatorRecord\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper), is_container='list', yang_name=\"LocatorRecord\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__LocatorRecord = t\n if hasattr(self, '_set'):\n self._set()",
"def abspath(self, path):\n return DataSource.abspath(self, self._fullpath(path))",
"def get_canonical_path(*args, **kwargs):\n return atable.get_canonical_path(*args, **kwargs)",
"def _set_rloc(self, v, load=False):\n try:\n t = YANGDynClass(v,base=yc_rloc_pyangbind_example__input_LocatorRecord_rloc, is_container='container', yang_name=\"rloc\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"rloc must be of a type compatible with base=yc_rloc_pyangbind_example__input_LocatorRecord_rloc, is_container='container', yang_name=\"rloc\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__rloc = t\n if hasattr(self, '_set'):\n self._set()",
"def aisappium_get_element_location(self, locator, oAppiumInfo=None):\n if oAppiumInfo is not None:\n element = self._element_find_atlas(locator, True, True, oAppiumInfo.driver)\n else:\n element = self._element_find(locator, True, True)\n element_location = element.location\n self._info(\"Element '%s' location: %s \" % (locator, element_location))\n return element_location",
"def explicit_path(cls, ndivsm, kpath_bounds):\n return cls._path(ndivsm, kpath_bounds=kpath_bounds, comment=\"Explicit K-path\")",
"def is_xpath_locator(locator_string: str) -> bool:\n if locator_string.startswith(\"/\"):\n return True\n return False",
"def relative_path(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"relative_path\")",
"def relative_path(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"relative_path\")",
"def relative_path(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"relative_path\")",
"def relative_path(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"relative_path\")",
"def relative_path(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"relative_path\")",
"def _resolve_relative_path(filepath: str):\n if not filepath:\n return None\n\n inf_path = os.path.join(os.path.dirname(__file__), filepath)\n\n return inf_path",
"def get_literal_path(path_or_autoloader):\n\n try:\n return path_or_autoloader.path\n except AttributeError:\n assert type(path_or_autoloader) is str, \"beard_path is not a str or an AutoLoader!\"\n return path_or_autoloader",
"def extern_to_local_path(self, path: PurePath) -> Path:\n return self.path_supervisor / path.relative_to(self.path_extern_supervisor)",
"def resolved(rpath):\r\n return realpath(abspath(rpath))",
"def get_realpath(cls, path_str):\n if path_str.startswith('/'):\n return path_str\n return os.path.abspath(os.path.join(cls.apollo_root, path_str))",
"def _get_locator_id(self):\n return self.__locator_id",
"def testPathToLocator(self, _mock_inside, mock_cwd):\n ws = self.workspace_dir\n mock_cwd.return_value = ws\n\n foo_path = workspace_lib.PathToLocator(os.path.join(ws, 'foo'))\n baz_path = workspace_lib.PathToLocator(os.path.join(ws, 'bar', 'foo',\n 'baz'))\n daisy_path = workspace_lib.PathToLocator(os.path.join(constants.SOURCE_ROOT,\n 'src', 'overlays',\n 'overlay-daisy'))\n some_path = workspace_lib.PathToLocator(os.path.join(constants.SOURCE_ROOT,\n 'srcs', 'bar'))\n\n self.assertEqual('//foo', foo_path)\n self.assertEqual('//bar/foo/baz', baz_path)\n self.assertEqual('board:daisy', daisy_path)\n self.assertEqual(None, some_path)\n\n def assertReversible(loc):\n path = workspace_lib.LocatorToPath(loc)\n self.assertEqual(loc, workspace_lib.PathToLocator(path))\n\n assertReversible('//foo')\n assertReversible('//foo/bar/baz')\n assertReversible('board:gizmo')",
"def local_to_extern_path(self, path: PurePath) -> PurePath:\n return self.path_extern_supervisor / path.relative_to(self.path_supervisor)"
] | [
"0.8274631",
"0.5152143",
"0.4848081",
"0.4848081",
"0.47201043",
"0.4715327",
"0.46240458",
"0.4464271",
"0.43966737",
"0.4369412",
"0.43402007",
"0.43370667",
"0.43309107",
"0.427388",
"0.4268046",
"0.426027",
"0.42431262",
"0.42356735",
"0.42356735",
"0.42356735",
"0.42356735",
"0.42356735",
"0.4235331",
"0.41937745",
"0.4181349",
"0.4176628",
"0.41748852",
"0.4164764",
"0.41508773",
"0.41365004"
] | 0.7231172 | 1 |
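Taken together, the records in this file describe one nested structure: LocatorRecord/rloc/explicit-locator-path/hop[hop-id]/lrs-bits. Assuming pyangbind-style attribute access on an instantiated binding (object names here are hypothetical), walking it would look roughly like:

    elp = locator_record.rloc.explicit_locator_path
    for hop_id in elp.hop:          # the hop list is dict-like, keyed by hop-id
        hop = elp.hop[hop_id]
        print(hop_id, hop.lrs_bits)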
Setter method for explicit_locator_path, mapped from YANG variable /input/LocatorRecord/rloc/explicit_locator_path (container) | def _set_explicit_locator_path(self, v, load=False):
try:
t = YANGDynClass(v,base=yc_explicit_locator_path_pyangbind_example__input_LocatorRecord_rloc_explicit_locator_path, is_container='container', yang_name="explicit-locator-path", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)
except (TypeError, ValueError):
raise ValueError("""explicit_locator_path must be of a type compatible with base=yc_explicit_locator_path_pyangbind_example__input_LocatorRecord_rloc_explicit_locator_path, is_container='container', yang_name="explicit-locator-path", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True""")
self.__explicit_locator_path = t
if hasattr(self, '_set'):
self._set() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _get_explicit_locator_path(self):\n return self.__explicit_locator_path",
"def _set_localLocator(self, v, load=False):\n try:\n t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name=\"localLocator\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"localLocator must be of a type compatible with base=YANGBool, is_leaf=True, yang_name=\"localLocator\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__localLocator = t\n if hasattr(self, '_set'):\n self._set()",
"def prepend_as_paths(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"prepend_as_paths\")",
"def prepend_as_paths(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:\n return pulumi.get(self, \"prepend_as_paths\")",
"def set_explicit(self, explicit: bool) -> None:\n self.explicit = explicit",
"def explicit_path(cls, ndivsm, kpath_bounds):\n return cls._path(ndivsm, kpath_bounds=kpath_bounds, comment=\"Explicit K-path\")",
"def _set_LocatorRecord(self, v, load=False):\n try:\n t = YANGDynClass(v,base=YANGListType(\"locator_id\",yc_LocatorRecord_pyangbind_example__input_LocatorRecord, yang_name=\"LocatorRecord\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper), is_container='list', yang_name=\"LocatorRecord\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"LocatorRecord must be of a type compatible with base=YANGListType(\"locator_id\",yc_LocatorRecord_pyangbind_example__input_LocatorRecord, yang_name=\"LocatorRecord\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper), is_container='list', yang_name=\"LocatorRecord\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__LocatorRecord = t\n if hasattr(self, '_set'):\n self._set()",
"def _init_path(path: Union[str, PurePath]) -> Path:\n if not isinstance(path, Path):\n path = Path(path)\n\n path = path.expanduser()\n return path",
"def _expand_path(self, path):\n return os.path.abspath(os.path.expanduser(path))",
"def get_locator(self, path, *args, **kwargs):\n locator = lex_locators\n for key in path.split(\".\"):\n locator = locator[key]\n return locator.format(*args, **kwargs)",
"def _get_localLocator(self):\n return self.__localLocator",
"def abspath(self, path):\n return DataSource.abspath(self, self._fullpath(path))",
"def extern_to_local_path(self, path: PurePath) -> Path:\n return self.path_supervisor / path.relative_to(self.path_extern_supervisor)",
"def prepend_as_paths(self) -> pulumi.Output[Optional[Sequence[str]]]:\n return pulumi.get(self, \"prepend_as_paths\")",
"def local_to_extern_path(self, path: PurePath) -> PurePath:\n return self.path_extern_supervisor / path.relative_to(self.path_supervisor)",
"def _set_as_path(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"as-path\", rest_name=\"as-path\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"as_path must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"as-path\", rest_name=\"as-path\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__as_path = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_locator_id(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"locator-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"locator_id must be of a type compatible with base=unicode, is_leaf=True, yang_name=\"locator-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True\"\"\")\n self.__locator_id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_rloc(self, v, load=False):\n try:\n t = YANGDynClass(v,base=yc_rloc_pyangbind_example__input_LocatorRecord_rloc, is_container='container', yang_name=\"rloc\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"rloc must be of a type compatible with base=yc_rloc_pyangbind_example__input_LocatorRecord_rloc, is_container='container', yang_name=\"rloc\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__rloc = t\n if hasattr(self, '_set'):\n self._set()",
"def to_rooted_path(self, filepath: Union[Path, PathLike, str]) -> PurePath:\n fp = Path(filepath)\n if not fp.is_absolute():\n fp = Path(self._root, filepath)\n \n return PurePath(fp.absolute())",
"def parse(locator: LocatorType, path: Optional[str] = None) -> Locator:\n if isinstance(locator, Locator):\n return locator\n\n try:\n typename, _, value = str(locator).partition(\":\")\n except ValueError as err:\n raise ValueError(f\"Invalid locator format: {locator}\") from err\n\n if not value:\n typename, value = \"alias\", typename\n\n typename = typename.strip().lower()\n if typename == \"alias\":\n return LocatorsDatabase.load_by_name(_unquote(value), path)\n else:\n klass = TYPES.get(typename)\n if not klass:\n raise ValueError(f\"Unknown locator type: {typename}\")\n\n args = [_unquote(arg) for arg in value.split(\",\")]\n return klass(*args)",
"def prepend_path_env(self, path):\n self._cmd_runner.prepend_to_env_var('PATH', os.path.expanduser(path), sep=os.pathsep)",
"def _init_pure_path(path: Union[str, PurePath]) -> PurePath:\n if not isinstance(path, PurePath):\n path = PurePath(path)\n return path",
"def set_zonepath(self, path):\n self.set_attr(ZONE_ENTRY['ZROOT'], path)",
"def test_set_path_1(self):\n self.file.touch()\n # Since using tempfile, there is an added quirk.\n # the tempfile path may be a symlink, so passing it through set path\n # will resolve the symlink, changing the path, and breaking the test.\n self.file = self.file.resolve()\n output = basic.set_path(self.file, kind=\"file\", expect=True)\n with self.subTest():\n self.assertIsInstance(output, Path)\n with self.subTest():\n self.assertEqual(str(self.file), str(output))",
"def _config_absolute_paths(path):\n\n # Since I am calling the script from elsewhere these must be\n # absolute paths\n prepend = \"rpki.validator.data.path=\"\n replace = \".\"\n # Must remove trailing backslash at the end\n replace_with = RPKI_Validator_Wrapper.rpki_package_path[:-1]\n utils.replace_line(path, prepend, replace, replace_with)\n\n prepend = \"rpki.validator.preconfigured.trust.anchors.directory=\"\n replace = \"./preconfigured-tals\"\n replace_with = (f\"{RPKI_Validator_Wrapper.rpki_package_path}\"\n \"preconfigured-tals\")\n utils.replace_line(path, prepend, replace, replace_with)\n\n prepend = \"rpki.validator.rsync.local.storage.directory=\"\n replace = \"./rsync\"\n replace_with = f\"{RPKI_Validator_Wrapper.rpki_package_path}rsync\"\n utils.replace_line(path, prepend, replace, replace_with)",
"def test_expand_path_3(self):\n partial_path = \"/fake/path\"\n input_path = \".\" + partial_path\n expanded_path = basic.expand_path(input_path)\n local_path = Path(\".\").resolve()\n expected_path = str(local_path) + partial_path\n self.assertEqual(expanded_path, expected_path)",
"def resolved(rpath):\r\n return realpath(abspath(rpath))",
"def fixpaths(d):\n if isinstance(d, dict):\n if \"path\" in d:\n if \":\" not in d[\"path\"]:\n local_path = os.path.normpath(\n os.path.join(os.getcwd(), basedir, d[\"path\"])\n )\n d[\"location\"] = pathname2url(local_path)\n else:\n d[\"location\"] = d[\"path\"]\n del d[\"path\"]",
"def get_canonical_path(*args, **kwargs):\n return atable.get_canonical_path(*args, **kwargs)",
"def constrain_path_relative_to(path):\n environ_backup = os.environ\n environ = os.environ\n\n if path:\n environ = os.environ.copy()\n environ[\"PATH\"] = path\n\n os.environ = environ\n\n try:\n yield\n finally:\n os.environ = environ_backup"
] | [
"0.69107413",
"0.49755952",
"0.47283068",
"0.47283068",
"0.45995176",
"0.45432085",
"0.4512959",
"0.44011647",
"0.43912807",
"0.43638387",
"0.43551135",
"0.43550828",
"0.43502715",
"0.43456292",
"0.43268922",
"0.4310026",
"0.4305122",
"0.42458752",
"0.41864803",
"0.41821715",
"0.41566142",
"0.4106371",
"0.41000855",
"0.40927064",
"0.405667",
"0.40565273",
"0.40497914",
"0.40473616",
"0.40457216",
"0.40434673"
] | 0.88693 | 0 |
Getter method for locator_id, mapped from YANG variable /input/LocatorRecord/locator_id (string) | def _get_locator_id(self):
return self.__locator_id | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _set_locator_id(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"locator-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"locator_id must be of a type compatible with base=unicode, is_leaf=True, yang_name=\"locator-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True\"\"\")\n self.__locator_id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_LocatorRecord(self, v, load=False):\n try:\n t = YANGDynClass(v,base=YANGListType(\"locator_id\",yc_LocatorRecord_pyangbind_example__input_LocatorRecord, yang_name=\"LocatorRecord\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper), is_container='list', yang_name=\"LocatorRecord\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"LocatorRecord must be of a type compatible with base=YANGListType(\"locator_id\",yc_LocatorRecord_pyangbind_example__input_LocatorRecord, yang_name=\"LocatorRecord\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper), is_container='list', yang_name=\"LocatorRecord\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__LocatorRecord = t\n if hasattr(self, '_set'):\n self._set()",
"def _get_LocatorRecord(self):\n return self.__LocatorRecord",
"def get_current_record_id(self):\n url = self.selenium.get_location()\n for part in url.split(\"/\"):\n oid_match = re.match(OID_REGEX, part)\n if oid_match is not None:\n return oid_match.group(2)\n raise AssertionError(\"Could not parse record id from url: {}\".format(url))",
"def find_id(self):\n\t\tx , y = self.id.split(':')\n\t\treturn int(x), int(y)",
"def identifier(self):\n return location_id(self.__dict__)",
"def record_locator(self):\n return self._record_locator",
"def record_locator(self, record_locator):\n if record_locator is None:\n raise ValueError(\"Invalid value for `record_locator`, must not be `None`\")\n\n self._record_locator = record_locator",
"def identifier(self):\n return self.element.xpath('./@Id')",
"def id_getter(self):\n return self._id",
"def getId(self):\n return self.identifier",
"def get_id(self):\n if self.is_root():\n return self.id_field.name\n elif self.has_id():\n return getattr(self, self.id_field)\n elif self.has_cid():\n return self.cid\n else:\n raise Exception(\"No id-like value set when get_id() called.\")",
"def LineNumber(self):\n ret = libxml2mod.xmlTextReaderLocatorLineNumber(self._o)\n return ret",
"def note_id(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"note_id\")",
"def id(self) -> Optional[str]:\n return self.elem.get('id')",
"def location_id(location_dict):\n d = location_dict\n iden = \"%s|%s|%s|%s|%s|%s\" % \\\n (d['line1'], d['line2'], d['line3'], d['city'], d['state'],\n d['postal_code'])\n if d['bbox_width'] and d['bbox_height']:\n iden += \"|%r|%r\" % (d['bbox_width'], d['bbox_height'])\n\n return iden.lower()",
"def id(self) -> str:\n return self.properties[DBUS_ATTR_ID]",
"def get_id_number(self) -> str:\n return self.id_number",
"def id(self) -> str:\n return pulumi.get(self, \"id\")",
"def id(self) -> str:\n return pulumi.get(self, \"id\")",
"def id(self) -> str:\n return pulumi.get(self, \"id\")",
"def id(self) -> str:\n return pulumi.get(self, \"id\")",
"def id(self) -> str:\n return pulumi.get(self, \"id\")",
"def id(self) -> str:\n return pulumi.get(self, \"id\")",
"def id(self) -> str:\n return pulumi.get(self, \"id\")",
"def id(self) -> str:\n return pulumi.get(self, \"id\")",
"def id(self) -> str:\n return pulumi.get(self, \"id\")",
"def id(self) -> str:\n return pulumi.get(self, \"id\")",
"def id(self) -> str:\n return pulumi.get(self, \"id\")",
"def id(self) -> str:\n return pulumi.get(self, \"id\")"
] | [
"0.7350876",
"0.60492724",
"0.55044645",
"0.5292573",
"0.5196207",
"0.5154262",
"0.5143212",
"0.510555",
"0.510013",
"0.5021925",
"0.49630928",
"0.49584314",
"0.49514553",
"0.48948523",
"0.4884165",
"0.4883817",
"0.48638391",
"0.48410615",
"0.48236477",
"0.48236477",
"0.48236477",
"0.48236477",
"0.48236477",
"0.48236477",
"0.48236477",
"0.48236477",
"0.48236477",
"0.48236477",
"0.48236477",
"0.48236477"
] | 0.678453 | 1 |
Setter method for locator_id, mapped from YANG variable /input/LocatorRecord/locator_id (string) | def _set_locator_id(self, v, load=False):
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
try:
t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="locator-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True)
except (TypeError, ValueError):
raise ValueError("""locator_id must be of a type compatible with base=unicode, is_leaf=True, yang_name="locator-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True""")
self.__locator_id = t
if hasattr(self, '_set'):
self._set() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _set_LocatorRecord(self, v, load=False):\n try:\n t = YANGDynClass(v,base=YANGListType(\"locator_id\",yc_LocatorRecord_pyangbind_example__input_LocatorRecord, yang_name=\"LocatorRecord\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper), is_container='list', yang_name=\"LocatorRecord\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"LocatorRecord must be of a type compatible with base=YANGListType(\"locator_id\",yc_LocatorRecord_pyangbind_example__input_LocatorRecord, yang_name=\"LocatorRecord\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper), is_container='list', yang_name=\"LocatorRecord\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__LocatorRecord = t\n if hasattr(self, '_set'):\n self._set()",
"def _get_locator_id(self):\n return self.__locator_id",
"def record_locator(self, record_locator):\n if record_locator is None:\n raise ValueError(\"Invalid value for `record_locator`, must not be `None`\")\n\n self._record_locator = record_locator",
"def set_id(self, value: str) -> None:\n if not isinstance(value, str):\n raise TypeError('id must be a string, not {0}'.format(type(value)))\n self._id = value",
"def _set_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=True)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def set_id(self, refobj, identifier):\n cmds.setAttr(\"%s.identifier\" %refobj, identifier)",
"def lm_locator(self, lm_locator):\n\n self._lm_locator = lm_locator",
"def set_id(self, id_=None):\n if id_ is None:\n self.id = id(self)\n else:\n self.id = id_",
"def id(self, value: str):\n self._id = value",
"def id(self, id: \"str\"):\n self._attrs[\"id\"] = id",
"def _set_id(self, value):\n pass",
"def _set_id(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='../config/id', caller=self._path() + ['id'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='../config/id', caller=self._path() + ['id'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='../config/id', caller=self._path() + ['id'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='../config/id', caller=self._path() + ['id'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='../config/id', caller=self._path() + ['id'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='../config/id', caller=self._path() + ['id'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='../config/id', caller=self._path() + ['id'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='../config/id', caller=self._path() + ['id'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='../config/id', caller=self._path() + ['id'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='../config/id', caller=self._path() + ['id'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='../config/id', caller=self._path() + ['id'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='../config/id', caller=self._path() + ['id'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='../config/id', caller=self._path() + ['id'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='../config/id', caller=self._path() + ['id'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_id(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=ReferenceType(referenced_path='../config/id', caller=self._path() + ['id'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"id must be of a type compatible with leafref\"\"\",\n 'defined-type': \"leafref\",\n 'generated-type': \"\"\"YANGDynClass(base=ReferenceType(referenced_path='../config/id', caller=self._path() + ['id'], path_helper=self._path_helper, require_instance=True), is_leaf=True, yang_name=\"id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='leafref', is_config=True)\"\"\",\n })\n\n self.__id = t\n if hasattr(self, '_set'):\n self._set()"
] | [
"0.63118845",
"0.6116834",
"0.541008",
"0.5226008",
"0.52146155",
"0.52146155",
"0.52146155",
"0.52146155",
"0.52146155",
"0.52146155",
"0.5208086",
"0.5208086",
"0.5208086",
"0.5208086",
"0.5208086",
"0.5208086",
"0.5152715",
"0.5113063",
"0.5104444",
"0.51016283",
"0.50507605",
"0.50302744",
"0.49846444",
"0.49846444",
"0.49846444",
"0.49846444",
"0.49846444",
"0.49846444",
"0.49846444",
"0.49846444"
] | 0.8190384 | 0 |
Setter method for priority, mapped from YANG variable /input/LocatorRecord/priority (uint8) | def _set_priority(self, v, load=False):
try:
t = YANGDynClass(v,base=np.uint8, is_leaf=True, yang_name="priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)
except (TypeError, ValueError):
raise ValueError("""priority must be of a type compatible with base=np.uint8, is_leaf=True, yang_name="priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True""")
self.__priority = t
if hasattr(self, '_set'):
self._set() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def set_priority(self, priority):\n self._priority = priority",
"def set_priority(self, priority):\n self.options[\"priority\"] = priority",
"def _set_lsp_config_frr_setup_priority(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"lsp-config-frr-setup-priority\", rest_name=\"lsp-config-frr-setup-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint8', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"lsp_config_frr_setup_priority must be of a type compatible with uint8\"\"\",\n 'defined-type': \"uint8\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"lsp-config-frr-setup-priority\", rest_name=\"lsp-config-frr-setup-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint8', is_config=True)\"\"\",\n })\n\n self.__lsp_config_frr_setup_priority = t\n if hasattr(self, '_set'):\n self._set()",
"def set_priority(self, priority):\n self.options['priority'] = priority",
"def priority(self, priority):\n self._priority = priority",
"def _set_priority(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type=\"dict_key\", restriction_arg={u'STRICT': {}},), is_leaf=True, yang_name=\"priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='enumeration', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"priority must be of a type compatible with enumeration\"\"\",\n 'defined-type': \"openconfig-qos:enumeration\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type=\"dict_key\", restriction_arg={u'STRICT': {}},), is_leaf=True, yang_name=\"priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='enumeration', is_config=True)\"\"\",\n })\n\n self.__priority = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_priority(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type=\"dict_key\", restriction_arg={u'STRICT': {}},), is_leaf=True, yang_name=\"priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='enumeration', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"priority must be of a type compatible with enumeration\"\"\",\n 'defined-type': \"openconfig-qos:enumeration\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type=\"dict_key\", restriction_arg={u'STRICT': {}},), is_leaf=True, yang_name=\"priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='enumeration', is_config=True)\"\"\",\n })\n\n self.__priority = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_priority(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type=\"dict_key\", restriction_arg={u'STRICT': {}},), is_leaf=True, yang_name=\"priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='enumeration', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"priority must be of a type compatible with enumeration\"\"\",\n 'defined-type': \"openconfig-qos:enumeration\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type=\"dict_key\", restriction_arg={u'STRICT': {}},), is_leaf=True, yang_name=\"priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='enumeration', is_config=True)\"\"\",\n })\n\n self.__priority = t\n if hasattr(self, '_set'):\n self._set()",
"def setPriority(self, p):\n self.priority = p",
"def _set_fabric_priority(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"fabric-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint8', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"fabric_priority must be of a type compatible with uint8\"\"\",\n 'defined-type': \"uint8\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"fabric-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint8', is_config=False)\"\"\",\n })\n\n self.__fabric_priority = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_fabric_priority(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"fabric-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint8', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"fabric_priority must be of a type compatible with uint8\"\"\",\n 'defined-type': \"uint8\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"fabric-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint8', is_config=False)\"\"\",\n })\n\n self.__fabric_priority = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_fabric_priority(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"fabric-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint8', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"fabric_priority must be of a type compatible with uint8\"\"\",\n 'defined-type': \"uint8\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"fabric-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint8', is_config=False)\"\"\",\n })\n\n self.__fabric_priority = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_fabric_priority(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"fabric-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint8', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"fabric_priority must be of a type compatible with uint8\"\"\",\n 'defined-type': \"uint8\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"fabric-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint8', is_config=True)\"\"\",\n })\n\n self.__fabric_priority = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_fabric_priority(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"fabric-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint8', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"fabric_priority must be of a type compatible with uint8\"\"\",\n 'defined-type': \"uint8\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"fabric-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint8', is_config=True)\"\"\",\n })\n\n self.__fabric_priority = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_fabric_priority(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"fabric-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint8', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"fabric_priority must be of a type compatible with uint8\"\"\",\n 'defined-type': \"uint8\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"fabric-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint8', is_config=True)\"\"\",\n })\n\n self.__fabric_priority = t\n if hasattr(self, '_set'):\n self._set()",
"def priority(self, priority):\n\n self._priority = priority",
"def priority(self, priority):\n\n self._priority = priority",
"def priority(self, priority):\n\n self._priority = priority",
"def _set_priority(self, args):\n if 'priority' in args:\n try:\n self._priority = float(args['priority'])\n except TypeError:\n raise InvalidPriority('Invalid priority: %s' % args['priority'])\n except ValueError:\n raise InvalidPriority()\n else:\n self._priority = None",
"def setpriority(self, pid=None, priority=5):\n\t \n\t import win32api,win32process,win32con\n\t \n\t priorityclasses = [win32process.IDLE_PRIORITY_CLASS,\n\t win32process.BELOW_NORMAL_PRIORITY_CLASS,\n\t win32process.NORMAL_PRIORITY_CLASS,\n\t win32process.ABOVE_NORMAL_PRIORITY_CLASS,\n\t win32process.HIGH_PRIORITY_CLASS,\n\t win32process.REALTIME_PRIORITY_CLASS]\n\t if pid == None:\n\t pid = win32api.GetCurrentProcessId()\n\t handle = win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS, True, pid)\n\t win32process.SetPriorityClass(handle, priorityclasses[priority])",
"def priority(self) -> int:\n return pulumi.get(self, \"priority\")",
"def set_priority(priority=2, pid=None):\n print \"TODO: add os independent support\"\n priorityclasses = [win32process.IDLE_PRIORITY_CLASS,\n win32process.BELOW_NORMAL_PRIORITY_CLASS,\n win32process.NORMAL_PRIORITY_CLASS,\n win32process.ABOVE_NORMAL_PRIORITY_CLASS,\n win32process.HIGH_PRIORITY_CLASS,\n win32process.REALTIME_PRIORITY_CLASS]\n if pid == None:\n pid = win32api.GetCurrentProcessId()\n handle = win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS, True, pid)\n win32process.SetPriorityClass(handle, priorityclasses[priority])",
"def set_thread_priority(self, priority: \"int\") -> \"int\":\n return _beamforming_swig.beamformer_sptr_set_thread_priority(self, priority)",
"def _set_lsp_config_frr_holding_priority(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"lsp-config-frr-holding-priority\", rest_name=\"lsp-config-frr-holding-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint8', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"lsp_config_frr_holding_priority must be of a type compatible with uint8\"\"\",\n 'defined-type': \"uint8\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"lsp-config-frr-holding-priority\", rest_name=\"lsp-config-frr-holding-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint8', is_config=True)\"\"\",\n })\n\n self.__lsp_config_frr_holding_priority = t\n if hasattr(self, '_set'):\n self._set()",
"def set_thread_priority(self, priority: \"int\") -> \"int\":\n return _beamforming_swig.doaesprit_sptr_set_thread_priority(self, priority)",
"def priority(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"priority\")",
"def priority(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"priority\")",
"def priority(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"priority\")",
"def priority(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"priority\")",
"def SetPriorityValue(self, *args, **kwargs):\n pass"
] | [
"0.6697263",
"0.661856",
"0.6600593",
"0.65722907",
"0.65712124",
"0.65397406",
"0.65397406",
"0.65397406",
"0.6527119",
"0.6515176",
"0.6515176",
"0.6515176",
"0.65130204",
"0.65130204",
"0.65130204",
"0.64481825",
"0.64481825",
"0.64481825",
"0.6142723",
"0.60599023",
"0.6032981",
"0.603021",
"0.58972526",
"0.58926666",
"0.5842866",
"0.580479",
"0.580479",
"0.580479",
"0.580479",
"0.5802196"
] | 0.74554783 | 0 |
Setter method for weight, mapped from YANG variable /input/LocatorRecord/weight (uint8) | def _set_weight(self, v, load=False):
try:
t = YANGDynClass(v,base=np.uint8, is_leaf=True, yang_name="weight", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)
except (TypeError, ValueError):
raise ValueError("""weight must be of a type compatible with base=np.uint8, is_leaf=True, yang_name="weight", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True""")
self.__weight = t
if hasattr(self, '_set'):
self._set() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _set_weight(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['-2147483648..2147483647']}, int_size=32), is_leaf=True, yang_name=\"weight\", rest_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='int32', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"weight must be of a type compatible with int32\"\"\",\n 'defined-type': \"int32\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['-2147483648..2147483647']}, int_size=32), is_leaf=True, yang_name=\"weight\", rest_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='int32', is_config=False)\"\"\",\n })\n\n self.__weight = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_weight(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"weight must be of a type compatible with uint64\"\"\",\n 'defined-type': \"uint64\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=False)\"\"\",\n })\n\n self.__weight = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_weight(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"weight must be of a type compatible with uint64\"\"\",\n 'defined-type': \"uint64\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=False)\"\"\",\n })\n\n self.__weight = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_weight(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"weight must be of a type compatible with uint64\"\"\",\n 'defined-type': \"uint64\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=False)\"\"\",\n })\n\n self.__weight = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_weight(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"weight must be of a type compatible with uint64\"\"\",\n 'defined-type': \"uint64\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=True)\"\"\",\n })\n\n self.__weight = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_weight(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"weight must be of a type compatible with uint64\"\"\",\n 'defined-type': \"uint64\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=True)\"\"\",\n })\n\n self.__weight = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_weight(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"weight must be of a type compatible with uint64\"\"\",\n 'defined-type': \"uint64\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=True)\"\"\",\n })\n\n self.__weight = t\n if hasattr(self, '_set'):\n self._set()",
"def set_weight(self, weight):\n self.weight = weight # overwrite the existing weight with the input weight value",
"def weight(self, weight):\n\n self._weight = weight",
"def weight(self, weight):\n\n self._weight = weight",
"def weight(self, weight):\n\n self._weight = weight",
"def set_weight(self, weight, layer_name, tag):\n if tag != 'bias' and tag != 'wmat':\n raise Exception('tag must be bias or wmat')\n cxnlib.CXNNetSetWeight(self.handle,\n weight.ctypes.data_as(ctypes.POINTER(ctypes.c_float)),\n weight.size,\n ctypes.c_char_p(layer_name.encode('utf-8')),\n ctypes.c_char_p(tag.encode('utf-8')))",
"def setWeight(self, w):\n self._W = w",
"def _weight_changed(self, value):\r\n # update internal data\r\n self._weight = value",
"def change_weight(self, new_weight):\r\n self.old_weight = self.weight\r\n self.weight = new_weight",
"def get_weight(self):\r\n weight = self.weight\r\n if weight is None:\r\n weight = 1\r\n return weight",
"def change_weight(self, new_weight_arr):\n self.weights = new_weight_arr",
"def get_weight(self):\n return self.weight # return the weight value",
"def weight(self) -> pulumi.Input[int]:\n return pulumi.get(self, \"weight\")",
"def weight(self) -> int:\n return pulumi.get(self, \"weight\")",
"def weight(self) -> Optional[int]:\n return pulumi.get(self, \"weight\")",
"def set_weights(self, weights):\r\n self.weights = weights",
"def weight(self, modifier=None):\n if modifier is None:\n return self._weight\n elif \"Down\" in modifier and modifier not in self._modifiers:\n return self._weight / self._modifiers[modifier.replace(\"Down\", \"Up\")]\n return self._weight * self._modifiers[modifier]",
"def _set_multicastWeight(self, v, load=False):\n try:\n t = YANGDynClass(v,base=np.uint8, is_leaf=True, yang_name=\"multicastWeight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"multicastWeight must be of a type compatible with base=np.uint8, is_leaf=True, yang_name=\"multicastWeight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__multicastWeight = t\n if hasattr(self, '_set'):\n self._set()",
"def setupWeightComputation(self, file, weight, p, wordsize, ignoreMSBs = 0):\n file.write(\"weight: BITVECTOR(16);\\n\")\n file.write(self.getWeightString(p, wordsize, ignoreMSBs) + \"\\n\")\n file.write(\"ASSERT(weight = {0:#018b});\\n\".format(weight))\n return",
"def weight(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"weight\")",
"def weight(self):\n return self._weight",
"def weight(self):\n return self._weight",
"def weight(self):\n return self._weight",
"def weight(self):\r\n return self._weight"
] | [
"0.7173742",
"0.71261907",
"0.71261907",
"0.71261907",
"0.7124982",
"0.7124982",
"0.7124982",
"0.6930075",
"0.6854091",
"0.6854091",
"0.6854091",
"0.654635",
"0.65132165",
"0.6213096",
"0.60744756",
"0.60718274",
"0.606918",
"0.6024914",
"0.6020157",
"0.5972273",
"0.5965551",
"0.59517145",
"0.59095156",
"0.5903665",
"0.5893925",
"0.58681595",
"0.5829951",
"0.5829951",
"0.5829951",
"0.58172184"
] | 0.774903 | 0 |
Getter method for multicastPriority, mapped from YANG variable /input/LocatorRecord/multicastPriority (uint8) | def _get_multicastPriority(self):
return self.__multicastPriority | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _set_multicastPriority(self, v, load=False):\n try:\n t = YANGDynClass(v,base=np.uint8, is_leaf=True, yang_name=\"multicastPriority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"multicastPriority must be of a type compatible with base=np.uint8, is_leaf=True, yang_name=\"multicastPriority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__multicastPriority = t\n if hasattr(self, '_set'):\n self._set()",
"def multicast_vlan(self):\n if self.segment.multicast_vlan_policy == \"d\":\n return None\n elif self.segment.multicast_vlan_policy == \"e\":\n return self.segment.multicast_vlan\n else:\n return self.segment.profile.multicast_vlan",
"def _set_multicastWeight(self, v, load=False):\n try:\n t = YANGDynClass(v,base=np.uint8, is_leaf=True, yang_name=\"multicastWeight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"multicastWeight must be of a type compatible with base=np.uint8, is_leaf=True, yang_name=\"multicastWeight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__multicastWeight = t\n if hasattr(self, '_set'):\n self._set()",
"def multicast_address(self):\n\n return self.config.dict[\"ssdp\"][\"multicast_address\"]",
"def _get_multicastWeight(self):\n return self.__multicastWeight",
"def priority(self):\n # type: () -> GroupPriority\n if self._priority is not None:\n return self._priority\n if self.group_name == get_constant(\"MAGPIE_ANONYMOUS_GROUP\"):\n self._priority = -1 # lowest of all for *special* public group\n elif self.group_name == get_constant(\"MAGPIE_ADMIN_GROUP\"):\n self._priority = math.inf # everything will be lower than admins\n else:\n self._priority = 0 # nothing can be lower/equal to anonymous, equal for any *generic* group\n return self._priority",
"def fetch_operators_priority(self, operator):\n priority = self.operators_dict[operator]['priority']\n return priority",
"def vm_priority(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"vm_priority\")",
"def get_priority(self):\n return self._priority",
"def get_priority(self):\n return self._priority",
"def _get_priority(self):\n return self.__priority",
"def _get_priority(self):\n return self.__priority",
"def _get_priority(self):\n return self.__priority",
"def _get_priority(self):\n return self.__priority",
"def priority_speaker(_) -> int:\n return 1 << 8",
"def priority_speaker(_) -> int:\n return 1 << 8",
"def multicast_address_tuple(self):\n\n return (self.multicast_address, int(self.port))",
"def VlanPriority(self):\n if self.force_auto_sync:\n self.get('VlanPriority')\n return self._VlanPriority",
"def getpriority(self, name):\n\t\tif name not in self:\n\t\t\treturn None\n\t\treturn self.attributes[name].priority",
"def priority(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"priority\")",
"def priority(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"priority\")",
"def priority(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"priority\")",
"def priority(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"priority\")",
"def _set_priority(self, v, load=False):\n try:\n t = YANGDynClass(v,base=np.uint8, is_leaf=True, yang_name=\"priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"priority must be of a type compatible with base=np.uint8, is_leaf=True, yang_name=\"priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__priority = t\n if hasattr(self, '_set'):\n self._set()",
"def get_foreground_priority(self):\n\t\treturn call_sdk_function('PrlVmCfg_GetForegroundPriority', self.handle)",
"def priority(self):\n return self._priority",
"def priority(self):\n return self._priority",
"def priority(self):\n return self._priority",
"def getPriority(self):\n return self.priority",
"def priority(self) -> int:\n return pulumi.get(self, \"priority\")"
] | [
"0.74490094",
"0.5090839",
"0.50058645",
"0.489588",
"0.4887395",
"0.47485903",
"0.4735448",
"0.46581355",
"0.46484846",
"0.46484846",
"0.4638555",
"0.4638555",
"0.4638555",
"0.4638555",
"0.46264255",
"0.46264255",
"0.4592114",
"0.45748588",
"0.45698082",
"0.45512205",
"0.45512205",
"0.45512205",
"0.45512205",
"0.45462012",
"0.4539503",
"0.4526068",
"0.4526068",
"0.4526068",
"0.45154983",
"0.44928893"
] | 0.7061607 | 1 |
Setter method for multicastPriority, mapped from YANG variable /input/LocatorRecord/multicastPriority (uint8) | def _set_multicastPriority(self, v, load=False):
try:
t = YANGDynClass(v,base=np.uint8, is_leaf=True, yang_name="multicastPriority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)
except (TypeError, ValueError):
raise ValueError("""multicastPriority must be of a type compatible with base=np.uint8, is_leaf=True, yang_name="multicastPriority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True""")
self.__multicastPriority = t
if hasattr(self, '_set'):
self._set() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _get_multicastPriority(self):\n return self.__multicastPriority",
"def _set_multicastWeight(self, v, load=False):\n try:\n t = YANGDynClass(v,base=np.uint8, is_leaf=True, yang_name=\"multicastWeight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"multicastWeight must be of a type compatible with base=np.uint8, is_leaf=True, yang_name=\"multicastWeight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__multicastWeight = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_priority(self, v, load=False):\n try:\n t = YANGDynClass(v,base=np.uint8, is_leaf=True, yang_name=\"priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"priority must be of a type compatible with base=np.uint8, is_leaf=True, yang_name=\"priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__priority = t\n if hasattr(self, '_set'):\n self._set()",
"def set_foreground_priority(self, nVmForegroundPriority):\n\t\tcall_sdk_function('PrlVmCfg_SetForegroundPriority', self.handle, nVmForegroundPriority)",
"def multicast_vlan(self):\n if self.segment.multicast_vlan_policy == \"d\":\n return None\n elif self.segment.multicast_vlan_policy == \"e\":\n return self.segment.multicast_vlan\n else:\n return self.segment.profile.multicast_vlan",
"def _set_fabric_priority(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"fabric-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint8', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"fabric_priority must be of a type compatible with uint8\"\"\",\n 'defined-type': \"uint8\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"fabric-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint8', is_config=False)\"\"\",\n })\n\n self.__fabric_priority = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_fabric_priority(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"fabric-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint8', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"fabric_priority must be of a type compatible with uint8\"\"\",\n 'defined-type': \"uint8\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"fabric-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint8', is_config=False)\"\"\",\n })\n\n self.__fabric_priority = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_fabric_priority(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"fabric-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint8', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"fabric_priority must be of a type compatible with uint8\"\"\",\n 'defined-type': \"uint8\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"fabric-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint8', is_config=False)\"\"\",\n })\n\n self.__fabric_priority = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_fabric_priority(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"fabric-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint8', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"fabric_priority must be of a type compatible with uint8\"\"\",\n 'defined-type': \"uint8\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"fabric-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint8', is_config=True)\"\"\",\n })\n\n self.__fabric_priority = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_fabric_priority(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"fabric-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint8', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"fabric_priority must be of a type compatible with uint8\"\"\",\n 'defined-type': \"uint8\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"fabric-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint8', is_config=True)\"\"\",\n })\n\n self.__fabric_priority = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_fabric_priority(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"fabric-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint8', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"fabric_priority must be of a type compatible with uint8\"\"\",\n 'defined-type': \"uint8\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"fabric-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint8', is_config=True)\"\"\",\n })\n\n self.__fabric_priority = t\n if hasattr(self, '_set'):\n self._set()",
"def multicast_address(self):\n\n return self.config.dict[\"ssdp\"][\"multicast_address\"]",
"def _set_lsp_config_frr_setup_priority(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"lsp-config-frr-setup-priority\", rest_name=\"lsp-config-frr-setup-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint8', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"lsp_config_frr_setup_priority must be of a type compatible with uint8\"\"\",\n 'defined-type': \"uint8\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), is_leaf=True, yang_name=\"lsp-config-frr-setup-priority\", rest_name=\"lsp-config-frr-setup-priority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='uint8', is_config=True)\"\"\",\n })\n\n self.__lsp_config_frr_setup_priority = t\n if hasattr(self, '_set'):\n self._set()",
"def add_multicast_routing_entry(self, multicast_routing_entry):\n routing_entry_key = multicast_routing_entry.routing_entry_key\n mask = multicast_routing_entry.mask\n\n tuple_key = (routing_entry_key, mask)\n if tuple_key in self._entries_by_key_mask:\n # Only fail if they don't go to the same place\n if self._entries_by_key_mask[tuple_key] == multicast_routing_entry:\n return\n raise PacmanAlreadyExistsException(\n f\"Multicast_routing_entry {tuple_key}: \"\n f\"{self._entries_by_key_mask[tuple_key]} on \"\n f\"{self._x}, {self._y}\",\n str(multicast_routing_entry))\n\n self._entries_by_key_mask[tuple_key] = multicast_routing_entry\n\n # update default routed counter if required\n if multicast_routing_entry.defaultable:\n self._number_of_defaulted_routing_entries += 1",
"def priority(self, priority):\n self._priority = priority",
"def SetPriority(self, priority=1, interruptMenuAfter=3, timeoutAfter=2):\n self.ListenToMenu(interruptMenuAfter) # listen to 'To sent with normal priority...'\n self.SipPhone.SendDTMF(str(priority))\n self.ListenToMenu(timeoutAfter) # listen to 'Message Sent'\n mailbox = self.getMailBoxDN()\n mailbox.SetPriority(int(priority))\n time.sleep(1)\n for owner in mailbox.owners:\n owner.CheckMWI()",
"def setPriority(self, p):\n self.priority = p",
"def set_priority(self, priority):\n self._priority = priority",
"def translate_control_packet(self, multicast_packet):",
"def priority(self, priority):\n\n self._priority = priority",
"def priority(self, priority):\n\n self._priority = priority",
"def priority(self, priority):\n\n self._priority = priority",
"def set_io_priority(self, nVmIoPriority):\n\t\tcall_sdk_function('PrlVmCfg_SetIoPriority', self.handle, nVmIoPriority)",
"def set_priority(self, priority):\n self.options[\"priority\"] = priority",
"def priority(self):\n # type: () -> GroupPriority\n if self._priority is not None:\n return self._priority\n if self.group_name == get_constant(\"MAGPIE_ANONYMOUS_GROUP\"):\n self._priority = -1 # lowest of all for *special* public group\n elif self.group_name == get_constant(\"MAGPIE_ADMIN_GROUP\"):\n self._priority = math.inf # everything will be lower than admins\n else:\n self._priority = 0 # nothing can be lower/equal to anonymous, equal for any *generic* group\n return self._priority",
"def set_priority(self, priority):\n self.options['priority'] = priority",
"def _get_multicastWeight(self):\n return self.__multicastWeight",
"def set_background_priority(self, nVmBackgroundPriority):\n\t\tcall_sdk_function('PrlVmCfg_SetBackgroundPriority', self.handle, nVmBackgroundPriority)",
"def set_thread_priority(self, priority):\n return _spacegrant_swig.ax25_udp_pdu_receiver_sptr_set_thread_priority(self, priority)",
"def associate_multicast_group(self) -> Optional[str]:\n return pulumi.get(self, \"associate_multicast_group\")"
] | [
"0.66138566",
"0.58428895",
"0.53215593",
"0.49812725",
"0.49296415",
"0.4885944",
"0.4885944",
"0.4885944",
"0.48854005",
"0.48854005",
"0.48854005",
"0.48235166",
"0.47812468",
"0.47345918",
"0.4722102",
"0.4720543",
"0.47152364",
"0.4700948",
"0.46751964",
"0.46575275",
"0.46575275",
"0.46575275",
"0.46102384",
"0.45321447",
"0.45159498",
"0.45088404",
"0.44997463",
"0.44890767",
"0.4481748",
"0.4480032"
] | 0.84143496 | 0 |
Getter method for multicastWeight, mapped from YANG variable /input/LocatorRecord/multicastWeight (uint8) | def _get_multicastWeight(self):
return self.__multicastWeight | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _set_multicastWeight(self, v, load=False):\n try:\n t = YANGDynClass(v,base=np.uint8, is_leaf=True, yang_name=\"multicastWeight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"multicastWeight must be of a type compatible with base=np.uint8, is_leaf=True, yang_name=\"multicastWeight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__multicastWeight = t\n if hasattr(self, '_set'):\n self._set()",
"def weight(self, modifier=None):\n if modifier is None:\n return self._weight\n elif \"Down\" in modifier and modifier not in self._modifiers:\n return self._weight / self._modifiers[modifier.replace(\"Down\", \"Up\")]\n return self._weight * self._modifiers[modifier]",
"def get_weight(self,neighbor):\n \n return self.adjacent[neighbor]",
"def _set_weight(self, v, load=False):\n try:\n t = YANGDynClass(v,base=np.uint8, is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"weight must be of a type compatible with base=np.uint8, is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__weight = t\n if hasattr(self, '_set'):\n self._set()",
"def _get_multicastPriority(self):\n return self.__multicastPriority",
"def get_weight(self):\n return self.weight # return the weight value",
"def get_weight(self):\r\n weight = self.weight\r\n if weight is None:\r\n weight = 1\r\n return weight",
"def _set_weight(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"weight must be of a type compatible with uint64\"\"\",\n 'defined-type': \"uint64\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=False)\"\"\",\n })\n\n self.__weight = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_weight(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"weight must be of a type compatible with uint64\"\"\",\n 'defined-type': \"uint64\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=False)\"\"\",\n })\n\n self.__weight = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_weight(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"weight must be of a type compatible with uint64\"\"\",\n 'defined-type': \"uint64\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=False)\"\"\",\n })\n\n self.__weight = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_weight(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"weight must be of a type compatible with uint64\"\"\",\n 'defined-type': \"uint64\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=True)\"\"\",\n })\n\n self.__weight = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_weight(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"weight must be of a type compatible with uint64\"\"\",\n 'defined-type': \"uint64\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=True)\"\"\",\n })\n\n self.__weight = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_weight(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"weight must be of a type compatible with uint64\"\"\",\n 'defined-type': \"uint64\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=True)\"\"\",\n })\n\n self.__weight = t\n if hasattr(self, '_set'):\n self._set()",
"def get_weight(self, a, b):\n return self.matrix[b][a]",
"def get_weight(self, layer_name, tag):\n\n if tag != 'bias' and tag != 'wmat':\n raise Exception('tag must be bias or wmat')\n oshape = (ctypes.c_uint * 4)()\n odim = ctypes.c_uint()\n ret = cxnlib.CXNNetGetWeight(self.handle,\n ctypes.c_char_p(layer_name.encode('utf-8')),\n ctypes.c_char_p(tag.encode('utf-8')),\n oshape, ctypes.byref(odim))\n if odim.value == 0 or ret is None:\n return None\n return ctypes2numpyT(ret, [oshape[i] for i in range(odim.value)], 'float32')",
"def get_weights(self):\n return self._weight",
"def get_weight(self):\n return self.weight",
"def get_weight(self):\n return self.weight",
"def weight(self):\n return self._weight",
"def weight(self):\n return self._weight",
"def weight(self):\n return self._weight",
"def weight(self):\r\n return self._weight",
"def weight(self):\n return self._hx711.get_weight()",
"def weight(self) -> Optional[int]:\n return pulumi.get(self, \"weight\")",
"def weight(self, edge):\n \n return self._weights[frozenset(edge)]",
"def weight_nei(self):\n if self.cur_neighbor is None:\n return None\n else:\n weight = self.cur_neighbor['weight'] # [neighbor_k]\n if isinstance(weight, torch.Tensor):\n weight = weight.to(torch.device('cpu')).numpy()\n return weight",
"def _get_weight(self):\n return self.__weight",
"def _get_weight(self):\n return self.__weight",
"def _get_weight(self):\n return self.__weight",
"def _get_weight(self):\n return self.__weight"
] | [
"0.7588154",
"0.5257662",
"0.5250413",
"0.5210578",
"0.5175591",
"0.5127283",
"0.5110381",
"0.50396127",
"0.50396127",
"0.50396127",
"0.5034055",
"0.5034055",
"0.5034055",
"0.5029845",
"0.5006056",
"0.4981525",
"0.49774846",
"0.49774846",
"0.49386257",
"0.49386257",
"0.49386257",
"0.49179223",
"0.49168733",
"0.4905223",
"0.48886755",
"0.48756397",
"0.48597655",
"0.48597655",
"0.48597655",
"0.48597655"
] | 0.70765233 | 1 |
Setter method for multicastWeight, mapped from YANG variable /input/LocatorRecord/multicastWeight (uint8) | def _set_multicastWeight(self, v, load=False):
try:
t = YANGDynClass(v,base=np.uint8, is_leaf=True, yang_name="multicastWeight", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)
except (TypeError, ValueError):
raise ValueError("""multicastWeight must be of a type compatible with base=np.uint8, is_leaf=True, yang_name="multicastWeight", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True""")
self.__multicastWeight = t
if hasattr(self, '_set'):
self._set() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _get_multicastWeight(self):\n return self.__multicastWeight",
"def _set_weight(self, v, load=False):\n try:\n t = YANGDynClass(v,base=np.uint8, is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"weight must be of a type compatible with base=np.uint8, is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__weight = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_weight(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"weight must be of a type compatible with uint64\"\"\",\n 'defined-type': \"uint64\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=False)\"\"\",\n })\n\n self.__weight = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_weight(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"weight must be of a type compatible with uint64\"\"\",\n 'defined-type': \"uint64\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=False)\"\"\",\n })\n\n self.__weight = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_weight(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"weight must be of a type compatible with uint64\"\"\",\n 'defined-type': \"uint64\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=False)\"\"\",\n })\n\n self.__weight = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_weight(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"weight must be of a type compatible with uint64\"\"\",\n 'defined-type': \"uint64\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=True)\"\"\",\n })\n\n self.__weight = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_weight(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"weight must be of a type compatible with uint64\"\"\",\n 'defined-type': \"uint64\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=True)\"\"\",\n })\n\n self.__weight = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_weight(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"weight must be of a type compatible with uint64\"\"\",\n 'defined-type': \"uint64\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='uint64', is_config=True)\"\"\",\n })\n\n self.__weight = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_weight(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['-2147483648..2147483647']}, int_size=32), is_leaf=True, yang_name=\"weight\", rest_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='int32', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"weight must be of a type compatible with int32\"\"\",\n 'defined-type': \"int32\",\n 'generated-type': \"\"\"YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['-2147483648..2147483647']}, int_size=32), is_leaf=True, yang_name=\"weight\", rest_name=\"weight\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='int32', is_config=False)\"\"\",\n })\n\n self.__weight = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_multicastPriority(self, v, load=False):\n try:\n t = YANGDynClass(v,base=np.uint8, is_leaf=True, yang_name=\"multicastPriority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"multicastPriority must be of a type compatible with base=np.uint8, is_leaf=True, yang_name=\"multicastPriority\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__multicastPriority = t\n if hasattr(self, '_set'):\n self._set()",
"def weightKmers(self, weightDict):\n for k, w in weightDict.iteritems():\n assert k in self.kmers\n self.G.edge[k + \"_L\"][k + \"_R\"]['weight'] = w",
"def weight(self, weight):\n\n self._weight = weight",
"def weight(self, weight):\n\n self._weight = weight",
"def weight(self, weight):\n\n self._weight = weight",
"def set_weight(self, weight, layer_name, tag):\n if tag != 'bias' and tag != 'wmat':\n raise Exception('tag must be bias or wmat')\n cxnlib.CXNNetSetWeight(self.handle,\n weight.ctypes.data_as(ctypes.POINTER(ctypes.c_float)),\n weight.size,\n ctypes.c_char_p(layer_name.encode('utf-8')),\n ctypes.c_char_p(tag.encode('utf-8')))",
"def set_weight(self, weight):\n self.weight = weight # overwrite the existing weight with the input weight value",
"def weight(self, modifier=None):\n if modifier is None:\n return self._weight\n elif \"Down\" in modifier and modifier not in self._modifiers:\n return self._weight / self._modifiers[modifier.replace(\"Down\", \"Up\")]\n return self._weight * self._modifiers[modifier]",
"def updateNetworkSwitchRoutingMulticast(self, networkId: str, **kwargs):\n\n kwargs.update(locals())\n\n metadata = {\n 'tags': ['switch', 'configure', 'routing', 'multicast'],\n 'operation': 'updateNetworkSwitchRoutingMulticast',\n }\n resource = f'/networks/{networkId}/switch/routing/multicast'\n\n body_params = ['defaultSettings', 'overrides']\n payload = {k: v for (k, v) in kwargs.items() if k in body_params}\n\n return self._session.put(metadata, resource, payload)",
"def setGroupDataWeight(self, groupName, weight):\n if weight is not None:\n if not isinstance(weight, float):\n return\n if weight < 0.0:\n weight = 0.0\n self.setGroupSetting(groupName, self._dataWeightToken, weight)",
"def setWeight(self, w):\n self._W = w",
"def _determine_new_weight(self, weight, input, currentNeuron, bmu):\n return weight \\\n + (self.neighborhood.fn(currentNeuron, bmu) \\\n * self.learning_rate * (input - weight))",
"def _get_multicastPriority(self):\n return self.__multicastPriority",
"def set_output_weights(self, weight_matrix):\n pass",
"def set_edge_weight(self, vertex1, vertex2, weight):\n if not self.is_weighted():\n print(\"WARNING: Graph is NOT weighted!\")\n return None\n self._graph[vertex1][vertex2] = weight\n if self.is_directed():\n self._graph[vertex2][vertex1] = weight\n return True",
"def multicast_address(self):\n\n return self.config.dict[\"ssdp\"][\"multicast_address\"]",
"def update_weights(self):\n self._weights = self._weights + self.update_weights_value",
"def set_weight(self, dest, weight):\n self.points_to[dest] = weight",
"def get_weight(self):\r\n weight = self.weight\r\n if weight is None:\r\n weight = 1\r\n return weight",
"def change_weight(self, new_weight_arr):\n self.weights = new_weight_arr",
"def _weight_changed(self, value):\r\n # update internal data\r\n self._weight = value"
] | [
"0.6678886",
"0.5881262",
"0.5806964",
"0.5806964",
"0.5806964",
"0.58052194",
"0.58052194",
"0.58052194",
"0.5402897",
"0.5298001",
"0.5271714",
"0.5237159",
"0.5237159",
"0.5237159",
"0.5186103",
"0.51658547",
"0.4932866",
"0.49096778",
"0.48995125",
"0.48133203",
"0.4811269",
"0.47525436",
"0.4742168",
"0.4695939",
"0.46791103",
"0.46528116",
"0.4618636",
"0.45984",
"0.45809585",
"0.45461875"
] | 0.84586775 | 0 |
Getter method for localLocator, mapped from YANG variable /input/LocatorRecord/localLocator (boolean) | def _get_localLocator(self):
return self.__localLocator | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _set_localLocator(self, v, load=False):\n try:\n t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name=\"localLocator\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"localLocator must be of a type compatible with base=YANGBool, is_leaf=True, yang_name=\"localLocator\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__localLocator = t\n if hasattr(self, '_set'):\n self._set()",
"def local(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"local\")",
"def local(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"local\")",
"def local(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"local\")",
"def ip_is_local(ip):\n return localinterfaces.is_public_ip(ip) or localinterfaces.is_local_ip(ip)",
"def is_link_local(addr: object):\n # pyre-fixme[6]: For 1st param expected `Union[bytes, int, IPv4Address,\n # IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network, str]` but\n # got `object`.\n return ipaddress.ip_network(addr).is_link_local",
"def is_local(self): # -> Any | bool:\n ...",
"def globusLocalEndpointExistence(self):\n\t\tsuccess = False\n\t\ttry:\n\t\t\tlocal_id = self.transfer_client.endpoint_search(socket.gethostname())[0]['name']\n\t\t\tself.transfer_client.operation_ls(local_id)\n\t\t\tself.local_ep_id = local_id\n\t\t\tsuccess = True\n\t\texcept:\n\t\t\tpass\n\t\treturn success",
"def pg_is_local(self) -> bool:\n query = queries.get(\"get_pga_inet_addresses\")\n ret = pg.fetchone(self.pg_conn, query)\n if ret[\"inet_server_addr\"] == ret[\"inet_client_addr\"]:\n return True\n return False",
"def is_local(self):\n if not \"COLLABORATIVE\" in self._file.upper():\n LOGGER.debug(['AIE4606', 'match_false'], {'file': self._file})\n return True\n else:\n LOGGER.debug(['AIE4607', 'match_true'], {'file': self._file})\n return False\n return self._is_local",
"def localip(self) :\n\t\ttry :\n\t\t\treturn self._localip\n\t\texcept Exception as e:\n\t\t\traise e",
"def is_local_administrator(self):\n\t\treturn bool(call_sdk_function('PrlUsrCfg_IsLocalAdministrator', self.handle))",
"def is_local_client(self):\n return self.msg.is_local_client",
"def is_local(self) -> bool:\n if not self.source:\n return False\n\n if self.source.master_name.startswith(MODULE_NAME):\n return True\n\n if self.is_type_defs():\n return True\n\n return False",
"def get_lldp_local():\n query = {\"type\": \"op\", \"cmd\": \"<show><lldp><local>all</local></lldp></show>\"}\n\n return __proxy__[\"panos.call\"](query)",
"def isLocal(self, connectionInfo):\n return False",
"def local_object_reference(self) -> Optional[pulumi.Input['LocalObjectReferenceArgs']]:\n return pulumi.get(self, \"local_object_reference\")",
"def local_object_reference(self) -> Optional[pulumi.Input['LocalObjectReferenceArgs']]:\n return pulumi.get(self, \"local_object_reference\")",
"def local_object_reference(self) -> Optional[pulumi.Input['LocalObjectReferenceArgs']]:\n return pulumi.get(self, \"local_object_reference\")",
"def local_object_reference(self) -> Optional[pulumi.Input['LocalObjectReferenceArgs']]:\n return pulumi.get(self, \"local_object_reference\")",
"def _set_local_preference(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"local-preference\", rest_name=\"local-preference\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"local_preference must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"local-preference\", rest_name=\"local-preference\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__local_preference = t\n if hasattr(self, '_set'):\n self._set()",
"def has_local_state(self) -> bool:\n return True",
"def is_local(self):\n try:\n return os.path.isfile(self.get_absolute_path())\n except ValueError:\n logger.error(\"'%s' is not a file\", self.get_absolute_path())\n except TypeError: # no datafile available or file does not exist\n pass\n return False",
"def is_local_file(string):\n assert isinstance(string, basestring)\n return os.path.isfile(string)",
"def use_local_search(self) -> bool:\n return self.__use_local_search",
"def local(root, listdir=None, recognizer=None, **kwds):\n # build a walker if necessary\n listdir = walker() if listdir is None else listdir\n # build a recognizer\n recognizer = stat() if recognizer is None else recognizer\n\n # ensure that {root} is an absolute path so that we can protect the filesystem\n # representation in case the application manipulates the current working directory of the\n # process\n root = primitives.path(root).resolve()\n # grab the location metadata\n info = recognizer.recognize(root)\n\n # if the location doesn't exist\n if not info:\n # complain\n raise MountPointError(uri=root, error=\"mount point not found\")\n\n # if the root is a directory\n if info.isFolder:\n # access the local filesystem factory\n from .Local import Local\n\n # build one\n return Local(metadata=info, walker=listdir, recognizer=recognizer, **kwds)\n\n # perhaps it is a zipfile\n import zipfile\n\n # so check, and if so\n if zipfile.is_zipfile(str(root)):\n # access the zip filesystem factory\n from .Zip import Zip\n\n # build one and return it\n return Zip(metadata=info)\n\n # out of ideas\n raise MountPointError(uri=root, error=\"invalid mount point\")",
"def localip(self, localip) :\n\t\ttry :\n\t\t\tself._localip = localip\n\t\texcept Exception as e:\n\t\t\traise e",
"def local_root_visible(self):\n return self._local_root_visible",
"def IsLocalField(self, field_name):\n if not self._field_universe:\n return False\n return self._field_universe.IsFieldDefined(field_name, self.namespace)",
"def test_local_path():\n URL_PATH = \"http://www.google.com\"\n URL_PATH1 = \"www.google.com\"\n LOCAL_PATH = \"tests/index.html\"\n\n assert URL_PATH == is_local(URL_PATH)\n assert \"file\" in is_local(os.path.abspath(LOCAL_PATH))\n assert URL_PATH1 == is_local(URL_PATH1)"
] | [
"0.77122736",
"0.61343336",
"0.61343336",
"0.594526",
"0.58300143",
"0.5516242",
"0.5371214",
"0.535761",
"0.5356122",
"0.5291819",
"0.52905875",
"0.5190979",
"0.50963354",
"0.50106007",
"0.50024956",
"0.4956631",
"0.49452806",
"0.49452806",
"0.49452806",
"0.49452806",
"0.49283922",
"0.49262753",
"0.48902094",
"0.48796448",
"0.48694846",
"0.4867798",
"0.48294428",
"0.48230007",
"0.48169002",
"0.48039246"
] | 0.67667943 | 1 |
Setter method for localLocator, mapped from YANG variable /input/LocatorRecord/localLocator (boolean) | def _set_localLocator(self, v, load=False):
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="localLocator", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)
except (TypeError, ValueError):
raise ValueError("""localLocator must be of a type compatible with base=YANGBool, is_leaf=True, yang_name="localLocator", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True""")
self.__localLocator = t
if hasattr(self, '_set'):
self._set() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _get_localLocator(self):\n return self.__localLocator",
"def local(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"local\")",
"def local(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"local\")",
"def local(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"local\")",
"def localip(self, localip) :\n\t\ttry :\n\t\t\tself._localip = localip\n\t\texcept Exception as e:\n\t\t\traise e",
"def ip_is_local(ip):\n return localinterfaces.is_public_ip(ip) or localinterfaces.is_local_ip(ip)",
"def _set_local_preference(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"local-preference\", rest_name=\"local-preference\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='string', is_config=False)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"local_preference must be of a type compatible with string\"\"\",\n 'defined-type': \"string\",\n 'generated-type': \"\"\"YANGDynClass(base=unicode, is_leaf=True, yang_name=\"local-preference\", rest_name=\"local-preference\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='string', is_config=False)\"\"\",\n })\n\n self.__local_preference = t\n if hasattr(self, '_set'):\n self._set()",
"def is_local(self): # -> Any | bool:\n ...",
"def is_link_local(addr: object):\n # pyre-fixme[6]: For 1st param expected `Union[bytes, int, IPv4Address,\n # IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network, str]` but\n # got `object`.\n return ipaddress.ip_network(addr).is_link_local",
"def globusLocalEndpointExistence(self):\n\t\tsuccess = False\n\t\ttry:\n\t\t\tlocal_id = self.transfer_client.endpoint_search(socket.gethostname())[0]['name']\n\t\t\tself.transfer_client.operation_ls(local_id)\n\t\t\tself.local_ep_id = local_id\n\t\t\tsuccess = True\n\t\texcept:\n\t\t\tpass\n\t\treturn success",
"def local_id_lt(self, local_id_lt):\n\n self._local_id_lt = local_id_lt",
"def local_id_lt(self, local_id_lt):\n\n self._local_id_lt = local_id_lt",
"def local_id_lt(self, local_id_lt):\n\n self._local_id_lt = local_id_lt",
"def local_id_lt(self, local_id_lt):\n\n self._local_id_lt = local_id_lt",
"def local_id(self, local_id):\n if self.local_vars_configuration.client_side_validation and local_id is None: # noqa: E501\n raise ValueError(\"Invalid value for `local_id`, must not be `None`\") # noqa: E501\n\n self._local_id = local_id",
"def set_location(self):\n if self.has_non_empty_attribute(\"localitate\"):\n loc_item = None\n if utils.count_wikilinks(self.localitate) > 0:\n loc_link = utils.get_wikilinks(self.localitate)[0]\n loc_item = utils.q_from_wikipedia(\"ro\", loc_link.title)\n adm_item = self.get_statement_values(\"located_adm\")\n if loc_item and loc_item != adm_item[0]:\n self.add_statement(\"location\", loc_item)\n\n if not loc_item:\n self.add_to_report(\"localitate\", self.localitate, \"location\")",
"def local_root_visible(self, local_root_visible):\n \n self._local_root_visible = local_root_visible",
"def pg_is_local(self) -> bool:\n query = queries.get(\"get_pga_inet_addresses\")\n ret = pg.fetchone(self.pg_conn, query)\n if ret[\"inet_server_addr\"] == ret[\"inet_client_addr\"]:\n return True\n return False",
"def is_local(self):\n if not \"COLLABORATIVE\" in self._file.upper():\n LOGGER.debug(['AIE4606', 'match_false'], {'file': self._file})\n return True\n else:\n LOGGER.debug(['AIE4607', 'match_true'], {'file': self._file})\n return False\n return self._is_local",
"def _set_LocatorRecord(self, v, load=False):\n try:\n t = YANGDynClass(v,base=YANGListType(\"locator_id\",yc_LocatorRecord_pyangbind_example__input_LocatorRecord, yang_name=\"LocatorRecord\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper), is_container='list', yang_name=\"LocatorRecord\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"LocatorRecord must be of a type compatible with base=YANGListType(\"locator_id\",yc_LocatorRecord_pyangbind_example__input_LocatorRecord, yang_name=\"LocatorRecord\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper), is_container='list', yang_name=\"LocatorRecord\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__LocatorRecord = t\n if hasattr(self, '_set'):\n self._set()",
"def is_local_administrator(self):\n\t\treturn bool(call_sdk_function('PrlUsrCfg_IsLocalAdministrator', self.handle))",
"def localip(self) :\n\t\ttry :\n\t\t\treturn self._localip\n\t\texcept Exception as e:\n\t\t\traise e",
"def local_id_in(self, local_id_in):\n\n self._local_id_in = local_id_in",
"def local_id_in(self, local_id_in):\n\n self._local_id_in = local_id_in",
"def local_id_in(self, local_id_in):\n\n self._local_id_in = local_id_in",
"def local_id_in(self, local_id_in):\n\n self._local_id_in = local_id_in",
"def isLocal(self, connectionInfo):\n return False",
"def _annotate_local(self) -> None:\n if self._has_annotation(self.primaryjoin, \"local\"):\n return\n\n if self._local_remote_pairs:\n local_side = util.column_set(\n [l for (l, r) in self._local_remote_pairs]\n )\n else:\n local_side = util.column_set(self.parent_persist_selectable.c)\n\n def locals_(element: _CE, **kw: Any) -> Optional[_CE]:\n if \"remote\" not in element._annotations and element in local_side:\n return element._annotate({\"local\": True})\n return None\n\n self.primaryjoin = visitors.replacement_traverse(\n self.primaryjoin, {}, locals_\n )",
"def use_local_search(self) -> bool:\n return self.__use_local_search",
"def has_local_state(self) -> bool:\n return True"
] | [
"0.6287664",
"0.5828949",
"0.5828949",
"0.55667734",
"0.5547197",
"0.553913",
"0.54638076",
"0.52337706",
"0.52014136",
"0.51928854",
"0.51270527",
"0.51270527",
"0.51270527",
"0.51270527",
"0.50773776",
"0.5034757",
"0.5028427",
"0.5021666",
"0.50191003",
"0.49850848",
"0.49689755",
"0.49459398",
"0.49322176",
"0.49322176",
"0.49322176",
"0.49322176",
"0.49024847",
"0.48555747",
"0.48524505",
"0.48419353"
] | 0.8276338 | 0 |
Getter method for rlocProbed, mapped from YANG variable /input/LocatorRecord/rlocProbed (boolean) | def _get_rlocProbed(self):
return self.__rlocProbed | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _set_rlocProbed(self, v, load=False):\n try:\n t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name=\"rlocProbed\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"rlocProbed must be of a type compatible with base=YANGBool, is_leaf=True, yang_name=\"rlocProbed\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__rlocProbed = t\n if hasattr(self, '_set'):\n self._set()",
"def IsVendorStatLearnedInformationRefreshed(self):\n\t\treturn self._get_attribute('isVendorStatLearnedInformationRefreshed')",
"def _is_rr_present() -> bool:\n\n # this is ugly but I couldn't find a better way to do it\n # feel free to refactor it\n globals_list_literal_str = gdb.execute(\"python print(list(globals().keys()))\", to_string=True)\n interpreter_globals = ast.literal_eval(globals_list_literal_str)\n\n return \"RRCmd\" in interpreter_globals and \"RRWhere\" in interpreter_globals",
"def HasRestoredProp(self):\n\n return self._hasRestored",
"def promoter(self):\n return self.mut['ProMutation'] is None",
"def fetchLigandNeighborMappingData(self):\n logger.info(\"Running fetchLigandNeighborMappingData...\")\n ok = False\n try:\n ptsW = ProteinTargetSequenceWorkflow(self.__cfgOb, self.__cachePath)\n ok = ptsW.exportRCSBLigandNeighborMapping()\n except Exception as e:\n logger.exception(\"Failing with %s\", str(e))\n return ok",
"def started_path(self):\n if self.ros_node.get_data('/diff_drive/path_achieved') is None:\n return False\n return not self.ros_node.get_data('/diff_drive/path_achieved')",
"def getBool( self, par, path ):\n\n return self.db.getBoolPar( par, path )",
"def pollable(self):\n return bool(self.ScopeCollector)",
"def get_prog_enable(self):\n #en = self._get_prop(\"enabled\")\n #return bool( en == \"true\" )\n if \"enabled\" in self._mydict:\n return bool(self._mydict[\"enabled\"] == \"true\")\n return True",
"def _get_lsp_config_frr_configured(self):\n return self.__lsp_config_frr_configured",
"def _get_rloc(self):\n return self.__rloc",
"def rdb_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"rdb_enabled\")",
"def release_candidate(self) -> bool:\n return \"rc\" in self.modifier if self.modifier else \"rc\" in self.string",
"def is_scr_res_enabled(self):\n\t\treturn bool(call_sdk_function('PrlVmCfg_IsScrResEnabled', self.handle))",
"def is_residential(self):\n\n return self._is_residential",
"def _is_drs_enabled(session, cluster):\n drs_config = session._call_method(vim_util, \"get_object_property\", cluster,\n \"configuration.drsConfig\")\n if drs_config and hasattr(drs_config, 'enabled'):\n return drs_config.enabled\n\n return False",
"def check_rpt_status(self) -> bool:\n return self.allele == self.fasta_alt",
"def is_revelant(self):\n return getattr(self, 'revelant', self.__class__._revelant_)",
"def HasRestored(self):\n\n return self.HasRestoredProp",
"def data_residency_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"data_residency_enabled\")",
"def data_residency_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"data_residency_enabled\")",
"def data_residency_enabled(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"data_residency_enabled\")",
"def HasRestoredProp(self, flag):\n\n if flag:\n self._hasRestored = flag",
"def _set_rloc(self, v, load=False):\n try:\n t = YANGDynClass(v,base=yc_rloc_pyangbind_example__input_LocatorRecord_rloc, is_container='container', yang_name=\"rloc\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"rloc must be of a type compatible with base=yc_rloc_pyangbind_example__input_LocatorRecord_rloc, is_container='container', yang_name=\"rloc\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__rloc = t\n if hasattr(self, '_set'):\n self._set()",
"def is_proved(self):\n return len(self.proofs) > 0",
"def isranked(self):\n rank = self.console.getCvar('sv_ranked')\n return rank and rank.getInt() == 2",
"def isPassable(self, newLocation, includeRobots=True):\n passable = True\n\n if newLocation[0] < 0 or newLocation[0] > len(self.map):\n passable = False\n elif newLocation[1] < 0 or newLocation[1] > len(self.map):\n passable = False\n elif not self.map[newLocation[1]][newLocation[0]]:\n passable = False\n elif includeRobots and self.get_visible_robot_map()[newLocation[1]][newLocation[0]] > 0:\n passable = False\n\n return passable",
"def is_current_venue_residential() -> bool:\n venue_instance = CommonLocationUtils.get_venue_of_current_lot()\n if venue_instance is None:\n return False\n # noinspection PyUnresolvedReferences\n return venue_instance.residential",
"def ms_get_rstp_enabled(self):\n self.open_route('/configure/switch_settings', \"Switch\")\n dropdown_value = page_utils.get_dropdown_value(\n self.get_page(),\n var_id='node_group_use_stp')\n return dropdown_value == 'Enable RSTP'"
] | [
"0.73260176",
"0.46317473",
"0.46158537",
"0.45694757",
"0.45441782",
"0.45398918",
"0.45247114",
"0.45137352",
"0.44872126",
"0.44861192",
"0.4473698",
"0.44509727",
"0.44429824",
"0.44327995",
"0.4392198",
"0.4387672",
"0.4379323",
"0.43745342",
"0.43587065",
"0.43559256",
"0.43513897",
"0.43513897",
"0.43373185",
"0.4310293",
"0.43086717",
"0.42944804",
"0.4287466",
"0.42861572",
"0.42729998",
"0.42729515"
] | 0.7142277 | 1 |
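The getter in the record above returns the class's double-underscore attribute directly. A minimal sketch of that private-attribute pattern (the class name here is illustrative, not the generated pyangbind class):

```python
class LocatorRecordSketch:
    """Illustrates the private-attribute getter pattern in the record above."""

    def __init__(self, rloc_probed=False):
        # Double-underscore names are mangled to _ClassName__attr, which is
        # why the generated _get_* helper is the supported access path.
        self.__rlocProbed = rloc_probed

    def _get_rlocProbed(self):
        return self.__rlocProbed


rec = LocatorRecordSketch(rloc_probed=True)
print(rec._get_rlocProbed())                   # True
print(rec._LocatorRecordSketch__rlocProbed)    # True, via the mangled name
```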
Setter method for rlocProbed, mapped from YANG variable /input/LocatorRecord/rlocProbed (boolean) | def _set_rlocProbed(self, v, load=False):
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="rlocProbed", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)
except (TypeError, ValueError):
raise ValueError("""rlocProbed must be of a type compatible with base=YANGBool, is_leaf=True, yang_name="rlocProbed", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True""")
self.__rlocProbed = t
if hasattr(self, '_set'):
self._set() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _get_rlocProbed(self):\n return self.__rlocProbed",
"def _set_rloc(self, v, load=False):\n try:\n t = YANGDynClass(v,base=yc_rloc_pyangbind_example__input_LocatorRecord_rloc, is_container='container', yang_name=\"rloc\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"rloc must be of a type compatible with base=yc_rloc_pyangbind_example__input_LocatorRecord_rloc, is_container='container', yang_name=\"rloc\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__rloc = t\n if hasattr(self, '_set'):\n self._set()",
"def isPassable(self, newLocation, includeRobots=True):\n passable = True\n\n if newLocation[0] < 0 or newLocation[0] > len(self.map):\n passable = False\n elif newLocation[1] < 0 or newLocation[1] > len(self.map):\n passable = False\n elif not self.map[newLocation[1]][newLocation[0]]:\n passable = False\n elif includeRobots and self.get_visible_robot_map()[newLocation[1]][newLocation[0]] > 0:\n passable = False\n\n return passable",
"def release_candidate(self) -> bool:\n return \"rc\" in self.modifier if self.modifier else \"rc\" in self.string",
"def rp_rerun(self):\n if self._rp_rerun is None:\n if self.rp_rerun_of:\n self._rp_rerun = True\n else:\n self._rp_rerun = (self.pconfig.option.rp_rerun or\n self.pconfig.getini('rp_rerun'))\n return self._rp_rerun",
"def HasRestoredProp(self, flag):\n\n if flag:\n self._hasRestored = flag",
"def promoter(self):\n return self.mut['ProMutation'] is None",
"def is_scr_res_enabled(self):\n\t\treturn bool(call_sdk_function('PrlVmCfg_IsScrResEnabled', self.handle))",
"def data_residency_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"data_residency_enabled\")",
"def data_residency_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"data_residency_enabled\")",
"def _set_lsp_config_frr_configured(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name=\"lsp-config-frr-configured\", rest_name=\"lsp-config-frr-configured\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"lsp_config_frr_configured must be of a type compatible with boolean\"\"\",\n 'defined-type': \"boolean\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGBool, is_leaf=True, yang_name=\"lsp-config-frr-configured\", rest_name=\"lsp-config-frr-configured\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)\"\"\",\n })\n\n self.__lsp_config_frr_configured = t\n if hasattr(self, '_set'):\n self._set()",
"def HasRestoredProp(self):\n\n return self._hasRestored",
"def is_new_snp(self,seqid,pos,allele):\r\n self.seqid = seqid\r\n self.source = \"gff3_manager\"\r\n self.type = \"SNP\"\r\n self.start = pos\r\n self.end = pos\r\n self.score = \".\"\r\n self.strand = \"+\"\r\n self.phase = \".\"\r\n self.attributes.id = seqid+\"_\"+str(pos)\r\n self.attributes.note = \"new sequence variant found after sequencing\"\r\n self.attributes.allele = allele\r\n self.attributes.active = True\r\n self.attributes.discovered = True\r\n self.attributes.validated = False",
"def started_path(self):\n if self.ros_node.get_data('/diff_drive/path_achieved') is None:\n return False\n return not self.ros_node.get_data('/diff_drive/path_achieved')",
"def data_residency_enabled(self) -> pulumi.Output[Optional[bool]]:\n return pulumi.get(self, \"data_residency_enabled\")",
"def startRclTool(self, parent = None):\n\n self.rclParent = parent\n self.iface.mapCanvas().setMapTool(self._rcltool)\n self._rcltool.setEnabled(True)",
"def check_rpt_status(self) -> bool:\n return self.allele == self.fasta_alt",
"def isSetReactant(self):\n return _libsbml.SpeciesTypeComponentMapInProduct_isSetReactant(self)",
"def pollable(self):\n return bool(self.ScopeCollector)",
"def _set_lsp_config_frr_priority_configured(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name=\"lsp-config-frr-priority-configured\", rest_name=\"lsp-config-frr-priority-configured\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"lsp_config_frr_priority_configured must be of a type compatible with boolean\"\"\",\n 'defined-type': \"boolean\",\n 'generated-type': \"\"\"YANGDynClass(base=YANGBool, is_leaf=True, yang_name=\"lsp-config-frr-priority-configured\", rest_name=\"lsp-config-frr-priority-configured\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='boolean', is_config=True)\"\"\",\n })\n\n self.__lsp_config_frr_priority_configured = t\n if hasattr(self, '_set'):\n self._set()",
"def get_prog_enable(self):\n #en = self._get_prop(\"enabled\")\n #return bool( en == \"true\" )\n if \"enabled\" in self._mydict:\n return bool(self._mydict[\"enabled\"] == \"true\")\n return True",
"def set_scr_res_enabled(self, bVmScrResEnabled):\n\t\tcall_sdk_function('PrlVmCfg_SetScrResEnabled', self.handle, bVmScrResEnabled)",
"def rdb_enabled(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"rdb_enabled\")",
"def IsVendorStatLearnedInformationRefreshed(self):\n\t\treturn self._get_attribute('isVendorStatLearnedInformationRefreshed')",
"def is_residential(self):\n\n return self._is_residential",
"def isSetReactantComponent(self):\n return _libsbml.SpeciesTypeComponentMapInProduct_isSetReactantComponent(self)",
"def _set_cr(self, cr):\n self.__cr = bool(cr)",
"def is_proved(self):\n return len(self.proofs) > 0",
"def isSetGeneProduct(self):\n return _libsbml.GeneProductRef_isSetGeneProduct(self)",
"def fetchLigandNeighborMappingData(self):\n logger.info(\"Running fetchLigandNeighborMappingData...\")\n ok = False\n try:\n ptsW = ProteinTargetSequenceWorkflow(self.__cfgOb, self.__cachePath)\n ok = ptsW.exportRCSBLigandNeighborMapping()\n except Exception as e:\n logger.exception(\"Failing with %s\", str(e))\n return ok"
] | [
"0.65422535",
"0.4735449",
"0.45066586",
"0.45007536",
"0.44398454",
"0.4437598",
"0.44301248",
"0.43837652",
"0.43508556",
"0.43508556",
"0.4340041",
"0.4306401",
"0.42857504",
"0.428212",
"0.42812702",
"0.42800307",
"0.42746672",
"0.42536786",
"0.42527962",
"0.42236567",
"0.4198623",
"0.4195025",
"0.41913304",
"0.41753975",
"0.41727895",
"0.415374",
"0.4129222",
"0.41240165",
"0.41177744",
"0.41063893"
] | 0.8125802 | 0 |
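The setter delegates type checking to YANGDynClass with a YANGBool base and converts any TypeError or ValueError into the quoted message. A rough stand-in for the boolean coercion step; the accepted spellings below are an assumption, since the real YANGBool applies its own rules:

```python
def to_yang_bool(value):
    """Coerce a value to bool, rejecting anything ambiguous.

    Hypothetical helper; pyangbind's YANGBool may accept other forms.
    """
    if isinstance(value, bool):
        return value
    if isinstance(value, str):
        lowered = value.strip().lower()
        if lowered in ("true", "1"):
            return True
        if lowered in ("false", "0"):
            return False
    if value in (0, 1):
        return bool(value)
    raise ValueError(
        "rlocProbed must be of a type compatible with boolean: %r" % (value,)
    )


print(to_yang_bool("true"))   # True
print(to_yang_bool(0))        # False
```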
Getter method for routed, mapped from YANG variable /input/LocatorRecord/routed (boolean) | def _get_routed(self):
return self.__routed | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def is_route_throu(self):\n\n # VPR stores route-through LUTs as \"open\" blocks with mode set to\n # \"wire\".\n return self.is_leaf and self.name == \"open\" and self.mode == \"wire\"",
"def can_location_be_routed_to(location: CommonLocation) -> bool:\n return CommonLocationUtils.can_position_be_routed_to(location.transform.translation, location.routing_surface)",
"def use_routes(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"use_routes\")",
"def _set_routed(self, v, load=False):\n try:\n t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name=\"routed\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"routed must be of a type compatible with base=YANGBool, is_leaf=True, yang_name=\"routed\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__routed = t\n if hasattr(self, '_set'):\n self._set()",
"def UseRouteDistinguisherAsRouteTarget(self):\n return self._get_attribute('useRouteDistinguisherAsRouteTarget')",
"def route(self) -> Optional[RoadMap.Route]:\n return self._route",
"def routes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['RouteTableRouteArgs']]]]:\n return pulumi.get(self, \"routes\")",
"def routes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['RouteTableRouteArgs']]]]:\n return pulumi.get(self, \"routes\")",
"def RouteDistinguisherType(self):\n return self._get_attribute('routeDistinguisherType')",
"def test_routed(self):\n self.assertTrue(\n self.ospf.parse_state(\n pattern='routed',\n cmd_key='sh_ospf_ints') == 'Hello due', 'OSPF Interface: OSPF hellos not found')",
"def routes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['VirtualHubRouteTableRouteArgs']]]]:\n return pulumi.get(self, \"routes\")",
"def routes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['VirtualHubRouteTableRouteArgs']]]]:\n return pulumi.get(self, \"routes\")",
"def can_be_routed_to(game_object: GameObject) -> bool:\n position = CommonObjectLocationUtils.get_position(game_object) + CommonObjectLocationUtils.get_forward_vector(game_object)\n routing_surface = CommonObjectLocationUtils.get_routing_surface(game_object)\n return CommonLocationUtils.can_position_be_routed_to(position, routing_surface)",
"def routes(self) -> pulumi.Output[Sequence['outputs.RouteTableRoute']]:\n return pulumi.get(self, \"routes\")",
"def routed_to(self, routed_to):\n\n self._routed_to = routed_to",
"def _is_ethernet_is_routed(self):\n E = data_element_maker()\n top = E.top(\n E.Ifmgr(\n E.Interfaces(\n E.Interface(\n E.IfIndex(self.iface_index)\n )\n )\n )\n )\n\n nc_get_reply = self.device.get(('subtree', top))\n reply_data = find_in_data('ifType', nc_get_reply.data_ele)\n\n routed_reply_data = find_in_data('PortLayer', nc_get_reply.data_ele)\n\n is_ethernet = False\n is_routed = False\n try:\n if reply_data.text == '6':\n is_ethernet = True\n except AttributeError:\n pass\n\n try:\n if routed_reply_data.text == '2':\n is_routed = True\n except AttributeError:\n pass\n\n return is_ethernet, is_routed",
"def routes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['VpnConnectionRouteArgs']]]]:\n return pulumi.get(self, \"routes\")",
"def get_routers(self, location=None, mask=None):\n\n if mask is None:\n mask = \"\"\"\n topLevelLocation\n \"\"\"\n object_filter = ''\n if location:\n object_filter = {\n 'routers': {\n 'topLevelLocation': {'name': {'operation': location}}\n }\n }\n\n return self.client['SoftLayer_Account'].getRouters(filter=object_filter, mask=mask)",
"def get_direction_to_right(self, direction):\r\n return direction_to_right[direction]",
"def routes(self) -> pulumi.Output[Sequence['outputs.VirtualHubRouteTableRoute']]:\n return pulumi.get(self, \"routes\")",
"def validate_route(self, route):\n\n for router in ROUTER:\n if router.value == route:\n return True\n return False",
"def get_self_origin(self, routes):\n outroutes = []\n all_non_self = True\n for route in routes:\n if route[SORG]:\n outroutes.append(route)\n all_non_self = False\n if all_non_self:\n return routes\n return outroutes",
"def is_correct_route(network, route):\n id_actual = 0\n id_next = 1\n while id_next < len(route):\n road_id_actual = route[id_actual]\n road_id_next = route[id_next]\n if get_end(network, road_id_actual) != get_start(network, road_id_next):\n return False\n id_actual += 1\n id_next += 1\n return True",
"def is_radrad(rxn_class):\n return radrad(rxn_class)",
"def current_venue_allows_role_state_routing() -> bool:\n venue_instance = CommonLocationUtils.get_venue_of_current_lot()\n if venue_instance is None:\n return False\n # noinspection PyUnresolvedReferences\n return venue_instance.allow_rolestate_routing_on_navmesh",
"def get_self_origin(self, routes):\n # TODO\n outroutes = {}\n\n for ip in routes.keys():\n if routes[ip][SORG]:\n outroutes[ip] = routes[ip]\n\n return outroutes",
"def truck_route(\n self,\n origin: List,\n destination: List,\n via: Optional[List[Tuple]] = None,\n origin_place_options: Optional[PlaceOptions] = None,\n destination_place_options: Optional[PlaceOptions] = None,\n via_place_options: Optional[PlaceOptions] = None,\n destination_waypoint_options: Optional[WayPointOptions] = None,\n via_waypoint_options: Optional[WayPointOptions] = None,\n departure_time: Optional[datetime] = None,\n routing_mode: str = \"fast\",\n alternatives: int = 0,\n units: str = \"metric\",\n lang: str = \"en-US\",\n return_results: Optional[List] = None,\n spans: Optional[List] = None,\n truck: Optional[Truck] = None,\n avoid_features: Optional[List[str]] = None,\n avoid_areas: Optional[List[AvoidBoundingBox]] = None,\n exclude: Optional[List[str]] = None,\n ) -> RoutingResponse: # noqa E501\n resp = self.routing_api.route(\n transport_mode=\"truck\",\n origin=origin,\n destination=destination,\n via=via,\n origin_place_options=origin_place_options,\n destination_place_options=destination_place_options,\n via_place_options=via_place_options,\n destination_waypoint_options=destination_waypoint_options,\n via_waypoint_options=via_waypoint_options,\n departure_time=departure_time,\n routing_mode=routing_mode,\n alternatives=alternatives,\n units=units,\n lang=lang,\n return_results=return_results,\n spans=spans,\n truck=truck,\n avoid_features=avoid_features,\n avoid_areas=avoid_areas,\n exclude=exclude,\n )\n return RoutingResponse.new(resp.json())",
"def _get_route_map(self):\n return self.__route_map",
"def RouteTargetType(self):\n return self._get_attribute('routeTargetType')",
"def route_table(self) -> Optional['outputs.RouteTableResponse']:\n return pulumi.get(self, \"route_table\")"
] | [
"0.57929003",
"0.57313263",
"0.56877166",
"0.5655214",
"0.5447337",
"0.54195905",
"0.54025376",
"0.54025376",
"0.54015297",
"0.5378665",
"0.5280592",
"0.5280592",
"0.52611727",
"0.5238895",
"0.5148455",
"0.50744885",
"0.50203764",
"0.5011416",
"0.49734855",
"0.49599394",
"0.4957235",
"0.49524927",
"0.48852548",
"0.48830563",
"0.48705694",
"0.4846853",
"0.4846208",
"0.4842678",
"0.48377588",
"0.48366523"
] | 0.5745694 | 1 |
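Generated bindings normally expose each `_get_*`/`_set_*` pair through a Python property so callers read and assign the leaf by name. A simplified sketch of that wiring (the generated code also registers YANG metadata and a deleter):

```python
class Leaf:
    """Sketch of how a generated class binds a getter/setter pair."""

    def __init__(self):
        self.__routed = False

    def _get_routed(self):
        return self.__routed

    def _set_routed(self, value):
        if not isinstance(value, bool):   # stand-in for the YANGDynClass check
            raise ValueError("routed must be a boolean")
        self.__routed = value

    routed = property(_get_routed, _set_routed)


leaf = Leaf()
leaf.routed = True        # dispatches to _set_routed
print(leaf.routed)        # dispatches to _get_routed -> True
```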
Setter method for routed, mapped from YANG variable /input/LocatorRecord/routed (boolean) | def _set_routed(self, v, load=False):
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="routed", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)
except (TypeError, ValueError):
raise ValueError("""routed must be of a type compatible with base=YANGBool, is_leaf=True, yang_name="routed", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True""")
self.__routed = t
if hasattr(self, '_set'):
self._set() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def routed_to(self, routed_to):\n\n self._routed_to = routed_to",
"def can_location_be_routed_to(location: CommonLocation) -> bool:\n return CommonLocationUtils.can_position_be_routed_to(location.transform.translation, location.routing_surface)",
"def use_routes(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"use_routes\")",
"def is_route_throu(self):\n\n # VPR stores route-through LUTs as \"open\" blocks with mode set to\n # \"wire\".\n return self.is_leaf and self.name == \"open\" and self.mode == \"wire\"",
"def _get_routed(self):\n return self.__routed",
"def test_routed(self):\n self.assertTrue(\n self.ospf.parse_state(\n pattern='routed',\n cmd_key='sh_ospf_ints') == 'Hello due', 'OSPF Interface: OSPF hellos not found')",
"def set_routing(self, rinfo):\n\n self.routing = [ self.Routing(*r) for r in rinfo ]",
"def toggleRouterConnected(self, flagToSet=None):\n if flagToSet is None:\n self.isRouterConnected = not (self.isRouterConnected)\n return\n self.isRouterConnected = flagToSet",
"def UseRouteDistinguisherAsRouteTarget(self):\n return self._get_attribute('useRouteDistinguisherAsRouteTarget')",
"def can_be_routed_to(game_object: GameObject) -> bool:\n position = CommonObjectLocationUtils.get_position(game_object) + CommonObjectLocationUtils.get_forward_vector(game_object)\n routing_surface = CommonObjectLocationUtils.get_routing_surface(game_object)\n return CommonLocationUtils.can_position_be_routed_to(position, routing_surface)",
"def run_router(self, detour_scale):\n\n # Double check source and taget are not same node, if so, we are done!\n for k,v in self.rg.map.items():\n if v.source and v.target:\n debug.error(\"Grid cell is source and target! {}\".format(k))\n return False\n \n # returns the path in tracks\n (path,cost) = self.rg.route(detour_scale)\n if path:\n debug.info(1,\"Found path: cost={0} \".format(cost))\n debug.info(1,str(path))\n\n self.paths.append(path)\n self.add_route(path)\n \n path_set = grid_utils.flatten_set(path)\n self.path_blockages.append(path_set)\n else:\n self.write_debug_gds(\"failed_route.gds\")\n # clean up so we can try a reroute\n self.rg.reinit()\n return False\n return True",
"def scooter_route(\n self,\n origin: List,\n destination: List,\n via: Optional[List[Tuple]] = None,\n origin_place_options: Optional[PlaceOptions] = None,\n destination_place_options: Optional[PlaceOptions] = None,\n via_place_options: Optional[PlaceOptions] = None,\n destination_waypoint_options: Optional[WayPointOptions] = None,\n via_waypoint_options: Optional[WayPointOptions] = None,\n scooter: Optional[Scooter] = None,\n departure_time: Optional[datetime] = None,\n routing_mode: str = \"fast\",\n alternatives: int = 0,\n units: str = \"metric\",\n lang: str = \"en-US\",\n return_results: Optional[List] = None,\n spans: Optional[List] = None,\n avoid_features: Optional[List[str]] = None,\n avoid_areas: Optional[List[AvoidBoundingBox]] = None,\n exclude: Optional[List[str]] = None,\n ) -> RoutingResponse: # noqa E501\n resp = self.routing_api.route(\n transport_mode=\"scooter\",\n origin=origin,\n destination=destination,\n via=via,\n origin_place_options=origin_place_options,\n destination_place_options=destination_place_options,\n via_place_options=via_place_options,\n destination_waypoint_options=destination_waypoint_options,\n via_waypoint_options=via_waypoint_options,\n scooter=scooter,\n departure_time=departure_time,\n routing_mode=routing_mode,\n alternatives=alternatives,\n units=units,\n lang=lang,\n return_results=return_results,\n spans=spans,\n avoid_features=avoid_features,\n avoid_areas=avoid_areas,\n exclude=exclude,\n )\n return RoutingResponse.new(resp.json())",
"def RouteDistinguisherType(self):\n return self._get_attribute('routeDistinguisherType')",
"def truck_route(\n self,\n origin: List,\n destination: List,\n via: Optional[List[Tuple]] = None,\n origin_place_options: Optional[PlaceOptions] = None,\n destination_place_options: Optional[PlaceOptions] = None,\n via_place_options: Optional[PlaceOptions] = None,\n destination_waypoint_options: Optional[WayPointOptions] = None,\n via_waypoint_options: Optional[WayPointOptions] = None,\n departure_time: Optional[datetime] = None,\n routing_mode: str = \"fast\",\n alternatives: int = 0,\n units: str = \"metric\",\n lang: str = \"en-US\",\n return_results: Optional[List] = None,\n spans: Optional[List] = None,\n truck: Optional[Truck] = None,\n avoid_features: Optional[List[str]] = None,\n avoid_areas: Optional[List[AvoidBoundingBox]] = None,\n exclude: Optional[List[str]] = None,\n ) -> RoutingResponse: # noqa E501\n resp = self.routing_api.route(\n transport_mode=\"truck\",\n origin=origin,\n destination=destination,\n via=via,\n origin_place_options=origin_place_options,\n destination_place_options=destination_place_options,\n via_place_options=via_place_options,\n destination_waypoint_options=destination_waypoint_options,\n via_waypoint_options=via_waypoint_options,\n departure_time=departure_time,\n routing_mode=routing_mode,\n alternatives=alternatives,\n units=units,\n lang=lang,\n return_results=return_results,\n spans=spans,\n truck=truck,\n avoid_features=avoid_features,\n avoid_areas=avoid_areas,\n exclude=exclude,\n )\n return RoutingResponse.new(resp.json())",
"def route(self):\n # TODO: wenn keine url, herausfinden, welche ????\n # TODO: wenn url = hostname (fqdn), dann -> google.ch\n if not (self.META.has_key('REMOTE_ADDR') and \n self.GET.has_key('provider')):\n #self.GET.has_key('url')):\n #return HttpResponseRedirect('/index.php')\n # TODO: Auf die Fehlerseite Link zu back.php\n return render_to_response('error.htm', {\n 'error': \"Falsche Parameter auf route.php\",\n })\n src_ip = self.META['REMOTE_ADDR']\n prov = self.GET['provider']\n url = \"http://www.google.ch\"\n if self.GET.has_key('url'):\n url = self.GET['url']\n # Add and save new route\n add_active_route(src_ip = src_ip, prov = prov)\n return HttpResponseRedirect(url)",
"def routes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['RouteTableRouteArgs']]]]:\n return pulumi.get(self, \"routes\")",
"def routes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['RouteTableRouteArgs']]]]:\n return pulumi.get(self, \"routes\")",
"def route(self) -> Optional[RoadMap.Route]:\n return self._route",
"def _alter_route(self, ifname, action, destination, next_hop):\n version = destination.version\n ifname = self.generic_to_host(ifname)\n try:\n LOG.debug(self.sudo(\n '-%s' % version, 'route', action, str(destination), 'via',\n str(next_hop), 'dev', ifname\n ))\n return True\n except RuntimeError as e:\n # Since these are user-supplied custom routes, it's very possible\n # that adding/removing them will fail. A failure to apply one of\n # these custom rules, however, should *not* cause an overall router\n # failure.\n LOG.warn('Route could not be %sed: %s' % (action, unicode(e)))\n return False",
"def motorsDirection(self, direction):\n\n print (direction)\n if direction == 'r' or direction == 'R':\n self.motorDirection(self.motor1DirectionPin, self.motorReverse)\n self.motorDirection(self.motor2DirectionPin, self.motorReverse)\n print (\"Direction reverse\")\n else:\n self.motorDirection(self.motor1DirectionPin, self.motorForward)\n self.motorDirection(self.motor2DirectionPin, self.motorForward)\n print (\"Direction forward\")",
"def current_venue_allows_role_state_routing() -> bool:\n venue_instance = CommonLocationUtils.get_venue_of_current_lot()\n if venue_instance is None:\n return False\n # noinspection PyUnresolvedReferences\n return venue_instance.allow_rolestate_routing_on_navmesh",
"def packet_routed(self, packet, node, booked_route, best_route,\n transmission_start_time):\n for subscriber in self.subscribers:\n subscriber.packet_routed(packet, node, booked_route, best_route,\n transmission_start_time)",
"def routes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['VirtualHubRouteTableRouteArgs']]]]:\n return pulumi.get(self, \"routes\")",
"def routes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['VirtualHubRouteTableRouteArgs']]]]:\n return pulumi.get(self, \"routes\")",
"def toggleTradeRoutes(self):\n if self.game.myEmpire['viewTradeRoutes'] == 0:\n self.game.myEmpire['viewTradeRoutes'] = 1\n self.createTradeRouteSims()\n else:\n self.game.myEmpire['viewTradeRoutes'] = 0\n self.removeTradeRouteSims()",
"def validate_route(self, route):\n\n for router in ROUTER:\n if router.value == route:\n return True\n return False",
"def route(self):\n pass",
"def routes(self) -> pulumi.Output[Sequence['outputs.RouteTableRoute']]:\n return pulumi.get(self, \"routes\")",
"def is_radrad(rxn_class):\n return radrad(rxn_class)",
"def same_attributes(self, route, packet) -> bool:\n msg = packet\n return route[LPRF] == msg[LPRF] and route[SORG] == msg[SORG] and route[APTH] == msg[APTH] and route[ORIG] == msg[ORIG] and route[PEER] == packet[SRCE]"
] | [
"0.6153026",
"0.5552645",
"0.5525019",
"0.53847706",
"0.5309402",
"0.52833045",
"0.5196852",
"0.5075154",
"0.50694686",
"0.50254524",
"0.49547634",
"0.4937431",
"0.49291715",
"0.49152592",
"0.49117863",
"0.48675567",
"0.48675567",
"0.48473886",
"0.48398617",
"0.48210603",
"0.48154715",
"0.4811391",
"0.48038056",
"0.48038056",
"0.48004565",
"0.47598097",
"0.47330174",
"0.47285554",
"0.47217458",
"0.4648909"
] | 0.6550641 | 0 |
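Several setters of this family (see the brocade variant among the negatives earlier) re-raise validation failures as a ValueError carrying a structured error dict. A sketch of that shape, using a hypothetical `strict_bool` coercer in place of YANGDynClass:

```python
def strict_bool(value):
    """Accept only real booleans; anything else raises."""
    if isinstance(value, bool):
        return value
    raise TypeError(value)


def validated_set(coerce, value, leaf_name, yang_type):
    """Coerce a candidate value or raise the setter-style error dict."""
    try:
        return coerce(value)
    except (TypeError, ValueError):
        raise ValueError({
            "error-string": "%s must be of a type compatible with %s"
                            % (leaf_name, yang_type),
            "defined-type": yang_type,
        })


print(validated_set(strict_bool, True, "routed", "boolean"))   # True
try:
    validated_set(strict_bool, "yes", "routed", "boolean")
except ValueError as err:
    print(err.args[0]["error-string"])
```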
Getter method for rloc, mapped from YANG variable /input/LocatorRecord/rloc (container) | def _get_rloc(self):
return self.__rloc | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _set_rloc(self, v, load=False):\n try:\n t = YANGDynClass(v,base=yc_rloc_pyangbind_example__input_LocatorRecord_rloc, is_container='container', yang_name=\"rloc\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"rloc must be of a type compatible with base=yc_rloc_pyangbind_example__input_LocatorRecord_rloc, is_container='container', yang_name=\"rloc\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__rloc = t\n if hasattr(self, '_set'):\n self._set()",
"def _get_LocatorRecord(self):\n return self.__LocatorRecord",
"def locate(self):\n return utils.get_object(\"crds\", self.observatory, \"locate\")",
"def _get_location_from_cache(self, locator):\r\n return self.cache.get(unicode(locator))",
"def loc(self):\n return self._loc",
"def loc(self):\n return self._loc",
"def _get_rlocProbed(self):\n return self.__rlocProbed",
"def rpos(self):\n return self._rpos",
"def query_radius(self,\n location: np.ndarray,\n r: float) -> np.ndarray:\n lat = location[0, 0]\n lon = location[0, 1]\n d0 = num_haversine(lat, lon, 0.0, 0.0)\n d1 = num_haversine(lat, lon, 90.0, 0.0)\n\n i0 = np.searchsorted(self.sorted0, d0 - r)\n i1 = np.searchsorted(self.sorted0, d0 + r)\n match0 = self.idx0[i0:i1+1]\n\n i0 = np.searchsorted(self.sorted1, d1 - r)\n i1 = np.searchsorted(self.sorted1, d1 + r)\n match1 = self.idx1[i0:i1 + 1]\n\n intersect = np.intersect1d(match0, match1)\n dist = vec_haversine(self.lats[intersect],\n self.lons[intersect],\n lat, lon)\n return intersect[dist <= r]",
"def get_location(self):\n return self.location",
"def locations(self):\n node = self.dismod_file.node\n assert not ({\"node_id\", \"node_name\", \"parent\"} - set(node.columns))\n if \"c_location_id\" not in node.columns:\n node = node.assign(c_location_id=node.node_id)\n location_map = node[[\"node_id\", \"c_location_id\"]].rename(\n columns={\"node_id\": \"parent\", \"c_location_id\": \"parent_location_id\"})\n parent_location = node.merge(\n location_map, on=\"parent\", how=\"left\")\n missing = parent_location[parent_location.parent_location_id.isna()]\n if len(missing) > 1: # Root will have nan for parent.\n raise ValueError(f\"parent location IDs unknown {missing}\")\n return parent_location.rename(columns=dict(\n parent_location_id=\"parent_id\", c_location_id=\"location_id\",\n node_name=\"name\"\n ))[[\"parent_id\", \"location_id\", \"name\", \"node_id\"]]",
"def getContainerRRD(self,node,vmid):\n data = self.connect('get','nodes/%s/lxc/%s/rrd' % (node,vmid),None)\n return data",
"def readLocation(self):\n return self._readLocation",
"def _get_localLocator(self):\n return self.__localLocator",
"def _get_locator_from_cache(self, location, published):\r\n entry = self.cache.get(u'{}+{}'.format(location.course_key, location))\r\n if entry is not None:\r\n if published:\r\n return entry[0]\r\n else:\r\n return entry[1]\r\n return None",
"def location_by_coordinate(r, lat, lon):\n upstream_rnode = r.rnodes[0]\n corr = upstream_rnode.corridor\n (upnode, dnnode) = geo.find_updown_rnodes(lat, lon, corr.rnodes, d_limit=1)\n if not upnode:\n return False\n\n f_done = False\n\n # check to downstream\n dist = distutil.distance_in_mile_with_coordinate(upnode.lat, upnode.lon, lat, lon)\n cur_node = upstream_rnode\n for next_node in geo.iter_to_downstream(upstream_rnode):\n dist += distutil.distance_in_mile(cur_node, next_node)\n if upnode == next_node:\n f_done = True\n break\n cur_node = next_node\n\n if not f_done:\n # check to upstream\n dist = distutil.distance_in_mile_with_coordinate(dnnode.lat, dnnode.lon, lat, lon)\n cur_node = upstream_rnode\n for next_node in geo.iter_to_upstream(upstream_rnode):\n dist += distutil.distance_in_mile(cur_node, next_node)\n if dnnode == next_node:\n f_done = True\n break\n cur_node = next_node\n if f_done:\n dist = -1 * dist\n\n if f_done:\n return dist\n else:\n return False",
"def loc_value(self):\n return self._loc_value[1]",
"def test_get_zr_location_structure(self):\n pass",
"def record_locator(self):\n return self._record_locator",
"def get_location(self):\r\n response = self.connection.make_request('GET', self.name,\r\n query_args='location')\r\n body = response.read()\r\n if response.status == 200:\r\n rs = ResultSet(self)\r\n h = handler.XmlHandler(rs, self)\r\n xml.sax.parseString(body, h)\r\n return rs.LocationConstraint\r\n else:\r\n raise self.connection.provider.storage_response_error(\r\n response.status, response.reason, body)",
"def get_location(self):\n\t\treturn self.location",
"def getRoverLocation(self):\n return self.rover.getPosition()",
"def RCL(self, loc):\n cmd = f\"*RCL {loc}\"\n self.instr.write(cmd)",
"def getRPos(self):\n c = ALIENS_IN_ROW-1\n while c >=0:\n i = 0\n for a in range(ALIEN_ROWS):\n if self._aliens[a][c] != None:\n return self._aliens[a][c].x + ALIEN_WIDTH/2\n else:\n i +=1\n if i == ALIEN_ROWS:\n c -=1",
"def item_location_retriever(item_pid):\n raise NotImplementedConfigurationError(\n config_variable=\"CIRCULATION_ITEM_LOCATION_RETRIEVER\"\n )",
"def determine_rv_location(self, ret_type): # pragma: no cover\n raise NotImplementedError(\"Implement this\")",
"def query_radius(self,\n location: np.ndarray,\n r: float) -> np.ndarray:\n lat = location[0, 0]\n lon = location[0, 1]\n dist = vec_haversine(self.lats, self.lons, lat, lon)\n return np.argwhere(dist <= r)",
"def _rloc_ip_net_addr(self):\n self.net_addr = ':'.join(self.rloc.split(':')[:-1]) + ':'\n return self.net_addr",
"def get_location(self):\r\n return self.__location",
"def get_regional_geolevel(self, rnode, idattr):\n return self.get_node(\n 'GeoLevels//GeoLevel[@ref=\"%s\"]' % idattr, parent=rnode)"
] | [
"0.7430711",
"0.5485765",
"0.5229584",
"0.5177095",
"0.5112131",
"0.5112131",
"0.5078246",
"0.49354967",
"0.48200157",
"0.48152867",
"0.47953466",
"0.4789419",
"0.47867963",
"0.47811654",
"0.47619376",
"0.4741405",
"0.4719583",
"0.4718814",
"0.4703106",
"0.46604514",
"0.46533173",
"0.46490067",
"0.46157858",
"0.46075365",
"0.45901436",
"0.45826733",
"0.45812544",
"0.45566535",
"0.4536894",
"0.45005155"
] | 0.7014321 | 1 |
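Unlike the boolean leaves above, `rloc` is a YANG container, so the getter hands back a child object whose own leaves the caller then reads. A minimal sketch; the `address` child leaf is invented for illustration:

```python
class RlocContainer:
    """Stand-in for the generated rloc container class."""

    def __init__(self, address=None):
        self.address = address      # hypothetical child leaf


class LocatorRecord:
    def __init__(self):
        self.__rloc = RlocContainer()

    def _get_rloc(self):
        return self.__rloc


rec = LocatorRecord()
rec._get_rloc().address = "192.0.2.1"   # mutate a leaf inside the container
print(rec._get_rloc().address)
```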
Setter method for rloc, mapped from YANG variable /input/LocatorRecord/rloc (container) | def _set_rloc(self, v, load=False):
try:
t = YANGDynClass(v,base=yc_rloc_pyangbind_example__input_LocatorRecord_rloc, is_container='container', yang_name="rloc", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)
except (TypeError, ValueError):
raise ValueError("""rloc must be of a type compatible with base=yc_rloc_pyangbind_example__input_LocatorRecord_rloc, is_container='container', yang_name="rloc", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True""")
self.__rloc = t
if hasattr(self, '_set'):
self._set() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _get_rloc(self):\n return self.__rloc",
"def setRLC(self, r, l, c):\n return",
"def RCL(self, loc):\n cmd = f\"*RCL {loc}\"\n self.instr.write(cmd)",
"def set_loc(self, loc):\n self.loc = loc",
"def _set_rlocProbed(self, v, load=False):\n try:\n t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name=\"rlocProbed\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"rlocProbed must be of a type compatible with base=YANGBool, is_leaf=True, yang_name=\"rlocProbed\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__rlocProbed = t\n if hasattr(self, '_set'):\n self._set()",
"def __init__(self, loc):\n self.loc = loc",
"def _set_LocatorRecord(self, v, load=False):\n try:\n t = YANGDynClass(v,base=YANGListType(\"locator_id\",yc_LocatorRecord_pyangbind_example__input_LocatorRecord, yang_name=\"LocatorRecord\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper), is_container='list', yang_name=\"LocatorRecord\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"LocatorRecord must be of a type compatible with base=YANGListType(\"locator_id\",yc_LocatorRecord_pyangbind_example__input_LocatorRecord, yang_name=\"LocatorRecord\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper), is_container='list', yang_name=\"LocatorRecord\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__LocatorRecord = t\n if hasattr(self, '_set'):\n self._set()",
"def set_location(self, location):\n self.location = location",
"def determine_rv_location(self, ret_type): # pragma: no cover\n raise NotImplementedError(\"Implement this\")",
"def set_location(self, location_set):",
"def set_loc(self, line):\n return Gumtree.gumtree.setLoc(line)",
"def _rloc_ip_net_addr(self):\n self.net_addr = ':'.join(self.rloc.split(':')[:-1]) + ':'\n return self.net_addr",
"def set_r(self, r):\n self._r = r",
"def _set_loc(self, args):\n if 'loc' not in args:\n raise ValueError('loc parameter is mandatory')\n if not self._is_valid_url(args['loc']):\n raise InvalidUrl('Invalid URL: %s' % args['loc']);\n self._loc = args['loc']",
"def __location_set(self, location):\n if isinstance(location, (str, int)):\n # allow setting of #dbref\n dbid = dbref(location, reqhash=False)\n if dbid:\n try:\n location = ObjectDB.objects.get(id=dbid)\n except ObjectDoesNotExist:\n # maybe it is just a name that happens to look like a dbid\n pass\n try:\n\n def is_loc_loop(loc, depth=0):\n \"\"\"Recursively traverse target location, trying to catch a loop.\"\"\"\n if depth > 10:\n return None\n elif loc == self:\n raise RuntimeError\n elif loc is None:\n raise RuntimeWarning\n return is_loc_loop(loc.db_location, depth + 1)\n\n try:\n is_loc_loop(location)\n except RuntimeWarning:\n # we caught an infinite location loop!\n # (location1 is in location2 which is in location1 ...)\n pass\n\n # if we get to this point we are ready to change location\n\n old_location = self.db_location\n\n # this is checked in _db_db_location_post_save below\n self._safe_contents_update = True\n\n # actually set the field (this will error if location is invalid)\n self.db_location = location\n self.save(update_fields=[\"db_location\"])\n\n # remove the safe flag\n del self._safe_contents_update\n\n # update the contents cache\n if old_location:\n old_location.contents_cache.remove(self)\n if self.db_location:\n self.db_location.contents_cache.add(self)\n\n except RuntimeError:\n errmsg = \"Error: %s.location = %s creates a location loop.\" % (self.key, location)\n raise RuntimeError(errmsg)\n except Exception:\n # raising here gives more info for now\n raise\n # errmsg = \"Error (%s): %s is not a valid location.\" % (str(e), location)\n # raise RuntimeError(errmsg)\n return",
"def query_radius(self,\n location: np.ndarray,\n r: float) -> np.ndarray:\n lat = location[0, 0]\n lon = location[0, 1]\n d0 = num_haversine(lat, lon, 0.0, 0.0)\n d1 = num_haversine(lat, lon, 90.0, 0.0)\n\n i0 = np.searchsorted(self.sorted0, d0 - r)\n i1 = np.searchsorted(self.sorted0, d0 + r)\n match0 = self.idx0[i0:i1+1]\n\n i0 = np.searchsorted(self.sorted1, d1 - r)\n i1 = np.searchsorted(self.sorted1, d1 + r)\n match1 = self.idx1[i0:i1 + 1]\n\n intersect = np.intersect1d(match0, match1)\n dist = vec_haversine(self.lats[intersect],\n self.lons[intersect],\n lat, lon)\n return intersect[dist <= r]",
"def _get_LocatorRecord(self):\n return self.__LocatorRecord",
"def set_radius(self, rmirr):\n self.F_EXT = 1\n self.RMIRR = rmirr",
"def set_rdate(self, rdate):\n self.__rdate = rdate",
"def loc(self, *args, **kwargs):\n if self._loc is None:\n try:\n self._loc = _MetaLocIndexer(self)\n ### New versions of _IXIndexer require \"name\" attribute.\n except TypeError as TE:\n self._loc= _MetaLocIndexer(self, 'loc')\n return self._loc",
"def add_loc_stub(self, player_id, loc):\n game = self.ctrl.game\n real_id = 0 if player_id == game.current_player else 1\n scale = .35\n\n r = rect.Rect(0, 0, 10, HandSprite.Size[1] * scale)\n\n num_play = len(game.players[player_id].play)\n if num_play == 0:\n x_rel = .5\n else:\n x_rel = min(.96, max(.04, loc / num_play))\n y_rel = (.38, .62)[real_id]\n r.center = pos(self.BoardL + x_rel * (self.HeroL - self.BoardL), y_rel)\n\n self.add(Rect(r, Colors['lightblue'], 5), name='loc_stub_{}_{}'.format(real_id, loc))",
"def loc(self):\n return self._loc",
"def loc(self):\n return self._loc",
"def query_radius(self,\n location: np.ndarray,\n r: float) -> np.ndarray:\n lat = location[0, 0]\n lon = location[0, 1]\n dist = vec_haversine(self.lats, self.lons, lat, lon)\n return np.argwhere(dist <= r)",
"def rpos(self):\n return self._rpos",
"def location_by_coordinate(r, lat, lon):\n upstream_rnode = r.rnodes[0]\n corr = upstream_rnode.corridor\n (upnode, dnnode) = geo.find_updown_rnodes(lat, lon, corr.rnodes, d_limit=1)\n if not upnode:\n return False\n\n f_done = False\n\n # check to downstream\n dist = distutil.distance_in_mile_with_coordinate(upnode.lat, upnode.lon, lat, lon)\n cur_node = upstream_rnode\n for next_node in geo.iter_to_downstream(upstream_rnode):\n dist += distutil.distance_in_mile(cur_node, next_node)\n if upnode == next_node:\n f_done = True\n break\n cur_node = next_node\n\n if not f_done:\n # check to upstream\n dist = distutil.distance_in_mile_with_coordinate(dnnode.lat, dnnode.lon, lat, lon)\n cur_node = upstream_rnode\n for next_node in geo.iter_to_upstream(upstream_rnode):\n dist += distutil.distance_in_mile(cur_node, next_node)\n if dnnode == next_node:\n f_done = True\n break\n cur_node = next_node\n if f_done:\n dist = -1 * dist\n\n if f_done:\n return dist\n else:\n return False",
"def _get_rlocProbed(self):\n return self.__rlocProbed",
"def parse_R(self, line: str):\n\n # R node1 node2 __value\n # R name node1 node2 __value\n name, node1, node2, value = self.parse_two_node_component(line,\"ohm\")\n return IComponent.R(node1, node2, value, name)",
"def Set(self, *args):\n return _XCAFDoc.XCAFDoc_Location_Set(self, *args)",
"def set_location(self, location):\r\n self.__location = location"
] | [
"0.64157027",
"0.54850066",
"0.5443288",
"0.51483464",
"0.50199974",
"0.5014761",
"0.48082906",
"0.47812858",
"0.47574958",
"0.4746225",
"0.4733928",
"0.471587",
"0.468939",
"0.46738097",
"0.46594927",
"0.4655104",
"0.46222645",
"0.45074654",
"0.44889754",
"0.44816402",
"0.44757146",
"0.44690552",
"0.44690552",
"0.44663155",
"0.44618917",
"0.4454799",
"0.44430462",
"0.4440211",
"0.44144484",
"0.44071832"
] | 0.8643157 | 0 |
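The container setter's main job is to verify that the replacement value is compatible with the generated container class before swapping it in. A simplified equivalent using a plain isinstance gate (YANGDynClass additionally re-registers paths and metadata):

```python
class RlocContainer:
    """Stand-in for the generated rloc container class."""


def set_rloc(record, value):
    """Replace the rloc container, enforcing the expected type."""
    if not isinstance(value, RlocContainer):
        raise ValueError(
            "rloc must be of a type compatible with RlocContainer"
        )
    record["rloc"] = value


record = {}
set_rloc(record, RlocContainer())        # accepted
try:
    set_rloc(record, "not-a-container")  # rejected
except ValueError as err:
    print(err)
```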
Getter method for recordTtl, mapped from YANG variable /input/mapping_record/recordTtl (int32) | def _get_recordTtl(self):
return self.__recordTtl | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _set_recordTtl(self, v, load=False):\n try:\n t = YANGDynClass(v,base=np.int32, is_leaf=True, yang_name=\"recordTtl\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"recordTtl must be of a type compatible with base=np.int32, is_leaf=True, yang_name=\"recordTtl\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__recordTtl = t\n if hasattr(self, '_set'):\n self._set()",
"def ttl_seconds(self) -> \"int\":\n return self._attrs.get(\"ttlSeconds\")",
"def record_duration(self):\n return self.config.get('record_duration', 5)",
"def ttl(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"ttl\")",
"def ttl(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"ttl\")",
"def ttl(self) -> pulumi.Output[int]:\n return pulumi.get(self, \"ttl\")",
"def dd_ptv_duration_map_nb(record):\n return record['valley_idx'] - record['start_idx']",
"def ttl(self):\n return self._ttl",
"def maximum_record_age_in_seconds(self) -> pulumi.Output[Optional[int]]:\n return pulumi.get(self, \"maximum_record_age_in_seconds\")",
"def dd_vtr_duration_map_nb(record):\n return record['end_idx'] - record['valley_idx']",
"def get_tmval(record, field_name):\n val = recordval(record, field_name)\n if (val != \"\" and not re.match(r'\\d?\\d:\\d\\d(:\\d\\d)?', val)):\n parser_error(\"bad value in \"+field_name+\": '\"+val+\"'-- try HH:MM:SS\")\n return val",
"def maximum_record_age_in_seconds(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"maximum_record_age_in_seconds\")",
"def parse_time_record(self, record):\n\n time_record = TIME_RECORD_MATCHER.match(record)\n if not time_record:\n time_data = None\n else:\n time_data = struct.unpack(TIME_FORMAT, \n time_record.group(0)[0:TIME_RECORD_SIZE])\n\n return time_data",
"def ttl(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ttl\")",
"def ttl(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ttl\")",
"def ttl(self) -> Optional[pulumi.Input[str]]:\n return pulumi.get(self, \"ttl\")",
"def getTTL(self):\n return self.TTL",
"def dd_duration_map_nb(record):\n return record['end_idx'] - record['start_idx']",
"def dd_vtr_duration_ratio_map_nb(record):\n return dd_vtr_duration_map_nb(record) / dd_duration_map_nb(record)",
"def get_recordId(self):\n return self.metadata['recordId']",
"def get_recordId(self):\n return self.metadata['recordId']",
"def check_record(self, domain, request_type):\n key = \"{0},{1}\".format(domain, request_type)\n if key in self._records:\n record = self._records[key]\n\n record_start_time = record.get_start_time()\n record_ttl = record.get_ttl()\n current_time = time.time()\n passed_time = current_time - record_start_time\n\n # Check if the record's TTL has passed.\n if passed_time <= record_ttl:\n return record\n else:\n # Delete the record from the cache.\n self.delete_record(key)\n\n return None",
"def get_ttl(self, key, now=None):\n if now is None:\n now = time.time()\n with self._lock:\n # pylint: disable=unused-variable\n expire, _value = self._values[key]\n return expire - now",
"def ttl(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"ttl\")",
"def ttl(self) -> pulumi.Output[str]:\n return pulumi.get(self, \"ttl\")",
"def calculate_record_number(self):\n return (self._read_state[StateKey.POSITION] - FLAG_RECORD_SIZE) / \\\n self.velocity_record_size",
"def get_ttl(self, default_ttl: Optional[int] = None) -> Optional[int]:\n return default_ttl if self.ttl is None else self.ttl",
"def get_datetime(self, record):\n value = RecordValue(self.timestamp_attribute).render(record)\n return datetime.datetime.fromtimestamp(value)",
"def ttl_seconds_after_finished(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"ttl_seconds_after_finished\")",
"def ttl(self, key):\n return self._command(b'PTTL', key, handler=lambda ms: .001 * ms)"
] | [
"0.7424597",
"0.5785556",
"0.5482547",
"0.5277504",
"0.5277504",
"0.5223717",
"0.51692307",
"0.51057476",
"0.5098125",
"0.5057996",
"0.50471157",
"0.49814385",
"0.49490383",
"0.4941643",
"0.4941643",
"0.4941643",
"0.48078465",
"0.47643143",
"0.4747422",
"0.47402343",
"0.47402343",
"0.4730465",
"0.47254425",
"0.472092",
"0.472092",
"0.46816722",
"0.46658856",
"0.4665438",
"0.46230146",
"0.4617583"
] | 0.7055444 | 1 |
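`recordTtl` is a signed 32-bit leaf (`np.int32` in the generated code), so valid values span -2**31 through 2**31 - 1. A plain-Python bounds check equivalent to what the typed base enforces; numpy's own out-of-range behaviour varies by version, so the limits are written out explicitly here:

```python
INT32_MIN, INT32_MAX = -2**31, 2**31 - 1


def check_int32(value):
    """Validate a candidate recordTtl value against signed 32-bit bounds."""
    if not isinstance(value, int) or isinstance(value, bool):
        raise ValueError("recordTtl must be an integer")
    if not INT32_MIN <= value <= INT32_MAX:
        raise ValueError("recordTtl out of int32 range: %d" % value)
    return value


print(check_int32(86400))        # a typical one-day TTL fits easily
try:
    check_int32(2**31)           # one past INT32_MAX
except ValueError as err:
    print(err)
```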
Setter method for recordTtl, mapped from YANG variable /input/mapping_record/recordTtl (int32) | def _set_recordTtl(self, v, load=False):
try:
t = YANGDynClass(v,base=np.int32, is_leaf=True, yang_name="recordTtl", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)
except (TypeError, ValueError):
raise ValueError("""recordTtl must be of a type compatible with base=np.int32, is_leaf=True, yang_name="recordTtl", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True""")
self.__recordTtl = t
if hasattr(self, '_set'):
self._set() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _get_recordTtl(self):\n return self.__recordTtl",
"def ttl_seconds(self, ttl_seconds: \"int\"):\n self._attrs[\"ttlSeconds\"] = ttl_seconds",
"def record_duration(self):\n return self.config.get('record_duration', 5)",
"def ttl_seconds(self) -> \"int\":\n return self._attrs.get(\"ttlSeconds\")",
"def set_ttl(self, ttl):",
"def ttl(self, ttl):\n\n self._ttl = ttl",
"def ttl(self, ttl):\n\n self._ttl = ttl",
"def dd_ptv_duration_map_nb(record):\n return record['valley_idx'] - record['start_idx']",
"def maximum_record_age_in_seconds(self) -> pulumi.Output[Optional[int]]:\n return pulumi.get(self, \"maximum_record_age_in_seconds\")",
"def set(self, key, value, ttl=None):\n if ttl and (type(ttl) is int) and (ttl > 0):\n ttl += int(dt.now().strftime('%s'))\n self.dadd('ttl', (key, ttl))\n return super(MyCache, self).set(key, value)",
"def dd_vtr_duration_map_nb(record):\n return record['end_idx'] - record['valley_idx']",
"def ttl(self, key):\n return self._command(b'PTTL', key, handler=lambda ms: .001 * ms)",
"def maximum_record_age_in_seconds(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"maximum_record_age_in_seconds\")",
"def test_record_eq_record_different_ttl(self):\n zone = Zone('test.example.com')\n record_current = Record(zone, 'test-record', {'type': 'A', 'ttl': 30})\n record_desired = Record(zone, 'test-record', {'type': 'A', 'ttl': 300})\n self.assertTrue(record_current != record_desired)",
"def ttl(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"ttl\")",
"def ttl(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"ttl\")",
"def testHrtDuration(self):\n attr = self.session.create_visit_attr()\n\n self.util.stringTypeTest(self, attr, \"duration\")\n\n self.util.stringPropertyTest(self, attr, \"duration\")",
"def ttl(self):\n return self._ttl",
"def set_ttl(self, key, ttl, now=None):\n if now is None:\n now = time.time()\n with self._lock:\n # pylint: disable=unused-variable\n _expire, value = self._values[key]\n self._values[key] = (now + ttl, value)",
"def _set_mapping_record(self, v, load=False):\n try:\n t = YANGDynClass(v,base=yc_mapping_record_pyangbind_example__input_mapping_record, is_container='container', yang_name=\"mapping-record\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"mapping_record must be of a type compatible with base=yc_mapping_record_pyangbind_example__input_mapping_record, is_container='container', yang_name=\"mapping-record\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__mapping_record = t\n if hasattr(self, '_set'):\n self._set()",
"def set_duration(self, duration):\n self.__test_result[Result.__DURATION] = round(duration * 1000)",
"def get_tmval(record, field_name):\n val = recordval(record, field_name)\n if (val != \"\" and not re.match(r'\\d?\\d:\\d\\d(:\\d\\d)?', val)):\n parser_error(\"bad value in \"+field_name+\": '\"+val+\"'-- try HH:MM:SS\")\n return val",
"def ttl(self) -> pulumi.Output[int]:\n return pulumi.get(self, \"ttl\")",
"def block_override_ttl(self) -> int:\n return pulumi.get(self, \"block_override_ttl\")",
"def set(self, key, value, ttl=None):\n with self.lock:\n max_age = None\n if ttl is not None:\n max_age = time.time() + ttl\n dict.__setitem__(self, key, (value, max_age))\n\n return value",
"def parse_time_record(self, record):\n\n time_record = TIME_RECORD_MATCHER.match(record)\n if not time_record:\n time_data = None\n else:\n time_data = struct.unpack(TIME_FORMAT, \n time_record.group(0)[0:TIME_RECORD_SIZE])\n\n return time_data",
"def reduceTTL(self):\n self.TTL -= 1\n return self.TTL <= 0",
"def dd_vtr_duration_ratio_map_nb(record):\n return dd_vtr_duration_map_nb(record) / dd_duration_map_nb(record)",
"def dd_duration_map_nb(record):\n return record['end_idx'] - record['start_idx']",
"def _addTiming(self, key, duration):\n pass"
] | [
"0.6537405",
"0.5423284",
"0.53877944",
"0.5307785",
"0.5183947",
"0.50947696",
"0.50947696",
"0.48951116",
"0.48640734",
"0.4802925",
"0.48009953",
"0.47961047",
"0.4780839",
"0.4754166",
"0.46808767",
"0.46808767",
"0.46310925",
"0.46241784",
"0.46237284",
"0.46001944",
"0.45571563",
"0.45501807",
"0.4527818",
"0.44967315",
"0.44552863",
"0.44472077",
"0.44348294",
"0.43979493",
"0.4374436",
"0.43655738"
] | 0.8461138 | 0 |
Getter method for maskLength, mapped from YANG variable /input/mapping_record/maskLength (uint8) | def _get_maskLength(self):
return self.__maskLength | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _set_maskLength(self, v, load=False):\n try:\n t = YANGDynClass(v,base=np.uint8, is_leaf=True, yang_name=\"maskLength\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"maskLength must be of a type compatible with base=np.uint8, is_leaf=True, yang_name=\"maskLength\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__maskLength = t\n if hasattr(self, '_set'):\n self._set()",
"def mask_size(self):\n m = self.size * self.mask()\n return m.astype(np.int8)",
"def get_lengths_from_binary_sequence_mask(self, mask: torch.Tensor):\n return mask.long().sum(-1)",
"def _get_masked_bits(mask):\n\n if not is_valid_ipv4_address(mask):\n raise ValueError(\"'%s' is an invalid subnet mask\" % mask)\n\n # converts octets to binary representation\n mask_bin = _get_address_binary(mask)\n mask_match = re.match('^(1*)(0*)$', mask_bin)\n\n if mask_match:\n return 32 - len(mask_match.groups()[1])\n else:\n raise ValueError('Unable to convert mask to a bit count: %s' % mask)",
"def fieldsToLengthBits(thisPacket):\n for field in thisPacket.iter('field'):\n if fieldLooksLikeBitmask(field):\n reMatch = bitmaskRE.match(field.attrib['showname'])\n field.attrib['lengthBits'] = str(len(reMatch.group(1).replace(' ','').replace('.','')))\n elif 'size' in field.attrib:\n field.attrib['lengthBits'] = str(8*int(field.attrib['size']))\n else:\n raise AssertionError(\"Missing size on non-bitmask field \"+field.attrib['name'])",
"def get_lengths_from_binary_sequence_mask(mask: torch.Tensor):\n return mask.long().sum(-1)",
"def get_lengths_from_binary_sequence_mask(\n mask: torch.BoolTensor,\n) -> torch.LongTensor:\n return mask.sum(-1)",
"def length_in_bits(self):\n if hasattr(self, '_m_length_in_bits'):\n return self._m_length_in_bits if hasattr(self, '_m_length_in_bits') else None\n\n self._m_length_in_bits = ((self.len - 1) * 8)\n return self._m_length_in_bits if hasattr(self, '_m_length_in_bits') else None",
"def get_length(binary_mask):\n mask_T = binary_mask.T\n\n tip_index = get_index_of_tip(mask_T)\n shoulder_index = get_index_of_shoulder(mask_T)\n\n return shoulder_index - tip_index",
"def vint_mask_for_length(length):\n\t\n\treturn 0b10000000 >> (length - 1)",
"def bit_length(self, ???):",
"def length(self):\n return struct.unpack('<H', self.pkt.payload[6:8])[0]",
"def mvarlen():\n\n global offset\n\n x=0L\n for i in range(4):\n\n try:\n byte=ord(midifile[offset])\n offset += 1\n except:\n error(\"Invalid MIDI file include (varlen->int)\")\n\n if byte < 0x80:\n x = ( x << 7 ) + byte\n break\n else:\n x = ( x << 7 ) + ( byte & 0x7f )\n\n return int(x)",
"def decode_var_len_uint8(br):\n if br.read_bits(1):\n nbits = br.read_bits(3)\n if nbits == 0:\n return 1\n return br.read_bits(nbits) + (1 << nbits)\n return 0",
"def length(self):\n return struct.unpack('<H', self.pkt.payload[2:4])[0]",
"def allocation_max_netmask_length(self) -> pulumi.Output[Optional[int]]:\n return pulumi.get(self, \"allocation_max_netmask_length\")",
"def _getVal(self, mask, size):\n\t\tif (int(self.header['flags']) & mask) > 0:\n\t\t\t# First unsigned short is the length of the string\n\t\t\ttxt = self.fpLnk.read(2)\n\t\t\tlength = struct.unpack(\"<H\", txt)[0]\n\t\t\t# Adjust for unicode\n\t\t\tlength = length * size\n\t\t\treturn self.fpLnk.read(length)",
"def data_length(self):\n size = self.unpack_dword(0x4)\n if size >= 0x80000000:\n size -= 0x80000000\n return size",
"def length(self):\n return struct.unpack('<B', self.pkt.payload[2:3])[0]",
"def length(self):\n return struct.unpack('<B', self.pkt.payload[2:3])[0]",
"def subnet_prefix_length(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"subnet_prefix_length\")",
"def mask(self):\n return ((2**(self.width) - 1) << self.lsb)",
"def allocation_max_netmask_length(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"allocation_max_netmask_length\")",
"def allocation_max_netmask_length(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"allocation_max_netmask_length\")",
"def bit_length(self): # real signature unknown; restored from __doc__\n pass",
"def bit_length(self): # real signature unknown; restored from __doc__\n pass",
"def bit_length(self): # real signature unknown; restored from __doc__\n pass",
"def bit_length(self): # real signature unknown; restored from __doc__\n pass",
"def bit_length(self): # real signature unknown; restored from __doc__\n pass",
"def bit_length(self): # real signature unknown; restored from __doc__\n pass"
] | [
"0.7444396",
"0.6525655",
"0.59941226",
"0.5959563",
"0.59322095",
"0.58424264",
"0.57360834",
"0.5563699",
"0.54108727",
"0.53078985",
"0.5169768",
"0.51070464",
"0.5102177",
"0.5080575",
"0.5043805",
"0.50004286",
"0.49808195",
"0.4906646",
"0.4898116",
"0.4898116",
"0.48980364",
"0.48869017",
"0.48551005",
"0.48551005",
"0.4800684",
"0.4800684",
"0.4800684",
"0.4800684",
"0.4800684",
"0.4800684"
] | 0.71786106 | 1 |
Setter method for maskLength, mapped from YANG variable /input/mapping_record/maskLength (uint8) | def _set_maskLength(self, v, load=False):
try:
t = YANGDynClass(v,base=np.uint8, is_leaf=True, yang_name="maskLength", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)
except (TypeError, ValueError):
raise ValueError("""maskLength must be of a type compatible with base=np.uint8, is_leaf=True, yang_name="maskLength", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True""")
self.__maskLength = t
if hasattr(self, '_set'):
self._set() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _get_maskLength(self):\n return self.__maskLength",
"def mask_size(self):\n m = self.size * self.mask()\n return m.astype(np.int8)",
"def fieldsToLengthBits(thisPacket):\n for field in thisPacket.iter('field'):\n if fieldLooksLikeBitmask(field):\n reMatch = bitmaskRE.match(field.attrib['showname'])\n field.attrib['lengthBits'] = str(len(reMatch.group(1).replace(' ','').replace('.','')))\n elif 'size' in field.attrib:\n field.attrib['lengthBits'] = str(8*int(field.attrib['size']))\n else:\n raise AssertionError(\"Missing size on non-bitmask field \"+field.attrib['name'])",
"def vint_mask_for_length(length):\n\t\n\treturn 0b10000000 >> (length - 1)",
"def get_lengths_from_binary_sequence_mask(self, mask: torch.Tensor):\n return mask.long().sum(-1)",
"def length_in_bits(self):\n if hasattr(self, '_m_length_in_bits'):\n return self._m_length_in_bits if hasattr(self, '_m_length_in_bits') else None\n\n self._m_length_in_bits = ((self.len - 1) * 8)\n return self._m_length_in_bits if hasattr(self, '_m_length_in_bits') else None",
"def _get_masked_bits(mask):\n\n if not is_valid_ipv4_address(mask):\n raise ValueError(\"'%s' is an invalid subnet mask\" % mask)\n\n # converts octets to binary representation\n mask_bin = _get_address_binary(mask)\n mask_match = re.match('^(1*)(0*)$', mask_bin)\n\n if mask_match:\n return 32 - len(mask_match.groups()[1])\n else:\n raise ValueError('Unable to convert mask to a bit count: %s' % mask)",
"def get_lengths_from_binary_sequence_mask(mask: torch.Tensor):\n return mask.long().sum(-1)",
"def get_lengths_from_binary_sequence_mask(\n mask: torch.BoolTensor,\n) -> torch.LongTensor:\n return mask.sum(-1)",
"def bit_length(self, ???):",
"def allocation_max_netmask_length(self) -> pulumi.Output[Optional[int]]:\n return pulumi.get(self, \"allocation_max_netmask_length\")",
"def allocation_max_netmask_length(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"allocation_max_netmask_length\")",
"def allocation_max_netmask_length(self) -> Optional[pulumi.Input[int]]:\n return pulumi.get(self, \"allocation_max_netmask_length\")",
"def mask(self):\n return ((2**(self.width) - 1) << self.lsb)",
"def get_mask_from_lengths(memory, memory_lengths):\n mask = memory.data.new(memory.size(0), memory.size(1)).byte().zero_()\n for idx, l in enumerate(memory_lengths):\n mask[idx][:l] = 1\n return ~mask",
"def Mask(self) -> int:",
"def bit_length(self): # real signature unknown; restored from __doc__\n pass",
"def bit_length(self): # real signature unknown; restored from __doc__\n pass",
"def bit_length(self): # real signature unknown; restored from __doc__\n pass",
"def bit_length(self): # real signature unknown; restored from __doc__\n pass",
"def bit_length(self): # real signature unknown; restored from __doc__\n pass",
"def bit_length(self): # real signature unknown; restored from __doc__\n pass",
"def bit_length(self): # real signature unknown; restored from __doc__\n pass",
"def bit_length(self): # real signature unknown; restored from __doc__\n pass",
"def bit_length(self): # real signature unknown; restored from __doc__\n pass",
"def bit_length(self): # real signature unknown; restored from __doc__\n pass",
"def bit_length(self): # real signature unknown; restored from __doc__\n pass",
"def bit_length(self): # real signature unknown; restored from __doc__\n pass",
"def bit_length(self): # real signature unknown; restored from __doc__\n pass",
"def bit_length(self): # real signature unknown; restored from __doc__\n pass"
] | [
"0.6653406",
"0.6379773",
"0.58128196",
"0.55888116",
"0.53854203",
"0.53751683",
"0.5288363",
"0.5091646",
"0.50880945",
"0.4921406",
"0.4910974",
"0.48586044",
"0.48586044",
"0.47661933",
"0.4757868",
"0.47240102",
"0.47114295",
"0.47114295",
"0.47114295",
"0.47114295",
"0.47114295",
"0.47114295",
"0.47114295",
"0.47114295",
"0.47114295",
"0.47114295",
"0.47114295",
"0.47114295",
"0.47114295",
"0.47114295"
] | 0.8388851 | 0 |
Getter method for mapVersion, mapped from YANG variable /input/mapping_record/mapVersion (int16) | def _get_mapVersion(self):
return self.__mapVersion | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _set_mapVersion(self, v, load=False):\n try:\n t = YANGDynClass(v,base=np.int16, is_leaf=True, yang_name=\"mapVersion\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"mapVersion must be of a type compatible with base=np.int16, is_leaf=True, yang_name=\"mapVersion\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__mapVersion = t\n if hasattr(self, '_set'):\n self._set()",
"def read_versionInfo(self):\n # PROTECTED REGION ID(SdpMasterLeafNode.versionInfo_read) ENABLED START #\n return self.attr_map[\"versionInfo\"]\n # PROTECTED REGION END # // SdpMasterLeafNode.versionInfo_read",
"def read_version():\n # code parts were taken from here https://stackoverflow.com/a/67692\n\n path2setup = os.path.dirname(__file__)\n version_file = os.path.abspath(\n os.path.join(path2setup, \"diffusion_maps\", \"version.py\"))\n\n spec = importlib.util.spec_from_file_location(\"version\", version_file)\n version = importlib.util.module_from_spec(spec)\n spec.loader.exec_module(version)\n return version.version.v_short",
"def getVersion(self):\n return self.get('Version', type=\"numeric\")",
"def vector_info(map, layer=1):\n\n s = read_command('v.info', flags='get', layer=layer, map=map)\n\n kv = parse_key_val(s)\n for k in ['north', 'south', 'east', 'west', 'top', 'bottom']:\n kv[k] = float(kv[k])\n for k in ['level', 'num_dblinks']:\n kv[k] = int(kv[k])\n for k in ['nodes', 'points', 'lines', 'boundaries', 'centroids', 'areas',\n 'islands', 'primitives']:\n kv[k] = int(kv[k])\n if 'map3d' in kv:\n kv['map3d'] = bool(int(kv['map3d']))\n if kv['map3d']:\n for k in ['faces', 'kernels', 'volumes', 'holes']:\n kv[k] = int(kv[k])\n\n return kv",
"def convert(self):\n return _libsbml.SBMLLevelVersionConverter_convert(self)",
"def loadVersionMap():\n\tlines = readVersionList(cfg.FILE_VERSION)\n\tver_map = {}\n\tval = []\n\tflag = False\n\n\tfor line in lines:\n\t\tline = line.strip()\n\t\tif line.startswith(cfg.FLAG_TIPS):\n\t\t\tline_list = line.split()\n\t\t\tlen_row = len(line_list)\n\t\t\ta_DmnNum = {}\n\t\t\tDOMAIN = cfg.FLAG_NULL\n\n\t\t\tfor i in range(0,len_row):\n\t\t\t\tDOMAIN = line_list[i]\n\t\t\t\ta_DmnNum[DOMAIN] = i\n\t\t\tval = line_list\n\t\telif line.startswith(cfg.OPEN_BRACKET):\n\t\t\tleft = line.find(cfg.OPEN_BRACKET)\n\t\t\tright = line.find(cfg.CLOSE_BRACKET)\n\t\t\tName = line[left+1:right].strip()\n\t\t\tver_map[Name] = []\n\t\t\tver_map[Name].append(val[1:])\n\t\telif not line:\n\t\t\tcontinue\n\t\telse:\n\t\t\tline_list = line.split()\n\t\t\tver_map[Name].append(line_list)\n\tsortVersion(ver_map)\n\treturn ver_map",
"def version(self):\n a = re.search('(?<=_V)\\d{1,2}', self.fname)\n if a is None:\n return None\n else:\n return int(a.group())",
"def GetVersion(self) -> \"char const *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMD2_GetVersion(self)",
"def GetVersion(self) -> \"char const *\":\n return _itkVTKPolyDataReaderPython.itkVTKPolyDataReaderMF2_GetVersion(self)",
"def version_code(self):\n return self.proto.details.appDetails.versionCode",
"def version(self):\n if \"version\" in self._prop_dict:\n return self._prop_dict[\"version\"]\n else:\n return None",
"def parse_version(version):\n if version is None:\n raise ValueError(\"sbe:messageSchema/@version is required\")\n\n return int(version)",
"def version(self):\r\n return self.version_guid",
"def _get_version(self, identifier: Identifier,\n version: Optional[int] = None) -> DocMetadata:\n parent_path = self._get_parent_path(identifier=identifier,\n version=version)\n path = os.path.join(parent_path,\n (f'{identifier.filename}.abs' if not version\n else f'{identifier.filename}v{version}.abs'))\n return self.parse_abs_file(filename=path)",
"def version(self):\n return self._get(\"version\")",
"def get_version(self, project_id, model_id, version_id):\n endpoint = \"/project/{}/model/{}/version/{}\".format(\n project_id, model_id, version_id\n )\n return self._get(endpoint, _ModelVersionSchema())",
"def get_version(self, direc_path):\n try:\n archive = zipfile.ZipFile(direc_path, 'r')\n if u'cc/mallet/regression/' not in archive.namelist():\n return '2.0.7'\n else:\n return '2.0.8RC3'\n except Exception:\n\n xml_path = direc_path.split(\"bin\")[0]\n try:\n doc = et.parse(xml_path + \"pom.xml\").getroot()\n namespace = doc.tag[:doc.tag.index('}') + 1]\n return doc.find(namespace + 'version').text.split(\"-\")[0]\n except Exception:\n return \"Can't parse pom.xml version file\"",
"def version_id(self) -> pulumi.Output[Optional[str]]:\n return pulumi.get(self, \"version_id\")",
"def getVersion(self):\n return _libsbml.XMLInputStream_getVersion(self)",
"def model_version_id(self) -> Optional[str]:\n return pulumi.get(self, \"model_version_id\")",
"def get_version_details(self, project_id, document_id, version=None):\n url = base_url + 'portal/' + str(self.portal_id) + '/projects/' + str(project_id) + '/documents/' + str(document_id) + '/'\n if version is not None: \n param = {\n 'version': version\n }\n else:\n param = None\n response = zoho_http_client.get(url, self.details, param)\n return parser.get_documents(response)[0]",
"def vector_info_topo(map, layer=1):\n s = read_command('v.info', flags='t', layer=layer, map=map)\n ret = parse_key_val(s, val_type=int)\n if 'map3d' in ret:\n ret['map3d'] = bool(ret['map3d'])\n\n return ret",
"def get_version_info(self, key_name='ver_sw_release'):\n if key_name in self._msg_info_dict:\n val = self._msg_info_dict[key_name]\n return ((val >> 24) & 0xff, (val >> 16) & 0xff, (val >> 8) & 0xff, val & 0xff)\n return None",
"def get_version_info(self):\n return self._jadeRpc('get_version_info')",
"def getVersion(self):\n return _libsbml.SBase_getVersion(self)",
"def verNumber(version, versionPattern=''):\n if not versionPattern:\n versionPattern = os.environ.get('KOMBI_VERSION_PATTERN', DEFAULT_VERSION_PATTERN)\n\n patternParts = __splitVersionPattern(versionPattern)\n return str(version)[len(patternParts['prefix']): len(patternParts['prefix']) + len(patternParts['padding'])]",
"def version(self):\n data = self._ftdi.spi_read(self.VERSION_ADDR, len=1, burst='fixed')\n return data[0] & self.VERSION_MASK",
"def get_version(self):\n return self._version",
"def get_version(self):\n return self._version"
] | [
"0.74593055",
"0.55581903",
"0.5232696",
"0.5110775",
"0.5033999",
"0.50145763",
"0.49021116",
"0.4732159",
"0.4721976",
"0.4653788",
"0.46497813",
"0.4641943",
"0.46388727",
"0.46224704",
"0.4616354",
"0.46102205",
"0.46023163",
"0.46010247",
"0.4586401",
"0.45821977",
"0.45753545",
"0.4573129",
"0.45654416",
"0.45487696",
"0.45426047",
"0.45421565",
"0.4540312",
"0.4520539",
"0.45170307",
"0.45170307"
] | 0.63023394 | 1 |
Setter method for mapVersion, mapped from YANG variable /input/mapping_record/mapVersion (int16) | def _set_mapVersion(self, v, load=False):
try:
t = YANGDynClass(v,base=np.int16, is_leaf=True, yang_name="mapVersion", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)
except (TypeError, ValueError):
raise ValueError("""mapVersion must be of a type compatible with base=np.int16, is_leaf=True, yang_name="mapVersion", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True""")
self.__mapVersion = t
if hasattr(self, '_set'):
self._set() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _get_mapVersion(self):\n return self.__mapVersion",
"def SetVersion(self, addonVersion):\n self._addonVersion = addonVersion",
"def version(self, version):\n self._version = utils.VersionParser().parse(version)",
"def convert(self):\n return _libsbml.SBMLLevelVersionConverter_convert(self)",
"def version(self, version):\n \n self._version = version",
"def read_versionInfo(self):\n # PROTECTED REGION ID(SdpMasterLeafNode.versionInfo_read) ENABLED START #\n return self.attr_map[\"versionInfo\"]\n # PROTECTED REGION END # // SdpMasterLeafNode.versionInfo_read",
"def version(self, version: int):\n\n self._version = version",
"def version(self, version):\n self._version = version",
"def version(self, version):\n self._version = version",
"def version(self, version):\n\n self._version = version",
"def version(self, version):\n\n self._version = version",
"def version(self, version):\n\n self._version = version",
"def version(self, version):\n\n self._version = version",
"def version(self, version):\n\n self._version = version",
"def version(self, version):\n\n self._version = version",
"def version(self, version):\n\n self._version = version",
"def version(self, version):\n\n self._version = version",
"def version(self, version):\n\n self._version = version",
"def version(self, version):\n\n self._version = version",
"def version(self, version):\n\n self._version = version",
"def version(self, version):\n\n self._version = version",
"def version(self, version):\n\n self._version = version",
"def version(self, version):\n\n self._version = version",
"def version(self, version):\n\n self._version = version",
"def version(self, version):\n\n self._version = version",
"def version(self, version):\n\n self._version = version",
"def version(self, version):\n\n self._version = version",
"def version(self, version):\n\n self._version = version",
"def version(self, version):\n\n self._version = version",
"def version(self, version: str):\n\n self._version = version"
] | [
"0.5692604",
"0.5344507",
"0.49350342",
"0.49146506",
"0.48997697",
"0.48858517",
"0.48856238",
"0.48540726",
"0.48540726",
"0.47561356",
"0.47561356",
"0.47561356",
"0.47561356",
"0.47561356",
"0.47561356",
"0.47561356",
"0.47561356",
"0.47561356",
"0.47561356",
"0.47561356",
"0.47561356",
"0.47561356",
"0.47561356",
"0.47561356",
"0.47561356",
"0.47561356",
"0.47561356",
"0.47561356",
"0.47561356",
"0.47560245"
] | 0.83946174 | 0 |
Setter method for action, mapped from YANG variable /input/mapping_record/action (enumeration) | def _set_action(self, v, load=False):
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'Drop': {}, u'NativelyForward': {}, u'SendMapRequest': {}, u'NoAction': {}},), is_leaf=True, yang_name="action", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)
except (TypeError, ValueError):
raise ValueError("""action must be of a type compatible with base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'Drop': {}, u'NativelyForward': {}, u'SendMapRequest': {}, u'NoAction': {}},), is_leaf=True, yang_name="action", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True""")
self.__action = t
if hasattr(self, '_set'):
self._set() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def action(self, action):\n if action is None:\n raise ValueError(\"Invalid value for `action`, must not be `None`\") # noqa: E501\n\n self._action = action",
"def set_action(self, action):\n self.action = action",
"def set_action(self, action):\n self.action = action",
"def set_action(self, action):\n self._action = action\n return self",
"def setAction(self, action):\n self.action = action\n return self",
"def set_action(self,action):\n self.__action = action",
"def setAction(self, value):\n return self._set(action=value)",
"def action(self, action):\n allowed_values = [\"APPLY\", \"PRECHECK\"]\n if action not in allowed_values:\n raise ValueError(\n \"Invalid value for `action`, must be one of {0}\"\n .format(allowed_values)\n )\n self._action = action",
"def action(self, action):\n self._action = action",
"def action(self, action):\n allowed_values = [\"DELETE\", \"NONE\"]\n if action not in allowed_values:\n raise ValueError(\n \"Invalid value for `action` ({0}), must be one of {1}\"\n .format(action, allowed_values)\n )\n\n self._action = action",
"def _act(self, action):\n self._set_action(action)",
"def _set_action(self, action):\n raise NotImplementedError()",
"def _set_action(self, action):\n raise NotImplementedError()",
"def _set_action(self, action):\n raise NotImplementedError()",
"def _set_action(self, action):\n raise NotImplementedError()",
"def _set_action(self, action):\n raise NotImplementedError()",
"def _set_action(self, action):\n raise NotImplementedError()",
"def action(self, action):\n\n self._action = action",
"def get_action(self, action=None):\n if action:\n self.action = action\n\n if self.action not in AjaxResponseAction.choices:\n raise ValueError(\n \"Invalid action selected: '{}'\".format(self.action))\n\n return self.action",
"def set_action(self, action):\n if action not in self.images:\n raise Exception('Action not defined for {}'.format(\n self.__name__\n ))\n self._action_i = 0\n self._action = action",
"def add_action(self, action):\n self.action = action",
"def add_action(self, action):\n self.action = action",
"def ToAction(self):\n action = self.action_key.get()\n if not action:\n raise ValueError('Test run action %s not found' % self.action_key)\n options = NameValuePair.ToDict(action.options or [])\n options.update(NameValuePair.ToDict(self.options or []))\n action.options = NameValuePair.FromDict(options)\n return action",
"def define_action(char):\n if char == \"checks\":\n return ActionType.CHECK\n elif char == \"folds\":\n return ActionType.FOLD\n elif char == \"bets\":\n return ActionType.BET\n elif char == \"raises\":\n return ActionType.RAISE\n elif char == \"calls\":\n return ActionType.CALL\n else:\n return ActionType.UNDEFINED",
"def action_code(self, action_code):\n\n self._action_code = action_code",
"def convert_to_low_level_action(self, i_state, action):\n pass",
"def action(self) -> pulumi.Input[str]:\n return pulumi.get(self, \"action\")",
"def setAction(self, func):\n\t\tself.action = func",
"def receiveAction(self, action):\n self.action = action",
"def action(self, action):\n if self.is_valid_action(action):\n # Modify the board\n self.board[action[0]][action[1]] = self.player\n\n # Switch player\n self.player = X if self.player == O else O\n else:\n raise Exception('Invalid action')"
] | [
"0.7073768",
"0.6777562",
"0.6777562",
"0.6746833",
"0.67244196",
"0.66816896",
"0.66607445",
"0.6581482",
"0.6524658",
"0.65044004",
"0.640012",
"0.6387688",
"0.6387688",
"0.6387688",
"0.6387688",
"0.6387688",
"0.6387688",
"0.6386035",
"0.62811095",
"0.6280877",
"0.6124923",
"0.6124923",
"0.5937993",
"0.5934825",
"0.59030944",
"0.5885622",
"0.58600354",
"0.58495253",
"0.58451366",
"0.5788543"
] | 0.7640915 | 0 |
Getter method for authoritative, mapped from YANG variable /input/mapping_record/authoritative (boolean) | def _get_authoritative(self):
return self.__authoritative | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _set_authoritative(self, v, load=False):\n try:\n t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name=\"authoritative\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"authoritative must be of a type compatible with base=YANGBool, is_leaf=True, yang_name=\"authoritative\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__authoritative = t\n if hasattr(self, '_set'):\n self._set()",
"def is_authorised_representative(self):\n if not hasattr(self, '_is_authorised_representative'):\n self._is_authorised_representative = hasattr(self, 'authorised_representative')\n\n return self._is_authorised_representative",
"def is_achromatic(self) -> bool:\n\n value = self._space.is_achromatic(self.coords(nans=False))\n if value is None:\n xyz = self.convert('xyz-d65')\n return bool(xyz._space.is_achromatic(xyz[:-1]))\n return value",
"def is_canonical(self):\n return False",
"def get_explicit(self) -> bool:\n return self.explicit",
"def is_quantitative(self):\n return self._type == 'quantitative'",
"def is_explicit(a_rel):\n return bool(a_rel[CONNECTIVE][TOK_LIST])",
"def is_canonical(self):\n if self.canonical_acceptor == self.acceptor:\n return True\n else:\n return False",
"def is_amino_acid(self):\n return True",
"def is_authorized_contact(self, dialersetting, str_contact):\n return common_contact_authorization(dialersetting, str_contact)",
"def is_amino_acid(self):\n return False",
"def __nonzero__(self):\n # XXX: check the name and the characterID?\n if self.data.get('name'): return 1\n return 0",
"def is_authorized(self, attributes, attribute_mapping):\n LOG.debug('is_authorized() attributes = %s' % attributes)\n LOG.debug('is_authorized() attribute_mapping = %s' % attribute_mapping)\n return True",
"def enable_custom_ca_trust(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_custom_ca_trust\")",
"def is_CA(self):\n\n basicConstraints = self.get_basicConstraints()\n return basicConstraints is not None and basicConstraints[0]",
"def is_in_adr_lexicon(text, adr_lexicon_dict):\n for item in adr_lexicon_dict:\n if item.lower() == text.lower():\n return True\n\n return False",
"def is_aromatic(self):\n\n return np.array([bond.is_aromatic for bond in self])",
"def isAlgebraic(self):\n return _libsbml.Rule_isAlgebraic(self)",
"def get_marccode(config,fieldname):\n\tif fieldname in config[\"fielddesc\"].keys(): \n\t\tif config[\"fielddesc\"][fieldname][1] == \"\": return False \n\t\telse: return config[\"fielddesc\"][fieldname][1]\n\telse: return False",
"def bool_attr(attr):\n if attr.lower() == \"true\":\n val = True\n elif attr.lower() == \"false\":\n val = False\n else:\n raise EzXMLError(\"Must be \"\\\n \"'true' or 'false'. Not %s\" % (attr))\n return val",
"def is_biological(self) -> bool:\n return ATTRIBUTE.Biological.value in self.type_data.attributes",
"def is_biological(self) -> bool:\n return ATTRIBUTE.Biological.value in self.type_data.attributes",
"def attestation_authority(self) -> pulumi.Output['outputs.AttestationAuthorityResponse']:\n return pulumi.get(self, \"attestation_authority\")",
"def test_is_canonical_by_transcript(self):\n assert self.icd.is_canonical_by_transcript(\"ENST00000373656\") is True\n assert self.icd.is_canonical_by_transcript(\"ENST00000373654\") is False\n assert self.icd.is_canonical_by_transcript(\"ENST00000337451\") is True\n assert self.icd.is_canonical_by_transcript(\"ENST00000398013\") is False",
"def is_nucleic_acid(self):\n return True",
"def is_nucleic_acid(self):\n return False",
"def case_sensitive_names(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"case_sensitive_names\")",
"def case_sensitive_names(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"case_sensitive_names\")",
"def case_sensitive_names(self) -> pulumi.Output[bool]:\n return pulumi.get(self, \"case_sensitive_names\")",
"def _ca_atom(self, residue_id):\n\n residue = self._residue_from_residue_id(residue_id)\n try:\n return residue[\"CA\"]\n except (KeyError, TypeError):\n return None"
] | [
"0.6718088",
"0.5188267",
"0.48823407",
"0.4824742",
"0.48030117",
"0.4789381",
"0.46382806",
"0.44682306",
"0.4430279",
"0.44179815",
"0.43863338",
"0.4384197",
"0.4380008",
"0.4363615",
"0.4360782",
"0.43460184",
"0.43358245",
"0.43231368",
"0.43094635",
"0.43013746",
"0.4297009",
"0.4297009",
"0.4285716",
"0.42838082",
"0.42766032",
"0.42736253",
"0.4268898",
"0.4268898",
"0.42671147",
"0.42373428"
] | 0.6757098 | 0 |
Setter method for authoritative, mapped from YANG variable /input/mapping_record/authoritative (boolean) | def _set_authoritative(self, v, load=False):
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="authoritative", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)
except (TypeError, ValueError):
raise ValueError("""authoritative must be of a type compatible with base=YANGBool, is_leaf=True, yang_name="authoritative", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True""")
self.__authoritative = t
if hasattr(self, '_set'):
self._set() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _get_authoritative(self):\n return self.__authoritative",
"def is_authorised_representative(self):\n if not hasattr(self, '_is_authorised_representative'):\n self._is_authorised_representative = hasattr(self, 'authorised_representative')\n\n return self._is_authorised_representative",
"def is_canonical(self):\n return False",
"def set_authoritative(subdom, ns_dn):\n for super_domain in subdom:\n if super_domain in nscache:\n logger.log(DEBUG2, \"setting {0} as auth for parent domain {1} of {2}\".format(ns_dn, super_domain, subdom))\n nscache[super_domain][ns_dn]._authoritative = True \n now = int(time())\n ttl = nscache[super_domain][ns_dn]._expiration - now\n #dn, ttl, nsdn\n #build a resource record representive this authority\n rr_ns = RR_NS(DomainName(super_domain), ttl, ns_dn)\n rr_ns.pack()\n return rr_ns",
"def set_authorisation(self, keys: Dict[str, str]) -> bool:\n raise NotImplementedError",
"def is_quantitative(self):\n return self._type == 'quantitative'",
"def is_achromatic(self) -> bool:\n\n value = self._space.is_achromatic(self.coords(nans=False))\n if value is None:\n xyz = self.convert('xyz-d65')\n return bool(xyz._space.is_achromatic(xyz[:-1]))\n return value",
"def a_flag(self):\n if self.datalogflag:\n self.datalog = DEFAULT_DATALOG_AQ",
"def get_explicit(self) -> bool:\n return self.explicit",
"def is_explicit(a_rel):\n return bool(a_rel[CONNECTIVE][TOK_LIST])",
"def is_amino_acid(self):\n return True",
"def is_canonical(self):\n if self.canonical_acceptor == self.acceptor:\n return True\n else:\n return False",
"def enable_custom_ca_trust(self) -> Optional[pulumi.Input[bool]]:\n return pulumi.get(self, \"enable_custom_ca_trust\")",
"def is_amino_acid(self):\n return False",
"def set_explicit(self, explicit: bool) -> None:\n self.explicit = explicit",
"def setAccidental(self, accidental):\n acc = _int(accidental)\n if acc != None:\n self.accidental = acc",
"def bool_attr(attr):\n if attr.lower() == \"true\":\n val = True\n elif attr.lower() == \"false\":\n val = False\n else:\n raise EzXMLError(\"Must be \"\\\n \"'true' or 'false'. Not %s\" % (attr))\n return val",
"def approve_public_credit_name(self):\n self.public_credit_name_approved = True\n self.save()",
"def is_nucleic_acid(self):\n return False",
"def test_is_canonical_by_transcript(self):\n assert self.icd.is_canonical_by_transcript(\"ENST00000373656\") is True\n assert self.icd.is_canonical_by_transcript(\"ENST00000373654\") is False\n assert self.icd.is_canonical_by_transcript(\"ENST00000337451\") is True\n assert self.icd.is_canonical_by_transcript(\"ENST00000398013\") is False",
"def extend_author_field(author_field, cds_id):\n cds_authority_id = \"AUTHOR|(CDS){0}\".format(cds_id)\n if cds_authority_id not in field_get_subfield_values(author_field, '0'):\n field_add_subfield(author_field, \"0\", cds_authority_id)\n field_add_subfield(author_field, \"9\", \"#BEARD#\")\n return True\n\n return False",
"def is_nucleic_acid(self):\n return True",
"def _bool_encode(self, d):\n for k, v in d.items():\n if isinstance(v, bool):\n d[k] = str(v).lower()\n \n return d",
"def set_ascertained(self):\n\n # Do the correction if the tree is calibrated, as ascertainment\n # correction influences timing estimates\n if self.ascertained == None:\n self.ascertained = not self.global_config.arbitrary_tree",
"def isAlgebraic(self):\n return _libsbml.Rule_isAlgebraic(self)",
"def _analyze_author(self, response, frontpage_author):\n author_location = response.get_url().get_domain_path().url_join(\n frontpage_author.group(1))\n\n # Check for anomalies in the location of author.exe\n if frontpage_author.group(1) != '_vti_bin/_vti_aut/author.exe':\n name = 'Customized frontpage configuration'\n\n desc = 'The FPAuthorScriptUrl is at: \"%s\" instead of the default'\\\n ' location: \"/_vti_bin/_vti_adm/author.exe\". This is very'\\\n ' uncommon.'\n desc = desc % author_location\n else:\n name = 'FrontPage FPAuthorScriptUrl'\n\n desc = 'The FPAuthorScriptUrl is at: \"%s\".'\n desc = desc % author_location\n\n i = Info(name, desc, response.id, self.get_name())\n i.set_url(author_location)\n i['FPAuthorScriptUrl'] = author_location\n \n kb.kb.append(self, 'frontpage_version', i)\n om.out.information(i.get_desc())",
"def persist_canonical(self, canonical: pd.DataFrame, **kwargs) -> bool:\n if not isinstance(self.connector_contract, ConnectorContract):\n return False\n return self.backup_canonical(canonical=canonical, uri=self.connector_contract.uri, **kwargs)",
"def setTrue(self):\n self.cond = CT.TRUE\n self.left = self.right = None\n self.z3 = BoolSort().cast(True)\n self.cleaned = self.Z3Simplified = self.customSimplified = self.checked = True\n self.customSimplifiedValue = CE.TRUE",
"def is_authorized_contact(self, dialersetting, str_contact):\n return common_contact_authorization(dialersetting, str_contact)",
"def primary_flag(self, primary_flag):\n\n self._primary_flag = primary_flag"
] | [
"0.6427643",
"0.49042958",
"0.48028368",
"0.46999156",
"0.4692326",
"0.46825922",
"0.45674053",
"0.44718954",
"0.44571823",
"0.44474322",
"0.4403648",
"0.43995556",
"0.4394256",
"0.4390693",
"0.43134287",
"0.43096423",
"0.43039706",
"0.4281209",
"0.42597413",
"0.42550257",
"0.4252473",
"0.4227118",
"0.42217705",
"0.4211324",
"0.4205975",
"0.42021915",
"0.4201268",
"0.4177822",
"0.41658947",
"0.41435516"
] | 0.75602883 | 0 |
Getter method for LocatorRecord, mapped from YANG variable /input/LocatorRecord (list) | def _get_LocatorRecord(self):
return self.__LocatorRecord | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _set_LocatorRecord(self, v, load=False):\n try:\n t = YANGDynClass(v,base=YANGListType(\"locator_id\",yc_LocatorRecord_pyangbind_example__input_LocatorRecord, yang_name=\"LocatorRecord\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper), is_container='list', yang_name=\"LocatorRecord\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"LocatorRecord must be of a type compatible with base=YANGListType(\"locator_id\",yc_LocatorRecord_pyangbind_example__input_LocatorRecord, yang_name=\"LocatorRecord\", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper), is_container='list', yang_name=\"LocatorRecord\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__LocatorRecord = t\n if hasattr(self, '_set'):\n self._set()",
"def record_locator(self):\n return self._record_locator",
"def record_locator(self, record_locator):\n if record_locator is None:\n raise ValueError(\"Invalid value for `record_locator`, must not be `None`\")\n\n self._record_locator = record_locator",
"def get_location_list(self) -> DBRecList:\n raise NotImplementedError('not implemented')",
"async def get_record_info(self, zone_id: str) -> list[CloudflareDNSRecord]:\n record_information: list[CloudflareDNSRecord] = []\n if self.records is None:\n self.records = []\n data = await self.get_zone_records(zone_id)\n\n if data is None:\n raise CloudflareException(f\"No records found for {zone_id}\")\n\n self.records = data\n\n if not self.records:\n return record_information\n\n for record in self.records:\n if self.zone not in record:\n record = f\"{record}.{self.zone}\"\n\n recorddata: list[dict[str, Any]] = await self.api.get(\n self._endpoint(\n path=f\"{zone_id}/dns_records\",\n query={\"name\": record},\n )\n )\n\n first_record = recorddata[0]\n record_information.append(\n CloudflareDNSRecord(\n content=first_record[\"content\"],\n id=first_record[\"id\"],\n name=first_record[\"name\"],\n proxied=first_record[\"proxied\"],\n type=first_record[\"type\"],\n )\n )\n return record_information",
"def list_records(self, zone):\n return self._zones[zone.id][\"records\"].values()",
"def getElementList(self,locator,locatorType='id'):\n\n\n element = None\n try:\n locatorType = locatorType.lower()\n byType = self.getByType(locatorType)\n element = self.driver.find_elements(byType, locator)\n self.logger.info('Element list Found with Locator ' + locator + ' and locatorType ' + locatorType)\n\n except:\n self.logger.info('Element list not found with Locator ' + locator + ' and locatorType ' + locatorType)\n return element",
"def record_finder(lines):\n for line in lines:\n if not line.strip():\n continue\n if line.startswith(\"@\"): #separate each block of reads by @\n try:\n yield curr\n except:\n pass\n curr = []\n curr.append(line.strip())\n else:\n curr.append(line.strip())\n if curr:\n yield curr #Sandra et al. 2019",
"def list_records(self, zone):\n data = self._paginated_request(\"/v2/domains/%s/records\" % (zone.id), \"domain_records\")\n # TODO: Not use list comprehension to add zone to record for proper data map\n # functionality? This passes a reference to zone for each data currently\n # to _to_record which returns a Record. map() does not take keywords\n return list(map(self._to_record, data, [zone for z in data]))",
"def record_sets(self) -> pulumi.Input[Sequence[pulumi.Input['ElastigroupIntegrationRoute53DomainRecordSetArgs']]]:\n return pulumi.get(self, \"record_sets\")",
"def get_recordIds(self):\n record_ids = []\n for item in self.order_items:\n record_ids.append(item.get_recordId())\n \n return record_ids",
"def _get_record(self, pos, alt1, alt2, ref, ref_count):\n alt1_tuple, alt1_count, alt1_freq = alt1\n alt1_seq, alt1_type = alt1_tuple\n if alt2 == '.':\n pos_end = pos + len(alt1_seq) - 1\n return [pos, pos_end, ref, alt1_seq, '.', alt1_type, 0, ref_count, alt1_count, 0, alt1_freq, 0]\n alt2_tuple, alt2_count, alt2_freq = alt2\n alt2_seq, alt2_type = alt2_tuple\n pos_end = pos + max(len(alt1_seq), len(alt2_seq)) - 1\n\n return [pos, pos_end, ref, alt1_seq, alt2_seq, alt1_type, alt2_type, ref_count, alt1_count, alt2_count,\n alt1_freq, alt2_freq]",
"def spot_record(\n self,\n did: Optional[int] = None,\n daterange: Optional[Tuple[dt, dt]] = None) -> RecordThunkIter:\n if not self.device_list:\n return iter([])\n sr = self._SpotRecord(self)\n dr: Tuple[dt, dt]\n dr = (dt.now() - timedelta(days=1), dt.now()) \\\n if not daterange else daterange\n\n if did is None:\n generator = sr.all()\n else:\n generator = sr.one(did, dr)\n\n return generator",
"def _set_locator_id(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"locator-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"locator_id must be of a type compatible with base=unicode, is_leaf=True, yang_name=\"locator-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True\"\"\")\n self.__locator_id = t\n if hasattr(self, '_set'):\n self._set()",
"def get_zonerecords(self):\n\n response = self.call(method='getZoneRecords', args=[self.domainname, self.subdomain])\n records = []\n for r in response:\n record = self.zonerecord(\n domain=self.domainname,\n subdomain=self.subdomain,\n record_id=r['record_id'],\n type=r['type'],\n ttl=r['ttl'],\n priority=r['priority'],\n rdata=r['rdata']\n )\n records.append(record)\n return records",
"def linelist(self):\n line_list = Marker()\n line_list.header = self._header\n line_list.type = Marker.LINE_LIST\n line_list.action = Marker.ADD\n line_list.scale.x = 0.005\n line_list.color = self.YELLOW\n line_list.pose = deepcopy(self.POSE)\n\n line_list.points.extend((self._p1, self._p2))\n line_list.points.extend((self._p2, self._p3))\n line_list.points.extend((self._p3, self._p4))\n line_list.points.extend((self._p4, self._p1))\n line_list.points.extend((self._p5, self._p6))\n line_list.points.extend((self._p6, self._p7))\n line_list.points.extend((self._p7, self._p8))\n line_list.points.extend((self._p8, self._p5))\n line_list.points.extend((self._p1, self._p5))\n line_list.points.extend((self._p2, self._p6))\n line_list.points.extend((self._p3, self._p7))\n line_list.points.extend((self._p4, self._p8))\n\n return line_list",
"def getLocations(self, fileBlockList, **kwd):\n\n # Keywords (flags)\n errorTolerant = False\n if(kwd.has_key(\"errorTolerant\")): errorTolerant = kwd.get(\"errorTolerant\")\n\n result = []\n \n # Make sure the argument is a list\n if (isinstance(fileBlockList, list)):\n theList = fileBlockList\n else:\n theList = [fileBlockList]\n\n # Query the DLI\n for fB in theList:\n # Check what was passed (DlsFileBlock or string)\n if(isinstance(fB, DlsFileBlock)):\n lfn = fB.name\n else:\n lfn = fB\n lfn = self._checkDlsHome(lfn)\n userlfn = self._removeRootPath(lfn)\n entry = DlsEntry(DlsFileBlock(userlfn))\n\n # Get the list of locations\n locList = []\n if(self.verb >= DLS_VERB_HIGH):\n print \"--DliClient.listLocations(%s)\" % lfn\n try: \n for host in self.iface.listLocations(lfn, fileType = \"lfn\"):\n locList.append(DlsLocation(host))\n except dliClient.DliClientError, inst:\n msg = inst.msg\n msg = \"Error querying for %s: %s\" % (userlfn, inst.msg)\n if(isinstance(inst, dliClient.SoapError)):\n for i in [inst.actor, inst.detail]:\n if(i): msg += \". \" + str(i)\n if(errorTolerant):\n if(self.verb >= DLS_VERB_WARN):\n print \"Warning: \" + msg\n locList = None \n else:\n e = DlsDliClientError(msg)\n if(isinstance(inst, dliClient.SoapError)):\n if(inst.faultcode): \n if(inst.faultstring): e.code = inst.faultcode + \", \" + inst.faultstring\n else: e.code = inst.faultcode \n else:\n if(inst.faultstring): e.code = inst.faultstring\n raise e\n if(locList != None):\n entry.locations = locList\n result.append(entry)\n\n # Return\n return result",
"def get_data_on_polyline(self, varname, record, polyline_points,\n discretized_number=None):\n if self.get_mesh_dimension() != 2:\n raise TelemacException(\"Action possible only on 2d mesh\")\n\n if len(np.shape(np.array(polyline_points))) != 2:\n raise TelemacException('Warning problem with the list of '\\\n 'extraction points')\n\n if discretized_number is None:\n discretized_number = self.discretize_polyline(polyline_points)\n\n # dimension of the computation result\n dim = np.shape(np.array(polyline_points))[1]\n if dim == 2:\n polygone_discretized_points = linspace_poly(polyline_points,\\\n discretized_number)\n values_polylines = self.get_data_on_points(\\\n varname,\n record,\n polygone_discretized_points)\n abs_curv = curvilinear_abscissa(polygone_discretized_points)\n else:\n raise TelemacException('Warning the extraction on a polyline'\\\n ' is valid only in 2d')\n return polygone_discretized_points, abs_curv, values_polylines",
"async def get_zone_records(\n self,\n zone_id: str,\n *,\n record_type: str | None = None,\n ) -> list[str] | None:\n data: list[dict[str, str]] | None = await self.api.get(\n self._endpoint(\n path=f\"{zone_id}/dns_records\",\n query={\"per_page\": \"100\", \"type\": record_type},\n )\n )\n\n if data is None:\n return None\n\n return [record[\"name\"] for record in data]",
"def parse_record(self, record):\n data = defaultdict(list)\n\n for trait, parser in self.parsers:\n for field_name in self.search_fields:\n field = record.get(field_name)\n if not field:\n continue\n parsed = parser.parse(field, field_name)\n if parsed:\n data[trait] += parsed\n\n return data",
"def fromrecord(self, record):\n return _coordsys.coordsys_fromrecord(self, record)",
"def getRecordListZipped(self, region=None, chrom=None, start=None,\n end=None):\n if region is not None:\n c = matchChrom(region.chrom,self.chr_in_chrom)\n var_sites = self.reader.fetch(c, region.start, region.end)\n else:\n c = matchChrom(chrom,self.chr_in_chrom)\n var_sites = self.reader.fetch(c, start, end)\n lst = []\n for rec in var_sites:\n lst.append(rec)\n return lst",
"def zr_parser(filepath):\n\n with open(filepath, mode='r', encoding=\"Utf-8\") as input_file:\n\n record_list = list()\n record = dict()\n\n for line in input_file:\n # Avoid the first two lines of the file\n if line[1:3] != 'FN' and line[:2] != 'VR':\n # If the line means 'End of Record', then add the dict to the\n # list, and renew the dict\n if line[:2] == 'ER':\n record_list.append(record)\n record = dict()\n # Search for lines that start with a field. If so, create the\n # key on the current dictionary\n elif re.search('[A-Z]+', line[:2]) != None:\n cur = line[:2]\n record[zr_fields.get(cur)] = line[3:].strip()\n # If not, check if there are two spaces at the beginning. These\n # are associated with multi-value fields, like Authors. Plus,\n # the lines in between records are singled spaced, which means,\n # this also avoids them.\n elif line[:2] == \" \":\n record[zr_fields.get(cur)] += \", \" + line[3: ].strip()\n\n return record_list",
"def read_records(self, input_path, offset_range_tracker):\n start_offset = offset_range_tracker.start_position()\n with tf.io.gfile.GFile(input_path, 'r') as f:\n f.seek(start_offset)\n # Read lines in and proceed once a `+` is in the 3rd position.\n record = collections.deque([], 4)\n while True:\n line = f.readline()\n if not line:\n # End of file reached\n break\n record.append(line.strip())\n if len(record) == 4 and record[0].startswith('@') and record[2] == '+':\n # Once a '+' is in position 3 a full record exists in the record.\n read_name = record[0]\n sequence = record[1]\n qual = record[3]\n self.fastq_records_counter.inc()\n yield (read_name, sequence, qual)\n record.clear()",
"def location(self):\n return [self.lat, self.lon]",
"def get_record_device_index(self):\n\n rospy.loginfo(\"Attempting to find device named 'record'\")\n for i in range(self.p.get_device_count()):\n device = self.p.get_device_info_by_index(i)\n if device['name'] == 'record':\n rospy.loginfo(\"Found device 'record' at index %d\" % i)\n self.input_device_index = i\n return\n rospy.loginfo(\"Could not find device named 'record', falling back to default recording device\")",
"def gen_records(self, count=None):\n if not count:\n count = self.num_rec\n tt = time.localtime(time.time())\n addr = None\n for i in range(count):\n logdbg(\"reading record %d of %d\" % (i+1, count))\n addr, record = self.get_record(addr, tt.tm_year, tt.tm_mon)\n yield addr, record",
"def records(self):\n return self._records",
"def list_locations(self, _id):\n \n self.options['usr_locator_id'] = _id\n self.options['action'] = 'locator.location.list'\n return self.call(self.options)",
"def _get_localLocator(self):\n return self.__localLocator"
] | [
"0.7702962",
"0.6407192",
"0.5898516",
"0.55848175",
"0.48844913",
"0.4773935",
"0.46494174",
"0.45777026",
"0.45454535",
"0.45231923",
"0.45080623",
"0.44717428",
"0.44609767",
"0.44564545",
"0.44507366",
"0.44452652",
"0.4416199",
"0.44136074",
"0.43898392",
"0.4389652",
"0.43573198",
"0.435116",
"0.4351048",
"0.4349675",
"0.4346907",
"0.4330619",
"0.43254903",
"0.43134025",
"0.43094265",
"0.42948067"
] | 0.6818552 | 1 |
Setter method for LocatorRecord, mapped from YANG variable /input/LocatorRecord (list) | def _set_LocatorRecord(self, v, load=False):
try:
t = YANGDynClass(v,base=YANGListType("locator_id",yc_LocatorRecord_pyangbind_example__input_LocatorRecord, yang_name="LocatorRecord", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper), is_container='list', yang_name="LocatorRecord", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)
except (TypeError, ValueError):
raise ValueError("""LocatorRecord must be of a type compatible with base=YANGListType("locator_id",yc_LocatorRecord_pyangbind_example__input_LocatorRecord, yang_name="LocatorRecord", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper), is_container='list', yang_name="LocatorRecord", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True""")
self.__LocatorRecord = t
if hasattr(self, '_set'):
self._set() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def record_locator(self, record_locator):\n if record_locator is None:\n raise ValueError(\"Invalid value for `record_locator`, must not be `None`\")\n\n self._record_locator = record_locator",
"def _get_LocatorRecord(self):\n return self.__LocatorRecord",
"def _set_locator_id(self, v, load=False):\n parent = getattr(self, \"_parent\", None)\n if parent is not None and load is False:\n raise AttributeError(\"Cannot set keys directly when\" +\n \" within an instantiated list\")\n\n try:\n t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name=\"locator-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"locator_id must be of a type compatible with base=unicode, is_leaf=True, yang_name=\"locator-id\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True\"\"\")\n self.__locator_id = t\n if hasattr(self, '_set'):\n self._set()",
"def record_locator(self):\n return self._record_locator",
"def _set_rloc(self, v, load=False):\n try:\n t = YANGDynClass(v,base=yc_rloc_pyangbind_example__input_LocatorRecord_rloc, is_container='container', yang_name=\"rloc\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"rloc must be of a type compatible with base=yc_rloc_pyangbind_example__input_LocatorRecord_rloc, is_container='container', yang_name=\"rloc\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__rloc = t\n if hasattr(self, '_set'):\n self._set()",
"def lm_locator(self, lm_locator):\n\n self._lm_locator = lm_locator",
"def get_location_list(self) -> DBRecList:\n raise NotImplementedError('not implemented')",
"def fromrecord(self, record):\n return _coordsys.coordsys_fromrecord(self, record)",
"def emit(self, record):\n if self.list is not None:\n try:\n self.r.lpush(self.list, json.dumps(self.format(record)))\n except Exception:\n self.handleError(record)",
"def setRecord(self,record):\n idLower = record.getId().lower()\n type = record.name\n typeIds = self.indexed[type]\n if idLower in typeIds:\n oldRecord = typeIds[idLower]\n index = self.records.index(oldRecord)\n self.records[index] = record\n else:\n self.records.append(record)\n typeIds[idLower] = record",
"def _set_explicit_locator_path(self, v, load=False):\n try:\n t = YANGDynClass(v,base=yc_explicit_locator_path_pyangbind_example__input_LocatorRecord_rloc_explicit_locator_path, is_container='container', yang_name=\"explicit-locator-path\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"explicit_locator_path must be of a type compatible with base=yc_explicit_locator_path_pyangbind_example__input_LocatorRecord_rloc_explicit_locator_path, is_container='container', yang_name=\"explicit-locator-path\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__explicit_locator_path = t\n if hasattr(self, '_set'):\n self._set()",
"def records(self, records):\n\n self._records = records",
"def records(self, records):\n\n self._records = records",
"def _set_localLocator(self, v, load=False):\n try:\n t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name=\"localLocator\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"localLocator must be of a type compatible with base=YANGBool, is_leaf=True, yang_name=\"localLocator\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__localLocator = t\n if hasattr(self, '_set'):\n self._set()",
"def emit_marker(record):\n logging.debug(\"Formatting individual record {}\".format(record))\n global individual_markers \n marker = record.copy()\n # logging.debug(\"Emitting individual marker: {}\".format(marker))\n individual_markers.append(marker)",
"def set_addresses(cls, records, name, value=None):\n Party = Pool().get('party.party')\n\n for record in records:\n Party.write([record.party], {'addresses': value})",
"def parse_record(self, record):\n raise NotImplementedError()",
"def setZoneRecords(self, records):\n self._dump_data['zone'] = records",
"def _set_mapping_record(self, v, load=False):\n try:\n t = YANGDynClass(v,base=yc_mapping_record_pyangbind_example__input_mapping_record, is_container='container', yang_name=\"mapping-record\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)\n except (TypeError, ValueError):\n raise ValueError(\"\"\"mapping_record must be of a type compatible with base=yc_mapping_record_pyangbind_example__input_mapping_record, is_container='container', yang_name=\"mapping-record\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True\"\"\")\n self.__mapping_record = t\n if hasattr(self, '_set'):\n self._set()",
"def convert_lat_long_list(result: ResponseObject):\n # Format inner record if present, e.g. for search results\n if 'record' in result:\n result['record'] = convert_lat_long_list(result['record'])\n return result\n\n if ',' in (result.get('location') or ''):\n result['location'] = [try_float(coord) for coord in result['location'].split(',')]\n return result",
"def __init__(self,\n locations: List['LocationOutput']) -> None:\n self.locations = locations",
"def _appendRecord(self, rec, reclistname):\n if reclistname not in self.__dict__: # if not already an attrib\n self.__dict__[reclistname] = [] # init it\n self.__dict__[reclistname].append(rec) # append this record to its list",
"def record_marker(self, marker_name, details=None):\n o = {}\n o['decisionType'] = 'RecordMarker'\n attrs = o['recordMarkerDecisionAttributes'] = {}\n attrs['markerName'] = marker_name\n if details is not None:\n attrs['details'] = details\n self._data.append(o)",
"def put_record(self, record):\r\n row = [record.get(field) for field in self.fields.names()]\r\n\r\n self.put(row)",
"def spot_record(\n self,\n did: Optional[int] = None,\n daterange: Optional[Tuple[dt, dt]] = None) -> RecordThunkIter:\n if not self.device_list:\n return iter([])\n sr = self._SpotRecord(self)\n dr: Tuple[dt, dt]\n dr = (dt.now() - timedelta(days=1), dt.now()) \\\n if not daterange else daterange\n\n if did is None:\n generator = sr.all()\n else:\n generator = sr.one(did, dr)\n\n return generator",
"def polyline(self, pointlist, cls=None, style=None, attrs=None):\n payload = self._meta.make_payload(cls, style, attrs)\n pts_str = ' '.join('%s,%s' % (x, y) for x, y in pointlist)\n self.elements.append(\"\"\"<polyline points=\"%s\" %s/>\"\"\" % (pts_str, payload))\n return self",
"def polyline(self, pointlist, cls=None, style=None, attrs=None):\n payload = self._meta.make_payload(cls, style, attrs)\n pts_str = ' '.join('%s,%s' % (x, y) for x, y in pointlist)\n self.elements.append(\"\"\"<polyline points=\"%s\" %s/>\"\"\" % (pts_str, payload))\n return self",
"def __init__(self, record_locator=None, header=None, messages=None, travelers=None, reservation=None):\n\n self._record_locator = None\n self._header = None\n self._messages = None\n self._travelers = None\n self._reservation = None\n\n self.record_locator = record_locator\n if header is not None:\n self.header = header\n if messages is not None:\n self.messages = messages\n if travelers is not None:\n self.travelers = travelers\n if reservation is not None:\n self.reservation = reservation",
"def record_location(data):\n product = OrderDetail.objects.get(ac_od_id=data['ac_od_id'])\n product.location = data['location']\n return product.save()",
"def __init__(self, trip_update, stops, position_in_list):\n self.trip_update = trip_update\n self.stops = stops\n self.routeID = str(self.trip_update.trip.route_id)\n # A minor quirk in the MTA's data is fixed here. S trains were listed as GS for some reason\n if self.routeID == \"GS\":\n self.routeID = \"S\"\n self.index = position_in_list"
] | [
"0.685963",
"0.63189214",
"0.567086",
"0.56593966",
"0.5313092",
"0.5301535",
"0.4916309",
"0.47038326",
"0.47023335",
"0.47008383",
"0.46737063",
"0.46672797",
"0.46672797",
"0.45666236",
"0.45338166",
"0.45061657",
"0.45037052",
"0.4468762",
"0.44054282",
"0.4387563",
"0.43729714",
"0.43657523",
"0.4310758",
"0.4310055",
"0.4290492",
"0.42866755",
"0.42866755",
"0.42657533",
"0.42217937",
"0.42029262"
] | 0.86509675 | 0 |
Setter method for mapping_record, mapped from YANG variable /input/mapping_record (container) | def _set_mapping_record(self, v, load=False):
try:
t = YANGDynClass(v,base=yc_mapping_record_pyangbind_example__input_mapping_record, is_container='container', yang_name="mapping-record", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)
except (TypeError, ValueError):
raise ValueError("""mapping_record must be of a type compatible with base=yc_mapping_record_pyangbind_example__input_mapping_record, is_container='container', yang_name="mapping-record", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True""")
self.__mapping_record = t
if hasattr(self, '_set'):
self._set() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _get_map_record(self):\n return self.mapper.map_record(self.binding_record)",
"def _get_mapping_record(self):\n return self.__mapping_record",
"def mapping(self, mapping):\n self.set_mapping(mapping)",
"def set_mapping(self, mapping):\n mapping = pylastica.doc_type.Mapping.create(mapping)\n mapping.doc_type = self\n return mapping.send()",
"def setRecord(self,record):\n idLower = record.getId().lower()\n type = record.name\n typeIds = self.indexed[type]\n if idLower in typeIds:\n oldRecord = typeIds[idLower]\n index = self.records.index(oldRecord)\n self.records[index] = record\n else:\n self.records.append(record)\n typeIds[idLower] = record",
"def mapLogRecord(self, record):\n newrec = record.__dict__\n for p in self.params:\n newrec[p] = self.params[p]\n maxParamLength = 4000\n # truncate and clean the message from non-UTF-8 characters\n try:\n newrec['msg'] = newrec['msg'][:maxParamLength].decode('utf-8', 'ignore').encode('utf-8')\n except Exception:\n pass\n try:\n newrec['message'] = newrec['message'][:maxParamLength].decode('utf-8', 'ignore').encode('utf-8')\n except Exception:\n pass\n return newrec",
"def process_record(self, record):\n raise NotImplementedError('Process record needs to be customized')",
"def put_record(self, record):\r\n row = [record.get(field) for field in self.fields.names()]\r\n\r\n self.put(row)",
"def add_record_container(self, container: BaseRecordContainer) -> None:\n self.record_containers.append(container)",
"def record_locator(self, record_locator):\n if record_locator is None:\n raise ValueError(\"Invalid value for `record_locator`, must not be `None`\")\n\n self._record_locator = record_locator",
"def map_record(row: DLCSRecord, solr_client: Solr, config: typing.Dict) -> UrsusRecord: # pylint: disable=too-many-statements\n record: UrsusRecord = {\n field_name: map_field_value(row, field_name, config=config)\n for field_name in mapper.FIELD_MAPPING\n }\n\n # THUMBNAIL\n record[\"thumbnail_url_ss\"] = (\n record.get(\"thumbnail_url_ss\")\n or thumbnail_from_child(record, config=config)\n or thumbnail_from_manifest(record)\n )\n\n # COLLECTION NAME\n if \"Parent ARK\" in row and row[\"Parent ARK\"] in config[\"collection_names\"]:\n dlcs_collection_name = config[\"collection_names\"][row[\"Parent ARK\"]]\n record[\"dlcs_collection_name_tesim\"] = [dlcs_collection_name]\n\n # FIELDS\n record[\"uniform_title_sim\"] = record.get(\"uniform_title_tesim\")\n record[\"architect_sim\"] = record.get(\"architect_tesim\")\n record[\"author_sim\"] = record.get(\"author_tesim\")\n record[\"illuminator_sim\"] = record.get(\"illuminator_tesim\")\n record[\"scribe_sim\"] = record.get(\"scribe_tesim\")\n record[\"rubricator_sim\"] = record.get(\"rubricator_tesim\")\n record[\"commentator_sim\"] = record.get(\"commentator_tesim\")\n record[\"translator_sim\"] = record.get(\"translator_tesim\")\n record[\"lyricist_sim\"] = record.get(\"lyricist_tesim\")\n record[\"composer_sim\"] = record.get(\"composer_tesim\")\n record[\"illustrator_sim\"] = record.get(\"illustrator_tesim\")\n record[\"editor_sim\"] = record.get(\"editor_tesim\")\n record[\"calligrapher_sim\"] = record.get(\"calligrapher_tesim\")\n record[\"engraver_sim\"] = record.get(\"engraver_tesim\")\n record[\"printmaker_sim\"] = record.get(\"printmaker_tesim\")\n record[\"human_readable_language_sim\"] = record.get(\"human_readable_language_tesim\")\n record[\"names_sim\"] = name_fields(record)\n record[\"keywords_sim\"] = keywords_fields(record)\n record[\"collection_sim\"] = record.get(\"collection_ssi\")\n # explicit\n record[\"features_sim\"] = record.get(\"features_tesim\")\n # incipit\n # inscription\n record[\"script_sim\"] = record.get(\"script_tesim\")\n record[\"writing_system_sim\"] = record.get(\"writing_system_tesim\")\n record[\"year_isim\"] = year_parser.integer_years(record.get(\"normalized_date_tesim\"))\n record[\"date_dtsim\"] = solr_transformed_dates(solr_client,\n (date_parser.get_dates(record.get(\"normalized_date_tesim\"))))\n record[\"place_of_origin_sim\"] = record.get(\"place_of_origin_tesim\")\n record[\"associated_name_sim\"] = record.get(\"associated_name_tesim\")\n record[\"form_sim\"] = record.get(\"form_tesim\")\n record[\"support_sim\"] = record.get(\"support_tesim\")\n record[\"genre_sim\"] = record.get(\"genre_tesim\")\n record[\"subject_sim\"] = record.get(\"subject_tesim\")\n record[\"location_sim\"] = record.get(\"location_tesim\")\n record[\"named_subject_sim\"] = record.get(\"named_subject_tesim\")\n record[\"human_readable_resource_type_sim\"] = record.get(\"resource_type_tesim\")\n record[\"member_of_collections_ssim\"] = record.get(\"dlcs_collection_name_tesim\")\n\n # SINAI INDEX\n record[\"header_index_tesim\"] = header_fields(record)\n record[\"name_fields_index_tesim\"] = name_fields_index(record)\n\n # SORT FIELDS\n titles = record.get(\"title_tesim\")\n if isinstance(titles, typing.Sequence) and len(titles) >= 1:\n record[\"sort_title_ssort\"] = titles[0]\n\n # used a solr copyfield for shelfmark sorting\n # shelfmarks = record.get(\"shelfmark_ssi\")\n # print(shelfmarks)\n # if isinstance(shelfmarks, typing.Sequence) and len(shelfmarks) >= 1:\n # print(shelfmarks[0])\n # 
record[\"shelfmark_aplha_numeric_ssort\"] = shelfmarks[0]\n\n# -----------------------------------------------------------------------\n years = record.get(\"year_isim\")\n if isinstance(years, typing.Sequence) and len(years) >= 1:\n record[\"sort_year_isi\"] = min(years)\n\n dates = record.get(\"date_dtsim\")\n if isinstance(dates, typing.Sequence) and len(dates) >= 1:\n record[\"date_dtsort\"] = dates[0]\n return record",
"def parse_record(self, record):\n raise NotImplementedError()",
"def parse_record(self, in_rec):\n \n geo_util = geo.Geo()\n \n self.metadata = {}\n for k, v in in_rec.items():\n if k == 'metadata2': continue\n elif k == 'geometry':\n self.metadata['geometry'] = v\n coords = v['coordinates']\n self.metadata['wkt'] = geo_util.convert_imageGeom(\\\n coords, 'wkt')\n elif k == 'metadata':\n for m in v:\n key = to_camelCase(m[0])\n self.metadata[key] = m[1]\n else:\n self.metadata[k] = v",
"def update(self, mapping):\n if not ismapping(mapping):\n raise TypeError(\"mapping type required\")\n field_names = getpyattr(type(self), 'field_names')\n for key, value in mapping.items():\n if key in field_names:\n setattr(self, key, value)",
"def handle_record(self, record):\n raise NotImplementedError",
"def addMapping(mapping):\n defaultMapping_.addMapping(mapping)",
"def setMappedInfo(self, mapped_info):\n \n self.mapped_info = mapped_info",
"def connection_record(self, record: ConnectionRecord):\n self._connection_record = record",
"def memcacheSetRecord(self, key, record):\n\n self.memcacheSet(key, self.pickleRecord(record))",
"def add_record(self, record: Dict, src_name: SourceName) -> None:\n concept_id = record[\"concept_id\"]\n record[\"src_name\"] = src_name.value\n label_and_type = f\"{concept_id.lower()}##identity\"\n record[\"label_and_type\"] = label_and_type\n record[\"item_type\"] = \"identity\"\n try:\n self.batch.put_item(Item=record)\n except ClientError as e:\n logger.error(\n \"boto3 client error on add_record for \"\n f\"{concept_id}: {e.response['Error']['Message']}\"\n )\n for attr_type, item_type in ITEM_TYPES.items():\n if attr_type in record:\n value = record.get(attr_type)\n if not value:\n continue\n if isinstance(value, str):\n items = [value.lower()]\n else:\n items = {item.lower() for item in value}\n for item in items:\n self._add_ref_record(\n item, record[\"concept_id\"], item_type, src_name\n )",
"def patch_record(self, bucket_id, collection_id, record_id, **kwargs):\n kwargs['_return_http_data_only'] = True\n if kwargs.get('callback'):\n return self.patch_record_with_http_info(bucket_id, collection_id, record_id, **kwargs)\n else:\n (data) = self.patch_record_with_http_info(bucket_id, collection_id, record_id, **kwargs)\n return data",
"def _apply_mapping(self, document, mapping):\n if not mapping:\n return document\n new_document = {v: document[k] for k, v in mapping.items() if k in document}\n # Keep track of missing keys\n self.missing_keys.update([k for k in mapping if k not in document])\n\n # Document errors for missing documents\n if not new_document:\n self.failed += 1\n self.failed_ids.append(\n document.get(\"id\", document.get(\"ID\", document.get(\"_id\", None)))\n )\n return new_document",
"def to_json(self, record: Mapping[str, Any]) -> str:\n return self.json_lib.dumps(record, cls=ObjectEncoder)",
"def transform_record(self, pid, record, links_factory=None, **kwargs):\n context = kwargs.get(\"marshmallow_context\", {})\n context.setdefault(\"pid\", pid)\n context.setdefault(\"record\", record)\n return self.dump(\n self.preprocess_record(pid, record, links_factory=links_factory, **kwargs),\n context,\n )",
"def _set_record_to_backend(self, key: str, record: CacheRecord):\n raise NotImplementedError",
"def _set_document_attribute(self, doc, row, mapping):\n # Unpack mapping info.\n try:\n attr, col_idx, convertor = mapping\n except ValueError:\n try:\n attr, col_idx = mapping\n except ValueError:\n print mapping\n raise ValueError()\n convertor = None\n\n # Convert cell value.\n if col_idx.find(\"-\") == -1:\n attr_value = self._get_cell_value(row, convert_col_idx(col_idx), convertor)\n else:\n col_idx_from, col_idx_to = [convert_col_idx(i) for i in col_idx.split(\"-\")]\n attr_value = [i for i in (self._get_cell_value(row, i, convertor)\n for i in range(col_idx_from, col_idx_to + 1)) if i]\n\n # Set aattribute value.\n setattr(doc, attr, attr_value)",
"def add_record(self, record):\n # Store the domain as the key, and the rest as value.\n new_key = \"{0},{1}\".format(record.get_domain(), record.get_record_type())\n self._records[new_key] = record",
"def change_biopython_record_sequence(record, new_seq):\n new_record = deepcopy(record)\n\n if has_dna_alphabet:\n seq = Seq(new_seq, alphabet=DNAAlphabet())\n else:\n seq = Seq(new_seq)\n\n new_record.seq = seq\n return new_record",
"def recordToDict(self, record):\n fields = {}\n if record is not None:\n for field, value in record.fields.iteritems():\n\n # FIXME: need to sort out dealing with enormous groups; we\n # can ignore these when sending AMP responses because the\n # client will always fetch members via a members( ) AMP\n # command.\n if field.name in (u\"memberDNs\", u\"memberUIDs\"):\n continue\n\n valueType = record.service.fieldName.valueType(field)\n if valueType in (unicode, bool):\n fields[field.name] = value\n elif valueType is uuid.UUID:\n fields[field.name] = str(value)\n elif issubclass(valueType, (Names, NamedConstant)):\n fields[field.name] = value.name if value else None\n return fields",
"def parse_record(self, in_rec):\n \n self.metadata = {}\n for k, v in in_rec.items():\n if k == 'parameters':\n for m, mv in v.items():\n self.metadata[m] = mv\n else:\n self.metadata[k] = v\n \n if self.image is not None:\n self.metadata['imageUrl'] = self.image.get_metadata(\\\n 'thisRecordUrl')\n self.metadata['imageMetadata'] = self.image.get_metadata(\\\n 'metadataUrl')\n self.metadata['imageStartDate'] = self.image.get_date()\n \n if 'dateRapiOrdered' not in self.metadata.keys():\n self.metadata['dateRapiOrdered'] = self.image.get_metadata(\\\n 'dateRapiOrdered')\n self.metadata['orderSubmitted'] = self.image.get_metadata(\\\n 'orderSubmitted')"
] | [
"0.5970671",
"0.5902689",
"0.5657304",
"0.55116606",
"0.5374943",
"0.5234628",
"0.50860834",
"0.50424844",
"0.50336903",
"0.50079304",
"0.50042784",
"0.4991072",
"0.49719772",
"0.4959843",
"0.49564373",
"0.4931594",
"0.49017704",
"0.48650196",
"0.4817201",
"0.481521",
"0.4772289",
"0.47634825",
"0.47065118",
"0.47016668",
"0.4685669",
"0.46796814",
"0.46680248",
"0.46585512",
"0.464386",
"0.4643007"
] | 0.8538443 | 0 |
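The positive document above is a pyangbind-generated setter: it coerces the incoming value through `YANGDynClass` and re-raises any `TypeError`/`ValueError` as a `ValueError` that names the expected base type. A minimal, library-free sketch of that coerce-then-assign pattern (the `MappingRecord` class and its dict payload are hypothetical stand-ins, not part of any generated binding):

```python
class MappingRecord:
    """Hypothetical stand-in for a generated YANG container class."""
    def __init__(self, value=None):
        if value is not None and not isinstance(value, dict):
            raise TypeError("mapping-record expects a dict of leaf values")
        self.value = value or {}


class Input:
    """Mimics the coerce-then-assign shape of the generated setter."""
    def __init__(self):
        self.__mapping_record = MappingRecord()

    def _get_mapping_record(self):
        return self.__mapping_record

    def _set_mapping_record(self, v):
        try:
            t = MappingRecord(v)  # validate/coerce, as YANGDynClass would
        except (TypeError, ValueError):
            raise ValueError("mapping_record must be compatible with MappingRecord")
        self.__mapping_record = t

    mapping_record = property(_get_mapping_record, _set_mapping_record)


node = Input()
node.mapping_record = {"eid": "10.0.0.0/8"}  # ok; a non-dict raises ValueError
print(node.mapping_record.value)             # {'eid': '10.0.0.0/8'}
```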
Setter method for input, mapped from YANG variable /input (container) | def _set_input(self, v, load=False):
try:
t = YANGDynClass(v,base=yc_input_pyangbind_example__input, is_container='container', yang_name="input", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True)
except (TypeError, ValueError):
raise ValueError("""input must be of a type compatible with base=yc_input_pyangbind_example__input, is_container='container', yang_name="input", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True""")
self.__input = t
if hasattr(self, '_set'):
self._set() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _set_input(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_input_openconfig_qos__qos_interfaces_interface_input, is_container='container', yang_name=\"input\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"input must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_input_openconfig_qos__qos_interfaces_interface_input, is_container='container', yang_name=\"input\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__input = t\n if hasattr(self, '_set'):\n self._set()",
"def set_input(self, input):\n pass",
"def set_input(self, input):\n pass",
"def _set_input(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_input_openconfig_qos_interfaces__qos_interfaces_interface_input, is_container='container', yang_name=\"input\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"input must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_input_openconfig_qos_interfaces__qos_interfaces_interface_input, is_container='container', yang_name=\"input\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__input = t\n if hasattr(self, '_set'):\n self._set()",
"def _set_input(self, v, load=False):\n if hasattr(v, \"_utype\"):\n v = v._utype(v)\n try:\n t = YANGDynClass(v,base=yc_input_openconfig_qos_elements__qos_interfaces_interface_input, is_container='container', yang_name=\"input\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\n except (TypeError, ValueError):\n raise ValueError({\n 'error-string': \"\"\"input must be of a type compatible with container\"\"\",\n 'defined-type': \"container\",\n 'generated-type': \"\"\"YANGDynClass(base=yc_input_openconfig_qos_elements__qos_interfaces_interface_input, is_container='container', yang_name=\"input\", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/qos', defining_module='openconfig-qos', yang_type='container', is_config=True)\"\"\",\n })\n\n self.__input = t\n if hasattr(self, '_set'):\n self._set()",
"def input(self, input):\n\n self._input = input",
"def set_input(self, input):\r\n\r\n self.reset()\r\n self.input = input",
"def set_input(self, input):\n self.input = transfer_to_device(input, self.device)",
"def override_input(cls):\n globals()[\"input\"] = cls.input",
"def set_val(self, input):\n return",
"def add_input(self, var):\n raise NotImplementedError",
"def SetInput(self, , , p_float_6):\n ...",
"def assigninput(self, input):\n if type(input) == str: # string type\n self.name = input\n else: # array, list, tuple\n if len(input) == 2:\n print input[0], 'input0'\n try:\n n = len(input[0])\n except:\n n = 1\n if n == 1:\n\t\t print 'INITIALIZING SPARSE ARRAY TO ZEROS'\n self.ny, self.nx = input\n input = zeros(input, int)\n ## BELOW NOT QUITE RIGHT...\n ## ACTUALLY IT JUST TAKES LONG TO BUILD THE HUGE ARRAY\n # TAKES TIME TO UNRAVEL\n # LET'S JUST MAKE IT RIGHT THE FIRST TIME:\n #input = zeros(input[0] * input[1], 'int')\n #self.data = input\n self.data = ravel(array(input)) # ravel ADDED MUCH LATER",
"def SetInput(self, *args):\n return _itkSpatialObjectWriterPython.itkSpatialObjectWriter3_SetInput(self, *args)",
"def SetInput(self, *args):\n return _itkSpatialObjectWriterPython.itkSpatialObjectWriter2_SetInput(self, *args)",
"def set_input(self, input):\n self.real_A = input['A'].to(self.device)\n self.image_paths = input['A_paths']",
"def __set_inputs__(self):\n self.__set_in_out_var__(None, 0) # TODO: inspect None",
"def input(self):\r\n pass",
"def d_input(self):\n pass",
"def __init__(self, name, node, value=None):\n super(InputPlug, self).__init__(name, node, (OutputPlug, ))\n self.value = value\n self.is_dirty = True\n self.node.inputs[self.name] = self",
"def set_input(self, input):\n AtoB = self.opt.direction == 'AtoB'\n self.real_A = input['A' if AtoB else 'B'].to(self.device)\n self.inst = input['inst'].to(self.device)\n self.real_B = input['B' if AtoB else 'A'].to(self.device)\n self.image_paths = input['A_paths' if AtoB else 'B_paths']",
"def _update_input_type(self):\n pass",
"def input(self, inputters, field, number=None):\n raise NotImplementedError",
"def set_item(self, input=None, output=None):\n self.remove()\n if input is not None:\n self.inp = input\n if output is not None:\n self.out = output\n self.add()",
"def input(self):",
"def set_inputs(self, inputs):\n self.attributes[\"inputs\"] = inputs",
"def set_input(self, in_stream):\n self._in = self._wrap_stream(in_stream, 'in')\n return self._in",
"def set_input(self, idx, input_stream):\n \n raise NotImplementedError",
"def _setInputValue(self, name, value, typeString = ''):\n method = getattr(self.__class__, \"_setInputValue\" + getTypeString(value))\n return method(self, name, value, typeString)",
"def set_input_type(self, input_type):\n if input_type is not None: self._input_type.value = input_type\n return self"
] | [
"0.7814106",
"0.780715",
"0.780715",
"0.777229",
"0.7730762",
"0.7484346",
"0.71174186",
"0.68705064",
"0.677871",
"0.6680047",
"0.6550577",
"0.6451878",
"0.64363635",
"0.64027745",
"0.6382179",
"0.6357305",
"0.63393605",
"0.63311434",
"0.6296951",
"0.6284459",
"0.6236926",
"0.6232082",
"0.6219123",
"0.6179576",
"0.61745924",
"0.6155045",
"0.61306965",
"0.61026794",
"0.60800034",
"0.6044617"
] | 0.7847966 | 0 |
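Both generated `_set_input` variants in this record end with the same `if hasattr(self, '_set'): self._set()` tail. In pyangbind that hook marks the node as explicitly set and, typically, notifies its parent container so serializers can skip untouched branches. A toy sketch of that bubbling-up behaviour; the class names and the `_changed` flag are illustrative assumptions, not pyangbind internals:

```python
class Node:
    def __init__(self, parent=None):
        self._parent = parent
        self._changed = False

    def _set(self):
        # Mark this container as set, then bubble the flag up the tree.
        self._changed = True
        if self._parent is not None and hasattr(self._parent, "_set"):
            self._parent._set()


class Leaf(Node):
    def set_value(self, v):
        self._value = v
        if hasattr(self, "_set"):  # same tail as the generated setters above
            self._set()


root = Node()
leaf = Leaf(parent=root)
leaf.set_value(42)
print(root._changed)  # True: the change propagated to the root container
```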
create a new object based on this genotype | def fromgenotype(self):
pass | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def new_object(self):\r\n\t\tpass",
"def new(self, obj):\n pass",
"def create_individual(self):\n pass",
"def new_object(cls):\n return cls.for_value([])",
"def __init__(self, *args):\n this = _libsbml.new_SpeciesType(*args)\n try: self.this.append(this)\n except: self.this = this",
"def __init__(self, type='lg_asteroid', dataset=obstacles, **kwargs):\n super().__init__(type=type, dataset=dataset, **kwargs)\n self.obj_type = \"asteroid\"",
"def make(self, **kwargs):\n raise NotImplementedError",
"def create(self):",
"def create(self, odometryType): # real signature unknown; restored from __doc__\n pass",
"def __init__(self, *args):\n this = _libsbml.new_Species(*args)\n try: self.this.append(this)\n except: self.this = this",
"def create_individual(self):\n self.genes = np.random.rand(self.chromosome_size)",
"def make(self):\n pass",
"def create(self):\n\n raise NotImplementedError",
"def createGeneticInstance(self, context):\n genetic_instance = GeneticInstance(ParametricLSystem(self.seed))\n fromBlenderToGeneticInstance(self,genetic_instance)\n return genetic_instance",
"def __init__(self, gen):\n self.gen = gen",
"def create_ion_object(self, object_params):\n new_obj = IonObject(object_params[\"type_\"])\n\n # Iterate over the parameters to add to object; have to do this instead\n # of passing a dict to get around restrictions in object creation on setting _id, _rev params\n for param in object_params:\n self.set_object_field(new_obj, param, object_params.get(param))\n\n new_obj._validate() # verify that all of the object fields were set with proper types\n return new_obj",
"def _new_instance(self):\n return self.__class__(self._vmodule)",
"def create_individual(self):\n self.genes = np.random.rand(self.chromosome_size)\n self.personal_best = self.genes.copy",
"def object_via_gen_from(self, fit: af.Fit, galaxies: List[Galaxy]) -> object:",
"def new(self):\n self._init()",
"def make(self, atype, **kwargs):\n obj = self.api.get_type(f\"VSO:{atype}\")\n return obj(**kwargs)",
"def create(self):\n pass",
"def create(self):\n pass",
"def create(self):\n pass",
"def create():",
"def create():",
"def create(self):\n\n pass",
"def create(self):\n ...",
"def _new_instance(self):\n return self.__class__(self._vmodule, self._tensor_rank)",
"def create(cls, **dictionary):\n new_inst = cls.__new__(cls)\n if cls.__name__ == \"Rectangle\":\n new_inst.__init__(42, 98)\n elif cls.__name__ == \"Square\":\n new_inst.__init__(42)\n new_inst.update(**dictionary)\n return new_inst"
] | [
"0.728881",
"0.6973893",
"0.6677975",
"0.64741004",
"0.6446734",
"0.6379876",
"0.6334149",
"0.62880665",
"0.6268098",
"0.6260632",
"0.624735",
"0.6235976",
"0.62314445",
"0.6190096",
"0.61697304",
"0.61627156",
"0.61618036",
"0.6153238",
"0.6152914",
"0.61267525",
"0.61266625",
"0.61189103",
"0.61189103",
"0.61189103",
"0.6058415",
"0.6058415",
"0.6058084",
"0.60569733",
"0.60454667",
"0.604086"
] | 0.71769667 | 1 |
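The positive document for this record is only a stub (`fromgenotype` with a `pass` body). One plausible completion, in the spirit of the GA-flavoured negatives above (e.g. `create_individual` filling `self.genes`), is a copy-constructor-style factory; everything below is an assumed sketch, not the original implementation:

```python
import copy


class Individual:
    def __init__(self, genotype=None):
        self.genotype = list(genotype) if genotype is not None else []

    def fromgenotype(self):
        """Create a new object based on this genotype (assumed semantics)."""
        return type(self)(genotype=copy.deepcopy(self.genotype))


parent = Individual([0.1, 0.9, 0.4])
child = parent.fromgenotype()
print(child.genotype, child is not parent)  # [0.1, 0.9, 0.4] True
```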
Gets the ParaMeshBodies object from a component. | def getFromComponent(self, component):
return ParaMeshBodies() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def nativeObject(self):\n return ParaMeshBody()",
"def item(self, index):\n return ParaMeshBody()",
"def item(self, index):\n return ParaMeshBody()",
"def createForAssemblyContext(self, occurrence):\n return ParaMeshBody()",
"def getMesh(self):\n return self.mesh",
"def get_mesh(self):\n return self.mesh",
"def mesh(self):\n self._ensure_mesh()\n return self._mesh",
"def mesh(self):\n return self._mesh",
"def mesh(self):\n return self._mesh",
"def mesh(self):\n return self._mesh",
"def mesh(self):\n return self._mesh",
"def get_bmesh(obj):\n bm = bmesh.new()\n bm.from_mesh(obj.data)\n\n # Scene update necessary, as matrix_world is updated lazily\n bpy.context.scene.update()\n\n return bm",
"def body_contacts(self, physics):\n return self.collect_contacts(physics, self._body_geom_ids)",
"def add(self, fullFilename, units, baseOrFormFeature):\n return ParaMeshBodyList()",
"def getMeshes():\n nodes = pipernode.get('piperSkinnedMesh')\n return {mesh.getParent() for skin in nodes for mesh in skin.getChildren(ad=True, type='mesh') if mesh.getParent()}",
"def get_mesh_ids(self, body):\n with self.lock:\n return self.send_command('get_kinbody_link_mesh_ids ' + body.GetName())",
"def getMesh(self, tstep):\n return(self.fields.xs(tstep, level='time')[0].getMesh())",
"def getCurrentBMesh():\n # if there aren't active objects, return error\n if bpy.context.object is None:\n print(\"No object is selected!\")\n return None, None\n\n return bpy.context.object.data",
"def GetOutput(self, *args) -> \"itkMeshF3 *\":\n return _itkMeshSourcePython.itkMeshSourceMF3_GetOutput(self, *args)",
"def mesh_names(self):\n meshes = []\n for vname in self.nc.variables.keys():\n try:\n if self.nc.variables[vname].cf_role == 'mesh_topology':\n meshes.append(vname)\n except AttributeError:\n pass\n return meshes",
"def makePhysicsBody(self):\n space = self.environment.space\n geom = GeomBox(space, self.dim)\n geom.setPosition(self.centerPos)\n geom.setCategoryBits(2)\n geom.setCollideBits(1)\n self.geomList = [geom]",
"def meshy(self):\n if self._meshy is None:\n self._meshy = self.get_mesh_coord(2)\n\n return self._meshy",
"def getMaterialPhysics():\r\n physicsProperties = {}\r\n for material in bpy.data.materials:\r\n properties = utils.extract_cryblend_properties(material.name)\r\n if properties:\r\n physicsProperties[properties[\"Name\"]] = properties[\"Physics\"]\r\n return physicsProperties",
"def _final_mesh(self):\n assert (\n \"final_mesh\" in self.__dict__.keys()\n ), \"Final Mesh does not exist yet - please run multi-view optimization before getting\"\n return self.final_mesh",
"def convert_to_mesh_array(scene_or_mesh):\n if isinstance(scene_or_mesh, trimesh.Scene):\n if len(scene_or_mesh.geometry) == 0:\n mesh_array = [] # empty scene\n else:\n # we lose texture information here\n mesh_array = [g for g in scene_or_mesh.geometry.values()] \n else:\n assert(isinstance(scene_or_mesh, trimesh.Trimesh))\n mesh_array = [scene_or_mesh]\n return mesh_array",
"def get_component(self):\n component = []\n component = [self.component_type, self.component_value, self.spot]\n\n if component[2] != None:\n print component\n return component",
"def get_meshes(path='../../../models', cutoff=None):\r\n\r\n bodies = collections.deque()\r\n for file_name in os.listdir(path):\r\n try:\r\n mesh = trimesh.load(os.path.join(path, file_name))\r\n split = mesh.split()\r\n bodies.extend(split)\r\n if len(split) > 1:\r\n bodies.append(mesh)\r\n except BaseException:\r\n continue\r\n\r\n if cutoff is not None and len(bodies) > cutoff:\r\n return np.array(bodies)\r\n\r\n for _i in range(100):\r\n cylinder = trimesh.creation.cylinder(\r\n radius=np.random.random() * 100,\r\n height=np.random.random() * 1000,\r\n sections=int(np.clip(np.random.random() * 720,\r\n 20,\r\n 720)))\r\n\r\n capsule = trimesh.creation.capsule(\r\n radius=np.random.random() * 100,\r\n height=np.random.random() * 1000,\r\n count=np.clip(np.random.random(2) * 720,\r\n 20,\r\n 720).astype(int))\r\n bodies.append(cylinder)\r\n bodies.append(capsule)\r\n for _i in range(10):\r\n bodies.append(trimesh.creation.random_soup(\r\n int(np.clip(np.random.random() * 1000,\r\n 20,\r\n 1000))))\r\n bodies.append(trimesh.creation.icosphere())\r\n bodies.append(trimesh.creation.uv_sphere())\r\n bodies.append(trimesh.creation.icosahedron())\r\n\r\n return np.array(bodies)",
"def as_mesh(scene_or_mesh):\n if isinstance(scene_or_mesh, trimesh.Scene):\n if len(scene_or_mesh.geometry) == 0:\n mesh = None # empty scene\n else:\n # we lose texture information here\n mesh = trimesh.util.concatenate(\n tuple(trimesh.Trimesh(vertices=g.vertices, faces=g.faces)\n for g in scene_or_mesh.geometry.values()))\n else:\n mesh = scene_or_mesh\n assert(isinstance(mesh, trimesh.Trimesh))\n return mesh",
"def mesh(self, centered=True):\n xx = np.linspace(0, self.Lx, self.Nx, endpoint=False)\n yy = np.linspace(0, self.Ly, self.Ny, endpoint=False)\n\n if centered:\n xx += self.hx/2\n yy += self.hy/2\n\n return np.meshgrid(xx, yy, indexing=\"ij\")",
"def meshz(self):\n if self._meshz is None:\n if self.ndim == 3:\n self._meshz = self.get_mesh_coord(3)\n else:\n self._meshz = None\n\n return self._meshy"
] | [
"0.6462085",
"0.6373467",
"0.6373467",
"0.59029144",
"0.54926586",
"0.5220117",
"0.5198538",
"0.50059587",
"0.50059587",
"0.50059587",
"0.50059587",
"0.4859322",
"0.48247197",
"0.47570008",
"0.45531186",
"0.45432347",
"0.44658598",
"0.44331133",
"0.4367574",
"0.43619215",
"0.4317085",
"0.43065754",
"0.4299826",
"0.42962164",
"0.42887416",
"0.42756546",
"0.42580906",
"0.42532107",
"0.424855",
"0.42357093"
] | 0.90093076 | 0 |
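This record's stub returns a `ParaMeshBodies` collection for a component, and the record that follows documents its `add(fullFilename, units, baseOrFormFeature)` import method, including the requirement that the base or form feature be in an 'edit' state. A hedged sketch of how those pieces combine in a Fusion 360 script; the `baseFeatures.add()`, `startEdit`/`finishEdit`, and `MeshUnits` calls are standard Fusion API usage, but the `meshBodies` accessor used here is an assumption about where the collection hangs off the component:

```python
import adsk.core
import adsk.fusion


def import_mesh_parametric(stl_path: str):
    app = adsk.core.Application.get()
    design = adsk.fusion.Design.cast(app.activeProduct)
    root = design.rootComponent

    # In a parametric design, mesh bodies must be created inside a base feature.
    base = root.features.baseFeatures.add()
    base.startEdit()  # the 'edit' state the docstring requires
    try:
        # Assumed accessor for the component's mesh-body collection.
        root.meshBodies.add(
            stl_path,
            adsk.fusion.MeshUnits.CentimeterMeshUnit,  # units of the .stl file
            base,
        )
    finally:
        base.finishEdit()  # leave edit mode even if the import fails
```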
Creates a new mesh body by importing an .stl or .obj file. Because of a current limitation, if you want to create a mesh body in a parametric model, you must first call the edit method of the base or form feature, use this method to create the mesh body, and then call the finishEdit method of the base or form feature. The base or form feature must be in an 'edit' state to be able to add any additional items to it. | def add(self, fullFilename, units, baseOrFormFeature):
return ParaMeshBodyList() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def create_mesh(self):\n print(\"create_mesh\")\n faces = self.get_faces()\n print(\"num faces: {}\".format(len(faces)))\n\n # TODO: perform face filtering to remove long edges in Z direction\n # filtered_faces = self.get_filtered_faces(faces)\n # print(\"num filtered faces: {}\".format(len(filtered_faces)))\n\n vertices = self.xyz_points.T\n\n # handle texture mappings\n vertex_index_to_texture = []\n for j in range(0, self.height):\n for i in range(0, self.width):\n # vertex_index = (j * self.width) + ij\n w = i / self.width\n h = (self.height - j - 1) / self.height\n vertex_index_to_texture.append(\n (w, h)\n )\n\n # Create material.\n # TODO: make the string/filename randomly generated and unique\n file0 = open(os.path.join(self.args.path, \"triangle_mesh.obj.mtl\"), \"w\") # write mode\n file0.write(\"newmtl material_0\\n\")\n # Save image here.\n cv2.imwrite(os.path.join(self.args.path, \"triangle_mesh.png\"), self.bgr)\n file0.write(\"map_Kd triangle_mesh.png\\n\")\n file0.close()\n\n # https://en.wikipedia.org/wiki/Wavefront_.obj_file\n # https://github.com/mmatl/pyrender/blob/master/examples/models/fuze.obj\n obj_path = os.path.join(self.args.path, \"triangle_mesh.obj\")\n file1 = open(obj_path, \"w\") # write mode\n file1.write(\"mtllib ./triangle_mesh.obj.mtl\\n\")\n for vertex in vertices:\n x, y, z = vertex\n file1.write(\"v {} {} {}\\n\".format(x, y, z))\n file1.write(\"usemtl material_0\\n\")\n for w, h in vertex_index_to_texture:\n file1.write(\"vt {} {}\\n\".format(w, h))\n for face in faces:\n a, b, c = face\n a += 1\n b += 1\n c += 1\n file1.write(\"f {}/{} {}/{} {}/{}\\n\".format(\n a, a, b, b, c, c\n )\n )\n file1.close()\n\n # Load the trimesh from OBJ file.\n trimesh_mesh = trimesh.load(obj_path)\n # trimesh_mesh.show()\n\n mesh = pyrender.Mesh.from_trimesh(trimesh_mesh, smooth=False)\n self.scene = pyrender.Scene(ambient_light=[3.0, 3.0, 3.0])\n\n camera = pyrender.IntrinsicsCamera(\n self.focal_length, self.focal_length, self.width / 2, self.height / 2\n )\n self.camera_pose = np.array([\n [1.0, 0.0, 0.0, 0.0],\n [0.0, 1.0, 0.0, 0.0],\n [0.0, 0.0, 1.0, 0.0],\n [0.0, 0.0, 0.0, 1.0],\n ])\n # https://pyrender.readthedocs.io/en/latest/examples/cameras.html#creating-cameras\n # https://docs.scipy.org/doc/scipy/reference/generated/scipy.spatial.transform.Rotation.html\n r = R.from_rotvec(np.array([0, np.pi, 0]))\n r = R.from_rotvec(np.array([0.0, 0, np.pi])) * r\n matrix = r.as_matrix()\n self.camera_pose[:3, :3] = matrix\n\n light = pyrender.PointLight(\n color=[1.0, 1.0, 1.0],\n intensity=0.0\n )\n\n self.nm = pyrender.Node(mesh=mesh, matrix=np.eye(4))\n self.nl = pyrender.Node(light=light, matrix=np.eye(4))\n self.nc = pyrender.Node(camera=camera, matrix=np.eye(4))\n self.scene.add_node(self.nm)\n self.scene.add_node(self.nl)\n self.scene.add_node(self.nc)\n\n # Set the pose and show the image.\n temppose = self.extrinsics @ self.camera_pose\n self.scene.set_pose(self.nl, pose=temppose)\n self.scene.set_pose(self.nc, pose=temppose)\n pyrender.Viewer(self.scene, use_raymond_lighting=True,\n viewport_size=(self.width, self.height))",
"def __create_scene(self):\n\n print 'creating a scene'\n # create scenegraph by the ifgi scene parser\n _infilepath = '../../sampledata/cornel_box.ifgi'\n # _infilepath = '../../sampledata/one_tri_full.ifgi'\n ifgireader = IfgiSceneReader.IfgiSceneReader()\n if(not ifgireader.read(_infilepath)):\n raise StandardError, ('load file [' + _infilepath + '] failed.')\n\n # add a new scene\n # A ifgi file may have many cameras, but only default camera\n # is handled.\n cam_dict = ifgireader.camera_dict_dict['default']\n\n assert(self.__ifgi_cpp_render_core != None)\n self.__ifgi_cpp_render_core.create_scene(ifgireader.material_dict_list,\\\n ifgireader.geometry_dict_list,\\\n cam_dict)\n # check the camera correctly pushed\n # print cam_dict\n # dir(ifgi_cpp_render_core)\n # ret_cam_dict = ifgi_cpp_render_core.get_camera_pydict()\n # print ret_cam_dict\n\n # self.__scenegraph.update_all_bbox()\n # -- now all primitive (TriMesh) can look up the material\n\n # # added RGBA buffer and Hit buffer to the current camera.\n # imgsz = (self.__image_xsize, self.__image_ysize, 4)\n # cur_cam.set_film('RGBA', Film.ImageFilm(imgsz, 'RGBA'))\n # # cur_cam.print_obj()",
"def createObjectWithMesh(self, new_mesh, objId, meshId, materials=[]):\n editor = self._parent\n obj = editor.getcreate_object(objId, \"opensim\", new_mesh)\n editor.setMeshMaterials(new_mesh, materials)\n if objId in editor.positions:\n pos = editor.positions[objId]\n editor.apply_position(obj, pos, raw=True)\n if objId in editor.rotations:\n rot = editor.rotations[objId]\n editor.apply_rotation(obj, rot, raw=True)\n if objId in editor.scales:\n scale = editor.scales[objId]\n editor.apply_scale(obj, scale)\n editor.set_uuid(obj, objId)\n editor.set_uuid(new_mesh, meshId)\n scene = editor.get_current_scene()\n if not obj.name in scene.objects:\n if hasattr(obj, '_obj'):\n try:\n scene.objects.link(obj._obj)\n except:\n pass # XXX :-P\n else:\n scene.objects.link(obj)\n new_mesh.update()\n editor.trigger_callback('object.precreate', str(objId))\n return obj",
"def createMesh(objname,Vert,Edges=[],Faces=[]):\n me = bpy.data.meshes.new(objname)\n ob = bpy.data.objects.new(objname,me)\n bpy.context.scene.objects.link(ob)\n \n me.from_pydata(Vert,Edges,Faces)\n me.update(calc_edges=True)",
"def import_object(self, scenegroup, new_mesh, materials=None, offset_x=128.0, offset_y=128.0,\n offset_z=20.0):\n logger.debug(\"import_object\")\n pos = parse_vector(scenegroup[\"position\"])\n scale = parse_vector(scenegroup[\"scale\"])\n\n\n\n obj = self.getcreate_object(scenegroup[\"id\"], scenegroup[\"asset\"], new_mesh)\n\n if not scenegroup['groupid'] == '00000000-0000-0000-0000-000000000000':\n parent = self.findWithUUID(scenegroup['groupid'])\n if not parent:\n # XXX should register\n pass\n else:\n obj.parent = parent\n\n self.apply_position(obj, pos)\n self.apply_rotation(obj, parse_vector(scenegroup[\"rotation\"]))\n self.apply_scale(obj, scale)\n self.set_uuid(obj, str(scenegroup[\"id\"]))\n\n\n # new_mesh properties have to be set here otherwise blender\n # can crash!!\n self.set_uuid(new_mesh, str(scenegroup[\"asset\"]))\n if materials:\n if bversion == 3:\n for mat in materials:\n new_mesh.materials.append(mat)\n else:\n new_mesh.materials = materials\n scene = self.get_current_scene()\n try:\n if hasattr(obj, '_obj'):\n scene.objects.link(obj._obj)\n else:\n scene.objects.link(obj)\n except RuntimeError:\n pass # object already in scene\n editor.set_loading_state(obj, 'OK')\n #new_mesh.update()\n #obj.makeDisplayList()\n #new_mesh.hasVertexColours(True) # for now we create them as blender does\n\n return obj",
"def import_object(self, filename, pose=np.eye(4), size=None, oid=1):\n\n # extract name and extension of the model file\n name, ext = os.path.basename(filename).split(\".\")\n\n # load model according to file extension\n if ext == \"ply\":\n bpy.ops.import_mesh.ply(filepath=filename)\n else:\n raise NotImplementedError()\n\n # the name of the file is assigned\n # to the mesh object in blender engine\n model = bpy.data.objects[name]\n model.name = name + str(oid)\n\n # set object reference point (origin) and pose\n bpy.ops.object.origin_set(type=\"ORIGIN_CENTER_OF_MASS\", center=\"BOUNDS\")\n self.set_model_pose(model, pose)\n\n # normalize and scale model dimensions\n if size is not None:\n model.dimensions = size * model.dimensions / max(model.dimensions)\n\n # add material\n # FIXME: adjust properties\n material = bpy.data.materials.new(name=\"Material\")\n material.specular_intensity = 0.25\n model.data.materials.append(material)\n # enable vertex color rendering\n # this is necessary to render the vertex color\n # in the rgb branch of the rendering node tree\n model.active_material.use_vertex_color_paint = True\n\n # if rendering is not photorealistic, render only\n # the vertex color information of the model\n if not self.photorealism:\n model.active_material.use_shadeless = True\n\n # set object id\n model.pass_index = oid\n\n return model",
"def create_mesh_from_data(mesh_name, bsp_verts, bsp_faces, materials, scale_factor):\n\n\n def vertex_stream(vertices, stream_id):\n for vertex in vertices:\n yield vertex[stream_id]\n\n # Create mesh and object\n me = bpy.data.meshes.new(mesh_name+'Mesh')\n ob = bpy.data.objects.new(\"LEVEL\" + mesh_name, me)\n ob.show_name = True\n\n # Link object to scene\n bpy.context.scene.objects.link(ob)\n \n # Create the vertex data\n face_list = list(vertex_stream(bsp_faces, 1))\n mesh_verts = list(vertex_stream(bsp_verts, 0))\n\n me.from_pydata(mesh_verts, [], face_list)\n\n # Update mesh with new data\n me.update()\n apply_uvs(me, bsp_verts)\n\n # Add materials to mesh\n for cmaterial in materials:\n me.materials.append(cmaterial)\n\n # Apply material indexes to mesh faces\n face_materials = list(vertex_stream(bsp_faces, 0))\n\n for polygon_idx, current_polygon in enumerate(me.polygons):\n current_polygon.material_index = face_materials[polygon_idx]\n\n # Add additional properties to the new object\n ob['scale_factor'] = scale_factor\n\n return ob",
"def LoadStructuredMeshDefinition(cls, metadata_object):\n pass",
"def create_obj(destination,mtl_name):\r\n\tshutil.copyfile(\"file_cube.obj\",destination)\r\n\tf=open(destination,\"r\")\r\n\tlines=f.readlines()\r\n\tlines[0]=\"mtllib \"+mtl_name+\"\\n\"\r\n\tf.close()\r\n\tf=open(destination,\"w\")\r\n\tf.writelines(lines)\r\n\tf.close()",
"def create_simpleMesh(self, forceNew = True, skin = False,connect=True,reverseNormal = None,\n deleteHistory=False,loftMode = None ):#'evenCubic'\n _str_func = 'create_simpleMesh'\n log.debug(\"|{0}| >> forceNew: {1} | skin: {2} \".format(_str_func,forceNew,skin)+ '-'*80)\n log.debug(\"{0}\".format(self))\n \n if self.getMayaAttr('isBlockFrame'):\n log.debug(cgmGEN.logString_sub(_str_func,'blockFrame bypass'))\n return \n \n mParent = False\n #Check for existance of mesh ========================================================================\n if connect:\n bfr = self.msgList_get('simpleMesh',asMeta=True)\n if skin and bfr:\n log.debug(\"|{0}| >> simpleMesh detected...\".format(_str_func)) \n if forceNew:\n log.debug(\"|{0}| >> force new...\".format(_str_func)) \n mc.delete([mObj.mNode for mObj in bfr])\n else:\n return bfr\n if skin:\n mModuleTarget = self.getMessageAsMeta('moduleTarget')\n if not mModuleTarget:\n return log.error(\"|{0}| >> Must have moduleTarget for skining mode\".format(_str_func)) \n mPuppet = puppet_get(self,mModuleTarget)\n if not mPuppet:\n return log.error(\"|{0}| >> Must have puppet for skining mode\".format(_str_func))\n mGeoGroup = mPuppet.masterNull.geoGroup\n mParent = mGeoGroup\n log.debug(\"|{0}| >> mPuppet: {1}\".format(_str_func,mPuppet))\n log.debug(\"|{0}| >> mGeoGroup: {1}\".format(_str_func,mGeoGroup)) \n log.debug(\"|{0}| >> mModuleTarget: {1}\".format(_str_func,mModuleTarget))\n \n #BlockModule call? ====================================================================================\n mBlockModule = self.p_blockModule\n if mBlockModule.__dict__.has_key('create_simpleMesh'):\n log.debug(\"|{0}| >> BlockModule 'create_simpleMesh' call found...\".format(_str_func)) \n ml_mesh = mBlockModule.create_simpleMesh(self,skin=skin,parent=mParent,deleteHistory=deleteHistory)\n \n else:#Create ======================================================================================\n if not loftMode:\n if self.getEnumValueString('loftDegree') == 'cubic':\n loftMode = 'evenCubic'\n else:\n loftMode = 'evenLinear'\n \n \n kws = {}\n if self.blockType in ['limb']:\n if self.addLeverBase and self.getEnumValueString('addLeverBase') != 'joint' and skin:\n kws['skip'] = [0]\n \n ml_mesh = create_simpleLoftMesh(self,form=2,degree=None,divisions=2,deleteHistory=deleteHistory,loftMode=loftMode,**kws)\n \n \n #Get if skin data -------------------------------------------------------------------------------\n if skin:\n log.debug(\"|{0}| >> skinnable? 
...\".format(_str_func)) \n ml_moduleJoints = mModuleTarget.rigNull.msgList_get('moduleJoints')\n if not ml_moduleJoints:\n return log.error(\"|{0}| >> Must have moduleJoints for skining mode\".format(_str_func))\n log.debug(\"|{0}| >> ml_moduleJoints: {1}\".format(_str_func,ml_moduleJoints)) \n\n md_parents = {}#We're going to un parent our joints before skinning and then reparent\n for i,mJnt in enumerate(ml_moduleJoints):\n md_parents[mJnt] = mJnt.getParent(asMeta=True)\n if i:mJnt.p_parent = ml_moduleJoints[i-1]\n \n\n log.debug(\"|{0}| >> skinning..\".format(_str_func))\n #l_joints= [mJnt.mNode for mJnt in ml_moduleJoints]\n for mMesh in ml_mesh:\n log.debug(\"|{0}| >> skinning {1}\".format(_str_func,mMesh))\n mMesh.p_parent = mParent\n \n MRSPOST.skin_mesh(mMesh,ml_moduleJoints)\n \n \"\"\"\n #mMesh.doCopyPivot(mGeoGroup.mNode)\n try:\n skin = mc.skinCluster (l_joints,\n mMesh.mNode,\n tsb=True,\n bm=2,\n wd=0,\n heatmapFalloff = 1,\n maximumInfluences = 2,\n normalizeWeights = 1, dropoffRate=5)\n except Exception,err:\n log.warning(\"|{0}| >> heat map fail: {1}.. | {2}\".format(_str_func,format(self.mNode),err))\n skin = mc.skinCluster (l_joints,\n mMesh.mNode,\n tsb=True,\n bm=0,\n maximumInfluences = 2,\n wd=0,\n normalizeWeights = 1,dropoffRate=10)\n skin = mc.rename(skin,'{0}_skinCluster'.format(mMesh.p_nameBase))\"\"\"\n \n #Reparent\n for i,mJnt in enumerate(ml_moduleJoints):\n mJnt.p_parent = md_parents[mJnt]\n #pprint.pprint(md_parents)\n if connect and ml_mesh:\n self.msgList_connect('simpleMesh',ml_mesh) \n return ml_mesh",
"def MeshMachine(main):\n\n # oDesign definition\n oDesign = main['ANSYS']['oDesign']\n\n # Data for the rotor mesh\n RotorName = main['ANSYS']['Rotor&Magnets']['Name'][0]\n RotorNumMaxElem = main['ANSYS']['Mesh']['Rotor']['NumMaxElem']\n RotorMaxLength = main['ANSYS']['Mesh']['Rotor']['MaxLength']\n\n # Data for the magnets mesh\n PMNames = main['ANSYS']['Rotor&Magnets']['PMNames']\n PMNumMaxElem = main['ANSYS']['Mesh']['Magnets']['NumMaxElem']\n PMMaxLength = main['ANSYS']['Mesh']['Magnets']['MaxLength']\n\n # Data for the Stator mesh\n StatorName = main['ANSYS']['Stator']['Name']\n StatorNormalDev = main['ANSYS']['Mesh']['Stator']['NormalDev']\n StatorAspectRatio = main['ANSYS']['Mesh']['Stator']['AspectRatio']\n\n # Data for the Stator mesh\n CoilNames = main['ANSYS']['Winding']['CoilNames']\n WindingNumMaxElem = main['ANSYS']['Mesh']['Winding']['NumMaxElem']\n WindingMaxLength = main['ANSYS']['Mesh']['Winding']['MaxLength']\n\n WindingName = []\n for phase in CoilNames:\n for direction in phase:\n WindingName += direction\n\n # Creating meshes\n oModule = oDesign.GetModule(\"MeshSetup\")\n\n # Rotor meshes\n oModule.AssignLengthOp(\n [\n \"NAME:Rotor\",\n \"RefineInside:=\", True,\n \"Enabled:=\", True,\n \"Objects:=\", [RotorName],\n \"RestrictElem:=\", False,\n \"NumMaxElem:=\", str(RotorNumMaxElem),\n \"RestrictLength:=\", True,\n \"MaxLength:=\", str(RotorMaxLength)+\"mm\"\n ]\n )\n # Magnet meshes\n oModule.AssignLengthOp(\n [\n \"NAME:Magnets\",\n \"RefineInside:=\", True,\n \"Enabled:=\", True,\n \"Objects:=\", PMNames,\n \"RestrictElem:=\", False,\n \"NumMaxElem:=\", str(PMNumMaxElem),\n \"RestrictLength:=\", True,\n \"MaxLength:=\", str(PMMaxLength)+\"mm\"\n ]\n )\n # Stator meshes\n oModule.AssignTrueSurfOp(\n [\n \"NAME:Stator\",\n \"Objects:=\", [StatorName],\n \"CurvedSurfaceApproxChoice:=\", \"ManualSettings\",\n \"SurfDevChoice:=\", 0,\n \"NormalDevChoice:=\", 2,\n \"NormalDev:=\", str(StatorNormalDev) + \"deg\",\n \"AspectRatioChoice:=\", 2,\n \"AspectRatio:=\", str(StatorAspectRatio)\n ]\n )\n\n # Coil meshes\n oModule.AssignLengthOp(\n [\n \"NAME:Coils\",\n \"RefineInside:=\"\t, True,\n \"Enabled:=\"\t\t, True,\n \"Objects:=\"\t\t, WindingName,\n \"RestrictElem:=\"\t, False,\n \"NumMaxElem:=\"\t\t, str(WindingNumMaxElem),\n \"RestrictLength:=\"\t, True,\n \"MaxLength:=\"\t\t, str(WindingMaxLength) +\"mm\"\n ]\n )\n\n return main",
"def New(*args, **kargs):\n obj = itkMeshSourceMF3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def processMeshCreated(self, obj_uuid, mesh_uuid, new_obj_uuid, asset_id):\n foundobject = False\n foundmesh = False\n editor = self._parent\n for obj in editor.getSelected():\n if obj.type == 'MESH' and obj.opensim.uuid == obj_uuid:\n foundobject = obj\n if obj.type == 'MESH' and obj.data.opensim.uuid == mesh_uuid:\n foundmesh = obj.data\n\n if not foundmesh:\n foundmesh = editor.find_with_uuid(mesh_uuid,\n bpy.data.meshes, \"meshes\")\n if not foundobject:\n foundobject = editor.find_with_uuid(obj_uuid,\n bpy.data.objects, \"objects\")\n if foundobject:\n editor.set_uuid(foundobject, new_obj_uuid)\n b2rexpkg.editor.set_loading_state(foundobject, 'OK')\n else:\n logger.warning(\"Could not find object for meshcreated\")\n if foundmesh:\n editor.set_uuid(foundmesh, asset_id)\n else:\n logger.warning(\"Could not find mesh for meshcreated\")",
"def create_simpleLoftMesh(self, form = 2, degree=None, uSplit = None,vSplit=None,cap=True,uniform = False,skip=[],\n reverseNormal = None,deleteHistory = True,divisions=None, loftMode = None,flipUV = False):\n _str_func = 'create_simpleLoftMesh'\n log.debug(\"|{0}| >> \".format(_str_func)+ '-'*80)\n log.debug(\"{0}\".format(self))\n \n if self.getMayaAttr('isBlockFrame'):\n log.debug(cgmGEN.logString_sub(_str_func,'blockFrame bypass'))\n return \n\n mBlockModule = self.p_blockModule\n\n ml_delete = []\n ml_formHandles = self.msgList_get('formHandles')\n ml_loftCurves = []\n \n if degree == None:\n degree = 1 + self.loftDegree\n if degree ==1:\n form = 3\n if vSplit == None:\n vSplit = self.loftSplit#-1\n if uSplit == None:\n uSplit = self.loftSides\n \n \n log.debug(cgmGEN.logString_sub(_str_func,\"Gather loft curves\"))\n for i,mHandle in enumerate(ml_formHandles):\n if skip and i in skip:\n continue\n if mHandle.getMessage('loftCurve'):\n ml_loftCurves.append(mHandle.getMessage('loftCurve',asMeta=1)[0])\n ml_subShapers = mHandle.msgList_get('subShapers')\n if ml_subShapers:\n for mSub in ml_subShapers:\n if mSub.getMessage('loftCurve'):\n ml_loftCurves.append(mSub.getMessage('loftCurve',asMeta=1)[0])\n \n if ml_formHandles[-1].getMessage('pivotHelper') and self.blockProfile not in ['arm']:\n mPivotHelper = ml_formHandles[-1].pivotHelper\n log.debug(\"|{0}| >> pivot helper found \".format(_str_func))\n \n #make the foot geo.... \n mBaseCrv = mPivotHelper.doDuplicate(po=False)\n mBaseCrv.parent = False\n mShape2 = False\n ml_delete.append(mBaseCrv)\n \n mTopLoft = mPivotHelper.getMessageAsMeta('topLoft')\n if mTopLoft:\n mShape2 = mTopLoft.doDuplicate(po=False) \n ml_loftCurves.append(mShape2)\n ml_delete.append(mShape2)\n \"\"\"\n for mChild in mBaseCrv.getChildren(asMeta=True):\n if mChild.cgmName == 'topLoft':\n mShape2 = mChild.doDuplicate(po=False)\n mShape2.parent = False\n ml_loftCurves.append(mShape2)\n ml_delete.append(mShape2) \n mChild.delete()\"\"\"\n ml_loftCurves.append(mBaseCrv)\n \n \"\"\"\n if cap:\n log.debug(cgmGEN.logString_sub(_str_func,\"cap\")) \n ml_use = copy.copy(ml_loftCurves)\n for i,mLoft in enumerate([ml_loftCurves[0],ml_loftCurves[-1]]):\n log.debug(cgmGEN.logString_msg(_str_func,\"duping: {0}\".format(mLoft.mNode)))\n \n mStartCollapse = mLoft.doDuplicate(po=False)\n mStartCollapse.p_parent = False\n mStartCollapse.scale = [.0001 for i in range(3)]\n if mLoft == ml_loftCurves[0]:\n ml_use.insert(0,mStartCollapse)\n else:\n ml_use.append(mStartCollapse)\n ml_delete.append(mStartCollapse)\n ml_loftCurves = ml_use\"\"\"\n \n log.debug(cgmGEN.logString_sub(_str_func,\"Build\"))\n #pprint.pprint(vars())\n \n _d = {'uSplit':uSplit,\n 'vSplit':vSplit,\n 'cap' : cap,\n 'form':form,\n 'uniform':uniform,\n 'deleteHistory':deleteHistory,\n 'merge':deleteHistory,\n 'reverseNormal':reverseNormal,\n 'degree':degree}\n \n if loftMode:\n if loftMode in ['evenCubic','evenLinear']:\n d_tess = {'format':2,#General\n 'polygonType':1,#'quads',\n 'vType':3,\n 'uType':1,\n 'vNumber':1}\n _d['d_tess'] = d_tess\n if loftMode == 'evenCubic':\n _d['degree'] = 3\n _d['uniform'] = True\n d_tess['uNumber'] = (4 + vSplit + (len(ml_loftCurves)) * vSplit)*2\n #..attempting to fix inconsistency in which is u and which is v\n #d_tess['vNumber'] = d_tess['uNumber']\n #d_tess['vType'] = 1\n else:\n _d['degree'] = 1\n d_tess['uNumber'] = (vSplit + (len(ml_loftCurves)) * vSplit)\n \n if flipUV:\n log.warning(cgmGEN.logString_msg(_str_func,\"FLIPPING UV\"))\n \"\"\"\n dTmp = {}\n for i,k in 
enumerate(['u','v']):\n for k2 in 'Type','Number':\n if i:\n dTmp['u'+k2] = d_tess['v'+k2]\n else:\n dTmp['v'+k2] = d_tess['u'+k2]\n d_tess.update(dTmp)\"\"\"\n \n \n elif loftMode == 'default':\n pass\n \n\n #pprint.pprint(vars())\n \n _mesh = BUILDUTILS.create_loftMesh([mCrv.mNode for mCrv in ml_loftCurves],\n **_d)\n \n \"\"\"\n if form in [1,2]:\n mc.polyNormal(_mesh,nm=0) \n if form == 3 and degree ==1:\n mc.polyNormal(_mesh,nm=0) \"\"\"\n \n \n _mesh = mc.rename(_mesh,'{0}_0_geo'.format(self.p_nameBase))\n \n if deleteHistory:\n log.debug(\"|{0}| >> delete history...\".format(_str_func)) \n mc.delete(_mesh, ch=True)\n if ml_delete:mc.delete([mObj.mNode for mObj in ml_delete])\n \n return cgmMeta.validateObjListArg(_mesh,'cgmObject',setClass=True)\n\n\n ml_shapes = []\n \n mMesh_tmp = get_castMesh(self)\n str_meshShape = mMesh_tmp.getShapes()[0]\n \n _l_targets = ATTR.msgList_get(self.mNode,'loftTargets')\n\n\n mc.select(cl=True)\n log.debug(\"|{0}| >> loftTargets: {1}\".format(_str_func,_l_targets))\n\n #>>Body -----------------------------------------------------------------\n _res_body = mc.loft(_l_targets, o = True, d = degree, po = 1 )\n\n _inputs = mc.listHistory(_res_body[0],pruneDagObjects=True)\n _tessellate = _inputs[0]\n\n _d = {'format':2,#General\n 'polygonType':1,#'quads',\n 'uNumber': 1 + jointCount}\n for a,v in _d.iteritems():\n ATTR.set(_tessellate,a,v)\n\n #>>Top/Bottom bottom -----------------------------------------------------------------\n if cap:\n _l_combine = [_res_body[0]] \n for crv in _l_targets[0],_l_targets[-1]:\n _res = mc.planarSrf(crv,po=1)\n _inputs = mc.listHistory(_res[0],pruneDagObjects=True)\n _tessellate = _inputs[0] \n _d = {'format':2,#General\n 'polygonType':1,#'quads',\n 'vNumber':1,\n 'uNumber':1}\n for a,v in _d.iteritems():\n ATTR.set(_tessellate,a,v)\n _l_combine.append(_res[0])\n\n _res = mc.polyUnite(_l_combine,ch=False,mergeUVSets=1,n = \"{0}_proxy_geo\".format(root))\n if merge:\n mc.polyMergeVertex(_res[0], d= .01, ch = 0, am = 1 )\n #polyMergeVertex -d 0.01 -am 1 -ch 1 box_3_proxy_geo;\n mc.polySetToFaceNormal(_res[0],setUserNormal = True) \n else:\n _res = _res_body\n return _res[0]\n \n \n \n \n return \n l_uIsos = SURF.get_dat(str_meshShape, uKnots=True)['uKnots']\n log.debug(\"|{0}| >> Isoparms U: {1}\".format(_str_func,l_uIsos))\n \n #Process ----------------------------------------------------------------------------------\n l_newCurves = []\n d_curves = {}\n \n def getCurve(uValue,l_curves):\n _crv = d_curves.get(uValue)\n if _crv:return _crv\n _crv = mc.duplicateCurve(\"{0}.u[{1}]\".format(str_meshShape,uValue), ch = 0, rn = 0, local = 0)[0]\n mCrv = cgmMeta.asMeta(_crv)\n mCrv.p_parent=False\n d_curves[uValue] = mCrv\n log.debug(\"|{0}| >> created: {1} ...\".format(_str_func,_crv)) \n l_curves.append(mCrv)\n return mCrv\n \n for uValue in l_uIsos:\n mCrv = getCurve(uValue,l_newCurves)",
"def add_mesh_to_scene(sdk, scene, mesh, contentid):\n global n\n name = contentid+\"_\"+str(n)\n n+=1\n # Todo: pass scene instead?\n fbx_mesh = FbxMesh.Create(sdk, name)\n fbx_mesh.CreateLayer()\n layer0 = fbx_mesh.GetLayer(0)\n\n # Verts\n\n fbx_mesh.InitControlPoints(len(mesh.v))\n if RELOCATE_BRUSHES is True:\n print mesh.v\n #MM TRANSLATE BRUSHES\n filler=(0,0,0)\n newmeshv=[]\n for i, v in enumerate(mesh.v):\n if i==0:\n reference=v\n newmeshv.append(filler)\n else:\n newmeshv.append(tuple(numpy.subtract(v,reference)))\n print newmeshv\n mesh.v=newmeshv\n \n for i, v in enumerate(mesh.v):\n fbx_mesh.SetControlPointAt(as_fvec4(v, scale=100), i)\n\n layer_elt = create_fbx_layer(\n fbx_mesh, mesh.n, as_fvec4, FbxLayerElementNormal)\n if layer_elt is not None:\n layer0.SetNormals(layer_elt)\n\n layer_elt = create_fbx_layer(\n fbx_mesh, mesh.c, as_fcolor, FbxLayerElementVertexColor,\n allow_index = True,\n allow_allsame = True)\n if layer_elt is not None:\n layer0.SetVertexColors(layer_elt)\n\n # Tilt Brush may have 3- or 4-element UV channels, and may have multiple\n # UV channels. This only handles the standard case of 2-component UVs\n layer_elt = create_fbx_layer(\n fbx_mesh, mesh.uv0, as_fvec2, FbxLayerElementUV,\n allow_index = True)\n if layer_elt is not None:\n layer0.SetUVs(layer_elt, FbxLayerElement.eTextureDiffuse)\n pass\n\n layer_elt = create_fbx_layer(\n fbx_mesh, mesh.t, as_fvec4, FbxLayerElementTangent,\n allow_index = True)\n if layer_elt is not None:\n layer0.SetTangents(layer_elt)\n\n # Unity's FBX import requires Binormals to be present in order to import the\n # tangents but doesn't actually use them, so we just output some dummy data.\n layer_elt = create_fbx_layer(\n fbx_mesh, ((0, 0, 0, 0),), as_fvec4, FbxLayerElementBinormal,\n allow_allsame = True)\n if layer_elt is not None:\n layer0.SetBinormals(layer_elt)\n\n layer_elt = create_fbx_layer(\n fbx_mesh, (), lambda x: x, FbxLayerElementMaterial, allow_allsame = True)\n if layer_elt is not None:\n layer0.SetMaterials(layer_elt)\n\n # Polygons\n\n for triplet in mesh.tri:\n fbx_mesh.BeginPolygon(-1, -1, False)\n fbx_mesh.AddPolygon(triplet[0])\n fbx_mesh.AddPolygon(triplet[1])\n fbx_mesh.AddPolygon(triplet[2])\n fbx_mesh.EndPolygon()\n\n material = FbxSurfaceLambert.Create(sdk, mesh.brush_name)\n name=mesh.brush_name+\"_\"+str(mesh.c[0])+\"_\"+name\n \n if EXPORT_BRUSH_AREA is True:\n ps=[]\n for t in mesh.v:\n ps.append(list(t))\n #ps2=[]\n #for t in mesh.t:\n # ps2.append(list(t[0:3])) \n # print len(mesh.tri)\n #print len(mesh.v)\n #print ps\n print name+\",\"+str(poly_area(ps))\n #print poly_area(ps2)\n #poly = [[0, 3, 1], [0, 2, 3], [2, 5, 3], [2, 4, 5], [4, 7, 5], [4, 6, 7], [6, 9, 7], [6, 8, 9], [8, 11, 9], [8, 10, 11], [10, 13, 11], [10, 12, 13], [12, 15, 13], [12, 14, 15]]\n #print poly_area(poly) \n global polyareadata\n polyareadata.append(name+\",\"+str(poly_area(ps)))\n \n print name\n mm_save_mesh_metadata(name,mesh)\n #print mesh.brush_name #Roughly analagous to a material\n #print mesh.brush_guid\n #print mesh.v #list of positions (3-tuples)\n #print mesh.n #list of normals (3-tuples, or None if missing)\n #print mesh.uv0 #list of uv0 (2-, 3-, 4-tuples, or None if missing)\n #print mesh.uv1 #see uv0\n #print mesh.c #list of colors, as a uint32. 
abgr little-endian, rgba big-endian\n #print mesh.t #list of tangents (4-tuples, or None if missing)\n #print mesh.tri #list of triangles (3-tuples of ints)\n \n # Node tree\n\n root = scene.GetRootNode()\n node = FbxNode.Create(sdk, name)\n node.SetNodeAttribute(fbx_mesh)\n node.AddMaterial(material)\n node.SetShadingMode(FbxNode.eTextureShading) # Hmm\n root.AddChild(node)",
"def __init__(self, pos, points, triangles):\n\n CPoints = c.c_float * (len(points) * 3)\n cpoints = CPoints()\n for i in xrange(len(points)):\n for j in xrange(3):\n cpoints[(i*3)+j] = points[i][j]\n CTris = c.c_int * len(triangles*3)\n ctris = CTris()\n for i in xrange(len(triangles)):\n for j in xrange(3):\n ctris[(i*3)+j] = triangles[i][j]\n\n self.obj = _pal.lib.body_static_mesh_terrain_create(c.c_float(pos[0]), c.c_float(pos[1]), c.c_float(pos[2]),\n c.pointer(cpoints),len(points)*3, c.pointer(ctris), len(triangles)*3)\n\n self.points = points\n self._body_base = _pal.lib.cast_static_mesh_terrain_body_base(self.obj)",
"def New(*args, **kargs):\n obj = itkMeshSourceMF2.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj",
"def import_submesh(self, meshId, new_mesh, vertex, vbuffer, indices, materialName,\n matIdx):\n vertex_legend = get_vertex_legend(vertex)\n pos_offset = vertex_legend[VES_POSITION][1]\n no_offset = vertex_legend[VES_NORMAL][1]\n image = None\n if materialName in self._imported_ogre_materials:\n ogremat = self._imported_ogre_materials[materialName]\n if ogremat.btex and ogremat.btex.image:\n image = ogremat.btex.image\n if VES_TEXTURE_COORDINATES in vertex_legend:\n uvco_offset = vertex_legend[VES_TEXTURE_COORDINATES][1]\n vertmaps = {}\n indices_map = []\n # vertices\n for idx in range(max(indices)+1):\n coords = get_vcoords(vbuffer, idx, pos_offset)\n if coords:\n if not coords in vertmaps:\n new_mesh.verts.extend(*coords)\n vertmaps[coords] = len(new_mesh.verts)-1\n indices_map.append(vertmaps[coords])\n else:\n new_mesh.verts.extend(0.0,0.0,0.0)\n indices_map.append(len(new_mesh.verts)-1)\n if not len(new_mesh.verts):\n logger.debug(\"mesh with no vertex!!\")\n # faces\n for idx in range(len(indices)/3):\n idx = idx*3\n new_mesh.vertexUV = False\n face = [indices_map[indices[idx]],\n indices_map[indices[idx+1]],\n indices_map[indices[idx+2]]]\n new_mesh.faces.extend(face, ignoreDups=True)\n if len(new_mesh.faces) == 0:\n logger.debug(\"Degenerate face!\")\n continue\n face = new_mesh.faces[len(new_mesh.faces)-1]\n if image:\n face.image = image\n try:\n no1 = get_nor(indices[idx], vbuffer, no_offset)\n except:\n no1 = [0.0,0.0,0.0]\n try:\n no2 = get_nor(indices[idx+1], vbuffer, no_offset)\n except:\n no2 = [0.0,0.0,0.0]\n try:\n no3 = get_nor(indices[idx+2], vbuffer, no_offset)\n except:\n no3 = [0.0,0.0,0.0]\n if VES_TEXTURE_COORDINATES in vertex_legend:\n uv1 = get_uv(indices[idx], vbuffer, uvco_offset)\n uv2 = get_uv(indices[idx+1], vbuffer, uvco_offset)\n uv3 = get_uv(indices[idx+2], vbuffer, uvco_offset)\n face.uv = (mathutils.Vector(uv1),\n mathutils.Vector(uv2),\n mathutils.Vector(uv3))\n if not len(new_mesh.faces):\n logger.warning(\"mesh with no faces!!\")\n #sys.stderr.write(\"*\")\n #sys.stderr.flush()\n return new_mesh",
"def __init__(self, file_path):\n\n # Comments\n # mtllib mtl_name\n # o object_name\n # v x y z\n # vt u v\n # vn x y z\n # f v0/t0/n0 v1/t1/n1 v2/t2/n2\n\n print('loading mesh \"%s\"' % file_path)\n mesh_file = open(file_path, 'r')\n\n verts = []\n texs = []\n normals = []\n faces = []\n\n # For each line of the input file\n for line in mesh_file:\n line = line.rstrip(' \\r\\n')\n\n # Skip comments\n if line.startswith('#') or line == '':\n continue\n\n tokens = line.split(' ')\n tokens = map(lambda t: t.strip(' '), tokens)\n tokens = list(filter(lambda t: t != '', tokens))\n\n prefix = tokens[0]\n tokens = tokens[1:]\n\n if prefix == 'v':\n vert = list(map(lambda v: float(v), tokens))\n verts.append(vert)\n\n if prefix == 'vt':\n tc = list(map(lambda v: float(v), tokens))\n texs.append(tc)\n\n if prefix == 'vn':\n normal = list(map(lambda v: float(v), tokens))\n normals.append(normal)\n\n if prefix == 'f':\n assert len(tokens) == 3, \"only triangle faces are supported\"\n\n face = []\n for token in tokens:\n indices = list(map(lambda idx: int(idx), token.split('/')))\n face.append(indices)\n\n faces.append(face)\n\n mesh_file.close()\n\n self.num_faces = len(faces)\n\n print('num verts=%d' % len(verts))\n print('num_faces=%d' % self.num_faces)\n\n # Create numpy arrays to store the vertex data\n list_verts = np.zeros(shape=(3 * self.num_faces, 3), dtype=np.float32)\n list_texcs = np.zeros(shape=3 * 2 * self.num_faces, dtype=np.float32)\n list_norms = np.zeros(shape=3 * 3 * self.num_faces, dtype=np.float32)\n\n cur_vert_idx = 0\n\n # For each triangle\n for face in faces:\n # For each triplet of indices\n for triplet in face:\n v_idx, t_idx, n_idx = triplet\n\n # Note: OBJ uses 1-based indexing\n vert = verts[v_idx-1]\n texc = texs[t_idx-1]\n normal = normals[n_idx-1]\n\n list_verts[cur_vert_idx, :] = vert\n list_texcs[2*cur_vert_idx:2*(cur_vert_idx+1)] = texc\n list_norms[3*cur_vert_idx:3*cur_vert_idx+3] = normal\n\n cur_vert_idx += 1\n\n # Re-center the object so that y=0 is at the base,\n # and the object is centered in x and z\n x_coords = list_verts[:, 0]\n z_coords = list_verts[:, 2]\n min_y = list_verts[:, 1].min()\n mean_x = (x_coords.min() + x_coords.max()) / 2\n mean_z = (z_coords.min() + z_coords.max()) / 2\n list_verts[:, 1] -= min_y\n list_verts[:, 0] -= mean_x\n list_verts[:, 2] -= mean_z\n\n # Compute the object extents after centering\n x_coords = list_verts[:, 0]\n y_coords = list_verts[:, 1]\n z_coords = list_verts[:, 2]\n self.y_max = y_coords.max()\n\n # Create a vertex list to be used for rendering\n self.vlist = pyglet.graphics.vertex_list(\n 3 * self.num_faces,\n ('v3f', list_verts.reshape(-1)),\n ('t2f', list_texcs),\n ('n3f', list_norms)\n )\n\n # Load the texture associated with this mesh\n file_name = os.path.split(file_path)[-1]\n tex_name = file_name.split('.')[0]\n tex_path = get_file_path('textures', tex_name, 'png')\n self.texture = load_texture(tex_path)",
"def load_body(data):\n\n name = data[\"name\"]\n parent = None\n if \"parent\" in data:\n parent = data[\"parent\"]\n texture = data[\"texture\"]\n basecolor = data[\"basecolor\"]\n radius = data[\"radius\"]\n axial_tilt = data[\"axial_tilt\"]\n sidereal_rotation_period = data[\"sidereal_rotation_period\"] * dts\n mass = data[\"mass\"]\n has_orbit = False\n orbit = None\n has_ring = False\n ring_texture = None\n ring_inner_radius = None\n ring_outer_radius = None\n\n if \"orbit\" in data:\n has_orbit = True\n orbit = load_orbit(data[\"orbit\"])\n if \"ring\" in data:\n ring_data = data[\"ring\"]\n has_ring = True\n ring_texture = ring_data[\"texture\"]\n ring_inner_radius = ring_data[\"radius\"][\"inner\"]\n ring_outer_radius = ring_data[\"radius\"][\"outer\"]\n\n body = None\n\n if has_orbit:\n body = OrbitingBody(None, name, texture, basecolor, radius, orbit, axial_tilt, sidereal_rotation_period, mass)\n if has_ring:\n body.renderer = OrbitingBodyWithRingRenderer()\n body = setup_ring_renderer(ring_texture, ring_inner_radius, ring_outer_radius, body)\n else:\n body = StationaryBody(None, name, texture, basecolor, radius, axial_tilt, sidereal_rotation_period, mass)\n\n body.parent_internal_name = parent\n return body",
"def load_mesh(name):\n if name[-4:] == \".obj\":\n bpy.ops.import_scene.obj(filepath=name)\n mesh_name = (os.path.basename(name)).replace('.obj','')\n return mesh_name\n else:\n raise ValueError(\"{} not an obj file\".format(name))",
"def create_sat_body(self):\n\n # Dimensions of body\n SAT_SIZE = self.ANI_SCALE*self.SAT_SCALE*np.asarray(self.SAT_PROPS[\"Size\"])/2\n bx = SAT_SIZE[0]\n by = SAT_SIZE[1]\n bz = SAT_SIZE[2]\n\n # Create vertices in body frame\n ind = 0\n V = []\n for x in [-1, 1]:\n for y in [-1, 1]:\n for z in [-1, 1]:\n V.append((bx*x, by*y, bz*z))\n \n # Create faces\n F = [\n (0, 1, 3, 2),\n (4, 5, 7, 6),\n (0, 1, 5, 4),\n (2, 3, 7, 6),\n (0, 2, 6, 4),\n (1, 3, 7, 5)\n ]\n\n # Create building blocks of polydata\n sat = vtk.vtkPolyData()\n points = vtk.vtkPoints()\n polys = vtk.vtkCellArray()\n scalars = vtk.vtkFloatArray()\n\n # Load the point, cell and data attributes\n for i in range(len(V)):\n points.InsertPoint(i, V[i])\n for i in range(len(F)):\n polys.InsertNextCell(self.mkVtkIdList(F[i]))\n for i in range(len(V)):\n scalars.InsertTuple1(i, i)\n \n # Assign the pieces to the vtkPolyData.\n sat.SetPoints(points)\n del points\n sat.SetPolys(polys)\n del polys\n sat.GetPointData().SetScalars(scalars)\n del scalars\n\n # Mapper\n mapper = vtk.vtkPolyDataMapper()\n mapper.SetInputData(sat)\n mapper.ScalarVisibilityOff()\n\n # Actor\n actor = vtk.vtkActor()\n actor.SetMapper(mapper)\n actor.GetProperty().SetColor(0.5, 0.5, 0.5)\n actor.GetProperty().SetAmbient(0.5)\n actor.GetProperty().SetSpecular(1.0)\n actor.GetProperty().SetSpecularPower(5.0)\n actor.GetProperty().SetDiffuse(0.2)\n\n # Move to sat position\n actor.SetPosition(0, 0, -self.SAT_PROPS[\"Alt\"])\n\n return actor",
"def import_droplet(ply_path, object_name, dim, scale, material_name):\n\n # Import geometry\n bpy.ops.import_mesh.ply(filepath = ply_path)\n \n # Get name of just-imported object \n name_starts_with = os.path.basename(ply_path)[:-4] # Base name of ply file without \".ply\" extension\n print(object_name)\n print(bpy.data.objects.keys())\n for object_name_infile in bpy.data.objects.keys():\n if object_name_infile.startswith(name_starts_with):\n current_object = object_name_infile\n break\n \n # Select this object\n bpy.context.scene.objects.active = bpy.data.objects[current_object]\n \n # Get this object\n ob = bpy.context.active_object\n\n # Re-name current object\n ob.name = object_name\n\n # Remove doubled vertices\n remove_doubles()\n\n # Move object to center stage and rescale to appropriate size\n center_databox(dim[0], dim[1], dim[2], scale)\n\n # Get interface material\n mat = bpy.data.materials.get(material_name)\n # Assign it to object\n if ob.data.materials:\n # assign to 1st material slot\n ob.data.materials[0] = mat\n else:\n # no slots; create new slot\n ob.data.materials.append(mat)\n\n # Enable smooth shading on current mesh object\n bpy.ops.object.shade_smooth()\n\n return ob",
"def load_glb(self):\n with open(str(self.path), \"rb\") as fd:\n # Check header\n magic = fd.read(4)\n if magic != GLTF_MAGIC_HEADER:\n raise ValueError(\n \"{} has incorrect header {} != {}\".format(\n self.path, magic, GLTF_MAGIC_HEADER\n )\n )\n\n version = struct.unpack(\"<I\", fd.read(4))[0]\n if version != 2:\n raise ValueError(\n \"{} has unsupported version {}\".format(self.path, version)\n )\n\n # Total file size including headers\n _ = struct.unpack(\"<I\", fd.read(4))[0] # noqa\n\n # Chunk 0 - json\n chunk_0_length = struct.unpack(\"<I\", fd.read(4))[0]\n chunk_0_type = fd.read(4)\n if chunk_0_type != b\"JSON\":\n raise ValueError(\n \"Expected JSON chunk, not {} in file {}\".format(\n chunk_0_type, self.path\n )\n )\n\n json_meta = fd.read(chunk_0_length).decode()\n\n # chunk 1 - binary buffer\n chunk_1_length = struct.unpack(\"<I\", fd.read(4))[0]\n chunk_1_type = fd.read(4)\n if chunk_1_type != b\"BIN\\x00\":\n raise ValueError(\n \"Expected BIN chunk, not {} in file {}\".format(\n chunk_1_type, self.path\n )\n )\n\n self.gltf = GLTFMeta(\n self.path,\n json.loads(json_meta),\n self.meta,\n binary_buffer=fd.read(chunk_1_length),\n )",
"def importMesh(self, name, file, mtype, material, **args):\n args = dictToTuple(**args)\n\n if not self.rank:\n logging.info('Importing mesh from {}'.format(file))\n\n self.lmp.command('fix {} all {} file {} type {} '.format(name, mtype, file, material) + ('{} ' * len(args)).format(*args))",
"def load_mesh(self, script_to_apply=None): \n # convert to an obj file using meshlab\n if script_to_apply is None:\n meshlabserver_cmd = 'meshlabserver -i \\\"%s\\\" -o \\\"%s\\\"' %(self.filename, self.obj_filename)\n else:\n meshlabserver_cmd = 'meshlabserver -i \\\"%s\\\" -o \\\"%s\\\" -s \\\"%s\\\"' %(self.filename, self.obj_filename, script_to_apply) \n os.system(meshlabserver_cmd)\n logging.info('MeshlabServer Command: %s' %(meshlabserver_cmd))\n\n if not os.path.exists(self.obj_filename):\n raise ValueError('Meshlab conversion failed for %s' %(self.obj_filename))\n \n # read mesh from obj file\n of = obj_file.ObjFile(self.obj_filename)\n self.mesh_ = of.read()\n return self.mesh_",
"def WriteOBJ(self, filename, write_texture=False, write_normal=False):\n\n self.__do_essential_memebers_exist__()\n\n mesh = deepcopy(self)\n p = self.InferPolynomialDegree()\n\n if p > 1:\n mesh = self.GetLinearMesh(remap=True)\n\n edim = mesh.InferElementalDimension()\n\n if edim == 2:\n elements = np.copy(mesh.elements).astype(np.int64)\n elif edim == 3:\n elements = np.copy(mesh.faces).astype(np.int64)\n else:\n raise RuntimeError(\"Writing obj file for {} elements not supported\".format(mesh.element_type))\n\n points = mesh.points[np.unique(elements),:]\n if points.shape[1] == 2:\n points = np.hstack((points,np.zeros((points.shape[0],1))))\n\n points_repr = np.zeros((points.shape[0],points.shape[1]+1), dtype=object)\n points_repr[:,0] = \"v\"\n points_repr[:,1:] = points\n\n elements_repr = np.zeros((elements.shape[0],elements.shape[1]+1), dtype=object)\n elements_repr[:,0] = \"f\"\n elements_repr[:,1:] = elements + 1\n\n if write_texture:\n textures = mesh.textures[np.unique(elements),:]\n\n textures_repr = np.zeros((textures.shape[0],textures.shape[1]+1), dtype=object)\n textures_repr[:,0] = \"vt\"\n textures_repr[:,1:] = textures\n\n elements_repr = np.zeros((mesh.telements.shape[0],mesh.telements.shape[1]+1), dtype=object)\n elements_repr[:,0] = \"f\"\n # elements_repr[:,1:] = telements + 1\n counter = 0\n for i, j in zip(elements,mesh.telements):\n curr_row = [str(ii+1)+\"/\"+str(jj+1) for ii,jj in zip(i,j)]\n elements_repr[counter,1:] = curr_row\n counter += 1\n\n with open(filename, \"w\") as f:\n # f.write(\"# \"+ str(mesh.nnode))\n # f.write('\\n')\n # f.write(\"# \"+ str(mesh.nelem))\n # f.write('\\n')\n\n np.savetxt(f, points_repr, fmt=\"%s\")\n if write_texture:\n np.savetxt(f, textures_repr, fmt=\"%s\")\n\n if write_normal:\n if self.normals is None:\n enormals = self.Normals()\n els = self.GetNodeCommonality()[0]\n self.normals = np.zeros((self.nnode, 3))\n for counter, el in enumerate(els):\n self.normals[counter] = np.sum(enormals[el], axis=0) / enormals[el].shape[0]\n\n normals_repr = np.zeros((self.normals.shape[0], self.normals.shape[1]+1), dtype=object)\n normals_repr[:,0] = \"vn\"\n normals_repr[:,1:] = self.normals\n np.savetxt(f, normals_repr, fmt=\"%s\")\n\n f.write('\\n')\n np.savetxt(f, elements_repr, fmt=\"%s\")",
"def add_mesh(\n self,\n mesh,\n boundary_id=-1,\n destroy_order=True,\n ):\n assert hasattr(mesh, (\"vertices\" and \"faces\")), \"Invalid Mesh type!\"\n\n if not destroy_order:\n\n # (Probably) slow, but proper.\n # Takes care of connectivity, meaning, also usable in 2D. \n for f in mesh.faces:\n self.add_polygon(\n nodes=mesh.vertices[f],\n subsections=1, # Don't alter anything\n boundary_id=boundary_id,\n )\n logging.debug(\"Segment - Succesfully added mesh as `nodes`, \"+\\\n \"`connectivity`, `polygon`.\")\n\n else:\n # Act tough.\n logging.warning(\"Segment - Destroying ORDNUNG! Adding mesh with \"+\\\n \"`destroy_order=True`\")\n logging.warning(\"Segment - Destroying ORDNUNG! `connectivity` is \"+\\\n \"no more valid.\")\n logging.warning(\"Segment - Destroying ORDNUNG! `reference_node` \"+\\\n \"is no more valid.\")\n logging.warning(\"Segment - Destroying ORDNUNG! \"+\\\n \"`last_sequence_ind` is no more valid.\")\n logging.warning(\"Segment - Destroying ORDNUNG! More stuffs are \"+\\\n \"no more valid.\")\n logging.warning(\"Segment - Destroying ORDNUNG! I hope you only \"+\\\n \"add mesh from now.\")\n\n # Add nodes and polygons\n if self.nodes is None:\n self.nodes = mesh.vertices\n ind_offset = self.nodes.shape[0]\n\n else:\n ind_offset = self.nodes.shape[0]\n self.nodes_ = np.vstack(\n (self.nodes_,\n mesh.vertices)\n )\n\n self.polygons_.extend(\n (mesh.faces + ind_offset).tolist()\n )\n\n # And boundary conditions\n self.add_boundary_id(\n boundary_id,\n len(mesh.faces),\n facet=True,\n )",
"def makeModel(self):\n\n # Get the script\n modelScript = os.path.join(self.datapath, 'make3FGLxml.py')\n if not os.path.isfile(modelScript):\n # download it\n print(\"\\t=== Downloading make3FGLxml.py ===\")\n os.system('wget https://fermi.gsfc.nasa.gov/ssc/data/analysis/user/make3FGLxml.py -O {}'.format(modelScript))\n\n # Create the model using Tyrel's script\n galModel = os.path.join(self.diffpath, 'gll_iem_v06.fits')\n isoModel = os.path.join(self.diffpath, 'iso_'+self.irf+'_v06.txt')\n if (not os.path.isfile(galModel)) or (not os.path.isfile(isoModel)):\n print(\"\\t=== Unable to find the diffuse models, check the variable '$FERMI_DIR' ===\")\n return\n if not os.path.isdir(self.extpath):\n print(\"\\t=== Unable to find models of extended sources, check the variable '$LATEXTDIR' ===\")\n return\n if not os.path.isfile(self.fermicat):\n # download it\n print(\"\\t=== Downloading 3FGL catalog ===\")\n os.system('wget https://fermi.gsfc.nasa.gov/ssc/data/access/lat/4yr_catalog/gll_psc_v16.fit -O {}'.format(self.fermicat))\n\n os.popen(\"python {} {} {} -o {} -G {} -g 'gll_iem_v06'\\\n -I {} -i 'iso_source_v06' -e {} -r 5 -R 10 -ER 10\\\n -s 9 -m False -GIF False\".format(modelScript, self.fermicat,\n self.ft1, self.model, galModel, isoModel, self.extpath))\n\n # Add the target to the model\n tmpName = self.model + '.tmp'\n rfil = open(self.model, 'r')\n wfil = open(tmpName, 'w')\n # Copy the XML to the temporary model\n wfil.writelines([l for l in rfil.readlines() if not l=='</source_library>']) # copy everything but the last line\n wfil.write(' <source ROI_Center_Distance=\"0.00\" name=\"TARGET\" type=\"PointSource\">\\n')\n wfil.write(' <spectrum type=\"PowerLaw2\">\\n')\n wfil.write(' <parameter free=\"1\" max=\"1000\" min=\"1e-05\" name=\"Integral\" scale=\"1e-08\" value=\"0.3591824258\"/>\\n')\n wfil.write(' <parameter free=\"1\" max=\"1\" min=\"-5\" name=\"Index\" scale=\"1\" value=\"-2.7\"/>\\n')\n wfil.write(' <parameter free=\"0\" max=\"1000000\" min=\"20\" name=\"LowerLimit\" scale=\"1\" value=\"100\"/>\\n')\n wfil.write('<parameter free=\"0\" max=\"1000000\" min=\"20\" name=\"UpperLimit\" scale=\"1\" value=\"100000\"/>\\n')\n wfil.write(' </spectrum>\\n')\n wfil.write(' <spatialModel type=\"SkyDirFunction\">\\n')\n wfil.write(' <parameter free=\"0\" max=\"360.0\" min=\"-360.0\" name=\"RA\" scale=\"1.0\" value=\"'+str(self.ra)+'\"/>\\n')\n wfil.write(' <parameter free=\"0\" max=\"360.0\" min=\"-360.0\" name=\"DEC\" scale=\"1.0\" value=\"'+str(self.dec)+'\"/>\\n')\n wfil.write(' </spatialModel>\\n')\n wfil.write(' </source>\\n')\n wfil.write('</source_library>\\n')\n rfil.close()\n wfil.close()\n\n os.remove(self.model)\n os.rename(tmpName, self.model)\n \n print(\"\\t=== Source model {} added ===\".format(self.model))\n return",
"def New(*args, **kargs):\n obj = itkMeshSourceMUS3.__New_orig__()\n import itkTemplate\n itkTemplate.New(obj, *args, **kargs)\n return obj"
] | [
"0.5972571",
"0.59495294",
"0.5882489",
"0.5880396",
"0.58730125",
"0.5865938",
"0.56890905",
"0.56812716",
"0.5676533",
"0.56725544",
"0.5665458",
"0.5633514",
"0.56177545",
"0.56158864",
"0.5609427",
"0.5551452",
"0.5526342",
"0.5510303",
"0.5461769",
"0.54555094",
"0.54292744",
"0.5407272",
"0.54023826",
"0.5391818",
"0.53914285",
"0.5385282",
"0.53667897",
"0.53340596",
"0.53201747",
"0.5311031"
] | 0.63767177 | 0 |
Provides access to a mesh body within the collection. | def item(self, index):
return ParaMeshBody() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def nativeObject(self):\n return ParaMeshBody()",
"def get_mesh(self):\n return self.mesh",
"def getMesh(self):\n return self.mesh",
"def mesh(self):\n return self._mesh",
"def mesh(self):\n return self._mesh",
"def mesh(self):\n return self._mesh",
"def mesh(self):\n return self._mesh",
"def getFromComponent(self, component):\n return ParaMeshBodies()",
"def mesh(self):\n self._ensure_mesh()\n return self._mesh",
"def body_contacts(self, physics):\n return self.collect_contacts(physics, self._body_geom_ids)",
"def get_mesh_ids(self, body):\n with self.lock:\n return self.send_command('get_kinbody_link_mesh_ids ' + body.GetName())",
"def _final_mesh(self):\n assert (\n \"final_mesh\" in self.__dict__.keys()\n ), \"Final Mesh does not exist yet - please run multi-view optimization before getting\"\n return self.final_mesh",
"def body(self) -> Body:\n return self._body",
"def __init__(self, mesh):\n self._mesh = mesh",
"def load_body(data):\n\n name = data[\"name\"]\n parent = None\n if \"parent\" in data:\n parent = data[\"parent\"]\n texture = data[\"texture\"]\n basecolor = data[\"basecolor\"]\n radius = data[\"radius\"]\n axial_tilt = data[\"axial_tilt\"]\n sidereal_rotation_period = data[\"sidereal_rotation_period\"] * dts\n mass = data[\"mass\"]\n has_orbit = False\n orbit = None\n has_ring = False\n ring_texture = None\n ring_inner_radius = None\n ring_outer_radius = None\n\n if \"orbit\" in data:\n has_orbit = True\n orbit = load_orbit(data[\"orbit\"])\n if \"ring\" in data:\n ring_data = data[\"ring\"]\n has_ring = True\n ring_texture = ring_data[\"texture\"]\n ring_inner_radius = ring_data[\"radius\"][\"inner\"]\n ring_outer_radius = ring_data[\"radius\"][\"outer\"]\n\n body = None\n\n if has_orbit:\n body = OrbitingBody(None, name, texture, basecolor, radius, orbit, axial_tilt, sidereal_rotation_period, mass)\n if has_ring:\n body.renderer = OrbitingBodyWithRingRenderer()\n body = setup_ring_renderer(ring_texture, ring_inner_radius, ring_outer_radius, body)\n else:\n body = StationaryBody(None, name, texture, basecolor, radius, axial_tilt, sidereal_rotation_period, mass)\n\n body.parent_internal_name = parent\n return body",
"def collectionMeshes(collection):\n return [o for o in collection.all_objects if o.type == 'MESH']",
"def add(self, fullFilename, units, baseOrFormFeature):\n return ParaMeshBodyList()",
"def draw_body(node, body):\r\n\t\tx,y,z = body.getPosition()\r\n\t\tnode.setPosition(vector3df(x,y,z)*10)\r\n\t\tw,xx,yy,zz = body.getQuaternion()\r\n\t\tnode.setRotation(vector3df(degrees(xx), degrees(yy), degrees(zz)))\r\n\t\tif body.shape == \"box\":\r\n\t\t\tsx,sy,sz = body.boxsize\r\n\t\t\tnode.setScale(vector3df(sx,sy,sz))",
"def __init__(self, mesh: Mesh):\n self.mesh = mesh\n self.children = []",
"def name(self):\n return self.mesh.name",
"def name(self):\n return self.mesh.name",
"def makePhysicsBody(self):\n space = self.environment.space\n geom = GeomBox(space, self.dim)\n geom.setPosition(self.centerPos)\n geom.setCategoryBits(2)\n geom.setCollideBits(1)\n self.geomList = [geom]",
"def body(self):\n return self.getattr('body')",
"def getCurrentBMesh():\n # if there aren't active objects, return error\n if bpy.context.object is None:\n print(\"No object is selected!\")\n return None, None\n\n return bpy.context.object.data",
"def create_sat_body(self):\n\n # Dimensions of body\n SAT_SIZE = self.ANI_SCALE*self.SAT_SCALE*np.asarray(self.SAT_PROPS[\"Size\"])/2\n bx = SAT_SIZE[0]\n by = SAT_SIZE[1]\n bz = SAT_SIZE[2]\n\n # Create vertices in body frame\n ind = 0\n V = []\n for x in [-1, 1]:\n for y in [-1, 1]:\n for z in [-1, 1]:\n V.append((bx*x, by*y, bz*z))\n \n # Create faces\n F = [\n (0, 1, 3, 2),\n (4, 5, 7, 6),\n (0, 1, 5, 4),\n (2, 3, 7, 6),\n (0, 2, 6, 4),\n (1, 3, 7, 5)\n ]\n\n # Create building blocks of polydata\n sat = vtk.vtkPolyData()\n points = vtk.vtkPoints()\n polys = vtk.vtkCellArray()\n scalars = vtk.vtkFloatArray()\n\n # Load the point, cell and data attributes\n for i in range(len(V)):\n points.InsertPoint(i, V[i])\n for i in range(len(F)):\n polys.InsertNextCell(self.mkVtkIdList(F[i]))\n for i in range(len(V)):\n scalars.InsertTuple1(i, i)\n \n # Assign the pieces to the vtkPolyData.\n sat.SetPoints(points)\n del points\n sat.SetPolys(polys)\n del polys\n sat.GetPointData().SetScalars(scalars)\n del scalars\n\n # Mapper\n mapper = vtk.vtkPolyDataMapper()\n mapper.SetInputData(sat)\n mapper.ScalarVisibilityOff()\n\n # Actor\n actor = vtk.vtkActor()\n actor.SetMapper(mapper)\n actor.GetProperty().SetColor(0.5, 0.5, 0.5)\n actor.GetProperty().SetAmbient(0.5)\n actor.GetProperty().SetSpecular(1.0)\n actor.GetProperty().SetSpecularPower(5.0)\n actor.GetProperty().SetDiffuse(0.2)\n\n # Move to sat position\n actor.SetPosition(0, 0, -self.SAT_PROPS[\"Alt\"])\n\n return actor",
"def multibody(body):\n if len(body) > 1:\n return [\"begin\"] + body\n else:\n return body[0]",
"def get_body_extents(self, bodyName: str, shapeIdx: int = 0) -> np.ndarray:\n return self._sim.getBodyExtents(bodyName, shapeIdx)",
"def getRuptureAsMesh(self):\n rupture = Mesh(self._lon, self._lat, self._depth)\n return rupture",
"def HelioVector(body, time):\n if body == Body.Pluto:\n return _CalcPluto(time)\n\n if 0 <= body.value < len(_vsop):\n return _CalcVsop(_vsop[body.value], time)\n\n if body == Body.Sun:\n return Vector(0.0, 0.0, 0.0, time)\n\n if body == Body.Moon:\n e = _CalcEarth(time)\n m = GeoMoon(time)\n return Vector(e.x+m.x, e.y+m.y, e.z+m.z, time)\n\n if body == Body.EMB:\n e = _CalcEarth(time)\n m = GeoMoon(time)\n d = 1.0 + _EARTH_MOON_MASS_RATIO\n return Vector(e.x+(m.x/d), e.y+(m.y/d), e.z+(m.z/d), time)\n\n if body == Body.SSB:\n return _CalcSolarSystemBarycenter(time)\n\n raise InvalidBodyError()",
"def body(self):\n\n return self._body"
] | [
"0.6957442",
"0.6633259",
"0.6631735",
"0.6371821",
"0.6371821",
"0.6371821",
"0.6371821",
"0.63649315",
"0.62939316",
"0.5899391",
"0.57027155",
"0.5660068",
"0.56338304",
"0.55298495",
"0.5479331",
"0.547045",
"0.5432832",
"0.5420212",
"0.54182005",
"0.53654414",
"0.53654414",
"0.5362228",
"0.5354277",
"0.5318616",
"0.5305592",
"0.52732015",
"0.52675444",
"0.5261785",
"0.5245713",
"0.5239069"
] | 0.752903 | 0 |
Returns the parent Component. | def parentComponent(self):
return fusion.Component() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def get_parent(self):\n return self._find_by_locator().parent",
"def get_parent(self):\n return self.__parent",
"def get_parent(self):\n return self.__parent",
"def get_parent(self):\n return self.parent",
"def get_parent(self):\n return self.parent",
"def get_parent(self):\n return self.parent",
"def GetParent(self):\n return self.parent",
"def get_parent(self) :\n return self.parent",
"def get_parent(self):\n return self._parent",
"def GetParent(self):\r\n\r\n return self._parent",
"def get_parent(self):\n if self.parent:\n return self.parent()\n else:\n return None",
"def parent(self):\n\t\treturn self._parent",
"def parent(self):\n \n return self._parent",
"def parent(self):\r\n return self._parent",
"def parent(self):\n return self._parent",
"def parent(self):\n return self._parent",
"def parent(self):\n return self._parent",
"def parent(self):\n return self._parent",
"def parent(self):\n return self._parent",
"def parent(self):\n return self._parent",
"def parent(self):\n return self._parent",
"def parent(self):\n return self._parent",
"def parent(self):\n return self._parent",
"def parent(self):\n return self._parent",
"def parent(self):\n return self._parent",
"def parent(self):\n if self._parent is not None:\n return self._parent()\n else:\n return None",
"def component_parent(self, component):\n \n list = self.components(\"ANY children.identifier = '%s'\" % _obj_id(component))\n if len(list) > 0:\n return list[0]\n else:\n return None",
"def get_parent(self): # real signature unknown; restored from __doc__\n pass",
"def parent(self):\n return self.get_parent().specific",
"def parent(self):\n if self.__parent is None:\n return None\n parent = self.__parent()\n if parent is None:\n self.__parent = parent\n return parent"
] | [
"0.8493198",
"0.8421001",
"0.8421001",
"0.84084857",
"0.84084857",
"0.84084857",
"0.838501",
"0.8367333",
"0.8352728",
"0.83304477",
"0.82310194",
"0.81645995",
"0.81499857",
"0.8140345",
"0.81360763",
"0.81360763",
"0.81360763",
"0.81360763",
"0.81360763",
"0.81360763",
"0.81360763",
"0.81360763",
"0.81360763",
"0.81360763",
"0.81360763",
"0.7963105",
"0.7950473",
"0.7903962",
"0.7901388",
"0.7859506"
] | 0.8622309 | 0 |
Returns the assembly occurrence (i.e. the occurrence) of this object in an assembly. This is only valid in the case where this is acting as a proxy in an assembly. Returns null in the case where the object is not in the context of an assembly but is already the native object. | def assemblyContext(self):
return fusion.Occurrence() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def assembly(self):\n return self._assembly",
"def _getReflectiveDuplicateAssembly(self, neighborLoc):\n duplicates = []\n otherTwoLocations = self.spatialGrid.getSymmetricEquivalents(neighborLoc)\n for i, j in otherTwoLocations:\n neighborLocation2 = self.spatialGrid[i, j, 0]\n duplicateAssem = self.childrenByLocator.get(neighborLocation2)\n if duplicateAssem is not None:\n duplicates.append(duplicateAssem)\n\n # should always be 0 or 1\n nDuplicates = len(duplicates)\n if nDuplicates == 1:\n return duplicates[0]\n elif nDuplicates > 1:\n raise ValueError(\"Too many neighbors found!\")\n return None",
"def refAssem(self):\n key = lambda a: a.spatialLocator.getRingPos()\n assems = self.getAssemblies(Flags.FUEL, sortKey=key)\n if not assems:\n assems = self.getAssemblies(sortKey=key)\n\n return assems[0]",
"def getFirstAssembly(self, typeSpec=None, exact=False):\n if typeSpec:\n try:\n return next(a for a in self if a.hasFlags(typeSpec, exact))\n except StopIteration:\n runLog.warning(\"No assem of type {0} in reactor\".format(typeSpec))\n return None\n\n # Assumes at least one assembly in `self`\n return next(iter(self))",
"def getAssembly(\n self, assemNum=None, locationString=None, assemblyName=None, *args, **kwargs\n ):\n if assemblyName:\n return self.getAssemblyByName(assemblyName)\n\n for a in self.getAssemblies(*args, **kwargs):\n if a.getLocation() == locationString:\n return a\n if a.getNum() == assemNum:\n return a\n\n return None",
"def get_sobj(self):\n return self._std.FindObjectID(self.entry)",
"def FindObjectOrSmi(self, tagged_address):\n found_obj = self.SenseObject(tagged_address)\n if found_obj: return found_obj\n if self.IsSmi(tagged_address):\n return self.FormatSmi(tagged_address)\n else:\n return \"Unknown(%s)\" % self.reader.FormatIntPtr(tagged_address)",
"def get_src_to_inst(self) -> int:\n\n # get the Qt document\n doc: QCodeDocument = self.document()\n\n # get the current position of the cursor\n cursor = self.textCursor()\n pos = cursor.position()\n\n # get the node at the associated cursor position\n current_node = doc.get_stmt_node_at_position(pos)\n\n if (\n current_node is not None\n and hasattr(current_node, \"tags\")\n and current_node.tags is not None\n and \"ins_addr\" in current_node.tags\n ):\n asm_ins_addr = current_node.tags[\"ins_addr\"]\n\n else:\n # the top of the function decompiled\n asm_ins_addr = self._code_view.function.addr\n\n return asm_ins_addr",
"def get_child_index(self, name):\n log = logging.getLogger(__name__)\n if self.synthetic_type == self.SYNTHETIC_CHILDREN:\n r = self.get_registered_child_value_parameter(ivar_name=name)\n index = None\n if r is None:\n log.debug(\"get_child_index: Cannot find registered child with ivar name: {} for class {}.\".format(name, self.type_name))\n return index\n\n if self.synthetic_children.count(r.attribute_name):\n index = self.synthetic_children.index(r.attribute_name)\n else:\n log = logging.getLogger(__name__)\n log.debug(\"get_child_index: Cannot find child with name: {} for class {}.\".format(name, self.type_name))\n return index\n elif self.synthetic_type == self.SYNTHETIC_PROXY_NAME:\n value = getattr(self, self.synthetic_proxy_name)\n \"\"\":type: lldb.SBValue\"\"\"\n if value is not None:\n value = get_synthetic_value_copy(value)\n index = value.GetIndexOfChildWithName(name)\n \"\"\":type: int\"\"\"\n return index\n log.error(\"get_child_index: Cannot get proxy value: {} for type {}.\".format(self.synthetic_proxy_name, self.type_name))\n return None\n elif self.synthetic_type == self.SYNTHETIC_PROXY_VALUE:\n if self.synthetic_proxy_value is not None:\n value = get_synthetic_value_copy(self.synthetic_proxy_value)\n index = value.GetIndexOfChildWithName(name)\n \"\"\":type: int\"\"\"\n return index\n log.error(\"get_child_index: No proxy value for type {}.\".format(self.type_name))\n # Returns index of child for current object.\n return self.value_obj.GetIndexOfChildWithName(name)\n\n log.error(\"get_child_index: Unknown synthetic type: {} for type {}.\".format(self.synthetic_type, self.type_name))\n return None",
"def getAssemblyByName(self, name):\n return self.assembliesByName[name]",
"def find_class(self):\n stack = inspect.stack()\n frame = stack[1][0]\n return frame.f_locals.get('self', None)",
"def value(self):\n try:\n return self._local.stack[-1]\n except (AttributeError, IndexError):\n raise UnboundProxyError('object unbound')",
"def lookup_object(self, address):\n obj_id = self.source_map.get(address)\n if not obj_id:\n # Register invalid event source\n if self.source_map:\n self.invalid_sources[address] += 1\n return None\n return obj_id",
"def _getRef(self, label):\r\n\r\n ref = None\r\n\r\n for mapping in reversed(self.blscope):\r\n if label in mapping:\r\n ref = mapping[label]\r\n break\r\n\r\n if ref is None and label in self.fnscope:\r\n ref = self.fnscope[label]\r\n\r\n return ref",
"def GetHandle(self):\n return _XCAFDoc.XCAFDoc_Location_GetHandle(self)",
"def __current_object__(self):\n return self.__lookup()",
"def _get_current_object(self):\n loc = object.__getattribute__(self, '_Proxy__local')\n if not hasattr(loc, '__release_local__'):\n return loc(*self.__args, **self.__kwargs)\n try:\n return getattr(loc, self.__name__)\n except AttributeError:\n raise RuntimeError('no object bound to {0.__name__}'.format(self))",
"def assembly(self) -> servo.Assembly:\n return self._assembly",
"def lookup_obj(self,):\n return self._lookup_obj",
"def find_wrapper_by_native(cls, native):\n with cls._native_id_map_lock:\n return cls._native_id_to_wrapper_map[id(native)]",
"def getPyObj(self):\n return _SALOMERuntime.OutputCorbaPort_getPyObj(self)",
"def get_hookscript ( self, unsafe=False ):\n ref = self.hook_script_ref\n if ref is False:\n return None\n elif unsafe:\n return None if ref is None else ref.deref_unsafe()\n elif ref is None:\n raise roverlay.util.objects.ObjectDisappeared()\n else:\n return ref.deref_safe()",
"def get_instance (self):\n instances = self.data['instances']\n if not len(instances):\n raise Exception, \"ArchivalObject: No Instances found\"\n for instance in instances:\n # print json.dumps(instance, indent=3)\n try:\n instance['sub_container']['top_container']\n return instance\n except:\n pass\n return None",
"def get_address_value(obj):\n no_dynamic = obj.GetDynamicValue(lldb.eNoDynamicValues)\n \"\"\":type: lldb.SBValue\"\"\"\n address = no_dynamic.GetAddress()\n \"\"\":type: lldb.SBAddress\"\"\"\n address_value = address.GetFileAddress()\n \"\"\":type: int\"\"\"\n return address_value",
"def class_ref(self):\n return self._class_ref",
"def ref_ii(self):\n return self._ref_ii",
"def get_line_no(obj):\n try:\n lineno = getsourcelines(obj)[1]\n except:\n # no code found\n lineno = None\n return lineno",
"def _get_co_from_dump(data):\n # Read py2exe header\n current = struct.calcsize(b'iiii')\n metadata = struct.unpack(b'iiii', data[:current])\n\n # check py2exe magic number\n # assert(metadata[0] == 0x78563412)\n logging.info(\"Magic value: %x\", metadata[0])\n logging.info(\"Code bytes length: %d\", metadata[3])\n\n arcname = ''\n while six.indexbytes(data, current) != 0:\n arcname += chr(six.indexbytes(data, current))\n current += 1\n logging.info(\"Archive name: %s\", arcname or '-')\n\n code_bytes = data[current + 1:]\n return marshal.loads(code_bytes)",
"def breakpoint_find(self, addr):\n return self._dll.JLINKARM_FindBP(addr)",
"def circular_reference_count(obj: typing.Any) -> int:\r\n if np is not None:\r\n result = _numpy_circular_ref_count(obj)\r\n if result is not NotImplemented:\r\n return result\r\n return _get_circular_ref_count(obj)"
] | [
"0.540894",
"0.51332605",
"0.50047857",
"0.49513108",
"0.48807183",
"0.48727012",
"0.48692793",
"0.4843391",
"0.4826178",
"0.47773117",
"0.46998453",
"0.46970183",
"0.46879742",
"0.46757603",
"0.46634528",
"0.4658668",
"0.46533868",
"0.46437517",
"0.46167672",
"0.46072188",
"0.45897746",
"0.4572608",
"0.45692992",
"0.45640454",
"0.45610332",
"0.45553696",
"0.4506211",
"0.45057634",
"0.44977713",
"0.4494259"
] | 0.5453074 | 0 |
Provides access to a mesh body within the collection. | def item(self, index):
return ParaMeshBody() | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def nativeObject(self):\n return ParaMeshBody()",
"def get_mesh(self):\n return self.mesh",
"def getMesh(self):\n return self.mesh",
"def mesh(self):\n return self._mesh",
"def mesh(self):\n return self._mesh",
"def mesh(self):\n return self._mesh",
"def mesh(self):\n return self._mesh",
"def getFromComponent(self, component):\n return ParaMeshBodies()",
"def mesh(self):\n self._ensure_mesh()\n return self._mesh",
"def body_contacts(self, physics):\n return self.collect_contacts(physics, self._body_geom_ids)",
"def get_mesh_ids(self, body):\n with self.lock:\n return self.send_command('get_kinbody_link_mesh_ids ' + body.GetName())",
"def _final_mesh(self):\n assert (\n \"final_mesh\" in self.__dict__.keys()\n ), \"Final Mesh does not exist yet - please run multi-view optimization before getting\"\n return self.final_mesh",
"def body(self) -> Body:\n return self._body",
"def __init__(self, mesh):\n self._mesh = mesh",
"def load_body(data):\n\n name = data[\"name\"]\n parent = None\n if \"parent\" in data:\n parent = data[\"parent\"]\n texture = data[\"texture\"]\n basecolor = data[\"basecolor\"]\n radius = data[\"radius\"]\n axial_tilt = data[\"axial_tilt\"]\n sidereal_rotation_period = data[\"sidereal_rotation_period\"] * dts\n mass = data[\"mass\"]\n has_orbit = False\n orbit = None\n has_ring = False\n ring_texture = None\n ring_inner_radius = None\n ring_outer_radius = None\n\n if \"orbit\" in data:\n has_orbit = True\n orbit = load_orbit(data[\"orbit\"])\n if \"ring\" in data:\n ring_data = data[\"ring\"]\n has_ring = True\n ring_texture = ring_data[\"texture\"]\n ring_inner_radius = ring_data[\"radius\"][\"inner\"]\n ring_outer_radius = ring_data[\"radius\"][\"outer\"]\n\n body = None\n\n if has_orbit:\n body = OrbitingBody(None, name, texture, basecolor, radius, orbit, axial_tilt, sidereal_rotation_period, mass)\n if has_ring:\n body.renderer = OrbitingBodyWithRingRenderer()\n body = setup_ring_renderer(ring_texture, ring_inner_radius, ring_outer_radius, body)\n else:\n body = StationaryBody(None, name, texture, basecolor, radius, axial_tilt, sidereal_rotation_period, mass)\n\n body.parent_internal_name = parent\n return body",
"def collectionMeshes(collection):\n return [o for o in collection.all_objects if o.type == 'MESH']",
"def add(self, fullFilename, units, baseOrFormFeature):\n return ParaMeshBodyList()",
"def draw_body(node, body):\r\n\t\tx,y,z = body.getPosition()\r\n\t\tnode.setPosition(vector3df(x,y,z)*10)\r\n\t\tw,xx,yy,zz = body.getQuaternion()\r\n\t\tnode.setRotation(vector3df(degrees(xx), degrees(yy), degrees(zz)))\r\n\t\tif body.shape == \"box\":\r\n\t\t\tsx,sy,sz = body.boxsize\r\n\t\t\tnode.setScale(vector3df(sx,sy,sz))",
"def __init__(self, mesh: Mesh):\n self.mesh = mesh\n self.children = []",
"def name(self):\n return self.mesh.name",
"def name(self):\n return self.mesh.name",
"def makePhysicsBody(self):\n space = self.environment.space\n geom = GeomBox(space, self.dim)\n geom.setPosition(self.centerPos)\n geom.setCategoryBits(2)\n geom.setCollideBits(1)\n self.geomList = [geom]",
"def body(self):\n return self.getattr('body')",
"def getCurrentBMesh():\n # if there aren't active objects, return error\n if bpy.context.object is None:\n print(\"No object is selected!\")\n return None, None\n\n return bpy.context.object.data",
"def create_sat_body(self):\n\n # Dimensions of body\n SAT_SIZE = self.ANI_SCALE*self.SAT_SCALE*np.asarray(self.SAT_PROPS[\"Size\"])/2\n bx = SAT_SIZE[0]\n by = SAT_SIZE[1]\n bz = SAT_SIZE[2]\n\n # Create vertices in body frame\n ind = 0\n V = []\n for x in [-1, 1]:\n for y in [-1, 1]:\n for z in [-1, 1]:\n V.append((bx*x, by*y, bz*z))\n \n # Create faces\n F = [\n (0, 1, 3, 2),\n (4, 5, 7, 6),\n (0, 1, 5, 4),\n (2, 3, 7, 6),\n (0, 2, 6, 4),\n (1, 3, 7, 5)\n ]\n\n # Create building blocks of polydata\n sat = vtk.vtkPolyData()\n points = vtk.vtkPoints()\n polys = vtk.vtkCellArray()\n scalars = vtk.vtkFloatArray()\n\n # Load the point, cell and data attributes\n for i in range(len(V)):\n points.InsertPoint(i, V[i])\n for i in range(len(F)):\n polys.InsertNextCell(self.mkVtkIdList(F[i]))\n for i in range(len(V)):\n scalars.InsertTuple1(i, i)\n \n # Assign the pieces to the vtkPolyData.\n sat.SetPoints(points)\n del points\n sat.SetPolys(polys)\n del polys\n sat.GetPointData().SetScalars(scalars)\n del scalars\n\n # Mapper\n mapper = vtk.vtkPolyDataMapper()\n mapper.SetInputData(sat)\n mapper.ScalarVisibilityOff()\n\n # Actor\n actor = vtk.vtkActor()\n actor.SetMapper(mapper)\n actor.GetProperty().SetColor(0.5, 0.5, 0.5)\n actor.GetProperty().SetAmbient(0.5)\n actor.GetProperty().SetSpecular(1.0)\n actor.GetProperty().SetSpecularPower(5.0)\n actor.GetProperty().SetDiffuse(0.2)\n\n # Move to sat position\n actor.SetPosition(0, 0, -self.SAT_PROPS[\"Alt\"])\n\n return actor",
"def multibody(body):\n if len(body) > 1:\n return [\"begin\"] + body\n else:\n return body[0]",
"def get_body_extents(self, bodyName: str, shapeIdx: int = 0) -> np.ndarray:\n return self._sim.getBodyExtents(bodyName, shapeIdx)",
"def getRuptureAsMesh(self):\n rupture = Mesh(self._lon, self._lat, self._depth)\n return rupture",
"def HelioVector(body, time):\n if body == Body.Pluto:\n return _CalcPluto(time)\n\n if 0 <= body.value < len(_vsop):\n return _CalcVsop(_vsop[body.value], time)\n\n if body == Body.Sun:\n return Vector(0.0, 0.0, 0.0, time)\n\n if body == Body.Moon:\n e = _CalcEarth(time)\n m = GeoMoon(time)\n return Vector(e.x+m.x, e.y+m.y, e.z+m.z, time)\n\n if body == Body.EMB:\n e = _CalcEarth(time)\n m = GeoMoon(time)\n d = 1.0 + _EARTH_MOON_MASS_RATIO\n return Vector(e.x+(m.x/d), e.y+(m.y/d), e.z+(m.z/d), time)\n\n if body == Body.SSB:\n return _CalcSolarSystemBarycenter(time)\n\n raise InvalidBodyError()",
"def body(self):\n\n return self._body"
] | [
"0.6957442",
"0.6633259",
"0.6631735",
"0.6371821",
"0.6371821",
"0.6371821",
"0.6371821",
"0.63649315",
"0.62939316",
"0.5899391",
"0.57027155",
"0.5660068",
"0.56338304",
"0.55298495",
"0.5479331",
"0.547045",
"0.5432832",
"0.5420212",
"0.54182005",
"0.53654414",
"0.53654414",
"0.5362228",
"0.5354277",
"0.5318616",
"0.5305592",
"0.52732015",
"0.52675444",
"0.5261785",
"0.5245713",
"0.5239069"
] | 0.752903 | 1 |
Create object session from the app key, secret and type | def create_session(self):
try:
self.session = session.DropboxSession(APP_KEY, APP_SECRET, ACCESS_TYPE)
except Exception, e:
logger.error('Exception at create_session')
logger.debug('*' + sys.exc_info()[0]) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _createSessionObject(self, request):\n # Preload necessary data items\n user = endpoints.get_current_user()\n if not user:\n raise endpoints.UnauthorizedException('Authorization required')\n user_id = user.email()\n # Get the conference entity\n conf = _getEntityByWebsafeKey(request.websafeConferenceKey,\n 'Conference')\n # Ensure that the current user is the conference organizer\n if user_id != conf.organizerUserId:\n raise endpoints.UnauthorizedException(\n 'Only the conference organizer can create a new session')\n # Verify that the speaker exists\n speaker = _getEntityByWebsafeKey(request.websafeSpeakerKey, 'Speaker')\n # Ensure that the user submitted the required name property\n if not request.name:\n raise endpoints.BadRequestException(\n \"Session 'name' field required\")\n # Copy SessionForm/ProtoRPC Message into dict\n data = {\n field.name: getattr(request, field.name) for field in\n request.all_fields()\n }\n # Remove data that isn't destined for the Session entity\n del data['websafeConferenceKey']\n del data['websafeSpeakerKey']\n del data['websafeKey']\n # Add default values for those missing in the data model\n for df in SESSION_DEFAULTS:\n if data[df] in (None, []):\n data[df] = SESSION_DEFAULTS[df]\n # Ensure the string version of typeOfSession is what is stored\n # in the NDB model\n data['typeOfSession'] = str(data['typeOfSession'])\n # Convert date from string to Date object\n if data['date'] is not None:\n try:\n data['date'] = datetime.strptime(\n data['date'][:10], '%Y-%m-%d').date()\n except:\n raise endpoints.BadRequestException(\n \"Invalid 'date' value\")\n # Convert startTime from string to Time object\n if data['startTime'] is not None:\n try:\n data['startTime'] = datetime.strptime(\n data['startTime'], '%H:%M').time()\n except:\n raise endpoints.BadRequestException(\n \"Invalid 'startTime' value\")\n # Create Session\n session = Session(**data)\n session.conference = conf.key\n session.speaker = speaker.key\n session.put()\n # Add the session key to the speaker's sessions list\n speaker.sessions.append(session.key)\n speaker.put()\n # Add a task to task queue which checks if the speaker of this session\n # should be the new featured speaker\n taskqueue.add(params={'websafeSpeakerKey': request.websafeSpeakerKey,\n 'websafeConferenceKey': request.websafeConferenceKey},\n url='/tasks/update_featured_speaker'\n )\n # Return SessionForm object\n return self._copySessionToForm(session)",
"def create_session():\n app = Application.query.filter_by(id=request.json['app']).first()\n questionnaire = Questionnaire.query.filter_by(id=request.json['questionnaire']).first()\n expected_emotions = request.json['expected_emotions']\n\n # validate application type\n if not app:\n return {'status': 'error', 'message': 'Invalid application.'}, 400\n\n new_session = Session(app=app, expected_emotions=expected_emotions, questionnaire=questionnaire)\n\n db.session.add(new_session)\n db.session.commit()\n\n result = session_schema.dump(new_session).data\n\n return jsonify({'status': 'success', 'message': 'Created new session for application with id of {}.'.format(request.json['app']), 'data': result}), 201",
"def create_session(self, **params):\n raise NotImplementedError('Should be implemented by a sub-class.')",
"def create_session(credentials):\n if type(credentials) == dict:\n pass\n elif type(credentials) == str:\n credentials = json.loads(credentials)\n else:\n credentials = json.load(credentials)\n\n session = Session(aws_access_key_id = credentials[\"aws_access_key\"],\n aws_secret_access_key = credentials[\"aws_secret_key\"],\n region_name = credentials.get('aws_region', const.REGION))\n return session",
"def generate_temp_session(self):\n return OAuth1Session(client_key=self.public_key,\n client_secret=self.private_key,\n resource_owner_key=self.token,\n resource_owner_secret=self.token_secret)",
"def __init__(self):\n self.application_id = None\n self.secret = None\n self.token = {}",
"def test_new_session_create_with_auth_json(self):\n\n with self.app_sess1 as c:\n data = {\n \"token\": \"pretend_token\"\n }\n ret1 = c.post('/', data=json.dumps(data), headers={'Content-Type': 'application/json'})\n ret2 = c.get('/', headers={'X-Auth-Token': 'pretend_token'})\n\n self.assertEqual(ret1.data, ret2.data)",
"def test_new_session_created_with_auth_json_no_cookie(self):\n\n with self.app_sess1 as c:\n data = {\n \"token\": \"pretend_token\"\n }\n ret = c.post('/', data=json.dumps(data), headers={'Content-Type': 'application/json'})\n self.assertNotIn('Set-Cookie', ret.headers)",
"async def session(self, request):\n body = await api_validate(SCHEMA_SESSION, request)\n self._check_password(body)\n\n # check TOTP\n if self.config.security_totp:\n totp = pyotp.TOTP(self.config.security_totp)\n if body[ATTR_TOTP] != totp.now():\n raise RuntimeError(\"Invalid TOTP token!\")\n\n # create session\n valid_until = datetime.now() + timedelta(days=1)\n session = hashlib.sha256(os.urandom(54)).hexdigest()\n\n # store session\n self.config.add_security_session(session, valid_until)\n return {ATTR_SESSION: session}",
"def create_boto_session(account):\n aws_access_key_id = account['aws_access_key_id']\n aws_secret_access_key = account['aws_secret_access_key']\n region = account['region']\n #aws_profile = account['aws_profile']\n\n\n session = boto3.Session(\n aws_access_key_id=aws_access_key_id,\n aws_secret_access_key=aws_secret_access_key,\n region_name=region,\n #profile_name=aws_profile,\n )\n\n return session",
"def __init__(self, access_key, secret_key, **kwargs):\r\n pass",
"def create_session(\n path: str,\n type: str,\n name: Optional[str] = None,\n kernel_name: Optional[str] = None,\n kernel_id: Optional[str] = None,\n) -> str:\n ...",
"def create_session(obj):\n session = requests.Session()\n if obj.user is not None and obj.password is not None:\n session.auth = (obj.user, obj.password)\n\n # Proxy setup\n if obj.proxy is not None:\n proxy = '%s://%s:%s' % (translate_proxy_scheme(obj.proxy_type),\n obj.proxy_host, obj.proxy_port)\n session.proxies = {'http': proxy, 'https': proxy}\n\n # Emulate curl's way of handling SSL\n if obj.cainfo is not None:\n # CA certificates\n session.verify = obj.cainfo\n if obj.sslcert is not None:\n # Client certificate\n session.cert = obj.sslcert\n if obj.verifypeer is not None and not obj.verifypeer:\n # Disable certificate validation\n session.verify = False\n if obj.verifyhost is not None and not obj.verifyhost:\n # Check the certificate, but do not verify that the hostname matches it.\n session.mount('https://', HostNameIgnoringAdapter())\n else:\n # Setup the retry strategy\n session.mount('https://', HTTPAdapter(max_retries=retries))\n # setup retry strategy for http connections\n session.mount('http://', HTTPAdapter(max_retries=retries))\n\n return session",
"def create_session(aq_instance):\n dirname = os.path.dirname(__file__)\n filename = os.path.join(dirname, 'secrets.json')\n\n with open(filename) as f:\n secrets = json.load(f)\n\n credentials = secrets[aq_instance]\n session = AqSession(\n credentials[\"login\"],\n credentials[\"password\"],\n credentials[\"aquarium_url\"]\n )\n\n msg = \"Connected to Aquarium at {} using pydent version {}\"\n print(msg.format(session.url, str(__version__)))\n\n me = session.User.where({'login': credentials['login']})[0]\n print('Logged in as {}\\n'.format(me.name))\n\n return session",
"def req_session():\n request = Request()\n session = PoorSession(request.secret_key)\n session.data['test'] = True\n session.write()\n request.cookies = session.cookie\n return request",
"def create_session():\n with open(CONFIG_PATH) as config_file:\n config_json = json.load(config_file)\n return boto3.Session(\n aws_access_key_id=config_json['awsAccessKeyId'],\n aws_secret_access_key= config_json['awsSecretAccessKey'],\n region_name=config_json['awsRegionName']\n )",
"def create_session(self,session_id,host_id,host_name,spotify_token):\n self.sessions[session_id] = {\n \"HOST\" : {\n \"ID\" : host_id,\n \"NAME\" : host_name,\n \"spotify_token\" : spotify_token,\n \"spotify_player\": None,\n },\n \"queue\" : [],\n \"queue_lock\" : False,\n \"current_track\" : \"\",\n \"previous_track\" : \"\",\n \"USERS\" : {}\n }",
"def test_ctor_no_cookie(self):\n request = self._make_request()\n session = self._makeOne(request)\n session_dict = session.managed_dict\n self.assertDictEqual(session_dict, {})\n self.assertIs(session.new, True)",
"def _create_session(self) -> Session:\n session = Session()\n\n # Sets the client side and server side SSL cert verification, if provided as properties.\n if ssl_config := self.properties.get(SSL):\n if ssl_ca_bundle := ssl_config.get(CA_BUNDLE): # type: ignore\n session.verify = ssl_ca_bundle\n if ssl_client := ssl_config.get(CLIENT): # type: ignore\n if all(k in ssl_client for k in (CERT, KEY)):\n session.cert = (ssl_client[CERT], ssl_client[KEY])\n elif ssl_client_cert := ssl_client.get(CERT):\n session.cert = ssl_client_cert\n\n # If we have credentials, but not a token, we want to fetch a token\n if TOKEN not in self.properties and CREDENTIAL in self.properties:\n self.properties[TOKEN] = self._fetch_access_token(session, self.properties[CREDENTIAL])\n\n # Set Auth token for subsequent calls in the session\n if token := self.properties.get(TOKEN):\n session.headers[AUTHORIZATION_HEADER] = f\"{BEARER_PREFIX} {token}\"\n\n # Set HTTP headers\n session.headers[\"Content-type\"] = \"application/json\"\n session.headers[\"X-Client-Version\"] = ICEBERG_REST_SPEC_VERSION\n session.headers[\"User-Agent\"] = f\"PyIceberg/{__version__}\"\n\n # Configure SigV4 Request Signing\n if str(self.properties.get(SIGV4, False)).lower() == \"true\":\n self._init_sigv4(session)\n\n return session",
"def create_device():\n sonyapilib.device.TIMEOUT = 0.1\n device = SonyDevice(\"test\", \"test\")\n device.api_version = 3\n device.cookies = jsonpickle.decode(read_file(\"data/cookies.json\"))\n return device",
"def get_session():\n\n jwt_secret = base64.urlsafe_b64decode(os.getenv('AUTH0_CLIENT_SECRET'))\n claims = {\n 'sub': 'rf|airflow-user',\n 'iat': datetime.utcnow(),\n 'exp': datetime.utcnow() + timedelta(hours=3)\n }\n encoded_jwt = jwt.encode(claims, jwt_secret, algorithm='HS256')\n session = requests.Session()\n\n session.headers.update({'Authorization': 'Bearer {}'.format(encoded_jwt)})\n return session",
"def __init__(self):\r\n # create a session id\r\n self.session = ViSession()",
"def _create_session(self):\n self.session = requests.Session() # pragma: no cover\n self.session.headers[\"Accept\"] = \"application/json\" # pragma: no cover\n if self.user: # pragma: no cover\n self.session.auth = (self.user, self.cred) # pragma: no cover",
"def create(id = None, expires=None):\n\n\t# Init the data\n\tdData = {}\n\n\t# If we have an expires time\n\tif expires:\n\t\tdData['__expire'] = expires\n\n\t# Create a new Session using a UUID as the id\n\treturn _Session(id and id or uuid.uuid4().hex, dData)",
"async def create(self, session, *, dc=None):\n response = await self._api.put(\n \"/v1/session/create\",\n data=session,\n params={\"dc\": dc})\n return response.body",
"def _create_new_session_token(self):\n session_token = self.__generate_session_token()\n payload = {\n 'token' : session_token\n }\n self.encoded_token = jwt.encode(payload, 'secret', algorithm='HS256')\n Token.objects.create(session_token=session_token)",
"def make_new_session():\n session = Session.objects.create(uuid=str(uuid4()), container_id=None)\n return session.id",
"def _new_session(self, username_key=None, **attributes):\n for key in ['username', 'token', 'tenant_id']:\n if attributes.get(key, None) is None:\n attributes[key] = key + \"_\" + text_type(uuid4())\n if 'expires' not in attributes:\n attributes['expires'] = (\n datetime.utcfromtimestamp(self._clock.seconds())\n + timedelta(days=1)\n )\n session = Session(**attributes)\n if username_key is None:\n username_key = session.username\n self._username_to_token[username_key] = session.token\n self._token_to_session[session.token] = session\n self._tenant_to_token[session.tenant_id] = session.token\n return session",
"def create(self):\n id_access_secretkey = uuid.uuid4()\n id_webuser = Base.logged_id_webuser or None\n keys = Token().generate_secretkey(config.PACKAGE_NAME)\n\n with Database() as db:\n db.insert(Table(id_access_secretkey, id_webuser, config.PACKAGE_NAME,\n keys['randomkey'], keys['secretkey']))\n db.commit()\n\n return {\n 'secretkey': keys['secretkey'],\n 'message': 'access secretkey successfully created'\n }",
"def __init__(self):\n #print (\"Object created\")\n self.apikey='acc_4fc1a435b3188b5'\n self.secret = 'f49c4be14a048d5de7e7f6c564b52022'\n self.fileToIdMap = {}"
] | [
"0.6261222",
"0.6257226",
"0.6163003",
"0.6113781",
"0.6092789",
"0.6075058",
"0.59492284",
"0.58802646",
"0.58801895",
"0.5873072",
"0.58676744",
"0.58611274",
"0.5855583",
"0.585542",
"0.58453065",
"0.58350897",
"0.58148146",
"0.57952935",
"0.5793354",
"0.5761899",
"0.57494754",
"0.5746524",
"0.5704121",
"0.56940573",
"0.56899214",
"0.5652502",
"0.56336385",
"0.5615905",
"0.56151795",
"0.56061524"
] | 0.6547813 | 0 |
Obtains an authorization URL; after authorization, creates an access token and builds an instance of the Dropbox client. Creates the metadata cache. | def create_access_token(self):
        # Wrapper for also caching invalid results
        #def getMetadataRofs(path):
        #    try:
        #        return self.client.metadata(path)
        #    except Exception, e:
        #        log.write('Exception at getMetadataRofs for path ' + path + '\n')
        #        pprint(e, log)
        #        return False
        try:
            request_token = self.session.obtain_request_token()
            url = self.session.build_authorize_url(request_token)
            print url
            raw_input()
            access_token = self.session.obtain_access_token(request_token)
            self.client = client.DropboxClient(self.session)
            # Build cache for metadata querying
            # Wrapper for also caching invalid results
            def getMetadataRofs(path):
                try:
                    return self.client.metadata(path)
                except Exception, e:
                    logger.error('Exception at getMetadataRofs for path ' + path + '\n')
                    logger.debug(sys.exc_info()[0])
                    return False
            self.cache_metadata = Cache(getMetadataRofs)
            self.cache_files = {}
        except Exception, e:
            logger.error('Exception %s at create_access_token' % (sys.exc_info()[0]))
            logger.debug(pformat(sys.exc_info())) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def build_client(config, auth_token = None):\n if auth_token:\n pass\n\n elif not auth_token and config.get(\"auth_token\"):\n auth_token = config.get(\"auth_token\")\n\n elif not auth_token and not config.get(\"auth_token\"):\n auth_token, config = start_auth_flow(config)\n\n __log__.debug(\"Creating the dropbox client!\")\n client = DropboxClient(auth_token)\n __log__.debug(\"Successfully created client!\")\n\n # Put the information on a copy of config object\n configClone = config.copy()\n configClone.update({\n \"auth_token\": auth_token,\n \"client\": client,\n })\n\n return (client, configClone)",
"def __init__(self, credentials):\n self.credentials = credentials\n http = httplib2.Http()\n http = self.credentials.authorize(http)\n self.service = build(\"drive\", \"v2\", http=http)",
"def __init__(self):\n self.authurl = Config().auth\n self.baseurl = Config().api\n self.s = Session()\n self.s.headers = {'Accept': 'application/json'}\n data = {\"grant_type\": \"client_credentials\", \"scope\": \"/read-public\", \"client_id\": Config().client_id,\n \"client_secret\": Config().client_secret}\n r = self.s.request(method=\"post\", url=self.authurl, data=data)\n self.s.headers = {'Accept': 'application/json', \"Access token\": r.json()[\"access_token\"]}",
"def __init__(self, authorization_url, token_url, token_refresh_url, client_id, xapi_key,\n local_server_address=LOCAL_OAUTH_SERVER_URL):\n\n # Grab the client info needed\n self.local_server_address = local_server_address\n self.authorization_url = authorization_url\n self.token_url = token_url\n self.token_refresh_url = token_refresh_url\n self.client_id = client_id\n self.xapi_key = xapi_key",
"def init_api(self):\n creds = None\n # The file token.pickle stores the user's access and refresh tokens, and is\n # created automatically when the authorization flow completes for the first\n # time.\n if os.path.exists(self.gdrive_config.TOKEN_PICK_PATH):\n with open(self.gdrive_config.TOKEN_PICK_PATH, 'rb') as token:\n creds = pickle.load(token)\n # If there are no (valid) credentials available, let the user log in.\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file(\n self.gdrive_config.CREDENTIAL_PATH, self.gdrive_config.SCOPES)\n creds = flow.run_local_server(port=0)\n # Save the credentials for the next run\n with open(self.gdrive_config.TOKEN_PICK_PATH, 'wb') as token:\n pickle.dump(creds, token)\n\n service = build('drive', 'v3', credentials=creds)\n return service",
"def authorize(self, oauth2_token):\r\n storage = file.Storage(oauth2_token)\r\n credentials = storage.get()\r\n http = credentials.authorize(httplib2.Http())\r\n self.service = discovery.build('youtube', 'v3', http=http)",
"def __init__(self, api_version=_BIGQUERY_API_VERSION):\n self.http = httplib2.Http(cache=memcache)\n self.service = discovery.build('bigquery',\n api_version,\n http=self.http,\n discoveryServiceUrl=DISCOVERY_URL)\n if _CREDENTIALS is None:\n raise BigQueryClientException(\n 'Needed Credentials are missing from this source code!')\n credentials = Credentials.new_from_json(_CREDENTIALS)\n logging.info('Authorizing...')\n self.http = credentials.authorize(self.http)",
"def create_api_client(base_path, access_token):\n api_client = ApiClient()\n api_client.host = base_path\n api_client.set_default_header(header_name=\"Authorization\",\n header_value=f\"Bearer {access_token}\")\n return api_client",
"def auth(self):\n auth = OAuthHandler(self._api_key, self._api_secret)\n auth.set_access_token(self._access_token, self._access_secret)\n return auth",
"def create_access_token(oauth):\n #create parameters for API authorization\n\tredirect_uri = 'oob'\n\tparams = {'client_secret': oauth.client_secret,\n\t\t\t 'redirect_uri': redirect_uri,\n\t\t\t 'response_type': 'code'}\n\t#store the access code\n\turl = oauth.get_authorize_url(**params)\n\n\t#open a web browser to get access token and then store it via manual input\n\twebbrowser.open(url)\n\tcode = input('Enter code: ')\n\t#create credentials item\n\tstart_time = time.time()\n\t#create dictionary to hold credentials and store beginning time\n\tcredentials = {'token_time': start_time}\n\n\t#NEED TO ADD IN 'REFRESH TOKEN' FUNCTION HERE SOMEWHERE\n\t#\n\t\n\t#create parameters\n\tdata = {'code': code,\n\t\t\t'redirect_uri': redirect_uri,\n\t\t\t'grant_type': 'authorization_code'}\n\t#build the headers\n\theaders = oauth_headers(oauth)\n\t#create the raw access token\n\traw_access = oauth.get_raw_access_token(data=data, headers=headers)\n\t#parse the raw access token and add to credentials variable\n\tcredentials.update(access_parse(raw_access))\n\n\t#parse access token from credentials\n\taccess_token = credentials['access_token']\n\t#return access token\n\treturn access_token",
"def initialize_oauth2_session(self):\n\n def token_updater(token):\n \"\"\"Stores oauth2 token on disk\"\"\"\n try:\n with open(self.OAUTH_TOKEN_PATH, 'w') as f:\n json.dump(token, f)\n except Exception as err:\n log.Error('Could not save the OAuth2 token to %s. This means '\n 'you may need to do the OAuth2 authorization '\n 'process again soon. Original error: %s' % (\n self.OAUTH_TOKEN_PATH, err))\n\n token = None\n try:\n with open(self.OAUTH_TOKEN_PATH) as f:\n token = json.load(f)\n except IOError as err:\n log.Notice('Could not load OAuth2 token. '\n 'Trying to create a new one. (original error: %s)' % err)\n\n self.http_client = OAuth2Session(\n self.CLIENT_ID,\n scope=self.OAUTH_SCOPE,\n redirect_uri=self.OAUTH_REDIRECT_URL,\n token=token,\n auto_refresh_kwargs={\n 'client_id': self.CLIENT_ID,\n 'client_secret': self.CLIENT_SECRET,\n },\n auto_refresh_url=self.OAUTH_TOKEN_URL,\n token_updater=token_updater)\n\n if token is not None:\n self.http_client.refresh_token(self.OAUTH_TOKEN_URL)\n\n endpoints_response = self.http_client.get(self.metadata_url +\n 'account/endpoint')\n if endpoints_response.status_code != requests.codes.ok:\n token = None\n\n if token is None:\n if not sys.stdout.isatty() or not sys.stdin.isatty():\n log.FatalError('The OAuth2 token could not be loaded from %s '\n 'and you are not running duplicity '\n 'interactively, so duplicity cannot possibly '\n 'access Amazon Drive.' % self.OAUTH_TOKEN_PATH)\n authorization_url, _ = self.http_client.authorization_url(\n self.OAUTH_AUTHORIZE_URL)\n\n print('')\n print('In order to allow duplicity to access Amazon Drive, please '\n 'open the following URL in a browser and copy the URL of the '\n 'page you see after authorization here:')\n print(authorization_url)\n print('')\n\n redirected_to = (raw_input('URL of the resulting page: ')\n .replace('http://', 'https://', 1)).strip()\n\n token = self.http_client.fetch_token(\n self.OAUTH_TOKEN_URL,\n client_secret=self.CLIENT_SECRET,\n authorization_response=redirected_to)\n\n endpoints_response = self.http_client.get(self.metadata_url +\n 'account/endpoint')\n endpoints_response.raise_for_status()\n token_updater(token)\n\n urls = endpoints_response.json()\n if 'metadataUrl' not in urls or 'contentUrl' not in urls:\n log.FatalError('Could not retrieve endpoint URLs for this account')\n self.metadata_url = urls['metadataUrl']\n self.content_url = urls['contentUrl']",
"def get_http_client():\n store = file.Storage(TOKEN_STORE_FILE)\n creds = store.get()\n if not creds or creds.invalid:\n flow = client.flow_from_clientsecrets(CLIENT_ID_FILE, SCOPES)\n creds = tools.run_flow(flow, store)\n return creds.authorize(Http())",
"def make_oauth_client(base_url) -> requests.Session:\n config_file = os.path.join(os.environ['HOME'], '.allurarc')\n cp = ConfigParser()\n cp.read(config_file)\n\n REQUEST_TOKEN_URL = base_url + '/rest/oauth/request_token'\n AUTHORIZE_URL = base_url + '/rest/oauth/authorize'\n ACCESS_TOKEN_URL = base_url + '/rest/oauth/access_token'\n oauth_key = option(cp, base_url, 'oauth_key',\n 'Forge API OAuth Consumer Key (%s/auth/oauth/): ' % base_url)\n oauth_secret = option(cp, base_url, 'oauth_secret',\n 'Forge API Oauth Consumer Secret: ')\n\n try:\n oauth_token = cp.get(base_url, 'oauth_token')\n oauth_token_secret = cp.get(base_url, 'oauth_token_secret')\n except NoOptionError:\n oauthSess = OAuth1Session(oauth_key, client_secret=oauth_secret, callback_uri='oob')\n request_token = oauthSess.fetch_request_token(REQUEST_TOKEN_URL)\n pin_url = oauthSess.authorization_url(AUTHORIZE_URL, request_token['oauth_token'])\n if isinstance(webbrowser.get(), webbrowser.GenericBrowser):\n print(\"Go to %s\" % pin_url)\n else:\n webbrowser.open(pin_url)\n oauth_verifier = input('What is the PIN? ')\n access_token = oauthSess.fetch_access_token(ACCESS_TOKEN_URL, oauth_verifier)\n oauth_token = access_token['oauth_token']\n oauth_token_secret = access_token['oauth_token_secret']\n\n cp.set(base_url, 'oauth_token', oauth_token)\n cp.set(base_url, 'oauth_token_secret', oauth_token_secret)\n # save oauth token for later use\n cp.write(open(config_file, 'w'))\n print(f'Saving oauth tokens in {config_file} for later re-use')\n print()\n\n else:\n oauthSess = OAuth1Session(oauth_key, client_secret=oauth_secret,\n resource_owner_key=oauth_token, resource_owner_secret=oauth_token_secret)\n\n return oauthSess",
"def create_oauth2_access_token(self):\n if not isinstance(self.session, DropboxSession):\n raise ValueError(\"This call requires a DropboxClient that is configured with an \"\n \"OAuth 1 access token.\")\n url, params, headers = self.request(\"/oauth2/token_from_oauth1\", method='POST')\n\n r = self.rest_client.POST(url, params, headers)\n return r['access_token']",
"def __init__(self):\n AUTHORIZATION_SCOPES = [\n 'https://www.googleapis.com/auth/devstorage.full_control']\n\n creds = None\n # The file token.pickle stores the user's access and refresh tokens, and is\n # created automatically when the authorization flow completes for the first\n # time.\n if os.path.exists('token_cloud_storage.pickle'):\n with open('token_cloud_storage.pickle', 'rb') as token:\n creds = pickle.load(token)\n\n # If not successful, collecting new Token to access Calendar\n if not creds or not creds.valid:\n if creds and creds.expired and creds.refresh_token:\n creds.refresh(Request())\n else:\n flow = InstalledAppFlow.from_client_secrets_file(\n 'credentials_google.json', AUTHORIZATION_SCOPES)\n creds = flow.run_local_server(port=0)\n\n # Save the credentials for the next run\n with open('token_cloud_storage.pickle', 'wb') as token:\n pickle.dump(creds, token)\n\n # Instantiates a client\n self.storage_client = storage.Client(\n project=\"piot-assignment2-287110\", credentials=creds)\n\n # Connect to bucket on Cloud Storage\n self.bucket = self.storage_client.get_bucket(\"facial_img\")",
"def __init__(self, credentials):\n http = credentials.authorize(httplib2.Http())\n self.service = googleapiclient.discovery.build(\"drive\", \"v2\", http=http)",
"def __get_client(self):\n flow = flow_from_clientsecrets(self.secrets_file,\n message=self.MISSING_SECRETS_MSG,\n scope=self.YOUTUBE_READ_WRITE_SCOPE)\n\n # TODO: Fix this! Came with boilerplate\n storage = Storage(\"%s-oauth2.json\" % sys.argv[0])\n credentials = storage.get()\n\n if credentials is None or credentials.invalid:\n flags = argparser.parse_args()\n credentials = run_flow(flow, storage, flags)\n\n youtube = build(self.YOUTUBE_API_SERVICE_NAME,\n self.YOUTUBE_API_VERSION,\n http=credentials.authorize(httplib2.Http()))\n return youtube",
"def setup_oauth():\n # Request token\n oauth = OAuth1(CONSUMER_KEY, client_secret=CONSUMER_SECRET)\n r = requests.post(url=REQUEST_TOKEN_URL, auth=oauth)\n credentials = parse_qs(r.content)\n\n resource_owner_key = credentials.get('oauth_token')[0]\n resource_owner_secret = credentials.get('oauth_token_secret')[0]\n\n # Authorize\n authorize_url = AUTHORIZE_URL + resource_owner_key\n print 'Please go here and authorize: ' + authorize_url\n\n verifier = raw_input('Please input the verifier: ')\n oauth = OAuth1(CONSUMER_KEY,\n client_secret=CONSUMER_SECRET,\n resource_owner_key=resource_owner_key,\n resource_owner_secret=resource_owner_secret,\n verifier=verifier)\n\n # Finally, Obtain the Access Token\n r = requests.post(url=ACCESS_TOKEN_URL, auth=oauth)\n credentials = parse_qs(r.content)\n token = credentials.get('oauth_token')[0]\n secret = credentials.get('oauth_token_secret')[0]\n\n return token, secret",
"def __init__(self, access_token, base_url=\"https://api.crowdstrike.com\"):\n self.headers = {'Authorization': 'Bearer {}'.format(access_token)}\n self.base_url = base_url",
"def _create_auth(self, auth_uri, username, password): # pylint: disable=no-self-use\n return authentication.SASTokenAuth.from_shared_access_key(auth_uri, username, password)",
"def __init__(self, client_id: str, client_secret: str, access_token_publish_url: str, access_token: str = None):\n\n self.client_id = client_id\n self.client_secret = client_secret\n self.access_token_publish_url = access_token_publish_url\n self.api_base_url = 'https://api.ce-cotoha.com/api/dev/'\n\n if access_token is not None:\n self.access_token = access_token\n else:\n self.access_token = self.update_access_token()",
"def __init__(self, access_token, base_url='https://api.crowdstrike.com'):\n self.headers = { 'Authorization': 'Bearer {}'.format(access_token) }\n self.base_url = base_url",
"def __init__(self, access_token_cache, account_id, credentials):\n super(AccessTokenStore, self).__init__(lock=None)\n self._access_token_cache = access_token_cache\n self._account_id = account_id\n self._credentials = credentials",
"def auth(access_token, access_token_secret, consumer_key, consumer_secret):\n auth = tweepy.OAuthHandler(consumer_key, consumer_secret)\n auth.set_access_token(access_token,access_token_secret)\n return auth",
"def request_access_token():\n\n # For Private application authentication, you must specifiy\n # grant_type=client_credentials and the service scope. For the \n # Content API, scope=contentapi\n post_data = {\"grant_type\": APP_CONFIG['GRANT_TYPE'],\n \"scope\": APP_CONFIG['SCOPE']}\n post_data_string = json.dumps(post_data)\n\n # Construct authentication string:\n # 1. Concatenate the client id, a colon character \":\", and the client secret into a single string\n # 2. URL encode the string from step 1\n # 3. Base64 encode the string from step 2\n authstr = to_native_string(\n b64encode(('%s:%s' % (APP_CONFIG['CLIENT_ID'], APP_CONFIG['CLIENT_SECRET'])).encode('utf-8'))).strip()\n\n # Construct an Authorization header with the value of 'Basic <base64 encoded auth string>'\n headers = {\n \"Content-Type\": \"application/json;charset=UTF-8\",\n \"Accept\": \"application/json\",\n \"Authorization\": \"Basic \" + authstr\n }\n\n r = s.post(APP_CONFIG['OAUTH_TOKEN_URL'], data=post_data_string, headers=headers, verify=(app.config['SSLVERIFY'] == 'True'))\n\n if r.status_code in (400,500):\n\n # Handle known error\n result = r.json() \n return jsonify(result)\n\n elif r.status_code == 200:\n\n result = r.json() \n access_token = result['access_token']\n token_type = result['token_type']\n timestamp = result.get('timestamp', None)\n expires_in = result.get('expires_in', None)\n token_expiry = None\n if expires_in is not None:\n token_expiry = datetime.datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S')\n token_expiry = token_expiry + datetime.timedelta(seconds=expires_in)\n token_expiry = token_expiry.isoformat()\n\n html = '<pre>';\n html += '<h3>Successfully retrieved access token!</h3>' \n html += '<pre>';\n html += 'access_token : ' + access_token\n html += '<pre>';\n html += 'token_type : ' + token_type\n html += '<pre>';\n html += 'expires_in (sec) : ' + str(expires_in)\n html += '<pre>';\n html += 'token_expiry : ' + token_expiry\n html += '<pre>';\n html += 'timestamp : ' + timestamp\n\n html += '<pre>';\n html += '<h3>Query Content API with Access Token</h3>'\n html += '<pre>';\n html += '<a href=\"/query-collection-myhuman?access_token='+access_token+'\">Query Collection: myhuman</a>'\n\n return html\n\n else:\n # Handle unknown error\n return (r.text, r.status_code, r.headers.items())",
"def __init__(self, base_url, client_id, client_secret, client_scope, api_json = None):\n # type: (str, str, str, str, str) -> None\n\n self.base_url = base_url\n self.client_id = client_id\n self.client_secret = client_secret\n self.client_scope = client_scope\n\n # If the user doesn't pass an alternate API file use the included one\n if not api_json:\n api_json = pkg_resources.resource_filename(__name__, 'apis.json')\n\n with open(api_json, encoding='utf-8') as api_file:\n apis = json.loads(api_file.read())\n\n if client_scope in apis: \n api = apis.get(client_scope)\n self.token_url = api.get('token_url')\n self.api_call = sleep_and_retry(limits(calls=api.get('limits_calls'), period=api.get('limits_period'))(self._api_call))\n self.access_token = self.get_access_token(self.token_url)\n else: \n raise Exception(f\"Scope {client_scope} not in known API dict\")",
"def __init__(self, adapter, config, createToken=None):\r\n self._adapter = adapter\r\n self.oauthToken = None\r\n\r\n #make sure their request implementation matches our adapter\r\n if not hasattr(adapter, \"getRequest\"):\r\n raise TypeError(\"Your http request implementation is missing the getRequest method\")\r\n if not hasattr(adapter, \"postRequest\"):\r\n raise TypeError(\"Your http request implementation is missing the postRequest method\")\r\n if not hasattr(adapter, \"deleteRequest\"):\r\n raise TypeError(\"Your http request implementation is missing the deleteRequest method\")\r\n if not hasattr(adapter, \"putRequest\"):\r\n raise TypeError(\"Your http request implementation is missing the putRequest method\")\r\n\r\n self._config = config\r\n self._oauth = OAuth(config, self._adapter)\r\n\r\n if createToken is not None:\r\n self.createAccessToken = createToken\r\n else:\r\n self.createAccessToken = self.createAccessTokenReplacement()",
"def authenticate():\n\n # We are uploading and then downloading so we want Musicmanager\n api = Musicmanager()\n\n # Attempt to authenticate and log in\n logged_in = api.login()\n\n # If login() returns false, you have not performed oauth yet, or did not\n # write your credentials to your disk. Using oauth allows authentication\n # without providing plaintext credentials to the application\n if not logged_in:\n print('No oauth credentials found, please authenticate your account')\n\n # Performs oauth and stores generated credentials to Appdirs \n # 'user_data_dir' by default. oauth only needs to be performed once per \n # machine if the credentials are stored, which is the default behavior.\n authenticated = api.perform_oauth(open_browser=True)\n else:\n print('Successfully logged in.\\n')\n\n return api",
"def _get_access_token(self):\n\n self._access_token = None\n if not self._refresh_token:\n raise ValueError(\"Refresh Token not set\")\n\n doc = minidom.Document()\n root = doc.createElement('tokenAuthRequest')\n doc.appendChild(root)\n aki = doc.createElement('accessKeyId')\n aki.appendChild(doc.createTextNode(self.publicAccessKey))\n root.appendChild(aki)\n pak = doc.createElement('privateAccessKey')\n pak.appendChild(doc.createTextNode(self.privateAccessKey))\n root.appendChild(pak)\n rt = doc.createElement('refreshToken')\n rt.appendChild(doc.createTextNode(self._refresh_token))\n root.appendChild(rt)\n data = doc.toprettyxml()\n\n resp = requests.post(BASE_URL + \"authorization\", data=data, headers=self._default_headers, verify=False)\n if resp.status_code >= 300:\n raise Exception(\"Failed to claim access token: {}\".format(resp))\n\n vals = etree_to_dict(ET.XML(resp.content.decode('utf-8')))\n\n self._access_token = resp.headers.get('Location', None)\n if not self._access_token:\n raise ValueError(\"Unable to get access token\")\n\n self._user_id = os.path.basename(vals.get('authorization').get('user'))\n\n # Always set the expiry 30 minutes from now so we dont have to deal with parsing timezones\n # self._access_token_expiry = dateutil_parser.parse(vals.get('authorization').get('expiration'))\n self._access_token_expiry = datetime.datetime.utcnow() + datetime.timedelta(minutes=30)",
"def acquire_token(client_secrets, oauth2_token, flags):\r\n scope = ['https://www.googleapis.com/auth/youtube.upload']\r\n message = (\"Please specify a valid client_secrets.json file.\\n\"\r\n \"For instructions to obtain one, please visit:\\n\"\r\n \"https://docs.google.com/document/d/1ro9I8jnOCgQlWRRVCPbrNnQ5-bMvQxDVg6o45zxud4c/edit\")\r\n flow = client.flow_from_clientsecrets(client_secrets, scope=scope, message=message)\r\n storage = file.Storage(oauth2_token)\r\n tools.run_flow(flow, storage, flags)"
] | [
"0.6216887",
"0.60293674",
"0.5986677",
"0.58676577",
"0.58599424",
"0.58420867",
"0.58145756",
"0.5771057",
"0.56834906",
"0.5679991",
"0.5664196",
"0.5645943",
"0.56381226",
"0.56176925",
"0.5562125",
"0.55257547",
"0.55207974",
"0.5509377",
"0.55081314",
"0.5507393",
"0.5487566",
"0.5470494",
"0.54471093",
"0.54362106",
"0.54242516",
"0.5412808",
"0.54095817",
"0.5408706",
"0.5386615",
"0.5386596"
] | 0.73717004 | 0 |
Downloads the file given by path and writes using the file descriptor out | def downloadFile(self, path, out):
        try:
            logger.info("downloadFile('%s', ...)" % (path))
            # Downloads from dropbox
            # Manually :( update the metadata cache
            f, metadata = self.client.get_file_and_metadata(path)
            f = f.read()
            logger.info('* file downloaded')
            self.cache_metadata.setNewValue(path, metadata)
            logger.info('* metadata updated')
            # Write to tmp file and close
            os.write(out, f)
            logger.info("* file written")
            os.close(out)
            logger.info('* file closed')
            return True
        except Exception, e:
            logger.error("Exception %s at downloadFile(%s)" % (sys.exc_info()[0], path))
            logger.debug(pformat(sys.exc_info()))
            return False | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def download_file(self, url, path):\n print('\\tDownloading: ', path)\n with open(path, 'w') as outfile:\n try:\n response = self._http_client.get(url)\n outfile.write(response.text)\n finally:\n response.close()\n outfile.close()\n gc.collect()",
"def downloadAndReplaceFile(file_path, download_url):\r\n file = urllib.request.urlopen(download_url)\r\n with open(file_path, 'wb') as output:\r\n output.write(file.read())",
"def download_file(src_url, dst_path):\n logger.info(f'Downloading file from: {src_url}')\n with src_url.open(mode='r') as in_file:\n with open(dst_path, 'wb') as out_file:\n out_file.write(in_file.read())\n logger.info(f'Downloaded file path on disk: {dst_path}')\n return dst_path",
"def download_file_from_url(url, PATH, file_name):\n with requests.get(url) as r:\n with open(PATH+'/'+file_name, 'wb') as f:\n f.write(r.content)",
"def download(self, download_path):\n return",
"def download_file(url_path):\n local_filename = url_path.split('/')[-3] + \"-\" + url_path.split('/')[-1]\n local_filename = OUT_DIR + local_filename\n print local_filename\n url = \"https://commoncrawl.s3.amazonaws.com/\" + url_path\n # NOTE the stream=True parameter\n req = requests.get(url, stream=True)\n with open(local_filename, 'wb') as write_f:\n for chunk in req.iter_content(chunk_size=1024):\n if chunk: # filter out keep-alive new chunks\n write_f.write(chunk)\n write_f.close()\n return local_filename",
"def download_to_file(url, filename):\n with browser_spoof_open(url) as download_conn:\n with open(filename, \"wb\") as out_file:\n shutil.copyfileobj(download_conn, out_file, 1024 * 8)",
"def _download_file(self, artifact_path, local_path):\n full_path = self.base_artifact_path / artifact_path\n with self.managed_folder.get_file(str(full_path)) as remote_file:\n with open(local_path, \"wb\") as local_file:\n for line in remote_file:\n local_file.write(line)",
"def download_file(self, url, filename):\n r = requests.get(url, stream=True)\n r.raise_for_status()\n\n with open(filename, 'wb') as f:\n for chunk in r.iter_content():\n if chunk:\n f.write(chunk)\n f.flush()",
"def download(filename):\n print \"Downloading\", filename\n file_content = urlopen(\n urljoin(URL_PATH, filename)\n )\n write_data_to_file(\n file_content.read(),\n os.path.join(\n '/tmp',\n filename\n )\n )",
"def _download(self, url, output_dir, dataset, chunk_size=1024):\n r = self.session.get(url, stream=True, allow_redirects=True)\n if not r.ok:\n r = self.session.get(r.url, stream=True, allow_redirects=True, auth=(self._username, self._password))\n file_size = int(r.headers['Content-Length'])\n\n with tqdm(total=file_size, unit_scale=True, unit='B', unit_divisor=1024) as pbar:\n ### GET FILE NAME ###\n if \"Content-Disposition\" in r.headers.keys():\n local_filename = re.findall(\"filename=(.+)\", r.headers[\"Content-Disposition\"])[0]\n else:\n local_filename = url.split(\"/\")[-3]\n local_filename = self.api.lookup(dataset, local_filename)[0]\n local_filename = local_filename + util.convert_to_extension(r.headers['content-type'])\n print(\"*** FNAME\", local_filename)\n\n local_filename = os.path.join(output_dir, local_filename)\n\n ### WRITE FILE ###\n with open(local_filename, 'wb') as f:\n for chunk in r.iter_content(chunk_size=chunk_size):\n if chunk:\n f.write(chunk)\n pbar.update(chunk_size)\n return local_filename",
"def download_file(filename, url):\n with open(filename, 'wb') as fout:\n response = requests.get(url, stream=True)\n response.raise_for_status()\n # Write response data to file\n for block in response.iter_content(4096):\n fout.write(block)",
"def _download_to_file(session: requests.Session, url: str, pyfile: Path):\n with session.get(url, stream=True) as r:\n r.raise_for_status()\n pyfile.parent.mkdir(parents=True, exist_ok=True)\n with pyfile.open(mode=\"wb\") as f:\n for chunk in r.iter_content(chunk_size=40960):\n f.write(chunk)",
"def stream_to_file(self, path):\n # Using default state of files being overwritten for now\n if os.path.exists(path):\n os.remove(path)\n\n # Stream downloaded contents to file and show progress\n with open(path, 'wb') as f:\n for chunk in self.stream.iter_content(chunk_size=self.chunk_size):\n f.write(chunk)\n self.progress += int(len(chunk))\n self.prog_bar.show(self.progress)",
"def _download_file(url: str, output_path: str):\n\n def write_to_file(response: requests.Response, output_path: str) -> int:\n \"\"\"Write the response content to the given file.\n\n :param response: Response to be written to the output file.\n :param output_path: Path to the output file.\n :returns: Number of bytes read from the response content.\n \"\"\"\n read_bytes = 0\n with open(output_path, \"wb\") as output_file:\n # Use the same chunk size of `urlretrieve`\n for chunk in response.iter_content(chunk_size=1024 * 8):\n read_bytes += len(chunk)\n output_file.write(chunk)\n if read_bytes > FETCHER_MAXIMUM_FILE_SIZE:\n break\n return read_bytes\n\n try:\n with requests.get(\n url, stream=True, timeout=FETCHER_REQUEST_TIMEOUT\n ) as response:\n response.raise_for_status()\n\n content_length = int(response.headers.get(\"Content-Length\", 0))\n if content_length > FETCHER_MAXIMUM_FILE_SIZE:\n raise REANAFetcherError(\"Maximum file size exceeded\")\n\n read_bytes = write_to_file(response, output_path)\n\n if read_bytes > FETCHER_MAXIMUM_FILE_SIZE:\n os.remove(output_path)\n raise REANAFetcherError(\"Maximum file size exceeded\")\n except HTTPError as e:\n error = f\"Cannot fetch the workflow specification: {e.response.reason} ({response.status_code})\"\n if response.status_code == 404:\n error = \"Cannot find the given workflow specification\"\n raise REANAFetcherError(error)\n except Timeout:\n raise REANAFetcherError(\n \"Timed-out while fetching the workflow specification\"\n )\n except RequestException:\n raise REANAFetcherError(\n \"Something went wrong while fetching the workflow specification\"\n )",
"def __download_file(self, filename):\r\n \r\n respons = requests.get(self.__url + filename, stream=True)\r\n save_filename = os.path.join(self.__folder, os.path.basename(filename))\r\n with open(save_filename, 'wb') as output_file:\r\n for chunk in respons.iter_content(chunk_size=128):\r\n output_file.write(chunk)",
"def download_file(url, output_filename):\n print(\"Downloading\", url, \"to\", output_filename)\n r = requests.get(url)\n r.raise_for_status()\n with open(output_filename, 'wb') as f:\n f.write(r.content)",
"def download(self, url: str, dest: PathLike, force: bool = False):",
"def download(self):\n\n # os.open *should* give a thread-safe way to exlusivly open files\n filepath = self.film\n try:\n # os.O_BINARY is only avilable and needed on windows\n flags = os.O_CREAT | os.O_EXCL | os.O_WRONLY | os.O_BINARY\n except:\n flags = os.O_CREAT | os.O_EXCL | os.O_WRONLY\n try:\n fd = os.open(filepath, flags)\n except:\n return\n\n try:\n response = self.session.get(self.filmurl, stream=True)\n if response.status_code == 200:\n for chunk in response.iter_content(1024):\n os.write(fd, chunk)\n except:\n # Remove partial img file if request or stream fails\n os.close(fd)\n os.remove(filepath)",
"def save_file(url, output_path):\n\n print(url)\n\n try:\n response = requests.get(url, stream = True)\n except:\n print(\"=> Download failed: %s\" % url)\n return False\n\n if (response.status_code == 200):\n try:\n with open(output_path, \"wb\") as f:\n for chunk in response.iter_content(chunk_size = 512):\n if (chunk):\n f.write(chunk)\n sys.stdout.write(\"\\r=> %s (%dkb)\" % (output_path,\n f.tell()/1024))\n sys.stdout.flush()\n sys.stdout.write(\"\\r=> %s (%dkb)\" % (output_path,\n f.tell()/1024))\n sys.stdout.flush()\n print(\"\")\n return True\n\n except Exception as err:\n print(\"\\n=> Error: %s (%s)\" % (err, url))\n\n else:\n print(\"=> Download failed: %s\" % url)\n return False",
"def url_retrieve(url, output_file):\n r = requests.get(url, allow_redirects=True)\n if r.status_code != 200:\n raise ConnectionError(f\"Could not download {url}\\nError code: {r.status_code}\")\n\n output_file.write_bytes(r.content)",
"def download(url, path):\n response = requests.get(url)\n\n if response.ok:\n print(\"response is ok file is downloading ... \")\n # start to download file from url.\n with open(path, \"wb\") as f:\n f.write(response.content)\n else:\n print(\"Error!\", response.status_code)\n return False\n\n print(\"File downloaded succusfully.\")\n return True",
"def download_from_url(url, output_path):\n\n print('Pulling data from {} to {}'.format(url, output_path))\n wget.download(url, output_path)\n print('done')",
"def download(self, *path, **kwargs):\n callback = kwargs.get(\"callback\", None)\n target = kwargs.get(\"target\", None)\n _create_path(os.path.dirname(target))\n\n req = self._open(*path)\n if req.status_code == 404:\n raise FileNotFoundError\n elif req.status_code != 200:\n raise IOError\n\n size = req.headers.get('content-length')\n if size:\n size = int(size)\n\n f = tempfile.TemporaryFile()\n\n chunksize = 1024*8\n lastchunkreport= 0.0001\n\n readb = 0\n\n for buf in req.iter_content(chunksize):\n readb += len(buf)\n while size and float(readb) / size > lastchunkreport+0.01:\n lastchunkreport += 0.01\n if callback:\n callback()\n f.write(buf)\n\n f.seek(0)\n\n with open(target, \"wb\") as fo:\n shutil.copyfileobj(f, fo)\n\n if callback and not size: #size was unknown, call callbacks\n for i in range(99):\n callback()\n\n if callback:\n callback()",
"def download_file(url, outputfile):\r\n try:\r\n req = requests.get(url, stream=True, timeout=120)\r\n try:\r\n with open(outputfile, 'wb') as file_download:\r\n for chunk in req.iter_content(chunk_size=1024): \r\n if chunk: \r\n file_download.write(chunk)\r\n except IOError as error:\r\n print error\r\n except requests.exceptions.RequestException as err:\r\n print err\r\n except socket.error as err:\r\n print err\r\n return None",
"def _download_file(file_url: str, file_path: str) -> str:\n if os.path.exists(file_path):\n return file_path\n op_desc = f\"Downloading {os.path.basename(file_path)}\"\n try:\n with requests.Session() as req_sess:\n req_res = req_sess.get(file_url, stream=True)\n total_length = int(req_res.headers.get(\"Content-Length\"))\n with tqdm.wrapattr(req_res.raw, \"read\", total=total_length, desc=op_desc) as raw:\n with open(file_path , \"wb\") as file:\n shutil.copyfileobj(raw,file)\n return file_path\n except Exception as network_error:\n if os.path.exists(file_path):\n os.remove(file_path)\n raise network_error",
"def download(url, save_as):\n\topen(save_as, 'w').write(urllib2.urlopen(url).read())",
"def _download_file(self, path, info=None):\n self._log.debug(\"Downloading file {!r}\".format(path))\n\n if info is None:\n info = self._git_show(path)\n\n # info *SHOULD* be a basestring\n if not isinstance(info, basestring):\n raise Exception(\"{!r} was not a file! (info was {!r})\".format(\n path,\n info\n ))\n\n dest_path = os.path.join(self._code_dir, path.replace(\"/\", os.path.sep))\n self._save_file(dest_path, info)",
"def download_file(url, target_path):\n\n r = requests.get(url, stream=True)\n\n with open(target_path, 'wb') as f:\n for chunk in r.iter_content(chunk_size=1024):\n if chunk:\n f.write(chunk)",
"def download_file(url, outfile=None):\n if not outfile:\n outfile = url.split(\"/\")[-1]\n info(\"Downloading %s to %s\" % (url, outfile))\n with requests.get(url, stream=True) as r:\n r.raise_for_status()\n with open(outfile, \"wb\") as f:\n for chunk in r.iter_content(chunk_size=8192):\n f.write(chunk)\n return outfile"
] | [
"0.73627925",
"0.6694261",
"0.65461224",
"0.65308464",
"0.65222704",
"0.65146464",
"0.6499754",
"0.6463992",
"0.6460364",
"0.64219147",
"0.64182365",
"0.6414628",
"0.6405327",
"0.6385253",
"0.63750607",
"0.63738686",
"0.63608336",
"0.63561326",
"0.6352939",
"0.63480806",
"0.63458085",
"0.6330841",
"0.63294446",
"0.63142854",
"0.63049304",
"0.63038135",
"0.6299532",
"0.62974554",
"0.6286512",
"0.6270756"
] | 0.7041003 | 1 |
Iterate over groups of `df`, and, if provided, matching labels. | def _iter_groups(self, df, y=None):
        groups = df.groupby(self.groupby).indices
        for key, sub_idx in groups.items():
            sub_df = df.iloc[sub_idx]
            if y is not None:
                # y is either a numpy array or a pd.Series so index accordingly
                sub_y = y.iloc[sub_idx] if type(y) is pd.Series else y[sub_idx]
            else:
                sub_y = None
            yield key, sub_df, sub_y | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def _iter_objs_labels(objs):\n # Account for (1) multiple columns of data, (2) functions that return\n # multiple values (e.g. hist() returns (bins, values, patches)), and\n # (3) matplotlib.Collection list subclasses.\n label = _get_label(objs)\n if label:\n yield (objs, label)\n elif isinstance(objs, list):\n for obj in objs:\n yield from _iter_objs_labels(obj)",
"def _compare_groups_and_labels(self, groups, labels):\n # function that compares two lists without taking into account the order\n def comp_lists(l1, l2):\n len_match = len(l1) == len(l2)\n return len_match and np.all([g1 == g2 for g1, g2 in zip(l1, l2)])\n\n # comparison of the given groups\n groups_same = comp_lists(groups, self.selected_values['group_keys'])\n\n # if groups are the same, then compare the labels\n if groups_same:\n len_match = len(labels) == len(self.selected_values['labels_keys'])\n tmp = [comp_lists(g1, g2)\n for g1, g2 in zip(labels,\n self.selected_values['labels_keys'])]\n return len_match and np.all(tmp)\n else:\n return False",
"def get_labels(df):\n labels = []\n for i in df.index:\n label = sample_label_from_sample_name(i)\n labels.append(label)\n return labels",
"def split(df, group):\n\n data = namedtuple(\"data\", [\"filename\", \"object\"]) #initiate \"data\" tyoe\n gb = df.groupby(group) #group df by group attribute\n return [\n data(filename, gb.get_group(x))\n for filename, x in zip(gb.groups.keys(), gb.groups)\n ]",
"def generate_labels(cfg, split_files):\n for file_name in split_files:\n file_name = join(cfg.data_dir, file_name)\n\n for example in generate_examples(file_name):\n yield from example['labels']",
"def check_all_same_label(examplesSet, label):\n for example in examplesSet:\n if example[14] != label:\n return False\n return True",
"def labelDocByDataFrame(df, person_ids, id_field, label_field): \n assert id_field in df.columns and label_field in df.columns\n\n # filter, sort, extract\n sort_keys = [id_field, ]\n df = df.loc[df[id_field].isin(person_ids)]\n \n if df.empty: \n print('warning> No qualified entries found according to the input IDs: %s ...' % person_ids[:10])\n return [] # \n\n df.sort_values(sort_keys, ascending=True, inplace=True) # [1]\n\n return df[label_field].values",
"def matches(inline,groupby,groupvals):\n for i,m in enumerate(groupby):\n if inline[m] == groupvals[i]:\n continue\n else:\n return False\n return True",
"def groupByLabel( y ):\n index = []\n for i in np.unique(y): # pour toutes les classes\n ind, = np.where(y==i)\n index.append(ind)\n \n return index",
"def group(self, label: str):\n if not label:\n raise ValueError('Label should not be empty.')\n if self._current_group:\n raise ValueError('group() cannot be nested.')\n try:\n self._current_group = label\n yield\n finally:\n # Try/finally is to make sure that the current_group is correctly\n # reset even if an exception occurs.\n self._current_group = None",
"def grouping_cols(df, cat_percentage = 0.05, checking_itr = 10):",
"def matlabels(df, rowlabel_fn):\n return df.index.to_frame().apply(rowlabel_fn, axis=1)",
"def _AccumulateLabelValues(\n labels, columns, label_values, non_col_labels, is_derived=False):\n for label_name in labels:\n if '-' in label_name:\n parts = label_name.split('-')\n for pivot in range(1, len(parts)):\n column_name = '-'.join(parts[:pivot])\n value = '-'.join(parts[pivot:])\n column_name = column_name.lower()\n if column_name in columns:\n label_values[column_name].append((value, is_derived))\n else:\n non_col_labels.append((label_name, is_derived))",
"def __get_labels(self):\n\n uncertain_pairs_index = self.__query_pairs()\n\n to_label_raw = self.all_raw_data.loc[uncertain_pairs_index]\n to_label_features = self.all_features.loc[uncertain_pairs_index]\n\n # Remove uncertain pairs from the candidate pool\n self.all_features.drop(uncertain_pairs_index, axis=0, inplace=True)\n\n labels_list = []\n for index, row in to_label_raw.iterrows():\n\n print(\"\\n{0:30}\\t{1}\\n{2:30}\\t{3}\\n{4:30}\\t{5}\\n{6:30}\\t{7}\\n\".format(row.name_a, row.name_b,\n row.address_a, row.address_b,\n row.zip_a, row.zip_b,\n row.city_a, row.city_b))\n\n\n label = self.__user_input(\"Is this a match? (0/1)\")\n labels_list.append((index, label))\n\n labels_index = [index for index, label in labels_list]\n labels_values = [label for index, label in labels_list]\n\n # Create dataframe with index and labels\n add_labels = pd.Series(labels_values, index=labels_index, name='label')\n\n # Union the new training set to the full training set\n self.labeled_features = pd.concat([self.labeled_features, to_label_features], axis = 0, ignore_index=False)\n self.labeled_labels = pd.concat([self.labeled_labels, add_labels], axis = 0, ignore_index=False)\n\n return self",
"def df_group_opbreken(df_in):\n df_lijst_als_groter_dan=[]\n for df in df_in.itertuples():\n df_lijst_als_groter_dan.append(df)\n\n return df_lijst_als_groter_dan",
"def __multilabel_processing(self):\n # read the raw dataset\n self.data['image_name'] = self.data['image_name'].map(lambda x: '{}.{}'.format(x, img_format))\n self.data['tags'] = self.data['tags'].map(lambda x: x.split())\n\n # create a df with the same number of rows as the dataset filled with the name of the unique values in tags\n label_names = self.data['tags'].explode().unique().tolist()\n label_df = pd.DataFrame([label_names] * self.data.shape[0], columns=label_names)\n\n # binarize the labels according to if they exist for each image or not\n self.data = pd.concat([self.data, label_df], axis=1)\n self.data[['image_name'] + label_names] = self.data.apply(lambda x: pd.Series([x[0]] + [1 if label in x[1] else 0 for label in x[2:]]), axis=1)",
"def find_labels(df_in, period):\n \n #create regression label\n \n #make a dataframe to hold the last cycle for each enginge in the dataset\n df_max_cycle = pd.DataFrame(df_in.groupby(['id.engine.id','id.maintenanceIndex'])['id.cycle'].max())\n df_max_cycle.reset_index(inplace=True)\n df_max_cycle.columns = ['id.engine.id','id.maintenanceIndex', 'lastCycle']\n \n #add time-to-failure ttf as a new column - regression label\n df_in = pd.merge(df_in, df_max_cycle, on=['id.engine.id','id.maintenanceIndex'])\n df_in['labels.ttf'] = df_in['lastCycle'] - df_in['id.cycle']\n #df_in.drop(['lastCycleReached'], axis=1, inplace=True)\n \n #create binary classification label\n df_in['labels.bnc'] = df_in['labels.ttf'].apply(lambda x: 1 if x <= period else 0)\n \n #create multi-class classification label\n df_in['labels.mcc'] = df_in['labels.ttf'].apply(lambda x: 2 if x <= period/2 else 1 if x <= period else 0)\n \n return df_in",
"def labelSequence(df, pre, post):\n \n maskidx = df.index[df.flag] # series of (DT) indexes with true flags\n result = df.flag.copy()\n for idx in maskidx:\n mask = getIndexSequence(df, idx, pre, post)\n result[mask] = True\n\n return result",
"def parse_labels(df):\n # --- Define lambda to extract coords in list [y, x, height, width]\n extract_box = lambda row: [row['y'], row['x'], row['height'], row['width']]\n\n parsed = {}\n for _, row in df.iterrows():\n # --- Initialize patient entry into parsed\n pid = row['patientId']\n if pid not in parsed:\n parsed[pid] = []\n\n # --- Add box if opacity is present\n if row['Target'] == 1:\n parsed[pid].append(extract_box(row))\n\n return parsed",
"def __iter__(self):\n for g, xs in self._groups.items():\n dtype = dt.Struct(self._item_fields)\n df = ta.Column(dtype).append(\n tuple(\n tuple(\n self._parent._data.child_at(\n self._parent._data.type().get_child_idx(f.name)\n )[x]\n for f in self._item_fields\n )\n for x in xs\n )\n )\n\n yield g, df",
"def groupby_apply_parallel(grouped_df, func, *args):\n with Pool(cpu_count()) as p:\n return_list = p.starmap(func, [(group, *args) for name, group in grouped_df])\n return pd.concat(return_list)",
"def iter_color_groups(mapping, prefs):\r\n # Iterate through prefs and color by given mapping labels\r\n for key in natsort(prefs.keys()):\r\n col_name = prefs[key]['column']\r\n if 'colors' in prefs[key]:\r\n if isinstance(prefs[key]['colors'], dict):\r\n colors = prefs[key]['colors'].copy() # copy so we can mutate\r\n else:\r\n colors = prefs[key]['colors'][:]\r\n else:\r\n colors = {}\r\n labelname = prefs[key]['column']\r\n\r\n # Define groups and associate appropriate colors to each group\r\n groups = group_by_field(mapping, col_name)\r\n colors, data_colors, data_color_order = \\\r\n get_group_colors(groups, colors)\r\n\r\n yield labelname, groups, colors, data_colors, data_color_order",
"def draw_all_labels(df,root_folder_path,root_folder_name,logger):\n # df=df[df.type==\"Color\"]\n len_images=df['file_id'].nunique()\n perc_list=[i*0.05 for i in range(0,20,1)]\n grouped_df=df.groupby(['file_id','class'])\n coordinate_names=['x_max','x_min','y_max','y_min']\n group_len=len(grouped_df)\n\n class_label_dict={}\n label_info_list=[]\n for ind,(name, group) in enumerate(grouped_df):\n img_name,class_name=name\n img_type=group['type'].values[0]\n bb_list=group[coordinate_names].values.astype(int)\n if class_name not in class_label_dict.keys():\n class_label_dict[class_name]=get_random_color()\n bb_color=class_label_dict[class_name]\n label_info_list.append([img_name,img_type,class_name,bb_color,bb_list])\n draw_label_on_image(root_folder_path,root_folder_name,img_name,img_type,class_name,bb_color,bb_list)\n perc=float(\"{:.2f}\".format((ind+1)/group_len))\n if perc in perc_list:\n perc_list.remove(perc)\n logger.write(\"Classes annotated: \"+str(ind+1)+\"/\"+str(group_len))\n # print(\"Label list generated.\")\n # pool = Pool(1)\n # pool.starmap(draw_label_on_one_image, zip(\n # label_info_list, itertools.repeat(root_folder_path), itertools.repeat(root_folder_name)))\n # pool.close()\n # pool.join()\n # print(\"Drawing labels is finished.\")",
"def group_to_others(\n df: pd.DataFrame, to_be_grouped: dict, replace_value: str = \"Other\"\n) -> pd.DataFrame:\n\n for feature, values in to_be_grouped.items():\n df[feature] = [row if row in values else replace_value for row in df[feature]]\n return df",
"def prepare_data_iterators(self, df, text, label):\n mask = np.random.rand(len(df)) < 0.9\n train = df[mask]\n valid = df[~mask]\n self.fields = {'label': self.label_field, 'text': self.text_field}\n train_ds = pdfds.DataFrameDataset(train, self.fields)\n valid_ds = pdfds.DataFrameDataset(valid, self.fields)\n train_iter = BucketIterator(train_ds, batch_size=16, sort_key=lambda x: len(x.text),\n train=True, sort=True, sort_within_batch=True)\n valid_iter = BucketIterator(valid_ds, batch_size=16, sort_key=lambda x: len(x.text),\n train=True, sort=True, sort_within_batch=True)\n return train_iter, valid_iter",
"def get_labels():\n return if_found(dao.get_labels())",
"def label_map_gen(df_main):\n # Function to flatten a list of list\n flatten = lambda l: [item for sublist in l for item in sublist]\n labels = list(set(flatten([l.split(' ') for l in df_main['tags'].values])))\n\n # Create list of labels\n label_map = {l: i for i, l in enumerate(labels)}\n return label_map",
"def import_labels():\n\n dict_labels = df.set_index('id').to_dict()['breed']\n unique_labels = sorted(list(set(dict_labels.values())))\n for index, label in dict_labels.items():\n dict_labels[index] = unique_labels.index(label)\n return dict_labels, unique_labels",
"def test_labels_match_geographies(self):\n geo = self.geographies.find_one({ 'geoid': '15' })\n labels = self.labels.find_one({ 'dataset': 'SF1' })\n\n geo_tables = geo['data']['2010']\n labels_tables = labels['tables']\n\n self.assertEqual(sorted(geo_tables.keys()), sorted(labels_tables.keys()))\n\n # Test table has labels\n for table_name, geo_keys in geo_tables.items():\n label_keys = labels_tables[table_name]['labels']\n\n self.assertEqual(sorted(geo_keys.keys()), sorted(label_keys.keys()))\n\n for table_name, label_data in labels_tables.items():\n label_keys = label_data['labels']\n geo_keys = geo_tables[table_name]\n\n self.assertEqual(sorted(geo_keys.keys()), sorted(label_keys.keys()))",
"def _train(self, dataset):\n # Local bindings\n targets_sa_name = self.get_space() # name of targets sa\n targets_sa = dataset.sa[targets_sa_name] # actual targets sa\n clf, clfs, index2group = self.clf, self.clfs, self._index2group\n\n # Handle groups of labels\n groups = self._groups\n groups_labels = {} # just groups with numeric indexes\n label2index = {} # how to map old labels to new\n known = set()\n for gi, gk in enumerate(index2group):\n ls = groups[gk][0]\n known_already = known.intersection(ls)\n if len(known_already):\n raise ValueError, \"Grouping of labels is not appropriate. \" \\\n \"Got labels %s already among known in %s. \" % \\\n (known_already, known )\n groups_labels[gk] = ls # needed? XXX\n for l in ls :\n label2index[l] = gi\n known = known.union(ls )\n # TODO: check if different literal labels weren't mapped into\n # same numerical but here asked to belong to different groups\n # yoh: actually above should catch it\n\n # Check if none of the labels is missing from known groups\n dsul = set(targets_sa.unique)\n if known.intersection(dsul) != dsul:\n raise ValueError, \\\n \"Dataset %s had some labels not defined in groups: %s. \" \\\n \"Known are %s\" % \\\n (dataset, dsul.difference(known), known)\n\n # We can operate on the same dataset here \n # Nope: doesn't work nicely with the classifier like kNN\n # which links to the dataset used in the training,\n # so whenever if we simply restore labels back, we\n # would get kNN confused in _predict()\n # Therefore we need to create a shallow copy of\n # dataset and provide it with new labels\n ds_group = dataset.copy(deep=False)\n # assign new labels group samples into groups of labels\n ds_group.sa[targets_sa_name].value = [label2index[l]\n for l in targets_sa.value]\n\n # train primary classifier\n if __debug__:\n debug('CLFTREE', \"Training primary %s on %s with targets %s\",\n (clf, ds_group, ds_group.sa[targets_sa_name].unique))\n clf.train(ds_group)\n\n # ??? should we obtain values for anything?\n # may be we could training values of .clfs to be added\n # as features to the next level -- i.e. .clfs\n\n # Proceed with next 'layer' and train all .clfs on corresponding\n # selection of samples\n # ??? should we may be allow additional 'the other' category, to\n # signal contain all the other categories data? probably not\n # since then it would lead to undetermined prediction (which\n # might be not a bad thing altogether...)\n for gk in groups.iterkeys():\n clf = clfs[gk]\n group_labels = groups_labels[gk]\n if clf is None: # Trailing node\n if len(group_labels) != 1:\n raise ValueError(\n \"Trailing nodes with no classifier assigned must have \"\n \"only a single label associated. Got %s defined in \"\n \"group %r of %s\"\n % (group_labels, gk, self))\n else:\n # select samples per each group\n ids = get_samples_by_attr(dataset, targets_sa_name, groups_labels[gk])\n ds_group = dataset[ids]\n if __debug__:\n debug('CLFTREE', \"Training %s for group %s on %s\",\n (clfs[gk], gk, ds_group))\n # and train corresponding slave clf\n clf.train(ds_group)"
] | [
"0.5890573",
"0.5568137",
"0.55283284",
"0.5486085",
"0.5446619",
"0.5348538",
"0.5325845",
"0.53036046",
"0.52931404",
"0.52764827",
"0.52207327",
"0.51904327",
"0.51854444",
"0.5157948",
"0.51359797",
"0.5110827",
"0.5092669",
"0.5082624",
"0.50786346",
"0.50476575",
"0.50384295",
"0.50344944",
"0.5032002",
"0.50156474",
"0.49811772",
"0.4972276",
"0.49521765",
"0.49410346",
"0.49336737",
"0.4932535"
] | 0.66742736 | 0 |
json encode the message and prepend the topic | def themify(topic,msg):
    return topic + ' ' + json.dumps(msg) | {
"objective": {
"self": [],
"paired": [],
"triplet": [
[
"query",
"document",
"negatives"
]
]
}
} | [
"def mogrify(topic, msg):\n return topic + ' ' + json.dumps(msg)",
"def _send(self, topic, message):\n\n body = {'message': encode(message)}\n result = requests.post('{0}/topics/{1}'.format(self.apiUrl, topic), json=body)\n return result.json()",
"def publish(self, topic, msg):\n formatted_msg = json.dumps(msg)\n self.client.publish(topic, formatted_msg) # json converting cause of mqtt's data transfer limit.",
"def get_mqtt_message(self):\n return json.dumps(self.get_json(), default=self.converter_callback)",
"def serialize(msg) -> str:\n try:\n return json.dumps(msg, separators=(',', ':'))\n except json.JSONDecodeError as err:\n return err.msg",
"def kafka_serializer(data):\n return json.dumps(data).encode('utf-8')",
"def encode(rosMsg): #@NoSelf",
"def construct_json(self):\n\n if 'message' not in self.data:\n self.data['message'] = self.message\n\n if self.status == 200:\n self.data['status'] = 'OK'\n else:\n self.data['status'] = 'Not OK'\n\n return json.dumps(self.data)",
"def __create_msg(self, ping):\n now = rospy.get_rostime()\n output = {\n \"info\": {},\n \"timestamp\": int(now.secs * 1e3 + now.nsecs * 1e-6),\n \"data\": ping.T.tolist()\n }\n return json.dumps(output)",
"def build_message(self, src, dst, typee, msg):\n my_dict = {\n SRCE: src,\n DEST: dst,\n TYPE: typee,\n MESG: msg\n }\n\n return json.dumps(my_dict).encode()",
"def _create_message(self, msg):\n head = msg[\"head\"]\n body = msg[\"body\"]\n body = body.format(**self.data)\n length = len(body)\n head = head.format(length=length, **self.data)\n return head + body",
"def message_sender(m):\n my_producer = KafkaProducer(\n bootstrap_servers='localhost:9092',\n value_serializer=lambda v: json.dumps(v).encode('utf-8'))\n my_producer.send(cfg.end_topic,m)\n return m",
"def message_sender(m):\n my_producer = KafkaProducer(\n bootstrap_servers='localhost:9092',\n value_serializer=lambda v: json.dumps(v).encode('utf-8'))\n my_producer.send(cfg.end_topic,m)\n return m",
"def json_msg(json_body, msg=null):\n # type: (JSON, Optional[Str]) -> Str\n json_str = json_pkg.dumps(json_body, indent=4, ensure_ascii=False)\n if msg is not null:\n return \"{}\\n{}\".format(msg, json_str)\n return json_str",
"def publish(self, message, topic=''):\n if type(message) != types.ListType:\n message = [message]\n if topic:\n message = [topic] + message\n self.send(message)",
"def send_jsonified(self, msg, stats=True):\n raise NotImplemented()",
"def __mqtt_topic_callback(self, client, userdata, message):\n\n res = message.payload.decode('utf-8')\n raw_json = json.loads(res)\n\n # Response:\n \"\"\"\n {\n \"proType\":0,\n \"msg\":\"{\\\"transaction\\\":\\\"1234567890\\\",\\\"sku\\\":\\\"H6163\\\",\\\"device\\\":\\\"A2:B2:C3:D4:E5:F6:77:88\\\",\\\"type\\\":0,\\\"cmd\\\":\\\"status\\\",\\\"data\\\":\\\"{\\\\\\\"softversion\\\\\\\":\\\\\\\"1.02.17\\\\\\\",\\\\\\\"wifiSoftVersion\\\\\\\":\\\\\\\"1.00.33\\\\\\\",\\\\\\\"turn\\\\\\\":1,\\\\\\\"brightness\\\\\\\":133,\\\\\\\"mode\\\\\\\":2,\\\\\\\"timer\\\\\\\":{\\\\\\\"enable\\\\\\\":0,\\\\\\\"time\\\\\\\":[{\\\\\\\"openHour\\\\\\\":18,\\\\\\\"openMin\\\\\\\":0,\\\\\\\"closeHour\\\\\\\":23,\\\\\\\"closeMin\\\\\\\":59}]},\\\\\\\"color\\\\\\\":{\\\\\\\"red\\\\\\\":255,\\\\\\\"green\\\\\\\":215,\\\\\\\"blue\\\\\\\":0},\\\\\\\"colorTemInKelvin\\\\\\\":0}\\\"}\",\n \"state\":{\n \"onOff\":1,\n \"brightness\":133,\n \"color\":{\n \"r\":255,\n \"g\":215,\n \"b\":0\n },\n \"colorTemInKelvin\":0,\n \"connected\":\"true\",\n \"sku\":\"H6163\",\n \"device\":\"A2:B2:C3:D4:E5:F6:77:88\"\n }\n }\n\n OR\n\n {\n \"proType\":0,\n \"msg\":\"{\\\"transaction\\\":\\\"1234567890\\\",\\\"sku\\\":\\\"H6163\\\",\\\"device\\\":\\\"A2:B2:C3:D4:E5:F6:77:88\\\",\\\"type\\\":0,\\\"cmd\\\":\\\"color\\\",\\\"data\\\":\\\"{\\\\\\\"red\\\\\\\":0,\\\\\\\"green\\\\\\\":0,\\\\\\\"blue\\\\\\\":0}\\\"}\",\n \"state\":{\n \"onOff\":1,\n \"brightness\":159,\n \"connected\":\"true\",\n \"sku\":\"H6163\",\n \"device\":\"A2:B2:C3:D4:E5:F6:77:88\"\n }\n }\n \"\"\"\n\n if not 'state' in raw_json:\n return\n state = raw_json['state']\n\n # Get device\n device_identifer = state['device']\n if not device_identifer in self.__devices:\n self.__http_update_device_list()\n if not device_identifer in self.__devices:\n return\n device = self.__devices[device_identifer]\n\n # Update device status\n device._update_state(state)\n self.on_device_update(self, device, raw_json)",
"def on_message(client, userdata, message):\n print(f'{message.topic} {message.payload.decode(\"utf-8\")}') # Print message topic and payload",
"def _publish(self, topic_name, message):\n msg = {\n 'op': 'publish',\n 'topic': topic_name,\n 'msg': message\n }\n json_msg = json.dumps(msg)\n self.ws.send(json_msg)",
"def add_message(self, msg):\n msg_string = json.dumps(msg)\n self.redis_client.publish(self.message_channel, msg_string)\n self.redis_client.lpush(self.message_list, msg_string)\n self.redis_client.ltrim(self.message_list, 0,\n app.config[\"MAX_MESSAGES\"]-1)",
"async def _send_json_message(self, message: Dict[str, Any]) -> None:\n if not self.connected:\n raise NotConnected\n\n if LOGGER.isEnabledFor(logging.DEBUG):\n LOGGER.debug(\"Publishing message:\\n%s\\n\", pprint.pformat(message))\n\n assert self._client\n assert \"id\" in message\n\n await self._client.send_json(message, dumps=ujson.dumps)",
"def on_message(client, userdata, message): \n print(\"Topic: \" + message.topic + \" Message: \" + message.payload.decode('utf-8'))",
"def gen_message(msg: Message) -> str:\n msg_dict = msg._asdict()\n msg_dict.update({MSG_TYPE_NAME: type(msg).__name__})\n\n return json.dumps(msg_dict)",
"def _construct_message(self):\n self.message = {\"token\": self._auth, \"channel\": self.channel}\n super()._construct_message()",
"def echo(self, message):\n \n data = {\n \"method\" : \"echo\",\n \"type\" : \"message\",\n \"data\" : json.dumps(message)\n }\n return json.dumps(data)",
"def encode(self, message):\n return message.encode()",
"def publish_mqtt_msg(topic, mqtt_msg):\n\n MQTT_HOST = settings.MQTT_HOST\n MQTT_PORT = settings.MQTT_PORT\n MQTT_KEEPALIVE_INTERVAL = settings.MQTT_KEEPALIVE_INTERVAL\n\n MQTT_TOPIC = topic\n\n MQTT_MSG = json.dumps(mqtt_msg)\n\n \"\"\" Celery task to create a password for the user \"\"\"\n\n celery_task.delay(MQTT_MSG)\n\n def on_publish(client, userdata, mid):\n print(\"Message Published...\")\n\n def on_connect(client, userdata, flags, rc):\n client.subscribe(MQTT_TOPIC)\n client.publish(MQTT_TOPIC, MQTT_MSG)\n\n def on_message(client, userdata, msg):\n print(msg.topic)\n print(msg.payload)\n payload = json.loads(msg.payload)\n print(payload['sepalWidth'])\n client.disconnect()\n\n mqttc = mqtt.Client()\n mqttc.on_publish = on_publish\n mqttc.on_connect = on_connect\n mqttc.on_message = on_message\n\n mqttc.connect(MQTT_HOST, MQTT_PORT, MQTT_KEEPALIVE_INTERVAL)",
"def format(self, record):\n data = {}\n\n data[\"category\"] = record.name\n data[\"timestamp\"] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')\n data[\"level\"] = record.levelname\n data[\"message\"] = record.msg\n data[\"threadName\"] = record.threadName\n return json.dumps(data)",
"def tojson(jmessage):\n jmessage = json.dumps(json.loads(json.JSONEncoder().encode(jmessage)),\n indent=4,\n sort_keys=True,\n ensure_ascii=True)\n return jmessage",
"def serialize(self, message):\n value = bytes(json.dumps(message, cls=DjangoJSONEncoder), encoding=\"utf-8\")\n if self.crypter:\n value = self.crypter.encrypt(value)\n\n # As we use an sorted set to expire messages we need to guarantee uniqueness,\n # with 12 bytes.\n random_prefix = random.getrandbits(8 * 12).to_bytes(12, \"big\")\n return random_prefix + value"
] | [
"0.79083043",
"0.671766",
"0.6527758",
"0.64427924",
"0.6238984",
"0.6164169",
"0.6155834",
"0.6121196",
"0.6061571",
"0.59668523",
"0.5952764",
"0.5930898",
"0.5930898",
"0.5927398",
"0.591593",
"0.59124506",
"0.58600277",
"0.58495414",
"0.5837054",
"0.58189356",
"0.57878685",
"0.578662",
"0.5753948",
"0.57366836",
"0.5707438",
"0.5702689",
"0.5693758",
"0.5668992",
"0.56644243",
"0.5659305"
] | 0.8218022 | 1 |