Dataset columns (types and value ranges as reported by the dataset viewer):
repo: string, length 7 to 55
path: string, length 4 to 127
func_name: string, length 1 to 88
original_string: string, length 75 to 19.8k
language: string, 1 distinct value
code: string, length 75 to 19.8k
code_tokens: list
docstring: string, length 3 to 17.3k
docstring_tokens: list
sha: string, length 40
url: string, length 87 to 242
partition: string, 1 distinct value
assamite/creamas
creamas/mp.py
MultiEnvManager.get_agents
async def get_agents(self, addr=True, agent_cls=None): """Get addresses of all agents in all the slave environments. This is a managing function for :meth:`creamas.mp.MultiEnvironment.get_agents`. .. note:: Since :class:`aiomas.rpc.Proxy` objects do not seem to handle (re)serialization, ``addr`` and ``agent_cls`` parameters are omitted from the call to underlying multi-environment's :meth:`get_agents`. If :class:`aiomas.rpc.Proxy` objects from all the agents are needed, call each slave environment manager's :meth:`get_agents` directly. """ return await self.menv.get_agents(addr=True, agent_cls=None, as_coro=True)
python
async def get_agents(self, addr=True, agent_cls=None): """Get addresses of all agents in all the slave environments. This is a managing function for :meth:`creamas.mp.MultiEnvironment.get_agents`. .. note:: Since :class:`aiomas.rpc.Proxy` objects do not seem to handle (re)serialization, ``addr`` and ``agent_cls`` parameters are omitted from the call to underlying multi-environment's :meth:`get_agents`. If :class:`aiomas.rpc.Proxy` objects from all the agents are needed, call each slave environment manager's :meth:`get_agents` directly. """ return await self.menv.get_agents(addr=True, agent_cls=None, as_coro=True)
[ "async", "def", "get_agents", "(", "self", ",", "addr", "=", "True", ",", "agent_cls", "=", "None", ")", ":", "return", "await", "self", ".", "menv", ".", "get_agents", "(", "addr", "=", "True", ",", "agent_cls", "=", "None", ",", "as_coro", "=", "True", ")" ]
Get addresses of all agents in all the slave environments. This is a managing function for :meth:`creamas.mp.MultiEnvironment.get_agents`. .. note:: Since :class:`aiomas.rpc.Proxy` objects do not seem to handle (re)serialization, ``addr`` and ``agent_cls`` parameters are omitted from the call to underlying multi-environment's :meth:`get_agents`. If :class:`aiomas.rpc.Proxy` objects from all the agents are needed, call each slave environment manager's :meth:`get_agents` directly.
[ "Get", "addresses", "of", "all", "agents", "in", "all", "the", "slave", "environments", "." ]
54dc3e31c97a3f938e58272f8ab80b6bcafeff58
https://github.com/assamite/creamas/blob/54dc3e31c97a3f938e58272f8ab80b6bcafeff58/creamas/mp.py#L251-L269
train
assamite/creamas
creamas/mp.py
MultiEnvManager.get_connections
async def get_connections(self, data=True): """Return connections for all the agents in the slave environments. This is a managing function for :meth:`~creamas.mp.MultiEnvironment.get_connections`. """ return await self.menv.get_connections(data=data, as_coro=True)
python
async def get_connections(self, data=True): """Return connections for all the agents in the slave environments. This is a managing function for :meth:`~creamas.mp.MultiEnvironment.get_connections`. """ return await self.menv.get_connections(data=data, as_coro=True)
[ "async", "def", "get_connections", "(", "self", ",", "data", "=", "True", ")", ":", "return", "await", "self", ".", "menv", ".", "get_connections", "(", "data", "=", "data", ",", "as_coro", "=", "True", ")" ]
Return connections for all the agents in the slave environments. This is a managing function for :meth:`~creamas.mp.MultiEnvironment.get_connections`.
[ "Return", "connections", "for", "all", "the", "agents", "in", "the", "slave", "environments", "." ]
54dc3e31c97a3f938e58272f8ab80b6bcafeff58
https://github.com/assamite/creamas/blob/54dc3e31c97a3f938e58272f8ab80b6bcafeff58/creamas/mp.py#L281-L287
train
assamite/creamas
creamas/mp.py
MultiEnvironment.get_agents
def get_agents(self, addr=True, agent_cls=None, as_coro=False): """Get agents from the slave environments. :param bool addr: If ``True``, returns only addresses of the agents, otherwise returns a :class:`Proxy` object for each agent. :param agent_cls: If specified, returns only agents that are members of that particular class. :param bool as_coro: If ``True``, returns a coroutine, otherwise runs the method in an event loop. :returns: A coroutine or list of :class:`Proxy` objects or addresses as specified by the input parameters. Slave environment managers are excluded from the returned list by default. Essentially, this method calls each slave environment manager's :meth:`creamas.mp.EnvManager.get_agents` asynchronously. .. note:: Calling each slave environment's manager might be costly in some situations. Therefore, it is advisable to store the returned agent list if the agent sets in the slave environments are not bound to change. """ async def slave_task(mgr_addr, addr=True, agent_cls=None): r_manager = await self.env.connect(mgr_addr, timeout=TIMEOUT) return await r_manager.get_agents(addr=addr, agent_cls=agent_cls) tasks = create_tasks(slave_task, self.addrs, addr, agent_cls) return run_or_coro(tasks, as_coro)
python
def get_agents(self, addr=True, agent_cls=None, as_coro=False): """Get agents from the slave environments. :param bool addr: If ``True``, returns only addresses of the agents, otherwise returns a :class:`Proxy` object for each agent. :param agent_cls: If specified, returns only agents that are members of that particular class. :param bool as_coro: If ``True``, returns a coroutine, otherwise runs the method in an event loop. :returns: A coroutine or list of :class:`Proxy` objects or addresses as specified by the input parameters. Slave environment managers are excluded from the returned list by default. Essentially, this method calls each slave environment manager's :meth:`creamas.mp.EnvManager.get_agents` asynchronously. .. note:: Calling each slave environment's manager might be costly in some situations. Therefore, it is advisable to store the returned agent list if the agent sets in the slave environments are not bound to change. """ async def slave_task(mgr_addr, addr=True, agent_cls=None): r_manager = await self.env.connect(mgr_addr, timeout=TIMEOUT) return await r_manager.get_agents(addr=addr, agent_cls=agent_cls) tasks = create_tasks(slave_task, self.addrs, addr, agent_cls) return run_or_coro(tasks, as_coro)
[ "def", "get_agents", "(", "self", ",", "addr", "=", "True", ",", "agent_cls", "=", "None", ",", "as_coro", "=", "False", ")", ":", "async", "def", "slave_task", "(", "mgr_addr", ",", "addr", "=", "True", ",", "agent_cls", "=", "None", ")", ":", "r_manager", "=", "await", "self", ".", "env", ".", "connect", "(", "mgr_addr", ",", "timeout", "=", "TIMEOUT", ")", "return", "await", "r_manager", ".", "get_agents", "(", "addr", "=", "addr", ",", "agent_cls", "=", "agent_cls", ")", "tasks", "=", "create_tasks", "(", "slave_task", ",", "self", ".", "addrs", ",", "addr", ",", "agent_cls", ")", "return", "run_or_coro", "(", "tasks", ",", "as_coro", ")" ]
Get agents from the slave environments. :param bool addr: If ``True``, returns only addresses of the agents, otherwise returns a :class:`Proxy` object for each agent. :param agent_cls: If specified, returns only agents that are members of that particular class. :param bool as_coro: If ``True``, returns a coroutine, otherwise runs the method in an event loop. :returns: A coroutine or list of :class:`Proxy` objects or addresses as specified by the input parameters. Slave environment managers are excluded from the returned list by default. Essentially, this method calls each slave environment manager's :meth:`creamas.mp.EnvManager.get_agents` asynchronously. .. note:: Calling each slave environment's manager might be costly in some situations. Therefore, it is advisable to store the returned agent list if the agent sets in the slave environments are not bound to change.
[ "Get", "agents", "from", "the", "slave", "environments", "." ]
54dc3e31c97a3f938e58272f8ab80b6bcafeff58
https://github.com/assamite/creamas/blob/54dc3e31c97a3f938e58272f8ab80b6bcafeff58/creamas/mp.py#L446-L481
train
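A minimal usage sketch for the method above, assuming `menv` is an already-constructed creamas MultiEnvironment (its setup is not shown here); only the parameters documented in the docstring are used.

# Blocking form: runs the slave-manager queries in an event loop and returns a list of addresses.
agent_addrs = menv.get_agents(addr=True)

# Coroutine form, for use from inside another coroutine instead of starting a nested loop.
async def list_agents(menv):
    return await menv.get_agents(addr=True, as_coro=True)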
assamite/creamas
creamas/mp.py
MultiEnvironment.is_ready
async def is_ready(self): """Check if the multi-environment has been fully initialized. This calls each slave environment manager's :py:meth:`is_ready` and checks if the multi-environment itself is ready by calling :py:meth:`~creamas.mp.MultiEnvironment.check_ready`. .. seealso:: :py:meth:`creamas.core.environment.Environment.is_ready` """ async def slave_task(addr, timeout): try: r_manager = await self.env.connect(addr, timeout=timeout) ready = await r_manager.is_ready() if not ready: return False except: return False return True if not self.env.is_ready(): return False if not self.check_ready(): return False rets = await create_tasks(slave_task, self.addrs, 0.5) if not all(rets): return False return True
python
async def is_ready(self): """Check if the multi-environment has been fully initialized. This calls each slave environment manager's :py:meth:`is_ready` and checks if the multi-environment itself is ready by calling :py:meth:`~creamas.mp.MultiEnvironment.check_ready`. .. seealso:: :py:meth:`creamas.core.environment.Environment.is_ready` """ async def slave_task(addr, timeout): try: r_manager = await self.env.connect(addr, timeout=timeout) ready = await r_manager.is_ready() if not ready: return False except: return False return True if not self.env.is_ready(): return False if not self.check_ready(): return False rets = await create_tasks(slave_task, self.addrs, 0.5) if not all(rets): return False return True
[ "async", "def", "is_ready", "(", "self", ")", ":", "async", "def", "slave_task", "(", "addr", ",", "timeout", ")", ":", "try", ":", "r_manager", "=", "await", "self", ".", "env", ".", "connect", "(", "addr", ",", "timeout", "=", "timeout", ")", "ready", "=", "await", "r_manager", ".", "is_ready", "(", ")", "if", "not", "ready", ":", "return", "False", "except", ":", "return", "False", "return", "True", "if", "not", "self", ".", "env", ".", "is_ready", "(", ")", ":", "return", "False", "if", "not", "self", ".", "check_ready", "(", ")", ":", "return", "False", "rets", "=", "await", "create_tasks", "(", "slave_task", ",", "self", ".", "addrs", ",", "0.5", ")", "if", "not", "all", "(", "rets", ")", ":", "return", "False", "return", "True" ]
Check if the multi-environment has been fully initialized. This calls each slave environment manager's :py:meth:`is_ready` and checks if the multi-environment itself is ready by calling :py:meth:`~creamas.mp.MultiEnvironment.check_ready`. .. seealso:: :py:meth:`creamas.core.environment.Environment.is_ready`
[ "Check", "if", "the", "multi", "-", "environment", "has", "been", "fully", "initialized", "." ]
54dc3e31c97a3f938e58272f8ab80b6bcafeff58
https://github.com/assamite/creamas/blob/54dc3e31c97a3f938e58272f8ab80b6bcafeff58/creamas/mp.py#L526-L554
train
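A hedged sketch of polling the readiness check above before starting a run; `menv` is assumed to be a MultiEnvironment created elsewhere, and the retry counts are illustrative.

import asyncio

async def wait_until_ready(menv, retries=10, delay=0.5):
    # Poll until the multi-environment and all slave managers report ready.
    for _ in range(retries):
        if await menv.is_ready():
            return True
        await asyncio.sleep(delay)
    return False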
assamite/creamas
creamas/mp.py
MultiEnvironment.spawn_slaves
async def spawn_slaves(self, slave_addrs, slave_env_cls, slave_mgr_cls, slave_kwargs=None): """Spawn slave environments. :param slave_addrs: List of (HOST, PORT) addresses for the slave-environments. :param slave_env_cls: Class for the slave environments. :param slave_kwargs: If not None, must be a list of the same size as *slave_addrs*. Each item in the list contains the parameter values for one slave environment. :param slave_mgr_cls: Class of the slave environment managers. """ pool, r = spawn_containers(slave_addrs, env_cls=slave_env_cls, env_params=slave_kwargs, mgr_cls=slave_mgr_cls) self._pool = pool self._r = r self._manager_addrs = ["{}{}".format(_get_base_url(a), 0) for a in slave_addrs]
python
async def spawn_slaves(self, slave_addrs, slave_env_cls, slave_mgr_cls, slave_kwargs=None): """Spawn slave environments. :param slave_addrs: List of (HOST, PORT) addresses for the slave-environments. :param slave_env_cls: Class for the slave environments. :param slave_kwargs: If not None, must be a list of the same size as *slave_addrs*. Each item in the list contains the parameter values for one slave environment. :param slave_mgr_cls: Class of the slave environment managers. """ pool, r = spawn_containers(slave_addrs, env_cls=slave_env_cls, env_params=slave_kwargs, mgr_cls=slave_mgr_cls) self._pool = pool self._r = r self._manager_addrs = ["{}{}".format(_get_base_url(a), 0) for a in slave_addrs]
[ "async", "def", "spawn_slaves", "(", "self", ",", "slave_addrs", ",", "slave_env_cls", ",", "slave_mgr_cls", ",", "slave_kwargs", "=", "None", ")", ":", "pool", ",", "r", "=", "spawn_containers", "(", "slave_addrs", ",", "env_cls", "=", "slave_env_cls", ",", "env_params", "=", "slave_kwargs", ",", "mgr_cls", "=", "slave_mgr_cls", ")", "self", ".", "_pool", "=", "pool", "self", ".", "_r", "=", "r", "self", ".", "_manager_addrs", "=", "[", "\"{}{}\"", ".", "format", "(", "_get_base_url", "(", "a", ")", ",", "0", ")", "for", "a", "in", "slave_addrs", "]" ]
Spawn slave environments. :param slave_addrs: List of (HOST, PORT) addresses for the slave-environments. :param slave_env_cls: Class for the slave environments. :param slave_kwargs: If not None, must be a list of the same size as *slave_addrs*. Each item in the list contains the parameter values for one slave environment. :param slave_mgr_cls: Class of the slave environment managers.
[ "Spawn", "slave", "environments", "." ]
54dc3e31c97a3f938e58272f8ab80b6bcafeff58
https://github.com/assamite/creamas/blob/54dc3e31c97a3f938e58272f8ab80b6bcafeff58/creamas/mp.py#L556-L578
train
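A sketch of calling the method above; the (HOST, PORT) address format follows the docstring, while the slave environment and manager classes are assumed to be creamas subclasses provided by the caller.

addrs = [("localhost", 5555), ("localhost", 5556)]  # illustrative slave addresses

async def start_slaves(menv, slave_env_cls, slave_mgr_cls):
    # One kwargs dict per address, matching the length of addrs.
    kwargs = [{} for _ in addrs]
    await menv.spawn_slaves(addrs, slave_env_cls, slave_mgr_cls,
                            slave_kwargs=kwargs)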
assamite/creamas
creamas/mp.py
MultiEnvironment._get_smallest_env
async def _get_smallest_env(self): """Get address of the slave environment manager with the smallest number of agents. """ async def slave_task(mgr_addr): r_manager = await self.env.connect(mgr_addr, timeout=TIMEOUT) ret = await r_manager.get_agents(addr=True) return mgr_addr, len(ret) sizes = await create_tasks(slave_task, self.addrs, flatten=False) return sorted(sizes, key=lambda x: x[1])[0][0]
python
async def _get_smallest_env(self): """Get address of the slave environment manager with the smallest number of agents. """ async def slave_task(mgr_addr): r_manager = await self.env.connect(mgr_addr, timeout=TIMEOUT) ret = await r_manager.get_agents(addr=True) return mgr_addr, len(ret) sizes = await create_tasks(slave_task, self.addrs, flatten=False) return sorted(sizes, key=lambda x: x[1])[0][0]
[ "async", "def", "_get_smallest_env", "(", "self", ")", ":", "async", "def", "slave_task", "(", "mgr_addr", ")", ":", "r_manager", "=", "await", "self", ".", "env", ".", "connect", "(", "mgr_addr", ",", "timeout", "=", "TIMEOUT", ")", "ret", "=", "await", "r_manager", ".", "get_agents", "(", "addr", "=", "True", ")", "return", "mgr_addr", ",", "len", "(", "ret", ")", "sizes", "=", "await", "create_tasks", "(", "slave_task", ",", "self", ".", "addrs", ",", "flatten", "=", "False", ")", "return", "sorted", "(", "sizes", ",", "key", "=", "lambda", "x", ":", "x", "[", "1", "]", ")", "[", "0", "]", "[", "0", "]" ]
Get address of the slave environment manager with the smallest number of agents.
[ "Get", "address", "of", "the", "slave", "environment", "manager", "with", "the", "smallest", "number", "of", "agents", "." ]
54dc3e31c97a3f938e58272f8ab80b6bcafeff58
https://github.com/assamite/creamas/blob/54dc3e31c97a3f938e58272f8ab80b6bcafeff58/creamas/mp.py#L692-L702
train
assamite/creamas
creamas/mp.py
MultiEnvironment.spawn
async def spawn(self, agent_cls, *args, addr=None, **kwargs): """Spawn a new agent in a slave environment. :param str agent_cls: ``qualname`` of the agent class. That is, the name should be in the form ``pkg.mod:cls``, e.g. ``creamas.core.agent:CreativeAgent``. :param str addr: Optional. Address for the slave environment's manager. If :attr:`addr` is None, spawns the agent in the slave environment with the currently smallest number of agents. :returns: :class:`aiomas.rpc.Proxy` and address for the created agent. The ``*args`` and ``**kwargs`` are passed down to the agent's :meth:`__init__`. .. note:: Use :meth:`~creamas.mp.MultiEnvironment.spawn_n` to spawn a large number of agents with identical initialization parameters. """ if addr is None: addr = await self._get_smallest_env() r_manager = await self.env.connect(addr) return await r_manager.spawn(agent_cls, *args, **kwargs)
python
async def spawn(self, agent_cls, *args, addr=None, **kwargs): """Spawn a new agent in a slave environment. :param str agent_cls: ``qualname`` of the agent class. That is, the name should be in the form ``pkg.mod:cls``, e.g. ``creamas.core.agent:CreativeAgent``. :param str addr: Optional. Address for the slave environment's manager. If :attr:`addr` is None, spawns the agent in the slave environment with the currently smallest number of agents. :returns: :class:`aiomas.rpc.Proxy` and address for the created agent. The ``*args`` and ``**kwargs`` are passed down to the agent's :meth:`__init__`. .. note:: Use :meth:`~creamas.mp.MultiEnvironment.spawn_n` to spawn a large number of agents with identical initialization parameters. """ if addr is None: addr = await self._get_smallest_env() r_manager = await self.env.connect(addr) return await r_manager.spawn(agent_cls, *args, **kwargs)
[ "async", "def", "spawn", "(", "self", ",", "agent_cls", ",", "*", "args", ",", "addr", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "addr", "is", "None", ":", "addr", "=", "await", "self", ".", "_get_smallest_env", "(", ")", "r_manager", "=", "await", "self", ".", "env", ".", "connect", "(", "addr", ")", "return", "await", "r_manager", ".", "spawn", "(", "agent_cls", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Spawn a new agent in a slave environment. :param str agent_cls: ``qualname`` of the agent class. That is, the name should be in the form ``pkg.mod:cls``, e.g. ``creamas.core.agent:CreativeAgent``. :param str addr: Optional. Address for the slave environment's manager. If :attr:`addr` is None, spawns the agent in the slave environment with the currently smallest number of agents. :returns: :class:`aiomas.rpc.Proxy` and address for the created agent. The ``*args`` and ``**kwargs`` are passed down to the agent's :meth:`__init__`. .. note:: Use :meth:`~creamas.mp.MultiEnvironment.spawn_n` to spawn a large number of agents with identical initialization parameters.
[ "Spawn", "a", "new", "agent", "in", "a", "slave", "environment", "." ]
54dc3e31c97a3f938e58272f8ab80b6bcafeff58
https://github.com/assamite/creamas/blob/54dc3e31c97a3f938e58272f8ab80b6bcafeff58/creamas/mp.py#L704-L729
train
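A short sketch of the call above, assuming `menv` is an existing MultiEnvironment; the qualname string follows the ``pkg.mod:cls`` format and the example class is the one named in the docstring.

async def add_agent(menv):
    # Per the docstring, the call returns an aiomas proxy and the new agent's address.
    proxy, addr = await menv.spawn('creamas.core.agent:CreativeAgent')
    return addr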
assamite/creamas
creamas/mp.py
MultiEnvironment.get_connections
def get_connections(self, data=True, as_coro=False): """Return connections from all the agents in the slave environments. :param bool data: If ``True``, returns also the data stored for each connection. :param bool as_coro: If ``True`` returns a coroutine, otherwise runs the asynchronous calls to the slave environment managers in the event loop. .. seealso:: :meth:`creamas.core.environment.Environment.get_connections` """ async def slave_task(addr, data): r_manager = await self.env.connect(addr) return await r_manager.get_connections(data) tasks = create_tasks(slave_task, self.addrs, data) return run_or_coro(tasks, as_coro)
python
def get_connections(self, data=True, as_coro=False): """Return connections from all the agents in the slave environments. :param bool data: If ``True``, returns also the data stored for each connection. :param bool as_coro: If ``True`` returns a coroutine, otherwise runs the asynchronous calls to the slave environment managers in the event loop. .. seealso:: :meth:`creamas.core.environment.Environment.get_connections` """ async def slave_task(addr, data): r_manager = await self.env.connect(addr) return await r_manager.get_connections(data) tasks = create_tasks(slave_task, self.addrs, data) return run_or_coro(tasks, as_coro)
[ "def", "get_connections", "(", "self", ",", "data", "=", "True", ",", "as_coro", "=", "False", ")", ":", "async", "def", "slave_task", "(", "addr", ",", "data", ")", ":", "r_manager", "=", "await", "self", ".", "env", ".", "connect", "(", "addr", ")", "return", "await", "r_manager", ".", "get_connections", "(", "data", ")", "tasks", "=", "create_tasks", "(", "slave_task", ",", "self", ".", "addrs", ",", "data", ")", "return", "run_or_coro", "(", "tasks", ",", "as_coro", ")" ]
Return connections from all the agents in the slave environments. :param bool data: If ``True``, returns also the data stored for each connection. :param bool as_coro: If ``True`` returns a coroutine, otherwise runs the asynchronous calls to the slave environment managers in the event loop. .. seealso:: :meth:`creamas.core.environment.Environment.get_connections`
[ "Return", "connections", "from", "all", "the", "agents", "in", "the", "slave", "environments", "." ]
54dc3e31c97a3f938e58272f8ab80b6bcafeff58
https://github.com/assamite/creamas/blob/54dc3e31c97a3f938e58272f8ab80b6bcafeff58/creamas/mp.py#L780-L799
train
assamite/creamas
creamas/mp.py
MultiEnvironment.get_artifacts
def get_artifacts(self, agent_name=None): """Get all artifacts or all artifacts published by a specific agent. :param str agent_name: Optional. Name of the agent whose artifacts are returned. :returns: All artifacts or all artifacts published by the agent. :rtype: list """ if agent_name is not None: return [a for a in self.artifacts if agent_name == a.creator] return self.artifacts
python
def get_artifacts(self, agent_name=None): """Get all artifacts or all artifacts published by a specific agent. :param str agent_name: Optional. Name of the agent whose artifacts are returned. :returns: All artifacts or all artifacts published by the agent. :rtype: list """ if agent_name is not None: return [a for a in self.artifacts if agent_name == a.creator] return self.artifacts
[ "def", "get_artifacts", "(", "self", ",", "agent_name", "=", "None", ")", ":", "if", "agent_name", "is", "not", "None", ":", "return", "[", "a", "for", "a", "in", "self", ".", "artifacts", "if", "agent_name", "==", "a", ".", "creator", "]", "return", "self", ".", "artifacts" ]
Get all artifacts or all artifacts published by a specific agent. :param str agent_name: Optional. Name of the agent whose artifacts are returned. :returns: All artifacts or all artifacts published by the agent. :rtype: list
[ "Get", "all", "artifacts", "or", "all", "artifacts", "published", "by", "a", "specific", "agent", "." ]
54dc3e31c97a3f938e58272f8ab80b6bcafeff58
https://github.com/assamite/creamas/blob/54dc3e31c97a3f938e58272f8ab80b6bcafeff58/creamas/mp.py#L825-L836
train
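A minimal sketch of the two ways to call the method above; `menv` is an existing MultiEnvironment and the agent name is a hypothetical creator identifier.

# All artifacts collected so far.
all_artifacts = menv.get_artifacts()

# Only the artifacts whose creator matches the given name (the name format is assumed).
by_agent = menv.get_artifacts(agent_name='tcp://localhost:5555/1')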
assamite/creamas
creamas/mp.py
MultiEnvironment.stop_slaves
async def stop_slaves(self, timeout=1): """Stop all the slaves by sending a stop-message to their managers. :param int timeout: Timeout for connecting to each manager. If a connection can not be made before the timeout expires, the resulting error for that particular manager is logged, but the stopping of other managers is not halted. """ for addr in self.addrs: try: r_manager = await self.env.connect(addr, timeout=timeout) await r_manager.stop() except: self._log(logging.WARNING, "Could not stop {}".format(addr))
python
async def stop_slaves(self, timeout=1): """Stop all the slaves by sending a stop-message to their managers. :param int timeout: Timeout for connecting to each manager. If a connection can not be made before the timeout expires, the resulting error for that particular manager is logged, but the stopping of other managers is not halted. """ for addr in self.addrs: try: r_manager = await self.env.connect(addr, timeout=timeout) await r_manager.stop() except: self._log(logging.WARNING, "Could not stop {}".format(addr))
[ "async", "def", "stop_slaves", "(", "self", ",", "timeout", "=", "1", ")", ":", "for", "addr", "in", "self", ".", "addrs", ":", "try", ":", "r_manager", "=", "await", "self", ".", "env", ".", "connect", "(", "addr", ",", "timeout", "=", "timeout", ")", "await", "r_manager", ".", "stop", "(", ")", "except", ":", "self", ".", "_log", "(", "logging", ".", "WARNING", ",", "\"Could not stop {}\"", ".", "format", "(", "addr", ")", ")" ]
Stop all the slaves by sending a stop-message to their managers. :param int timeout: Timeout for connecting to each manager. If a connection can not be made before the timeout expires, the resulting error for that particular manager is logged, but the stopping of other managers is not halted.
[ "Stop", "all", "the", "slaves", "by", "sending", "a", "stop", "-", "message", "to", "their", "managers", "." ]
54dc3e31c97a3f938e58272f8ab80b6bcafeff58
https://github.com/assamite/creamas/blob/54dc3e31c97a3f938e58272f8ab80b6bcafeff58/creamas/mp.py#L852-L866
train
assamite/creamas
creamas/mp.py
MultiEnvironment.destroy
def destroy(self, folder=None, as_coro=False): """Destroy the multiprocessing environment and its slave environments. """ async def _destroy(folder): ret = self.save_info(folder) await self.stop_slaves() # Terminate and join the process pool when we are destroyed. # Do not wait for unfinished processed with pool.close(), # the slaves should be anyway already stopped. if self._pool is not None: self._pool.terminate() self._pool.join() await self._env.shutdown(as_coro=True) return ret return run_or_coro(_destroy(folder), as_coro)
python
def destroy(self, folder=None, as_coro=False): """Destroy the multiprocessing environment and its slave environments. """ async def _destroy(folder): ret = self.save_info(folder) await self.stop_slaves() # Terminate and join the process pool when we are destroyed. # Do not wait for unfinished processed with pool.close(), # the slaves should be anyway already stopped. if self._pool is not None: self._pool.terminate() self._pool.join() await self._env.shutdown(as_coro=True) return ret return run_or_coro(_destroy(folder), as_coro)
[ "def", "destroy", "(", "self", ",", "folder", "=", "None", ",", "as_coro", "=", "False", ")", ":", "async", "def", "_destroy", "(", "folder", ")", ":", "ret", "=", "self", ".", "save_info", "(", "folder", ")", "await", "self", ".", "stop_slaves", "(", ")", "# Terminate and join the process pool when we are destroyed.", "# Do not wait for unfinished processed with pool.close(),", "# the slaves should be anyway already stopped.", "if", "self", ".", "_pool", "is", "not", "None", ":", "self", ".", "_pool", ".", "terminate", "(", ")", "self", ".", "_pool", ".", "join", "(", ")", "await", "self", ".", "_env", ".", "shutdown", "(", "as_coro", "=", "True", ")", "return", "ret", "return", "run_or_coro", "(", "_destroy", "(", "folder", ")", ",", "as_coro", ")" ]
Destroy the multiprocessing environment and its slave environments.
[ "Destroy", "the", "multiprocessing", "environment", "and", "its", "slave", "environments", "." ]
54dc3e31c97a3f938e58272f8ab80b6bcafeff58
https://github.com/assamite/creamas/blob/54dc3e31c97a3f938e58272f8ab80b6bcafeff58/creamas/mp.py#L868-L883
train
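A hedged teardown sketch for the method above; `menv` is an existing MultiEnvironment and the folder name is hypothetical.

async def teardown(menv):
    # Coroutine form: saves environment info, stops the slaves, terminates the
    # process pool and shuts down the master environment.
    await menv.destroy(folder='logs', as_coro=True)

# Synchronous form, e.g. at the end of a script:
# menv.destroy(folder='logs')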
inveniosoftware/invenio-pidrelations
invenio_pidrelations/serializers/schemas.py
RelationSchema.dump_index
def dump_index(self, obj): """Dump the index of the child in the relation.""" if isinstance(obj, PIDNodeOrdered) and self._is_child(obj): return obj.index(self.context['pid']) else: return None
python
def dump_index(self, obj): """Dump the index of the child in the relation.""" if isinstance(obj, PIDNodeOrdered) and self._is_child(obj): return obj.index(self.context['pid']) else: return None
[ "def", "dump_index", "(", "self", ",", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "PIDNodeOrdered", ")", "and", "self", ".", "_is_child", "(", "obj", ")", ":", "return", "obj", ".", "index", "(", "self", ".", "context", "[", "'pid'", "]", ")", "else", ":", "return", "None" ]
Dump the index of the child in the relation.
[ "Dump", "the", "index", "of", "the", "child", "in", "the", "relation", "." ]
a49f3725cf595b663c5b04814280b231f88bc333
https://github.com/inveniosoftware/invenio-pidrelations/blob/a49f3725cf595b663c5b04814280b231f88bc333/invenio_pidrelations/serializers/schemas.py#L73-L78
train
inveniosoftware/invenio-pidrelations
invenio_pidrelations/serializers/schemas.py
RelationSchema.dump_is_last
def dump_is_last(self, obj): """Dump the boolean stating if the child in the relation is last. Dumps `None` for parent serialization. """ if self._is_child(obj) and isinstance(obj, PIDNodeOrdered): if obj.children.count() > 0: return obj.children.ordered('asc').all()[-1] == \ self.context['pid'] elif obj.draft_child: return obj.draft_child == self.context['pid'] else: return True else: return None
python
def dump_is_last(self, obj): """Dump the boolean stating if the child in the relation is last. Dumps `None` for parent serialization. """ if self._is_child(obj) and isinstance(obj, PIDNodeOrdered): if obj.children.count() > 0: return obj.children.ordered('asc').all()[-1] == \ self.context['pid'] elif obj.draft_child: return obj.draft_child == self.context['pid'] else: return True else: return None
[ "def", "dump_is_last", "(", "self", ",", "obj", ")", ":", "if", "self", ".", "_is_child", "(", "obj", ")", "and", "isinstance", "(", "obj", ",", "PIDNodeOrdered", ")", ":", "if", "obj", ".", "children", ".", "count", "(", ")", ">", "0", ":", "return", "obj", ".", "children", ".", "ordered", "(", "'asc'", ")", ".", "all", "(", ")", "[", "-", "1", "]", "==", "self", ".", "context", "[", "'pid'", "]", "elif", "obj", ".", "draft_child", ":", "return", "obj", ".", "draft_child", "==", "self", ".", "context", "[", "'pid'", "]", "else", ":", "return", "True", "else", ":", "return", "None" ]
Dump the boolean stating if the child in the relation is last. Dumps `None` for parent serialization.
[ "Dump", "the", "boolean", "stating", "if", "the", "child", "in", "the", "relation", "is", "last", "." ]
a49f3725cf595b663c5b04814280b231f88bc333
https://github.com/inveniosoftware/invenio-pidrelations/blob/a49f3725cf595b663c5b04814280b231f88bc333/invenio_pidrelations/serializers/schemas.py#L88-L102
train
inveniosoftware/invenio-pidrelations
invenio_pidrelations/serializers/schemas.py
RelationSchema.dump_type
def dump_type(self, obj): """Dump the text name of the relation.""" if not isinstance(obj.relation_type, RelationType): return resolve_relation_type_config(obj.relation_type).name else: return obj.relation_type.name
python
def dump_type(self, obj): """Dump the text name of the relation.""" if not isinstance(obj.relation_type, RelationType): return resolve_relation_type_config(obj.relation_type).name else: return obj.relation_type.name
[ "def", "dump_type", "(", "self", ",", "obj", ")", ":", "if", "not", "isinstance", "(", "obj", ".", "relation_type", ",", "RelationType", ")", ":", "return", "resolve_relation_type_config", "(", "obj", ".", "relation_type", ")", ".", "name", "else", ":", "return", "obj", ".", "relation_type", ".", "name" ]
Dump the text name of the relation.
[ "Dump", "the", "text", "name", "of", "the", "relation", "." ]
a49f3725cf595b663c5b04814280b231f88bc333
https://github.com/inveniosoftware/invenio-pidrelations/blob/a49f3725cf595b663c5b04814280b231f88bc333/invenio_pidrelations/serializers/schemas.py#L104-L109
train
inveniosoftware/invenio-pidrelations
invenio_pidrelations/serializers/schemas.py
RelationSchema.dump_children
def dump_children(self, obj): """Dump the siblings of a PID.""" data, errors = PIDSchema(many=True).dump( obj.children.ordered('asc').all()) return data
python
def dump_children(self, obj): """Dump the siblings of a PID.""" data, errors = PIDSchema(many=True).dump( obj.children.ordered('asc').all()) return data
[ "def", "dump_children", "(", "self", ",", "obj", ")", ":", "data", ",", "errors", "=", "PIDSchema", "(", "many", "=", "True", ")", ".", "dump", "(", "obj", ".", "children", ".", "ordered", "(", "'asc'", ")", ".", "all", "(", ")", ")", "return", "data" ]
Dump the siblings of a PID.
[ "Dump", "the", "siblings", "of", "a", "PID", "." ]
a49f3725cf595b663c5b04814280b231f88bc333
https://github.com/inveniosoftware/invenio-pidrelations/blob/a49f3725cf595b663c5b04814280b231f88bc333/invenio_pidrelations/serializers/schemas.py#L117-L121
train
sopoforic/gamest
gamest/app.py
identify_window
def identify_window(pid, text): """Identify the app associated with a window.""" proc = None path = None uas = Session.query(UserApp).filter(UserApp.window_text == text) nontext = Session.query(UserApp).filter(UserApp.window_text == None) if uas.count(): proc = psutil.Process(pid) try: path = proc.exe() except psutil.AccessDenied: path = proc.name() logger.debug("Trying to identify app, path=%s", path) app = uas.filter(UserApp.path == path).first() if app: return app, proc if nontext.count(): if proc == None: proc = psutil.Process(pid) path = proc.exe() app = nontext.filter(UserApp.path == path).first() if app: return app, proc return None, None
python
def identify_window(pid, text): """Identify the app associated with a window.""" proc = None path = None uas = Session.query(UserApp).filter(UserApp.window_text == text) nontext = Session.query(UserApp).filter(UserApp.window_text == None) if uas.count(): proc = psutil.Process(pid) try: path = proc.exe() except psutil.AccessDenied: path = proc.name() logger.debug("Trying to identify app, path=%s", path) app = uas.filter(UserApp.path == path).first() if app: return app, proc if nontext.count(): if proc == None: proc = psutil.Process(pid) path = proc.exe() app = nontext.filter(UserApp.path == path).first() if app: return app, proc return None, None
[ "def", "identify_window", "(", "pid", ",", "text", ")", ":", "proc", "=", "None", "path", "=", "None", "uas", "=", "Session", ".", "query", "(", "UserApp", ")", ".", "filter", "(", "UserApp", ".", "window_text", "==", "text", ")", "nontext", "=", "Session", ".", "query", "(", "UserApp", ")", ".", "filter", "(", "UserApp", ".", "window_text", "==", "None", ")", "if", "uas", ".", "count", "(", ")", ":", "proc", "=", "psutil", ".", "Process", "(", "pid", ")", "try", ":", "path", "=", "proc", ".", "exe", "(", ")", "except", "psutil", ".", "AccessDenied", ":", "path", "=", "proc", ".", "name", "(", ")", "logger", ".", "debug", "(", "\"Trying to identify app, path=%s\"", ",", "path", ")", "app", "=", "uas", ".", "filter", "(", "UserApp", ".", "path", "==", "path", ")", ".", "first", "(", ")", "if", "app", ":", "return", "app", ",", "proc", "if", "nontext", ".", "count", "(", ")", ":", "if", "proc", "==", "None", ":", "proc", "=", "psutil", ".", "Process", "(", "pid", ")", "path", "=", "proc", ".", "exe", "(", ")", "app", "=", "nontext", ".", "filter", "(", "UserApp", ".", "path", "==", "path", ")", ".", "first", "(", ")", "if", "app", ":", "return", "app", ",", "proc", "return", "None", ",", "None" ]
Identify the app associated with a window.
[ "Identify", "the", "app", "associated", "with", "a", "window", "." ]
2129a6b7ce9976c40347d7232525878a76915eea
https://github.com/sopoforic/gamest/blob/2129a6b7ce9976c40347d7232525878a76915eea/gamest/app.py#L43-L66
train
saxix/drf-api-checker
src/drf_api_checker/recorder.py
Recorder._assertCALL
def _assertCALL(self, url, *, allow_empty=False, check_headers=True, check_status=True, expect_errors=False, name=None, method='get', data=None): """ check url for response changes :param url: url to check :param allow_empty: if True ignore empty response and 404 errors :param check_headers: check response headers :param check_status: check response status code :raises: ValueError :raises: AssertionError """ self.view = resolve(url).func.cls m = getattr(self.client, method.lower()) self.filename = self.get_response_filename(method, name or url) response = m(url, data=data) assert response.accepted_renderer payload = response.data if not allow_empty and not payload: raise ValueError(f"View {self.view} returned and empty json. Check your test") if response.status_code > 299 and not expect_errors: raise ValueError(f"View {self.view} unexpected response. {response.status_code} - {response.content}") if not allow_empty and response.status_code == 404: raise ValueError(f"View {self.view} returned 404 status code. Check your test") if not os.path.exists(self.filename) or os.environ.get('API_CHECKER_RESET', False): _write(self.filename, serialize_response(response)) stored = load_response(self.filename) if (check_status) and response.status_code != stored.status_code: raise StatusCodeError(self.view, response.status_code, stored.status_code) if check_headers: self._assert_headers(response, stored) self.compare(payload, stored.data, self.filename, view=self.view)
python
def _assertCALL(self, url, *, allow_empty=False, check_headers=True, check_status=True, expect_errors=False, name=None, method='get', data=None): """ check url for response changes :param url: url to check :param allow_empty: if True ignore empty response and 404 errors :param check_headers: check response headers :param check_status: check response status code :raises: ValueError :raises: AssertionError """ self.view = resolve(url).func.cls m = getattr(self.client, method.lower()) self.filename = self.get_response_filename(method, name or url) response = m(url, data=data) assert response.accepted_renderer payload = response.data if not allow_empty and not payload: raise ValueError(f"View {self.view} returned and empty json. Check your test") if response.status_code > 299 and not expect_errors: raise ValueError(f"View {self.view} unexpected response. {response.status_code} - {response.content}") if not allow_empty and response.status_code == 404: raise ValueError(f"View {self.view} returned 404 status code. Check your test") if not os.path.exists(self.filename) or os.environ.get('API_CHECKER_RESET', False): _write(self.filename, serialize_response(response)) stored = load_response(self.filename) if (check_status) and response.status_code != stored.status_code: raise StatusCodeError(self.view, response.status_code, stored.status_code) if check_headers: self._assert_headers(response, stored) self.compare(payload, stored.data, self.filename, view=self.view)
[ "def", "_assertCALL", "(", "self", ",", "url", ",", "*", ",", "allow_empty", "=", "False", ",", "check_headers", "=", "True", ",", "check_status", "=", "True", ",", "expect_errors", "=", "False", ",", "name", "=", "None", ",", "method", "=", "'get'", ",", "data", "=", "None", ")", ":", "self", ".", "view", "=", "resolve", "(", "url", ")", ".", "func", ".", "cls", "m", "=", "getattr", "(", "self", ".", "client", ",", "method", ".", "lower", "(", ")", ")", "self", ".", "filename", "=", "self", ".", "get_response_filename", "(", "method", ",", "name", "or", "url", ")", "response", "=", "m", "(", "url", ",", "data", "=", "data", ")", "assert", "response", ".", "accepted_renderer", "payload", "=", "response", ".", "data", "if", "not", "allow_empty", "and", "not", "payload", ":", "raise", "ValueError", "(", "f\"View {self.view} returned and empty json. Check your test\"", ")", "if", "response", ".", "status_code", ">", "299", "and", "not", "expect_errors", ":", "raise", "ValueError", "(", "f\"View {self.view} unexpected response. {response.status_code} - {response.content}\"", ")", "if", "not", "allow_empty", "and", "response", ".", "status_code", "==", "404", ":", "raise", "ValueError", "(", "f\"View {self.view} returned 404 status code. Check your test\"", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "filename", ")", "or", "os", ".", "environ", ".", "get", "(", "'API_CHECKER_RESET'", ",", "False", ")", ":", "_write", "(", "self", ".", "filename", ",", "serialize_response", "(", "response", ")", ")", "stored", "=", "load_response", "(", "self", ".", "filename", ")", "if", "(", "check_status", ")", "and", "response", ".", "status_code", "!=", "stored", ".", "status_code", ":", "raise", "StatusCodeError", "(", "self", ".", "view", ",", "response", ".", "status_code", ",", "stored", ".", "status_code", ")", "if", "check_headers", ":", "self", ".", "_assert_headers", "(", "response", ",", "stored", ")", "self", ".", "compare", "(", "payload", ",", "stored", ".", "data", ",", "self", ".", "filename", ",", "view", "=", "self", ".", "view", ")" ]
check url for response changes :param url: url to check :param allow_empty: if True ignore empty response and 404 errors :param check_headers: check response headers :param check_status: check response status code :raises: ValueError :raises: AssertionError
[ "check", "url", "for", "response", "changes" ]
c84e598f3a95b5b63cf758dcaced89c02b53d772
https://github.com/saxix/drf-api-checker/blob/c84e598f3a95b5b63cf758dcaced89c02b53d772/src/drf_api_checker/recorder.py#L102-L137
train
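A heavily hedged sketch of driving the contract check above from a test; the Recorder instance is assumed to be fully configured elsewhere (test client, response directory), and the URL is hypothetical.

from drf_api_checker.recorder import Recorder

def check_users_endpoint(recorder: Recorder):
    # On the first run the serialized response is stored on disk; later runs
    # compare status, headers and payload against that snapshot and raise on drift.
    recorder._assertCALL('/api/users/', method='get',
                         check_headers=True, check_status=True)

# Setting the API_CHECKER_RESET environment variable before a run rewrites the stored snapshots.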
ludeeus/GHLocalApi
examples/reboot.py
reboot
async def reboot(): """Reboot a Google Home unit.""" async with aiohttp.ClientSession() as session: ghlocalapi = DeviceSettings(LOOP, session, IPADDRESS) result = await ghlocalapi.reboot() print("Reboot info:", result)
python
async def reboot(): """Reboot a Google Home unit.""" async with aiohttp.ClientSession() as session: ghlocalapi = DeviceSettings(LOOP, session, IPADDRESS) result = await ghlocalapi.reboot() print("Reboot info:", result)
[ "async", "def", "reboot", "(", ")", ":", "async", "with", "aiohttp", ".", "ClientSession", "(", ")", "as", "session", ":", "ghlocalapi", "=", "DeviceSettings", "(", "LOOP", ",", "session", ",", "IPADDRESS", ")", "result", "=", "await", "ghlocalapi", ".", "reboot", "(", ")", "print", "(", "\"Reboot info:\"", ",", "result", ")" ]
Reboot a Google Home unit.
[ "Reboot", "a", "Google", "Home", "unit", "." ]
93abdee299c4a4b65aa9dd03c77ec34e174e3c56
https://github.com/ludeeus/GHLocalApi/blob/93abdee299c4a4b65aa9dd03c77ec34e174e3c56/examples/reboot.py#L9-L15
train
djaodjin/djaodjin-deployutils
deployutils/apps/django/logging.py
RequestFilter.filter
def filter(self, record): """ Adds user and remote_addr to the record. """ request = get_request() if request: user = getattr(request, 'user', None) if user and not user.is_anonymous(): record.username = user.username else: record.username = '-' meta = getattr(request, 'META', {}) record.remote_addr = meta.get('REMOTE_ADDR', '-') record.http_user_agent = meta.get('HTTP_USER_AGENT', '-') if not hasattr(record, 'request'): record.request = request else: record.username = '-' record.remote_addr = '-' record.http_user_agent = '-' return True
python
def filter(self, record): """ Adds user and remote_addr to the record. """ request = get_request() if request: user = getattr(request, 'user', None) if user and not user.is_anonymous(): record.username = user.username else: record.username = '-' meta = getattr(request, 'META', {}) record.remote_addr = meta.get('REMOTE_ADDR', '-') record.http_user_agent = meta.get('HTTP_USER_AGENT', '-') if not hasattr(record, 'request'): record.request = request else: record.username = '-' record.remote_addr = '-' record.http_user_agent = '-' return True
[ "def", "filter", "(", "self", ",", "record", ")", ":", "request", "=", "get_request", "(", ")", "if", "request", ":", "user", "=", "getattr", "(", "request", ",", "'user'", ",", "None", ")", "if", "user", "and", "not", "user", ".", "is_anonymous", "(", ")", ":", "record", ".", "username", "=", "user", ".", "username", "else", ":", "record", ".", "username", "=", "'-'", "meta", "=", "getattr", "(", "request", ",", "'META'", ",", "{", "}", ")", "record", ".", "remote_addr", "=", "meta", ".", "get", "(", "'REMOTE_ADDR'", ",", "'-'", ")", "record", ".", "http_user_agent", "=", "meta", ".", "get", "(", "'HTTP_USER_AGENT'", ",", "'-'", ")", "if", "not", "hasattr", "(", "record", ",", "'request'", ")", ":", "record", ".", "request", "=", "request", "else", ":", "record", ".", "username", "=", "'-'", "record", ".", "remote_addr", "=", "'-'", "record", ".", "http_user_agent", "=", "'-'", "return", "True" ]
Adds user and remote_addr to the record.
[ "Adds", "user", "and", "remote_addr", "to", "the", "record", "." ]
a0fe3cf3030dbbf09025c69ce75a69b326565dd8
https://github.com/djaodjin/djaodjin-deployutils/blob/a0fe3cf3030dbbf09025c69ce75a69b326565dd8/deployutils/apps/django/logging.py#L39-L59
train
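A sketch of wiring the filter above into Django's LOGGING setting so the record attributes it sets (username, remote_addr, http_user_agent) reach the formatter; handler and formatter names are illustrative.

LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'request': {'()': 'deployutils.apps.django.logging.RequestFilter'},
    },
    'formatters': {
        'request': {
            'format': '%(asctime)s %(username)s %(remote_addr)s'
                      ' "%(http_user_agent)s" %(message)s',
        },
    },
    'handlers': {
        'console': {
            'class': 'logging.StreamHandler',
            'filters': ['request'],
            'formatter': 'request',
        },
    },
    'root': {'handlers': ['console'], 'level': 'INFO'},
}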
PostmonAPI/postmon-python
postmon.py
PostmonModel.status
def status(self): """Status of the response received from Postmon. The statuses defined by Postmon are: * ``200 OK`` * ``404 CEP NAO ENCONTRADO`` * ``503 SERVICO INDISPONIVEL`` Besides the listed statuses, other HTTP statuses may occur, as in any HTTP call. The only success case is ``200 OK``, in which case the result stored on the object is valid and can be used. """ try: r = self._response except AttributeError: return None else: return r.status_code, r.reason
python
def status(self): """Status of the response received from Postmon. The statuses defined by Postmon are: * ``200 OK`` * ``404 CEP NAO ENCONTRADO`` * ``503 SERVICO INDISPONIVEL`` Besides the listed statuses, other HTTP statuses may occur, as in any HTTP call. The only success case is ``200 OK``, in which case the result stored on the object is valid and can be used. """ try: r = self._response except AttributeError: return None else: return r.status_code, r.reason
[ "def", "status", "(", "self", ")", ":", "try", ":", "r", "=", "self", ".", "_response", "except", "AttributeError", ":", "return", "None", "else", ":", "return", "r", ".", "status_code", ",", "r", ".", "reason" ]
Status of the response received from Postmon. The statuses defined by Postmon are: * ``200 OK`` * ``404 CEP NAO ENCONTRADO`` * ``503 SERVICO INDISPONIVEL`` Besides the listed statuses, other HTTP statuses may occur, as in any HTTP call. The only success case is ``200 OK``, in which case the result stored on the object is valid and can be used.
[ "Status", "da", "resposta", "recebida", "do", "Postmon", "." ]
bfd3a38a8aec5e069536a0bc36440946d1981e37
https://github.com/PostmonAPI/postmon-python/blob/bfd3a38a8aec5e069536a0bc36440946d1981e37/postmon.py#L72-L92
train
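The property above returns None until a request has been made, otherwise a (status_code, reason) tuple. A small consuming sketch, with `endereco` standing in for any already-fetched Postmon object.

status = endereco.status  # `endereco` is a hypothetical, already-fetched object
if status is None:
    print('No request has been made yet.')
else:
    code, reason = status
    if code == 200:
        print('Lookup succeeded; the object data can be used.')
    else:
        print('Lookup failed:', code, reason)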
djaodjin/djaodjin-deployutils
deployutils/apps/django/themes.py
package_theme
def package_theme(app_name, build_dir, excludes=None, includes=None, path_prefix=None, template_dirs=None): """ Package resources and templates for a multi-tier environment into a zip file. Templates are pre-compiled into ``*build_dir*/*app_name*/templates``. Compilation means {% assets '*path*' %} and {% static '*path*' %} tags are replaced by their compiled expression. """ #pylint:disable=too-many-locals,too-many-arguments templates_dest = os.path.join(build_dir, 'templates') # override STATIC_URL to prefix APP_NAME. orig_static_url = django_settings.STATIC_URL if (app_name != settings.APP_NAME and not django_settings.STATIC_URL.startswith('/' + app_name)): django_settings.STATIC_URL = '/' + app_name + orig_static_url if not os.path.exists(templates_dest): os.makedirs(templates_dest) if template_dirs is None: template_dirs = get_template_search_path(app_name) for template_dir in template_dirs: # The first of template_dirs usually contains the most specialized # templates (ie. the ones we truely want to install). if (templates_dest and not os.path.samefile(template_dir, templates_dest)): install_templates(template_dir, templates_dest, excludes=excludes, includes=includes, path_prefix=path_prefix)
python
def package_theme(app_name, build_dir, excludes=None, includes=None, path_prefix=None, template_dirs=None): """ Package resources and templates for a multi-tier environment into a zip file. Templates are pre-compiled into ``*build_dir*/*app_name*/templates``. Compilation means {% assets '*path*' %} and {% static '*path*' %} tags are replaced by their compiled expression. """ #pylint:disable=too-many-locals,too-many-arguments templates_dest = os.path.join(build_dir, 'templates') # override STATIC_URL to prefix APP_NAME. orig_static_url = django_settings.STATIC_URL if (app_name != settings.APP_NAME and not django_settings.STATIC_URL.startswith('/' + app_name)): django_settings.STATIC_URL = '/' + app_name + orig_static_url if not os.path.exists(templates_dest): os.makedirs(templates_dest) if template_dirs is None: template_dirs = get_template_search_path(app_name) for template_dir in template_dirs: # The first of template_dirs usually contains the most specialized # templates (ie. the ones we truely want to install). if (templates_dest and not os.path.samefile(template_dir, templates_dest)): install_templates(template_dir, templates_dest, excludes=excludes, includes=includes, path_prefix=path_prefix)
[ "def", "package_theme", "(", "app_name", ",", "build_dir", ",", "excludes", "=", "None", ",", "includes", "=", "None", ",", "path_prefix", "=", "None", ",", "template_dirs", "=", "None", ")", ":", "#pylint:disable=too-many-locals,too-many-arguments", "templates_dest", "=", "os", ".", "path", ".", "join", "(", "build_dir", ",", "'templates'", ")", "# override STATIC_URL to prefix APP_NAME.", "orig_static_url", "=", "django_settings", ".", "STATIC_URL", "if", "(", "app_name", "!=", "settings", ".", "APP_NAME", "and", "not", "django_settings", ".", "STATIC_URL", ".", "startswith", "(", "'/'", "+", "app_name", ")", ")", ":", "django_settings", ".", "STATIC_URL", "=", "'/'", "+", "app_name", "+", "orig_static_url", "if", "not", "os", ".", "path", ".", "exists", "(", "templates_dest", ")", ":", "os", ".", "makedirs", "(", "templates_dest", ")", "if", "template_dirs", "is", "None", ":", "template_dirs", "=", "get_template_search_path", "(", "app_name", ")", "for", "template_dir", "in", "template_dirs", ":", "# The first of template_dirs usually contains the most specialized", "# templates (ie. the ones we truely want to install).", "if", "(", "templates_dest", "and", "not", "os", ".", "path", ".", "samefile", "(", "template_dir", ",", "templates_dest", ")", ")", ":", "install_templates", "(", "template_dir", ",", "templates_dest", ",", "excludes", "=", "excludes", ",", "includes", "=", "includes", ",", "path_prefix", "=", "path_prefix", ")" ]
Package resources and templates for a multi-tier environment into a zip file. Templates are pre-compiled into ``*build_dir*/*app_name*/templates``. Compilation means {% assets '*path*' %} and {% static '*path*' %} tags are replaced by their compiled expression.
[ "Package", "resources", "and", "templates", "for", "a", "multi", "-", "tier", "environment", "into", "a", "zip", "file", "." ]
a0fe3cf3030dbbf09025c69ce75a69b326565dd8
https://github.com/djaodjin/djaodjin-deployutils/blob/a0fe3cf3030dbbf09025c69ce75a69b326565dd8/deployutils/apps/django/themes.py#L276-L304
train
dipcode-software/django-ajax-cbv
ajax_cbv/mixins.py
AjaxResponseMixin.json_to_response
def json_to_response(self, action=None, json_status=None, success_url=None, json_data=None, **response_kwargs): """ Valid response with next action to be followed by the JS """ data = { "status": self.get_status(json_status), "action": self.get_action(action), "extra_data": self.get_json_data(json_data or {}) } if self.action == AjaxResponseAction.REDIRECT: data["action_url"] = success_url or self.get_success_url() return JsonResponse(data, **response_kwargs)
python
def json_to_response(self, action=None, json_status=None, success_url=None, json_data=None, **response_kwargs): """ Valid response with next action to be followed by the JS """ data = { "status": self.get_status(json_status), "action": self.get_action(action), "extra_data": self.get_json_data(json_data or {}) } if self.action == AjaxResponseAction.REDIRECT: data["action_url"] = success_url or self.get_success_url() return JsonResponse(data, **response_kwargs)
[ "def", "json_to_response", "(", "self", ",", "action", "=", "None", ",", "json_status", "=", "None", ",", "success_url", "=", "None", ",", "json_data", "=", "None", ",", "*", "*", "response_kwargs", ")", ":", "data", "=", "{", "\"status\"", ":", "self", ".", "get_status", "(", "json_status", ")", ",", "\"action\"", ":", "self", ".", "get_action", "(", "action", ")", ",", "\"extra_data\"", ":", "self", ".", "get_json_data", "(", "json_data", "or", "{", "}", ")", "}", "if", "self", ".", "action", "==", "AjaxResponseAction", ".", "REDIRECT", ":", "data", "[", "\"action_url\"", "]", "=", "success_url", "or", "self", ".", "get_success_url", "(", ")", "return", "JsonResponse", "(", "data", ",", "*", "*", "response_kwargs", ")" ]
Valid response with next action to be followed by the JS
[ "Valid", "response", "with", "next", "action", "to", "be", "followed", "by", "the", "JS" ]
a723eaa90490e57f1b2dfbd8d10905b9e2541f88
https://github.com/dipcode-software/django-ajax-cbv/blob/a723eaa90490e57f1b2dfbd8d10905b9e2541f88/ajax_cbv/mixins.py#L39-L50
train
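A sketch of a view answering an AJAX POST with the redirect action shown in the code above; the view, URL and omitted business logic are hypothetical, and AjaxResponseAction is assumed to be importable alongside the mixin (it may actually live in a sibling module).

from django.views.generic import View
from ajax_cbv.mixins import AjaxResponseMixin, AjaxResponseAction

class ArchiveNoteView(AjaxResponseMixin, View):
    def post(self, request, *args, **kwargs):
        # ... archive the note here (omitted) ...
        return self.json_to_response(
            action=AjaxResponseAction.REDIRECT,
            success_url='/notes/',          # hypothetical URL
            json_data={'archived': True},
        )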
dipcode-software/django-ajax-cbv
ajax_cbv/mixins.py
AjaxResponseMixin.get_action
def get_action(self, action=None): """ Returns action to take after call """ if action: self.action = action if self.action not in AjaxResponseAction.choices: raise ValueError( "Invalid action selected: '{}'".format(self.action)) return self.action
python
def get_action(self, action=None): """ Returns action to take after call """ if action: self.action = action if self.action not in AjaxResponseAction.choices: raise ValueError( "Invalid action selected: '{}'".format(self.action)) return self.action
[ "def", "get_action", "(", "self", ",", "action", "=", "None", ")", ":", "if", "action", ":", "self", ".", "action", "=", "action", "if", "self", ".", "action", "not", "in", "AjaxResponseAction", ".", "choices", ":", "raise", "ValueError", "(", "\"Invalid action selected: '{}'\"", ".", "format", "(", "self", ".", "action", ")", ")", "return", "self", ".", "action" ]
Returns action to take after call
[ "Returns", "action", "to", "take", "after", "call" ]
a723eaa90490e57f1b2dfbd8d10905b9e2541f88
https://github.com/dipcode-software/django-ajax-cbv/blob/a723eaa90490e57f1b2dfbd8d10905b9e2541f88/ajax_cbv/mixins.py#L52-L61
train
dipcode-software/django-ajax-cbv
ajax_cbv/mixins.py
AjaxResponseMixin.get_status
def get_status(self, json_status=None): """ Returns status for json """ if json_status: self.json_status = json_status if self.json_status not in AjaxResponseStatus.choices: raise ValueError( "Invalid status selected: '{}'".format(self.json_status)) return self.json_status
python
def get_status(self, json_status=None): """ Returns status for json """ if json_status: self.json_status = json_status if self.json_status not in AjaxResponseStatus.choices: raise ValueError( "Invalid status selected: '{}'".format(self.json_status)) return self.json_status
[ "def", "get_status", "(", "self", ",", "json_status", "=", "None", ")", ":", "if", "json_status", ":", "self", ".", "json_status", "=", "json_status", "if", "self", ".", "json_status", "not", "in", "AjaxResponseStatus", ".", "choices", ":", "raise", "ValueError", "(", "\"Invalid status selected: '{}'\"", ".", "format", "(", "self", ".", "json_status", ")", ")", "return", "self", ".", "json_status" ]
Returns status for json
[ "Returns", "status", "of", "for", "json" ]
a723eaa90490e57f1b2dfbd8d10905b9e2541f88
https://github.com/dipcode-software/django-ajax-cbv/blob/a723eaa90490e57f1b2dfbd8d10905b9e2541f88/ajax_cbv/mixins.py#L63-L72
train
dipcode-software/django-ajax-cbv
ajax_cbv/mixins.py
FormAjaxMixin.form_invalid
def form_invalid(self, form, prefix=None): """ If form invalid return error list in JSON response """ response = super(FormAjaxMixin, self).form_invalid(form) if self.request.is_ajax(): data = { "errors_list": self.add_prefix(form.errors, prefix), } return self.json_to_response(status=400, json_data=data, json_status=AjaxResponseStatus.ERROR) return response
python
def form_invalid(self, form, prefix=None): """ If form invalid return error list in JSON response """ response = super(FormAjaxMixin, self).form_invalid(form) if self.request.is_ajax(): data = { "errors_list": self.add_prefix(form.errors, prefix), } return self.json_to_response(status=400, json_data=data, json_status=AjaxResponseStatus.ERROR) return response
[ "def", "form_invalid", "(", "self", ",", "form", ",", "prefix", "=", "None", ")", ":", "response", "=", "super", "(", "FormAjaxMixin", ",", "self", ")", ".", "form_invalid", "(", "form", ")", "if", "self", ".", "request", ".", "is_ajax", "(", ")", ":", "data", "=", "{", "\"errors_list\"", ":", "self", ".", "add_prefix", "(", "form", ".", "errors", ",", "prefix", ")", ",", "}", "return", "self", ".", "json_to_response", "(", "status", "=", "400", ",", "json_data", "=", "data", ",", "json_status", "=", "AjaxResponseStatus", ".", "ERROR", ")", "return", "response" ]
If form invalid return error list in JSON response
[ "If", "form", "invalid", "return", "error", "list", "in", "JSON", "response" ]
a723eaa90490e57f1b2dfbd8d10905b9e2541f88
https://github.com/dipcode-software/django-ajax-cbv/blob/a723eaa90490e57f1b2dfbd8d10905b9e2541f88/ajax_cbv/mixins.py#L82-L91
train
dipcode-software/django-ajax-cbv
ajax_cbv/mixins.py
FormAjaxMixin.form_valid
def form_valid(self, form): """ If form valid return response with action """ response = super(FormAjaxMixin, self).form_valid(form) if self.request.is_ajax(): return self.json_to_response() return response
python
def form_valid(self, form): """ If form valid return response with action """ response = super(FormAjaxMixin, self).form_valid(form) if self.request.is_ajax(): return self.json_to_response() return response
[ "def", "form_valid", "(", "self", ",", "form", ")", ":", "response", "=", "super", "(", "FormAjaxMixin", ",", "self", ")", ".", "form_valid", "(", "form", ")", "if", "self", ".", "request", ".", "is_ajax", "(", ")", ":", "return", "self", ".", "json_to_response", "(", ")", "return", "response" ]
If form valid return response with action
[ "If", "form", "valid", "return", "response", "with", "action" ]
a723eaa90490e57f1b2dfbd8d10905b9e2541f88
https://github.com/dipcode-software/django-ajax-cbv/blob/a723eaa90490e57f1b2dfbd8d10905b9e2541f88/ajax_cbv/mixins.py#L99-L104
train
dipcode-software/django-ajax-cbv
ajax_cbv/mixins.py
FormAjaxMixin.add_prefix
def add_prefix(self, errors, prefix):
        """Add form prefix to errors"""
        if not prefix:
            prefix = self.get_prefix()

        if prefix:
            return {"%s-%s" % (prefix, k): v for k, v in errors.items()}
        return errors
python
def add_prefix(self, errors, prefix): """Add form prefix to errors""" if not prefix: prefix = self.get_prefix() if prefix: return {"%s-%s" % (prefix, k): v for k, v in errors.items()} return errors
[ "def", "add_prefix", "(", "self", ",", "errors", ",", "prefix", ")", ":", "if", "not", "prefix", ":", "prefix", "=", "self", ".", "get_prefix", "(", ")", "if", "prefix", ":", "return", "{", "\"%s-%s\"", "%", "(", "prefix", ",", "k", ")", ":", "v", "for", "k", ",", "v", "in", "errors", ".", "items", "(", ")", "}", "return", "errors" ]
Add form prefix to errors
[ "Add", "form", "prefix", "to", "errors" ]
a723eaa90490e57f1b2dfbd8d10905b9e2541f88
https://github.com/dipcode-software/django-ajax-cbv/blob/a723eaa90490e57f1b2dfbd8d10905b9e2541f88/ajax_cbv/mixins.py#L106-L112
train
dipcode-software/django-ajax-cbv
ajax_cbv/mixins.py
PartialAjaxMixin.render_to_response
def render_to_response(self, context, **response_kwargs):
        """ Returns the rendered template in JSON format """
        if self.request.is_ajax():
            data = {
                "content": render_to_string(
                    self.get_template_names(), context, request=self.request)
            }
            return JsonResponse(data)

        if settings.DEBUG:
            return super(PartialAjaxMixin, self).render_to_response(
                context, **response_kwargs)
        raise Http404()
python
def render_to_response(self, context, **response_kwargs): """ Returns the rendered template in JSON format """ if self.request.is_ajax(): data = { "content": render_to_string( self.get_template_names(), context, request=self.request) } return JsonResponse(data) if settings.DEBUG: return super(PartialAjaxMixin, self).render_to_response( context, **response_kwargs) raise Http404()
[ "def", "render_to_response", "(", "self", ",", "context", ",", "*", "*", "response_kwargs", ")", ":", "if", "self", ".", "request", ".", "is_ajax", "(", ")", ":", "data", "=", "{", "\"content\"", ":", "render_to_string", "(", "self", ".", "get_template_names", "(", ")", ",", "context", ",", "request", "=", "self", ".", "request", ")", "}", "return", "JsonResponse", "(", "data", ")", "if", "settings", ".", "DEBUG", ":", "return", "super", "(", "PartialAjaxMixin", ",", "self", ")", ".", "render_to_response", "(", "context", ",", "*", "*", "response_kwargs", ")", "raise", "Http404", "(", ")" ]
Returns the rendered template in JSON format
[ "Returns", "the", "rendered", "template", "in", "JSON", "format" ]
a723eaa90490e57f1b2dfbd8d10905b9e2541f88
https://github.com/dipcode-software/django-ajax-cbv/blob/a723eaa90490e57f1b2dfbd8d10905b9e2541f88/ajax_cbv/mixins.py#L131-L142
train
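The four django-ajax-cbv records above document mixin methods in isolation. As a rough orientation, the sketch below shows how such mixins are typically attached to a Django class-based view; the `ajax_cbv.mixins` import path is inferred from the `ajax_cbv/mixins.py` file path in the records, and the form, template and URL names are hypothetical, so treat this as an illustrative sketch rather than the package's documented usage.

# Illustrative sketch only; assumes a configured Django project and that the
# mixins are importable from ajax_cbv.mixins as the record paths suggest.
from django import forms
from django.views.generic import FormView

from ajax_cbv.mixins import FormAjaxMixin  # assumed import path


class ContactForm(forms.Form):
    name = forms.CharField()
    email = forms.EmailField()


class ContactFormView(FormAjaxMixin, FormView):
    # AJAX POSTs get JSON back: an errors_list payload with HTTP 400 when the
    # form is invalid, a success payload from json_to_response() when valid.
    # Non-AJAX requests fall through to the normal FormView behaviour.
    form_class = ContactForm
    template_name = "contact.html"   # hypothetical template
    success_url = "/thanks/"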
romanorac/discomll
discomll/clustering/kmeans.py
random_init_map
def random_init_map(interface, state, label, inp):
    """Assign datapoint `e` randomly to one of the `k` clusters."""
    import random
    out = interface.output(0)
    centers = {}

    for row in inp:
        row = row.strip().split(state["delimiter"])
        if len(row) > 1:
            x = [(0 if row[i] in state["missing_vals"] else float(row[i])) for i in state["X_indices"]]
            cluster = random.randint(0, state['k'] - 1)
            vertex = state['create'](x, 1.0)
            centers[cluster] = vertex if cluster not in centers else state["update"](centers[cluster], vertex)

    for cluster, values in centers.iteritems():
        out.add(cluster, values)
python
def random_init_map(interface, state, label, inp): """Assign datapoint `e` randomly to one of the `k` clusters.""" import random out = interface.output(0) centers = {} for row in inp: row = row.strip().split(state["delimiter"]) if len(row) > 1: x = [(0 if row[i] in state["missing_vals"] else float(row[i])) for i in state["X_indices"]] cluster = random.randint(0, state['k'] - 1) vertex = state['create'](x, 1.0) centers[cluster] = vertex if cluster not in centers else state["update"](centers[cluster], vertex) for cluster, values in centers.iteritems(): out.add(cluster, values)
[ "def", "random_init_map", "(", "interface", ",", "state", ",", "label", ",", "inp", ")", ":", "import", "random", "out", "=", "interface", ".", "output", "(", "0", ")", "centers", "=", "{", "}", "for", "row", "in", "inp", ":", "row", "=", "row", ".", "strip", "(", ")", ".", "split", "(", "state", "[", "\"delimiter\"", "]", ")", "if", "len", "(", "row", ")", ">", "1", ":", "x", "=", "[", "(", "0", "if", "row", "[", "i", "]", "in", "state", "[", "\"missing_vals\"", "]", "else", "float", "(", "row", "[", "i", "]", ")", ")", "for", "i", "in", "state", "[", "\"X_indices\"", "]", "]", "cluster", "=", "random", ".", "randint", "(", "0", ",", "state", "[", "'k'", "]", "-", "1", ")", "vertex", "=", "state", "[", "'create'", "]", "(", "x", ",", "1.0", ")", "centers", "[", "cluster", "]", "=", "vertex", "if", "cluster", "not", "in", "centers", "else", "state", "[", "\"update\"", "]", "(", "centers", "[", "cluster", "]", ",", "vertex", ")", "for", "cluster", ",", "values", "in", "centers", ".", "iteritems", "(", ")", ":", "out", ".", "add", "(", "cluster", ",", "values", ")" ]
Assign datapoint `e` randomly to one of the `k` clusters.
[ "Assign", "datapoint", "e", "randomly", "to", "one", "of", "the", "k", "clusters", "." ]
a4703daffb2ba3c9f614bc3dbe45ae55884aea00
https://github.com/romanorac/discomll/blob/a4703daffb2ba3c9f614bc3dbe45ae55884aea00/discomll/clustering/kmeans.py#L31-L45
train
romanorac/discomll
discomll/clustering/kmeans.py
estimate_map
def estimate_map(interface, state, label, inp):
    """Find the cluster `i` that is closest to the datapoint `e`."""
    out = interface.output(0)
    centers = {}

    for row in inp:
        row = row.strip().split(state["delimiter"])
        if len(row) > 1:
            x = [(0 if row[i] in state["missing_vals"] else float(row[i])) for i in state["X_indices"]]
            cluster = min((state['dist'](c, x), i) for i, c in state['centers'])[1]
            vertex = state['create'](x, 1.0)
            centers[cluster] = vertex if cluster not in centers else state["update"](centers[cluster], vertex)

    for cluster, values in centers.iteritems():
        out.add(cluster, values)
python
def estimate_map(interface, state, label, inp): """Find the cluster `i` that is closest to the datapoint `e`.""" out = interface.output(0) centers = {} for row in inp: row = row.strip().split(state["delimiter"]) if len(row) > 1: x = [(0 if row[i] in state["missing_vals"] else float(row[i])) for i in state["X_indices"]] cluster = min((state['dist'](c, x), i) for i, c in state['centers'])[1] vertex = state['create'](x, 1.0) centers[cluster] = vertex if cluster not in centers else state["update"](centers[cluster], vertex) for cluster, values in centers.iteritems(): out.add(cluster, values)
[ "def", "estimate_map", "(", "interface", ",", "state", ",", "label", ",", "inp", ")", ":", "out", "=", "interface", ".", "output", "(", "0", ")", "centers", "=", "{", "}", "for", "row", "in", "inp", ":", "row", "=", "row", ".", "strip", "(", ")", ".", "split", "(", "state", "[", "\"delimiter\"", "]", ")", "if", "len", "(", "row", ")", ">", "1", ":", "x", "=", "[", "(", "0", "if", "row", "[", "i", "]", "in", "state", "[", "\"missing_vals\"", "]", "else", "float", "(", "row", "[", "i", "]", ")", ")", "for", "i", "in", "state", "[", "\"X_indices\"", "]", "]", "cluster", "=", "min", "(", "(", "state", "[", "'dist'", "]", "(", "c", ",", "x", ")", ",", "i", ")", "for", "i", ",", "c", "in", "state", "[", "'centers'", "]", ")", "[", "1", "]", "vertex", "=", "state", "[", "'create'", "]", "(", "x", ",", "1.0", ")", "centers", "[", "cluster", "]", "=", "vertex", "if", "cluster", "not", "in", "centers", "else", "state", "[", "\"update\"", "]", "(", "centers", "[", "cluster", "]", ",", "vertex", ")", "for", "cluster", ",", "values", "in", "centers", ".", "iteritems", "(", ")", ":", "out", ".", "add", "(", "cluster", ",", "values", ")" ]
Find the cluster `i` that is closest to the datapoint `e`.
[ "Find", "the", "cluster", "i", "that", "is", "closest", "to", "the", "datapoint", "e", "." ]
a4703daffb2ba3c9f614bc3dbe45ae55884aea00
https://github.com/romanorac/discomll/blob/a4703daffb2ba3c9f614bc3dbe45ae55884aea00/discomll/clustering/kmeans.py#L48-L61
train
romanorac/discomll
discomll/clustering/kmeans.py
estimate_reduce
def estimate_reduce(interface, state, label, inp):
    """Estimate the cluster centers for each cluster."""
    centers = {}
    for i, c in inp:
        centers[i] = c if i not in centers else state['update'](centers[i], c)

    out = interface.output(0)
    for i, c in centers.items():
        out.add(i, state['finalize'](c))
python
def estimate_reduce(interface, state, label, inp): """Estimate the cluster centers for each cluster.""" centers = {} for i, c in inp: centers[i] = c if i not in centers else state['update'](centers[i], c) out = interface.output(0) for i, c in centers.items(): out.add(i, state['finalize'](c))
[ "def", "estimate_reduce", "(", "interface", ",", "state", ",", "label", ",", "inp", ")", ":", "centers", "=", "{", "}", "for", "i", ",", "c", "in", "inp", ":", "centers", "[", "i", "]", "=", "c", "if", "i", "not", "in", "centers", "else", "state", "[", "'update'", "]", "(", "centers", "[", "i", "]", ",", "c", ")", "out", "=", "interface", ".", "output", "(", "0", ")", "for", "i", ",", "c", "in", "centers", ".", "items", "(", ")", ":", "out", ".", "add", "(", "i", ",", "state", "[", "'finalize'", "]", "(", "c", ")", ")" ]
Estimate the cluster centers for each cluster.
[ "Estimate", "the", "cluster", "centers", "for", "each", "cluster", "." ]
a4703daffb2ba3c9f614bc3dbe45ae55884aea00
https://github.com/romanorac/discomll/blob/a4703daffb2ba3c9f614bc3dbe45ae55884aea00/discomll/clustering/kmeans.py#L64-L72
train
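The three kmeans records above push data points through opaque `create`, `update` and `finalize` callbacks taken from `state` (filled in from `mean_point_center`, which is not part of these records). A plausible reading, sketched below purely as an assumption, is a running (weighted sum, weight) aggregate that finalizes to a mean vector; the real discomll helpers may differ.

# Standalone sketch of create/update/finalize as a streaming mean. This is an
# assumption about what `mean_point_center` provides, not its actual code.

def create(x, weight):
    # a single data point becomes a (weighted sum, total weight) pair
    return ([xi * weight for xi in x], weight)

def update(a, b):
    # merge two partial aggregates element-wise
    return ([ai + bi for ai, bi in zip(a[0], b[0])], a[1] + b[1])

def finalize(agg):
    # turn the aggregate into a cluster center (the mean vector)
    total, count = agg
    return [t / count for t in total]

if __name__ == "__main__":
    points = [[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]]
    agg = create(points[0], 1.0)
    for p in points[1:]:
        agg = update(agg, create(p, 1.0))
    print(finalize(agg))  # [3.0, 4.0]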
romanorac/discomll
discomll/clustering/kmeans.py
predict_map
def predict_map(interface, state, label, inp):
    """Determine the closest cluster for the datapoint `e`."""
    out = interface.output(0)

    for row in inp:
        if len(row) > 1:
            row = row.strip().split(state["delimiter"])
            x_id = "" if state["id_index"] == -1 else row[state["id_index"]]
            x = [(0 if row[i] in state["missing_vals"] else float(row[i])) for i in state["X_indices"]]
            out.add(x_id, min([(i, state["dist"](c, x)) for i, c in state["centers"]], key=lambda t: t[1]))
python
def predict_map(interface, state, label, inp): """Determine the closest cluster for the datapoint `e`.""" out = interface.output(0) for row in inp: if len(row) > 1: row = row.strip().split(state["delimiter"]) x_id = "" if state["id_index"] == -1 else row[state["id_index"]] x = [(0 if row[i] in state["missing_vals"] else float(row[i])) for i in state["X_indices"]] out.add(x_id, min([(i, state["dist"](c, x)) for i, c in state["centers"]], key=lambda t: t[1]))
[ "def", "predict_map", "(", "interface", ",", "state", ",", "label", ",", "inp", ")", ":", "out", "=", "interface", ".", "output", "(", "0", ")", "for", "row", "in", "inp", ":", "if", "len", "(", "row", ")", ">", "1", ":", "row", "=", "row", ".", "strip", "(", ")", ".", "split", "(", "state", "[", "\"delimiter\"", "]", ")", "x_id", "=", "\"\"", "if", "state", "[", "\"id_index\"", "]", "==", "-", "1", "else", "row", "[", "state", "[", "\"id_index\"", "]", "]", "x", "=", "[", "(", "0", "if", "row", "[", "i", "]", "in", "state", "[", "\"missing_vals\"", "]", "else", "float", "(", "row", "[", "i", "]", ")", ")", "for", "i", "in", "state", "[", "\"X_indices\"", "]", "]", "out", ".", "add", "(", "x_id", ",", "min", "(", "[", "(", "i", ",", "state", "[", "\"dist\"", "]", "(", "c", ",", "x", ")", ")", "for", "i", ",", "c", "in", "state", "[", "\"centers\"", "]", "]", ",", "key", "=", "lambda", "t", ":", "t", "[", "1", "]", ")", ")" ]
Determine the closest cluster for the datapoint `e`.
[ "Determine", "the", "closest", "cluster", "for", "the", "datapoint", "e", "." ]
a4703daffb2ba3c9f614bc3dbe45ae55884aea00
https://github.com/romanorac/discomll/blob/a4703daffb2ba3c9f614bc3dbe45ae55884aea00/discomll/clustering/kmeans.py#L75-L83
train
romanorac/discomll
discomll/clustering/kmeans.py
fit
def fit(dataset, n_clusters=5, max_iterations=10, random_state=None, save_results=True, show=False):
    """
    Optimize k-clustering for `iterations` iterations with cluster center
    definitions as given in `center`.
    """
    from disco.job import Job
    from disco.worker.pipeline.worker import Worker, Stage
    from disco.core import result_iterator

    try:
        n_clusters = int(n_clusters)
        max_iterations = int(max_iterations)
        if n_clusters < 2:
            raise Exception("Parameter n_clusters should be greater than 1.")
        if max_iterations < 1:
            raise Exception("Parameter max_iterations should be greater than 0.")
    except ValueError:
        raise Exception("Parameters should be numerical.")

    job = Job(worker=Worker(save_results=save_results))
    job.pipeline = [("split", Stage("kmeans_init_map", input_chain=dataset.params["input_chain"],
                                    init=map_init, process=random_init_map)),
                    ('group_label', Stage("kmeans_init_reduce", process=estimate_reduce,
                                          init=simple_init, combine=True))]
    job.params = dict(dataset.params.items() + mean_point_center.items())
    job.params['seed'] = random_state
    job.params['k'] = n_clusters

    job.run(input=dataset.params["data_tag"], name="kmeans_init")
    init = job.wait(show=show)
    centers = [(i, c) for i, c in result_iterator(init)]

    for j in range(max_iterations):
        job = Job(worker=Worker(save_results=save_results))
        job.params = dict(dataset.params.items() + mean_point_center.items())
        job.params['k'] = n_clusters
        job.params['centers'] = centers

        job.pipeline = [('split', Stage("kmeans_map_iter_%s" % (j + 1,),
                                        input_chain=dataset.params["input_chain"],
                                        process=estimate_map, init=simple_init)),
                        ('group_label', Stage("kmeans_reduce_iter_%s" % (j + 1,),
                                              process=estimate_reduce, init=simple_init,
                                              combine=True))]

        job.run(input=dataset.params["data_tag"], name='kmeans_iter_%d' % (j + 1,))
        fitmodel_url = job.wait(show=show)
        centers = [(i, c) for i, c in result_iterator(fitmodel_url)]

    return {"kmeans_fitmodel": fitmodel_url}
python
def fit(dataset, n_clusters=5, max_iterations=10, random_state=None, save_results=True, show=False): """ Optimize k-clustering for `iterations` iterations with cluster center definitions as given in `center`. """ from disco.job import Job from disco.worker.pipeline.worker import Worker, Stage from disco.core import result_iterator try: n_clusters = int(n_clusters) max_iterations = int(max_iterations) if n_clusters < 2: raise Exception("Parameter n_clusters should be greater than 1.") if max_iterations < 1: raise Exception("Parameter max_iterations should be greater than 0.") except ValueError: raise Exception("Parameters should be numerical.") job = Job(worker=Worker(save_results=save_results)) job.pipeline = [("split", Stage("kmeans_init_map", input_chain=dataset.params["input_chain"], init=map_init, process=random_init_map)), ('group_label', Stage("kmeans_init_reduce", process=estimate_reduce, init=simple_init, combine=True))] job.params = dict(dataset.params.items() + mean_point_center.items()) job.params['seed'] = random_state job.params['k'] = n_clusters job.run(input=dataset.params["data_tag"], name="kmeans_init") init = job.wait(show=show) centers = [(i, c) for i, c in result_iterator(init)] for j in range(max_iterations): job = Job(worker=Worker(save_results=save_results)) job.params = dict(dataset.params.items() + mean_point_center.items()) job.params['k'] = n_clusters job.params['centers'] = centers job.pipeline = [('split', Stage("kmeans_map_iter_%s" % (j + 1,), input_chain=dataset.params["input_chain"], process=estimate_map, init=simple_init)), ('group_label', Stage("kmeans_reduce_iter_%s" % (j + 1,), process=estimate_reduce, init=simple_init, combine=True))] job.run(input=dataset.params["data_tag"], name='kmeans_iter_%d' % (j + 1,)) fitmodel_url = job.wait(show=show) centers = [(i, c) for i, c in result_iterator(fitmodel_url)] return {"kmeans_fitmodel": fitmodel_url}
[ "def", "fit", "(", "dataset", ",", "n_clusters", "=", "5", ",", "max_iterations", "=", "10", ",", "random_state", "=", "None", ",", "save_results", "=", "True", ",", "show", "=", "False", ")", ":", "from", "disco", ".", "job", "import", "Job", "from", "disco", ".", "worker", ".", "pipeline", ".", "worker", "import", "Worker", ",", "Stage", "from", "disco", ".", "core", "import", "result_iterator", "try", ":", "n_clusters", "=", "int", "(", "n_clusters", ")", "max_iterations", "=", "int", "(", "max_iterations", ")", "if", "n_clusters", "<", "2", ":", "raise", "Exception", "(", "\"Parameter n_clusters should be greater than 1.\"", ")", "if", "max_iterations", "<", "1", ":", "raise", "Exception", "(", "\"Parameter max_iterations should be greater than 0.\"", ")", "except", "ValueError", ":", "raise", "Exception", "(", "\"Parameters should be numerical.\"", ")", "job", "=", "Job", "(", "worker", "=", "Worker", "(", "save_results", "=", "save_results", ")", ")", "job", ".", "pipeline", "=", "[", "(", "\"split\"", ",", "Stage", "(", "\"kmeans_init_map\"", ",", "input_chain", "=", "dataset", ".", "params", "[", "\"input_chain\"", "]", ",", "init", "=", "map_init", ",", "process", "=", "random_init_map", ")", ")", ",", "(", "'group_label'", ",", "Stage", "(", "\"kmeans_init_reduce\"", ",", "process", "=", "estimate_reduce", ",", "init", "=", "simple_init", ",", "combine", "=", "True", ")", ")", "]", "job", ".", "params", "=", "dict", "(", "dataset", ".", "params", ".", "items", "(", ")", "+", "mean_point_center", ".", "items", "(", ")", ")", "job", ".", "params", "[", "'seed'", "]", "=", "random_state", "job", ".", "params", "[", "'k'", "]", "=", "n_clusters", "job", ".", "run", "(", "input", "=", "dataset", ".", "params", "[", "\"data_tag\"", "]", ",", "name", "=", "\"kmeans_init\"", ")", "init", "=", "job", ".", "wait", "(", "show", "=", "show", ")", "centers", "=", "[", "(", "i", ",", "c", ")", "for", "i", ",", "c", "in", "result_iterator", "(", "init", ")", "]", "for", "j", "in", "range", "(", "max_iterations", ")", ":", "job", "=", "Job", "(", "worker", "=", "Worker", "(", "save_results", "=", "save_results", ")", ")", "job", ".", "params", "=", "dict", "(", "dataset", ".", "params", ".", "items", "(", ")", "+", "mean_point_center", ".", "items", "(", ")", ")", "job", ".", "params", "[", "'k'", "]", "=", "n_clusters", "job", ".", "params", "[", "'centers'", "]", "=", "centers", "job", ".", "pipeline", "=", "[", "(", "'split'", ",", "Stage", "(", "\"kmeans_map_iter_%s\"", "%", "(", "j", "+", "1", ",", ")", ",", "input_chain", "=", "dataset", ".", "params", "[", "\"input_chain\"", "]", ",", "process", "=", "estimate_map", ",", "init", "=", "simple_init", ")", ")", ",", "(", "'group_label'", ",", "Stage", "(", "\"kmeans_reduce_iter_%s\"", "%", "(", "j", "+", "1", ",", ")", ",", "process", "=", "estimate_reduce", ",", "init", "=", "simple_init", ",", "combine", "=", "True", ")", ")", "]", "job", ".", "run", "(", "input", "=", "dataset", ".", "params", "[", "\"data_tag\"", "]", ",", "name", "=", "'kmeans_iter_%d'", "%", "(", "j", "+", "1", ",", ")", ")", "fitmodel_url", "=", "job", ".", "wait", "(", "show", "=", "show", ")", "centers", "=", "[", "(", "i", ",", "c", ")", "for", "i", ",", "c", "in", "result_iterator", "(", "fitmodel_url", ")", "]", "return", "{", "\"kmeans_fitmodel\"", ":", "fitmodel_url", "}" ]
Optimize k-clustering for `iterations` iterations with cluster center definitions as given in `center`.
[ "Optimize", "k", "-", "clustering", "for", "iterations", "iterations", "with", "cluster", "center", "definitions", "as", "given", "in", "center", "." ]
a4703daffb2ba3c9f614bc3dbe45ae55884aea00
https://github.com/romanorac/discomll/blob/a4703daffb2ba3c9f614bc3dbe45ae55884aea00/discomll/clustering/kmeans.py#L86-L135
train
romanorac/discomll
discomll/clustering/kmeans.py
predict
def predict(dataset, fitmodel_url, save_results=True, show=False):
    """
    Predict the closest clusters for the datapoints in input.
    """
    from disco.job import Job
    from disco.worker.pipeline.worker import Worker, Stage
    from disco.core import result_iterator

    if "kmeans_fitmodel" not in fitmodel_url:
        raise Exception("Incorrect fit model.")

    job = Job(worker=Worker(save_results=save_results))
    job.params = dict(dataset.params.items() + mean_point_center.items())
    job.params["centers"] = [(i, c) for i, c in result_iterator(fitmodel_url["kmeans_fitmodel"])]

    job.pipeline = [("split", Stage("kmeans_predict", input_chain=dataset.params["input_chain"],
                                    init=simple_init, process=predict_map))]

    job.run(input=dataset.params["data_tag"], name="kmeans_predict")
    return job.wait(show=show)
python
def predict(dataset, fitmodel_url, save_results=True, show=False): """ Predict the closest clusters for the datapoints in input. """ from disco.job import Job from disco.worker.pipeline.worker import Worker, Stage from disco.core import result_iterator if "kmeans_fitmodel" not in fitmodel_url: raise Exception("Incorrect fit model.") job = Job(worker=Worker(save_results=save_results)) job.params = dict(dataset.params.items() + mean_point_center.items()) job.params["centers"] = [(i, c) for i, c in result_iterator(fitmodel_url["kmeans_fitmodel"])] job.pipeline = [("split", Stage("kmeans_predict", input_chain=dataset.params["input_chain"], init=simple_init, process=predict_map))] job.run(input=dataset.params["data_tag"], name="kmeans_predict") return job.wait(show=show)
[ "def", "predict", "(", "dataset", ",", "fitmodel_url", ",", "save_results", "=", "True", ",", "show", "=", "False", ")", ":", "from", "disco", ".", "job", "import", "Job", "from", "disco", ".", "worker", ".", "pipeline", ".", "worker", "import", "Worker", ",", "Stage", "from", "disco", ".", "core", "import", "result_iterator", "if", "\"kmeans_fitmodel\"", "not", "in", "fitmodel_url", ":", "raise", "Exception", "(", "\"Incorrect fit model.\"", ")", "job", "=", "Job", "(", "worker", "=", "Worker", "(", "save_results", "=", "save_results", ")", ")", "job", ".", "params", "=", "dict", "(", "dataset", ".", "params", ".", "items", "(", ")", "+", "mean_point_center", ".", "items", "(", ")", ")", "job", ".", "params", "[", "\"centers\"", "]", "=", "[", "(", "i", ",", "c", ")", "for", "i", ",", "c", "in", "result_iterator", "(", "fitmodel_url", "[", "\"kmeans_fitmodel\"", "]", ")", "]", "job", ".", "pipeline", "=", "[", "(", "\"split\"", ",", "Stage", "(", "\"kmeans_predict\"", ",", "input_chain", "=", "dataset", ".", "params", "[", "\"input_chain\"", "]", ",", "init", "=", "simple_init", ",", "process", "=", "predict_map", ")", ")", "]", "job", ".", "run", "(", "input", "=", "dataset", ".", "params", "[", "\"data_tag\"", "]", ",", "name", "=", "\"kmeans_predict\"", ")", "return", "job", ".", "wait", "(", "show", "=", "show", ")" ]
Predict the closest clusters for the datapoints in input.
[ "Predict", "the", "closest", "clusters", "for", "the", "datapoints", "in", "input", "." ]
a4703daffb2ba3c9f614bc3dbe45ae55884aea00
https://github.com/romanorac/discomll/blob/a4703daffb2ba3c9f614bc3dbe45ae55884aea00/discomll/clustering/kmeans.py#L138-L159
train
tweekmonster/moult
moult/utils.py
load_stdlib
def load_stdlib():
    '''Scans sys.path for standard library modules.
    '''
    if _stdlib:
        return _stdlib

    prefixes = tuple({os.path.abspath(p) for p in (
        sys.prefix,
        getattr(sys, 'real_prefix', sys.prefix),
        getattr(sys, 'base_prefix', sys.prefix),
    )})

    for sp in sys.path:
        if not sp:
            continue
        _import_paths.append(os.path.abspath(sp))

    stdpaths = tuple({p for p in _import_paths
                      if p.startswith(prefixes) and 'site-packages' not in p})

    _stdlib.update(sys.builtin_module_names)

    for stdpath in stdpaths:
        if not os.path.isdir(stdpath):
            continue

        for item in os.listdir(stdpath):
            if item.startswith('.') or item == 'site-packages':
                continue

            p = os.path.join(stdpath, item)
            if not os.path.isdir(p) and not item.endswith(('.py', '.so')):
                continue

            _stdlib.add(item.split('.', 1)[0])

    return _stdlib
python
def load_stdlib(): '''Scans sys.path for standard library modules. ''' if _stdlib: return _stdlib prefixes = tuple({os.path.abspath(p) for p in ( sys.prefix, getattr(sys, 'real_prefix', sys.prefix), getattr(sys, 'base_prefix', sys.prefix), )}) for sp in sys.path: if not sp: continue _import_paths.append(os.path.abspath(sp)) stdpaths = tuple({p for p in _import_paths if p.startswith(prefixes) and 'site-packages' not in p}) _stdlib.update(sys.builtin_module_names) for stdpath in stdpaths: if not os.path.isdir(stdpath): continue for item in os.listdir(stdpath): if item.startswith('.') or item == 'site-packages': continue p = os.path.join(stdpath, item) if not os.path.isdir(p) and not item.endswith(('.py', '.so')): continue _stdlib.add(item.split('.', 1)[0]) return _stdlib
[ "def", "load_stdlib", "(", ")", ":", "if", "_stdlib", ":", "return", "_stdlib", "prefixes", "=", "tuple", "(", "{", "os", ".", "path", ".", "abspath", "(", "p", ")", "for", "p", "in", "(", "sys", ".", "prefix", ",", "getattr", "(", "sys", ",", "'real_prefix'", ",", "sys", ".", "prefix", ")", ",", "getattr", "(", "sys", ",", "'base_prefix'", ",", "sys", ".", "prefix", ")", ",", ")", "}", ")", "for", "sp", "in", "sys", ".", "path", ":", "if", "not", "sp", ":", "continue", "_import_paths", ".", "append", "(", "os", ".", "path", ".", "abspath", "(", "sp", ")", ")", "stdpaths", "=", "tuple", "(", "{", "p", "for", "p", "in", "_import_paths", "if", "p", ".", "startswith", "(", "prefixes", ")", "and", "'site-packages'", "not", "in", "p", "}", ")", "_stdlib", ".", "update", "(", "sys", ".", "builtin_module_names", ")", "for", "stdpath", "in", "stdpaths", ":", "if", "not", "os", ".", "path", ".", "isdir", "(", "stdpath", ")", ":", "continue", "for", "item", "in", "os", ".", "listdir", "(", "stdpath", ")", ":", "if", "item", ".", "startswith", "(", "'.'", ")", "or", "item", "==", "'site-packages'", ":", "continue", "p", "=", "os", ".", "path", ".", "join", "(", "stdpath", ",", "item", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "p", ")", "and", "not", "item", ".", "endswith", "(", "(", "'.py'", ",", "'.so'", ")", ")", ":", "continue", "_stdlib", ".", "add", "(", "item", ".", "split", "(", "'.'", ",", "1", ")", "[", "0", "]", ")", "return", "_stdlib" ]
Scans sys.path for standard library modules.
[ "Scans", "sys", ".", "path", "for", "standard", "library", "modules", "." ]
38d3a3b9002336219897ebe263ca1d8dcadbecf5
https://github.com/tweekmonster/moult/blob/38d3a3b9002336219897ebe263ca1d8dcadbecf5/moult/utils.py#L18-L54
train
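A minimal usage sketch for the load_stdlib() record above. It assumes the package is importable as `moult` (matching the moult/utils.py path shown); the expected prints follow from the code itself, which only collects module names found under the interpreter prefixes outside site-packages.

# Usage sketch; `moult.utils` import path assumed from the record's file path.
from moult.utils import load_stdlib

stdlib = load_stdlib()       # cached set of top-level stdlib module names
print("json" in stdlib)      # expected True on a normal install
print("requests" in stdlib)  # expected False: site-packages are excluded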
tweekmonster/moult
moult/utils.py
import_path_from_file
def import_path_from_file(filename, as_list=False):
    '''Returns a tuple of the import path and root module directory for the
    supplied file.
    '''
    module_path = []
    basename = os.path.splitext(os.path.basename(filename))[0]
    if basename != '__init__':
        module_path.append(basename)

    dirname = os.path.dirname(filename)
    while os.path.isfile(os.path.join(dirname, '__init__.py')):
        dirname, tail = os.path.split(dirname)
        module_path.insert(0, tail)

    if as_list:
        return module_path, dirname

    return '.'.join(module_path), dirname
python
def import_path_from_file(filename, as_list=False): '''Returns a tuple of the import path and root module directory for the supplied file. ''' module_path = [] basename = os.path.splitext(os.path.basename(filename))[0] if basename != '__init__': module_path.append(basename) dirname = os.path.dirname(filename) while os.path.isfile(os.path.join(dirname, '__init__.py')): dirname, tail = os.path.split(dirname) module_path.insert(0, tail) if as_list: return module_path, dirname return '.'.join(module_path), dirname
[ "def", "import_path_from_file", "(", "filename", ",", "as_list", "=", "False", ")", ":", "module_path", "=", "[", "]", "basename", "=", "os", ".", "path", ".", "splitext", "(", "os", ".", "path", ".", "basename", "(", "filename", ")", ")", "[", "0", "]", "if", "basename", "!=", "'__init__'", ":", "module_path", ".", "append", "(", "basename", ")", "dirname", "=", "os", ".", "path", ".", "dirname", "(", "filename", ")", "while", "os", ".", "path", ".", "isfile", "(", "os", ".", "path", ".", "join", "(", "dirname", ",", "'__init__.py'", ")", ")", ":", "dirname", ",", "tail", "=", "os", ".", "path", ".", "split", "(", "dirname", ")", "module_path", ".", "insert", "(", "0", ",", "tail", ")", "if", "as_list", ":", "return", "module_path", ",", "dirname", "return", "'.'", ".", "join", "(", "module_path", ")", ",", "dirname" ]
Returns a tuple of the import path and root module directory for the supplied file.
[ "Returns", "a", "tuple", "of", "the", "import", "path", "and", "root", "module", "directory", "for", "the", "supplied", "file", "." ]
38d3a3b9002336219897ebe263ca1d8dcadbecf5
https://github.com/tweekmonster/moult/blob/38d3a3b9002336219897ebe263ca1d8dcadbecf5/moult/utils.py#L69-L85
train
tweekmonster/moult
moult/utils.py
file_containing_import
def file_containing_import(import_path, import_root):
    '''Finds the file that might contain the import_path.
    '''
    if not _import_paths:
        load_stdlib()

    if os.path.isfile(import_root):
        import_root = os.path.dirname(import_root)

    search_paths = [import_root] + _import_paths
    module_parts = import_path.split('.')

    for i in range(len(module_parts), 0, -1):
        module_path = os.path.join(*module_parts[:i])
        for sp in search_paths:
            p = os.path.join(sp, module_path)
            if os.path.isdir(p):
                return os.path.join(p, '__init__.py')
            elif os.path.isfile(p + '.py'):
                return p + '.py'

    return None
python
def file_containing_import(import_path, import_root): '''Finds the file that might contain the import_path. ''' if not _import_paths: load_stdlib() if os.path.isfile(import_root): import_root = os.path.dirname(import_root) search_paths = [import_root] + _import_paths module_parts = import_path.split('.') for i in range(len(module_parts), 0, -1): module_path = os.path.join(*module_parts[:i]) for sp in search_paths: p = os.path.join(sp, module_path) if os.path.isdir(p): return os.path.join(p, '__init__.py') elif os.path.isfile(p + '.py'): return p + '.py' return None
[ "def", "file_containing_import", "(", "import_path", ",", "import_root", ")", ":", "if", "not", "_import_paths", ":", "load_stdlib", "(", ")", "if", "os", ".", "path", ".", "isfile", "(", "import_root", ")", ":", "import_root", "=", "os", ".", "path", ".", "dirname", "(", "import_root", ")", "search_paths", "=", "[", "import_root", "]", "+", "_import_paths", "module_parts", "=", "import_path", ".", "split", "(", "'.'", ")", "for", "i", "in", "range", "(", "len", "(", "module_parts", ")", ",", "0", ",", "-", "1", ")", ":", "module_path", "=", "os", ".", "path", ".", "join", "(", "*", "module_parts", "[", ":", "i", "]", ")", "for", "sp", "in", "search_paths", ":", "p", "=", "os", ".", "path", ".", "join", "(", "sp", ",", "module_path", ")", "if", "os", ".", "path", ".", "isdir", "(", "p", ")", ":", "return", "os", ".", "path", ".", "join", "(", "p", ",", "'__init__.py'", ")", "elif", "os", ".", "path", ".", "isfile", "(", "p", "+", "'.py'", ")", ":", "return", "p", "+", "'.py'", "return", "None" ]
Finds the file that might contain the import_path.
[ "Finds", "the", "file", "that", "might", "contain", "the", "import_path", "." ]
38d3a3b9002336219897ebe263ca1d8dcadbecf5
https://github.com/tweekmonster/moult/blob/38d3a3b9002336219897ebe263ca1d8dcadbecf5/moult/utils.py#L88-L107
train
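A small, self-contained sketch exercising file_containing_import() against a throwaway package layout. The `moult.utils` import path is assumed from the record's file path; the expected results follow from the lookup order shown in the code (package directory first, then a matching .py file, otherwise None).

# Usage sketch; builds a temporary pkg/ with __init__.py and mod.py.
import os
import tempfile

from moult.utils import file_containing_import  # assumed import path

with tempfile.TemporaryDirectory() as root:
    pkg = os.path.join(root, "pkg")
    os.makedirs(pkg)
    for name in ("__init__.py", "mod.py"):
        open(os.path.join(pkg, name), "w").close()

    print(file_containing_import("pkg.mod", root))         # .../pkg/mod.py
    print(file_containing_import("pkg", root))             # .../pkg/__init__.py
    print(file_containing_import("no.such.module", root))  # expected None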
tweekmonster/moult
moult/utils.py
resolve_import
def resolve_import(import_path, from_module):
    '''Resolves relative imports from a module.
    '''
    if not import_path or not import_path.startswith('.'):
        return import_path

    from_module = from_module.split('.')
    dots = 0
    for c in import_path:
        if c == '.':
            dots += 1
        else:
            break

    if dots:
        from_module = from_module[:-dots]
        import_path = import_path[dots:]

    if import_path:
        from_module.append(import_path)

    return '.'.join(from_module)
python
def resolve_import(import_path, from_module): '''Resolves relative imports from a module. ''' if not import_path or not import_path.startswith('.'): return import_path from_module = from_module.split('.') dots = 0 for c in import_path: if c == '.': dots += 1 else: break if dots: from_module = from_module[:-dots] import_path = import_path[dots:] if import_path: from_module.append(import_path) return '.'.join(from_module)
[ "def", "resolve_import", "(", "import_path", ",", "from_module", ")", ":", "if", "not", "import_path", "or", "not", "import_path", ".", "startswith", "(", "'.'", ")", ":", "return", "import_path", "from_module", "=", "from_module", ".", "split", "(", "'.'", ")", "dots", "=", "0", "for", "c", "in", "import_path", ":", "if", "c", "==", "'.'", ":", "dots", "+=", "1", "else", ":", "break", "if", "dots", ":", "from_module", "=", "from_module", "[", ":", "-", "dots", "]", "import_path", "=", "import_path", "[", "dots", ":", "]", "if", "import_path", ":", "from_module", ".", "append", "(", "import_path", ")", "return", "'.'", ".", "join", "(", "from_module", ")" ]
Resolves relative imports from a module.
[ "Resolves", "relative", "imports", "from", "a", "module", "." ]
38d3a3b9002336219897ebe263ca1d8dcadbecf5
https://github.com/tweekmonster/moult/blob/38d3a3b9002336219897ebe263ca1d8dcadbecf5/moult/utils.py#L110-L131
train
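A usage sketch for the resolve_import() record above. The expected values are derived directly from the code shown (each leading dot strips one component from the source module path); only the `moult.utils` import path is an assumption.

# Usage sketch; import path assumed from the record's file path.
from moult.utils import resolve_import

print(resolve_import("os.path", "pkg.mod"))             # 'os.path' (absolute, unchanged)
print(resolve_import(".sibling", "pkg.mod"))            # 'pkg.sibling'
print(resolve_import("..other.helper", "pkg.sub.mod"))  # 'pkg.other.helper'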
tweekmonster/moult
moult/utils.py
find_package
def find_package(name, installed, package=False):
    '''Finds a package in the installed list.

    If `package` is true, match package names, otherwise, match import paths.
    '''
    if package:
        name = name.lower()
        tests = (
            lambda x: x.user and name == x.name.lower(),
            lambda x: x.local and name == x.name.lower(),
            lambda x: name == x.name.lower(),
        )
    else:
        tests = (
            lambda x: x.user and name in x.import_names,
            lambda x: x.local and name in x.import_names,
            lambda x: name in x.import_names,
        )

    for t in tests:
        try:
            found = list(filter(t, installed))
            if found and not found[0].is_scan:
                return found[0]
        except StopIteration:
            pass

    return None
python
def find_package(name, installed, package=False): '''Finds a package in the installed list. If `package` is true, match package names, otherwise, match import paths. ''' if package: name = name.lower() tests = ( lambda x: x.user and name == x.name.lower(), lambda x: x.local and name == x.name.lower(), lambda x: name == x.name.lower(), ) else: tests = ( lambda x: x.user and name in x.import_names, lambda x: x.local and name in x.import_names, lambda x: name in x.import_names, ) for t in tests: try: found = list(filter(t, installed)) if found and not found[0].is_scan: return found[0] except StopIteration: pass return None
[ "def", "find_package", "(", "name", ",", "installed", ",", "package", "=", "False", ")", ":", "if", "package", ":", "name", "=", "name", ".", "lower", "(", ")", "tests", "=", "(", "lambda", "x", ":", "x", ".", "user", "and", "name", "==", "x", ".", "name", ".", "lower", "(", ")", ",", "lambda", "x", ":", "x", ".", "local", "and", "name", "==", "x", ".", "name", ".", "lower", "(", ")", ",", "lambda", "x", ":", "name", "==", "x", ".", "name", ".", "lower", "(", ")", ",", ")", "else", ":", "tests", "=", "(", "lambda", "x", ":", "x", ".", "user", "and", "name", "in", "x", ".", "import_names", ",", "lambda", "x", ":", "x", ".", "local", "and", "name", "in", "x", ".", "import_names", ",", "lambda", "x", ":", "name", "in", "x", ".", "import_names", ",", ")", "for", "t", "in", "tests", ":", "try", ":", "found", "=", "list", "(", "filter", "(", "t", ",", "installed", ")", ")", "if", "found", "and", "not", "found", "[", "0", "]", ".", "is_scan", ":", "return", "found", "[", "0", "]", "except", "StopIteration", ":", "pass", "return", "None" ]
Finds a package in the installed list. If `package` is true, match package names, otherwise, match import paths.
[ "Finds", "a", "package", "in", "the", "installed", "list", "." ]
38d3a3b9002336219897ebe263ca1d8dcadbecf5
https://github.com/tweekmonster/moult/blob/38d3a3b9002336219897ebe263ca1d8dcadbecf5/moult/utils.py#L134-L160
train
tweekmonster/moult
moult/utils.py
is_script
def is_script(filename):
    '''Checks if a file has a hashbang.
    '''
    if not os.path.isfile(filename):
        return False

    try:
        with open(filename, 'rb') as fp:
            return fp.read(2) == b'#!'
    except IOError:
        pass

    return False
python
def is_script(filename): '''Checks if a file has a hashbang. ''' if not os.path.isfile(filename): return False try: with open(filename, 'rb') as fp: return fp.read(2) == b'#!' except IOError: pass return False
[ "def", "is_script", "(", "filename", ")", ":", "if", "not", "os", ".", "path", ".", "isfile", "(", "filename", ")", ":", "return", "False", "try", ":", "with", "open", "(", "filename", ",", "'rb'", ")", "as", "fp", ":", "return", "fp", ".", "read", "(", "2", ")", "==", "b'#!'", "except", "IOError", ":", "pass", "return", "False" ]
Checks if a file has a hashbang.
[ "Checks", "if", "a", "file", "has", "a", "hashbang", "." ]
38d3a3b9002336219897ebe263ca1d8dcadbecf5
https://github.com/tweekmonster/moult/blob/38d3a3b9002336219897ebe263ca1d8dcadbecf5/moult/utils.py#L163-L175
train
tweekmonster/moult
moult/utils.py
is_python_script
def is_python_script(filename):
    '''Checks a file to see if it's a python script of some sort.
    '''
    if filename.lower().endswith('.py'):
        return True

    if not os.path.isfile(filename):
        return False

    try:
        with open(filename, 'rb') as fp:
            if fp.read(2) != b'#!':
                return False
            return re.match(r'.*python', str_(fp.readline()))
    except IOError:
        pass

    return False
python
def is_python_script(filename): '''Checks a file to see if it's a python script of some sort. ''' if filename.lower().endswith('.py'): return True if not os.path.isfile(filename): return False try: with open(filename, 'rb') as fp: if fp.read(2) != b'#!': return False return re.match(r'.*python', str_(fp.readline())) except IOError: pass return False
[ "def", "is_python_script", "(", "filename", ")", ":", "if", "filename", ".", "lower", "(", ")", ".", "endswith", "(", "'.py'", ")", ":", "return", "True", "if", "not", "os", ".", "path", ".", "isfile", "(", "filename", ")", ":", "return", "False", "try", ":", "with", "open", "(", "filename", ",", "'rb'", ")", "as", "fp", ":", "if", "fp", ".", "read", "(", "2", ")", "!=", "b'#!'", ":", "return", "False", "return", "re", ".", "match", "(", "r'.*python'", ",", "str_", "(", "fp", ".", "readline", "(", ")", ")", ")", "except", "IOError", ":", "pass", "return", "False" ]
Checks a file to see if it's a python script of some sort.
[ "Checks", "a", "file", "to", "see", "if", "it", "s", "a", "python", "script", "of", "some", "sort", "." ]
38d3a3b9002336219897ebe263ca1d8dcadbecf5
https://github.com/tweekmonster/moult/blob/38d3a3b9002336219897ebe263ca1d8dcadbecf5/moult/utils.py#L178-L195
train
catch22/pw
pw/store.py
Store.search
def search(self, key_pattern: str, user_pattern: str) -> List[Entry]:
        """Search database for given key and user pattern."""
        # normalize key
        key_pattern = _normalized_key(key_pattern)

        # search
        results = []
        for entry in self.entries:
            if key_pattern in entry.key and user_pattern in entry.user:
                results.append(entry)

        # sort results according to key (stability of sorted() ensures that the order of accounts for any given key remains untouched)
        return sorted(results, key=lambda e: e.key)
python
def search(self, key_pattern: str, user_pattern: str) -> List[Entry]: """Search database for given key and user pattern.""" # normalize key key_pattern = _normalized_key(key_pattern) # search results = [] for entry in self.entries: if key_pattern in entry.key and user_pattern in entry.user: results.append(entry) # sort results according to key (stability of sorted() ensures that the order of accounts for any given key remains untouched) return sorted(results, key=lambda e: e.key)
[ "def", "search", "(", "self", ",", "key_pattern", ":", "str", ",", "user_pattern", ":", "str", ")", "->", "List", "[", "Entry", "]", ":", "# normalize key", "key_pattern", "=", "_normalized_key", "(", "key_pattern", ")", "# search", "results", "=", "[", "]", "for", "entry", "in", "self", ".", "entries", ":", "if", "key_pattern", "in", "entry", ".", "key", "and", "user_pattern", "in", "entry", ".", "user", ":", "results", ".", "append", "(", "entry", ")", "# sort results according to key (stability of sorted() ensures that the order of accounts for any given key remains untouched)", "return", "sorted", "(", "results", ",", "key", "=", "lambda", "e", ":", "e", ".", "key", ")" ]
Search database for given key and user pattern.
[ "Search", "database", "for", "given", "key", "and", "user", "pattern", "." ]
2452924bbdccad28b21290b6ce062809c3d1c5f2
https://github.com/catch22/pw/blob/2452924bbdccad28b21290b6ce062809c3d1c5f2/pw/store.py#L23-L35
train
catch22/pw
pw/store.py
Store.load
def load(path: str) -> "Store":
        """Load password store from file."""
        # load source (decrypting if necessary)
        if _gpg.is_encrypted(path):
            src_bytes = _gpg.decrypt(path)
        else:
            src_bytes = open(path, "rb").read()
        src = src_bytes.decode("utf-8")

        # parse database source
        ext = _gpg.unencrypted_ext(path)
        assert ext not in [
            ".yml",
            ".yaml",
        ], "YAML support was removed in version 0.12.0"
        entries = _parse_entries(src)

        return Store(path, entries)
python
def load(path: str) -> "Store": """Load password store from file.""" # load source (decrypting if necessary) if _gpg.is_encrypted(path): src_bytes = _gpg.decrypt(path) else: src_bytes = open(path, "rb").read() src = src_bytes.decode("utf-8") # parse database source ext = _gpg.unencrypted_ext(path) assert ext not in [ ".yml", ".yaml", ], "YAML support was removed in version 0.12.0" entries = _parse_entries(src) return Store(path, entries)
[ "def", "load", "(", "path", ":", "str", ")", "->", "\"Store\"", ":", "# load source (decrypting if necessary)", "if", "_gpg", ".", "is_encrypted", "(", "path", ")", ":", "src_bytes", "=", "_gpg", ".", "decrypt", "(", "path", ")", "else", ":", "src_bytes", "=", "open", "(", "path", ",", "\"rb\"", ")", ".", "read", "(", ")", "src", "=", "src_bytes", ".", "decode", "(", "\"utf-8\"", ")", "# parse database source", "ext", "=", "_gpg", ".", "unencrypted_ext", "(", "path", ")", "assert", "ext", "not", "in", "[", "\".yml\"", ",", "\".yaml\"", ",", "]", ",", "\"YAML support was removed in version 0.12.0\"", "entries", "=", "_parse_entries", "(", "src", ")", "return", "Store", "(", "path", ",", "entries", ")" ]
Load password store from file.
[ "Load", "password", "store", "from", "file", "." ]
2452924bbdccad28b21290b6ce062809c3d1c5f2
https://github.com/catch22/pw/blob/2452924bbdccad28b21290b6ce062809c3d1c5f2/pw/store.py#L38-L55
train
mkoura/dump2polarion
dump2polarion/exporters/transform.py
only_passed_and_wait
def only_passed_and_wait(result):
    """Returns PASS and WAIT results only, skips everything else."""
    verdict = result.get("verdict", "").strip().lower()
    if verdict in Verdicts.PASS + Verdicts.WAIT:
        return result

    return None
python
def only_passed_and_wait(result): """Returns PASS and WAIT results only, skips everything else.""" verdict = result.get("verdict", "").strip().lower() if verdict in Verdicts.PASS + Verdicts.WAIT: return result return None
[ "def", "only_passed_and_wait", "(", "result", ")", ":", "verdict", "=", "result", ".", "get", "(", "\"verdict\"", ",", "\"\"", ")", ".", "strip", "(", ")", ".", "lower", "(", ")", "if", "verdict", "in", "Verdicts", ".", "PASS", "+", "Verdicts", ".", "WAIT", ":", "return", "result", "return", "None" ]
Returns PASS and WAIT results only, skips everything else.
[ "Returns", "PASS", "and", "WAIT", "results", "only", "skips", "everything", "else", "." ]
f4bd24e9d5070e282aad15f1e8bb514c0525cd37
https://github.com/mkoura/dump2polarion/blob/f4bd24e9d5070e282aad15f1e8bb514c0525cd37/dump2polarion/exporters/transform.py#L23-L28
train
mkoura/dump2polarion
dump2polarion/exporters/transform.py
insert_source_info
def insert_source_info(result):
    """Adds info about source of test result if available."""
    comment = result.get("comment")
    # don't change comment if it already exists
    if comment:
        return

    source = result.get("source")
    job_name = result.get("job_name")
    run = result.get("run")
    source_list = [source, job_name, run]
    if not all(source_list):
        return

    source_note = "/".join(source_list)
    source_note = "Source: {}".format(source_note)
    result["comment"] = source_note
python
def insert_source_info(result): """Adds info about source of test result if available.""" comment = result.get("comment") # don't change comment if it already exists if comment: return source = result.get("source") job_name = result.get("job_name") run = result.get("run") source_list = [source, job_name, run] if not all(source_list): return source_note = "/".join(source_list) source_note = "Source: {}".format(source_note) result["comment"] = source_note
[ "def", "insert_source_info", "(", "result", ")", ":", "comment", "=", "result", ".", "get", "(", "\"comment\"", ")", "# don't change comment if it already exists", "if", "comment", ":", "return", "source", "=", "result", ".", "get", "(", "\"source\"", ")", "job_name", "=", "result", ".", "get", "(", "\"job_name\"", ")", "run", "=", "result", ".", "get", "(", "\"run\"", ")", "source_list", "=", "[", "source", ",", "job_name", ",", "run", "]", "if", "not", "all", "(", "source_list", ")", ":", "return", "source_note", "=", "\"/\"", ".", "join", "(", "source_list", ")", "source_note", "=", "\"Source: {}\"", ".", "format", "(", "source_note", ")", "result", "[", "\"comment\"", "]", "=", "source_note" ]
Adds info about source of test result if available.
[ "Adds", "info", "about", "source", "of", "test", "result", "if", "available", "." ]
f4bd24e9d5070e282aad15f1e8bb514c0525cd37
https://github.com/mkoura/dump2polarion/blob/f4bd24e9d5070e282aad15f1e8bb514c0525cd37/dump2polarion/exporters/transform.py#L31-L47
train
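A short sketch of insert_source_info() from the record above. The import path `dump2polarion.exporters.transform` is taken from the record's file path; the expected comments follow from the code, including the early return when a comment already exists.

# Usage sketch; import path assumed from the record's file path.
from dump2polarion.exporters.transform import insert_source_info

result = {"source": "jenkins", "job_name": "nightly", "run": "1234"}
insert_source_info(result)
print(result["comment"])              # Source: jenkins/nightly/1234

already_commented = {"comment": "manual note", "source": "jenkins"}
insert_source_info(already_commented)
print(already_commented["comment"])   # unchanged: manual note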
mkoura/dump2polarion
dump2polarion/exporters/transform.py
setup_parametrization
def setup_parametrization(result, parametrize):
    """Modifies result's data according to the parametrization settings."""
    if parametrize:
        # remove parameters from title
        title = result.get("title")
        if title:
            result["title"] = TEST_PARAM_RE.sub("", title)
    else:
        # don't parametrize if not specifically configured
        if "params" in result:
            del result["params"]
python
def setup_parametrization(result, parametrize): """Modifies result's data according to the parametrization settings.""" if parametrize: # remove parameters from title title = result.get("title") if title: result["title"] = TEST_PARAM_RE.sub("", title) else: # don't parametrize if not specifically configured if "params" in result: del result["params"]
[ "def", "setup_parametrization", "(", "result", ",", "parametrize", ")", ":", "if", "parametrize", ":", "# remove parameters from title", "title", "=", "result", ".", "get", "(", "\"title\"", ")", "if", "title", ":", "result", "[", "\"title\"", "]", "=", "TEST_PARAM_RE", ".", "sub", "(", "\"\"", ",", "title", ")", "else", ":", "# don't parametrize if not specifically configured", "if", "\"params\"", "in", "result", ":", "del", "result", "[", "\"params\"", "]" ]
Modifies result's data according to the parametrization settings.
[ "Modifies", "result", "s", "data", "according", "to", "the", "parametrization", "settings", "." ]
f4bd24e9d5070e282aad15f1e8bb514c0525cd37
https://github.com/mkoura/dump2polarion/blob/f4bd24e9d5070e282aad15f1e8bb514c0525cd37/dump2polarion/exporters/transform.py#L50-L60
train
mkoura/dump2polarion
dump2polarion/exporters/transform.py
include_class_in_title
def include_class_in_title(result):
    """Makes sure that test class is included in "title".

    e.g. "TestServiceRESTAPI.test_power_parent_service"

    >>> result = {"title": "test_foo", "classname": "foo.bar.baz.TestFoo",
    ...     "file": "foo/bar/baz.py"}
    >>> include_class_in_title(result)
    >>> str(result.get("title"))
    'TestFoo.test_foo'
    >>> result.get("classname")
    """
    classname = result.get("classname", "")
    if classname:
        filepath = result.get("file", "")
        title = result.get("title")
        if title and "/" in filepath and "." in classname:
            fname = filepath.split("/")[-1].replace(".py", "")
            last_classname = classname.split(".")[-1]
            # last part of classname is not file name
            if fname != last_classname and last_classname not in title:
                result["title"] = "{}.{}".format(last_classname, title)
        # we don't need to pass classnames?
        del result["classname"]
python
def include_class_in_title(result): """Makes sure that test class is included in "title". e.g. "TestServiceRESTAPI.test_power_parent_service" >>> result = {"title": "test_foo", "classname": "foo.bar.baz.TestFoo", ... "file": "foo/bar/baz.py"} >>> include_class_in_title(result) >>> str(result.get("title")) 'TestFoo.test_foo' >>> result.get("classname") """ classname = result.get("classname", "") if classname: filepath = result.get("file", "") title = result.get("title") if title and "/" in filepath and "." in classname: fname = filepath.split("/")[-1].replace(".py", "") last_classname = classname.split(".")[-1] # last part of classname is not file name if fname != last_classname and last_classname not in title: result["title"] = "{}.{}".format(last_classname, title) # we don't need to pass classnames? del result["classname"]
[ "def", "include_class_in_title", "(", "result", ")", ":", "classname", "=", "result", ".", "get", "(", "\"classname\"", ",", "\"\"", ")", "if", "classname", ":", "filepath", "=", "result", ".", "get", "(", "\"file\"", ",", "\"\"", ")", "title", "=", "result", ".", "get", "(", "\"title\"", ")", "if", "title", "and", "\"/\"", "in", "filepath", "and", "\".\"", "in", "classname", ":", "fname", "=", "filepath", ".", "split", "(", "\"/\"", ")", "[", "-", "1", "]", ".", "replace", "(", "\".py\"", ",", "\"\"", ")", "last_classname", "=", "classname", ".", "split", "(", "\".\"", ")", "[", "-", "1", "]", "# last part of classname is not file name", "if", "fname", "!=", "last_classname", "and", "last_classname", "not", "in", "title", ":", "result", "[", "\"title\"", "]", "=", "\"{}.{}\"", ".", "format", "(", "last_classname", ",", "title", ")", "# we don't need to pass classnames?", "del", "result", "[", "\"classname\"", "]" ]
Makes sure that test class is included in "title".

e.g. "TestServiceRESTAPI.test_power_parent_service"

>>> result = {"title": "test_foo", "classname": "foo.bar.baz.TestFoo",
...     "file": "foo/bar/baz.py"}
>>> include_class_in_title(result)
>>> str(result.get("title"))
'TestFoo.test_foo'
>>> result.get("classname")
[ "Makes", "sure", "that", "test", "class", "is", "included", "in", "title", "." ]
f4bd24e9d5070e282aad15f1e8bb514c0525cd37
https://github.com/mkoura/dump2polarion/blob/f4bd24e9d5070e282aad15f1e8bb514c0525cd37/dump2polarion/exporters/transform.py#L63-L86
train
mkoura/dump2polarion
dump2polarion/exporters/transform.py
parse_rst_description
def parse_rst_description(testcase):
    """Creates an HTML version of the RST formatted description."""
    description = testcase.get("description")

    if not description:
        return

    try:
        with open(os.devnull, "w") as devnull:
            testcase["description"] = publish_parts(
                description,
                writer_name="html",
                settings_overrides={"report_level": 2, "halt_level": 2, "warning_stream": devnull},
            )["html_body"]
    # pylint: disable=broad-except
    except Exception as exp:
        testcase_id = testcase.get("nodeid") or testcase.get("id") or testcase.get("title")
        logger.error("%s: description: %s", str(exp), testcase_id)
python
def parse_rst_description(testcase): """Creates an HTML version of the RST formatted description.""" description = testcase.get("description") if not description: return try: with open(os.devnull, "w") as devnull: testcase["description"] = publish_parts( description, writer_name="html", settings_overrides={"report_level": 2, "halt_level": 2, "warning_stream": devnull}, )["html_body"] # pylint: disable=broad-except except Exception as exp: testcase_id = testcase.get("nodeid") or testcase.get("id") or testcase.get("title") logger.error("%s: description: %s", str(exp), testcase_id)
[ "def", "parse_rst_description", "(", "testcase", ")", ":", "description", "=", "testcase", ".", "get", "(", "\"description\"", ")", "if", "not", "description", ":", "return", "try", ":", "with", "open", "(", "os", ".", "devnull", ",", "\"w\"", ")", "as", "devnull", ":", "testcase", "[", "\"description\"", "]", "=", "publish_parts", "(", "description", ",", "writer_name", "=", "\"html\"", ",", "settings_overrides", "=", "{", "\"report_level\"", ":", "2", ",", "\"halt_level\"", ":", "2", ",", "\"warning_stream\"", ":", "devnull", "}", ",", ")", "[", "\"html_body\"", "]", "# pylint: disable=broad-except", "except", "Exception", "as", "exp", ":", "testcase_id", "=", "testcase", ".", "get", "(", "\"nodeid\"", ")", "or", "testcase", ".", "get", "(", "\"id\"", ")", "or", "testcase", ".", "get", "(", "\"title\"", ")", "logger", ".", "error", "(", "\"%s: description: %s\"", ",", "str", "(", "exp", ")", ",", "testcase_id", ")" ]
Creates an HTML version of the RST formatted description.
[ "Creates", "an", "HTML", "version", "of", "the", "RST", "formatted", "description", "." ]
f4bd24e9d5070e282aad15f1e8bb514c0525cd37
https://github.com/mkoura/dump2polarion/blob/f4bd24e9d5070e282aad15f1e8bb514c0525cd37/dump2polarion/exporters/transform.py#L119-L136
train
mkoura/dump2polarion
dump2polarion/exporters/transform.py
preformat_plain_description
def preformat_plain_description(testcase):
    """Creates a preformatted HTML version of the description."""
    description = testcase.get("description")

    if not description:
        return

    # naive approach to removing indent from pytest docstrings
    nodeid = testcase.get("nodeid") or ""
    indent = None
    if "::Test" in nodeid:
        indent = 8 * " "
    elif "::test_" in nodeid:
        indent = 4 * " "

    if indent:
        orig_lines = description.split("\n")
        new_lines = []
        for line in orig_lines:
            if line.startswith(indent):
                line = line.replace(indent, "", 1)
            new_lines.append(line)
        description = "\n".join(new_lines)

    testcase["description"] = "<pre>\n{}\n</pre>".format(description)
python
def preformat_plain_description(testcase): """Creates a preformatted HTML version of the description.""" description = testcase.get("description") if not description: return # naive approach to removing indent from pytest docstrings nodeid = testcase.get("nodeid") or "" indent = None if "::Test" in nodeid: indent = 8 * " " elif "::test_" in nodeid: indent = 4 * " " if indent: orig_lines = description.split("\n") new_lines = [] for line in orig_lines: if line.startswith(indent): line = line.replace(indent, "", 1) new_lines.append(line) description = "\n".join(new_lines) testcase["description"] = "<pre>\n{}\n</pre>".format(description)
[ "def", "preformat_plain_description", "(", "testcase", ")", ":", "description", "=", "testcase", ".", "get", "(", "\"description\"", ")", "if", "not", "description", ":", "return", "# naive approach to removing indent from pytest docstrings", "nodeid", "=", "testcase", ".", "get", "(", "\"nodeid\"", ")", "or", "\"\"", "indent", "=", "None", "if", "\"::Test\"", "in", "nodeid", ":", "indent", "=", "8", "*", "\" \"", "elif", "\"::test_\"", "in", "nodeid", ":", "indent", "=", "4", "*", "\" \"", "if", "indent", ":", "orig_lines", "=", "description", ".", "split", "(", "\"\\n\"", ")", "new_lines", "=", "[", "]", "for", "line", "in", "orig_lines", ":", "if", "line", ".", "startswith", "(", "indent", ")", ":", "line", "=", "line", ".", "replace", "(", "indent", ",", "\"\"", ",", "1", ")", "new_lines", ".", "append", "(", "line", ")", "description", "=", "\"\\n\"", ".", "join", "(", "new_lines", ")", "testcase", "[", "\"description\"", "]", "=", "\"<pre>\\n{}\\n</pre>\"", ".", "format", "(", "description", ")" ]
Creates a preformatted HTML version of the description.
[ "Creates", "a", "preformatted", "HTML", "version", "of", "the", "description", "." ]
f4bd24e9d5070e282aad15f1e8bb514c0525cd37
https://github.com/mkoura/dump2polarion/blob/f4bd24e9d5070e282aad15f1e8bb514c0525cd37/dump2polarion/exporters/transform.py#L139-L163
train
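A sketch of preformat_plain_description() from the record above, showing the eight-space dedent applied when the nodeid contains "::Test" and the final <pre> wrapping. The input values are made up for illustration; the import path is assumed from the record's file path.

# Usage sketch; import path assumed as in the previous example.
from dump2polarion.exporters.transform import preformat_plain_description

testcase = {
    "nodeid": "tests/test_login.py::TestLogin::test_ok",
    "description": "        Step 1: open the page\n        Step 2: log in",
}
preformat_plain_description(testcase)
print(testcase["description"])
# <pre>
# Step 1: open the page
# Step 2: log in
# </pre>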
mkoura/dump2polarion
dump2polarion/exporters/transform.py
add_unique_runid
def add_unique_runid(testcase, run_id=None):
    """Adds run id to the test description.

    The `run_id` runs makes the descriptions unique between imports and force
    Polarion to update every testcase every time.
    """
    testcase["description"] = '{}<br id="{}"/>'.format(
        testcase.get("description") or "", run_id or id(add_unique_runid)
    )
python
def add_unique_runid(testcase, run_id=None): """Adds run id to the test description. The `run_id` runs makes the descriptions unique between imports and force Polarion to update every testcase every time. """ testcase["description"] = '{}<br id="{}"/>'.format( testcase.get("description") or "", run_id or id(add_unique_runid) )
[ "def", "add_unique_runid", "(", "testcase", ",", "run_id", "=", "None", ")", ":", "testcase", "[", "\"description\"", "]", "=", "'{}<br id=\"{}\"/>'", ".", "format", "(", "testcase", ".", "get", "(", "\"description\"", ")", "or", "\"\"", ",", "run_id", "or", "id", "(", "add_unique_runid", ")", ")" ]
Adds run id to the test description. The `run_id` makes the descriptions unique between imports and forces Polarion to update every testcase every time.
[ "Adds", "run", "id", "to", "the", "test", "description", "." ]
f4bd24e9d5070e282aad15f1e8bb514c0525cd37
https://github.com/mkoura/dump2polarion/blob/f4bd24e9d5070e282aad15f1e8bb514c0525cd37/dump2polarion/exporters/transform.py#L166-L174
train
mkoura/dump2polarion
dump2polarion/exporters/transform.py
add_automation_link
def add_automation_link(testcase): """Appends link to automation script to the test description.""" automation_link = ( '<a href="{}">Test Source</a>'.format(testcase["automation_script"]) if testcase.get("automation_script") else "" ) testcase["description"] = "{}<br/>{}".format(testcase.get("description") or "", automation_link)
python
def add_automation_link(testcase): """Appends link to automation script to the test description.""" automation_link = ( '<a href="{}">Test Source</a>'.format(testcase["automation_script"]) if testcase.get("automation_script") else "" ) testcase["description"] = "{}<br/>{}".format(testcase.get("description") or "", automation_link)
[ "def", "add_automation_link", "(", "testcase", ")", ":", "automation_link", "=", "(", "'<a href=\"{}\">Test Source</a>'", ".", "format", "(", "testcase", "[", "\"automation_script\"", "]", ")", "if", "testcase", ".", "get", "(", "\"automation_script\"", ")", "else", "\"\"", ")", "testcase", "[", "\"description\"", "]", "=", "\"{}<br/>{}\"", ".", "format", "(", "testcase", ".", "get", "(", "\"description\"", ")", "or", "\"\"", ",", "automation_link", ")" ]
Appends link to automation script to the test description.
[ "Appends", "link", "to", "automation", "script", "to", "the", "test", "description", "." ]
f4bd24e9d5070e282aad15f1e8bb514c0525cd37
https://github.com/mkoura/dump2polarion/blob/f4bd24e9d5070e282aad15f1e8bb514c0525cd37/dump2polarion/exporters/transform.py#L177-L184
train
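A short sketch chaining the two description helpers above (`add_automation_link`, then `add_unique_runid`). The import path mirrors the record's `path` field; the field values, the example URL, and the run id are placeholders chosen for the demonstration.

from dump2polarion.exporters.transform import add_automation_link, add_unique_runid

testcase = {
    "description": "Checks the login flow.",
    "automation_script": "https://example.com/repo/tests/test_login.py",  # placeholder URL
}
add_automation_link(testcase)           # appends '<a href="...">Test Source</a>'
add_unique_runid(testcase, run_id=42)   # appends '<br id="42"/>' so each import differs
print(testcase["description"])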
marcosfelt/cheminventory_python
cheminventory/objects.py
Container.image
def image(self): '''Return an image of the structure of the compound''' r = requests.get(self.image_url, stream=True) r.raise_for_status() return r.raw.read()
python
def image(self): '''Return an image of the structure of the compound''' r = requests.get(self.image_url, stream=True) r.raise_for_status() return r.raw.read()
[ "def", "image", "(", "self", ")", ":", "r", "=", "requests", ".", "get", "(", "self", ".", "image_url", ",", "stream", "=", "True", ")", "r", ".", "raise_for_status", "(", ")", "return", "r", ".", "raw", ".", "read", "(", ")" ]
Return an image of the structure of the compound
[ "Return", "an", "image", "of", "the", "structure", "of", "the", "compound" ]
fa7d67a3741ba7095b30377ac52842997a649012
https://github.com/marcosfelt/cheminventory_python/blob/fa7d67a3741ba7095b30377ac52842997a649012/cheminventory/objects.py#L52-L56
train
oz123/blogit
media/uploads/ajax_bottle_files/ajax_bottle.py
add_numbers
def add_numbers(): """Add two numbers server side, ridiculous but well...""" a = request.params.get('a', 0, type=int) b = request.params.get('b', 0, type=int) return json.dumps({'result': a+b})
python
def add_numbers(): """Add two numbers server side, ridiculous but well...""" a = request.params.get('a', 0, type=int) b = request.params.get('b', 0, type=int) return json.dumps({'result': a+b})
[ "def", "add_numbers", "(", ")", ":", "a", "=", "request", ".", "params", ".", "get", "(", "'a'", ",", "0", ",", "type", "=", "int", ")", "b", "=", "request", ".", "params", ".", "get", "(", "'b'", ",", "0", ",", "type", "=", "int", ")", "return", "json", ".", "dumps", "(", "{", "'result'", ":", "a", "+", "b", "}", ")" ]
Add two numbers server side, ridiculous but well...
[ "Add", "two", "numbers", "server", "side", "ridiculous", "but", "well", "..." ]
15b94969fa43aaf8dc677a8184b144ae8c0f7700
https://github.com/oz123/blogit/blob/15b94969fa43aaf8dc677a8184b144ae8c0f7700/media/uploads/ajax_bottle_files/ajax_bottle.py#L16-L20
train
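A standalone sketch of how a handler like `add_numbers` is typically wired into a Bottle app; the route path, host, and port below are choices made for this example, since the record itself does not show the route decorator.

import json
from bottle import Bottle, request

app = Bottle()

@app.route('/_add_numbers')
def add_numbers():
    """Add two integers passed as query parameters, defaulting to 0."""
    a = request.params.get('a', 0, type=int)
    b = request.params.get('b', 0, type=int)
    return json.dumps({'result': a + b})

if __name__ == '__main__':
    # GET http://localhost:8080/_add_numbers?a=3&b=4 -> {"result": 7}
    app.run(host='localhost', port=8080)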
bradmontgomery/django-blargg
blargg/templatetags/blargg_tags.py
entry_archive_year_url
def entry_archive_year_url(): """Renders the ``entry_archive_year`` URL for the latest ``Entry``.""" entry = Entry.objects.filter(published=True).latest() arg_list = [entry.published_on.strftime("%Y")] return reverse('blargg:entry_archive_year', args=arg_list)
python
def entry_archive_year_url(): """Renders the ``entry_archive_year`` URL for the latest ``Entry``.""" entry = Entry.objects.filter(published=True).latest() arg_list = [entry.published_on.strftime("%Y")] return reverse('blargg:entry_archive_year', args=arg_list)
[ "def", "entry_archive_year_url", "(", ")", ":", "entry", "=", "Entry", ".", "objects", ".", "filter", "(", "published", "=", "True", ")", ".", "latest", "(", ")", "arg_list", "=", "[", "entry", ".", "published_on", ".", "strftime", "(", "\"%Y\"", ")", "]", "return", "reverse", "(", "'blargg:entry_archive_year'", ",", "args", "=", "arg_list", ")" ]
Renders the ``entry_archive_year`` URL for the latest ``Entry``.
[ "Renders", "the", "entry_archive_year", "URL", "for", "the", "latest", "Entry", "." ]
5d683e04723889a0d1c6d6cf1a67a3d431a2e617
https://github.com/bradmontgomery/django-blargg/blob/5d683e04723889a0d1c6d6cf1a67a3d431a2e617/blargg/templatetags/blargg_tags.py#L9-L13
train
mkoura/dump2polarion
dump2polarion/results/junittools.py
_extract_parameters_from_properties
def _extract_parameters_from_properties(properties): """Extracts parameters from properties.""" new_properties = {} parameters = [] for key, value in six.iteritems(properties): if key.startswith(_PARAMETER_PREFIX): parameters.append((key.replace(_PARAMETER_PREFIX, ""), value)) else: new_properties[key] = value return new_properties, sorted(parameters)
python
def _extract_parameters_from_properties(properties): """Extracts parameters from properties.""" new_properties = {} parameters = [] for key, value in six.iteritems(properties): if key.startswith(_PARAMETER_PREFIX): parameters.append((key.replace(_PARAMETER_PREFIX, ""), value)) else: new_properties[key] = value return new_properties, sorted(parameters)
[ "def", "_extract_parameters_from_properties", "(", "properties", ")", ":", "new_properties", "=", "{", "}", "parameters", "=", "[", "]", "for", "key", ",", "value", "in", "six", ".", "iteritems", "(", "properties", ")", ":", "if", "key", ".", "startswith", "(", "_PARAMETER_PREFIX", ")", ":", "parameters", ".", "append", "(", "(", "key", ".", "replace", "(", "_PARAMETER_PREFIX", ",", "\"\"", ")", ",", "value", ")", ")", "else", ":", "new_properties", "[", "key", "]", "=", "value", "return", "new_properties", ",", "sorted", "(", "parameters", ")" ]
Extracts parameters from properties.
[ "Extracts", "parameters", "from", "properties", "." ]
f4bd24e9d5070e282aad15f1e8bb514c0525cd37
https://github.com/mkoura/dump2polarion/blob/f4bd24e9d5070e282aad15f1e8bb514c0525cd37/dump2polarion/results/junittools.py#L58-L68
train
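A standalone sketch of the same prefix-splitting idea used by `_extract_parameters_from_properties`. The real `_PARAMETER_PREFIX` constant lives elsewhere in junittools.py and is not shown in the record, so the prefix value and the helper name below are placeholders.

_PARAMETER_PREFIX = "param_"  # placeholder value, not the real constant

def split_parameters(properties):
    """Separate prefixed parameter entries from ordinary properties."""
    new_properties = {}
    parameters = []
    for key, value in properties.items():
        if key.startswith(_PARAMETER_PREFIX):
            parameters.append((key.replace(_PARAMETER_PREFIX, ""), value))
        else:
            new_properties[key] = value
    return new_properties, sorted(parameters)

props = {"param_browser": "firefox", "polarion-testcase-id": "TC-1"}
print(split_parameters(props))
# -> ({'polarion-testcase-id': 'TC-1'}, [('browser', 'firefox')])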
mkoura/dump2polarion
dump2polarion/results/junittools.py
import_junit
def import_junit(junit_file, **kwargs): """Reads the content of the junit-results file produced by pytest and returns imported data.""" xml_root = _get_xml_root(junit_file) results = [] for test_data in xml_root: if test_data.tag != "testcase": continue verdict, comment, properties = _parse_testcase_record(test_data) properties, parameters = _extract_parameters_from_properties(properties) title = test_data.get("name") classname = test_data.get("classname") time = test_data.get("time", 0) filepath = test_data.get("file") data = [ ("title", title), ("classname", classname), ("verdict", verdict), ("comment", comment), ("time", time), ("file", filepath), ] for key in sorted(properties): data.append((key, properties[key])) if parameters: data.append(("params", OrderedDict(parameters))) results.append(OrderedDict(data)) return xunit_exporter.ImportedData(results=results, testrun=None)
python
def import_junit(junit_file, **kwargs): """Reads the content of the junit-results file produced by pytest and returns imported data.""" xml_root = _get_xml_root(junit_file) results = [] for test_data in xml_root: if test_data.tag != "testcase": continue verdict, comment, properties = _parse_testcase_record(test_data) properties, parameters = _extract_parameters_from_properties(properties) title = test_data.get("name") classname = test_data.get("classname") time = test_data.get("time", 0) filepath = test_data.get("file") data = [ ("title", title), ("classname", classname), ("verdict", verdict), ("comment", comment), ("time", time), ("file", filepath), ] for key in sorted(properties): data.append((key, properties[key])) if parameters: data.append(("params", OrderedDict(parameters))) results.append(OrderedDict(data)) return xunit_exporter.ImportedData(results=results, testrun=None)
[ "def", "import_junit", "(", "junit_file", ",", "*", "*", "kwargs", ")", ":", "xml_root", "=", "_get_xml_root", "(", "junit_file", ")", "results", "=", "[", "]", "for", "test_data", "in", "xml_root", ":", "if", "test_data", ".", "tag", "!=", "\"testcase\"", ":", "continue", "verdict", ",", "comment", ",", "properties", "=", "_parse_testcase_record", "(", "test_data", ")", "properties", ",", "parameters", "=", "_extract_parameters_from_properties", "(", "properties", ")", "title", "=", "test_data", ".", "get", "(", "\"name\"", ")", "classname", "=", "test_data", ".", "get", "(", "\"classname\"", ")", "time", "=", "test_data", ".", "get", "(", "\"time\"", ",", "0", ")", "filepath", "=", "test_data", ".", "get", "(", "\"file\"", ")", "data", "=", "[", "(", "\"title\"", ",", "title", ")", ",", "(", "\"classname\"", ",", "classname", ")", ",", "(", "\"verdict\"", ",", "verdict", ")", ",", "(", "\"comment\"", ",", "comment", ")", ",", "(", "\"time\"", ",", "time", ")", ",", "(", "\"file\"", ",", "filepath", ")", ",", "]", "for", "key", "in", "sorted", "(", "properties", ")", ":", "data", ".", "append", "(", "(", "key", ",", "properties", "[", "key", "]", ")", ")", "if", "parameters", ":", "data", ".", "append", "(", "(", "\"params\"", ",", "OrderedDict", "(", "parameters", ")", ")", ")", "results", ".", "append", "(", "OrderedDict", "(", "data", ")", ")", "return", "xunit_exporter", ".", "ImportedData", "(", "results", "=", "results", ",", "testrun", "=", "None", ")" ]
Reads the content of the junit-results file produced by pytest and returns imported data.
[ "Reads", "the", "content", "of", "the", "junit", "-", "results", "file", "produced", "by", "pytest", "and", "returns", "imported", "data", "." ]
f4bd24e9d5070e282aad15f1e8bb514c0525cd37
https://github.com/mkoura/dump2polarion/blob/f4bd24e9d5070e282aad15f1e8bb514c0525cd37/dump2polarion/results/junittools.py#L72-L104
train
klen/zeta-library
zetalibrary/main.py
libs
def libs(): " Show zeta libs " for name, description, version, url in gen_frameworks(): print name print ''.join('-' for _ in xrange(len(name))) print description.strip('/*\n ') print version.strip('/*\n ') print url.strip('/*\n ') print
python
def libs(): " Show zeta libs " for name, description, version, url in gen_frameworks(): print name print ''.join('-' for _ in xrange(len(name))) print description.strip('/*\n ') print version.strip('/*\n ') print url.strip('/*\n ') print
[ "def", "libs", "(", ")", ":", "for", "name", ",", "description", ",", "version", ",", "url", "in", "gen_frameworks", "(", ")", ":", "print", "name", "print", "''", ".", "join", "(", "'-'", "for", "_", "in", "xrange", "(", "len", "(", "name", ")", ")", ")", "print", "description", ".", "strip", "(", "'/*\\n '", ")", "print", "version", ".", "strip", "(", "'/*\\n '", ")", "print", "url", ".", "strip", "(", "'/*\\n '", ")", "print" ]
Show zeta libs
[ "Show", "zeta", "libs" ]
b76f89000f467e10ddcc94aded3f6c6bf4a0e5bd
https://github.com/klen/zeta-library/blob/b76f89000f467e10ddcc94aded3f6c6bf4a0e5bd/zetalibrary/main.py#L23-L31
train
klen/zeta-library
zetalibrary/main.py
watch
def watch(args): " Watch directory for changes and auto pack sources " assert op.isdir(args.source), "Watch mode allowed only for directories." print 'Zeta-library v. %s watch mode' % VERSION print '================================' print 'Ctrl+C for exit\n' observer = Observer() handler = ZetaTrick(args=args) observer.schedule(handler, args.source, recursive=True) observer.start() try: while True: time.sleep(1) except KeyboardInterrupt: observer.stop() print "\nWatch mode stoped." observer.join()
python
def watch(args): " Watch directory for changes and auto pack sources " assert op.isdir(args.source), "Watch mode allowed only for directories." print 'Zeta-library v. %s watch mode' % VERSION print '================================' print 'Ctrl+C for exit\n' observer = Observer() handler = ZetaTrick(args=args) observer.schedule(handler, args.source, recursive=True) observer.start() try: while True: time.sleep(1) except KeyboardInterrupt: observer.stop() print "\nWatch mode stoped." observer.join()
[ "def", "watch", "(", "args", ")", ":", "assert", "op", ".", "isdir", "(", "args", ".", "source", ")", ",", "\"Watch mode allowed only for directories.\"", "print", "'Zeta-library v. %s watch mode'", "%", "VERSION", "print", "'================================'", "print", "'Ctrl+C for exit\\n'", "observer", "=", "Observer", "(", ")", "handler", "=", "ZetaTrick", "(", "args", "=", "args", ")", "observer", ".", "schedule", "(", "handler", ",", "args", ".", "source", ",", "recursive", "=", "True", ")", "observer", ".", "start", "(", ")", "try", ":", "while", "True", ":", "time", ".", "sleep", "(", "1", ")", "except", "KeyboardInterrupt", ":", "observer", ".", "stop", "(", ")", "print", "\"\\nWatch mode stoped.\"", "observer", ".", "join", "(", ")" ]
Watch directory for changes and auto pack sources
[ "Watch", "directory", "for", "changes", "and", "auto", "pack", "sources" ]
b76f89000f467e10ddcc94aded3f6c6bf4a0e5bd
https://github.com/klen/zeta-library/blob/b76f89000f467e10ddcc94aded3f6c6bf4a0e5bd/zetalibrary/main.py#L42-L58
train
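A minimal standalone sketch of the same watchdog observe-and-sleep loop used by `watch` above. The handler here only logs events, whereas `ZetaTrick` (not shown in the record) repacks the changed sources; the watched path "." is arbitrary.

import time
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler

class LogHandler(FileSystemEventHandler):
    def on_any_event(self, event):
        # Print every filesystem event seen under the watched directory.
        print(event.event_type, event.src_path)

observer = Observer()
observer.schedule(LogHandler(), ".", recursive=True)
observer.start()
try:
    while True:
        time.sleep(1)
except KeyboardInterrupt:
    observer.stop()
observer.join()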
klen/zeta-library
zetalibrary/main.py
pack
def pack(args): " Parse file or dir, import css, js code and save with prefix " assert op.exists(args.source), "Does not exists: %s" % args.source zeta_pack(args)
python
def pack(args): " Parse file or dir, import css, js code and save with prefix " assert op.exists(args.source), "Does not exists: %s" % args.source zeta_pack(args)
[ "def", "pack", "(", "args", ")", ":", "assert", "op", ".", "exists", "(", "args", ".", "source", ")", ",", "\"Does not exists: %s\"", "%", "args", ".", "source", "zeta_pack", "(", "args", ")" ]
Parse file or dir, import css, js code and save with prefix
[ "Parse", "file", "or", "dir", "import", "css", "js", "code", "and", "save", "with", "prefix" ]
b76f89000f467e10ddcc94aded3f6c6bf4a0e5bd
https://github.com/klen/zeta-library/blob/b76f89000f467e10ddcc94aded3f6c6bf4a0e5bd/zetalibrary/main.py#L69-L72
train
Genida/dependenpy
src/dependenpy/structures.py
Matrix.cast
def cast(keys, data): """Cast a set of keys and an array to a Matrix object.""" matrix = Matrix() matrix.keys = keys matrix.data = data return matrix
python
def cast(keys, data): """Cast a set of keys and an array to a Matrix object.""" matrix = Matrix() matrix.keys = keys matrix.data = data return matrix
[ "def", "cast", "(", "keys", ",", "data", ")", ":", "matrix", "=", "Matrix", "(", ")", "matrix", ".", "keys", "=", "keys", "matrix", ".", "data", "=", "data", "return", "matrix" ]
Cast a set of keys and an array to a Matrix object.
[ "Cast", "a", "set", "of", "keys", "and", "an", "array", "to", "a", "Matrix", "object", "." ]
df099c17cbe735c990eca9197e39cfc5eb8a4c8e
https://github.com/Genida/dependenpy/blob/df099c17cbe735c990eca9197e39cfc5eb8a4c8e/src/dependenpy/structures.py#L82-L87
train
Genida/dependenpy
src/dependenpy/structures.py
Vertex.connect_to
def connect_to(self, vertex, weight=1): """ Connect this vertex to another one. Args: vertex (Vertex): vertex to connect to. weight (int): weight of the edge. Returns: Edge: the newly created edge. """ for edge in self.edges_out: if vertex == edge.vertex_in: return edge return Edge(self, vertex, weight)
python
def connect_to(self, vertex, weight=1): """ Connect this vertex to another one. Args: vertex (Vertex): vertex to connect to. weight (int): weight of the edge. Returns: Edge: the newly created edge. """ for edge in self.edges_out: if vertex == edge.vertex_in: return edge return Edge(self, vertex, weight)
[ "def", "connect_to", "(", "self", ",", "vertex", ",", "weight", "=", "1", ")", ":", "for", "edge", "in", "self", ".", "edges_out", ":", "if", "vertex", "==", "edge", ".", "vertex_in", ":", "return", "edge", "return", "Edge", "(", "self", ",", "vertex", ",", "weight", ")" ]
Connect this vertex to another one. Args: vertex (Vertex): vertex to connect to. weight (int): weight of the edge. Returns: Edge: the newly created edge.
[ "Connect", "this", "vertex", "to", "another", "one", "." ]
df099c17cbe735c990eca9197e39cfc5eb8a4c8e
https://github.com/Genida/dependenpy/blob/df099c17cbe735c990eca9197e39cfc5eb8a4c8e/src/dependenpy/structures.py#L201-L215
train
Genida/dependenpy
src/dependenpy/structures.py
Vertex.connect_from
def connect_from(self, vertex, weight=1): """ Connect another vertex to this one. Args: vertex (Vertex): vertex to connect from. weight (int): weight of the edge. Returns: Edge: the newly created edge. """ for edge in self.edges_in: if vertex == edge.vertex_out: return edge return Edge(vertex, self, weight)
python
def connect_from(self, vertex, weight=1): """ Connect another vertex to this one. Args: vertex (Vertex): vertex to connect from. weight (int): weight of the edge. Returns: Edge: the newly created edge. """ for edge in self.edges_in: if vertex == edge.vertex_out: return edge return Edge(vertex, self, weight)
[ "def", "connect_from", "(", "self", ",", "vertex", ",", "weight", "=", "1", ")", ":", "for", "edge", "in", "self", ".", "edges_in", ":", "if", "vertex", "==", "edge", ".", "vertex_out", ":", "return", "edge", "return", "Edge", "(", "vertex", ",", "self", ",", "weight", ")" ]
Connect another vertex to this one. Args: vertex (Vertex): vertex to connect from. weight (int): weight of the edge. Returns: Edge: the newly created edge.
[ "Connect", "another", "vertex", "to", "this", "one", "." ]
df099c17cbe735c990eca9197e39cfc5eb8a4c8e
https://github.com/Genida/dependenpy/blob/df099c17cbe735c990eca9197e39cfc5eb8a4c8e/src/dependenpy/structures.py#L217-L231
train
Genida/dependenpy
src/dependenpy/structures.py
Edge.go_from
def go_from(self, vertex): """ Tell the edge to go out from this vertex. Args: vertex (Vertex): vertex to go from. """ if self.vertex_out: self.vertex_out.edges_out.remove(self) self.vertex_out = vertex vertex.edges_out.add(self)
python
def go_from(self, vertex): """ Tell the edge to go out from this vertex. Args: vertex (Vertex): vertex to go from. """ if self.vertex_out: self.vertex_out.edges_out.remove(self) self.vertex_out = vertex vertex.edges_out.add(self)
[ "def", "go_from", "(", "self", ",", "vertex", ")", ":", "if", "self", ".", "vertex_out", ":", "self", ".", "vertex_out", ".", "edges_out", ".", "remove", "(", "self", ")", "self", ".", "vertex_out", "=", "vertex", "vertex", ".", "edges_out", ".", "add", "(", "self", ")" ]
Tell the edge to go out from this vertex. Args: vertex (Vertex): vertex to go from.
[ "Tell", "the", "edge", "to", "go", "out", "from", "this", "vertex", "." ]
df099c17cbe735c990eca9197e39cfc5eb8a4c8e
https://github.com/Genida/dependenpy/blob/df099c17cbe735c990eca9197e39cfc5eb8a4c8e/src/dependenpy/structures.py#L256-L266
train
Genida/dependenpy
src/dependenpy/structures.py
Edge.go_in
def go_in(self, vertex): """ Tell the edge to go into this vertex. Args: vertex (Vertex): vertex to go into. """ if self.vertex_in: self.vertex_in.edges_in.remove(self) self.vertex_in = vertex vertex.edges_in.add(self)
python
def go_in(self, vertex): """ Tell the edge to go into this vertex. Args: vertex (Vertex): vertex to go into. """ if self.vertex_in: self.vertex_in.edges_in.remove(self) self.vertex_in = vertex vertex.edges_in.add(self)
[ "def", "go_in", "(", "self", ",", "vertex", ")", ":", "if", "self", ".", "vertex_in", ":", "self", ".", "vertex_in", ".", "edges_in", ".", "remove", "(", "self", ")", "self", ".", "vertex_in", "=", "vertex", "vertex", ".", "edges_in", ".", "add", "(", "self", ")" ]
Tell the edge to go into this vertex. Args: vertex (Vertex): vertex to go into.
[ "Tell", "the", "edge", "to", "go", "into", "this", "vertex", "." ]
df099c17cbe735c990eca9197e39cfc5eb8a4c8e
https://github.com/Genida/dependenpy/blob/df099c17cbe735c990eca9197e39cfc5eb8a4c8e/src/dependenpy/structures.py#L268-L278
train
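An illustrative use of the `Vertex`/`Edge` helpers above, assuming dependenpy is installed. The `Vertex` constructor argument (a name) and `Edge` registering itself on both endpoints in its constructor are assumptions; the records only show the connect/go methods themselves.

from dependenpy.structures import Vertex

a = Vertex("pkg_a")   # assumed signature: Vertex(name)
b = Vertex("pkg_b")
edge = a.connect_to(b, weight=2)
print(edge.vertex_out is a, edge.vertex_in is b)
# A second call with the same target should return the existing edge rather than
# creating a duplicate (see the edges_out scan inside connect_to).
print(a.connect_to(b) is edge)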
ludeeus/GHLocalApi
ghlocalapi/utils/convert.py
get_device_type
def get_device_type(device_type=0): """Return the device type from a device_type list.""" device_types = { 0: "Unknown", 1: "Classic - BR/EDR devices", 2: "Low Energy - LE-only", 3: "Dual Mode - BR/EDR/LE" } if device_type in [0, 1, 2, 3]: return_value = device_types[device_type] else: return_value = device_types[0] return return_value
python
def get_device_type(device_type=0): """Return the device type from a device_type list.""" device_types = { 0: "Unknown", 1: "Classic - BR/EDR devices", 2: "Low Energy - LE-only", 3: "Dual Mode - BR/EDR/LE" } if device_type in [0, 1, 2, 3]: return_value = device_types[device_type] else: return_value = device_types[0] return return_value
[ "def", "get_device_type", "(", "device_type", "=", "0", ")", ":", "device_types", "=", "{", "0", ":", "\"Unknown\"", ",", "1", ":", "\"Classic - BR/EDR devices\"", ",", "2", ":", "\"Low Energy - LE-only\"", ",", "3", ":", "\"Dual Mode - BR/EDR/LE\"", "}", "if", "device_type", "in", "[", "0", ",", "1", ",", "2", ",", "3", "]", ":", "return_value", "=", "device_types", "[", "device_type", "]", "else", ":", "return_value", "=", "device_types", "[", "0", "]", "return", "return_value" ]
Return the device type from a device_type list.
[ "Return", "the", "device", "type", "from", "a", "device_type", "list", "." ]
93abdee299c4a4b65aa9dd03c77ec34e174e3c56
https://github.com/ludeeus/GHLocalApi/blob/93abdee299c4a4b65aa9dd03c77ec34e174e3c56/ghlocalapi/utils/convert.py#L9-L21
train
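A quick illustration of `get_device_type`; the import path mirrors the record's `path` field.

from ghlocalapi.utils.convert import get_device_type

print(get_device_type(2))    # "Low Energy - LE-only"
print(get_device_type(99))   # unknown codes fall back to "Unknown"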
romanorac/discomll
discomll/classification/logistic_regression.py
fit
def fit(dataset, alpha=1e-8, max_iterations=10, save_results=True, show=False): """ Function starts a job for calculation of theta parameters Parameters ---------- input - dataset object with input urls and other parameters alpha - convergence value max_iterations - define maximum number of iterations save_results - save results to ddfs show - show info about job execution Returns ------- Urls of fit model results on ddfs """ from disco.worker.pipeline.worker import Worker, Stage from disco.core import Job, result_iterator import numpy as np if dataset.params["y_map"] == []: raise Exception("Logistic regression requires a target label mapping parameter.") try: alpha = float(alpha) max_iterations = int(max_iterations) if max_iterations < 1: raise Exception("Parameter max_iterations should be greater than 0.") except ValueError: raise Exception("Parameters should be numerical.") # initialize thetas to 0 and add intercept term thetas = np.zeros(len(dataset.params["X_indices"]) + 1) J = [0] # J cost function values for every iteration for i in range(max_iterations): job = Job(worker=Worker(save_results=save_results)) # job parallelizes mappers and joins them with one reducer job.pipeline = [ ("split", Stage("map", input_chain=dataset.params["input_chain"], init=simple_init, process=map_fit)), ('group_all', Stage("reduce", init=simple_init, process=reduce_fit, combine=True))] job.params = dataset.params # job parameters (dataset object) job.params["thetas"] = thetas # every iteration set new thetas job.run(name="logreg_fit_iter_%d" % (i + 1), input=dataset.params["data_tag"]) fitmodel_url = job.wait(show=show) for k, v in result_iterator(fitmodel_url): if k == "J": # J.append(v) # save value of J cost function else: thetas = v # save new thetas if np.abs(J[-2] - J[-1]) < alpha: # check for convergence if show: print("Converged at iteration %d" % (i + 1)) break return {"logreg_fitmodel": fitmodel_url}
python
def fit(dataset, alpha=1e-8, max_iterations=10, save_results=True, show=False): """ Function starts a job for calculation of theta parameters Parameters ---------- input - dataset object with input urls and other parameters alpha - convergence value max_iterations - define maximum number of iterations save_results - save results to ddfs show - show info about job execution Returns ------- Urls of fit model results on ddfs """ from disco.worker.pipeline.worker import Worker, Stage from disco.core import Job, result_iterator import numpy as np if dataset.params["y_map"] == []: raise Exception("Logistic regression requires a target label mapping parameter.") try: alpha = float(alpha) max_iterations = int(max_iterations) if max_iterations < 1: raise Exception("Parameter max_iterations should be greater than 0.") except ValueError: raise Exception("Parameters should be numerical.") # initialize thetas to 0 and add intercept term thetas = np.zeros(len(dataset.params["X_indices"]) + 1) J = [0] # J cost function values for every iteration for i in range(max_iterations): job = Job(worker=Worker(save_results=save_results)) # job parallelizes mappers and joins them with one reducer job.pipeline = [ ("split", Stage("map", input_chain=dataset.params["input_chain"], init=simple_init, process=map_fit)), ('group_all', Stage("reduce", init=simple_init, process=reduce_fit, combine=True))] job.params = dataset.params # job parameters (dataset object) job.params["thetas"] = thetas # every iteration set new thetas job.run(name="logreg_fit_iter_%d" % (i + 1), input=dataset.params["data_tag"]) fitmodel_url = job.wait(show=show) for k, v in result_iterator(fitmodel_url): if k == "J": # J.append(v) # save value of J cost function else: thetas = v # save new thetas if np.abs(J[-2] - J[-1]) < alpha: # check for convergence if show: print("Converged at iteration %d" % (i + 1)) break return {"logreg_fitmodel": fitmodel_url}
[ "def", "fit", "(", "dataset", ",", "alpha", "=", "1e-8", ",", "max_iterations", "=", "10", ",", "save_results", "=", "True", ",", "show", "=", "False", ")", ":", "from", "disco", ".", "worker", ".", "pipeline", ".", "worker", "import", "Worker", ",", "Stage", "from", "disco", ".", "core", "import", "Job", ",", "result_iterator", "import", "numpy", "as", "np", "if", "dataset", ".", "params", "[", "\"y_map\"", "]", "==", "[", "]", ":", "raise", "Exception", "(", "\"Logistic regression requires a target label mapping parameter.\"", ")", "try", ":", "alpha", "=", "float", "(", "alpha", ")", "max_iterations", "=", "int", "(", "max_iterations", ")", "if", "max_iterations", "<", "1", ":", "raise", "Exception", "(", "\"Parameter max_iterations should be greater than 0.\"", ")", "except", "ValueError", ":", "raise", "Exception", "(", "\"Parameters should be numerical.\"", ")", "# initialize thetas to 0 and add intercept term", "thetas", "=", "np", ".", "zeros", "(", "len", "(", "dataset", ".", "params", "[", "\"X_indices\"", "]", ")", "+", "1", ")", "J", "=", "[", "0", "]", "# J cost function values for every iteration", "for", "i", "in", "range", "(", "max_iterations", ")", ":", "job", "=", "Job", "(", "worker", "=", "Worker", "(", "save_results", "=", "save_results", ")", ")", "# job parallelizes mappers and joins them with one reducer", "job", ".", "pipeline", "=", "[", "(", "\"split\"", ",", "Stage", "(", "\"map\"", ",", "input_chain", "=", "dataset", ".", "params", "[", "\"input_chain\"", "]", ",", "init", "=", "simple_init", ",", "process", "=", "map_fit", ")", ")", ",", "(", "'group_all'", ",", "Stage", "(", "\"reduce\"", ",", "init", "=", "simple_init", ",", "process", "=", "reduce_fit", ",", "combine", "=", "True", ")", ")", "]", "job", ".", "params", "=", "dataset", ".", "params", "# job parameters (dataset object)", "job", ".", "params", "[", "\"thetas\"", "]", "=", "thetas", "# every iteration set new thetas", "job", ".", "run", "(", "name", "=", "\"logreg_fit_iter_%d\"", "%", "(", "i", "+", "1", ")", ",", "input", "=", "dataset", ".", "params", "[", "\"data_tag\"", "]", ")", "fitmodel_url", "=", "job", ".", "wait", "(", "show", "=", "show", ")", "for", "k", ",", "v", "in", "result_iterator", "(", "fitmodel_url", ")", ":", "if", "k", "==", "\"J\"", ":", "#", "J", ".", "append", "(", "v", ")", "# save value of J cost function", "else", ":", "thetas", "=", "v", "# save new thetas", "if", "np", ".", "abs", "(", "J", "[", "-", "2", "]", "-", "J", "[", "-", "1", "]", ")", "<", "alpha", ":", "# check for convergence", "if", "show", ":", "print", "(", "\"Converged at iteration %d\"", "%", "(", "i", "+", "1", ")", ")", "break", "return", "{", "\"logreg_fitmodel\"", ":", "fitmodel_url", "}" ]
Function starts a job for calculation of theta parameters Parameters ---------- input - dataset object with input urls and other parameters alpha - convergence value max_iterations - define maximum number of iterations save_results - save results to ddfs show - show info about job execution Returns ------- Urls of fit model results on ddfs
[ "Function", "starts", "a", "job", "for", "calculation", "of", "theta", "parameters" ]
a4703daffb2ba3c9f614bc3dbe45ae55884aea00
https://github.com/romanorac/discomll/blob/a4703daffb2ba3c9f614bc3dbe45ae55884aea00/discomll/classification/logistic_regression.py#L82-L138
train
davidfokkema/artist
artist/plot.py
BasePlotContainer.save
def save(self, dest_path): r"""Save the plot as a LaTeX file. The output file can be included (using \input) in your LaTeX document. It can also be compiled as a standalone document to generate a (cropped) pdf version of the plot. :param dest_path: path of the file. """ self.save_assets(dest_path) self.external_filename = 'externalized-%s' % \ os.path.basename(dest_path).replace(' ', '_') dest_path = self._add_extension('tex', dest_path) with open(dest_path, 'w') as f: f.write(self.render())
python
def save(self, dest_path): r"""Save the plot as a LaTeX file. The output file can be included (using \input) in your LaTeX document. It can also be compiled as a standalone document to generate a (cropped) pdf version of the plot. :param dest_path: path of the file. """ self.save_assets(dest_path) self.external_filename = 'externalized-%s' % \ os.path.basename(dest_path).replace(' ', '_') dest_path = self._add_extension('tex', dest_path) with open(dest_path, 'w') as f: f.write(self.render())
[ "def", "save", "(", "self", ",", "dest_path", ")", ":", "self", ".", "save_assets", "(", "dest_path", ")", "self", ".", "external_filename", "=", "'externalized-%s'", "%", "os", ".", "path", ".", "basename", "(", "dest_path", ")", ".", "replace", "(", "' '", ",", "'_'", ")", "dest_path", "=", "self", ".", "_add_extension", "(", "'tex'", ",", "dest_path", ")", "with", "open", "(", "dest_path", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "self", ".", "render", "(", ")", ")" ]
r"""Save the plot as a LaTeX file. The output file can be included (using \input) in your LaTeX document. It can also be compiled as a standalone document to generate a (cropped) pdf version of the plot. :param dest_path: path of the file.
[ "r", "Save", "the", "plot", "as", "a", "LaTeX", "file", "." ]
26ae7987522622710f2910980770c50012fda47d
https://github.com/davidfokkema/artist/blob/26ae7987522622710f2910980770c50012fda47d/artist/plot.py#L96-L111
train
davidfokkema/artist
artist/plot.py
BasePlotContainer.save_as_pdf
def save_as_pdf(self, dest_path): """Save the plot as a PDF file. Save and render the plot using LaTeX to create a PDF file. :param dest_path: path of the file. """ dest_path = self._add_extension('pdf', dest_path) build_dir = tempfile.mkdtemp() build_path = os.path.join(build_dir, 'document.tex') self.save_assets(build_path) with open(build_path, 'w') as f: f.write(self.render()) pdf_path = self._build_document(build_path) shutil.copyfile(pdf_path, dest_path) shutil.rmtree(build_dir)
python
def save_as_pdf(self, dest_path): """Save the plot as a PDF file. Save and render the plot using LaTeX to create a PDF file. :param dest_path: path of the file. """ dest_path = self._add_extension('pdf', dest_path) build_dir = tempfile.mkdtemp() build_path = os.path.join(build_dir, 'document.tex') self.save_assets(build_path) with open(build_path, 'w') as f: f.write(self.render()) pdf_path = self._build_document(build_path) shutil.copyfile(pdf_path, dest_path) shutil.rmtree(build_dir)
[ "def", "save_as_pdf", "(", "self", ",", "dest_path", ")", ":", "dest_path", "=", "self", ".", "_add_extension", "(", "'pdf'", ",", "dest_path", ")", "build_dir", "=", "tempfile", ".", "mkdtemp", "(", ")", "build_path", "=", "os", ".", "path", ".", "join", "(", "build_dir", ",", "'document.tex'", ")", "self", ".", "save_assets", "(", "build_path", ")", "with", "open", "(", "build_path", ",", "'w'", ")", "as", "f", ":", "f", ".", "write", "(", "self", ".", "render", "(", ")", ")", "pdf_path", "=", "self", ".", "_build_document", "(", "build_path", ")", "shutil", ".", "copyfile", "(", "pdf_path", ",", "dest_path", ")", "shutil", ".", "rmtree", "(", "build_dir", ")" ]
Save the plot as a PDF file. Save and render the plot using LaTeX to create a PDF file. :param dest_path: path of the file.
[ "Save", "the", "plot", "as", "a", "PDF", "file", "." ]
26ae7987522622710f2910980770c50012fda47d
https://github.com/davidfokkema/artist/blob/26ae7987522622710f2910980770c50012fda47d/artist/plot.py#L113-L129
train
davidfokkema/artist
artist/plot.py
SubPlot.histogram
def histogram(self, counts, bin_edges, linestyle='solid'): """Plot a histogram. The user needs to supply the histogram. This method only plots the results. You can use NumPy's histogram function. :param counts: array containing the count values. :param bin_edges: array containing the bin edges. :param linestyle: the line style used to connect the data points. May be None, or any line style accepted by TikZ (e.g. solid, dashed, dotted, thick, or even combinations like "red,thick,dashed"). Example:: >>> plot = artist.Plot() >>> x = np.random.normal(size=1000) >>> n, bins = np.histogram(x) >>> plot.histogram(n, bins) """ if len(bin_edges) - 1 != len(counts): raise RuntimeError( 'The length of bin_edges should be length of counts + 1') x = bin_edges y = list(counts) + [counts[-1]] self.plot(x, y, mark=None, linestyle=linestyle, use_steps=True)
python
def histogram(self, counts, bin_edges, linestyle='solid'): """Plot a histogram. The user needs to supply the histogram. This method only plots the results. You can use NumPy's histogram function. :param counts: array containing the count values. :param bin_edges: array containing the bin edges. :param linestyle: the line style used to connect the data points. May be None, or any line style accepted by TikZ (e.g. solid, dashed, dotted, thick, or even combinations like "red,thick,dashed"). Example:: >>> plot = artist.Plot() >>> x = np.random.normal(size=1000) >>> n, bins = np.histogram(x) >>> plot.histogram(n, bins) """ if len(bin_edges) - 1 != len(counts): raise RuntimeError( 'The length of bin_edges should be length of counts + 1') x = bin_edges y = list(counts) + [counts[-1]] self.plot(x, y, mark=None, linestyle=linestyle, use_steps=True)
[ "def", "histogram", "(", "self", ",", "counts", ",", "bin_edges", ",", "linestyle", "=", "'solid'", ")", ":", "if", "len", "(", "bin_edges", ")", "-", "1", "!=", "len", "(", "counts", ")", ":", "raise", "RuntimeError", "(", "'The length of bin_edges should be length of counts + 1'", ")", "x", "=", "bin_edges", "y", "=", "list", "(", "counts", ")", "+", "[", "counts", "[", "-", "1", "]", "]", "self", ".", "plot", "(", "x", ",", "y", ",", "mark", "=", "None", ",", "linestyle", "=", "linestyle", ",", "use_steps", "=", "True", ")" ]
Plot a histogram. The user needs to supply the histogram. This method only plots the results. You can use NumPy's histogram function. :param counts: array containing the count values. :param bin_edges: array containing the bin edges. :param linestyle: the line style used to connect the data points. May be None, or any line style accepted by TikZ (e.g. solid, dashed, dotted, thick, or even combinations like "red,thick,dashed"). Example:: >>> plot = artist.Plot() >>> x = np.random.normal(size=1000) >>> n, bins = np.histogram(x) >>> plot.histogram(n, bins)
[ "Plot", "a", "histogram", "." ]
26ae7987522622710f2910980770c50012fda47d
https://github.com/davidfokkema/artist/blob/26ae7987522622710f2910980770c50012fda47d/artist/plot.py#L289-L315
train
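An end-to-end sketch built from the docstring example above plus the `save()`/`save_as_pdf()` records earlier in this file; it assumes the artist package is installed (and, for PDF output, a working LaTeX toolchain).

import numpy as np
import artist

x = np.random.normal(size=1000)
n, bins = np.histogram(x)

plot = artist.Plot()
plot.histogram(n, bins)
plot.save('normal-histogram')           # writes normal-histogram.tex for \input{}
# plot.save_as_pdf('normal-histogram')  # or compile a standalone cropped PDF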
davidfokkema/artist
artist/plot.py
SubPlot.scatter
def scatter(self, x, y, xerr=[], yerr=[], mark='o', markstyle=None): """Plot a series of points. Plot a series of points (marks) that are not connected by a line. Shortcut for plot with linestyle=None. :param x: array containing x-values. :param y: array containing y-values. :param xerr: array containing errors on the x-values. :param yerr: array containing errors on the y-values. :param mark: the symbol used to mark the data points. May be any plot mark accepted by TikZ (e.g. ``*, x, +, o, square, triangle``). :param markstyle: the style of the plot marks (e.g. 'mark size=.75pt') Example:: >>> plot = artist.Plot() >>> x = np.random.normal(size=20) >>> y = np.random.normal(size=20) >>> plot.scatter(x, y, mark='*') """ self.plot(x, y, xerr=xerr, yerr=yerr, mark=mark, linestyle=None, markstyle=markstyle)
python
def scatter(self, x, y, xerr=[], yerr=[], mark='o', markstyle=None): """Plot a series of points. Plot a series of points (marks) that are not connected by a line. Shortcut for plot with linestyle=None. :param x: array containing x-values. :param y: array containing y-values. :param xerr: array containing errors on the x-values. :param yerr: array containing errors on the y-values. :param mark: the symbol used to mark the data points. May be any plot mark accepted by TikZ (e.g. ``*, x, +, o, square, triangle``). :param markstyle: the style of the plot marks (e.g. 'mark size=.75pt') Example:: >>> plot = artist.Plot() >>> x = np.random.normal(size=20) >>> y = np.random.normal(size=20) >>> plot.scatter(x, y, mark='*') """ self.plot(x, y, xerr=xerr, yerr=yerr, mark=mark, linestyle=None, markstyle=markstyle)
[ "def", "scatter", "(", "self", ",", "x", ",", "y", ",", "xerr", "=", "[", "]", ",", "yerr", "=", "[", "]", ",", "mark", "=", "'o'", ",", "markstyle", "=", "None", ")", ":", "self", ".", "plot", "(", "x", ",", "y", ",", "xerr", "=", "xerr", ",", "yerr", "=", "yerr", ",", "mark", "=", "mark", ",", "linestyle", "=", "None", ",", "markstyle", "=", "markstyle", ")" ]
Plot a series of points. Plot a series of points (marks) that are not connected by a line. Shortcut for plot with linestyle=None. :param x: array containing x-values. :param y: array containing y-values. :param xerr: array containing errors on the x-values. :param yerr: array containing errors on the y-values. :param mark: the symbol used to mark the data points. May be any plot mark accepted by TikZ (e.g. ``*, x, +, o, square, triangle``). :param markstyle: the style of the plot marks (e.g. 'mark size=.75pt') Example:: >>> plot = artist.Plot() >>> x = np.random.normal(size=20) >>> y = np.random.normal(size=20) >>> plot.scatter(x, y, mark='*')
[ "Plot", "a", "series", "of", "points", "." ]
26ae7987522622710f2910980770c50012fda47d
https://github.com/davidfokkema/artist/blob/26ae7987522622710f2910980770c50012fda47d/artist/plot.py#L433-L458
train
davidfokkema/artist
artist/plot.py
SubPlot.set_label
def set_label(self, text, location='upper right', style=None): """Set a label for the plot. :param text: the label text. :param location: the location of the label inside the plot. May be one of 'center', 'upper right', 'lower right', 'upper left', 'lower left'. :param style: any TikZ style to style the text. """ if location in RELATIVE_NODE_LOCATIONS: label = RELATIVE_NODE_LOCATIONS[location].copy() label['text'] = text label['style'] = style self.label = label else: raise RuntimeError('Unknown label location: %s' % location)
python
def set_label(self, text, location='upper right', style=None): """Set a label for the plot. :param text: the label text. :param location: the location of the label inside the plot. May be one of 'center', 'upper right', 'lower right', 'upper left', 'lower left'. :param style: any TikZ style to style the text. """ if location in RELATIVE_NODE_LOCATIONS: label = RELATIVE_NODE_LOCATIONS[location].copy() label['text'] = text label['style'] = style self.label = label else: raise RuntimeError('Unknown label location: %s' % location)
[ "def", "set_label", "(", "self", ",", "text", ",", "location", "=", "'upper right'", ",", "style", "=", "None", ")", ":", "if", "location", "in", "RELATIVE_NODE_LOCATIONS", ":", "label", "=", "RELATIVE_NODE_LOCATIONS", "[", "location", "]", ".", "copy", "(", ")", "label", "[", "'text'", "]", "=", "text", "label", "[", "'style'", "]", "=", "style", "self", ".", "label", "=", "label", "else", ":", "raise", "RuntimeError", "(", "'Unknown label location: %s'", "%", "location", ")" ]
Set a label for the plot. :param text: the label text. :param location: the location of the label inside the plot. May be one of 'center', 'upper right', 'lower right', 'upper left', 'lower left'. :param style: any TikZ style to style the text.
[ "Set", "a", "label", "for", "the", "plot", "." ]
26ae7987522622710f2910980770c50012fda47d
https://github.com/davidfokkema/artist/blob/26ae7987522622710f2910980770c50012fda47d/artist/plot.py#L494-L510
train
davidfokkema/artist
artist/plot.py
SubPlot.add_pin
def add_pin(self, text, location='left', x=None, use_arrow=False, relative_position=None, style=None): """Add pin to most recent data series. The location of the pin is interpolated if necessary, while correcting for logarithmic x and/or y axes. :param text: the text of the pin label. :param location: the location of the pin relative to the data point. Any location accepted by TikZ is allowed. :type location: string :param x: the x location of the data point (in the most recent data series) at which to place the label. This is interpolated between the actual data points. If None, only the relative_position parameter is used. :param use_arrow: specifies whether to draw an arrow between the data point and the pin label text. :type use_arrow: boolean :param relative_position: location of the data point as a relative number between 0 and 1. :param style: optional TikZ styles to apply (e.g. 'red'). """ try: series = self.plot_series_list[-1] except IndexError: raise RuntimeError( 'First plot a data series, before using this function') data = series['data'] series_x, series_y = list(zip(*data))[:2] if x is not None: if self.xmode == 'log': series_x = np.log10(np.array(series_x)) xp = np.log10(x) else: xp = x if self.ymode == 'log': series_y = np.log10(np.array(series_y)) y = 10 ** np.interp(xp, series_x, series_y) else: y = np.interp(xp, series_x, series_y) else: x, y = series_x, series_y self.add_pin_at_xy(x, y, text, location, relative_position, use_arrow, style)
python
def add_pin(self, text, location='left', x=None, use_arrow=False, relative_position=None, style=None): """Add pin to most recent data series. The location of the pin is interpolated if necessary, while correcting for logarithmic x and/or y axes. :param text: the text of the pin label. :param location: the location of the pin relative to the data point. Any location accepted by TikZ is allowed. :type location: string :param x: the x location of the data point (in the most recent data series) at which to place the label. This is interpolated between the actual data points. If None, only the relative_position parameter is used. :param use_arrow: specifies whether to draw an arrow between the data point and the pin label text. :type use_arrow: boolean :param relative_position: location of the data point as a relative number between 0 and 1. :param style: optional TikZ styles to apply (e.g. 'red'). """ try: series = self.plot_series_list[-1] except IndexError: raise RuntimeError( 'First plot a data series, before using this function') data = series['data'] series_x, series_y = list(zip(*data))[:2] if x is not None: if self.xmode == 'log': series_x = np.log10(np.array(series_x)) xp = np.log10(x) else: xp = x if self.ymode == 'log': series_y = np.log10(np.array(series_y)) y = 10 ** np.interp(xp, series_x, series_y) else: y = np.interp(xp, series_x, series_y) else: x, y = series_x, series_y self.add_pin_at_xy(x, y, text, location, relative_position, use_arrow, style)
[ "def", "add_pin", "(", "self", ",", "text", ",", "location", "=", "'left'", ",", "x", "=", "None", ",", "use_arrow", "=", "False", ",", "relative_position", "=", "None", ",", "style", "=", "None", ")", ":", "try", ":", "series", "=", "self", ".", "plot_series_list", "[", "-", "1", "]", "except", "IndexError", ":", "raise", "RuntimeError", "(", "'First plot a data series, before using this function'", ")", "data", "=", "series", "[", "'data'", "]", "series_x", ",", "series_y", "=", "list", "(", "zip", "(", "*", "data", ")", ")", "[", ":", "2", "]", "if", "x", "is", "not", "None", ":", "if", "self", ".", "xmode", "==", "'log'", ":", "series_x", "=", "np", ".", "log10", "(", "np", ".", "array", "(", "series_x", ")", ")", "xp", "=", "np", ".", "log10", "(", "x", ")", "else", ":", "xp", "=", "x", "if", "self", ".", "ymode", "==", "'log'", ":", "series_y", "=", "np", ".", "log10", "(", "np", ".", "array", "(", "series_y", ")", ")", "y", "=", "10", "**", "np", ".", "interp", "(", "xp", ",", "series_x", ",", "series_y", ")", "else", ":", "y", "=", "np", ".", "interp", "(", "xp", ",", "series_x", ",", "series_y", ")", "else", ":", "x", ",", "y", "=", "series_x", ",", "series_y", "self", ".", "add_pin_at_xy", "(", "x", ",", "y", ",", "text", ",", "location", ",", "relative_position", ",", "use_arrow", ",", "style", ")" ]
Add pin to most recent data series. The location of the pin is interpolated if necessary, while correcting for logarithmic x and/or y axes. :param text: the text of the pin label. :param location: the location of the pin relative to the data point. Any location accepted by TikZ is allowed. :type location: string :param x: the x location of the data point (in the most recent data series) at which to place the label. This is interpolated between the actual data points. If None, only the relative_position parameter is used. :param use_arrow: specifies whether to draw an arrow between the data point and the pin label text. :type use_arrow: boolean :param relative_position: location of the data point as a relative number between 0 and 1. :param style: optional TikZ styles to apply (e.g. 'red').
[ "Add", "pin", "to", "most", "recent", "data", "series", "." ]
26ae7987522622710f2910980770c50012fda47d
https://github.com/davidfokkema/artist/blob/26ae7987522622710f2910980770c50012fda47d/artist/plot.py#L512-L560
train
davidfokkema/artist
artist/plot.py
SubPlot.add_pin_at_xy
def add_pin_at_xy(self, x, y, text, location='above right', relative_position=.9, use_arrow=True, style=None): """Add pin at x, y location. :param x: array, list or float, specifying the location of the pin. :param y: array, list or float, specifying the location of the pin. :param text: the text of the pin label. :param location: the location of the pin relative to the data point. Any location accepted by TikZ is allowed. :param relative_position: location of the data point as a relative number between 0 and 1. :param use_arrow: specifies whether to draw an arrow between the data point and the pin label text. :type use_arrow: boolean :param style: optional TikZ styles to apply (e.g. 'red'). If x, y are arrays or lists, relative position is used to pick a point from the arrays. A relative position of 0.0 will be the first point from the series, while 1.0 will be the last point. """ if relative_position is None: if location == 'left': relative_position = 0. elif location == 'right': relative_position = 1. else: relative_position = .8 x, y = self._calc_position_for_pin(x, y, relative_position) self.pin_list.append({'x': x, 'y': y, 'text': text, 'location': location, 'use_arrow': use_arrow, 'options': style})
python
def add_pin_at_xy(self, x, y, text, location='above right', relative_position=.9, use_arrow=True, style=None): """Add pin at x, y location. :param x: array, list or float, specifying the location of the pin. :param y: array, list or float, specifying the location of the pin. :param text: the text of the pin label. :param location: the location of the pin relative to the data point. Any location accepted by TikZ is allowed. :param relative_position: location of the data point as a relative number between 0 and 1. :param use_arrow: specifies whether to draw an arrow between the data point and the pin label text. :type use_arrow: boolean :param style: optional TikZ styles to apply (e.g. 'red'). If x, y are arrays or lists, relative position is used to pick a point from the arrays. A relative position of 0.0 will be the first point from the series, while 1.0 will be the last point. """ if relative_position is None: if location == 'left': relative_position = 0. elif location == 'right': relative_position = 1. else: relative_position = .8 x, y = self._calc_position_for_pin(x, y, relative_position) self.pin_list.append({'x': x, 'y': y, 'text': text, 'location': location, 'use_arrow': use_arrow, 'options': style})
[ "def", "add_pin_at_xy", "(", "self", ",", "x", ",", "y", ",", "text", ",", "location", "=", "'above right'", ",", "relative_position", "=", ".9", ",", "use_arrow", "=", "True", ",", "style", "=", "None", ")", ":", "if", "relative_position", "is", "None", ":", "if", "location", "==", "'left'", ":", "relative_position", "=", "0.", "elif", "location", "==", "'right'", ":", "relative_position", "=", "1.", "else", ":", "relative_position", "=", ".8", "x", ",", "y", "=", "self", ".", "_calc_position_for_pin", "(", "x", ",", "y", ",", "relative_position", ")", "self", ".", "pin_list", ".", "append", "(", "{", "'x'", ":", "x", ",", "'y'", ":", "y", ",", "'text'", ":", "text", ",", "'location'", ":", "location", ",", "'use_arrow'", ":", "use_arrow", ",", "'options'", ":", "style", "}", ")" ]
Add pin at x, y location. :param x: array, list or float, specifying the location of the pin. :param y: array, list or float, specifying the location of the pin. :param text: the text of the pin label. :param location: the location of the pin relative to the data point. Any location accepted by TikZ is allowed. :param relative_position: location of the data point as a relative number between 0 and 1. :param use_arrow: specifies whether to draw an arrow between the data point and the pin label text. :type use_arrow: boolean :param style: optional TikZ styles to apply (e.g. 'red'). If x, y are arrays or lists, relative position is used to pick a point from the arrays. A relative position of 0.0 will be the first point from the series, while 1.0 will be the last point.
[ "Add", "pin", "at", "x", "y", "location", "." ]
26ae7987522622710f2910980770c50012fda47d
https://github.com/davidfokkema/artist/blob/26ae7987522622710f2910980770c50012fda47d/artist/plot.py#L562-L597
train
davidfokkema/artist
artist/plot.py
SubPlot.shade_region
def shade_region(self, x, lower, upper, color='lightgray'): """Shade a region between upper and lower bounds. :param x: array containing x-values :param lower: array containing y-values of lower bounds :param upper: array containing y-values of upper bounds :param color: TikZ style to color the region """ self.shaded_regions_list.append({'data': list(zip(x, lower, upper)), 'color': color})
python
def shade_region(self, x, lower, upper, color='lightgray'): """Shade a region between upper and lower bounds. :param x: array containing x-values :param lower: array containing y-values of lower bounds :param upper: array containing y-values of upper bounds :param color: TikZ style to color the region """ self.shaded_regions_list.append({'data': list(zip(x, lower, upper)), 'color': color})
[ "def", "shade_region", "(", "self", ",", "x", ",", "lower", ",", "upper", ",", "color", "=", "'lightgray'", ")", ":", "self", ".", "shaded_regions_list", ".", "append", "(", "{", "'data'", ":", "list", "(", "zip", "(", "x", ",", "lower", ",", "upper", ")", ")", ",", "'color'", ":", "color", "}", ")" ]
Shade a region between upper and lower bounds. :param x: array containing x-values :param lower: array containing y-values of lower bounds :param upper: array containing y-values of upper bounds :param color: TikZ style to color the region
[ "Shade", "a", "region", "between", "upper", "and", "lower", "bounds", "." ]
26ae7987522622710f2910980770c50012fda47d
https://github.com/davidfokkema/artist/blob/26ae7987522622710f2910980770c50012fda47d/artist/plot.py#L599-L609
train
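A sketch combining several of the SubPlot helpers from the surrounding records (`shade_region`, `add_pin`, `set_xlimits`, `save_as_pdf`). It assumes the artist package plus a LaTeX toolchain; `plot()` itself is only referenced, not shown, in these records, so its use here follows the `scatter` and `histogram` docstrings.

import numpy as np
import artist

x = np.linspace(0, 10, 50)
y = np.sin(x)

plot = artist.Plot()
plot.shade_region(x, y - 0.2, y + 0.2)             # lightgray band around the curve
plot.plot(x, y, mark=None)                         # the curve itself
plot.add_pin('peak', x=np.pi / 2, use_arrow=True)  # pin interpolated onto the curve
plot.set_xlimits(0, 10)
plot.save_as_pdf('sine-band')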
davidfokkema/artist
artist/plot.py
SubPlot.draw_image
def draw_image(self, image, xmin=0, ymin=0, xmax=None, ymax=None): """Draw an image. Do not forget to use :meth:`set_axis_equal` to preserve the aspect ratio of the image, or change the aspect ratio of the plot to the aspect ratio of the image. :param image: Pillow Image object. :param xmin,ymin,xmax,ymax: the x, y image bounds. Example:: >>> from PIL import Image >>> image = Image.open('background.png') >>> plot = artist.Plot() >>> plot.set_axis_equal() >>> plot.draw_image(image) """ if xmax is None: xmax = xmin + image.size[0] if ymax is None: ymax = ymin + image.size[1] self.bitmap_list.append({'image': image, 'xmin': xmin, 'xmax': xmax, 'ymin': ymin, 'ymax': ymax}) # Set limits unless lower/higher limits are already set. xmin = min(x for x in (xmin, self.limits['xmin']) if x is not None) ymin = min(y for y in (ymin, self.limits['ymin']) if y is not None) xmax = max(x for x in (xmax, self.limits['xmax']) if x is not None) ymax = max(y for y in (ymax, self.limits['ymax']) if y is not None) self.set_xlimits(xmin, xmax) self.set_ylimits(ymin, ymax)
python
def draw_image(self, image, xmin=0, ymin=0, xmax=None, ymax=None): """Draw an image. Do not forget to use :meth:`set_axis_equal` to preserve the aspect ratio of the image, or change the aspect ratio of the plot to the aspect ratio of the image. :param image: Pillow Image object. :param xmin,ymin,xmax,ymax: the x, y image bounds. Example:: >>> from PIL import Image >>> image = Image.open('background.png') >>> plot = artist.Plot() >>> plot.set_axis_equal() >>> plot.draw_image(image) """ if xmax is None: xmax = xmin + image.size[0] if ymax is None: ymax = ymin + image.size[1] self.bitmap_list.append({'image': image, 'xmin': xmin, 'xmax': xmax, 'ymin': ymin, 'ymax': ymax}) # Set limits unless lower/higher limits are already set. xmin = min(x for x in (xmin, self.limits['xmin']) if x is not None) ymin = min(y for y in (ymin, self.limits['ymin']) if y is not None) xmax = max(x for x in (xmax, self.limits['xmax']) if x is not None) ymax = max(y for y in (ymax, self.limits['ymax']) if y is not None) self.set_xlimits(xmin, xmax) self.set_ylimits(ymin, ymax)
[ "def", "draw_image", "(", "self", ",", "image", ",", "xmin", "=", "0", ",", "ymin", "=", "0", ",", "xmax", "=", "None", ",", "ymax", "=", "None", ")", ":", "if", "xmax", "is", "None", ":", "xmax", "=", "xmin", "+", "image", ".", "size", "[", "0", "]", "if", "ymax", "is", "None", ":", "ymax", "=", "ymin", "+", "image", ".", "size", "[", "1", "]", "self", ".", "bitmap_list", ".", "append", "(", "{", "'image'", ":", "image", ",", "'xmin'", ":", "xmin", ",", "'xmax'", ":", "xmax", ",", "'ymin'", ":", "ymin", ",", "'ymax'", ":", "ymax", "}", ")", "# Set limits unless lower/higher limits are already set.", "xmin", "=", "min", "(", "x", "for", "x", "in", "(", "xmin", ",", "self", ".", "limits", "[", "'xmin'", "]", ")", "if", "x", "is", "not", "None", ")", "ymin", "=", "min", "(", "y", "for", "y", "in", "(", "ymin", ",", "self", ".", "limits", "[", "'ymin'", "]", ")", "if", "y", "is", "not", "None", ")", "xmax", "=", "max", "(", "x", "for", "x", "in", "(", "xmax", ",", "self", ".", "limits", "[", "'xmax'", "]", ")", "if", "x", "is", "not", "None", ")", "ymax", "=", "max", "(", "y", "for", "y", "in", "(", "ymax", ",", "self", ".", "limits", "[", "'ymax'", "]", ")", "if", "y", "is", "not", "None", ")", "self", ".", "set_xlimits", "(", "xmin", ",", "xmax", ")", "self", ".", "set_ylimits", "(", "ymin", ",", "ymax", ")" ]
Draw an image. Do not forget to use :meth:`set_axis_equal` to preserve the aspect ratio of the image, or change the aspect ratio of the plot to the aspect ratio of the image. :param image: Pillow Image object. :param xmin,ymin,xmax,ymax: the x, y image bounds. Example:: >>> from PIL import Image >>> image = Image.open('background.png') >>> plot = artist.Plot() >>> plot.set_axis_equal() >>> plot.draw_image(image)
[ "Draw", "an", "image", "." ]
26ae7987522622710f2910980770c50012fda47d
https://github.com/davidfokkema/artist/blob/26ae7987522622710f2910980770c50012fda47d/artist/plot.py#L611-L649
train
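The limit bookkeeping at the end of draw_image — growing the current plot limits to cover the image bounds while skipping limits that are still unset (None) — can be exercised on its own. A minimal sketch of just that merge step, outside the plot class; the example numbers are made up.

def merge_limits(limits, xmin, ymin, xmax, ymax):
    # Grow existing plot limits to cover the image bounds, ignoring unset (None) limits.
    return {
        'xmin': min(x for x in (xmin, limits['xmin']) if x is not None),
        'xmax': max(x for x in (xmax, limits['xmax']) if x is not None),
        'ymin': min(y for y in (ymin, limits['ymin']) if y is not None),
        'ymax': max(y for y in (ymax, limits['ymax']) if y is not None),
    }

# A 640x480 image placed at the origin, with only an upper x-limit set beforehand.
limits = {'xmin': None, 'xmax': 1000, 'ymin': None, 'ymax': None}
print(merge_limits(limits, 0, 0, 640, 480))
# {'xmin': 0, 'xmax': 1000, 'ymin': 0, 'ymax': 480}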
davidfokkema/artist
artist/plot.py
SubPlot.set_xlimits
def set_xlimits(self, min=None, max=None): """Set limits for the x-axis. :param min: minimum value to be displayed. If None, it will be calculated. :param max: maximum value to be displayed. If None, it will be calculated. """ self.limits['xmin'] = min self.limits['xmax'] = max
python
def set_xlimits(self, min=None, max=None): """Set limits for the x-axis. :param min: minimum value to be displayed. If None, it will be calculated. :param max: maximum value to be displayed. If None, it will be calculated. """ self.limits['xmin'] = min self.limits['xmax'] = max
[ "def", "set_xlimits", "(", "self", ",", "min", "=", "None", ",", "max", "=", "None", ")", ":", "self", ".", "limits", "[", "'xmin'", "]", "=", "min", "self", ".", "limits", "[", "'xmax'", "]", "=", "max" ]
Set limits for the x-axis. :param min: minimum value to be displayed. If None, it will be calculated. :param max: maximum value to be displayed. If None, it will be calculated.
[ "Set", "limits", "for", "the", "x", "-", "axis", "." ]
26ae7987522622710f2910980770c50012fda47d
https://github.com/davidfokkema/artist/blob/26ae7987522622710f2910980770c50012fda47d/artist/plot.py#L695-L705
train
davidfokkema/artist
artist/plot.py
SubPlot.set_ylimits
def set_ylimits(self, min=None, max=None): """Set limits for the y-axis. :param min: minimum value to be displayed. If None, it will be calculated. :param max: maximum value to be displayed. If None, it will be calculated. """ self.limits['ymin'] = min self.limits['ymax'] = max
python
def set_ylimits(self, min=None, max=None): """Set limits for the y-axis. :param min: minimum value to be displayed. If None, it will be calculated. :param max: maximum value to be displayed. If None, it will be calculated. """ self.limits['ymin'] = min self.limits['ymax'] = max
[ "def", "set_ylimits", "(", "self", ",", "min", "=", "None", ",", "max", "=", "None", ")", ":", "self", ".", "limits", "[", "'ymin'", "]", "=", "min", "self", ".", "limits", "[", "'ymax'", "]", "=", "max" ]
Set limits for the y-axis. :param min: minimum value to be displayed. If None, it will be calculated. :param max: maximum value to be displayed. If None, it will be calculated.
[ "Set", "limits", "for", "the", "y", "-", "axis", "." ]
26ae7987522622710f2910980770c50012fda47d
https://github.com/davidfokkema/artist/blob/26ae7987522622710f2910980770c50012fda47d/artist/plot.py#L707-L717
train
davidfokkema/artist
artist/plot.py
SubPlot.set_xtick_labels
def set_xtick_labels(self, labels, style=None): """Set tick labels for the x-axis. Also set the x-ticks positions to ensure the labels end up on the correct place. :param labels: list of labels for the ticks along the axis. """ self.ticks['xlabels'] = labels self.ticks['xlabel_style'] = style
python
def set_xtick_labels(self, labels, style=None): """Set tick labels for the x-axis. Also set the x-ticks positions to ensure the labels end up on the correct place. :param labels: list of labels for the ticks along the axis. """ self.ticks['xlabels'] = labels self.ticks['xlabel_style'] = style
[ "def", "set_xtick_labels", "(", "self", ",", "labels", ",", "style", "=", "None", ")", ":", "self", ".", "ticks", "[", "'xlabels'", "]", "=", "labels", "self", ".", "ticks", "[", "'xlabel_style'", "]", "=", "style" ]
Set tick labels for the x-axis. Also set the x-ticks positions to ensure the labels end up on the correct place. :param labels: list of labels for the ticks along the axis.
[ "Set", "tick", "labels", "for", "the", "x", "-", "axis", "." ]
26ae7987522622710f2910980770c50012fda47d
https://github.com/davidfokkema/artist/blob/26ae7987522622710f2910980770c50012fda47d/artist/plot.py#L811-L821
train
davidfokkema/artist
artist/plot.py
SubPlot.set_ytick_labels
def set_ytick_labels(self, labels, style=None): """Set tick labels for the y-axis. Also set the y-ticks positions to ensure the labels end up on the correct place. :param labels: list of labels for the ticks along the axis. """ self.ticks['ylabels'] = labels self.ticks['ylabel_style'] = style
python
def set_ytick_labels(self, labels, style=None): """Set tick labels for the y-axis. Also set the y-ticks positions to ensure the labels end up on the correct place. :param labels: list of labels for the ticks along the axis. """ self.ticks['ylabels'] = labels self.ticks['ylabel_style'] = style
[ "def", "set_ytick_labels", "(", "self", ",", "labels", ",", "style", "=", "None", ")", ":", "self", ".", "ticks", "[", "'ylabels'", "]", "=", "labels", "self", ".", "ticks", "[", "'ylabel_style'", "]", "=", "style" ]
Set tick labels for the y-axis. Also set the y-ticks positions to ensure the labels end up on the correct place. :param labels: list of labels for the ticks along the axis.
[ "Set", "tick", "labels", "for", "the", "y", "-", "axis", "." ]
26ae7987522622710f2910980770c50012fda47d
https://github.com/davidfokkema/artist/blob/26ae7987522622710f2910980770c50012fda47d/artist/plot.py#L823-L833
train
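A small usage sketch for the two tick-label setters above. It only reproduces the ticks-dict bookkeeping shown in the code, so a tiny stand-in class (a name invented here) replaces the full SubPlot; as the docstrings note, the matching tick positions still have to be set separately.

class TickLabelDemo:
    # Stand-in that reuses only the bookkeeping from the two setters above.
    def __init__(self):
        self.ticks = {}

    def set_xtick_labels(self, labels, style=None):
        self.ticks['xlabels'] = labels
        self.ticks['xlabel_style'] = style

    def set_ytick_labels(self, labels, style=None):
        self.ticks['ylabels'] = labels
        self.ticks['ylabel_style'] = style

demo = TickLabelDemo()
demo.set_xtick_labels(['Mon', 'Tue', 'Wed'], style='rotate=45')
demo.set_ytick_labels(['low', 'high'])
print(demo.ticks['xlabels'])       # ['Mon', 'Tue', 'Wed']
print(demo.ticks['ylabel_style'])  # None (no style given)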
davidfokkema/artist
artist/plot.py
SubPlot.set_xtick_suffix
def set_xtick_suffix(self, suffix):
        """Set the suffix for the ticks of the x-axis.

        :param suffix: string added after each tick. If the value is
            `degree` or `percent` the corresponding symbols will be added.

        """
        if suffix == 'degree':
            suffix = r'^\circ'
        elif suffix == 'percent':
            suffix = r'\%'

        self.ticks['xsuffix'] = suffix
python
def set_xtick_suffix(self, suffix):
        """Set the suffix for the ticks of the x-axis.

        :param suffix: string added after each tick. If the value is
            `degree` or `percent` the corresponding symbols will be added.

        """
        if suffix == 'degree':
            suffix = r'^\circ'
        elif suffix == 'percent':
            suffix = r'\%'

        self.ticks['xsuffix'] = suffix
[ "def", "set_xtick_suffix", "(", "self", ",", "suffix", ")", ":", "if", "suffix", "==", "'degree'", ":", "suffix", "=", "r'^\\circ'", "elif", "suffix", "==", "'percent'", ":", "suffix", "=", "r'\\%'", "self", ".", "ticks", "[", "'xsuffix'", "]", "=", "suffix" ]
Set the suffix for the ticks of the x-axis.

        :param suffix: string added after each tick. If the value is
            `degree` or `percent` the corresponding symbols will be added.
[ "Set", "the", "suffix", "for", "the", "ticks", "of", "the", "x", "-", "axis", "." ]
26ae7987522622710f2910980770c50012fda47d
https://github.com/davidfokkema/artist/blob/26ae7987522622710f2910980770c50012fda47d/artist/plot.py#L835-L848
train
davidfokkema/artist
artist/plot.py
SubPlot.set_ytick_suffix
def set_ytick_suffix(self, suffix):
        """Set the suffix for the ticks of the y-axis.

        :param suffix: string added after each tick. If the value is
            `degree` or `percent` the corresponding symbols will be added.

        """
        if suffix == 'degree':
            suffix = r'^\circ'
        elif suffix == 'percent':
            suffix = r'\%'

        self.ticks['ysuffix'] = suffix
python
def set_ytick_suffix(self, suffix):
        """Set the suffix for the ticks of the y-axis.

        :param suffix: string added after each tick. If the value is
            `degree` or `percent` the corresponding symbols will be added.

        """
        if suffix == 'degree':
            suffix = r'^\circ'
        elif suffix == 'percent':
            suffix = r'\%'

        self.ticks['ysuffix'] = suffix
[ "def", "set_ytick_suffix", "(", "self", ",", "suffix", ")", ":", "if", "suffix", "==", "'degree'", ":", "suffix", "=", "r'^\\circ'", "elif", "suffix", "==", "'percent'", ":", "suffix", "=", "r'\\%'", "self", ".", "ticks", "[", "'ysuffix'", "]", "=", "suffix" ]
Set the suffix for the ticks of the y-axis.

        :param suffix: string added after each tick. If the value is
            `degree` or `percent` the corresponding symbols will be added.
[ "Set", "ticks", "for", "the", "y", "-", "axis", "." ]
26ae7987522622710f2910980770c50012fda47d
https://github.com/davidfokkema/artist/blob/26ae7987522622710f2910980770c50012fda47d/artist/plot.py#L850-L863
train
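The two suffix setters above map the keywords 'degree' and 'percent' to their TikZ/LaTeX symbols and store any other string verbatim. A standalone copy of that mapping with a few worked calls:

def tick_suffix(suffix):
    # Keyword-to-symbol mapping used by set_xtick_suffix / set_ytick_suffix.
    if suffix == 'degree':
        return r'^\circ'
    elif suffix == 'percent':
        return r'\%'
    return suffix

print(tick_suffix('degree'))   # ^\circ
print(tick_suffix('percent'))  # \%
print(tick_suffix(' km'))      # ' km' is stored unchanged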
davidfokkema/artist
artist/plot.py
SubPlot.set_scalebar
def set_scalebar(self, location='lower right'): """Show marker area scale. :param location: the location of the label inside the plot. May be one of 'center', 'upper right', 'lower right', 'upper left', 'lower left'. """ if location in RELATIVE_NODE_LOCATIONS: scalebar = RELATIVE_NODE_LOCATIONS[location].copy() self.scalebar = scalebar else: raise RuntimeError('Unknown scalebar location: %s' % location)
python
def set_scalebar(self, location='lower right'): """Show marker area scale. :param location: the location of the label inside the plot. May be one of 'center', 'upper right', 'lower right', 'upper left', 'lower left'. """ if location in RELATIVE_NODE_LOCATIONS: scalebar = RELATIVE_NODE_LOCATIONS[location].copy() self.scalebar = scalebar else: raise RuntimeError('Unknown scalebar location: %s' % location)
[ "def", "set_scalebar", "(", "self", ",", "location", "=", "'lower right'", ")", ":", "if", "location", "in", "RELATIVE_NODE_LOCATIONS", ":", "scalebar", "=", "RELATIVE_NODE_LOCATIONS", "[", "location", "]", ".", "copy", "(", ")", "self", ".", "scalebar", "=", "scalebar", "else", ":", "raise", "RuntimeError", "(", "'Unknown scalebar location: %s'", "%", "location", ")" ]
Show marker area scale. :param location: the location of the label inside the plot. May be one of 'center', 'upper right', 'lower right', 'upper left', 'lower left'.
[ "Show", "marker", "area", "scale", "." ]
26ae7987522622710f2910980770c50012fda47d
https://github.com/davidfokkema/artist/blob/26ae7987522622710f2910980770c50012fda47d/artist/plot.py#L870-L882
train
davidfokkema/artist
artist/plot.py
SubPlot._calc_position_for_pin
def _calc_position_for_pin(self, x, y, relative_position): """Determine position at fraction of x, y path. :param x,y: two equal length lists of values describing a path. :param relative_position: value between 0 and 1 :returns: the x, y position of the fraction (relative_position) of the path length. """ try: max_idx_x = len(x) - 1 max_idx_y = len(y) - 1 except TypeError: return x, y else: assert max_idx_x == max_idx_y, \ 'If x and y are iterables, they must be the same length' if relative_position == 0: xs, ys = x[0], y[0] elif relative_position == 1: xs, ys = x[max_idx_x], y[max_idx_y] else: if self.xmode == 'log': x = np.log10(np.array(x)) if self.ymode == 'log': y = np.log10(np.array(y)) rel_length = [0] rel_length.extend(self._calc_relative_path_lengths(x, y)) idx = np.interp(relative_position, rel_length, range(len(rel_length))) frac, idx = modf(idx) idx = int(idx) if self.xmode == 'log': xs = 10 ** (x[idx] + (x[idx + 1] - x[idx]) * frac) else: xs = x[idx] + (x[idx + 1] - x[idx]) * frac if self.ymode == 'log': ys = 10 ** (y[idx] + (y[idx + 1] - y[idx]) * frac) else: ys = y[idx] + (y[idx + 1] - y[idx]) * frac return xs, ys
python
def _calc_position_for_pin(self, x, y, relative_position): """Determine position at fraction of x, y path. :param x,y: two equal length lists of values describing a path. :param relative_position: value between 0 and 1 :returns: the x, y position of the fraction (relative_position) of the path length. """ try: max_idx_x = len(x) - 1 max_idx_y = len(y) - 1 except TypeError: return x, y else: assert max_idx_x == max_idx_y, \ 'If x and y are iterables, they must be the same length' if relative_position == 0: xs, ys = x[0], y[0] elif relative_position == 1: xs, ys = x[max_idx_x], y[max_idx_y] else: if self.xmode == 'log': x = np.log10(np.array(x)) if self.ymode == 'log': y = np.log10(np.array(y)) rel_length = [0] rel_length.extend(self._calc_relative_path_lengths(x, y)) idx = np.interp(relative_position, rel_length, range(len(rel_length))) frac, idx = modf(idx) idx = int(idx) if self.xmode == 'log': xs = 10 ** (x[idx] + (x[idx + 1] - x[idx]) * frac) else: xs = x[idx] + (x[idx + 1] - x[idx]) * frac if self.ymode == 'log': ys = 10 ** (y[idx] + (y[idx + 1] - y[idx]) * frac) else: ys = y[idx] + (y[idx + 1] - y[idx]) * frac return xs, ys
[ "def", "_calc_position_for_pin", "(", "self", ",", "x", ",", "y", ",", "relative_position", ")", ":", "try", ":", "max_idx_x", "=", "len", "(", "x", ")", "-", "1", "max_idx_y", "=", "len", "(", "y", ")", "-", "1", "except", "TypeError", ":", "return", "x", ",", "y", "else", ":", "assert", "max_idx_x", "==", "max_idx_y", ",", "'If x and y are iterables, they must be the same length'", "if", "relative_position", "==", "0", ":", "xs", ",", "ys", "=", "x", "[", "0", "]", ",", "y", "[", "0", "]", "elif", "relative_position", "==", "1", ":", "xs", ",", "ys", "=", "x", "[", "max_idx_x", "]", ",", "y", "[", "max_idx_y", "]", "else", ":", "if", "self", ".", "xmode", "==", "'log'", ":", "x", "=", "np", ".", "log10", "(", "np", ".", "array", "(", "x", ")", ")", "if", "self", ".", "ymode", "==", "'log'", ":", "y", "=", "np", ".", "log10", "(", "np", ".", "array", "(", "y", ")", ")", "rel_length", "=", "[", "0", "]", "rel_length", ".", "extend", "(", "self", ".", "_calc_relative_path_lengths", "(", "x", ",", "y", ")", ")", "idx", "=", "np", ".", "interp", "(", "relative_position", ",", "rel_length", ",", "range", "(", "len", "(", "rel_length", ")", ")", ")", "frac", ",", "idx", "=", "modf", "(", "idx", ")", "idx", "=", "int", "(", "idx", ")", "if", "self", ".", "xmode", "==", "'log'", ":", "xs", "=", "10", "**", "(", "x", "[", "idx", "]", "+", "(", "x", "[", "idx", "+", "1", "]", "-", "x", "[", "idx", "]", ")", "*", "frac", ")", "else", ":", "xs", "=", "x", "[", "idx", "]", "+", "(", "x", "[", "idx", "+", "1", "]", "-", "x", "[", "idx", "]", ")", "*", "frac", "if", "self", ".", "ymode", "==", "'log'", ":", "ys", "=", "10", "**", "(", "y", "[", "idx", "]", "+", "(", "y", "[", "idx", "+", "1", "]", "-", "y", "[", "idx", "]", ")", "*", "frac", ")", "else", ":", "ys", "=", "y", "[", "idx", "]", "+", "(", "y", "[", "idx", "+", "1", "]", "-", "y", "[", "idx", "]", ")", "*", "frac", "return", "xs", ",", "ys" ]
Determine position at fraction of x, y path. :param x,y: two equal length lists of values describing a path. :param relative_position: value between 0 and 1 :returns: the x, y position of the fraction (relative_position) of the path length.
[ "Determine", "position", "at", "fraction", "of", "x", "y", "path", "." ]
26ae7987522622710f2910980770c50012fda47d
https://github.com/davidfokkema/artist/blob/26ae7987522622710f2910980770c50012fda47d/artist/plot.py#L952-L993
train
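For linear axes, _calc_position_for_pin reduces to interpolating along the cumulative path length. A reduced NumPy sketch of that idea (the log-scale handling and the scalar fallback from the method above are left out):

import numpy as np
from math import modf

def position_at_fraction(x, y, fraction):
    # Relative path length at each vertex, with 0 prepended for the first point.
    segment_lengths = np.sqrt(np.diff(x) ** 2 + np.diff(y) ** 2)
    rel = np.concatenate(([0.], np.cumsum(segment_lengths) / segment_lengths.sum()))
    # Fractional vertex index corresponding to the requested fraction of the path.
    frac, idx = modf(np.interp(fraction, rel, np.arange(len(rel))))
    idx = int(idx)
    if idx == len(x) - 1:
        return x[-1], y[-1]          # exactly at the end of the path
    xs = x[idx] + (x[idx + 1] - x[idx]) * frac
    ys = y[idx] + (y[idx + 1] - y[idx]) * frac
    return xs, ys

x = np.array([0., 1., 2.])
y = np.array([0., 0., 1.])
print(position_at_fraction(x, y, 0.5))  # halfway along the total path length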
davidfokkema/artist
artist/plot.py
SubPlot._calc_relative_path_lengths
def _calc_relative_path_lengths(self, x, y): """Determine the relative path length at each x,y position.""" path_lengths = np.sqrt(np.diff(x) ** 2 + np.diff(y) ** 2) total_length = np.sum(path_lengths) cummulative_lengths = np.cumsum(path_lengths) relative_path_lengths = cummulative_lengths / total_length return relative_path_lengths
python
def _calc_relative_path_lengths(self, x, y): """Determine the relative path length at each x,y position.""" path_lengths = np.sqrt(np.diff(x) ** 2 + np.diff(y) ** 2) total_length = np.sum(path_lengths) cummulative_lengths = np.cumsum(path_lengths) relative_path_lengths = cummulative_lengths / total_length return relative_path_lengths
[ "def", "_calc_relative_path_lengths", "(", "self", ",", "x", ",", "y", ")", ":", "path_lengths", "=", "np", ".", "sqrt", "(", "np", ".", "diff", "(", "x", ")", "**", "2", "+", "np", ".", "diff", "(", "y", ")", "**", "2", ")", "total_length", "=", "np", ".", "sum", "(", "path_lengths", ")", "cummulative_lengths", "=", "np", ".", "cumsum", "(", "path_lengths", ")", "relative_path_lengths", "=", "cummulative_lengths", "/", "total_length", "return", "relative_path_lengths" ]
Determine the relative path length at each x,y position.
[ "Determine", "the", "relative", "path", "length", "at", "each", "x", "y", "position", "." ]
26ae7987522622710f2910980770c50012fda47d
https://github.com/davidfokkema/artist/blob/26ae7987522622710f2910980770c50012fda47d/artist/plot.py#L995-L1002
train
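A quick worked example for the helper above: for an L-shaped path with segment lengths 3 and 4, each relative value marks how far along the total path the corresponding vertex lies.

import numpy as np

x = np.array([0., 3., 3.])
y = np.array([0., 0., 4.])
path_lengths = np.sqrt(np.diff(x) ** 2 + np.diff(y) ** 2)     # [3., 4.]
relative = np.cumsum(path_lengths) / np.sum(path_lengths)
print(relative)  # [0.42857143 1.] -> the corner sits 3/7 of the way along the path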
davidfokkema/artist
artist/plot.py
SubPlot._normalize_histogram2d
def _normalize_histogram2d(self, counts, type): """Normalize the values of the counts for a 2D histogram. This normalizes the values of a numpy array to the range 0-255. :param counts: a NumPy array which is to be rescaled. :param type: either 'bw' or 'reverse_bw'. """ counts = (255 * (counts - np.nanmin(counts)) / (np.nanmax(counts) - np.nanmin(counts))) if type == 'reverse_bw': counts = 255 - counts return counts.astype(np.uint8)
python
def _normalize_histogram2d(self, counts, type): """Normalize the values of the counts for a 2D histogram. This normalizes the values of a numpy array to the range 0-255. :param counts: a NumPy array which is to be rescaled. :param type: either 'bw' or 'reverse_bw'. """ counts = (255 * (counts - np.nanmin(counts)) / (np.nanmax(counts) - np.nanmin(counts))) if type == 'reverse_bw': counts = 255 - counts return counts.astype(np.uint8)
[ "def", "_normalize_histogram2d", "(", "self", ",", "counts", ",", "type", ")", ":", "counts", "=", "(", "255", "*", "(", "counts", "-", "np", ".", "nanmin", "(", "counts", ")", ")", "/", "(", "np", ".", "nanmax", "(", "counts", ")", "-", "np", ".", "nanmin", "(", "counts", ")", ")", ")", "if", "type", "==", "'reverse_bw'", ":", "counts", "=", "255", "-", "counts", "return", "counts", ".", "astype", "(", "np", ".", "uint8", ")" ]
Normalize the values of the counts for a 2D histogram. This normalizes the values of a numpy array to the range 0-255. :param counts: a NumPy array which is to be rescaled. :param type: either 'bw' or 'reverse_bw'.
[ "Normalize", "the", "values", "of", "the", "counts", "for", "a", "2D", "histogram", "." ]
26ae7987522622710f2910980770c50012fda47d
https://github.com/davidfokkema/artist/blob/26ae7987522622710f2910980770c50012fda47d/artist/plot.py#L1004-L1019
train
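A worked example of the 0-255 rescaling above (the function body is copied from the method, with self dropped); the nanmin/nanmax calls mean empty bins stored as NaN do not distort the scale.

import numpy as np

def normalize_histogram2d(counts, type):
    # Rescale to 0-255 and optionally invert for the 'reverse_bw' style.
    counts = (255 * (counts - np.nanmin(counts)) /
              (np.nanmax(counts) - np.nanmin(counts)))
    if type == 'reverse_bw':
        counts = 255 - counts
    return counts.astype(np.uint8)

counts = np.array([[0., 5.], [10., 20.]])
print(normalize_histogram2d(counts, 'bw'))          # 0, 63, 127, 255 after truncation
print(normalize_histogram2d(counts, 'reverse_bw'))  # 255, 191, 127, 0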
davidfokkema/artist
artist/plot.py
SubPlot._write_bitmaps
def _write_bitmaps(self, path, suffix=''): """Write bitmap file assets. :param path: path of the plot file. :param suffix: optional suffix to add to asset names. The path parameter is used for the dirname, and the filename. So if :meth:`save` is called with '/foo/myplot.tex', you can call this method with that same path. The assets will then be saved in the /foo directory, and have a name like 'myplot_0.png'. """ dir, prefix = os.path.split(path) if '.' in prefix: prefix = prefix.split('.')[0] if prefix == '': prefix = 'figure' for i, bitmap in enumerate(self.bitmap_list): name = '%s%s_%d.png' % (prefix, suffix, i) bitmap['name'] = name img = bitmap['image'] # Make the bitmap at least 1000x1000 pixels size0 = int(np.ceil(1000. / img.size[0]) * img.size[0]) size1 = int(np.ceil(1000. / img.size[1]) * img.size[1]) large_img = img.resize((size0, size1)) large_img.save(os.path.join(dir, name))
python
def _write_bitmaps(self, path, suffix=''): """Write bitmap file assets. :param path: path of the plot file. :param suffix: optional suffix to add to asset names. The path parameter is used for the dirname, and the filename. So if :meth:`save` is called with '/foo/myplot.tex', you can call this method with that same path. The assets will then be saved in the /foo directory, and have a name like 'myplot_0.png'. """ dir, prefix = os.path.split(path) if '.' in prefix: prefix = prefix.split('.')[0] if prefix == '': prefix = 'figure' for i, bitmap in enumerate(self.bitmap_list): name = '%s%s_%d.png' % (prefix, suffix, i) bitmap['name'] = name img = bitmap['image'] # Make the bitmap at least 1000x1000 pixels size0 = int(np.ceil(1000. / img.size[0]) * img.size[0]) size1 = int(np.ceil(1000. / img.size[1]) * img.size[1]) large_img = img.resize((size0, size1)) large_img.save(os.path.join(dir, name))
[ "def", "_write_bitmaps", "(", "self", ",", "path", ",", "suffix", "=", "''", ")", ":", "dir", ",", "prefix", "=", "os", ".", "path", ".", "split", "(", "path", ")", "if", "'.'", "in", "prefix", ":", "prefix", "=", "prefix", ".", "split", "(", "'.'", ")", "[", "0", "]", "if", "prefix", "==", "''", ":", "prefix", "=", "'figure'", "for", "i", ",", "bitmap", "in", "enumerate", "(", "self", ".", "bitmap_list", ")", ":", "name", "=", "'%s%s_%d.png'", "%", "(", "prefix", ",", "suffix", ",", "i", ")", "bitmap", "[", "'name'", "]", "=", "name", "img", "=", "bitmap", "[", "'image'", "]", "# Make the bitmap at least 1000x1000 pixels", "size0", "=", "int", "(", "np", ".", "ceil", "(", "1000.", "/", "img", ".", "size", "[", "0", "]", ")", "*", "img", ".", "size", "[", "0", "]", ")", "size1", "=", "int", "(", "np", ".", "ceil", "(", "1000.", "/", "img", ".", "size", "[", "1", "]", ")", "*", "img", ".", "size", "[", "1", "]", ")", "large_img", "=", "img", ".", "resize", "(", "(", "size0", ",", "size1", ")", ")", "large_img", ".", "save", "(", "os", ".", "path", ".", "join", "(", "dir", ",", "name", ")", ")" ]
Write bitmap file assets. :param path: path of the plot file. :param suffix: optional suffix to add to asset names. The path parameter is used for the dirname, and the filename. So if :meth:`save` is called with '/foo/myplot.tex', you can call this method with that same path. The assets will then be saved in the /foo directory, and have a name like 'myplot_0.png'.
[ "Write", "bitmap", "file", "assets", "." ]
26ae7987522622710f2910980770c50012fda47d
https://github.com/davidfokkema/artist/blob/26ae7987522622710f2910980770c50012fda47d/artist/plot.py#L1021-L1046
train
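The asset naming and the "at least 1000 pixels" target-size computation in _write_bitmaps can be checked without Pillow. A sketch of just those two pieces (the actual resize and save calls are omitted; the example path is made up):

import os
import numpy as np

def bitmap_names_and_sizes(path, image_sizes, suffix=''):
    # Reproduce the asset naming and minimum-size computation from _write_bitmaps.
    dir, prefix = os.path.split(path)
    if '.' in prefix:
        prefix = prefix.split('.')[0]
    if prefix == '':
        prefix = 'figure'
    assets = []
    for i, (width, height) in enumerate(image_sizes):
        name = '%s%s_%d.png' % (prefix, suffix, i)
        # Smallest integer multiple of the original size that is at least 1000 px.
        size0 = int(np.ceil(1000. / width) * width)
        size1 = int(np.ceil(1000. / height) * height)
        assets.append((os.path.join(dir, name), (size0, size1)))
    return assets

print(bitmap_names_and_sizes('/foo/myplot.tex', [(640, 480)]))
# [('/foo/myplot_0.png', (1280, 1440))]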
davidfokkema/artist
artist/plot.py
SubPlot._prepare_data
def _prepare_data(self): """Prepare data before rendering When plotting very large datasets, we don't want to include data points which are outside the x-axis limits. LaTeX is very slow and consumes a lot of memory otherwise. Limiting the data points is only (easily) possible when the data are sorted. """ xmin, xmax = self.limits['xmin'], self.limits['xmax'] self.prepared_plot_series_list = [] for series in self.plot_series_list: prepared_series = series.copy() data = prepared_series['data'] x, _, _, _ = zip(*data) # only limit data when the data is sorted if sorted(x) == list(x): x = np.array(x) if xmin is not None: min_idx = x.searchsorted(xmin) if min_idx > 0: min_idx -= 1 else: min_idx = None if xmax is not None: max_idx = x.searchsorted(xmax) + 1 else: max_idx = None prepared_series['data'] = data[min_idx:max_idx] self.prepared_plot_series_list.append(prepared_series) self.prepared_shaded_regions_list = [] for series in self.shaded_regions_list: prepared_series = series.copy() data = prepared_series['data'] x, _, _ = zip(*data) # only limit data when the data is sorted if sorted(x) == list(x): x = np.array(x) if xmin is not None: min_idx = x.searchsorted(xmin) if min_idx > 0: min_idx -= 1 else: min_idx = None if xmax is not None: max_idx = x.searchsorted(xmax) + 1 else: max_idx = None prepared_series['data'] = data[min_idx:max_idx] self.prepared_shaded_regions_list.append(prepared_series)
python
def _prepare_data(self): """Prepare data before rendering When plotting very large datasets, we don't want to include data points which are outside the x-axis limits. LaTeX is very slow and consumes a lot of memory otherwise. Limiting the data points is only (easily) possible when the data are sorted. """ xmin, xmax = self.limits['xmin'], self.limits['xmax'] self.prepared_plot_series_list = [] for series in self.plot_series_list: prepared_series = series.copy() data = prepared_series['data'] x, _, _, _ = zip(*data) # only limit data when the data is sorted if sorted(x) == list(x): x = np.array(x) if xmin is not None: min_idx = x.searchsorted(xmin) if min_idx > 0: min_idx -= 1 else: min_idx = None if xmax is not None: max_idx = x.searchsorted(xmax) + 1 else: max_idx = None prepared_series['data'] = data[min_idx:max_idx] self.prepared_plot_series_list.append(prepared_series) self.prepared_shaded_regions_list = [] for series in self.shaded_regions_list: prepared_series = series.copy() data = prepared_series['data'] x, _, _ = zip(*data) # only limit data when the data is sorted if sorted(x) == list(x): x = np.array(x) if xmin is not None: min_idx = x.searchsorted(xmin) if min_idx > 0: min_idx -= 1 else: min_idx = None if xmax is not None: max_idx = x.searchsorted(xmax) + 1 else: max_idx = None prepared_series['data'] = data[min_idx:max_idx] self.prepared_shaded_regions_list.append(prepared_series)
[ "def", "_prepare_data", "(", "self", ")", ":", "xmin", ",", "xmax", "=", "self", ".", "limits", "[", "'xmin'", "]", ",", "self", ".", "limits", "[", "'xmax'", "]", "self", ".", "prepared_plot_series_list", "=", "[", "]", "for", "series", "in", "self", ".", "plot_series_list", ":", "prepared_series", "=", "series", ".", "copy", "(", ")", "data", "=", "prepared_series", "[", "'data'", "]", "x", ",", "_", ",", "_", ",", "_", "=", "zip", "(", "*", "data", ")", "# only limit data when the data is sorted", "if", "sorted", "(", "x", ")", "==", "list", "(", "x", ")", ":", "x", "=", "np", ".", "array", "(", "x", ")", "if", "xmin", "is", "not", "None", ":", "min_idx", "=", "x", ".", "searchsorted", "(", "xmin", ")", "if", "min_idx", ">", "0", ":", "min_idx", "-=", "1", "else", ":", "min_idx", "=", "None", "if", "xmax", "is", "not", "None", ":", "max_idx", "=", "x", ".", "searchsorted", "(", "xmax", ")", "+", "1", "else", ":", "max_idx", "=", "None", "prepared_series", "[", "'data'", "]", "=", "data", "[", "min_idx", ":", "max_idx", "]", "self", ".", "prepared_plot_series_list", ".", "append", "(", "prepared_series", ")", "self", ".", "prepared_shaded_regions_list", "=", "[", "]", "for", "series", "in", "self", ".", "shaded_regions_list", ":", "prepared_series", "=", "series", ".", "copy", "(", ")", "data", "=", "prepared_series", "[", "'data'", "]", "x", ",", "_", ",", "_", "=", "zip", "(", "*", "data", ")", "# only limit data when the data is sorted", "if", "sorted", "(", "x", ")", "==", "list", "(", "x", ")", ":", "x", "=", "np", ".", "array", "(", "x", ")", "if", "xmin", "is", "not", "None", ":", "min_idx", "=", "x", ".", "searchsorted", "(", "xmin", ")", "if", "min_idx", ">", "0", ":", "min_idx", "-=", "1", "else", ":", "min_idx", "=", "None", "if", "xmax", "is", "not", "None", ":", "max_idx", "=", "x", ".", "searchsorted", "(", "xmax", ")", "+", "1", "else", ":", "max_idx", "=", "None", "prepared_series", "[", "'data'", "]", "=", "data", "[", "min_idx", ":", "max_idx", "]", "self", ".", "prepared_shaded_regions_list", ".", "append", "(", "prepared_series", ")" ]
Prepare data before rendering When plotting very large datasets, we don't want to include data points which are outside the x-axis limits. LaTeX is very slow and consumes a lot of memory otherwise. Limiting the data points is only (easily) possible when the data are sorted.
[ "Prepare", "data", "before", "rendering" ]
26ae7987522622710f2910980770c50012fda47d
https://github.com/davidfokkema/artist/blob/26ae7987522622710f2910980770c50012fda47d/artist/plot.py#L1048-L1103
train
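The clipping in _prepare_data hinges on numpy.searchsorted over the sorted x column, deliberately keeping one point on either side of the limits so lines still run off the plot edge. A standalone sketch of that slice computation (the series/dict packaging is omitted; the sample data is made up):

import numpy as np

def clip_to_xlimits(data, xmin, xmax):
    # data is a list of (x, ...) tuples with x sorted ascending.
    x = np.array([row[0] for row in data])
    if xmin is not None:
        min_idx = x.searchsorted(xmin)
        if min_idx > 0:
            min_idx -= 1                      # keep one point left of the limit
    else:
        min_idx = None
    if xmax is not None:
        max_idx = x.searchsorted(xmax) + 1    # keep one point right of the limit
    else:
        max_idx = None
    return data[min_idx:max_idx]

data = [(0, 'a'), (1, 'b'), (2, 'c'), (3, 'd'), (4, 'e')]
print(clip_to_xlimits(data, 1.5, 2.5))  # [(1, 'b'), (2, 'c'), (3, 'd')]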
davidfokkema/artist
artist/plot.py
PolarPlot.histogram
def histogram(self, counts, bin_edges, linestyle='solid'): """Plot a polar histogram. The user needs to supply the histogram. This method only plots the results. You can use NumPy's histogram function. :param counts: array containing the count values. :param bin_edges: array containing the bin edges in degrees (or radians). :param linestyle: the line style used to connect the data points. May be None, or any line style accepted by TikZ (e.g. solid, dashed, dotted, thick, or even combinations like "red,thick,dashed"). Example:: >>> plot = artist.PolarPlot() >>> x = np.random.uniform(0, 360, size=1000) >>> n, bins = np.histogram(x, bins=np.linspace(0, 360, 37)) >>> plot.histogram(n, bins) """ if len(bin_edges) - 1 != len(counts): raise RuntimeError( 'The length of bin_edges should be length of counts + 1') x = [] y = [] if self.use_radians: circle = 2 * np.pi else: circle = 360. step = circle / 1800. for i in range(len(bin_edges) - 1): for bin_edge in np.arange(bin_edges[i], bin_edges[i + 1], step=step): x.append(bin_edge) y.append(counts[i]) x.append(bin_edges[i + 1]) y.append(counts[i]) # If last edge is same as first bin edge, connect the ends. if bin_edges[-1] % circle == bin_edges[0] % circle: x.append(bin_edges[0]) y.append(counts[0]) self.plot(x, y, mark=None, linestyle=linestyle)
python
def histogram(self, counts, bin_edges, linestyle='solid'): """Plot a polar histogram. The user needs to supply the histogram. This method only plots the results. You can use NumPy's histogram function. :param counts: array containing the count values. :param bin_edges: array containing the bin edges in degrees (or radians). :param linestyle: the line style used to connect the data points. May be None, or any line style accepted by TikZ (e.g. solid, dashed, dotted, thick, or even combinations like "red,thick,dashed"). Example:: >>> plot = artist.PolarPlot() >>> x = np.random.uniform(0, 360, size=1000) >>> n, bins = np.histogram(x, bins=np.linspace(0, 360, 37)) >>> plot.histogram(n, bins) """ if len(bin_edges) - 1 != len(counts): raise RuntimeError( 'The length of bin_edges should be length of counts + 1') x = [] y = [] if self.use_radians: circle = 2 * np.pi else: circle = 360. step = circle / 1800. for i in range(len(bin_edges) - 1): for bin_edge in np.arange(bin_edges[i], bin_edges[i + 1], step=step): x.append(bin_edge) y.append(counts[i]) x.append(bin_edges[i + 1]) y.append(counts[i]) # If last edge is same as first bin edge, connect the ends. if bin_edges[-1] % circle == bin_edges[0] % circle: x.append(bin_edges[0]) y.append(counts[0]) self.plot(x, y, mark=None, linestyle=linestyle)
[ "def", "histogram", "(", "self", ",", "counts", ",", "bin_edges", ",", "linestyle", "=", "'solid'", ")", ":", "if", "len", "(", "bin_edges", ")", "-", "1", "!=", "len", "(", "counts", ")", ":", "raise", "RuntimeError", "(", "'The length of bin_edges should be length of counts + 1'", ")", "x", "=", "[", "]", "y", "=", "[", "]", "if", "self", ".", "use_radians", ":", "circle", "=", "2", "*", "np", ".", "pi", "else", ":", "circle", "=", "360.", "step", "=", "circle", "/", "1800.", "for", "i", "in", "range", "(", "len", "(", "bin_edges", ")", "-", "1", ")", ":", "for", "bin_edge", "in", "np", ".", "arange", "(", "bin_edges", "[", "i", "]", ",", "bin_edges", "[", "i", "+", "1", "]", ",", "step", "=", "step", ")", ":", "x", ".", "append", "(", "bin_edge", ")", "y", ".", "append", "(", "counts", "[", "i", "]", ")", "x", ".", "append", "(", "bin_edges", "[", "i", "+", "1", "]", ")", "y", ".", "append", "(", "counts", "[", "i", "]", ")", "# If last edge is same as first bin edge, connect the ends.", "if", "bin_edges", "[", "-", "1", "]", "%", "circle", "==", "bin_edges", "[", "0", "]", "%", "circle", ":", "x", ".", "append", "(", "bin_edges", "[", "0", "]", ")", "y", ".", "append", "(", "counts", "[", "0", "]", ")", "self", ".", "plot", "(", "x", ",", "y", ",", "mark", "=", "None", ",", "linestyle", "=", "linestyle", ")" ]
Plot a polar histogram. The user needs to supply the histogram. This method only plots the results. You can use NumPy's histogram function. :param counts: array containing the count values. :param bin_edges: array containing the bin edges in degrees (or radians). :param linestyle: the line style used to connect the data points. May be None, or any line style accepted by TikZ (e.g. solid, dashed, dotted, thick, or even combinations like "red,thick,dashed"). Example:: >>> plot = artist.PolarPlot() >>> x = np.random.uniform(0, 360, size=1000) >>> n, bins = np.histogram(x, bins=np.linspace(0, 360, 37)) >>> plot.histogram(n, bins)
[ "Plot", "a", "polar", "histogram", "." ]
26ae7987522622710f2910980770c50012fda47d
https://github.com/davidfokkema/artist/blob/26ae7987522622710f2910980770c50012fda47d/artist/plot.py#L1220-L1269
train
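The histogram method above converts bin counts into many small (angle, radius) steps so each bin is drawn as an arc rather than a straight chord. A reduced sketch that only builds those coordinate lists, in degrees and without the wrap-around check:

import numpy as np

def polar_histogram_coords(counts, bin_edges, circle=360., steps=1800):
    step = circle / steps
    x, y = [], []
    for i in range(len(bin_edges) - 1):
        # Walk the bin in small angular steps at a constant radius (the count).
        for edge in np.arange(bin_edges[i], bin_edges[i + 1], step=step):
            x.append(edge)
            y.append(counts[i])
        x.append(bin_edges[i + 1])
        y.append(counts[i])
    return x, y

counts, bins = np.histogram(np.random.uniform(0, 360, 1000),
                            bins=np.linspace(0, 360, 37))
x, y = polar_histogram_coords(counts, bins)
print(len(x), len(y))  # many small segments, one radius value per bin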
praekeltfoundation/seed-message-sender
message_sender/factory.py
GenericHttpApiSender._get_filename
def _get_filename(self, path): """ This function gets the base filename from the path, if a language code is present the filename will start from there. """ match = re.search("[a-z]{2,3}_[A-Z]{2}", path) if match: start = match.start(0) filename = path[start:] else: filename = os.path.basename(path) return filename
python
def _get_filename(self, path): """ This function gets the base filename from the path, if a language code is present the filename will start from there. """ match = re.search("[a-z]{2,3}_[A-Z]{2}", path) if match: start = match.start(0) filename = path[start:] else: filename = os.path.basename(path) return filename
[ "def", "_get_filename", "(", "self", ",", "path", ")", ":", "match", "=", "re", ".", "search", "(", "\"[a-z]{2,3}_[A-Z]{2}\"", ",", "path", ")", "if", "match", ":", "start", "=", "match", ".", "start", "(", "0", ")", "filename", "=", "path", "[", "start", ":", "]", "else", ":", "filename", "=", "os", ".", "path", ".", "basename", "(", "path", ")", "return", "filename" ]
This function gets the base filename from the path, if a language code is present the filename will start from there.
[ "This", "function", "gets", "the", "base", "filename", "from", "the", "path", "if", "a", "language", "code", "is", "present", "the", "filename", "will", "start", "from", "there", "." ]
257b01635171b9dbe1f5f13baa810c971bb2620e
https://github.com/praekeltfoundation/seed-message-sender/blob/257b01635171b9dbe1f5f13baa810c971bb2620e/message_sender/factory.py#L72-L85
train
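A few worked cases for the locale-aware filename extraction above (the function body is taken from the method; the example paths are made up):

import os
import re

def get_filename(path):
    # Start the filename at a locale code like 'eng_ZA' if one is present.
    match = re.search("[a-z]{2,3}_[A-Z]{2}", path)
    if match:
        return path[match.start(0):]
    return os.path.basename(path)

print(get_filename('/media/eng_ZA/welcome.mp3'))  # eng_ZA/welcome.mp3
print(get_filename('/media/welcome.mp3'))         # welcome.mp3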
praekeltfoundation/seed-message-sender
message_sender/factory.py
GenericHttpApiSender._override_payload
def _override_payload(self, payload): """ This function transforms the payload into a new format using the self.override_payload property. """ if self.override_payload: old_payload = payload def get_value(data, key): try: parent_key, nested_key = key.split(".", 1) return get_value(data.get(parent_key, {}), nested_key) except ValueError: return data.get(key, key) def set_values(data): for key, value in data.items(): if isinstance(value, dict): set_values(value) else: data[key] = get_value(old_payload, value) payload = deepcopy(self.override_payload) set_values(payload) return payload
python
def _override_payload(self, payload): """ This function transforms the payload into a new format using the self.override_payload property. """ if self.override_payload: old_payload = payload def get_value(data, key): try: parent_key, nested_key = key.split(".", 1) return get_value(data.get(parent_key, {}), nested_key) except ValueError: return data.get(key, key) def set_values(data): for key, value in data.items(): if isinstance(value, dict): set_values(value) else: data[key] = get_value(old_payload, value) payload = deepcopy(self.override_payload) set_values(payload) return payload
[ "def", "_override_payload", "(", "self", ",", "payload", ")", ":", "if", "self", ".", "override_payload", ":", "old_payload", "=", "payload", "def", "get_value", "(", "data", ",", "key", ")", ":", "try", ":", "parent_key", ",", "nested_key", "=", "key", ".", "split", "(", "\".\"", ",", "1", ")", "return", "get_value", "(", "data", ".", "get", "(", "parent_key", ",", "{", "}", ")", ",", "nested_key", ")", "except", "ValueError", ":", "return", "data", ".", "get", "(", "key", ",", "key", ")", "def", "set_values", "(", "data", ")", ":", "for", "key", ",", "value", "in", "data", ".", "items", "(", ")", ":", "if", "isinstance", "(", "value", ",", "dict", ")", ":", "set_values", "(", "value", ")", "else", ":", "data", "[", "key", "]", "=", "get_value", "(", "old_payload", ",", "value", ")", "payload", "=", "deepcopy", "(", "self", ".", "override_payload", ")", "set_values", "(", "payload", ")", "return", "payload" ]
This function transforms the payload into a new format using the self.override_payload property.
[ "This", "function", "transforms", "the", "payload", "into", "a", "new", "format", "using", "the", "self", ".", "override_payload", "property", "." ]
257b01635171b9dbe1f5f13baa810c971bb2620e
https://github.com/praekeltfoundation/seed-message-sender/blob/257b01635171b9dbe1f5f13baa810c971bb2620e/message_sender/factory.py#L123-L148
train
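A standalone sketch of the payload re-mapping above: leaf values in the override template are dotted key paths into the original payload, and each leaf is replaced by the looked-up value, falling back to the literal key text when the path is missing. The payload and template below are invented for illustration.

from copy import deepcopy

def override_payload(payload, override_template):
    old_payload = payload

    def get_value(data, key):
        try:
            parent_key, nested_key = key.split(".", 1)
            return get_value(data.get(parent_key, {}), nested_key)
        except ValueError:
            return data.get(key, key)

    def set_values(data):
        for key, value in data.items():
            if isinstance(value, dict):
                set_values(value)
            else:
                data[key] = get_value(old_payload, value)

    new_payload = deepcopy(override_template)
    set_values(new_payload)
    return new_payload

payload = {"to_addr": "+27820000000", "content": "hello", "extra": {"id": 7}}
template = {"phone": "to_addr", "message": {"text": "content", "ref": "extra.id"}}
print(override_payload(payload, template))
# {'phone': '+27820000000', 'message': {'text': 'hello', 'ref': 7}}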
praekeltfoundation/seed-message-sender
message_sender/factory.py
WhatsAppApiSender.fire_failed_contact_lookup
def fire_failed_contact_lookup(self, msisdn):
        """
        Fires a webhook in the event of a failed WhatsApp contact lookup.
        """
        payload = {"address": msisdn}
        # We cannot use the raw_hook_event here, because we don't have a user, so we
        # manually filter and send the hooks for all users
        hooks = Hook.objects.filter(event="whatsapp.failed_contact_check")
        for hook in hooks:
            hook.deliver_hook(
                None, payload_override={"hook": hook.dict(), "data": payload}
            )
python
def fire_failed_contact_lookup(self, msisdn):
        """
        Fires a webhook in the event of a failed WhatsApp contact lookup.
        """
        payload = {"address": msisdn}
        # We cannot use the raw_hook_event here, because we don't have a user, so we
        # manually filter and send the hooks for all users
        hooks = Hook.objects.filter(event="whatsapp.failed_contact_check")
        for hook in hooks:
            hook.deliver_hook(
                None, payload_override={"hook": hook.dict(), "data": payload}
            )
[ "def", "fire_failed_contact_lookup", "(", "self", ",", "msisdn", ")", ":", "payload", "=", "{", "\"address\"", ":", "msisdn", "}", "# We cannot user the raw_hook_event here, because we don't have a user, so we", "# manually filter and send the hooks for all users", "hooks", "=", "Hook", ".", "objects", ".", "filter", "(", "event", "=", "\"whatsapp.failed_contact_check\"", ")", "for", "hook", "in", "hooks", ":", "hook", ".", "deliver_hook", "(", "None", ",", "payload_override", "=", "{", "\"hook\"", ":", "hook", ".", "dict", "(", ")", ",", "\"data\"", ":", "payload", "}", ")" ]
Fires a webhook in the event of a failed WhatsApp contact lookup.
[ "Fires", "a", "webhook", "in", "the", "event", "of", "a", "failed", "WhatsApp", "contact", "lookup", "." ]
257b01635171b9dbe1f5f13baa810c971bb2620e
https://github.com/praekeltfoundation/seed-message-sender/blob/257b01635171b9dbe1f5f13baa810c971bb2620e/message_sender/factory.py#L321-L332
train
praekeltfoundation/seed-message-sender
message_sender/factory.py
WhatsAppApiSender.get_contact
def get_contact(self, msisdn): """ Returns the WhatsApp ID for the given MSISDN """ response = self.session.post( urllib_parse.urljoin(self.api_url, "/v1/contacts"), json={"blocking": "wait", "contacts": [msisdn]}, ) response.raise_for_status() whatsapp_id = response.json()["contacts"][0].get("wa_id") if not whatsapp_id: self.fire_failed_contact_lookup(msisdn) return whatsapp_id
python
def get_contact(self, msisdn): """ Returns the WhatsApp ID for the given MSISDN """ response = self.session.post( urllib_parse.urljoin(self.api_url, "/v1/contacts"), json={"blocking": "wait", "contacts": [msisdn]}, ) response.raise_for_status() whatsapp_id = response.json()["contacts"][0].get("wa_id") if not whatsapp_id: self.fire_failed_contact_lookup(msisdn) return whatsapp_id
[ "def", "get_contact", "(", "self", ",", "msisdn", ")", ":", "response", "=", "self", ".", "session", ".", "post", "(", "urllib_parse", ".", "urljoin", "(", "self", ".", "api_url", ",", "\"/v1/contacts\"", ")", ",", "json", "=", "{", "\"blocking\"", ":", "\"wait\"", ",", "\"contacts\"", ":", "[", "msisdn", "]", "}", ",", ")", "response", ".", "raise_for_status", "(", ")", "whatsapp_id", "=", "response", ".", "json", "(", ")", "[", "\"contacts\"", "]", "[", "0", "]", ".", "get", "(", "\"wa_id\"", ")", "if", "not", "whatsapp_id", ":", "self", ".", "fire_failed_contact_lookup", "(", "msisdn", ")", "return", "whatsapp_id" ]
Returns the WhatsApp ID for the given MSISDN
[ "Returns", "the", "WhatsApp", "ID", "for", "the", "given", "MSISDN" ]
257b01635171b9dbe1f5f13baa810c971bb2620e
https://github.com/praekeltfoundation/seed-message-sender/blob/257b01635171b9dbe1f5f13baa810c971bb2620e/message_sender/factory.py#L334-L346
train
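The contact lookup above is a single POST to the /v1/contacts endpoint, as shown in the code; the sketch below reproduces the same call shape outside the sender class. How the session is authenticated is an assumption here (shown as a bearer token), and the failed-lookup webhook is left out.

import requests
from urllib.parse import urljoin

def get_whatsapp_id(api_url, token, msisdn):
    # Ask the API whether the MSISDN is registered; returns the wa_id or None.
    session = requests.Session()
    session.headers.update({"Authorization": "Bearer %s" % token})  # auth scheme assumed
    response = session.post(
        urljoin(api_url, "/v1/contacts"),
        json={"blocking": "wait", "contacts": [msisdn]},
    )
    response.raise_for_status()
    return response.json()["contacts"][0].get("wa_id")

# Example (placeholder values, performs a real HTTP request):
# get_whatsapp_id("https://whatsapp.example.org", "<token>", "+27820000000")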
praekeltfoundation/seed-message-sender
message_sender/factory.py
WhatsAppApiSender.send_custom_hsm
def send_custom_hsm(self, whatsapp_id, template_name, language, variables): """ Sends an HSM with more customizable fields than the send_hsm function """ data = { "to": whatsapp_id, "type": "hsm", "hsm": { "namespace": self.hsm_namespace, "element_name": template_name, "language": {"policy": "deterministic", "code": language}, "localizable_params": [{"default": variable} for variable in variables], }, } if self.ttl is not None: data["ttl"] = self.ttl response = self.session.post( urllib_parse.urljoin(self.api_url, "/v1/messages"), json=data ) return self.return_response(response)
python
def send_custom_hsm(self, whatsapp_id, template_name, language, variables): """ Sends an HSM with more customizable fields than the send_hsm function """ data = { "to": whatsapp_id, "type": "hsm", "hsm": { "namespace": self.hsm_namespace, "element_name": template_name, "language": {"policy": "deterministic", "code": language}, "localizable_params": [{"default": variable} for variable in variables], }, } if self.ttl is not None: data["ttl"] = self.ttl response = self.session.post( urllib_parse.urljoin(self.api_url, "/v1/messages"), json=data ) return self.return_response(response)
[ "def", "send_custom_hsm", "(", "self", ",", "whatsapp_id", ",", "template_name", ",", "language", ",", "variables", ")", ":", "data", "=", "{", "\"to\"", ":", "whatsapp_id", ",", "\"type\"", ":", "\"hsm\"", ",", "\"hsm\"", ":", "{", "\"namespace\"", ":", "self", ".", "hsm_namespace", ",", "\"element_name\"", ":", "template_name", ",", "\"language\"", ":", "{", "\"policy\"", ":", "\"deterministic\"", ",", "\"code\"", ":", "language", "}", ",", "\"localizable_params\"", ":", "[", "{", "\"default\"", ":", "variable", "}", "for", "variable", "in", "variables", "]", ",", "}", ",", "}", "if", "self", ".", "ttl", "is", "not", "None", ":", "data", "[", "\"ttl\"", "]", "=", "self", ".", "ttl", "response", "=", "self", ".", "session", ".", "post", "(", "urllib_parse", ".", "urljoin", "(", "self", ".", "api_url", ",", "\"/v1/messages\"", ")", ",", "json", "=", "data", ")", "return", "self", ".", "return_response", "(", "response", ")" ]
Sends an HSM with more customizable fields than the send_hsm function
[ "Sends", "an", "HSM", "with", "more", "customizable", "fields", "than", "the", "send_hsm", "function" ]
257b01635171b9dbe1f5f13baa810c971bb2620e
https://github.com/praekeltfoundation/seed-message-sender/blob/257b01635171b9dbe1f5f13baa810c971bb2620e/message_sender/factory.py#L366-L386
train
iLampard/x-utils
xutils/bar_builder/polling_thread.py
BarThread.load_data
def load_data(self): """ Overwrite this for new source data structures """ try: df = self.live_quote_arg_func(self.tickers) for index, ticker in enumerate(self.tickers): ticker_info = df.loc[index] self.ticker_dict[ticker].append(ticker_info['price'], ticker_info['volume'], ticker_info['amount'], ticker_info['time']) except Exception: raise ValueError('Polling thread exception')
python
def load_data(self): """ Overwrite this for new source data structures """ try: df = self.live_quote_arg_func(self.tickers) for index, ticker in enumerate(self.tickers): ticker_info = df.loc[index] self.ticker_dict[ticker].append(ticker_info['price'], ticker_info['volume'], ticker_info['amount'], ticker_info['time']) except Exception: raise ValueError('Polling thread exception')
[ "def", "load_data", "(", "self", ")", ":", "try", ":", "df", "=", "self", ".", "live_quote_arg_func", "(", "self", ".", "tickers", ")", "for", "index", ",", "ticker", "in", "enumerate", "(", "self", ".", "tickers", ")", ":", "ticker_info", "=", "df", ".", "loc", "[", "index", "]", "self", ".", "ticker_dict", "[", "ticker", "]", ".", "append", "(", "ticker_info", "[", "'price'", "]", ",", "ticker_info", "[", "'volume'", "]", ",", "ticker_info", "[", "'amount'", "]", ",", "ticker_info", "[", "'time'", "]", ")", "except", "Exception", ":", "raise", "ValueError", "(", "'Polling thread exception'", ")" ]
Overwrite this for new source data structures
[ "Overwrite", "this", "for", "new", "source", "data", "structures" ]
291d92832ee0e0c89bc22e10ecf2f44445e0d300
https://github.com/iLampard/x-utils/blob/291d92832ee0e0c89bc22e10ecf2f44445e0d300/xutils/bar_builder/polling_thread.py#L160-L173
train
Dav0815/TransportNSW
TransportNSW/TransportNSW.py
TransportNSW.get_departures
def get_departures(self, stop_id, route, destination, api_key): """Get the latest data from Transport NSW.""" self.stop_id = stop_id self.route = route self.destination = destination self.api_key = api_key # Build the URL including the STOP_ID and the API key url = \ 'https://api.transport.nsw.gov.au/v1/tp/departure_mon?' \ 'outputFormat=rapidJSON&coordOutputFormat=EPSG%3A4326&' \ 'mode=direct&type_dm=stop&name_dm=' \ + self.stop_id \ + '&departureMonitorMacro=true&TfNSWDM=true&version=10.2.1.42' auth = 'apikey ' + self.api_key header = {'Accept': 'application/json', 'Authorization': auth} # Send query or return error try: response = requests.get(url, headers=header, timeout=10) except: logger.warning("Network or Timeout error") return self.info # If there is no valid request if response.status_code != 200: logger.warning("Error with the request sent; check api key") return self.info # Parse the result as a JSON object result = response.json() # If there is no stop events for the query try: result['stopEvents'] except KeyError: logger.warning("No stop events for this query") return self.info # Set variables maxresults = 1 monitor = [] if self.destination != '': for i in range(len(result['stopEvents'])): destination = result['stopEvents'][i]['transportation']['destination']['name'] if destination == self.destination: event = self.parseEvent(result, i) if event != None: monitor.append(event) if len(monitor) >= maxresults: # We found enough results, lets stop break elif self.route != '': # Find the next stop events for a specific route for i in range(len(result['stopEvents'])): number = result['stopEvents'][i]['transportation']['number'] if number == self.route: event = self.parseEvent(result, i) if event != None: monitor.append(event) if len(monitor) >= maxresults: # We found enough results, lets stop break else: # No route defined, find any route leaving next for i in range(0, maxresults): event = self.parseEvent(result, i) if event != None: monitor.append(event) if monitor: self.info = { ATTR_STOP_ID: self.stop_id, ATTR_ROUTE: monitor[0][0], ATTR_DUE_IN: monitor[0][1], ATTR_DELAY: monitor[0][2], ATTR_REALTIME: monitor[0][5], ATTR_DESTINATION: monitor[0][6], ATTR_MODE: monitor[0][7] } return self.info
python
def get_departures(self, stop_id, route, destination, api_key): """Get the latest data from Transport NSW.""" self.stop_id = stop_id self.route = route self.destination = destination self.api_key = api_key # Build the URL including the STOP_ID and the API key url = \ 'https://api.transport.nsw.gov.au/v1/tp/departure_mon?' \ 'outputFormat=rapidJSON&coordOutputFormat=EPSG%3A4326&' \ 'mode=direct&type_dm=stop&name_dm=' \ + self.stop_id \ + '&departureMonitorMacro=true&TfNSWDM=true&version=10.2.1.42' auth = 'apikey ' + self.api_key header = {'Accept': 'application/json', 'Authorization': auth} # Send query or return error try: response = requests.get(url, headers=header, timeout=10) except: logger.warning("Network or Timeout error") return self.info # If there is no valid request if response.status_code != 200: logger.warning("Error with the request sent; check api key") return self.info # Parse the result as a JSON object result = response.json() # If there is no stop events for the query try: result['stopEvents'] except KeyError: logger.warning("No stop events for this query") return self.info # Set variables maxresults = 1 monitor = [] if self.destination != '': for i in range(len(result['stopEvents'])): destination = result['stopEvents'][i]['transportation']['destination']['name'] if destination == self.destination: event = self.parseEvent(result, i) if event != None: monitor.append(event) if len(monitor) >= maxresults: # We found enough results, lets stop break elif self.route != '': # Find the next stop events for a specific route for i in range(len(result['stopEvents'])): number = result['stopEvents'][i]['transportation']['number'] if number == self.route: event = self.parseEvent(result, i) if event != None: monitor.append(event) if len(monitor) >= maxresults: # We found enough results, lets stop break else: # No route defined, find any route leaving next for i in range(0, maxresults): event = self.parseEvent(result, i) if event != None: monitor.append(event) if monitor: self.info = { ATTR_STOP_ID: self.stop_id, ATTR_ROUTE: monitor[0][0], ATTR_DUE_IN: monitor[0][1], ATTR_DELAY: monitor[0][2], ATTR_REALTIME: monitor[0][5], ATTR_DESTINATION: monitor[0][6], ATTR_MODE: monitor[0][7] } return self.info
[ "def", "get_departures", "(", "self", ",", "stop_id", ",", "route", ",", "destination", ",", "api_key", ")", ":", "self", ".", "stop_id", "=", "stop_id", "self", ".", "route", "=", "route", "self", ".", "destination", "=", "destination", "self", ".", "api_key", "=", "api_key", "# Build the URL including the STOP_ID and the API key", "url", "=", "'https://api.transport.nsw.gov.au/v1/tp/departure_mon?'", "'outputFormat=rapidJSON&coordOutputFormat=EPSG%3A4326&'", "'mode=direct&type_dm=stop&name_dm='", "+", "self", ".", "stop_id", "+", "'&departureMonitorMacro=true&TfNSWDM=true&version=10.2.1.42'", "auth", "=", "'apikey '", "+", "self", ".", "api_key", "header", "=", "{", "'Accept'", ":", "'application/json'", ",", "'Authorization'", ":", "auth", "}", "# Send query or return error", "try", ":", "response", "=", "requests", ".", "get", "(", "url", ",", "headers", "=", "header", ",", "timeout", "=", "10", ")", "except", ":", "logger", ".", "warning", "(", "\"Network or Timeout error\"", ")", "return", "self", ".", "info", "# If there is no valid request", "if", "response", ".", "status_code", "!=", "200", ":", "logger", ".", "warning", "(", "\"Error with the request sent; check api key\"", ")", "return", "self", ".", "info", "# Parse the result as a JSON object", "result", "=", "response", ".", "json", "(", ")", "# If there is no stop events for the query", "try", ":", "result", "[", "'stopEvents'", "]", "except", "KeyError", ":", "logger", ".", "warning", "(", "\"No stop events for this query\"", ")", "return", "self", ".", "info", "# Set variables", "maxresults", "=", "1", "monitor", "=", "[", "]", "if", "self", ".", "destination", "!=", "''", ":", "for", "i", "in", "range", "(", "len", "(", "result", "[", "'stopEvents'", "]", ")", ")", ":", "destination", "=", "result", "[", "'stopEvents'", "]", "[", "i", "]", "[", "'transportation'", "]", "[", "'destination'", "]", "[", "'name'", "]", "if", "destination", "==", "self", ".", "destination", ":", "event", "=", "self", ".", "parseEvent", "(", "result", ",", "i", ")", "if", "event", "!=", "None", ":", "monitor", ".", "append", "(", "event", ")", "if", "len", "(", "monitor", ")", ">=", "maxresults", ":", "# We found enough results, lets stop", "break", "elif", "self", ".", "route", "!=", "''", ":", "# Find the next stop events for a specific route", "for", "i", "in", "range", "(", "len", "(", "result", "[", "'stopEvents'", "]", ")", ")", ":", "number", "=", "result", "[", "'stopEvents'", "]", "[", "i", "]", "[", "'transportation'", "]", "[", "'number'", "]", "if", "number", "==", "self", ".", "route", ":", "event", "=", "self", ".", "parseEvent", "(", "result", ",", "i", ")", "if", "event", "!=", "None", ":", "monitor", ".", "append", "(", "event", ")", "if", "len", "(", "monitor", ")", ">=", "maxresults", ":", "# We found enough results, lets stop", "break", "else", ":", "# No route defined, find any route leaving next", "for", "i", "in", "range", "(", "0", ",", "maxresults", ")", ":", "event", "=", "self", ".", "parseEvent", "(", "result", ",", "i", ")", "if", "event", "!=", "None", ":", "monitor", ".", "append", "(", "event", ")", "if", "monitor", ":", "self", ".", "info", "=", "{", "ATTR_STOP_ID", ":", "self", ".", "stop_id", ",", "ATTR_ROUTE", ":", "monitor", "[", "0", "]", "[", "0", "]", ",", "ATTR_DUE_IN", ":", "monitor", "[", "0", "]", "[", "1", "]", ",", "ATTR_DELAY", ":", "monitor", "[", "0", "]", "[", "2", "]", ",", "ATTR_REALTIME", ":", "monitor", "[", "0", "]", "[", "5", "]", ",", "ATTR_DESTINATION", ":", "monitor", "[", "0", "]", 
"[", "6", "]", ",", "ATTR_MODE", ":", "monitor", "[", "0", "]", "[", "7", "]", "}", "return", "self", ".", "info" ]
Get the latest data from Transport NSW.
[ "Get", "the", "latest", "data", "from", "Transport", "NSW", "." ]
828aae948fd26bb2ce89637ed639129b4cfdf62a
https://github.com/Dav0815/TransportNSW/blob/828aae948fd26bb2ce89637ed639129b4cfdf62a/TransportNSW/TransportNSW.py#L36-L115
train
Dav0815/TransportNSW
TransportNSW/TransportNSW.py
TransportNSW.parseEvent
def parseEvent(self, result, i): """Parse the current event and extract data.""" fmt = '%Y-%m-%dT%H:%M:%SZ' due = 0 delay = 0 real_time = 'n' number = result['stopEvents'][i]['transportation']['number'] planned = datetime.strptime(result['stopEvents'][i] ['departureTimePlanned'], fmt) destination = result['stopEvents'][i]['transportation']['destination']['name'] mode = self.get_mode(result['stopEvents'][i]['transportation']['product']['class']) estimated = planned if 'isRealtimeControlled' in result['stopEvents'][i]: real_time = 'y' estimated = datetime.strptime(result['stopEvents'][i] ['departureTimeEstimated'], fmt) # Only deal with future leave times if estimated > datetime.utcnow(): due = self.get_due(estimated) delay = self.get_delay(planned, estimated) return[ number, due, delay, planned, estimated, real_time, destination, mode ] else: return None
python
def parseEvent(self, result, i): """Parse the current event and extract data.""" fmt = '%Y-%m-%dT%H:%M:%SZ' due = 0 delay = 0 real_time = 'n' number = result['stopEvents'][i]['transportation']['number'] planned = datetime.strptime(result['stopEvents'][i] ['departureTimePlanned'], fmt) destination = result['stopEvents'][i]['transportation']['destination']['name'] mode = self.get_mode(result['stopEvents'][i]['transportation']['product']['class']) estimated = planned if 'isRealtimeControlled' in result['stopEvents'][i]: real_time = 'y' estimated = datetime.strptime(result['stopEvents'][i] ['departureTimeEstimated'], fmt) # Only deal with future leave times if estimated > datetime.utcnow(): due = self.get_due(estimated) delay = self.get_delay(planned, estimated) return[ number, due, delay, planned, estimated, real_time, destination, mode ] else: return None
[ "def", "parseEvent", "(", "self", ",", "result", ",", "i", ")", ":", "fmt", "=", "'%Y-%m-%dT%H:%M:%SZ'", "due", "=", "0", "delay", "=", "0", "real_time", "=", "'n'", "number", "=", "result", "[", "'stopEvents'", "]", "[", "i", "]", "[", "'transportation'", "]", "[", "'number'", "]", "planned", "=", "datetime", ".", "strptime", "(", "result", "[", "'stopEvents'", "]", "[", "i", "]", "[", "'departureTimePlanned'", "]", ",", "fmt", ")", "destination", "=", "result", "[", "'stopEvents'", "]", "[", "i", "]", "[", "'transportation'", "]", "[", "'destination'", "]", "[", "'name'", "]", "mode", "=", "self", ".", "get_mode", "(", "result", "[", "'stopEvents'", "]", "[", "i", "]", "[", "'transportation'", "]", "[", "'product'", "]", "[", "'class'", "]", ")", "estimated", "=", "planned", "if", "'isRealtimeControlled'", "in", "result", "[", "'stopEvents'", "]", "[", "i", "]", ":", "real_time", "=", "'y'", "estimated", "=", "datetime", ".", "strptime", "(", "result", "[", "'stopEvents'", "]", "[", "i", "]", "[", "'departureTimeEstimated'", "]", ",", "fmt", ")", "# Only deal with future leave times", "if", "estimated", ">", "datetime", ".", "utcnow", "(", ")", ":", "due", "=", "self", ".", "get_due", "(", "estimated", ")", "delay", "=", "self", ".", "get_delay", "(", "planned", ",", "estimated", ")", "return", "[", "number", ",", "due", ",", "delay", ",", "planned", ",", "estimated", ",", "real_time", ",", "destination", ",", "mode", "]", "else", ":", "return", "None" ]
Parse the current event and extract data.
[ "Parse", "the", "current", "event", "and", "extract", "data", "." ]
828aae948fd26bb2ce89637ed639129b4cfdf62a
https://github.com/Dav0815/TransportNSW/blob/828aae948fd26bb2ce89637ed639129b4cfdf62a/TransportNSW/TransportNSW.py#L117-L148
train
Dav0815/TransportNSW
TransportNSW/TransportNSW.py
TransportNSW.get_due
def get_due(self, estimated): """Min till next leave event.""" due = 0 due = round((estimated - datetime.utcnow()).seconds / 60) return due
python
def get_due(self, estimated): """Min till next leave event.""" due = 0 due = round((estimated - datetime.utcnow()).seconds / 60) return due
[ "def", "get_due", "(", "self", ",", "estimated", ")", ":", "due", "=", "0", "due", "=", "round", "(", "(", "estimated", "-", "datetime", ".", "utcnow", "(", ")", ")", ".", "seconds", "/", "60", ")", "return", "due" ]
Min till next leave event.
[ "Min", "till", "next", "leave", "event", "." ]
828aae948fd26bb2ce89637ed639129b4cfdf62a
https://github.com/Dav0815/TransportNSW/blob/828aae948fd26bb2ce89637ed639129b4cfdf62a/TransportNSW/TransportNSW.py#L150-L154
train
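A small worked example of the minutes-until computation above. One caveat worth noting: timedelta.seconds only carries the within-a-day remainder, so for departures more than a day ahead total_seconds() would be the safer call; the sketch keeps the original arithmetic.

from datetime import datetime, timedelta

def get_due(estimated):
    # Whole minutes until the estimated departure (naive UTC datetimes, as above).
    return round((estimated - datetime.utcnow()).seconds / 60)

print(get_due(datetime.utcnow() + timedelta(minutes=12, seconds=30)))  # -> 12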
Dav0815/TransportNSW
TransportNSW/TransportNSW.py
TransportNSW.get_delay
def get_delay(self, planned, estimated): """Min of delay on planned departure.""" delay = 0 # default is no delay if estimated >= planned: # there is a delay delay = round((estimated - planned).seconds / 60) else: # leaving earlier delay = round((planned - estimated).seconds / 60) * -1 return delay
python
def get_delay(self, planned, estimated):
        """Min of delay on planned departure."""
        delay = 0  # default is no delay
        if estimated >= planned:  # there is a delay
            delay = round((estimated - planned).seconds / 60)
        else:  # leaving earlier
            delay = round((planned - estimated).seconds / 60) * -1
        return delay
[ "def", "get_delay", "(", "self", ",", "planned", ",", "estimated", ")", ":", "delay", "=", "0", "# default is no delay", "if", "estimated", ">=", "planned", ":", "# there is a delay", "delay", "=", "round", "(", "(", "estimated", "-", "planned", ")", ".", "seconds", "/", "60", ")", "else", ":", "# leaving earlier", "delay", "=", "round", "(", "(", "planned", "-", "estimated", ")", ".", "seconds", "/", "60", ")", "*", "-", "1", "return", "delay" ]
Min of delay on planned departure.
[ "Min", "of", "delay", "on", "planned", "departure", "." ]
828aae948fd26bb2ce89637ed639129b4cfdf62a
https://github.com/Dav0815/TransportNSW/blob/828aae948fd26bb2ce89637ed639129b4cfdf62a/TransportNSW/TransportNSW.py#L156-L163
train
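A simplified, standalone take on the same delay calculation, signed the same way: positive when the estimated departure is later than planned, negative when it is earlier. It collapses the two branches into a single subtraction via total_seconds(); this is a sketch of the idea, not the library's implementation.

from datetime import datetime, timedelta

def delay_minutes(planned, estimated):
    """Signed delay in whole minutes (late > 0, early < 0)."""
    return round((estimated - planned).total_seconds() / 60)

planned = datetime(2024, 1, 1, 8, 30)
print(delay_minutes(planned, planned + timedelta(minutes=3)))   # 3
print(delay_minutes(planned, planned - timedelta(minutes=2)))   # -2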
DeV1doR/aioethereum
aioethereum/client.py
create_ethereum_client
def create_ethereum_client(uri, timeout=60, *, loop=None):
    """Create client to ethereum node based on schema.

    :param uri: Host on ethereum node
    :type uri: str

    :param timeout: An optional total time of timeout call
    :type timeout: int

    :param loop: An optional *event loop* instance
        (uses :func:`asyncio.get_event_loop` if not specified).
    :type loop: :ref:`EventLoop<asyncio-event-loop>`

    :return: :class:`BaseAsyncIOClient` instance.
    """
    if loop is None:
        loop = asyncio.get_event_loop()

    presult = urlparse(uri)
    if presult.scheme in ('ipc', 'unix'):
        reader, writer = yield from asyncio.open_unix_connection(presult.path,
                                                                 loop=loop)
        return AsyncIOIPCClient(reader, writer, uri, timeout, loop=loop)
    elif presult.scheme in ('http', 'https'):
        tls = presult.scheme[-1] == 's'
        netloc = presult.netloc.split(':')
        host = netloc.pop(0)
        port = netloc.pop(0) if netloc else (443 if tls else 80)
        return AsyncIOHTTPClient(host, port, tls, timeout, loop=loop)
    else:
        raise RuntimeError('This scheme does not supported.')
python
def create_ethereum_client(uri, timeout=60, *, loop=None):
    """Create client to ethereum node based on schema.

    :param uri: Host on ethereum node
    :type uri: str

    :param timeout: An optional total time of timeout call
    :type timeout: int

    :param loop: An optional *event loop* instance
        (uses :func:`asyncio.get_event_loop` if not specified).
    :type loop: :ref:`EventLoop<asyncio-event-loop>`

    :return: :class:`BaseAsyncIOClient` instance.
    """
    if loop is None:
        loop = asyncio.get_event_loop()

    presult = urlparse(uri)
    if presult.scheme in ('ipc', 'unix'):
        reader, writer = yield from asyncio.open_unix_connection(presult.path,
                                                                 loop=loop)
        return AsyncIOIPCClient(reader, writer, uri, timeout, loop=loop)
    elif presult.scheme in ('http', 'https'):
        tls = presult.scheme[-1] == 's'
        netloc = presult.netloc.split(':')
        host = netloc.pop(0)
        port = netloc.pop(0) if netloc else (443 if tls else 80)
        return AsyncIOHTTPClient(host, port, tls, timeout, loop=loop)
    else:
        raise RuntimeError('This scheme does not supported.')
[ "def", "create_ethereum_client", "(", "uri", ",", "timeout", "=", "60", ",", "*", ",", "loop", "=", "None", ")", ":", "if", "loop", "is", "None", ":", "loop", "=", "asyncio", ".", "get_event_loop", "(", ")", "presult", "=", "urlparse", "(", "uri", ")", "if", "presult", ".", "scheme", "in", "(", "'ipc'", ",", "'unix'", ")", ":", "reader", ",", "writer", "=", "yield", "from", "asyncio", ".", "open_unix_connection", "(", "presult", ".", "path", ",", "loop", "=", "loop", ")", "return", "AsyncIOIPCClient", "(", "reader", ",", "writer", ",", "uri", ",", "timeout", ",", "loop", "=", "loop", ")", "elif", "presult", ".", "scheme", "in", "(", "'http'", ",", "'https'", ")", ":", "tls", "=", "presult", ".", "scheme", "[", "-", "1", "]", "==", "'s'", "netloc", "=", "presult", ".", "netloc", ".", "split", "(", "':'", ")", "host", "=", "netloc", ".", "pop", "(", "0", ")", "port", "=", "netloc", ".", "pop", "(", "0", ")", "if", "netloc", "else", "(", "443", "if", "tls", "else", "80", ")", "return", "AsyncIOHTTPClient", "(", "host", ",", "port", ",", "tls", ",", "timeout", ",", "loop", "=", "loop", ")", "else", ":", "raise", "RuntimeError", "(", "'This scheme does not supported.'", ")" ]
Create client to ethereum node based on schema. :param uri: Host on ethereum node :type uri: str :param timeout: An optional total time of timeout call :type timeout: int :param loop: An optional *event loop* instance (uses :func:`asyncio.get_event_loop` if not specified). :type loop: :ref:`EventLoop<asyncio-event-loop>` :return: :class:`BaseAsyncIOClient` instance.
[ "Create", "client", "to", "ethereum", "node", "based", "on", "schema", "." ]
85eb46550d862b3ccc309914ea871ca1c7b42157
https://github.com/DeV1doR/aioethereum/blob/85eb46550d862b3ccc309914ea871ca1c7b42157/aioethereum/client.py#L246-L276
train
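A hedged usage sketch for create_ethereum_client. It assumes the function is importable from the aioethereum package top level and that a JSON-RPC node is listening on the given endpoint; both the import path and the URLs are illustrative. Because the factory is written in the old yield from generator-coroutine style, the sketch drives it the same way (Python 3.4-3.7 era asyncio); newer interpreters have removed @asyncio.coroutine.

import asyncio
from aioethereum import create_ethereum_client  # import path assumed

@asyncio.coroutine
def main():
    # HTTP endpoint -> AsyncIOHTTPClient; a URI such as 'ipc:///path/to/geth.ipc'
    # would open a unix socket and return AsyncIOIPCClient instead.
    client = yield from create_ethereum_client('http://127.0.0.1:8545',
                                               timeout=30)
    return client

loop = asyncio.get_event_loop()
client = loop.run_until_complete(main())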
ludeeus/GHLocalApi
examples/get_alarms.py
get_alarms
async def get_alarms():
    """Get alarms and timers from GH."""
    async with aiohttp.ClientSession() as session:
        ghlocalapi = Alarms(LOOP, session, IPADDRESS)
        await ghlocalapi.get_alarms()
        print("Alarms:", ghlocalapi.alarms)
python
async def get_alarms():
    """Get alarms and timers from GH."""
    async with aiohttp.ClientSession() as session:
        ghlocalapi = Alarms(LOOP, session, IPADDRESS)
        await ghlocalapi.get_alarms()
        print("Alarms:", ghlocalapi.alarms)
[ "async", "def", "get_alarms", "(", ")", ":", "async", "with", "aiohttp", ".", "ClientSession", "(", ")", "as", "session", ":", "ghlocalapi", "=", "Alarms", "(", "LOOP", ",", "session", ",", "IPADDRESS", ")", "await", "ghlocalapi", ".", "get_alarms", "(", ")", "print", "(", "\"Alarms:\"", ",", "ghlocalapi", ".", "alarms", ")" ]
Get alarms and timers from GH.
[ "Get", "alarms", "and", "timers", "from", "GH", "." ]
93abdee299c4a4b65aa9dd03c77ec34e174e3c56
https://github.com/ludeeus/GHLocalApi/blob/93abdee299c4a4b65aa9dd03c77ec34e174e3c56/examples/get_alarms.py#L9-L15
train
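A sketch of the scaffolding the examples/get_alarms.py snippet above relies on but does not show: the imports, the LOOP and IPADDRESS constants, and the call that drives the coroutine. The import path, the device IP, and the way the loop is obtained are assumptions for illustration, not copied from the repository.

import asyncio
import aiohttp
from ghlocalapi.alarms import Alarms  # module path assumed

IPADDRESS = '192.168.1.50'  # placeholder IP of the Google Home device
LOOP = asyncio.get_event_loop()

async def get_alarms():
    """Get alarms and timers from GH."""
    async with aiohttp.ClientSession() as session:
        ghlocalapi = Alarms(LOOP, session, IPADDRESS)
        await ghlocalapi.get_alarms()
        print("Alarms:", ghlocalapi.alarms)

LOOP.run_until_complete(get_alarms())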