Dataset schema (field: type, observed size range):

repo: string, 7 to 55 chars
path: string, 4 to 127 chars
func_name: string, 1 to 88 chars
original_string: string, 75 to 19.8k chars
language: string, 1 distinct value
code: string, 75 to 19.8k chars
code_tokens: list
docstring: string, 3 to 17.3k chars
docstring_tokens: list
sha: string, 40 chars
url: string, 87 to 242 chars
partition: string, 1 distinct value
yymao/generic-catalog-reader
GCR/base.py
BaseGenericCatalog.add_quantity_modifier
def add_quantity_modifier(self, quantity, modifier, overwrite=False):
    """
    Add a quantity modifier.
    Consider using the high-level function `add_derived_quantity` instead!

    Parameters
    ----------
    quantity : str
        name of the derived quantity to add

    modifier : None or str or tuple
        If the quantity modifier is a tuple of length >= 2 and the first
        element is a callable, it should be in the format of
        `(callable, native quantity 1, native quantity 2, ...)`,
        and the modifier would work as
        callable(native quantity 1, native quantity 2, ...).
        If the quantity modifier is None, the quantity will be used as
        the native quantity name.
        Otherwise, the modifier would be used directly as a native quantity name.

    overwrite : bool, optional
        If False and the quantity is already specified in _quantity_modifiers,
        raise a ValueError
    """
    if quantity in self._quantity_modifiers and not overwrite:
        raise ValueError('quantity `{}` already exists'.format(quantity))
    self._quantity_modifiers[quantity] = modifier
    self._check_quantities_exist([quantity], raise_exception=False)
python
bc6267ac41b9f68106ed6065184469ac13fdc0b6
https://github.com/yymao/generic-catalog-reader/blob/bc6267ac41b9f68106ed6065184469ac13fdc0b6/GCR/base.py#L194-L217
train
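The three modifier forms accepted above are easiest to see in a short usage sketch. This is illustrative only: `cat` stands for an instance of a concrete BaseGenericCatalog subclass, and the native quantity names `mag_g_des` and `mag_r_des` are hypothetical.

# a plain string aliases a native quantity
cat.add_quantity_modifier('Mag_g', 'mag_g_des')
# None means the quantity name itself is the native quantity name
cat.add_quantity_modifier('mag_g_des', None)
# a (callable, native1, native2, ...) tuple defines a derived quantity
cat.add_quantity_modifier('color_gr', (lambda g, r: g - r, 'mag_g_des', 'mag_r_des'))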
yymao/generic-catalog-reader
GCR/base.py
BaseGenericCatalog.get_normalized_quantity_modifier
def get_normalized_quantity_modifier(self, quantity):
    """
    Retrieve a quantity modifier, normalized.
    This function always returns a tuple, with the first item a callable,
    and the rest native quantity names

    Parameters
    ----------
    quantity : str
        name of the derived quantity to get

    Returns
    -------
    tuple : (callable, quantity1, quantity2...)
    """
    modifier = self._quantity_modifiers.get(quantity, self._default_quantity_modifier)

    if modifier is None:
        return (trivial_callable, quantity)

    if callable(modifier):
        return (modifier, quantity)

    if isinstance(modifier, (tuple, list)) and len(modifier) > 1 and callable(modifier[0]):
        return modifier

    return (trivial_callable, modifier)
python
bc6267ac41b9f68106ed6065184469ac13fdc0b6
https://github.com/yymao/generic-catalog-reader/blob/bc6267ac41b9f68106ed6065184469ac13fdc0b6/GCR/base.py#L234-L259
train
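Continuing the hypothetical `cat` from the sketch above, normalization maps each stored modifier form onto a uniform `(callable, native quantity names...)` tuple:

cat.get_normalized_quantity_modifier('mag_g_des')
# -> (trivial_callable, 'mag_g_des')        # stored modifier was None
cat.get_normalized_quantity_modifier('Mag_g')
# -> (trivial_callable, 'mag_g_des')        # stored modifier was a string alias
cat.get_normalized_quantity_modifier('color_gr')
# -> (<lambda>, 'mag_g_des', 'mag_r_des')   # tuple modifiers come back unchanged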
yymao/generic-catalog-reader
GCR/base.py
BaseGenericCatalog.add_derived_quantity
def add_derived_quantity(self, derived_quantity, func, *quantities):
    """
    Add a derived quantity modifier.

    Parameters
    ----------
    derived_quantity : str
        name of the derived quantity to be added

    func : callable
        function to calculate the derived quantity
        the number of arguments should equal the number of following `quantities`

    quantities : list of str
        quantities to pass to the callable
    """
    if derived_quantity in self._quantity_modifiers:
        raise ValueError('quantity name `{}` already exists'.format(derived_quantity))

    if set(quantities).issubset(self._native_quantities):
        new_modifier = (func,) + quantities

    else:
        functions = []
        quantities_needed = []
        quantity_count = []
        for q in quantities:
            modifier = self.get_normalized_quantity_modifier(q)
            functions.append(modifier[0])
            quantities_needed.extend(modifier[1:])
            quantity_count.append(len(modifier) - 1)

        def _new_func(*x):
            assert len(x) == sum(quantity_count)
            count_current = 0
            new_args = []
            for func_this, count in zip(functions, quantity_count):
                new_args.append(func_this(*x[count_current:count_current + count]))
                count_current += count
            return func(*new_args)

        new_modifier = (_new_func,) + tuple(quantities_needed)

    self.add_quantity_modifier(derived_quantity, new_modifier)
python
bc6267ac41b9f68106ed6065184469ac13fdc0b6
https://github.com/yymao/generic-catalog-reader/blob/bc6267ac41b9f68106ed6065184469ac13fdc0b6/GCR/base.py#L261-L304
train
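What distinguishes `add_derived_quantity` from `add_quantity_modifier` is the inlining step: inputs that are themselves derived get expanded down to native quantities. A sketch, again using the hypothetical names from above:

cat.add_derived_quantity('gr_plus_offset', lambda c: c + 0.1, 'color_gr')
# 'color_gr' is derived, so the stored modifier becomes
# (_new_func, 'mag_g_des', 'mag_r_des'): _new_func first evaluates g - r
# via the color_gr modifier, then applies the +0.1 offset.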
yymao/generic-catalog-reader
GCR/base.py
BaseGenericCatalog.add_modifier_on_derived_quantities
def add_modifier_on_derived_quantities(self, new_quantity, func, *quantities):
    """
    Deprecated. Use `add_derived_quantity` instead.
    """
    warnings.warn("Use `add_derived_quantity` instead.", DeprecationWarning)
    self.add_derived_quantity(new_quantity, func, *quantities)
python
bc6267ac41b9f68106ed6065184469ac13fdc0b6
https://github.com/yymao/generic-catalog-reader/blob/bc6267ac41b9f68106ed6065184469ac13fdc0b6/GCR/base.py#L306-L311
train
brutus/wdiffhtml
wdiffhtml/utils.py
check_for_wdiff
def check_for_wdiff():
    """
    Checks if the `wdiff` command can be found.

    Raises:

        WdiffNotFoundError: if ``wdiff`` is not found.

    """
    cmd = ['which', CMD_WDIFF]
    DEVNULL = open(os.devnull, 'wb')
    proc = sub.Popen(cmd, stdout=DEVNULL)
    proc.wait()
    DEVNULL.close()
    if proc.returncode != 0:
        msg = "the `{}` command can't be found".format(CMD_WDIFF)
        raise WdiffNotFoundError(msg)
python
e97b524a7945f7a626e33ec141343120c524d9fa
https://github.com/brutus/wdiffhtml/blob/e97b524a7945f7a626e33ec141343120c524d9fa/wdiffhtml/utils.py#L36-L52
train
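Spawning `which` works on Unix-like systems, but the same check can be done in-process. A minimal alternative sketch using the standard library's shutil.which (available since Python 3.3); this is not the package's code, and it reuses the WdiffNotFoundError and CMD_WDIFF names already in scope in utils.py:

import shutil

def check_for_wdiff_alt():
    # shutil.which returns None when the executable is not on the PATH
    if shutil.which(CMD_WDIFF) is None:
        msg = "the `{}` command can't be found".format(CMD_WDIFF)
        raise WdiffNotFoundError(msg)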
brutus/wdiffhtml
wdiffhtml/utils.py
generate_wdiff
def generate_wdiff(org_file, new_file, fold_tags=False, html=True):
    """
    Returns the results from the `wdiff` command as a string.

    HTML `<ins>` and `<del>` tags will be used instead of the default
    markings, unless *html* is set to `False`.

    If *fold_tags* is set, `<ins>` and `<del>` tags are allowed to span
    line breaks (option `-n` is not used).

    Raises:

        subprocess.CalledProcessError: on any `wdiff` process errors

    """
    check_for_wdiff()
    cmd = [CMD_WDIFF]
    if html:
        cmd.extend(OPTIONS_OUTPUT)
    if not fold_tags:
        cmd.extend(OPTIONS_LINEBREAK)
    cmd.extend([org_file, new_file])
    proc = sub.Popen(cmd, stdout=sub.PIPE)
    diff, _ = proc.communicate()
    return diff.decode('utf-8')
python
e97b524a7945f7a626e33ec141343120c524d9fa
https://github.com/brutus/wdiffhtml/blob/e97b524a7945f7a626e33ec141343120c524d9fa/wdiffhtml/utils.py#L55-L79
train
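A hedged usage sketch; the file names are hypothetical:

# HTML output, with <ins>/<del> kept within single lines
diff_html = generate_wdiff('draft_old.txt', 'draft_new.txt')
# plain wdiff markings instead of HTML tags
diff_plain = generate_wdiff('draft_old.txt', 'draft_new.txt', html=False)
# allow <ins>/<del> tags to span line breaks
diff_folded = generate_wdiff('draft_old.txt', 'draft_new.txt', fold_tags=True)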
mesbahamin/chronophore
chronophore/tkview.py
TkUserTypeSelectionDialog.body
def body(self, master):
    """Create dialog body. Return widget that should have initial focus.

    Inherited from tkinter.simpledialog.Dialog
    """
    self.frame = ttk.Frame(master, padding=(5, 5, 10, 10))
    self.lbl_message = ttk.Label(
        self.frame, text='Select User Type: ',
    )
    self.rb_student = ttk.Radiobutton(
        self.frame,
        text='Student',
        variable=self.rb_choice,
        value='student',
    )
    self.rb_tutor = ttk.Radiobutton(
        self.frame,
        text='Tutor',
        variable=self.rb_choice,
        value='tutor',
    )
    self.btn_ok = ttk.Button(
        self.frame, text='OK', command=self.ok,
    )
    self.btn_cancel = ttk.Button(
        self.frame, text='Cancel', command=self.cancel,
    )

    # assemble grid
    self.frame.grid(column=0, row=0, sticky=(N, S, E, W))
    self.lbl_message.grid(column=0, row=0, columnspan=2, sticky=(W, E))
    self.rb_student.grid(column=0, row=1, columnspan=2, sticky=W)
    self.rb_tutor.grid(column=0, row=2, columnspan=2, sticky=W)
    self.btn_ok.grid(column=0, row=3)
    self.btn_cancel.grid(column=1, row=3)

    # key bindings
    self.bind('<Return>', self.ok)
    self.bind('<KP_Enter>', self.ok)
    self.bind('<Escape>', self.cancel)

    self.rb_tutor.invoke()

    return self.btn_ok
python
ee140c61b4dfada966f078de8304bac737cec6f7
https://github.com/mesbahamin/chronophore/blob/ee140c61b4dfada966f078de8304bac737cec6f7/chronophore/tkview.py#L240-L289
train
mesbahamin/chronophore
chronophore/tkview.py
TkUserTypeSelectionDialog.apply
def apply(self):
    """Inherited from tkinter.simpledialog.Dialog"""
    user_type = self.rb_choice.get()
    if user_type == 'student' or user_type == 'tutor':
        self.result = user_type
python
ee140c61b4dfada966f078de8304bac737cec6f7
https://github.com/mesbahamin/chronophore/blob/ee140c61b4dfada966f078de8304bac737cec6f7/chronophore/tkview.py#L321-L325
train
mesbahamin/chronophore
chronophore/controller.py
flag_forgotten_entries
def flag_forgotten_entries(session, today=None):
    """Flag any entries from previous days where users forgot to sign out.

    :param session: SQLAlchemy session through which to access the database.
    :param today: (optional) The current date as a `datetime.date` object. Used for testing.
    """  # noqa
    today = date.today() if today is None else today
    forgotten = (
        session
        .query(Entry)
        .filter(Entry.time_out.is_(None))
        .filter(Entry.forgot_sign_out.is_(False))
        .filter(Entry.date < today)
    )

    for entry in forgotten:
        e = sign_out(entry, forgot=True)
        logger.debug('Signing out forgotten entry: {}'.format(e))
        session.add(e)

    session.commit()
python
ee140c61b4dfada966f078de8304bac737cec6f7
https://github.com/mesbahamin/chronophore/blob/ee140c61b4dfada966f078de8304bac737cec6f7/chronophore/controller.py#L65-L87
train
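A usage sketch for testing, assuming `session` is an open SQLAlchemy session bound to the chronophore database; the date is arbitrary:

from datetime import date
from chronophore.controller import flag_forgotten_entries

# any entry dated before 2016-05-09 that still has time_out == None
# gets flagged via sign_out(entry, forgot=True) and committed
flag_forgotten_entries(session, today=date(2016, 5, 9))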
mesbahamin/chronophore
chronophore/controller.py
signed_in_users
def signed_in_users(session=None, today=None, full_name=True):
    """Return a list of the currently signed in users.

    :param session: SQLAlchemy session through which to access the database.
    :param today: (optional) The current date as a `datetime.date` object. Used for testing.
    :param full_name: (optional) Whether to display full user names, or just first names.
    :return: List of `models.User` objects for the currently signed in users.
    """  # noqa
    if session is None:
        session = Session()

    if today is None:
        today = date.today()

    signed_in_users = (
        session
        .query(User)
        .filter(Entry.date == today)
        .filter(Entry.time_out.is_(None))
        .filter(User.user_id == Entry.user_id)
        .all()
    )

    session.close()
    return signed_in_users
python
ee140c61b4dfada966f078de8304bac737cec6f7
https://github.com/mesbahamin/chronophore/blob/ee140c61b4dfada966f078de8304bac737cec6f7/chronophore/controller.py#L90-L118
train
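Note that the function returns `models.User` objects; turning them into printable names is left to `get_user_name` (next record). A sketch:

users = signed_in_users(session)
first_names = [get_user_name(u, full_name=False) for u in users]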
mesbahamin/chronophore
chronophore/controller.py
get_user_name
def get_user_name(user, full_name=True):
    """Return the user's name as a string.

    :param user: `models.User` object. The user to get the name of.
    :param full_name: (optional) Whether to return full user name, or just first name.
    :return: The user's name.
    """  # noqa
    try:
        if full_name:
            name = ' '.join([user.first_name, user.last_name])
        else:
            name = user.first_name
    except AttributeError:
        name = None

    return name
python
ee140c61b4dfada966f078de8304bac737cec6f7
https://github.com/mesbahamin/chronophore/blob/ee140c61b4dfada966f078de8304bac737cec6f7/chronophore/controller.py#L121-L136
train
mesbahamin/chronophore
chronophore/controller.py
sign_in
def sign_in(user, user_type=None, date=None, time_in=None):
    """Add a new entry to the timesheet.

    :param user: `models.User` object. The user to sign in.
    :param user_type: (optional) Specify whether user is signing in as a `'student'` or `'tutor'`.
    :param date: (optional) `datetime.date` object. Specify the entry date.
    :param time_in: (optional) `datetime.time` object. Specify the sign in time.
    :return: The new entry.
    """  # noqa
    now = datetime.today()
    if date is None:
        date = now.date()
    if time_in is None:
        time_in = now.time()

    if user_type is None:
        if user.is_student and user.is_tutor:
            raise AmbiguousUserType('User is both a student and a tutor.')
        elif user.is_student:
            user_type = 'student'
        elif user.is_tutor:
            user_type = 'tutor'
        else:
            raise ValueError('Unknown user type.')

    new_entry = Entry(
        uuid=str(uuid.uuid4()),
        date=date,
        time_in=time_in,
        time_out=None,
        user_id=user.user_id,
        user_type=user_type,
        user=user,
    )

    logger.info('{} ({}) signed in.'.format(new_entry.user_id, new_entry.user_type))
    return new_entry
python
ee140c61b4dfada966f078de8304bac737cec6f7
https://github.com/mesbahamin/chronophore/blob/ee140c61b4dfada966f078de8304bac737cec6f7/chronophore/controller.py#L139-L174
train
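The `user_type` resolution order is worth spelling out. A sketch with a hypothetical `user` object:

entry = sign_in(user, user_type='tutor')  # explicit type wins, no inference
entry = sign_in(user)                     # otherwise inferred from user.is_student / user.is_tutor
# if both flags are set and no user_type is given, AmbiguousUserType is raised;
# if neither flag is set, ValueError is raised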
mesbahamin/chronophore
chronophore/controller.py
sign_out
def sign_out(entry, time_out=None, forgot=False):
    """Sign out of an existing entry in the timesheet. If the user forgot
    to sign out, flag the entry.

    :param entry: `models.Entry` object. The entry to sign out.
    :param time_out: (optional) `datetime.time` object. Specify the sign out time.
    :param forgot: (optional) If true, user forgot to sign out. Entry will be flagged as forgotten.
    :return: The signed out entry.
    """  # noqa
    if time_out is None:
        time_out = datetime.today().time()

    if forgot:
        entry.forgot_sign_out = True
        logger.info(
            '{} forgot to sign out on {}.'.format(entry.user_id, entry.date)
        )
    else:
        entry.time_out = time_out
        logger.info('{} ({}) signed out.'.format(entry.user_id, entry.user_type))

    return entry
python
ee140c61b4dfada966f078de8304bac737cec6f7
https://github.com/mesbahamin/chronophore/blob/ee140c61b4dfada966f078de8304bac737cec6f7/chronophore/controller.py#L177-L199
train
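One subtlety: with forgot=True the entry keeps time_out == None; only the forgot_sign_out flag is set. A sketch:

entry = sign_out(entry)               # stamps time_out with the current time
entry = sign_out(entry, forgot=True)  # time_out stays None, forgot_sign_out becomes True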
mesbahamin/chronophore
chronophore/controller.py
undo_sign_in
def undo_sign_in(entry, session=None):
    """Delete a signed in entry.

    :param entry: `models.Entry` object. The entry to delete.
    :param session: (optional) SQLAlchemy session through which to access the database.
    """  # noqa
    if session is None:
        session = Session()

    entry_to_delete = (
        session
        .query(Entry)
        .filter(Entry.uuid == entry.uuid)
        .one_or_none()
    )

    if entry_to_delete:
        logger.info('Undo sign in: {}'.format(entry_to_delete.user_id))
        logger.debug('Undo sign in: {}'.format(entry_to_delete))
        session.delete(entry_to_delete)
        session.commit()
    else:
        error_message = 'Entry not found: {}'.format(entry)
        logger.error(error_message)
        raise ValueError(error_message)
python
ee140c61b4dfada966f078de8304bac737cec6f7
https://github.com/mesbahamin/chronophore/blob/ee140c61b4dfada966f078de8304bac737cec6f7/chronophore/controller.py#L202-L228
train
mesbahamin/chronophore
chronophore/controller.py
undo_sign_out
def undo_sign_out(entry, session=None):
    """Sign in a signed out entry.

    :param entry: `models.Entry` object. The entry to sign back in.
    :param session: (optional) SQLAlchemy session through which to access the database.
    """  # noqa
    if session is None:
        session = Session()

    entry_to_sign_in = (
        session
        .query(Entry)
        .filter(Entry.uuid == entry.uuid)
        .one_or_none()
    )

    if entry_to_sign_in:
        logger.info('Undo sign out: {}'.format(entry_to_sign_in.user_id))
        logger.debug('Undo sign out: {}'.format(entry_to_sign_in))
        entry_to_sign_in.time_out = None
        session.add(entry_to_sign_in)
        session.commit()
    else:
        error_message = 'Entry not found: {}'.format(entry)
        logger.error(error_message)
        raise ValueError(error_message)
python
ee140c61b4dfada966f078de8304bac737cec6f7
https://github.com/mesbahamin/chronophore/blob/ee140c61b4dfada966f078de8304bac737cec6f7/chronophore/controller.py#L231-L258
train
mesbahamin/chronophore
chronophore/controller.py
sign
def sign(user_id, user_type=None, today=None, session=None):
    """Check user id for validity, then sign user in if they are signed
    out, or out if they are signed in.

    :param user_id: The ID of the user to sign in or out.
    :param user_type: (optional) Specify whether user is signing in as a `'student'` or `'tutor'`.
    :param today: (optional) The current date as a `datetime.date` object. Used for testing.
    :param session: (optional) SQLAlchemy session through which to access the database.
    :return: `Status` named tuple object. Information about the sign attempt.
    """  # noqa
    if session is None:
        session = Session()

    if today is None:
        today = date.today()

    user = (
        session
        .query(User)
        .filter(User.user_id == user_id)
        .one_or_none()
    )

    if user:
        signed_in_entries = (
            user
            .entries
            .filter(Entry.date == today)
            .filter(Entry.time_out.is_(None))
            .all()
        )

        if not signed_in_entries:
            new_entry = sign_in(user, user_type=user_type)
            session.add(new_entry)
            status = Status(
                valid=True,
                in_or_out='in',
                user_name=get_user_name(user),
                user_type=new_entry.user_type,
                entry=new_entry
            )
        else:
            for entry in signed_in_entries:
                signed_out_entry = sign_out(entry)
                session.add(signed_out_entry)
                status = Status(
                    valid=True,
                    in_or_out='out',
                    user_name=get_user_name(user),
                    user_type=signed_out_entry.user_type,
                    entry=signed_out_entry
                )

        session.commit()

    else:
        raise UnregisteredUser(
            '{} not registered. Please register at the front desk.'.format(
                user_id
            )
        )

    logger.debug(status)
    return status
python
ee140c61b4dfada966f078de8304bac737cec6f7
https://github.com/mesbahamin/chronophore/blob/ee140c61b4dfada966f078de8304bac737cec6f7/chronophore/controller.py#L261-L330
train
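`sign` therefore acts as a toggle keyed on today's open entries. A sketch with a hypothetical user ID:

status = sign('880000000', session=session)
status.in_or_out   # 'in' if the user had no open entry today, else 'out'
status.entry       # the Entry that was created or closed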
reorx/torext
torext/gevent_wsgi.py
FormattedWSGIHandler.format_request
def format_request(self):
    """Override for better log format

    Tornado format:
    [INFO 2015-03-24 11:29:57 app:521] 200 GET /static/css/lib/pure-min.css (127.0.0.1) 6.76ms

    Current format:
    [gevent.wsgi] INFO 127.0.0.1 - - [2015-03-24 11:18:45] "GET /test HTTP/1.1" 200 304 0.000793
    """
    fmt = '{now} {status} {requestline} ({client_address}) {response_length} {delta}ms'

    requestline = getattr(self, 'requestline')
    if requestline:
        # Original "GET / HTTP/1.1", remove the "HTTP/1.1"
        requestline = ' '.join(requestline.split(' ')[:-1])
    else:
        requestline = '???'

    if self.time_finish:
        delta = '%.2f' % ((self.time_finish - self.time_start) * 1000)
    else:
        delta = '-'

    data = dict(
        now=datetime.datetime.now().replace(microsecond=0),
        response_length=self.response_length or '-',
        client_address=self.client_address[0]
        if isinstance(self.client_address, tuple)
        else self.client_address,
        status=str(self._get_status_int()),
        requestline=requestline,
        delta=delta,
    )
    return fmt.format(**data)
python
84c4300ebc7fab0dbd11cf8b020bc7d4d1570171
https://github.com/reorx/torext/blob/84c4300ebc7fab0dbd11cf8b020bc7d4d1570171/torext/gevent_wsgi.py#L21-L52
train
reorx/torext
torext/gevent_wsgi.py
FormattedWSGIHandler.handle_error
def handle_error(self, type_, value, tb):
    """This method copies the code from pywsgi.WSGIHandler.handle_error,
    changing the write part so that the response body reflects the
    traceback and environ
    """
    if not issubclass(type_, pywsgi.GreenletExit):
        self.server.loop.handle_error(self.environ, type_, value, tb)

    if self.response_length:
        self.close_connection = True
    else:
        tb_stream = traceback.format_exception(type_, value, tb)
        del tb
        tb_stream.append('\n')
        tb_stream.append(pprint.pformat(self.environ))
        body = ''.join(tb_stream)

        headers = pywsgi._INTERNAL_ERROR_HEADERS[:]
        headers[2] = ('Content-Length', str(len(body)))
        self.start_response(pywsgi._INTERNAL_ERROR_STATUS, headers)
        self.write(body)
python
84c4300ebc7fab0dbd11cf8b020bc7d4d1570171
https://github.com/reorx/torext/blob/84c4300ebc7fab0dbd11cf8b020bc7d4d1570171/torext/gevent_wsgi.py#L82-L100
train
geophysics-ubonn/crtomo_tools
lib/crtomo/configManager.py
ConfigManager.clear_measurements
def clear_measurements(self):
    """Remove all measurements from self.measurements. Reset the
    measurement counter. All IDs are invalidated.
    """
    keys = list(self.measurements.keys())
    for key in keys:
        del self.measurements[key]

    self.meas_counter = -1
python
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/configManager.py#L56-L63
train
geophysics-ubonn/crtomo_tools
lib/crtomo/configManager.py
ConfigManager.add_measurements
def add_measurements(self, measurements):
    """Add new measurements to this instance

    Parameters
    ----------
    measurements: numpy.ndarray
        one or more measurement sets. It must either be 1D or 2D, with
        the first dimension the number of measurement sets (K), and the
        second the number of measurements (N): K x N

    Returns
    -------
    mid: int
        measurement ID used to extract the measurements later on

    Examples
    --------
    >>> import numpy as np
    >>> import crtomo.configManager as CRconfig
    >>> config = CRconfig.ConfigManager(nr_of_electrodes=10)
    >>> config.gen_dipole_dipole(skipc=0)
    >>> # generate some random noise
    >>> random_measurements = np.random.random(config.nr_of_configs)
    >>> mid = config.add_measurements(random_measurements)
    >>> # retrieve using mid
    >>> print(config.measurements[mid])
    """
    subdata = np.atleast_2d(measurements)

    if self.configs is None:
        raise Exception(
            'must read in configuration before measurements can be stored'
        )

    # we try to accommodate transposed input
    if subdata.shape[1] != self.configs.shape[0]:
        if subdata.shape[0] == self.configs.shape[0]:
            subdata = subdata.T
        else:
            raise Exception(
                'Number of measurements does not match number of configs'
            )

    return_ids = []
    for dataset in subdata:
        cid = self._get_next_index()
        self.measurements[cid] = dataset.copy()
        return_ids.append(cid)

    if len(return_ids) == 1:
        return return_ids[0]
    else:
        return return_ids
python
def add_measurements(self, measurements): """Add new measurements to this instance Parameters ---------- measurements: numpy.ndarray one or more measurement sets. It must either be 1D or 2D, with the first dimension the number of measurement sets (K), and the second the number of measurements (N): K x N Returns ------- mid: int measurement ID used to extract the measurements later on Examples -------- >>> import numpy as np import crtomo.configManager as CRconfig config = CRconfig.ConfigManager(nr_of_electrodes=10) config.gen_dipole_dipole(skipc=0) # generate some random noise random_measurements = np.random.random(config.nr_of_configs) mid = config.add_measurements(random_measurements) # retrieve using mid print(config.measurements[mid]) """ subdata = np.atleast_2d(measurements) if self.configs is None: raise Exception( 'must read in configuration before measurements can be stored' ) # we try to accommodate transposed input if subdata.shape[1] != self.configs.shape[0]: if subdata.shape[0] == self.configs.shape[0]: subdata = subdata.T else: raise Exception( 'Number of measurements does not match number of configs' ) return_ids = [] for dataset in subdata: cid = self._get_next_index() self.measurements[cid] = dataset.copy() return_ids.append(cid) if len(return_ids) == 1: return return_ids[0] else: return return_ids
[ "def", "add_measurements", "(", "self", ",", "measurements", ")", ":", "subdata", "=", "np", ".", "atleast_2d", "(", "measurements", ")", "if", "self", ".", "configs", "is", "None", ":", "raise", "Exception", "(", "'must read in configuration before measurements can be stored'", ")", "# we try to accommodate transposed input", "if", "subdata", ".", "shape", "[", "1", "]", "!=", "self", ".", "configs", ".", "shape", "[", "0", "]", ":", "if", "subdata", ".", "shape", "[", "0", "]", "==", "self", ".", "configs", ".", "shape", "[", "0", "]", ":", "subdata", "=", "subdata", ".", "T", "else", ":", "raise", "Exception", "(", "'Number of measurements does not match number of configs'", ")", "return_ids", "=", "[", "]", "for", "dataset", "in", "subdata", ":", "cid", "=", "self", ".", "_get_next_index", "(", ")", "self", ".", "measurements", "[", "cid", "]", "=", "dataset", ".", "copy", "(", ")", "return_ids", ".", "append", "(", "cid", ")", "if", "len", "(", "return_ids", ")", "==", "1", ":", "return", "return_ids", "[", "0", "]", "else", ":", "return", "return_ids" ]
Add new measurements to this instance Parameters ---------- measurements: numpy.ndarray one or more measurement sets. It must either be 1D or 2D, with the first dimension the number of measurement sets (K), and the second the number of measurements (N): K x N Returns ------- mid: int measurement ID used to extract the measurements later on Examples -------- >>> import numpy as np import crtomo.configManager as CRconfig config = CRconfig.ConfigManager(nr_of_electrodes=10) config.gen_dipole_dipole(skipc=0) # generate some random noise random_measurements = np.random.random(config.nr_of_configs) mid = config.add_measurements(random_measurements) # retrieve using mid print(config.measurements[mid])
[ "Add", "new", "measurements", "to", "this", "instance" ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/configManager.py#L120-L173
train
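The transposition branch in add_measurements means both K x N and N x K input are accepted; a sketch, again assuming crtomo is installed:

import numpy as np
import crtomo.configManager as CRconfig

config = CRconfig.ConfigManager(nr_of_electrodes=10)
config.gen_dipole_dipole(skipc=0)
two_sets = np.random.random((2, config.nr_of_configs))  # K x N
mids = config.add_measurements(two_sets)
assert len(mids) == 2                         # 2D input returns a list of IDs
mids_t = config.add_measurements(two_sets.T)  # N x K is transposed internally
assert len(mids_t) == 2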
geophysics-ubonn/crtomo_tools
lib/crtomo/configManager.py
ConfigManager.gen_all_voltages_for_injections
def gen_all_voltages_for_injections(self, injections_raw): """For a given set of current injections AB, generate all possible unique potential measurements. After Noel and Xu, 1991, for N electrodes, the number of possible voltage dipoles for a given current dipole is :math:`(N - 2)(N - 3) / 2`. This includes normal and reciprocal measurements. If current dipoles are generated with ConfigManager.gen_all_current_dipoles(), then :math:`N \cdot (N - 1) / 2` current dipoles are generated. Thus, this function will produce :math:`N \cdot (N - 1)(N - 2)(N - 3) / 4` four-point configurations ABMN, half of which are reciprocals (Noel and Xu, 1991). All generated measurements are added to the instance. Use ConfigManager.split_into_normal_and_reciprocal() to split the configurations into normal and reciprocal measurements. Parameters ---------- injections_raw: numpy.ndarray Kx2 array holding K current injection dipoles A-B Returns ------- configs: numpy.ndarray Na x 4 array holding all possible measurement configurations """ injections = injections_raw.astype(int) N = self.nr_electrodes all_quadpoles = [] for idipole in injections: # sort current electrodes and convert to array indices Icurrent = np.sort(idipole) - 1 # voltage electrodes velecs = list(range(1, N + 1)) # remove current electrodes del(velecs[Icurrent[1]]) del(velecs[Icurrent[0]]) # permute remaining voltages = itertools.permutations(velecs, 2) for voltage in voltages: all_quadpoles.append( (idipole[0], idipole[1], voltage[0], voltage[1]) ) configs_unsorted = np.array(all_quadpoles) # sort AB and MN configs_sorted = np.hstack(( np.sort(configs_unsorted[:, 0:2], axis=1), np.sort(configs_unsorted[:, 2:4], axis=1), )) configs = self.remove_duplicates(configs_sorted) self.add_to_configs(configs) self.remove_duplicates() return configs
python
def gen_all_voltages_for_injections(self, injections_raw): """For a given set of current injections AB, generate all possible unique potential measurements. After Noel and Xu, 1991, for N electrodes, the number of possible voltage dipoles for a given current dipole is :math:`(N - 2)(N - 3) / 2`. This includes normal and reciprocal measurements. If current dipoles are generated with ConfigManager.gen_all_current_dipoles(), then :math:`N \cdot (N - 1) / 2` current dipoles are generated. Thus, this function will produce :math:`N \cdot (N - 1)(N - 2)(N - 3) / 4` four-point configurations ABMN, half of which are reciprocals (Noel and Xu, 1991). All generated measurements are added to the instance. Use ConfigManager.split_into_normal_and_reciprocal() to split the configurations into normal and reciprocal measurements. Parameters ---------- injections_raw: numpy.ndarray Kx2 array holding K current injection dipoles A-B Returns ------- configs: numpy.ndarray Na x 4 array holding all possible measurement configurations """ injections = injections_raw.astype(int) N = self.nr_electrodes all_quadpoles = [] for idipole in injections: # sort current electrodes and convert to array indices Icurrent = np.sort(idipole) - 1 # voltage electrodes velecs = list(range(1, N + 1)) # remove current electrodes del(velecs[Icurrent[1]]) del(velecs[Icurrent[0]]) # permute remaining voltages = itertools.permutations(velecs, 2) for voltage in voltages: all_quadpoles.append( (idipole[0], idipole[1], voltage[0], voltage[1]) ) configs_unsorted = np.array(all_quadpoles) # sort AB and MN configs_sorted = np.hstack(( np.sort(configs_unsorted[:, 0:2], axis=1), np.sort(configs_unsorted[:, 2:4], axis=1), )) configs = self.remove_duplicates(configs_sorted) self.add_to_configs(configs) self.remove_duplicates() return configs
[ "def", "gen_all_voltages_for_injections", "(", "self", ",", "injections_raw", ")", ":", "injections", "=", "injections_raw", ".", "astype", "(", "int", ")", "N", "=", "self", ".", "nr_electrodes", "all_quadpoles", "=", "[", "]", "for", "idipole", "in", "injections", ":", "# sort current electrodes and convert to array indices", "Icurrent", "=", "np", ".", "sort", "(", "idipole", ")", "-", "1", "# voltage electrodes", "velecs", "=", "list", "(", "range", "(", "1", ",", "N", "+", "1", ")", ")", "# remove current electrodes", "del", "(", "velecs", "[", "Icurrent", "[", "1", "]", "]", ")", "del", "(", "velecs", "[", "Icurrent", "[", "0", "]", "]", ")", "# permutate remaining", "voltages", "=", "itertools", ".", "permutations", "(", "velecs", ",", "2", ")", "for", "voltage", "in", "voltages", ":", "all_quadpoles", ".", "append", "(", "(", "idipole", "[", "0", "]", ",", "idipole", "[", "1", "]", ",", "voltage", "[", "0", "]", ",", "voltage", "[", "1", "]", ")", ")", "configs_unsorted", "=", "np", ".", "array", "(", "all_quadpoles", ")", "# sort AB and MN", "configs_sorted", "=", "np", ".", "hstack", "(", "(", "np", ".", "sort", "(", "configs_unsorted", "[", ":", ",", "0", ":", "2", "]", ",", "axis", "=", "1", ")", ",", "np", ".", "sort", "(", "configs_unsorted", "[", ":", ",", "2", ":", "4", "]", ",", "axis", "=", "1", ")", ",", ")", ")", "configs", "=", "self", ".", "remove_duplicates", "(", "configs_sorted", ")", "self", ".", "add_to_configs", "(", "configs", ")", "self", ".", "remove_duplicates", "(", ")", "return", "configs" ]
For a given set of current injections AB, generate all possible unique potential measurements. After Noel and Xu, 1991, for N electrodes, the number of possible voltage dipoles for a given current dipole is :math:`(N - 2)(N - 3) / 2`. This includes normal and reciprocal measurements. If current dipoles are generated with ConfigManager.gen_all_current_dipoles(), then :math:`N \cdot (N - 1) / 2` current dipoles are generated. Thus, this function will produce :math:`N \cdot (N - 1)(N - 2)(N - 3) / 4` four-point configurations ABMN, half of which are reciprocals (Noel and Xu, 1991). All generated measurements are added to the instance. Use ConfigManager.split_into_normal_and_reciprocal() to split the configurations into normal and reciprocal measurements. Parameters ---------- injections_raw: numpy.ndarray Kx2 array holding K current injection dipoles A-B Returns ------- configs: numpy.ndarray Na x 4 array holding all possible measurement configurations
[ "For", "a", "given", "set", "of", "current", "injections", "AB", "generate", "all", "possible", "unique", "potential", "measurements", "." ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/configManager.py#L893-L954
train
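The corrected count N(N - 1)(N - 2)(N - 3) / 4 can be verified with a self-contained enumeration that mimics the loop in gen_all_voltages_for_injections (standard library only):

import itertools

N = 6  # small electrode count, cheap to enumerate
configs = set()
for a, b in itertools.combinations(range(1, N + 1), 2):   # all current dipoles AB
    remaining = [e for e in range(1, N + 1) if e not in (a, b)]
    for m, n in itertools.permutations(remaining, 2):      # ordered voltage pairs
        configs.add((a, b) + tuple(sorted((m, n))))        # sort MN to deduplicate
assert len(configs) == N * (N - 1) * (N - 2) * (N - 3) // 4  # 90 for N = 6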
geophysics-ubonn/crtomo_tools
lib/crtomo/configManager.py
ConfigManager.gen_wenner
def gen_wenner(self, a): """Generate Wenner measurement configurations. Parameters ---------- a: int distance (in electrodes) between subsequent electrodes of each four-point configuration. Returns ------- configs: Kx4 numpy.ndarray array holding the configurations """ configs = [] for i in range(1, self.nr_electrodes - 3 * a + 1): configs.append( (i, i + a, i + 2 * a, i + 3 * a), ) configs = np.array(configs) self.add_to_configs(configs) return configs
python
def gen_wenner(self, a): """Generate Wenner measurement configurations. Parameters ---------- a: int distance (in electrodes) between subsequent electrodes of each four-point configuration. Returns ------- configs: Kx4 numpy.ndarray array holding the configurations """ configs = [] for i in range(1, self.nr_electrodes - 3 * a + 1): configs.append( (i, i + a, i + 2 * a, i + 3 * a), ) configs = np.array(configs) self.add_to_configs(configs) return configs
[ "def", "gen_wenner", "(", "self", ",", "a", ")", ":", "configs", "=", "[", "]", "for", "i", "in", "range", "(", "1", ",", "self", ".", "nr_electrodes", "-", "3", "*", "a", "+", "1", ")", ":", "configs", ".", "append", "(", "(", "i", ",", "i", "+", "a", ",", "i", "+", "2", "*", "a", ",", "i", "+", "3", "*", "a", ")", ",", ")", "configs", "=", "np", ".", "array", "(", "configs", ")", "self", ".", "add_to_configs", "(", "configs", ")", "return", "configs" ]
Generate Wenner measurement configurations. Parameters ---------- a: int distance (in electrodes) between subsequent electrodes of each four-point configuration. Returns ------- configs: Kx4 numpy.ndarray array holding the configurations
[ "Generate", "Wenner", "measurement", "configurations", "." ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/configManager.py#L1108-L1129
train
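The Wenner construction is simple enough to cross-check with a throwaway re-implementation (the function name here is illustrative, not part of the library):

def wenner_configs(n_electrodes, a):
    # same loop bounds as gen_wenner above
    return [(i, i + a, i + 2 * a, i + 3 * a)
            for i in range(1, n_electrodes - 3 * a + 1)]

assert len(wenner_configs(10, 1)) == 7           # N - 3a spreads fit on the line
assert wenner_configs(10, 3) == [(1, 4, 7, 10)]  # largest spacing that still fits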
geophysics-ubonn/crtomo_tools
lib/crtomo/configManager.py
ConfigManager.gen_reciprocals
def gen_reciprocals(self, quadrupoles): """For a given set of quadrupoles, generate and return reciprocals """ reciprocals = quadrupoles[:, ::-1].copy() reciprocals[:, 0:2] = np.sort(reciprocals[:, 0:2], axis=1) reciprocals[:, 2:4] = np.sort(reciprocals[:, 2:4], axis=1) return reciprocals
python
def gen_reciprocals(self, quadrupoles): """For a given set of quadrupoles, generate and return reciprocals """ reciprocals = quadrupoles[:, ::-1].copy() reciprocals[:, 0:2] = np.sort(reciprocals[:, 0:2], axis=1) reciprocals[:, 2:4] = np.sort(reciprocals[:, 2:4], axis=1) return reciprocals
[ "def", "gen_reciprocals", "(", "self", ",", "quadrupoles", ")", ":", "reciprocals", "=", "quadrupoles", "[", ":", ",", ":", ":", "-", "1", "]", ".", "copy", "(", ")", "reciprocals", "[", ":", ",", "0", ":", "2", "]", "=", "np", ".", "sort", "(", "reciprocals", "[", ":", ",", "0", ":", "2", "]", ",", "axis", "=", "1", ")", "reciprocals", "[", ":", ",", "2", ":", "4", "]", "=", "np", ".", "sort", "(", "reciprocals", "[", ":", ",", "2", ":", "4", "]", ",", "axis", "=", "1", ")", "return", "reciprocals" ]
For a given set of quadrupoles, generate and return reciprocals
[ "For", "a", "given", "set", "of", "quadrupoles", "generate", "and", "return", "reciprocals" ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/configManager.py#L1253-L1259
train
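A reciprocal swaps the roles of the current and voltage dipoles; the reverse-then-sort trick used by gen_reciprocals can be checked directly:

import numpy as np

quadrupoles = np.array([[1, 2, 5, 4]])     # columns A B M N
reciprocals = quadrupoles[:, ::-1].copy()  # -> [[4, 5, 2, 1]]
reciprocals[:, 0:2] = np.sort(reciprocals[:, 0:2], axis=1)  # new AB = sorted old MN
reciprocals[:, 2:4] = np.sort(reciprocals[:, 2:4], axis=1)  # new MN = sorted old AB
assert reciprocals.tolist() == [[4, 5, 1, 2]]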
geophysics-ubonn/crtomo_tools
lib/crtomo/configManager.py
ConfigManager.compute_K_factors
def compute_K_factors(self, spacing=None, configs=None, numerical=False, elem_file=None, elec_file=None): """Compute geometrical factors, either analytically or numerically. TODO: use real electrode positions from self.grid """ if configs is None: use_configs = self.configs else: use_configs = configs if numerical: settings = { 'elem': elem_file, 'elec': elec_file, 'rho': 100, } K = edfK.compute_K_numerical(use_configs, settings) else: K = edfK.compute_K_analytical(use_configs, spacing=spacing) return K
python
def compute_K_factors(self, spacing=None, configs=None, numerical=False, elem_file=None, elec_file=None): """Compute geometrical factors, either analytically or numerically. TODO: use real electrode positions from self.grid """ if configs is None: use_configs = self.configs else: use_configs = configs if numerical: settings = { 'elem': elem_file, 'elec': elec_file, 'rho': 100, } K = edfK.compute_K_numerical(use_configs, settings) else: K = edfK.compute_K_analytical(use_configs, spacing=spacing) return K
[ "def", "compute_K_factors", "(", "self", ",", "spacing", "=", "None", ",", "configs", "=", "None", ",", "numerical", "=", "False", ",", "elem_file", "=", "None", ",", "elec_file", "=", "None", ")", ":", "if", "configs", "is", "None", ":", "use_configs", "=", "self", ".", "configs", "else", ":", "use_configs", "=", "configs", "if", "numerical", ":", "settings", "=", "{", "'elem'", ":", "elem_file", ",", "'elec'", ":", "elec_file", ",", "'rho'", ":", "100", ",", "}", "K", "=", "edfK", ".", "compute_K_numerical", "(", "use_configs", ",", "settings", ")", "else", ":", "K", "=", "edfK", ".", "compute_K_analytical", "(", "use_configs", ",", "spacing", "=", "spacing", ")", "return", "K" ]
Compute geometrical factors, either analytically or numerically. TODO: use real electrode positions from self.grid
[ "Compute", "geometrical", "factors", "either", "analytically", "or", "numerically", "." ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/lib/crtomo/configManager.py#L1299-L1319
train
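For collinear electrodes on the surface of a half-space, the analytical geometric factor is K = 2*pi / (1/AM - 1/BM - 1/AN + 1/BN). The sketch below is a plain numpy stand-in, not the library's compute_K_analytical, and it assumes an ABMN column order with 1-based electrode numbering:

import numpy as np

def k_analytical(configs, spacing):
    # electrode numbers -> positions on a line, in metres
    a, b, m, n = ((configs - 1) * spacing).T
    r = lambda p, q: np.abs(p - q)
    return 2 * np.pi / (1 / r(a, m) - 1 / r(b, m) - 1 / r(a, n) + 1 / r(b, n))

wenner_alpha = np.array([[1, 4, 2, 3]])  # A M N B spread written as A B M N
assert np.isclose(k_analytical(wenner_alpha, 1.0)[0], 2 * np.pi)  # K = 2*pi*a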
albert12132/templar
templar/api/rules/core.py
Rule.applies
def applies(self, src, dst): """Checks if this rule applies to the given src and dst paths, based on the src pattern and dst pattern given in the constructor. If src pattern was None, this rule will apply to any given src path (same for dst). """ if self._src_pattern and (src is None or re.search(self._src_pattern, src) is None): return False elif self._dst_pattern and (dst is None or re.search(self._dst_pattern, dst) is None): return False return True
python
def applies(self, src, dst): """Checks if this rule applies to the given src and dst paths, based on the src pattern and dst pattern given in the constructor. If src pattern was None, this rule will apply to any given src path (same for dst). """ if self._src_pattern and (src is None or re.search(self._src_pattern, src) is None): return False elif self._dst_pattern and (dst is None or re.search(self._dst_pattern, dst) is None): return False return True
[ "def", "applies", "(", "self", ",", "src", ",", "dst", ")", ":", "if", "self", ".", "_src_pattern", "and", "(", "src", "is", "None", "or", "re", ".", "search", "(", "self", ".", "_src_pattern", ",", "src", ")", "is", "None", ")", ":", "return", "False", "elif", "self", ".", "_dst_pattern", "and", "(", "dst", "is", "None", "or", "re", ".", "search", "(", "self", ".", "_dst_pattern", ",", "dst", ")", "is", "None", ")", ":", "return", "False", "return", "True" ]
Checks if this rule applies to the given src and dst paths, based on the src pattern and dst pattern given in the constructor. If src pattern was None, this rule will apply to any given src path (same for dst).
[ "Checks", "if", "this", "rule", "applies", "to", "the", "given", "src", "and", "dst", "paths", "based", "on", "the", "src", "pattern", "and", "dst", "pattern", "given", "in", "the", "constructor", "." ]
39851c89730ab69e5c73d0a46adca2a44ecc4165
https://github.com/albert12132/templar/blob/39851c89730ab69e5c73d0a46adca2a44ecc4165/templar/api/rules/core.py#L33-L43
train
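The predicate in Rule.applies is easy to exercise standalone; this sketch restates the same logic as a hypothetical free function:

import re

def applies(src_pattern, dst_pattern, src, dst):
    # a None pattern matches anything; otherwise the path must re.search()
    if src_pattern and (src is None or re.search(src_pattern, src) is None):
        return False
    if dst_pattern and (dst is None or re.search(dst_pattern, dst) is None):
        return False
    return True

assert applies(r'\.md$', None, 'docs/index.md', None)
assert not applies(r'\.md$', None, 'docs/index.html', None)
assert applies(None, None, None, None)  # no patterns: the rule always applies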
NJDFan/ctypes-bitfield
bitfield/walk.py
_createunbound
def _createunbound(kls, **info): """Create a new UnboundNode representing a given class.""" if issubclass(kls, Bitfield): nodetype = UnboundBitfieldNode elif hasattr(kls, '_fields_'): nodetype = UnboundStructureNode elif issubclass(kls, ctypes.Array): nodetype = UnboundArrayNode else: nodetype = UnboundSimpleNode return nodetype(type=kls, **info)
python
def _createunbound(kls, **info): """Create a new UnboundNode representing a given class.""" if issubclass(kls, Bitfield): nodetype = UnboundBitfieldNode elif hasattr(kls, '_fields_'): nodetype = UnboundStructureNode elif issubclass(kls, ctypes.Array): nodetype = UnboundArrayNode else: nodetype = UnboundSimpleNode return nodetype(type=kls, **info)
[ "def", "_createunbound", "(", "kls", ",", "*", "*", "info", ")", ":", "if", "issubclass", "(", "kls", ",", "Bitfield", ")", ":", "nodetype", "=", "UnboundBitfieldNode", "elif", "hasattr", "(", "kls", ",", "'_fields_'", ")", ":", "nodetype", "=", "UnboundStructureNode", "elif", "issubclass", "(", "kls", ",", "ctypes", ".", "Array", ")", ":", "nodetype", "=", "UnboundArrayNode", "else", ":", "nodetype", "=", "UnboundSimpleNode", "return", "nodetype", "(", "type", "=", "kls", ",", "*", "*", "info", ")" ]
Create a new UnboundNode representing a given class.
[ "Create", "a", "new", "UnboundNode", "representing", "a", "given", "class", "." ]
ae76b1dcfef7ecc90bd1900735b94ddee41a6376
https://github.com/NJDFan/ctypes-bitfield/blob/ae76b1dcfef7ecc90bd1900735b94ddee41a6376/bitfield/walk.py#L240-L251
train
NJDFan/ctypes-bitfield
bitfield/walk.py
_createbound
def _createbound(obj): """Create a new BoundNode representing a given object.""" # Start by allowing objects to define custom unbound reference hooks try: kls = obj._unboundreference_() except AttributeError: kls = type(obj) unbound = _createunbound(kls) def valueget(): return obj for t in (BoundBitfieldNode, BoundStructureNode, BoundArrayNode): if isinstance(unbound, t._unboundtype): kls = t break else: kls = BoundSimpleNode child = kls(unbound, valueget) return child
python
def _createbound(obj): """Create a new BoundNode representing a given object.""" # Start by allowing objects to define custom unbound reference hooks try: kls = obj._unboundreference_() except AttributeError: kls = type(obj) unbound = _createunbound(kls) def valueget(): return obj for t in (BoundBitfieldNode, BoundStructureNode, BoundArrayNode): if isinstance(unbound, t._unboundtype): kls = t break else: kls = BoundSimpleNode child = kls(unbound, valueget) return child
[ "def", "_createbound", "(", "obj", ")", ":", "# Start by allowing objects to define custom unbound reference hooks", "try", ":", "kls", "=", "obj", ".", "_unboundreference_", "(", ")", "except", "AttributeError", ":", "kls", "=", "type", "(", "obj", ")", "unbound", "=", "_createunbound", "(", "kls", ")", "def", "valueget", "(", ")", ":", "return", "obj", "for", "t", "in", "(", "BoundBitfieldNode", ",", "BoundStructureNode", ",", "BoundArrayNode", ")", ":", "if", "isinstance", "(", "unbound", ",", "t", ".", "_unboundtype", ")", ":", "kls", "=", "t", "break", "else", ":", "kls", "=", "BoundSimpleNode", "child", "=", "kls", "(", "unbound", ",", "valueget", ")", "return", "child" ]
Create a new BoundNode representing a given object.
[ "Create", "a", "new", "BoundNode", "representing", "a", "given", "object", "." ]
ae76b1dcfef7ecc90bd1900735b94ddee41a6376
https://github.com/NJDFan/ctypes-bitfield/blob/ae76b1dcfef7ecc90bd1900735b94ddee41a6376/bitfield/walk.py#L361-L380
train
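_createbound leans on Python's for/else: the else clause runs only when the loop finishes without hitting break. A minimal illustration of the idiom:

candidates = (int, str, list)
value = 'hello'
for t in candidates:
    if isinstance(value, t):
        kind = t
        break
else:
    kind = object  # fallback, playing the role of BoundSimpleNode above
assert kind is str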
NJDFan/ctypes-bitfield
bitfield/walk.py
display
def display(obj, skiphidden=True, **printargs): """Print a view of obj, where obj is either a ctypes-derived class or an instance of such a class. Any additional keyword arguments are passed directly to the print function. This is mostly useful to introspect structures from an interactive session. """ top = findnode(obj) #------------------------------------------------------------------- # Iterate through the entire structure turning all the nodes into # tuples of strings for display. maxhex = len(hex(ctypes.sizeof(top.type))) - 2 def addrformat(addr): if isinstance(addr, int): return "0x{0:0{1}X}".format(addr, maxhex) else: intpart = int(addr) fracbits = int((addr - intpart) * 8) return "0x{0:0{1}X}'{2}".format(intpart, maxhex, fracbits) def formatval(here): if isinstance(here, BoundSimpleNode): return "{0}({1})".format(here.type.__name__, here.value) else: return str(here.value) if isinstance(top, UnboundNode): headers = ['Path', 'Addr', 'Type'] results = [ ((' ' * n.depth) + n.name, addrformat(n.baseoffset), n.type.__name__) for n in walknode(top, skiphidden) ] else: headers = ['Path', 'Addr', 'Value'] results = [ ((' ' * n.depth) + n.name, addrformat(n.baseoffset), formatval(n)) for n in walknode(top, skiphidden) ] #------------------------------------------------------------------- # Determine the maximum width of the text in each column, make the # column always that wide. widths = [ max(max(len(d[col]) for d in results), len(h)) for col, h in enumerate(headers) ] #------------------------------------------------------------------- # Print out the tabular data. def lp(args): print(*args, **printargs) lp(d.center(w) for d, w in zip(headers, widths)) lp('-' * w for w in widths) for r in results: lp(d.ljust(w) for d, w in zip(r, widths))
python
def display(obj, skiphidden=True, **printargs): """Print a view of obj, where obj is either a ctypes-derived class or an instance of such a class. Any additional keyword arguments are passed directly to the print function. This is mostly useful to introspect structures from an interactive session. """ top = findnode(obj) #------------------------------------------------------------------- # Iterate through the entire structure turning all the nodes into # tuples of strings for display. maxhex = len(hex(ctypes.sizeof(top.type))) - 2 def addrformat(addr): if isinstance(addr, int): return "0x{0:0{1}X}".format(addr, maxhex) else: intpart = int(addr) fracbits = int((addr - intpart) * 8) return "0x{0:0{1}X}'{2}".format(intpart, maxhex, fracbits) def formatval(here): if isinstance(here, BoundSimpleNode): return "{0}({1})".format(here.type.__name__, here.value) else: return str(here.value) if isinstance(top, UnboundNode): headers = ['Path', 'Addr', 'Type'] results = [ ((' ' * n.depth) + n.name, addrformat(n.baseoffset), n.type.__name__) for n in walknode(top, skiphidden) ] else: headers = ['Path', 'Addr', 'Value'] results = [ ((' ' * n.depth) + n.name, addrformat(n.baseoffset), formatval(n)) for n in walknode(top, skiphidden) ] #------------------------------------------------------------------- # Determine the maximum width of the text in each column, make the # column always that wide. widths = [ max(max(len(d[col]) for d in results), len(h)) for col, h in enumerate(headers) ] #------------------------------------------------------------------- # Print out the tabular data. def lp(args): print(*args, **printargs) lp(d.center(w) for d, w in zip(headers, widths)) lp('-' * w for w in widths) for r in results: lp(d.ljust(w) for d, w in zip(r, widths))
[ "def", "display", "(", "obj", ",", "skiphidden", "=", "True", ",", "*", "*", "printargs", ")", ":", "top", "=", "findnode", "(", "obj", ")", "#-------------------------------------------------------------------", "# Iterate through the entire structure turning all the nodes into", "# tuples of strings for display.", "maxhex", "=", "len", "(", "hex", "(", "ctypes", ".", "sizeof", "(", "top", ".", "type", ")", ")", ")", "-", "2", "def", "addrformat", "(", "addr", ")", ":", "if", "isinstance", "(", "addr", ",", "int", ")", ":", "return", "\"0x{0:0{1}X}\"", ".", "format", "(", "addr", ",", "maxhex", ")", "else", ":", "intpart", "=", "int", "(", "addr", ")", "fracbits", "=", "int", "(", "(", "addr", "-", "intpart", ")", "*", "8", ")", "return", "\"0x{0:0{1}X}'{2}\"", ".", "format", "(", "intpart", ",", "maxhex", ",", "fracbits", ")", "def", "formatval", "(", "here", ")", ":", "if", "isinstance", "(", "here", ",", "BoundSimpleNode", ")", ":", "return", "\"{0}({1})\"", ".", "format", "(", "here", ".", "type", ".", "__name__", ",", "here", ".", "value", ")", "else", ":", "return", "str", "(", "here", ".", "value", ")", "if", "isinstance", "(", "top", ",", "UnboundNode", ")", ":", "headers", "=", "[", "'Path'", ",", "'Addr'", ",", "'Type'", "]", "results", "=", "[", "(", "(", "' '", "*", "n", ".", "depth", ")", "+", "n", ".", "name", ",", "addrformat", "(", "n", ".", "baseoffset", ")", ",", "n", ".", "type", ".", "__name__", ")", "for", "n", "in", "walknode", "(", "top", ",", "skiphidden", ")", "]", "else", ":", "headers", "=", "[", "'Path'", ",", "'Addr'", ",", "'Value'", "]", "results", "=", "[", "(", "(", "' '", "*", "n", ".", "depth", ")", "+", "n", ".", "name", ",", "addrformat", "(", "n", ".", "baseoffset", ")", ",", "formatval", "(", "n", ")", ")", "for", "n", "in", "walknode", "(", "top", ",", "skiphidden", ")", "]", "#-------------------------------------------------------------------", "# Determine the maximum width of the text in each column, make the", "# column always that wide.", "widths", "=", "[", "max", "(", "max", "(", "len", "(", "d", "[", "col", "]", ")", "for", "d", "in", "results", ")", ",", "len", "(", "h", ")", ")", "for", "col", ",", "h", "in", "enumerate", "(", "headers", ")", "]", "#-------------------------------------------------------------------", "# Print out the tabular data.", "def", "lp", "(", "args", ")", ":", "print", "(", "*", "args", ",", "*", "*", "printargs", ")", "lp", "(", "d", ".", "center", "(", "w", ")", "for", "d", ",", "w", "in", "zip", "(", "headers", ",", "widths", ")", ")", "lp", "(", "'-'", "*", "w", "for", "w", "in", "widths", ")", "for", "r", "in", "results", ":", "lp", "(", "d", ".", "ljust", "(", "w", ")", "for", "d", ",", "w", "in", "zip", "(", "r", ",", "widths", ")", ")" ]
Print a view of obj, where obj is either a ctypes-derived class or an instance of such a class. Any additional keyword arguments are passed directly to the print function. This is mostly useful to introspect structures from an interactive session.
[ "Print", "a", "view", "of", "obj", "where", "obj", "is", "either", "a", "ctypes", "-", "derived", "class", "or", "an", "instance", "of", "such", "a", "class", ".", "Any", "additional", "keyword", "arguments", "are", "passed", "directly", "to", "the", "print", "function", ".", "This", "is", "mostly", "useful", "to", "introspect", "structures", "from", "an", "interactive", "session", "." ]
ae76b1dcfef7ecc90bd1900735b94ddee41a6376
https://github.com/NJDFan/ctypes-bitfield/blob/ae76b1dcfef7ecc90bd1900735b94ddee41a6376/bitfield/walk.py#L507-L567
train
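The tabulation inside display() boils down to per-column maximum widths plus center/ljust; a self-contained sketch with made-up rows:

headers = ['Path', 'Addr', 'Type']
rows = [('.x', '0x00', 'c_uint8'), ('.y.flag', '0x01', 'c_uint16')]
widths = [max(max(len(r[col]) for r in rows), len(h))
          for col, h in enumerate(headers)]
print(*(h.center(w) for h, w in zip(headers, widths)))
print(*('-' * w for w in widths))
for r in rows:
    print(*(d.ljust(w) for d, w in zip(r, widths)))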
NJDFan/ctypes-bitfield
bitfield/walk.py
Node.pathparts
def pathparts(self): """A list of the parts of the path, with the root node returning an empty list. """ try: parts = self.parent.pathparts() parts.append(self.name) return parts except AttributeError: return []
python
def pathparts(self): """A list of the parts of the path, with the root node returning an empty list. """ try: parts = self.parent.pathparts() parts.append(self.name) return parts except AttributeError: return []
[ "def", "pathparts", "(", "self", ")", ":", "try", ":", "parts", "=", "self", ".", "parent", ".", "pathparts", "(", ")", "parts", ".", "append", "(", "self", ".", "name", ")", "return", "parts", "except", "AttributeError", ":", "return", "[", "]" ]
A list of the parts of the path, with the root node returning an empty list.
[ "A", "list", "of", "the", "parts", "of", "the", "path", "with", "the", "root", "node", "returning", "an", "empty", "list", "." ]
ae76b1dcfef7ecc90bd1900735b94ddee41a6376
https://github.com/NJDFan/ctypes-bitfield/blob/ae76b1dcfef7ecc90bd1900735b94ddee41a6376/bitfield/walk.py#L166-L175
train
NJDFan/ctypes-bitfield
bitfield/walk.py
Node.baseoffset
def baseoffset(self): """The offset of this node from the root node.""" try: return self.parent.baseoffset + self.offset except AttributeError: return self.offset
python
def baseoffset(self): """The offset of this node from the root node.""" try: return self.parent.baseoffset + self.offset except AttributeError: return self.offset
[ "def", "baseoffset", "(", "self", ")", ":", "try", ":", "return", "self", ".", "parent", ".", "baseoffset", "+", "self", ".", "offset", "except", "AttributeError", ":", "return", "self", ".", "offset" ]
The offset of this node from the root node.
[ "The", "offset", "of", "this", "node", "from", "the", "root", "node", "." ]
ae76b1dcfef7ecc90bd1900735b94ddee41a6376
https://github.com/NJDFan/ctypes-bitfield/blob/ae76b1dcfef7ecc90bd1900735b94ddee41a6376/bitfield/walk.py#L192-L197
train
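Both pathparts and baseoffset rely on the same root detection: the root's parent is None, so the recursive attribute access raises AttributeError and the except branch terminates the recursion. A stripped-down model:

class Node:
    def __init__(self, offset, parent=None):
        self.offset = offset
        self.parent = parent  # None at the root

    @property
    def baseoffset(self):
        try:
            # None.baseoffset raises AttributeError at the root
            return self.parent.baseoffset + self.offset
        except AttributeError:
            return self.offset

root = Node(0)
child = Node(4, parent=root)
leaf = Node(2, parent=child)
assert leaf.baseoffset == 6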
geophysics-ubonn/crtomo_tools
src/grid_translate_model.py
_almost_equal
def _almost_equal(a, b): """Check if the two numbers are almost equal """ # arbitrary small number!!! threshold = 1e-9 diff = np.abs(a - b) return (diff < threshold)
python
def _almost_equal(a, b): """Check if the two numbers are almost equal """ # arbitrary small number!!! threshold = 1e-9 diff = np.abs(a - b) return (diff < threshold)
[ "def", "_almost_equal", "(", "a", ",", "b", ")", ":", "# arbitrary small number!!!", "threshold", "=", "1e-9", "diff", "=", "np", ".", "abs", "(", "a", "-", "b", ")", "return", "(", "diff", "<", "threshold", ")" ]
Check if the two numbers are almost equal
[ "Check", "if", "the", "two", "numbers", "are", "almost", "equal" ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/grid_translate_model.py#L62-L68
train
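The fixed absolute threshold in _almost_equal misbehaves far from magnitude 1; math.isclose with a relative tolerance is the usual alternative:

import math

threshold = 1e-9
a, b = 1e12, 1e12 + 1.0
print(abs(a - b) < threshold)            # False: absolute test is too strict here
print(math.isclose(a, b, rel_tol=1e-9))  # True: tolerance scales with magnitude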
pgxcentre/geneparse
geneparse/core.py
Variant.complement_alleles
def complement_alleles(self): """Complement the alleles of this variant. This will call this module's `complement_alleles` function. Note that this will not create a new object, but modify the state of the current instance. """ self.alleles = self._encode_alleles( [complement_alleles(i) for i in self.alleles] )
python
def complement_alleles(self): """Complement the alleles of this variant. This will call this module's `complement_alleles` function. Note that this will not create a new object, but modify the state of the current instance. """ self.alleles = self._encode_alleles( [complement_alleles(i) for i in self.alleles] )
[ "def", "complement_alleles", "(", "self", ")", ":", "self", ".", "alleles", "=", "self", ".", "_encode_alleles", "(", "[", "complement_alleles", "(", "i", ")", "for", "i", "in", "self", ".", "alleles", "]", ")" ]
Complement the alleles of this variant. This will call this module's `complement_alleles` function. Note that this will not create a new object, but modify the state of the current instance.
[ "Complement", "the", "alleles", "of", "this", "variant", "." ]
f698f9708af4c7962d384a70a5a14006b1cb7108
https://github.com/pgxcentre/geneparse/blob/f698f9708af4c7962d384a70a5a14006b1cb7108/geneparse/core.py#L139-L150
train
pgxcentre/geneparse
geneparse/core.py
Genotypes.flip_coded
def flip_coded(self): """Flips the coding of the alleles.""" self.genotypes = 2 - self.genotypes self.reference, self.coded = self.coded, self.reference
python
def flip_coded(self): """Flips the coding of the alleles.""" self.genotypes = 2 - self.genotypes self.reference, self.coded = self.coded, self.reference
[ "def", "flip_coded", "(", "self", ")", ":", "self", ".", "genotypes", "=", "2", "-", "self", ".", "genotypes", "self", ".", "reference", ",", "self", ".", "coded", "=", "self", ".", "coded", ",", "self", ".", "reference" ]
Flips the coding of the alleles.
[ "Flips", "the", "coding", "of", "the", "alleles", "." ]
f698f9708af4c7962d384a70a5a14006b1cb7108
https://github.com/pgxcentre/geneparse/blob/f698f9708af4c7962d384a70a5a14006b1cb7108/geneparse/core.py#L229-L232
train
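Genotypes here count copies of the coded allele (0, 1 or 2, with NaN for missing), so flipping the coding is pure arithmetic:

import numpy as np

genotypes = np.array([0.0, 1.0, 2.0, np.nan])
flipped = 2 - genotypes
assert np.allclose(flipped[:3], [2.0, 1.0, 0.0])
assert np.isnan(flipped[3])  # missing values stay missing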
pgxcentre/geneparse
geneparse/core.py
Genotypes.flip_strand
def flip_strand(self): """Flips the strand of the alleles.""" self.reference = complement_alleles(self.reference) self.coded = complement_alleles(self.coded) self.variant.complement_alleles()
python
def flip_strand(self): """Flips the strand of the alleles.""" self.reference = complement_alleles(self.reference) self.coded = complement_alleles(self.coded) self.variant.complement_alleles()
[ "def", "flip_strand", "(", "self", ")", ":", "self", ".", "reference", "=", "complement_alleles", "(", "self", ".", "reference", ")", "self", ".", "coded", "=", "complement_alleles", "(", "self", ".", "coded", ")", "self", ".", "variant", ".", "complement_alleles", "(", ")" ]
Flips the strand of the alleles.
[ "Flips", "the", "strand", "of", "the", "alleles", "." ]
f698f9708af4c7962d384a70a5a14006b1cb7108
https://github.com/pgxcentre/geneparse/blob/f698f9708af4c7962d384a70a5a14006b1cb7108/geneparse/core.py#L234-L238
train
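Strand flipping maps each allele to its Watson-Crick complement; a minimal single-nucleotide sketch (the real complement_alleles helper may handle more cases than this):

COMPLEMENT = {'A': 'T', 'T': 'A', 'C': 'G', 'G': 'C'}
reference, coded = 'A', 'G'
reference, coded = COMPLEMENT[reference], COMPLEMENT[coded]
assert (reference, coded) == ('T', 'C')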
aglie/meerkat
meerkat/det2lab_xds.py
rotvec2mat
def rotvec2mat(u, phi): """Convert rotation from axis and angle to matrix representation""" phi = np.squeeze(phi) norm_u = np.linalg.norm(u) if norm_u < 1e-12: raise Exception("the rotation vector is equal to zero") u = u / norm_u # http://en.wikipedia.org/wiki/Rotation_matrix s = np.sin(phi) c = np.cos(phi) t = 1 - c ux = u[0] uy = u[1] uz = u[2] res = np.array([[t * ux * ux + c, t * ux * uy - s * uz, t * ux * uz + s * uy], [t * ux * uy + s * uz, t * uy * uy + c, t * uy * uz - s * ux], [t * ux * uz - s * uy, t * uy * uz + s * ux, t * uz * uz + c]]) return res
python
def rotvec2mat(u, phi): """Convert rotation from axis and angle to matrix representation""" phi = np.squeeze(phi) norm_u = np.linalg.norm(u) if norm_u < 1e-12: raise Exception("the rotation vector is equal to zero") u = u / norm_u # http://en.wikipedia.org/wiki/Rotation_matrix s = np.sin(phi) c = np.cos(phi) t = 1 - c ux = u[0] uy = u[1] uz = u[2] res = np.array([[t * ux * ux + c, t * ux * uy - s * uz, t * ux * uz + s * uy], [t * ux * uy + s * uz, t * uy * uy + c, t * uy * uz - s * ux], [t * ux * uz - s * uy, t * uy * uz + s * ux, t * uz * uz + c]]) return res
[ "def", "rotvec2mat", "(", "u", ",", "phi", ")", ":", "phi", "=", "np", ".", "squeeze", "(", "phi", ")", "norm_u", "=", "np", ".", "linalg", ".", "norm", "(", "u", ")", "if", "norm_u", "<", "1e-12", ":", "raise", "Exception", "(", "\"the rotation vector is equal to zero\"", ")", "u", "=", "u", "/", "norm_u", "# http://en.wikipedia.org/wiki/Rotation_matrix", "s", "=", "np", ".", "sin", "(", "phi", ")", "c", "=", "np", ".", "cos", "(", "phi", ")", "t", "=", "1", "-", "c", "ux", "=", "u", "[", "0", "]", "uy", "=", "u", "[", "1", "]", "uz", "=", "u", "[", "2", "]", "res", "=", "np", ".", "array", "(", "[", "[", "t", "*", "ux", "*", "ux", "+", "c", ",", "t", "*", "ux", "*", "uy", "-", "s", "*", "uz", ",", "t", "*", "ux", "*", "uz", "+", "s", "*", "uy", "]", ",", "[", "t", "*", "ux", "*", "uy", "+", "s", "*", "uz", ",", "t", "*", "uy", "*", "uy", "+", "c", ",", "t", "*", "uy", "*", "uz", "-", "s", "*", "ux", "]", ",", "[", "t", "*", "ux", "*", "uz", "-", "s", "*", "uy", ",", "t", "*", "uy", "*", "uz", "+", "s", "*", "ux", ",", "t", "*", "uz", "*", "uz", "+", "c", "]", "]", ")", "return", "res" ]
Convert rotation from axis and angle to matrix representation
[ "Convert", "rotation", "from", "axis", "and", "angle", "to", "matrix", "representation" ]
f056a3da7ed3d7cd43edb56a38903cfa146e4b24
https://github.com/aglie/meerkat/blob/f056a3da7ed3d7cd43edb56a38903cfa146e4b24/meerkat/det2lab_xds.py#L4-L26
train
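A quick sanity check of the Rodrigues construction, assuming rotvec2mat from the record above is in scope: rotating the x unit vector by +90 degrees about z yields the y unit vector.

import numpy as np

R = rotvec2mat(np.array([0.0, 0.0, 1.0]), np.pi / 2)
assert np.allclose(R @ np.array([1.0, 0.0, 0.0]), [0.0, 1.0, 0.0])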
aglie/meerkat
meerkat/det2lab_xds.py
det2lab_xds
def det2lab_xds( pixels_coord, frame_number, starting_frame, starting_angle, oscillation_angle, rotation_axis, wavelength, wavevector, NX, NY, pixelsize_x, pixelsize_y, distance_to_detector, x_center, y_center, detector_x, detector_y, detector_normal, **kwargs): """Converts pixel coordinates from the frame into q-vectors""" array_shape = (1, 3) if detector_x.shape == array_shape: detector_x = detector_x.T detector_y = detector_y.T detector_normal = detector_normal.T if wavevector.shape == array_shape: wavevector = wavevector.T if rotation_axis.shape == array_shape: rotation_axis = rotation_axis.T xmm = (pixels_coord[:, [0]] - x_center) * pixelsize_x ymm = (pixels_coord[:, [1]] - y_center) * pixelsize_y # find scattering vector of each pixel scattering_vector_mm = np.outer(xmm, detector_x) + \ np.outer(ymm, detector_y) + \ distance_to_detector * np.outer(np.ones(shape=xmm.shape), detector_normal) scattering_vector_mm = scattering_vector_mm.T phi = (frame_number - starting_frame) * oscillation_angle + \ starting_angle # calculating norm for each column norms = np.sum(scattering_vector_mm ** 2., axis=0) ** (1. / 2) # dividing scattering vector by its own norm unit_scattering_vector = scattering_vector_mm / norms # subtracting incident beam vector h = unit_scattering_vector / wavelength - \ np.tile(wavevector, (unit_scattering_vector.shape[1], 1)).T # rotating if phi.size == 1: h = np.dot(rotvec2mat(rotation_axis.T, -2 * np.pi * phi / 360), h) else: for i in range(phi.size): h[:, [i]] = np.dot( rotvec2mat(rotation_axis.T, -2 * np.pi * phi[i] / 360), h[:, [i]]) return h, scattering_vector_mm, unit_scattering_vector
python
def det2lab_xds( pixels_coord, frame_number, starting_frame, starting_angle, oscillation_angle, rotation_axis, wavelength, wavevector, NX, NY, pixelsize_x, pixelsize_y, distance_to_detector, x_center, y_center, detector_x, detector_y, detector_normal, **kwargs): """Converts pixel coordinates from the frame into q-vectors""" array_shape = (1, 3) if detector_x.shape == array_shape: detector_x = detector_x.T detector_y = detector_y.T detector_normal = detector_normal.T if wavevector.shape == array_shape: wavevector = wavevector.T if rotation_axis.shape == array_shape: rotation_axis = rotation_axis.T xmm = (pixels_coord[:, [0]] - x_center) * pixelsize_x ymm = (pixels_coord[:, [1]] - y_center) * pixelsize_y # find scattering vector of each pixel scattering_vector_mm = np.outer(xmm, detector_x) + \ np.outer(ymm, detector_y) + \ distance_to_detector * np.outer(np.ones(shape=xmm.shape), detector_normal) scattering_vector_mm = scattering_vector_mm.T phi = (frame_number - starting_frame) * oscillation_angle + \ starting_angle # calculating norm for each column norms = np.sum(scattering_vector_mm ** 2., axis=0) ** (1. / 2) # dividing scattering vector by its own norm unit_scattering_vector = scattering_vector_mm / norms # subtracting incident beam vector h = unit_scattering_vector / wavelength - \ np.tile(wavevector, (unit_scattering_vector.shape[1], 1)).T # rotating if phi.size == 1: h = np.dot(rotvec2mat(rotation_axis.T, -2 * np.pi * phi / 360), h) else: for i in range(phi.size): h[:, [i]] = np.dot( rotvec2mat(rotation_axis.T, -2 * np.pi * phi[i] / 360), h[:, [i]]) return h, scattering_vector_mm, unit_scattering_vector
[ "def", "det2lab_xds", "(", "pixels_coord", ",", "frame_number", ",", "starting_frame", ",", "starting_angle", ",", "oscillation_angle", ",", "rotation_axis", ",", "wavelength", ",", "wavevector", ",", "NX", ",", "NY", ",", "pixelsize_x", ",", "pixelsize_y", ",", "distance_to_detector", ",", "x_center", ",", "y_center", ",", "detector_x", ",", "detector_y", ",", "detector_normal", ",", "*", "*", "kwargs", ")", ":", "array_shape", "=", "(", "1", ",", "3", ")", "if", "detector_x", ".", "shape", "==", "array_shape", ":", "detector_x", "=", "detector_x", ".", "T", "detector_y", "=", "detector_y", ".", "T", "detector_normal", "=", "detector_normal", ".", "T", "if", "wavevector", ".", "shape", "==", "array_shape", ":", "wavevector", "=", "wavevector", ".", "T", "if", "rotation_axis", ".", "shape", "==", "array_shape", ":", "rotation_axis", "=", "rotation_axis", ".", "T", "xmm", "=", "(", "pixels_coord", "[", ":", ",", "[", "0", "]", "]", "-", "x_center", ")", "*", "pixelsize_x", "ymm", "=", "(", "pixels_coord", "[", ":", ",", "[", "1", "]", "]", "-", "y_center", ")", "*", "pixelsize_y", "# find scattering vector of each pixel", "scattering_vector_mm", "=", "np", ".", "outer", "(", "xmm", ",", "detector_x", ")", "+", "np", ".", "outer", "(", "ymm", ",", "detector_y", ")", "+", "distance_to_detector", "*", "np", ".", "outer", "(", "np", ".", "ones", "(", "shape", "=", "xmm", ".", "shape", ")", ",", "detector_normal", ")", "scattering_vector_mm", "=", "scattering_vector_mm", ".", "T", "phi", "=", "(", "frame_number", "-", "starting_frame", ")", "*", "oscillation_angle", "+", "starting_angle", "# calculating norm for each column", "norms", "=", "np", ".", "sum", "(", "scattering_vector_mm", "**", "2.", ",", "axis", "=", "0", ")", "**", "(", "1.", "/", "2", ")", "#deviding scattering vector by its own norm", "unit_scattering_vector", "=", "scattering_vector_mm", "/", "norms", "#subtracting incident beam vector", "h", "=", "unit_scattering_vector", "/", "wavelength", "-", "np", ".", "tile", "(", "wavevector", ",", "(", "unit_scattering_vector", ".", "shape", "[", "1", "]", ",", "1", ")", ")", ".", "T", "#rotating", "if", "phi", ".", "size", "==", "1", ":", "h", "=", "np", ".", "dot", "(", "rotvec2mat", "(", "rotation_axis", ".", "T", ",", "-", "2", "*", "np", ".", "pi", "*", "phi", "/", "360", ")", ",", "h", ")", "else", ":", "for", "i", "in", "range", "(", "phi", ".", "size", ")", ":", "h", "[", ":", ",", "[", "i", "]", "]", "=", "np", ".", "dot", "(", "rotvec2mat", "(", "rotation_axis", ".", "T", ",", "-", "2", "*", "np", ".", "pi", "*", "phi", "[", "i", "]", "/", "360", ")", ",", "h", "[", ":", ",", "[", "i", "]", "]", ")", "return", "h", ",", "scattering_vector_mm", ",", "unit_scattering_vector" ]
Converts pixel coordinates from the frame into q-vectors
[ "Converts", "pixel", "coordinates", "from", "the", "frame", "into", "q", "-", "vectors" ]
f056a3da7ed3d7cd43edb56a38903cfa146e4b24
https://github.com/aglie/meerkat/blob/f056a3da7ed3d7cd43edb56a38903cfa146e4b24/meerkat/det2lab_xds.py#L29-L75
train
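The rotation angle phi is in degrees, and the factor -2*pi*phi/360 used above is just a sign-flipped degree-to-radian conversion:

import numpy as np

phi = 90.0
assert np.isclose(-2 * np.pi * phi / 360, -np.deg2rad(phi))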
KnightConan/sspdatatables
src/sspdatatables/datatables.py
DataTables.get_query_dict
def get_query_dict(self, **kwargs): """ function to generate a filter dictionary, in which each key is the keyword used in the Django filter function (in string form) and each value is the searched value. :param kwargs: dict: query dict sent by data tables package :return: dict: filtering dictionary """ total_cols = ensure(int, kwargs.get('total_cols', [0])[0], 0) mapping = self.mapping filter_dict = defaultdict(dict) # set up the starter, since sometimes we start the enumeration from '1' starter = mapping.keys()[0] for i in range(starter, total_cols): key = 'columns[{index}]'.format(index=i) if kwargs.get(key + '[searchable]', [0])[0] != 'true': continue search_value = kwargs.get(key + '[search][value]', [''])[0].strip() if not search_value: continue enum_item = mapping.from_key(i) filter_obj = enum_item.extra if type(filter_obj) is tuple and len(filter_obj) == 2: filter_func, filter_key = filter_obj filter_dict[filter_func][filter_key] = search_value elif type(filter_obj) is str: filter_dict['filter'][filter_obj] = search_value else: raise ValueError("Invalid filter key.") return filter_dict
python
def get_query_dict(self, **kwargs): """ function to generate a filter dictionary, in which each key is the keyword used in the Django filter function (in string form) and each value is the searched value. :param kwargs: dict: query dict sent by data tables package :return: dict: filtering dictionary """ total_cols = ensure(int, kwargs.get('total_cols', [0])[0], 0) mapping = self.mapping filter_dict = defaultdict(dict) # set up the starter, since sometimes we start the enumeration from '1' starter = mapping.keys()[0] for i in range(starter, total_cols): key = 'columns[{index}]'.format(index=i) if kwargs.get(key + '[searchable]', [0])[0] != 'true': continue search_value = kwargs.get(key + '[search][value]', [''])[0].strip() if not search_value: continue enum_item = mapping.from_key(i) filter_obj = enum_item.extra if type(filter_obj) is tuple and len(filter_obj) == 2: filter_func, filter_key = filter_obj filter_dict[filter_func][filter_key] = search_value elif type(filter_obj) is str: filter_dict['filter'][filter_obj] = search_value else: raise ValueError("Invalid filter key.") return filter_dict
[ "def", "get_query_dict", "(", "self", ",", "*", "*", "kwargs", ")", ":", "total_cols", "=", "ensure", "(", "int", ",", "kwargs", ".", "get", "(", "'total_cols'", ",", "[", "0", "]", ")", "[", "0", "]", ",", "0", ")", "mapping", "=", "self", ".", "mapping", "filter_dict", "=", "defaultdict", "(", "dict", ")", "# set up the starter, since sometimes we start the enumeration from '1'", "starter", "=", "mapping", ".", "keys", "(", ")", "[", "0", "]", "for", "i", "in", "range", "(", "starter", ",", "total_cols", ")", ":", "key", "=", "'columns[{index}]'", ".", "format", "(", "index", "=", "i", ")", "if", "kwargs", ".", "get", "(", "key", "+", "'[searchable]'", ",", "[", "0", "]", ")", "[", "0", "]", "!=", "'true'", ":", "continue", "search_value", "=", "kwargs", ".", "get", "(", "key", "+", "'[search][value]'", ",", "[", "''", "]", ")", "[", "0", "]", ".", "strip", "(", ")", "if", "not", "search_value", ":", "continue", "enum_item", "=", "mapping", ".", "from_key", "(", "i", ")", "filter_obj", "=", "enum_item", ".", "extra", "if", "type", "(", "filter_obj", ")", "is", "tuple", "and", "len", "(", "filter_obj", ")", "==", "2", ":", "filter_func", ",", "filter_key", "=", "filter_obj", "filter_dict", "[", "filter_func", "]", "[", "filter_key", "]", "=", "search_value", "elif", "type", "(", "filter_obj", ")", "is", "str", ":", "filter_dict", "[", "'filter'", "]", "[", "filter_obj", "]", "=", "search_value", "else", ":", "raise", "ValueError", "(", "\"Invalid filter key.\"", ")", "return", "filter_dict" ]
function to generate a filter dictionary, in which each key is the keyword used in the Django filter function (in string form) and each value is the searched value. :param kwargs: dict: query dict sent by data tables package :return: dict: filtering dictionary
[ "function", "to", "generate", "a", "filter", "dictionary", "in", "which", "each", "key", "is", "the", "keyword", "used", "in", "the", "Django", "filter", "function", "(", "in", "string", "form", ")", "and", "each", "value", "is", "the", "searched", "value", "." ]
1179a11358734e5e472e5eee703e8d34fa49e9bf
https://github.com/KnightConan/sspdatatables/blob/1179a11358734e5e472e5eee703e8d34fa49e9bf/src/sspdatatables/datatables.py#L199-L230
train
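DataTables sends the per-column search state as flat keys with one-element list values; a self-contained sketch of the parsing pattern (the ORM keyword name__icontains stands in for the enum mapping, which is hypothetical here):

from collections import defaultdict

kwargs = {
    'columns[0][searchable]': ['true'],
    'columns[0][search][value]': [' alice '],
    'columns[1][searchable]': ['false'],
}
filter_dict = defaultdict(dict)
key = 'columns[0]'
if kwargs.get(key + '[searchable]', [0])[0] == 'true':
    value = kwargs.get(key + '[search][value]', [''])[0].strip()
    if value:
        filter_dict['filter']['name__icontains'] = value
assert dict(filter_dict) == {'filter': {'name__icontains': 'alice'}}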
KnightConan/sspdatatables
src/sspdatatables/datatables.py
DataTables.get_order_key
def get_order_key(self, **kwargs): """ function to get the order key to apply to the filtered queryset :param kwargs: dict: query dict sent by data tables package :return: str: order key, which can be used directly in queryset's order_by function """ # get the mapping enumeration class from Meta class mapping = self.mapping # use the first element in the enumeration as default order column order_column = kwargs.get('order[0][column]', [mapping.keys()[0]])[0] order_column = ensure(int, order_column, mapping.keys()[0]) order = kwargs.get('order[0][dir]', ['asc'])[0] order_key = mapping.from_key(order_column).label # django orm '-' -> desc if order == 'desc': order_key = '-' + order_key return order_key
python
def get_order_key(self, **kwargs): """ function to get the order key to apply to the filtered queryset :param kwargs: dict: query dict sent by data tables package :return: str: order key, which can be used directly in queryset's order_by function """ # get the mapping enumeration class from Meta class mapping = self.mapping # use the first element in the enumeration as default order column order_column = kwargs.get('order[0][column]', [mapping.keys()[0]])[0] order_column = ensure(int, order_column, mapping.keys()[0]) order = kwargs.get('order[0][dir]', ['asc'])[0] order_key = mapping.from_key(order_column).label # django orm '-' -> desc if order == 'desc': order_key = '-' + order_key return order_key
[ "def", "get_order_key", "(", "self", ",", "*", "*", "kwargs", ")", ":", "# get the mapping enumeration class from Meta class", "mapping", "=", "self", ".", "mapping", "# use the first element in the enumeration as default order column", "order_column", "=", "kwargs", ".", "get", "(", "'order[0][column]'", ",", "[", "mapping", ".", "keys", "(", ")", "[", "0", "]", "]", ")", "[", "0", "]", "order_column", "=", "ensure", "(", "int", ",", "order_column", ",", "mapping", ".", "keys", "(", ")", "[", "0", "]", ")", "order", "=", "kwargs", ".", "get", "(", "'order[0][dir]'", ",", "[", "'asc'", "]", ")", "[", "0", "]", "order_key", "=", "mapping", ".", "from_key", "(", "order_column", ")", ".", "label", "# django orm '-' -> desc", "if", "order", "==", "'desc'", ":", "order_key", "=", "'-'", "+", "order_key", "return", "order_key" ]
function to get the order key to apply to the filtered queryset :param kwargs: dict: query dict sent by data tables package :return: str: order key, which can be used directly in queryset's order_by function
[ "function", "to", "get", "the", "order", "key", "to", "apply", "to", "the", "filtered", "queryset" ]
1179a11358734e5e472e5eee703e8d34fa49e9bf
https://github.com/KnightConan/sspdatatables/blob/1179a11358734e5e472e5eee703e8d34fa49e9bf/src/sspdatatables/datatables.py#L232-L252
train
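The Django ORM convention that a leading '-' means descending is the whole trick; a sketch with a hypothetical column mapping:

column_labels = {0: 'id', 1: 'name', 2: 'created_at'}  # hypothetical enum mapping
order_column, direction = 2, 'desc'  # from order[0][column] and order[0][dir]
order_key = column_labels[order_column]
if direction == 'desc':
    order_key = '-' + order_key
assert order_key == '-created_at'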
KnightConan/sspdatatables
src/sspdatatables/datatables.py
DataTables.filtering
def filtering(queryset, query_dict): """ function to apply the pre search condition to the queryset to narrow down the queryset's size :param queryset: Django Queryset: queryset of all objects :param query_dict: dict: contains select_related, filter and other customized filter functions :return: queryset: result after applying the pre search condition dict """ # apply pre_search_condition for key, value in query_dict.items(): assert hasattr(queryset, key), "Parameter 'query_dict' contains"\ " non-existent attribute." if isinstance(value, list): queryset = getattr(queryset, key)(*value) elif isinstance(value, dict): queryset = getattr(queryset, key)(**value) else: queryset = getattr(queryset, key)(value) return queryset
python
def filtering(queryset, query_dict): """ function to apply the pre search condition to the queryset to narrow down the queryset's size :param queryset: Django Queryset: queryset of all objects :param query_dict: dict: contains select_related, filter and other customized filter functions :return: queryset: result after applying the pre search condition dict """ # apply pre_search_condition for key, value in query_dict.items(): assert hasattr(queryset, key), "Parameter 'query_dict' contains"\ " non-existent attribute." if isinstance(value, list): queryset = getattr(queryset, key)(*value) elif isinstance(value, dict): queryset = getattr(queryset, key)(**value) else: queryset = getattr(queryset, key)(value) return queryset
[ "def", "filtering", "(", "queryset", ",", "query_dict", ")", ":", "# apply pre_search_condition", "for", "key", ",", "value", "in", "query_dict", ".", "items", "(", ")", ":", "assert", "hasattr", "(", "queryset", ",", "key", ")", ",", "\"Parameter 'query_dict' contains\"", "\" non-existent attribute.\"", "if", "isinstance", "(", "value", ",", "list", ")", ":", "queryset", "=", "getattr", "(", "queryset", ",", "key", ")", "(", "*", "value", ")", "elif", "isinstance", "(", "value", ",", "dict", ")", ":", "queryset", "=", "getattr", "(", "queryset", ",", "key", ")", "(", "*", "*", "value", ")", "else", ":", "queryset", "=", "getattr", "(", "queryset", ",", "key", ")", "(", "value", ")", "return", "queryset" ]
function to apply the pre search condition to the queryset to narrow down the queryset's size :param queryset: Django Queryset: queryset of all objects :param query_dict: dict: contains select_related, filter and other customized filter functions :return: queryset: result after applying the pre search condition dict
[ "function", "to", "apply", "the", "pre", "search", "condition", "to", "the", "queryset", "to", "narrow", "down", "the", "queryset", "s", "size" ]
1179a11358734e5e472e5eee703e8d34fa49e9bf
https://github.com/KnightConan/sspdatatables/blob/1179a11358734e5e472e5eee703e8d34fa49e9bf/src/sspdatatables/datatables.py#L255-L275
train
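filtering() is getattr-driven method chaining; a mock queryset makes the dispatch visible without Django:

class FakeQuerySet:
    """Stand-in for a Django queryset that records chained calls."""
    def __init__(self, calls=()):
        self.calls = list(calls)

    def filter(self, *args, **kwargs):
        return FakeQuerySet(self.calls + [('filter', kwargs)])

    def select_related(self, *args, **kwargs):
        return FakeQuerySet(self.calls + [('select_related', args)])

query_dict = {'select_related': ['author'], 'filter': {'name__icontains': 'x'}}
qs = FakeQuerySet()
for key, value in query_dict.items():
    method = getattr(qs, key)
    # list values are splatted as positional args, dicts as keyword args
    qs = method(*value) if isinstance(value, list) else method(**value)
assert qs.calls == [('select_related', ('author',)),
                    ('filter', {'name__icontains': 'x'})]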
KnightConan/sspdatatables
src/sspdatatables/datatables.py
DataTables.slicing
def slicing(queryset, **kwargs): """ function to slice the queryset according to the display length :param queryset: Django Queryset: filtered and ordered queryset result :param kwargs: dict: query dict sent by data tables package :return: queryset: result after slicing """ # if the length is -1, we need to display all the records # otherwise, just slicing the queryset length = ensure(int, kwargs.get('length', [0])[0], 0) start = ensure(int, kwargs.get('start', [0])[0], 0) if length >= 0: queryset = queryset[start:start + length] return queryset
python
def slicing(queryset, **kwargs): """ function to slice the queryset according to the display length :param queryset: Django Queryset: filtered and ordered queryset result :param kwargs: dict: query dict sent by data tables package :return: queryset: result after slicing """ # if the length is -1, we need to display all the records # otherwise, just slicing the queryset length = ensure(int, kwargs.get('length', [0])[0], 0) start = ensure(int, kwargs.get('start', [0])[0], 0) if length >= 0: queryset = queryset[start:start + length] return queryset
[ "def", "slicing", "(", "queryset", ",", "*", "*", "kwargs", ")", ":", "# if the length is -1, we need to display all the records", "# otherwise, just slicing the queryset", "length", "=", "ensure", "(", "int", ",", "kwargs", ".", "get", "(", "'length'", ",", "[", "0", "]", ")", "[", "0", "]", ",", "0", ")", "start", "=", "ensure", "(", "int", ",", "kwargs", ".", "get", "(", "'start'", ",", "[", "0", "]", ")", "[", "0", "]", ",", "0", ")", "if", "length", ">=", "0", ":", "queryset", "=", "queryset", "[", "start", ":", "start", "+", "length", "]", "return", "queryset" ]
function to slice the queryset according to the display length :param queryset: Django Queryset: filtered and ordered queryset result :param kwargs: dict: query dict sent by data tables package :return: queryset: result after slicing
[ "function", "to", "slice", "the", "queryset", "according", "to", "the", "display", "length" ]
1179a11358734e5e472e5eee703e8d34fa49e9bf
https://github.com/KnightConan/sspdatatables/blob/1179a11358734e5e472e5eee703e8d34fa49e9bf/src/sspdatatables/datatables.py#L278-L292
train
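The slicing semantics match ordinary Python lists, with -1 standing for DataTables' "show all":

records = list(range(100))
start, length = 20, 10
page = records[start:start + length] if length >= 0 else records
assert page == list(range(20, 30))

length = -1  # DataTables sends -1 for "All"
page = records[start:start + length] if length >= 0 else records
assert len(page) == 100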
KnightConan/sspdatatables
src/sspdatatables/datatables.py
DataTables.query_by_args
def query_by_args(self, pre_search_condition=None, **kwargs): """ intends to process the queries sent by the data tables package in the frontend. The model class is taken from the serializer; get_query_dict returns a query dictionary, in which each key is a query keyword in str form and each value is the queried value. :param pre_search_condition: None/OrderedDict: dictionary containing filter conditions which should be processed before applying the filter dictionary from the user. None, if no pre_search_condition is provided. :param kwargs: QueryDict: contains query parameters :return: dict: contains the total record count, the queryset of the filtered instances and the size of this queryset """ if pre_search_condition and not isinstance(pre_search_condition, OrderedDict): raise TypeError( "Parameter 'pre_search_condition' must be an OrderedDict.") # extract requisite parameters from kwargs draw = ensure(int, kwargs.get('draw', [0])[0], 0) # just implement the get_query_dict function query_dict = self.get_query_dict(**kwargs) order_key = self.get_order_key(**kwargs) # get the model from the serializer parameter model_class = self.serializer.Meta.model # get the objects queryset = model_class.objects # apply the pre search condition if it exists if pre_search_condition: queryset = self.filtering(queryset, pre_search_condition) else: queryset = queryset.all() # number of the total records total = queryset.count() # if the query dict not empty, then apply the query dict if query_dict: queryset = self.filtering(queryset, query_dict) # number of the records after applying the query count = queryset.count() # order the queryset queryset = queryset.order_by(order_key) # slice the queryset queryset = self.slicing(queryset, **kwargs) return {'items': queryset, 'count': count, 'total': total, 'draw': draw}
python
def query_by_args(self, pre_search_condition=None, **kwargs):
        """
        intends to process the queries sent by the DataTables package in the
        frontend. get_query_dict is a function implemented by you that returns
        a query dictionary, in which the key is the query keyword in str form
        and the value is the queried value.

        :param pre_search_condition: None/OrderedDict: dictionary containing
            filter conditions which should be applied before the filter
            dictionary from the user. None, if no pre_search_condition is
            provided.
        :param kwargs: QueryDict: contains the query parameters
        :return: dict: contains the total number of records, the queryset of
            the filtered instances, and the size of this queryset
        """
        if pre_search_condition and not isinstance(pre_search_condition,
                                                   OrderedDict):
            raise TypeError(
                "Parameter 'pre_search_condition' must be an OrderedDict.")

        # extract requisite parameters from kwargs
        draw = ensure(int, kwargs.get('draw', [0])[0], 0)
        # just implement the get_query_dict function
        query_dict = self.get_query_dict(**kwargs)
        order_key = self.get_order_key(**kwargs)

        # get the model from the serializer parameter
        model_class = self.serializer.Meta.model
        # get the objects
        queryset = model_class.objects
        # apply the pre search condition if it exists
        if pre_search_condition:
            queryset = self.filtering(queryset, pre_search_condition)
        else:
            queryset = queryset.all()
        # number of the total records
        total = queryset.count()

        # if the query dict not empty, then apply the query dict
        if query_dict:
            queryset = self.filtering(queryset, query_dict)
        # number of the records after applying the query
        count = queryset.count()
        # order the queryset
        queryset = queryset.order_by(order_key)
        # slice the queryset
        queryset = self.slicing(queryset, **kwargs)
        return {'items': queryset, 'count': count, 'total': total,
                'draw': draw}
[ "def", "query_by_args", "(", "self", ",", "pre_search_condition", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "pre_search_condition", "and", "not", "isinstance", "(", "pre_search_condition", ",", "OrderedDict", ")", ":", "raise", "TypeError", "(", "\"Parameter 'pre_search_condition' must be an OrderedDict.\"", ")", "# extract requisite parameters from kwargs", "draw", "=", "ensure", "(", "int", ",", "kwargs", ".", "get", "(", "'draw'", ",", "[", "0", "]", ")", "[", "0", "]", ",", "0", ")", "# just implement the get_query_dict function", "query_dict", "=", "self", ".", "get_query_dict", "(", "*", "*", "kwargs", ")", "order_key", "=", "self", ".", "get_order_key", "(", "*", "*", "kwargs", ")", "# get the model from the serializer parameter", "model_class", "=", "self", ".", "serializer", ".", "Meta", ".", "model", "# get the objects", "queryset", "=", "model_class", ".", "objects", "# apply the pre search condition if it exists", "if", "pre_search_condition", ":", "queryset", "=", "self", ".", "filtering", "(", "queryset", ",", "pre_search_condition", ")", "else", ":", "queryset", "=", "queryset", ".", "all", "(", ")", "# number of the total records", "total", "=", "queryset", ".", "count", "(", ")", "# if the query dict not empty, then apply the query dict", "if", "query_dict", ":", "queryset", "=", "self", ".", "filtering", "(", "queryset", ",", "query_dict", ")", "# number of the records after applying the query", "count", "=", "queryset", ".", "count", "(", ")", "# order the queryset", "queryset", "=", "queryset", ".", "order_by", "(", "order_key", ")", "# slice the queryset", "queryset", "=", "self", ".", "slicing", "(", "queryset", ",", "*", "*", "kwargs", ")", "return", "{", "'items'", ":", "queryset", ",", "'count'", ":", "count", ",", "'total'", ":", "total", ",", "'draw'", ":", "draw", "}" ]
intends to process the queries sent by the DataTables package in the frontend. get_query_dict is a function implemented by you that returns a query dictionary, in which the key is the query keyword in str form and the value is the queried value. :param pre_search_condition: None/OrderedDict: dictionary containing filter conditions which should be applied before the filter dictionary from the user. None, if no pre_search_condition is provided. :param kwargs: QueryDict: contains the query parameters :return: dict: contains the total number of records, the queryset of the filtered instances, and the size of this queryset
[ "intends", "to", "process", "the", "queries", "sent", "by", "data", "tables", "package", "in", "frontend", ".", "The", "model_cls", "indicates", "the", "model", "class", "get_query_dict", "is", "a", "function", "implemented", "by", "you", "such", "that", "it", "can", "return", "a", "query", "dictionary", "in", "which", "the", "key", "is", "the", "query", "keyword", "in", "str", "form", "and", "the", "value", "is", "the", "queried", "value" ]
1179a11358734e5e472e5eee703e8d34fa49e9bf
https://github.com/KnightConan/sspdatatables/blob/1179a11358734e5e472e5eee703e8d34fa49e9bf/src/sspdatatables/datatables.py#L294-L345
train
KnightConan/sspdatatables
src/sspdatatables/datatables.py
DataTables.process
def process(self, pre_search_condition=None, **kwargs):
        """
        function to be called from outside to get the footer search condition,
        apply the search in the DB and render the serialized result.

        :param pre_search_condition: None/OrderedDict: pre-search condition to
            be applied before the one obtained from the footer
        :param kwargs: dict: search parameters obtained from the footer
        :return: dict: contains the filtered data, the total number of
            records, the number of filtered records and the draw number.
        """
        records = self.query_by_args(pre_search_condition=pre_search_condition,
                                     **kwargs)
        serializer = self.serializer(records['items'], many=True)
        result = {
            'data': serializer.data,
            'draw': records['draw'],
            'recordsTotal': records['total'],
            'recordsFiltered': records['count'],
        }
        return result
python
def process(self, pre_search_condition=None, **kwargs):
        """
        function to be called from outside to get the footer search condition,
        apply the search in the DB and render the serialized result.

        :param pre_search_condition: None/OrderedDict: pre-search condition to
            be applied before the one obtained from the footer
        :param kwargs: dict: search parameters obtained from the footer
        :return: dict: contains the filtered data, the total number of
            records, the number of filtered records and the draw number.
        """
        records = self.query_by_args(pre_search_condition=pre_search_condition,
                                     **kwargs)
        serializer = self.serializer(records['items'], many=True)
        result = {
            'data': serializer.data,
            'draw': records['draw'],
            'recordsTotal': records['total'],
            'recordsFiltered': records['count'],
        }
        return result
[ "def", "process", "(", "self", ",", "pre_search_condition", "=", "None", ",", "*", "*", "kwargs", ")", ":", "records", "=", "self", ".", "query_by_args", "(", "pre_search_condition", "=", "pre_search_condition", ",", "*", "*", "kwargs", ")", "serializer", "=", "self", ".", "serializer", "(", "records", "[", "'items'", "]", ",", "many", "=", "True", ")", "result", "=", "{", "'data'", ":", "serializer", ".", "data", ",", "'draw'", ":", "records", "[", "'draw'", "]", ",", "'recordsTotal'", ":", "records", "[", "'total'", "]", ",", "'recordsFiltered'", ":", "records", "[", "'count'", "]", ",", "}", "return", "result" ]
function to be called from outside to get the footer search condition, apply the search in the DB and render the serialized result. :param pre_search_condition: None/OrderedDict: pre-search condition to be applied before the one obtained from the footer :param kwargs: dict: search parameters obtained from the footer :return: dict: contains the filtered data, the total number of records, the number of filtered records and the draw number.
[ "function", "to", "be", "called", "outside", "to", "get", "the", "footer", "search", "condition", "apply", "the", "search", "in", "DB", "and", "render", "the", "serialized", "result", "." ]
1179a11358734e5e472e5eee703e8d34fa49e9bf
https://github.com/KnightConan/sspdatatables/blob/1179a11358734e5e472e5eee703e8d34fa49e9bf/src/sspdatatables/datatables.py#L347-L367
train
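A sketch of how `process` might be wired into a Django view. This is an assumption-laden example: `ArticleDataTables` is a hypothetical subclass holding a serializer, and DataTables is assumed to POST its `draw`/`start`/`length`/ordering parameters; `QueryDict.lists()` keeps each value as a list, matching the `kwargs.get(...)[0]` access pattern used above.

from django.http import JsonResponse

def article_table(request):
    # dict(QueryDict.lists()) preserves list-valued parameters
    params = dict(request.POST.lists())
    dt = ArticleDataTables()  # hypothetical DataTables subclass
    return JsonResponse(dt.process(**params))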
reorx/torext
torext/sql.py
MutationDict.coerce
def coerce(cls, key, value):
        """Convert plain dictionary to MutationDict"""
        self = MutationDict((k, MutationObj.coerce(key, v)) for (k, v) in value.items())
        self._key = key
        return self
python
def coerce(cls, key, value):
        """Convert plain dictionary to MutationDict"""
        self = MutationDict((k, MutationObj.coerce(key, v)) for (k, v) in value.items())
        self._key = key
        return self
[ "def", "coerce", "(", "cls", ",", "key", ",", "value", ")", ":", "self", "=", "MutationDict", "(", "(", "k", ",", "MutationObj", ".", "coerce", "(", "key", ",", "v", ")", ")", "for", "(", "k", ",", "v", ")", "in", "value", ".", "items", "(", ")", ")", "self", ".", "_key", "=", "key", "return", "self" ]
Convert plain dictionary to MutationDict
[ "Convert", "plain", "dictionary", "to", "MutationDict" ]
84c4300ebc7fab0dbd11cf8b020bc7d4d1570171
https://github.com/reorx/torext/blob/84c4300ebc7fab0dbd11cf8b020bc7d4d1570171/torext/sql.py#L361-L365
train
reorx/torext
torext/sql.py
MutationList.coerce
def coerce(cls, key, value): """Convert plain list to MutationList""" self = MutationList((MutationObj.coerce(key, v) for v in value)) self._key = key return self
python
def coerce(cls, key, value): """Convert plain list to MutationList""" self = MutationList((MutationObj.coerce(key, v) for v in value)) self._key = key return self
[ "def", "coerce", "(", "cls", ",", "key", ",", "value", ")", ":", "self", "=", "MutationList", "(", "(", "MutationObj", ".", "coerce", "(", "key", ",", "v", ")", "for", "v", "in", "value", ")", ")", "self", ".", "_key", "=", "key", "return", "self" ]
Convert plain list to MutationList
[ "Convert", "plain", "list", "to", "MutationList" ]
84c4300ebc7fab0dbd11cf8b020bc7d4d1570171
https://github.com/reorx/torext/blob/84c4300ebc7fab0dbd11cf8b020bc7d4d1570171/torext/sql.py#L378-L382
train
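Both `coerce` hooks above follow SQLAlchemy's Mutable extension protocol: given a plain value loaded from the database, return a tracked wrapper that remembers the column key so in-place changes can be flagged. A stripped-down, runnable sketch of that pattern (the real classes additionally recurse via `MutationObj.coerce` and emit SQLAlchemy change events, which this toy omits):

class TrackedDict(dict):
    """Toy analogue of MutationDict: wraps a plain dict and records
    the attribute key it was loaded under."""
    @classmethod
    def coerce(cls, key, value):
        self = cls((k, v) for (k, v) in value.items())
        self._key = key
        return self

td = TrackedDict.coerce('settings', {'theme': 'dark'})
print(td, td._key)   # {'theme': 'dark'} settings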
althonos/moclo
moclo/moclo/core/vectors.py
AbstractVector.structure
def structure(cls): # type: () -> Text """Get the vector structure, as a DNA regex pattern. Warning: If overloading this method, the returned pattern must include 3 capture groups to capture the following features: 1. The downstream (3') overhang sequence 2. The vector placeholder sequence 3. The upstream (5') overhang sequence """ downstream = cls.cutter.elucidate() upstream = str(Seq(downstream).reverse_complement()) return "".join( [ upstream.replace("^", ")(").replace("_", "("), "N*", downstream.replace("^", ")(").replace("_", ")"), ] )
python
def structure(cls): # type: () -> Text """Get the vector structure, as a DNA regex pattern. Warning: If overloading this method, the returned pattern must include 3 capture groups to capture the following features: 1. The downstream (3') overhang sequence 2. The vector placeholder sequence 3. The upstream (5') overhang sequence """ downstream = cls.cutter.elucidate() upstream = str(Seq(downstream).reverse_complement()) return "".join( [ upstream.replace("^", ")(").replace("_", "("), "N*", downstream.replace("^", ")(").replace("_", ")"), ] )
[ "def", "structure", "(", "cls", ")", ":", "# type: () -> Text", "downstream", "=", "cls", ".", "cutter", ".", "elucidate", "(", ")", "upstream", "=", "str", "(", "Seq", "(", "downstream", ")", ".", "reverse_complement", "(", ")", ")", "return", "\"\"", ".", "join", "(", "[", "upstream", ".", "replace", "(", "\"^\"", ",", "\")(\"", ")", ".", "replace", "(", "\"_\"", ",", "\"(\"", ")", ",", "\"N*\"", ",", "downstream", ".", "replace", "(", "\"^\"", ",", "\")(\"", ")", ".", "replace", "(", "\"_\"", ",", "\")\"", ")", ",", "]", ")" ]
Get the vector structure, as a DNA regex pattern. Warning: If overloading this method, the returned pattern must include 3 capture groups to capture the following features: 1. The downstream (3') overhang sequence 2. The vector placeholder sequence 3. The upstream (5') overhang sequence
[ "Get", "the", "vector", "structure", "as", "a", "DNA", "regex", "pattern", "." ]
28a03748df8a2fa43f0c0c8098ca64d11559434e
https://github.com/althonos/moclo/blob/28a03748df8a2fa43f0c0c8098ca64d11559434e/moclo/moclo/core/vectors.py#L39-L60
train
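To make the string surgery in `structure` concrete, here is the same transformation applied by hand to a BsaI-style elucidate string. Hedged: the exact `elucidate()` output shown is an assumption; `^` marks the cut on the sense strand and `_` on the antisense strand, and a plain translation table stands in for `Seq.reverse_complement`.

COMPLEMENT = str.maketrans("ACGTN^_", "TGCAN^_")

def revcomp(site):
    # naive reverse complement that carries the ^/_ cut markers along
    return site.translate(COMPLEMENT)[::-1]

downstream = "GGTCTCN^NNNN_N"   # assumed BsaI elucidate() output
upstream = revcomp(downstream)   # "N_NNNN^NGAGACC"

pattern = "".join([
    upstream.replace("^", ")(").replace("_", "("),
    "N*",
    downstream.replace("^", ")(").replace("_", ")"),
])
print(pattern)   # N(NNNN)(NGAGACCN*GGTCTCN)(NNNN)N
# three capture groups, as the docstring requires:
# (overhang)(placeholder spanning the N* backbone)(overhang)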
althonos/moclo
moclo/moclo/core/vectors.py
AbstractVector.placeholder_sequence
def placeholder_sequence(self): # type: () -> SeqRecord """Get the placeholder sequence in the vector. The placeholder sequence is replaced by the concatenation of modules during the assembly. It often contains a dropout sequence, such as a GFP expression cassette that can be used to measure the progress of the assembly. """ if self.cutter.is_3overhang(): return self._match.group(2) + self.overhang_end() else: return self.overhang_start() + self._match.group(2)
python
def placeholder_sequence(self): # type: () -> SeqRecord """Get the placeholder sequence in the vector. The placeholder sequence is replaced by the concatenation of modules during the assembly. It often contains a dropout sequence, such as a GFP expression cassette that can be used to measure the progress of the assembly. """ if self.cutter.is_3overhang(): return self._match.group(2) + self.overhang_end() else: return self.overhang_start() + self._match.group(2)
[ "def", "placeholder_sequence", "(", "self", ")", ":", "# type: () -> SeqRecord", "if", "self", ".", "cutter", ".", "is_3overhang", "(", ")", ":", "return", "self", ".", "_match", ".", "group", "(", "2", ")", "+", "self", ".", "overhang_end", "(", ")", "else", ":", "return", "self", ".", "overhang_start", "(", ")", "+", "self", ".", "_match", ".", "group", "(", "2", ")" ]
Get the placeholder sequence in the vector. The placeholder sequence is replaced by the concatenation of modules during the assembly. It often contains a dropout sequence, such as a GFP expression cassette that can be used to measure the progress of the assembly.
[ "Get", "the", "placeholder", "sequence", "in", "the", "vector", "." ]
28a03748df8a2fa43f0c0c8098ca64d11559434e
https://github.com/althonos/moclo/blob/28a03748df8a2fa43f0c0c8098ca64d11559434e/moclo/moclo/core/vectors.py#L74-L86
train
althonos/moclo
moclo/moclo/core/vectors.py
AbstractVector.target_sequence
def target_sequence(self):
        # type: () -> SeqRecord
        """Get the target sequence in the vector.

        The target sequence is the part of the plasmid that is not
        discarded during the assembly (everything except the placeholder
        sequence).

        """
        if self.cutter.is_3overhang():
            start, end = self._match.span(2)[0], self._match.span(3)[1]
        else:
            start, end = self._match.span(1)[0], self._match.span(2)[1]
        return add_as_source(self.record, (self.record << start)[end - start :])
python
def target_sequence(self):
        # type: () -> SeqRecord
        """Get the target sequence in the vector.

        The target sequence is the part of the plasmid that is not
        discarded during the assembly (everything except the placeholder
        sequence).

        """
        if self.cutter.is_3overhang():
            start, end = self._match.span(2)[0], self._match.span(3)[1]
        else:
            start, end = self._match.span(1)[0], self._match.span(2)[1]
        return add_as_source(self.record, (self.record << start)[end - start :])
[ "def", "target_sequence", "(", "self", ")", ":", "# type: () -> SeqRecord", "if", "self", ".", "cutter", ".", "is_3overhang", "(", ")", ":", "start", ",", "end", "=", "self", ".", "_match", ".", "span", "(", "2", ")", "[", "0", "]", ",", "self", ".", "_match", ".", "span", "(", "3", ")", "[", "1", "]", "else", ":", "start", ",", "end", "=", "self", ".", "_match", ".", "span", "(", "1", ")", "[", "0", "]", ",", "self", ".", "_match", ".", "span", "(", "2", ")", "[", "1", "]", "return", "add_as_source", "(", "self", ".", "record", ",", "(", "self", ".", "record", "<<", "start", ")", "[", "end", "-", "start", ":", "]", ")" ]
Get the target sequence in the vector. The target sequence is the part of the plasmid that is not discarded during the assembly (everything except the placeholder sequence).
[ "Get", "the", "target", "sequence", "in", "the", "vector", "." ]
28a03748df8a2fa43f0c0c8098ca64d11559434e
https://github.com/althonos/moclo/blob/28a03748df8a2fa43f0c0c8098ca64d11559434e/moclo/moclo/core/vectors.py#L88-L99
train
althonos/moclo
moclo/moclo/core/vectors.py
AbstractVector.assemble
def assemble(self, module, *modules, **kwargs):
        # type: (AbstractModule, *AbstractModule, **Any) -> SeqRecord
        """Assemble the provided modules into the vector.

        Arguments:
            module (`~moclo.base.modules.AbstractModule`): a module to insert
                in the vector.
            modules (`~moclo.base.modules.AbstractModule`, optional):
                additional modules to insert in the vector. The order of the
                parameters is not important, since modules will be sorted by
                their start overhang in the function.

        Returns:
            `~Bio.SeqRecord.SeqRecord`: the assembled sequence with
            sequence annotations inherited from the vector and the modules.

        Raises:
            `~moclo.errors.DuplicateModules`: when two different modules
                share the same start overhang, leading to possibly
                non-deterministic constructs.
            `~moclo.errors.MissingModule`: when a module has an end overhang
                that is not shared by any other module, leading to a partial
                construct only.
            `~moclo.errors.InvalidSequence`: when one of the modules does not
                match the required module structure (missing site, wrong
                overhang, etc.).
            `~moclo.errors.UnusedModules`: when some modules were not used
                during the assembly (mostly caused by duplicate parts).

        """
        mgr = AssemblyManager(
            vector=self,
            modules=[module] + list(modules),
            name=kwargs.get("name", "assembly"),
            id_=kwargs.get("id", "assembly"),
        )
        return mgr.assemble()
python
def assemble(self, module, *modules, **kwargs):
        # type: (AbstractModule, *AbstractModule, **Any) -> SeqRecord
        """Assemble the provided modules into the vector.

        Arguments:
            module (`~moclo.base.modules.AbstractModule`): a module to insert
                in the vector.
            modules (`~moclo.base.modules.AbstractModule`, optional):
                additional modules to insert in the vector. The order of the
                parameters is not important, since modules will be sorted by
                their start overhang in the function.

        Returns:
            `~Bio.SeqRecord.SeqRecord`: the assembled sequence with
            sequence annotations inherited from the vector and the modules.

        Raises:
            `~moclo.errors.DuplicateModules`: when two different modules
                share the same start overhang, leading to possibly
                non-deterministic constructs.
            `~moclo.errors.MissingModule`: when a module has an end overhang
                that is not shared by any other module, leading to a partial
                construct only.
            `~moclo.errors.InvalidSequence`: when one of the modules does not
                match the required module structure (missing site, wrong
                overhang, etc.).
            `~moclo.errors.UnusedModules`: when some modules were not used
                during the assembly (mostly caused by duplicate parts).

        """
        mgr = AssemblyManager(
            vector=self,
            modules=[module] + list(modules),
            name=kwargs.get("name", "assembly"),
            id_=kwargs.get("id", "assembly"),
        )
        return mgr.assemble()
[ "def", "assemble", "(", "self", ",", "module", ",", "*", "modules", ",", "*", "*", "kwargs", ")", ":", "# type: (AbstractModule, *AbstractModule, **Any) -> SeqRecord", "mgr", "=", "AssemblyManager", "(", "vector", "=", "self", ",", "modules", "=", "[", "module", "]", "+", "list", "(", "modules", ")", ",", "name", "=", "kwargs", ".", "get", "(", "\"name\"", ",", "\"assembly\"", ")", ",", "id_", "=", "kwargs", ".", "get", "(", "\"id\"", ",", "\"assembly\"", ")", ",", ")", "return", "mgr", ".", "assemble", "(", ")" ]
Assemble the provided modules into the vector. Arguments: module (`~moclo.base.modules.AbstractModule`): a module to insert in the vector. modules (`~moclo.base.modules.AbstractModule`, optional): additional modules to insert in the vector. The order of the parameters is not important, since modules will be sorted by their start overhang in the function. Returns: `~Bio.SeqRecord.SeqRecord`: the assembled sequence with sequence annotations inherited from the vector and the modules. Raises: `~moclo.errors.DuplicateModules`: when two different modules share the same start overhang, leading to possibly non-deterministic constructs. `~moclo.errors.MissingModule`: when a module has an end overhang that is not shared by any other module, leading to a partial construct only. `~moclo.errors.InvalidSequence`: when one of the modules does not match the required module structure (missing site, wrong overhang, etc.). `~moclo.errors.UnusedModules`: when some modules were not used during the assembly (mostly caused by duplicate parts).
[ "Assemble", "the", "provided", "modules", "into", "the", "vector", "." ]
28a03748df8a2fa43f0c0c8098ca64d11559434e
https://github.com/althonos/moclo/blob/28a03748df8a2fa43f0c0c8098ca64d11559434e/moclo/moclo/core/vectors.py#L108-L145
train
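A hypothetical call, assuming `vector` is a concrete `AbstractVector` subclass instance and `promoter`, `cds`, `terminator` are `AbstractModule` instances with compatible overhangs:

record = vector.assemble(promoter, cds, terminator,
                         name="my construct", id="pMyConstruct")
# `record` is a Bio.SeqRecord.SeqRecord; argument order does not matter,
# since modules are sorted by start overhang inside the AssemblyManager.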
eventifyio/eventify
eventify/drivers/base.py
BaseComponent.onConnect
async def onConnect(self): """ Configure the component """ # Add extra attribute # This allows for following crossbar/autobahn spec # without changing legacy configuration if not hasattr(self.config, 'extra'): original_config = {'config': self.config} self.config = objdict(self.config) setattr(self.config, 'extra', original_config) self.config.extra['handlers'] = self.handlers # setup transport host self.transport_host = self.config.extra['config']['transport_host'] # subscription setup self.subscribe_options = SubscribeOptions(**self.config.extra['config']['sub_options']) self.replay_events = self.config.extra['config']['replay_events'] # publishing setup self.publish_topic = self.config.extra['config']['publish_topic']['topic'] self.publish_options = PublishOptions(**self.config.extra['config']['pub_options']) # setup callback self.handlers = self.config.extra['handlers'] # optional subscribed topics from config.json self.subscribed_topics = self.config.extra['config']['subscribed_topics'] # put name on session self.name = self.config.extra['config']['name'] # setup db pool - optionally if self.config.extra['config']['pub_options']['retain'] is True: self.pool = await asyncpg.create_pool( user=EVENT_DB_USER, password=EVENT_DB_PASS, host=EVENT_DB_HOST, database=EVENT_DB_NAME ) # Handle non crossbar drivers try: self.join(self.config.realm) except AttributeError: pass
python
async def onConnect(self): """ Configure the component """ # Add extra attribute # This allows for following crossbar/autobahn spec # without changing legacy configuration if not hasattr(self.config, 'extra'): original_config = {'config': self.config} self.config = objdict(self.config) setattr(self.config, 'extra', original_config) self.config.extra['handlers'] = self.handlers # setup transport host self.transport_host = self.config.extra['config']['transport_host'] # subscription setup self.subscribe_options = SubscribeOptions(**self.config.extra['config']['sub_options']) self.replay_events = self.config.extra['config']['replay_events'] # publishing setup self.publish_topic = self.config.extra['config']['publish_topic']['topic'] self.publish_options = PublishOptions(**self.config.extra['config']['pub_options']) # setup callback self.handlers = self.config.extra['handlers'] # optional subscribed topics from config.json self.subscribed_topics = self.config.extra['config']['subscribed_topics'] # put name on session self.name = self.config.extra['config']['name'] # setup db pool - optionally if self.config.extra['config']['pub_options']['retain'] is True: self.pool = await asyncpg.create_pool( user=EVENT_DB_USER, password=EVENT_DB_PASS, host=EVENT_DB_HOST, database=EVENT_DB_NAME ) # Handle non crossbar drivers try: self.join(self.config.realm) except AttributeError: pass
[ "async", "def", "onConnect", "(", "self", ")", ":", "# Add extra attribute", "# This allows for following crossbar/autobahn spec", "# without changing legacy configuration", "if", "not", "hasattr", "(", "self", ".", "config", ",", "'extra'", ")", ":", "original_config", "=", "{", "'config'", ":", "self", ".", "config", "}", "self", ".", "config", "=", "objdict", "(", "self", ".", "config", ")", "setattr", "(", "self", ".", "config", ",", "'extra'", ",", "original_config", ")", "self", ".", "config", ".", "extra", "[", "'handlers'", "]", "=", "self", ".", "handlers", "# setup transport host", "self", ".", "transport_host", "=", "self", ".", "config", ".", "extra", "[", "'config'", "]", "[", "'transport_host'", "]", "# subscription setup", "self", ".", "subscribe_options", "=", "SubscribeOptions", "(", "*", "*", "self", ".", "config", ".", "extra", "[", "'config'", "]", "[", "'sub_options'", "]", ")", "self", ".", "replay_events", "=", "self", ".", "config", ".", "extra", "[", "'config'", "]", "[", "'replay_events'", "]", "# publishing setup", "self", ".", "publish_topic", "=", "self", ".", "config", ".", "extra", "[", "'config'", "]", "[", "'publish_topic'", "]", "[", "'topic'", "]", "self", ".", "publish_options", "=", "PublishOptions", "(", "*", "*", "self", ".", "config", ".", "extra", "[", "'config'", "]", "[", "'pub_options'", "]", ")", "# setup callback", "self", ".", "handlers", "=", "self", ".", "config", ".", "extra", "[", "'handlers'", "]", "# optional subscribed topics from config.json", "self", ".", "subscribed_topics", "=", "self", ".", "config", ".", "extra", "[", "'config'", "]", "[", "'subscribed_topics'", "]", "# put name on session", "self", ".", "name", "=", "self", ".", "config", ".", "extra", "[", "'config'", "]", "[", "'name'", "]", "# setup db pool - optionally", "if", "self", ".", "config", ".", "extra", "[", "'config'", "]", "[", "'pub_options'", "]", "[", "'retain'", "]", "is", "True", ":", "self", ".", "pool", "=", "await", "asyncpg", ".", "create_pool", "(", "user", "=", "EVENT_DB_USER", ",", "password", "=", "EVENT_DB_PASS", ",", "host", "=", "EVENT_DB_HOST", ",", "database", "=", "EVENT_DB_NAME", ")", "# Handle non crossbar drivers", "try", ":", "self", ".", "join", "(", "self", ".", "config", ".", "realm", ")", "except", "AttributeError", ":", "pass" ]
Configure the component
[ "Configure", "the", "component" ]
0e519964a56bd07a879b266f21f177749c63aaed
https://github.com/eventifyio/eventify/blob/0e519964a56bd07a879b266f21f177749c63aaed/eventify/drivers/base.py#L20-L66
train
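From the attribute accesses in `onConnect`, the expected configuration roughly has the following shape. This is a reconstruction from the code above, not from the project's documentation; all values are placeholders, and the option dicts are whatever keyword arguments autobahn's `SubscribeOptions`/`PublishOptions` accept.

config = {
    "name": "my-service",
    "transport_host": "ws://localhost:8080/ws",
    "replay_events": False,
    "subscribed_topics": ["service.events"],
    "publish_topic": {"topic": "service.events"},
    "sub_options": {},                 # kwargs forwarded to SubscribeOptions
    "pub_options": {"retain": True},   # retain=True also triggers the asyncpg pool
}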
peterbe/gg
gg/builtins/getback/gg_getback.py
getback
def getback(config, force=False): """Goes back to the master branch, deletes the current branch locally and remotely.""" repo = config.repo active_branch = repo.active_branch if active_branch.name == "master": error_out("You're already on the master branch.") if repo.is_dirty(): error_out( 'Repo is "dirty". ({})'.format( ", ".join([repr(x.b_path) for x in repo.index.diff(None)]) ) ) branch_name = active_branch.name state = read(config.configfile) origin_name = state.get("ORIGIN_NAME", "origin") upstream_remote = None fork_remote = None for remote in repo.remotes: if remote.name == origin_name: # remote.pull() upstream_remote = remote break if not upstream_remote: error_out("No remote called {!r} found".format(origin_name)) # Check out master repo.heads.master.checkout() upstream_remote.pull(repo.heads.master) # Is this one of the merged branches?! # XXX I don't know how to do this "natively" with GitPython. merged_branches = [ x.strip() for x in repo.git.branch("--merged").splitlines() if x.strip() and not x.strip().startswith("*") ] was_merged = branch_name in merged_branches certain = was_merged or force if not certain: # Need to ask the user. # XXX This is where we could get smart and compare this branch # with the master. certain = ( input("Are you certain {} is actually merged? [Y/n] ".format(branch_name)) .lower() .strip() != "n" ) if not certain: return 1 if was_merged: repo.git.branch("-d", branch_name) else: repo.git.branch("-D", branch_name) fork_remote = None for remote in repo.remotes: if remote.name == state.get("FORK_NAME"): fork_remote = remote break if fork_remote: fork_remote.push(":" + branch_name) info_out("Remote branch on fork deleted too.")
python
def getback(config, force=False): """Goes back to the master branch, deletes the current branch locally and remotely.""" repo = config.repo active_branch = repo.active_branch if active_branch.name == "master": error_out("You're already on the master branch.") if repo.is_dirty(): error_out( 'Repo is "dirty". ({})'.format( ", ".join([repr(x.b_path) for x in repo.index.diff(None)]) ) ) branch_name = active_branch.name state = read(config.configfile) origin_name = state.get("ORIGIN_NAME", "origin") upstream_remote = None fork_remote = None for remote in repo.remotes: if remote.name == origin_name: # remote.pull() upstream_remote = remote break if not upstream_remote: error_out("No remote called {!r} found".format(origin_name)) # Check out master repo.heads.master.checkout() upstream_remote.pull(repo.heads.master) # Is this one of the merged branches?! # XXX I don't know how to do this "natively" with GitPython. merged_branches = [ x.strip() for x in repo.git.branch("--merged").splitlines() if x.strip() and not x.strip().startswith("*") ] was_merged = branch_name in merged_branches certain = was_merged or force if not certain: # Need to ask the user. # XXX This is where we could get smart and compare this branch # with the master. certain = ( input("Are you certain {} is actually merged? [Y/n] ".format(branch_name)) .lower() .strip() != "n" ) if not certain: return 1 if was_merged: repo.git.branch("-d", branch_name) else: repo.git.branch("-D", branch_name) fork_remote = None for remote in repo.remotes: if remote.name == state.get("FORK_NAME"): fork_remote = remote break if fork_remote: fork_remote.push(":" + branch_name) info_out("Remote branch on fork deleted too.")
[ "def", "getback", "(", "config", ",", "force", "=", "False", ")", ":", "repo", "=", "config", ".", "repo", "active_branch", "=", "repo", ".", "active_branch", "if", "active_branch", ".", "name", "==", "\"master\"", ":", "error_out", "(", "\"You're already on the master branch.\"", ")", "if", "repo", ".", "is_dirty", "(", ")", ":", "error_out", "(", "'Repo is \"dirty\". ({})'", ".", "format", "(", "\", \"", ".", "join", "(", "[", "repr", "(", "x", ".", "b_path", ")", "for", "x", "in", "repo", ".", "index", ".", "diff", "(", "None", ")", "]", ")", ")", ")", "branch_name", "=", "active_branch", ".", "name", "state", "=", "read", "(", "config", ".", "configfile", ")", "origin_name", "=", "state", ".", "get", "(", "\"ORIGIN_NAME\"", ",", "\"origin\"", ")", "upstream_remote", "=", "None", "fork_remote", "=", "None", "for", "remote", "in", "repo", ".", "remotes", ":", "if", "remote", ".", "name", "==", "origin_name", ":", "# remote.pull()", "upstream_remote", "=", "remote", "break", "if", "not", "upstream_remote", ":", "error_out", "(", "\"No remote called {!r} found\"", ".", "format", "(", "origin_name", ")", ")", "# Check out master", "repo", ".", "heads", ".", "master", ".", "checkout", "(", ")", "upstream_remote", ".", "pull", "(", "repo", ".", "heads", ".", "master", ")", "# Is this one of the merged branches?!", "# XXX I don't know how to do this \"natively\" with GitPython.", "merged_branches", "=", "[", "x", ".", "strip", "(", ")", "for", "x", "in", "repo", ".", "git", ".", "branch", "(", "\"--merged\"", ")", ".", "splitlines", "(", ")", "if", "x", ".", "strip", "(", ")", "and", "not", "x", ".", "strip", "(", ")", ".", "startswith", "(", "\"*\"", ")", "]", "was_merged", "=", "branch_name", "in", "merged_branches", "certain", "=", "was_merged", "or", "force", "if", "not", "certain", ":", "# Need to ask the user.", "# XXX This is where we could get smart and compare this branch", "# with the master.", "certain", "=", "(", "input", "(", "\"Are you certain {} is actually merged? [Y/n] \"", ".", "format", "(", "branch_name", ")", ")", ".", "lower", "(", ")", ".", "strip", "(", ")", "!=", "\"n\"", ")", "if", "not", "certain", ":", "return", "1", "if", "was_merged", ":", "repo", ".", "git", ".", "branch", "(", "\"-d\"", ",", "branch_name", ")", "else", ":", "repo", ".", "git", ".", "branch", "(", "\"-D\"", ",", "branch_name", ")", "fork_remote", "=", "None", "for", "remote", "in", "repo", ".", "remotes", ":", "if", "remote", ".", "name", "==", "state", ".", "get", "(", "\"FORK_NAME\"", ")", ":", "fork_remote", "=", "remote", "break", "if", "fork_remote", ":", "fork_remote", ".", "push", "(", "\":\"", "+", "branch_name", ")", "info_out", "(", "\"Remote branch on fork deleted too.\"", ")" ]
Goes back to the master branch, deletes the current branch locally and remotely.
[ "Goes", "back", "to", "the", "master", "branch", "deletes", "the", "current", "branch", "locally", "and", "remotely", "." ]
2aace5bdb4a9b1cb65bea717784edf54c63b7bad
https://github.com/peterbe/gg/blob/2aace5bdb4a9b1cb65bea717784edf54c63b7bad/gg/builtins/getback/gg_getback.py#L11-L79
train
nhfruchter/pgh-bustime
pghbustime/datatypes.py
Bus.get
def get(_class, api, vid):
        """
        Return a Bus object for a certain vehicle ID `vid` using
        API instance `api`.
        """
        busses = api.vehicles(vid=vid)['vehicle']
        return _class.fromapi(api, busses)
python
def get(_class, api, vid):
        """
        Return a Bus object for a certain vehicle ID `vid` using
        API instance `api`.
        """
        busses = api.vehicles(vid=vid)['vehicle']
        return _class.fromapi(api, busses)
[ "def", "get", "(", "_class", ",", "api", ",", "vid", ")", ":", "busses", "=", "api", ".", "vehicles", "(", "vid", "=", "vid", ")", "[", "'vehicle'", "]", "return", "_class", ".", "fromapi", "(", "api", ",", "api", ".", "vehicles", "(", "vid", "=", "vid", ")", "[", "'vehicle'", "]", ")" ]
Return a Bus object for a certain vehicle ID `vid` using API instance `api`.
[ "Return", "a", "Bus", "object", "for", "a", "certain", "vehicle", "ID", "vid", "using", "API", "instance", "api", "." ]
b915e8fea28541612f0e79783c2cf12fd3daaac0
https://github.com/nhfruchter/pgh-bustime/blob/b915e8fea28541612f0e79783c2cf12fd3daaac0/pghbustime/datatypes.py#L13-L19
train
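Typical use, assuming `api` is an already constructed pghbustime API client exposing the `vehicles` endpoint (the client class itself sits outside this excerpt, and the vehicle ID is made up):

bus = Bus.get(api, vid="3240")        # one api.vehicles call under the hood
print(bus.route, bus.speed, bus.delay)
bus.update()                          # refresh position/fields in place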
nhfruchter/pgh-bustime
pghbustime/datatypes.py
Bus.fromapi
def fromapi(_class, api, apiresponse): """ Return a Bus object from an API response dict. """ bus = apiresponse return _class( api = api, vid = bus['vid'], timeupdated = datetime.strptime(bus['tmstmp'], api.STRPTIME), lat = float(bus['lat']), lng = float(bus['lon']), heading = bus['hdg'], pid = bus['pid'], intotrip = bus['pdist'], route = bus['rt'], destination = bus['des'], speed = bus['spd'], delay = bus.get('dly') or False )
python
def fromapi(_class, api, apiresponse): """ Return a Bus object from an API response dict. """ bus = apiresponse return _class( api = api, vid = bus['vid'], timeupdated = datetime.strptime(bus['tmstmp'], api.STRPTIME), lat = float(bus['lat']), lng = float(bus['lon']), heading = bus['hdg'], pid = bus['pid'], intotrip = bus['pdist'], route = bus['rt'], destination = bus['des'], speed = bus['spd'], delay = bus.get('dly') or False )
[ "def", "fromapi", "(", "_class", ",", "api", ",", "apiresponse", ")", ":", "bus", "=", "apiresponse", "return", "_class", "(", "api", "=", "api", ",", "vid", "=", "bus", "[", "'vid'", "]", ",", "timeupdated", "=", "datetime", ".", "strptime", "(", "bus", "[", "'tmstmp'", "]", ",", "api", ".", "STRPTIME", ")", ",", "lat", "=", "float", "(", "bus", "[", "'lat'", "]", ")", ",", "lng", "=", "float", "(", "bus", "[", "'lon'", "]", ")", ",", "heading", "=", "bus", "[", "'hdg'", "]", ",", "pid", "=", "bus", "[", "'pid'", "]", ",", "intotrip", "=", "bus", "[", "'pdist'", "]", ",", "route", "=", "bus", "[", "'rt'", "]", ",", "destination", "=", "bus", "[", "'des'", "]", ",", "speed", "=", "bus", "[", "'spd'", "]", ",", "delay", "=", "bus", ".", "get", "(", "'dly'", ")", "or", "False", ")" ]
Return a Bus object from an API response dict.
[ "Return", "a", "Bus", "object", "from", "an", "API", "response", "dict", "." ]
b915e8fea28541612f0e79783c2cf12fd3daaac0
https://github.com/nhfruchter/pgh-bustime/blob/b915e8fea28541612f0e79783c2cf12fd3daaac0/pghbustime/datatypes.py#L22-L40
train
nhfruchter/pgh-bustime
pghbustime/datatypes.py
Bus.update
def update(self): """Update this bus by creating a new one and transplanting dictionaries.""" vehicle = self.api.vehicles(vid=self.vid)['vehicle'] newbus = self.fromapi(self.api, vehicle) self.__dict__ = newbus.__dict__ del newbus
python
def update(self): """Update this bus by creating a new one and transplanting dictionaries.""" vehicle = self.api.vehicles(vid=self.vid)['vehicle'] newbus = self.fromapi(self.api, vehicle) self.__dict__ = newbus.__dict__ del newbus
[ "def", "update", "(", "self", ")", ":", "vehicle", "=", "self", ".", "api", ".", "vehicles", "(", "vid", "=", "self", ".", "vid", ")", "[", "'vehicle'", "]", "newbus", "=", "self", ".", "fromapi", "(", "self", ".", "api", ",", "vehicle", ")", "self", ".", "__dict__", "=", "newbus", ".", "__dict__", "del", "newbus" ]
Update this bus by creating a new one and transplanting dictionaries.
[ "Update", "this", "bus", "by", "creating", "a", "new", "one", "and", "transplanting", "dictionaries", "." ]
b915e8fea28541612f0e79783c2cf12fd3daaac0
https://github.com/nhfruchter/pgh-bustime/blob/b915e8fea28541612f0e79783c2cf12fd3daaac0/pghbustime/datatypes.py#L61-L66
train
nhfruchter/pgh-bustime
pghbustime/datatypes.py
Bus.predictions
def predictions(self): """Generator that yields prediction objects from an API response.""" for prediction in self.api.predictions(vid=self.vid)['prd']: pobj = Prediction.fromapi(self.api, prediction) pobj._busobj = self yield pobj
python
def predictions(self): """Generator that yields prediction objects from an API response.""" for prediction in self.api.predictions(vid=self.vid)['prd']: pobj = Prediction.fromapi(self.api, prediction) pobj._busobj = self yield pobj
[ "def", "predictions", "(", "self", ")", ":", "for", "prediction", "in", "self", ".", "api", ".", "predictions", "(", "vid", "=", "self", ".", "vid", ")", "[", "'prd'", "]", ":", "pobj", "=", "Prediction", ".", "fromapi", "(", "self", ".", "api", ",", "prediction", ")", "pobj", ".", "_busobj", "=", "self", "yield", "pobj" ]
Generator that yields prediction objects from an API response.
[ "Generator", "that", "yields", "prediction", "objects", "from", "an", "API", "response", "." ]
b915e8fea28541612f0e79783c2cf12fd3daaac0
https://github.com/nhfruchter/pgh-bustime/blob/b915e8fea28541612f0e79783c2cf12fd3daaac0/pghbustime/datatypes.py#L73-L78
train
nhfruchter/pgh-bustime
pghbustime/datatypes.py
Bus.next_stop
def next_stop(self): """Return the next stop for this bus.""" p = self.api.predictions(vid=self.vid)['prd'] pobj = Prediction.fromapi(self.api, p[0]) pobj._busobj = self return pobj
python
def next_stop(self): """Return the next stop for this bus.""" p = self.api.predictions(vid=self.vid)['prd'] pobj = Prediction.fromapi(self.api, p[0]) pobj._busobj = self return pobj
[ "def", "next_stop", "(", "self", ")", ":", "p", "=", "self", ".", "api", ".", "predictions", "(", "vid", "=", "self", ".", "vid", ")", "[", "'prd'", "]", "pobj", "=", "Prediction", ".", "fromapi", "(", "self", ".", "api", ",", "p", "[", "0", "]", ")", "pobj", ".", "_busobj", "=", "self", "return", "pobj" ]
Return the next stop for this bus.
[ "Return", "the", "next", "stop", "for", "this", "bus", "." ]
b915e8fea28541612f0e79783c2cf12fd3daaac0
https://github.com/nhfruchter/pgh-bustime/blob/b915e8fea28541612f0e79783c2cf12fd3daaac0/pghbustime/datatypes.py#L81-L86
train
nhfruchter/pgh-bustime
pghbustime/datatypes.py
Route.get
def get(_class, api, rt): """ Return a Route object for route `rt` using API instance `api`. """ if not _class.all_routes: _class.all_routes = _class.update_list(api, api.routes()['route']) return _class.all_routes[str(rt)]
python
def get(_class, api, rt): """ Return a Route object for route `rt` using API instance `api`. """ if not _class.all_routes: _class.all_routes = _class.update_list(api, api.routes()['route']) return _class.all_routes[str(rt)]
[ "def", "get", "(", "_class", ",", "api", ",", "rt", ")", ":", "if", "not", "_class", ".", "all_routes", ":", "_class", ".", "all_routes", "=", "_class", ".", "update_list", "(", "api", ",", "api", ".", "routes", "(", ")", "[", "'route'", "]", ")", "return", "_class", ".", "all_routes", "[", "str", "(", "rt", ")", "]" ]
Return a Route object for route `rt` using API instance `api`.
[ "Return", "a", "Route", "object", "for", "route", "rt", "using", "API", "instance", "api", "." ]
b915e8fea28541612f0e79783c2cf12fd3daaac0
https://github.com/nhfruchter/pgh-bustime/blob/b915e8fea28541612f0e79783c2cf12fd3daaac0/pghbustime/datatypes.py#L114-L122
train
gebn/wood
wood/__init__.py
_normalise_path
def _normalise_path(path: Union[str, pathlib.Path]) -> pathlib.Path: """ Ensures a path is parsed. :param path: A path string or Path object. :return: The path as a Path object. """ if isinstance(path, str): return pathlib.Path(path) return path
python
def _normalise_path(path: Union[str, pathlib.Path]) -> pathlib.Path: """ Ensures a path is parsed. :param path: A path string or Path object. :return: The path as a Path object. """ if isinstance(path, str): return pathlib.Path(path) return path
[ "def", "_normalise_path", "(", "path", ":", "Union", "[", "str", ",", "pathlib", ".", "Path", "]", ")", "->", "pathlib", ".", "Path", ":", "if", "isinstance", "(", "path", ",", "str", ")", ":", "return", "pathlib", ".", "Path", "(", "path", ")", "return", "path" ]
Ensures a path is parsed. :param path: A path string or Path object. :return: The path as a Path object.
[ "Ensures", "a", "path", "is", "parsed", "." ]
efc71879890dbd2f2d7a0b1a65ed22a0843139dd
https://github.com/gebn/wood/blob/efc71879890dbd2f2d7a0b1a65ed22a0843139dd/wood/__init__.py#L30-L39
train
gebn/wood
wood/__init__.py
root
def root(path: Union[str, pathlib.Path]) -> _Root: """ Retrieve a root directory object from a path. :param path: The path string or Path object. :return: The created root object. """ return _Root.from_path(_normalise_path(path))
python
def root(path: Union[str, pathlib.Path]) -> _Root: """ Retrieve a root directory object from a path. :param path: The path string or Path object. :return: The created root object. """ return _Root.from_path(_normalise_path(path))
[ "def", "root", "(", "path", ":", "Union", "[", "str", ",", "pathlib", ".", "Path", "]", ")", "->", "_Root", ":", "return", "_Root", ".", "from_path", "(", "_normalise_path", "(", "path", ")", ")" ]
Retrieve a root directory object from a path. :param path: The path string or Path object. :return: The created root object.
[ "Retrieve", "a", "root", "directory", "object", "from", "a", "path", "." ]
efc71879890dbd2f2d7a0b1a65ed22a0843139dd
https://github.com/gebn/wood/blob/efc71879890dbd2f2d7a0b1a65ed22a0843139dd/wood/__init__.py#L42-L49
train
gebn/wood
wood/__init__.py
entity
def entity(path: Union[str, pathlib.Path]) -> _Entity: """ Retrieve an appropriate entity object from a path. :param path: The path of the entity to represent, either a string or Path object. :return: An entity representing the input path. """ return _Entity.from_path(_normalise_path(path))
python
def entity(path: Union[str, pathlib.Path]) -> _Entity: """ Retrieve an appropriate entity object from a path. :param path: The path of the entity to represent, either a string or Path object. :return: An entity representing the input path. """ return _Entity.from_path(_normalise_path(path))
[ "def", "entity", "(", "path", ":", "Union", "[", "str", ",", "pathlib", ".", "Path", "]", ")", "->", "_Entity", ":", "return", "_Entity", ".", "from_path", "(", "_normalise_path", "(", "path", ")", ")" ]
Retrieve an appropriate entity object from a path. :param path: The path of the entity to represent, either a string or Path object. :return: An entity representing the input path.
[ "Retrieve", "an", "appropriate", "entity", "object", "from", "a", "path", "." ]
efc71879890dbd2f2d7a0b1a65ed22a0843139dd
https://github.com/gebn/wood/blob/efc71879890dbd2f2d7a0b1a65ed22a0843139dd/wood/__init__.py#L52-L60
train
gebn/wood
wood/__init__.py
compare
def compare(left: Union[str, pathlib.Path, _Entity], right: Union[str, pathlib.Path, _Entity]) -> Comparison: """ Compare two paths. :param left: The left side or "before" entity. :param right: The right side or "after" entity. :return: A comparison details what has changed from the left side to the right side. """ def normalise(param: Union[str, pathlib.Path, _Entity]) -> _Entity: """ Turns any one of a number of types of input into an entity. :param param: The input - either a path string, a path object, or a full blown entity. :return: The input param as an entity. """ if isinstance(param, str): param = pathlib.Path(param) if isinstance(param, pathlib.Path): param = _Entity.from_path(param) return param return Comparison.compare(normalise(left), normalise(right))
python
def compare(left: Union[str, pathlib.Path, _Entity], right: Union[str, pathlib.Path, _Entity]) -> Comparison: """ Compare two paths. :param left: The left side or "before" entity. :param right: The right side or "after" entity. :return: A comparison details what has changed from the left side to the right side. """ def normalise(param: Union[str, pathlib.Path, _Entity]) -> _Entity: """ Turns any one of a number of types of input into an entity. :param param: The input - either a path string, a path object, or a full blown entity. :return: The input param as an entity. """ if isinstance(param, str): param = pathlib.Path(param) if isinstance(param, pathlib.Path): param = _Entity.from_path(param) return param return Comparison.compare(normalise(left), normalise(right))
[ "def", "compare", "(", "left", ":", "Union", "[", "str", ",", "pathlib", ".", "Path", ",", "_Entity", "]", ",", "right", ":", "Union", "[", "str", ",", "pathlib", ".", "Path", ",", "_Entity", "]", ")", "->", "Comparison", ":", "def", "normalise", "(", "param", ":", "Union", "[", "str", ",", "pathlib", ".", "Path", ",", "_Entity", "]", ")", "->", "_Entity", ":", "\"\"\"\n Turns any one of a number of types of input into an entity.\n\n :param param: The input - either a path string, a path object, or a\n full blown entity.\n :return: The input param as an entity.\n \"\"\"", "if", "isinstance", "(", "param", ",", "str", ")", ":", "param", "=", "pathlib", ".", "Path", "(", "param", ")", "if", "isinstance", "(", "param", ",", "pathlib", ".", "Path", ")", ":", "param", "=", "_Entity", ".", "from_path", "(", "param", ")", "return", "param", "return", "Comparison", ".", "compare", "(", "normalise", "(", "left", ")", ",", "normalise", "(", "right", ")", ")" ]
Compare two paths. :param left: The left side or "before" entity. :param right: The right side or "after" entity. :return: A comparison details what has changed from the left side to the right side.
[ "Compare", "two", "paths", "." ]
efc71879890dbd2f2d7a0b1a65ed22a0843139dd
https://github.com/gebn/wood/blob/efc71879890dbd2f2d7a0b1a65ed22a0843139dd/wood/__init__.py#L63-L88
train
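End to end, the module-level helpers compose like this (paths are hypothetical; the left argument is the "before" tree and the right the "after" tree):

import pathlib
import wood

comparison = wood.compare("site/old", "site/new")
# the same call also accepts pathlib.Path objects or prebuilt entities:
comparison = wood.compare(pathlib.Path("site/old"), wood.entity("site/new"))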
aglie/meerkat
meerkat/meerkat.py
read_XPARM
def read_XPARM(path_to_XPARM='.'):
    """Loads the instrumental geometry information from the XPARM.XDS or GXPARM.XDS files at the proposed location"""
    if not os.path.exists(path_to_XPARM):
        raise Exception("path " + path_to_XPARM + " does not exist")

    if os.path.isdir(path_to_XPARM):
        candidate = os.path.join(path_to_XPARM, 'GXPARM.XDS')
        if os.path.isfile(candidate):
            path_to_XPARM = candidate
        else:
            candidate = os.path.join(path_to_XPARM, 'XPARM.XDS')
            if os.path.isfile(candidate):
                path_to_XPARM = candidate
            else:
                raise Exception("files GXPARM.XDS and XPARM.XDS are not found in the folder " + path_to_XPARM)

    with open(path_to_XPARM) as f:
        f.readline()  # skip header
        text = f.read()

    # parse the rest to numbers
    f = re.compile(r'-?\d+\.?\d*').finditer(text)

    try:
        result = dict(starting_frame=r_get_numbers(f, 1),
                      starting_angle=r_get_numbers(f, 1),
                      oscillation_angle=r_get_numbers(f, 1),
                      rotation_axis=r_get_numbers(f, 3),
                      wavelength=r_get_numbers(f, 1),
                      wavevector=r_get_numbers(f, 3),
                      space_group_nr=r_get_numbers(f, 1),
                      cell=r_get_numbers(f, 6),
                      unit_cell_vectors=np.reshape(r_get_numbers(f, 9), (3, 3)),
                      number_of_detector_segments=r_get_numbers(f, 1),
                      NX=r_get_numbers(f, 1),
                      NY=r_get_numbers(f, 1),
                      pixelsize_x=r_get_numbers(f, 1),
                      pixelsize_y=r_get_numbers(f, 1),
                      x_center=r_get_numbers(f, 1),
                      y_center=r_get_numbers(f, 1),
                      distance_to_detector=r_get_numbers(f, 1),
                      detector_x=r_get_numbers(f, 3),
                      detector_y=r_get_numbers(f, 3),
                      detector_normal=r_get_numbers(f, 3),
                      detector_segment_crossection=r_get_numbers(f, 5),
                      detector_segment_geometry=r_get_numbers(f, 9))
    except StopIteration:
        raise Exception('Wrong format of the XPARM.XDS file')

    # check there is nothing left
    try:
        next(f)
    except StopIteration:
        pass
    else:
        raise Exception('Wrong format of the XPARM.XDS file')

    return result
python
def read_XPARM(path_to_XPARM='.'):
    """Loads the instrumental geometry information from the XPARM.XDS or GXPARM.XDS files at the proposed location"""
    if not os.path.exists(path_to_XPARM):
        raise Exception("path " + path_to_XPARM + " does not exist")

    if os.path.isdir(path_to_XPARM):
        candidate = os.path.join(path_to_XPARM, 'GXPARM.XDS')
        if os.path.isfile(candidate):
            path_to_XPARM = candidate
        else:
            candidate = os.path.join(path_to_XPARM, 'XPARM.XDS')
            if os.path.isfile(candidate):
                path_to_XPARM = candidate
            else:
                raise Exception("files GXPARM.XDS and XPARM.XDS are not found in the folder " + path_to_XPARM)

    with open(path_to_XPARM) as f:
        f.readline()  # skip header
        text = f.read()

    # parse the rest to numbers
    f = re.compile(r'-?\d+\.?\d*').finditer(text)

    try:
        result = dict(starting_frame=r_get_numbers(f, 1),
                      starting_angle=r_get_numbers(f, 1),
                      oscillation_angle=r_get_numbers(f, 1),
                      rotation_axis=r_get_numbers(f, 3),
                      wavelength=r_get_numbers(f, 1),
                      wavevector=r_get_numbers(f, 3),
                      space_group_nr=r_get_numbers(f, 1),
                      cell=r_get_numbers(f, 6),
                      unit_cell_vectors=np.reshape(r_get_numbers(f, 9), (3, 3)),
                      number_of_detector_segments=r_get_numbers(f, 1),
                      NX=r_get_numbers(f, 1),
                      NY=r_get_numbers(f, 1),
                      pixelsize_x=r_get_numbers(f, 1),
                      pixelsize_y=r_get_numbers(f, 1),
                      x_center=r_get_numbers(f, 1),
                      y_center=r_get_numbers(f, 1),
                      distance_to_detector=r_get_numbers(f, 1),
                      detector_x=r_get_numbers(f, 3),
                      detector_y=r_get_numbers(f, 3),
                      detector_normal=r_get_numbers(f, 3),
                      detector_segment_crossection=r_get_numbers(f, 5),
                      detector_segment_geometry=r_get_numbers(f, 9))
    except StopIteration:
        raise Exception('Wrong format of the XPARM.XDS file')

    # check there is nothing left
    try:
        next(f)
    except StopIteration:
        pass
    else:
        raise Exception('Wrong format of the XPARM.XDS file')

    return result
[ "def", "read_XPARM", "(", "path_to_XPARM", "=", "'.'", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "path_to_XPARM", ")", ":", "raise", "Exception", "(", "\"path \"", "+", "path_to_XPARM", "+", "\"does not exist\"", ")", "if", "os", ".", "path", ".", "isdir", "(", "path_to_XPARM", ")", ":", "candidate", "=", "os", ".", "path", ".", "join", "(", "path_to_XPARM", ",", "'GXPARM.XDS'", ")", "if", "os", ".", "path", ".", "isfile", "(", "candidate", ")", ":", "path_to_XPARM", "=", "candidate", "else", ":", "candidate", "=", "os", ".", "path", ".", "join", "(", "path_to_XPARM", ",", "'XPARM.XDS'", ")", "if", "os", ".", "path", ".", "isfile", "(", "candidate", ")", ":", "path_to_XPARM", "=", "candidate", "else", ":", "raise", "Exception", "(", "\"files GXPARM.XDS and XPARM.XDS are not found in the folder \"", "+", "path_to_XPARM", ")", "with", "open", "(", "path_to_XPARM", ")", "as", "f", ":", "f", ".", "readline", "(", ")", "# skip header", "text", "=", "f", ".", "read", "(", ")", "# parse the rest to numbers", "f", "=", "re", ".", "compile", "(", "'-?\\d+\\.?\\d*'", ")", ".", "finditer", "(", "text", ")", "try", ":", "result", "=", "dict", "(", "starting_frame", "=", "r_get_numbers", "(", "f", ",", "1", ")", ",", "starting_angle", "=", "r_get_numbers", "(", "f", ",", "1", ")", ",", "oscillation_angle", "=", "r_get_numbers", "(", "f", ",", "1", ")", ",", "rotation_axis", "=", "r_get_numbers", "(", "f", ",", "3", ")", ",", "wavelength", "=", "r_get_numbers", "(", "f", ",", "1", ")", ",", "wavevector", "=", "r_get_numbers", "(", "f", ",", "3", ")", ",", "space_group_nr", "=", "r_get_numbers", "(", "f", ",", "1", ")", ",", "cell", "=", "r_get_numbers", "(", "f", ",", "6", ")", ",", "unit_cell_vectors", "=", "np", ".", "reshape", "(", "r_get_numbers", "(", "f", ",", "9", ")", ",", "(", "3", ",", "3", ")", ")", ",", "number_of_detector_segments", "=", "r_get_numbers", "(", "f", ",", "1", ")", ",", "NX", "=", "r_get_numbers", "(", "f", ",", "1", ")", ",", "NY", "=", "r_get_numbers", "(", "f", ",", "1", ")", ",", "pixelsize_x", "=", "r_get_numbers", "(", "f", ",", "1", ")", ",", "pixelsize_y", "=", "r_get_numbers", "(", "f", ",", "1", ")", ",", "x_center", "=", "r_get_numbers", "(", "f", ",", "1", ")", ",", "y_center", "=", "r_get_numbers", "(", "f", ",", "1", ")", ",", "distance_to_detector", "=", "r_get_numbers", "(", "f", ",", "1", ")", ",", "detector_x", "=", "r_get_numbers", "(", "f", ",", "3", ")", ",", "detector_y", "=", "r_get_numbers", "(", "f", ",", "3", ")", ",", "detector_normal", "=", "r_get_numbers", "(", "f", ",", "3", ")", ",", "detector_segment_crossection", "=", "r_get_numbers", "(", "f", ",", "5", ")", ",", "detector_segment_geometry", "=", "r_get_numbers", "(", "f", ",", "9", ")", ")", "except", "StopIteration", ":", "raise", "Exception", "(", "'Wrong format of the XPARM.XDS file'", ")", "# check there is nothing left", "try", ":", "f", ".", "next", "(", ")", "except", "StopIteration", ":", "pass", "else", ":", "raise", "Exception", "(", "'Wrong format of the XPARM.XDS file'", ")", "return", "result" ]
Loads the instrumental geometry information from the XPARM.XDS or GXPARM.XDS files at the proposed location
[ "Loads", "the", "instrumental", "geometry", "information", "from", "the", "XPARM", ".", "XDS", "or", "GXPARM", ".", "XDS", "files", "at", "the", "proposed", "location" ]
f056a3da7ed3d7cd43edb56a38903cfa146e4b24
https://github.com/aglie/meerkat/blob/f056a3da7ed3d7cd43edb56a38903cfa146e4b24/meerkat/meerkat.py#L18-L82
train
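`r_get_numbers` is used above but not shown in this excerpt. A plausible implementation, inferred from how the results are used (a scalar for n == 1, an array otherwise, and a StopIteration when the file runs short):

import numpy as np

def r_get_numbers(matches, n):
    # consume the next `n` regex matches and convert them to floats;
    # next() raises StopIteration, as caught in read_XPARM above
    values = [float(next(matches).group()) for _ in range(n)]
    return values[0] if n == 1 else np.array(values)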
aglie/meerkat
meerkat/meerkat.py
create_h5py_with_large_cache
def create_h5py_with_large_cache(filename, cache_size_mb): """ Allows to open the hdf5 file with specified cache size """ # h5py does not allow to control the cache size from the high level # we employ the workaround # sources: #http://stackoverflow.com/questions/14653259/how-to-set-cache-settings-while-using-h5py-high-level-interface #https://groups.google.com/forum/#!msg/h5py/RVx1ZB6LpE4/KH57vq5yw2AJ propfaid = h5py.h5p.create(h5py.h5p.FILE_ACCESS) settings = list(propfaid.get_cache()) settings[2] = 1024 * 1024 * cache_size_mb propfaid.set_cache(*settings) fid = h5py.h5f.create(filename, flags=h5py.h5f.ACC_EXCL, fapl=propfaid) fin = h5py.File(fid) return fin
python
def create_h5py_with_large_cache(filename, cache_size_mb): """ Allows to open the hdf5 file with specified cache size """ # h5py does not allow to control the cache size from the high level # we employ the workaround # sources: #http://stackoverflow.com/questions/14653259/how-to-set-cache-settings-while-using-h5py-high-level-interface #https://groups.google.com/forum/#!msg/h5py/RVx1ZB6LpE4/KH57vq5yw2AJ propfaid = h5py.h5p.create(h5py.h5p.FILE_ACCESS) settings = list(propfaid.get_cache()) settings[2] = 1024 * 1024 * cache_size_mb propfaid.set_cache(*settings) fid = h5py.h5f.create(filename, flags=h5py.h5f.ACC_EXCL, fapl=propfaid) fin = h5py.File(fid) return fin
[ "def", "create_h5py_with_large_cache", "(", "filename", ",", "cache_size_mb", ")", ":", "# h5py does not allow to control the cache size from the high level", "# we employ the workaround", "# sources:", "#http://stackoverflow.com/questions/14653259/how-to-set-cache-settings-while-using-h5py-high-level-interface", "#https://groups.google.com/forum/#!msg/h5py/RVx1ZB6LpE4/KH57vq5yw2AJ", "propfaid", "=", "h5py", ".", "h5p", ".", "create", "(", "h5py", ".", "h5p", ".", "FILE_ACCESS", ")", "settings", "=", "list", "(", "propfaid", ".", "get_cache", "(", ")", ")", "settings", "[", "2", "]", "=", "1024", "*", "1024", "*", "cache_size_mb", "propfaid", ".", "set_cache", "(", "*", "settings", ")", "fid", "=", "h5py", ".", "h5f", ".", "create", "(", "filename", ",", "flags", "=", "h5py", ".", "h5f", ".", "ACC_EXCL", ",", "fapl", "=", "propfaid", ")", "fin", "=", "h5py", ".", "File", "(", "fid", ")", "return", "fin" ]
Allows to open the hdf5 file with specified cache size
[ "Allows", "to", "open", "the", "hdf5", "file", "with", "specified", "cache", "size" ]
f056a3da7ed3d7cd43edb56a38903cfa146e4b24
https://github.com/aglie/meerkat/blob/f056a3da7ed3d7cd43edb56a38903cfa146e4b24/meerkat/meerkat.py#L203-L218
train
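For reference, the tuple returned by `propfaid.get_cache()` is `(mdc_nelmts, rdcc_nslots, rdcc_nbytes, rdcc_w0)`, so `settings[2]` above is the raw chunk-cache size in bytes. A hedged usage sketch (the output name is arbitrary; note that `ACC_EXCL` means the file must not already exist, and the low-level h5f API may require a bytes filename):

fin = create_h5py_with_large_cache(b'output.h5', cache_size_mb=512)
try:
    # chunked dataset whose chunks now fit comfortably in the 512 MB cache
    fin.create_dataset('data', shape=(1024, 1024, 1024), dtype='float32',
                       chunks=(64, 64, 64))
finally:
    fin.close()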
kblin/bioinf-helperlibs
helperlibs/bio/featurematch.py
find_features
def find_features(seqs, locus_tag="all", utr_len=200): """Find features in sequences by locus tag""" found_features = [] for seq_i in seqs: for feature in seq_i.features: if feature.type == "CDS" and (locus_tag == "all" or \ ('locus_tag' in feature.qualifiers and \ feature.qualifiers['locus_tag'][0] == locus_tag)): start = max(0, feature.location.nofuzzy_start - utr_len) stop = max(0, feature.location.nofuzzy_end + utr_len) feature_seq = seq_i.seq[start:stop] f_match = FeatureMatch(feature, feature_seq, feature.strand, utr_len) found_features.append(f_match) return found_features
python
def find_features(seqs, locus_tag="all", utr_len=200): """Find features in sequences by locus tag""" found_features = [] for seq_i in seqs: for feature in seq_i.features: if feature.type == "CDS" and (locus_tag == "all" or \ ('locus_tag' in feature.qualifiers and \ feature.qualifiers['locus_tag'][0] == locus_tag)): start = max(0, feature.location.nofuzzy_start - utr_len) stop = max(0, feature.location.nofuzzy_end + utr_len) feature_seq = seq_i.seq[start:stop] f_match = FeatureMatch(feature, feature_seq, feature.strand, utr_len) found_features.append(f_match) return found_features
[ "def", "find_features", "(", "seqs", ",", "locus_tag", "=", "\"all\"", ",", "utr_len", "=", "200", ")", ":", "found_features", "=", "[", "]", "for", "seq_i", "in", "seqs", ":", "for", "feature", "in", "seq_i", ".", "features", ":", "if", "feature", ".", "type", "==", "\"CDS\"", "and", "(", "locus_tag", "==", "\"all\"", "or", "(", "'locus_tag'", "in", "feature", ".", "qualifiers", "and", "feature", ".", "qualifiers", "[", "'locus_tag'", "]", "[", "0", "]", "==", "locus_tag", ")", ")", ":", "start", "=", "max", "(", "0", ",", "feature", ".", "location", ".", "nofuzzy_start", "-", "utr_len", ")", "stop", "=", "max", "(", "0", ",", "feature", ".", "location", ".", "nofuzzy_end", "+", "utr_len", ")", "feature_seq", "=", "seq_i", ".", "seq", "[", "start", ":", "stop", "]", "f_match", "=", "FeatureMatch", "(", "feature", ",", "feature_seq", ",", "feature", ".", "strand", ",", "utr_len", ")", "found_features", ".", "append", "(", "f_match", ")", "return", "found_features" ]
Find features in sequences by locus tag
[ "Find", "features", "in", "sequences", "by", "locus", "tag" ]
3a732d62b4b3cc42675631db886ba534672cb134
https://github.com/kblin/bioinf-helperlibs/blob/3a732d62b4b3cc42675631db886ba534672cb134/helperlibs/bio/featurematch.py#L111-L127
train
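A hedged usage sketch for `find_features`: it assumes Biopython `SeqRecord` inputs, e.g. from `Bio.SeqIO`; the GenBank path and locus tag are placeholders, and the attributes of the returned `FeatureMatch` objects are defined elsewhere in the module, so they are not dereferenced here.

```python
from Bio import SeqIO

# 'genome.gbk' is a placeholder path; SeqIO.parse yields SeqRecord
# objects whose .features list the function iterates over.
seqs = list(SeqIO.parse('genome.gbk', 'genbank'))

# All CDS features, each taken with 200 bp of flanking sequence.
all_cds = find_features(seqs)

# A single gene by locus tag, with a wider 500 bp flank.
matches = find_features(seqs, locus_tag='b0001', utr_len=500)
print(len(all_cds), len(matches))
```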
Nic30/hwtGraph
hwtGraph/elk/containers/lPort.py
LPort.getLevel
def getLevel(self): """ Get nest-level of this port """ lvl = 0 p = self while True: p = p.parent if not isinstance(p, LPort): break lvl += 1 return lvl
python
def getLevel(self): """ Get nest-level of this port """ lvl = 0 p = self while True: p = p.parent if not isinstance(p, LPort): break lvl += 1 return lvl
[ "def", "getLevel", "(", "self", ")", ":", "lvl", "=", "0", "p", "=", "self", "while", "True", ":", "p", "=", "p", ".", "parent", "if", "not", "isinstance", "(", "p", ",", "LPort", ")", ":", "break", "lvl", "+=", "1", "return", "lvl" ]
Get nest-level of this port
[ "Get", "nest", "-", "level", "of", "this", "port" ]
6b7d4fdd759f263a0fdd2736f02f123e44e4354f
https://github.com/Nic30/hwtGraph/blob/6b7d4fdd759f263a0fdd2736f02f123e44e4354f/hwtGraph/elk/containers/lPort.py#L44-L55
train
unt-libraries/pyuntl
pyuntl/util.py
normalize_LCSH
def normalize_LCSH(subject): """Normalize a LCSH subject heading prior to indexing.""" # Strip then divide on -- which is a delimiter for LCSH; # rejoin after stripping parts. subject_parts = subject.strip().split('--') joined_subject = ' -- '.join([part.strip() for part in subject_parts]) # Check if there is punctuation at the end of the string, # and if not, add a trailing period. if re.search(r'[^a-zA-Z0-9]$', joined_subject) is None: joined_subject = joined_subject + '.' return joined_subject
python
def normalize_LCSH(subject): """Normalize a LCSH subject heading prior to indexing.""" # Strip then divide on -- which is a delimiter for LCSH; # rejoin after stripping parts. subject_parts = subject.strip().split('--') joined_subject = ' -- '.join([part.strip() for part in subject_parts]) # Check if there is punctuation at the end of the string, # and if not, add a trailing period. if re.search(r'[^a-zA-Z0-9]$', joined_subject) is None: joined_subject = joined_subject + '.' return joined_subject
[ "def", "normalize_LCSH", "(", "subject", ")", ":", "# Strip then divide on -- which is a delimiter for LCSH;", "# rejoin after stripping parts.", "subject_parts", "=", "subject", ".", "strip", "(", ")", ".", "split", "(", "'--'", ")", "joined_subject", "=", "' -- '", ".", "join", "(", "[", "part", ".", "strip", "(", ")", "for", "part", "in", "subject_parts", "]", ")", "# Check if there is punctuation at the end of the string,", "# and if not, add a trailing period.", "if", "re", ".", "search", "(", "r'[^a-zA-Z0-9]$'", ",", "joined_subject", ")", "is", "None", ":", "joined_subject", "=", "joined_subject", "+", "'.'", "return", "joined_subject" ]
Normalize a LCSH subject heading prior to indexing.
[ "Normalize", "a", "LCSH", "subject", "heading", "prior", "to", "indexing", "." ]
f92413302897dab948aac18ee9e482ace0187bd4
https://github.com/unt-libraries/pyuntl/blob/f92413302897dab948aac18ee9e482ace0187bd4/pyuntl/util.py#L4-L16
train
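Two concrete calls showing what `normalize_LCSH` does: it pads the `--` subdivision delimiter and appends a period only when the heading ends in an alphanumeric character. The headings are illustrative.

```python
# Spacing around '--' is normalized and a terminal period is added,
# because the heading ends in an alphanumeric character.
print(normalize_LCSH('United States--History--Civil War, 1861-1865'))
# -> 'United States -- History -- Civil War, 1861-1865.'

# A heading that already ends in punctuation is left unpadded.
print(normalize_LCSH('Texas--Politics and government.'))
# -> 'Texas -- Politics and government.'
```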
unt-libraries/pyuntl
pyuntl/util.py
normalize_UNTL
def normalize_UNTL(subject): """Normalize a UNTL subject heading for consistency.""" subject = subject.strip() subject = re.sub(r'[\s]+', ' ', subject) return subject
python
def normalize_UNTL(subject): """Normalize a UNTL subject heading for consistency.""" subject = subject.strip() subject = re.sub(r'[\s]+', ' ', subject) return subject
[ "def", "normalize_UNTL", "(", "subject", ")", ":", "subject", "=", "subject", ".", "strip", "(", ")", "subject", "=", "re", ".", "sub", "(", "r'[\\s]+'", ",", "' '", ",", "subject", ")", "return", "subject" ]
Normalize a UNTL subject heading for consistency.
[ "Normalize", "a", "UNTL", "subject", "heading", "for", "consistency", "." ]
f92413302897dab948aac18ee9e482ace0187bd4
https://github.com/unt-libraries/pyuntl/blob/f92413302897dab948aac18ee9e482ace0187bd4/pyuntl/util.py#L19-L23
train
unt-libraries/pyuntl
pyuntl/util.py
UNTL_to_encodedUNTL
def UNTL_to_encodedUNTL(subject): """Normalize a UNTL subject heading to be used in SOLR.""" subject = normalize_UNTL(subject) subject = subject.replace(' ', '_') subject = subject.replace('_-_', '/') return subject
python
def UNTL_to_encodedUNTL(subject): """Normalize a UNTL subject heading to be used in SOLR.""" subject = normalize_UNTL(subject) subject = subject.replace(' ', '_') subject = subject.replace('_-_', '/') return subject
[ "def", "UNTL_to_encodedUNTL", "(", "subject", ")", ":", "subject", "=", "normalize_UNTL", "(", "subject", ")", "subject", "=", "subject", ".", "replace", "(", "' '", ",", "'_'", ")", "subject", "=", "subject", ".", "replace", "(", "'_-_'", ",", "'/'", ")", "return", "subject" ]
Normalize a UNTL subject heading to be used in SOLR.
[ "Normalize", "a", "UNTL", "subject", "heading", "to", "be", "used", "in", "SOLR", "." ]
f92413302897dab948aac18ee9e482ace0187bd4
https://github.com/unt-libraries/pyuntl/blob/f92413302897dab948aac18ee9e482ace0187bd4/pyuntl/util.py#L26-L31
train
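A short sketch of the two UNTL helpers together: `UNTL_to_encodedUNTL` first runs `normalize_UNTL`, so stray whitespace collapses before spaces become underscores and `_-_` becomes a slash. The inputs are made up.

```python
# Whitespace is collapsed first, so ' -  ' still encodes to '/'.
print(UNTL_to_encodedUNTL('Science -  Physics'))
# -> 'Science/Physics'

# Plain multi-word headings just get underscores.
print(UNTL_to_encodedUNTL('  Local History '))
# -> 'Local_History'
```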
unt-libraries/pyuntl
pyuntl/util.py
untldict_normalizer
def untldict_normalizer(untl_dict, normalizations): """Normalize UNTL elements by their qualifier. Takes a UNTL descriptive metadata dictionary and a dictionary of the elements and the qualifiers for normalization: {'element1': ['qualifier1', 'qualifier2'], 'element2': ['qualifier3']} and normalizes the elements with that qualifier. """ # Loop through the element types in the UNTL metadata. for element_type, element_list in untl_dict.items(): # A normalization is required for that element type. if element_type in normalizations: # Get the required normalizations for specific qualifiers list. norm_qualifier_list = normalizations.get(element_type) # Loop through the element lists within that element type. for element in element_list: # Determine if the qualifier requires normalization. qualifier = element.get('qualifier', None) if qualifier in norm_qualifier_list: content = element.get('content', None) # Determine if there is normalizing for the element. if element_type in ELEMENT_NORMALIZERS: elem_norms = ELEMENT_NORMALIZERS.get(element_type, None) # If the qualified element requires a # normalization and has content, replace the # content with the normalized. if qualifier in elem_norms: if content and content != '': element['content'] = \ elem_norms[qualifier](content) return untl_dict
python
def untldict_normalizer(untl_dict, normalizations): """Normalize UNTL elements by their qualifier. Takes a UNTL descriptive metadata dictionary and a dictionary of the elements and the qualifiers for normalization: {'element1': ['qualifier1', 'qualifier2'], 'element2': ['qualifier3']} and normalizes the elements with that qualifier. """ # Loop through the element types in the UNTL metadata. for element_type, element_list in untl_dict.items(): # A normalization is required for that element type. if element_type in normalizations: # Get the required normalizations for specific qualifiers list. norm_qualifier_list = normalizations.get(element_type) # Loop through the element lists within that element type. for element in element_list: # Determine if the qualifier requires normalization. qualifier = element.get('qualifier', None) if qualifier in norm_qualifier_list: content = element.get('content', None) # Determine if there is normalizing for the element. if element_type in ELEMENT_NORMALIZERS: elem_norms = ELEMENT_NORMALIZERS.get(element_type, None) # If the qualified element requires a # normalization and has content, replace the # content with the normalized. if qualifier in elem_norms: if content and content != '': element['content'] = \ elem_norms[qualifier](content) return untl_dict
[ "def", "untldict_normalizer", "(", "untl_dict", ",", "normalizations", ")", ":", "# Loop through the element types in the UNTL metadata.", "for", "element_type", ",", "element_list", "in", "untl_dict", ".", "items", "(", ")", ":", "# A normalization is required for that element type.", "if", "element_type", "in", "normalizations", ":", "# Get the required normalizations for specific qualifiers list.", "norm_qualifier_list", "=", "normalizations", ".", "get", "(", "element_type", ")", "# Loop through the element lists within that element type.", "for", "element", "in", "element_list", ":", "# Determine if the qualifier requires normalization.", "qualifier", "=", "element", ".", "get", "(", "'qualifier'", ",", "None", ")", "if", "qualifier", "in", "norm_qualifier_list", ":", "content", "=", "element", ".", "get", "(", "'content'", ",", "None", ")", "# Determine if there is normalizing for the element.", "if", "element_type", "in", "ELEMENT_NORMALIZERS", ":", "elem_norms", "=", "ELEMENT_NORMALIZERS", ".", "get", "(", "element_type", ",", "None", ")", "# If the qualified element requires a", "# normalization and has content, replace the", "# content with the normalized.", "if", "qualifier", "in", "elem_norms", ":", "if", "content", "and", "content", "!=", "''", ":", "element", "[", "'content'", "]", "=", "elem_norms", "[", "qualifier", "]", "(", "content", ")", "return", "untl_dict" ]
Normalize UNTL elements by their qualifier. Takes a UNTL descriptive metadata dictionary and a dictionary of the elements and the qualifiers for normalization: {'element1': ['qualifier1', 'qualifier2'], 'element2': ['qualifier3']} and normalizes the elements with that qualifier.
[ "Normalize", "UNTL", "elements", "by", "their", "qualifier", "." ]
f92413302897dab948aac18ee9e482ace0187bd4
https://github.com/unt-libraries/pyuntl/blob/f92413302897dab948aac18ee9e482ace0187bd4/pyuntl/util.py#L41-L73
train
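A usage sketch for `untldict_normalizer`, assuming (as the surrounding helpers suggest, though this record does not show it) that the module-level `ELEMENT_NORMALIZERS` maps `'subject'` to a dict like `{'LCSH': normalize_LCSH, ...}`. The record dict is invented.

```python
# A pared-down UNTL metadata dict: element name -> list of
# {'qualifier': ..., 'content': ...} entries.
record = {
    'subject': [
        {'qualifier': 'LCSH', 'content': 'Texas--History '},
        {'qualifier': 'KWD', 'content': 'oral history'},
    ],
}

# Only subject/LCSH entries are normalized; subject/KWD is untouched
# because it is not listed in the normalizations argument.
cleaned = untldict_normalizer(record, {'subject': ['LCSH']})
print(cleaned['subject'][0]['content'])
# -> 'Texas -- History.'
```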
peterbe/gg
gg/builtins/start/gg_start.py
start
def start(config, bugnumber=""): """Create a new topic branch.""" repo = config.repo if bugnumber: summary, bugnumber, url = get_summary(config, bugnumber) else: url = None summary = None if summary: summary = input('Summary ["{}"]: '.format(summary)).strip() or summary else: summary = input("Summary: ").strip() branch_name = "" if bugnumber: if is_github({"bugnumber": bugnumber, "url": url}): branch_name = "{}-".format(bugnumber) else: branch_name = "{}-".format(bugnumber) def clean_branch_name(string): string = re.sub(r"\s+", " ", string) string = string.replace(" ", "-") string = string.replace("->", "-").replace("=>", "-") for each in "@%^&:'\"/(),[]{}!.?`$<>#*;=": string = string.replace(each, "") string = re.sub("-+", "-", string) string = string.strip("-") return string.lower().strip() branch_name += clean_branch_name(summary) if not branch_name: error_out("Must provide a branch name") # Check that the branch doesn't already exist found = list(find(repo, branch_name, exact=True)) if found: error_out("There is already a branch called {!r}".format(found[0].name)) new_branch = repo.create_head(branch_name) new_branch.checkout() if config.verbose: click.echo("Checked out new branch: {}".format(branch_name)) save(config.configfile, summary, branch_name, bugnumber=bugnumber, url=url)
python
def start(config, bugnumber=""): """Create a new topic branch.""" repo = config.repo if bugnumber: summary, bugnumber, url = get_summary(config, bugnumber) else: url = None summary = None if summary: summary = input('Summary ["{}"]: '.format(summary)).strip() or summary else: summary = input("Summary: ").strip() branch_name = "" if bugnumber: if is_github({"bugnumber": bugnumber, "url": url}): branch_name = "{}-".format(bugnumber) else: branch_name = "{}-".format(bugnumber) def clean_branch_name(string): string = re.sub(r"\s+", " ", string) string = string.replace(" ", "-") string = string.replace("->", "-").replace("=>", "-") for each in "@%^&:'\"/(),[]{}!.?`$<>#*;=": string = string.replace(each, "") string = re.sub("-+", "-", string) string = string.strip("-") return string.lower().strip() branch_name += clean_branch_name(summary) if not branch_name: error_out("Must provide a branch name") # Check that the branch doesn't already exist found = list(find(repo, branch_name, exact=True)) if found: error_out("There is already a branch called {!r}".format(found[0].name)) new_branch = repo.create_head(branch_name) new_branch.checkout() if config.verbose: click.echo("Checked out new branch: {}".format(branch_name)) save(config.configfile, summary, branch_name, bugnumber=bugnumber, url=url)
[ "def", "start", "(", "config", ",", "bugnumber", "=", "\"\"", ")", ":", "repo", "=", "config", ".", "repo", "if", "bugnumber", ":", "summary", ",", "bugnumber", ",", "url", "=", "get_summary", "(", "config", ",", "bugnumber", ")", "else", ":", "url", "=", "None", "summary", "=", "None", "if", "summary", ":", "summary", "=", "input", "(", "'Summary [\"{}\"]: '", ".", "format", "(", "summary", ")", ")", ".", "strip", "(", ")", "or", "summary", "else", ":", "summary", "=", "input", "(", "\"Summary: \"", ")", ".", "strip", "(", ")", "branch_name", "=", "\"\"", "if", "bugnumber", ":", "if", "is_github", "(", "{", "\"bugnumber\"", ":", "bugnumber", ",", "\"url\"", ":", "url", "}", ")", ":", "branch_name", "=", "\"{}-\"", ".", "format", "(", "bugnumber", ")", "else", ":", "branch_name", "=", "\"{}-\"", ".", "format", "(", "bugnumber", ")", "def", "clean_branch_name", "(", "string", ")", ":", "string", "=", "re", ".", "sub", "(", "r\"\\s+\"", ",", "\" \"", ",", "string", ")", "string", "=", "string", ".", "replace", "(", "\" \"", ",", "\"-\"", ")", "string", "=", "string", ".", "replace", "(", "\"->\"", ",", "\"-\"", ")", ".", "replace", "(", "\"=>\"", ",", "\"-\"", ")", "for", "each", "in", "\"@%^&:'\\\"/(),[]{}!.?`$<>#*;=\"", ":", "string", "=", "string", ".", "replace", "(", "each", ",", "\"\"", ")", "string", "=", "re", ".", "sub", "(", "\"-+\"", ",", "\"-\"", ",", "string", ")", "string", "=", "string", ".", "strip", "(", "\"-\"", ")", "return", "string", ".", "lower", "(", ")", ".", "strip", "(", ")", "branch_name", "+=", "clean_branch_name", "(", "summary", ")", "if", "not", "branch_name", ":", "error_out", "(", "\"Must provide a branch name\"", ")", "# Check that the branch doesn't already exist", "found", "=", "list", "(", "find", "(", "repo", ",", "branch_name", ",", "exact", "=", "True", ")", ")", "if", "found", ":", "error_out", "(", "\"There is already a branch called {!r}\"", ".", "format", "(", "found", "[", "0", "]", ".", "name", ")", ")", "new_branch", "=", "repo", ".", "create_head", "(", "branch_name", ")", "new_branch", ".", "checkout", "(", ")", "if", "config", ".", "verbose", ":", "click", ".", "echo", "(", "\"Checked out new branch: {}\"", ".", "format", "(", "branch_name", ")", ")", "save", "(", "config", ".", "configfile", ",", "summary", ",", "branch_name", ",", "bugnumber", "=", "bugnumber", ",", "url", "=", "url", ")" ]
Create a new topic branch.
[ "Create", "a", "new", "topic", "branch", "." ]
2aace5bdb4a9b1cb65bea717784edf54c63b7bad
https://github.com/peterbe/gg/blob/2aace5bdb4a9b1cb65bea717784edf54c63b7bad/gg/builtins/start/gg_start.py#L18-L65
train
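To see what branch names `start` produces, here is the nested `clean_branch_name` helper copied out as a standalone function, purely for illustration; inside `start` the result is prefixed with `<bugnumber>-` when a bug number is given.

```python
import re

# Standalone copy of the nested helper from start(), for illustration.
def clean_branch_name(string):
    string = re.sub(r"\s+", " ", string)
    string = string.replace(" ", "-")
    string = string.replace("->", "-").replace("=>", "-")
    for each in "@%^&:'\"/(),[]{}!.?`$<>#*;=":
        string = string.replace(each, "")
    string = re.sub("-+", "-", string)
    string = string.strip("-")
    return string.lower().strip()

print(clean_branch_name("Fix: crash on empty input!"))
# -> 'fix-crash-on-empty-input'
```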
255BITS/hyperchamber
examples/shared/ops.py
conv_cond_concat
def conv_cond_concat(x, y): """Concatenate conditioning vector on feature map axis.""" x_shapes = x.get_shape() y_shapes = y.get_shape() return tf.concat(3, [x, y*tf.ones([x_shapes[0], x_shapes[1], x_shapes[2], y_shapes[3]])])
python
def conv_cond_concat(x, y): """Concatenate conditioning vector on feature map axis.""" x_shapes = x.get_shape() y_shapes = y.get_shape() return tf.concat(3, [x, y*tf.ones([x_shapes[0], x_shapes[1], x_shapes[2], y_shapes[3]])])
[ "def", "conv_cond_concat", "(", "x", ",", "y", ")", ":", "x_shapes", "=", "x", ".", "get_shape", "(", ")", "y_shapes", "=", "y", ".", "get_shape", "(", ")", "return", "tf", ".", "concat", "(", "3", ",", "[", "x", ",", "y", "*", "tf", ".", "ones", "(", "[", "x_shapes", "[", "0", "]", ",", "x_shapes", "[", "1", "]", ",", "x_shapes", "[", "2", "]", ",", "y_shapes", "[", "3", "]", "]", ")", "]", ")" ]
Concatenate conditioning vector on feature map axis.
[ "Concatenate", "conditioning", "vector", "on", "feature", "map", "axis", "." ]
4d5774bde9ea6ce1113f77a069ffc605148482b8
https://github.com/255BITS/hyperchamber/blob/4d5774bde9ea6ce1113f77a069ffc605148482b8/examples/shared/ops.py#L106-L110
train
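`tf.concat(3, [...])` here uses the pre-1.0 TensorFlow argument order `tf.concat(axis, values)`; since TensorFlow 1.0 the order is `tf.concat(values, axis)`. A hedged modern sketch, assuming `y` has shape `[batch, 1, 1, channels]` as is conventional for conditioning vectors:

```python
import tensorflow as tf

def conv_cond_concat_v2(x, y):
    """TF >= 1.0 sketch: tile y across the spatial dims of x and
    concatenate along the channel axis."""
    x_shape = tf.shape(x)
    ones = tf.ones([x_shape[0], x_shape[1], x_shape[2], y.shape[-1]],
                   dtype=y.dtype)
    return tf.concat([x, y * ones], axis=3)
```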
255BITS/hyperchamber
examples/shared/ops.py
lrelu_sq
def lrelu_sq(x): """ Concatenates lrelu and square """ dim = len(x.get_shape()) - 1 return tf.concat(dim, [lrelu(x), tf.minimum(tf.abs(x), tf.square(x))])
python
def lrelu_sq(x): """ Concatenates lrelu and square """ dim = len(x.get_shape()) - 1 return tf.concat(dim, [lrelu(x), tf.minimum(tf.abs(x), tf.square(x))])
[ "def", "lrelu_sq", "(", "x", ")", ":", "dim", "=", "len", "(", "x", ".", "get_shape", "(", ")", ")", "-", "1", "return", "tf", ".", "concat", "(", "dim", ",", "[", "lrelu", "(", "x", ")", ",", "tf", ".", "minimum", "(", "tf", ".", "abs", "(", "x", ")", ",", "tf", ".", "square", "(", "x", ")", ")", "]", ")" ]
Concatenates lrelu and square
[ "Concatenates", "lrelu", "and", "square" ]
4d5774bde9ea6ce1113f77a069ffc605148482b8
https://github.com/255BITS/hyperchamber/blob/4d5774bde9ea6ce1113f77a069ffc605148482b8/examples/shared/ops.py#L223-L228
train
255BITS/hyperchamber
examples/shared/ops.py
avg_grads
def avg_grads(tower_grads): """Calculate the average gradient for each shared variable across all towers. Note that this function provides a synchronization point across all towers. Args: tower_grads: List of lists of (gradient, variable) tuples. The outer list is over individual gradients. The inner list is over the gradient calculation for each tower. Returns: List of pairs of (gradient, variable) where the gradient has been averaged across all towers. """ average_grads = [] for grad_and_vars in zip(*tower_grads): # Note that each grad_and_vars looks like the following: # ((grad0_gpu0, var0_gpu0), ... , (grad0_gpuN, var0_gpuN)) grads = [] for g, _ in grad_and_vars: # Add 0 dimension to the gradients to represent the tower. expanded_g = tf.expand_dims(g, 0) # Append on a 'tower' dimension which we will average over below. grads.append(expanded_g) # Average over the 'tower' dimension. grad = tf.concat(0, grads) grad = tf.reduce_mean(grad, 0) # Keep in mind that the Variables are redundant because they are shared # across towers. So .. we will just return the first tower's pointer to # the Variable. v = grad_and_vars[0][1] grad_and_var = (grad, v) average_grads.append(grad_and_var) return average_grads
python
def avg_grads(tower_grads): """Calculate the average gradient for each shared variable across all towers. Note that this function provides a synchronization point across all towers. Args: tower_grads: List of lists of (gradient, variable) tuples. The outer list is over individual gradients. The inner list is over the gradient calculation for each tower. Returns: List of pairs of (gradient, variable) where the gradient has been averaged across all towers. """ average_grads = [] for grad_and_vars in zip(*tower_grads): # Note that each grad_and_vars looks like the following: # ((grad0_gpu0, var0_gpu0), ... , (grad0_gpuN, var0_gpuN)) grads = [] for g, _ in grad_and_vars: # Add 0 dimension to the gradients to represent the tower. expanded_g = tf.expand_dims(g, 0) # Append on a 'tower' dimension which we will average over below. grads.append(expanded_g) # Average over the 'tower' dimension. grad = tf.concat(0, grads) grad = tf.reduce_mean(grad, 0) # Keep in mind that the Variables are redundant because they are shared # across towers. So .. we will just return the first tower's pointer to # the Variable. v = grad_and_vars[0][1] grad_and_var = (grad, v) average_grads.append(grad_and_var) return average_grads
[ "def", "avg_grads", "(", "tower_grads", ")", ":", "average_grads", "=", "[", "]", "for", "grad_and_vars", "in", "zip", "(", "*", "tower_grads", ")", ":", "# Note that each grad_and_vars looks like the following:", "# ((grad0_gpu0, var0_gpu0), ... , (grad0_gpuN, var0_gpuN))", "grads", "=", "[", "]", "for", "g", ",", "_", "in", "grad_and_vars", ":", "# Add 0 dimension to the gradients to represent the tower.", "expanded_g", "=", "tf", ".", "expand_dims", "(", "g", ",", "0", ")", "# Append on a 'tower' dimension which we will average over below.", "grads", ".", "append", "(", "expanded_g", ")", "# Average over the 'tower' dimension.", "grad", "=", "tf", ".", "concat", "(", "0", ",", "grads", ")", "grad", "=", "tf", ".", "reduce_mean", "(", "grad", ",", "0", ")", "# Keep in mind that the Variables are redundant because they are shared", "# across towers. So .. we will just return the first tower's pointer to", "# the Variable.", "v", "=", "grad_and_vars", "[", "0", "]", "[", "1", "]", "grad_and_var", "=", "(", "grad", ",", "v", ")", "average_grads", ".", "append", "(", "grad_and_var", ")", "return", "average_grads" ]
Calculate the average gradient for each shared variable across all towers. Note that this function provides a synchronization point across all towers. Args: tower_grads: List of lists of (gradient, variable) tuples. The outer list is over individual gradients. The inner list is over the gradient calculation for each tower. Returns: List of pairs of (gradient, variable) where the gradient has been averaged across all towers.
[ "Calculate", "the", "average", "gradient", "for", "each", "shared", "variable", "across", "all", "towers", "." ]
4d5774bde9ea6ce1113f77a069ffc605148482b8
https://github.com/255BITS/hyperchamber/blob/4d5774bde9ea6ce1113f77a069ffc605148482b8/examples/shared/ops.py#L264-L299
train
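`avg_grads` uses the same pre-1.0 `tf.concat(axis, values)` ordering; in modern TensorFlow the expand/concat/mean dance collapses into `tf.stack`. A sketch of the equivalent averaging, with no claim that this is the upstream implementation:

```python
import tensorflow as tf

def avg_grads_v2(tower_grads):
    """TF >= 1.0 sketch of the same per-variable gradient averaging."""
    average_grads = []
    for grad_and_vars in zip(*tower_grads):
        # Stack this variable's gradients from every tower, then
        # average over the new 'tower' axis.
        grad = tf.reduce_mean(tf.stack([g for g, _ in grad_and_vars]), axis=0)
        # The variable itself is shared, so tower 0's pointer suffices.
        average_grads.append((grad, grad_and_vars[0][1]))
    return average_grads
```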
kwlzn/blast
blast/main.py
unescape_utf8
def unescape_utf8(msg): ''' convert escaped unicode web entities to unicode ''' def sub(m): text = m.group(0) if text[:3] == "&#x": return unichr(int(text[3:-1], 16)) else: return unichr(int(text[2:-1])) return re.sub("&#?\w+;", sub, urllib.unquote(msg))
python
def unescape_utf8(msg): ''' convert escaped unicode web entities to unicode ''' def sub(m): text = m.group(0) if text[:3] == "&#x": return unichr(int(text[3:-1], 16)) else: return unichr(int(text[2:-1])) return re.sub("&#?\w+;", sub, urllib.unquote(msg))
[ "def", "unescape_utf8", "(", "msg", ")", ":", "def", "sub", "(", "m", ")", ":", "text", "=", "m", ".", "group", "(", "0", ")", "if", "text", "[", ":", "3", "]", "==", "\"&#x\"", ":", "return", "unichr", "(", "int", "(", "text", "[", "3", ":", "-", "1", "]", ",", "16", ")", ")", "else", ":", "return", "unichr", "(", "int", "(", "text", "[", "2", ":", "-", "1", "]", ")", ")", "return", "re", ".", "sub", "(", "\"&#?\\w+;\"", ",", "sub", ",", "urllib", ".", "unquote", "(", "msg", ")", ")" ]
convert escaped unicode web entities to unicode
[ "convert", "escaped", "unicode", "web", "entities", "to", "unicode" ]
ae18a19182a6884c453bf9b2a3c6386bd3b2655a
https://github.com/kwlzn/blast/blob/ae18a19182a6884c453bf9b2a3c6386bd3b2655a/blast/main.py#L41-L47
train
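`unescape_utf8` is Python 2 code (`unichr`, `urllib.unquote`). On Python 3 the standard library covers the same ground, and `html.unescape` additionally resolves named entities, which the original's numeric-only regex does not; a sketch:

```python
import html
from urllib.parse import unquote

def unescape_utf8_py3(msg):
    """Python 3 sketch: URL-decode, then resolve HTML entities
    (named, decimal, and hex numeric references)."""
    return html.unescape(unquote(msg))

print(unescape_utf8_py3('caf%C3%A9 &amp; &#x2014; &#8212;'))
# -> 'café & — —'
```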
KnightConan/sspdatatables
src/sspdatatables/utils/data_type_ensure.py
ensure
def ensure(data_type, check_value, default_value=None): """ Ensure the given check value is of the given data type: if so, return it directly; otherwise try to convert it, falling back to the default value :param data_type: the expected data type: can be int, str, list, tuple etc, any built-in or user-defined type :param check_value: the value to check :param default_value: None/ a value of the given data type: the fallback value :return: check value or default value """ if default_value is not None and not isinstance(default_value, data_type): raise ValueError("default_value must be the value in the given data " "type.") elif isinstance(check_value, data_type): return check_value try: new_value = data_type(check_value) except Exception: return default_value return new_value
python
def ensure(data_type, check_value, default_value=None): """ Ensure the given check value is of the given data type: if so, return it directly; otherwise try to convert it, falling back to the default value :param data_type: the expected data type: can be int, str, list, tuple etc, any built-in or user-defined type :param check_value: the value to check :param default_value: None/ a value of the given data type: the fallback value :return: check value or default value """ if default_value is not None and not isinstance(default_value, data_type): raise ValueError("default_value must be the value in the given data " "type.") elif isinstance(check_value, data_type): return check_value try: new_value = data_type(check_value) except Exception: return default_value return new_value
[ "def", "ensure", "(", "data_type", ",", "check_value", ",", "default_value", "=", "None", ")", ":", "if", "default_value", "is", "not", "None", "and", "not", "isinstance", "(", "default_value", ",", "data_type", ")", ":", "raise", "ValueError", "(", "\"default_value must be the value in the given data \"", "\"type.\"", ")", "elif", "isinstance", "(", "check_value", ",", "data_type", ")", ":", "return", "check_value", "try", ":", "new_value", "=", "data_type", "(", "check_value", ")", "except", "Exception", ":", "return", "default_value", "return", "new_value" ]
Ensure the given check value is of the given data type: if so, return it directly; otherwise try to convert it, falling back to the default value :param data_type: the expected data type: can be int, str, list, tuple etc, any built-in or user-defined type :param check_value: the value to check :param default_value: None/ a value of the given data type: the fallback value :return: check value or default value
[ "Ensure", "the", "given", "check", "value", "is", "of", "the", "given", "data", "type", ":", "if", "so", "return", "it", "directly", ";", "otherwise", "try", "to", "convert", "it", "falling", "back", "to", "the", "default", "value" ]
1179a11358734e5e472e5eee703e8d34fa49e9bf
https://github.com/KnightConan/sspdatatables/blob/1179a11358734e5e472e5eee703e8d34fa49e9bf/src/sspdatatables/utils/data_type_ensure.py#L7-L27
train
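A few calls that exercise each branch of `ensure`:

```python
# An instance of the requested type passes through untouched.
assert ensure(int, 3) == 3

# A convertible value is coerced via the type's constructor.
assert ensure(int, '42') == 42

# An unconvertible value falls back to the default.
assert ensure(int, 'abc', default_value=0) == 0

# A default of the wrong type is rejected up front.
try:
    ensure(int, '1', default_value='zero')
except ValueError as exc:
    print(exc)  # default_value must be the value in the given data type.
```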
edx/edx-celeryutils
celery_utils/tasks.py
mark_resolved
def mark_resolved(task_id): """ Mark the specified task as resolved in the FailedTask table. If more than one record exists with the specified task id, they will all be marked resolved. """ from . import models models.FailedTask.objects.filter(task_id=task_id, datetime_resolved=None).update(datetime_resolved=now())
python
def mark_resolved(task_id): """ Mark the specified task as resolved in the FailedTask table. If more than one record exists with the specified task id, they will all be marked resolved. """ from . import models models.FailedTask.objects.filter(task_id=task_id, datetime_resolved=None).update(datetime_resolved=now())
[ "def", "mark_resolved", "(", "task_id", ")", ":", "from", ".", "import", "models", "models", ".", "FailedTask", ".", "objects", ".", "filter", "(", "task_id", "=", "task_id", ",", "datetime_resolved", "=", "None", ")", ".", "update", "(", "datetime_resolved", "=", "now", "(", ")", ")" ]
Mark the specified task as resolved in the FailedTask table. If more than one record exists with the specified task id, they will all be marked resolved.
[ "Mark", "the", "specified", "task", "as", "resolved", "in", "the", "FailedTask", "table", "." ]
d8745f5f0929ad154fad779a19fbefe7f51e9498
https://github.com/edx/edx-celeryutils/blob/d8745f5f0929ad154fad779a19fbefe7f51e9498/celery_utils/tasks.py#L13-L21
train
flo-compbio/xlmhg
xlmhg/mhg.py
is_equal
def is_equal(a, b, tol): """Ratio test to check if two floating point numbers are equal. Parameters ---------- a: float The first floating point number. b: float The second floating point number. tol: float The tolerance used. Returns ------- bool Whether or not the two numbers are deemed equal. """ if a == b or abs(a-b) <= tol * max(abs(a), abs(b)): return True else: return False
python
def is_equal(a, b, tol): """Ratio test to check if two floating point numbers are equal. Parameters ---------- a: float The first floating point number. b: float The second floating point number. tol: float The tolerance used. Returns ------- bool Whether or not the two numbers are deemed equal. """ if a == b or abs(a-b) <= tol * max(abs(a), abs(b)): return True else: return False
[ "def", "is_equal", "(", "a", ",", "b", ",", "tol", ")", ":", "if", "a", "==", "b", "or", "abs", "(", "a", "-", "b", ")", "<=", "tol", "*", "max", "(", "abs", "(", "a", ")", ",", "abs", "(", "b", ")", ")", ":", "return", "True", "else", ":", "return", "False" ]
Ratio test to check if two floating point numbers are equal. Parameters ---------- a: float The first floating point number. b: float The second floating point number. tol: float The tolerance used. Returns ------- bool Whether or not the two numbers are deemed equal.
[ "Ratio", "test", "to", "check", "if", "two", "floating", "point", "numbers", "are", "equal", "." ]
8e5929ee1dc91b95e343b7a2b1b1d6664c4540a1
https://github.com/flo-compbio/xlmhg/blob/8e5929ee1dc91b95e343b7a2b1b1d6664c4540a1/xlmhg/mhg.py#L28-L48
train
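`is_equal` is a relative-tolerance (ratio) comparison, essentially what the standard library's `math.isclose` does with `rel_tol`, minus the absolute-tolerance floor that `isclose` adds:

```python
import math

a, b = 0.1 + 0.2, 0.3
print(a == b)                    # False: floating-point rounding
print(is_equal(a, b, 1e-9))      # True: within relative tolerance

# math.isclose applies the same ratio test.
print(math.isclose(a, b, rel_tol=1e-9))  # True
```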
Nic30/hwtGraph
hwtGraph/elk/containers/lNode.py
LNode.getPortSideView
def getPortSideView(self, side) -> List["LPort"]: """ Returns a sublist view for all ports of given side. :attention: Use this only after port sides are fixed! This is currently the case after running the {@link org.eclipse.elk.alg.layered.intermediate.PortListSorter}. Non-structural changes to this list are reflected in the original list. A structural modification is any operation that adds or deletes one or more elements; merely setting the value of an element is not a structural modification. Sublist indices can be cached using {@link LNode#cachePortSides()}. :param side: a port side :return: an iterable for the ports of given side """ if side == PortSide.WEST: return self.west elif side == PortSide.EAST: return self.east elif side == PortSide.NORTH: return self.north elif side == PortSide.SOUTH: return self.south else: raise ValueError(side)
python
def getPortSideView(self, side) -> List["LPort"]: """ Returns a sublist view for all ports of given side. :attention: Use this only after port sides are fixed! This is currently the case after running the {@link org.eclipse.elk.alg.layered.intermediate.PortListSorter}. Non-structural changes to this list are reflected in the original list. A structural modification is any operation that adds or deletes one or more elements; merely setting the value of an element is not a structural modification. Sublist indices can be cached using {@link LNode#cachePortSides()}. :param side: a port side :return: an iterable for the ports of given side """ if side == PortSide.WEST: return self.west elif side == PortSide.EAST: return self.east elif side == PortSide.NORTH: return self.north elif side == PortSide.SOUTH: return self.south else: raise ValueError(side)
[ "def", "getPortSideView", "(", "self", ",", "side", ")", "->", "List", "[", "\"LPort\"", "]", ":", "if", "side", "==", "PortSide", ".", "WEST", ":", "return", "self", ".", "west", "elif", "side", "==", "PortSide", ".", "EAST", ":", "return", "self", ".", "east", "elif", "side", "==", "PortSide", ".", "NORTH", ":", "return", "self", ".", "north", "elif", "side", "==", "PortSide", ".", "SOUTH", ":", "return", "self", ".", "south", "else", ":", "raise", "ValueError", "(", "side", ")" ]
Returns a sublist view for all ports of given side. :attention: Use this only after port sides are fixed! This is currently the case after running the {@link org.eclipse.elk.alg.layered.intermediate.PortListSorter}. Non-structural changes to this list are reflected in the original list. A structural modification is any operation that adds or deletes one or more elements; merely setting the value of an element is not a structural modification. Sublist indices can be cached using {@link LNode#cachePortSides()}. :param side: a port side :return: an iterable for the ports of given side
[ "Returns", "a", "sublist", "view", "for", "all", "ports", "of", "given", "side", "." ]
6b7d4fdd759f263a0fdd2736f02f123e44e4354f
https://github.com/Nic30/hwtGraph/blob/6b7d4fdd759f263a0fdd2736f02f123e44e4354f/hwtGraph/elk/containers/lNode.py#L53-L76
train
Nic30/hwtGraph
hwtGraph/elk/containers/lNode.py
LNode.iterEdges
def iterEdges(self, filterSelfLoops=False): """ Iter edges connected from outside of this unit """ for p in self.iterPorts(): yield from p.iterEdges(filterSelfLoops=filterSelfLoops)
python
def iterEdges(self, filterSelfLoops=False): """ Iter edges connected from outside of this unit """ for p in self.iterPorts(): yield from p.iterEdges(filterSelfLoops=filterSelfLoops)
[ "def", "iterEdges", "(", "self", ",", "filterSelfLoops", "=", "False", ")", ":", "for", "p", "in", "self", ".", "iterPorts", "(", ")", ":", "yield", "from", "p", ".", "iterEdges", "(", "filterSelfLoops", "=", "filterSelfLoops", ")" ]
Iter edges connected from outside of this unit
[ "Iter", "edges", "connected", "from", "outside", "of", "this", "unit" ]
6b7d4fdd759f263a0fdd2736f02f123e44e4354f
https://github.com/Nic30/hwtGraph/blob/6b7d4fdd759f263a0fdd2736f02f123e44e4354f/hwtGraph/elk/containers/lNode.py#L94-L99
train
albert12132/templar
templar/linker.py
link
def link(source_path): """Links the content found at source_path and returns a Block that represents the content.""" if not os.path.isfile(source_path): raise SourceNotFound(source_path) with open(source_path, 'r') as f: content = f.read() block_map = BlockMap() # The map will be populated with the following function call. all_block = convert_lines_to_block( content.splitlines(), block_map, LinkStack(source_path), source_path) return all_block, block_map.get_variables()
python
def link(source_path): """Links the content found at source_path and returns a Block that represents the content.""" if not os.path.isfile(source_path): raise SourceNotFound(source_path) with open(source_path, 'r') as f: content = f.read() block_map = BlockMap() # The map will be populated with the following function call. all_block = convert_lines_to_block( content.splitlines(), block_map, LinkStack(source_path), source_path) return all_block, block_map.get_variables()
[ "def", "link", "(", "source_path", ")", ":", "if", "not", "os", ".", "path", ".", "isfile", "(", "source_path", ")", ":", "raise", "SourceNotFound", "(", "source_path", ")", "with", "open", "(", "source_path", ",", "'r'", ")", "as", "f", ":", "content", "=", "f", ".", "read", "(", ")", "block_map", "=", "BlockMap", "(", ")", "# The map will be populated with the following function call.", "all_block", "=", "convert_lines_to_block", "(", "content", ".", "splitlines", "(", ")", ",", "block_map", ",", "LinkStack", "(", "source_path", ")", ",", "source_path", ")", "return", "all_block", ",", "block_map", ".", "get_variables", "(", ")" ]
Links the content found at source_path and returns a Block that represents the content.
[ "Links", "the", "content", "found", "at", "source_path", "and", "returns", "a", "Block", "that", "represents", "the", "content", "." ]
39851c89730ab69e5c73d0a46adca2a44ecc4165
https://github.com/albert12132/templar/blob/39851c89730ab69e5c73d0a46adca2a44ecc4165/templar/linker.py#L10-L19
train
albert12132/templar
templar/linker.py
process_links
def process_links(include_match, block_map, link_stack, source_path): """Process a string of content for include tags. This function assumes there are no blocks in the content. The content is split into segments, with include tags being replaced by Block objects. PARAMETERS: include_match -- re match object; the match for an include tag. block_map -- BlockMap link_stack -- LinkStack source_path -- str; the filepath of the file from which this content came. RETURNS: list of str; segments that comprise the content. """ leading_whitespace = include_match.group(1) include_path = include_match.group(2) # Optional block name. If match is None, block name was omitted (defaults to 'all'). block_name = include_match.group(3) if block_name is not None: block_name = block_name.lstrip(':') else: block_name = ALL_BLOCK_NAME return retrieve_block_from_map( source_path, include_path.strip(), block_name.strip(), leading_whitespace, block_map, link_stack)
python
def process_links(include_match, block_map, link_stack, source_path): """Process a string of content for include tags. This function assumes there are no blocks in the content. The content is split into segments, with include tags being replaced by Block objects. PARAMETERS: include_match -- re match object; the match for an include tag. block_map -- BlockMap link_stack -- LinkStack source_path -- str; the filepath of the file from which this content came. RETURNS: list of str; segments that comprise the content. """ leading_whitespace = include_match.group(1) include_path = include_match.group(2) # Optional block name. If match is None, block name was omitted (defaults to 'all'). block_name = include_match.group(3) if block_name is not None: block_name = block_name.lstrip(':') else: block_name = ALL_BLOCK_NAME return retrieve_block_from_map( source_path, include_path.strip(), block_name.strip(), leading_whitespace, block_map, link_stack)
[ "def", "process_links", "(", "include_match", ",", "block_map", ",", "link_stack", ",", "source_path", ")", ":", "leading_whitespace", "=", "include_match", ".", "group", "(", "1", ")", "include_path", "=", "include_match", ".", "group", "(", "2", ")", "# Optional block name. If match is None, block name was omitted (defaults to 'all').", "block_name", "=", "include_match", ".", "group", "(", "3", ")", "if", "block_name", "is", "not", "None", ":", "block_name", "=", "block_name", ".", "lstrip", "(", "':'", ")", "else", ":", "block_name", "=", "ALL_BLOCK_NAME", "return", "retrieve_block_from_map", "(", "source_path", ",", "include_path", ".", "strip", "(", ")", ",", "block_name", ".", "strip", "(", ")", ",", "leading_whitespace", ",", "block_map", ",", "link_stack", ")" ]
Process a string of content for include tags. This function assumes there are no blocks in the content. The content is split into segments, with include tags being replaced by Block objects. PARAMETERS: include_match -- re match object; the match for an include tag. block_map -- BlockMap link_stack -- LinkStack source_path -- str; the filepath of the file from which this content came. RETURNS: list of str; segments that comprise the content.
[ "Process", "a", "string", "of", "content", "for", "include", "tags", "." ]
39851c89730ab69e5c73d0a46adca2a44ecc4165
https://github.com/albert12132/templar/blob/39851c89730ab69e5c73d0a46adca2a44ecc4165/templar/linker.py#L232-L263
train
althonos/moclo
moclo/moclo/_utils.py
catch_warnings
def catch_warnings(action, category=Warning, lineno=0, append=False): """Wrap the function in a `warnings.catch_warnings` context. It can be used to silence some specific warnings, or instead treat them as errors within the function body. Example: >>> import warnings >>> from moclo.utils import catch_warnings >>> @catch_warnings('ignore') ... def are_you_scared(): ... warnings.warn("I'm warning you !") ... return False >>> are_you_scared() False """ def decorator(func): @functools.wraps(func) def newfunc(*args, **kwargs): with warnings.catch_warnings(): warnings.simplefilter(action, category, lineno, append) return func(*args, **kwargs) return newfunc return decorator
python
def catch_warnings(action, category=Warning, lineno=0, append=False): """Wrap the function in a `warnings.catch_warnings` context. It can be used to silence some specific warnings, or instead treat them as errors within the function body. Example: >>> import warnings >>> from moclo.utils import catch_warnings >>> @catch_warnings('ignore') ... def are_you_scared(): ... warnings.warn("I'm warning you !") ... return False >>> are_you_scared() False """ def decorator(func): @functools.wraps(func) def newfunc(*args, **kwargs): with warnings.catch_warnings(): warnings.simplefilter(action, category, lineno, append) return func(*args, **kwargs) return newfunc return decorator
[ "def", "catch_warnings", "(", "action", ",", "category", "=", "Warning", ",", "lineno", "=", "0", ",", "append", "=", "False", ")", ":", "def", "decorator", "(", "func", ")", ":", "@", "functools", ".", "wraps", "(", "func", ")", "def", "newfunc", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "with", "warnings", ".", "catch_warnings", "(", ")", ":", "warnings", ".", "simplefilter", "(", "action", ",", "category", ",", "lineno", ",", "append", ")", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "newfunc", "return", "decorator" ]
Wrap the function in a `warnings.catch_warnings` context. It can be used to silence some specific warnings, or instead treat them as errors within the function body. Example: >>> import warnings >>> from moclo.utils import catch_warnings >>> @catch_warnings('ignore') ... def are_you_scared(): ... warnings.warn("I'm warning you !") ... return False >>> are_you_scared() False
[ "Wrap", "the", "function", "in", "a", "warnings", ".", "catch_warnings", "context", "." ]
28a03748df8a2fa43f0c0c8098ca64d11559434e
https://github.com/althonos/moclo/blob/28a03748df8a2fa43f0c0c8098ca64d11559434e/moclo/moclo/_utils.py#L27-L54
train
reorx/torext
torext/app.py
_guess_caller
def _guess_caller(): """ try to guess which module imports app.py """ import inspect global _caller_path caller = inspect.stack()[1] caller_module = inspect.getmodule(caller[0]) if hasattr(caller_module, '__file__'): _caller_path = os.path.abspath(caller_module.__file__) return _caller_path
python
def _guess_caller(): """ try to guess which module imports app.py """ import inspect global _caller_path caller = inspect.stack()[1] caller_module = inspect.getmodule(caller[0]) if hasattr(caller_module, '__file__'): _caller_path = os.path.abspath(caller_module.__file__) return _caller_path
[ "def", "_guess_caller", "(", ")", ":", "import", "inspect", "global", "_caller_path", "caller", "=", "inspect", ".", "stack", "(", ")", "[", "1", "]", "caller_module", "=", "inspect", ".", "getmodule", "(", "caller", "[", "0", "]", ")", "if", "hasattr", "(", "caller_module", ",", "'__file__'", ")", ":", "_caller_path", "=", "os", ".", "path", ".", "abspath", "(", "caller_module", ".", "__file__", ")", "return", "_caller_path" ]
try to guess which module imports app.py
[ "try", "to", "guess", "which", "module", "imports", "app", ".", "py" ]
84c4300ebc7fab0dbd11cf8b020bc7d4d1570171
https://github.com/reorx/torext/blob/84c4300ebc7fab0dbd11cf8b020bc7d4d1570171/torext/app.py#L564-L575
train
reorx/torext
torext/app.py
TorextApp._fix_paths
def _fix_paths(self, options): """ fix `static_path` and `template_path` to be absolute paths according to self.root_path so that PWD can be ignored. """ for k in ('template_path', 'static_path'): if k in options: v = options.pop(k) if v is None: continue if not os.path.isabs(v): v = os.path.abspath( os.path.join(self.root_path, v)) app_log.debug('Fix %s to be absolute: %s' % (k, v)) options[k] = v
python
def _fix_paths(self, options): """ fix `static_path` and `template_path` to be absolute paths according to self.root_path so that PWD can be ignored. """ for k in ('template_path', 'static_path'): if k in options: v = options.pop(k) if v is None: continue if not os.path.isabs(v): v = os.path.abspath( os.path.join(self.root_path, v)) app_log.debug('Fix %s to be absolute: %s' % (k, v)) options[k] = v
[ "def", "_fix_paths", "(", "self", ",", "options", ")", ":", "for", "k", "in", "(", "'template_path'", ",", "'static_path'", ")", ":", "if", "k", "in", "options", ":", "v", "=", "options", ".", "pop", "(", "k", ")", "if", "v", "is", "None", ":", "continue", "if", "not", "os", ".", "path", ".", "isabs", "(", "v", ")", ":", "v", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "self", ".", "root_path", ",", "v", ")", ")", "app_log", ".", "debug", "(", "'Fix %s to be absolute: %s'", "%", "(", "k", ",", "v", ")", ")", "options", "[", "k", "]", "=", "v" ]
fix `static_path` and `template_path` to be absolute paths according to self.root_path so that PWD can be ignored.
[ "fix", "static_path", "and", "template_path", "to", "be", "absolute", "paths", "according", "to", "self", ".", "root_path", "so", "that", "PWD", "can", "be", "ignored", "." ]
84c4300ebc7fab0dbd11cf8b020bc7d4d1570171
https://github.com/reorx/torext/blob/84c4300ebc7fab0dbd11cf8b020bc7d4d1570171/torext/app.py#L190-L204
train
reorx/torext
torext/app.py
TorextApp.route
def route(self, url, host=None): """Decorator that registers a handler class for the given url and optional host """ def fn(handler_cls): handlers = self._get_handlers_on_host(host) handlers.insert(0, (url, handler_cls)) return handler_cls return fn
python
def route(self, url, host=None): """Decorator that registers a handler class for the given url and optional host """ def fn(handler_cls): handlers = self._get_handlers_on_host(host) handlers.insert(0, (url, handler_cls)) return handler_cls return fn
[ "def", "route", "(", "self", ",", "url", ",", "host", "=", "None", ")", ":", "def", "fn", "(", "handler_cls", ")", ":", "handlers", "=", "self", ".", "_get_handlers_on_host", "(", "host", ")", "handlers", ".", "insert", "(", "0", ",", "(", "url", ",", "handler_cls", ")", ")", "return", "handler_cls", "return", "fn" ]
Decorator that registers a handler class for the given url and optional host
[ "Decorator", "that", "registers", "a", "handler", "class", "for", "the", "given", "url", "and", "optional", "host" ]
84c4300ebc7fab0dbd11cf8b020bc7d4d1570171
https://github.com/reorx/torext/blob/84c4300ebc7fab0dbd11cf8b020bc7d4d1570171/torext/app.py#L214-L221
train
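A usage sketch for the `route` decorator, assuming an already-constructed `TorextApp` named `app` and plain Tornado handlers; the URLs and host pattern are invented:

```python
import tornado.web

@app.route('/ping')
class PingHandler(tornado.web.RequestHandler):
    def get(self):
        self.write('pong')

# With the optional host argument, the rule is registered only for
# that virtual host.
@app.route('/admin', host=r'admin\.example\.com')
class AdminHandler(tornado.web.RequestHandler):
    def get(self):
        self.write('admin')
```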
reorx/torext
torext/app.py
TorextApp.command_line_config
def command_line_config(self): """ settings.py is the basis. If you want to change settings by command line arguments, an existing option will be transformed to the type of its value in settings.py; a nonexistent option will be treated as a string by default, and transformed to a certain type if `!<type>` was added after the value. example: $ python app.py --PORT=1000 NOTE This method is deprecated, use `torext.script` to parse command line arguments instead. """ args = sys.argv[1:] args_dict = {} existed_keys = [] new_keys = [] for t in args: if not t.startswith('--'): raise errors.ArgsParseError('Bad arg: %s' % t) try: key, value = tuple(t[2:].split('=')) except: raise errors.ArgsParseError('Bad arg: %s' % t) args_dict[key] = value if key in settings: existed_keys.append(key) else: new_keys.append(key) if existed_keys: app_log.debug('Changed settings:') for i in existed_keys: before = settings[i] type_ = type(before) if type_ is bool: if args_dict[i] == 'True': _value = True elif args_dict[i] == 'False': _value = False else: raise errors.ArgsParseError('%s should only be True or False' % i) else: _value = type_(args_dict[i]) settings[i] = _value app_log.debug(' %s [%s]%s (%s)', i, type(settings[i]), settings[i], before) if new_keys: app_log.debug('New settings:') for i in new_keys: settings[i] = args_dict[i] app_log.debug(' %s %s', i, args_dict[i]) # NOTE if ``command_line_config`` is called, logging must be re-configed self.update_settings({})
python
def command_line_config(self): """ settings.py is the basis. If you want to change settings by command line arguments, an existing option will be transformed to the type of its value in settings.py; a nonexistent option will be treated as a string by default, and transformed to a certain type if `!<type>` was added after the value. example: $ python app.py --PORT=1000 NOTE This method is deprecated, use `torext.script` to parse command line arguments instead. """ args = sys.argv[1:] args_dict = {} existed_keys = [] new_keys = [] for t in args: if not t.startswith('--'): raise errors.ArgsParseError('Bad arg: %s' % t) try: key, value = tuple(t[2:].split('=')) except: raise errors.ArgsParseError('Bad arg: %s' % t) args_dict[key] = value if key in settings: existed_keys.append(key) else: new_keys.append(key) if existed_keys: app_log.debug('Changed settings:') for i in existed_keys: before = settings[i] type_ = type(before) if type_ is bool: if args_dict[i] == 'True': _value = True elif args_dict[i] == 'False': _value = False else: raise errors.ArgsParseError('%s should only be True or False' % i) else: _value = type_(args_dict[i]) settings[i] = _value app_log.debug(' %s [%s]%s (%s)', i, type(settings[i]), settings[i], before) if new_keys: app_log.debug('New settings:') for i in new_keys: settings[i] = args_dict[i] app_log.debug(' %s %s', i, args_dict[i]) # NOTE if ``command_line_config`` is called, logging must be re-configed self.update_settings({})
[ "def", "command_line_config", "(", "self", ")", ":", "args", "=", "sys", ".", "argv", "[", "1", ":", "]", "args_dict", "=", "{", "}", "existed_keys", "=", "[", "]", "new_keys", "=", "[", "]", "for", "t", "in", "args", ":", "if", "not", "t", ".", "startswith", "(", "'--'", ")", ":", "raise", "errors", ".", "ArgsParseError", "(", "'Bad arg: %s'", "%", "t", ")", "try", ":", "key", ",", "value", "=", "tuple", "(", "t", "[", "2", ":", "]", ".", "split", "(", "'='", ")", ")", "except", ":", "raise", "errors", ".", "ArgsParseError", "(", "'Bad arg: %s'", "%", "t", ")", "args_dict", "[", "key", "]", "=", "value", "if", "key", "in", "settings", ":", "existed_keys", ".", "append", "(", "key", ")", "else", ":", "new_keys", ".", "append", "(", "key", ")", "if", "existed_keys", ":", "app_log", ".", "debug", "(", "'Changed settings:'", ")", "for", "i", "in", "existed_keys", ":", "before", "=", "settings", "[", "i", "]", "type_", "=", "type", "(", "before", ")", "if", "type_", "is", "bool", ":", "if", "args_dict", "[", "i", "]", "==", "'True'", ":", "_value", "=", "True", "elif", "args_dict", "[", "i", "]", "==", "'False'", ":", "_value", "=", "False", "else", ":", "raise", "errors", ".", "ArgsParseError", "(", "'%s should only be True or False'", "%", "i", ")", "else", ":", "_value", "=", "type_", "(", "args_dict", "[", "i", "]", ")", "settings", "[", "i", "]", "=", "_value", "app_log", ".", "debug", "(", "' %s [%s]%s (%s)'", ",", "i", ",", "type", "(", "settings", "[", "i", "]", ")", ",", "settings", "[", "i", "]", ",", "before", ")", "if", "new_keys", ":", "app_log", ".", "debug", "(", "'New settings:'", ")", "for", "i", "in", "new_keys", ":", "settings", "[", "i", "]", "=", "args_dict", "[", "i", "]", "app_log", ".", "debug", "(", "' %s %s'", ",", "i", ",", "args_dict", "[", "i", "]", ")", "# NOTE if ``command_line_config`` is called, logging must be re-configed", "self", ".", "update_settings", "(", "{", "}", ")" ]
settings.py is the basis. If you want to change settings by command line arguments, an existing option will be transformed to the type of its value in settings.py; a nonexistent option will be treated as a string by default, and transformed to a certain type if `!<type>` was added after the value. example: $ python app.py --PORT=1000 NOTE This method is deprecated, use `torext.script` to parse command line arguments instead.
[ "settings", ".", "py", "is", "the", "basis", "." ]
84c4300ebc7fab0dbd11cf8b020bc7d4d1570171
https://github.com/reorx/torext/blob/84c4300ebc7fab0dbd11cf8b020bc7d4d1570171/torext/app.py#L266-L325
train
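How `command_line_config` behaves at the shell, given a hypothetical settings.py with `PORT = 8000` (int) and `DEBUG = True` (bool):

```python
# Called once before app.run(); it reads sys.argv directly.
app.command_line_config()

# $ python app.py --PORT=1000      # cast to int, since PORT is an int
# $ python app.py --DEBUG=False    # bool settings accept only True/False
# $ python app.py --GREETING=hi    # unknown key, stored as a string
```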
reorx/torext
torext/app.py
TorextApp.setup
def setup(self): """This function will be called both before `run` and before testing starts. """ testing = settings.get('TESTING') if testing: # Fix nose handler in testing situation. config = settings['LOGGERS'].get('', {}) set_nose_formatter(config) #print('testing, set nose formatter: {}'.format(config)) # reset timezone os.environ['TZ'] = settings['TIME_ZONE'] time.tzset() # determine project name if settings._module: project = os.path.split(self.root_path)[1] if settings['PROJECT']: assert settings['PROJECT'] == project, 'PROJECT specified in settings (%s) '\ 'should be the same as project directory name (%s)' % (settings['PROJECT'], project) else: settings['PROJECT'] = project # PROJECT should be importable as a python module if settings['PROJECT']: # add upper directory path to sys.path if not in if settings._module: _abs = os.path.abspath parent_path = os.path.dirname(self.root_path) if not _abs(parent_path) in [_abs(i) for i in sys.path]: sys.path.insert(0, parent_path) app_log.info('Add %s to sys.path' % _abs(parent_path)) try: __import__(settings['PROJECT']) app_log.debug('import package `%s` succeeded' % settings['PROJECT']) except ImportError: raise ImportError('PROJECT could not be imported, maybe app.py is outside the project ' 'or there is no __init__ in the package.') self.is_setuped = True
python
def setup(self): """This function will be called both before `run` and before testing starts. """ testing = settings.get('TESTING') if testing: # Fix nose handler in testing situation. config = settings['LOGGERS'].get('', {}) set_nose_formatter(config) #print('testing, set nose formatter: {}'.format(config)) # reset timezone os.environ['TZ'] = settings['TIME_ZONE'] time.tzset() # determine project name if settings._module: project = os.path.split(self.root_path)[1] if settings['PROJECT']: assert settings['PROJECT'] == project, 'PROJECT specified in settings (%s) '\ 'should be the same as project directory name (%s)' % (settings['PROJECT'], project) else: settings['PROJECT'] = project # PROJECT should be importable as a python module if settings['PROJECT']: # add upper directory path to sys.path if not in if settings._module: _abs = os.path.abspath parent_path = os.path.dirname(self.root_path) if not _abs(parent_path) in [_abs(i) for i in sys.path]: sys.path.insert(0, parent_path) app_log.info('Add %s to sys.path' % _abs(parent_path)) try: __import__(settings['PROJECT']) app_log.debug('import package `%s` succeeded' % settings['PROJECT']) except ImportError: raise ImportError('PROJECT could not be imported, maybe app.py is outside the project ' 'or there is no __init__ in the package.') self.is_setuped = True
[ "def", "setup", "(", "self", ")", ":", "testing", "=", "settings", ".", "get", "(", "'TESTING'", ")", "if", "testing", ":", "# Fix nose handler in testing situation.", "config", "=", "settings", "[", "'LOGGERS'", "]", ".", "get", "(", "''", ",", "{", "}", ")", "set_nose_formatter", "(", "config", ")", "#print('testing, set nose formatter: {}'.format(config))", "# reset timezone", "os", ".", "environ", "[", "'TZ'", "]", "=", "settings", "[", "'TIME_ZONE'", "]", "time", ".", "tzset", "(", ")", "# determine project name", "if", "settings", ".", "_module", ":", "project", "=", "os", ".", "path", ".", "split", "(", "self", ".", "root_path", ")", "[", "1", "]", "if", "settings", "[", "'PROJECT'", "]", ":", "assert", "settings", "[", "'PROJECT'", "]", "==", "project", ",", "'PROJECT specified in settings (%s) '", "'should be the same as project directory name (%s)'", "%", "(", "settings", "[", "'PROJECT'", "]", ",", "project", ")", "else", ":", "settings", "[", "'PROJECT'", "]", "=", "project", "# PROJECT should be importable as a python module", "if", "settings", "[", "'PROJECT'", "]", ":", "# add upper directory path to sys.path if not in", "if", "settings", ".", "_module", ":", "_abs", "=", "os", ".", "path", ".", "abspath", "parent_path", "=", "os", ".", "path", ".", "dirname", "(", "self", ".", "root_path", ")", "if", "not", "_abs", "(", "parent_path", ")", "in", "[", "_abs", "(", "i", ")", "for", "i", "in", "sys", ".", "path", "]", ":", "sys", ".", "path", ".", "insert", "(", "0", ",", "parent_path", ")", "app_log", ".", "info", "(", "'Add %s to sys.path'", "%", "_abs", "(", "parent_path", ")", ")", "try", ":", "__import__", "(", "settings", "[", "'PROJECT'", "]", ")", "app_log", ".", "debug", "(", "'import package `%s` succeeded'", "%", "settings", "[", "'PROJECT'", "]", ")", "except", "ImportError", ":", "raise", "ImportError", "(", "'PROJECT could not be imported, maybe app.py is outside the project '", "'or there is no __init__ in the package.'", ")", "self", ".", "is_setuped", "=", "True" ]
This function will be called both before `run` and before testing starts.
[ "This", "function", "will", "be", "called", "both", "before", "run", "and", "testing", "started", "." ]
84c4300ebc7fab0dbd11cf8b020bc7d4d1570171
https://github.com/reorx/torext/blob/84c4300ebc7fab0dbd11cf8b020bc7d4d1570171/torext/app.py#L327-L367
train
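For context, the sys.path manipulation in setup above is what makes the project package importable when app.py lives inside it. A minimal standalone sketch of the same idea; the function name and root_path argument are illustrative, not part of the source:

import os
import sys

def make_project_importable(root_path):
    # Insert the project's parent directory into sys.path (if absent)
    # so that ``import <project>`` resolves to the package directory.
    root_path = os.path.abspath(root_path)
    project = os.path.split(root_path)[1]
    parent_path = os.path.dirname(root_path)
    if parent_path not in [os.path.abspath(p) for p in sys.path]:
        sys.path.insert(0, parent_path)
    __import__(project)  # raises ImportError if <project>/__init__.py is missing
    return project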
reorx/torext
torext/app.py
TorextApp._init_application
def _init_application(self, application=None):
        """Initialize the application object for the torext app. If an existing
        application is passed, use it directly without making a new one"""
        if application:
            self.application = application
        else:
            self.application = self.make_application()
python
def _init_application(self, application=None):
        """Initialize the application object for the torext app. If an existing
        application is passed, use it directly without making a new one"""
        if application:
            self.application = application
        else:
            self.application = self.make_application()
[ "def", "_init_application", "(", "self", ",", "application", "=", "None", ")", ":", "if", "application", ":", "self", ".", "application", "=", "application", "else", ":", "self", ".", "application", "=", "self", ".", "make_application", "(", ")" ]
Initialize the application object for the torext app. If an existing application is passed, use it directly without making a new one
[ "Initialize", "application", "object", "for", "torext", "app", "if", "a", "existed", "application", "is", "passed", "then", "just", "use", "this", "one", "without", "make", "a", "new", "one" ]
84c4300ebc7fab0dbd11cf8b020bc7d4d1570171
https://github.com/reorx/torext/blob/84c4300ebc7fab0dbd11cf8b020bc7d4d1570171/torext/app.py#L530-L536
train
reorx/torext
torext/app.py
TorextApp._log_function
def _log_function(self, handler):
        """Override Application.log_function so that what to log can be controlled.
        """
        if handler.get_status() < 400:
            log_method = request_log.info
        elif handler.get_status() < 500:
            log_method = request_log.warning
        else:
            log_method = request_log.error

        for i in settings['LOGGING_IGNORE_URLS']:
            if handler.request.uri.startswith(i):
                log_method = request_log.debug
                break

        request_time = 1000.0 * handler.request.request_time()
        log_method("%d %s %.2fms", handler.get_status(),
                   handler._request_summary(), request_time)
python
def _log_function(self, handler):
        """Override Application.log_function so that what to log can be controlled.
        """
        if handler.get_status() < 400:
            log_method = request_log.info
        elif handler.get_status() < 500:
            log_method = request_log.warning
        else:
            log_method = request_log.error

        for i in settings['LOGGING_IGNORE_URLS']:
            if handler.request.uri.startswith(i):
                log_method = request_log.debug
                break

        request_time = 1000.0 * handler.request.request_time()
        log_method("%d %s %.2fms", handler.get_status(),
                   handler._request_summary(), request_time)
[ "def", "_log_function", "(", "self", ",", "handler", ")", ":", "if", "handler", ".", "get_status", "(", ")", "<", "400", ":", "log_method", "=", "request_log", ".", "info", "elif", "handler", ".", "get_status", "(", ")", "<", "500", ":", "log_method", "=", "request_log", ".", "warning", "else", ":", "log_method", "=", "request_log", ".", "error", "for", "i", "in", "settings", "[", "'LOGGING_IGNORE_URLS'", "]", ":", "if", "handler", ".", "request", ".", "uri", ".", "startswith", "(", "i", ")", ":", "log_method", "=", "request_log", ".", "debug", "break", "request_time", "=", "1000.0", "*", "handler", ".", "request", ".", "request_time", "(", ")", "log_method", "(", "\"%d %s %.2fms\"", ",", "handler", ".", "get_status", "(", ")", ",", "handler", ".", "_request_summary", "(", ")", ",", "request_time", ")" ]
Override Application.log_function so that what to log can be controlled.
[ "Override", "Application", ".", "log_function", "so", "that", "what", "to", "log", "can", "be", "controlled", "." ]
84c4300ebc7fab0dbd11cf8b020bc7d4d1570171
https://github.com/reorx/torext/blob/84c4300ebc7fab0dbd11cf8b020bc7d4d1570171/torext/app.py#L542-L558
train
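The threshold logic in _log_function maps HTTP status codes to log severities, with an override for ignored URL prefixes. A self-contained sketch of the same mapping using the stdlib logging module; Tornado's request_log and settings are replaced by illustrative stand-ins:

import logging

request_log = logging.getLogger('request')

def pick_log_method(status, uri, ignore_prefixes=()):
    # 2xx/3xx -> info, 4xx -> warning, 5xx -> error;
    # anything under an ignored URL prefix is demoted to debug.
    if status < 400:
        method = request_log.info
    elif status < 500:
        method = request_log.warning
    else:
        method = request_log.error
    if any(uri.startswith(p) for p in ignore_prefixes):
        method = request_log.debug
    return method

# pick_log_method(503, '/healthz', ignore_prefixes=('/healthz',))
# returns request_log.debug despite the 5xx status.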
255BITS/hyperchamber
examples/shared/variational_autoencoder.py
xavier_init
def xavier_init(fan_in, fan_out, constant=1):
    """ Xavier initialization of network weights"""
    # https://stackoverflow.com/questions/33640581/how-to-do-xavier-initialization-on-tensorflow
    low = -constant*np.sqrt(6.0/(fan_in + fan_out))
    high = constant*np.sqrt(6.0/(fan_in + fan_out))
    return tf.random_uniform((fan_in, fan_out),
                             minval=low, maxval=high,
                             dtype=tf.float32)
python
def xavier_init(fan_in, fan_out, constant=1):
    """ Xavier initialization of network weights"""
    # https://stackoverflow.com/questions/33640581/how-to-do-xavier-initialization-on-tensorflow
    low = -constant*np.sqrt(6.0/(fan_in + fan_out))
    high = constant*np.sqrt(6.0/(fan_in + fan_out))
    return tf.random_uniform((fan_in, fan_out),
                             minval=low, maxval=high,
                             dtype=tf.float32)
[ "def", "xavier_init", "(", "fan_in", ",", "fan_out", ",", "constant", "=", "1", ")", ":", "# https://stackoverflow.com/questions/33640581/how-to-do-xavier-initialization-on-tensorflow", "low", "=", "-", "constant", "*", "np", ".", "sqrt", "(", "6.0", "/", "(", "fan_in", "+", "fan_out", ")", ")", "high", "=", "constant", "*", "np", ".", "sqrt", "(", "6.0", "/", "(", "fan_in", "+", "fan_out", ")", ")", "return", "tf", ".", "random_uniform", "(", "(", "fan_in", ",", "fan_out", ")", ",", "minval", "=", "low", ",", "maxval", "=", "high", ",", "dtype", "=", "tf", ".", "float32", ")" ]
Xavier initialization of network weights
[ "Xavier", "initialization", "of", "network", "weights" ]
4d5774bde9ea6ce1113f77a069ffc605148482b8
https://github.com/255BITS/hyperchamber/blob/4d5774bde9ea6ce1113f77a069ffc605148482b8/examples/shared/variational_autoencoder.py#L8-L15
train
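xavier_init above samples from U(-b, b) with b = constant * sqrt(6 / (fan_in + fan_out)), the Glorot/Xavier uniform bound; tf.random_uniform is TensorFlow 1.x API. A NumPy-only sketch of the same initializer, for illustration (the function name is invented):

import numpy as np

def xavier_uniform(fan_in, fan_out, constant=1, rng=None):
    # Glorot & Bengio (2010): this bound keeps activation variance
    # roughly constant across layers for tanh-like nonlinearities.
    if rng is None:
        rng = np.random.default_rng()
    bound = constant * np.sqrt(6.0 / (fan_in + fan_out))
    return rng.uniform(-bound, bound, size=(fan_in, fan_out)).astype(np.float32)

# W = xavier_uniform(784, 256)  # weights for a 784 -> 256 layer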
255BITS/hyperchamber
examples/shared/variational_autoencoder.py
VariationalAutoencoder.partial_fit
def partial_fit(self, X):
        """Train model based on mini-batch of input data.
        Return cost of mini-batch.
        """
        opt, cost = self.sess.run((self.optimizer, self.cost),
                                  feed_dict={self.x: X})
        return cost
python
def partial_fit(self, X):
        """Train model based on mini-batch of input data.
        Return cost of mini-batch.
        """
        opt, cost = self.sess.run((self.optimizer, self.cost),
                                  feed_dict={self.x: X})
        return cost
[ "def", "partial_fit", "(", "self", ",", "X", ")", ":", "opt", ",", "cost", "=", "self", ".", "sess", ".", "run", "(", "(", "self", ".", "optimizer", ",", "self", ".", "cost", ")", ",", "feed_dict", "=", "{", "self", ".", "x", ":", "X", "}", ")", "return", "cost" ]
Train model based on mini-batch of input data. Return cost of mini-batch.
[ "Train", "model", "based", "on", "mini", "-", "batch", "of", "input", "data", ".", "Return", "cost", "of", "mini", "-", "batch", "." ]
4d5774bde9ea6ce1113f77a069ffc605148482b8
https://github.com/255BITS/hyperchamber/blob/4d5774bde9ea6ce1113f77a069ffc605148482b8/examples/shared/variational_autoencoder.py#L155-L162
train
255BITS/hyperchamber
examples/shared/variational_autoencoder.py
VariationalAutoencoder.transform
def transform(self, X):
        """Transform data by mapping it into the latent space."""
        # Note: This maps to mean of distribution, we could alternatively
        # sample from Gaussian distribution
        return self.sess.run(self.z_mean, feed_dict={self.x: X})
python
def transform(self, X):
        """Transform data by mapping it into the latent space."""
        # Note: This maps to mean of distribution, we could alternatively
        # sample from Gaussian distribution
        return self.sess.run(self.z_mean, feed_dict={self.x: X})
[ "def", "transform", "(", "self", ",", "X", ")", ":", "# Note: This maps to mean of distribution, we could alternatively", "# sample from Gaussian distribution", "return", "self", ".", "sess", ".", "run", "(", "self", ".", "z_mean", ",", "feed_dict", "=", "{", "self", ".", "x", ":", "X", "}", ")" ]
Transform data by mapping it into the latent space.
[ "Transform", "data", "by", "mapping", "it", "into", "the", "latent", "space", "." ]
4d5774bde9ea6ce1113f77a069ffc605148482b8
https://github.com/255BITS/hyperchamber/blob/4d5774bde9ea6ce1113f77a069ffc605148482b8/examples/shared/variational_autoencoder.py#L164-L168
train
255BITS/hyperchamber
examples/shared/variational_autoencoder.py
VariationalAutoencoder.generate
def generate(self, z_mu=None):
        """ Generate data by sampling from latent space.

        If z_mu is not None, data for this point in latent space is
        generated. Otherwise, z_mu is drawn from prior in latent
        space.
        """
        if z_mu is None:
            z_mu = np.random.normal(size=self.network_architecture["n_z"])
        # Note: This maps to mean of distribution, we could alternatively
        # sample from Gaussian distribution
        return self.sess.run(self.x_reconstr_mean,
                             feed_dict={self.z: z_mu})
python
def generate(self, z_mu=None):
        """ Generate data by sampling from latent space.

        If z_mu is not None, data for this point in latent space is
        generated. Otherwise, z_mu is drawn from prior in latent
        space.
        """
        if z_mu is None:
            z_mu = np.random.normal(size=self.network_architecture["n_z"])
        # Note: This maps to mean of distribution, we could alternatively
        # sample from Gaussian distribution
        return self.sess.run(self.x_reconstr_mean,
                             feed_dict={self.z: z_mu})
[ "def", "generate", "(", "self", ",", "z_mu", "=", "None", ")", ":", "if", "z_mu", "is", "None", ":", "z_mu", "=", "np", ".", "random", ".", "normal", "(", "size", "=", "self", ".", "network_architecture", "[", "\"n_z\"", "]", ")", "# Note: This maps to mean of distribution, we could alternatively", "# sample from Gaussian distribution", "return", "self", ".", "sess", ".", "run", "(", "self", ".", "x_reconstr_mean", ",", "feed_dict", "=", "{", "self", ".", "z", ":", "z_mu", "}", ")" ]
Generate data by sampling from latent space. If z_mu is not None, data for this point in latent space is generated. Otherwise, z_mu is drawn from prior in latent space.
[ "Generate", "data", "by", "sampling", "from", "latent", "space", ".", "If", "z_mu", "is", "not", "None", "data", "for", "this", "point", "in", "latent", "space", "is", "generated", ".", "Otherwise", "z_mu", "is", "drawn", "from", "prior", "in", "latent", "space", "." ]
4d5774bde9ea6ce1113f77a069ffc605148482b8
https://github.com/255BITS/hyperchamber/blob/4d5774bde9ea6ce1113f77a069ffc605148482b8/examples/shared/variational_autoencoder.py#L170-L182
train
255BITS/hyperchamber
examples/shared/variational_autoencoder.py
VariationalAutoencoder.reconstruct
def reconstruct(self, X):
        """ Use VAE to reconstruct given data. """
        return self.sess.run(self.x_reconstr_mean,
                             feed_dict={self.x: X})
python
def reconstruct(self, X):
        """ Use VAE to reconstruct given data. """
        return self.sess.run(self.x_reconstr_mean,
                             feed_dict={self.x: X})
[ "def", "reconstruct", "(", "self", ",", "X", ")", ":", "return", "self", ".", "sess", ".", "run", "(", "self", ".", "x_reconstr_mean", ",", "feed_dict", "=", "{", "self", ".", "x", ":", "X", "}", ")" ]
Use VAE to reconstruct given data.
[ "Use", "VAE", "to", "reconstruct", "given", "data", "." ]
4d5774bde9ea6ce1113f77a069ffc605148482b8
https://github.com/255BITS/hyperchamber/blob/4d5774bde9ea6ce1113f77a069ffc605148482b8/examples/shared/variational_autoencoder.py#L184-L187
train
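Taken together, partial_fit, transform, generate, and reconstruct cover the usual VAE workflow: train on mini-batches, then encode, sample, or round-trip data. A usage outline, kept as comments because it presupposes a VariationalAutoencoder instance and data names (vae, X_train, X_test, iterate_minibatches) that are placeholders, not part of the source:

# for epoch in range(n_epochs):
#     for X_batch in iterate_minibatches(X_train, batch_size=100):
#         cost = vae.partial_fit(X_batch)   # one optimizer step; returns batch cost
# z_mean = vae.transform(X_test)            # encode inputs to latent means
# x_sampled = vae.generate()                # decode a draw from the prior
# x_recon = vae.reconstruct(X_test)         # full encode/decode round trip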
geophysics-ubonn/crtomo_tools
src/cr_trig_parse_gmsh.py
get_ajd_bound
def get_ajd_bound(mesh):
    """
    Determine triangular elements adjacent to the boundary elements
    """
    print('Get elements adjacent to boundaries')
    boundary_elements = []
    str_adj_boundaries = ''
    # for boundary in mesh['elements']['1']:
    boundaries = mesh['boundaries']['12'] + mesh['boundaries']['11']
    for boundary in boundaries:
        # now find the triangle ('2') with two nodes equal to this boundary
        indices = [nr if (boundary[0] in x and boundary[1] in x) else np.nan
                   for (nr, x) in enumerate(mesh['elements']['2'])]
        indices = np.array(indices)[~np.isnan(indices)]
        if len(indices) == 0:
            print('No neighbour found!')
        elif len(indices) > 1:
            print('More than one neighbour found!')
        boundary_elements.append(indices[0])
        str_adj_boundaries += '{0}\n'.format(int(indices[0]) + 1)
    return str_adj_boundaries, boundary_elements
python
def get_ajd_bound(mesh):
    """
    Determine triangular elements adjacent to the boundary elements
    """
    print('Get elements adjacent to boundaries')
    boundary_elements = []
    str_adj_boundaries = ''
    # for boundary in mesh['elements']['1']:
    boundaries = mesh['boundaries']['12'] + mesh['boundaries']['11']
    for boundary in boundaries:
        # now find the triangle ('2') with two nodes equal to this boundary
        indices = [nr if (boundary[0] in x and boundary[1] in x) else np.nan
                   for (nr, x) in enumerate(mesh['elements']['2'])]
        indices = np.array(indices)[~np.isnan(indices)]
        if len(indices) == 0:
            print('No neighbour found!')
        elif len(indices) > 1:
            print('More than one neighbour found!')
        boundary_elements.append(indices[0])
        str_adj_boundaries += '{0}\n'.format(int(indices[0]) + 1)
    return str_adj_boundaries, boundary_elements
[ "def", "get_ajd_bound", "(", "mesh", ")", ":", "print", "(", "'Get elements adjacent to boundaries'", ")", "boundary_elements", "=", "[", "]", "str_adj_boundaries", "=", "''", "# for boundary in mesh['elements']['1']:", "boundaries", "=", "mesh", "[", "'boundaries'", "]", "[", "'12'", "]", "+", "mesh", "[", "'boundaries'", "]", "[", "'11'", "]", "for", "boundary", "in", "boundaries", ":", "# now find the triangle ('2') with two nodes equal to this boundary", "indices", "=", "[", "nr", "if", "(", "boundary", "[", "0", "]", "in", "x", "and", "boundary", "[", "1", "]", "in", "x", ")", "else", "np", ".", "nan", "for", "(", "nr", ",", "x", ")", "in", "enumerate", "(", "mesh", "[", "'elements'", "]", "[", "'2'", "]", ")", "]", "indices", "=", "np", ".", "array", "(", "indices", ")", "[", "~", "np", ".", "isnan", "(", "indices", ")", "]", "if", "(", "len", "(", "indices", ")", "!=", "1", ")", ":", "print", "(", "'More than one neighbour found!'", ")", "elif", "(", "len", "(", "indices", ")", "==", "0", ")", ":", "print", "(", "'No neighbour found!'", ")", "boundary_elements", ".", "append", "(", "indices", "[", "0", "]", ")", "str_adj_boundaries", "+=", "'{0}\\n'", ".", "format", "(", "int", "(", "indices", "[", "0", "]", ")", "+", "1", ")", "return", "str_adj_boundaries", ",", "boundary_elements" ]
Determine triangular elements adjacent to the boundary elements
[ "Determine", "triangular", "elements", "adjacend", "to", "the", "boundary", "elements" ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/cr_trig_parse_gmsh.py#L385-L405
train
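get_ajd_bound tests, for every boundary segment, which triangle contains both segment nodes. The same two-shared-nodes test can be expressed with a vectorized mask; a toy illustration with invented connectivity (a true boundary edge should match exactly one triangle, an interior edge two):

import numpy as np

triangles = np.array([[0, 1, 2], [1, 2, 3], [2, 3, 4]])  # toy element list
edge = (1, 2)  # segment defined by two node numbers

# A triangle is adjacent to the edge iff it contains both edge nodes.
mask = np.isin(triangles, edge).sum(axis=1) == 2
print(np.flatnonzero(mask))  # -> [0 1]: edge (1, 2) is interior here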
geophysics-ubonn/crtomo_tools
src/cr_trig_parse_gmsh.py
write_elec_file
def write_elec_file(filename, mesh):
    """
    Read in the electrode positions and return the indices of the electrodes

    # TODO: Check if you find all electrodes
    """
    elecs = []
    # print('Write electrodes')
    electrodes = np.loadtxt(filename)
    for i in electrodes:
        # find
        for nr, j in enumerate(mesh['nodes']):
            if np.isclose(j[1], i[0]) and np.isclose(j[2], i[1]):
                elecs.append(nr + 1)

    fid = open('elec.dat', 'w')
    fid.write('{0}\n'.format(len(elecs)))
    for i in elecs:
        fid.write('{0}\n'.format(i))
    fid.close()
python
def write_elec_file(filename, mesh):
    """
    Read in the electrode positions and return the indices of the electrodes

    # TODO: Check if you find all electrodes
    """
    elecs = []
    # print('Write electrodes')
    electrodes = np.loadtxt(filename)
    for i in electrodes:
        # find
        for nr, j in enumerate(mesh['nodes']):
            if np.isclose(j[1], i[0]) and np.isclose(j[2], i[1]):
                elecs.append(nr + 1)

    fid = open('elec.dat', 'w')
    fid.write('{0}\n'.format(len(elecs)))
    for i in elecs:
        fid.write('{0}\n'.format(i))
    fid.close()
[ "def", "write_elec_file", "(", "filename", ",", "mesh", ")", ":", "elecs", "=", "[", "]", "# print('Write electrodes')", "electrodes", "=", "np", ".", "loadtxt", "(", "filename", ")", "for", "i", "in", "electrodes", ":", "# find", "for", "nr", ",", "j", "in", "enumerate", "(", "mesh", "[", "'nodes'", "]", ")", ":", "if", "np", ".", "isclose", "(", "j", "[", "1", "]", ",", "i", "[", "0", "]", ")", "and", "np", ".", "isclose", "(", "j", "[", "2", "]", ",", "i", "[", "1", "]", ")", ":", "elecs", ".", "append", "(", "nr", "+", "1", ")", "fid", "=", "open", "(", "'elec.dat'", ",", "'w'", ")", "fid", ".", "write", "(", "'{0}\\n'", ".", "format", "(", "len", "(", "elecs", ")", ")", ")", "for", "i", "in", "elecs", ":", "fid", ".", "write", "(", "'{0}\\n'", ".", "format", "(", "i", ")", ")", "fid", ".", "close", "(", ")" ]
Read in the electrode positions and return the indices of the electrodes # TODO: Check if you find all electrodes
[ "Read", "in", "the", "electrode", "positions", "and", "return", "the", "indices", "of", "the", "electrodes" ]
27c3e21a557f8df1c12455b96c4c2e00e08a5b4a
https://github.com/geophysics-ubonn/crtomo_tools/blob/27c3e21a557f8df1c12455b96c4c2e00e08a5b4a/src/cr_trig_parse_gmsh.py#L408-L427
train
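write_elec_file matches electrode coordinates to mesh nodes with np.isclose, i.e. within a floating-point tolerance rather than by exact equality. A toy illustration of why that matters (the coordinates are invented):

import numpy as np

node_x = 0.1 + 0.2            # accumulated rounding: 0.30000000000000004
elec_x = 0.3

print(node_x == elec_x)            # False: exact comparison misses the node
print(np.isclose(node_x, elec_x))  # True: tolerant comparison matches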
thiagopbueno/tf-rddlsim
tfrddlsim/simulation/policy_simulator.py
PolicySimulationCell.state_size
def state_size(self) -> Sequence[Shape]:
        '''Returns the MDP state size.'''
        return self._sizes(self._compiler.rddl.state_size)
python
def state_size(self) -> Sequence[Shape]:
        '''Returns the MDP state size.'''
        return self._sizes(self._compiler.rddl.state_size)
[ "def", "state_size", "(", "self", ")", "->", "Sequence", "[", "Shape", "]", ":", "return", "self", ".", "_sizes", "(", "self", ".", "_compiler", ".", "rddl", ".", "state_size", ")" ]
Returns the MDP state size.
[ "Returns", "the", "MDP", "state", "size", "." ]
d7102a0ad37d179dbb23141640254ea383d3b43f
https://github.com/thiagopbueno/tf-rddlsim/blob/d7102a0ad37d179dbb23141640254ea383d3b43f/tfrddlsim/simulation/policy_simulator.py#L77-L79
train
thiagopbueno/tf-rddlsim
tfrddlsim/simulation/policy_simulator.py
PolicySimulationCell.action_size
def action_size(self) -> Sequence[Shape]:
        '''Returns the MDP action size.'''
        return self._sizes(self._compiler.rddl.action_size)
python
def action_size(self) -> Sequence[Shape]:
        '''Returns the MDP action size.'''
        return self._sizes(self._compiler.rddl.action_size)
[ "def", "action_size", "(", "self", ")", "->", "Sequence", "[", "Shape", "]", ":", "return", "self", ".", "_sizes", "(", "self", ".", "_compiler", ".", "rddl", ".", "action_size", ")" ]
Returns the MDP action size.
[ "Returns", "the", "MDP", "action", "size", "." ]
d7102a0ad37d179dbb23141640254ea383d3b43f
https://github.com/thiagopbueno/tf-rddlsim/blob/d7102a0ad37d179dbb23141640254ea383d3b43f/tfrddlsim/simulation/policy_simulator.py#L82-L84
train
thiagopbueno/tf-rddlsim
tfrddlsim/simulation/policy_simulator.py
PolicySimulationCell.interm_size
def interm_size(self) -> Sequence[Shape]:
        '''Returns the MDP intermediate state size.'''
        return self._sizes(self._compiler.rddl.interm_size)
python
def interm_size(self) -> Sequence[Shape]:
        '''Returns the MDP intermediate state size.'''
        return self._sizes(self._compiler.rddl.interm_size)
[ "def", "interm_size", "(", "self", ")", "->", "Sequence", "[", "Shape", "]", ":", "return", "self", ".", "_sizes", "(", "self", ".", "_compiler", ".", "rddl", ".", "interm_size", ")" ]
Returns the MDP intermediate state size.
[ "Returns", "the", "MDP", "intermediate", "state", "size", "." ]
d7102a0ad37d179dbb23141640254ea383d3b43f
https://github.com/thiagopbueno/tf-rddlsim/blob/d7102a0ad37d179dbb23141640254ea383d3b43f/tfrddlsim/simulation/policy_simulator.py#L87-L89
train
thiagopbueno/tf-rddlsim
tfrddlsim/simulation/policy_simulator.py
PolicySimulationCell.output_size
def output_size(self) -> Tuple[Sequence[Shape], Sequence[Shape], Sequence[Shape], int]:
        '''Returns the simulation cell output size.'''
        return (self.state_size, self.action_size, self.interm_size, 1)
python
def output_size(self) -> Tuple[Sequence[Shape], Sequence[Shape], Sequence[Shape], int]:
        '''Returns the simulation cell output size.'''
        return (self.state_size, self.action_size, self.interm_size, 1)
[ "def", "output_size", "(", "self", ")", "->", "Tuple", "[", "Sequence", "[", "Shape", "]", ",", "Sequence", "[", "Shape", "]", ",", "Sequence", "[", "Shape", "]", ",", "int", "]", ":", "return", "(", "self", ".", "state_size", ",", "self", ".", "action_size", ",", "self", ".", "interm_size", ",", "1", ")" ]
Returns the simulation cell output size.
[ "Returns", "the", "simulation", "cell", "output", "size", "." ]
d7102a0ad37d179dbb23141640254ea383d3b43f
https://github.com/thiagopbueno/tf-rddlsim/blob/d7102a0ad37d179dbb23141640254ea383d3b43f/tfrddlsim/simulation/policy_simulator.py#L92-L94
train
thiagopbueno/tf-rddlsim
tfrddlsim/simulation/policy_simulator.py
PolicySimulationCell.initial_state
def initial_state(self) -> StateTensor:
        '''Returns the initial state tensor.'''
        s0 = []
        for fluent in self._compiler.compile_initial_state(self._batch_size):
            s0.append(self._output_size(fluent))
        s0 = tuple(s0)
        return s0
python
def initial_state(self) -> StateTensor:
        '''Returns the initial state tensor.'''
        s0 = []
        for fluent in self._compiler.compile_initial_state(self._batch_size):
            s0.append(self._output_size(fluent))
        s0 = tuple(s0)
        return s0
[ "def", "initial_state", "(", "self", ")", "->", "StateTensor", ":", "s0", "=", "[", "]", "for", "fluent", "in", "self", ".", "_compiler", ".", "compile_initial_state", "(", "self", ".", "_batch_size", ")", ":", "s0", ".", "append", "(", "self", ".", "_output_size", "(", "fluent", ")", ")", "s0", "=", "tuple", "(", "s0", ")", "return", "s0" ]
Returns the initial state tensor.
[ "Returns", "the", "initial", "state", "tensor", "." ]
d7102a0ad37d179dbb23141640254ea383d3b43f
https://github.com/thiagopbueno/tf-rddlsim/blob/d7102a0ad37d179dbb23141640254ea383d3b43f/tfrddlsim/simulation/policy_simulator.py#L96-L102
train
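The state_size/action_size/interm_size/output_size properties and initial_state appear to follow the contract of TensorFlow 1.x RNN cells, where a cell advertises its (possibly nested) sizes so the scan machinery can allocate matching tensors. A minimal plain-Python sketch of that pattern; Shape as a tuple of ints and the concrete sizes below are assumptions for illustration:

from typing import Sequence, Tuple

Shape = Tuple[int, ...]

class SizedCell:
    # Illustrative stand-in showing how the nested output size composes.
    def __init__(self, state, action, interm):
        self._state, self._action, self._interm = state, action, interm

    @property
    def state_size(self) -> Sequence[Shape]:
        return self._state

    @property
    def output_size(self):
        # (state, action, interm, reward); the trailing 1 is the scalar reward
        return (self._state, self._action, self._interm, 1)

cell = SizedCell([(8,)], [(2,)], [(3,)])
print(cell.output_size)  # ([(8,)], [(2,)], [(3,)], 1)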