Dataset preview: Python functions extracted from the `bxlab/bx-python` repository. Each record carries the following columns:

| column | type |
|---|---|
| `repo` | string (7–55 chars) |
| `path` | string (4–127 chars) |
| `func_name` | string (1–88 chars) |
| `original_string` | string (75–19.8k chars) |
| `language` | string (1 class: `python`) |
| `code` | string (75–19.8k chars) |
| `code_tokens` | list |
| `docstring` | string (3–17.3k chars) |
| `docstring_tokens` | list |
| `sha` | string (40 chars) |
| `url` | string (87–242 chars) |
| `partition` | string (1 class: `train`) |

Every record below comes from `bxlab/bx-python` at sha `09cb725284803df90a468d910f2274628d8647de`, partition `train`. The `code` column duplicates `original_string`, and the `*_tokens` columns are tokenizations of the code and docstring, so each record is rendered here as its path, function name, code, docstring, and source URL.
---

**path:** `lib/bx/misc/readlengths.py` · **func_name:** `read_lengths_file`

```python
def read_lengths_file( name ):
"""
Returns a hash from sequence name to length.
"""
chrom_to_length = {}
f = file ( name, "rt" )
for line in f:
line = line.strip()
if line == '' or line[0] == '#': continue
try:
fields = line.split()
if len(fields) != 2: raise
chrom = fields[0]
length = int( fields[1] )
except:
raise ValueError("bad length file line: %s" % line)
if chrom in chrom_to_length and length != chrom_to_length[chrom]:
raise ValueError("%s has more than one length!" % chrom)
chrom_to_length[chrom] = length
f.close()
return chrom_to_length
```
**docstring:** Returns a hash from sequence name to length.

**source:** https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx/misc/readlengths.py#L7-L28
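The archived snippet leans on the Python 2 `file()` builtin and a bare `raise` inside the `try`. For comparison, a minimal Python 3 re-implementation of the same parser (illustrative only, not the library's current code):

```python
def read_lengths(path):
    """Map sequence name -> length from whitespace-separated two-column lines."""
    chrom_to_length = {}
    with open(path) as f:
        for line in f:
            line = line.strip()
            if not line or line.startswith("#"):      # skip blanks and comments
                continue
            fields = line.split()
            if len(fields) != 2:
                raise ValueError("bad length file line: %s" % line)
            chrom, length = fields[0], int(fields[1])
            if chrom in chrom_to_length and chrom_to_length[chrom] != length:
                raise ValueError("%s has more than one length!" % chrom)
            chrom_to_length[chrom] = length
    return chrom_to_length
```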
---

**path:** `lib/bx/wiggle.py` · **func_name:** `IntervalReader`

```python
def IntervalReader( f ):
"""
Iterator yielding chrom, start, end, strand, value.
Values are zero-based, half-open.
Regions which lack a score are ignored.
"""
current_chrom = None
current_pos = None
current_step = None
# always for wiggle data
strand = '+'
mode = "bed"
for line in f:
if line.isspace() or line.startswith( "track" ) or line.startswith( "#" ) or line.startswith( "browser" ):
continue
elif line.startswith( "variableStep" ):
header = parse_header( line )
current_chrom = header['chrom']
current_pos = None
current_step = None
if 'span' in header: current_span = int( header['span'] )
else: current_span = 1
mode = "variableStep"
elif line.startswith( "fixedStep" ):
header = parse_header( line )
current_chrom = header['chrom']
current_pos = int( header['start'] ) - 1
current_step = int( header['step'] )
if 'span' in header: current_span = int( header['span'] )
else: current_span = 1
mode = "fixedStep"
elif mode == "bed":
fields = line.split()
if len( fields ) > 3:
if len( fields ) > 5:
yield fields[0], int( fields[1] ), int( fields[2] ), fields[5], float( fields[3] )
else:
yield fields[0], int( fields[1] ), int( fields[2] ), strand, float( fields[3] )
elif mode == "variableStep":
fields = line.split()
pos = int( fields[0] ) - 1
yield current_chrom, pos, pos + current_span, strand, float( fields[1] )
elif mode == "fixedStep":
yield current_chrom, current_pos, current_pos + current_span, strand, float( line.split()[0] )
current_pos += current_step
else:
raise ValueError("Unexpected input line: %s" % line.strip())
```
**docstring:** Iterator yielding chrom, start, end, strand, value. Values are zero-based, half-open. Regions which lack a score are ignored.

**source:** https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx/wiggle.py#L14-L63
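A usage sketch of the fixedStep mode, assuming the module is importable as `bx.wiggle` (its path in the repo) and that its `parse_header` helper handles the header line:

```python
import io
from bx.wiggle import IntervalReader

wig = io.StringIO(
    "fixedStep chrom=chr1 start=11 step=10 span=5\n"
    "1.0\n"
    "2.0\n"
)
for interval in IntervalReader(wig):
    print(interval)
# expected (0-based, half-open):
#   ('chr1', 10, 15, '+', 1.0)    # 1-based start=11 becomes 10; end = start + span
#   ('chr1', 20, 25, '+', 2.0)    # position advances by step=10
```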
---

**path:** `lib/bx/align/tools/fuse.py` · **func_name:** `fuse_list`

```python
def fuse_list( mafs ):
"""
Try to fuse a list of blocks by progressively fusing each adjacent pair.
"""
last = None
for m in mafs:
if last is None:
last = m
else:
fused = fuse( last, m )
if fused:
last = fused
else:
yield last
last = m
if last:
yield last
```
**docstring:** Try to fuse a list of blocks by progressively fusing each adjacent pair.

**source:** https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx/align/tools/fuse.py#L8-L24
---

**path:** `lib/bx_extras/pyparsing.py` · **func_name:** `ParserElement.setBreak`

```python
def setBreak(self,breakFlag = True):
"""Method to invoke the Python pdb debugger when this element is
about to be parsed. Set breakFlag to True to enable, False to
disable.
"""
if breakFlag:
_parseMethod = self._parse
def breaker(instring, loc, doActions=True, callPreParse=True):
import pdb
pdb.set_trace()
_parseMethod( instring, loc, doActions, callPreParse )
breaker._originalParseMethod = _parseMethod
self._parse = breaker
else:
if hasattr(self._parse,"_originalParseMethod"):
self._parse = self._parse._originalParseMethod
return self
```
**docstring:** Method to invoke the Python pdb debugger when this element is about to be parsed. Set breakFlag to True to enable, False to disable.

**source:** https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx_extras/pyparsing.py#L709-L725
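A usage sketch with standard pyparsing names (`Word`, `nums`). Note that this archived `breaker` calls `_parseMethod` without returning its result, so matching would not proceed normally past the breakpoint; later pyparsing releases return the value.

```python
from pyparsing import Word, nums

integer = Word(nums).setBreak()   # pdb starts just before `integer` tries to match
# integer.parseString("42")       # uncomment to step through the match under pdb
```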
---

**path:** `lib/bx_extras/pyparsing.py` · **func_name:** `ParserElement.searchString`

```python
def searchString( self, instring, maxMatches=_MAX_INT ):
"""Another extension to scanString, simplifying the access to the tokens found
to match the given parse expression. May be called with optional
maxMatches argument, to clip searching after 'n' matches are found.
"""
return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ])
```
**docstring:** Another extension to scanString, simplifying the access to the tokens found to match the given parse expression. May be called with optional maxMatches argument, to clip searching after 'n' matches are found.

**source:** https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx_extras/pyparsing.py#L1111-L1116
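A usage sketch (the capitalized-word example in the style of the pyparsing docs; the exact output shape is assumed from the `ParseResults` wrapper):

```python
from pyparsing import Word, alphas

cap_word = Word(alphas.upper(), alphas.lower())   # one capital, then lowercase letters
print(cap_word.searchString("More than Iron, more than Lead", maxMatches=2))
# expected: [['More'], ['Iron']]
```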
---

**path:** `lib/bx/align/epo.py` · **func_name:** `Chain._strfactory`

```python
def _strfactory(cls, line):
"""factory class method for Chain
:param line: header of a chain (in .chain format)
"""
assert type(line) == str, "this is a factory from string"
line = line.rstrip().split()[1:] # the first component is the keyword "chain"
tup = [t[0](t[1]) for t in zip([int, str, int, str, int, int, str, int, str, int, int, str], line)]
return tuple.__new__(cls, tup)
```
**docstring:** factory class method for Chain. `:param line:` header of a chain (in .chain format)

**source:** https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx/align/epo.py#L30-L40
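The twelve converters passed to `zip` line up with the fields of a UCSC `.chain` header. A hedged reading (field names follow the UCSC chain spec; the values are illustrative):

```python
#       score tName tSize    tStrand tStart   tEnd     qName qSize     qStrand qStart   qEnd     id
# chain 4900  chrY  58368225 +       25985403 25985638 chr5  151006098 -       43257292 43257528 2
converters = [int, str, int, str, int, int, str, int, str, int, int, str]
fields = "4900 chrY 58368225 + 25985403 25985638 chr5 151006098 - 43257292 43257528 2".split()
parsed = [conv(value) for conv, value in zip(converters, fields)]
print(parsed[:3])   # [4900, 'chrY', 58368225]
```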
---

**path:** `lib/bx/align/epo.py` · **func_name:** `Chain.bedInterval`

```python
def bedInterval(self, who):
"return a BED6 entry, thus DOES coordinate conversion for minus strands"
if who == 't':
st, en = self.tStart, self.tEnd
if self.tStrand == '-':
st, en = self.tSize-en, self.tSize-st
return (self.tName, st, en, self.id, self.score, self.tStrand)
else:
st, en = self.qStart, self.qEnd
if self.qStrand == '-':
st, en = self.qSize-en, self.qSize-st
assert en-st == self.qEnd - self.qStart
return (self.qName, st, en, self.id, self.score, self.qStrand)
```
**docstring:** return a BED6 entry, thus DOES coordinate conversion for minus strands

**source:** https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx/align/epo.py#L135-L148
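A quick check of the minus-strand flip with hypothetical coordinates: subtracting both ends from the sequence size converts reverse-strand positions to forward-strand BED positions while preserving the span, which is exactly what the `assert` in the query branch verifies.

```python
qSize, qStart, qEnd = 100, 10, 30         # hypothetical reverse-strand interval
st, en = qSize - qEnd, qSize - qStart     # flip onto the forward strand
assert (st, en) == (70, 90)
assert en - st == qEnd - qStart           # span length is preserved
```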
---

**path:** `lib/bx/align/epo.py` · **func_name:** `EPOitem._strfactory`

```python
def _strfactory(cls, line):
"""factory method for an EPOitem
:param line: a line of input"""
cmp = line.rstrip().split()
chrom = cmp[2]
if not chrom.startswith("chr"):
chrom = "chr%s" % chrom
instance = tuple.__new__(cls,
(cmp[0], cmp[1],
chrom, int(cmp[3]), int(cmp[4]),
{'1' : '+', '-1' : '-'}[cmp[5]], cmp[6]))
span = instance.end - instance.start + 1
m_num = sum( (t[1] == "M" and [t[0]] or [0])[0] for t in instance.cigar_iter(False) )
if span != m_num:
log.warning("[{gabid}] {species}.{chrom}:{start}-{end}.".format(**instance._asdict()) + "(span) %d != %d (matches)" % (span, m_num))
return None
return instance
```
**docstring:** factory method for an EPOitem. `:param line:` a line of input

**source:** https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx/align/epo.py#L192-L210
---

**path:** `lib/bx/bitset_builders.py` · **func_name:** `binned_bitsets_proximity`

```python
def binned_bitsets_proximity( f, chrom_col=0, start_col=1, end_col=2, strand_col=5, upstream=0, downstream=0 ):
"""Read a file into a dictionary of bitsets"""
last_chrom = None
last_bitset = None
bitsets = dict()
for line in f:
if line.startswith("#"): continue
# print "input=%s" % ( line ),
fields = line.split()
strand = "+"
if len(fields) >= strand_col + 1:
if fields[strand_col] == "-": strand = "-"
chrom = fields[chrom_col]
if chrom != last_chrom:
if chrom not in bitsets:
bitsets[chrom] = BinnedBitSet( MAX )
last_chrom = chrom
last_bitset = bitsets[chrom]
start, end = int( fields[start_col] ), int( fields[end_col] )
if strand == "+":
if upstream: start = max( 0, start - upstream )
if downstream: end = min( MAX, end + downstream )
if strand == "-":
if upstream: end = min( MAX, end + upstream )
if downstream: start = max( 0, start - downstream )
# print "set: start=%d\tend=%d" % ( start, end )
if end-start > 0:
last_bitset.set_range( start, end-start )
return bitsets
```
**docstring:** Read a file into a dictionary of bitsets

**source:** https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx/bitset_builders.py#L100-L128
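Because upstream of a minus-strand feature lies at larger coordinates, the padding arithmetic swaps between strands. A worked example with hypothetical values (`MAX` here stands in for the module-level bitset size cap, which is an assumption):

```python
MAX = 2 ** 29                    # stand-in for the module's cap (assumption)
start, end = 1000, 2000
upstream, downstream = 100, 50

plus = (max(0, start - upstream), min(MAX, end + downstream))    # (900, 2050)
minus = (max(0, start - downstream), min(MAX, end + upstream))   # (950, 2100)
print(plus, minus)
```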
---

**path:** `lib/bx/bitset_builders.py` · **func_name:** `binned_bitsets_from_list`

```python
def binned_bitsets_from_list( list=[] ):
"""Read a list into a dictionary of bitsets"""
last_chrom = None
last_bitset = None
bitsets = dict()
for l in list:
chrom = l[0]
if chrom != last_chrom:
if chrom not in bitsets:
bitsets[chrom] = BinnedBitSet(MAX)
last_chrom = chrom
last_bitset = bitsets[chrom]
start, end = int( l[1] ), int( l[2] )
last_bitset.set_range( start, end - start )
return bitsets
```
**docstring:** Read a list into a dictionary of bitsets

**source:** https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx/bitset_builders.py#L130-L144
---

**path:** `lib/bx/bitset_builders.py` · **func_name:** `binned_bitsets_by_chrom`

```python
def binned_bitsets_by_chrom( f, chrom, chrom_col=0, start_col=1, end_col=2):
"""Read a file by chrom name into a bitset"""
bitset = BinnedBitSet( MAX )
for line in f:
if line.startswith("#"): continue
fields = line.split()
if fields[chrom_col] == chrom:
start, end = int( fields[start_col] ), int( fields[end_col] )
bitset.set_range( start, end-start )
return bitset
```
**docstring:** Read a file by chrom name into a bitset

**source:** https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx/bitset_builders.py#L146-L155
---

**path:** `lib/bx_extras/fpconst.py` · **func_name:** `_double_as_bytes`

```python
def _double_as_bytes(dval):
"Use struct.unpack to decode a double precision float into eight bytes"
tmp = list(struct.unpack('8B',struct.pack('d', dval)))
if not _big_endian:
tmp.reverse()
return tmp
```
**docstring:** Use struct.unpack to decode a double precision float into eight bytes

**source:** https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx_extras/fpconst.py#L45-L50
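Reversing the byte list on little-endian hosts normalizes it to big-endian order, so the same effect can be had portably by forcing big-endian in the format string. A stand-alone sketch:

```python
import struct

def double_as_bytes_be(dval):
    """Bytes of an IEEE-754 double, most significant first (illustrative)."""
    return list(struct.unpack('>8B', struct.pack('>d', dval)))   # '>' forces big-endian

print(double_as_bytes_be(1.0))   # [63, 240, 0, 0, 0, 0, 0, 0], i.e. 0x3FF0000000000000
```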
---

**path:** `lib/bx_extras/fpconst.py` · **func_name:** `_mantissa`

```python
def _mantissa(dval):
"""Extract the _mantissa bits from a double-precision floating
point value."""
bb = _double_as_bytes(dval)
mantissa = bb[1] & 0x0f << 48
mantissa += bb[2] << 40
mantissa += bb[3] << 32
mantissa += bb[4]
return mantissa
```
**docstring:** Extract the _mantissa bits from a double-precision floating point value.

**source:** https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx_extras/fpconst.py#L72-L81
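A caution when reading the archived snippet: `<<` binds tighter than `&` in Python, so `bb[1] & 0x0f << 48` parses as `bb[1] & (0x0f << 48)` and evaluates to 0 for any byte value, and bytes 5 through 7 are never added at all. A corrected sketch of the apparent intent (my reading, not the library's code), extracting all 52 mantissa bits:

```python
import struct

def mantissa_bits(dval):
    """All 52 mantissa bits of an IEEE-754 double (illustrative rewrite)."""
    bb = struct.unpack('>8B', struct.pack('>d', dval))
    m = (bb[1] & 0x0f) << 48          # low nibble of byte 1 tops the mantissa
    for i, b in enumerate(bb[2:]):    # bytes 2..7 fill the remaining 48 bits
        m |= b << (40 - 8 * i)
    return m

assert mantissa_bits(1.0) == 0
assert mantissa_bits(1.5) == 1 << 51   # 1.5 = 1.1b * 2^0: top mantissa bit set
```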
---

**path:** `lib/bx_extras/fpconst.py` · **func_name:** `_zero_mantissa`

```python
def _zero_mantissa(dval):
"""Determine whether the mantissa bits of the given double are all
zero."""
bb = _double_as_bytes(dval)
return ((bb[1] & 0x0f) | reduce(operator.or_, bb[2:])) == 0
```
**docstring:** Determine whether the mantissa bits of the given double are all zero.

**source:** https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx_extras/fpconst.py#L83-L87
---

**path:** `scripts/aggregate_scores_in_intervals.py` · **func_name:** `load_scores_wiggle`

```python
def load_scores_wiggle( fname ):
"""
Read a wiggle file and return a dict of BinnedArray objects keyed
by chromosome.
"""
scores_by_chrom = dict()
for chrom, pos, val in bx.wiggle.Reader( misc.open_compressed( fname ) ):
if chrom not in scores_by_chrom:
scores_by_chrom[chrom] = BinnedArray()
scores_by_chrom[chrom][pos] = val
return scores_by_chrom
```
**docstring:** Read a wiggle file and return a dict of BinnedArray objects keyed by chromosome.

**source:** https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/scripts/aggregate_scores_in_intervals.py#L60-L70
---

**path:** `lib/bx/interval_index_file.py` · **func_name:** `Index.new`

```python
def new( self, min, max ):
"""Create an empty index for intervals in the range min, max"""
# Ensure the range will fit given the shifting strategy
assert MIN <= min <= max <= MAX
self.min = min
self.max = max
# Determine offsets to use
self.offsets = offsets_for_max_size( max )
# Determine the largest bin we will actually use
self.bin_count = bin_for_range( max - 1, max, offsets = self.offsets ) + 1
# Create empty bins
self.bins = [ [] for i in range( self.bin_count ) ]
```
**docstring:** Create an empty index for intervals in the range min, max

**source:** https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx/interval_index_file.py#L357-L368
---

**path:** `lib/bx/misc/filecache.py` · **func_name:** `FileCache.seek`

```python
def seek( self, offset, whence=0 ):
"""
Move the file pointer to a particular offset.
"""
# Determine absolute target position
if whence == 0:
target_pos = offset
elif whence == 1:
target_pos = self.file_pos + offset
elif whence == 2:
target_pos = self.size - offset
else:
raise Exception( "Invalid `whence` argument: %r", whence )
# Check if this is a noop
if target_pos == self.file_pos:
return
# Verify it is valid
assert 0 <= target_pos < self.size, "Attempt to seek outside file"
# Move the position
self.file_pos = target_pos
# Mark as dirty, the next time a read is done we need to actually
# move the position in the bzip2 file
self.dirty = True
```
**docstring:** Move the file pointer to a particular offset.

**source:** https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx/misc/filecache.py#L60-L82
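One caution when comparing with the standard library: with `whence == 2` this class subtracts a positive offset from the size, whereas `io` file objects seek from the end with a negative offset. A quick equivalence check on a hypothetical 100-byte file:

```python
size, offset = 100, 10
target = size - offset    # FileCache.seek(10, whence=2) lands at position 90
# stdlib equivalent: f.seek(-10, 2) also lands at position 90
assert target == 90
```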
---

**path:** `lib/bx_extras/lrucache.py` · **func_name:** `LRUCache.mtime`

```python
def mtime(self, key):
"""Return the last modification time for the cache record with key.
May be useful for cache instances where the stored values can get
'stale', such as caching file or network resource contents."""
if key not in self.__dict:
raise CacheKeyError(key)
else:
node = self.__dict[key]
return node.mtime
```
**docstring:** Return the last modification time for the cache record with key. May be useful for cache instances where the stored values can get 'stale', such as caching file or network resource contents.

**source:** https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx_extras/lrucache.py#L203-L211
---

**path:** `lib/bx/cookbook/attribute.py` · **func_name:** `class_space`

```python
def class_space(classlevel=3):
"returns the calling class' name and dictionary"
frame = sys._getframe(classlevel)
classname = frame.f_code.co_name
classdict = frame.f_locals
return classname, classdict
```
**docstring:** returns the calling class' name and dictionary

**source:** https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx/cookbook/attribute.py#L67-L72
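The default `classlevel=3` presumably matches the helper's depth inside the module's attribute machinery; calling an equivalent directly from a class body needs a depth of 1. A self-contained sketch (not the library's wiring):

```python
import sys

def class_space(classlevel=1):
    """Same frame walk, with a depth suited to a direct call in a class body."""
    frame = sys._getframe(classlevel)
    return frame.f_code.co_name, frame.f_locals

class Demo:
    x = 1
    name, space = class_space()

print(Demo.name)          # 'Demo'
print('x' in Demo.space)  # True: f_locals captured the class-body namespace
```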
---

**path:** `lib/bx/align/lav.py` · **func_name:** `Reader.build_alignment`

```python
def build_alignment(self,score,pieces):
"""converts a score and pieces to an alignment"""
# build text
self.open_seqs()
text1 = text2 = ""
end1 = end2 = None
for (start1,start2,length,pctId) in pieces:
if (end1 != None):
if (start1 == end1): # insertion in sequence 2
text1 += self.seq1_gap * (start2-end2)
text2 += self.seq2_file.get(end2,start2-end2)
else: # insertion in sequence 1
text1 += self.seq1_file.get(end1,start1-end1)
text2 += self.seq2_gap * (start1-end1)
text1 += self.seq1_file.get(start1,length)
text2 += self.seq2_file.get(start2,length)
end1 = start1 + length
end2 = start2 + length
# create alignment
start1 = pieces[0][0]
start2 = pieces[0][1]
end1 = pieces[-1][0] + pieces[-1][2]
end2 = pieces[-1][1] + pieces[-1][2]
size1 = end1 - start1
size2 = end2 - start2
a = Alignment(score=score,species_to_lengths=self.species_to_lengths)
#if (self.seq1_strand == "-"): start1 = self.seq1_file.length - end1
a.add_component(Component(self.seq1_src,start1,size1,self.seq1_strand,text=text1))
#if (self.seq2_strand == "-"): start2 = self.seq2_file.length - end2
a.add_component(Component(self.seq2_src,start2,size2,self.seq2_strand,text=text2))
return a
```
**docstring:** converts a score and pieces to an alignment

**source:** https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx/align/lav.py#L326-L357
---

**path:** `lib/bx/intervals/operations/__init__.py` · **func_name:** `bits_clear_in_range`

```python
def bits_clear_in_range( bits, range_start, range_end ):
"""
Yield start,end tuples for each span of clear bits in [range_start,range_end)
"""
end = range_start
while 1:
start = bits.next_clear( end )
if start >= range_end: break
end = min( bits.next_set( start ), range_end )
yield start, end
```
**docstring:** Yield start,end tuples for each span of clear bits in [range_start,range_end)

**source:** https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx/intervals/operations/__init__.py#L31-L40
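A usage sketch, assuming `bx.bitset.BitSet` provides the `next_clear`/`next_set` probes the generator relies on and that `next_set` returns the bitset size when no later bit is set:

```python
from bx.bitset import BitSet
from bx.intervals.operations import bits_clear_in_range

bits = BitSet(100)
bits.set_range(10, 5)     # set bits [10, 15)
print(list(bits_clear_in_range(bits, 0, 30)))
# expected: [(0, 10), (15, 30)]
```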
---

**path:** `lib/bx/cookbook/progress_bar.py` · **func_name:** `iterprogress`

```python
def iterprogress( sized_iterable ):
"""
Iterate something printing progress bar to stdout
"""
pb = ProgressBar( 0, len( sized_iterable ) )
for i, value in enumerate( sized_iterable ):
yield value
pb.update_and_print( i, sys.stderr )
```
**docstring:** Iterate something printing progress bar to stdout

**source:** https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx/cookbook/progress_bar.py#L61-L68
---

**path:** `lib/bx/misc/cdb.py` · **func_name:** `FileCDBDict.to_file`

```python
def to_file( Class, dict, file, is_little_endian=True ):
"""
For constructing a CDB structure in a file. Able to calculate size on
disk and write to a file
"""
io = BinaryFileWriter( file, is_little_endian=is_little_endian )
start_offset = io.tell()
# Header is of fixed length
io.seek( start_offset + ( 8 * 256 ) )
# For each item, key and value length (written as length prefixed
# strings). We also calculate the subtables on this pass.
# NOTE: This requires the key and value be byte strings, support for
# dealing with encoding specific value types should be
# added to this wrapper
subtables = [ [] for i in range(256) ]
for key, value in dict.items():
pair_offset = io.tell()
io.write_uint32( len( key ) )
io.write_uint32( len( value ) )
io.write( key )
io.write( value )
hash = cdbhash( key )
subtables[ hash % 256 ].append( ( hash, pair_offset ) )
# Save the offset where the subtables will start
subtable_offset = io.tell()
# Write subtables
for subtable in subtables:
if len( subtable ) > 0:
# Construct hashtable to be twice the size of the number
# of items in the subtable, and built it in memory
ncells = len( subtable ) * 2
cells = [ (0,0) for i in range( ncells ) ]
for hash, pair_offset in subtable:
index = ( hash >> 8 ) % ncells
while cells[index][1] != 0:
index = ( index + 1 ) % ncells
# Guaranteed to find a non-empty cell
cells[index] = ( hash, pair_offset )
# Write subtable
for hash, pair_offset in cells:
io.write_uint32( hash )
io.write_uint32( pair_offset )
# Go back and write the header
end_offset = io.tell()
io.seek( start_offset )
index = subtable_offset
for subtable in subtables:
io.write_uint32( index )
io.write_uint32( len( subtable * 2 ) )
# For each cell in the subtable, a hash and a pointer to a value
index += ( len( subtable ) * 2 ) * 8
# Leave fp at end of cdb
io.seek( end_offset )
```
**docstring:** For constructing a CDB structure in a file. Able to calculate size on disk and write to a file

**source:** https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx/misc/cdb.py#L65-L117
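For orientation, the read side this layout serves, sketched under the assumption that `cdbhash` is the classic djb CDB hash (not verified here). Note also that `len( subtable * 2 )` in the header pass materializes a doubled list just to take its length; it equals `len(subtable) * 2`.

```python
def cdbhash(key, h=5381):
    """Classic CDB hash; assumed to match the writer's cdbhash (unverified)."""
    for byte in key:                  # key is a byte string
        h = ((h << 5) + h) ^ byte
        h &= 0xFFFFFFFF               # keep it a 32-bit value
    return h

# Lookup outline against the layout written above:
#   1. h = cdbhash(key); header slot (h % 256) holds (subtable_offset, ncells)
#   2. probe cell (h >> 8) % ncells, stepping linearly on collision
#      (the writer leaves empty cells as (0, 0), so a zero pair_offset ends the probe)
#   3. a matching cell's pair_offset points at the length-prefixed key/value
#      record written in the first pass
```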
---

**path:** `scripts/bed_complement.py` · **func_name:** `read_len`

```python
def read_len( f ):
"""Read a 'LEN' file and return a mapping from chromosome to length"""
mapping = dict()
for line in f:
fields = line.split()
mapping[ fields[0] ] = int( fields[1] )
return mapping
```
**docstring:** Read a 'LEN' file and return a mapping from chromosome to length

**source:** https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/scripts/bed_complement.py#L20-L26
---

**path:** `lib/bx/motif/logo/__init__.py` · **func_name:** `eps_logo`

```python
def eps_logo( matrix, base_width, height, colors=DNA_DEFAULT_COLORS ):
"""
Return an EPS document containing a sequence logo for matrix where each
bases is shown as a column of `base_width` points and the total logo
height is `height` points. If `colors` is provided it is a mapping from
characters to rgb color strings.
"""
alphabet = matrix.sorted_alphabet
rval = StringIO()
    # Read header and substitute in width / height
header = Template( pkg_resources.resource_string( __name__, "template.ps" ) )
rval.write( header.substitute( bounding_box_width = ceil( base_width * matrix.width ) + PAD,
bounding_box_height = ceil( height ) + PAD ) )
# Determine heights
heights = freqs_to_heights( matrix )
height_scale = height / log2( len( alphabet ) )
# Draw each "row" of the matrix
for i, row in enumerate( heights ):
x = ( i * base_width )
y = 0
for j, base_height in enumerate( row ):
char = alphabet[j]
page_height = height_scale * base_height
# print matrix.alphabet[j], base_height, height_scale, page_height
if page_height > 1:
# Draw letter
rval.write( "%s setrgbcolor\n" % colors.get( char, '0 0 0' ) )
rval.write( "%3.2f " % x )
rval.write( "%3.2f " % y )
rval.write( "%3.2f " % ( x + base_width ) )
rval.write( "%3.2f " % ( y + page_height ) )
rval.write( "(%s) textInBox\n" % char )
y += page_height
rval.write( "showpage" )
return rval.getvalue()
|
python
|
def eps_logo( matrix, base_width, height, colors=DNA_DEFAULT_COLORS ):
"""
Return an EPS document containing a sequence logo for matrix where each
    base is shown as a column of `base_width` points and the total logo
height is `height` points. If `colors` is provided it is a mapping from
characters to rgb color strings.
"""
alphabet = matrix.sorted_alphabet
rval = StringIO()
    # Read header and substitute in width / height
header = Template( pkg_resources.resource_string( __name__, "template.ps" ) )
rval.write( header.substitute( bounding_box_width = ceil( base_width * matrix.width ) + PAD,
bounding_box_height = ceil( height ) + PAD ) )
# Determine heights
heights = freqs_to_heights( matrix )
height_scale = height / log2( len( alphabet ) )
# Draw each "row" of the matrix
for i, row in enumerate( heights ):
x = ( i * base_width )
y = 0
for j, base_height in enumerate( row ):
char = alphabet[j]
page_height = height_scale * base_height
# print matrix.alphabet[j], base_height, height_scale, page_height
if page_height > 1:
# Draw letter
rval.write( "%s setrgbcolor\n" % colors.get( char, '0 0 0' ) )
rval.write( "%3.2f " % x )
rval.write( "%3.2f " % y )
rval.write( "%3.2f " % ( x + base_width ) )
rval.write( "%3.2f " % ( y + page_height ) )
rval.write( "(%s) textInBox\n" % char )
y += page_height
rval.write( "showpage" )
return rval.getvalue()
|
[
"def",
"eps_logo",
"(",
"matrix",
",",
"base_width",
",",
"height",
",",
"colors",
"=",
"DNA_DEFAULT_COLORS",
")",
":",
"alphabet",
"=",
"matrix",
".",
"sorted_alphabet",
"rval",
"=",
"StringIO",
"(",
")",
"# Read header ans substitute in width / height",
"header",
"=",
"Template",
"(",
"pkg_resources",
".",
"resource_string",
"(",
"__name__",
",",
"\"template.ps\"",
")",
")",
"rval",
".",
"write",
"(",
"header",
".",
"substitute",
"(",
"bounding_box_width",
"=",
"ceil",
"(",
"base_width",
"*",
"matrix",
".",
"width",
")",
"+",
"PAD",
",",
"bounding_box_height",
"=",
"ceil",
"(",
"height",
")",
"+",
"PAD",
")",
")",
"# Determine heights",
"heights",
"=",
"freqs_to_heights",
"(",
"matrix",
")",
"height_scale",
"=",
"height",
"/",
"log2",
"(",
"len",
"(",
"alphabet",
")",
")",
"# Draw each \"row\" of the matrix",
"for",
"i",
",",
"row",
"in",
"enumerate",
"(",
"heights",
")",
":",
"x",
"=",
"(",
"i",
"*",
"base_width",
")",
"y",
"=",
"0",
"for",
"j",
",",
"base_height",
"in",
"enumerate",
"(",
"row",
")",
":",
"char",
"=",
"alphabet",
"[",
"j",
"]",
"page_height",
"=",
"height_scale",
"*",
"base_height",
"# print matrix.alphabet[j], base_height, height_scale, page_height",
"if",
"page_height",
">",
"1",
":",
"# Draw letter",
"rval",
".",
"write",
"(",
"\"%s setrgbcolor\\n\"",
"%",
"colors",
".",
"get",
"(",
"char",
",",
"'0 0 0'",
")",
")",
"rval",
".",
"write",
"(",
"\"%3.2f \"",
"%",
"x",
")",
"rval",
".",
"write",
"(",
"\"%3.2f \"",
"%",
"y",
")",
"rval",
".",
"write",
"(",
"\"%3.2f \"",
"%",
"(",
"x",
"+",
"base_width",
")",
")",
"rval",
".",
"write",
"(",
"\"%3.2f \"",
"%",
"(",
"y",
"+",
"page_height",
")",
")",
"rval",
".",
"write",
"(",
"\"(%s) textInBox\\n\"",
"%",
"char",
")",
"y",
"+=",
"page_height",
"rval",
".",
"write",
"(",
"\"showpage\"",
")",
"return",
"rval",
".",
"getvalue",
"(",
")"
] |
Return an EPS document containing a sequence logo for matrix where each
base is shown as a column of `base_width` points and the total logo
height is `height` points. If `colors` is provided it is a mapping from
characters to rgb color strings.
|
[
"Return",
"an",
"EPS",
"document",
"containing",
"a",
"sequence",
"logo",
"for",
"matrix",
"where",
"each",
"bases",
"is",
"shown",
"as",
"a",
"column",
"of",
"base_width",
"points",
"and",
"the",
"total",
"logo",
"height",
"is",
"height",
"points",
".",
"If",
"colors",
"is",
"provided",
"it",
"is",
"a",
"mapping",
"from",
"characters",
"to",
"rgb",
"color",
"strings",
"."
] |
09cb725284803df90a468d910f2274628d8647de
|
https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx/motif/logo/__init__.py#L38-L72
|
train
|
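A hedged usage sketch for `eps_logo`: it assumes a `FrequencyMatrix` (from bx.motif.pwm) built elsewhere, since no reader is shown in this record. The return value is the EPS text itself:

# `matrix` is assumed to be a bx.motif.pwm.FrequencyMatrix built elsewhere.
eps_text = eps_logo(matrix, base_width=20, height=100)
with open("logo.eps", "w") as out:
    out.write(eps_text)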
bxlab/bx-python
|
scripts/bnMapper.py
|
transform
|
def transform(elem, chain_CT_CQ, max_gap):
"""transform the coordinates of this elem into the other species.
elem intersects this chain's ginterval.
:return: a list of the type [(to_chr, start, end, elem[id]) ... ]"""
(chain, CT, CQ) = chain_CT_CQ
start, end = max(elem['start'], chain.tStart) - chain.tStart, min(elem['end'], chain.tEnd) - chain.tStart
assert np.all( (CT[:,1] - CT[:,0]) == (CQ[:,1] - CQ[:,0]) )
to_chrom = chain.qName
to_gab_start = chain.qStart
start_idx = np.where( CT[:,1] > start )[0][0]
end_idx = np.where( CT[:,0] < end )[0][-1]
if start_idx > end_idx: #maps to a gap region on the other species
return []
## apply the gap threshold
if max_gap >= 0 and start_idx < end_idx - 1:
if np.max(CT[(start_idx+1):end_idx,0] - CT[start_idx:(end_idx-1),1]) > max_gap or np.max(CQ[(start_idx+1):end_idx,0] - CQ[start_idx:(end_idx-1),1]) > max_gap:
return []
assert start < CT[start_idx, 1]
assert CT[end_idx, 0] < end
to_start = CQ[start_idx, 0] + max(0, start - CT[start_idx,0]) # correct if on middle of interval
to_end = CQ[end_idx, 1] - max(0, CT[end_idx, 1] - end) # idem
if start_idx == end_idx: #elem falls in a single run of matches
slices = [(to_start, to_end)]
else:
slices = [(to_start, CQ[start_idx,1])]
slices += [(CQ[i,0], CQ[i,1]) for i in range(start_idx+1, end_idx)]
slices.append( (CQ[end_idx,0], to_end) )
if chain.qStrand == '-':
Sz = chain.qEnd - chain.qStart
slices = [(Sz-t[1], Sz-t[0]) for t in slices]
return [(to_chrom, to_gab_start + t[0], to_gab_start + t[1], elem['id']) for t in slices]
|
python
|
def transform(elem, chain_CT_CQ, max_gap):
"""transform the coordinates of this elem into the other species.
elem intersects this chain's ginterval.
:return: a list of the type [(to_chr, start, end, elem[id]) ... ]"""
(chain, CT, CQ) = chain_CT_CQ
start, end = max(elem['start'], chain.tStart) - chain.tStart, min(elem['end'], chain.tEnd) - chain.tStart
assert np.all( (CT[:,1] - CT[:,0]) == (CQ[:,1] - CQ[:,0]) )
to_chrom = chain.qName
to_gab_start = chain.qStart
start_idx = np.where( CT[:,1] > start )[0][0]
end_idx = np.where( CT[:,0] < end )[0][-1]
if start_idx > end_idx: #maps to a gap region on the other species
return []
## apply the gap threshold
if max_gap >= 0 and start_idx < end_idx - 1:
if np.max(CT[(start_idx+1):end_idx,0] - CT[start_idx:(end_idx-1),1]) > max_gap or np.max(CQ[(start_idx+1):end_idx,0] - CQ[start_idx:(end_idx-1),1]) > max_gap:
return []
assert start < CT[start_idx, 1]
assert CT[end_idx, 0] < end
to_start = CQ[start_idx, 0] + max(0, start - CT[start_idx,0]) # correct if on middle of interval
to_end = CQ[end_idx, 1] - max(0, CT[end_idx, 1] - end) # idem
if start_idx == end_idx: #elem falls in a single run of matches
slices = [(to_start, to_end)]
else:
slices = [(to_start, CQ[start_idx,1])]
slices += [(CQ[i,0], CQ[i,1]) for i in range(start_idx+1, end_idx)]
slices.append( (CQ[end_idx,0], to_end) )
if chain.qStrand == '-':
Sz = chain.qEnd - chain.qStart
slices = [(Sz-t[1], Sz-t[0]) for t in slices]
return [(to_chrom, to_gab_start + t[0], to_gab_start + t[1], elem['id']) for t in slices]
|
[
"def",
"transform",
"(",
"elem",
",",
"chain_CT_CQ",
",",
"max_gap",
")",
":",
"(",
"chain",
",",
"CT",
",",
"CQ",
")",
"=",
"chain_CT_CQ",
"start",
",",
"end",
"=",
"max",
"(",
"elem",
"[",
"'start'",
"]",
",",
"chain",
".",
"tStart",
")",
"-",
"chain",
".",
"tStart",
",",
"min",
"(",
"elem",
"[",
"'end'",
"]",
",",
"chain",
".",
"tEnd",
")",
"-",
"chain",
".",
"tStart",
"assert",
"np",
".",
"all",
"(",
"(",
"CT",
"[",
":",
",",
"1",
"]",
"-",
"CT",
"[",
":",
",",
"0",
"]",
")",
"==",
"(",
"CQ",
"[",
":",
",",
"1",
"]",
"-",
"CQ",
"[",
":",
",",
"0",
"]",
")",
")",
"to_chrom",
"=",
"chain",
".",
"qName",
"to_gab_start",
"=",
"chain",
".",
"qStart",
"start_idx",
"=",
"np",
".",
"where",
"(",
"CT",
"[",
":",
",",
"1",
"]",
">",
"start",
")",
"[",
"0",
"]",
"[",
"0",
"]",
"end_idx",
"=",
"np",
".",
"where",
"(",
"CT",
"[",
":",
",",
"0",
"]",
"<",
"end",
")",
"[",
"0",
"]",
"[",
"-",
"1",
"]",
"if",
"start_idx",
">",
"end_idx",
":",
"#maps to a gap region on the other species",
"return",
"[",
"]",
"## apply the gap threshold",
"if",
"max_gap",
">=",
"0",
"and",
"start_idx",
"<",
"end_idx",
"-",
"1",
":",
"if",
"np",
".",
"max",
"(",
"CT",
"[",
"(",
"start_idx",
"+",
"1",
")",
":",
"end_idx",
",",
"0",
"]",
"-",
"CT",
"[",
"start_idx",
":",
"(",
"end_idx",
"-",
"1",
")",
",",
"1",
"]",
")",
">",
"max_gap",
"or",
"np",
".",
"max",
"(",
"CQ",
"[",
"(",
"start_idx",
"+",
"1",
")",
":",
"end_idx",
",",
"0",
"]",
"-",
"CQ",
"[",
"start_idx",
":",
"(",
"end_idx",
"-",
"1",
")",
",",
"1",
"]",
")",
">",
"max_gap",
":",
"return",
"[",
"]",
"assert",
"start",
"<",
"CT",
"[",
"start_idx",
",",
"1",
"]",
"assert",
"CT",
"[",
"end_idx",
",",
"0",
"]",
"<",
"end",
"to_start",
"=",
"CQ",
"[",
"start_idx",
",",
"0",
"]",
"+",
"max",
"(",
"0",
",",
"start",
"-",
"CT",
"[",
"start_idx",
",",
"0",
"]",
")",
"# correct if on middle of interval",
"to_end",
"=",
"CQ",
"[",
"end_idx",
",",
"1",
"]",
"-",
"max",
"(",
"0",
",",
"CT",
"[",
"end_idx",
",",
"1",
"]",
"-",
"end",
")",
"# idem",
"if",
"start_idx",
"==",
"end_idx",
":",
"#elem falls in a single run of matches",
"slices",
"=",
"[",
"(",
"to_start",
",",
"to_end",
")",
"]",
"else",
":",
"slices",
"=",
"[",
"(",
"to_start",
",",
"CQ",
"[",
"start_idx",
",",
"1",
"]",
")",
"]",
"slices",
"+=",
"[",
"(",
"CQ",
"[",
"i",
",",
"0",
"]",
",",
"CQ",
"[",
"i",
",",
"1",
"]",
")",
"for",
"i",
"in",
"range",
"(",
"start_idx",
"+",
"1",
",",
"end_idx",
")",
"]",
"slices",
".",
"append",
"(",
"(",
"CQ",
"[",
"end_idx",
",",
"0",
"]",
",",
"to_end",
")",
")",
"if",
"chain",
".",
"qStrand",
"==",
"'-'",
":",
"Sz",
"=",
"chain",
".",
"qEnd",
"-",
"chain",
".",
"qStart",
"slices",
"=",
"[",
"(",
"Sz",
"-",
"t",
"[",
"1",
"]",
",",
"Sz",
"-",
"t",
"[",
"0",
"]",
")",
"for",
"t",
"in",
"slices",
"]",
"return",
"[",
"(",
"to_chrom",
",",
"to_gab_start",
"+",
"t",
"[",
"0",
"]",
",",
"to_gab_start",
"+",
"t",
"[",
"1",
"]",
",",
"elem",
"[",
"'id'",
"]",
")",
"for",
"t",
"in",
"slices",
"]"
] |
transform the coordinates of this elem into the other species.
elem intersects this chain's ginterval.
:return: a list of the type [(to_chr, start, end, elem[id]) ... ]
|
[
"transform",
"the",
"coordinates",
"of",
"this",
"elem",
"into",
"the",
"other",
"species",
"."
] |
09cb725284803df90a468d910f2274628d8647de
|
https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/scripts/bnMapper.py#L63-L100
|
train
|
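A worked illustration of the arithmetic in `transform`, using a hypothetical chain tuple that carries only the fields the function actually reads; the coordinates are invented for the example:

import numpy as np
from collections import namedtuple

# Hypothetical stand-in for the parsed chain header used above.
Chain = namedtuple("Chain", "tStart tEnd qName qStart qEnd qStrand")
chain = Chain(tStart=100, tEnd=130, qName="chrQ", qStart=500, qEnd=530, qStrand="+")

# A single gapless block spanning the whole chain, in chain-local coordinates.
CT = np.array([[0, 30]])
CQ = np.array([[0, 30]])

elem = {"start": 110, "end": 120, "id": "feat1"}
print(transform(elem, (chain, CT, CQ), max_gap=0))
# -> [('chrQ', 510, 520, 'feat1')]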
bxlab/bx-python
|
scripts/bnMapper.py
|
loadChains
|
def loadChains(path):
"name says it."
EPO = epo.Chain._parse_file(path, True)
## convert coordinates w.r.t the forward strand (into slices)
## compute cummulative intervals
for i in range( len(EPO) ):
ch, S, T, Q = EPO[i]
if ch.tStrand == '-':
ch = ch._replace(tEnd = ch.tSize - ch.tStart,
tStart = ch.tSize - ch.tEnd)
if ch.qStrand == '-':
ch = ch._replace(qEnd = ch.qSize - ch.qStart,
qStart = ch.qSize - ch.qEnd)
EPO[i] = (ch,
epo.cummulative_intervals(S, T),
epo.cummulative_intervals(S, Q)
)
##now each element of epo is (chain_header, target_intervals, query_intervals)
assert all( t[0].tStrand == '+' for t in EPO ), "all target strands should be +"
return EPO
|
python
|
def loadChains(path):
"name says it."
EPO = epo.Chain._parse_file(path, True)
## convert coordinates w.r.t the forward strand (into slices)
## compute cummulative intervals
for i in range( len(EPO) ):
ch, S, T, Q = EPO[i]
if ch.tStrand == '-':
ch = ch._replace(tEnd = ch.tSize - ch.tStart,
tStart = ch.tSize - ch.tEnd)
if ch.qStrand == '-':
ch = ch._replace(qEnd = ch.qSize - ch.qStart,
qStart = ch.qSize - ch.qEnd)
EPO[i] = (ch,
epo.cummulative_intervals(S, T),
epo.cummulative_intervals(S, Q)
)
##now each element of epo is (chain_header, target_intervals, query_intervals)
assert all( t[0].tStrand == '+' for t in EPO ), "all target strands should be +"
return EPO
|
[
"def",
"loadChains",
"(",
"path",
")",
":",
"EPO",
"=",
"epo",
".",
"Chain",
".",
"_parse_file",
"(",
"path",
",",
"True",
")",
"## convert coordinates w.r.t the forward strand (into slices)",
"## compute cummulative intervals",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"EPO",
")",
")",
":",
"ch",
",",
"S",
",",
"T",
",",
"Q",
"=",
"EPO",
"[",
"i",
"]",
"if",
"ch",
".",
"tStrand",
"==",
"'-'",
":",
"ch",
"=",
"ch",
".",
"_replace",
"(",
"tEnd",
"=",
"ch",
".",
"tSize",
"-",
"ch",
".",
"tStart",
",",
"tStart",
"=",
"ch",
".",
"tSize",
"-",
"ch",
".",
"tEnd",
")",
"if",
"ch",
".",
"qStrand",
"==",
"'-'",
":",
"ch",
"=",
"ch",
".",
"_replace",
"(",
"qEnd",
"=",
"ch",
".",
"qSize",
"-",
"ch",
".",
"qStart",
",",
"qStart",
"=",
"ch",
".",
"qSize",
"-",
"ch",
".",
"qEnd",
")",
"EPO",
"[",
"i",
"]",
"=",
"(",
"ch",
",",
"epo",
".",
"cummulative_intervals",
"(",
"S",
",",
"T",
")",
",",
"epo",
".",
"cummulative_intervals",
"(",
"S",
",",
"Q",
")",
")",
"##now each element of epo is (chain_header, target_intervals, query_intervals)",
"assert",
"all",
"(",
"t",
"[",
"0",
"]",
".",
"tStrand",
"==",
"'+'",
"for",
"t",
"in",
"EPO",
")",
",",
"\"all target strands should be +\"",
"return",
"EPO"
] |
name says it.
|
[
"name",
"says",
"it",
"."
] |
09cb725284803df90a468d910f2274628d8647de
|
https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/scripts/bnMapper.py#L228-L248
|
train
|
bxlab/bx-python
|
scripts/bnMapper.py
|
loadFeatures
|
def loadFeatures(path, opt):
"""
Load features. For BED, only BED4 columns are loaded.
For narrowPeak, all columns are loaded.
"""
log.info("loading from %s ..." % path)
data = []
if opt.in_format == "BED":
with open(path) as fd:
for line in fd:
cols = line.split()
data.append( (cols[0], int(cols[1]), int(cols[2]), cols[3]) )
data = np.array(data, dtype=elem_t)
else:
with open(path) as fd:
for line in fd:
cols = line.split()
data.append( (cols[0], int(cols[1]), int(cols[2]), cols[3], int(cols[4]),
cols[5], float(cols[6]), float(cols[7]), float(cols[8]),
int(cols[-1])+int(cols[1])) )
data = np.array(data, dtype=narrowPeak_t)
return data
|
python
|
def loadFeatures(path, opt):
"""
Load features. For BED, only BED4 columns are loaded.
For narrowPeak, all columns are loaded.
"""
log.info("loading from %s ..." % path)
data = []
if opt.in_format == "BED":
with open(path) as fd:
for line in fd:
cols = line.split()
data.append( (cols[0], int(cols[1]), int(cols[2]), cols[3]) )
data = np.array(data, dtype=elem_t)
else:
with open(path) as fd:
for line in fd:
cols = line.split()
data.append( (cols[0], int(cols[1]), int(cols[2]), cols[3], int(cols[4]),
cols[5], float(cols[6]), float(cols[7]), float(cols[8]),
int(cols[-1])+int(cols[1])) )
data = np.array(data, dtype=narrowPeak_t)
return data
|
[
"def",
"loadFeatures",
"(",
"path",
",",
"opt",
")",
":",
"log",
".",
"info",
"(",
"\"loading from %s ...\"",
"%",
"path",
")",
"data",
"=",
"[",
"]",
"if",
"opt",
".",
"in_format",
"==",
"\"BED\"",
":",
"with",
"open",
"(",
"path",
")",
"as",
"fd",
":",
"for",
"line",
"in",
"fd",
":",
"cols",
"=",
"line",
".",
"split",
"(",
")",
"data",
".",
"append",
"(",
"(",
"cols",
"[",
"0",
"]",
",",
"int",
"(",
"cols",
"[",
"1",
"]",
")",
",",
"int",
"(",
"cols",
"[",
"2",
"]",
")",
",",
"cols",
"[",
"3",
"]",
")",
")",
"data",
"=",
"np",
".",
"array",
"(",
"data",
",",
"dtype",
"=",
"elem_t",
")",
"else",
":",
"with",
"open",
"(",
"path",
")",
"as",
"fd",
":",
"for",
"line",
"in",
"fd",
":",
"cols",
"=",
"line",
".",
"split",
"(",
")",
"data",
".",
"append",
"(",
"(",
"cols",
"[",
"0",
"]",
",",
"int",
"(",
"cols",
"[",
"1",
"]",
")",
",",
"int",
"(",
"cols",
"[",
"2",
"]",
")",
",",
"cols",
"[",
"3",
"]",
",",
"int",
"(",
"cols",
"[",
"4",
"]",
")",
",",
"cols",
"[",
"5",
"]",
",",
"float",
"(",
"cols",
"[",
"6",
"]",
")",
",",
"float",
"(",
"cols",
"[",
"7",
"]",
")",
",",
"float",
"(",
"cols",
"[",
"8",
"]",
")",
",",
"int",
"(",
"cols",
"[",
"-",
"1",
"]",
")",
"+",
"int",
"(",
"cols",
"[",
"1",
"]",
")",
")",
")",
"data",
"=",
"np",
".",
"array",
"(",
"data",
",",
"dtype",
"=",
"narrowPeak_t",
")",
"return",
"data"
] |
Load features. For BED, only BED4 columns are loaded.
For narrowPeak, all columns are loaded.
|
[
"Load",
"features",
".",
"For",
"BED",
"only",
"BED4",
"columns",
"are",
"loaded",
".",
"For",
"narrowPeak",
"all",
"columns",
"are",
"loaded",
"."
] |
09cb725284803df90a468d910f2274628d8647de
|
https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/scripts/bnMapper.py#L250-L272
|
train
|
bxlab/bx-python
|
scripts/bnMapper.py
|
GIntervalTree.add
|
def add(self, chrom, element):
"""insert an element. use this method as the IntervalTree one.
    this will simply call the IntervalTree.insert_interval method on the right tree
:param chrom: chromosome
:param element: the argument of IntervalTree.insert_interval
:return: None
"""
self._trees.setdefault(chrom, IntervalTree()).insert_interval( element )
|
python
|
def add(self, chrom, element):
"""insert an element. use this method as the IntervalTree one.
    this will simply call the IntervalTree.insert_interval method on the right tree
:param chrom: chromosome
:param element: the argument of IntervalTree.insert_interval
:return: None
"""
self._trees.setdefault(chrom, IntervalTree()).insert_interval( element )
|
[
"def",
"add",
"(",
"self",
",",
"chrom",
",",
"element",
")",
":",
"self",
".",
"_trees",
".",
"setdefault",
"(",
"chrom",
",",
"IntervalTree",
"(",
")",
")",
".",
"insert_interval",
"(",
"element",
")"
] |
insert an element. use this method as the IntervalTree one.
this will simply call the IntervalTree.insert_interval method on the right tree
:param chrom: chromosome
:param element: the argument of IntervalTree.insert_interval
:return: None
|
[
"insert",
"an",
"element",
".",
"use",
"this",
"method",
"as",
"the",
"IntervalTree",
"one",
".",
"this",
"will",
"simply",
"call",
"the",
"IntervalTree",
".",
"add",
"method",
"on",
"the",
"right",
"tree"
] |
09cb725284803df90a468d910f2274628d8647de
|
https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/scripts/bnMapper.py#L38-L47
|
train
|
bxlab/bx-python
|
scripts/bnMapper.py
|
GIntervalTree.find
|
def find(self, chrom, start, end):
"""find the intersecting elements
:param chrom: chromosome
:param start: start
:param end: end
:return: a list of intersecting elements"""
tree = self._trees.get( chrom, None )
if tree:
return tree.find( start, end )
#return always a list
return []
|
python
|
def find(self, chrom, start, end):
"""find the intersecting elements
:param chrom: chromosome
:param start: start
:param end: end
:return: a list of intersecting elements"""
tree = self._trees.get( chrom, None )
if tree:
return tree.find( start, end )
#return always a list
return []
|
[
"def",
"find",
"(",
"self",
",",
"chrom",
",",
"start",
",",
"end",
")",
":",
"tree",
"=",
"self",
".",
"_trees",
".",
"get",
"(",
"chrom",
",",
"None",
")",
"if",
"tree",
":",
"return",
"tree",
".",
"find",
"(",
"start",
",",
"end",
")",
"#return always a list",
"return",
"[",
"]"
] |
find the intersecting elements
:param chrom: chromosome
:param start: start
:param end: end
:return: a list of intersecting elements
|
[
"find",
"the",
"intersecting",
"elements"
] |
09cb725284803df90a468d910f2274628d8647de
|
https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/scripts/bnMapper.py#L49-L61
|
train
|
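A usage sketch combining the `add` and `find` methods above; `Interval` is assumed to come from bx.intervals.intersection, which is what the `insert_interval` call implies:

from bx.intervals.intersection import Interval

trees = GIntervalTree()  # assumes __init__ (not shown) creates the _trees dict
trees.add("chr1", Interval(100, 200, value="exon1"))
hits = trees.find("chr1", 150, 160)    # -> [Interval(100, 200, ...)]
misses = trees.find("chr2", 150, 160)  # -> [] (always a list)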
bxlab/bx-python
|
lib/bx/motif/pwm.py
|
BaseMatrix.create_from_other
|
def create_from_other( Class, other, values=None ):
"""
Create a new Matrix with attributes taken from `other` but with the
values taken from `values` if provided
"""
m = Class()
m.alphabet = other.alphabet
m.sorted_alphabet = other.sorted_alphabet
m.char_to_index = other.char_to_index
if values is not None:
m.values = values
else:
m.values = other.values
return m
|
python
|
def create_from_other( Class, other, values=None ):
"""
Create a new Matrix with attributes taken from `other` but with the
values taken from `values` if provided
"""
m = Class()
m.alphabet = other.alphabet
m.sorted_alphabet = other.sorted_alphabet
m.char_to_index = other.char_to_index
if values is not None:
m.values = values
else:
m.values = other.values
return m
|
[
"def",
"create_from_other",
"(",
"Class",
",",
"other",
",",
"values",
"=",
"None",
")",
":",
"m",
"=",
"Class",
"(",
")",
"m",
".",
"alphabet",
"=",
"other",
".",
"alphabet",
"m",
".",
"sorted_alphabet",
"=",
"other",
".",
"sorted_alphabet",
"m",
".",
"char_to_index",
"=",
"other",
".",
"char_to_index",
"if",
"values",
"is",
"not",
"None",
":",
"m",
".",
"values",
"=",
"values",
"else",
":",
"m",
".",
"values",
"=",
"other",
".",
"values",
"return",
"m"
] |
Create a new Matrix with attributes taken from `other` but with the
values taken from `values` if provided
|
[
"Create",
"a",
"new",
"Matrix",
"with",
"attributes",
"taken",
"from",
"other",
"but",
"with",
"the",
"values",
"taken",
"from",
"values",
"if",
"provided"
] |
09cb725284803df90a468d910f2274628d8647de
|
https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx/motif/pwm.py#L51-L64
|
train
|
bxlab/bx-python
|
lib/bx/motif/pwm.py
|
FrequencyMatrix.to_logodds_scoring_matrix
|
def to_logodds_scoring_matrix( self, background=None, correction=DEFAULT_CORRECTION ):
"""
Create a standard logodds scoring matrix.
"""
alphabet_size = len( self.alphabet )
if background is None:
background = ones( alphabet_size, float32 ) / alphabet_size
# Row totals as a one column array
totals = numpy.sum( self.values, 1 )[:,newaxis]
values = log2( maximum( self.values, correction ) ) \
- log2( totals ) \
- log2( maximum( background, correction ) )
return ScoringMatrix.create_from_other( self, values.astype( float32 ) )
|
python
|
def to_logodds_scoring_matrix( self, background=None, correction=DEFAULT_CORRECTION ):
"""
Create a standard logodds scoring matrix.
"""
alphabet_size = len( self.alphabet )
if background is None:
background = ones( alphabet_size, float32 ) / alphabet_size
# Row totals as a one column array
totals = numpy.sum( self.values, 1 )[:,newaxis]
values = log2( maximum( self.values, correction ) ) \
- log2( totals ) \
- log2( maximum( background, correction ) )
return ScoringMatrix.create_from_other( self, values.astype( float32 ) )
|
[
"def",
"to_logodds_scoring_matrix",
"(",
"self",
",",
"background",
"=",
"None",
",",
"correction",
"=",
"DEFAULT_CORRECTION",
")",
":",
"alphabet_size",
"=",
"len",
"(",
"self",
".",
"alphabet",
")",
"if",
"background",
"is",
"None",
":",
"background",
"=",
"ones",
"(",
"alphabet_size",
",",
"float32",
")",
"/",
"alphabet_size",
"# Row totals as a one column array",
"totals",
"=",
"numpy",
".",
"sum",
"(",
"self",
".",
"values",
",",
"1",
")",
"[",
":",
",",
"newaxis",
"]",
"values",
"=",
"log2",
"(",
"maximum",
"(",
"self",
".",
"values",
",",
"correction",
")",
")",
"-",
"log2",
"(",
"totals",
")",
"-",
"log2",
"(",
"maximum",
"(",
"background",
",",
"correction",
")",
")",
"return",
"ScoringMatrix",
".",
"create_from_other",
"(",
"self",
",",
"values",
".",
"astype",
"(",
"float32",
")",
")"
] |
Create a standard logodds scoring matrix.
|
[
"Create",
"a",
"standard",
"logodds",
"scoring",
"matrix",
"."
] |
09cb725284803df90a468d910f2274628d8647de
|
https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx/motif/pwm.py#L95-L107
|
train
|
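A plain-numpy restatement of the log-odds computation above for one row of counts; the uniform background mirrors the method's default, and the `correction` value is a stand-in for DEFAULT_CORRECTION, which is not shown in this record:

import numpy as np

counts = np.array([12.0, 2.0, 2.0, 4.0])  # e.g. A, C, G, T counts at one position
background = np.ones(4) / 4               # uniform default, as above
correction = 1e-5                         # stand-in for DEFAULT_CORRECTION

logodds = (np.log2(np.maximum(counts, correction))
           - np.log2(counts.sum())
           - np.log2(np.maximum(background, correction)))
# Each entry is log2(frequency / background), floored by `correction`.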
bxlab/bx-python
|
lib/bx/motif/pwm.py
|
ScoringMatrix.score_string
|
def score_string( self, string ):
"""
Score each valid position in `string` using this scoring matrix.
Positions which were not scored are set to nan.
"""
rval = zeros( len( string ), float32 )
rval[:] = nan
_pwm.score_string( self.values, self.char_to_index, string, rval )
return rval
|
python
|
def score_string( self, string ):
"""
Score each valid position in `string` using this scoring matrix.
Positions which were not scored are set to nan.
"""
rval = zeros( len( string ), float32 )
rval[:] = nan
_pwm.score_string( self.values, self.char_to_index, string, rval )
return rval
|
[
"def",
"score_string",
"(",
"self",
",",
"string",
")",
":",
"rval",
"=",
"zeros",
"(",
"len",
"(",
"string",
")",
",",
"float32",
")",
"rval",
"[",
":",
"]",
"=",
"nan",
"_pwm",
".",
"score_string",
"(",
"self",
".",
"values",
",",
"self",
".",
"char_to_index",
",",
"string",
",",
"rval",
")",
"return",
"rval"
] |
Score each valid position in `string` using this scoring matrix.
Positions which were not scored are set to nan.
|
[
"Score",
"each",
"valid",
"position",
"in",
"string",
"using",
"this",
"scoring",
"matrix",
".",
"Positions",
"which",
"were",
"not",
"scored",
"are",
"set",
"to",
"nan",
"."
] |
09cb725284803df90a468d910f2274628d8647de
|
https://github.com/bxlab/bx-python/blob/09cb725284803df90a468d910f2274628d8647de/lib/bx/motif/pwm.py#L131-L139
|
train
|
jborean93/ntlm-auth
|
ntlm_auth/compute_response.py
|
ComputeResponse._calc_resp
|
def _calc_resp(password_hash, server_challenge):
"""
Generate the LM response given a 16-byte password hash and the
challenge from the CHALLENGE_MESSAGE
:param password_hash: A 16-byte password hash
    :param server_challenge: A random 8-byte challenge generated by the
server in the CHALLENGE_MESSAGE
:return res: A 24-byte buffer to contain the LM response upon return
"""
# padding with zeros to make the hash 21 bytes long
password_hash += b'\x00' * (21 - len(password_hash))
res = b''
dobj = DES(DES.key56_to_key64(password_hash[0:7]))
res = res + dobj.encrypt(server_challenge[0:8])
dobj = DES(DES.key56_to_key64(password_hash[7:14]))
res = res + dobj.encrypt(server_challenge[0:8])
dobj = DES(DES.key56_to_key64(password_hash[14:21]))
res = res + dobj.encrypt(server_challenge[0:8])
return res
|
python
|
def _calc_resp(password_hash, server_challenge):
"""
Generate the LM response given a 16-byte password hash and the
challenge from the CHALLENGE_MESSAGE
:param password_hash: A 16-byte password hash
    :param server_challenge: A random 8-byte challenge generated by the
server in the CHALLENGE_MESSAGE
:return res: A 24-byte buffer to contain the LM response upon return
"""
# padding with zeros to make the hash 21 bytes long
password_hash += b'\x00' * (21 - len(password_hash))
res = b''
dobj = DES(DES.key56_to_key64(password_hash[0:7]))
res = res + dobj.encrypt(server_challenge[0:8])
dobj = DES(DES.key56_to_key64(password_hash[7:14]))
res = res + dobj.encrypt(server_challenge[0:8])
dobj = DES(DES.key56_to_key64(password_hash[14:21]))
res = res + dobj.encrypt(server_challenge[0:8])
return res
|
[
"def",
"_calc_resp",
"(",
"password_hash",
",",
"server_challenge",
")",
":",
"# padding with zeros to make the hash 21 bytes long",
"password_hash",
"+=",
"b'\\x00'",
"*",
"(",
"21",
"-",
"len",
"(",
"password_hash",
")",
")",
"res",
"=",
"b''",
"dobj",
"=",
"DES",
"(",
"DES",
".",
"key56_to_key64",
"(",
"password_hash",
"[",
"0",
":",
"7",
"]",
")",
")",
"res",
"=",
"res",
"+",
"dobj",
".",
"encrypt",
"(",
"server_challenge",
"[",
"0",
":",
"8",
"]",
")",
"dobj",
"=",
"DES",
"(",
"DES",
".",
"key56_to_key64",
"(",
"password_hash",
"[",
"7",
":",
"14",
"]",
")",
")",
"res",
"=",
"res",
"+",
"dobj",
".",
"encrypt",
"(",
"server_challenge",
"[",
"0",
":",
"8",
"]",
")",
"dobj",
"=",
"DES",
"(",
"DES",
".",
"key56_to_key64",
"(",
"password_hash",
"[",
"14",
":",
"21",
"]",
")",
")",
"res",
"=",
"res",
"+",
"dobj",
".",
"encrypt",
"(",
"server_challenge",
"[",
"0",
":",
"8",
"]",
")",
"return",
"res"
] |
Generate the LM response given a 16-byte password hash and the
challenge from the CHALLENGE_MESSAGE
:param password_hash: A 16-byte password hash
:param server_challenge: A random 8-byte challenge generated by the
server in the CHALLENGE_MESSAGE
:return res: A 24-byte buffer to contain the LM response upon return
|
[
"Generate",
"the",
"LM",
"response",
"given",
"a",
"16",
"-",
"byte",
"password",
"hash",
"and",
"the",
"challenge",
"from",
"the",
"CHALLENGE_MESSAGE"
] |
2c7cd81516d9bfd42e8ff473a534d876b21ebb38
|
https://github.com/jborean93/ntlm-auth/blob/2c7cd81516d9bfd42e8ff473a534d876b21ebb38/ntlm_auth/compute_response.py#L433-L455
|
train
|
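An illustration of just the padding-and-splitting step inside `_calc_resp`: a 16-byte hash is zero-padded to 21 bytes and cut into three 7-byte DES keys (the DES class itself appears in the next records):

password_hash = b"\xaa" * 16
padded = password_hash + b"\x00" * (21 - len(password_hash))
keys = [padded[0:7], padded[7:14], padded[14:21]]
assert len(padded) == 21 and all(len(k) == 7 for k in keys)
# Each key is expanded with DES.key56_to_key64 and used to encrypt the
# same 8-byte server challenge, giving the 3 * 8 = 24-byte LM response.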
jborean93/ntlm-auth
|
ntlm_auth/des.py
|
DES.encrypt
|
def encrypt(self, data, pad=True):
"""
DES encrypts the data based on the key it was initialised with.
:param data: The bytes string to encrypt
:param pad: Whether to right pad data with \x00 to a multiple of 8
:return: The encrypted bytes string
"""
encrypted_data = b""
for i in range(0, len(data), 8):
block = data[i:i + 8]
block_length = len(block)
if block_length != 8 and pad:
block += b"\x00" * (8 - block_length)
elif block_length != 8:
raise ValueError("DES encryption must be a multiple of 8 "
"bytes")
encrypted_data += self._encode_block(block)
return encrypted_data
|
python
|
def encrypt(self, data, pad=True):
"""
DES encrypts the data based on the key it was initialised with.
:param data: The bytes string to encrypt
:param pad: Whether to right pad data with \x00 to a multiple of 8
:return: The encrypted bytes string
"""
encrypted_data = b""
for i in range(0, len(data), 8):
block = data[i:i + 8]
block_length = len(block)
if block_length != 8 and pad:
block += b"\x00" * (8 - block_length)
elif block_length != 8:
raise ValueError("DES encryption must be a multiple of 8 "
"bytes")
encrypted_data += self._encode_block(block)
return encrypted_data
|
[
"def",
"encrypt",
"(",
"self",
",",
"data",
",",
"pad",
"=",
"True",
")",
":",
"encrypted_data",
"=",
"b\"\"",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"data",
")",
",",
"8",
")",
":",
"block",
"=",
"data",
"[",
"i",
":",
"i",
"+",
"8",
"]",
"block_length",
"=",
"len",
"(",
"block",
")",
"if",
"block_length",
"!=",
"8",
"and",
"pad",
":",
"block",
"+=",
"b\"\\x00\"",
"*",
"(",
"8",
"-",
"block_length",
")",
"elif",
"block_length",
"!=",
"8",
":",
"raise",
"ValueError",
"(",
"\"DES encryption must be a multiple of 8 \"",
"\"bytes\"",
")",
"encrypted_data",
"+=",
"self",
".",
"_encode_block",
"(",
"block",
")",
"return",
"encrypted_data"
] |
DES encrypts the data based on the key it was initialised with.
:param data: The bytes string to encrypt
:param pad: Whether to right pad data with \x00 to a multiple of 8
:return: The encrypted bytes string
|
[
"DES",
"encrypts",
"the",
"data",
"based",
"on",
"the",
"key",
"it",
"was",
"initialised",
"with",
"."
] |
2c7cd81516d9bfd42e8ff473a534d876b21ebb38
|
https://github.com/jborean93/ntlm-auth/blob/2c7cd81516d9bfd42e8ff473a534d876b21ebb38/ntlm_auth/des.py#L150-L169
|
train
|
jborean93/ntlm-auth
|
ntlm_auth/des.py
|
DES.decrypt
|
def decrypt(self, data):
"""
DES decrypts the data based on the key it was initialised with.
:param data: The encrypted bytes string to decrypt
:return: The decrypted bytes string
"""
decrypted_data = b""
for i in range(0, len(data), 8):
block = data[i:i + 8]
block_length = len(block)
if block_length != 8:
raise ValueError("DES decryption must be a multiple of 8 "
"bytes")
decrypted_data += self._decode_block(block)
return decrypted_data
|
python
|
def decrypt(self, data):
"""
DES decrypts the data based on the key it was initialised with.
:param data: The encrypted bytes string to decrypt
:return: The decrypted bytes string
"""
decrypted_data = b""
for i in range(0, len(data), 8):
block = data[i:i + 8]
block_length = len(block)
if block_length != 8:
raise ValueError("DES decryption must be a multiple of 8 "
"bytes")
decrypted_data += self._decode_block(block)
return decrypted_data
|
[
"def",
"decrypt",
"(",
"self",
",",
"data",
")",
":",
"decrypted_data",
"=",
"b\"\"",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"data",
")",
",",
"8",
")",
":",
"block",
"=",
"data",
"[",
"i",
":",
"i",
"+",
"8",
"]",
"block_length",
"=",
"len",
"(",
"block",
")",
"if",
"block_length",
"!=",
"8",
":",
"raise",
"ValueError",
"(",
"\"DES decryption must be a multiple of 8 \"",
"\"bytes\"",
")",
"decrypted_data",
"+=",
"self",
".",
"_decode_block",
"(",
"block",
")",
"return",
"decrypted_data"
] |
DES decrypts the data based on the key it was initialised with.
:param data: The encrypted bytes string to decrypt
:return: The decrypted bytes string
|
[
"DES",
"decrypts",
"the",
"data",
"based",
"on",
"the",
"key",
"it",
"was",
"initialised",
"with",
"."
] |
2c7cd81516d9bfd42e8ff473a534d876b21ebb38
|
https://github.com/jborean93/ntlm-auth/blob/2c7cd81516d9bfd42e8ff473a534d876b21ebb38/ntlm_auth/des.py#L171-L188
|
train
|
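A round-trip sketch for the `encrypt`/`decrypt` pair above, assuming a `DES` instance is constructed from the 8-byte key that `key56_to_key64` (next record) produces:

key64 = DES.key56_to_key64(b"\x01\x02\x03\x04\x05\x06\x07")
des = DES(key64)
ciphertext = des.encrypt(b"8bytes!!")        # exactly one block, no padding needed
assert des.decrypt(ciphertext) == b"8bytes!!"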
jborean93/ntlm-auth
|
ntlm_auth/des.py
|
DES.key56_to_key64
|
def key56_to_key64(key):
"""
    This takes in a bytes string of 7 bytes and converts it to a bytes
    string of 8 bytes with the odd parity bit being set to every 8 bits.
For example
b"\x01\x02\x03\x04\x05\x06\x07"
00000001 00000010 00000011 00000100 00000101 00000110 00000111
is converted to
b"\x01\x80\x80\x61\x40\x29\x19\x0E"
00000001 10000000 10000000 01100001 01000000 00101001 00011001 00001110
https://crypto.stackexchange.com/questions/15799/des-with-actual-7-byte-key
:param key: 7-byte string sized key
:return: 8-byte string with the parity bits sets from the 7-byte string
"""
if len(key) != 7:
raise ValueError("DES 7-byte key is not 7 bytes in length, "
"actual: %d" % len(key))
new_key = b""
for i in range(0, 8):
if i == 0:
new_value = struct.unpack("B", key[i:i+1])[0]
elif i == 7:
new_value = struct.unpack("B", key[6:7])[0]
new_value = (new_value << 1) & 0xFF
else:
new_value = struct.unpack("B", key[i - 1:i])[0]
next_value = struct.unpack("B", key[i:i + 1])[0]
new_value = ((new_value << (8 - i)) & 0xFF) | next_value >> i
# clear the last bit so the count isn't off
new_value = new_value & ~(1 << 0)
        # set the last bit if the number of set bits is even
new_value = new_value | int(not DES.bit_count(new_value) & 0x1)
new_key += struct.pack("B", new_value)
return new_key
|
python
|
def key56_to_key64(key):
"""
    This takes in a bytes string of 7 bytes and converts it to a bytes
    string of 8 bytes with the odd parity bit being set to every 8 bits.
For example
b"\x01\x02\x03\x04\x05\x06\x07"
00000001 00000010 00000011 00000100 00000101 00000110 00000111
is converted to
b"\x01\x80\x80\x61\x40\x29\x19\x0E"
00000001 10000000 10000000 01100001 01000000 00101001 00011001 00001110
https://crypto.stackexchange.com/questions/15799/des-with-actual-7-byte-key
:param key: 7-byte string sized key
:return: 8-byte string with the parity bits sets from the 7-byte string
"""
if len(key) != 7:
raise ValueError("DES 7-byte key is not 7 bytes in length, "
"actual: %d" % len(key))
new_key = b""
for i in range(0, 8):
if i == 0:
new_value = struct.unpack("B", key[i:i+1])[0]
elif i == 7:
new_value = struct.unpack("B", key[6:7])[0]
new_value = (new_value << 1) & 0xFF
else:
new_value = struct.unpack("B", key[i - 1:i])[0]
next_value = struct.unpack("B", key[i:i + 1])[0]
new_value = ((new_value << (8 - i)) & 0xFF) | next_value >> i
# clear the last bit so the count isn't off
new_value = new_value & ~(1 << 0)
        # set the last bit if the number of set bits is even
new_value = new_value | int(not DES.bit_count(new_value) & 0x1)
new_key += struct.pack("B", new_value)
return new_key
|
[
"def",
"key56_to_key64",
"(",
"key",
")",
":",
"if",
"len",
"(",
"key",
")",
"!=",
"7",
":",
"raise",
"ValueError",
"(",
"\"DES 7-byte key is not 7 bytes in length, \"",
"\"actual: %d\"",
"%",
"len",
"(",
"key",
")",
")",
"new_key",
"=",
"b\"\"",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"8",
")",
":",
"if",
"i",
"==",
"0",
":",
"new_value",
"=",
"struct",
".",
"unpack",
"(",
"\"B\"",
",",
"key",
"[",
"i",
":",
"i",
"+",
"1",
"]",
")",
"[",
"0",
"]",
"elif",
"i",
"==",
"7",
":",
"new_value",
"=",
"struct",
".",
"unpack",
"(",
"\"B\"",
",",
"key",
"[",
"6",
":",
"7",
"]",
")",
"[",
"0",
"]",
"new_value",
"=",
"(",
"new_value",
"<<",
"1",
")",
"&",
"0xFF",
"else",
":",
"new_value",
"=",
"struct",
".",
"unpack",
"(",
"\"B\"",
",",
"key",
"[",
"i",
"-",
"1",
":",
"i",
"]",
")",
"[",
"0",
"]",
"next_value",
"=",
"struct",
".",
"unpack",
"(",
"\"B\"",
",",
"key",
"[",
"i",
":",
"i",
"+",
"1",
"]",
")",
"[",
"0",
"]",
"new_value",
"=",
"(",
"(",
"new_value",
"<<",
"(",
"8",
"-",
"i",
")",
")",
"&",
"0xFF",
")",
"|",
"next_value",
">>",
"i",
"# clear the last bit so the count isn't off",
"new_value",
"=",
"new_value",
"&",
"~",
"(",
"1",
"<<",
"0",
")",
"# set the last bit if the number of set bits are even",
"new_value",
"=",
"new_value",
"|",
"int",
"(",
"not",
"DES",
".",
"bit_count",
"(",
"new_value",
")",
"&",
"0x1",
")",
"new_key",
"+=",
"struct",
".",
"pack",
"(",
"\"B\"",
",",
"new_value",
")",
"return",
"new_key"
] |
This takes in a bytes string of 7 bytes and converts it to a bytes
string of 8 bytes with the odd parity bit being set to every 8 bits.
For example
b"\x01\x02\x03\x04\x05\x06\x07"
00000001 00000010 00000011 00000100 00000101 00000110 00000111
is converted to
b"\x01\x80\x80\x61\x40\x29\x19\x0E"
00000001 10000000 10000000 01100001 01000000 00101001 00011001 00001110
https://crypto.stackexchange.com/questions/15799/des-with-actual-7-byte-key
:param key: 7-byte string sized key
:return: 8-byte string with the parity bits sets from the 7-byte string
|
[
"This",
"takes",
"in",
"an",
"a",
"bytes",
"string",
"of",
"7",
"bytes",
"and",
"converts",
"it",
"to",
"a",
"bytes",
"string",
"of",
"8",
"bytes",
"with",
"the",
"odd",
"parity",
"bit",
"being",
"set",
"to",
"every",
"8",
"bits"
] |
2c7cd81516d9bfd42e8ff473a534d876b21ebb38
|
https://github.com/jborean93/ntlm-auth/blob/2c7cd81516d9bfd42e8ff473a534d876b21ebb38/ntlm_auth/des.py#L191-L234
|
train
|
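The docstring's own worked example, checked directly; this restates the documented conversion rather than adding new behaviour:

key56 = b"\x01\x02\x03\x04\x05\x06\x07"
key64 = DES.key56_to_key64(key56)
assert key64 == b"\x01\x80\x80\x61\x40\x29\x19\x0e"
# Every output byte carries 7 key bits plus an odd-parity bit in bit 0.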
datawire/quark
|
quarkc/compiler.py
|
Check.visit_Method
|
def visit_Method(self, method):
"""
    Ensure method has the same signature as the matching method on the parent interface.
:param method: L{quarkc.ast.Method} instance.
"""
resolved_method = method.resolved.type
def get_params(method, extra_bindings):
# The Method should already be the resolved version.
result = []
for param in method.params:
resolved_param = texpr(param.resolved.type, param.resolved.bindings, extra_bindings)
result.append(resolved_param.id)
return result
def get_return_type(method, extra_bindings):
# The Method should already be the resolved version.
return texpr(method.type.resolved.type, method.type.resolved.bindings,
extra_bindings).id
def signature(method, return_type, params):
return "%s %s(%s)" % (return_type, method.name.text, ", ".join(params))
# Ensure the method has the same signature as matching methods on parent
# interfaces:
interfaces = list(t for t in method.clazz.bases if isinstance(t.resolved.type, Interface))
for interface in interfaces:
interfaceTypeExpr = interface.resolved
for definition in interfaceTypeExpr.type.definitions:
if definition.name.text == method.name.text:
resolved_definition = definition.resolved.type
method_params = get_params(resolved_method, method.clazz.resolved.bindings)
definition_params = get_params(resolved_definition, interfaceTypeExpr.bindings)
method_return = get_return_type(resolved_method, method.clazz.resolved.bindings)
definition_return = get_return_type(resolved_definition, interfaceTypeExpr.bindings)
if method_params != definition_params or method_return != definition_return:
self.errors.append(
"%s: method signature '%s' on %s does not match method '%s' on interface %s" % (
lineinfo(method), signature(resolved_method, method_return, method_params),
method.clazz.resolved.type.id,
signature(resolved_definition, definition_return, definition_params),
interface.resolved.type.id))
|
python
|
def visit_Method(self, method):
"""
    Ensure method has the same signature as the matching method on the parent interface.
:param method: L{quarkc.ast.Method} instance.
"""
resolved_method = method.resolved.type
def get_params(method, extra_bindings):
# The Method should already be the resolved version.
result = []
for param in method.params:
resolved_param = texpr(param.resolved.type, param.resolved.bindings, extra_bindings)
result.append(resolved_param.id)
return result
def get_return_type(method, extra_bindings):
# The Method should already be the resolved version.
return texpr(method.type.resolved.type, method.type.resolved.bindings,
extra_bindings).id
def signature(method, return_type, params):
return "%s %s(%s)" % (return_type, method.name.text, ", ".join(params))
# Ensure the method has the same signature as matching methods on parent
# interfaces:
interfaces = list(t for t in method.clazz.bases if isinstance(t.resolved.type, Interface))
for interface in interfaces:
interfaceTypeExpr = interface.resolved
for definition in interfaceTypeExpr.type.definitions:
if definition.name.text == method.name.text:
resolved_definition = definition.resolved.type
method_params = get_params(resolved_method, method.clazz.resolved.bindings)
definition_params = get_params(resolved_definition, interfaceTypeExpr.bindings)
method_return = get_return_type(resolved_method, method.clazz.resolved.bindings)
definition_return = get_return_type(resolved_definition, interfaceTypeExpr.bindings)
if method_params != definition_params or method_return != definition_return:
self.errors.append(
"%s: method signature '%s' on %s does not match method '%s' on interface %s" % (
lineinfo(method), signature(resolved_method, method_return, method_params),
method.clazz.resolved.type.id,
signature(resolved_definition, definition_return, definition_params),
interface.resolved.type.id))
|
[
"def",
"visit_Method",
"(",
"self",
",",
"method",
")",
":",
"resolved_method",
"=",
"method",
".",
"resolved",
".",
"type",
"def",
"get_params",
"(",
"method",
",",
"extra_bindings",
")",
":",
"# The Method should already be the resolved version.",
"result",
"=",
"[",
"]",
"for",
"param",
"in",
"method",
".",
"params",
":",
"resolved_param",
"=",
"texpr",
"(",
"param",
".",
"resolved",
".",
"type",
",",
"param",
".",
"resolved",
".",
"bindings",
",",
"extra_bindings",
")",
"result",
".",
"append",
"(",
"resolved_param",
".",
"id",
")",
"return",
"result",
"def",
"get_return_type",
"(",
"method",
",",
"extra_bindings",
")",
":",
"# The Method should already be the resolved version.",
"return",
"texpr",
"(",
"method",
".",
"type",
".",
"resolved",
".",
"type",
",",
"method",
".",
"type",
".",
"resolved",
".",
"bindings",
",",
"extra_bindings",
")",
".",
"id",
"def",
"signature",
"(",
"method",
",",
"return_type",
",",
"params",
")",
":",
"return",
"\"%s %s(%s)\"",
"%",
"(",
"return_type",
",",
"method",
".",
"name",
".",
"text",
",",
"\", \"",
".",
"join",
"(",
"params",
")",
")",
"# Ensure the method has the same signature as matching methods on parent",
"# interfaces:",
"interfaces",
"=",
"list",
"(",
"t",
"for",
"t",
"in",
"method",
".",
"clazz",
".",
"bases",
"if",
"isinstance",
"(",
"t",
".",
"resolved",
".",
"type",
",",
"Interface",
")",
")",
"for",
"interface",
"in",
"interfaces",
":",
"interfaceTypeExpr",
"=",
"interface",
".",
"resolved",
"for",
"definition",
"in",
"interfaceTypeExpr",
".",
"type",
".",
"definitions",
":",
"if",
"definition",
".",
"name",
".",
"text",
"==",
"method",
".",
"name",
".",
"text",
":",
"resolved_definition",
"=",
"definition",
".",
"resolved",
".",
"type",
"method_params",
"=",
"get_params",
"(",
"resolved_method",
",",
"method",
".",
"clazz",
".",
"resolved",
".",
"bindings",
")",
"definition_params",
"=",
"get_params",
"(",
"resolved_definition",
",",
"interfaceTypeExpr",
".",
"bindings",
")",
"method_return",
"=",
"get_return_type",
"(",
"resolved_method",
",",
"method",
".",
"clazz",
".",
"resolved",
".",
"bindings",
")",
"definition_return",
"=",
"get_return_type",
"(",
"resolved_definition",
",",
"interfaceTypeExpr",
".",
"bindings",
")",
"if",
"method_params",
"!=",
"definition_params",
"or",
"method_return",
"!=",
"definition_return",
":",
"self",
".",
"errors",
".",
"append",
"(",
"\"%s: method signature '%s' on %s does not match method '%s' on interface %s\"",
"%",
"(",
"lineinfo",
"(",
"method",
")",
",",
"signature",
"(",
"resolved_method",
",",
"method_return",
",",
"method_params",
")",
",",
"method",
".",
"clazz",
".",
"resolved",
".",
"type",
".",
"id",
",",
"signature",
"(",
"resolved_definition",
",",
"definition_return",
",",
"definition_params",
")",
",",
"interface",
".",
"resolved",
".",
"type",
".",
"id",
")",
")"
] |
Ensure method has the same signature as the matching method on the parent interface.
:param method: L{quarkc.ast.Method} instance.
|
[
"Ensure",
"method",
"has",
"the",
"same",
"signature",
"matching",
"method",
"on",
"parent",
"interface",
"."
] |
df0058a148b077c0aff535eb6ee382605c556273
|
https://github.com/datawire/quark/blob/df0058a148b077c0aff535eb6ee382605c556273/quarkc/compiler.py#L743-L786
|
train
|
datawire/quark
|
quarkc/docmaker.py
|
get_doc
|
def get_doc(node):
"""
Return a node's documentation as a string, pulling from annotations
or constructing a simple fake as needed.
"""
res = " ".join(get_doc_annotations(node))
if not res:
res = "(%s)" % node.__class__.__name__.lower()
return res
|
python
|
def get_doc(node):
"""
Return a node's documentation as a string, pulling from annotations
or constructing a simple fake as needed.
"""
res = " ".join(get_doc_annotations(node))
if not res:
res = "(%s)" % node.__class__.__name__.lower()
return res
|
[
"def",
"get_doc",
"(",
"node",
")",
":",
"res",
"=",
"\" \"",
".",
"join",
"(",
"get_doc_annotations",
"(",
"node",
")",
")",
"if",
"not",
"res",
":",
"res",
"=",
"\"(%s)\"",
"%",
"node",
".",
"__class__",
".",
"__name__",
".",
"lower",
"(",
")",
"return",
"res"
] |
Return a node's documentation as a string, pulling from annotations
or constructing a simple fake as needed.
|
[
"Return",
"a",
"node",
"s",
"documentation",
"as",
"a",
"string",
"pulling",
"from",
"annotations",
"or",
"constructing",
"a",
"simple",
"fake",
"as",
"needed",
"."
] |
df0058a148b077c0aff535eb6ee382605c556273
|
https://github.com/datawire/quark/blob/df0058a148b077c0aff535eb6ee382605c556273/quarkc/docmaker.py#L75-L83
|
train
|
datawire/quark
|
quarkc/docmaker.py
|
get_code
|
def get_code(node, coder=Coder()):
"""
Return a node's code
"""
return cgi.escape(str(coder.code(node)), quote=True)
|
python
|
def get_code(node, coder=Coder()):
"""
Return a node's code
"""
return cgi.escape(str(coder.code(node)), quote=True)
|
[
"def",
"get_code",
"(",
"node",
",",
"coder",
"=",
"Coder",
"(",
")",
")",
":",
"return",
"cgi",
".",
"escape",
"(",
"str",
"(",
"coder",
".",
"code",
"(",
"node",
")",
")",
",",
"quote",
"=",
"True",
")"
] |
Return a node's code
|
[
"Return",
"a",
"node",
"s",
"code"
] |
df0058a148b077c0aff535eb6ee382605c556273
|
https://github.com/datawire/quark/blob/df0058a148b077c0aff535eb6ee382605c556273/quarkc/docmaker.py#L86-L90
|
train
|
datawire/quark
|
quarkc/lib/quark_ws4py_fixup.py
|
WebSocketWSGIHandler.setup_environ
|
def setup_environ(self):
"""
Setup the environ dictionary and add the
`'ws4py.socket'` key. Its associated value
    is the real underlying socket.
"""
SimpleHandler.setup_environ(self)
self.environ['ws4py.socket'] = get_connection(self.environ['wsgi.input'])
self.http_version = self.environ['SERVER_PROTOCOL'].rsplit('/')[-1]
|
python
|
def setup_environ(self):
"""
Setup the environ dictionary and add the
`'ws4py.socket'` key. Its associated value
    is the real underlying socket.
"""
SimpleHandler.setup_environ(self)
self.environ['ws4py.socket'] = get_connection(self.environ['wsgi.input'])
self.http_version = self.environ['SERVER_PROTOCOL'].rsplit('/')[-1]
|
[
"def",
"setup_environ",
"(",
"self",
")",
":",
"SimpleHandler",
".",
"setup_environ",
"(",
"self",
")",
"self",
".",
"environ",
"[",
"'ws4py.socket'",
"]",
"=",
"get_connection",
"(",
"self",
".",
"environ",
"[",
"'wsgi.input'",
"]",
")",
"self",
".",
"http_version",
"=",
"self",
".",
"environ",
"[",
"'SERVER_PROTOCOL'",
"]",
".",
"rsplit",
"(",
"'/'",
")",
"[",
"-",
"1",
"]"
] |
Setup the environ dictionary and add the
`'ws4py.socket'` key. Its associated value
is the real underlying socket.
|
[
"Setup",
"the",
"environ",
"dictionary",
"and",
"add",
"the",
"ws4py",
".",
"socket",
"key",
".",
"Its",
"associated",
"value",
"is",
"the",
"real",
"socket",
"underlying",
"socket",
"."
] |
df0058a148b077c0aff535eb6ee382605c556273
|
https://github.com/datawire/quark/blob/df0058a148b077c0aff535eb6ee382605c556273/quarkc/lib/quark_ws4py_fixup.py#L21-L29
|
train
|
datawire/quark
|
quarkc/lib/quark_ws4py_fixup.py
|
WebSocketWSGIRequestHandler.handle
|
def handle(self):
"""
Unfortunately the base class forces us
to override the whole method to actually provide our wsgi handler.
"""
self.raw_requestline = self.rfile.readline()
if not self.parse_request(): # An error code has been sent, just exit
return
    # next line is where we'd have expected a configuration key somehow
handler = self.WebSocketWSGIHandler(
self.rfile, self.wfile, self.get_stderr(), self.get_environ()
)
handler.request_handler = self # backpointer for logging
handler.run(self.server.get_app())
|
python
|
def handle(self):
"""
Unfortunately the base class forces us
to override the whole method to actually provide our wsgi handler.
"""
self.raw_requestline = self.rfile.readline()
if not self.parse_request(): # An error code has been sent, just exit
return
    # next line is where we'd have expected a configuration key somehow
handler = self.WebSocketWSGIHandler(
self.rfile, self.wfile, self.get_stderr(), self.get_environ()
)
handler.request_handler = self # backpointer for logging
handler.run(self.server.get_app())
|
[
"def",
"handle",
"(",
"self",
")",
":",
"self",
".",
"raw_requestline",
"=",
"self",
".",
"rfile",
".",
"readline",
"(",
")",
"if",
"not",
"self",
".",
"parse_request",
"(",
")",
":",
"# An error code has been sent, just exit",
"return",
"# next line is where we'd have expect a configuration key somehow",
"handler",
"=",
"self",
".",
"WebSocketWSGIHandler",
"(",
"self",
".",
"rfile",
",",
"self",
".",
"wfile",
",",
"self",
".",
"get_stderr",
"(",
")",
",",
"self",
".",
"get_environ",
"(",
")",
")",
"handler",
".",
"request_handler",
"=",
"self",
"# backpointer for logging",
"handler",
".",
"run",
"(",
"self",
".",
"server",
".",
"get_app",
"(",
")",
")"
] |
Unfortunately the base class forces us
to override the whole method to actually provide our wsgi handler.
|
[
"Unfortunately",
"the",
"base",
"class",
"forces",
"us",
"to",
"override",
"the",
"whole",
"method",
"to",
"actually",
"provide",
"our",
"wsgi",
"handler",
"."
] |
df0058a148b077c0aff535eb6ee382605c556273
|
https://github.com/datawire/quark/blob/df0058a148b077c0aff535eb6ee382605c556273/quarkc/lib/quark_ws4py_fixup.py#L62-L76
|
train
|
chrisb2/pi_ina219
|
ina219.py
|
INA219.configure
|
def configure(self, voltage_range=RANGE_32V, gain=GAIN_AUTO,
bus_adc=ADC_12BIT, shunt_adc=ADC_12BIT):
""" Configures and calibrates how the INA219 will take measurements.
Arguments:
voltage_range -- The full scale voltage range, this is either 16V
or 32V represented by one of the following constants;
RANGE_16V, RANGE_32V (default).
gain -- The gain which controls the maximum range of the shunt
voltage represented by one of the following constants;
GAIN_1_40MV, GAIN_2_80MV, GAIN_4_160MV,
GAIN_8_320MV, GAIN_AUTO (default).
bus_adc -- The bus ADC resolution (9, 10, 11, or 12-bit) or
set the number of samples used when averaging results
            represented by one of the following constants; ADC_9BIT,
ADC_10BIT, ADC_11BIT, ADC_12BIT (default),
ADC_2SAMP, ADC_4SAMP, ADC_8SAMP, ADC_16SAMP,
ADC_32SAMP, ADC_64SAMP, ADC_128SAMP
shunt_adc -- The shunt ADC resolution (9, 10, 11, or 12-bit) or
set the number of samples used when averaging results
            represented by one of the following constants; ADC_9BIT,
ADC_10BIT, ADC_11BIT, ADC_12BIT (default),
ADC_2SAMP, ADC_4SAMP, ADC_8SAMP, ADC_16SAMP,
ADC_32SAMP, ADC_64SAMP, ADC_128SAMP
"""
self.__validate_voltage_range(voltage_range)
self._voltage_range = voltage_range
if self._max_expected_amps is not None:
if gain == self.GAIN_AUTO:
self._auto_gain_enabled = True
self._gain = self._determine_gain(self._max_expected_amps)
else:
self._gain = gain
else:
if gain != self.GAIN_AUTO:
self._gain = gain
else:
self._auto_gain_enabled = True
self._gain = self.GAIN_1_40MV
logging.info('gain set to %.2fV' % self.__GAIN_VOLTS[self._gain])
logging.debug(
self.__LOG_MSG_1 %
(self._shunt_ohms, self.__BUS_RANGE[voltage_range],
self.__GAIN_VOLTS[self._gain],
self.__max_expected_amps_to_string(self._max_expected_amps),
bus_adc, shunt_adc))
self._calibrate(
self.__BUS_RANGE[voltage_range], self.__GAIN_VOLTS[self._gain],
self._max_expected_amps)
self._configure(voltage_range, self._gain, bus_adc, shunt_adc)
|
python
|
def configure(self, voltage_range=RANGE_32V, gain=GAIN_AUTO,
bus_adc=ADC_12BIT, shunt_adc=ADC_12BIT):
""" Configures and calibrates how the INA219 will take measurements.
Arguments:
voltage_range -- The full scale voltage range, this is either 16V
or 32V represented by one of the following constants;
RANGE_16V, RANGE_32V (default).
gain -- The gain which controls the maximum range of the shunt
voltage represented by one of the following constants;
GAIN_1_40MV, GAIN_2_80MV, GAIN_4_160MV,
GAIN_8_320MV, GAIN_AUTO (default).
bus_adc -- The bus ADC resolution (9, 10, 11, or 12-bit) or
set the number of samples used when averaging results
            represented by one of the following constants; ADC_9BIT,
ADC_10BIT, ADC_11BIT, ADC_12BIT (default),
ADC_2SAMP, ADC_4SAMP, ADC_8SAMP, ADC_16SAMP,
ADC_32SAMP, ADC_64SAMP, ADC_128SAMP
shunt_adc -- The shunt ADC resolution (9, 10, 11, or 12-bit) or
set the number of samples used when averaging results
            represented by one of the following constants; ADC_9BIT,
ADC_10BIT, ADC_11BIT, ADC_12BIT (default),
ADC_2SAMP, ADC_4SAMP, ADC_8SAMP, ADC_16SAMP,
ADC_32SAMP, ADC_64SAMP, ADC_128SAMP
"""
self.__validate_voltage_range(voltage_range)
self._voltage_range = voltage_range
if self._max_expected_amps is not None:
if gain == self.GAIN_AUTO:
self._auto_gain_enabled = True
self._gain = self._determine_gain(self._max_expected_amps)
else:
self._gain = gain
else:
if gain != self.GAIN_AUTO:
self._gain = gain
else:
self._auto_gain_enabled = True
self._gain = self.GAIN_1_40MV
logging.info('gain set to %.2fV' % self.__GAIN_VOLTS[self._gain])
logging.debug(
self.__LOG_MSG_1 %
(self._shunt_ohms, self.__BUS_RANGE[voltage_range],
self.__GAIN_VOLTS[self._gain],
self.__max_expected_amps_to_string(self._max_expected_amps),
bus_adc, shunt_adc))
self._calibrate(
self.__BUS_RANGE[voltage_range], self.__GAIN_VOLTS[self._gain],
self._max_expected_amps)
self._configure(voltage_range, self._gain, bus_adc, shunt_adc)
|
[
"def",
"configure",
"(",
"self",
",",
"voltage_range",
"=",
"RANGE_32V",
",",
"gain",
"=",
"GAIN_AUTO",
",",
"bus_adc",
"=",
"ADC_12BIT",
",",
"shunt_adc",
"=",
"ADC_12BIT",
")",
":",
"self",
".",
"__validate_voltage_range",
"(",
"voltage_range",
")",
"self",
".",
"_voltage_range",
"=",
"voltage_range",
"if",
"self",
".",
"_max_expected_amps",
"is",
"not",
"None",
":",
"if",
"gain",
"==",
"self",
".",
"GAIN_AUTO",
":",
"self",
".",
"_auto_gain_enabled",
"=",
"True",
"self",
".",
"_gain",
"=",
"self",
".",
"_determine_gain",
"(",
"self",
".",
"_max_expected_amps",
")",
"else",
":",
"self",
".",
"_gain",
"=",
"gain",
"else",
":",
"if",
"gain",
"!=",
"self",
".",
"GAIN_AUTO",
":",
"self",
".",
"_gain",
"=",
"gain",
"else",
":",
"self",
".",
"_auto_gain_enabled",
"=",
"True",
"self",
".",
"_gain",
"=",
"self",
".",
"GAIN_1_40MV",
"logging",
".",
"info",
"(",
"'gain set to %.2fV'",
"%",
"self",
".",
"__GAIN_VOLTS",
"[",
"self",
".",
"_gain",
"]",
")",
"logging",
".",
"debug",
"(",
"self",
".",
"__LOG_MSG_1",
"%",
"(",
"self",
".",
"_shunt_ohms",
",",
"self",
".",
"__BUS_RANGE",
"[",
"voltage_range",
"]",
",",
"self",
".",
"__GAIN_VOLTS",
"[",
"self",
".",
"_gain",
"]",
",",
"self",
".",
"__max_expected_amps_to_string",
"(",
"self",
".",
"_max_expected_amps",
")",
",",
"bus_adc",
",",
"shunt_adc",
")",
")",
"self",
".",
"_calibrate",
"(",
"self",
".",
"__BUS_RANGE",
"[",
"voltage_range",
"]",
",",
"self",
".",
"__GAIN_VOLTS",
"[",
"self",
".",
"_gain",
"]",
",",
"self",
".",
"_max_expected_amps",
")",
"self",
".",
"_configure",
"(",
"voltage_range",
",",
"self",
".",
"_gain",
",",
"bus_adc",
",",
"shunt_adc",
")"
] |
Configures and calibrates how the INA219 will take measurements.
Arguments:
voltage_range -- The full scale voltage range; this is either 16V
or 32V represented by one of the following constants;
RANGE_16V, RANGE_32V (default).
gain -- The gain which controls the maximum range of the shunt
voltage represented by one of the following constants;
GAIN_1_40MV, GAIN_2_80MV, GAIN_4_160MV,
GAIN_8_320MV, GAIN_AUTO (default).
bus_adc -- The bus ADC resolution (9, 10, 11, or 12-bit) or
set the number of samples used when averaging results
represented by one of the following constants; ADC_9BIT,
ADC_10BIT, ADC_11BIT, ADC_12BIT (default),
ADC_2SAMP, ADC_4SAMP, ADC_8SAMP, ADC_16SAMP,
ADC_32SAMP, ADC_64SAMP, ADC_128SAMP
shunt_adc -- The shunt ADC resolution (9, 10, 11, or 12-bit) or
set the number of samples used when averaging results
represented by one of the following constants; ADC_9BIT,
ADC_10BIT, ADC_11BIT, ADC_12BIT (default),
ADC_2SAMP, ADC_4SAMP, ADC_8SAMP, ADC_16SAMP,
ADC_32SAMP, ADC_64SAMP, ADC_128SAMP
|
[
"Configures",
"and",
"calibrates",
"how",
"the",
"INA219",
"will",
"take",
"measurements",
"."
] |
2caeb8a387286ac3504905a0d2d478370a691339
|
https://github.com/chrisb2/pi_ina219/blob/2caeb8a387286ac3504905a0d2d478370a691339/ina219.py#L113-L166
|
train
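For orientation, here is a minimal usage sketch of the configure() call above. The 0.1 ohm shunt value is illustrative, and the read helpers voltage()/current() are assumed from the same pi_ina219 package rather than shown in this record:

from ina219 import INA219

SHUNT_OHMS = 0.1  # board-specific shunt resistor value (illustrative)

ina = INA219(SHUNT_OHMS)
ina.configure(voltage_range=INA219.RANGE_16V)  # gain and both ADCs keep their defaults
print('Bus voltage: %.3f V' % ina.voltage())
print('Current: %.3f mA' % ina.current())

With no arguments, configure() falls back to the 32V range, automatic gain and 12-bit conversions on both ADCs, as the defaults in the signature indicate.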
|
chrisb2/pi_ina219
|
ina219.py
|
INA219.wake
|
def wake(self):
""" Wake the INA219 from power down mode """
configuration = self._read_configuration()
self._configuration_register(configuration | 0x0007)
# 40us delay to recover from powerdown (p14 of spec)
time.sleep(0.00004)
|
python
|
def wake(self):
""" Wake the INA219 from power down mode """
configuration = self._read_configuration()
self._configuration_register(configuration | 0x0007)
# 40us delay to recover from powerdown (p14 of spec)
time.sleep(0.00004)
|
[
"def",
"wake",
"(",
"self",
")",
":",
"configuration",
"=",
"self",
".",
"_read_configuration",
"(",
")",
"self",
".",
"_configuration_register",
"(",
"configuration",
"|",
"0x0007",
")",
"# 40us delay to recover from powerdown (p14 of spec)",
"time",
".",
"sleep",
"(",
"0.00004",
")"
] |
Wake the INA219 from power down mode
|
[
"Wake",
"the",
"INA219",
"from",
"power",
"down",
"mode"
] |
2caeb8a387286ac3504905a0d2d478370a691339
|
https://github.com/chrisb2/pi_ina219/blob/2caeb8a387286ac3504905a0d2d478370a691339/ina219.py#L202-L207
|
train
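wake() pairs with a power-down mode; a sketch of the intended duty-cycling pattern, assuming the library's companion sleep() method:

from ina219 import INA219

ina = INA219(0.1)  # 0.1 ohm shunt (illustrative)
ina.configure()
print('%.3f V' % ina.voltage())
ina.sleep()  # enter power-down between readings (companion call, assumed)
# ... sensor draws minimal current here ...
ina.wake()   # the 40us recovery delay from the datasheet is handled internally
print('%.3f V' % ina.voltage())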
|
blacktop/virustotal-api
|
virus_total_apis/api.py
|
_return_response_and_status_code
|
def _return_response_and_status_code(response, json_results=True):
""" Output the requests response content or content as json and status code
:rtype : dict
:param response: requests response object
:param json_results: Should return JSON or raw content
:return: dict containing the response content and/or the status code with error string.
"""
if response.status_code == requests.codes.ok:
return dict(results=response.json() if json_results else response.content, response_code=response.status_code)
elif response.status_code == 400:
return dict(
error='package sent is either malformed or not within the past 24 hours.',
response_code=response.status_code)
elif response.status_code == 204:
return dict(
error='You exceeded the public API request rate limit (4 requests of any nature per minute)',
response_code=response.status_code)
elif response.status_code == 403:
return dict(
error='You tried to perform calls to functions for which you require a Private API key.',
response_code=response.status_code)
elif response.status_code == 404:
return dict(error='File not found.', response_code=response.status_code)
else:
return dict(response_code=response.status_code)
|
python
|
def _return_response_and_status_code(response, json_results=True):
""" Output the requests response content or content as json and status code
:rtype : dict
:param response: requests response object
:param json_results: Should return JSON or raw content
:return: dict containing the response content and/or the status code with error string.
"""
if response.status_code == requests.codes.ok:
return dict(results=response.json() if json_results else response.content, response_code=response.status_code)
elif response.status_code == 400:
return dict(
error='package sent is either malformed or not within the past 24 hours.',
response_code=response.status_code)
elif response.status_code == 204:
return dict(
error='You exceeded the public API request rate limit (4 requests of any nature per minute)',
response_code=response.status_code)
elif response.status_code == 403:
return dict(
error='You tried to perform calls to functions for which you require a Private API key.',
response_code=response.status_code)
elif response.status_code == 404:
return dict(error='File not found.', response_code=response.status_code)
else:
return dict(response_code=response.status_code)
|
[
"def",
"_return_response_and_status_code",
"(",
"response",
",",
"json_results",
"=",
"True",
")",
":",
"if",
"response",
".",
"status_code",
"==",
"requests",
".",
"codes",
".",
"ok",
":",
"return",
"dict",
"(",
"results",
"=",
"response",
".",
"json",
"(",
")",
"if",
"json_results",
"else",
"response",
".",
"content",
",",
"response_code",
"=",
"response",
".",
"status_code",
")",
"elif",
"response",
".",
"status_code",
"==",
"400",
":",
"return",
"dict",
"(",
"error",
"=",
"'package sent is either malformed or not within the past 24 hours.'",
",",
"response_code",
"=",
"response",
".",
"status_code",
")",
"elif",
"response",
".",
"status_code",
"==",
"204",
":",
"return",
"dict",
"(",
"error",
"=",
"'You exceeded the public API request rate limit (4 requests of any nature per minute)'",
",",
"response_code",
"=",
"response",
".",
"status_code",
")",
"elif",
"response",
".",
"status_code",
"==",
"403",
":",
"return",
"dict",
"(",
"error",
"=",
"'You tried to perform calls to functions for which you require a Private API key.'",
",",
"response_code",
"=",
"response",
".",
"status_code",
")",
"elif",
"response",
".",
"status_code",
"==",
"404",
":",
"return",
"dict",
"(",
"error",
"=",
"'File not found.'",
",",
"response_code",
"=",
"response",
".",
"status_code",
")",
"else",
":",
"return",
"dict",
"(",
"response_code",
"=",
"response",
".",
"status_code",
")"
] |
Output the requests response content or content as json and status code
:rtype : dict
:param response: requests response object
:param json_results: Should return JSON or raw content
:return: dict containing the response content and/or the status code with error string.
|
[
"Output",
"the",
"requests",
"response",
"content",
"or",
"content",
"as",
"json",
"and",
"status",
"code"
] |
4e01e1c6d87255ec8370ac2a4ee16edce00e1e86
|
https://github.com/blacktop/virustotal-api/blob/4e01e1c6d87255ec8370ac2a4ee16edce00e1e86/virus_total_apis/api.py#L954-L979
|
train
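Every API method in this module funnels its requests response through this helper, so callers always receive a plain dict. A sketch of how a caller branches on it (URL, key and hash are placeholders):

import requests

response = requests.get('https://www.virustotal.com/vtapi/v2/file/report',
                        params={'apikey': 'YOUR-API-KEY', 'resource': 'SOME-HASH'})
result = _return_response_and_status_code(response)
if result['response_code'] == requests.codes.ok:
    report = result['results']  # parsed JSON by default (json_results=True)
elif 'error' in result:
    print('request refused: %s (HTTP %d)' % (result['error'], result['response_code']))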
|
blacktop/virustotal-api
|
virus_total_apis/api.py
|
PublicApi.put_comments
|
def put_comments(self, resource, comment, timeout=None):
""" Post a comment on a file or URL.
The initial idea of VirusTotal Community was that users should be able to make comments on files and URLs,
the comments may be malware analyses, false positive flags, disinfection instructions, etc.
Imagine you have some automatic setup that can produce interesting results related to a given sample or URL
that you submit to VirusTotal for antivirus characterization, you might want to give visibility to your setup
by automatically reviewing samples and URLs with the output of your automation.
:param resource: either a md5/sha1/sha256 hash of the file you want to review or the URL itself that you want
to comment on.
:param comment: the actual review, you can tag it using the "#" twitter-like syntax (e.g. #disinfection #zbot)
and reference users using the "@" syntax (e.g. @VirusTotalTeam).
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: If the comment was successfully posted the response code will be 1, 0 otherwise.
"""
params = {'apikey': self.api_key, 'resource': resource, 'comment': comment}
try:
response = requests.post(self.base + 'comments/put', params=params, proxies=self.proxies, timeout=timeout)
except requests.RequestException as e:
return dict(error=str(e))
return _return_response_and_status_code(response)
|
python
|
def put_comments(self, resource, comment, timeout=None):
""" Post a comment on a file or URL.
The initial idea of VirusTotal Community was that users should be able to make comments on files and URLs,
the comments may be malware analyses, false positive flags, disinfection instructions, etc.
Imagine you have some automatic setup that can produce interesting results related to a given sample or URL
that you submit to VirusTotal for antivirus characterization, you might want to give visibility to your setup
by automatically reviewing samples and URLs with the output of your automation.
:param resource: either a md5/sha1/sha256 hash of the file you want to review or the URL itself that you want
to comment on.
:param comment: the actual review, you can tag it using the "#" twitter-like syntax (e.g. #disinfection #zbot)
and reference users using the "@" syntax (e.g. @VirusTotalTeam).
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: If the comment was successfully posted the response code will be 1, 0 otherwise.
"""
params = {'apikey': self.api_key, 'resource': resource, 'comment': comment}
try:
response = requests.post(self.base + 'comments/put', params=params, proxies=self.proxies, timeout=timeout)
except requests.RequestException as e:
return dict(error=str(e))
return _return_response_and_status_code(response)
|
[
"def",
"put_comments",
"(",
"self",
",",
"resource",
",",
"comment",
",",
"timeout",
"=",
"None",
")",
":",
"params",
"=",
"{",
"'apikey'",
":",
"self",
".",
"api_key",
",",
"'resource'",
":",
"resource",
",",
"'comment'",
":",
"comment",
"}",
"try",
":",
"response",
"=",
"requests",
".",
"post",
"(",
"self",
".",
"base",
"+",
"'comments/put'",
",",
"params",
"=",
"params",
",",
"proxies",
"=",
"self",
".",
"proxies",
",",
"timeout",
"=",
"timeout",
")",
"except",
"requests",
".",
"RequestException",
"as",
"e",
":",
"return",
"dict",
"(",
"error",
"=",
"str",
"(",
"e",
")",
")",
"return",
"_return_response_and_status_code",
"(",
"response",
")"
] |
Post a comment on a file or URL.
The initial idea of VirusTotal Community was that users should be able to make comments on files and URLs,
the comments may be malware analyses, false positive flags, disinfection instructions, etc.
Imagine you have some automatic setup that can produce interesting results related to a given sample or URL
that you submit to VirusTotal for antivirus characterization, you might want to give visibility to your setup
by automatically reviewing samples and URLs with the output of your automation.
:param resource: either a md5/sha1/sha256 hash of the file you want to review or the URL itself that you want
to comment on.
:param comment: the actual review, you can tag it using the "#" twitter-like syntax (e.g. #disinfection #zbot)
and reference users using the "@" syntax (e.g. @VirusTotalTeam).
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: If the comment was successfully posted the response code will be 1, 0 otherwise.
|
[
"Post",
"a",
"comment",
"on",
"a",
"file",
"or",
"URL",
"."
] |
4e01e1c6d87255ec8370ac2a4ee16edce00e1e86
|
https://github.com/blacktop/virustotal-api/blob/4e01e1c6d87255ec8370ac2a4ee16edce00e1e86/virus_total_apis/api.py#L188-L213
|
train
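A hedged usage sketch for put_comments(); the key and hash are placeholders, and the success check follows the docstring's note that a successful post yields response code 1:

from virus_total_apis import PublicApi

vt = PublicApi('YOUR-API-KEY')  # placeholder key
resp = vt.put_comments(
    resource='99017f6eebbac24f351415dd410d522d',  # md5 of a sample (illustrative)
    comment='Observed #keylogger behaviour in our sandbox, cc @VirusTotalTeam')
if resp.get('results', {}).get('response_code') == 1:
    print('comment posted')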
|
blacktop/virustotal-api
|
virus_total_apis/api.py
|
PublicApi.get_ip_report
|
def get_ip_report(self, this_ip, timeout=None):
""" Get IP address reports.
:param this_ip: a valid IPv4 address in dotted quad notation, for the time being only IPv4 addresses are
supported.
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: JSON response
"""
params = {'apikey': self.api_key, 'ip': this_ip}
try:
response = requests.get(self.base + 'ip-address/report',
params=params,
proxies=self.proxies,
timeout=timeout)
except requests.RequestException as e:
return dict(error=str(e))
return _return_response_and_status_code(response)
|
python
|
def get_ip_report(self, this_ip, timeout=None):
""" Get IP address reports.
:param this_ip: a valid IPv4 address in dotted quad notation, for the time being only IPv4 addresses are
supported.
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: JSON response
"""
params = {'apikey': self.api_key, 'ip': this_ip}
try:
response = requests.get(self.base + 'ip-address/report',
params=params,
proxies=self.proxies,
timeout=timeout)
except requests.RequestException as e:
return dict(error=str(e))
return _return_response_and_status_code(response)
|
[
"def",
"get_ip_report",
"(",
"self",
",",
"this_ip",
",",
"timeout",
"=",
"None",
")",
":",
"params",
"=",
"{",
"'apikey'",
":",
"self",
".",
"api_key",
",",
"'ip'",
":",
"this_ip",
"}",
"try",
":",
"response",
"=",
"requests",
".",
"get",
"(",
"self",
".",
"base",
"+",
"'ip-address/report'",
",",
"params",
"=",
"params",
",",
"proxies",
"=",
"self",
".",
"proxies",
",",
"timeout",
"=",
"timeout",
")",
"except",
"requests",
".",
"RequestException",
"as",
"e",
":",
"return",
"dict",
"(",
"error",
"=",
"str",
"(",
"e",
")",
")",
"return",
"_return_response_and_status_code",
"(",
"response",
")"
] |
Get IP address reports.
:param this_ip: a valid IPv4 address in dotted quad notation, for the time being only IPv4 addresses are
supported.
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: JSON response
|
[
"Get",
"IP",
"address",
"reports",
"."
] |
4e01e1c6d87255ec8370ac2a4ee16edce00e1e86
|
https://github.com/blacktop/virustotal-api/blob/4e01e1c6d87255ec8370ac2a4ee16edce00e1e86/virus_total_apis/api.py#L215-L234
|
train
|
blacktop/virustotal-api
|
virus_total_apis/api.py
|
PublicApi.get_domain_report
|
def get_domain_report(self, this_domain, timeout=None):
""" Get information about a given domain.
:param this_domain: a domain name.
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: JSON response
"""
params = {'apikey': self.api_key, 'domain': this_domain}
try:
response = requests.get(self.base + 'domain/report', params=params, proxies=self.proxies, timeout=timeout)
except requests.RequestException as e:
return dict(error=str(e))
return _return_response_and_status_code(response)
|
python
|
def get_domain_report(self, this_domain, timeout=None):
""" Get information about a given domain.
:param this_domain: a domain name.
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: JSON response
"""
params = {'apikey': self.api_key, 'domain': this_domain}
try:
response = requests.get(self.base + 'domain/report', params=params, proxies=self.proxies, timeout=timeout)
except requests.RequestException as e:
return dict(error=str(e))
return _return_response_and_status_code(response)
|
[
"def",
"get_domain_report",
"(",
"self",
",",
"this_domain",
",",
"timeout",
"=",
"None",
")",
":",
"params",
"=",
"{",
"'apikey'",
":",
"self",
".",
"api_key",
",",
"'domain'",
":",
"this_domain",
"}",
"try",
":",
"response",
"=",
"requests",
".",
"get",
"(",
"self",
".",
"base",
"+",
"'domain/report'",
",",
"params",
"=",
"params",
",",
"proxies",
"=",
"self",
".",
"proxies",
",",
"timeout",
"=",
"timeout",
")",
"except",
"requests",
".",
"RequestException",
"as",
"e",
":",
"return",
"dict",
"(",
"error",
"=",
"str",
"(",
"e",
")",
")",
"return",
"_return_response_and_status_code",
"(",
"response",
")"
] |
Get information about a given domain.
:param this_domain: a domain name.
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: JSON response
|
[
"Get",
"information",
"about",
"a",
"given",
"domain",
"."
] |
4e01e1c6d87255ec8370ac2a4ee16edce00e1e86
|
https://github.com/blacktop/virustotal-api/blob/4e01e1c6d87255ec8370ac2a4ee16edce00e1e86/virus_total_apis/api.py#L236-L251
|
train
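The two report calls above share the same shape, differing only in the parameter they send. A combined sketch (key, IP and domain are illustrative):

from virus_total_apis import PublicApi

vt = PublicApi('YOUR-API-KEY')  # placeholder key
ip_report = vt.get_ip_report('90.156.201.27')   # IPv4 only, per the docstring
domain_report = vt.get_domain_report('027.ru')  # illustrative domain
for report in (ip_report, domain_report):
    print(report.get('response_code'), sorted(report.get('results', {})))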
|
blacktop/virustotal-api
|
virus_total_apis/api.py
|
PrivateApi.get_upload_url
|
def get_upload_url(self, timeout=None):
""" Get a special URL for submitted files bigger than 32MB.
In order to submit files bigger than 32MB you need to obtain a special upload URL to which you
can POST files up to 200MB in size. This API generates such a URL.
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: JSON special upload URL to which you can POST files up to 200MB in size.
"""
params = {'apikey': self.api_key}
try:
response = requests.get(self.base + 'file/scan/upload_url',
params=params,
proxies=self.proxies,
timeout=timeout)
if response.status_code == requests.codes.ok:
return response.json().get('upload_url')
else:
return dict(response_code=response.status_code)
except requests.RequestException as e:
return dict(error=str(e))
|
python
|
def get_upload_url(self, timeout=None):
""" Get a special URL for submitted files bigger than 32MB.
In order to submit files bigger than 32MB you need to obtain a special upload URL to which you
can POST files up to 200MB in size. This API generates such a URL.
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: JSON special upload URL to which you can POST files up to 200MB in size.
"""
params = {'apikey': self.api_key}
try:
response = requests.get(self.base + 'file/scan/upload_url',
params=params,
proxies=self.proxies,
timeout=timeout)
if response.status_code == requests.codes.ok:
return response.json().get('upload_url')
else:
return dict(response_code=response.status_code)
except requests.RequestException as e:
return dict(error=str(e))
|
[
"def",
"get_upload_url",
"(",
"self",
",",
"timeout",
"=",
"None",
")",
":",
"params",
"=",
"{",
"'apikey'",
":",
"self",
".",
"api_key",
"}",
"try",
":",
"response",
"=",
"requests",
".",
"get",
"(",
"self",
".",
"base",
"+",
"'file/scan/upload_url'",
",",
"params",
"=",
"params",
",",
"proxies",
"=",
"self",
".",
"proxies",
",",
"timeout",
"=",
"timeout",
")",
"if",
"response",
".",
"status_code",
"==",
"requests",
".",
"codes",
".",
"ok",
":",
"return",
"response",
".",
"json",
"(",
")",
".",
"get",
"(",
"'upload_url'",
")",
"else",
":",
"return",
"dict",
"(",
"response_code",
"=",
"response",
".",
"status_code",
")",
"except",
"requests",
".",
"RequestException",
"as",
"e",
":",
"return",
"dict",
"(",
"error",
"=",
"str",
"(",
"e",
")",
")"
] |
Get a special URL for submitted files bigger than 32MB.
In order to submit files bigger than 32MB you need to obtain a special upload URL to which you
can POST files up to 200MB in size. This API generates such a URL.
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: JSON special upload URL to which you can POST files up to 200MB in size.
|
[
"Get",
"a",
"special",
"URL",
"for",
"submitted",
"files",
"bigger",
"than",
"32MB",
"."
] |
4e01e1c6d87255ec8370ac2a4ee16edce00e1e86
|
https://github.com/blacktop/virustotal-api/blob/4e01e1c6d87255ec8370ac2a4ee16edce00e1e86/virus_total_apis/api.py#L301-L323
|
train
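Note the asymmetry: on success this method returns the bare upload URL string instead of the usual dict. A sketch of the intended two-step flow for large files (key and file path are illustrative):

import requests
from virus_total_apis import PrivateApi

vt = PrivateApi('YOUR-PRIVATE-API-KEY')  # placeholder
upload_url = vt.get_upload_url()
if not isinstance(upload_url, dict):  # a dict here means an error or non-200 status
    with open('/tmp/big_sample.bin', 'rb') as fh:  # illustrative path
        resp = requests.post(upload_url, files={'file': ('big_sample.bin', fh)})
    print(resp.status_code)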
|
blacktop/virustotal-api
|
virus_total_apis/api.py
|
PrivateApi.file_search
|
def file_search(self, query, offset=None, timeout=None):
""" Search for samples.
In addition to retrieving all information on a particular file, VirusTotal allows you to perform what we
call "advanced reverse searches". Reverse searches take you from a file property to a list of files that
match that property. For example, this functionality enables you to retrieve all those files marked by at
least one antivirus vendor as Zbot, or all those files that have a size under 90KB and are detected by at
least 10 antivirus solutions, or all those PDF files that have an invalid XREF section, etc.
This API is equivalent to VirusTotal Intelligence advanced searches. A very wide variety of search modifiers
are available, including: file size, file type, first submission date to VirusTotal, last submission date to
VirusTotal, number of positives, dynamic behavioural properties, binary content, submission file name, and a
very long etcetera. The full list of search modifiers allowed for file search queries is documented at:
https://www.virustotal.com/intelligence/help/file-search/#search-modifiers
NOTE:
Daily limited! No matter what API plan you have licensed, this API call is limited to 50K requests per day.
If you need any more, chances are you are approaching your engineering problem erroneously and you can
probably solve it using the file distribution call. Do not hesitate to contact us with your particular
use case.
EXAMPLE:
search_options = 'type:peexe size:90kb+ positives:5+ behaviour:"taskkill"'
:param query: A search modifier compliant file search query.
:param offset: (optional) The offset value returned by a previously issued identical query, allows you to
paginate over the results. If not specified the first 300 matching files sorted according to last submission
date to VirusTotal in a descending fashion will be returned.
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: JSON response - By default the list returned contains at most 300 hashes, ordered according to
last submission date to VirusTotal in a descending fashion.
"""
params = dict(apikey=self.api_key, query=query, offset=offset)
try:
response = requests.get(self.base + 'file/search', params=params, proxies=self.proxies, timeout=timeout)
except requests.RequestException as e:
return dict(error=str(e))
return _return_response_and_status_code(response)
|
python
|
def file_search(self, query, offset=None, timeout=None):
""" Search for samples.
In addition to retrieving all information on a particular file, VirusTotal allows you to perform what we
call "advanced reverse searches". Reverse searches take you from a file property to a list of files that
match that property. For example, this functionality enables you to retrieve all those files marked by at
least one antivirus vendor as Zbot, or all those files that have a size under 90KB and are detected by at
least 10 antivirus solutions, or all those PDF files that have an invalid XREF section, etc.
This API is equivalent to VirusTotal Intelligence advanced searches. A very wide variety of search modifiers
are available, including: file size, file type, first submission date to VirusTotal, last submission date to
VirusTotal, number of positives, dynamic behavioural properties, binary content, submission file name, and a
very long etcetera. The full list of search modifiers allowed for file search queries is documented at:
https://www.virustotal.com/intelligence/help/file-search/#search-modifiers
NOTE:
Daily limited! No matter what API plan you have licensed, this API call is limited to 50K requests per day.
If you need any more, chances are you are approaching your engineering problem erroneously and you can
probably solve it using the file distribution call. Do not hesitate to contact us with your particular
use case.
EXAMPLE:
search_options = 'type:peexe size:90kb+ positives:5+ behaviour:"taskkill"'
:param query: A search modifier compliant file search query.
:param offset: (optional) The offset value returned by a previously issued identical query, allows you to
paginate over the results. If not specified the first 300 matching files sorted according to last submission
date to VirusTotal in a descending fashion will be returned.
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: JSON response - By default the list returned contains at most 300 hashes, ordered according to
last submission date to VirusTotal in a descending fashion.
"""
params = dict(apikey=self.api_key, query=query, offset=offset)
try:
response = requests.get(self.base + 'file/search', params=params, proxies=self.proxies, timeout=timeout)
except requests.RequestException as e:
return dict(error=str(e))
return _return_response_and_status_code(response)
|
[
"def",
"file_search",
"(",
"self",
",",
"query",
",",
"offset",
"=",
"None",
",",
"timeout",
"=",
"None",
")",
":",
"params",
"=",
"dict",
"(",
"apikey",
"=",
"self",
".",
"api_key",
",",
"query",
"=",
"query",
",",
"offset",
"=",
"offset",
")",
"try",
":",
"response",
"=",
"requests",
".",
"get",
"(",
"self",
".",
"base",
"+",
"'file/search'",
",",
"params",
"=",
"params",
",",
"proxies",
"=",
"self",
".",
"proxies",
",",
"timeout",
"=",
"timeout",
")",
"except",
"requests",
".",
"RequestException",
"as",
"e",
":",
"return",
"dict",
"(",
"error",
"=",
"str",
"(",
"e",
")",
")",
"return",
"_return_response_and_status_code",
"(",
"response",
")"
] |
Search for samples.
In addition to retrieving all information on a particular file, VirusTotal allows you to perform what we
call "advanced reverse searches". Reverse searches take you from a file property to a list of files that
match that property. For example, this functionality enables you to retrieve all those files marked by at
least one antivirus vendor as Zbot, or all those files that have a size under 90KB and are detected by at
least 10 antivirus solutions, or all those PDF files that have an invalid XREF section, etc.
This API is equivalent to VirusTotal Intelligence advanced searches. A very wide variety of search modifiers
are available, including: file size, file type, first submission date to VirusTotal, last submission date to
VirusTotal, number of positives, dynamic behavioural properties, binary content, submission file name, and a
very long etcetera. The full list of search modifiers allowed for file search queries is documented at:
https://www.virustotal.com/intelligence/help/file-search/#search-modifiers
NOTE:
Daily limited! No matter what API plan you have licensed, this API call is limited to 50K requests per day.
If you need any more, chances are you are approaching your engineering problem erroneously and you can
probably solve it using the file distribution call. Do not hesitate to contact us with your particular
use case.
EXAMPLE:
search_options = 'type:peexe size:90kb+ positives:5+ behaviour:"taskkill"'
:param query: A search modifier compliant file search query.
:param offset: (optional) The offset value returned by a previously issued identical query, allows you to
paginate over the results. If not specified the first 300 matching files sorted according to last submission
date to VirusTotal in a descending fashion will be returned.
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: JSON response - By default the list returned contains at most 300 hashes, ordered according to
last submission date to VirusTotal in a descending fashion.
|
[
"Search",
"for",
"samples",
"."
] |
4e01e1c6d87255ec8370ac2a4ee16edce00e1e86
|
https://github.com/blacktop/virustotal-api/blob/4e01e1c6d87255ec8370ac2a4ee16edce00e1e86/virus_total_apis/api.py#L469-L509
|
train
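Since the call hands back an offset for pagination, consumers typically loop until it disappears. A sketch; the 'hashes' and 'offset' field names follow the v2 API documentation and are assumptions here:

from virus_total_apis import PrivateApi

vt = PrivateApi('YOUR-PRIVATE-API-KEY')  # placeholder
offset = None
hashes = []
while True:
    page = vt.file_search('type:peexe size:90kb+ positives:5+', offset=offset)
    results = page.get('results', {})
    hashes.extend(results.get('hashes', []))  # field name assumed from the v2 docs
    offset = results.get('offset')
    if not offset:
        break
print('%d matching hashes' % len(hashes))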
|
blacktop/virustotal-api
|
virus_total_apis/api.py
|
PrivateApi.get_file_clusters
|
def get_file_clusters(self, this_date, timeout=None):
""" File similarity clusters for a given time frame.
VirusTotal has built its own in-house file similarity clustering functionality. At present, this clustering
works only on PE, PDF, DOC and RTF files and is based on a very simple structural feature hash. This hash
can very often be confused by certain compression and packing strategies; in other words, this clustering
logic is no holy grail, yet it has proven itself very useful in the past.
This API offers programmatic access to the clustering section of VirusTotal Intelligence:
https://www.virustotal.com/intelligence/clustering/
NOTE:
Please note that you must be logged in with a valid VirusTotal Community user account with access to
VirusTotal Intelligence in order to be able to view the clustering listing.
:param this_date: A specific day for which we want to access the clustering details, example: 2013-09-10.
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: JSON object contains several properties
num_candidates - Total number of files submitted during the given time frame for which a feature hash could
be calculated.
num_clusters - Total number of clusters generated for the given time period under consideration, a cluster
can be as small as an individual file, meaning that no other feature-wise similar file was
found.
size_top200 - The sum of the number of files in the 200 largest clusters identified.
clusters - List of JSON objects that contain details about the 200 largest clusters identified. These
objects contain 4 properties: id, label, size and avg_positives. The id field can be used
to then query the search API call for files contained in the given cluster. The label
property is a verbose human-intelligible name for the cluster. The size field is the number
of files that make up the cluster. Finally, avg_positives represents the average number of
antivirus detections that the files in the cluster exhibit.
"""
params = {'apikey': self.api_key, 'date': this_date}
try:
response = requests.get(self.base + 'file/clusters', params=params, proxies=self.proxies, timeout=timeout)
except requests.RequestException as e:
return dict(error=str(e))
return _return_response_and_status_code(response)
|
python
|
def get_file_clusters(self, this_date, timeout=None):
""" File similarity clusters for a given time frame.
VirusTotal has built its own in-house file similarity clustering functionality. At present, this clustering
works only on PE, PDF, DOC and RTF files and is based on a very simple structural feature hash. This hash
can very often be confused by certain compression and packing strategies; in other words, this clustering
logic is no holy grail, yet it has proven itself very useful in the past.
This API offers programmatic access to the clustering section of VirusTotal Intelligence:
https://www.virustotal.com/intelligence/clustering/
NOTE:
Please note that you must be logged in with a valid VirusTotal Community user account with access to
VirusTotal Intelligence in order to be able to view the clustering listing.
:param this_date: A specific day for which we want to access the clustering details, example: 2013-09-10.
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: JSON object contains several properties
num_candidates - Total number of files submitted during the given time frame for which a feature hash could
be calculated.
num_clusters - Total number of clusters generated for the given time period under consideration, a cluster
can be as small as an individual file, meaning that no other feature-wise similar file was
found.
size_top200 - The sum of the number of files in the 200 largest clusters identified.
clusters - List of JSON objects that contain details about the 200 largest clusters identified. These
objects contain 4 properties: id, label, size and avg_positives. The id field can be used
to then query the search API call for files contained in the given cluster. The label
property is a verbose human-intelligible name for the cluster. The size field is the number
of files that make up the cluster. Finally, avg_positives represents the average number of
antivirus detections that the files in the cluster exhibit.
"""
params = {'apikey': self.api_key, 'date': this_date}
try:
response = requests.get(self.base + 'file/clusters', params=params, proxies=self.proxies, timeout=timeout)
except requests.RequestException as e:
return dict(error=str(e))
return _return_response_and_status_code(response)
|
[
"def",
"get_file_clusters",
"(",
"self",
",",
"this_date",
",",
"timeout",
"=",
"None",
")",
":",
"params",
"=",
"{",
"'apikey'",
":",
"self",
".",
"api_key",
",",
"'date'",
":",
"this_date",
"}",
"try",
":",
"response",
"=",
"requests",
".",
"get",
"(",
"self",
".",
"base",
"+",
"'file/clusters'",
",",
"params",
"=",
"params",
",",
"proxies",
"=",
"self",
".",
"proxies",
",",
"timeout",
"=",
"timeout",
")",
"except",
"requests",
".",
"RequestException",
"as",
"e",
":",
"return",
"dict",
"(",
"error",
"=",
"str",
"(",
"e",
")",
")",
"return",
"_return_response_and_status_code",
"(",
"response",
")"
] |
File similarity clusters for a given time frame.
VirusTotal has built its own in-house file similarity clustering functionality. At present, this clustering
works only on PE, PDF, DOC and RTF files and is based on a very simple structural feature hash. This hash
can very often be confused by certain compression and packing strategies; in other words, this clustering
logic is no holy grail, yet it has proven itself very useful in the past.
This API offers programmatic access to the clustering section of VirusTotal Intelligence:
https://www.virustotal.com/intelligence/clustering/
NOTE:
Please note that you must be logged in with a valid VirusTotal Community user account with access to
VirusTotal Intelligence in order to be able to view the clustering listing.
:param this_date: A specific day for which we want to access the clustering details, example: 2013-09-10.
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: JSON object contains several properties
num_candidates - Total number of files submitted during the given time frame for which a feature hash could
be calculated.
num_clusters - Total number of clusters generated for the given time period under consideration, a cluster
can be as small as an individual file, meaning that no other feature-wise similar file was
found.
size_top200 - The sum of the number of files in the 200 largest clusters identified.
clusters - List of JSON objects that contain details about the 200 largest clusters identified. These
objects contain 4 properties: id, label, size and avg_positives. The id field can be used
to then query the search API call for files contained in the given cluster. The label
property is a verbose human-intelligible name for the cluster. The size field is the number
of files that make up the cluster. Finally, avg_positives represents the average number of
antivirus detections that the files in the cluster exhibit.
|
[
"File",
"similarity",
"clusters",
"for",
"a",
"given",
"time",
"frame",
"."
] |
4e01e1c6d87255ec8370ac2a4ee16edce00e1e86
|
https://github.com/blacktop/virustotal-api/blob/4e01e1c6d87255ec8370ac2a4ee16edce00e1e86/virus_total_apis/api.py#L511-L550
|
train
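The cluster objects carry exactly the four properties the docstring lists, which makes iteration straightforward. A sketch with a placeholder key:

from virus_total_apis import PrivateApi

vt = PrivateApi('YOUR-PRIVATE-API-KEY')  # placeholder
resp = vt.get_file_clusters('2013-09-10')  # date format from the docstring
for cluster in resp.get('results', {}).get('clusters', []):
    print(cluster['id'], cluster['label'], cluster['size'], cluster['avg_positives'])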
|
blacktop/virustotal-api
|
virus_total_apis/api.py
|
PrivateApi.get_url_distribution
|
def get_url_distribution(self, after=None, reports='true', limit=1000, timeout=None):
""" Get a live feed with the lastest URLs submitted to VirusTotal.
Allows you to retrieve a live feed of URLs submitted to VirusTotal, along with their scan reports. This
call enables you to stay synced with VirusTotal URL submissions and replicate our dataset.
:param after: (optional) Retrieve URLs received after the given timestamp, in timestamp ascending order.
:param reports: (optional) When set to "true" each item retrieved will include the results for each particular
URL scan (in exactly the same format as the URL scan retrieving API). If the parameter is not specified, each
item returned will only contain the scanned URL and its detection ratio.
:param limit: (optional) Retrieve limit file items at most (default: 1000).
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: JSON response
"""
params = {'apikey': self.api_key, 'after': after, 'reports': reports, 'limit': limit}
try:
response = requests.get(self.base + 'url/distribution',
params=params,
proxies=self.proxies,
timeout=timeout)
except requests.RequestException as e:
return dict(error=str(e))
return _return_response_and_status_code(response)
|
python
|
def get_url_distribution(self, after=None, reports='true', limit=1000, timeout=None):
""" Get a live feed with the lastest URLs submitted to VirusTotal.
Allows you to retrieve a live feed of URLs submitted to VirusTotal, along with their scan reports. This
call enables you to stay synced with VirusTotal URL submissions and replicate our dataset.
:param after: (optional) Retrieve URLs received after the given timestamp, in timestamp ascending order.
:param reports: (optional) When set to "true" each item retrieved will include the results for each particular
URL scan (in exactly the same format as the URL scan retrieving API). If the parameter is not specified, each
item returned will only contain the scanned URL and its detection ratio.
:param limit: (optional) Retrieve limit file items at most (default: 1000).
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: JSON response
"""
params = {'apikey': self.api_key, 'after': after, 'reports': reports, 'limit': limit}
try:
response = requests.get(self.base + 'url/distribution',
params=params,
proxies=self.proxies,
timeout=timeout)
except requests.RequestException as e:
return dict(error=str(e))
return _return_response_and_status_code(response)
|
[
"def",
"get_url_distribution",
"(",
"self",
",",
"after",
"=",
"None",
",",
"reports",
"=",
"'true'",
",",
"limit",
"=",
"1000",
",",
"timeout",
"=",
"None",
")",
":",
"params",
"=",
"{",
"'apikey'",
":",
"self",
".",
"api_key",
",",
"'after'",
":",
"after",
",",
"'reports'",
":",
"reports",
",",
"'limit'",
":",
"limit",
"}",
"try",
":",
"response",
"=",
"requests",
".",
"get",
"(",
"self",
".",
"base",
"+",
"'url/distribution'",
",",
"params",
"=",
"params",
",",
"proxies",
"=",
"self",
".",
"proxies",
",",
"timeout",
"=",
"timeout",
")",
"except",
"requests",
".",
"RequestException",
"as",
"e",
":",
"return",
"dict",
"(",
"error",
"=",
"str",
"(",
"e",
")",
")",
"return",
"_return_response_and_status_code",
"(",
"response",
")"
] |
Get a live feed with the latest URLs submitted to VirusTotal.
Allows you to retrieve a live feed of URLs submitted to VirusTotal, along with their scan reports. This
call enables you to stay synced with VirusTotal URL submissions and replicate our dataset.
:param after: (optional) Retrieve URLs received after the given timestamp, in timestamp ascending order.
:param reports: (optional) When set to "true" each item retrieved will include the results for each particular
URL scan (in exactly the same format as the URL scan retrieving API). If the parameter is not specified, each
item returned will only contain the scanned URL and its detection ratio.
:param limit: (optional) Retrieve limit file items at most (default: 1000).
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: JSON response
|
[
"Get",
"a",
"live",
"feed",
"with",
"the",
"lastest",
"URLs",
"submitted",
"to",
"VirusTotal",
"."
] |
4e01e1c6d87255ec8370ac2a4ee16edce00e1e86
|
https://github.com/blacktop/virustotal-api/blob/4e01e1c6d87255ec8370ac2a4ee16edce00e1e86/virus_total_apis/api.py#L663-L689
|
train
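A minimal polling sketch; with reports disabled each item is just the scanned URL and its detection ratio, per the docstring (item structure beyond that is not assumed):

from virus_total_apis import PrivateApi

vt = PrivateApi('YOUR-PRIVATE-API-KEY')  # placeholder
resp = vt.get_url_distribution(reports='false', limit=100)
for item in resp.get('results', []):
    print(item)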
|
blacktop/virustotal-api
|
virus_total_apis/api.py
|
PrivateApi.get_url_feed
|
def get_url_feed(self, package=None, timeout=None):
""" Get a live file feed with the latest files submitted to VirusTotal.
Allows you to retrieve a live feed of reports on absolutely all URLs scanned by VirusTotal. This API requires
you to stay relatively synced with the live submissions as only a backlog of 24 hours is provided at any given
point in time.
This API returns a bzip2 compressed tarball. For per-minute packages the compressed package contains a unique
file, the file contains a json per line, this json is a full report on a given URL processed by VirusTotal
during the given time window. The URL report follows the exact same format as the response of the URL report
API if the allinfo=1 parameter is provided. For hourly packages, the tarball contains 60 files, one per each
minute of the window.
:param package: Indicates a time window to pull reports on all items received during such window.
Only per-minute and hourly windows are allowed, the format is %Y%m%dT%H%M (e.g. 20160304T0900)
or %Y%m%dT%H (e.g. 20160304T09). Time is expressed in UTC.
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: BZIP2 response: please see https://www.virustotal.com/en/documentation/private-api/#file-feed
"""
if package is None:
now = datetime.utcnow()
five_minutes_ago = now - timedelta(
minutes=now.minute % 5 + 5, seconds=now.second, microseconds=now.microsecond)
package = five_minutes_ago.strftime('%Y%m%dT%H%M')
params = {'apikey': self.api_key, 'package': package}
try:
response = requests.get(self.base + 'url/feed', params=params, proxies=self.proxies, timeout=timeout)
except requests.RequestException as e:
return dict(error=str(e))
return _return_response_and_status_code(response, json_results=False)
|
python
|
def get_url_feed(self, package=None, timeout=None):
""" Get a live file feed with the latest files submitted to VirusTotal.
Allows you to retrieve a live feed of reports on absolutely all URLs scanned by VirusTotal. This API requires
you to stay relatively synced with the live submissions as only a backlog of 24 hours is provided at any given
point in time.
This API returns a bzip2 compressed tarball. For per-minute packages the compressed package contains a unique
file, the file contains a json per line, this json is a full report on a given URL processed by VirusTotal
during the given time window. The URL report follows the exact same format as the response of the URL report
API if the allinfo=1 parameter is provided. For hourly packages, the tarball contains 60 files, one per each
minute of the window.
:param package: Indicates a time window to pull reports on all items received during such window.
Only per-minute and hourly windows are allowed, the format is %Y%m%dT%H%M (e.g. 20160304T0900)
or %Y%m%dT%H (e.g. 20160304T09). Time is expressed in UTC.
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: BZIP2 response: please see https://www.virustotal.com/en/documentation/private-api/#file-feed
"""
if package is None:
now = datetime.utcnow()
five_minutes_ago = now - timedelta(
minutes=now.minute % 5 + 5, seconds=now.second, microseconds=now.microsecond)
package = five_minutes_ago.strftime('%Y%m%dT%H%M')
params = {'apikey': self.api_key, 'package': package}
try:
response = requests.get(self.base + 'url/feed', params=params, proxies=self.proxies, timeout=timeout)
except requests.RequestException as e:
return dict(error=str(e))
return _return_response_and_status_code(response, json_results=False)
|
[
"def",
"get_url_feed",
"(",
"self",
",",
"package",
"=",
"None",
",",
"timeout",
"=",
"None",
")",
":",
"if",
"package",
"is",
"None",
":",
"now",
"=",
"datetime",
".",
"utcnow",
"(",
")",
"five_minutes_ago",
"=",
"now",
"-",
"timedelta",
"(",
"minutes",
"=",
"now",
".",
"minute",
"%",
"5",
"+",
"5",
",",
"seconds",
"=",
"now",
".",
"second",
",",
"microseconds",
"=",
"now",
".",
"microsecond",
")",
"package",
"=",
"five_minutes_ago",
".",
"strftime",
"(",
"'%Y%m%dT%H%M'",
")",
"params",
"=",
"{",
"'apikey'",
":",
"self",
".",
"api_key",
",",
"'package'",
":",
"package",
"}",
"try",
":",
"response",
"=",
"requests",
".",
"get",
"(",
"self",
".",
"base",
"+",
"'url/feed'",
",",
"params",
"=",
"params",
",",
"proxies",
"=",
"self",
".",
"proxies",
",",
"timeout",
"=",
"timeout",
")",
"except",
"requests",
".",
"RequestException",
"as",
"e",
":",
"return",
"dict",
"(",
"error",
"=",
"str",
"(",
"e",
")",
")",
"return",
"_return_response_and_status_code",
"(",
"response",
",",
"json_results",
"=",
"False",
")"
] |
Get a live URL feed with the latest URLs submitted to VirusTotal.
Allows you to retrieve a live feed of reports on absolutely all URLs scanned by VirusTotal. This API requires
you to stay relatively synced with the live submissions as only a backlog of 24 hours is provided at any given
point in time.
This API returns a bzip2 compressed tarball. For per-minute packages the compressed package contains a unique
file, the file contains a json per line, this json is a full report on a given URL processed by VirusTotal
during the given time window. The URL report follows the exact same format as the response of the URL report
API if the allinfo=1 parameter is provided. For hourly packages, the tarball contains 60 files, one per each
minute of the window.
:param package: Indicates a time window to pull reports on all items received during such window.
Only per-minute and hourly windows are allowed, the format is %Y%m%dT%H%M (e.g. 20160304T0900)
or %Y%m%dT%H (e.g. 20160304T09). Time is expressed in UTC.
:param timeout: The amount of time in seconds the request should wait before timing out.
:return: BZIP2 response: please see https://www.virustotal.com/en/documentation/private-api/#file-feed
|
[
"Get",
"a",
"live",
"file",
"feed",
"with",
"the",
"latest",
"files",
"submitted",
"to",
"VirusTotal",
"."
] |
4e01e1c6d87255ec8370ac2a4ee16edce00e1e86
|
https://github.com/blacktop/virustotal-api/blob/4e01e1c6d87255ec8370ac2a4ee16edce00e1e86/virus_total_apis/api.py#L691-L724
|
train
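Because json_results=False is passed to the response helper, a successful call carries raw bzip2 bytes under 'results'. A sketch that persists one per-minute package (window value from the docstring's example):

from virus_total_apis import PrivateApi

vt = PrivateApi('YOUR-PRIVATE-API-KEY')  # placeholder
resp = vt.get_url_feed('20160304T0900')  # per-minute window, UTC
if resp.get('response_code') == 200:
    with open('url-feed-20160304T0900.tar.bz2', 'wb') as fh:
        fh.write(resp['results'])  # raw bzip2 tarball bytes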
|
blacktop/virustotal-api
|
virus_total_apis/api.py
|
IntelApi.get_intel_notifications_feed
|
def get_intel_notifications_feed(self, page=None, timeout=None):
""" Get notification feed in JSON for further processing.
:param page: the next_page property of the results of a previously issued query to this API. This parameter
should not be provided if it is the very first query to the API, i.e. if we are retrieving the
first page of results.
:param timeout: The amount of time in seconds the request should wait before timing out.
:returns: The next page identifier and the results (JSON is possible with .json())
"""
params = {'apikey': self.api_key, 'next': page}
try:
response = requests.get(self.base + 'hunting/notifications-feed/',
params=params,
proxies=self.proxies,
timeout=timeout)
# VT returns an empty result, len(content)==0, and status OK if there are no pending notifications.
# To keep the API consistent we generate an empty object instead.
# This might not be necessary with a later release of the VTI API. (bug has been submitted)
if len(response.content) == 0:
response.__dict__['_content'] = \
b'{"notifications":[],"verbose_msg":"No pending notification","result":0,"next":null}'
except requests.RequestException as e:
return dict(error=str(e))
return _return_response_and_status_code(response)
|
python
|
def get_intel_notifications_feed(self, page=None, timeout=None):
""" Get notification feed in JSON for further processing.
:param page: the next_page property of the results of a previously issued query to this API. This parameter
should not be provided if it is the very first query to the API, i.e. if we are retrieving the
first page of results.
:param timeout: The amount of time in seconds the request should wait before timing out.
:returns: The next page identifier and the results (JSON is possible with .json())
"""
params = {'apikey': self.api_key, 'next': page}
try:
response = requests.get(self.base + 'hunting/notifications-feed/',
params=params,
proxies=self.proxies,
timeout=timeout)
# VT returns an empty result, len(content)==0, and status OK if there are no pending notifications.
# To keep the API consistent we generate an empty object instead.
# This might not be necessary with a later release of the VTI API. (bug has been submitted)
if len(response.content) == 0:
response.__dict__['_content'] = \
b'{"notifications":[],"verbose_msg":"No pending notification","result":0,"next":null}'
except requests.RequestException as e:
return dict(error=str(e))
return _return_response_and_status_code(response)
|
[
"def",
"get_intel_notifications_feed",
"(",
"self",
",",
"page",
"=",
"None",
",",
"timeout",
"=",
"None",
")",
":",
"params",
"=",
"{",
"'apikey'",
":",
"self",
".",
"api_key",
",",
"'next'",
":",
"page",
"}",
"try",
":",
"response",
"=",
"requests",
".",
"get",
"(",
"self",
".",
"base",
"+",
"'hunting/notifications-feed/'",
",",
"params",
"=",
"params",
",",
"proxies",
"=",
"self",
".",
"proxies",
",",
"timeout",
"=",
"timeout",
")",
"# VT returns an empty result, len(content)==0, and status OK if there are no pending notifications.",
"# To keep the API consistent we generate an empty object instead.",
"# This might not be necessary with a later release of the VTI API. (bug has been submitted)",
"if",
"len",
"(",
"response",
".",
"content",
")",
"==",
"0",
":",
"response",
".",
"__dict__",
"[",
"'_content'",
"]",
"=",
"b'{\"notifications\":[],\"verbose_msg\":\"No pending notification\",\"result\":0,\"next\":null}'",
"except",
"requests",
".",
"RequestException",
"as",
"e",
":",
"return",
"dict",
"(",
"error",
"=",
"str",
"(",
"e",
")",
")",
"return",
"_return_response_and_status_code",
"(",
"response",
")"
] |
Get notification feed in JSON for further processing.
:param page: the next_page property of the results of a previously issued query to this API. This parameter
should not be provided if it is the very first query to the API, i.e. if we are retrieving the
first page of results.
:param timeout: The amount of time in seconds the request should wait before timing out.
:returns: The next page identifier and the results (JSON is possible with .json())
|
[
"Get",
"notification",
"feed",
"in",
"JSON",
"for",
"further",
"processing",
"."
] |
4e01e1c6d87255ec8370ac2a4ee16edce00e1e86
|
https://github.com/blacktop/virustotal-api/blob/4e01e1c6d87255ec8370ac2a4ee16edce00e1e86/virus_total_apis/api.py#L887-L911
|
train
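The 'notifications' and 'next' keys are grounded in the empty object this method synthesizes, so a paging loop can rely on them. A sketch with a placeholder key:

from virus_total_apis import IntelApi

vt = IntelApi('YOUR-PRIVATE-API-KEY')  # placeholder
page = None
while True:
    resp = vt.get_intel_notifications_feed(page=page)
    feed = resp.get('results', {})
    for notification in feed.get('notifications', []):
        print(notification)
    page = feed.get('next')
    if not page:
        break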
|
blacktop/virustotal-api
|
virus_total_apis/api.py
|
IntelApi.delete_intel_notifications
|
def delete_intel_notifications(self, ids, timeout=None):
""" Programmatically delete notifications via the Intel API.
:param ids: A list of IDs to delete from the notification feed.
:returns: The post response.
"""
if not isinstance(ids, list):
raise TypeError("ids must be a list")
# VirusTotal needs ids as a stringified array
data = json.dumps(ids)
try:
response = requests.post(
self.base + 'hunting/delete-notifications/programmatic/?key=' + self.api_key,
data=data,
proxies=self.proxies,
timeout=timeout)
except requests.RequestException as e:
return dict(error=str(e))
return _return_response_and_status_code(response)
|
python
|
def delete_intel_notifications(self, ids, timeout=None):
""" Programmatically delete notifications via the Intel API.
:param ids: A list of IDs to delete from the notification feed.
:returns: The post response.
"""
if not isinstance(ids, list):
raise TypeError("ids must be a list")
# VirusTotal needs ids as a stringified array
data = json.dumps(ids)
try:
response = requests.post(
self.base + 'hunting/delete-notifications/programmatic/?key=' + self.api_key,
data=data,
proxies=self.proxies,
timeout=timeout)
except requests.RequestException as e:
return dict(error=str(e))
return _return_response_and_status_code(response)
|
[
"def",
"delete_intel_notifications",
"(",
"self",
",",
"ids",
",",
"timeout",
"=",
"None",
")",
":",
"if",
"not",
"isinstance",
"(",
"ids",
",",
"list",
")",
":",
"raise",
"TypeError",
"(",
"\"ids must be a list\"",
")",
"# VirusTotal needs ids as a stringified array",
"data",
"=",
"json",
".",
"dumps",
"(",
"ids",
")",
"try",
":",
"response",
"=",
"requests",
".",
"post",
"(",
"self",
".",
"base",
"+",
"'hunting/delete-notifications/programmatic/?key='",
"+",
"self",
".",
"api_key",
",",
"data",
"=",
"data",
",",
"proxies",
"=",
"self",
".",
"proxies",
",",
"timeout",
"=",
"timeout",
")",
"except",
"requests",
".",
"RequestException",
"as",
"e",
":",
"return",
"dict",
"(",
"error",
"=",
"str",
"(",
"e",
")",
")",
"return",
"_return_response_and_status_code",
"(",
"response",
")"
] |
Programmatically delete notifications via the Intel API.
:param ids: A list of IDs to delete from the notification feed.
:returns: The post response.
|
[
"Programmatically",
"delete",
"notifications",
"via",
"the",
"Intel",
"API",
"."
] |
4e01e1c6d87255ec8370ac2a4ee16edce00e1e86
|
https://github.com/blacktop/virustotal-api/blob/4e01e1c6d87255ec8370ac2a4ee16edce00e1e86/virus_total_apis/api.py#L913-L934
|
train
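A sketch combining the feed with deletion; the per-notification 'id' field is an assumption, while the list requirement comes straight from the TypeError guard above:

from virus_total_apis import IntelApi

vt = IntelApi('YOUR-PRIVATE-API-KEY')  # placeholder
resp = vt.get_intel_notifications_feed()
ids = [n['id'] for n in resp.get('results', {}).get('notifications', [])]  # 'id' assumed
if ids:
    print(vt.delete_intel_notifications(ids))  # must be a list, or TypeError is raised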
|
localstack/localstack-python-client
|
localstack_client/session.py
|
Session.get_credentials
|
def get_credentials(self):
"""
Returns botocore.credentials.Credentials object.
"""
return Credentials(access_key=self.aws_access_key_id,
secret_key=self.aws_secret_access_key,
token=self.aws_session_token)
|
python
|
def get_credentials(self):
"""
Returns botocore.credentials.Credentials object.
"""
return Credentials(access_key=self.aws_access_key_id,
secret_key=self.aws_secret_access_key,
token=self.aws_session_token)
|
[
"def",
"get_credentials",
"(",
"self",
")",
":",
"return",
"Credentials",
"(",
"access_key",
"=",
"self",
".",
"aws_access_key_id",
",",
"secret_key",
"=",
"self",
".",
"aws_secret_access_key",
",",
"token",
"=",
"self",
".",
"aws_session_token",
")"
] |
Returns botocore.credentials.Credentials object.
|
[
"Returns",
"botocore",
".",
"credential",
".",
"Credential",
"object",
"."
] |
62ab3f3d5ce94105f8374963397dfbf05d4f0642
|
https://github.com/localstack/localstack-python-client/blob/62ab3f3d5ce94105f8374963397dfbf05d4f0642/localstack_client/session.py#L25-L31
|
train
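Callers rarely invoke get_credentials() directly; it backs the boto3-compatible client machinery. A sketch, where the client() method is assumed from the same Session class:

import localstack_client.session

session = localstack_client.session.Session()
creds = session.get_credentials()
print(creds.access_key, creds.secret_key)  # dummy values aimed at LocalStack
s3 = session.client('s3')  # boto3-style client bound to the local endpoint (assumed)
print(s3.list_buckets())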
|
fabric-bolt/fabric-bolt
|
fabric_bolt/hosts/views.py
|
HostCreate.form_valid
|
def form_valid(self, form):
"""First call the parent's form valid then let the user know it worked."""
form_valid_from_parent = super(HostCreate, self).form_valid(form)
messages.success(self.request, 'Host {} Successfully Created'.format(self.object))
return form_valid_from_parent
|
python
|
def form_valid(self, form):
"""First call the parent's form valid then let the user know it worked."""
form_valid_from_parent = super(HostCreate, self).form_valid(form)
messages.success(self.request, 'Host {} Successfully Created'.format(self.object))
return form_valid_from_parent
|
[
"def",
"form_valid",
"(",
"self",
",",
"form",
")",
":",
"form_valid_from_parent",
"=",
"super",
"(",
"HostCreate",
",",
"self",
")",
".",
"form_valid",
"(",
"form",
")",
"messages",
".",
"success",
"(",
"self",
".",
"request",
",",
"'Host {} Successfully Created'",
".",
"format",
"(",
"self",
".",
"object",
")",
")",
"return",
"form_valid_from_parent"
] |
First call the parent's form valid then let the user know it worked.
|
[
"First",
"call",
"the",
"parent",
"s",
"form",
"valid",
"then",
"let",
"the",
"user",
"know",
"it",
"worked",
"."
] |
0f434783026f1b9ce16a416fa496d76921fe49ca
|
https://github.com/fabric-bolt/fabric-bolt/blob/0f434783026f1b9ce16a416fa496d76921fe49ca/fabric_bolt/hosts/views.py#L30-L36
|
train
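The pattern, a success message layered over CreateView.form_valid(), is generic Django. A standalone sketch with a hypothetical view class:

from django.contrib import messages
from django.views.generic import CreateView

class WidgetCreate(CreateView):  # hypothetical view; model/fields omitted
    def form_valid(self, form):
        response = super(WidgetCreate, self).form_valid(form)  # saves, sets self.object
        messages.success(self.request, 'Widget {} Successfully Created'.format(self.object))
        return response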
|
fabric-bolt/fabric-bolt
|
fabric_bolt/hosts/views.py
|
SSHKeys.post
|
def post(self, *args, **kwargs):
"""Create the SSH file & then return the normal get method..."""
existing_ssh = models.SSHConfig.objects.all()
if existing_ssh.exists():
return self.get_view()
remote_user = self.request.POST.get('remote_user', 'root')
create_ssh_config(remote_user=remote_user)
return self.get_view()
|
python
|
def post(self, *args, **kwargs):
"""Create the SSH file & then return the normal get method..."""
existing_ssh = models.SSHConfig.objects.all()
if existing_ssh.exists():
return self.get_view()
remote_user = self.request.POST.get('remote_user', 'root')
create_ssh_config(remote_user=remote_user)
return self.get_view()
|
[
"def",
"post",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"existing_ssh",
"=",
"models",
".",
"SSHConfig",
".",
"objects",
".",
"all",
"(",
")",
"if",
"existing_ssh",
".",
"exists",
"(",
")",
":",
"return",
"self",
".",
"get_view",
"(",
")",
"remote_user",
"=",
"self",
".",
"request",
".",
"POST",
".",
"get",
"(",
"'remote_user'",
",",
"'root'",
")",
"create_ssh_config",
"(",
"remote_user",
"=",
"remote_user",
")",
"return",
"self",
".",
"get_view",
"(",
")"
] |
Create the SSH file & then return the normal get method...
|
[
"Create",
"the",
"SSH",
"file",
"&",
"then",
"return",
"the",
"normal",
"get",
"method",
"..."
] |
0f434783026f1b9ce16a416fa496d76921fe49ca
|
https://github.com/fabric-bolt/fabric-bolt/blob/0f434783026f1b9ce16a416fa496d76921fe49ca/fabric_bolt/hosts/views.py#L79-L91
|
train
|
fabric-bolt/fabric-bolt
|
fabric_bolt/fabfile.py
|
update_sandbox_site
|
def update_sandbox_site(comment_text):
"""put's a text file on the server"""
file_to_deliver = NamedTemporaryFile(delete=False)
file_text = "Deployed at: {} <br /> Comment: {}".format(datetime.datetime.now().strftime('%c'), cgi.escape(comment_text))
file_to_deliver.write(file_text)
file_to_deliver.close()
put(file_to_deliver.name, '/var/www/html/index.html', use_sudo=True)
|
python
|
def update_sandbox_site(comment_text):
"""put's a text file on the server"""
file_to_deliver = NamedTemporaryFile(delete=False)
file_text = "Deployed at: {} <br /> Comment: {}".format(datetime.datetime.now().strftime('%c'), cgi.escape(comment_text))
file_to_deliver.write(file_text)
file_to_deliver.close()
put(file_to_deliver.name, '/var/www/html/index.html', use_sudo=True)
|
[
"def",
"update_sandbox_site",
"(",
"comment_text",
")",
":",
"file_to_deliver",
"=",
"NamedTemporaryFile",
"(",
"delete",
"=",
"False",
")",
"file_text",
"=",
"\"Deployed at: {} <br /> Comment: {}\"",
".",
"format",
"(",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
".",
"strftime",
"(",
"'%c'",
")",
",",
"cgi",
".",
"escape",
"(",
"comment_text",
")",
")",
"file_to_deliver",
".",
"write",
"(",
"file_text",
")",
"file_to_deliver",
".",
"close",
"(",
")",
"put",
"(",
"file_to_deliver",
".",
"name",
",",
"'/var/www/html/index.html'",
",",
"use_sudo",
"=",
"True",
")"
] |
puts a text file on the server
|
[
"put",
"s",
"a",
"text",
"file",
"on",
"the",
"server"
] |
0f434783026f1b9ce16a416fa496d76921fe49ca
|
https://github.com/fabric-bolt/fabric-bolt/blob/0f434783026f1b9ce16a416fa496d76921fe49ca/fabric_bolt/fabfile.py#L133-L143
|
train
|
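A side note on update_sandbox_site above: cgi.escape is a Python 2 idiom (deprecated in Python 3.2 and removed in 3.8). A minimal Python 3 sketch of the same write-a-temp-file-then-ship-it step, with the fabric put() upload left out:

import datetime
import html
from tempfile import NamedTemporaryFile

comment_text = "<b>release 42</b>"  # illustrative input
file_text = "Deployed at: {} <br /> Comment: {}".format(
    datetime.datetime.now().strftime('%c'), html.escape(comment_text))

# delete=False keeps the file on disk so a later put() could upload it
with NamedTemporaryFile('w', delete=False, suffix='.html') as f:
    f.write(file_text)
    local_path = f.name

print(local_path)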
fabric-bolt/fabric-bolt
|
fabric_bolt/projects/models.py
|
Project.web_hooks
|
def web_hooks(self, include_global=True):
"""Get all web hooks for this project. Includes global hooks."""
from fabric_bolt.web_hooks.models import Hook
ors = [Q(project=self)]
if include_global:
ors.append(Q(project=None))
hooks = Hook.objects.filter(reduce(operator.or_, ors))
return hooks
|
python
|
def web_hooks(self, include_global=True):
"""Get all web hooks for this project. Includes global hooks."""
from fabric_bolt.web_hooks.models import Hook
ors = [Q(project=self)]
if include_global:
ors.append(Q(project=None))
hooks = Hook.objects.filter(reduce(operator.or_, ors))
return hooks
|
[
"def",
"web_hooks",
"(",
"self",
",",
"include_global",
"=",
"True",
")",
":",
"from",
"fabric_bolt",
".",
"web_hooks",
".",
"models",
"import",
"Hook",
"ors",
"=",
"[",
"Q",
"(",
"project",
"=",
"self",
")",
"]",
"if",
"include_global",
":",
"ors",
".",
"append",
"(",
"Q",
"(",
"project",
"=",
"None",
")",
")",
"hooks",
"=",
"Hook",
".",
"objects",
".",
"filter",
"(",
"reduce",
"(",
"operator",
".",
"or_",
",",
"ors",
")",
")",
"return",
"hooks"
] |
Get all web hooks for this project. Includes global hooks.
|
[
"Get",
"all",
"web",
"hooks",
"for",
"this",
"project",
".",
"Includes",
"global",
"hooks",
"."
] |
0f434783026f1b9ce16a416fa496d76921fe49ca
|
https://github.com/fabric-bolt/fabric-bolt/blob/0f434783026f1b9ce16a416fa496d76921fe49ca/fabric_bolt/projects/models.py#L54-L64
|
train
|
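The reduce(operator.or_, ors) call in web_hooks above folds the | operator over the accumulated Q objects. The pattern works on anything that defines __or__, so it can be sanity-checked without Django, for instance with sets:

from functools import reduce
import operator

# reduce(operator.or_, seq) evaluates seq[0] | seq[1] | seq[2] | ...
filters = [{1, 2}, {2, 3}, {5}]
print(reduce(operator.or_, filters))  # {1, 2, 3, 5}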
fabric-bolt/fabric-bolt
|
fabric_bolt/projects/models.py
|
Project.get_deployment_count
|
def get_deployment_count(self):
"""Utility function to get the number of deployments a given project has"""
ret = self.stage_set.annotate(num_deployments=Count('deployment')).aggregate(total_deployments=Sum('num_deployments'))
return ret['total_deployments']
|
python
|
def get_deployment_count(self):
"""Utility function to get the number of deployments a given project has"""
ret = self.stage_set.annotate(num_deployments=Count('deployment')).aggregate(total_deployments=Sum('num_deployments'))
return ret['total_deployments']
|
[
"def",
"get_deployment_count",
"(",
"self",
")",
":",
"ret",
"=",
"self",
".",
"stage_set",
".",
"annotate",
"(",
"num_deployments",
"=",
"Count",
"(",
"'deployment'",
")",
")",
".",
"aggregate",
"(",
"total_deployments",
"=",
"Sum",
"(",
"'num_deployments'",
")",
")",
"return",
"ret",
"[",
"'total_deployments'",
"]"
] |
Utility function to get the number of deployments a given project has
|
[
"Utility",
"function",
"to",
"get",
"the",
"number",
"of",
"deployments",
"a",
"given",
"project",
"has"
] |
0f434783026f1b9ce16a416fa496d76921fe49ca
|
https://github.com/fabric-bolt/fabric-bolt/blob/0f434783026f1b9ce16a416fa496d76921fe49ca/fabric_bolt/projects/models.py#L66-L70
|
train
|
fabric-bolt/fabric-bolt
|
fabric_bolt/projects/models.py
|
Stage.get_configurations
|
def get_configurations(self):
"""
Generates a dictionary that's made up of the configurations on the project.
Any configurations on a project that are duplicated on a stage, the stage configuration will take precedence.
"""
project_configurations_dictionary = {}
project_configurations = self.project.project_configurations()
# Create project specific configurations dictionary
for config in project_configurations:
project_configurations_dictionary[config.key] = config
stage_configurations_dictionary = {}
stage_configurations = self.stage_configurations()
# Create stage specific configurations dictionary
for s in stage_configurations:
stage_configurations_dictionary[s.key] = s
# override project specific configuration with the ones in the stage if they are there
project_configurations_dictionary.update(stage_configurations_dictionary)
# Return the updated configurations
return project_configurations_dictionary
|
python
|
def get_configurations(self):
"""
Generates a dictionary that's made up of the configurations on the project.
Any configurations on a project that are duplicated on a stage, the stage configuration will take precedence.
"""
project_configurations_dictionary = {}
project_configurations = self.project.project_configurations()
# Create project specific configurations dictionary
for config in project_configurations:
project_configurations_dictionary[config.key] = config
stage_configurations_dictionary = {}
stage_configurations = self.stage_configurations()
# Create stage specific configurations dictionary
for s in stage_configurations:
stage_configurations_dictionary[s.key] = s
# override project specific configuration with the ones in the stage if they are there
project_configurations_dictionary.update(stage_configurations_dictionary)
# Return the updated configurations
return project_configurations_dictionary
|
[
"def",
"get_configurations",
"(",
"self",
")",
":",
"project_configurations_dictionary",
"=",
"{",
"}",
"project_configurations",
"=",
"self",
".",
"project",
".",
"project_configurations",
"(",
")",
"# Create project specific configurations dictionary",
"for",
"config",
"in",
"project_configurations",
":",
"project_configurations_dictionary",
"[",
"config",
".",
"key",
"]",
"=",
"config",
"stage_configurations_dictionary",
"=",
"{",
"}",
"stage_configurations",
"=",
"self",
".",
"stage_configurations",
"(",
")",
"# Create stage specific configurations dictionary",
"for",
"s",
"in",
"stage_configurations",
":",
"stage_configurations_dictionary",
"[",
"s",
".",
"key",
"]",
"=",
"s",
"# override project specific configuration with the ones in the stage if they are there",
"project_configurations_dictionary",
".",
"update",
"(",
"stage_configurations_dictionary",
")",
"# Return the updated configurations",
"return",
"project_configurations_dictionary"
] |
Generates a dictionary that's made up of the configurations on the project.
Any configurations on a project that are duplicated on a stage, the stage configuration will take precedence.
|
[
"Generates",
"a",
"dictionary",
"that",
"s",
"made",
"up",
"of",
"the",
"configurations",
"on",
"the",
"project",
".",
"Any",
"configurations",
"on",
"a",
"project",
"that",
"are",
"duplicated",
"on",
"a",
"stage",
"the",
"stage",
"configuration",
"will",
"take",
"precedence",
"."
] |
0f434783026f1b9ce16a416fa496d76921fe49ca
|
https://github.com/fabric-bolt/fabric-bolt/blob/0f434783026f1b9ce16a416fa496d76921fe49ca/fabric_bolt/projects/models.py#L133-L157
|
train
|
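Stripped of the ORM lookups, the precedence rule in get_configurations is a plain dict merge. A minimal standalone sketch (keys and values are hypothetical):

project_configs = {'DEBUG': 'false', 'DB_HOST': 'db.internal'}  # project level
stage_configs = {'DEBUG': 'true'}                               # stage level

merged = dict(project_configs)
merged.update(stage_configs)  # stage keys win on collision

print(merged)  # {'DEBUG': 'true', 'DB_HOST': 'db.internal'}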
fabric-bolt/fabric-bolt
|
fabric_bolt/projects/models.py
|
Configuration.get_absolute_url
|
def get_absolute_url(self):
"""Determine where I am coming from and where I am going"""
# Determine if this configuration is on a stage
if self.stage:
# Stage specific configurations go back to the stage view
url = reverse('projects_stage_view', args=(self.project.pk, self.stage.pk))
else:
# Project specific configurations go back to the project page
url = self.project.get_absolute_url()
return url
|
python
|
def get_absolute_url(self):
"""Determine where I am coming from and where I am going"""
# Determine if this configuration is on a stage
if self.stage:
# Stage specific configurations go back to the stage view
url = reverse('projects_stage_view', args=(self.project.pk, self.stage.pk))
else:
# Project specific configurations go back to the project page
url = self.project.get_absolute_url()
return url
|
[
"def",
"get_absolute_url",
"(",
"self",
")",
":",
"# Determine if this configuration is on a stage",
"if",
"self",
".",
"stage",
":",
"# Stage specific configurations go back to the stage view",
"url",
"=",
"reverse",
"(",
"'projects_stage_view'",
",",
"args",
"=",
"(",
"self",
".",
"project",
".",
"pk",
",",
"self",
".",
"stage",
".",
"pk",
")",
")",
"else",
":",
"# Project specific configurations go back to the project page",
"url",
"=",
"self",
".",
"project",
".",
"get_absolute_url",
"(",
")",
"return",
"url"
] |
Determine where I am coming from and where I am going
|
[
"Determine",
"where",
"I",
"am",
"coming",
"from",
"and",
"where",
"I",
"am",
"going"
] |
0f434783026f1b9ce16a416fa496d76921fe49ca
|
https://github.com/fabric-bolt/fabric-bolt/blob/0f434783026f1b9ce16a416fa496d76921fe49ca/fabric_bolt/projects/models.py#L215-L226
|
train
|
fabric-bolt/fabric-bolt
|
fabric_bolt/accounts/models.py
|
DeployUser.gravatar
|
def gravatar(self, size=20):
"""
Construct a gravatar image address for the user
"""
default = "mm"
gravatar_url = "//www.gravatar.com/avatar/" + hashlib.md5(self.email.lower()).hexdigest() + "?"
gravatar_url += urllib.urlencode({'d': default, 's': str(size)})
return gravatar_url
|
python
|
def gravatar(self, size=20):
"""
Construct a gravatar image address for the user
"""
default = "mm"
gravatar_url = "//www.gravatar.com/avatar/" + hashlib.md5(self.email.lower()).hexdigest() + "?"
gravatar_url += urllib.urlencode({'d': default, 's': str(size)})
return gravatar_url
|
[
"def",
"gravatar",
"(",
"self",
",",
"size",
"=",
"20",
")",
":",
"default",
"=",
"\"mm\"",
"gravatar_url",
"=",
"\"//www.gravatar.com/avatar/\"",
"+",
"hashlib",
".",
"md5",
"(",
"self",
".",
"email",
".",
"lower",
"(",
")",
")",
".",
"hexdigest",
"(",
")",
"+",
"\"?\"",
"gravatar_url",
"+=",
"urllib",
".",
"urlencode",
"(",
"{",
"'d'",
":",
"default",
",",
"'s'",
":",
"str",
"(",
"size",
")",
"}",
")",
"return",
"gravatar_url"
] |
Construct a gravatar image address for the user
|
[
"Construct",
"a",
"gravatar",
"image",
"address",
"for",
"the",
"user"
] |
0f434783026f1b9ce16a416fa496d76921fe49ca
|
https://github.com/fabric-bolt/fabric-bolt/blob/0f434783026f1b9ce16a416fa496d76921fe49ca/fabric_bolt/accounts/models.py#L65-L74
|
train
|
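The gravatar method is Python 2 code: hashlib.md5 is fed a str, and urlencode lives directly on urllib. A Python 3 sketch of the same URL construction (Gravatar hashes the lowercased address; trimming whitespace first is also recommended by its docs):

import hashlib
from urllib.parse import urlencode

def gravatar_url(email, size=20, default="mm"):
    # md5 needs bytes under Python 3, hence the explicit encode
    digest = hashlib.md5(email.strip().lower().encode("utf-8")).hexdigest()
    return "//www.gravatar.com/avatar/" + digest + "?" + urlencode({"d": default, "s": str(size)})

print(gravatar_url("user@example.com"))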
fabric-bolt/fabric-bolt
|
fabric_bolt/web_hooks/managers.py
|
HookManager.hooks
|
def hooks(self, project):
""" Look up the urls we need to post to"""
return self.get_queryset().filter(
Q(project=None) |
Q(project=project)
).distinct('url')
|
python
|
def hooks(self, project):
""" Look up the urls we need to post to"""
return self.get_queryset().filter(
Q(project=None) |
Q(project=project)
).distinct('url')
|
[
"def",
"hooks",
"(",
"self",
",",
"project",
")",
":",
"return",
"self",
".",
"get_queryset",
"(",
")",
".",
"filter",
"(",
"Q",
"(",
"project",
"=",
"None",
")",
"|",
"Q",
"(",
"project",
"=",
"project",
")",
")",
".",
"distinct",
"(",
"'url'",
")"
] |
Look up the urls we need to post to
|
[
"Look",
"up",
"the",
"urls",
"we",
"need",
"to",
"post",
"to"
] |
0f434783026f1b9ce16a416fa496d76921fe49ca
|
https://github.com/fabric-bolt/fabric-bolt/blob/0f434783026f1b9ce16a416fa496d76921fe49ca/fabric_bolt/web_hooks/managers.py#L7-L13
|
train
|
fabric-bolt/fabric-bolt
|
fabric_bolt/web_hooks/receivers.py
|
web_hook_receiver
|
def web_hook_receiver(sender, **kwargs):
"""Generic receiver for the web hook firing piece."""
deployment = Deployment.objects.get(pk=kwargs.get('deployment_id'))
hooks = deployment.web_hooks
if not hooks:
return
for hook in hooks:
data = payload_generator(deployment)
deliver_hook(deployment, hook.url, data)
|
python
|
def web_hook_receiver(sender, **kwargs):
"""Generic receiver for the web hook firing piece."""
deployment = Deployment.objects.get(pk=kwargs.get('deployment_id'))
hooks = deployment.web_hooks
if not hooks:
return
for hook in hooks:
data = payload_generator(deployment)
deliver_hook(deployment, hook.url, data)
|
[
"def",
"web_hook_receiver",
"(",
"sender",
",",
"*",
"*",
"kwargs",
")",
":",
"deployment",
"=",
"Deployment",
".",
"objects",
".",
"get",
"(",
"pk",
"=",
"kwargs",
".",
"get",
"(",
"'deployment_id'",
")",
")",
"hooks",
"=",
"deployment",
".",
"web_hooks",
"if",
"not",
"hooks",
":",
"return",
"for",
"hook",
"in",
"hooks",
":",
"data",
"=",
"payload_generator",
"(",
"deployment",
")",
"deliver_hook",
"(",
"deployment",
",",
"hook",
".",
"url",
",",
"data",
")"
] |
Generic receiver for the web hook firing piece.
|
[
"Generic",
"receiver",
"for",
"the",
"web",
"hook",
"firing",
"piece",
"."
] |
0f434783026f1b9ce16a416fa496d76921fe49ca
|
https://github.com/fabric-bolt/fabric-bolt/blob/0f434783026f1b9ce16a416fa496d76921fe49ca/fabric_bolt/web_hooks/receivers.py#L9-L23
|
train
|
fabric-bolt/fabric-bolt
|
fabric_bolt/core/mixins/tables.py
|
PaginateTable.paginate
|
def paginate(self, klass=Paginator, per_page=None, page=1, *args, **kwargs):
"""
Paginates the table using a paginator and creates a ``page`` property
containing information for the current page.
:type klass: Paginator class
:param klass: a paginator class to paginate the results
:type per_page: `int`
:param per_page: how many records are displayed on each page
:type page: `int`
:param page: which page should be displayed.
Extra arguments are passed to the paginator.
Pagination exceptions (`~django.core.paginator.EmptyPage` and
`~django.core.paginator.PageNotAnInteger`) may be raised from this
method and should be handled by the caller.
"""
self.per_page_options = [25, 50, 100, 200] # This should probably be a passed in option
self.per_page = per_page = per_page or self._meta.per_page
self.paginator = klass(self.rows, per_page, *args, **kwargs)
self.page = self.paginator.page(page)
# Calc variables for use in displaying first, adjacent, and last page links
adjacent_pages = 1 # This should probably be a passed in option
# Starting page (first page between the ellipsis)
start_page = max(self.page.number - adjacent_pages, 1)
if start_page <= 3:
start_page = 1
# Ending page (last page between the ellipsis)
end_page = self.page.number + adjacent_pages + 1
if end_page >= self.paginator.num_pages - 1:
end_page = self.paginator.num_pages + 1
# Paging vars used in template
self.page_numbers = [n for n in range(start_page, end_page) if 0 < n <= self.paginator.num_pages]
self.show_first = 1 not in self.page_numbers
self.show_last = self.paginator.num_pages not in self.page_numbers
|
python
|
def paginate(self, klass=Paginator, per_page=None, page=1, *args, **kwargs):
"""
Paginates the table using a paginator and creates a ``page`` property
containing information for the current page.
:type klass: Paginator class
:param klass: a paginator class to paginate the results
:type per_page: `int`
:param per_page: how many records are displayed on each page
:type page: `int`
:param page: which page should be displayed.
Extra arguments are passed to the paginator.
Pagination exceptions (`~django.core.paginator.EmptyPage` and
`~django.core.paginator.PageNotAnInteger`) may be raised from this
method and should be handled by the caller.
"""
self.per_page_options = [25, 50, 100, 200] # This should probably be a passed in option
self.per_page = per_page = per_page or self._meta.per_page
self.paginator = klass(self.rows, per_page, *args, **kwargs)
self.page = self.paginator.page(page)
# Calc variables for use in displaying first, adjacent, and last page links
adjacent_pages = 1 # This should probably be a passed in option
# Starting page (first page between the ellipsis)
start_page = max(self.page.number - adjacent_pages, 1)
if start_page <= 3:
start_page = 1
# Ending page (last page between the ellipsis)
end_page = self.page.number + adjacent_pages + 1
if end_page >= self.paginator.num_pages - 1:
end_page = self.paginator.num_pages + 1
# Paging vars used in template
self.page_numbers = [n for n in range(start_page, end_page) if 0 < n <= self.paginator.num_pages]
self.show_first = 1 not in self.page_numbers
self.show_last = self.paginator.num_pages not in self.page_numbers
|
[
"def",
"paginate",
"(",
"self",
",",
"klass",
"=",
"Paginator",
",",
"per_page",
"=",
"None",
",",
"page",
"=",
"1",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"per_page_options",
"=",
"[",
"25",
",",
"50",
",",
"100",
",",
"200",
"]",
"# This should probably be a passed in option",
"self",
".",
"per_page",
"=",
"per_page",
"=",
"per_page",
"or",
"self",
".",
"_meta",
".",
"per_page",
"self",
".",
"paginator",
"=",
"klass",
"(",
"self",
".",
"rows",
",",
"per_page",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"self",
".",
"page",
"=",
"self",
".",
"paginator",
".",
"page",
"(",
"page",
")",
"# Calc variables for use in displaying first, adjacent, and last page links",
"adjacent_pages",
"=",
"1",
"# This should probably be a passed in option",
"# Starting page (first page between the ellipsis)",
"start_page",
"=",
"max",
"(",
"self",
".",
"page",
".",
"number",
"-",
"adjacent_pages",
",",
"1",
")",
"if",
"start_page",
"<=",
"3",
":",
"start_page",
"=",
"1",
"# Ending page (last page between the ellipsis)",
"end_page",
"=",
"self",
".",
"page",
".",
"number",
"+",
"adjacent_pages",
"+",
"1",
"if",
"end_page",
">=",
"self",
".",
"paginator",
".",
"num_pages",
"-",
"1",
":",
"end_page",
"=",
"self",
".",
"paginator",
".",
"num_pages",
"+",
"1",
"# Paging vars used in template",
"self",
".",
"page_numbers",
"=",
"[",
"n",
"for",
"n",
"in",
"range",
"(",
"start_page",
",",
"end_page",
")",
"if",
"0",
"<",
"n",
"<=",
"self",
".",
"paginator",
".",
"num_pages",
"]",
"self",
".",
"show_first",
"=",
"1",
"not",
"in",
"self",
".",
"page_numbers",
"self",
".",
"show_last",
"=",
"self",
".",
"paginator",
".",
"num_pages",
"not",
"in",
"self",
".",
"page_numbers"
] |
Paginates the table using a paginator and creates a ``page`` property
containing information for the current page.
:type klass: Paginator class
:param klass: a paginator class to paginate the results
:type per_page: `int`
:param per_page: how many records are displayed on each page
:type page: `int`
:param page: which page should be displayed.
Extra arguments are passed to the paginator.
Pagination exceptions (`~django.core.paginator.EmptyPage` and
`~django.core.paginator.PageNotAnInteger`) may be raised from this
method and should be handled by the caller.
|
[
"Paginates",
"the",
"table",
"using",
"a",
"paginator",
"and",
"creates",
"a",
"page",
"property",
"containing",
"information",
"for",
"the",
"current",
"page",
"."
] |
0f434783026f1b9ce16a416fa496d76921fe49ca
|
https://github.com/fabric-bolt/fabric-bolt/blob/0f434783026f1b9ce16a416fa496d76921fe49ca/fabric_bolt/core/mixins/tables.py#L79-L121
|
train
|
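The page-window arithmetic in paginate is self-contained and easy to trace by hand. With ten pages, adjacent_pages=1 and the user on page 5 (illustrative values):

num_pages, adjacent_pages, current = 10, 1, 5

start_page = max(current - adjacent_pages, 1)
if start_page <= 3:            # close to the front: no leading ellipsis
    start_page = 1
end_page = current + adjacent_pages + 1
if end_page >= num_pages - 1:  # close to the back: no trailing ellipsis
    end_page = num_pages + 1

page_numbers = [n for n in range(start_page, end_page) if 0 < n <= num_pages]
print(page_numbers)                   # [4, 5, 6]
print(1 not in page_numbers)          # True -> render a "first page" link
print(num_pages not in page_numbers)  # True -> render a "last page" link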
fabric-bolt/fabric-bolt
|
fabric_bolt/task_runners/base.py
|
BaseTaskRunnerBackend.get_fabric_tasks
|
def get_fabric_tasks(self, project):
"""
Generate a list of fabric tasks that are available
"""
cache_key = 'project_{}_fabfile_tasks'.format(project.pk)
cached_result = cache.get(cache_key)
if cached_result:
return cached_result
try:
fabfile_path, activate_loc = self.get_fabfile_path(project)
if activate_loc:
output = self.check_output(
'source {};fab --list --list-format=short --fabfile={}'.format(activate_loc, fabfile_path),
shell=True
)
else:
output = self.check_output(
'fab --list --list-format=short --fabfile={}'.format(fabfile_path),
shell=True
)
lines = output.splitlines()
tasks = []
for line in lines:
name = line.strip()
if activate_loc:
o = self.check_output(
'source {};fab --display={} --fabfile={}'.format(activate_loc, name, fabfile_path),
shell=True
)
else:
o = self.check_output(
['fab', '--display={}'.format(name), '--fabfile={}'.format(fabfile_path)]
)
tasks.append(self.parse_task_details(name, o))
cache.set(cache_key, tasks, settings.FABRIC_TASK_CACHE_TIMEOUT)
except Exception as e:
tasks = []
return tasks
|
python
|
def get_fabric_tasks(self, project):
"""
Generate a list of fabric tasks that are available
"""
cache_key = 'project_{}_fabfile_tasks'.format(project.pk)
cached_result = cache.get(cache_key)
if cached_result:
return cached_result
try:
fabfile_path, activate_loc = self.get_fabfile_path(project)
if activate_loc:
output = self.check_output(
'source {};fab --list --list-format=short --fabfile={}'.format(activate_loc, fabfile_path),
shell=True
)
else:
output = self.check_output(
'fab --list --list-format=short --fabfile={}'.format(fabfile_path),
shell=True
)
lines = output.splitlines()
tasks = []
for line in lines:
name = line.strip()
if activate_loc:
o = self.check_output(
'source {};fab --display={} --fabfile={}'.format(activate_loc, name, fabfile_path),
shell=True
)
else:
o = self.check_output(
['fab', '--display={}'.format(name), '--fabfile={}'.format(fabfile_path)]
)
tasks.append(self.parse_task_details(name, o))
cache.set(cache_key, tasks, settings.FABRIC_TASK_CACHE_TIMEOUT)
except Exception as e:
tasks = []
return tasks
|
[
"def",
"get_fabric_tasks",
"(",
"self",
",",
"project",
")",
":",
"cache_key",
"=",
"'project_{}_fabfile_tasks'",
".",
"format",
"(",
"project",
".",
"pk",
")",
"cached_result",
"=",
"cache",
".",
"get",
"(",
"cache_key",
")",
"if",
"cached_result",
":",
"return",
"cached_result",
"try",
":",
"fabfile_path",
",",
"activate_loc",
"=",
"self",
".",
"get_fabfile_path",
"(",
"project",
")",
"if",
"activate_loc",
":",
"output",
"=",
"self",
".",
"check_output",
"(",
"'source {};fab --list --list-format=short --fabfile={}'",
".",
"format",
"(",
"activate_loc",
",",
"fabfile_path",
")",
",",
"shell",
"=",
"True",
")",
"else",
":",
"output",
"=",
"self",
".",
"check_output",
"(",
"'fab --list --list-format=short --fabfile={}'",
".",
"format",
"(",
"fabfile_path",
")",
",",
"shell",
"=",
"True",
")",
"lines",
"=",
"output",
".",
"splitlines",
"(",
")",
"tasks",
"=",
"[",
"]",
"for",
"line",
"in",
"lines",
":",
"name",
"=",
"line",
".",
"strip",
"(",
")",
"if",
"activate_loc",
":",
"o",
"=",
"self",
".",
"check_output",
"(",
"'source {};fab --display={} --fabfile={}'",
".",
"format",
"(",
"activate_loc",
",",
"name",
",",
"fabfile_path",
")",
",",
"shell",
"=",
"True",
")",
"else",
":",
"o",
"=",
"self",
".",
"check_output",
"(",
"[",
"'fab'",
",",
"'--display={}'",
".",
"format",
"(",
"name",
")",
",",
"'--fabfile={}'",
".",
"format",
"(",
"fabfile_path",
")",
"]",
")",
"tasks",
".",
"append",
"(",
"self",
".",
"parse_task_details",
"(",
"name",
",",
"o",
")",
")",
"cache",
".",
"set",
"(",
"cache_key",
",",
"tasks",
",",
"settings",
".",
"FABRIC_TASK_CACHE_TIMEOUT",
")",
"except",
"Exception",
"as",
"e",
":",
"tasks",
"=",
"[",
"]",
"return",
"tasks"
] |
Generate a list of fabric tasks that are available
|
[
"Generate",
"a",
"list",
"of",
"fabric",
"tasks",
"that",
"are",
"available"
] |
0f434783026f1b9ce16a416fa496d76921fe49ca
|
https://github.com/fabric-bolt/fabric-bolt/blob/0f434783026f1b9ce16a416fa496d76921fe49ca/fabric_bolt/task_runners/base.py#L143-L188
|
train
|
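get_fabric_tasks is a cache-aside lookup wrapped around an expensive subprocess call. The shape of that pattern, with an in-memory dict standing in for Django's cache backend (names are hypothetical):

_cache = {}

def get_or_compute(key, compute, timeout=None):
    cached = _cache.get(key)
    if cached:  # like the original, an empty result is treated as a miss
        return cached
    value = compute()
    _cache[key] = value  # a real backend would also honor `timeout`
    return value

tasks = get_or_compute('project_1_fabfile_tasks', lambda: ['deploy', 'rollback'])
print(tasks)  # ['deploy', 'rollback']; a second call would hit the cache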
fabric-bolt/fabric-bolt
|
fabric_bolt/projects/views.py
|
ProjectCopy.get_initial
|
def get_initial(self):
"""
Returns the initial data to use for forms on this view.
"""
initial = super(ProjectCopy, self).get_initial()
if self.copy_object:
initial.update({'name': '%s copy' % self.copy_object.name,
'description': self.copy_object.description,
'use_repo_fabfile': self.copy_object.use_repo_fabfile,
'fabfile_requirements': self.copy_object.fabfile_requirements,
'repo_url': self.copy_object.repo_url})
return initial
|
python
|
def get_initial(self):
"""
Returns the initial data to use for forms on this view.
"""
initial = super(ProjectCopy, self).get_initial()
if self.copy_object:
initial.update({'name': '%s copy' % self.copy_object.name,
'description': self.copy_object.description,
'use_repo_fabfile': self.copy_object.use_repo_fabfile,
'fabfile_requirements': self.copy_object.fabfile_requirements,
'repo_url': self.copy_object.repo_url})
return initial
|
[
"def",
"get_initial",
"(",
"self",
")",
":",
"initial",
"=",
"super",
"(",
"ProjectCopy",
",",
"self",
")",
".",
"get_initial",
"(",
")",
"if",
"self",
".",
"copy_object",
":",
"initial",
".",
"update",
"(",
"{",
"'name'",
":",
"'%s copy'",
"%",
"self",
".",
"copy_object",
".",
"name",
",",
"'description'",
":",
"self",
".",
"copy_object",
".",
"description",
",",
"'use_repo_fabfile'",
":",
"self",
".",
"copy_object",
".",
"use_repo_fabfile",
",",
"'fabfile_requirements'",
":",
"self",
".",
"copy_object",
".",
"fabfile_requirements",
",",
"'repo_url'",
":",
"self",
".",
"copy_object",
".",
"repo_url",
"}",
")",
"return",
"initial"
] |
Returns the initial data to use for forms on this view.
|
[
"Returns",
"the",
"initial",
"data",
"to",
"use",
"for",
"forms",
"on",
"this",
"view",
"."
] |
0f434783026f1b9ce16a416fa496d76921fe49ca
|
https://github.com/fabric-bolt/fabric-bolt/blob/0f434783026f1b9ce16a416fa496d76921fe49ca/fabric_bolt/projects/views.py#L101-L112
|
train
|
fabric-bolt/fabric-bolt
|
fabric_bolt/projects/views.py
|
ProjectConfigurationDelete.get_success_url
|
def get_success_url(self):
"""Get the url depending on what type of configuration I deleted."""
if self.stage_id:
url = reverse('projects_stage_view', args=(self.project_id, self.stage_id))
else:
url = reverse('projects_project_view', args=(self.project_id,))
return url
|
python
|
def get_success_url(self):
"""Get the url depending on what type of configuration I deleted."""
if self.stage_id:
url = reverse('projects_stage_view', args=(self.project_id, self.stage_id))
else:
url = reverse('projects_project_view', args=(self.project_id,))
return url
|
[
"def",
"get_success_url",
"(",
"self",
")",
":",
"if",
"self",
".",
"stage_id",
":",
"url",
"=",
"reverse",
"(",
"'projects_stage_view'",
",",
"args",
"=",
"(",
"self",
".",
"project_id",
",",
"self",
".",
"stage_id",
")",
")",
"else",
":",
"url",
"=",
"reverse",
"(",
"'projects_project_view'",
",",
"args",
"=",
"(",
"self",
".",
"project_id",
",",
")",
")",
"return",
"url"
] |
Get the url depending on what type of configuration I deleted.
|
[
"Get",
"the",
"url",
"depending",
"on",
"what",
"type",
"of",
"configuration",
"I",
"deleted",
"."
] |
0f434783026f1b9ce16a416fa496d76921fe49ca
|
https://github.com/fabric-bolt/fabric-bolt/blob/0f434783026f1b9ce16a416fa496d76921fe49ca/fabric_bolt/projects/views.py#L300-L308
|
train
|
fabric-bolt/fabric-bolt
|
fabric_bolt/hosts/utils.py
|
create_ssh_config
|
def create_ssh_config(remote_user='root', name='Auto Generated SSH Key',
file_name='fabricbolt_private.key', email='[email protected]', public_key_text=None,
private_key_text=None):
"""Create SSH Key"""
if not private_key_text and not public_key_text:
key = RSA.generate(2048)
pubkey = key.publickey()
private_key_text = key.exportKey('PEM')
public_key_text = pubkey.exportKey('OpenSSH')
ssh_config = models.SSHConfig()
ssh_config.name = name
ssh_config.private_key_file.save(file_name, ContentFile(private_key_text))
ssh_config.public_key = '{} {}'.format(public_key_text, email)
ssh_config.remote_user = remote_user
ssh_config.save()
return ssh_config
|
python
|
def create_ssh_config(remote_user='root', name='Auto Generated SSH Key',
file_name='fabricbolt_private.key', email='[email protected]', public_key_text=None,
private_key_text=None):
"""Create SSH Key"""
if not private_key_text and not public_key_text:
key = RSA.generate(2048)
pubkey = key.publickey()
private_key_text = key.exportKey('PEM')
public_key_text = pubkey.exportKey('OpenSSH')
ssh_config = models.SSHConfig()
ssh_config.name = name
ssh_config.private_key_file.save(file_name, ContentFile(private_key_text))
ssh_config.public_key = '{} {}'.format(public_key_text, email)
ssh_config.remote_user = remote_user
ssh_config.save()
return ssh_config
|
[
"def",
"create_ssh_config",
"(",
"remote_user",
"=",
"'root'",
",",
"name",
"=",
"'Auto Generated SSH Key'",
",",
"file_name",
"=",
"'fabricbolt_private.key'",
",",
"email",
"=",
"'[email protected]'",
",",
"public_key_text",
"=",
"None",
",",
"private_key_text",
"=",
"None",
")",
":",
"if",
"not",
"private_key_text",
"and",
"not",
"public_key_text",
":",
"key",
"=",
"RSA",
".",
"generate",
"(",
"2048",
")",
"pubkey",
"=",
"key",
".",
"publickey",
"(",
")",
"private_key_text",
"=",
"key",
".",
"exportKey",
"(",
"'PEM'",
")",
"public_key_text",
"=",
"pubkey",
".",
"exportKey",
"(",
"'OpenSSH'",
")",
"ssh_config",
"=",
"models",
".",
"SSHConfig",
"(",
")",
"ssh_config",
".",
"name",
"=",
"name",
"ssh_config",
".",
"private_key_file",
".",
"save",
"(",
"file_name",
",",
"ContentFile",
"(",
"private_key_text",
")",
")",
"ssh_config",
".",
"public_key",
"=",
"'{} {}'",
".",
"format",
"(",
"public_key_text",
",",
"email",
")",
"ssh_config",
".",
"remote_user",
"=",
"remote_user",
"ssh_config",
".",
"save",
"(",
")",
"return",
"ssh_config"
] |
Create SSH Key
|
[
"Create",
"SSH",
"Key"
] |
0f434783026f1b9ce16a416fa496d76921fe49ca
|
https://github.com/fabric-bolt/fabric-bolt/blob/0f434783026f1b9ce16a416fa496d76921fe49ca/fabric_bolt/hosts/utils.py#L7-L26
|
train
|
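RSA.generate/exportKey is the Crypto.PublicKey.RSA API from PyCrypto (and its maintained drop-in replacement, pycryptodome). A standalone sketch of the keypair branch; note that under pycryptodome on Python 3 exportKey returns bytes, so the '{} {}'.format(...) above would need a decode:

from Crypto.PublicKey import RSA  # provided by pycryptodome (or legacy PyCrypto)

key = RSA.generate(2048)
private_key_text = key.exportKey('PEM')                 # PEM-encoded private key
public_key_text = key.publickey().exportKey('OpenSSH')  # "ssh-rsa AAAA..." line

print(public_key_text.decode()[:7])  # ssh-rsa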
softvar/json2html
|
json2html/jsonconv.py
|
Json2Html.convert
|
def convert(self, json="", table_attributes='border="1"', clubbing=True, encode=False, escape=True):
"""
Convert JSON to HTML Table format
"""
# table attributes such as class, id, data-attr-*, etc.
# eg: table_attributes = 'class = "table table-bordered sortable"'
self.table_init_markup = "<table %s>" % table_attributes
self.clubbing = clubbing
self.escape = escape
json_input = None
if not json:
json_input = {}
elif type(json) in text_types:
try:
json_input = json_parser.loads(json, object_pairs_hook=OrderedDict)
except ValueError as e:
#so the string passed here is actually not a json string
# - let's analyze whether we want to pass on the error or use the string as-is as a text node
if u"Expecting property name" in text(e):
#if this specific json loads error is raised, then the user probably actually wanted to pass json, but made a mistake
raise e
json_input = json
else:
json_input = json
converted = self.convert_json_node(json_input)
if encode:
return converted.encode('ascii', 'xmlcharrefreplace')
return converted
|
python
|
def convert(self, json="", table_attributes='border="1"', clubbing=True, encode=False, escape=True):
"""
Convert JSON to HTML Table format
"""
# table attributes such as class, id, data-attr-*, etc.
# eg: table_attributes = 'class = "table table-bordered sortable"'
self.table_init_markup = "<table %s>" % table_attributes
self.clubbing = clubbing
self.escape = escape
json_input = None
if not json:
json_input = {}
elif type(json) in text_types:
try:
json_input = json_parser.loads(json, object_pairs_hook=OrderedDict)
except ValueError as e:
#so the string passed here is actually not a json string
# - let's analyze whether we want to pass on the error or use the string as-is as a text node
if u"Expecting property name" in text(e):
#if this specific json loads error is raised, then the user probably actually wanted to pass json, but made a mistake
raise e
json_input = json
else:
json_input = json
converted = self.convert_json_node(json_input)
if encode:
return converted.encode('ascii', 'xmlcharrefreplace')
return converted
|
[
"def",
"convert",
"(",
"self",
",",
"json",
"=",
"\"\"",
",",
"table_attributes",
"=",
"'border=\"1\"'",
",",
"clubbing",
"=",
"True",
",",
"encode",
"=",
"False",
",",
"escape",
"=",
"True",
")",
":",
"# table attributes such as class, id, data-attr-*, etc.",
"# eg: table_attributes = 'class = \"table table-bordered sortable\"'",
"self",
".",
"table_init_markup",
"=",
"\"<table %s>\"",
"%",
"table_attributes",
"self",
".",
"clubbing",
"=",
"clubbing",
"self",
".",
"escape",
"=",
"escape",
"json_input",
"=",
"None",
"if",
"not",
"json",
":",
"json_input",
"=",
"{",
"}",
"elif",
"type",
"(",
"json",
")",
"in",
"text_types",
":",
"try",
":",
"json_input",
"=",
"json_parser",
".",
"loads",
"(",
"json",
",",
"object_pairs_hook",
"=",
"OrderedDict",
")",
"except",
"ValueError",
"as",
"e",
":",
"#so the string passed here is actually not a json string",
"# - let's analyze whether we want to pass on the error or use the string as-is as a text node",
"if",
"u\"Expecting property name\"",
"in",
"text",
"(",
"e",
")",
":",
"#if this specific json loads error is raised, then the user probably actually wanted to pass json, but made a mistake",
"raise",
"e",
"json_input",
"=",
"json",
"else",
":",
"json_input",
"=",
"json",
"converted",
"=",
"self",
".",
"convert_json_node",
"(",
"json_input",
")",
"if",
"encode",
":",
"return",
"converted",
".",
"encode",
"(",
"'ascii'",
",",
"'xmlcharrefreplace'",
")",
"return",
"converted"
] |
Convert JSON to HTML Table format
|
[
"Convert",
"JSON",
"to",
"HTML",
"Table",
"format"
] |
7070939172f1afd5c11c664e6cfece280cfde7e6
|
https://github.com/softvar/json2html/blob/7070939172f1afd5c11c664e6cfece280cfde7e6/json2html/jsonconv.py#L37-L64
|
train
|
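In normal use convert() is reached through the module-level json2html instance the package exports. A small usage sketch (output abbreviated; the default table_attributes gives border="1"):

from json2html import json2html

html_table = json2html.convert(json={"name": "Ada", "langs": ["py", "js"]})
print(html_table.startswith('<table border="1">'))  # True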
softvar/json2html
|
json2html/jsonconv.py
|
Json2Html.column_headers_from_list_of_dicts
|
def column_headers_from_list_of_dicts(self, json_input):
"""
This method is required to implement clubbing.
It tries to come up with column headers for your input
"""
if not json_input \
or not hasattr(json_input, '__getitem__') \
or not hasattr(json_input[0], 'keys'):
return None
column_headers = json_input[0].keys()
for entry in json_input:
if not hasattr(entry, 'keys') \
or not hasattr(entry, '__iter__') \
or len(entry.keys()) != len(column_headers):
return None
for header in column_headers:
if header not in entry:
return None
return column_headers
|
python
|
def column_headers_from_list_of_dicts(self, json_input):
"""
This method is required to implement clubbing.
It tries to come up with column headers for your input
"""
if not json_input \
or not hasattr(json_input, '__getitem__') \
or not hasattr(json_input[0], 'keys'):
return None
column_headers = json_input[0].keys()
for entry in json_input:
if not hasattr(entry, 'keys') \
or not hasattr(entry, '__iter__') \
or len(entry.keys()) != len(column_headers):
return None
for header in column_headers:
if header not in entry:
return None
return column_headers
|
[
"def",
"column_headers_from_list_of_dicts",
"(",
"self",
",",
"json_input",
")",
":",
"if",
"not",
"json_input",
"or",
"not",
"hasattr",
"(",
"json_input",
",",
"'__getitem__'",
")",
"or",
"not",
"hasattr",
"(",
"json_input",
"[",
"0",
"]",
",",
"'keys'",
")",
":",
"return",
"None",
"column_headers",
"=",
"json_input",
"[",
"0",
"]",
".",
"keys",
"(",
")",
"for",
"entry",
"in",
"json_input",
":",
"if",
"not",
"hasattr",
"(",
"entry",
",",
"'keys'",
")",
"or",
"not",
"hasattr",
"(",
"entry",
",",
"'__iter__'",
")",
"or",
"len",
"(",
"entry",
".",
"keys",
"(",
")",
")",
"!=",
"len",
"(",
"column_headers",
")",
":",
"return",
"None",
"for",
"header",
"in",
"column_headers",
":",
"if",
"header",
"not",
"in",
"entry",
":",
"return",
"None",
"return",
"column_headers"
] |
This method is required to implement clubbing.
It tries to come up with column headers for your input
|
[
"This",
"method",
"is",
"required",
"to",
"implement",
"clubbing",
".",
"It",
"tries",
"to",
"come",
"up",
"with",
"column",
"headers",
"for",
"your",
"input"
] |
7070939172f1afd5c11c664e6cfece280cfde7e6
|
https://github.com/softvar/json2html/blob/7070939172f1afd5c11c664e6cfece280cfde7e6/json2html/jsonconv.py#L66-L84
|
train
|
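The uniformity rule is easy to demonstrate with plain data: clubbing requires every entry to expose exactly the same key set, otherwise the method bails out with None. A standalone restatement of the check:

def headers_or_none(rows):
    # Same contract as above: the first entry's keys become the candidate
    # headers, and any entry with a different key set disqualifies the list.
    if not rows or not hasattr(rows[0], 'keys'):
        return None
    cols = list(rows[0].keys())
    for row in rows:
        if not hasattr(row, 'keys') or len(row.keys()) != len(cols) \
                or any(c not in row for c in cols):
            return None
    return cols

print(headers_or_none([{"a": 1, "b": 2}, {"a": 5, "b": 6}]))  # ['a', 'b']
print(headers_or_none([{"a": 1, "b": 2}, {"a": 5}]))          # None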
softvar/json2html
|
json2html/jsonconv.py
|
Json2Html.convert_list
|
def convert_list(self, list_input):
"""
Iterate over the JSON list and process it
    to generate either an HTML table or an HTML list, depending on what's inside.
    If some key has an array of objects and all the keys are the same,
    instead of creating a new row for each such entry,
    such values are clubbed, which makes for a more sensible and more readable table.
@example:
jsonObject = {
"sampleData": [
{"a":1, "b":2, "c":3},
{"a":5, "b":6, "c":7}
]
}
OUTPUT:
_____________________________
| | | | |
| | a | c | b |
| sampleData |---|---|---|
| | 1 | 3 | 2 |
| | 5 | 7 | 6 |
-----------------------------
@contributed by: @muellermichel
"""
if not list_input:
return ""
converted_output = ""
column_headers = None
if self.clubbing:
column_headers = self.column_headers_from_list_of_dicts(list_input)
if column_headers is not None:
converted_output += self.table_init_markup
converted_output += '<thead>'
converted_output += '<tr><th>' + '</th><th>'.join(column_headers) + '</th></tr>'
converted_output += '</thead>'
converted_output += '<tbody>'
for list_entry in list_input:
converted_output += '<tr><td>'
converted_output += '</td><td>'.join([self.convert_json_node(list_entry[column_header]) for column_header in
column_headers])
converted_output += '</td></tr>'
converted_output += '</tbody>'
converted_output += '</table>'
return converted_output
#so you don't want or need clubbing eh? This makes @muellermichel very sad... ;(
#alright, let's fall back to a basic list here...
converted_output = '<ul><li>'
converted_output += '</li><li>'.join([self.convert_json_node(child) for child in list_input])
converted_output += '</li></ul>'
return converted_output
|
python
|
def convert_list(self, list_input):
"""
Iterate over the JSON list and process it
    to generate either an HTML table or an HTML list, depending on what's inside.
    If some key has an array of objects and all the keys are the same,
    instead of creating a new row for each such entry,
    such values are clubbed, which makes for a more sensible and more readable table.
@example:
jsonObject = {
"sampleData": [
{"a":1, "b":2, "c":3},
{"a":5, "b":6, "c":7}
]
}
OUTPUT:
_____________________________
| | | | |
| | a | c | b |
| sampleData |---|---|---|
| | 1 | 3 | 2 |
| | 5 | 7 | 6 |
-----------------------------
@contributed by: @muellermichel
"""
if not list_input:
return ""
converted_output = ""
column_headers = None
if self.clubbing:
column_headers = self.column_headers_from_list_of_dicts(list_input)
if column_headers is not None:
converted_output += self.table_init_markup
converted_output += '<thead>'
converted_output += '<tr><th>' + '</th><th>'.join(column_headers) + '</th></tr>'
converted_output += '</thead>'
converted_output += '<tbody>'
for list_entry in list_input:
converted_output += '<tr><td>'
converted_output += '</td><td>'.join([self.convert_json_node(list_entry[column_header]) for column_header in
column_headers])
converted_output += '</td></tr>'
converted_output += '</tbody>'
converted_output += '</table>'
return converted_output
#so you don't want or need clubbing eh? This makes @muellermichel very sad... ;(
#alright, let's fall back to a basic list here...
converted_output = '<ul><li>'
converted_output += '</li><li>'.join([self.convert_json_node(child) for child in list_input])
converted_output += '</li></ul>'
return converted_output
|
[
"def",
"convert_list",
"(",
"self",
",",
"list_input",
")",
":",
"if",
"not",
"list_input",
":",
"return",
"\"\"",
"converted_output",
"=",
"\"\"",
"column_headers",
"=",
"None",
"if",
"self",
".",
"clubbing",
":",
"column_headers",
"=",
"self",
".",
"column_headers_from_list_of_dicts",
"(",
"list_input",
")",
"if",
"column_headers",
"is",
"not",
"None",
":",
"converted_output",
"+=",
"self",
".",
"table_init_markup",
"converted_output",
"+=",
"'<thead>'",
"converted_output",
"+=",
"'<tr><th>'",
"+",
"'</th><th>'",
".",
"join",
"(",
"column_headers",
")",
"+",
"'</th></tr>'",
"converted_output",
"+=",
"'</thead>'",
"converted_output",
"+=",
"'<tbody>'",
"for",
"list_entry",
"in",
"list_input",
":",
"converted_output",
"+=",
"'<tr><td>'",
"converted_output",
"+=",
"'</td><td>'",
".",
"join",
"(",
"[",
"self",
".",
"convert_json_node",
"(",
"list_entry",
"[",
"column_header",
"]",
")",
"for",
"column_header",
"in",
"column_headers",
"]",
")",
"converted_output",
"+=",
"'</td></tr>'",
"converted_output",
"+=",
"'</tbody>'",
"converted_output",
"+=",
"'</table>'",
"return",
"converted_output",
"#so you don't want or need clubbing eh? This makes @muellermichel very sad... ;(",
"#alright, let's fall back to a basic list here...",
"converted_output",
"=",
"'<ul><li>'",
"converted_output",
"+=",
"'</li><li>'",
".",
"join",
"(",
"[",
"self",
".",
"convert_json_node",
"(",
"child",
")",
"for",
"child",
"in",
"list_input",
"]",
")",
"converted_output",
"+=",
"'</li></ul>'",
"return",
"converted_output"
] |
Iterate over the JSON list and process it
to generate either an HTML table or an HTML list, depending on what's inside.
If some key has an array of objects and all the keys are the same,
instead of creating a new row for each such entry,
such values are clubbed, which makes for a more sensible and more readable table.
@example:
jsonObject = {
"sampleData": [
{"a":1, "b":2, "c":3},
{"a":5, "b":6, "c":7}
]
}
OUTPUT:
_____________________________
| | | | |
| | a | c | b |
| sampleData |---|---|---|
| | 1 | 3 | 2 |
| | 5 | 7 | 6 |
-----------------------------
@contributed by: @muellermichel
|
[
"Iterate",
"over",
"the",
"JSON",
"list",
"and",
"process",
"it",
"to",
"generate",
"either",
"an",
"HTML",
"table",
"or",
"a",
"HTML",
"list",
"depending",
"on",
"what",
"s",
"inside",
".",
"If",
"suppose",
"some",
"key",
"has",
"array",
"of",
"objects",
"and",
"all",
"the",
"keys",
"are",
"same",
"instead",
"of",
"creating",
"a",
"new",
"row",
"for",
"each",
"such",
"entry",
"club",
"such",
"values",
"thus",
"it",
"makes",
"more",
"sense",
"and",
"more",
"readable",
"table",
"."
] |
7070939172f1afd5c11c664e6cfece280cfde7e6
|
https://github.com/softvar/json2html/blob/7070939172f1afd5c11c664e6cfece280cfde7e6/json2html/jsonconv.py#L105-L157
|
train
|
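The clubbed-versus-plain split can be observed from the outside (assuming the json2html package is installed): uniform keys yield one table with a <thead>, ragged keys fall back to a <ul>:

from json2html import json2html

uniform = [{"a": 1, "b": 2}, {"a": 5, "b": 6}]
ragged = [{"a": 1, "b": 2}, {"a": 5}]

print('<thead>' in json2html.convert(json=uniform))  # True  (clubbed table)
print('<ul>' in json2html.convert(json=ragged))      # True  (plain-list fallback)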
softvar/json2html
|
json2html/jsonconv.py
|
Json2Html.convert_object
|
def convert_object(self, json_input):
"""
Iterate over the JSON object and process it
to generate the super awesome HTML Table format
"""
if not json_input:
return "" #avoid empty tables
converted_output = self.table_init_markup + "<tr>"
converted_output += "</tr><tr>".join([
"<th>%s</th><td>%s</td>" %(
self.convert_json_node(k),
self.convert_json_node(v)
)
for k, v in json_input.items()
])
converted_output += '</tr></table>'
return converted_output
|
python
|
def convert_object(self, json_input):
"""
Iterate over the JSON object and process it
to generate the super awesome HTML Table format
"""
if not json_input:
return "" #avoid empty tables
converted_output = self.table_init_markup + "<tr>"
converted_output += "</tr><tr>".join([
"<th>%s</th><td>%s</td>" %(
self.convert_json_node(k),
self.convert_json_node(v)
)
for k, v in json_input.items()
])
converted_output += '</tr></table>'
return converted_output
|
[
"def",
"convert_object",
"(",
"self",
",",
"json_input",
")",
":",
"if",
"not",
"json_input",
":",
"return",
"\"\"",
"#avoid empty tables",
"converted_output",
"=",
"self",
".",
"table_init_markup",
"+",
"\"<tr>\"",
"converted_output",
"+=",
"\"</tr><tr>\"",
".",
"join",
"(",
"[",
"\"<th>%s</th><td>%s</td>\"",
"%",
"(",
"self",
".",
"convert_json_node",
"(",
"k",
")",
",",
"self",
".",
"convert_json_node",
"(",
"v",
")",
")",
"for",
"k",
",",
"v",
"in",
"json_input",
".",
"items",
"(",
")",
"]",
")",
"converted_output",
"+=",
"'</tr></table>'",
"return",
"converted_output"
] |
Iterate over the JSON object and process it
to generate the super awesome HTML Table format
|
[
"Iterate",
"over",
"the",
"JSON",
"object",
"and",
"process",
"it",
"to",
"generate",
"the",
"super",
"awesome",
"HTML",
"Table",
"format"
] |
7070939172f1afd5c11c664e6cfece280cfde7e6
|
https://github.com/softvar/json2html/blob/7070939172f1afd5c11c664e6cfece280cfde7e6/json2html/jsonconv.py#L159-L175
|
train
|
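The row-building trick in convert_object is worth spelling out: each key/value pair becomes a pre-rendered row body, and a single join() with '</tr><tr>' stitches them together without an explicit loop. In isolation:

rows = ["<th>%s</th><td>%s</td>" % (k, v) for k, v in {"a": 1, "b": 2}.items()]
print('<table border="1"><tr>' + "</tr><tr>".join(rows) + '</tr></table>')
# <table border="1"><tr><th>a</th><td>1</td></tr><tr><th>b</th><td>2</td></tr></table>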
philippelt/netatmo-api-python
|
lnetatmo.py
|
HomeData.personsAtHome
|
def personsAtHome(self, home=None):
"""
Return the list of known persons who are currently at home
"""
if not home: home = self.default_home
home_data = self.homeByName(home)
atHome = []
for p in home_data['persons']:
#Only check known persons
if 'pseudo' in p:
if not p["out_of_sight"]:
atHome.append(p['pseudo'])
return atHome
|
python
|
def personsAtHome(self, home=None):
"""
Return the list of known persons who are currently at home
"""
if not home: home = self.default_home
home_data = self.homeByName(home)
atHome = []
for p in home_data['persons']:
#Only check known persons
if 'pseudo' in p:
if not p["out_of_sight"]:
atHome.append(p['pseudo'])
return atHome
|
[
"def",
"personsAtHome",
"(",
"self",
",",
"home",
"=",
"None",
")",
":",
"if",
"not",
"home",
":",
"home",
"=",
"self",
".",
"default_home",
"home_data",
"=",
"self",
".",
"homeByName",
"(",
"home",
")",
"atHome",
"=",
"[",
"]",
"for",
"p",
"in",
"home_data",
"[",
"'persons'",
"]",
":",
"#Only check known persons",
"if",
"'pseudo'",
"in",
"p",
":",
"if",
"not",
"p",
"[",
"\"out_of_sight\"",
"]",
":",
"atHome",
".",
"append",
"(",
"p",
"[",
"'pseudo'",
"]",
")",
"return",
"atHome"
] |
Return the list of known persons who are currently at home
|
[
"Return",
"the",
"list",
"of",
"known",
"persons",
"who",
"are",
"currently",
"at",
"home"
] |
d749fca3637c07c2943aba7992f683fff1812f77
|
https://github.com/philippelt/netatmo-api-python/blob/d749fca3637c07c2943aba7992f683fff1812f77/lnetatmo.py#L566-L578
|
train
|
philippelt/netatmo-api-python
|
lnetatmo.py
|
HomeData.getProfileImage
|
def getProfileImage(self, name):
"""
Retrieve the face of a given person
"""
for p in self.persons:
if 'pseudo' in self.persons[p]:
if name == self.persons[p]['pseudo']:
image_id = self.persons[p]['face']['id']
key = self.persons[p]['face']['key']
return self.getCameraPicture(image_id, key)
return None, None
|
python
|
def getProfileImage(self, name):
"""
Retrieve the face of a given person
"""
for p in self.persons:
if 'pseudo' in self.persons[p]:
if name == self.persons[p]['pseudo']:
image_id = self.persons[p]['face']['id']
key = self.persons[p]['face']['key']
return self.getCameraPicture(image_id, key)
return None, None
|
[
"def",
"getProfileImage",
"(",
"self",
",",
"name",
")",
":",
"for",
"p",
"in",
"self",
".",
"persons",
":",
"if",
"'pseudo'",
"in",
"self",
".",
"persons",
"[",
"p",
"]",
":",
"if",
"name",
"==",
"self",
".",
"persons",
"[",
"p",
"]",
"[",
"'pseudo'",
"]",
":",
"image_id",
"=",
"self",
".",
"persons",
"[",
"p",
"]",
"[",
"'face'",
"]",
"[",
"'id'",
"]",
"key",
"=",
"self",
".",
"persons",
"[",
"p",
"]",
"[",
"'face'",
"]",
"[",
"'key'",
"]",
"return",
"self",
".",
"getCameraPicture",
"(",
"image_id",
",",
"key",
")",
"return",
"None",
",",
"None"
] |
Retrieve the face of a given person
|
[
"Retrieve",
"the",
"face",
"of",
"a",
"given",
"person"
] |
d749fca3637c07c2943aba7992f683fff1812f77
|
https://github.com/philippelt/netatmo-api-python/blob/d749fca3637c07c2943aba7992f683fff1812f77/lnetatmo.py#L593-L603
|
train
|
philippelt/netatmo-api-python
|
lnetatmo.py
|
HomeData.updateEvent
|
def updateEvent(self, event=None, home=None):
"""
    Update the list of events with the latest ones
"""
if not home: home=self.default_home
if not event:
        #If no event is provided, we need to retrieve the oldest of the last events seen by each camera
listEvent = dict()
for cam_id in self.lastEvent:
listEvent[self.lastEvent[cam_id]['time']] = self.lastEvent[cam_id]
event = listEvent[sorted(listEvent)[0]]
home_data = self.homeByName(home)
postParams = {
"access_token" : self.getAuthToken,
"home_id" : home_data['id'],
"event_id" : event['id']
}
resp = postRequest(_GETEVENTSUNTIL_REQ, postParams)
eventList = resp['body']['events_list']
for e in eventList:
self.events[ e['camera_id'] ][ e['time'] ] = e
for camera in self.events:
self.lastEvent[camera]=self.events[camera][sorted(self.events[camera])[-1]]
|
python
|
def updateEvent(self, event=None, home=None):
"""
    Update the list of events with the latest ones
"""
if not home: home=self.default_home
if not event:
        #If no event is provided, we need to retrieve the oldest of the last events seen by each camera
listEvent = dict()
for cam_id in self.lastEvent:
listEvent[self.lastEvent[cam_id]['time']] = self.lastEvent[cam_id]
event = listEvent[sorted(listEvent)[0]]
home_data = self.homeByName(home)
postParams = {
"access_token" : self.getAuthToken,
"home_id" : home_data['id'],
"event_id" : event['id']
}
resp = postRequest(_GETEVENTSUNTIL_REQ, postParams)
eventList = resp['body']['events_list']
for e in eventList:
self.events[ e['camera_id'] ][ e['time'] ] = e
for camera in self.events:
self.lastEvent[camera]=self.events[camera][sorted(self.events[camera])[-1]]
|
[
"def",
"updateEvent",
"(",
"self",
",",
"event",
"=",
"None",
",",
"home",
"=",
"None",
")",
":",
"if",
"not",
"home",
":",
"home",
"=",
"self",
".",
"default_home",
"if",
"not",
"event",
":",
"#If not event is provided we need to retrieve the oldest of the last event seen by each camera",
"listEvent",
"=",
"dict",
"(",
")",
"for",
"cam_id",
"in",
"self",
".",
"lastEvent",
":",
"listEvent",
"[",
"self",
".",
"lastEvent",
"[",
"cam_id",
"]",
"[",
"'time'",
"]",
"]",
"=",
"self",
".",
"lastEvent",
"[",
"cam_id",
"]",
"event",
"=",
"listEvent",
"[",
"sorted",
"(",
"listEvent",
")",
"[",
"0",
"]",
"]",
"home_data",
"=",
"self",
".",
"homeByName",
"(",
"home",
")",
"postParams",
"=",
"{",
"\"access_token\"",
":",
"self",
".",
"getAuthToken",
",",
"\"home_id\"",
":",
"home_data",
"[",
"'id'",
"]",
",",
"\"event_id\"",
":",
"event",
"[",
"'id'",
"]",
"}",
"resp",
"=",
"postRequest",
"(",
"_GETEVENTSUNTIL_REQ",
",",
"postParams",
")",
"eventList",
"=",
"resp",
"[",
"'body'",
"]",
"[",
"'events_list'",
"]",
"for",
"e",
"in",
"eventList",
":",
"self",
".",
"events",
"[",
"e",
"[",
"'camera_id'",
"]",
"]",
"[",
"e",
"[",
"'time'",
"]",
"]",
"=",
"e",
"for",
"camera",
"in",
"self",
".",
"events",
":",
"self",
".",
"lastEvent",
"[",
"camera",
"]",
"=",
"self",
".",
"events",
"[",
"camera",
"]",
"[",
"sorted",
"(",
"self",
".",
"events",
"[",
"camera",
"]",
")",
"[",
"-",
"1",
"]",
"]"
] |
Update the list of events with the latest ones
|
[
"Update",
"the",
"list",
"of",
"event",
"with",
"the",
"latest",
"ones"
] |
d749fca3637c07c2943aba7992f683fff1812f77
|
https://github.com/philippelt/netatmo-api-python/blob/d749fca3637c07c2943aba7992f683fff1812f77/lnetatmo.py#L605-L628
|
train
|
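The default-event selection in updateEvent re-indexes the per-camera last events by timestamp and takes the smallest key. Isolated with illustrative data:

last_event = {
    'cam1': {'time': 1700000300, 'id': 'e3'},
    'cam2': {'time': 1700000100, 'id': 'e1'},
}

by_time = {e['time']: e for e in last_event.values()}
oldest = by_time[sorted(by_time)[0]]  # sorted() over a dict sorts its keys
print(oldest['id'])  # e1 -- the oldest of the cameras' last events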
philippelt/netatmo-api-python
|
lnetatmo.py
|
HomeData.personSeenByCamera
|
def personSeenByCamera(self, name, home=None, camera=None):
"""
Return True if a specific person has been seen by a camera
"""
try:
cam_id = self.cameraByName(camera=camera, home=home)['id']
except TypeError:
logger.warning("personSeenByCamera: Camera name or home is unknown")
return False
    #Check in the last event if someone known has been seen
if self.lastEvent[cam_id]['type'] == 'person':
person_id = self.lastEvent[cam_id]['person_id']
if 'pseudo' in self.persons[person_id]:
if self.persons[person_id]['pseudo'] == name:
return True
return False
|
python
|
def personSeenByCamera(self, name, home=None, camera=None):
"""
Return True if a specific person has been seen by a camera
"""
try:
cam_id = self.cameraByName(camera=camera, home=home)['id']
except TypeError:
logger.warning("personSeenByCamera: Camera name or home is unknown")
return False
    #Check in the last event if someone known has been seen
if self.lastEvent[cam_id]['type'] == 'person':
person_id = self.lastEvent[cam_id]['person_id']
if 'pseudo' in self.persons[person_id]:
if self.persons[person_id]['pseudo'] == name:
return True
return False
|
[
"def",
"personSeenByCamera",
"(",
"self",
",",
"name",
",",
"home",
"=",
"None",
",",
"camera",
"=",
"None",
")",
":",
"try",
":",
"cam_id",
"=",
"self",
".",
"cameraByName",
"(",
"camera",
"=",
"camera",
",",
"home",
"=",
"home",
")",
"[",
"'id'",
"]",
"except",
"TypeError",
":",
"logger",
".",
"warning",
"(",
"\"personSeenByCamera: Camera name or home is unknown\"",
")",
"return",
"False",
"#Check in the last event is someone known has been seen",
"if",
"self",
".",
"lastEvent",
"[",
"cam_id",
"]",
"[",
"'type'",
"]",
"==",
"'person'",
":",
"person_id",
"=",
"self",
".",
"lastEvent",
"[",
"cam_id",
"]",
"[",
"'person_id'",
"]",
"if",
"'pseudo'",
"in",
"self",
".",
"persons",
"[",
"person_id",
"]",
":",
"if",
"self",
".",
"persons",
"[",
"person_id",
"]",
"[",
"'pseudo'",
"]",
"==",
"name",
":",
"return",
"True",
"return",
"False"
] |
Return True if a specific person has been seen by a camera
|
[
"Return",
"True",
"if",
"a",
"specific",
"person",
"has",
"been",
"seen",
"by",
"a",
"camera"
] |
d749fca3637c07c2943aba7992f683fff1812f77
|
https://github.com/philippelt/netatmo-api-python/blob/d749fca3637c07c2943aba7992f683fff1812f77/lnetatmo.py#L630-L645
|
train
|
philippelt/netatmo-api-python
|
lnetatmo.py
|
HomeData.someoneKnownSeen
|
def someoneKnownSeen(self, home=None, camera=None):
"""
Return True if someone known has been seen
"""
try:
cam_id = self.cameraByName(camera=camera, home=home)['id']
except TypeError:
logger.warning("personSeenByCamera: Camera name or home is unknown")
return False
    #Check in the last event if someone known has been seen
if self.lastEvent[cam_id]['type'] == 'person':
if self.lastEvent[cam_id]['person_id'] in self._knownPersons():
return True
return False
|
python
|
def someoneKnownSeen(self, home=None, camera=None):
"""
Return True if someone known has been seen
"""
try:
cam_id = self.cameraByName(camera=camera, home=home)['id']
except TypeError:
logger.warning("personSeenByCamera: Camera name or home is unknown")
return False
    #Check in the last event if someone known has been seen
if self.lastEvent[cam_id]['type'] == 'person':
if self.lastEvent[cam_id]['person_id'] in self._knownPersons():
return True
return False
|
[
"def",
"someoneKnownSeen",
"(",
"self",
",",
"home",
"=",
"None",
",",
"camera",
"=",
"None",
")",
":",
"try",
":",
"cam_id",
"=",
"self",
".",
"cameraByName",
"(",
"camera",
"=",
"camera",
",",
"home",
"=",
"home",
")",
"[",
"'id'",
"]",
"except",
"TypeError",
":",
"logger",
".",
"warning",
"(",
"\"personSeenByCamera: Camera name or home is unknown\"",
")",
"return",
"False",
"#Check in the last event is someone known has been seen",
"if",
"self",
".",
"lastEvent",
"[",
"cam_id",
"]",
"[",
"'type'",
"]",
"==",
"'person'",
":",
"if",
"self",
".",
"lastEvent",
"[",
"cam_id",
"]",
"[",
"'person_id'",
"]",
"in",
"self",
".",
"_knownPersons",
"(",
")",
":",
"return",
"True",
"return",
"False"
] |
Return True if someone known has been seen
|
[
"Return",
"True",
"if",
"someone",
"known",
"has",
"been",
"seen"
] |
d749fca3637c07c2943aba7992f683fff1812f77
|
https://github.com/philippelt/netatmo-api-python/blob/d749fca3637c07c2943aba7992f683fff1812f77/lnetatmo.py#L654-L667
|
train
|
philippelt/netatmo-api-python
|
lnetatmo.py
|
HomeData.motionDetected
|
def motionDetected(self, home=None, camera=None):
"""
Return True if movement has been detected
"""
try:
cam_id = self.cameraByName(camera=camera, home=home)['id']
except TypeError:
logger.warning("personSeenByCamera: Camera name or home is unknown")
return False
if self.lastEvent[cam_id]['type'] == 'movement':
return True
return False
|
python
|
def motionDetected(self, home=None, camera=None):
"""
Return True if movement has been detected
"""
try:
cam_id = self.cameraByName(camera=camera, home=home)['id']
except TypeError:
logger.warning("personSeenByCamera: Camera name or home is unknown")
return False
if self.lastEvent[cam_id]['type'] == 'movement':
return True
return False
|
[
"def",
"motionDetected",
"(",
"self",
",",
"home",
"=",
"None",
",",
"camera",
"=",
"None",
")",
":",
"try",
":",
"cam_id",
"=",
"self",
".",
"cameraByName",
"(",
"camera",
"=",
"camera",
",",
"home",
"=",
"home",
")",
"[",
"'id'",
"]",
"except",
"TypeError",
":",
"logger",
".",
"warning",
"(",
"\"personSeenByCamera: Camera name or home is unknown\"",
")",
"return",
"False",
"if",
"self",
".",
"lastEvent",
"[",
"cam_id",
"]",
"[",
"'type'",
"]",
"==",
"'movement'",
":",
"return",
"True",
"return",
"False"
] |
Return True if movement has been detected
|
[
"Return",
"True",
"if",
"movement",
"has",
"been",
"detected"
] |
d749fca3637c07c2943aba7992f683fff1812f77
|
https://github.com/philippelt/netatmo-api-python/blob/d749fca3637c07c2943aba7992f683fff1812f77/lnetatmo.py#L684-L695
|
train
|
fprimex/zdesk
|
zdesk/zdesk.py
|
batch
|
def batch(sequence, callback, size=100, **kwargs):
"""Helper to setup batch requests.
There are endpoints which support updating multiple resources at once,
but they are often limited to 100 updates per request.
    This function helps with splitting bigger requests into a sequence of
smaller ones.
Example:
def add_organization_tag(organizations, tag):
request = {'organizations': [
{
'id': org['id'],
'tags': org['tags'] + [tag],
} for org in organizations
]}
job = z.organizations_update_many(request)['job_status']
return job['id']
# z = Zendesk(...)
orgs = z.organizations_list(get_all_pages=True)['organizations']
job_ids = [job for job in
batch(orgs, add_organization_tag, tag='new_tag')]
Parameters:
sequence - any sequence you want to split
callback - function to call with slices of sequence,
its return value is yielded on each slice
size - size of chunks, combined with length of sequence determines
how many times callback is called (defaults to 100)
**kwargs - any additional keyword arguments are passed to callback
"""
batch_len, rem = divmod(len(sequence), size)
if rem > 0:
batch_len += 1
for i in range(batch_len):
offset = i * size
yield callback(sequence[offset:offset + size], **kwargs)
|
python
|
def batch(sequence, callback, size=100, **kwargs):
"""Helper to setup batch requests.
There are endpoints which support updating multiple resources at once,
but they are often limited to 100 updates per request.
    This function helps with splitting bigger requests into a sequence of
smaller ones.
Example:
def add_organization_tag(organizations, tag):
request = {'organizations': [
{
'id': org['id'],
'tags': org['tags'] + [tag],
} for org in organizations
]}
job = z.organizations_update_many(request)['job_status']
return job['id']
# z = Zendesk(...)
orgs = z.organizations_list(get_all_pages=True)['organizations']
job_ids = [job for job in
batch(orgs, add_organization_tag, tag='new_tag')]
Parameters:
sequence - any sequence you want to split
callback - function to call with slices of sequence,
its return value is yielded on each slice
size - size of chunks, combined with length of sequence determines
how many times callback is called (defaults to 100)
**kwargs - any additional keyword arguments are passed to callback
"""
batch_len, rem = divmod(len(sequence), size)
if rem > 0:
batch_len += 1
for i in range(batch_len):
offset = i * size
yield callback(sequence[offset:offset + size], **kwargs)
|
[
"def",
"batch",
"(",
"sequence",
",",
"callback",
",",
"size",
"=",
"100",
",",
"*",
"*",
"kwargs",
")",
":",
"batch_len",
",",
"rem",
"=",
"divmod",
"(",
"len",
"(",
"sequence",
")",
",",
"size",
")",
"if",
"rem",
">",
"0",
":",
"batch_len",
"+=",
"1",
"for",
"i",
"in",
"range",
"(",
"batch_len",
")",
":",
"offset",
"=",
"i",
"*",
"size",
"yield",
"callback",
"(",
"sequence",
"[",
"offset",
":",
"offset",
"+",
"size",
"]",
",",
"*",
"*",
"kwargs",
")"
] |
Helper to setup batch requests.
There are endpoints which support updating multiple resources at once,
but they are often limited to 100 updates per request.
    This function helps with splitting bigger requests into a sequence of
smaller ones.
Example:
def add_organization_tag(organizations, tag):
request = {'organizations': [
{
'id': org['id'],
'tags': org['tags'] + [tag],
} for org in organizations
]}
job = z.organizations_update_many(request)['job_status']
return job['id']
# z = Zendesk(...)
orgs = z.organizations_list(get_all_pages=True)['organizations']
job_ids = [job for job in
batch(orgs, add_organization_tag, tag='new_tag')]
Parameters:
sequence - any sequence you want to split
callback - function to call with slices of sequence,
its return value is yielded on each slice
size - size of chunks, combined with length of sequence determines
how many times callback is called (defaults to 100)
**kwargs - any additional keyword arguments are passed to callback
|
[
"Helper",
"to",
"setup",
"batch",
"requests",
"."
] |
851611c13b4d530e9df31390b3ec709baf0a0188
|
https://github.com/fprimex/zdesk/blob/851611c13b4d530e9df31390b3ec709baf0a0188/zdesk/zdesk.py#L20-L57
|
train
|
fprimex/zdesk
|
zdesk/zdesk.py
|
Zendesk._handle_retry
|
def _handle_retry(self, resp):
"""Handle any exceptions during API request or
parsing its response status code.
Parameters:
    resp: requests.Response instance obtained during the concerning request
or None, when request failed
    Returns: True if we should retry our request, or raises the original Exception
"""
exc_t, exc_v, exc_tb = sys.exc_info()
if exc_t is None:
raise TypeError('Must be called in except block.')
retry_on_exc = tuple(
(x for x in self._retry_on if inspect.isclass(x)))
retry_on_codes = tuple(
(x for x in self._retry_on if isinstance(x, int)))
if issubclass(exc_t, ZendeskError):
code = exc_v.error_code
if exc_t not in retry_on_exc and code not in retry_on_codes:
six.reraise(exc_t, exc_v, exc_tb)
else:
if not issubclass(exc_t, retry_on_exc):
six.reraise(exc_t, exc_v, exc_tb)
if resp is not None:
try:
retry_after = float(resp.headers.get('Retry-After', 0))
time.sleep(retry_after)
except (TypeError, ValueError):
pass
return True
|
python
|
def _handle_retry(self, resp):
"""Handle any exceptions during API request or
parsing its response status code.
Parameters:
    resp: requests.Response instance obtained during the concerning request
or None, when request failed
    Returns: True if we should retry our request, or raises the original Exception
"""
exc_t, exc_v, exc_tb = sys.exc_info()
if exc_t is None:
raise TypeError('Must be called in except block.')
retry_on_exc = tuple(
(x for x in self._retry_on if inspect.isclass(x)))
retry_on_codes = tuple(
(x for x in self._retry_on if isinstance(x, int)))
if issubclass(exc_t, ZendeskError):
code = exc_v.error_code
if exc_t not in retry_on_exc and code not in retry_on_codes:
six.reraise(exc_t, exc_v, exc_tb)
else:
if not issubclass(exc_t, retry_on_exc):
six.reraise(exc_t, exc_v, exc_tb)
if resp is not None:
try:
retry_after = float(resp.headers.get('Retry-After', 0))
time.sleep(retry_after)
except (TypeError, ValueError):
pass
return True
|
[
"def",
"_handle_retry",
"(",
"self",
",",
"resp",
")",
":",
"exc_t",
",",
"exc_v",
",",
"exc_tb",
"=",
"sys",
".",
"exc_info",
"(",
")",
"if",
"exc_t",
"is",
"None",
":",
"raise",
"TypeError",
"(",
"'Must be called in except block.'",
")",
"retry_on_exc",
"=",
"tuple",
"(",
"(",
"x",
"for",
"x",
"in",
"self",
".",
"_retry_on",
"if",
"inspect",
".",
"isclass",
"(",
"x",
")",
")",
")",
"retry_on_codes",
"=",
"tuple",
"(",
"(",
"x",
"for",
"x",
"in",
"self",
".",
"_retry_on",
"if",
"isinstance",
"(",
"x",
",",
"int",
")",
")",
")",
"if",
"issubclass",
"(",
"exc_t",
",",
"ZendeskError",
")",
":",
"code",
"=",
"exc_v",
".",
"error_code",
"if",
"exc_t",
"not",
"in",
"retry_on_exc",
"and",
"code",
"not",
"in",
"retry_on_codes",
":",
"six",
".",
"reraise",
"(",
"exc_t",
",",
"exc_v",
",",
"exc_tb",
")",
"else",
":",
"if",
"not",
"issubclass",
"(",
"exc_t",
",",
"retry_on_exc",
")",
":",
"six",
".",
"reraise",
"(",
"exc_t",
",",
"exc_v",
",",
"exc_tb",
")",
"if",
"resp",
"is",
"not",
"None",
":",
"try",
":",
"retry_after",
"=",
"float",
"(",
"resp",
".",
"headers",
".",
"get",
"(",
"'Retry-After'",
",",
"0",
")",
")",
"time",
".",
"sleep",
"(",
"retry_after",
")",
"except",
"(",
"TypeError",
",",
"ValueError",
")",
":",
"pass",
"return",
"True"
] |
Handle any exceptions during API request or
parsing its response status code.
Parameters:
    resp: requests.Response instance obtained during the concerning request
or None, when request failed
    Returns: True if we should retry our request, or raises the original Exception
|
[
"Handle",
"any",
"exceptions",
"during",
"API",
"request",
"or",
"parsing",
"its",
"response",
"status",
"code",
"."
] |
851611c13b4d530e9df31390b3ec709baf0a0188
|
https://github.com/fprimex/zdesk/blob/851611c13b4d530e9df31390b3ec709baf0a0188/zdesk/zdesk.py#L600-L635
|
train
|
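The core trick in ``_handle_retry`` is capturing ``sys.exc_info()`` inside an ``except`` block and re-raising with the original traceback when the error is not retryable. A standalone, stdlib-only sketch of that pattern (``RETRY_ON`` and ``call_with_retry`` are made-up names):

import sys

RETRY_ON = (ConnectionError, TimeoutError)

def call_with_retry(fn, attempts=3):
    for attempt in range(attempts):
        try:
            return fn()
        except Exception:
            exc_t, exc_v, exc_tb = sys.exc_info()
            if not issubclass(exc_t, RETRY_ON) or attempt == attempts - 1:
                # Python 3 equivalent of six.reraise: keep the original traceback
                raise exc_v.with_traceback(exc_tb)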
v1k45/django-notify-x
|
notify/views.py
|
notification_redirect
|
def notification_redirect(request, ctx):
"""
    Helper to handle HTTP response after an action is performed on a notification
:param request: HTTP request context of the notification
    :param ctx: context to be returned when an AJAX call is made.
:returns: Either JSON for AJAX or redirects to the calculated next page.
"""
if request.is_ajax():
return JsonResponse(ctx)
else:
next_page = request.POST.get('next', reverse('notifications:all'))
if not ctx['success']:
return HttpResponseBadRequest(ctx['msg'])
if is_safe_url(next_page):
return HttpResponseRedirect(next_page)
else:
return HttpResponseRedirect(reverse('notifications:all'))
|
python
|
def notification_redirect(request, ctx):
"""
    Helper to handle HTTP response after an action is performed on a notification
:param request: HTTP request context of the notification
    :param ctx: context to be returned when an AJAX call is made.
:returns: Either JSON for AJAX or redirects to the calculated next page.
"""
if request.is_ajax():
return JsonResponse(ctx)
else:
next_page = request.POST.get('next', reverse('notifications:all'))
if not ctx['success']:
return HttpResponseBadRequest(ctx['msg'])
if is_safe_url(next_page):
return HttpResponseRedirect(next_page)
else:
return HttpResponseRedirect(reverse('notifications:all'))
|
[
"def",
"notification_redirect",
"(",
"request",
",",
"ctx",
")",
":",
"if",
"request",
".",
"is_ajax",
"(",
")",
":",
"return",
"JsonResponse",
"(",
"ctx",
")",
"else",
":",
"next_page",
"=",
"request",
".",
"POST",
".",
"get",
"(",
"'next'",
",",
"reverse",
"(",
"'notifications:all'",
")",
")",
"if",
"not",
"ctx",
"[",
"'success'",
"]",
":",
"return",
"HttpResponseBadRequest",
"(",
"ctx",
"[",
"'msg'",
"]",
")",
"if",
"is_safe_url",
"(",
"next_page",
")",
":",
"return",
"HttpResponseRedirect",
"(",
"next_page",
")",
"else",
":",
"return",
"HttpResponseRedirect",
"(",
"reverse",
"(",
"'notifications:all'",
")",
")"
] |
Helper to handle HTTP response after an action is performed on a notification
:param request: HTTP request context of the notification
    :param ctx: context to be returned when an AJAX call is made.
:returns: Either JSON for AJAX or redirects to the calculated next page.
|
[
"Helper",
"to",
"handle",
"HTTP",
"response",
"after",
"an",
"action",
"is",
"performed",
"on",
"notification"
] |
b4aa03039759126889666a59117e83dcd4cdb374
|
https://github.com/v1k45/django-notify-x/blob/b4aa03039759126889666a59117e83dcd4cdb374/notify/views.py#L20-L39
|
train
|
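A sketch of the contract a caller follows; the import path is assumed from this file's location, and any action view that builds a ctx with ``msg`` and ``success`` keys can delegate its response handling the same way:

from notify.views import notification_redirect  # assumed import path

def my_action(request):
    ctx = {'msg': 'done', 'success': True}
    return notification_redirect(request, ctx)  # JSON for AJAX, redirect otherwise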
v1k45/django-notify-x
|
notify/views.py
|
mark
|
def mark(request):
"""
Handles marking of individual notifications as read or unread.
Takes ``notification id`` and mark ``action`` as POST data.
:param request: HTTP request context.
:returns: Response to mark action of supplied notification ID.
"""
notification_id = request.POST.get('id', None)
action = request.POST.get('action', None)
success = True
if notification_id:
try:
notification = Notification.objects.get(pk=notification_id,
recipient=request.user)
if action == 'read':
notification.mark_as_read()
msg = _("Marked as read")
elif action == 'unread':
notification.mark_as_unread()
msg = _("Marked as unread")
else:
success = False
msg = _("Invalid mark action.")
except Notification.DoesNotExist:
success = False
msg = _("Notification does not exists.")
else:
success = False
msg = _("Invalid Notification ID")
ctx = {'msg': msg, 'success': success, 'action': action}
return notification_redirect(request, ctx)
|
python
|
def mark(request):
"""
Handles marking of individual notifications as read or unread.
Takes ``notification id`` and mark ``action`` as POST data.
:param request: HTTP request context.
:returns: Response to mark action of supplied notification ID.
"""
notification_id = request.POST.get('id', None)
action = request.POST.get('action', None)
success = True
if notification_id:
try:
notification = Notification.objects.get(pk=notification_id,
recipient=request.user)
if action == 'read':
notification.mark_as_read()
msg = _("Marked as read")
elif action == 'unread':
notification.mark_as_unread()
msg = _("Marked as unread")
else:
success = False
msg = _("Invalid mark action.")
except Notification.DoesNotExist:
success = False
msg = _("Notification does not exists.")
else:
success = False
msg = _("Invalid Notification ID")
ctx = {'msg': msg, 'success': success, 'action': action}
return notification_redirect(request, ctx)
|
[
"def",
"mark",
"(",
"request",
")",
":",
"notification_id",
"=",
"request",
".",
"POST",
".",
"get",
"(",
"'id'",
",",
"None",
")",
"action",
"=",
"request",
".",
"POST",
".",
"get",
"(",
"'action'",
",",
"None",
")",
"success",
"=",
"True",
"if",
"notification_id",
":",
"try",
":",
"notification",
"=",
"Notification",
".",
"objects",
".",
"get",
"(",
"pk",
"=",
"notification_id",
",",
"recipient",
"=",
"request",
".",
"user",
")",
"if",
"action",
"==",
"'read'",
":",
"notification",
".",
"mark_as_read",
"(",
")",
"msg",
"=",
"_",
"(",
"\"Marked as read\"",
")",
"elif",
"action",
"==",
"'unread'",
":",
"notification",
".",
"mark_as_unread",
"(",
")",
"msg",
"=",
"_",
"(",
"\"Marked as unread\"",
")",
"else",
":",
"success",
"=",
"False",
"msg",
"=",
"_",
"(",
"\"Invalid mark action.\"",
")",
"except",
"Notification",
".",
"DoesNotExist",
":",
"success",
"=",
"False",
"msg",
"=",
"_",
"(",
"\"Notification does not exists.\"",
")",
"else",
":",
"success",
"=",
"False",
"msg",
"=",
"_",
"(",
"\"Invalid Notification ID\"",
")",
"ctx",
"=",
"{",
"'msg'",
":",
"msg",
",",
"'success'",
":",
"success",
",",
"'action'",
":",
"action",
"}",
"return",
"notification_redirect",
"(",
"request",
",",
"ctx",
")"
] |
Handles marking of individual notifications as read or unread.
Takes ``notification id`` and mark ``action`` as POST data.
:param request: HTTP request context.
:returns: Response to mark action of supplied notification ID.
|
[
"Handles",
"marking",
"of",
"individual",
"notifications",
"as",
"read",
"or",
"unread",
".",
"Takes",
"notification",
"id",
"and",
"mark",
"action",
"as",
"POST",
"data",
"."
] |
b4aa03039759126889666a59117e83dcd4cdb374
|
https://github.com/v1k45/django-notify-x/blob/b4aa03039759126889666a59117e83dcd4cdb374/notify/views.py#L61-L96
|
train
|
v1k45/django-notify-x
|
notify/views.py
|
mark_all
|
def mark_all(request):
"""
    Marks notifications as either read or unread depending on POST parameters.
Takes ``action`` as POST data, it can either be ``read`` or ``unread``.
:param request: HTTP Request context.
:return: Response to mark_all action.
"""
action = request.POST.get('action', None)
success = True
if action == 'read':
request.user.notifications.read_all()
msg = _("Marked all notifications as read")
elif action == 'unread':
request.user.notifications.unread_all()
msg = _("Marked all notifications as unread")
else:
msg = _("Invalid mark action")
success = False
ctx = {'msg': msg, 'success': success, 'action': action}
return notification_redirect(request, ctx)
|
python
|
def mark_all(request):
"""
    Marks notifications as either read or unread depending on POST parameters.
Takes ``action`` as POST data, it can either be ``read`` or ``unread``.
:param request: HTTP Request context.
:return: Response to mark_all action.
"""
action = request.POST.get('action', None)
success = True
if action == 'read':
request.user.notifications.read_all()
msg = _("Marked all notifications as read")
elif action == 'unread':
request.user.notifications.unread_all()
msg = _("Marked all notifications as unread")
else:
msg = _("Invalid mark action")
success = False
ctx = {'msg': msg, 'success': success, 'action': action}
return notification_redirect(request, ctx)
|
[
"def",
"mark_all",
"(",
"request",
")",
":",
"action",
"=",
"request",
".",
"POST",
".",
"get",
"(",
"'action'",
",",
"None",
")",
"success",
"=",
"True",
"if",
"action",
"==",
"'read'",
":",
"request",
".",
"user",
".",
"notifications",
".",
"read_all",
"(",
")",
"msg",
"=",
"_",
"(",
"\"Marked all notifications as read\"",
")",
"elif",
"action",
"==",
"'unread'",
":",
"request",
".",
"user",
".",
"notifications",
".",
"unread_all",
"(",
")",
"msg",
"=",
"_",
"(",
"\"Marked all notifications as unread\"",
")",
"else",
":",
"msg",
"=",
"_",
"(",
"\"Invalid mark action\"",
")",
"success",
"=",
"False",
"ctx",
"=",
"{",
"'msg'",
":",
"msg",
",",
"'success'",
":",
"success",
",",
"'action'",
":",
"action",
"}",
"return",
"notification_redirect",
"(",
"request",
",",
"ctx",
")"
] |
Marks notifications as either read or unread depending on POST parameters.
Takes ``action`` as POST data, it can either be ``read`` or ``unread``.
:param request: HTTP Request context.
:return: Response to mark_all action.
|
[
"Marks",
"notifications",
"as",
"either",
"read",
"or",
"unread",
"depending",
"of",
"POST",
"parameters",
".",
"Takes",
"action",
"as",
"POST",
"data",
"it",
"can",
"either",
"be",
"read",
"or",
"unread",
"."
] |
b4aa03039759126889666a59117e83dcd4cdb374
|
https://github.com/v1k45/django-notify-x/blob/b4aa03039759126889666a59117e83dcd4cdb374/notify/views.py#L101-L125
|
train
|
v1k45/django-notify-x
|
notify/views.py
|
delete
|
def delete(request):
"""
Deletes notification of supplied notification ID.
    Depending on project settings, if ``NOTIFY_SOFT_DELETE``
is set to ``False``, the notifications will be deleted from DB.
If not, a soft delete will be performed.
By default, notifications are deleted softly.
:param request: HTTP request context.
:return: Response to delete action on supplied notification ID.
"""
notification_id = request.POST.get('id', None)
success = True
if notification_id:
try:
notification = Notification.objects.get(pk=notification_id,
recipient=request.user)
soft_delete = getattr(settings, 'NOTIFY_SOFT_DELETE', True)
if soft_delete:
notification.deleted = True
notification.save()
else:
notification.delete()
msg = _("Deleted notification successfully")
except Notification.DoesNotExist:
success = False
msg = _("Notification does not exists.")
else:
success = False
msg = _("Invalid Notification ID")
ctx = {'msg': msg, 'success': success, }
return notification_redirect(request, ctx)
|
python
|
def delete(request):
"""
Deletes notification of supplied notification ID.
    Depending on project settings, if ``NOTIFY_SOFT_DELETE``
is set to ``False``, the notifications will be deleted from DB.
If not, a soft delete will be performed.
By default, notifications are deleted softly.
:param request: HTTP request context.
:return: Response to delete action on supplied notification ID.
"""
notification_id = request.POST.get('id', None)
success = True
if notification_id:
try:
notification = Notification.objects.get(pk=notification_id,
recipient=request.user)
soft_delete = getattr(settings, 'NOTIFY_SOFT_DELETE', True)
if soft_delete:
notification.deleted = True
notification.save()
else:
notification.delete()
msg = _("Deleted notification successfully")
except Notification.DoesNotExist:
success = False
msg = _("Notification does not exists.")
else:
success = False
msg = _("Invalid Notification ID")
ctx = {'msg': msg, 'success': success, }
return notification_redirect(request, ctx)
|
[
"def",
"delete",
"(",
"request",
")",
":",
"notification_id",
"=",
"request",
".",
"POST",
".",
"get",
"(",
"'id'",
",",
"None",
")",
"success",
"=",
"True",
"if",
"notification_id",
":",
"try",
":",
"notification",
"=",
"Notification",
".",
"objects",
".",
"get",
"(",
"pk",
"=",
"notification_id",
",",
"recipient",
"=",
"request",
".",
"user",
")",
"soft_delete",
"=",
"getattr",
"(",
"settings",
",",
"'NOTIFY_SOFT_DELETE'",
",",
"True",
")",
"if",
"soft_delete",
":",
"notification",
".",
"deleted",
"=",
"True",
"notification",
".",
"save",
"(",
")",
"else",
":",
"notification",
".",
"delete",
"(",
")",
"msg",
"=",
"_",
"(",
"\"Deleted notification successfully\"",
")",
"except",
"Notification",
".",
"DoesNotExist",
":",
"success",
"=",
"False",
"msg",
"=",
"_",
"(",
"\"Notification does not exists.\"",
")",
"else",
":",
"success",
"=",
"False",
"msg",
"=",
"_",
"(",
"\"Invalid Notification ID\"",
")",
"ctx",
"=",
"{",
"'msg'",
":",
"msg",
",",
"'success'",
":",
"success",
",",
"}",
"return",
"notification_redirect",
"(",
"request",
",",
"ctx",
")"
] |
Deletes notification of supplied notification ID.
Depending on project settings, if ``NOTIFY_SOFT_DELETE``
is set to ``False``, the notifications will be deleted from DB.
If not, a soft delete will be performed.
By default, notifications are deleted softly.
:param request: HTTP request context.
:return: Response to delete action on supplied notification ID.
|
[
"Deletes",
"notification",
"of",
"supplied",
"notification",
"ID",
"."
] |
b4aa03039759126889666a59117e83dcd4cdb374
|
https://github.com/v1k45/django-notify-x/blob/b4aa03039759126889666a59117e83dcd4cdb374/notify/views.py#L130-L167
|
train
|
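A settings sketch for the soft-delete switch that ``delete`` reads via ``getattr(settings, 'NOTIFY_SOFT_DELETE', True)``:

# settings.py
NOTIFY_SOFT_DELETE = False  # delete() now removes rows instead of flagging them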
v1k45/django-notify-x
|
notify/views.py
|
notification_update
|
def notification_update(request):
"""
Handles live updating of notifications, follows ajax-polling approach.
Read more: http://stackoverflow.com/a/12855533/4726598
Required URL parameters: ``flag``.
Explanation:
- The ``flag`` parameter carries the last notification ID \
received by the user's browser.
- This ``flag`` is most likely to be generated by using \
a simple JS/JQuery DOM. Just grab the first element of \
the notification list.
- The element will have a ``data-id`` attribute set to the \
corresponding notification.
    - We'll use its value as the flag parameter.
- The view treats the ``last notification flag`` as a model \
    ``filter()`` and fetches all notifications greater than \
the flag for the user.
    - Then the JSON data is prepared with all necessary \
details such as, ``verb``, ``actor``, ``target`` and their \
    URL etc. The foreign keys are serialized as their \
default ``__str__`` value.
- Everything will be HTML escaped by django's ``escape()``.
    - Since these notifications will only serve temporarily \
on the notification box and will be generated fresh \
using a whole template, to avoid client-side notification \
generation using the JSON data, the JSON data will also \
contain a rendered HTML string so that you can easily \
do a JQuery ``$yourNotificationBox.prepend()`` on the \
rendered html string of the notification.
- The template used is expected to be different than the \
template used in full page notification as the css \
and some other elements are highly likely to be \
different than the full page notification list. \
- The template used will be the ``notification type`` of the \
notification suffixed ``_box.html``. So, if your \
notification type is ``comment_reply``, the template \
will be ``comment_reply_box.html``.
- This template will be stored in ``notifications/includes/`` \
of your template directory.
- That makes: ``notifications/includes/comment_reply_box.html``
- The rest is self-explanatory.
:param request: HTTP request context.
:return: Notification updates (if any) in JSON format.
"""
flag = request.GET.get('flag', None)
target = request.GET.get('target', 'box')
    last_notification = int(flag) if flag and flag.isdigit() else None
if last_notification:
new_notifications = request.user.notifications.filter(
id__gt=last_notification).active().prefetch()
msg = _("Notifications successfully retrieved.") \
if new_notifications else _("No new notifications.")
notification_list = []
for nf in new_notifications:
notification = nf.as_json()
notification_list.append(notification)
notification['html'] = render_notification(
nf, render_target=target, **notification)
ctx = {
"retrieved": len(new_notifications),
"unread_count": request.user.notifications.unread().count(),
"notifications": notification_list,
"success": True,
"msg": msg,
}
return JsonResponse(ctx)
else:
msg = _("Notification flag not sent.")
ctx = {"success": False, "msg": msg}
return JsonResponse(ctx)
|
python
|
def notification_update(request):
"""
Handles live updating of notifications, follows ajax-polling approach.
Read more: http://stackoverflow.com/a/12855533/4726598
Required URL parameters: ``flag``.
Explanation:
- The ``flag`` parameter carries the last notification ID \
received by the user's browser.
- This ``flag`` is most likely to be generated by using \
a simple JS/JQuery DOM. Just grab the first element of \
the notification list.
- The element will have a ``data-id`` attribute set to the \
corresponding notification.
    - We'll use its value as the flag parameter.
- The view treats the ``last notification flag`` as a model \
    ``filter()`` and fetches all notifications greater than \
the flag for the user.
    - Then the JSON data is prepared with all necessary \
details such as, ``verb``, ``actor``, ``target`` and their \
    URL etc. The foreign keys are serialized as their \
default ``__str__`` value.
- Everything will be HTML escaped by django's ``escape()``.
    - Since these notifications will only serve temporarily \
on the notification box and will be generated fresh \
using a whole template, to avoid client-side notification \
generation using the JSON data, the JSON data will also \
contain a rendered HTML string so that you can easily \
do a JQuery ``$yourNotificationBox.prepend()`` on the \
rendered html string of the notification.
- The template used is expected to be different than the \
template used in full page notification as the css \
and some other elements are highly likely to be \
different than the full page notification list. \
- The template used will be the ``notification type`` of the \
notification suffixed ``_box.html``. So, if your \
notification type is ``comment_reply``, the template \
will be ``comment_reply_box.html``.
- This template will be stored in ``notifications/includes/`` \
of your template directory.
- That makes: ``notifications/includes/comment_reply_box.html``
- The rest is self-explanatory.
:param request: HTTP request context.
:return: Notification updates (if any) in JSON format.
"""
flag = request.GET.get('flag', None)
target = request.GET.get('target', 'box')
    last_notification = int(flag) if flag and flag.isdigit() else None
if last_notification:
new_notifications = request.user.notifications.filter(
id__gt=last_notification).active().prefetch()
msg = _("Notifications successfully retrieved.") \
if new_notifications else _("No new notifications.")
notification_list = []
for nf in new_notifications:
notification = nf.as_json()
notification_list.append(notification)
notification['html'] = render_notification(
nf, render_target=target, **notification)
ctx = {
"retrieved": len(new_notifications),
"unread_count": request.user.notifications.unread().count(),
"notifications": notification_list,
"success": True,
"msg": msg,
}
return JsonResponse(ctx)
else:
msg = _("Notification flag not sent.")
ctx = {"success": False, "msg": msg}
return JsonResponse(ctx)
|
[
"def",
"notification_update",
"(",
"request",
")",
":",
"flag",
"=",
"request",
".",
"GET",
".",
"get",
"(",
"'flag'",
",",
"None",
")",
"target",
"=",
"request",
".",
"GET",
".",
"get",
"(",
"'target'",
",",
"'box'",
")",
"last_notification",
"=",
"int",
"(",
"flag",
")",
"if",
"flag",
".",
"isdigit",
"(",
")",
"else",
"None",
"if",
"last_notification",
":",
"new_notifications",
"=",
"request",
".",
"user",
".",
"notifications",
".",
"filter",
"(",
"id__gt",
"=",
"last_notification",
")",
".",
"active",
"(",
")",
".",
"prefetch",
"(",
")",
"msg",
"=",
"_",
"(",
"\"Notifications successfully retrieved.\"",
")",
"if",
"new_notifications",
"else",
"_",
"(",
"\"No new notifications.\"",
")",
"notification_list",
"=",
"[",
"]",
"for",
"nf",
"in",
"new_notifications",
":",
"notification",
"=",
"nf",
".",
"as_json",
"(",
")",
"notification_list",
".",
"append",
"(",
"notification",
")",
"notification",
"[",
"'html'",
"]",
"=",
"render_notification",
"(",
"nf",
",",
"render_target",
"=",
"target",
",",
"*",
"*",
"notification",
")",
"ctx",
"=",
"{",
"\"retrieved\"",
":",
"len",
"(",
"new_notifications",
")",
",",
"\"unread_count\"",
":",
"request",
".",
"user",
".",
"notifications",
".",
"unread",
"(",
")",
".",
"count",
"(",
")",
",",
"\"notifications\"",
":",
"notification_list",
",",
"\"success\"",
":",
"True",
",",
"\"msg\"",
":",
"msg",
",",
"}",
"return",
"JsonResponse",
"(",
"ctx",
")",
"else",
":",
"msg",
"=",
"_",
"(",
"\"Notification flag not sent.\"",
")",
"ctx",
"=",
"{",
"\"success\"",
":",
"False",
",",
"\"msg\"",
":",
"msg",
"}",
"return",
"JsonResponse",
"(",
"ctx",
")"
] |
Handles live updating of notifications, follows ajax-polling approach.
Read more: http://stackoverflow.com/a/12855533/4726598
Required URL parameters: ``flag``.
Explanation:
- The ``flag`` parameter carries the last notification ID \
received by the user's browser.
- This ``flag`` is most likely to be generated by using \
a simple JS/JQuery DOM. Just grab the first element of \
the notification list.
- The element will have a ``data-id`` attribute set to the \
corresponding notification.
    - We'll use its value as the flag parameter.
- The view treats the ``last notification flag`` as a model \
    ``filter()`` and fetches all notifications greater than \
the flag for the user.
    - Then the JSON data is prepared with all necessary \
details such as, ``verb``, ``actor``, ``target`` and their \
    URL etc. The foreign keys are serialized as their \
default ``__str__`` value.
- Everything will be HTML escaped by django's ``escape()``.
    - Since these notifications will only serve temporarily \
on the notification box and will be generated fresh \
using a whole template, to avoid client-side notification \
generation using the JSON data, the JSON data will also \
contain a rendered HTML string so that you can easily \
do a JQuery ``$yourNotificationBox.prepend()`` on the \
rendered html string of the notification.
- The template used is expected to be different than the \
template used in full page notification as the css \
and some other elements are highly likely to be \
different than the full page notification list. \
- The template used will be the ``notification type`` of the \
notification suffixed ``_box.html``. So, if your \
notification type is ``comment_reply``, the template \
will be ``comment_reply_box.html``.
- This template will be stored in ``notifications/includes/`` \
of your template directory.
- That makes: ``notifications/includes/comment_reply_box.html``
- The rest is self-explanatory.
:param request: HTTP request context.
:return: Notification updates (if any) in JSON format.
|
[
"Handles",
"live",
"updating",
"of",
"notifications",
"follows",
"ajax",
"-",
"polling",
"approach",
"."
] |
b4aa03039759126889666a59117e83dcd4cdb374
|
https://github.com/v1k45/django-notify-x/blob/b4aa03039759126889666a59117e83dcd4cdb374/notify/views.py#L171-L264
|
train
|
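A hypothetical poller for ``notification_update``, written with ``requests`` to stay in Python; the endpoint path is a placeholder and the snippet assumes each serialized notification exposes its ``id``:

import time
import requests

session = requests.Session()   # assumed to carry an authenticated session cookie
last_seen = 0
while True:
    resp = session.get('https://example.com/notifications/update/',
                       params={'flag': last_seen, 'target': 'box'})
    data = resp.json()
    if data['success'] and data['retrieved']:
        for nf in data['notifications']:
            print(nf['html'])                      # client would prepend this into the box
            last_seen = max(last_seen, nf['id'])   # assumes an 'id' key in as_json()
    time.sleep(10)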
v1k45/django-notify-x
|
notify/views.py
|
read_and_redirect
|
def read_and_redirect(request, notification_id):
"""
Marks the supplied notification as read and then redirects
to the supplied URL from the ``next`` URL parameter.
    **IMPORTANT**: This is a CSRF-unsafe method.
    Only use it if it's okay for you to mark notifications \
as read without a robust check.
:param request: HTTP request context.
    :param notification_id: ID of the notification to be marked as read.
:returns: Redirect response to a valid target url.
"""
notification_page = reverse('notifications:all')
next_page = request.GET.get('next', notification_page)
if is_safe_url(next_page):
target = next_page
else:
target = notification_page
try:
user_nf = request.user.notifications.get(pk=notification_id)
if not user_nf.read:
user_nf.mark_as_read()
except Notification.DoesNotExist:
pass
return HttpResponseRedirect(target)
|
python
|
def read_and_redirect(request, notification_id):
"""
Marks the supplied notification as read and then redirects
to the supplied URL from the ``next`` URL parameter.
    **IMPORTANT**: This is a CSRF-unsafe method.
    Only use it if it's okay for you to mark notifications \
as read without a robust check.
:param request: HTTP request context.
    :param notification_id: ID of the notification to be marked as read.
:returns: Redirect response to a valid target url.
"""
notification_page = reverse('notifications:all')
next_page = request.GET.get('next', notification_page)
if is_safe_url(next_page):
target = next_page
else:
target = notification_page
try:
user_nf = request.user.notifications.get(pk=notification_id)
if not user_nf.read:
user_nf.mark_as_read()
except Notification.DoesNotExist:
pass
return HttpResponseRedirect(target)
|
[
"def",
"read_and_redirect",
"(",
"request",
",",
"notification_id",
")",
":",
"notification_page",
"=",
"reverse",
"(",
"'notifications:all'",
")",
"next_page",
"=",
"request",
".",
"GET",
".",
"get",
"(",
"'next'",
",",
"notification_page",
")",
"if",
"is_safe_url",
"(",
"next_page",
")",
":",
"target",
"=",
"next_page",
"else",
":",
"target",
"=",
"notification_page",
"try",
":",
"user_nf",
"=",
"request",
".",
"user",
".",
"notifications",
".",
"get",
"(",
"pk",
"=",
"notification_id",
")",
"if",
"not",
"user_nf",
".",
"read",
":",
"user_nf",
".",
"mark_as_read",
"(",
")",
"except",
"Notification",
".",
"DoesNotExist",
":",
"pass",
"return",
"HttpResponseRedirect",
"(",
"target",
")"
] |
Marks the supplied notification as read and then redirects
to the supplied URL from the ``next`` URL parameter.
    **IMPORTANT**: This is a CSRF-unsafe method.
    Only use it if it's okay for you to mark notifications \
as read without a robust check.
:param request: HTTP request context.
    :param notification_id: ID of the notification to be marked as read.
:returns: Redirect response to a valid target url.
|
[
"Marks",
"the",
"supplied",
"notification",
"as",
"read",
"and",
"then",
"redirects",
"to",
"the",
"supplied",
"URL",
"from",
"the",
"next",
"URL",
"parameter",
"."
] |
b4aa03039759126889666a59117e83dcd4cdb374
|
https://github.com/v1k45/django-notify-x/blob/b4aa03039759126889666a59117e83dcd4cdb374/notify/views.py#L268-L296
|
train
|
mezz64/pyHik
|
pyhik/hikvision.py
|
HikCamera.get_motion_detection
|
def get_motion_detection(self):
"""Fetch current motion state from camera"""
url = ('%s/ISAPI/System/Video/inputs/'
'channels/1/motionDetection') % self.root_url
try:
response = self.hik_request.get(url, timeout=CONNECT_TIMEOUT)
except (requests.exceptions.RequestException,
requests.exceptions.ConnectionError) as err:
_LOGGING.error('Unable to fetch MotionDetection, error: %s', err)
self.motion_detection = None
return self.motion_detection
if response.status_code == requests.codes.unauthorized:
_LOGGING.error('Authentication failed')
self.motion_detection = None
return self.motion_detection
if response.status_code != requests.codes.ok:
# If we didn't receive 200, abort
_LOGGING.debug('Unable to fetch motion detection.')
self.motion_detection = None
return self.motion_detection
try:
tree = ET.fromstring(response.text)
ET.register_namespace("", self.namespace)
enabled = tree.find(self.element_query('enabled'))
if enabled is not None:
self._motion_detection_xml = tree
self.motion_detection = {'true': True, 'false': False}[enabled.text]
return self.motion_detection
except AttributeError as err:
_LOGGING.error('Entire response: %s', response.text)
_LOGGING.error('There was a problem: %s', err)
self.motion_detection = None
return self.motion_detection
|
python
|
def get_motion_detection(self):
"""Fetch current motion state from camera"""
url = ('%s/ISAPI/System/Video/inputs/'
'channels/1/motionDetection') % self.root_url
try:
response = self.hik_request.get(url, timeout=CONNECT_TIMEOUT)
except (requests.exceptions.RequestException,
requests.exceptions.ConnectionError) as err:
_LOGGING.error('Unable to fetch MotionDetection, error: %s', err)
self.motion_detection = None
return self.motion_detection
if response.status_code == requests.codes.unauthorized:
_LOGGING.error('Authentication failed')
self.motion_detection = None
return self.motion_detection
if response.status_code != requests.codes.ok:
# If we didn't receive 200, abort
_LOGGING.debug('Unable to fetch motion detection.')
self.motion_detection = None
return self.motion_detection
try:
tree = ET.fromstring(response.text)
ET.register_namespace("", self.namespace)
enabled = tree.find(self.element_query('enabled'))
if enabled is not None:
self._motion_detection_xml = tree
self.motion_detection = {'true': True, 'false': False}[enabled.text]
return self.motion_detection
except AttributeError as err:
_LOGGING.error('Entire response: %s', response.text)
_LOGGING.error('There was a problem: %s', err)
self.motion_detection = None
return self.motion_detection
|
[
"def",
"get_motion_detection",
"(",
"self",
")",
":",
"url",
"=",
"(",
"'%s/ISAPI/System/Video/inputs/'",
"'channels/1/motionDetection'",
")",
"%",
"self",
".",
"root_url",
"try",
":",
"response",
"=",
"self",
".",
"hik_request",
".",
"get",
"(",
"url",
",",
"timeout",
"=",
"CONNECT_TIMEOUT",
")",
"except",
"(",
"requests",
".",
"exceptions",
".",
"RequestException",
",",
"requests",
".",
"exceptions",
".",
"ConnectionError",
")",
"as",
"err",
":",
"_LOGGING",
".",
"error",
"(",
"'Unable to fetch MotionDetection, error: %s'",
",",
"err",
")",
"self",
".",
"motion_detection",
"=",
"None",
"return",
"self",
".",
"motion_detection",
"if",
"response",
".",
"status_code",
"==",
"requests",
".",
"codes",
".",
"unauthorized",
":",
"_LOGGING",
".",
"error",
"(",
"'Authentication failed'",
")",
"self",
".",
"motion_detection",
"=",
"None",
"return",
"self",
".",
"motion_detection",
"if",
"response",
".",
"status_code",
"!=",
"requests",
".",
"codes",
".",
"ok",
":",
"# If we didn't receive 200, abort",
"_LOGGING",
".",
"debug",
"(",
"'Unable to fetch motion detection.'",
")",
"self",
".",
"motion_detection",
"=",
"None",
"return",
"self",
".",
"motion_detection",
"try",
":",
"tree",
"=",
"ET",
".",
"fromstring",
"(",
"response",
".",
"text",
")",
"ET",
".",
"register_namespace",
"(",
"\"\"",
",",
"self",
".",
"namespace",
")",
"enabled",
"=",
"tree",
".",
"find",
"(",
"self",
".",
"element_query",
"(",
"'enabled'",
")",
")",
"if",
"enabled",
"is",
"not",
"None",
":",
"self",
".",
"_motion_detection_xml",
"=",
"tree",
"self",
".",
"motion_detection",
"=",
"{",
"'true'",
":",
"True",
",",
"'false'",
":",
"False",
"}",
"[",
"enabled",
".",
"text",
"]",
"return",
"self",
".",
"motion_detection",
"except",
"AttributeError",
"as",
"err",
":",
"_LOGGING",
".",
"error",
"(",
"'Entire response: %s'",
",",
"response",
".",
"text",
")",
"_LOGGING",
".",
"error",
"(",
"'There was a problem: %s'",
",",
"err",
")",
"self",
".",
"motion_detection",
"=",
"None",
"return",
"self",
".",
"motion_detection"
] |
Fetch current motion state from camera
|
[
"Fetch",
"current",
"motion",
"state",
"from",
"camera"
] |
1e7afca926e2b045257a43cbf8b1236a435493c2
|
https://github.com/mezz64/pyHik/blob/1e7afca926e2b045257a43cbf8b1236a435493c2/pyhik/hikvision.py#L141-L179
|
train
|
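A standalone sketch of the XML handling in ``get_motion_detection``: discover the namespace from the root tag (as ``get_device_info`` does), find the namespaced ``enabled`` node, and map its text to a bool. The sample payload is made up:

import xml.etree.ElementTree as ET

SAMPLE = ('<MotionDetection xmlns="http://www.hikvision.com/ver20/XMLSchema">'
          '<enabled>true</enabled></MotionDetection>')

tree = ET.fromstring(SAMPLE)
ns = tree.tag.split('}')[0].strip('{')               # namespace discovery
enabled = tree.find('{%s}enabled' % ns)              # what element_query() builds
print({'true': True, 'false': False}[enabled.text])  # True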
mezz64/pyHik
|
pyhik/hikvision.py
|
HikCamera._set_motion_detection
|
def _set_motion_detection(self, enable):
"""Set desired motion detection state on camera"""
url = ('%s/ISAPI/System/Video/inputs/'
'channels/1/motionDetection') % self.root_url
enabled = self._motion_detection_xml.find(self.element_query('enabled'))
if enabled is None:
_LOGGING.error("Couldn't find 'enabled' in the xml")
_LOGGING.error('XML: %s', ET.tostring(self._motion_detection_xml))
return
enabled.text = 'true' if enable else 'false'
xml = ET.tostring(self._motion_detection_xml)
try:
response = self.hik_request.put(url, data=xml, timeout=CONNECT_TIMEOUT)
except (requests.exceptions.RequestException,
requests.exceptions.ConnectionError) as err:
_LOGGING.error('Unable to set MotionDetection, error: %s', err)
return
if response.status_code == requests.codes.unauthorized:
_LOGGING.error('Authentication failed')
return
if response.status_code != requests.codes.ok:
# If we didn't receive 200, abort
_LOGGING.error('Unable to set motion detection: %s', response.text)
self.motion_detection = enable
|
python
|
def _set_motion_detection(self, enable):
"""Set desired motion detection state on camera"""
url = ('%s/ISAPI/System/Video/inputs/'
'channels/1/motionDetection') % self.root_url
enabled = self._motion_detection_xml.find(self.element_query('enabled'))
if enabled is None:
_LOGGING.error("Couldn't find 'enabled' in the xml")
_LOGGING.error('XML: %s', ET.tostring(self._motion_detection_xml))
return
enabled.text = 'true' if enable else 'false'
xml = ET.tostring(self._motion_detection_xml)
try:
response = self.hik_request.put(url, data=xml, timeout=CONNECT_TIMEOUT)
except (requests.exceptions.RequestException,
requests.exceptions.ConnectionError) as err:
_LOGGING.error('Unable to set MotionDetection, error: %s', err)
return
if response.status_code == requests.codes.unauthorized:
_LOGGING.error('Authentication failed')
return
if response.status_code != requests.codes.ok:
# If we didn't receive 200, abort
_LOGGING.error('Unable to set motion detection: %s', response.text)
self.motion_detection = enable
|
[
"def",
"_set_motion_detection",
"(",
"self",
",",
"enable",
")",
":",
"url",
"=",
"(",
"'%s/ISAPI/System/Video/inputs/'",
"'channels/1/motionDetection'",
")",
"%",
"self",
".",
"root_url",
"enabled",
"=",
"self",
".",
"_motion_detection_xml",
".",
"find",
"(",
"self",
".",
"element_query",
"(",
"'enabled'",
")",
")",
"if",
"enabled",
"is",
"None",
":",
"_LOGGING",
".",
"error",
"(",
"\"Couldn't find 'enabled' in the xml\"",
")",
"_LOGGING",
".",
"error",
"(",
"'XML: %s'",
",",
"ET",
".",
"tostring",
"(",
"self",
".",
"_motion_detection_xml",
")",
")",
"return",
"enabled",
".",
"text",
"=",
"'true'",
"if",
"enable",
"else",
"'false'",
"xml",
"=",
"ET",
".",
"tostring",
"(",
"self",
".",
"_motion_detection_xml",
")",
"try",
":",
"response",
"=",
"self",
".",
"hik_request",
".",
"put",
"(",
"url",
",",
"data",
"=",
"xml",
",",
"timeout",
"=",
"CONNECT_TIMEOUT",
")",
"except",
"(",
"requests",
".",
"exceptions",
".",
"RequestException",
",",
"requests",
".",
"exceptions",
".",
"ConnectionError",
")",
"as",
"err",
":",
"_LOGGING",
".",
"error",
"(",
"'Unable to set MotionDetection, error: %s'",
",",
"err",
")",
"return",
"if",
"response",
".",
"status_code",
"==",
"requests",
".",
"codes",
".",
"unauthorized",
":",
"_LOGGING",
".",
"error",
"(",
"'Authentication failed'",
")",
"return",
"if",
"response",
".",
"status_code",
"!=",
"requests",
".",
"codes",
".",
"ok",
":",
"# If we didn't receive 200, abort",
"_LOGGING",
".",
"error",
"(",
"'Unable to set motion detection: %s'",
",",
"response",
".",
"text",
")",
"self",
".",
"motion_detection",
"=",
"enable"
] |
Set desired motion detection state on camera
|
[
"Set",
"desired",
"motion",
"detection",
"state",
"on",
"camera"
] |
1e7afca926e2b045257a43cbf8b1236a435493c2
|
https://github.com/mezz64/pyHik/blob/1e7afca926e2b045257a43cbf8b1236a435493c2/pyhik/hikvision.py#L189-L218
|
train
|
mezz64/pyHik
|
pyhik/hikvision.py
|
HikCamera.add_update_callback
|
def add_update_callback(self, callback, sensor):
"""Register as callback for when a matching device sensor changes."""
self._updateCallbacks.append([callback, sensor])
_LOGGING.debug('Added update callback to %s on %s', callback, sensor)
|
python
|
def add_update_callback(self, callback, sensor):
"""Register as callback for when a matching device sensor changes."""
self._updateCallbacks.append([callback, sensor])
_LOGGING.debug('Added update callback to %s on %s', callback, sensor)
|
[
"def",
"add_update_callback",
"(",
"self",
",",
"callback",
",",
"sensor",
")",
":",
"self",
".",
"_updateCallbacks",
".",
"append",
"(",
"[",
"callback",
",",
"sensor",
"]",
")",
"_LOGGING",
".",
"debug",
"(",
"'Added update callback to %s on %s'",
",",
"callback",
",",
"sensor",
")"
] |
Register as callback for when a matching device sensor changes.
|
[
"Register",
"as",
"callback",
"for",
"when",
"a",
"matching",
"device",
"sensor",
"changes",
"."
] |
1e7afca926e2b045257a43cbf8b1236a435493c2
|
https://github.com/mezz64/pyHik/blob/1e7afca926e2b045257a43cbf8b1236a435493c2/pyhik/hikvision.py#L220-L223
|
train
|
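A hypothetical registration against a constructed camera; the constructor arguments and the ``'<cam_id>.<sensor>'`` key format are assumptions about how consumers use this module:

from pyhik.hikvision import HikCamera

cam = HikCamera('http://192.168.1.10', port=80, usr='admin', pwd='secret')

def on_update(msg):
    print('sensor changed:', msg)

cam.add_update_callback(on_update, '{}.{}'.format(cam.cam_id, 'Motion'))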
mezz64/pyHik
|
pyhik/hikvision.py
|
HikCamera.initialize
|
def initialize(self):
"""Initialize deviceInfo and available events."""
device_info = self.get_device_info()
if device_info is None:
self.name = None
self.cam_id = None
self.event_states = None
return
for key in device_info:
if key == 'deviceName':
self.name = device_info[key]
elif key == 'deviceID':
if len(device_info[key]) > 10:
self.cam_id = device_info[key]
else:
self.cam_id = uuid.uuid4()
events_available = self.get_event_triggers()
if events_available:
for event, channel_list in events_available.items():
for channel in channel_list:
try:
self.event_states.setdefault(
SENSOR_MAP[event.lower()], []).append(
[False, channel, 0, datetime.datetime.now()])
except KeyError:
# Sensor type doesn't have a known friendly name
# We can't reliably handle it at this time...
_LOGGING.warning(
'Sensor type "%s" is unsupported.', event)
_LOGGING.debug('Initialized Dictionary: %s', self.event_states)
else:
_LOGGING.debug('No Events available in dictionary.')
self.get_motion_detection()
|
python
|
def initialize(self):
"""Initialize deviceInfo and available events."""
device_info = self.get_device_info()
if device_info is None:
self.name = None
self.cam_id = None
self.event_states = None
return
for key in device_info:
if key == 'deviceName':
self.name = device_info[key]
elif key == 'deviceID':
if len(device_info[key]) > 10:
self.cam_id = device_info[key]
else:
self.cam_id = uuid.uuid4()
events_available = self.get_event_triggers()
if events_available:
for event, channel_list in events_available.items():
for channel in channel_list:
try:
self.event_states.setdefault(
SENSOR_MAP[event.lower()], []).append(
[False, channel, 0, datetime.datetime.now()])
except KeyError:
# Sensor type doesn't have a known friendly name
# We can't reliably handle it at this time...
_LOGGING.warning(
'Sensor type "%s" is unsupported.', event)
_LOGGING.debug('Initialized Dictionary: %s', self.event_states)
else:
_LOGGING.debug('No Events available in dictionary.')
self.get_motion_detection()
|
[
"def",
"initialize",
"(",
"self",
")",
":",
"device_info",
"=",
"self",
".",
"get_device_info",
"(",
")",
"if",
"device_info",
"is",
"None",
":",
"self",
".",
"name",
"=",
"None",
"self",
".",
"cam_id",
"=",
"None",
"self",
".",
"event_states",
"=",
"None",
"return",
"for",
"key",
"in",
"device_info",
":",
"if",
"key",
"==",
"'deviceName'",
":",
"self",
".",
"name",
"=",
"device_info",
"[",
"key",
"]",
"elif",
"key",
"==",
"'deviceID'",
":",
"if",
"len",
"(",
"device_info",
"[",
"key",
"]",
")",
">",
"10",
":",
"self",
".",
"cam_id",
"=",
"device_info",
"[",
"key",
"]",
"else",
":",
"self",
".",
"cam_id",
"=",
"uuid",
".",
"uuid4",
"(",
")",
"events_available",
"=",
"self",
".",
"get_event_triggers",
"(",
")",
"if",
"events_available",
":",
"for",
"event",
",",
"channel_list",
"in",
"events_available",
".",
"items",
"(",
")",
":",
"for",
"channel",
"in",
"channel_list",
":",
"try",
":",
"self",
".",
"event_states",
".",
"setdefault",
"(",
"SENSOR_MAP",
"[",
"event",
".",
"lower",
"(",
")",
"]",
",",
"[",
"]",
")",
".",
"append",
"(",
"[",
"False",
",",
"channel",
",",
"0",
",",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"]",
")",
"except",
"KeyError",
":",
"# Sensor type doesn't have a known friendly name",
"# We can't reliably handle it at this time...",
"_LOGGING",
".",
"warning",
"(",
"'Sensor type \"%s\" is unsupported.'",
",",
"event",
")",
"_LOGGING",
".",
"debug",
"(",
"'Initialized Dictionary: %s'",
",",
"self",
".",
"event_states",
")",
"else",
":",
"_LOGGING",
".",
"debug",
"(",
"'No Events available in dictionary.'",
")",
"self",
".",
"get_motion_detection",
"(",
")"
] |
Initialize deviceInfo and available events.
|
[
"Initialize",
"deviceInfo",
"and",
"available",
"events",
"."
] |
1e7afca926e2b045257a43cbf8b1236a435493c2
|
https://github.com/mezz64/pyHik/blob/1e7afca926e2b045257a43cbf8b1236a435493c2/pyhik/hikvision.py#L237-L274
|
train
|
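The structure ``initialize`` builds, inferred from the ``setdefault``/``append`` calls above; the friendly name ``'Motion'`` stands in for a ``SENSOR_MAP`` value and is illustrative only:

import datetime

example_event_states = {
    'Motion': [[False, 1, 0, datetime.datetime.now()]],
    # per channel: [current state, channel number, tripped count, last update]
}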
mezz64/pyHik
|
pyhik/hikvision.py
|
HikCamera.get_event_triggers
|
def get_event_triggers(self):
"""
Returns dict of supported events.
Key = Event Type
List = Channels that have that event activated
"""
events = {}
nvrflag = False
event_xml = []
url = '%s/ISAPI/Event/triggers' % self.root_url
try:
response = self.hik_request.get(url, timeout=CONNECT_TIMEOUT)
if response.status_code == requests.codes.not_found:
# Try alternate URL for triggers
_LOGGING.debug('Using alternate triggers URL.')
url = '%s/Event/triggers' % self.root_url
response = self.hik_request.get(url)
except (requests.exceptions.RequestException,
requests.exceptions.ConnectionError) as err:
_LOGGING.error('Unable to fetch events, error: %s', err)
return None
if response.status_code != 200:
        # If we didn't receive 200, abort
return None
# pylint: disable=too-many-nested-blocks
try:
content = ET.fromstring(response.text)
if content[0].find(self.element_query('EventTrigger')):
event_xml = content[0].findall(
self.element_query('EventTrigger'))
elif content.find(self.element_query('EventTrigger')):
# This is either an NVR or a rebadged camera
event_xml = content.findall(
self.element_query('EventTrigger'))
for eventtrigger in event_xml:
ettype = eventtrigger.find(self.element_query('eventType'))
            # Catch empty xml definitions
if ettype is None:
break
etnotify = eventtrigger.find(
self.element_query('EventTriggerNotificationList'))
etchannel = None
etchannel_num = 0
for node_name in CHANNEL_NAMES:
etchannel = eventtrigger.find(
self.element_query(node_name))
if etchannel is not None:
try:
# Need to make sure this is actually a number
etchannel_num = int(etchannel.text)
if etchannel_num > 1:
# Must be an nvr
nvrflag = True
break
except ValueError:
# Field must not be an integer
pass
if etnotify:
for notifytrigger in etnotify:
ntype = notifytrigger.find(
self.element_query('notificationMethod'))
if ntype.text == 'center' or ntype.text == 'HTTP':
"""
If we got this far we found an event that we want
to track.
"""
events.setdefault(ettype.text, []) \
.append(etchannel_num)
except (AttributeError, ET.ParseError) as err:
_LOGGING.error(
'There was a problem finding an element: %s', err)
return None
if nvrflag:
self.device_type = NVR_DEVICE
else:
self.device_type = CAM_DEVICE
_LOGGING.debug('Processed %s as %s Device.',
self.cam_id, self.device_type)
_LOGGING.debug('Found events: %s', events)
self.hik_request.close()
return events
|
python
|
def get_event_triggers(self):
"""
Returns dict of supported events.
Key = Event Type
List = Channels that have that event activated
"""
events = {}
nvrflag = False
event_xml = []
url = '%s/ISAPI/Event/triggers' % self.root_url
try:
response = self.hik_request.get(url, timeout=CONNECT_TIMEOUT)
if response.status_code == requests.codes.not_found:
# Try alternate URL for triggers
_LOGGING.debug('Using alternate triggers URL.')
url = '%s/Event/triggers' % self.root_url
response = self.hik_request.get(url)
except (requests.exceptions.RequestException,
requests.exceptions.ConnectionError) as err:
_LOGGING.error('Unable to fetch events, error: %s', err)
return None
if response.status_code != 200:
        # If we didn't receive 200, abort
return None
# pylint: disable=too-many-nested-blocks
try:
content = ET.fromstring(response.text)
if content[0].find(self.element_query('EventTrigger')):
event_xml = content[0].findall(
self.element_query('EventTrigger'))
elif content.find(self.element_query('EventTrigger')):
# This is either an NVR or a rebadged camera
event_xml = content.findall(
self.element_query('EventTrigger'))
for eventtrigger in event_xml:
ettype = eventtrigger.find(self.element_query('eventType'))
            # Catch empty xml definitions
if ettype is None:
break
etnotify = eventtrigger.find(
self.element_query('EventTriggerNotificationList'))
etchannel = None
etchannel_num = 0
for node_name in CHANNEL_NAMES:
etchannel = eventtrigger.find(
self.element_query(node_name))
if etchannel is not None:
try:
# Need to make sure this is actually a number
etchannel_num = int(etchannel.text)
if etchannel_num > 1:
# Must be an nvr
nvrflag = True
break
except ValueError:
# Field must not be an integer
pass
if etnotify:
for notifytrigger in etnotify:
ntype = notifytrigger.find(
self.element_query('notificationMethod'))
if ntype.text == 'center' or ntype.text == 'HTTP':
"""
If we got this far we found an event that we want
to track.
"""
events.setdefault(ettype.text, []) \
.append(etchannel_num)
except (AttributeError, ET.ParseError) as err:
_LOGGING.error(
'There was a problem finding an element: %s', err)
return None
if nvrflag:
self.device_type = NVR_DEVICE
else:
self.device_type = CAM_DEVICE
_LOGGING.debug('Processed %s as %s Device.',
self.cam_id, self.device_type)
_LOGGING.debug('Found events: %s', events)
self.hik_request.close()
return events
|
[
"def",
"get_event_triggers",
"(",
"self",
")",
":",
"events",
"=",
"{",
"}",
"nvrflag",
"=",
"False",
"event_xml",
"=",
"[",
"]",
"url",
"=",
"'%s/ISAPI/Event/triggers'",
"%",
"self",
".",
"root_url",
"try",
":",
"response",
"=",
"self",
".",
"hik_request",
".",
"get",
"(",
"url",
",",
"timeout",
"=",
"CONNECT_TIMEOUT",
")",
"if",
"response",
".",
"status_code",
"==",
"requests",
".",
"codes",
".",
"not_found",
":",
"# Try alternate URL for triggers",
"_LOGGING",
".",
"debug",
"(",
"'Using alternate triggers URL.'",
")",
"url",
"=",
"'%s/Event/triggers'",
"%",
"self",
".",
"root_url",
"response",
"=",
"self",
".",
"hik_request",
".",
"get",
"(",
"url",
")",
"except",
"(",
"requests",
".",
"exceptions",
".",
"RequestException",
",",
"requests",
".",
"exceptions",
".",
"ConnectionError",
")",
"as",
"err",
":",
"_LOGGING",
".",
"error",
"(",
"'Unable to fetch events, error: %s'",
",",
"err",
")",
"return",
"None",
"if",
"response",
".",
"status_code",
"!=",
"200",
":",
"# If we didn't recieve 200, abort",
"return",
"None",
"# pylint: disable=too-many-nested-blocks",
"try",
":",
"content",
"=",
"ET",
".",
"fromstring",
"(",
"response",
".",
"text",
")",
"if",
"content",
"[",
"0",
"]",
".",
"find",
"(",
"self",
".",
"element_query",
"(",
"'EventTrigger'",
")",
")",
":",
"event_xml",
"=",
"content",
"[",
"0",
"]",
".",
"findall",
"(",
"self",
".",
"element_query",
"(",
"'EventTrigger'",
")",
")",
"elif",
"content",
".",
"find",
"(",
"self",
".",
"element_query",
"(",
"'EventTrigger'",
")",
")",
":",
"# This is either an NVR or a rebadged camera",
"event_xml",
"=",
"content",
".",
"findall",
"(",
"self",
".",
"element_query",
"(",
"'EventTrigger'",
")",
")",
"for",
"eventtrigger",
"in",
"event_xml",
":",
"ettype",
"=",
"eventtrigger",
".",
"find",
"(",
"self",
".",
"element_query",
"(",
"'eventType'",
")",
")",
"# Catch empty xml defintions",
"if",
"ettype",
"is",
"None",
":",
"break",
"etnotify",
"=",
"eventtrigger",
".",
"find",
"(",
"self",
".",
"element_query",
"(",
"'EventTriggerNotificationList'",
")",
")",
"etchannel",
"=",
"None",
"etchannel_num",
"=",
"0",
"for",
"node_name",
"in",
"CHANNEL_NAMES",
":",
"etchannel",
"=",
"eventtrigger",
".",
"find",
"(",
"self",
".",
"element_query",
"(",
"node_name",
")",
")",
"if",
"etchannel",
"is",
"not",
"None",
":",
"try",
":",
"# Need to make sure this is actually a number",
"etchannel_num",
"=",
"int",
"(",
"etchannel",
".",
"text",
")",
"if",
"etchannel_num",
">",
"1",
":",
"# Must be an nvr",
"nvrflag",
"=",
"True",
"break",
"except",
"ValueError",
":",
"# Field must not be an integer",
"pass",
"if",
"etnotify",
":",
"for",
"notifytrigger",
"in",
"etnotify",
":",
"ntype",
"=",
"notifytrigger",
".",
"find",
"(",
"self",
".",
"element_query",
"(",
"'notificationMethod'",
")",
")",
"if",
"ntype",
".",
"text",
"==",
"'center'",
"or",
"ntype",
".",
"text",
"==",
"'HTTP'",
":",
"\"\"\"\n If we got this far we found an event that we want\n to track.\n \"\"\"",
"events",
".",
"setdefault",
"(",
"ettype",
".",
"text",
",",
"[",
"]",
")",
".",
"append",
"(",
"etchannel_num",
")",
"except",
"(",
"AttributeError",
",",
"ET",
".",
"ParseError",
")",
"as",
"err",
":",
"_LOGGING",
".",
"error",
"(",
"'There was a problem finding an element: %s'",
",",
"err",
")",
"return",
"None",
"if",
"nvrflag",
":",
"self",
".",
"device_type",
"=",
"NVR_DEVICE",
"else",
":",
"self",
".",
"device_type",
"=",
"CAM_DEVICE",
"_LOGGING",
".",
"debug",
"(",
"'Processed %s as %s Device.'",
",",
"self",
".",
"cam_id",
",",
"self",
".",
"device_type",
")",
"_LOGGING",
".",
"debug",
"(",
"'Found events: %s'",
",",
"events",
")",
"self",
".",
"hik_request",
".",
"close",
"(",
")",
"return",
"events"
] |
Returns dict of supported events.
Key = Event Type
List = Channels that have that event activated
|
[
"Returns",
"dict",
"of",
"supported",
"events",
".",
"Key",
"=",
"Event",
"Type",
"List",
"=",
"Channels",
"that",
"have",
"that",
"event",
"activated"
] |
1e7afca926e2b045257a43cbf8b1236a435493c2
|
https://github.com/mezz64/pyHik/blob/1e7afca926e2b045257a43cbf8b1236a435493c2/pyhik/hikvision.py#L276-L369
|
train
|
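An illustrative return value for ``get_event_triggers`` on a two-channel recorder; the ISAPI event-type keys are examples, not guaranteed names:

events = {
    'VMD': [1, 2],         # motion detection active on channels 1 and 2
    'linedetection': [1],  # line crossing active on channel 1
}
# any channel number > 1 also flips device_type to NVR_DEVICE as a side effect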
mezz64/pyHik
|
pyhik/hikvision.py
|
HikCamera.get_device_info
|
def get_device_info(self):
"""Parse deviceInfo into dictionary."""
device_info = {}
url = '%s/ISAPI/System/deviceInfo' % self.root_url
using_digest = False
try:
response = self.hik_request.get(url, timeout=CONNECT_TIMEOUT)
if response.status_code == requests.codes.unauthorized:
_LOGGING.debug('Basic authentication failed. Using digest.')
self.hik_request.auth = HTTPDigestAuth(self.usr, self.pwd)
using_digest = True
response = self.hik_request.get(url)
if response.status_code == requests.codes.not_found:
# Try alternate URL for deviceInfo
_LOGGING.debug('Using alternate deviceInfo URL.')
url = '%s/System/deviceInfo' % self.root_url
response = self.hik_request.get(url)
            # Seems to be a difference between camera and nvr, they can't seem to
# agree if they should 404 or 401 first
if not using_digest and response.status_code == requests.codes.unauthorized:
_LOGGING.debug('Basic authentication failed. Using digest.')
self.hik_request.auth = HTTPDigestAuth(self.usr, self.pwd)
using_digest = True
response = self.hik_request.get(url)
except (requests.exceptions.RequestException,
requests.exceptions.ConnectionError) as err:
_LOGGING.error('Unable to fetch deviceInfo, error: %s', err)
return None
if response.status_code == requests.codes.unauthorized:
_LOGGING.error('Authentication failed')
return None
if response.status_code != requests.codes.ok:
# If we didn't receive 200, abort
_LOGGING.debug('Unable to fetch device info.')
return None
try:
tree = ET.fromstring(response.text)
# Try to fetch namespace from XML
nmsp = tree.tag.split('}')[0].strip('{')
self.namespace = nmsp if nmsp.startswith('http') else XML_NAMESPACE
_LOGGING.debug('Using Namespace: %s', self.namespace)
for item in tree:
tag = item.tag.split('}')[1]
device_info[tag] = item.text
return device_info
except AttributeError as err:
_LOGGING.error('Entire response: %s', response.text)
_LOGGING.error('There was a problem: %s', err)
return None
|
python
|
def get_device_info(self):
"""Parse deviceInfo into dictionary."""
device_info = {}
url = '%s/ISAPI/System/deviceInfo' % self.root_url
using_digest = False
try:
response = self.hik_request.get(url, timeout=CONNECT_TIMEOUT)
if response.status_code == requests.codes.unauthorized:
_LOGGING.debug('Basic authentication failed. Using digest.')
self.hik_request.auth = HTTPDigestAuth(self.usr, self.pwd)
using_digest = True
response = self.hik_request.get(url)
if response.status_code == requests.codes.not_found:
# Try alternate URL for deviceInfo
_LOGGING.debug('Using alternate deviceInfo URL.')
url = '%s/System/deviceInfo' % self.root_url
response = self.hik_request.get(url)
            # Seems to be a difference between camera and nvr, they can't seem to
# agree if they should 404 or 401 first
if not using_digest and response.status_code == requests.codes.unauthorized:
_LOGGING.debug('Basic authentication failed. Using digest.')
self.hik_request.auth = HTTPDigestAuth(self.usr, self.pwd)
using_digest = True
response = self.hik_request.get(url)
except (requests.exceptions.RequestException,
requests.exceptions.ConnectionError) as err:
_LOGGING.error('Unable to fetch deviceInfo, error: %s', err)
return None
if response.status_code == requests.codes.unauthorized:
_LOGGING.error('Authentication failed')
return None
if response.status_code != requests.codes.ok:
# If we didn't receive 200, abort
_LOGGING.debug('Unable to fetch device info.')
return None
try:
tree = ET.fromstring(response.text)
# Try to fetch namespace from XML
nmsp = tree.tag.split('}')[0].strip('{')
self.namespace = nmsp if nmsp.startswith('http') else XML_NAMESPACE
_LOGGING.debug('Using Namespace: %s', self.namespace)
for item in tree:
tag = item.tag.split('}')[1]
device_info[tag] = item.text
return device_info
except AttributeError as err:
_LOGGING.error('Entire response: %s', response.text)
_LOGGING.error('There was a problem: %s', err)
return None
|
[
"def",
"get_device_info",
"(",
"self",
")",
":",
"device_info",
"=",
"{",
"}",
"url",
"=",
"'%s/ISAPI/System/deviceInfo'",
"%",
"self",
".",
"root_url",
"using_digest",
"=",
"False",
"try",
":",
"response",
"=",
"self",
".",
"hik_request",
".",
"get",
"(",
"url",
",",
"timeout",
"=",
"CONNECT_TIMEOUT",
")",
"if",
"response",
".",
"status_code",
"==",
"requests",
".",
"codes",
".",
"unauthorized",
":",
"_LOGGING",
".",
"debug",
"(",
"'Basic authentication failed. Using digest.'",
")",
"self",
".",
"hik_request",
".",
"auth",
"=",
"HTTPDigestAuth",
"(",
"self",
".",
"usr",
",",
"self",
".",
"pwd",
")",
"using_digest",
"=",
"True",
"response",
"=",
"self",
".",
"hik_request",
".",
"get",
"(",
"url",
")",
"if",
"response",
".",
"status_code",
"==",
"requests",
".",
"codes",
".",
"not_found",
":",
"# Try alternate URL for deviceInfo",
"_LOGGING",
".",
"debug",
"(",
"'Using alternate deviceInfo URL.'",
")",
"url",
"=",
"'%s/System/deviceInfo'",
"%",
"self",
".",
"root_url",
"response",
"=",
"self",
".",
"hik_request",
".",
"get",
"(",
"url",
")",
"# Seems to be difference between camera and nvr, they can't seem to",
"# agree if they should 404 or 401 first",
"if",
"not",
"using_digest",
"and",
"response",
".",
"status_code",
"==",
"requests",
".",
"codes",
".",
"unauthorized",
":",
"_LOGGING",
".",
"debug",
"(",
"'Basic authentication failed. Using digest.'",
")",
"self",
".",
"hik_request",
".",
"auth",
"=",
"HTTPDigestAuth",
"(",
"self",
".",
"usr",
",",
"self",
".",
"pwd",
")",
"using_digest",
"=",
"True",
"response",
"=",
"self",
".",
"hik_request",
".",
"get",
"(",
"url",
")",
"except",
"(",
"requests",
".",
"exceptions",
".",
"RequestException",
",",
"requests",
".",
"exceptions",
".",
"ConnectionError",
")",
"as",
"err",
":",
"_LOGGING",
".",
"error",
"(",
"'Unable to fetch deviceInfo, error: %s'",
",",
"err",
")",
"return",
"None",
"if",
"response",
".",
"status_code",
"==",
"requests",
".",
"codes",
".",
"unauthorized",
":",
"_LOGGING",
".",
"error",
"(",
"'Authentication failed'",
")",
"return",
"None",
"if",
"response",
".",
"status_code",
"!=",
"requests",
".",
"codes",
".",
"ok",
":",
"# If we didn't receive 200, abort",
"_LOGGING",
".",
"debug",
"(",
"'Unable to fetch device info.'",
")",
"return",
"None",
"try",
":",
"tree",
"=",
"ET",
".",
"fromstring",
"(",
"response",
".",
"text",
")",
"# Try to fetch namespace from XML",
"nmsp",
"=",
"tree",
".",
"tag",
".",
"split",
"(",
"'}'",
")",
"[",
"0",
"]",
".",
"strip",
"(",
"'{'",
")",
"self",
".",
"namespace",
"=",
"nmsp",
"if",
"nmsp",
".",
"startswith",
"(",
"'http'",
")",
"else",
"XML_NAMESPACE",
"_LOGGING",
".",
"debug",
"(",
"'Using Namespace: %s'",
",",
"self",
".",
"namespace",
")",
"for",
"item",
"in",
"tree",
":",
"tag",
"=",
"item",
".",
"tag",
".",
"split",
"(",
"'}'",
")",
"[",
"1",
"]",
"device_info",
"[",
"tag",
"]",
"=",
"item",
".",
"text",
"return",
"device_info",
"except",
"AttributeError",
"as",
"err",
":",
"_LOGGING",
".",
"error",
"(",
"'Entire response: %s'",
",",
"response",
".",
"text",
")",
"_LOGGING",
".",
"error",
"(",
"'There was a problem: %s'",
",",
"err",
")",
"return",
"None"
] |
Parse deviceInfo into dictionary.
|
[
"Parse",
"deviceInfo",
"into",
"dictionary",
"."
] |
1e7afca926e2b045257a43cbf8b1236a435493c2
|
https://github.com/mezz64/pyHik/blob/1e7afca926e2b045257a43cbf8b1236a435493c2/pyhik/hikvision.py#L371-L428
|
train
|
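get_device_info above also derives the XML namespace from the root tag before flattening the children into a dict. That extraction technique, shown stand-alone (the sample XML is illustrative only):

    import xml.etree.ElementTree as ET

    sample = ('<root xmlns="http://www.hikvision.com/ver20/XMLSchema">'
              '<deviceName>cam</deviceName></root>')
    tree = ET.fromstring(sample)
    # ElementTree renders namespaced tags as '{uri}tag'; splitting recovers the URI
    nmsp = tree.tag.split('}')[0].strip('{')
    print(nmsp)  # http://www.hikvision.com/ver20/XMLSchema
    device_info = {item.tag.split('}')[1]: item.text for item in tree}
    print(device_info)  # {'deviceName': 'cam'}
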
mezz64/pyHik
|
pyhik/hikvision.py
|
HikCamera.watchdog_handler
|
def watchdog_handler(self):
"""Take care of threads if wachdog expires."""
_LOGGING.debug('%s Watchdog expired. Resetting connection.', self.name)
self.watchdog.stop()
self.reset_thrd.set()
|
python
|
def watchdog_handler(self):
"""Take care of threads if wachdog expires."""
_LOGGING.debug('%s Watchdog expired. Resetting connection.', self.name)
self.watchdog.stop()
self.reset_thrd.set()
|
[
"def",
"watchdog_handler",
"(",
"self",
")",
":",
"_LOGGING",
".",
"debug",
"(",
"'%s Watchdog expired. Resetting connection.'",
",",
"self",
".",
"name",
")",
"self",
".",
"watchdog",
".",
"stop",
"(",
")",
"self",
".",
"reset_thrd",
".",
"set",
"(",
")"
] |
Take care of threads if watchdog expires.
|
[
"Take",
"care",
"of",
"threads",
"if",
"wachdog",
"expires",
"."
] |
1e7afca926e2b045257a43cbf8b1236a435493c2
|
https://github.com/mezz64/pyHik/blob/1e7afca926e2b045257a43cbf8b1236a435493c2/pyhik/hikvision.py#L430-L434
|
train
|
mezz64/pyHik
|
pyhik/hikvision.py
|
HikCamera.disconnect
|
def disconnect(self):
"""Disconnect from event stream."""
_LOGGING.debug('Disconnecting from stream: %s', self.name)
self.kill_thrd.set()
self.thrd.join()
_LOGGING.debug('Event stream thread for %s is stopped', self.name)
self.kill_thrd.clear()
|
python
|
def disconnect(self):
"""Disconnect from event stream."""
_LOGGING.debug('Disconnecting from stream: %s', self.name)
self.kill_thrd.set()
self.thrd.join()
_LOGGING.debug('Event stream thread for %s is stopped', self.name)
self.kill_thrd.clear()
|
[
"def",
"disconnect",
"(",
"self",
")",
":",
"_LOGGING",
".",
"debug",
"(",
"'Disconnecting from stream: %s'",
",",
"self",
".",
"name",
")",
"self",
".",
"kill_thrd",
".",
"set",
"(",
")",
"self",
".",
"thrd",
".",
"join",
"(",
")",
"_LOGGING",
".",
"debug",
"(",
"'Event stream thread for %s is stopped'",
",",
"self",
".",
"name",
")",
"self",
".",
"kill_thrd",
".",
"clear",
"(",
")"
] |
Disconnect from event stream.
|
[
"Disconnect",
"from",
"event",
"stream",
"."
] |
1e7afca926e2b045257a43cbf8b1236a435493c2
|
https://github.com/mezz64/pyHik/blob/1e7afca926e2b045257a43cbf8b1236a435493c2/pyhik/hikvision.py#L436-L442
|
train
|
mezz64/pyHik
|
pyhik/hikvision.py
|
HikCamera.alert_stream
|
def alert_stream(self, reset_event, kill_event):
"""Open event stream."""
_LOGGING.debug('Stream Thread Started: %s, %s', self.name, self.cam_id)
start_event = False
parse_string = ""
fail_count = 0
url = '%s/ISAPI/Event/notification/alertStream' % self.root_url
# pylint: disable=too-many-nested-blocks
while True:
try:
stream = self.hik_request.get(url, stream=True,
timeout=(CONNECT_TIMEOUT,
READ_TIMEOUT))
if stream.status_code == requests.codes.not_found:
# Try alternate URL for stream
url = '%s/Event/notification/alertStream' % self.root_url
stream = self.hik_request.get(url, stream=True)
if stream.status_code != requests.codes.ok:
                    raise ValueError('Connection unsuccessful.')
else:
_LOGGING.debug('%s Connection Successful.', self.name)
fail_count = 0
self.watchdog.start()
for line in stream.iter_lines():
# _LOGGING.debug('Processing line from %s', self.name)
# filter out keep-alive new lines
if line:
str_line = line.decode("utf-8", "ignore")
                            # New events start with --boundary
if str_line.find('<EventNotificationAlert') != -1:
# Start of event message
start_event = True
parse_string += str_line
elif str_line.find('</EventNotificationAlert>') != -1:
                                # Message end found
parse_string += str_line
start_event = False
if parse_string:
tree = ET.fromstring(parse_string)
self.process_stream(tree)
self.update_stale()
parse_string = ""
else:
if start_event:
parse_string += str_line
if kill_event.is_set():
                            # We were asked to stop the thread so let's do so.
break
elif reset_event.is_set():
# We need to reset the connection.
raise ValueError('Watchdog failed.')
if kill_event.is_set():
                        # We were asked to stop the thread so let's do so.
_LOGGING.debug('Stopping event stream thread for %s',
self.name)
self.watchdog.stop()
self.hik_request.close()
return
elif reset_event.is_set():
# We need to reset the connection.
raise ValueError('Watchdog failed.')
except (ValueError,
requests.exceptions.ConnectionError,
requests.exceptions.ChunkedEncodingError) as err:
fail_count += 1
reset_event.clear()
_LOGGING.warning('%s Connection Failed (count=%d). Waiting %ss. Err: %s',
self.name, fail_count, (fail_count * 5) + 5, err)
parse_string = ""
self.watchdog.stop()
self.hik_request.close()
time.sleep(5)
self.update_stale()
time.sleep(fail_count * 5)
continue
|
python
|
def alert_stream(self, reset_event, kill_event):
"""Open event stream."""
_LOGGING.debug('Stream Thread Started: %s, %s', self.name, self.cam_id)
start_event = False
parse_string = ""
fail_count = 0
url = '%s/ISAPI/Event/notification/alertStream' % self.root_url
# pylint: disable=too-many-nested-blocks
while True:
try:
stream = self.hik_request.get(url, stream=True,
timeout=(CONNECT_TIMEOUT,
READ_TIMEOUT))
if stream.status_code == requests.codes.not_found:
# Try alternate URL for stream
url = '%s/Event/notification/alertStream' % self.root_url
stream = self.hik_request.get(url, stream=True)
if stream.status_code != requests.codes.ok:
                    raise ValueError('Connection unsuccessful.')
else:
_LOGGING.debug('%s Connection Successful.', self.name)
fail_count = 0
self.watchdog.start()
for line in stream.iter_lines():
# _LOGGING.debug('Processing line from %s', self.name)
# filter out keep-alive new lines
if line:
str_line = line.decode("utf-8", "ignore")
                            # New events start with --boundary
if str_line.find('<EventNotificationAlert') != -1:
# Start of event message
start_event = True
parse_string += str_line
elif str_line.find('</EventNotificationAlert>') != -1:
                                # Message end found
parse_string += str_line
start_event = False
if parse_string:
tree = ET.fromstring(parse_string)
self.process_stream(tree)
self.update_stale()
parse_string = ""
else:
if start_event:
parse_string += str_line
if kill_event.is_set():
                            # We were asked to stop the thread so let's do so.
break
elif reset_event.is_set():
# We need to reset the connection.
raise ValueError('Watchdog failed.')
if kill_event.is_set():
                        # We were asked to stop the thread so let's do so.
_LOGGING.debug('Stopping event stream thread for %s',
self.name)
self.watchdog.stop()
self.hik_request.close()
return
elif reset_event.is_set():
# We need to reset the connection.
raise ValueError('Watchdog failed.')
except (ValueError,
requests.exceptions.ConnectionError,
requests.exceptions.ChunkedEncodingError) as err:
fail_count += 1
reset_event.clear()
_LOGGING.warning('%s Connection Failed (count=%d). Waiting %ss. Err: %s',
self.name, fail_count, (fail_count * 5) + 5, err)
parse_string = ""
self.watchdog.stop()
self.hik_request.close()
time.sleep(5)
self.update_stale()
time.sleep(fail_count * 5)
continue
|
[
"def",
"alert_stream",
"(",
"self",
",",
"reset_event",
",",
"kill_event",
")",
":",
"_LOGGING",
".",
"debug",
"(",
"'Stream Thread Started: %s, %s'",
",",
"self",
".",
"name",
",",
"self",
".",
"cam_id",
")",
"start_event",
"=",
"False",
"parse_string",
"=",
"\"\"",
"fail_count",
"=",
"0",
"url",
"=",
"'%s/ISAPI/Event/notification/alertStream'",
"%",
"self",
".",
"root_url",
"# pylint: disable=too-many-nested-blocks",
"while",
"True",
":",
"try",
":",
"stream",
"=",
"self",
".",
"hik_request",
".",
"get",
"(",
"url",
",",
"stream",
"=",
"True",
",",
"timeout",
"=",
"(",
"CONNECT_TIMEOUT",
",",
"READ_TIMEOUT",
")",
")",
"if",
"stream",
".",
"status_code",
"==",
"requests",
".",
"codes",
".",
"not_found",
":",
"# Try alternate URL for stream",
"url",
"=",
"'%s/Event/notification/alertStream'",
"%",
"self",
".",
"root_url",
"stream",
"=",
"self",
".",
"hik_request",
".",
"get",
"(",
"url",
",",
"stream",
"=",
"True",
")",
"if",
"stream",
".",
"status_code",
"!=",
"requests",
".",
"codes",
".",
"ok",
":",
"raise",
"ValueError",
"(",
"'Connection unsucessful.'",
")",
"else",
":",
"_LOGGING",
".",
"debug",
"(",
"'%s Connection Successful.'",
",",
"self",
".",
"name",
")",
"fail_count",
"=",
"0",
"self",
".",
"watchdog",
".",
"start",
"(",
")",
"for",
"line",
"in",
"stream",
".",
"iter_lines",
"(",
")",
":",
"# _LOGGING.debug('Processing line from %s', self.name)",
"# filter out keep-alive new lines",
"if",
"line",
":",
"str_line",
"=",
"line",
".",
"decode",
"(",
"\"utf-8\"",
",",
"\"ignore\"",
")",
"# New events start with --boundry",
"if",
"str_line",
".",
"find",
"(",
"'<EventNotificationAlert'",
")",
"!=",
"-",
"1",
":",
"# Start of event message",
"start_event",
"=",
"True",
"parse_string",
"+=",
"str_line",
"elif",
"str_line",
".",
"find",
"(",
"'</EventNotificationAlert>'",
")",
"!=",
"-",
"1",
":",
"# Message end found found",
"parse_string",
"+=",
"str_line",
"start_event",
"=",
"False",
"if",
"parse_string",
":",
"tree",
"=",
"ET",
".",
"fromstring",
"(",
"parse_string",
")",
"self",
".",
"process_stream",
"(",
"tree",
")",
"self",
".",
"update_stale",
"(",
")",
"parse_string",
"=",
"\"\"",
"else",
":",
"if",
"start_event",
":",
"parse_string",
"+=",
"str_line",
"if",
"kill_event",
".",
"is_set",
"(",
")",
":",
"# We were asked to stop the thread so lets do so.",
"break",
"elif",
"reset_event",
".",
"is_set",
"(",
")",
":",
"# We need to reset the connection.",
"raise",
"ValueError",
"(",
"'Watchdog failed.'",
")",
"if",
"kill_event",
".",
"is_set",
"(",
")",
":",
"# We were asked to stop the thread so lets do so.",
"_LOGGING",
".",
"debug",
"(",
"'Stopping event stream thread for %s'",
",",
"self",
".",
"name",
")",
"self",
".",
"watchdog",
".",
"stop",
"(",
")",
"self",
".",
"hik_request",
".",
"close",
"(",
")",
"return",
"elif",
"reset_event",
".",
"is_set",
"(",
")",
":",
"# We need to reset the connection.",
"raise",
"ValueError",
"(",
"'Watchdog failed.'",
")",
"except",
"(",
"ValueError",
",",
"requests",
".",
"exceptions",
".",
"ConnectionError",
",",
"requests",
".",
"exceptions",
".",
"ChunkedEncodingError",
")",
"as",
"err",
":",
"fail_count",
"+=",
"1",
"reset_event",
".",
"clear",
"(",
")",
"_LOGGING",
".",
"warning",
"(",
"'%s Connection Failed (count=%d). Waiting %ss. Err: %s'",
",",
"self",
".",
"name",
",",
"fail_count",
",",
"(",
"fail_count",
"*",
"5",
")",
"+",
"5",
",",
"err",
")",
"parse_string",
"=",
"\"\"",
"self",
".",
"watchdog",
".",
"stop",
"(",
")",
"self",
".",
"hik_request",
".",
"close",
"(",
")",
"time",
".",
"sleep",
"(",
"5",
")",
"self",
".",
"update_stale",
"(",
")",
"time",
".",
"sleep",
"(",
"fail_count",
"*",
"5",
")",
"continue"
] |
Open event stream.
|
[
"Open",
"event",
"stream",
"."
] |
1e7afca926e2b045257a43cbf8b1236a435493c2
|
https://github.com/mezz64/pyHik/blob/1e7afca926e2b045257a43cbf8b1236a435493c2/pyhik/hikvision.py#L449-L531
|
train
|
mezz64/pyHik
|
pyhik/hikvision.py
|
HikCamera.process_stream
|
def process_stream(self, tree):
"""Process incoming event stream packets."""
try:
etype = SENSOR_MAP[tree.find(
self.element_query('eventType')).text.lower()]
estate = tree.find(
self.element_query('eventState')).text
echid = tree.find(
self.element_query('channelID'))
if echid is None:
# Some devices use a different key
echid = tree.find(
self.element_query('dynChannelID'))
echid = int(echid.text)
ecount = tree.find(
self.element_query('activePostCount')).text
except (AttributeError, KeyError, IndexError) as err:
_LOGGING.error('Problem finding attribute: %s', err)
return
# Take care of keep-alive
if len(etype) > 0 and etype == 'Video Loss':
self.watchdog.pet()
# Track state if it's in the event list.
if len(etype) > 0:
state = self.fetch_attributes(etype, echid)
if state:
# Determine if state has changed
# If so, publish, otherwise do nothing
estate = (estate == 'active')
old_state = state[0]
attr = [estate, echid, int(ecount),
datetime.datetime.now()]
self.update_attributes(etype, echid, attr)
if estate != old_state:
self.publish_changes(etype, echid)
self.watchdog.pet()
|
python
|
def process_stream(self, tree):
"""Process incoming event stream packets."""
try:
etype = SENSOR_MAP[tree.find(
self.element_query('eventType')).text.lower()]
estate = tree.find(
self.element_query('eventState')).text
echid = tree.find(
self.element_query('channelID'))
if echid is None:
# Some devices use a different key
echid = tree.find(
self.element_query('dynChannelID'))
echid = int(echid.text)
ecount = tree.find(
self.element_query('activePostCount')).text
except (AttributeError, KeyError, IndexError) as err:
_LOGGING.error('Problem finding attribute: %s', err)
return
# Take care of keep-alive
if len(etype) > 0 and etype == 'Video Loss':
self.watchdog.pet()
# Track state if it's in the event list.
if len(etype) > 0:
state = self.fetch_attributes(etype, echid)
if state:
# Determine if state has changed
# If so, publish, otherwise do nothing
estate = (estate == 'active')
old_state = state[0]
attr = [estate, echid, int(ecount),
datetime.datetime.now()]
self.update_attributes(etype, echid, attr)
if estate != old_state:
self.publish_changes(etype, echid)
self.watchdog.pet()
|
[
"def",
"process_stream",
"(",
"self",
",",
"tree",
")",
":",
"try",
":",
"etype",
"=",
"SENSOR_MAP",
"[",
"tree",
".",
"find",
"(",
"self",
".",
"element_query",
"(",
"'eventType'",
")",
")",
".",
"text",
".",
"lower",
"(",
")",
"]",
"estate",
"=",
"tree",
".",
"find",
"(",
"self",
".",
"element_query",
"(",
"'eventState'",
")",
")",
".",
"text",
"echid",
"=",
"tree",
".",
"find",
"(",
"self",
".",
"element_query",
"(",
"'channelID'",
")",
")",
"if",
"echid",
"is",
"None",
":",
"# Some devices use a different key",
"echid",
"=",
"tree",
".",
"find",
"(",
"self",
".",
"element_query",
"(",
"'dynChannelID'",
")",
")",
"echid",
"=",
"int",
"(",
"echid",
".",
"text",
")",
"ecount",
"=",
"tree",
".",
"find",
"(",
"self",
".",
"element_query",
"(",
"'activePostCount'",
")",
")",
".",
"text",
"except",
"(",
"AttributeError",
",",
"KeyError",
",",
"IndexError",
")",
"as",
"err",
":",
"_LOGGING",
".",
"error",
"(",
"'Problem finding attribute: %s'",
",",
"err",
")",
"return",
"# Take care of keep-alive",
"if",
"len",
"(",
"etype",
")",
">",
"0",
"and",
"etype",
"==",
"'Video Loss'",
":",
"self",
".",
"watchdog",
".",
"pet",
"(",
")",
"# Track state if it's in the event list.",
"if",
"len",
"(",
"etype",
")",
">",
"0",
":",
"state",
"=",
"self",
".",
"fetch_attributes",
"(",
"etype",
",",
"echid",
")",
"if",
"state",
":",
"# Determine if state has changed",
"# If so, publish, otherwise do nothing",
"estate",
"=",
"(",
"estate",
"==",
"'active'",
")",
"old_state",
"=",
"state",
"[",
"0",
"]",
"attr",
"=",
"[",
"estate",
",",
"echid",
",",
"int",
"(",
"ecount",
")",
",",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"]",
"self",
".",
"update_attributes",
"(",
"etype",
",",
"echid",
",",
"attr",
")",
"if",
"estate",
"!=",
"old_state",
":",
"self",
".",
"publish_changes",
"(",
"etype",
",",
"echid",
")",
"self",
".",
"watchdog",
".",
"pet",
"(",
")"
] |
Process incoming event stream packets.
|
[
"Process",
"incoming",
"event",
"stream",
"packets",
"."
] |
1e7afca926e2b045257a43cbf8b1236a435493c2
|
https://github.com/mezz64/pyHik/blob/1e7afca926e2b045257a43cbf8b1236a435493c2/pyhik/hikvision.py#L533-L571
|
train
|
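process_stream above treats 'Video Loss' events as keep-alives and only publishes when a state actually flips. A condensed sketch of that decision (the [active, channel, count, last_update] state layout mirrors the source; the helper below is hypothetical):

    def handle_event(etype, new_active, state):
        # state is the stored attribute list for this event/channel
        old_active = state[0]
        state[0] = new_active
        return new_active != old_active  # publish only on change

    state = [False, 1, 0, None]
    print(handle_event('Motion', True, state))   # True  -> publish
    print(handle_event('Motion', True, state))   # False -> no-op
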
mezz64/pyHik
|
pyhik/hikvision.py
|
HikCamera.update_stale
|
def update_stale(self):
"""Update stale active statuses"""
# Some events don't post an inactive XML, only active.
# If we don't get an active update for 5 seconds we can
# assume the event is no longer active and update accordingly.
for etype, echannels in self.event_states.items():
for eprop in echannels:
if eprop[3] is not None:
sec_elap = ((datetime.datetime.now()-eprop[3])
.total_seconds())
# print('Seconds since last update: {}'.format(sec_elap))
if sec_elap > 5 and eprop[0] is True:
_LOGGING.debug('Updating stale event %s on CH(%s)',
etype, eprop[1])
attr = [False, eprop[1], eprop[2],
datetime.datetime.now()]
self.update_attributes(etype, eprop[1], attr)
self.publish_changes(etype, eprop[1])
|
python
|
def update_stale(self):
"""Update stale active statuses"""
# Some events don't post an inactive XML, only active.
# If we don't get an active update for 5 seconds we can
# assume the event is no longer active and update accordingly.
for etype, echannels in self.event_states.items():
for eprop in echannels:
if eprop[3] is not None:
sec_elap = ((datetime.datetime.now()-eprop[3])
.total_seconds())
# print('Seconds since last update: {}'.format(sec_elap))
if sec_elap > 5 and eprop[0] is True:
_LOGGING.debug('Updating stale event %s on CH(%s)',
etype, eprop[1])
attr = [False, eprop[1], eprop[2],
datetime.datetime.now()]
self.update_attributes(etype, eprop[1], attr)
self.publish_changes(etype, eprop[1])
|
[
"def",
"update_stale",
"(",
"self",
")",
":",
"# Some events don't post an inactive XML, only active.",
"# If we don't get an active update for 5 seconds we can",
"# assume the event is no longer active and update accordingly.",
"for",
"etype",
",",
"echannels",
"in",
"self",
".",
"event_states",
".",
"items",
"(",
")",
":",
"for",
"eprop",
"in",
"echannels",
":",
"if",
"eprop",
"[",
"3",
"]",
"is",
"not",
"None",
":",
"sec_elap",
"=",
"(",
"(",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"-",
"eprop",
"[",
"3",
"]",
")",
".",
"total_seconds",
"(",
")",
")",
"# print('Seconds since last update: {}'.format(sec_elap))",
"if",
"sec_elap",
">",
"5",
"and",
"eprop",
"[",
"0",
"]",
"is",
"True",
":",
"_LOGGING",
".",
"debug",
"(",
"'Updating stale event %s on CH(%s)'",
",",
"etype",
",",
"eprop",
"[",
"1",
"]",
")",
"attr",
"=",
"[",
"False",
",",
"eprop",
"[",
"1",
"]",
",",
"eprop",
"[",
"2",
"]",
",",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"]",
"self",
".",
"update_attributes",
"(",
"etype",
",",
"eprop",
"[",
"1",
"]",
",",
"attr",
")",
"self",
".",
"publish_changes",
"(",
"etype",
",",
"eprop",
"[",
"1",
"]",
")"
] |
Update stale active statuses
|
[
"Update",
"stale",
"active",
"statuses"
] |
1e7afca926e2b045257a43cbf8b1236a435493c2
|
https://github.com/mezz64/pyHik/blob/1e7afca926e2b045257a43cbf8b1236a435493c2/pyhik/hikvision.py#L573-L590
|
train
|
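The staleness pass above flips an event back to inactive once its last active update is more than five seconds old. The core test, extracted as a stand-alone sketch (the is_stale helper is hypothetical):

    import datetime

    def is_stale(last_update, active, timeout=5):
        # Mirrors the source: only active events with an update timestamp
        # older than `timeout` seconds are considered stale.
        if last_update is None or not active:
            return False
        elapsed = (datetime.datetime.now() - last_update).total_seconds()
        return elapsed > timeout

    ts = datetime.datetime.now() - datetime.timedelta(seconds=6)
    print(is_stale(ts, True))   # True -> would be reset to inactive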