Dataset schema: repo (string, 7-55 chars) | path (string, 4-127 chars) | func_name (string, 1-88 chars) | original_string (string, 75-19.8k chars) | language (1 class: python) | code (string, 75-19.8k chars) | code_tokens (list) | docstring (string, 3-17.3k chars) | docstring_tokens (list) | sha (string, 40 chars) | url (string, 87-242 chars) | partition (1 class: train)

Each record below lists repo | path | func_name, then the function source, then sha | url | partition.
esafak/mca | src/mca.py | MCA.cos_r
def cos_r(self, N=None):  # percent=0.9
    """Return the squared cosines for each row."""
    if not hasattr(self, 'F') or self.F.shape[1] < self.rank:
        self.fs_r(N=self.rank)  # generate F
    self.dr = norm(self.F, axis=1)**2
    # cheaper than diag(self.F.dot(self.F.T))?
    return apply_along_axis(lambda _: _/self.dr, 0, self.F[:, :N]**2)
"""Return the squared cosines for each row."""
if not hasattr(self, 'F') or self.F.shape[1] < self.rank:
self.fs_r(N=self.rank) # generate F
self.dr = norm(self.F, axis=1)**2
# cheaper than diag(self.F.dot(self.F.T))?
return apply_along_axis(lambda _: _/self.dr, 0, self.F[:, :N]**2) | [
"def",
"cos_r",
"(",
"self",
",",
"N",
"=",
"None",
")",
":",
"# percent=0.9",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'F'",
")",
"or",
"self",
".",
"F",
".",
"shape",
"[",
"1",
"]",
"<",
"self",
".",
"rank",
":",
"self",
".",
"fs_r",
"(",
"N",
"=",
"self",
".",
"rank",
")",
"# generate F",
"self",
".",
"dr",
"=",
"norm",
"(",
"self",
".",
"F",
",",
"axis",
"=",
"1",
")",
"**",
"2",
"# cheaper than diag(self.F.dot(self.F.T))?",
"return",
"apply_along_axis",
"(",
"lambda",
"_",
":",
"_",
"/",
"self",
".",
"dr",
",",
"0",
",",
"self",
".",
"F",
"[",
":",
",",
":",
"N",
"]",
"**",
"2",
")"
]
| Return the squared cosines for each row. | [
"Return",
"the",
"squared",
"cosines",
"for",
"each",
"row",
"."
]
| f2b79ecbf37629902ccdbad2e1a556977c53d370 | https://github.com/esafak/mca/blob/f2b79ecbf37629902ccdbad2e1a556977c53d370/src/mca.py#L149-L157 | train |
esafak/mca | src/mca.py | MCA.cos_c
def cos_c(self, N=None):  # percent=0.9,
    """Return the squared cosines for each column."""
    if not hasattr(self, 'G') or self.G.shape[1] < self.rank:
        self.fs_c(N=self.rank)  # generate
    self.dc = norm(self.G, axis=1)**2
    # cheaper than diag(self.G.dot(self.G.T))?
    return apply_along_axis(lambda _: _/self.dc, 0, self.G[:, :N]**2)
"""Return the squared cosines for each column."""
if not hasattr(self, 'G') or self.G.shape[1] < self.rank:
self.fs_c(N=self.rank) # generate
self.dc = norm(self.G, axis=1)**2
# cheaper than diag(self.G.dot(self.G.T))?
return apply_along_axis(lambda _: _/self.dc, 0, self.G[:, :N]**2) | [
"def",
"cos_c",
"(",
"self",
",",
"N",
"=",
"None",
")",
":",
"# percent=0.9,",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'G'",
")",
"or",
"self",
".",
"G",
".",
"shape",
"[",
"1",
"]",
"<",
"self",
".",
"rank",
":",
"self",
".",
"fs_c",
"(",
"N",
"=",
"self",
".",
"rank",
")",
"# generate",
"self",
".",
"dc",
"=",
"norm",
"(",
"self",
".",
"G",
",",
"axis",
"=",
"1",
")",
"**",
"2",
"# cheaper than diag(self.G.dot(self.G.T))?",
"return",
"apply_along_axis",
"(",
"lambda",
"_",
":",
"_",
"/",
"self",
".",
"dc",
",",
"0",
",",
"self",
".",
"G",
"[",
":",
",",
":",
"N",
"]",
"**",
"2",
")"
]
| Return the squared cosines for each column. | [
"Return",
"the",
"squared",
"cosines",
"for",
"each",
"column",
"."
]
| f2b79ecbf37629902ccdbad2e1a556977c53d370 | https://github.com/esafak/mca/blob/f2b79ecbf37629902ccdbad2e1a556977c53d370/src/mca.py#L159-L167 | train |
esafak/mca | src/mca.py | MCA.cont_r
def cont_r(self, percent=0.9, N=None):
    """Return the contribution of each row."""
    if not hasattr(self, 'F'):
        self.fs_r(N=self.rank)  # generate F
    return apply_along_axis(lambda _: _/self.L[:N], 1,
                            apply_along_axis(lambda _: _*self.r, 0, self.F[:, :N]**2))
"""Return the contribution of each row."""
if not hasattr(self, 'F'):
self.fs_r(N=self.rank) # generate F
return apply_along_axis(lambda _: _/self.L[:N], 1,
apply_along_axis(lambda _: _*self.r, 0, self.F[:, :N]**2)) | [
"def",
"cont_r",
"(",
"self",
",",
"percent",
"=",
"0.9",
",",
"N",
"=",
"None",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'F'",
")",
":",
"self",
".",
"fs_r",
"(",
"N",
"=",
"self",
".",
"rank",
")",
"# generate F",
"return",
"apply_along_axis",
"(",
"lambda",
"_",
":",
"_",
"/",
"self",
".",
"L",
"[",
":",
"N",
"]",
",",
"1",
",",
"apply_along_axis",
"(",
"lambda",
"_",
":",
"_",
"*",
"self",
".",
"r",
",",
"0",
",",
"self",
".",
"F",
"[",
":",
",",
":",
"N",
"]",
"**",
"2",
")",
")"
]
| Return the contribution of each row. | [
"Return",
"the",
"contribution",
"of",
"each",
"row",
"."
]
| f2b79ecbf37629902ccdbad2e1a556977c53d370 | https://github.com/esafak/mca/blob/f2b79ecbf37629902ccdbad2e1a556977c53d370/src/mca.py#L169-L175 | train |
esafak/mca | src/mca.py | MCA.cont_c
def cont_c(self, percent=0.9, N=None):  # bug? check axis number 0 vs 1 here
    """Return the contribution of each column."""
    if not hasattr(self, 'G'):
        self.fs_c(N=self.rank)  # generate G
    return apply_along_axis(lambda _: _/self.L[:N], 1,
                            apply_along_axis(lambda _: _*self.c, 0, self.G[:, :N]**2))
"""Return the contribution of each column."""
if not hasattr(self, 'G'):
self.fs_c(N=self.rank) # generate G
return apply_along_axis(lambda _: _/self.L[:N], 1,
apply_along_axis(lambda _: _*self.c, 0, self.G[:, :N]**2)) | [
"def",
"cont_c",
"(",
"self",
",",
"percent",
"=",
"0.9",
",",
"N",
"=",
"None",
")",
":",
"# bug? check axis number 0 vs 1 here",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'G'",
")",
":",
"self",
".",
"fs_c",
"(",
"N",
"=",
"self",
".",
"rank",
")",
"# generate G",
"return",
"apply_along_axis",
"(",
"lambda",
"_",
":",
"_",
"/",
"self",
".",
"L",
"[",
":",
"N",
"]",
",",
"1",
",",
"apply_along_axis",
"(",
"lambda",
"_",
":",
"_",
"*",
"self",
".",
"c",
",",
"0",
",",
"self",
".",
"G",
"[",
":",
",",
":",
"N",
"]",
"**",
"2",
")",
")"
]
| Return the contribution of each column. | [
"Return",
"the",
"contribution",
"of",
"each",
"column",
"."
]
| f2b79ecbf37629902ccdbad2e1a556977c53d370 | https://github.com/esafak/mca/blob/f2b79ecbf37629902ccdbad2e1a556977c53d370/src/mca.py#L177-L183 | train |
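Together, `cos_r`/`cos_c` (squared cosines: how well each row or column is represented by the retained axes) and `cont_r`/`cont_c` (how much each contributes to those axes) are the standard MCA diagnostics. A minimal usage sketch, assuming the `mca` package is installed; the DataFrame and its column names are hypothetical:

```python
import pandas as pd
import mca

# Hypothetical categorical data; get_dummies builds the indicator matrix
# that mca.MCA expects.
df = pd.DataFrame({'color': ['red', 'blue', 'red', 'green'],
                   'size':  ['S', 'M', 'M', 'L']})
indicator = pd.get_dummies(df).astype(float)

m = mca.MCA(indicator)
row_cos2 = m.cos_r(N=2)      # squared cosines of rows on the first 2 axes
col_contrib = m.cont_c(N=2)  # column contributions to the first 2 axes
print(row_cos2.shape, col_contrib.shape)
```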
esafak/mca | src/mca.py | MCA.fs_r_sup
def fs_r_sup(self, DF, N=None):
    """Find the supplementary row factor scores.

    N: The number of singular vectors to retain.
    """
    if not hasattr(self, 'G'):
        self.fs_c(N=self.rank)  # generate G
    if N and (not isinstance(N, int) or N <= 0):
        raise ValueError("N should be a positive integer.")
    s = -sqrt(self.E) if self.cor else self.s
    N = min(N, self.rank) if N else self.rank
    S_inv = diagsvd(-1/s[:N], len(self.G.T), N)
    # S = diagsvd(s[:N], len(self.tau), N)
    return _mul(DF.div(DF.sum(axis=1), axis=0), self.G, S_inv)[:, :N]
"""Find the supplementary row factor scores.
ncols: The number of singular vectors to retain.
If both are passed, cols is given preference.
"""
if not hasattr(self, 'G'):
self.fs_c(N=self.rank) # generate G
if N and (not isinstance(N, int) or N <= 0):
raise ValueError("ncols should be a positive integer.")
s = -sqrt(self.E) if self.cor else self.s
N = min(N, self.rank) if N else self.rank
S_inv = diagsvd(-1/s[:N], len(self.G.T), N)
# S = diagsvd(s[:N], len(self.tau), N)
return _mul(DF.div(DF.sum(axis=1), axis=0), self.G, S_inv)[:, :N] | [
"def",
"fs_r_sup",
"(",
"self",
",",
"DF",
",",
"N",
"=",
"None",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'G'",
")",
":",
"self",
".",
"fs_c",
"(",
"N",
"=",
"self",
".",
"rank",
")",
"# generate G",
"if",
"N",
"and",
"(",
"not",
"isinstance",
"(",
"N",
",",
"int",
")",
"or",
"N",
"<=",
"0",
")",
":",
"raise",
"ValueError",
"(",
"\"ncols should be a positive integer.\"",
")",
"s",
"=",
"-",
"sqrt",
"(",
"self",
".",
"E",
")",
"if",
"self",
".",
"cor",
"else",
"self",
".",
"s",
"N",
"=",
"min",
"(",
"N",
",",
"self",
".",
"rank",
")",
"if",
"N",
"else",
"self",
".",
"rank",
"S_inv",
"=",
"diagsvd",
"(",
"-",
"1",
"/",
"s",
"[",
":",
"N",
"]",
",",
"len",
"(",
"self",
".",
"G",
".",
"T",
")",
",",
"N",
")",
"# S = diagsvd(s[:N], len(self.tau), N)",
"return",
"_mul",
"(",
"DF",
".",
"div",
"(",
"DF",
".",
"sum",
"(",
"axis",
"=",
"1",
")",
",",
"axis",
"=",
"0",
")",
",",
"self",
".",
"G",
",",
"S_inv",
")",
"[",
":",
",",
":",
"N",
"]"
]
| Find the supplementary row factor scores.
ncols: The number of singular vectors to retain.
If both are passed, cols is given preference. | [
"Find",
"the",
"supplementary",
"row",
"factor",
"scores",
"."
]
| f2b79ecbf37629902ccdbad2e1a556977c53d370 | https://github.com/esafak/mca/blob/f2b79ecbf37629902ccdbad2e1a556977c53d370/src/mca.py#L199-L214 | train |
esafak/mca | src/mca.py | MCA.fs_c_sup
def fs_c_sup(self, DF, N=None):
    """Find the supplementary column factor scores.

    N: The number of singular vectors to retain.
    """
    if not hasattr(self, 'F'):
        self.fs_r(N=self.rank)  # generate F
    if N and (not isinstance(N, int) or N <= 0):
        raise ValueError("N should be a positive integer.")
    s = -sqrt(self.E) if self.cor else self.s
    N = min(N, self.rank) if N else self.rank
    S_inv = diagsvd(-1/s[:N], len(self.F.T), N)
    # S = diagsvd(s[:N], len(self.tau), N)
    return _mul((DF/DF.sum()).T, self.F, S_inv)[:, :N]
"""Find the supplementary column factor scores.
ncols: The number of singular vectors to retain.
If both are passed, cols is given preference.
"""
if not hasattr(self, 'F'):
self.fs_r(N=self.rank) # generate F
if N and (not isinstance(N, int) or N <= 0):
raise ValueError("ncols should be a positive integer.")
s = -sqrt(self.E) if self.cor else self.s
N = min(N, self.rank) if N else self.rank
S_inv = diagsvd(-1/s[:N], len(self.F.T), N)
# S = diagsvd(s[:N], len(self.tau), N)
return _mul((DF/DF.sum()).T, self.F, S_inv)[:, :N] | [
"def",
"fs_c_sup",
"(",
"self",
",",
"DF",
",",
"N",
"=",
"None",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'F'",
")",
":",
"self",
".",
"fs_r",
"(",
"N",
"=",
"self",
".",
"rank",
")",
"# generate F",
"if",
"N",
"and",
"(",
"not",
"isinstance",
"(",
"N",
",",
"int",
")",
"or",
"N",
"<=",
"0",
")",
":",
"raise",
"ValueError",
"(",
"\"ncols should be a positive integer.\"",
")",
"s",
"=",
"-",
"sqrt",
"(",
"self",
".",
"E",
")",
"if",
"self",
".",
"cor",
"else",
"self",
".",
"s",
"N",
"=",
"min",
"(",
"N",
",",
"self",
".",
"rank",
")",
"if",
"N",
"else",
"self",
".",
"rank",
"S_inv",
"=",
"diagsvd",
"(",
"-",
"1",
"/",
"s",
"[",
":",
"N",
"]",
",",
"len",
"(",
"self",
".",
"F",
".",
"T",
")",
",",
"N",
")",
"# S = diagsvd(s[:N], len(self.tau), N)",
"return",
"_mul",
"(",
"(",
"DF",
"/",
"DF",
".",
"sum",
"(",
")",
")",
".",
"T",
",",
"self",
".",
"F",
",",
"S_inv",
")",
"[",
":",
",",
":",
"N",
"]"
]
| Find the supplementary column factor scores.
ncols: The number of singular vectors to retain.
If both are passed, cols is given preference. | [
"Find",
"the",
"supplementary",
"column",
"factor",
"scores",
"."
]
| f2b79ecbf37629902ccdbad2e1a556977c53d370 | https://github.com/esafak/mca/blob/f2b79ecbf37629902ccdbad2e1a556977c53d370/src/mca.py#L216-L231 | train |
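`fs_r_sup`/`fs_c_sup` project rows or columns that were not part of the fit onto the already-computed axes. A hedged continuation of the sketch above; `new_rows` is hypothetical and must be aligned to the training indicator columns:

```python
new_rows = pd.get_dummies(pd.DataFrame({'color': ['blue', 'green'],
                                        'size':  ['S', 'L']})).astype(float)
# Align to the columns the model was fitted on.
new_rows = new_rows.reindex(columns=indicator.columns, fill_value=0.0)
sup_scores = m.fs_r_sup(new_rows, N=2)  # supplementary row factor scores
print(sup_scores.shape)                 # -> (2, 2)
```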
primetang/qrtools | src/qrtools.py | QR.data_recognise
def data_recognise(self, data=None):
    """Return a unicode string indicating the data type of the data parameter."""
    data = data or self.data
    data_lower = data.lower()
    if data_lower.startswith(u"http://") or data_lower.startswith(u"https://"):
        return u'url'
    elif data_lower.startswith(u"mailto:"):
        return u'email'
    elif data_lower.startswith(u"matmsg:to:"):
        return u'emailmessage'
    elif data_lower.startswith(u"tel:"):
        return u'telephone'
    elif data_lower.startswith(u"smsto:"):
        return u'sms'
    elif data_lower.startswith(u"mmsto:"):
        return u'mms'
    elif data_lower.startswith(u"geo:"):
        return u'geo'
    elif data_lower.startswith(u"mebkm:title:"):
        return u'bookmark'
    elif data_lower.startswith(u"mecard:"):
        return u'phonebook'
    else:
        return u'text'
"""Returns an unicode string indicating the data type of the data paramater"""
data = data or self.data
data_lower = data.lower()
if data_lower.startswith(u"http://") or data_lower.startswith(u"https://"):
return u'url'
elif data_lower.startswith(u"mailto:"):
return u'email'
elif data_lower.startswith(u"matmsg:to:"):
return u'emailmessage'
elif data_lower.startswith(u"tel:"):
return u'telephone'
elif data_lower.startswith(u"smsto:"):
return u'sms'
elif data_lower.startswith(u"mmsto:"):
return u'mms'
elif data_lower.startswith(u"geo:"):
return u'geo'
elif data_lower.startswith(u"mebkm:title:"):
return u'bookmark'
elif data_lower.startswith(u"mecard:"):
return u'phonebook'
else:
return u'text' | [
"def",
"data_recognise",
"(",
"self",
",",
"data",
"=",
"None",
")",
":",
"data",
"=",
"data",
"or",
"self",
".",
"data",
"data_lower",
"=",
"data",
".",
"lower",
"(",
")",
"if",
"data_lower",
".",
"startswith",
"(",
"u\"http://\"",
")",
"or",
"data_lower",
".",
"startswith",
"(",
"u\"https://\"",
")",
":",
"return",
"u'url'",
"elif",
"data_lower",
".",
"startswith",
"(",
"u\"mailto:\"",
")",
":",
"return",
"u'email'",
"elif",
"data_lower",
".",
"startswith",
"(",
"u\"matmsg:to:\"",
")",
":",
"return",
"u'emailmessage'",
"elif",
"data_lower",
".",
"startswith",
"(",
"u\"tel:\"",
")",
":",
"return",
"u'telephone'",
"elif",
"data_lower",
".",
"startswith",
"(",
"u\"smsto:\"",
")",
":",
"return",
"u'sms'",
"elif",
"data_lower",
".",
"startswith",
"(",
"u\"mmsto:\"",
")",
":",
"return",
"u'mms'",
"elif",
"data_lower",
".",
"startswith",
"(",
"u\"geo:\"",
")",
":",
"return",
"u'geo'",
"elif",
"data_lower",
".",
"startswith",
"(",
"u\"mebkm:title:\"",
")",
":",
"return",
"u'bookmark'",
"elif",
"data_lower",
".",
"startswith",
"(",
"u\"mecard:\"",
")",
":",
"return",
"u'phonebook'",
"else",
":",
"return",
"u'text'"
]
| Returns an unicode string indicating the data type of the data paramater | [
"Returns",
"an",
"unicode",
"string",
"indicating",
"the",
"data",
"type",
"of",
"the",
"data",
"paramater"
]
| 3263c6136f54f0499b9945bfad593537d436c7a1 | https://github.com/primetang/qrtools/blob/3263c6136f54f0499b9945bfad593537d436c7a1/src/qrtools.py#L84-L107 | train |
primetang/qrtools | src/qrtools.py | QR.data_to_string
def data_to_string(self):
    """Return a UTF-8 string with the QR Code's data."""
    # FIXME: if we don't add the BOM_UTF8 char, QtQR doesn't decode
    # correctly; but if we add it, mobile apps don't. Apparently it's
    # a zbar bug.
    if self.data_type == 'text':
        return BOM_UTF8 + self.__class__.data_encode[self.data_type](self.data).encode('utf-8')
    else:
        return self.__class__.data_encode[self.data_type](self.data).encode('utf-8')
"""Returns a UTF8 string with the QR Code's data"""
# FIX-ME: if we don't add the BOM_UTF8 char, QtQR doesn't decode
# correctly; but if we add it, mobile apps don't.-
# Apparently is a zbar bug.
if self.data_type == 'text':
return BOM_UTF8 + self.__class__.data_encode[self.data_type](self.data).encode('utf-8')
else:
return self.__class__.data_encode[self.data_type](self.data).encode('utf-8') | [
"def",
"data_to_string",
"(",
"self",
")",
":",
"# FIX-ME: if we don't add the BOM_UTF8 char, QtQR doesn't decode",
"# correctly; but if we add it, mobile apps don't.-",
"# Apparently is a zbar bug.",
"if",
"self",
".",
"data_type",
"==",
"'text'",
":",
"return",
"BOM_UTF8",
"+",
"self",
".",
"__class__",
".",
"data_encode",
"[",
"self",
".",
"data_type",
"]",
"(",
"self",
".",
"data",
")",
".",
"encode",
"(",
"'utf-8'",
")",
"else",
":",
"return",
"self",
".",
"__class__",
".",
"data_encode",
"[",
"self",
".",
"data_type",
"]",
"(",
"self",
".",
"data",
")",
".",
"encode",
"(",
"'utf-8'",
")"
]
| Returns a UTF8 string with the QR Code's data | [
"Returns",
"a",
"UTF8",
"string",
"with",
"the",
"QR",
"Code",
"s",
"data"
]
| 3263c6136f54f0499b9945bfad593537d436c7a1 | https://github.com/primetang/qrtools/blob/3263c6136f54f0499b9945bfad593537d436c7a1/src/qrtools.py#L125-L133 | train |
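`data_recognise` is a simple prefix dispatch over URI schemes, and `data_to_string` re-encodes the payload for the encoder (with the BOM workaround noted in the comment). A usage sketch, assuming qrtools and its zbar dependency are installed (the library targets Python 2):

```python
from qrtools import QR

qr = QR(data=u"http://example.com")
print(qr.data_recognise())    # -> u'url'

qr = QR(data=u"mailto:someone@example.com")
print(qr.data_recognise())    # -> u'email'
```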
python-visualization/branca | branca/utilities.py | split_six
def split_six(series=None):
    """
    Given a Pandas Series, get a domain of values from zero to the 90% quantile
    rounded to the nearest order-of-magnitude integer. For example, 2100 is
    rounded to 2000, 2790 to 3000.

    Parameters
    ----------
    series: Pandas series, default None

    Returns
    -------
    list
    """
    if pd is None:
        raise ImportError('The Pandas package is required'
                          ' for this functionality')
    if np is None:
        raise ImportError('The NumPy package is required'
                          ' for this functionality')

    def base(x):
        if x > 0:
            base = pow(10, math.floor(math.log10(x)))
            return round(x/base)*base
        else:
            return 0

    quants = [0, 50, 75, 85, 90]
    # Some weirdness in series quantiles a la 0.13.
    arr = series.values
    return [base(np.percentile(arr, x)) for x in quants]
"""
Given a Pandas Series, get a domain of values from zero to the 90% quantile
rounded to the nearest order-of-magnitude integer. For example, 2100 is
rounded to 2000, 2790 to 3000.
Parameters
----------
series: Pandas series, default None
Returns
-------
list
"""
if pd is None:
raise ImportError('The Pandas package is required'
' for this functionality')
if np is None:
raise ImportError('The NumPy package is required'
' for this functionality')
def base(x):
if x > 0:
base = pow(10, math.floor(math.log10(x)))
return round(x/base)*base
else:
return 0
quants = [0, 50, 75, 85, 90]
# Some weirdness in series quantiles a la 0.13.
arr = series.values
return [base(np.percentile(arr, x)) for x in quants] | [
"def",
"split_six",
"(",
"series",
"=",
"None",
")",
":",
"if",
"pd",
"is",
"None",
":",
"raise",
"ImportError",
"(",
"'The Pandas package is required'",
"' for this functionality'",
")",
"if",
"np",
"is",
"None",
":",
"raise",
"ImportError",
"(",
"'The NumPy package is required'",
"' for this functionality'",
")",
"def",
"base",
"(",
"x",
")",
":",
"if",
"x",
">",
"0",
":",
"base",
"=",
"pow",
"(",
"10",
",",
"math",
".",
"floor",
"(",
"math",
".",
"log10",
"(",
"x",
")",
")",
")",
"return",
"round",
"(",
"x",
"/",
"base",
")",
"*",
"base",
"else",
":",
"return",
"0",
"quants",
"=",
"[",
"0",
",",
"50",
",",
"75",
",",
"85",
",",
"90",
"]",
"# Some weirdness in series quantiles a la 0.13.",
"arr",
"=",
"series",
".",
"values",
"return",
"[",
"base",
"(",
"np",
".",
"percentile",
"(",
"arr",
",",
"x",
")",
")",
"for",
"x",
"in",
"quants",
"]"
]
| Given a Pandas Series, get a domain of values from zero to the 90% quantile
rounded to the nearest order-of-magnitude integer. For example, 2100 is
rounded to 2000, 2790 to 3000.
Parameters
----------
series: Pandas series, default None
Returns
-------
list | [
"Given",
"a",
"Pandas",
"Series",
"get",
"a",
"domain",
"of",
"values",
"from",
"zero",
"to",
"the",
"90%",
"quantile",
"rounded",
"to",
"the",
"nearest",
"order",
"-",
"of",
"-",
"magnitude",
"integer",
".",
"For",
"example",
"2100",
"is",
"rounded",
"to",
"2000",
"2790",
"to",
"3000",
"."
]
| 4e89e88a5a7ff3586f0852249c2c125f72316da8 | https://github.com/python-visualization/branca/blob/4e89e88a5a7ff3586f0852249c2c125f72316da8/branca/utilities.py#L183-L215 | train |
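Despite the name, `split_six` returns five thresholds (one per entry of `quants`). A quick check with a small Series; the printed values assume numpy's default linear percentile interpolation:

```python
import pandas as pd
from branca.utilities import split_six

s = pd.Series([1, 120, 350, 980, 2790, 5000])
print(split_six(s))  # -> [1.0, 700.0, 2000.0, 3000.0, 4000.0]
```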
python-visualization/branca | branca/colormap.py | StepColormap.to_linear
def to_linear(self, index=None):
    """
    Transforms the StepColormap into a LinearColormap.

    Parameters
    ----------
    index : list of floats, default None
        The values corresponding to each color in the output colormap.
        It has to be sorted.
        If None, a regular grid between `vmin` and `vmax` is created.
    """
    if index is None:
        n = len(self.index)-1
        index = [self.index[i]*(1.-i/(n-1.))+self.index[i+1]*i/(n-1.) for
                 i in range(n)]

    colors = [self.rgba_floats_tuple(x) for x in index]
    return LinearColormap(colors, index=index,
                          vmin=self.vmin, vmax=self.vmax)
"""
Transforms the StepColormap into a LinearColormap.
Parameters
----------
index : list of floats, default None
The values corresponding to each color in the output colormap.
It has to be sorted.
If None, a regular grid between `vmin` and `vmax` is created.
"""
if index is None:
n = len(self.index)-1
index = [self.index[i]*(1.-i/(n-1.))+self.index[i+1]*i/(n-1.) for
i in range(n)]
colors = [self.rgba_floats_tuple(x) for x in index]
return LinearColormap(colors, index=index,
vmin=self.vmin, vmax=self.vmax) | [
"def",
"to_linear",
"(",
"self",
",",
"index",
"=",
"None",
")",
":",
"if",
"index",
"is",
"None",
":",
"n",
"=",
"len",
"(",
"self",
".",
"index",
")",
"-",
"1",
"index",
"=",
"[",
"self",
".",
"index",
"[",
"i",
"]",
"*",
"(",
"1.",
"-",
"i",
"/",
"(",
"n",
"-",
"1.",
")",
")",
"+",
"self",
".",
"index",
"[",
"i",
"+",
"1",
"]",
"*",
"i",
"/",
"(",
"n",
"-",
"1.",
")",
"for",
"i",
"in",
"range",
"(",
"n",
")",
"]",
"colors",
"=",
"[",
"self",
".",
"rgba_floats_tuple",
"(",
"x",
")",
"for",
"x",
"in",
"index",
"]",
"return",
"LinearColormap",
"(",
"colors",
",",
"index",
"=",
"index",
",",
"vmin",
"=",
"self",
".",
"vmin",
",",
"vmax",
"=",
"self",
".",
"vmax",
")"
]
| Transforms the StepColormap into a LinearColormap.
Parameters
----------
index : list of floats, default None
The values corresponding to each color in the output colormap.
It has to be sorted.
If None, a regular grid between `vmin` and `vmax` is created. | [
"Transforms",
"the",
"StepColormap",
"into",
"a",
"LinearColormap",
"."
]
| 4e89e88a5a7ff3586f0852249c2c125f72316da8 | https://github.com/python-visualization/branca/blob/4e89e88a5a7ff3586f0852249c2c125f72316da8/branca/colormap.py#L390-L409 | train |
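`to_linear` resamples a stepped colormap between its bin boundaries and hands the result to `LinearColormap`. A short sketch of the round trip:

```python
import branca.colormap as cm

step = cm.StepColormap(['green', 'yellow', 'red'], vmin=0., vmax=100.)
linear = step.to_linear()   # smooth ramp through the step colors
print(linear(42))           # hex color string for the value 42
```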
python-visualization/branca | branca/element.py | Element.add_to
def add_to(self, parent, name=None, index=None):
    """Add element to a parent."""
    parent.add_child(self, name=name, index=index)
    return self
"""Add element to a parent."""
parent.add_child(self, name=name, index=index)
return self | [
"def",
"add_to",
"(",
"self",
",",
"parent",
",",
"name",
"=",
"None",
",",
"index",
"=",
"None",
")",
":",
"parent",
".",
"add_child",
"(",
"self",
",",
"name",
"=",
"name",
",",
"index",
"=",
"index",
")",
"return",
"self"
]
| Add element to a parent. | [
"Add",
"element",
"to",
"a",
"parent",
"."
]
| 4e89e88a5a7ff3586f0852249c2c125f72316da8 | https://github.com/python-visualization/branca/blob/4e89e88a5a7ff3586f0852249c2c125f72316da8/branca/element.py#L119-L122 | train |
python-visualization/branca | branca/element.py | Element.to_json
def to_json(self, depth=-1, **kwargs):
    """Returns a JSON representation of the object."""
    return json.dumps(self.to_dict(depth=depth, ordered=True), **kwargs)
"""Returns a JSON representation of the object."""
return json.dumps(self.to_dict(depth=depth, ordered=True), **kwargs) | [
"def",
"to_json",
"(",
"self",
",",
"depth",
"=",
"-",
"1",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"json",
".",
"dumps",
"(",
"self",
".",
"to_dict",
"(",
"depth",
"=",
"depth",
",",
"ordered",
"=",
"True",
")",
",",
"*",
"*",
"kwargs",
")"
]
| Returns a JSON representation of the object. | [
"Returns",
"a",
"JSON",
"representation",
"of",
"the",
"object",
"."
]
| 4e89e88a5a7ff3586f0852249c2c125f72316da8 | https://github.com/python-visualization/branca/blob/4e89e88a5a7ff3586f0852249c2c125f72316da8/branca/element.py#L138-L140 | train |
python-visualization/branca | branca/element.py | Element.save
def save(self, outfile, close_file=True, **kwargs):
    """Saves an Element into a file.

    Parameters
    ----------
    outfile : str or file object
        The file (or filename) where you want to output the html.
    close_file : bool, default True
        Whether the file has to be closed after write.
    """
    if isinstance(outfile, text_type) or isinstance(outfile, binary_type):
        fid = open(outfile, 'wb')
    else:
        fid = outfile

    root = self.get_root()
    html = root.render(**kwargs)
    fid.write(html.encode('utf8'))
    if close_file:
        fid.close()
"""Saves an Element into a file.
Parameters
----------
outfile : str or file object
The file (or filename) where you want to output the html.
close_file : bool, default True
Whether the file has to be closed after write.
"""
if isinstance(outfile, text_type) or isinstance(outfile, binary_type):
fid = open(outfile, 'wb')
else:
fid = outfile
root = self.get_root()
html = root.render(**kwargs)
fid.write(html.encode('utf8'))
if close_file:
fid.close() | [
"def",
"save",
"(",
"self",
",",
"outfile",
",",
"close_file",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"isinstance",
"(",
"outfile",
",",
"text_type",
")",
"or",
"isinstance",
"(",
"outfile",
",",
"binary_type",
")",
":",
"fid",
"=",
"open",
"(",
"outfile",
",",
"'wb'",
")",
"else",
":",
"fid",
"=",
"outfile",
"root",
"=",
"self",
".",
"get_root",
"(",
")",
"html",
"=",
"root",
".",
"render",
"(",
"*",
"*",
"kwargs",
")",
"fid",
".",
"write",
"(",
"html",
".",
"encode",
"(",
"'utf8'",
")",
")",
"if",
"close_file",
":",
"fid",
".",
"close",
"(",
")"
]
| Saves an Element into a file.
Parameters
----------
outfile : str or file object
The file (or filename) where you want to output the html.
close_file : bool, default True
Whether the file has to be closed after write. | [
"Saves",
"an",
"Element",
"into",
"a",
"file",
"."
]
| 4e89e88a5a7ff3586f0852249c2c125f72316da8 | https://github.com/python-visualization/branca/blob/4e89e88a5a7ff3586f0852249c2c125f72316da8/branca/element.py#L153-L172 | train |
python-visualization/branca | branca/element.py | Link.get_code
def get_code(self):
    """Opens the link and returns the response's content."""
    if self.code is None:
        self.code = urlopen(self.url).read()
    return self.code
"""Opens the link and returns the response's content."""
if self.code is None:
self.code = urlopen(self.url).read()
return self.code | [
"def",
"get_code",
"(",
"self",
")",
":",
"if",
"self",
".",
"code",
"is",
"None",
":",
"self",
".",
"code",
"=",
"urlopen",
"(",
"self",
".",
"url",
")",
".",
"read",
"(",
")",
"return",
"self",
".",
"code"
]
| Opens the link and returns the response's content. | [
"Opens",
"the",
"link",
"and",
"returns",
"the",
"response",
"s",
"content",
"."
]
| 4e89e88a5a7ff3586f0852249c2c125f72316da8 | https://github.com/python-visualization/branca/blob/4e89e88a5a7ff3586f0852249c2c125f72316da8/branca/element.py#L177-L181 | train |
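`add_to`, `to_json`, `save`, and `Link.get_code` are the workhorses of branca's render tree: attach a child (returning it for chaining), serialize the tree, and write the rendered root to disk. A sketch; the output filename is arbitrary, and attaching to the Figure's `html` sub-element is an assumption about the default Figure layout:

```python
from branca.element import Element, Figure

fig = Figure(width='600px', height='400px')
Element('<p>Hello, branca!</p>').add_to(fig.html)  # add_to returns the child
print(fig.to_json()[:60])                          # serialized render tree
fig.save('hello.html')                             # rendered HTML on disk
```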
python-visualization/branca | branca/element.py | Figure._repr_html_
def _repr_html_(self, **kwargs):
    """Displays the Figure in a Jupyter notebook."""
    html = self.render(**kwargs)
    html = "data:text/html;charset=utf-8;base64," + base64.b64encode(html.encode('utf8')).decode('utf8')  # noqa

    if self.height is None:
        iframe = (
            '<div style="width:{width};">'
            '<div style="position:relative;width:100%;height:0;padding-bottom:{ratio};">'  # noqa
            '<iframe src="{html}" style="position:absolute;width:100%;height:100%;left:0;top:0;'  # noqa
            'border:none !important;" '
            'allowfullscreen webkitallowfullscreen mozallowfullscreen>'
            '</iframe>'
            '</div></div>').format
        iframe = iframe(html=html,
                        width=self.width,
                        ratio=self.ratio)
    else:
        iframe = ('<iframe src="{html}" width="{width}" height="{height}"'
                  'style="border:none !important;" '
                  '"allowfullscreen" "webkitallowfullscreen" "mozallowfullscreen">'  # noqa
                  '</iframe>').format
        iframe = iframe(html=html, width=self.width, height=self.height)
    return iframe
"""Displays the Figure in a Jupyter notebook.
"""
html = self.render(**kwargs)
html = "data:text/html;charset=utf-8;base64," + base64.b64encode(html.encode('utf8')).decode('utf8') # noqa
if self.height is None:
iframe = (
'<div style="width:{width};">'
'<div style="position:relative;width:100%;height:0;padding-bottom:{ratio};">' # noqa
'<iframe src="{html}" style="position:absolute;width:100%;height:100%;left:0;top:0;' # noqa
'border:none !important;" '
'allowfullscreen webkitallowfullscreen mozallowfullscreen>'
'</iframe>'
'</div></div>').format
iframe = iframe(html=html,
width=self.width,
ratio=self.ratio)
else:
iframe = ('<iframe src="{html}" width="{width}" height="{height}"'
'style="border:none !important;" '
'"allowfullscreen" "webkitallowfullscreen" "mozallowfullscreen">' # noqa
'</iframe>').format
iframe = iframe(html=html, width=self.width, height=self.height)
return iframe | [
"def",
"_repr_html_",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"html",
"=",
"self",
".",
"render",
"(",
"*",
"*",
"kwargs",
")",
"html",
"=",
"\"data:text/html;charset=utf-8;base64,\"",
"+",
"base64",
".",
"b64encode",
"(",
"html",
".",
"encode",
"(",
"'utf8'",
")",
")",
".",
"decode",
"(",
"'utf8'",
")",
"# noqa",
"if",
"self",
".",
"height",
"is",
"None",
":",
"iframe",
"=",
"(",
"'<div style=\"width:{width};\">'",
"'<div style=\"position:relative;width:100%;height:0;padding-bottom:{ratio};\">'",
"# noqa",
"'<iframe src=\"{html}\" style=\"position:absolute;width:100%;height:100%;left:0;top:0;'",
"# noqa",
"'border:none !important;\" '",
"'allowfullscreen webkitallowfullscreen mozallowfullscreen>'",
"'</iframe>'",
"'</div></div>'",
")",
".",
"format",
"iframe",
"=",
"iframe",
"(",
"html",
"=",
"html",
",",
"width",
"=",
"self",
".",
"width",
",",
"ratio",
"=",
"self",
".",
"ratio",
")",
"else",
":",
"iframe",
"=",
"(",
"'<iframe src=\"{html}\" width=\"{width}\" height=\"{height}\"'",
"'style=\"border:none !important;\" '",
"'\"allowfullscreen\" \"webkitallowfullscreen\" \"mozallowfullscreen\">'",
"# noqa",
"'</iframe>'",
")",
".",
"format",
"iframe",
"=",
"iframe",
"(",
"html",
"=",
"html",
",",
"width",
"=",
"self",
".",
"width",
",",
"height",
"=",
"self",
".",
"height",
")",
"return",
"iframe"
]
| Displays the Figure in a Jupyter notebook. | [
"Displays",
"the",
"Figure",
"in",
"a",
"Jupyter",
"notebook",
"."
]
| 4e89e88a5a7ff3586f0852249c2c125f72316da8 | https://github.com/python-visualization/branca/blob/4e89e88a5a7ff3586f0852249c2c125f72316da8/branca/element.py#L324-L349 | train |
python-visualization/branca | branca/element.py | Figure.add_subplot
def add_subplot(self, x, y, n, margin=0.05):
    """Creates a div child subplot in a matplotlib.figure.add_subplot style.

    Parameters
    ----------
    x : int
        The number of rows in the grid.
    y : int
        The number of columns in the grid.
    n : int
        The cell number in the grid, counted from 1 to x*y.

    Example:
    >>> fig.add_subplot(3, 2, 5)
    # Create a div in the 5th cell of a 3-rows x 2-columns
    # grid (bottom-left corner).
    """
    width = 1./y
    height = 1./x
    left = ((n-1) % y)*width
    top = ((n-1)//y)*height

    left = left+width*margin
    top = top+height*margin
    width = width*(1-2.*margin)
    height = height*(1-2.*margin)

    div = Div(position='absolute',
              width='{}%'.format(100.*width),
              height='{}%'.format(100.*height),
              left='{}%'.format(100.*left),
              top='{}%'.format(100.*top),
              )
    self.add_child(div)
    return div
"""Creates a div child subplot in a matplotlib.figure.add_subplot style.
Parameters
----------
x : int
The number of rows in the grid.
y : int
The number of columns in the grid.
n : int
The cell number in the grid, counted from 1 to x*y.
Example:
>>> fig.add_subplot(3,2,5)
# Create a div in the 5th cell of a 3rows x 2columns
grid(bottom-left corner).
"""
width = 1./y
height = 1./x
left = ((n-1) % y)*width
top = ((n-1)//y)*height
left = left+width*margin
top = top+height*margin
width = width*(1-2.*margin)
height = height*(1-2.*margin)
div = Div(position='absolute',
width='{}%'.format(100.*width),
height='{}%'.format(100.*height),
left='{}%'.format(100.*left),
top='{}%'.format(100.*top),
)
self.add_child(div)
return div | [
"def",
"add_subplot",
"(",
"self",
",",
"x",
",",
"y",
",",
"n",
",",
"margin",
"=",
"0.05",
")",
":",
"width",
"=",
"1.",
"/",
"y",
"height",
"=",
"1.",
"/",
"x",
"left",
"=",
"(",
"(",
"n",
"-",
"1",
")",
"%",
"y",
")",
"*",
"width",
"top",
"=",
"(",
"(",
"n",
"-",
"1",
")",
"//",
"y",
")",
"*",
"height",
"left",
"=",
"left",
"+",
"width",
"*",
"margin",
"top",
"=",
"top",
"+",
"height",
"*",
"margin",
"width",
"=",
"width",
"*",
"(",
"1",
"-",
"2.",
"*",
"margin",
")",
"height",
"=",
"height",
"*",
"(",
"1",
"-",
"2.",
"*",
"margin",
")",
"div",
"=",
"Div",
"(",
"position",
"=",
"'absolute'",
",",
"width",
"=",
"'{}%'",
".",
"format",
"(",
"100.",
"*",
"width",
")",
",",
"height",
"=",
"'{}%'",
".",
"format",
"(",
"100.",
"*",
"height",
")",
",",
"left",
"=",
"'{}%'",
".",
"format",
"(",
"100.",
"*",
"left",
")",
",",
"top",
"=",
"'{}%'",
".",
"format",
"(",
"100.",
"*",
"top",
")",
",",
")",
"self",
".",
"add_child",
"(",
"div",
")",
"return",
"div"
]
| Creates a div child subplot in a matplotlib.figure.add_subplot style.
Parameters
----------
x : int
The number of rows in the grid.
y : int
The number of columns in the grid.
n : int
The cell number in the grid, counted from 1 to x*y.
Example:
>>> fig.add_subplot(3,2,5)
# Create a div in the 5th cell of a 3rows x 2columns
grid(bottom-left corner). | [
"Creates",
"a",
"div",
"child",
"subplot",
"in",
"a",
"matplotlib",
".",
"figure",
".",
"add_subplot",
"style",
"."
]
| 4e89e88a5a7ff3586f0852249c2c125f72316da8 | https://github.com/python-visualization/branca/blob/4e89e88a5a7ff3586f0852249c2c125f72316da8/branca/element.py#L351-L385 | train |
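`_repr_html_` packages the rendered figure into a data-URI iframe for notebooks, and `add_subplot` positions child `Div`s by percentage offsets. A hedged sketch of tiling a 2x2 grid; it assumes `Div` exposes the same `html` sub-element as `Figure`:

```python
from branca.element import Element, Figure

fig = Figure(height='300px')
for i in range(1, 5):
    cell = fig.add_subplot(2, 2, i)  # cells numbered 1..4, row-major
    Element('<b>cell %d</b>' % i).add_to(cell.html)
fig.save('grid.html')
```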
rasbt/pyprind | pyprind/prog_class.py | Prog._elapsed
def _elapsed(self):
    """Returns elapsed time at update."""
    self.last_time = time.time()
    return self.last_time - self.start
""" Returns elapsed time at update. """
self.last_time = time.time()
return self.last_time - self.start | [
"def",
"_elapsed",
"(",
"self",
")",
":",
"self",
".",
"last_time",
"=",
"time",
".",
"time",
"(",
")",
"return",
"self",
".",
"last_time",
"-",
"self",
".",
"start"
]
| Returns elapsed time at update. | [
"Returns",
"elapsed",
"time",
"at",
"update",
"."
]
| 57d8611ae86cc2cb71d6f1ab973476fc9bea5b7a | https://github.com/rasbt/pyprind/blob/57d8611ae86cc2cb71d6f1ab973476fc9bea5b7a/pyprind/prog_class.py#L120-L123 | train |
rasbt/pyprind | pyprind/prog_class.py | Prog._calc_eta
def _calc_eta(self):
    """Calculates estimated time left until completion."""
    elapsed = self._elapsed()
    if self.cnt == 0 or elapsed < 0.001:
        return None
    rate = float(self.cnt) / elapsed
    self.eta = (float(self.max_iter) - float(self.cnt)) / rate
""" Calculates estimated time left until completion. """
elapsed = self._elapsed()
if self.cnt == 0 or elapsed < 0.001:
return None
rate = float(self.cnt) / elapsed
self.eta = (float(self.max_iter) - float(self.cnt)) / rate | [
"def",
"_calc_eta",
"(",
"self",
")",
":",
"elapsed",
"=",
"self",
".",
"_elapsed",
"(",
")",
"if",
"self",
".",
"cnt",
"==",
"0",
"or",
"elapsed",
"<",
"0.001",
":",
"return",
"None",
"rate",
"=",
"float",
"(",
"self",
".",
"cnt",
")",
"/",
"elapsed",
"self",
".",
"eta",
"=",
"(",
"float",
"(",
"self",
".",
"max_iter",
")",
"-",
"float",
"(",
"self",
".",
"cnt",
")",
")",
"/",
"rate"
]
| Calculates estimated time left until completion. | [
"Calculates",
"estimated",
"time",
"left",
"until",
"completion",
"."
]
| 57d8611ae86cc2cb71d6f1ab973476fc9bea5b7a | https://github.com/rasbt/pyprind/blob/57d8611ae86cc2cb71d6f1ab973476fc9bea5b7a/pyprind/prog_class.py#L125-L131 | train |
rasbt/pyprind | pyprind/prog_class.py | Prog._print_title
def _print_title(self):
    """Prints tracking title at initialization."""
    if self.title:
        self._stream_out('{}\n'.format(self.title))
        self._stream_flush()
""" Prints tracking title at initialization. """
if self.title:
self._stream_out('{}\n'.format(self.title))
self._stream_flush() | [
"def",
"_print_title",
"(",
"self",
")",
":",
"if",
"self",
".",
"title",
":",
"self",
".",
"_stream_out",
"(",
"'{}\\n'",
".",
"format",
"(",
"self",
".",
"title",
")",
")",
"self",
".",
"_stream_flush",
"(",
")"
]
| Prints tracking title at initialization. | [
"Prints",
"tracking",
"title",
"at",
"initialization",
"."
]
| 57d8611ae86cc2cb71d6f1ab973476fc9bea5b7a | https://github.com/rasbt/pyprind/blob/57d8611ae86cc2cb71d6f1ab973476fc9bea5b7a/pyprind/prog_class.py#L162-L166 | train |
rasbt/pyprind | pyprind/prog_class.py | Prog._cache_eta
def _cache_eta(self):
    """Caches the estimated time left for the next status output."""
    self._calc_eta()
    self._cached_output += ' | ETA: ' + self._get_time(self.eta)
""" Prints the estimated time left."""
self._calc_eta()
self._cached_output += ' | ETA: ' + self._get_time(self.eta) | [
"def",
"_cache_eta",
"(",
"self",
")",
":",
"self",
".",
"_calc_eta",
"(",
")",
"self",
".",
"_cached_output",
"+=",
"' | ETA: '",
"+",
"self",
".",
"_get_time",
"(",
"self",
".",
"eta",
")"
]
| Prints the estimated time left. | [
"Prints",
"the",
"estimated",
"time",
"left",
"."
]
| 57d8611ae86cc2cb71d6f1ab973476fc9bea5b7a | https://github.com/rasbt/pyprind/blob/57d8611ae86cc2cb71d6f1ab973476fc9bea5b7a/pyprind/prog_class.py#L168-L171 | train |
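These `Prog` helpers (elapsed time, ETA, title, cached status line) sit behind pyprind's public classes. A usage sketch with `ProgBar`, assuming pyprind is installed; the sleep stands in for real work:

```python
import time
import pyprind

bar = pyprind.ProgBar(50, title='Processing', track_time=True)
for _ in range(50):
    time.sleep(0.01)  # stand-in for real work
    bar.update()      # refreshes elapsed time and ETA internally
print(bar)            # summary report with total time elapsed
```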
rasbt/pyprind | pyprind/progbar.py | ProgBar._adjust_width
def _adjust_width(self):
    """Shrinks bar if number of iterations is less than the bar width"""
    if self.bar_width > self.max_iter:
        self.bar_width = int(self.max_iter)
"""Shrinks bar if number of iterations is less than the bar width"""
if self.bar_width > self.max_iter:
self.bar_width = int(self.max_iter) | [
"def",
"_adjust_width",
"(",
"self",
")",
":",
"if",
"self",
".",
"bar_width",
">",
"self",
".",
"max_iter",
":",
"self",
".",
"bar_width",
"=",
"int",
"(",
"self",
".",
"max_iter",
")"
]
| Shrinks bar if number of iterations is less than the bar width | [
"Shrinks",
"bar",
"if",
"number",
"of",
"iterations",
"is",
"less",
"than",
"the",
"bar",
"width"
]
| 57d8611ae86cc2cb71d6f1ab973476fc9bea5b7a | https://github.com/rasbt/pyprind/blob/57d8611ae86cc2cb71d6f1ab973476fc9bea5b7a/pyprind/progbar.py#L64-L67 | train |
rasbt/pyprind | pyprind/progpercent.py | ProgPercent._print
def _print(self, force_flush=False):
    """Prints formatted percentage and tracked time to the screen."""
    self._stream_flush()
    next_perc = self._calc_percent()
    if self.update_interval:
        do_update = time.time() - self.last_time >= self.update_interval
    elif force_flush:
        do_update = True
    else:
        do_update = next_perc > self.last_progress

    if do_update and self.active:
        self.last_progress = next_perc
        self._cache_percent_indicator(self.last_progress)
        if self.track:
            self._cached_output += ' Time elapsed: ' + \
                self._get_time(self._elapsed())
            self._cache_eta()
        if self.item_id:
            self._cache_item_id()
        self._stream_out('\r%s' % self._cached_output)
        self._stream_flush()
        self._cached_output = ''
""" Prints formatted percentage and tracked time to the screen."""
self._stream_flush()
next_perc = self._calc_percent()
if self.update_interval:
do_update = time.time() - self.last_time >= self.update_interval
elif force_flush:
do_update = True
else:
do_update = next_perc > self.last_progress
if do_update and self.active:
self.last_progress = next_perc
self._cache_percent_indicator(self.last_progress)
if self.track:
self._cached_output += ' Time elapsed: ' + \
self._get_time(self._elapsed())
self._cache_eta()
if self.item_id:
self._cache_item_id()
self._stream_out('\r%s' % self._cached_output)
self._stream_flush()
self._cached_output = '' | [
"def",
"_print",
"(",
"self",
",",
"force_flush",
"=",
"False",
")",
":",
"self",
".",
"_stream_flush",
"(",
")",
"next_perc",
"=",
"self",
".",
"_calc_percent",
"(",
")",
"if",
"self",
".",
"update_interval",
":",
"do_update",
"=",
"time",
".",
"time",
"(",
")",
"-",
"self",
".",
"last_time",
">=",
"self",
".",
"update_interval",
"elif",
"force_flush",
":",
"do_update",
"=",
"True",
"else",
":",
"do_update",
"=",
"next_perc",
">",
"self",
".",
"last_progress",
"if",
"do_update",
"and",
"self",
".",
"active",
":",
"self",
".",
"last_progress",
"=",
"next_perc",
"self",
".",
"_cache_percent_indicator",
"(",
"self",
".",
"last_progress",
")",
"if",
"self",
".",
"track",
":",
"self",
".",
"_cached_output",
"+=",
"' Time elapsed: '",
"+",
"self",
".",
"_get_time",
"(",
"self",
".",
"_elapsed",
"(",
")",
")",
"self",
".",
"_cache_eta",
"(",
")",
"if",
"self",
".",
"item_id",
":",
"self",
".",
"_cache_item_id",
"(",
")",
"self",
".",
"_stream_out",
"(",
"'\\r%s'",
"%",
"self",
".",
"_cached_output",
")",
"self",
".",
"_stream_flush",
"(",
")",
"self",
".",
"_cached_output",
"=",
"''"
]
| Prints formatted percentage and tracked time to the screen. | [
"Prints",
"formatted",
"percentage",
"and",
"tracked",
"time",
"to",
"the",
"screen",
"."
]
| 57d8611ae86cc2cb71d6f1ab973476fc9bea5b7a | https://github.com/rasbt/pyprind/blob/57d8611ae86cc2cb71d6f1ab973476fc9bea5b7a/pyprind/progpercent.py#L58-L80 | train |
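`_adjust_width` and `_print` expose the two practical knobs: the bar never grows wider than the iteration count, and writes are throttled either by whole-percent changes or by `update_interval`. A sketch:

```python
import pyprind

perc = pyprind.ProgPercent(1000, update_interval=0.5)  # write at most every 0.5 s
for i in range(1000):
    perc.update(item_id='item %d' % i)  # item_id is appended to the status line
```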
bgreenlee/pygtail | pygtail/core.py | Pygtail.next
def next(self):
    """
    Return the next line in the file, updating the offset.
    """
    try:
        line = self._get_next_line()
    except StopIteration:
        # we've reached the end of the file; if we're processing the
        # rotated log file or the file has been renamed, we can continue
        # with the actual file; otherwise update the offset file
        if self._is_new_file():
            self._rotated_logfile = None
            self._fh.close()
            self._offset = 0
            # open up current logfile and continue
            try:
                line = self._get_next_line()
            except StopIteration:  # oops, empty file
                self._update_offset_file()
                raise
        else:
            self._update_offset_file()
            raise

    if self.paranoid:
        self._update_offset_file()
    elif self.every_n and self.every_n <= self._since_update:
        self._update_offset_file()

    return line
"""
Return the next line in the file, updating the offset.
"""
try:
line = self._get_next_line()
except StopIteration:
# we've reached the end of the file; if we're processing the
# rotated log file or the file has been renamed, we can continue with the actual file; otherwise
# update the offset file
if self._is_new_file():
self._rotated_logfile = None
self._fh.close()
self._offset = 0
# open up current logfile and continue
try:
line = self._get_next_line()
except StopIteration: # oops, empty file
self._update_offset_file()
raise
else:
self._update_offset_file()
raise
if self.paranoid:
self._update_offset_file()
elif self.every_n and self.every_n <= self._since_update:
self._update_offset_file()
return line | [
"def",
"next",
"(",
"self",
")",
":",
"try",
":",
"line",
"=",
"self",
".",
"_get_next_line",
"(",
")",
"except",
"StopIteration",
":",
"# we've reached the end of the file; if we're processing the",
"# rotated log file or the file has been renamed, we can continue with the actual file; otherwise",
"# update the offset file",
"if",
"self",
".",
"_is_new_file",
"(",
")",
":",
"self",
".",
"_rotated_logfile",
"=",
"None",
"self",
".",
"_fh",
".",
"close",
"(",
")",
"self",
".",
"_offset",
"=",
"0",
"# open up current logfile and continue",
"try",
":",
"line",
"=",
"self",
".",
"_get_next_line",
"(",
")",
"except",
"StopIteration",
":",
"# oops, empty file",
"self",
".",
"_update_offset_file",
"(",
")",
"raise",
"else",
":",
"self",
".",
"_update_offset_file",
"(",
")",
"raise",
"if",
"self",
".",
"paranoid",
":",
"self",
".",
"_update_offset_file",
"(",
")",
"elif",
"self",
".",
"every_n",
"and",
"self",
".",
"every_n",
"<=",
"self",
".",
"_since_update",
":",
"self",
".",
"_update_offset_file",
"(",
")",
"return",
"line"
]
| Return the next line in the file, updating the offset. | [
"Return",
"the",
"next",
"line",
"in",
"the",
"file",
"updating",
"the",
"offset",
"."
]
| d2caeb6fece041d5d6c5ecf600dc5a9e46c8d890 | https://github.com/bgreenlee/pygtail/blob/d2caeb6fece041d5d6c5ecf600dc5a9e46c8d890/pygtail/core.py#L101-L130 | train |
bgreenlee/pygtail | pygtail/core.py | Pygtail.read
def read(self):
    """
    Read in all unread lines and return them as a single string.
    """
    lines = self.readlines()
    if lines:
        try:
            return ''.join(lines)
        except TypeError:
            return ''.join(force_text(line) for line in lines)
    else:
        return None
"""
Read in all unread lines and return them as a single string.
"""
lines = self.readlines()
if lines:
try:
return ''.join(lines)
except TypeError:
return ''.join(force_text(line) for line in lines)
else:
return None | [
"def",
"read",
"(",
"self",
")",
":",
"lines",
"=",
"self",
".",
"readlines",
"(",
")",
"if",
"lines",
":",
"try",
":",
"return",
"''",
".",
"join",
"(",
"lines",
")",
"except",
"TypeError",
":",
"return",
"''",
".",
"join",
"(",
"force_text",
"(",
"line",
")",
"for",
"line",
"in",
"lines",
")",
"else",
":",
"return",
"None"
]
| Read in all unread lines and return them as a single string. | [
"Read",
"in",
"all",
"unread",
"lines",
"and",
"return",
"them",
"as",
"a",
"single",
"string",
"."
]
| d2caeb6fece041d5d6c5ecf600dc5a9e46c8d890 | https://github.com/bgreenlee/pygtail/blob/d2caeb6fece041d5d6c5ecf600dc5a9e46c8d890/pygtail/core.py#L142-L153 | train |
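Pygtail's public surface is essentially iteration (`next`) plus `read`; each consumed line advances the stored offset, so a restarted process resumes where it stopped. A sketch; the log path is a placeholder:

```python
from pygtail import Pygtail

# Only lines appended since the last run are yielded; progress is kept in
# an offset file (by default "<filename>.offset") next to the log.
for line in Pygtail("/var/log/myapp.log"):
    print(line.rstrip())
```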
bgreenlee/pygtail | pygtail/core.py | Pygtail._filehandle
def _filehandle(self):
    """
    Return a filehandle to the file being tailed, with the position set
    to the current offset.
    """
    if not self._fh or self._is_closed():
        filename = self._rotated_logfile or self.filename
        if filename.endswith('.gz'):
            self._fh = gzip.open(filename, 'r')
        else:
            self._fh = open(filename, "r", 1)
        if self.read_from_end and not exists(self._offset_file):
            self._fh.seek(0, os.SEEK_END)
        else:
            self._fh.seek(self._offset)

    return self._fh
"""
Return a filehandle to the file being tailed, with the position set
to the current offset.
"""
if not self._fh or self._is_closed():
filename = self._rotated_logfile or self.filename
if filename.endswith('.gz'):
self._fh = gzip.open(filename, 'r')
else:
self._fh = open(filename, "r", 1)
if self.read_from_end and not exists(self._offset_file):
self._fh.seek(0, os.SEEK_END)
else:
self._fh.seek(self._offset)
return self._fh | [
"def",
"_filehandle",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_fh",
"or",
"self",
".",
"_is_closed",
"(",
")",
":",
"filename",
"=",
"self",
".",
"_rotated_logfile",
"or",
"self",
".",
"filename",
"if",
"filename",
".",
"endswith",
"(",
"'.gz'",
")",
":",
"self",
".",
"_fh",
"=",
"gzip",
".",
"open",
"(",
"filename",
",",
"'r'",
")",
"else",
":",
"self",
".",
"_fh",
"=",
"open",
"(",
"filename",
",",
"\"r\"",
",",
"1",
")",
"if",
"self",
".",
"read_from_end",
"and",
"not",
"exists",
"(",
"self",
".",
"_offset_file",
")",
":",
"self",
".",
"_fh",
".",
"seek",
"(",
"0",
",",
"os",
".",
"SEEK_END",
")",
"else",
":",
"self",
".",
"_fh",
".",
"seek",
"(",
"self",
".",
"_offset",
")",
"return",
"self",
".",
"_fh"
]
| Return a filehandle to the file being tailed, with the position set
to the current offset. | [
"Return",
"a",
"filehandle",
"to",
"the",
"file",
"being",
"tailed",
"with",
"the",
"position",
"set",
"to",
"the",
"current",
"offset",
"."
]
| d2caeb6fece041d5d6c5ecf600dc5a9e46c8d890 | https://github.com/bgreenlee/pygtail/blob/d2caeb6fece041d5d6c5ecf600dc5a9e46c8d890/pygtail/core.py#L167-L183 | train |
bgreenlee/pygtail | pygtail/core.py | Pygtail._update_offset_file
def _update_offset_file(self):
    """
    Update the offset file with the current inode and offset.
    """
    if self.on_update:
        self.on_update()
    offset = self._filehandle().tell()
    inode = stat(self.filename).st_ino
    fh = open(self._offset_file, "w")
    fh.write("%s\n%s\n" % (inode, offset))
    fh.close()
    self._since_update = 0
"""
Update the offset file with the current inode and offset.
"""
if self.on_update:
self.on_update()
offset = self._filehandle().tell()
inode = stat(self.filename).st_ino
fh = open(self._offset_file, "w")
fh.write("%s\n%s\n" % (inode, offset))
fh.close()
self._since_update = 0 | [
"def",
"_update_offset_file",
"(",
"self",
")",
":",
"if",
"self",
".",
"on_update",
":",
"self",
".",
"on_update",
"(",
")",
"offset",
"=",
"self",
".",
"_filehandle",
"(",
")",
".",
"tell",
"(",
")",
"inode",
"=",
"stat",
"(",
"self",
".",
"filename",
")",
".",
"st_ino",
"fh",
"=",
"open",
"(",
"self",
".",
"_offset_file",
",",
"\"w\"",
")",
"fh",
".",
"write",
"(",
"\"%s\\n%s\\n\"",
"%",
"(",
"inode",
",",
"offset",
")",
")",
"fh",
".",
"close",
"(",
")",
"self",
".",
"_since_update",
"=",
"0"
]
| Update the offset file with the current inode and offset. | [
"Update",
"the",
"offset",
"file",
"with",
"the",
"current",
"inode",
"and",
"offset",
"."
]
| d2caeb6fece041d5d6c5ecf600dc5a9e46c8d890 | https://github.com/bgreenlee/pygtail/blob/d2caeb6fece041d5d6c5ecf600dc5a9e46c8d890/pygtail/core.py#L185-L196 | train |
bgreenlee/pygtail | pygtail/core.py | Pygtail._determine_rotated_logfile
def _determine_rotated_logfile(self):
    """
    We suspect the logfile has been rotated, so try to guess what the
    rotated filename is, and return it.
    """
    rotated_filename = self._check_rotated_filename_candidates()
    if rotated_filename and exists(rotated_filename):
        if stat(rotated_filename).st_ino == self._offset_file_inode:
            return rotated_filename

        # if the inode hasn't changed, then the file shrank; this is
        # expected with copytruncate, otherwise print a warning
        if stat(self.filename).st_ino == self._offset_file_inode:
            if self.copytruncate:
                return rotated_filename
            else:
                sys.stderr.write(
                    "[pygtail] [WARN] file size of %s shrank, and copytruncate support is "
                    "disabled (expected at least %d bytes, was %d bytes).\n" %
                    (self.filename, self._offset, stat(self.filename).st_size))

    return None
"""
We suspect the logfile has been rotated, so try to guess what the
rotated filename is, and return it.
"""
rotated_filename = self._check_rotated_filename_candidates()
if rotated_filename and exists(rotated_filename):
if stat(rotated_filename).st_ino == self._offset_file_inode:
return rotated_filename
# if the inode hasn't changed, then the file shrank; this is expected with copytruncate,
# otherwise print a warning
if stat(self.filename).st_ino == self._offset_file_inode:
if self.copytruncate:
return rotated_filename
else:
sys.stderr.write(
"[pygtail] [WARN] file size of %s shrank, and copytruncate support is "
"disabled (expected at least %d bytes, was %d bytes).\n" %
(self.filename, self._offset, stat(self.filename).st_size))
return None | [
"def",
"_determine_rotated_logfile",
"(",
"self",
")",
":",
"rotated_filename",
"=",
"self",
".",
"_check_rotated_filename_candidates",
"(",
")",
"if",
"rotated_filename",
"and",
"exists",
"(",
"rotated_filename",
")",
":",
"if",
"stat",
"(",
"rotated_filename",
")",
".",
"st_ino",
"==",
"self",
".",
"_offset_file_inode",
":",
"return",
"rotated_filename",
"# if the inode hasn't changed, then the file shrank; this is expected with copytruncate,",
"# otherwise print a warning",
"if",
"stat",
"(",
"self",
".",
"filename",
")",
".",
"st_ino",
"==",
"self",
".",
"_offset_file_inode",
":",
"if",
"self",
".",
"copytruncate",
":",
"return",
"rotated_filename",
"else",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"\"[pygtail] [WARN] file size of %s shrank, and copytruncate support is \"",
"\"disabled (expected at least %d bytes, was %d bytes).\\n\"",
"%",
"(",
"self",
".",
"filename",
",",
"self",
".",
"_offset",
",",
"stat",
"(",
"self",
".",
"filename",
")",
".",
"st_size",
")",
")",
"return",
"None"
]
| We suspect the logfile has been rotated, so try to guess what the
rotated filename is, and return it. | [
"We",
"suspect",
"the",
"logfile",
"has",
"been",
"rotated",
"so",
"try",
"to",
"guess",
"what",
"the",
"rotated",
"filename",
"is",
"and",
"return",
"it",
"."
]
| d2caeb6fece041d5d6c5ecf600dc5a9e46c8d890 | https://github.com/bgreenlee/pygtail/blob/d2caeb6fece041d5d6c5ecf600dc5a9e46c8d890/pygtail/core.py#L198-L219 | train |
bgreenlee/pygtail | pygtail/core.py | Pygtail._check_rotated_filename_candidates
def _check_rotated_filename_candidates(self):
    """
    Check for various rotated logfile filename patterns and return the first
    match we find.
    """
    # savelog(8)
    candidate = "%s.0" % self.filename
    if (exists(candidate) and exists("%s.1.gz" % self.filename) and
            (stat(candidate).st_mtime > stat("%s.1.gz" % self.filename).st_mtime)):
        return candidate

    # logrotate(8)
    # with delaycompress
    candidate = "%s.1" % self.filename
    if exists(candidate):
        return candidate

    # without delaycompress
    candidate = "%s.1.gz" % self.filename
    if exists(candidate):
        return candidate

    rotated_filename_patterns = [
        # logrotate dateext rotation scheme - `dateformat -%Y%m%d` + with `delaycompress`
        "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]",
        # logrotate dateext rotation scheme - `dateformat -%Y%m%d` + without `delaycompress`
        "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz",
        # logrotate dateext rotation scheme - `dateformat -%Y%m%d-%s` + with `delaycompress`
        "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]",
        # logrotate dateext rotation scheme - `dateformat -%Y%m%d-%s` + without `delaycompress`
        "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz",
        # for TimedRotatingFileHandler
        "%s.[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]",
    ]
    if self.log_patterns:
        rotated_filename_patterns.extend(self.log_patterns)

    # break into directory and filename components to support cases where
    # the file is prepended as part of rotation
    file_dir, rel_filename = os.path.split(self.filename)
    for rotated_filename_pattern in rotated_filename_patterns:
        candidates = glob.glob(os.path.join(file_dir, rotated_filename_pattern % rel_filename))
        if candidates:
            candidates.sort()
            return candidates[-1]  # return most recent

    # no match
    return None
"""
Check for various rotated logfile filename patterns and return the first
match we find.
"""
# savelog(8)
candidate = "%s.0" % self.filename
if (exists(candidate) and exists("%s.1.gz" % self.filename) and
(stat(candidate).st_mtime > stat("%s.1.gz" % self.filename).st_mtime)):
return candidate
# logrotate(8)
# with delaycompress
candidate = "%s.1" % self.filename
if exists(candidate):
return candidate
# without delaycompress
candidate = "%s.1.gz" % self.filename
if exists(candidate):
return candidate
rotated_filename_patterns = [
# logrotate dateext rotation scheme - `dateformat -%Y%m%d` + with `delaycompress`
"%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]",
# logrotate dateext rotation scheme - `dateformat -%Y%m%d` + without `delaycompress`
"%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz",
# logrotate dateext rotation scheme - `dateformat -%Y%m%d-%s` + with `delaycompress`
"%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]",
# logrotate dateext rotation scheme - `dateformat -%Y%m%d-%s` + without `delaycompress`
"%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz",
# for TimedRotatingFileHandler
"%s.[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]",
]
if self.log_patterns:
rotated_filename_patterns.extend(self.log_patterns)
# break into directory and filename components to support cases where the
# the file is prepended as part of rotation
file_dir, rel_filename = os.path.split(self.filename)
for rotated_filename_pattern in rotated_filename_patterns:
candidates = glob.glob(os.path.join(file_dir, rotated_filename_pattern % rel_filename))
if candidates:
candidates.sort()
return candidates[-1] # return most recent
# no match
return None | [
"def",
"_check_rotated_filename_candidates",
"(",
"self",
")",
":",
"# savelog(8)",
"candidate",
"=",
"\"%s.0\"",
"%",
"self",
".",
"filename",
"if",
"(",
"exists",
"(",
"candidate",
")",
"and",
"exists",
"(",
"\"%s.1.gz\"",
"%",
"self",
".",
"filename",
")",
"and",
"(",
"stat",
"(",
"candidate",
")",
".",
"st_mtime",
">",
"stat",
"(",
"\"%s.1.gz\"",
"%",
"self",
".",
"filename",
")",
".",
"st_mtime",
")",
")",
":",
"return",
"candidate",
"# logrotate(8)",
"# with delaycompress",
"candidate",
"=",
"\"%s.1\"",
"%",
"self",
".",
"filename",
"if",
"exists",
"(",
"candidate",
")",
":",
"return",
"candidate",
"# without delaycompress",
"candidate",
"=",
"\"%s.1.gz\"",
"%",
"self",
".",
"filename",
"if",
"exists",
"(",
"candidate",
")",
":",
"return",
"candidate",
"rotated_filename_patterns",
"=",
"[",
"# logrotate dateext rotation scheme - `dateformat -%Y%m%d` + with `delaycompress`",
"\"%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]\"",
",",
"# logrotate dateext rotation scheme - `dateformat -%Y%m%d` + without `delaycompress`",
"\"%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz\"",
",",
"# logrotate dateext rotation scheme - `dateformat -%Y%m%d-%s` + with `delaycompress`",
"\"%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]\"",
",",
"# logrotate dateext rotation scheme - `dateformat -%Y%m%d-%s` + without `delaycompress`",
"\"%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9].gz\"",
",",
"# for TimedRotatingFileHandler",
"\"%s.[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]\"",
",",
"]",
"if",
"self",
".",
"log_patterns",
":",
"rotated_filename_patterns",
".",
"extend",
"(",
"self",
".",
"log_patterns",
")",
"# break into directory and filename components to support cases where the",
"# the file is prepended as part of rotation",
"file_dir",
",",
"rel_filename",
"=",
"os",
".",
"path",
".",
"split",
"(",
"self",
".",
"filename",
")",
"for",
"rotated_filename_pattern",
"in",
"rotated_filename_patterns",
":",
"candidates",
"=",
"glob",
".",
"glob",
"(",
"os",
".",
"path",
".",
"join",
"(",
"file_dir",
",",
"rotated_filename_pattern",
"%",
"rel_filename",
")",
")",
"if",
"candidates",
":",
"candidates",
".",
"sort",
"(",
")",
"return",
"candidates",
"[",
"-",
"1",
"]",
"# return most recent",
"# no match",
"return",
"None"
]
| Check for various rotated logfile filename patterns and return the first
match we find. | [
"Check",
"for",
"various",
"rotated",
"logfile",
"filename",
"patterns",
"and",
"return",
"the",
"first",
"match",
"we",
"find",
"."
]
| d2caeb6fece041d5d6c5ecf600dc5a9e46c8d890 | https://github.com/bgreenlee/pygtail/blob/d2caeb6fece041d5d6c5ecf600dc5a9e46c8d890/pygtail/core.py#L221-L268 | train |
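An aside on the glob patterns above: a self-contained sketch of how the dateext pattern picks the most recent rotated file. The file names and temporary directory are made up for illustration; this does not use pygtail's API.

import glob
import os
import tempfile

# Hypothetical rotated names for a log called "app.log" (logrotate dateext scheme).
tmp = tempfile.mkdtemp()
for name in ("app.log-20240101", "app.log-20240102", "app.log.1.gz"):
    open(os.path.join(tmp, name), "w").close()

# Same pattern as the dateext + delaycompress entry in the list above.
pattern = "%s-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]" % "app.log"
candidates = sorted(glob.glob(os.path.join(tmp, pattern)))
print(candidates[-1])  # lexicographic sort puts the newest date last
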
quiltdata/quilt | compiler/quilt/tools/data_transfer.py | create_s3_session | def create_s3_session():
"""
Creates a session with automatic retries on 5xx errors.
"""
sess = requests.Session()
retries = Retry(total=3,
backoff_factor=.5,
status_forcelist=[500, 502, 503, 504])
sess.mount('https://', HTTPAdapter(max_retries=retries))
return sess | python | def create_s3_session():
"""
Creates a session with automatic retries on 5xx errors.
"""
sess = requests.Session()
retries = Retry(total=3,
backoff_factor=.5,
status_forcelist=[500, 502, 503, 504])
sess.mount('https://', HTTPAdapter(max_retries=retries))
return sess | [
"def",
"create_s3_session",
"(",
")",
":",
"sess",
"=",
"requests",
".",
"Session",
"(",
")",
"retries",
"=",
"Retry",
"(",
"total",
"=",
"3",
",",
"backoff_factor",
"=",
".5",
",",
"status_forcelist",
"=",
"[",
"500",
",",
"502",
",",
"503",
",",
"504",
"]",
")",
"sess",
".",
"mount",
"(",
"'https://'",
",",
"HTTPAdapter",
"(",
"max_retries",
"=",
"retries",
")",
")",
"return",
"sess"
]
| Creates a session with automatic retries on 5xx errors. | [
"Creates",
"a",
"session",
"with",
"automatic",
"retries",
"on",
"5xx",
"errors",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/data_transfer.py#L48-L57 | train |
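A minimal usage sketch for the retrying session; the URL is a placeholder, and the behavior shown (re-issuing the request on 500/502/503/504 responses) follows from the Retry configuration above.

import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

sess = requests.Session()
retries = Retry(total=3, backoff_factor=.5,
                status_forcelist=[500, 502, 503, 504])
sess.mount('https://', HTTPAdapter(max_retries=retries))

# Every https:// request through this session now retries the listed 5xx
# responses up to three times, sleeping with exponential backoff between tries.
resp = sess.get('https://example.com/')  # placeholder URL
print(resp.status_code)
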
quiltdata/quilt | compiler/quilt/imports.py | FakeLoader.load_module | def load_module(self, fullname):
"""
Returns an empty module.
"""
mod = sys.modules.setdefault(fullname, imp.new_module(fullname))
mod.__file__ = self._path
mod.__loader__ = self
mod.__path__ = []
mod.__package__ = fullname
return mod | python | def load_module(self, fullname):
"""
Returns an empty module.
"""
mod = sys.modules.setdefault(fullname, imp.new_module(fullname))
mod.__file__ = self._path
mod.__loader__ = self
mod.__path__ = []
mod.__package__ = fullname
return mod | [
"def",
"load_module",
"(",
"self",
",",
"fullname",
")",
":",
"mod",
"=",
"sys",
".",
"modules",
".",
"setdefault",
"(",
"fullname",
",",
"imp",
".",
"new_module",
"(",
"fullname",
")",
")",
"mod",
".",
"__file__",
"=",
"self",
".",
"_path",
"mod",
".",
"__loader__",
"=",
"self",
"mod",
".",
"__path__",
"=",
"[",
"]",
"mod",
".",
"__package__",
"=",
"fullname",
"return",
"mod"
]
| Returns an empty module. | [
"Returns",
"an",
"empty",
"module",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/imports.py#L31-L40 | train |
quiltdata/quilt | compiler/quilt/imports.py | PackageLoader.load_module | def load_module(self, fullname):
"""
Returns an object that lazily looks up tables and groups.
"""
mod = sys.modules.get(fullname)
if mod is not None:
return mod
# We're creating an object rather than a module. It's a hack, but it's approved by Guido:
# https://mail.python.org/pipermail/python-ideas/2012-May/014969.html
mod = _from_core_node(self._store, self._root)
sys.modules[fullname] = mod
return mod | python | def load_module(self, fullname):
"""
Returns an object that lazily looks up tables and groups.
"""
mod = sys.modules.get(fullname)
if mod is not None:
return mod
# We're creating an object rather than a module. It's a hack, but it's approved by Guido:
# https://mail.python.org/pipermail/python-ideas/2012-May/014969.html
mod = _from_core_node(self._store, self._root)
sys.modules[fullname] = mod
return mod | [
"def",
"load_module",
"(",
"self",
",",
"fullname",
")",
":",
"mod",
"=",
"sys",
".",
"modules",
".",
"get",
"(",
"fullname",
")",
"if",
"mod",
"is",
"not",
"None",
":",
"return",
"mod",
"# We're creating an object rather than a module. It's a hack, but it's approved by Guido:",
"# https://mail.python.org/pipermail/python-ideas/2012-May/014969.html",
"mod",
"=",
"_from_core_node",
"(",
"self",
".",
"_store",
",",
"self",
".",
"_root",
")",
"sys",
".",
"modules",
"[",
"fullname",
"]",
"=",
"mod",
"return",
"mod"
]
| Returns an object that lazily looks up tables and groups. | [
"Returns",
"an",
"object",
"that",
"lazily",
"looks",
"up",
"tables",
"and",
"groups",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/imports.py#L81-L94 | train |
quiltdata/quilt | compiler/quilt/imports.py | ModuleFinder.find_module | def find_module(self, fullname, path=None):
"""
Looks up the table based on the module path.
"""
if not fullname.startswith(self._module_name + '.'):
# Not a quilt submodule.
return None
submodule = fullname[len(self._module_name) + 1:]
parts = submodule.split('.')
# Pop the team prefix if this is a team import.
if self._teams:
team = parts.pop(0)
else:
team = None
# Handle full paths first.
if len(parts) == 2:
store, pkg = PackageStore.find_package(team, parts[0], parts[1])
if pkg is not None:
return PackageLoader(store, pkg)
else:
return None
# Return fake loaders for partial paths.
for store_dir in PackageStore.find_store_dirs():
store = PackageStore(store_dir)
if len(parts) == 0:
assert self._teams
path = store.team_path(team)
elif len(parts) == 1:
path = store.user_path(team, parts[0])
if os.path.isdir(path):
return FakeLoader(path)
# Nothing is found.
return None | python | def find_module(self, fullname, path=None):
"""
Looks up the table based on the module path.
"""
if not fullname.startswith(self._module_name + '.'):
# Not a quilt submodule.
return None
submodule = fullname[len(self._module_name) + 1:]
parts = submodule.split('.')
# Pop the team prefix if this is a team import.
if self._teams:
team = parts.pop(0)
else:
team = None
# Handle full paths first.
if len(parts) == 2:
store, pkg = PackageStore.find_package(team, parts[0], parts[1])
if pkg is not None:
return PackageLoader(store, pkg)
else:
return None
# Return fake loaders for partial paths.
for store_dir in PackageStore.find_store_dirs():
store = PackageStore(store_dir)
if len(parts) == 0:
assert self._teams
path = store.team_path(team)
elif len(parts) == 1:
path = store.user_path(team, parts[0])
if os.path.isdir(path):
return FakeLoader(path)
# Nothing is found.
return None | [
"def",
"find_module",
"(",
"self",
",",
"fullname",
",",
"path",
"=",
"None",
")",
":",
"if",
"not",
"fullname",
".",
"startswith",
"(",
"self",
".",
"_module_name",
"+",
"'.'",
")",
":",
"# Not a quilt submodule.",
"return",
"None",
"submodule",
"=",
"fullname",
"[",
"len",
"(",
"self",
".",
"_module_name",
")",
"+",
"1",
":",
"]",
"parts",
"=",
"submodule",
".",
"split",
"(",
"'.'",
")",
"# Pop the team prefix if this is a team import.",
"if",
"self",
".",
"_teams",
":",
"team",
"=",
"parts",
".",
"pop",
"(",
"0",
")",
"else",
":",
"team",
"=",
"None",
"# Handle full paths first.",
"if",
"len",
"(",
"parts",
")",
"==",
"2",
":",
"store",
",",
"pkg",
"=",
"PackageStore",
".",
"find_package",
"(",
"team",
",",
"parts",
"[",
"0",
"]",
",",
"parts",
"[",
"1",
"]",
")",
"if",
"pkg",
"is",
"not",
"None",
":",
"return",
"PackageLoader",
"(",
"store",
",",
"pkg",
")",
"else",
":",
"return",
"None",
"# Return fake loaders for partial paths.",
"for",
"store_dir",
"in",
"PackageStore",
".",
"find_store_dirs",
"(",
")",
":",
"store",
"=",
"PackageStore",
"(",
"store_dir",
")",
"if",
"len",
"(",
"parts",
")",
"==",
"0",
":",
"assert",
"self",
".",
"_teams",
"path",
"=",
"store",
".",
"team_path",
"(",
"team",
")",
"elif",
"len",
"(",
"parts",
")",
"==",
"1",
":",
"path",
"=",
"store",
".",
"user_path",
"(",
"team",
",",
"parts",
"[",
"0",
"]",
")",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
":",
"return",
"FakeLoader",
"(",
"path",
")",
"# Nothing is found.",
"return",
"None"
]
| Looks up the table based on the module path. | [
"Looks",
"up",
"the",
"table",
"based",
"on",
"the",
"module",
"path",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/imports.py#L105-L144 | train |
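For context, a finder like the one above only participates in imports after it is registered on sys.meta_path; quilt's registration code is not shown in these records, so the sketch below uses a toy finder to demonstrate the PEP 302 hook point.

import sys

class NullFinder(object):
    """Toy meta-path finder: claims no modules, so imports fall through."""
    def find_module(self, fullname, path=None):
        # A real finder (like ModuleFinder above) would return a loader
        # object for names under its prefix, e.g. 'quilt.data.user.package'.
        return None

sys.meta_path.insert(0, NullFinder())  # consulted before the normal machinery
import json  # unaffected, since our finder returned None
print(json.dumps({"hooked": True}))
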
quiltdata/quilt | compiler/quilt/tools/build.py | _have_pyspark | def _have_pyspark():
"""
Check if we're running Pyspark
"""
if _have_pyspark.flag is None:
try:
if PackageStore.get_parquet_lib() is ParquetLib.SPARK:
import pyspark # pylint:disable=W0612
_have_pyspark.flag = True
else:
_have_pyspark.flag = False
except ImportError:
_have_pyspark.flag = False
return _have_pyspark.flag | python | def _have_pyspark():
"""
Check if we're running Pyspark
"""
if _have_pyspark.flag is None:
try:
if PackageStore.get_parquet_lib() is ParquetLib.SPARK:
import pyspark # pylint:disable=W0612
_have_pyspark.flag = True
else:
_have_pyspark.flag = False
except ImportError:
_have_pyspark.flag = False
return _have_pyspark.flag | [
"def",
"_have_pyspark",
"(",
")",
":",
"if",
"_have_pyspark",
".",
"flag",
"is",
"None",
":",
"try",
":",
"if",
"PackageStore",
".",
"get_parquet_lib",
"(",
")",
"is",
"ParquetLib",
".",
"SPARK",
":",
"import",
"pyspark",
"# pylint:disable=W0612",
"_have_pyspark",
".",
"flag",
"=",
"True",
"else",
":",
"_have_pyspark",
".",
"flag",
"=",
"False",
"except",
"ImportError",
":",
"_have_pyspark",
".",
"flag",
"=",
"False",
"return",
"_have_pyspark",
".",
"flag"
]
| Check if we're running Pyspark | [
"Check",
"if",
"we",
"re",
"running",
"Pyspark"
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/build.py#L37-L50 | train |
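_have_pyspark caches its answer on the function object itself; _have_pyspark.flag is presumably initialized to None at module level, outside this record. The idiom in isolation:

def expensive_check():
    # Cache the result on the function object after the first call.
    if expensive_check.flag is None:
        print("probing once...")
        expensive_check.flag = True  # stand-in for the real environment probe
    return expensive_check.flag

expensive_check.flag = None  # module-level initialization, as quilt presumably does

print(expensive_check())  # prints "probing once..." then True
print(expensive_check())  # True, from the cache
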
quiltdata/quilt | compiler/quilt/tools/build.py | _path_hash | def _path_hash(path, transform, kwargs):
"""
Generate a hash of source file path + transform + args
"""
sortedargs = ["%s:%r:%s" % (key, value, type(value))
for key, value in sorted(iteritems(kwargs))]
srcinfo = "{path}:{transform}:{{{kwargs}}}".format(path=os.path.abspath(path),
transform=transform,
kwargs=",".join(sortedargs))
return digest_string(srcinfo) | python | def _path_hash(path, transform, kwargs):
"""
Generate a hash of source file path + transform + args
"""
sortedargs = ["%s:%r:%s" % (key, value, type(value))
for key, value in sorted(iteritems(kwargs))]
srcinfo = "{path}:{transform}:{{{kwargs}}}".format(path=os.path.abspath(path),
transform=transform,
kwargs=",".join(sortedargs))
return digest_string(srcinfo) | [
"def",
"_path_hash",
"(",
"path",
",",
"transform",
",",
"kwargs",
")",
":",
"sortedargs",
"=",
"[",
"\"%s:%r:%s\"",
"%",
"(",
"key",
",",
"value",
",",
"type",
"(",
"value",
")",
")",
"for",
"key",
",",
"value",
"in",
"sorted",
"(",
"iteritems",
"(",
"kwargs",
")",
")",
"]",
"srcinfo",
"=",
"\"{path}:{transform}:{{{kwargs}}}\"",
".",
"format",
"(",
"path",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"path",
")",
",",
"transform",
"=",
"transform",
",",
"kwargs",
"=",
"\",\"",
".",
"join",
"(",
"sortedargs",
")",
")",
"return",
"digest_string",
"(",
"srcinfo",
")"
]
| Generate a hash of source file path + transform + args | [
"Generate",
"a",
"hash",
"of",
"source",
"file",
"path",
"+",
"transform",
"+",
"args"
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/build.py#L53-L62 | train |
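Sorting the kwargs before joining makes the hash independent of keyword order. A self-contained sketch, using sha256 as a stand-in for digest_string and skipping the abspath step so the output is stable across machines:

import hashlib

def path_hash_sketch(path, transform, kwargs):
    sortedargs = ["%s:%r:%s" % (k, v, type(v)) for k, v in sorted(kwargs.items())]
    srcinfo = "{path}:{transform}:{{{kwargs}}}".format(
        path=path, transform=transform, kwargs=",".join(sortedargs))
    return hashlib.sha256(srcinfo.encode()).hexdigest()

a = path_hash_sketch("data/x.csv", "csv", {"sep": ";", "header": 0})
b = path_hash_sketch("data/x.csv", "csv", {"header": 0, "sep": ";"})
assert a == b  # keyword order does not change the hash
print(a[:16])
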
quiltdata/quilt | compiler/quilt/tools/build.py | _gen_glob_data | def _gen_glob_data(dir, pattern, child_table):
"""Generates node data by globbing a directory for a pattern"""
dir = pathlib.Path(dir)
matched = False
used_names = set() # Used by to_nodename to prevent duplicate names
# sorted so that renames (if any) are consistently ordered
for filepath in sorted(dir.glob(pattern)):
if filepath.is_dir():
continue
else:
matched = True
# create node info
node_table = {} if child_table is None else child_table.copy()
filepath = filepath.relative_to(dir)
node_table[RESERVED['file']] = str(filepath)
node_name = to_nodename(filepath.stem, invalid=used_names)
used_names.add(node_name)
print("Matched with {!r}: {!r} from {!r}".format(pattern, node_name, str(filepath)))
yield node_name, node_table
if not matched:
print("Warning: {!r} matched no files.".format(pattern))
return | python | def _gen_glob_data(dir, pattern, child_table):
"""Generates node data by globbing a directory for a pattern"""
dir = pathlib.Path(dir)
matched = False
used_names = set() # Used by to_nodename to prevent duplicate names
# sorted so that renames (if any) are consistently ordered
for filepath in sorted(dir.glob(pattern)):
if filepath.is_dir():
continue
else:
matched = True
# create node info
node_table = {} if child_table is None else child_table.copy()
filepath = filepath.relative_to(dir)
node_table[RESERVED['file']] = str(filepath)
node_name = to_nodename(filepath.stem, invalid=used_names)
used_names.add(node_name)
print("Matched with {!r}: {!r} from {!r}".format(pattern, node_name, str(filepath)))
yield node_name, node_table
if not matched:
print("Warning: {!r} matched no files.".format(pattern))
return | [
"def",
"_gen_glob_data",
"(",
"dir",
",",
"pattern",
",",
"child_table",
")",
":",
"dir",
"=",
"pathlib",
".",
"Path",
"(",
"dir",
")",
"matched",
"=",
"False",
"used_names",
"=",
"set",
"(",
")",
"# Used by to_nodename to prevent duplicate names",
"# sorted so that renames (if any) are consistently ordered",
"for",
"filepath",
"in",
"sorted",
"(",
"dir",
".",
"glob",
"(",
"pattern",
")",
")",
":",
"if",
"filepath",
".",
"is_dir",
"(",
")",
":",
"continue",
"else",
":",
"matched",
"=",
"True",
"# create node info",
"node_table",
"=",
"{",
"}",
"if",
"child_table",
"is",
"None",
"else",
"child_table",
".",
"copy",
"(",
")",
"filepath",
"=",
"filepath",
".",
"relative_to",
"(",
"dir",
")",
"node_table",
"[",
"RESERVED",
"[",
"'file'",
"]",
"]",
"=",
"str",
"(",
"filepath",
")",
"node_name",
"=",
"to_nodename",
"(",
"filepath",
".",
"stem",
",",
"invalid",
"=",
"used_names",
")",
"used_names",
".",
"add",
"(",
"node_name",
")",
"print",
"(",
"\"Matched with {!r}: {!r} from {!r}\"",
".",
"format",
"(",
"pattern",
",",
"node_name",
",",
"str",
"(",
"filepath",
")",
")",
")",
"yield",
"node_name",
",",
"node_table",
"if",
"not",
"matched",
":",
"print",
"(",
"\"Warning: {!r} matched no files.\"",
".",
"format",
"(",
"pattern",
")",
")",
"return"
]
| Generates node data by globbing a directory for a pattern | [
"Generates",
"node",
"data",
"by",
"globbing",
"a",
"directory",
"for",
"a",
"pattern"
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/build.py#L95-L119 | train |
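A rough sketch of what the generator yields for a glob entry; the directory contents are invented and the stem-to-node-name step is a crude stand-in for to_nodename(), which also handles collisions and invalid characters:

import pathlib
import tempfile

tmp = pathlib.Path(tempfile.mkdtemp())
for name in ("jan-2021.csv", "feb-2021.csv"):
    (tmp / name).touch()

# One (node_name, node_table) pair per matched file, like _gen_glob_data.
for path in sorted(tmp.glob("*.csv")):
    node_name = path.stem.replace("-", "_")  # crude stand-in for to_nodename()
    print(node_name, "->", {"file": str(path.relative_to(tmp))})
# feb_2021 -> {'file': 'feb-2021.csv'}
# jan_2021 -> {'file': 'jan-2021.csv'}
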
quiltdata/quilt | compiler/quilt/tools/build.py | _remove_keywords | def _remove_keywords(d):
"""
    Copy the dict, filtering out reserved keywords.
Parameters
----------
d : dict
"""
return { k:v for k, v in iteritems(d) if k not in RESERVED } | python | def _remove_keywords(d):
"""
    Copy the dict, filtering out reserved keywords.
Parameters
----------
d : dict
"""
return { k:v for k, v in iteritems(d) if k not in RESERVED } | [
"def",
"_remove_keywords",
"(",
"d",
")",
":",
"return",
"{",
"k",
":",
"v",
"for",
"k",
",",
"v",
"in",
"iteritems",
"(",
"d",
")",
"if",
"k",
"not",
"in",
"RESERVED",
"}"
]
| Copy the dict, filtering out reserved keywords.
Parameters
----------
d : dict | [
"copy",
"the",
"dict",
"filter_keywords"
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/build.py#L372-L380 | train |
quiltdata/quilt | compiler/quilt/tools/build.py | build_package | def build_package(team, username, package, subpath, yaml_path,
checks_path=None, dry_run=False, env='default'):
"""
    Builds a package from a given Yaml file and installs it locally.
"""
def find(key, value):
"""
find matching nodes recursively;
only descend iterables that aren't strings
"""
if isinstance(value, Iterable) and not isinstance(value, string_types):
for k, v in iteritems(value):
if k == key:
yield v
elif isinstance(v, dict):
for result in find(key, v):
yield result
elif isinstance(v, list):
for item in v:
for result in find(key, item):
yield result
build_data = load_yaml(yaml_path)
# default to 'checks.yml' if build.yml contents: contains checks, but
# there's no inlined checks: defined by build.yml
if (checks_path is None and list(find('checks', build_data['contents'])) and
'checks' not in build_data):
checks_path = 'checks.yml'
checks_contents = load_yaml(checks_path, optional=True)
elif checks_path is not None:
checks_contents = load_yaml(checks_path)
else:
checks_contents = None
build_package_from_contents(team, username, package, subpath, os.path.dirname(yaml_path), build_data,
checks_contents=checks_contents, dry_run=dry_run, env=env) | python | def build_package(team, username, package, subpath, yaml_path,
checks_path=None, dry_run=False, env='default'):
"""
    Builds a package from a given Yaml file and installs it locally.
"""
def find(key, value):
"""
find matching nodes recursively;
only descend iterables that aren't strings
"""
if isinstance(value, Iterable) and not isinstance(value, string_types):
for k, v in iteritems(value):
if k == key:
yield v
elif isinstance(v, dict):
for result in find(key, v):
yield result
elif isinstance(v, list):
for item in v:
for result in find(key, item):
yield result
build_data = load_yaml(yaml_path)
# default to 'checks.yml' if build.yml contents: contains checks, but
# there's no inlined checks: defined by build.yml
if (checks_path is None and list(find('checks', build_data['contents'])) and
'checks' not in build_data):
checks_path = 'checks.yml'
checks_contents = load_yaml(checks_path, optional=True)
elif checks_path is not None:
checks_contents = load_yaml(checks_path)
else:
checks_contents = None
build_package_from_contents(team, username, package, subpath, os.path.dirname(yaml_path), build_data,
checks_contents=checks_contents, dry_run=dry_run, env=env) | [
"def",
"build_package",
"(",
"team",
",",
"username",
",",
"package",
",",
"subpath",
",",
"yaml_path",
",",
"checks_path",
"=",
"None",
",",
"dry_run",
"=",
"False",
",",
"env",
"=",
"'default'",
")",
":",
"def",
"find",
"(",
"key",
",",
"value",
")",
":",
"\"\"\"\n find matching nodes recursively;\n only descend iterables that aren't strings\n \"\"\"",
"if",
"isinstance",
"(",
"value",
",",
"Iterable",
")",
"and",
"not",
"isinstance",
"(",
"value",
",",
"string_types",
")",
":",
"for",
"k",
",",
"v",
"in",
"iteritems",
"(",
"value",
")",
":",
"if",
"k",
"==",
"key",
":",
"yield",
"v",
"elif",
"isinstance",
"(",
"v",
",",
"dict",
")",
":",
"for",
"result",
"in",
"find",
"(",
"key",
",",
"v",
")",
":",
"yield",
"result",
"elif",
"isinstance",
"(",
"v",
",",
"list",
")",
":",
"for",
"item",
"in",
"v",
":",
"for",
"result",
"in",
"find",
"(",
"key",
",",
"item",
")",
":",
"yield",
"result",
"build_data",
"=",
"load_yaml",
"(",
"yaml_path",
")",
"# default to 'checks.yml' if build.yml contents: contains checks, but",
"# there's no inlined checks: defined by build.yml",
"if",
"(",
"checks_path",
"is",
"None",
"and",
"list",
"(",
"find",
"(",
"'checks'",
",",
"build_data",
"[",
"'contents'",
"]",
")",
")",
"and",
"'checks'",
"not",
"in",
"build_data",
")",
":",
"checks_path",
"=",
"'checks.yml'",
"checks_contents",
"=",
"load_yaml",
"(",
"checks_path",
",",
"optional",
"=",
"True",
")",
"elif",
"checks_path",
"is",
"not",
"None",
":",
"checks_contents",
"=",
"load_yaml",
"(",
"checks_path",
")",
"else",
":",
"checks_contents",
"=",
"None",
"build_package_from_contents",
"(",
"team",
",",
"username",
",",
"package",
",",
"subpath",
",",
"os",
".",
"path",
".",
"dirname",
"(",
"yaml_path",
")",
",",
"build_data",
",",
"checks_contents",
"=",
"checks_contents",
",",
"dry_run",
"=",
"dry_run",
",",
"env",
"=",
"env",
")"
]
| Builds a package from a given Yaml file and installs it locally. | [
"Builds",
"a",
"package",
"from",
"a",
"given",
"Yaml",
"file",
"and",
"installs",
"it",
"locally",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/build.py#L454-L490 | train |
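The nested find() generator is what lets build_package spot a checks key anywhere inside contents. The same traversal on a toy dict (simplified to plain dicts and lists, without the Iterable/string_types imports):

def find(key, value):
    # Yield every value stored under `key`, without descending into matches.
    if isinstance(value, dict):
        for k, v in value.items():
            if k == key:
                yield v
            elif isinstance(v, dict):
                for result in find(key, v):
                    yield result
            elif isinstance(v, list):
                for item in v:
                    for result in find(key, item):
                        yield result

contents = {"tables": {"checks": "not_null"}, "files": [{"checks": "positive"}]}
print(list(find("checks", contents)))  # ['not_null', 'positive']
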
quiltdata/quilt | registry/quilt_server/mail.py | send_comment_email | def send_comment_email(email, package_owner, package_name, commenter):
"""Send email to owner of package regarding new comment"""
link = '{CATALOG_URL}/package/{owner}/{pkg}/comments'.format(
CATALOG_URL=CATALOG_URL, owner=package_owner, pkg=package_name)
subject = "New comment on {package_owner}/{package_name}".format(
package_owner=package_owner, package_name=package_name)
html = render_template('comment_email.html', commenter=commenter, link=link)
body = render_template('comment_email.txt', commenter=commenter, link=link)
send_email(recipients=[email], sender=DEFAULT_SENDER, subject=subject,
html=html, body=body) | python | def send_comment_email(email, package_owner, package_name, commenter):
"""Send email to owner of package regarding new comment"""
link = '{CATALOG_URL}/package/{owner}/{pkg}/comments'.format(
CATALOG_URL=CATALOG_URL, owner=package_owner, pkg=package_name)
subject = "New comment on {package_owner}/{package_name}".format(
package_owner=package_owner, package_name=package_name)
html = render_template('comment_email.html', commenter=commenter, link=link)
body = render_template('comment_email.txt', commenter=commenter, link=link)
send_email(recipients=[email], sender=DEFAULT_SENDER, subject=subject,
html=html, body=body) | [
"def",
"send_comment_email",
"(",
"email",
",",
"package_owner",
",",
"package_name",
",",
"commenter",
")",
":",
"link",
"=",
"'{CATALOG_URL}/package/{owner}/{pkg}/comments'",
".",
"format",
"(",
"CATALOG_URL",
"=",
"CATALOG_URL",
",",
"owner",
"=",
"package_owner",
",",
"pkg",
"=",
"package_name",
")",
"subject",
"=",
"\"New comment on {package_owner}/{package_name}\"",
".",
"format",
"(",
"package_owner",
"=",
"package_owner",
",",
"package_name",
"=",
"package_name",
")",
"html",
"=",
"render_template",
"(",
"'comment_email.html'",
",",
"commenter",
"=",
"commenter",
",",
"link",
"=",
"link",
")",
"body",
"=",
"render_template",
"(",
"'comment_email.txt'",
",",
"commenter",
"=",
"commenter",
",",
"link",
"=",
"link",
")",
"send_email",
"(",
"recipients",
"=",
"[",
"email",
"]",
",",
"sender",
"=",
"DEFAULT_SENDER",
",",
"subject",
"=",
"subject",
",",
"html",
"=",
"html",
",",
"body",
"=",
"body",
")"
]
| Send email to owner of package regarding new comment | [
"Send",
"email",
"to",
"owner",
"of",
"package",
"regarding",
"new",
"comment"
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/registry/quilt_server/mail.py#L75-L84 | train |
quiltdata/quilt | compiler/quilt/tools/core.py | hash_contents | def hash_contents(contents):
"""
Creates a hash of key names and hashes in a package dictionary.
"contents" must be a GroupNode.
"""
assert isinstance(contents, GroupNode)
result = hashlib.sha256()
def _hash_int(value):
result.update(struct.pack(">L", value))
def _hash_str(string):
assert isinstance(string, string_types)
_hash_int(len(string))
result.update(string.encode())
def _hash_object(obj):
_hash_str(obj.json_type)
if isinstance(obj, (TableNode, FileNode)):
hashes = obj.hashes
_hash_int(len(hashes))
for hval in hashes:
_hash_str(hval)
elif isinstance(obj, GroupNode):
children = obj.children
_hash_int(len(children))
for key, child in sorted(iteritems(children)):
_hash_str(key)
_hash_object(child)
else:
assert False, "Unexpected object: %r" % obj
# Backward compatibility: only hash metadata_hash if it's present.
if obj.metadata_hash is not None:
_hash_str(obj.metadata_hash)
_hash_object(contents)
return result.hexdigest() | python | def hash_contents(contents):
"""
Creates a hash of key names and hashes in a package dictionary.
"contents" must be a GroupNode.
"""
assert isinstance(contents, GroupNode)
result = hashlib.sha256()
def _hash_int(value):
result.update(struct.pack(">L", value))
def _hash_str(string):
assert isinstance(string, string_types)
_hash_int(len(string))
result.update(string.encode())
def _hash_object(obj):
_hash_str(obj.json_type)
if isinstance(obj, (TableNode, FileNode)):
hashes = obj.hashes
_hash_int(len(hashes))
for hval in hashes:
_hash_str(hval)
elif isinstance(obj, GroupNode):
children = obj.children
_hash_int(len(children))
for key, child in sorted(iteritems(children)):
_hash_str(key)
_hash_object(child)
else:
assert False, "Unexpected object: %r" % obj
# Backward compatibility: only hash metadata_hash if it's present.
if obj.metadata_hash is not None:
_hash_str(obj.metadata_hash)
_hash_object(contents)
return result.hexdigest() | [
"def",
"hash_contents",
"(",
"contents",
")",
":",
"assert",
"isinstance",
"(",
"contents",
",",
"GroupNode",
")",
"result",
"=",
"hashlib",
".",
"sha256",
"(",
")",
"def",
"_hash_int",
"(",
"value",
")",
":",
"result",
".",
"update",
"(",
"struct",
".",
"pack",
"(",
"\">L\"",
",",
"value",
")",
")",
"def",
"_hash_str",
"(",
"string",
")",
":",
"assert",
"isinstance",
"(",
"string",
",",
"string_types",
")",
"_hash_int",
"(",
"len",
"(",
"string",
")",
")",
"result",
".",
"update",
"(",
"string",
".",
"encode",
"(",
")",
")",
"def",
"_hash_object",
"(",
"obj",
")",
":",
"_hash_str",
"(",
"obj",
".",
"json_type",
")",
"if",
"isinstance",
"(",
"obj",
",",
"(",
"TableNode",
",",
"FileNode",
")",
")",
":",
"hashes",
"=",
"obj",
".",
"hashes",
"_hash_int",
"(",
"len",
"(",
"hashes",
")",
")",
"for",
"hval",
"in",
"hashes",
":",
"_hash_str",
"(",
"hval",
")",
"elif",
"isinstance",
"(",
"obj",
",",
"GroupNode",
")",
":",
"children",
"=",
"obj",
".",
"children",
"_hash_int",
"(",
"len",
"(",
"children",
")",
")",
"for",
"key",
",",
"child",
"in",
"sorted",
"(",
"iteritems",
"(",
"children",
")",
")",
":",
"_hash_str",
"(",
"key",
")",
"_hash_object",
"(",
"child",
")",
"else",
":",
"assert",
"False",
",",
"\"Unexpected object: %r\"",
"%",
"obj",
"# Backward compatibility: only hash metadata_hash if it's present.",
"if",
"obj",
".",
"metadata_hash",
"is",
"not",
"None",
":",
"_hash_str",
"(",
"obj",
".",
"metadata_hash",
")",
"_hash_object",
"(",
"contents",
")",
"return",
"result",
".",
"hexdigest",
"(",
")"
]
| Creates a hash of key names and hashes in a package dictionary.
"contents" must be a GroupNode. | [
"Creates",
"a",
"hash",
"of",
"key",
"names",
"and",
"hashes",
"in",
"a",
"package",
"dictionary",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/core.py#L144-L184 | train |
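The _hash_int/_hash_str helpers length-prefix every field before hashing, which is what keeps structurally different trees from colliding; plain concatenation would hash ('ab', 'c') and ('a', 'bc') to the same digest. The scheme in isolation:

import hashlib
import struct

def digest(strings):
    h = hashlib.sha256()
    for s in strings:
        h.update(struct.pack(">L", len(s)))  # 4-byte big-endian length prefix
        h.update(s.encode())
    return h.hexdigest()

assert digest(["ab", "c"]) != digest(["a", "bc"])    # prefixes disambiguate
assert "ab" + "c" == "a" + "bc"                      # raw bytes would collide
print(digest(["ab", "c"])[:16])
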
quiltdata/quilt | compiler/quilt/tools/core.py | find_object_hashes | def find_object_hashes(root, meta_only=False):
"""
Iterator that returns hashes of all of the file and table nodes.
:param root: starting node
"""
stack = [root]
while stack:
obj = stack.pop()
if not meta_only and isinstance(obj, (TableNode, FileNode)):
for objhash in obj.hashes:
yield objhash
stack.extend(itervalues(obj.get_children()))
if obj.metadata_hash is not None:
yield obj.metadata_hash | python | def find_object_hashes(root, meta_only=False):
"""
Iterator that returns hashes of all of the file and table nodes.
:param root: starting node
"""
stack = [root]
while stack:
obj = stack.pop()
if not meta_only and isinstance(obj, (TableNode, FileNode)):
for objhash in obj.hashes:
yield objhash
stack.extend(itervalues(obj.get_children()))
if obj.metadata_hash is not None:
yield obj.metadata_hash | [
"def",
"find_object_hashes",
"(",
"root",
",",
"meta_only",
"=",
"False",
")",
":",
"stack",
"=",
"[",
"root",
"]",
"while",
"stack",
":",
"obj",
"=",
"stack",
".",
"pop",
"(",
")",
"if",
"not",
"meta_only",
"and",
"isinstance",
"(",
"obj",
",",
"(",
"TableNode",
",",
"FileNode",
")",
")",
":",
"for",
"objhash",
"in",
"obj",
".",
"hashes",
":",
"yield",
"objhash",
"stack",
".",
"extend",
"(",
"itervalues",
"(",
"obj",
".",
"get_children",
"(",
")",
")",
")",
"if",
"obj",
".",
"metadata_hash",
"is",
"not",
"None",
":",
"yield",
"obj",
".",
"metadata_hash"
]
| Iterator that returns hashes of all of the file and table nodes.
:param root: starting node | [
"Iterator",
"that",
"returns",
"hashes",
"of",
"all",
"of",
"the",
"file",
"and",
"table",
"nodes",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/core.py#L186-L200 | train |
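Using an explicit stack makes this a non-recursive depth-first walk, so deeply nested package trees cannot hit Python's recursion limit. The same shape on a toy tree of dicts and leaf strings:

def iter_leaves(root):
    # Nested dicts stand in for group nodes; strings stand in for object hashes.
    stack = [root]
    while stack:
        node = stack.pop()
        if isinstance(node, str):
            yield node
        else:
            stack.extend(node.values())

tree = {"raw": {"a": "hash1", "b": "hash2"}, "meta": "hash3"}
print(sorted(iter_leaves(tree)))  # ['hash1', 'hash2', 'hash3']
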
quiltdata/quilt | registry/quilt_server/analytics.py | _send_event_task | def _send_event_task(args):
"""
Actually sends the MixPanel event. Runs in a uwsgi worker process.
"""
endpoint = args['endpoint']
json_message = args['json_message']
_consumer_impl.send(endpoint, json_message) | python | def _send_event_task(args):
"""
Actually sends the MixPanel event. Runs in a uwsgi worker process.
"""
endpoint = args['endpoint']
json_message = args['json_message']
_consumer_impl.send(endpoint, json_message) | [
"def",
"_send_event_task",
"(",
"args",
")",
":",
"endpoint",
"=",
"args",
"[",
"'endpoint'",
"]",
"json_message",
"=",
"args",
"[",
"'json_message'",
"]",
"_consumer_impl",
".",
"send",
"(",
"endpoint",
",",
"json_message",
")"
]
| Actually sends the MixPanel event. Runs in a uwsgi worker process. | [
"Actually",
"sends",
"the",
"MixPanel",
"event",
".",
"Runs",
"in",
"a",
"uwsgi",
"worker",
"process",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/registry/quilt_server/analytics.py#L28-L34 | train |
quiltdata/quilt | registry/quilt_server/analytics.py | AsyncConsumer.send | def send(self, endpoint, json_message):
"""
Queues the message to be sent.
"""
_send_event_task.spool(endpoint=endpoint, json_message=json_message) | python | def send(self, endpoint, json_message):
"""
Queues the message to be sent.
"""
_send_event_task.spool(endpoint=endpoint, json_message=json_message) | [
"def",
"send",
"(",
"self",
",",
"endpoint",
",",
"json_message",
")",
":",
"_send_event_task",
".",
"spool",
"(",
"endpoint",
"=",
"endpoint",
",",
"json_message",
"=",
"json_message",
")"
]
| Queues the message to be sent. | [
"Queues",
"the",
"message",
"to",
"be",
"sent",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/registry/quilt_server/analytics.py#L41-L45 | train |
quiltdata/quilt | compiler/quilt/tools/main.py | main | def main(args=None):
"""Build and run parser
:param args: cli args from tests
"""
parser = argument_parser()
args = parser.parse_args(args)
# If 'func' isn't present, something is misconfigured above or no (positional) arg was given.
if not hasattr(args, 'func'):
args = parser.parse_args(['help']) # show help
# Convert argparse.Namespace into dict and clean it up.
# We can then pass it directly to the helper function.
kwargs = vars(args)
# handle the '--dev' option
if kwargs.pop('dev') or os.environ.get('QUILT_DEV_MODE', '').strip().lower() == 'true':
# Enables CLI ctrl-c tracebacks, and whatever anyone else uses it for
quilt._DEV_MODE = True
else:
# Disables CLI ctrl-c tracebacks, etc.
quilt._DEV_MODE = False
func = kwargs.pop('func')
try:
func(**kwargs)
return 0
except QuiltException as ex:
print(ex.message, file=sys.stderr)
return 1
except requests.exceptions.ConnectionError as ex:
print("Failed to connect: %s" % ex, file=sys.stderr)
return 1 | python | def main(args=None):
"""Build and run parser
:param args: cli args from tests
"""
parser = argument_parser()
args = parser.parse_args(args)
# If 'func' isn't present, something is misconfigured above or no (positional) arg was given.
if not hasattr(args, 'func'):
args = parser.parse_args(['help']) # show help
# Convert argparse.Namespace into dict and clean it up.
# We can then pass it directly to the helper function.
kwargs = vars(args)
# handle the '--dev' option
if kwargs.pop('dev') or os.environ.get('QUILT_DEV_MODE', '').strip().lower() == 'true':
# Enables CLI ctrl-c tracebacks, and whatever anyone else uses it for
quilt._DEV_MODE = True
else:
# Disables CLI ctrl-c tracebacks, etc.
quilt._DEV_MODE = False
func = kwargs.pop('func')
try:
func(**kwargs)
return 0
except QuiltException as ex:
print(ex.message, file=sys.stderr)
return 1
except requests.exceptions.ConnectionError as ex:
print("Failed to connect: %s" % ex, file=sys.stderr)
return 1 | [
"def",
"main",
"(",
"args",
"=",
"None",
")",
":",
"parser",
"=",
"argument_parser",
"(",
")",
"args",
"=",
"parser",
".",
"parse_args",
"(",
"args",
")",
"# If 'func' isn't present, something is misconfigured above or no (positional) arg was given.",
"if",
"not",
"hasattr",
"(",
"args",
",",
"'func'",
")",
":",
"args",
"=",
"parser",
".",
"parse_args",
"(",
"[",
"'help'",
"]",
")",
"# show help",
"# Convert argparse.Namespace into dict and clean it up.",
"# We can then pass it directly to the helper function.",
"kwargs",
"=",
"vars",
"(",
"args",
")",
"# handle the '--dev' option",
"if",
"kwargs",
".",
"pop",
"(",
"'dev'",
")",
"or",
"os",
".",
"environ",
".",
"get",
"(",
"'QUILT_DEV_MODE'",
",",
"''",
")",
".",
"strip",
"(",
")",
".",
"lower",
"(",
")",
"==",
"'true'",
":",
"# Enables CLI ctrl-c tracebacks, and whatever anyone else uses it for",
"quilt",
".",
"_DEV_MODE",
"=",
"True",
"else",
":",
"# Disables CLI ctrl-c tracebacks, etc.",
"quilt",
".",
"_DEV_MODE",
"=",
"False",
"func",
"=",
"kwargs",
".",
"pop",
"(",
"'func'",
")",
"try",
":",
"func",
"(",
"*",
"*",
"kwargs",
")",
"return",
"0",
"except",
"QuiltException",
"as",
"ex",
":",
"print",
"(",
"ex",
".",
"message",
",",
"file",
"=",
"sys",
".",
"stderr",
")",
"return",
"1",
"except",
"requests",
".",
"exceptions",
".",
"ConnectionError",
"as",
"ex",
":",
"print",
"(",
"\"Failed to connect: %s\"",
"%",
"ex",
",",
"file",
"=",
"sys",
".",
"stderr",
")",
"return",
"1"
]
| Build and run parser
:param args: cli args from tests | [
"Build",
"and",
"run",
"parser"
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/main.py#L338-L372 | train |
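main() relies on each subcommand attaching its handler through set_defaults(func=...) so that vars(args) can be passed straight to the handler. The dispatch pattern with a made-up subcommand:

import argparse

def cmd_greet(name):
    print("hello,", name)

parser = argparse.ArgumentParser()
sub = parser.add_subparsers()
greet = sub.add_parser('greet')     # hypothetical subcommand
greet.add_argument('name')
greet.set_defaults(func=cmd_greet)

args = parser.parse_args(['greet', 'quilt'])
kwargs = vars(args)
func = kwargs.pop('func')           # same pop-and-call shape as main() above
func(**kwargs)                      # prints: hello, quilt
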
quiltdata/quilt | compiler/quilt/tools/util.py | is_identifier | def is_identifier(string):
"""Check if string could be a valid python identifier
:param string: string to be tested
:returns: True if string can be a python identifier, False otherwise
:rtype: bool
"""
matched = PYTHON_IDENTIFIER_RE.match(string)
return bool(matched) and not keyword.iskeyword(string) | python | def is_identifier(string):
"""Check if string could be a valid python identifier
:param string: string to be tested
:returns: True if string can be a python identifier, False otherwise
:rtype: bool
"""
matched = PYTHON_IDENTIFIER_RE.match(string)
return bool(matched) and not keyword.iskeyword(string) | [
"def",
"is_identifier",
"(",
"string",
")",
":",
"matched",
"=",
"PYTHON_IDENTIFIER_RE",
".",
"match",
"(",
"string",
")",
"return",
"bool",
"(",
"matched",
")",
"and",
"not",
"keyword",
".",
"iskeyword",
"(",
"string",
")"
]
| Check if string could be a valid python identifier
:param string: string to be tested
:returns: True if string can be a python identifier, False otherwise
:rtype: bool | [
"Check",
"if",
"string",
"could",
"be",
"a",
"valid",
"python",
"identifier"
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/util.py#L160-L168 | train |
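Usage is direct; the keyword check matters because names like 'class' satisfy the identifier regex. PYTHON_IDENTIFIER_RE is defined elsewhere in the module, so the definition below is an assumed equivalent:

import keyword
import re

PYTHON_IDENTIFIER_RE = re.compile(r'^[A-Za-z_]\w*$')  # assumed definition

def is_identifier(string):
    matched = PYTHON_IDENTIFIER_RE.match(string)
    return bool(matched) and not keyword.iskeyword(string)

print(is_identifier("my_table"))  # True
print(is_identifier("2fast"))     # False: starts with a digit
print(is_identifier("class"))     # False: keyword, despite matching the regex
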
quiltdata/quilt | compiler/quilt/tools/util.py | fs_link | def fs_link(path, linkpath, linktype='soft'):
"""Create a hard or soft link of `path` at `linkpath`
Works on Linux/OSX/Windows (Vista+).
    :param path: File or directory to be linked
    :param linkpath: Path of link to create
:param linktype: 'soft' or 'hard'
"""
global WIN_SOFTLINK
global WIN_HARDLINK
WIN_NO_ERROR = 22
assert linktype in ('soft', 'hard')
path, linkpath = pathlib.Path(path), pathlib.Path(linkpath)
# Checks
if not path.exists(): # particularly important on Windows to prevent false success
raise QuiltException("Path to link to does not exist: {}".format(path))
if linkpath.exists():
raise QuiltException("Link path already exists: {}".format(linkpath))
# Windows
if os.name == 'nt':
# clear out any pre-existing, un-checked errors
ctypes.WinError()
# Check Windows version (reasonably) supports symlinks
if not sys.getwindowsversion()[0] >= 6:
raise QuiltException("Unsupported operation: This version of Windows does not support linking.")
# Acquire the windows CreateXLinkW() function
if linktype == 'soft':
if WIN_SOFTLINK is None:
WIN_SOFTLINK = ctypes.windll.kernel32.CreateSymbolicLinkW
WIN_SOFTLINK.restype = ctypes.c_bool
create_link = lambda l, p: WIN_SOFTLINK(str(l), str(p), p.is_dir())
elif linktype == 'hard':
if WIN_HARDLINK is None:
WIN_HARDLINK = ctypes.windll.kernel32.CreateHardLinkW
WIN_HARDLINK.restype = ctypes.c_bool
create_link = WIN_HARDLINK
# Call and check results
create_link(linkpath, path)
# Check WinError, because the return value for CreateSymbolicLinkW's type is suspect due to a
# (possible) bug: https://stackoverflow.com/questions/33010440/createsymboliclink-on-windows-10
# We have user results with similar effects (success reported, but not actual)
error = ctypes.WinError()
if error.winerror:
raise QuiltException("Linking failed: " + str(error), original_error=error)
        # Handle the case where linking failed and windows gave no error:
if not linkpath.exists() and linkpath.is_symlink():
raise QuiltException("Linking failed: Expected symlink at: {}".format(linkpath))
# Linux, OSX
else:
try:
if linktype == 'soft':
linkpath.symlink_to(path)
elif linktype == 'hard':
os.link(str(path), str(linkpath))
except OSError as error:
raise QuiltException("Linking failed: " + str(error), original_error=error) | python | def fs_link(path, linkpath, linktype='soft'):
"""Create a hard or soft link of `path` at `linkpath`
Works on Linux/OSX/Windows (Vista+).
    :param path: File or directory to be linked
    :param linkpath: Path of link to create
:param linktype: 'soft' or 'hard'
"""
global WIN_SOFTLINK
global WIN_HARDLINK
WIN_NO_ERROR = 22
assert linktype in ('soft', 'hard')
path, linkpath = pathlib.Path(path), pathlib.Path(linkpath)
# Checks
if not path.exists(): # particularly important on Windows to prevent false success
raise QuiltException("Path to link to does not exist: {}".format(path))
if linkpath.exists():
raise QuiltException("Link path already exists: {}".format(linkpath))
# Windows
if os.name == 'nt':
# clear out any pre-existing, un-checked errors
ctypes.WinError()
# Check Windows version (reasonably) supports symlinks
if not sys.getwindowsversion()[0] >= 6:
raise QuiltException("Unsupported operation: This version of Windows does not support linking.")
# Acquire the windows CreateXLinkW() function
if linktype == 'soft':
if WIN_SOFTLINK is None:
WIN_SOFTLINK = ctypes.windll.kernel32.CreateSymbolicLinkW
WIN_SOFTLINK.restype = ctypes.c_bool
create_link = lambda l, p: WIN_SOFTLINK(str(l), str(p), p.is_dir())
elif linktype == 'hard':
if WIN_HARDLINK is None:
WIN_HARDLINK = ctypes.windll.kernel32.CreateHardLinkW
WIN_HARDLINK.restype = ctypes.c_bool
create_link = WIN_HARDLINK
# Call and check results
create_link(linkpath, path)
# Check WinError, because the return value for CreateSymbolicLinkW's type is suspect due to a
# (possible) bug: https://stackoverflow.com/questions/33010440/createsymboliclink-on-windows-10
# We have user results with similar effects (success reported, but not actual)
error = ctypes.WinError()
if error.winerror:
raise QuiltException("Linking failed: " + str(error), original_error=error)
        # Handle the case where linking failed and windows gave no error:
if not linkpath.exists() and linkpath.is_symlink():
raise QuiltException("Linking failed: Expected symlink at: {}".format(linkpath))
# Linux, OSX
else:
try:
if linktype == 'soft':
linkpath.symlink_to(path)
elif linktype == 'hard':
os.link(str(path), str(linkpath))
except OSError as error:
raise QuiltException("Linking failed: " + str(error), original_error=error) | [
"def",
"fs_link",
"(",
"path",
",",
"linkpath",
",",
"linktype",
"=",
"'soft'",
")",
":",
"global",
"WIN_SOFTLINK",
"global",
"WIN_HARDLINK",
"WIN_NO_ERROR",
"=",
"22",
"assert",
"linktype",
"in",
"(",
"'soft'",
",",
"'hard'",
")",
"path",
",",
"linkpath",
"=",
"pathlib",
".",
"Path",
"(",
"path",
")",
",",
"pathlib",
".",
"Path",
"(",
"linkpath",
")",
"# Checks",
"if",
"not",
"path",
".",
"exists",
"(",
")",
":",
"# particularly important on Windows to prevent false success",
"raise",
"QuiltException",
"(",
"\"Path to link to does not exist: {}\"",
".",
"format",
"(",
"path",
")",
")",
"if",
"linkpath",
".",
"exists",
"(",
")",
":",
"raise",
"QuiltException",
"(",
"\"Link path already exists: {}\"",
".",
"format",
"(",
"linkpath",
")",
")",
"# Windows",
"if",
"os",
".",
"name",
"==",
"'nt'",
":",
"# clear out any pre-existing, un-checked errors",
"ctypes",
".",
"WinError",
"(",
")",
"# Check Windows version (reasonably) supports symlinks",
"if",
"not",
"sys",
".",
"getwindowsversion",
"(",
")",
"[",
"0",
"]",
">=",
"6",
":",
"raise",
"QuiltException",
"(",
"\"Unsupported operation: This version of Windows does not support linking.\"",
")",
"# Acquire the windows CreateXLinkW() function",
"if",
"linktype",
"==",
"'soft'",
":",
"if",
"WIN_SOFTLINK",
"is",
"None",
":",
"WIN_SOFTLINK",
"=",
"ctypes",
".",
"windll",
".",
"kernel32",
".",
"CreateSymbolicLinkW",
"WIN_SOFTLINK",
".",
"restype",
"=",
"ctypes",
".",
"c_bool",
"create_link",
"=",
"lambda",
"l",
",",
"p",
":",
"WIN_SOFTLINK",
"(",
"str",
"(",
"l",
")",
",",
"str",
"(",
"p",
")",
",",
"p",
".",
"is_dir",
"(",
")",
")",
"elif",
"linktype",
"==",
"'hard'",
":",
"if",
"WIN_HARDLINK",
"is",
"None",
":",
"WIN_HARDLINK",
"=",
"ctypes",
".",
"windll",
".",
"kernel32",
".",
"CreateHardLinkW",
"WIN_HARDLINK",
".",
"restype",
"=",
"ctypes",
".",
"c_bool",
"create_link",
"=",
"WIN_HARDLINK",
"# Call and check results",
"create_link",
"(",
"linkpath",
",",
"path",
")",
"# Check WinError, because the return value for CreateSymbolicLinkW's type is suspect due to a",
"# (possible) bug: https://stackoverflow.com/questions/33010440/createsymboliclink-on-windows-10",
"# We have user results with similar effects (success reported, but not actual)",
"error",
"=",
"ctypes",
".",
"WinError",
"(",
")",
"if",
"error",
".",
"winerror",
":",
"raise",
"QuiltException",
"(",
"\"Linking failed: \"",
"+",
"str",
"(",
"error",
")",
",",
"original_error",
"=",
"error",
")",
"# Handle the case wehere linking failed and windows gave no error:",
"if",
"not",
"linkpath",
".",
"exists",
"(",
")",
"and",
"linkpath",
".",
"is_symlink",
"(",
")",
":",
"raise",
"QuiltException",
"(",
"\"Linking failed: Expected symlink at: {}\"",
".",
"format",
"(",
"linkpath",
")",
")",
"# Linux, OSX",
"else",
":",
"try",
":",
"if",
"linktype",
"==",
"'soft'",
":",
"linkpath",
".",
"symlink_to",
"(",
"path",
")",
"elif",
"linktype",
"==",
"'hard'",
":",
"os",
".",
"link",
"(",
"str",
"(",
"path",
")",
",",
"str",
"(",
"linkpath",
")",
")",
"except",
"OSError",
"as",
"error",
":",
"raise",
"QuiltException",
"(",
"\"Linking failed: \"",
"+",
"str",
"(",
"error",
")",
",",
"original_error",
"=",
"error",
")"
]
| Create a hard or soft link of `path` at `linkpath`
Works on Linux/OSX/Windows (Vista+).
:param path: File or directory to be linked
:param linkpath: Path of link to create
:param linktype: 'soft' or 'hard' | [
"Create",
"a",
"hard",
"or",
"soft",
"link",
"of",
"path",
"at",
"linkpath"
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/util.py#L289-L352 | train |
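On POSIX, fs_link reduces to the two standard-library calls below; the Windows branch goes through ctypes instead. A usage sketch with throwaway files:

import os
import pathlib
import tempfile

tmp = pathlib.Path(tempfile.mkdtemp())
target = tmp / "data.bin"
target.write_bytes(b"abc")

hard = tmp / "data_hard.bin"
os.link(str(target), str(hard))            # what fs_link(..., 'hard') calls
assert os.path.samefile(str(target), str(hard))

soft = tmp / "data_soft.bin"
soft.symlink_to(target)                    # what fs_link(..., 'soft') calls
print(soft.read_bytes() == b"abc")         # True
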
quiltdata/quilt | compiler/quilt/tools/util.py | FileWithReadProgress.read | def read(self, size=-1):
"""Read bytes and update the progress bar."""
buf = self._fd.read(size)
self._progress_cb(len(buf))
return buf | python | def read(self, size=-1):
"""Read bytes and update the progress bar."""
buf = self._fd.read(size)
self._progress_cb(len(buf))
return buf | [
"def",
"read",
"(",
"self",
",",
"size",
"=",
"-",
"1",
")",
":",
"buf",
"=",
"self",
".",
"_fd",
".",
"read",
"(",
"size",
")",
"self",
".",
"_progress_cb",
"(",
"len",
"(",
"buf",
")",
")",
"return",
"buf"
]
| Read bytes and update the progress bar. | [
"Read",
"bytes",
"and",
"update",
"the",
"progress",
"bar",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/util.py#L81-L85 | train |
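The read-and-report pattern above in isolation: a file-like wrapper that forwards reads and feeds byte counts to a callback. Class and callback names here are illustrative, not quilt's:

import io

class ReadProgress(object):
    def __init__(self, fd, progress_cb):
        self._fd = fd
        self._progress_cb = progress_cb

    def read(self, size=-1):
        buf = self._fd.read(size)
        self._progress_cb(len(buf))  # report bytes actually read, not requested
        return buf

chunks = []
wrapped = ReadProgress(io.BytesIO(b"0123456789"), chunks.append)
wrapped.read(4)
wrapped.read()
print(chunks, sum(chunks))  # [4, 6] 10
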
quiltdata/quilt | compiler/quilt/tools/store.py | PackageStore.create_dirs | def create_dirs(self):
"""
Creates the store directory and its subdirectories.
"""
if not os.path.isdir(self._path):
os.makedirs(self._path)
for dir_name in [self.OBJ_DIR, self.TMP_OBJ_DIR, self.PKG_DIR, self.CACHE_DIR]:
path = os.path.join(self._path, dir_name)
if not os.path.isdir(path):
os.mkdir(path)
if not os.path.exists(self._version_path()):
self._write_format_version() | python | def create_dirs(self):
"""
Creates the store directory and its subdirectories.
"""
if not os.path.isdir(self._path):
os.makedirs(self._path)
for dir_name in [self.OBJ_DIR, self.TMP_OBJ_DIR, self.PKG_DIR, self.CACHE_DIR]:
path = os.path.join(self._path, dir_name)
if not os.path.isdir(path):
os.mkdir(path)
if not os.path.exists(self._version_path()):
self._write_format_version() | [
"def",
"create_dirs",
"(",
"self",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"self",
".",
"_path",
")",
":",
"os",
".",
"makedirs",
"(",
"self",
".",
"_path",
")",
"for",
"dir_name",
"in",
"[",
"self",
".",
"OBJ_DIR",
",",
"self",
".",
"TMP_OBJ_DIR",
",",
"self",
".",
"PKG_DIR",
",",
"self",
".",
"CACHE_DIR",
"]",
":",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"_path",
",",
"dir_name",
")",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
":",
"os",
".",
"mkdir",
"(",
"path",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"_version_path",
"(",
")",
")",
":",
"self",
".",
"_write_format_version",
"(",
")"
]
| Creates the store directory and its subdirectories. | [
"Creates",
"the",
"store",
"directory",
"and",
"its",
"subdirectories",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L121-L132 | train |
quiltdata/quilt | compiler/quilt/tools/store.py | PackageStore.find_store_dirs | def find_store_dirs(cls):
"""
Returns the primary package directory and any additional ones from QUILT_PACKAGE_DIRS.
"""
store_dirs = [default_store_location()]
extra_dirs_str = os.getenv('QUILT_PACKAGE_DIRS')
if extra_dirs_str:
store_dirs.extend(extra_dirs_str.split(':'))
return store_dirs | python | def find_store_dirs(cls):
"""
Returns the primary package directory and any additional ones from QUILT_PACKAGE_DIRS.
"""
store_dirs = [default_store_location()]
extra_dirs_str = os.getenv('QUILT_PACKAGE_DIRS')
if extra_dirs_str:
store_dirs.extend(extra_dirs_str.split(':'))
return store_dirs | [
"def",
"find_store_dirs",
"(",
"cls",
")",
":",
"store_dirs",
"=",
"[",
"default_store_location",
"(",
")",
"]",
"extra_dirs_str",
"=",
"os",
".",
"getenv",
"(",
"'QUILT_PACKAGE_DIRS'",
")",
"if",
"extra_dirs_str",
":",
"store_dirs",
".",
"extend",
"(",
"extra_dirs_str",
".",
"split",
"(",
"':'",
")",
")",
"return",
"store_dirs"
]
| Returns the primary package directory and any additional ones from QUILT_PACKAGE_DIRS. | [
"Returns",
"the",
"primary",
"package",
"directory",
"and",
"any",
"additional",
"ones",
"from",
"QUILT_PACKAGE_DIRS",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L135-L143 | train |
quiltdata/quilt | compiler/quilt/tools/store.py | PackageStore.find_package | def find_package(cls, team, user, package, pkghash=None, store_dir=None):
"""
Finds an existing package in one of the package directories.
"""
cls.check_name(team, user, package)
dirs = cls.find_store_dirs()
for store_dir in dirs:
store = PackageStore(store_dir)
pkg = store.get_package(team, user, package, pkghash=pkghash)
if pkg is not None:
return store, pkg
return None, None | python | def find_package(cls, team, user, package, pkghash=None, store_dir=None):
"""
Finds an existing package in one of the package directories.
"""
cls.check_name(team, user, package)
dirs = cls.find_store_dirs()
for store_dir in dirs:
store = PackageStore(store_dir)
pkg = store.get_package(team, user, package, pkghash=pkghash)
if pkg is not None:
return store, pkg
return None, None | [
"def",
"find_package",
"(",
"cls",
",",
"team",
",",
"user",
",",
"package",
",",
"pkghash",
"=",
"None",
",",
"store_dir",
"=",
"None",
")",
":",
"cls",
".",
"check_name",
"(",
"team",
",",
"user",
",",
"package",
")",
"dirs",
"=",
"cls",
".",
"find_store_dirs",
"(",
")",
"for",
"store_dir",
"in",
"dirs",
":",
"store",
"=",
"PackageStore",
"(",
"store_dir",
")",
"pkg",
"=",
"store",
".",
"get_package",
"(",
"team",
",",
"user",
",",
"package",
",",
"pkghash",
"=",
"pkghash",
")",
"if",
"pkg",
"is",
"not",
"None",
":",
"return",
"store",
",",
"pkg",
"return",
"None",
",",
"None"
]
| Finds an existing package in one of the package directories. | [
"Finds",
"an",
"existing",
"package",
"in",
"one",
"of",
"the",
"package",
"directories",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L146-L157 | train |
quiltdata/quilt | compiler/quilt/tools/store.py | PackageStore.get_package | def get_package(self, team, user, package, pkghash=None):
"""
Gets a package from this store.
"""
self.check_name(team, user, package)
path = self.package_path(team, user, package)
if not os.path.isdir(path):
return None
if pkghash is None:
latest_tag = os.path.join(path, self.TAGS_DIR, self.LATEST)
if not os.path.exists(latest_tag):
return None
with open (latest_tag, 'r') as tagfile:
pkghash = tagfile.read()
assert pkghash is not None
contents_path = os.path.join(path, self.CONTENTS_DIR, pkghash)
if not os.path.isfile(contents_path):
return None
with open(contents_path, 'r') as contents_file:
try:
return json.load(contents_file, object_hook=decode_node)
except AssertionError as err:
if str(err).startswith("Bad package format"):
name = "{}{}/{}, {}".format(
team + ':' if team else '',
user,
package,
pkghash
)
raise StoreException("Error in {}: {}".format(name, str(err)))
else:
raise | python | def get_package(self, team, user, package, pkghash=None):
"""
Gets a package from this store.
"""
self.check_name(team, user, package)
path = self.package_path(team, user, package)
if not os.path.isdir(path):
return None
if pkghash is None:
latest_tag = os.path.join(path, self.TAGS_DIR, self.LATEST)
if not os.path.exists(latest_tag):
return None
with open (latest_tag, 'r') as tagfile:
pkghash = tagfile.read()
assert pkghash is not None
contents_path = os.path.join(path, self.CONTENTS_DIR, pkghash)
if not os.path.isfile(contents_path):
return None
with open(contents_path, 'r') as contents_file:
try:
return json.load(contents_file, object_hook=decode_node)
except AssertionError as err:
if str(err).startswith("Bad package format"):
name = "{}{}/{}, {}".format(
team + ':' if team else '',
user,
package,
pkghash
)
raise StoreException("Error in {}: {}".format(name, str(err)))
else:
raise | [
"def",
"get_package",
"(",
"self",
",",
"team",
",",
"user",
",",
"package",
",",
"pkghash",
"=",
"None",
")",
":",
"self",
".",
"check_name",
"(",
"team",
",",
"user",
",",
"package",
")",
"path",
"=",
"self",
".",
"package_path",
"(",
"team",
",",
"user",
",",
"package",
")",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
":",
"return",
"None",
"if",
"pkghash",
"is",
"None",
":",
"latest_tag",
"=",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"self",
".",
"TAGS_DIR",
",",
"self",
".",
"LATEST",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"latest_tag",
")",
":",
"return",
"None",
"with",
"open",
"(",
"latest_tag",
",",
"'r'",
")",
"as",
"tagfile",
":",
"pkghash",
"=",
"tagfile",
".",
"read",
"(",
")",
"assert",
"pkghash",
"is",
"not",
"None",
"contents_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"self",
".",
"CONTENTS_DIR",
",",
"pkghash",
")",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"contents_path",
")",
":",
"return",
"None",
"with",
"open",
"(",
"contents_path",
",",
"'r'",
")",
"as",
"contents_file",
":",
"try",
":",
"return",
"json",
".",
"load",
"(",
"contents_file",
",",
"object_hook",
"=",
"decode_node",
")",
"except",
"AssertionError",
"as",
"err",
":",
"if",
"str",
"(",
"err",
")",
".",
"startswith",
"(",
"\"Bad package format\"",
")",
":",
"name",
"=",
"\"{}{}/{}, {}\"",
".",
"format",
"(",
"team",
"+",
"':'",
"if",
"team",
"else",
"''",
",",
"user",
",",
"package",
",",
"pkghash",
")",
"raise",
"StoreException",
"(",
"\"Error in {}: {}\"",
".",
"format",
"(",
"name",
",",
"str",
"(",
"err",
")",
")",
")",
"else",
":",
"raise"
]
| Gets a package from this store. | [
"Gets",
"a",
"package",
"from",
"this",
"store",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L189-L224 | train |
quiltdata/quilt | compiler/quilt/tools/store.py | PackageStore.install_package | def install_package(self, team, user, package, contents):
"""
Creates a new package in the default package store
and allocates a per-user directory if needed.
"""
self.check_name(team, user, package)
assert contents is not None
self.create_dirs()
path = self.package_path(team, user, package)
# Delete any existing data.
try:
os.remove(path)
except OSError:
pass | python | def install_package(self, team, user, package, contents):
"""
Creates a new package in the default package store
and allocates a per-user directory if needed.
"""
self.check_name(team, user, package)
assert contents is not None
self.create_dirs()
path = self.package_path(team, user, package)
# Delete any existing data.
try:
os.remove(path)
except OSError:
pass | [
"def",
"install_package",
"(",
"self",
",",
"team",
",",
"user",
",",
"package",
",",
"contents",
")",
":",
"self",
".",
"check_name",
"(",
"team",
",",
"user",
",",
"package",
")",
"assert",
"contents",
"is",
"not",
"None",
"self",
".",
"create_dirs",
"(",
")",
"path",
"=",
"self",
".",
"package_path",
"(",
"team",
",",
"user",
",",
"package",
")",
"# Delete any existing data.",
"try",
":",
"os",
".",
"remove",
"(",
"path",
")",
"except",
"OSError",
":",
"pass"
]
| Creates a new package in the default package store
and allocates a per-user directory if needed. | [
"Creates",
"a",
"new",
"package",
"in",
"the",
"default",
"package",
"store",
"and",
"allocates",
"a",
"per",
"-",
"user",
"directory",
"if",
"needed",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L226-L241 | train |
quiltdata/quilt | compiler/quilt/tools/store.py | PackageStore.create_package_node | def create_package_node(self, team, user, package, dry_run=False):
"""
Creates a new package and initializes its contents. See `install_package`.
"""
contents = RootNode(dict())
if dry_run:
return contents
self.check_name(team, user, package)
assert contents is not None
self.create_dirs()
# Delete any existing data.
path = self.package_path(team, user, package)
try:
os.remove(path)
except OSError:
pass
return contents | python | def create_package_node(self, team, user, package, dry_run=False):
"""
Creates a new package and initializes its contents. See `install_package`.
"""
contents = RootNode(dict())
if dry_run:
return contents
self.check_name(team, user, package)
assert contents is not None
self.create_dirs()
# Delete any existing data.
path = self.package_path(team, user, package)
try:
os.remove(path)
except OSError:
pass
return contents | [
"def",
"create_package_node",
"(",
"self",
",",
"team",
",",
"user",
",",
"package",
",",
"dry_run",
"=",
"False",
")",
":",
"contents",
"=",
"RootNode",
"(",
"dict",
"(",
")",
")",
"if",
"dry_run",
":",
"return",
"contents",
"self",
".",
"check_name",
"(",
"team",
",",
"user",
",",
"package",
")",
"assert",
"contents",
"is",
"not",
"None",
"self",
".",
"create_dirs",
"(",
")",
"# Delete any existing data.",
"path",
"=",
"self",
".",
"package_path",
"(",
"team",
",",
"user",
",",
"package",
")",
"try",
":",
"os",
".",
"remove",
"(",
"path",
")",
"except",
"OSError",
":",
"pass",
"return",
"contents"
]
| Creates a new package and initializes its contents. See `install_package`. | [
"Creates",
"a",
"new",
"package",
"and",
"initializes",
"its",
"contents",
".",
"See",
"install_package",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L243-L262 | train |
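Editor's note: with `dry_run=True` the method returns a fresh root without touching the filesystem, as this hypothetical sketch illustrates.

    from quilt.tools.store import PackageStore

    store = PackageStore()  # assumed default constructor
    root = store.create_package_node(None, 'alice', 'demo', dry_run=True)
    assert not root.children  # an empty tree; no directories were created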
quiltdata/quilt | compiler/quilt/tools/store.py | PackageStore.iterpackages | def iterpackages(self):
"""
Return an iterator over all the packages in the PackageStore.
"""
pkgdir = os.path.join(self._path, self.PKG_DIR)
if not os.path.isdir(pkgdir):
return
for team in sub_dirs(pkgdir):
for user in sub_dirs(self.team_path(team)):
for pkg in sub_dirs(self.user_path(team, user)):
pkgpath = self.package_path(team, user, pkg)
for hsh in sub_files(os.path.join(pkgpath, PackageStore.CONTENTS_DIR)):
yield self.get_package(team, user, pkg, pkghash=hsh) | python | def iterpackages(self):
"""
Return an iterator over all the packages in the PackageStore.
"""
pkgdir = os.path.join(self._path, self.PKG_DIR)
if not os.path.isdir(pkgdir):
return
for team in sub_dirs(pkgdir):
for user in sub_dirs(self.team_path(team)):
for pkg in sub_dirs(self.user_path(team, user)):
pkgpath = self.package_path(team, user, pkg)
for hsh in sub_files(os.path.join(pkgpath, PackageStore.CONTENTS_DIR)):
yield self.get_package(team, user, pkg, pkghash=hsh) | [
"def",
"iterpackages",
"(",
"self",
")",
":",
"pkgdir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"_path",
",",
"self",
".",
"PKG_DIR",
")",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"pkgdir",
")",
":",
"return",
"for",
"team",
"in",
"sub_dirs",
"(",
"pkgdir",
")",
":",
"for",
"user",
"in",
"sub_dirs",
"(",
"self",
".",
"team_path",
"(",
"team",
")",
")",
":",
"for",
"pkg",
"in",
"sub_dirs",
"(",
"self",
".",
"user_path",
"(",
"team",
",",
"user",
")",
")",
":",
"pkgpath",
"=",
"self",
".",
"package_path",
"(",
"team",
",",
"user",
",",
"pkg",
")",
"for",
"hsh",
"in",
"sub_files",
"(",
"os",
".",
"path",
".",
"join",
"(",
"pkgpath",
",",
"PackageStore",
".",
"CONTENTS_DIR",
")",
")",
":",
"yield",
"self",
".",
"get_package",
"(",
"team",
",",
"user",
",",
"pkg",
",",
"pkghash",
"=",
"hsh",
")"
]
| Return an iterator over all the packages in the PackageStore. | [
"Return",
"an",
"iterator",
"over",
"all",
"the",
"packages",
"in",
"the",
"PackageStore",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L284-L296 | train |
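Editor's note: a sketch of walking every stored instance; each yielded value is the decoded contents tree returned by `get_package`.

    from quilt.tools.store import PackageStore

    store = PackageStore()  # assumed default constructor
    for contents in store.iterpackages():
        print(type(contents).__name__)  # e.g. RootNode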
quiltdata/quilt | compiler/quilt/tools/store.py | PackageStore.ls_packages | def ls_packages(self):
"""
List packages in this store.
"""
packages = []
pkgdir = os.path.join(self._path, self.PKG_DIR)
if not os.path.isdir(pkgdir):
return []
for team in sub_dirs(pkgdir):
for user in sub_dirs(self.team_path(team)):
for pkg in sub_dirs(self.user_path(team, user)):
pkgpath = self.package_path(team, user, pkg)
pkgmap = {h : [] for h in sub_files(os.path.join(pkgpath, PackageStore.CONTENTS_DIR))}
for tag in sub_files(os.path.join(pkgpath, PackageStore.TAGS_DIR)):
with open(os.path.join(pkgpath, PackageStore.TAGS_DIR, tag), 'r') as tagfile:
pkghash = tagfile.read()
pkgmap[pkghash].append(tag)
for pkghash, tags in pkgmap.items():
# add teams here if any other than DEFAULT_TEAM should be hidden.
team_token = '' if team in (DEFAULT_TEAM,) else team + ':'
fullpkg = "{team}{owner}/{pkg}".format(team=team_token, owner=user, pkg=pkg)
# Add an empty string tag for untagged hashes
displaytags = tags if tags else [""]
# Display a separate full line per tag like Docker
for tag in displaytags:
packages.append((fullpkg, str(tag), pkghash))
return packages | python | def ls_packages(self):
"""
List packages in this store.
"""
packages = []
pkgdir = os.path.join(self._path, self.PKG_DIR)
if not os.path.isdir(pkgdir):
return []
for team in sub_dirs(pkgdir):
for user in sub_dirs(self.team_path(team)):
for pkg in sub_dirs(self.user_path(team, user)):
pkgpath = self.package_path(team, user, pkg)
pkgmap = {h : [] for h in sub_files(os.path.join(pkgpath, PackageStore.CONTENTS_DIR))}
for tag in sub_files(os.path.join(pkgpath, PackageStore.TAGS_DIR)):
with open(os.path.join(pkgpath, PackageStore.TAGS_DIR, tag), 'r') as tagfile:
pkghash = tagfile.read()
pkgmap[pkghash].append(tag)
for pkghash, tags in pkgmap.items():
# add teams here if any other than DEFAULT_TEAM should be hidden.
team_token = '' if team in (DEFAULT_TEAM,) else team + ':'
fullpkg = "{team}{owner}/{pkg}".format(team=team_token, owner=user, pkg=pkg)
# Add an empty string tag for untagged hashes
displaytags = tags if tags else [""]
# Display a separate full line per tag like Docker
for tag in displaytags:
packages.append((fullpkg, str(tag), pkghash))
return packages | [
"def",
"ls_packages",
"(",
"self",
")",
":",
"packages",
"=",
"[",
"]",
"pkgdir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"_path",
",",
"self",
".",
"PKG_DIR",
")",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"pkgdir",
")",
":",
"return",
"[",
"]",
"for",
"team",
"in",
"sub_dirs",
"(",
"pkgdir",
")",
":",
"for",
"user",
"in",
"sub_dirs",
"(",
"self",
".",
"team_path",
"(",
"team",
")",
")",
":",
"for",
"pkg",
"in",
"sub_dirs",
"(",
"self",
".",
"user_path",
"(",
"team",
",",
"user",
")",
")",
":",
"pkgpath",
"=",
"self",
".",
"package_path",
"(",
"team",
",",
"user",
",",
"pkg",
")",
"pkgmap",
"=",
"{",
"h",
":",
"[",
"]",
"for",
"h",
"in",
"sub_files",
"(",
"os",
".",
"path",
".",
"join",
"(",
"pkgpath",
",",
"PackageStore",
".",
"CONTENTS_DIR",
")",
")",
"}",
"for",
"tag",
"in",
"sub_files",
"(",
"os",
".",
"path",
".",
"join",
"(",
"pkgpath",
",",
"PackageStore",
".",
"TAGS_DIR",
")",
")",
":",
"with",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"pkgpath",
",",
"PackageStore",
".",
"TAGS_DIR",
",",
"tag",
")",
",",
"'r'",
")",
"as",
"tagfile",
":",
"pkghash",
"=",
"tagfile",
".",
"read",
"(",
")",
"pkgmap",
"[",
"pkghash",
"]",
".",
"append",
"(",
"tag",
")",
"for",
"pkghash",
",",
"tags",
"in",
"pkgmap",
".",
"items",
"(",
")",
":",
"# add teams here if any other than DEFAULT_TEAM should be hidden.",
"team_token",
"=",
"''",
"if",
"team",
"in",
"(",
"DEFAULT_TEAM",
",",
")",
"else",
"team",
"+",
"':'",
"fullpkg",
"=",
"\"{team}{owner}/{pkg}\"",
".",
"format",
"(",
"team",
"=",
"team_token",
",",
"owner",
"=",
"user",
",",
"pkg",
"=",
"pkg",
")",
"# Add an empty string tag for untagged hashes",
"displaytags",
"=",
"tags",
"if",
"tags",
"else",
"[",
"\"\"",
"]",
"# Display a separate full line per tag like Docker",
"for",
"tag",
"in",
"displaytags",
":",
"packages",
".",
"append",
"(",
"(",
"fullpkg",
",",
"str",
"(",
"tag",
")",
",",
"pkghash",
")",
")",
"return",
"packages"
]
| List packages in this store. | [
"List",
"packages",
"in",
"this",
"store",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L298-L325 | train |
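Editor's note: a sketch of printing the listing. Each tuple is one (package, tag, hash) row, with an empty-string tag for untagged instances; the constructor default is an assumption.

    from quilt.tools.store import PackageStore

    store = PackageStore()
    for fullpkg, tag, pkghash in store.ls_packages():
        print('{}\t{}\t{}'.format(fullpkg, tag, pkghash[:8]))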
quiltdata/quilt | compiler/quilt/tools/store.py | PackageStore.team_path | def team_path(self, team=None):
"""
Returns the path to the directory with the team's users' package repositories.
"""
if team is None:
team = DEFAULT_TEAM
return os.path.join(self._path, self.PKG_DIR, team) | python | def team_path(self, team=None):
"""
Returns the path to the directory with the team's users' package repositories.
"""
if team is None:
team = DEFAULT_TEAM
return os.path.join(self._path, self.PKG_DIR, team) | [
"def",
"team_path",
"(",
"self",
",",
"team",
"=",
"None",
")",
":",
"if",
"team",
"is",
"None",
":",
"team",
"=",
"DEFAULT_TEAM",
"return",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"_path",
",",
"self",
".",
"PKG_DIR",
",",
"team",
")"
]
| Returns the path to the directory with the team's users' package repositories. | [
"Returns",
"the",
"path",
"to",
"directory",
"with",
"the",
"team",
"s",
"users",
"package",
"repositories",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L327-L333 | train |
quiltdata/quilt | compiler/quilt/tools/store.py | PackageStore.user_path | def user_path(self, team, user):
"""
Returns the path to the directory with the user's package repositories.
"""
return os.path.join(self.team_path(team), user) | python | def user_path(self, team, user):
"""
Returns the path to the directory with the user's package repositories.
"""
return os.path.join(self.team_path(team), user) | [
"def",
"user_path",
"(",
"self",
",",
"team",
",",
"user",
")",
":",
"return",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"team_path",
"(",
"team",
")",
",",
"user",
")"
]
| Returns the path to the directory with the user's package repositories. | [
"Returns",
"the",
"path",
"to",
"directory",
"with",
"the",
"user",
"s",
"package",
"repositories",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L335-L339 | train |
quiltdata/quilt | compiler/quilt/tools/store.py | PackageStore.package_path | def package_path(self, team, user, package):
"""
Returns the path to a package repository.
"""
return os.path.join(self.user_path(team, user), package) | python | def package_path(self, team, user, package):
"""
Returns the path to a package repository.
"""
return os.path.join(self.user_path(team, user), package) | [
"def",
"package_path",
"(",
"self",
",",
"team",
",",
"user",
",",
"package",
")",
":",
"return",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"user_path",
"(",
"team",
",",
"user",
")",
",",
"package",
")"
]
| Returns the path to a package repository. | [
"Returns",
"the",
"path",
"to",
"a",
"package",
"repository",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L341-L345 | train |
quiltdata/quilt | compiler/quilt/tools/store.py | PackageStore.object_path | def object_path(self, objhash):
"""
Returns the path to an object file based on its hash.
"""
return os.path.join(self._path, self.OBJ_DIR, objhash) | python | def object_path(self, objhash):
"""
Returns the path to an object file based on its hash.
"""
return os.path.join(self._path, self.OBJ_DIR, objhash) | [
"def",
"object_path",
"(",
"self",
",",
"objhash",
")",
":",
"return",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"_path",
",",
"self",
".",
"OBJ_DIR",
",",
"objhash",
")"
]
| Returns the path to an object file based on its hash. | [
"Returns",
"the",
"path",
"to",
"an",
"object",
"file",
"based",
"on",
"its",
"hash",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L347-L351 | train |
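Editor's note: these path helpers compose by definition, and team None maps to the default team; a small sketch verifying the nesting (constructor default assumed).

    import os
    from quilt.tools.store import PackageStore

    store = PackageStore()
    assert store.user_path(None, 'alice') == os.path.join(store.team_path(None), 'alice')
    assert store.package_path(None, 'alice', 'demo') == os.path.join(store.user_path(None, 'alice'), 'demo')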
quiltdata/quilt | compiler/quilt/tools/store.py | PackageStore.prune | def prune(self, objs=None):
"""
Clean up objects not referenced by any packages. Try to prune all
objects by default.
"""
if objs is None:
objdir = os.path.join(self._path, self.OBJ_DIR)
objs = os.listdir(objdir)
remove_objs = set(objs)
for pkg in self.iterpackages():
remove_objs.difference_update(find_object_hashes(pkg))
for obj in remove_objs:
path = self.object_path(obj)
if os.path.exists(path):
os.chmod(path, S_IWUSR)
os.remove(path)
return remove_objs | python | def prune(self, objs=None):
"""
Clean up objects not referenced by any packages. Try to prune all
objects by default.
"""
if objs is None:
objdir = os.path.join(self._path, self.OBJ_DIR)
objs = os.listdir(objdir)
remove_objs = set(objs)
for pkg in self.iterpackages():
remove_objs.difference_update(find_object_hashes(pkg))
for obj in remove_objs:
path = self.object_path(obj)
if os.path.exists(path):
os.chmod(path, S_IWUSR)
os.remove(path)
return remove_objs | [
"def",
"prune",
"(",
"self",
",",
"objs",
"=",
"None",
")",
":",
"if",
"objs",
"is",
"None",
":",
"objdir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"_path",
",",
"self",
".",
"OBJ_DIR",
")",
"objs",
"=",
"os",
".",
"listdir",
"(",
"objdir",
")",
"remove_objs",
"=",
"set",
"(",
"objs",
")",
"for",
"pkg",
"in",
"self",
".",
"iterpackages",
"(",
")",
":",
"remove_objs",
".",
"difference_update",
"(",
"find_object_hashes",
"(",
"pkg",
")",
")",
"for",
"obj",
"in",
"remove_objs",
":",
"path",
"=",
"self",
".",
"object_path",
"(",
"obj",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
":",
"os",
".",
"chmod",
"(",
"path",
",",
"S_IWUSR",
")",
"os",
".",
"remove",
"(",
"path",
")",
"return",
"remove_objs"
]
| Clean up objects not referenced by any packages. Try to prune all
objects by default. | [
"Clean",
"up",
"objects",
"not",
"referenced",
"by",
"any",
"packages",
".",
"Try",
"to",
"prune",
"all",
"objects",
"by",
"default",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L365-L383 | train |
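Editor's note: a sketch of a full sweep; passing no argument considers every object in the store's object directory.

    from quilt.tools.store import PackageStore

    store = PackageStore()  # assumed default constructor
    removed = store.prune()
    print('{} unreferenced objects deleted'.format(len(removed)))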
quiltdata/quilt | compiler/quilt/tools/store.py | PackageStore.save_dataframe | def save_dataframe(self, dataframe):
"""
Save a DataFrame to the store.
"""
storepath = self.temporary_object_path(str(uuid.uuid4()))
# switch parquet lib
parqlib = self.get_parquet_lib()
if isinstance(dataframe, pd.DataFrame):
#parqlib is ParquetLib.ARROW: # other parquet libs are deprecated, remove?
import pyarrow as pa
from pyarrow import parquet
table = pa.Table.from_pandas(dataframe)
parquet.write_table(table, storepath)
elif parqlib is ParquetLib.SPARK:
from pyspark import sql as sparksql
assert isinstance(dataframe, sparksql.DataFrame)
dataframe.write.parquet(storepath)
else:
assert False, "Unimplemented ParquetLib %s" % parqlib
# Move serialized DataFrame to object store
if os.path.isdir(storepath): # Pyspark
hashes = []
files = [ofile for ofile in os.listdir(storepath) if ofile.endswith(".parquet")]
for obj in files:
path = os.path.join(storepath, obj)
objhash = digest_file(path)
self._move_to_store(path, objhash)
hashes.append(objhash)
rmtree(storepath)
else:
filehash = digest_file(storepath)
self._move_to_store(storepath, filehash)
hashes = [filehash]
return hashes | python | def save_dataframe(self, dataframe):
"""
Save a DataFrame to the store.
"""
storepath = self.temporary_object_path(str(uuid.uuid4()))
# switch parquet lib
parqlib = self.get_parquet_lib()
if isinstance(dataframe, pd.DataFrame):
#parqlib is ParquetLib.ARROW: # other parquet libs are deprecated, remove?
import pyarrow as pa
from pyarrow import parquet
table = pa.Table.from_pandas(dataframe)
parquet.write_table(table, storepath)
elif parqlib is ParquetLib.SPARK:
from pyspark import sql as sparksql
assert isinstance(dataframe, sparksql.DataFrame)
dataframe.write.parquet(storepath)
else:
assert False, "Unimplemented ParquetLib %s" % parqlib
# Move serialized DataFrame to object store
if os.path.isdir(storepath): # Pyspark
hashes = []
files = [ofile for ofile in os.listdir(storepath) if ofile.endswith(".parquet")]
for obj in files:
path = os.path.join(storepath, obj)
objhash = digest_file(path)
self._move_to_store(path, objhash)
hashes.append(objhash)
rmtree(storepath)
else:
filehash = digest_file(storepath)
self._move_to_store(storepath, filehash)
hashes = [filehash]
return hashes | [
"def",
"save_dataframe",
"(",
"self",
",",
"dataframe",
")",
":",
"storepath",
"=",
"self",
".",
"temporary_object_path",
"(",
"str",
"(",
"uuid",
".",
"uuid4",
"(",
")",
")",
")",
"# switch parquet lib",
"parqlib",
"=",
"self",
".",
"get_parquet_lib",
"(",
")",
"if",
"isinstance",
"(",
"dataframe",
",",
"pd",
".",
"DataFrame",
")",
":",
"#parqlib is ParquetLib.ARROW: # other parquet libs are deprecated, remove?",
"import",
"pyarrow",
"as",
"pa",
"from",
"pyarrow",
"import",
"parquet",
"table",
"=",
"pa",
".",
"Table",
".",
"from_pandas",
"(",
"dataframe",
")",
"parquet",
".",
"write_table",
"(",
"table",
",",
"storepath",
")",
"elif",
"parqlib",
"is",
"ParquetLib",
".",
"SPARK",
":",
"from",
"pyspark",
"import",
"sql",
"as",
"sparksql",
"assert",
"isinstance",
"(",
"dataframe",
",",
"sparksql",
".",
"DataFrame",
")",
"dataframe",
".",
"write",
".",
"parquet",
"(",
"storepath",
")",
"else",
":",
"assert",
"False",
",",
"\"Unimplemented ParquetLib %s\"",
"%",
"parqlib",
"# Move serialized DataFrame to object store",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"storepath",
")",
":",
"# Pyspark",
"hashes",
"=",
"[",
"]",
"files",
"=",
"[",
"ofile",
"for",
"ofile",
"in",
"os",
".",
"listdir",
"(",
"storepath",
")",
"if",
"ofile",
".",
"endswith",
"(",
"\".parquet\"",
")",
"]",
"for",
"obj",
"in",
"files",
":",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"storepath",
",",
"obj",
")",
"objhash",
"=",
"digest_file",
"(",
"path",
")",
"self",
".",
"_move_to_store",
"(",
"path",
",",
"objhash",
")",
"hashes",
".",
"append",
"(",
"objhash",
")",
"rmtree",
"(",
"storepath",
")",
"else",
":",
"filehash",
"=",
"digest_file",
"(",
"storepath",
")",
"self",
".",
"_move_to_store",
"(",
"storepath",
",",
"filehash",
")",
"hashes",
"=",
"[",
"filehash",
"]",
"return",
"hashes"
]
| Save a DataFrame to the store. | [
"Save",
"a",
"DataFrame",
"to",
"the",
"store",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L436-L472 | train |
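Editor's note: a round-trip sketch on the pandas/Arrow path, where a single Parquet object (and hence a one-element hash list) is produced; `load_dataframe` is the counterpart used by `GroupNode._data` further below.

    import pandas as pd
    from quilt.tools.store import PackageStore

    store = PackageStore()  # assumed default constructor
    hashes = store.save_dataframe(pd.DataFrame({'a': [1, 2, 3]}))
    assert len(hashes) == 1
    roundtrip = store.load_dataframe(hashes)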
quiltdata/quilt | compiler/quilt/tools/store.py | PackageStore.load_numpy | def load_numpy(self, hash_list):
"""
Loads a numpy array.
"""
assert len(hash_list) == 1
self._check_hashes(hash_list)
with open(self.object_path(hash_list[0]), 'rb') as fd:
return np.load(fd, allow_pickle=False) | python | def load_numpy(self, hash_list):
"""
Loads a numpy array.
"""
assert len(hash_list) == 1
self._check_hashes(hash_list)
with open(self.object_path(hash_list[0]), 'rb') as fd:
return np.load(fd, allow_pickle=False) | [
"def",
"load_numpy",
"(",
"self",
",",
"hash_list",
")",
":",
"assert",
"len",
"(",
"hash_list",
")",
"==",
"1",
"self",
".",
"_check_hashes",
"(",
"hash_list",
")",
"with",
"open",
"(",
"self",
".",
"object_path",
"(",
"hash_list",
"[",
"0",
"]",
")",
",",
"'rb'",
")",
"as",
"fd",
":",
"return",
"np",
".",
"load",
"(",
"fd",
",",
"allow_pickle",
"=",
"False",
")"
]
| Loads a numpy array. | [
"Loads",
"a",
"numpy",
"array",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L474-L481 | train |
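Editor's note: a round-trip sketch. `save_numpy` (used by `add_to_package_numpy` below) is assumed to return the single object hash that `load_numpy` expects in a one-element list.

    import numpy as np
    from quilt.tools.store import PackageStore

    store = PackageStore()  # assumed default constructor
    objhash = store.save_numpy(np.arange(10))
    assert (store.load_numpy([objhash]) == np.arange(10)).all()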
quiltdata/quilt | compiler/quilt/tools/store.py | PackageStore.get_file | def get_file(self, hash_list):
"""
Returns the path of the file - but verifies that the hash is actually present.
"""
assert len(hash_list) == 1
self._check_hashes(hash_list)
return self.object_path(hash_list[0]) | python | def get_file(self, hash_list):
"""
Returns the path of the file - but verifies that the hash is actually present.
"""
assert len(hash_list) == 1
self._check_hashes(hash_list)
return self.object_path(hash_list[0]) | [
"def",
"get_file",
"(",
"self",
",",
"hash_list",
")",
":",
"assert",
"len",
"(",
"hash_list",
")",
"==",
"1",
"self",
".",
"_check_hashes",
"(",
"hash_list",
")",
"return",
"self",
".",
"object_path",
"(",
"hash_list",
"[",
"0",
"]",
")"
]
| Returns the path of the file - but verifies that the hash is actually present. | [
"Returns",
"the",
"path",
"of",
"the",
"file",
"-",
"but",
"verifies",
"that",
"the",
"hash",
"is",
"actually",
"present",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L493-L499 | train |
quiltdata/quilt | compiler/quilt/tools/store.py | PackageStore.save_metadata | def save_metadata(self, metadata):
"""
Save metadata to the store.
"""
if metadata in (None, {}):
return None
if SYSTEM_METADATA in metadata:
raise StoreException("Not allowed to store %r in metadata" % SYSTEM_METADATA)
path = self.temporary_object_path(str(uuid.uuid4()))
with open(path, 'w') as fd:
try:
# IMPORTANT: JSON format affects the hash of the package.
# In particular, it cannot contain line breaks because of Windows (LF vs CRLF).
# To be safe, we use the most compact encoding.
json.dump(metadata, fd, sort_keys=True, separators=(',', ':'))
except (TypeError, ValueError):
raise StoreException("Metadata is not serializable")
metahash = digest_file(path)
self._move_to_store(path, metahash)
return metahash | python | def save_metadata(self, metadata):
"""
Save metadata to the store.
"""
if metadata in (None, {}):
return None
if SYSTEM_METADATA in metadata:
raise StoreException("Not allowed to store %r in metadata" % SYSTEM_METADATA)
path = self.temporary_object_path(str(uuid.uuid4()))
with open(path, 'w') as fd:
try:
# IMPORTANT: JSON format affects the hash of the package.
# In particular, it cannot contain line breaks because of Windows (LF vs CRLF).
# To be safe, we use the most compact encoding.
json.dump(metadata, fd, sort_keys=True, separators=(',', ':'))
except (TypeError, ValueError):
raise StoreException("Metadata is not serializable")
metahash = digest_file(path)
self._move_to_store(path, metahash)
return metahash | [
"def",
"save_metadata",
"(",
"self",
",",
"metadata",
")",
":",
"if",
"metadata",
"in",
"(",
"None",
",",
"{",
"}",
")",
":",
"return",
"None",
"if",
"SYSTEM_METADATA",
"in",
"metadata",
":",
"raise",
"StoreException",
"(",
"\"Not allowed to store %r in metadata\"",
"%",
"SYSTEM_METADATA",
")",
"path",
"=",
"self",
".",
"temporary_object_path",
"(",
"str",
"(",
"uuid",
".",
"uuid4",
"(",
")",
")",
")",
"with",
"open",
"(",
"path",
",",
"'w'",
")",
"as",
"fd",
":",
"try",
":",
"# IMPORTANT: JSON format affects the hash of the package.",
"# In particular, it cannot contain line breaks because of Windows (LF vs CRLF).",
"# To be safe, we use the most compact encoding.",
"json",
".",
"dump",
"(",
"metadata",
",",
"fd",
",",
"sort_keys",
"=",
"True",
",",
"separators",
"=",
"(",
"','",
",",
"':'",
")",
")",
"except",
"(",
"TypeError",
",",
"ValueError",
")",
":",
"raise",
"StoreException",
"(",
"\"Metadata is not serializable\"",
")",
"metahash",
"=",
"digest_file",
"(",
"path",
")",
"self",
".",
"_move_to_store",
"(",
"path",
",",
"metahash",
")",
"return",
"metahash"
]
| Save metadata to the store. | [
"Save",
"metadata",
"to",
"the",
"store",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L521-L544 | train |
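Editor's note: a sketch of the contract shown above: empty or missing metadata stores nothing, while any JSON-serializable dict yields a content hash (constructor default assumed).

    from quilt.tools.store import PackageStore

    store = PackageStore()
    assert store.save_metadata(None) is None
    assert store.save_metadata({}) is None
    metahash = store.save_metadata({'source': 'sensor-7', 'rev': 3})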
quiltdata/quilt | compiler/quilt/tools/store.py | PackageStore.save_package_contents | def save_package_contents(self, root, team, owner, pkgname):
"""
Saves the in-memory contents to a file in the local
package repository.
"""
assert isinstance(root, RootNode)
instance_hash = hash_contents(root)
pkg_path = self.package_path(team, owner, pkgname)
if not os.path.isdir(pkg_path):
os.makedirs(pkg_path)
os.mkdir(os.path.join(pkg_path, self.CONTENTS_DIR))
os.mkdir(os.path.join(pkg_path, self.TAGS_DIR))
os.mkdir(os.path.join(pkg_path, self.VERSIONS_DIR))
dest = os.path.join(pkg_path, self.CONTENTS_DIR, instance_hash)
with open(dest, 'w') as contents_file:
json.dump(root, contents_file, default=encode_node, indent=2, sort_keys=True)
tag_dir = os.path.join(pkg_path, self.TAGS_DIR)
if not os.path.isdir(tag_dir):
os.mkdir(tag_dir)
latest_tag = os.path.join(pkg_path, self.TAGS_DIR, self.LATEST)
with open(latest_tag, 'w') as tagfile:
tagfile.write("{hsh}".format(hsh=instance_hash)) | python | def save_package_contents(self, root, team, owner, pkgname):
"""
Saves the in-memory contents to a file in the local
package repository.
"""
assert isinstance(root, RootNode)
instance_hash = hash_contents(root)
pkg_path = self.package_path(team, owner, pkgname)
if not os.path.isdir(pkg_path):
os.makedirs(pkg_path)
os.mkdir(os.path.join(pkg_path, self.CONTENTS_DIR))
os.mkdir(os.path.join(pkg_path, self.TAGS_DIR))
os.mkdir(os.path.join(pkg_path, self.VERSIONS_DIR))
dest = os.path.join(pkg_path, self.CONTENTS_DIR, instance_hash)
with open(dest, 'w') as contents_file:
json.dump(root, contents_file, default=encode_node, indent=2, sort_keys=True)
tag_dir = os.path.join(pkg_path, self.TAGS_DIR)
if not os.path.isdir(tag_dir):
os.mkdir(tag_dir)
latest_tag = os.path.join(pkg_path, self.TAGS_DIR, self.LATEST)
with open(latest_tag, 'w') as tagfile:
tagfile.write("{hsh}".format(hsh=instance_hash)) | [
"def",
"save_package_contents",
"(",
"self",
",",
"root",
",",
"team",
",",
"owner",
",",
"pkgname",
")",
":",
"assert",
"isinstance",
"(",
"root",
",",
"RootNode",
")",
"instance_hash",
"=",
"hash_contents",
"(",
"root",
")",
"pkg_path",
"=",
"self",
".",
"package_path",
"(",
"team",
",",
"owner",
",",
"pkgname",
")",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"pkg_path",
")",
":",
"os",
".",
"makedirs",
"(",
"pkg_path",
")",
"os",
".",
"mkdir",
"(",
"os",
".",
"path",
".",
"join",
"(",
"pkg_path",
",",
"self",
".",
"CONTENTS_DIR",
")",
")",
"os",
".",
"mkdir",
"(",
"os",
".",
"path",
".",
"join",
"(",
"pkg_path",
",",
"self",
".",
"TAGS_DIR",
")",
")",
"os",
".",
"mkdir",
"(",
"os",
".",
"path",
".",
"join",
"(",
"pkg_path",
",",
"self",
".",
"VERSIONS_DIR",
")",
")",
"dest",
"=",
"os",
".",
"path",
".",
"join",
"(",
"pkg_path",
",",
"self",
".",
"CONTENTS_DIR",
",",
"instance_hash",
")",
"with",
"open",
"(",
"dest",
",",
"'w'",
")",
"as",
"contents_file",
":",
"json",
".",
"dump",
"(",
"root",
",",
"contents_file",
",",
"default",
"=",
"encode_node",
",",
"indent",
"=",
"2",
",",
"sort_keys",
"=",
"True",
")",
"tag_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"pkg_path",
",",
"self",
".",
"TAGS_DIR",
")",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"tag_dir",
")",
":",
"os",
".",
"mkdir",
"(",
"tag_dir",
")",
"latest_tag",
"=",
"os",
".",
"path",
".",
"join",
"(",
"pkg_path",
",",
"self",
".",
"TAGS_DIR",
",",
"self",
".",
"LATEST",
")",
"with",
"open",
"(",
"latest_tag",
",",
"'w'",
")",
"as",
"tagfile",
":",
"tagfile",
".",
"write",
"(",
"\"{hsh}\"",
".",
"format",
"(",
"hsh",
"=",
"instance_hash",
")",
")"
]
| Saves the in-memory contents to a file in the local
package repository. | [
"Saves",
"the",
"in",
"-",
"memory",
"contents",
"to",
"a",
"file",
"in",
"the",
"local",
"package",
"repository",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L546-L570 | train |
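Editor's note: a sketch combining this with `create_package_node`; after the call, `contents/<instance-hash>` exists and the `latest` tag points at it. Names are hypothetical.

    from quilt.tools.store import PackageStore

    store = PackageStore()  # assumed default constructor
    root = store.create_package_node(None, 'alice', 'demo')
    store.save_package_contents(root, None, 'alice', 'demo')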
quiltdata/quilt | compiler/quilt/tools/store.py | PackageStore._move_to_store | def _move_to_store(self, srcpath, objhash):
"""
Make the object read-only and move it to the store.
"""
destpath = self.object_path(objhash)
if os.path.exists(destpath):
# Windows: delete any existing object at the destination.
os.chmod(destpath, S_IWUSR)
os.remove(destpath)
os.chmod(srcpath, S_IRUSR | S_IRGRP | S_IROTH) # Make read-only
move(srcpath, destpath) | python | def _move_to_store(self, srcpath, objhash):
"""
Make the object read-only and move it to the store.
"""
destpath = self.object_path(objhash)
if os.path.exists(destpath):
# Windows: delete any existing object at the destination.
os.chmod(destpath, S_IWUSR)
os.remove(destpath)
os.chmod(srcpath, S_IRUSR | S_IRGRP | S_IROTH) # Make read-only
move(srcpath, destpath) | [
"def",
"_move_to_store",
"(",
"self",
",",
"srcpath",
",",
"objhash",
")",
":",
"destpath",
"=",
"self",
".",
"object_path",
"(",
"objhash",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"destpath",
")",
":",
"# Windows: delete any existing object at the destination.",
"os",
".",
"chmod",
"(",
"destpath",
",",
"S_IWUSR",
")",
"os",
".",
"remove",
"(",
"destpath",
")",
"os",
".",
"chmod",
"(",
"srcpath",
",",
"S_IRUSR",
"|",
"S_IRGRP",
"|",
"S_IROTH",
")",
"# Make read-only",
"move",
"(",
"srcpath",
",",
"destpath",
")"
]
| Make the object read-only and move it to the store. | [
"Make",
"the",
"object",
"read",
"-",
"only",
"and",
"move",
"it",
"to",
"the",
"store",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L572-L582 | train |
quiltdata/quilt | compiler/quilt/tools/store.py | PackageStore.add_to_package_numpy | def add_to_package_numpy(self, root, ndarray, node_path, target, source_path, transform, custom_meta):
"""
Save a Numpy array to the store.
"""
filehash = self.save_numpy(ndarray)
metahash = self.save_metadata(custom_meta)
self._add_to_package_contents(root, node_path, [filehash], target, source_path, transform, metahash) | python | def add_to_package_numpy(self, root, ndarray, node_path, target, source_path, transform, custom_meta):
"""
Save a Numpy array to the store.
"""
filehash = self.save_numpy(ndarray)
metahash = self.save_metadata(custom_meta)
self._add_to_package_contents(root, node_path, [filehash], target, source_path, transform, metahash) | [
"def",
"add_to_package_numpy",
"(",
"self",
",",
"root",
",",
"ndarray",
",",
"node_path",
",",
"target",
",",
"source_path",
",",
"transform",
",",
"custom_meta",
")",
":",
"filehash",
"=",
"self",
".",
"save_numpy",
"(",
"ndarray",
")",
"metahash",
"=",
"self",
".",
"save_metadata",
"(",
"custom_meta",
")",
"self",
".",
"_add_to_package_contents",
"(",
"root",
",",
"node_path",
",",
"[",
"filehash",
"]",
",",
"target",
",",
"source_path",
",",
"transform",
",",
"metahash",
")"
]
| Save a Numpy array to the store. | [
"Save",
"a",
"Numpy",
"array",
"to",
"the",
"store",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L638-L644 | train |
quiltdata/quilt | compiler/quilt/tools/store.py | PackageStore.add_to_package_package_tree | def add_to_package_package_tree(self, root, node_path, pkgnode):
"""
Adds a package or sub-package tree from an existing package to this package's
contents.
"""
if node_path:
ptr = root
for node in node_path[:-1]:
ptr = ptr.children.setdefault(node, GroupNode(dict()))
ptr.children[node_path[-1]] = pkgnode
else:
if root.children:
raise PackageException("Attempting to overwrite root node of a non-empty package.")
root.children = pkgnode.children.copy() | python | def add_to_package_package_tree(self, root, node_path, pkgnode):
"""
Adds a package or sub-package tree from an existing package to this package's
contents.
"""
if node_path:
ptr = root
for node in node_path[:-1]:
ptr = ptr.children.setdefault(node, GroupNode(dict()))
ptr.children[node_path[-1]] = pkgnode
else:
if root.children:
raise PackageException("Attempting to overwrite root node of a non-empty package.")
root.children = pkgnode.children.copy() | [
"def",
"add_to_package_package_tree",
"(",
"self",
",",
"root",
",",
"node_path",
",",
"pkgnode",
")",
":",
"if",
"node_path",
":",
"ptr",
"=",
"root",
"for",
"node",
"in",
"node_path",
"[",
":",
"-",
"1",
"]",
":",
"ptr",
"=",
"ptr",
".",
"children",
".",
"setdefault",
"(",
"node",
",",
"GroupNode",
"(",
"dict",
"(",
")",
")",
")",
"ptr",
".",
"children",
"[",
"node_path",
"[",
"-",
"1",
"]",
"]",
"=",
"pkgnode",
"else",
":",
"if",
"root",
".",
"children",
":",
"raise",
"PackageException",
"(",
"\"Attempting to overwrite root node of a non-empty package.\"",
")",
"root",
".",
"children",
"=",
"pkgnode",
".",
"children",
".",
"copy",
"(",
")"
]
| Adds a package or sub-package tree from an existing package to this package's
contents. | [
"Adds",
"a",
"package",
"or",
"sub",
"-",
"package",
"tree",
"from",
"an",
"existing",
"package",
"to",
"this",
"package",
"s",
"contents",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/store.py#L658-L671 | train |
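Editor's note: a grafting sketch. `GroupNode` and `RootNode` here are the contents-tree classes used by the store; their import path is an assumption.

    from quilt.tools.store import PackageStore
    from quilt.tools.core import GroupNode, RootNode  # assumed import path

    store = PackageStore()  # assumed default constructor
    root = RootNode(dict())
    borrowed = GroupNode(dict())  # e.g. a subtree taken from another package
    store.add_to_package_package_tree(root, ['vendor', 'data'], borrowed)
    assert root.children['vendor'].children['data'] is borrowed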
quiltdata/quilt | compiler/quilt/__init__.py | _install_interrupt_handler | def _install_interrupt_handler():
"""Suppress KeyboardInterrupt traceback display in specific situations
If not running in dev mode, and if executed from the command line, then
we raise SystemExit instead of KeyboardInterrupt. This provides a clean
exit.
:returns: None if no action is taken, original interrupt handler otherwise
"""
# These would clutter the quilt.x namespace, so they're imported here instead.
import os
import sys
import signal
import pkg_resources
from .tools import const
# Check to see what entry points / scripts are configured to run quilt from the CLI
# By doing this, we have these benefits:
# * Avoid closing someone's Jupyter/iPython/bPython session when they hit ctrl-c
# * Avoid calling exit() when being used as an external lib
# * Provide exceptions when running in Jupyter/iPython/bPython
# * Provide exceptions when running in unexpected circumstances
quilt = pkg_resources.get_distribution('quilt')
executable = os.path.basename(sys.argv[0])
entry_points = quilt.get_entry_map().get('console_scripts', [])
# When python is run with '-c', this was executed via 'python -c "<some python code>"'
if executable == '-c':
# This is awkward and somewhat hackish, but we have to ensure that this is *us*
# executing via 'python -c'
if len(sys.argv) > 1 and sys.argv[1] == 'quilt testing':
# it's us. Let's pretend '-c' is an entry point.
entry_points['-c'] = 'blah'
sys.argv.pop(1)
if executable not in entry_points:
return
# We're running as a console script.
# If not in dev mode, use SystemExit instead of raising KeyboardInterrupt
def handle_interrupt(signum, stack):
# Check for dev mode
if _DEV_MODE is None:
# Args and environment have not been parsed, and no _DEV_MODE state has been set.
dev_mode = True if len(sys.argv) > 1 and sys.argv[1] == '--dev' else False
dev_mode = True if os.environ.get('QUILT_DEV_MODE', '').strip().lower() == 'true' else dev_mode
else: # Use forced dev-mode if _DEV_MODE is set
dev_mode = _DEV_MODE
# In order to display the full traceback, we lose control of the exit code here.
# Dev mode ctrl-c exit just produces the generic exit error code 1
if dev_mode:
raise KeyboardInterrupt()
# Normal exit
# avoid annoying prompt displacement when hitting ctrl-c
print()
exit(const.EXIT_KB_INTERRUPT)
return signal.signal(signal.SIGINT, handle_interrupt) | python | def _install_interrupt_handler():
"""Suppress KeyboardInterrupt traceback display in specific situations
If not running in dev mode, and if executed from the command line, then
we raise SystemExit instead of KeyboardInterrupt. This provides a clean
exit.
:returns: None if no action is taken, original interrupt handler otherwise
"""
# These would clutter the quilt.x namespace, so they're imported here instead.
import os
import sys
import signal
import pkg_resources
from .tools import const
# Check to see what entry points / scripts are configured to run quilt from the CLI
# By doing this, we have these benefits:
# * Avoid closing someone's Jupyter/iPython/bPython session when they hit ctrl-c
# * Avoid calling exit() when being used as an external lib
# * Provide exceptions when running in Jupyter/iPython/bPython
# * Provide exceptions when running in unexpected circumstances
quilt = pkg_resources.get_distribution('quilt')
executable = os.path.basename(sys.argv[0])
entry_points = quilt.get_entry_map().get('console_scripts', [])
# When python is run with '-c', this was executed via 'python -c "<some python code>"'
if executable == '-c':
# This is awkward and somewhat hackish, but we have to ensure that this is *us*
# executing via 'python -c'
if len(sys.argv) > 1 and sys.argv[1] == 'quilt testing':
# it's us. Let's pretend '-c' is an entry point.
entry_points['-c'] = 'blah'
sys.argv.pop(1)
if executable not in entry_points:
return
# We're running as a console script.
# If not in dev mode, use SystemExit instead of raising KeyboardInterrupt
def handle_interrupt(signum, stack):
# Check for dev mode
if _DEV_MODE is None:
# Args and environment have not been parsed, and no _DEV_MODE state has been set.
dev_mode = True if len(sys.argv) > 1 and sys.argv[1] == '--dev' else False
dev_mode = True if os.environ.get('QUILT_DEV_MODE', '').strip().lower() == 'true' else dev_mode
else: # Use forced dev-mode if _DEV_MODE is set
dev_mode = _DEV_MODE
# In order to display the full traceback, we lose control of the exit code here.
# Dev mode ctrl-c exit just produces the generic exit error code 1
if dev_mode:
raise KeyboardInterrupt()
# Normal exit
# avoid annoying prompt displacement when hitting ctrl-c
print()
exit(const.EXIT_KB_INTERRUPT)
return signal.signal(signal.SIGINT, handle_interrupt) | [
"def",
"_install_interrupt_handler",
"(",
")",
":",
"# These would clutter the quilt.x namespace, so they're imported here instead.",
"import",
"os",
"import",
"sys",
"import",
"signal",
"import",
"pkg_resources",
"from",
".",
"tools",
"import",
"const",
"# Check to see what entry points / scripts are configred to run quilt from the CLI",
"# By doing this, we have these benefits:",
"# * Avoid closing someone's Jupyter/iPython/bPython session when they hit ctrl-c",
"# * Avoid calling exit() when being used as an external lib",
"# * Provide exceptions when running in Jupyter/iPython/bPython",
"# * Provide exceptions when running in unexpected circumstances",
"quilt",
"=",
"pkg_resources",
".",
"get_distribution",
"(",
"'quilt'",
")",
"executable",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"sys",
".",
"argv",
"[",
"0",
"]",
")",
"entry_points",
"=",
"quilt",
".",
"get_entry_map",
"(",
")",
".",
"get",
"(",
"'console_scripts'",
",",
"[",
"]",
")",
"# When python is run with '-c', this was executed via 'python -c \"<some python code>\"'",
"if",
"executable",
"==",
"'-c'",
":",
"# This is awkward and somewhat hackish, but we have to ensure that this is *us*",
"# executing via 'python -c'",
"if",
"len",
"(",
"sys",
".",
"argv",
")",
">",
"1",
"and",
"sys",
".",
"argv",
"[",
"1",
"]",
"==",
"'quilt testing'",
":",
"# it's us. Let's pretend '-c' is an entry point.",
"entry_points",
"[",
"'-c'",
"]",
"=",
"'blah'",
"sys",
".",
"argv",
".",
"pop",
"(",
"1",
")",
"if",
"executable",
"not",
"in",
"entry_points",
":",
"return",
"# We're running as a console script.",
"# If not in dev mode, use SystemExit instead of raising KeyboardInterrupt",
"def",
"handle_interrupt",
"(",
"signum",
",",
"stack",
")",
":",
"# Check for dev mode",
"if",
"_DEV_MODE",
"is",
"None",
":",
"# Args and environment have not been parsed, and no _DEV_MODE state has been set.",
"dev_mode",
"=",
"True",
"if",
"len",
"(",
"sys",
".",
"argv",
")",
">",
"1",
"and",
"sys",
".",
"argv",
"[",
"1",
"]",
"==",
"'--dev'",
"else",
"False",
"dev_mode",
"=",
"True",
"if",
"os",
".",
"environ",
".",
"get",
"(",
"'QUILT_DEV_MODE'",
",",
"''",
")",
".",
"strip",
"(",
")",
".",
"lower",
"(",
")",
"==",
"'true'",
"else",
"dev_mode",
"else",
":",
"# Use forced dev-mode if _DEV_MODE is set",
"dev_mode",
"=",
"_DEV_MODE",
"# In order to display the full traceback, we lose control of the exit code here.",
"# Dev mode ctrl-c exit just produces the generic exit error code 1",
"if",
"dev_mode",
":",
"raise",
"KeyboardInterrupt",
"(",
")",
"# Normal exit",
"# avoid annoying prompt displacement when hitting ctrl-c",
"print",
"(",
")",
"exit",
"(",
"const",
".",
"EXIT_KB_INTERRUPT",
")",
"return",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGINT",
",",
"handle_interrupt",
")"
]
| Suppress KeyboardInterrupt traceback display in specific situations
If not running in dev mode, and if executed from the command line, then
we raise SystemExit instead of KeyboardInterrupt. This provides a clean
exit.
:returns: None if no action is taken, original interrupt handler otherwise | [
"Suppress",
"KeyboardInterrupt",
"traceback",
"display",
"in",
"specific",
"situations"
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/__init__.py#L23-L80 | train |
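Editor's note: because the function returns whatever `signal.signal` returns, a caller could restore the previous handler; a hypothetical sketch.

    import signal

    prev = _install_interrupt_handler()     # None unless running as a quilt console script
    if prev is not None:
        signal.signal(signal.SIGINT, prev)  # restore the original handler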
quiltdata/quilt | compiler/quilt/nodes.py | GroupNode._data_keys | def _data_keys(self):
"""
every child key referencing a dataframe
"""
return [name for name, child in iteritems(self._children) if not isinstance(child, GroupNode)] | python | def _data_keys(self):
"""
every child key referencing a dataframe
"""
return [name for name, child in iteritems(self._children) if not isinstance(child, GroupNode)] | [
"def",
"_data_keys",
"(",
"self",
")",
":",
"return",
"[",
"name",
"for",
"name",
",",
"child",
"in",
"iteritems",
"(",
"self",
".",
"_children",
")",
"if",
"not",
"isinstance",
"(",
"child",
",",
"GroupNode",
")",
"]"
]
| every child key referencing a dataframe | [
"every",
"child",
"key",
"referencing",
"a",
"dataframe"
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/nodes.py#L162-L166 | train |
quiltdata/quilt | compiler/quilt/nodes.py | GroupNode._group_keys | def _group_keys(self):
"""
every child key referencing a group that is not a dataframe
"""
return [name for name, child in iteritems(self._children) if isinstance(child, GroupNode)] | python | def _group_keys(self):
"""
every child key referencing a group that is not a dataframe
"""
return [name for name, child in iteritems(self._children) if isinstance(child, GroupNode)] | [
"def",
"_group_keys",
"(",
"self",
")",
":",
"return",
"[",
"name",
"for",
"name",
",",
"child",
"in",
"iteritems",
"(",
"self",
".",
"_children",
")",
"if",
"isinstance",
"(",
"child",
",",
"GroupNode",
")",
"]"
]
| every child key referencing a group that is not a dataframe | [
"every",
"child",
"key",
"referencing",
"a",
"group",
"that",
"is",
"not",
"a",
"dataframe"
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/nodes.py#L168-L172 | train |
quiltdata/quilt | compiler/quilt/nodes.py | GroupNode._data | def _data(self, asa=None):
"""
Merges all child dataframes. Only works for dataframes stored on disk - not in memory.
"""
hash_list = []
stack = [self]
alldfs = True
store = None
while stack:
node = stack.pop()
if isinstance(node, GroupNode):
stack.extend(child for _, child in sorted(node._items(), reverse=True))
else:
if node._target() != TargetType.PANDAS:
alldfs = False
if node._store is None or node._hashes is None:
msg = "Can only merge built dataframes. Build this package and try again."
raise NotImplementedError(msg)
node_store = node._store
if store is None:
store = node_store
if node_store != store:
raise NotImplementedError("Can only merge dataframes from the same store")
hash_list += node._hashes
if asa is None:
if not hash_list:
return None
if not alldfs:
raise ValueError("Group contains non-dataframe nodes")
return store.load_dataframe(hash_list)
else:
if hash_list:
assert store is not None
return asa(self, [store.object_path(obj) for obj in hash_list])
else:
return asa(self, []) | python | def _data(self, asa=None):
"""
Merges all child dataframes. Only works for dataframes stored on disk - not in memory.
"""
hash_list = []
stack = [self]
alldfs = True
store = None
while stack:
node = stack.pop()
if isinstance(node, GroupNode):
stack.extend(child for _, child in sorted(node._items(), reverse=True))
else:
if node._target() != TargetType.PANDAS:
alldfs = False
if node._store is None or node._hashes is None:
msg = "Can only merge built dataframes. Build this package and try again."
raise NotImplementedError(msg)
node_store = node._store
if store is None:
store = node_store
if node_store != store:
raise NotImplementedError("Can only merge dataframes from the same store")
hash_list += node._hashes
if asa is None:
if not hash_list:
return None
if not alldfs:
raise ValueError("Group contains non-dataframe nodes")
return store.load_dataframe(hash_list)
else:
if hash_list:
assert store is not None
return asa(self, [store.object_path(obj) for obj in hash_list])
else:
return asa(self, []) | [
"def",
"_data",
"(",
"self",
",",
"asa",
"=",
"None",
")",
":",
"hash_list",
"=",
"[",
"]",
"stack",
"=",
"[",
"self",
"]",
"alldfs",
"=",
"True",
"store",
"=",
"None",
"while",
"stack",
":",
"node",
"=",
"stack",
".",
"pop",
"(",
")",
"if",
"isinstance",
"(",
"node",
",",
"GroupNode",
")",
":",
"stack",
".",
"extend",
"(",
"child",
"for",
"_",
",",
"child",
"in",
"sorted",
"(",
"node",
".",
"_items",
"(",
")",
",",
"reverse",
"=",
"True",
")",
")",
"else",
":",
"if",
"node",
".",
"_target",
"(",
")",
"!=",
"TargetType",
".",
"PANDAS",
":",
"alldfs",
"=",
"False",
"if",
"node",
".",
"_store",
"is",
"None",
"or",
"node",
".",
"_hashes",
"is",
"None",
":",
"msg",
"=",
"\"Can only merge built dataframes. Build this package and try again.\"",
"raise",
"NotImplementedError",
"(",
"msg",
")",
"node_store",
"=",
"node",
".",
"_store",
"if",
"store",
"is",
"None",
":",
"store",
"=",
"node_store",
"if",
"node_store",
"!=",
"store",
":",
"raise",
"NotImplementedError",
"(",
"\"Can only merge dataframes from the same store\"",
")",
"hash_list",
"+=",
"node",
".",
"_hashes",
"if",
"asa",
"is",
"None",
":",
"if",
"not",
"hash_list",
":",
"return",
"None",
"if",
"not",
"alldfs",
":",
"raise",
"ValueError",
"(",
"\"Group contains non-dataframe nodes\"",
")",
"return",
"store",
".",
"load_dataframe",
"(",
"hash_list",
")",
"else",
":",
"if",
"hash_list",
":",
"assert",
"store",
"is",
"not",
"None",
"return",
"asa",
"(",
"self",
",",
"[",
"store",
".",
"object_path",
"(",
"obj",
")",
"for",
"obj",
"in",
"hash_list",
"]",
")",
"else",
":",
"return",
"asa",
"(",
"self",
",",
"[",
"]",
")"
]
| Merges all child dataframes. Only works for dataframes stored on disk - not in memory. | [
"Merges",
"all",
"child",
"dataframes",
".",
"Only",
"works",
"for",
"dataframes",
"stored",
"on",
"disk",
"-",
"not",
"in",
"memory",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/nodes.py#L184-L220 | train |
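Editor's note: a sketch, assuming `grp` is a `GroupNode` from a built package whose leaves are dataframes.

    merged = grp._data()  # one DataFrame merged from every leaf
    # pass `asa` to receive the raw Parquet object paths instead of loading them
    paths = grp._data(asa=lambda node, object_paths: object_paths)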
quiltdata/quilt | compiler/quilt/nodes.py | GroupNode._set | def _set(self, path, value, build_dir=''):
"""Create and set a node by path
This creates a node from a filename or pandas DataFrame.
If `value` is a filename, it must be relative to `build_dir`.
`value` is stored as the export path.
`build_dir` defaults to the current directory, but may be any
arbitrary directory path, including an absolute path.
Example:
# Set `pkg.graph_image` to the data in '/home/user/bin/graph.png'.
# If exported, it would export to '<export_dir>/bin/graph.png'
`pkg._set(['graph_image'], 'bin/graph.png', '/home/user')`
:param path: Path list -- I.e. ['examples', 'new_node']
:param value: Pandas dataframe, or a filename relative to build_dir
:param build_dir: Directory containing `value` if value is a filename.
"""
assert isinstance(path, list) and len(path) > 0
if isinstance(value, pd.DataFrame):
metadata = {SYSTEM_METADATA: {'target': TargetType.PANDAS.value}}
elif isinstance(value, np.ndarray):
metadata = {SYSTEM_METADATA: {'target': TargetType.NUMPY.value}}
elif isinstance(value, string_types + (bytes,)):
# bytes -> string for consistency when retrieving metadata
value = value.decode() if isinstance(value, bytes) else value
if os.path.isabs(value):
raise ValueError("Invalid path: expected a relative path, but received {!r}".format(value))
# Security: filepath does not and should not retain the build_dir's location!
metadata = {SYSTEM_METADATA: {'filepath': value, 'transform': 'id'}}
if build_dir:
value = os.path.join(build_dir, value)
else:
accepted_types = tuple(set((pd.DataFrame, np.ndarray, bytes) + string_types))
raise TypeError("Bad value type: Expected instance of any type {!r}, but received type {!r}"
.format(accepted_types, type(value)), repr(value)[0:100])
for key in path:
if not is_nodename(key):
raise ValueError("Invalid name for node: {}".format(key))
node = self
for key in path[:-1]:
child = node._get(key)
if not isinstance(child, GroupNode):
child = GroupNode({})
node[key] = child
node = child
key = path[-1]
node[key] = DataNode(None, None, value, metadata) | python | def _set(self, path, value, build_dir=''):
"""Create and set a node by path
This creates a node from a filename or pandas DataFrame.
If `value` is a filename, it must be relative to `build_dir`.
`value` is stored as the export path.
`build_dir` defaults to the current directory, but may be any
arbitrary directory path, including an absolute path.
Example:
# Set `pkg.graph_image` to the data in '/home/user/bin/graph.png'.
# If exported, it would export to '<export_dir>/bin/graph.png'
`pkg._set(['graph_image'], 'bin/graph.png', '/home/user')`
:param path: Path list -- I.e. ['examples', 'new_node']
:param value: Pandas dataframe, or a filename relative to build_dir
:param build_dir: Directory containing `value` if value is a filename.
"""
assert isinstance(path, list) and len(path) > 0
if isinstance(value, pd.DataFrame):
metadata = {SYSTEM_METADATA: {'target': TargetType.PANDAS.value}}
elif isinstance(value, np.ndarray):
metadata = {SYSTEM_METADATA: {'target': TargetType.NUMPY.value}}
elif isinstance(value, string_types + (bytes,)):
# bytes -> string for consistency when retrieving metadata
value = value.decode() if isinstance(value, bytes) else value
if os.path.isabs(value):
raise ValueError("Invalid path: expected a relative path, but received {!r}".format(value))
# Security: filepath does not and should not retain the build_dir's location!
metadata = {SYSTEM_METADATA: {'filepath': value, 'transform': 'id'}}
if build_dir:
value = os.path.join(build_dir, value)
else:
accepted_types = tuple(set((pd.DataFrame, np.ndarray, bytes) + string_types))
raise TypeError("Bad value type: Expected instance of any type {!r}, but received type {!r}"
.format(accepted_types, type(value)), repr(value)[0:100])
for key in path:
if not is_nodename(key):
raise ValueError("Invalid name for node: {}".format(key))
node = self
for key in path[:-1]:
child = node._get(key)
if not isinstance(child, GroupNode):
child = GroupNode({})
node[key] = child
node = child
key = path[-1]
node[key] = DataNode(None, None, value, metadata) | [
"def",
"_set",
"(",
"self",
",",
"path",
",",
"value",
",",
"build_dir",
"=",
"''",
")",
":",
"assert",
"isinstance",
"(",
"path",
",",
"list",
")",
"and",
"len",
"(",
"path",
")",
">",
"0",
"if",
"isinstance",
"(",
"value",
",",
"pd",
".",
"DataFrame",
")",
":",
"metadata",
"=",
"{",
"SYSTEM_METADATA",
":",
"{",
"'target'",
":",
"TargetType",
".",
"PANDAS",
".",
"value",
"}",
"}",
"elif",
"isinstance",
"(",
"value",
",",
"np",
".",
"ndarray",
")",
":",
"metadata",
"=",
"{",
"SYSTEM_METADATA",
":",
"{",
"'target'",
":",
"TargetType",
".",
"NUMPY",
".",
"value",
"}",
"}",
"elif",
"isinstance",
"(",
"value",
",",
"string_types",
"+",
"(",
"bytes",
",",
")",
")",
":",
"# bytes -> string for consistency when retrieving metadata",
"value",
"=",
"value",
".",
"decode",
"(",
")",
"if",
"isinstance",
"(",
"value",
",",
"bytes",
")",
"else",
"value",
"if",
"os",
".",
"path",
".",
"isabs",
"(",
"value",
")",
":",
"raise",
"ValueError",
"(",
"\"Invalid path: expected a relative path, but received {!r}\"",
".",
"format",
"(",
"value",
")",
")",
"# Security: filepath does not and should not retain the build_dir's location!",
"metadata",
"=",
"{",
"SYSTEM_METADATA",
":",
"{",
"'filepath'",
":",
"value",
",",
"'transform'",
":",
"'id'",
"}",
"}",
"if",
"build_dir",
":",
"value",
"=",
"os",
".",
"path",
".",
"join",
"(",
"build_dir",
",",
"value",
")",
"else",
":",
"accepted_types",
"=",
"tuple",
"(",
"set",
"(",
"(",
"pd",
".",
"DataFrame",
",",
"np",
".",
"ndarray",
",",
"bytes",
")",
"+",
"string_types",
")",
")",
"raise",
"TypeError",
"(",
"\"Bad value type: Expected instance of any type {!r}, but received type {!r}\"",
".",
"format",
"(",
"accepted_types",
",",
"type",
"(",
"value",
")",
")",
",",
"repr",
"(",
"value",
")",
"[",
"0",
":",
"100",
"]",
")",
"for",
"key",
"in",
"path",
":",
"if",
"not",
"is_nodename",
"(",
"key",
")",
":",
"raise",
"ValueError",
"(",
"\"Invalid name for node: {}\"",
".",
"format",
"(",
"key",
")",
")",
"node",
"=",
"self",
"for",
"key",
"in",
"path",
"[",
":",
"-",
"1",
"]",
":",
"child",
"=",
"node",
".",
"_get",
"(",
"key",
")",
"if",
"not",
"isinstance",
"(",
"child",
",",
"GroupNode",
")",
":",
"child",
"=",
"GroupNode",
"(",
"{",
"}",
")",
"node",
"[",
"key",
"]",
"=",
"child",
"node",
"=",
"child",
"key",
"=",
"path",
"[",
"-",
"1",
"]",
"node",
"[",
"key",
"]",
"=",
"DataNode",
"(",
"None",
",",
"None",
",",
"value",
",",
"metadata",
")"
]
| Create and set a node by path
This creates a node from a filename or pandas DataFrame.
If `value` is a filename, it must be relative to `build_dir`.
`value` is stored as the export path.
`build_dir` defaults to the current directory, but may be any
arbitrary directory path, including an absolute path.
Example:
# Set `pkg.graph_image` to the data in '/home/user/bin/graph.png'.
# If exported, it would export to '<export_dir>/bin/graph.png'
`pkg._set(['graph_image'], 'bin/graph.png', '/home/user')`
:param path: Path list -- I.e. ['examples', 'new_node']
:param value: Pandas dataframe, or a filename relative to build_dir
:param build_dir: Directory containing `value` if value is a filename. | [
"Create",
"and",
"set",
"a",
"node",
"by",
"path"
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/nodes.py#L222-L276 | train |
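Editor's note: a sketch mirroring the docstring's example, assuming `pkg` is a package root node.

    import pandas as pd

    pkg._set(['images', 'graph'], 'bin/graph.png', '/home/user')     # file node, export path 'bin/graph.png'
    pkg._set(['tables', 'prices'], pd.DataFrame({'p': [1.0, 2.0]}))  # in-memory dataframe node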
quiltdata/quilt | registry/quilt_server/views.py | handle_api_exception | def handle_api_exception(error):
"""
Converts an API exception into an error response.
"""
_mp_track(
type="exception",
status_code=error.status_code,
message=error.message,
)
response = jsonify(dict(
message=error.message
))
response.status_code = error.status_code
return response | python | def handle_api_exception(error):
"""
Converts an API exception into an error response.
"""
_mp_track(
type="exception",
status_code=error.status_code,
message=error.message,
)
response = jsonify(dict(
message=error.message
))
response.status_code = error.status_code
return response | [
"def",
"handle_api_exception",
"(",
"error",
")",
":",
"_mp_track",
"(",
"type",
"=",
"\"exception\"",
",",
"status_code",
"=",
"error",
".",
"status_code",
",",
"message",
"=",
"error",
".",
"message",
",",
")",
"response",
"=",
"jsonify",
"(",
"dict",
"(",
"message",
"=",
"error",
".",
"message",
")",
")",
"response",
".",
"status_code",
"=",
"error",
".",
"status_code",
"return",
"response"
]
| Converts an API exception into an error response. | [
"Converts",
"an",
"API",
"exception",
"into",
"an",
"error",
"response",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/registry/quilt_server/views.py#L188-L202 | train |
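Editor's note: the handler is meant to be wired into Flask's error handling; a minimal sketch of that registration. The exact wiring in the registry app is an assumption, and `ApiException` is the exception class raised throughout this views module.

    from flask import Flask

    app = Flask(__name__)
    app.register_error_handler(ApiException, handle_api_exception)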
quiltdata/quilt | registry/quilt_server/views.py | api | def api(require_login=True, schema=None, enabled=True,
require_admin=False, require_anonymous=False):
"""
Decorator for API requests.
Handles auth and exposes the authenticated user via g.auth.
"""
if require_admin:
require_login = True
if schema is not None:
Draft4Validator.check_schema(schema)
validator = Draft4Validator(schema)
else:
validator = None
assert not (require_login and require_anonymous), (
"Can't both require login and require anonymous access.")
def innerdec(f):
@wraps(f)
def wrapper(*args, **kwargs):
g.auth = Auth(user=None, email=None, is_logged_in=False, is_admin=False, is_active=True)
user_agent_str = request.headers.get('user-agent', '')
g.user_agent = httpagentparser.detect(user_agent_str, fill_none=True)
if not enabled:
raise ApiException(
requests.codes.bad_request,
"This endpoint is not enabled."
)
if validator is not None:
try:
validator.validate(request.get_json(cache=True))
except ValidationError as ex:
raise ApiException(requests.codes.bad_request, ex.message)
auth = request.headers.get(AUTHORIZATION_HEADER)
g.auth_header = auth
if auth is None:
if not require_anonymous:
if require_login or not ALLOW_ANONYMOUS_ACCESS:
raise ApiException(requests.codes.unauthorized, "Not logged in")
else:
# try to validate new auth
token = auth
# for compatibility with old clients
if token.startswith("Bearer "):
token = token[7:]
try:
user = verify_token_string(token)
except AuthException:
raise ApiException(requests.codes.unauthorized, "Token invalid.")
g.user = user
g.auth = Auth(user=user.name,
email=user.email,
is_logged_in=True,
is_admin=user.is_admin,
is_active=user.is_active)
g.auth_token = token
if not g.auth.is_active:
raise ApiException(
requests.codes.forbidden,
"Account is inactive. Must have an active account."
)
if require_admin and not g.auth.is_admin:
raise ApiException(
requests.codes.forbidden,
"Must be authenticated as an admin to use this endpoint."
)
return f(*args, **kwargs)
return wrapper
return innerdec | python | def api(require_login=True, schema=None, enabled=True,
require_admin=False, require_anonymous=False):
"""
Decorator for API requests.
    Handles auth and stores the authenticated user on flask.g.
"""
if require_admin:
require_login = True
if schema is not None:
Draft4Validator.check_schema(schema)
validator = Draft4Validator(schema)
else:
validator = None
assert not (require_login and require_anonymous), (
"Can't both require login and require anonymous access.")
def innerdec(f):
@wraps(f)
def wrapper(*args, **kwargs):
g.auth = Auth(user=None, email=None, is_logged_in=False, is_admin=False, is_active=True)
user_agent_str = request.headers.get('user-agent', '')
g.user_agent = httpagentparser.detect(user_agent_str, fill_none=True)
if not enabled:
raise ApiException(
requests.codes.bad_request,
"This endpoint is not enabled."
)
if validator is not None:
try:
validator.validate(request.get_json(cache=True))
except ValidationError as ex:
raise ApiException(requests.codes.bad_request, ex.message)
auth = request.headers.get(AUTHORIZATION_HEADER)
g.auth_header = auth
if auth is None:
if not require_anonymous:
if require_login or not ALLOW_ANONYMOUS_ACCESS:
raise ApiException(requests.codes.unauthorized, "Not logged in")
else:
# try to validate new auth
token = auth
# for compatibility with old clients
if token.startswith("Bearer "):
token = token[7:]
try:
user = verify_token_string(token)
except AuthException:
raise ApiException(requests.codes.unauthorized, "Token invalid.")
g.user = user
g.auth = Auth(user=user.name,
email=user.email,
is_logged_in=True,
is_admin=user.is_admin,
is_active=user.is_active)
g.auth_token = token
if not g.auth.is_active:
raise ApiException(
requests.codes.forbidden,
"Account is inactive. Must have an active account."
)
if require_admin and not g.auth.is_admin:
raise ApiException(
requests.codes.forbidden,
"Must be authenticated as an admin to use this endpoint."
)
return f(*args, **kwargs)
return wrapper
return innerdec | [
"def",
"api",
"(",
"require_login",
"=",
"True",
",",
"schema",
"=",
"None",
",",
"enabled",
"=",
"True",
",",
"require_admin",
"=",
"False",
",",
"require_anonymous",
"=",
"False",
")",
":",
"if",
"require_admin",
":",
"require_login",
"=",
"True",
"if",
"schema",
"is",
"not",
"None",
":",
"Draft4Validator",
".",
"check_schema",
"(",
"schema",
")",
"validator",
"=",
"Draft4Validator",
"(",
"schema",
")",
"else",
":",
"validator",
"=",
"None",
"assert",
"not",
"(",
"require_login",
"and",
"require_anonymous",
")",
",",
"(",
"\"Can't both require login and require anonymous access.\"",
")",
"def",
"innerdec",
"(",
"f",
")",
":",
"@",
"wraps",
"(",
"f",
")",
"def",
"wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"g",
".",
"auth",
"=",
"Auth",
"(",
"user",
"=",
"None",
",",
"email",
"=",
"None",
",",
"is_logged_in",
"=",
"False",
",",
"is_admin",
"=",
"False",
",",
"is_active",
"=",
"True",
")",
"user_agent_str",
"=",
"request",
".",
"headers",
".",
"get",
"(",
"'user-agent'",
",",
"''",
")",
"g",
".",
"user_agent",
"=",
"httpagentparser",
".",
"detect",
"(",
"user_agent_str",
",",
"fill_none",
"=",
"True",
")",
"if",
"not",
"enabled",
":",
"raise",
"ApiException",
"(",
"requests",
".",
"codes",
".",
"bad_request",
",",
"\"This endpoint is not enabled.\"",
")",
"if",
"validator",
"is",
"not",
"None",
":",
"try",
":",
"validator",
".",
"validate",
"(",
"request",
".",
"get_json",
"(",
"cache",
"=",
"True",
")",
")",
"except",
"ValidationError",
"as",
"ex",
":",
"raise",
"ApiException",
"(",
"requests",
".",
"codes",
".",
"bad_request",
",",
"ex",
".",
"message",
")",
"auth",
"=",
"request",
".",
"headers",
".",
"get",
"(",
"AUTHORIZATION_HEADER",
")",
"g",
".",
"auth_header",
"=",
"auth",
"if",
"auth",
"is",
"None",
":",
"if",
"not",
"require_anonymous",
":",
"if",
"require_login",
"or",
"not",
"ALLOW_ANONYMOUS_ACCESS",
":",
"raise",
"ApiException",
"(",
"requests",
".",
"codes",
".",
"unauthorized",
",",
"\"Not logged in\"",
")",
"else",
":",
"# try to validate new auth",
"token",
"=",
"auth",
"# for compatibility with old clients",
"if",
"token",
".",
"startswith",
"(",
"\"Bearer \"",
")",
":",
"token",
"=",
"token",
"[",
"7",
":",
"]",
"try",
":",
"user",
"=",
"verify_token_string",
"(",
"token",
")",
"except",
"AuthException",
":",
"raise",
"ApiException",
"(",
"requests",
".",
"codes",
".",
"unauthorized",
",",
"\"Token invalid.\"",
")",
"g",
".",
"user",
"=",
"user",
"g",
".",
"auth",
"=",
"Auth",
"(",
"user",
"=",
"user",
".",
"name",
",",
"email",
"=",
"user",
".",
"email",
",",
"is_logged_in",
"=",
"True",
",",
"is_admin",
"=",
"user",
".",
"is_admin",
",",
"is_active",
"=",
"user",
".",
"is_active",
")",
"g",
".",
"auth_token",
"=",
"token",
"if",
"not",
"g",
".",
"auth",
".",
"is_active",
":",
"raise",
"ApiException",
"(",
"requests",
".",
"codes",
".",
"forbidden",
",",
"\"Account is inactive. Must have an active account.\"",
")",
"if",
"require_admin",
"and",
"not",
"g",
".",
"auth",
".",
"is_admin",
":",
"raise",
"ApiException",
"(",
"requests",
".",
"codes",
".",
"forbidden",
",",
"\"Must be authenticated as an admin to use this endpoint.\"",
")",
"return",
"f",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"wrapper",
"return",
"innerdec"
]
| Decorator for API requests.
Handles auth and stores the authenticated user on flask.g. | [
"Decorator",
"for",
"API",
"requests",
".",
"Handles",
"auth",
"and",
"stores",
"the",
"authenticated",
"user",
"on",
"flask",
".",
"g",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/registry/quilt_server/views.py#L204-L283 | train |
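Usage note: a hedged sketch of an endpoint guarded by this decorator (route name and response shape are made up; `g.auth` is exactly the object the wrapper populates):

    from flask import g, jsonify

    @app.route('/api/whoami')     # `app` is the Flask app from the sketch above
    @api(require_login=True)      # rejects requests that lack a valid token
    def whoami():
        # g.auth was filled in by the decorator before the view runs
        return jsonify(user=g.auth.user, is_admin=g.auth.is_admin)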
quiltdata/quilt | registry/quilt_server/views.py | _private_packages_allowed | def _private_packages_allowed():
"""
Checks if the current user is allowed to create private packages.
In the public cloud, the user needs to be on a paid plan.
There are no restrictions in other deployments.
"""
if not HAVE_PAYMENTS or TEAM_ID:
return True
customer = _get_or_create_customer()
plan = _get_customer_plan(customer)
return plan != PaymentPlan.FREE | python | def _private_packages_allowed():
"""
Checks if the current user is allowed to create private packages.
In the public cloud, the user needs to be on a paid plan.
There are no restrictions in other deployments.
"""
if not HAVE_PAYMENTS or TEAM_ID:
return True
customer = _get_or_create_customer()
plan = _get_customer_plan(customer)
return plan != PaymentPlan.FREE | [
"def",
"_private_packages_allowed",
"(",
")",
":",
"if",
"not",
"HAVE_PAYMENTS",
"or",
"TEAM_ID",
":",
"return",
"True",
"customer",
"=",
"_get_or_create_customer",
"(",
")",
"plan",
"=",
"_get_customer_plan",
"(",
"customer",
")",
"return",
"plan",
"!=",
"PaymentPlan",
".",
"FREE"
]
| Checks if the current user is allowed to create private packages.
In the public cloud, the user needs to be on a paid plan.
There are no restrictions in other deployments. | [
"Checks",
"if",
"the",
"current",
"user",
"is",
"allowed",
"to",
"create",
"private",
"packages",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/registry/quilt_server/views.py#L566-L578 | train |
quiltdata/quilt | compiler/quilt/tools/command.py | _create_auth | def _create_auth(team, timeout=None):
"""
Reads the credentials, updates the access token if necessary, and returns it.
"""
url = get_registry_url(team)
contents = _load_auth()
auth = contents.get(url)
if auth is not None:
# If the access token expires within a minute, update it.
if auth['expires_at'] < time.time() + 60:
try:
auth = _update_auth(team, auth['refresh_token'], timeout)
except CommandException as ex:
raise CommandException(
"Failed to update the access token (%s). Run `quilt login%s` again." %
(ex, ' ' + team if team else '')
)
contents[url] = auth
_save_auth(contents)
return auth | python | def _create_auth(team, timeout=None):
"""
Reads the credentials, updates the access token if necessary, and returns it.
"""
url = get_registry_url(team)
contents = _load_auth()
auth = contents.get(url)
if auth is not None:
# If the access token expires within a minute, update it.
if auth['expires_at'] < time.time() + 60:
try:
auth = _update_auth(team, auth['refresh_token'], timeout)
except CommandException as ex:
raise CommandException(
"Failed to update the access token (%s). Run `quilt login%s` again." %
(ex, ' ' + team if team else '')
)
contents[url] = auth
_save_auth(contents)
return auth | [
"def",
"_create_auth",
"(",
"team",
",",
"timeout",
"=",
"None",
")",
":",
"url",
"=",
"get_registry_url",
"(",
"team",
")",
"contents",
"=",
"_load_auth",
"(",
")",
"auth",
"=",
"contents",
".",
"get",
"(",
"url",
")",
"if",
"auth",
"is",
"not",
"None",
":",
"# If the access token expires within a minute, update it.",
"if",
"auth",
"[",
"'expires_at'",
"]",
"<",
"time",
".",
"time",
"(",
")",
"+",
"60",
":",
"try",
":",
"auth",
"=",
"_update_auth",
"(",
"team",
",",
"auth",
"[",
"'refresh_token'",
"]",
",",
"timeout",
")",
"except",
"CommandException",
"as",
"ex",
":",
"raise",
"CommandException",
"(",
"\"Failed to update the access token (%s). Run `quilt login%s` again.\"",
"%",
"(",
"ex",
",",
"' '",
"+",
"team",
"if",
"team",
"else",
"''",
")",
")",
"contents",
"[",
"url",
"]",
"=",
"auth",
"_save_auth",
"(",
"contents",
")",
"return",
"auth"
]
| Reads the credentials, updates the access token if necessary, and returns it. | [
"Reads",
"the",
"credentials",
"updates",
"the",
"access",
"token",
"if",
"necessary",
"and",
"returns",
"it",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/command.py#L227-L248 | train |
quiltdata/quilt | compiler/quilt/tools/command.py | _create_session | def _create_session(team, auth):
"""
Creates a session object to be used for `push`, `install`, etc.
"""
session = requests.Session()
session.hooks.update(dict(
response=partial(_handle_response, team)
))
session.headers.update({
"Content-Type": "application/json",
"Accept": "application/json",
"User-Agent": "quilt-cli/%s (%s %s) %s/%s" % (
VERSION, platform.system(), platform.release(),
platform.python_implementation(), platform.python_version()
)
})
if auth is not None:
session.headers["Authorization"] = "Bearer %s" % auth['access_token']
return session | python | def _create_session(team, auth):
"""
Creates a session object to be used for `push`, `install`, etc.
"""
session = requests.Session()
session.hooks.update(dict(
response=partial(_handle_response, team)
))
session.headers.update({
"Content-Type": "application/json",
"Accept": "application/json",
"User-Agent": "quilt-cli/%s (%s %s) %s/%s" % (
VERSION, platform.system(), platform.release(),
platform.python_implementation(), platform.python_version()
)
})
if auth is not None:
session.headers["Authorization"] = "Bearer %s" % auth['access_token']
return session | [
"def",
"_create_session",
"(",
"team",
",",
"auth",
")",
":",
"session",
"=",
"requests",
".",
"Session",
"(",
")",
"session",
".",
"hooks",
".",
"update",
"(",
"dict",
"(",
"response",
"=",
"partial",
"(",
"_handle_response",
",",
"team",
")",
")",
")",
"session",
".",
"headers",
".",
"update",
"(",
"{",
"\"Content-Type\"",
":",
"\"application/json\"",
",",
"\"Accept\"",
":",
"\"application/json\"",
",",
"\"User-Agent\"",
":",
"\"quilt-cli/%s (%s %s) %s/%s\"",
"%",
"(",
"VERSION",
",",
"platform",
".",
"system",
"(",
")",
",",
"platform",
".",
"release",
"(",
")",
",",
"platform",
".",
"python_implementation",
"(",
")",
",",
"platform",
".",
"python_version",
"(",
")",
")",
"}",
")",
"if",
"auth",
"is",
"not",
"None",
":",
"session",
".",
"headers",
"[",
"\"Authorization\"",
"]",
"=",
"\"Bearer %s\"",
"%",
"auth",
"[",
"'access_token'",
"]",
"return",
"session"
]
| Creates a session object to be used for `push`, `install`, etc. | [
"Creates",
"a",
"session",
"object",
"to",
"be",
"used",
"for",
"push",
"install",
"etc",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/command.py#L250-L269 | train |
quiltdata/quilt | compiler/quilt/tools/command.py | _get_session | def _get_session(team, timeout=None):
"""
Creates a session or returns an existing session.
"""
global _sessions # pylint:disable=C0103
session = _sessions.get(team)
if session is None:
auth = _create_auth(team, timeout)
_sessions[team] = session = _create_session(team, auth)
assert session is not None
return session | python | def _get_session(team, timeout=None):
"""
Creates a session or returns an existing session.
"""
global _sessions # pylint:disable=C0103
session = _sessions.get(team)
if session is None:
auth = _create_auth(team, timeout)
_sessions[team] = session = _create_session(team, auth)
assert session is not None
return session | [
"def",
"_get_session",
"(",
"team",
",",
"timeout",
"=",
"None",
")",
":",
"global",
"_sessions",
"# pylint:disable=C0103",
"session",
"=",
"_sessions",
".",
"get",
"(",
"team",
")",
"if",
"session",
"is",
"None",
":",
"auth",
"=",
"_create_auth",
"(",
"team",
",",
"timeout",
")",
"_sessions",
"[",
"team",
"]",
"=",
"session",
"=",
"_create_session",
"(",
"team",
",",
"auth",
")",
"assert",
"session",
"is",
"not",
"None",
"return",
"session"
]
| Creates a session or returns an existing session. | [
"Creates",
"a",
"session",
"or",
"returns",
"an",
"existing",
"session",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/command.py#L273-L285 | train |
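Usage note: the module-level `_sessions` dict makes this a per-registry memoizer; a small sketch of the contract (team `None` means the public registry):

    s1 = _get_session(None)   # first call: builds auth and a session, caches it
    s2 = _get_session(None)   # second call: returns the cached object
    assert s1 is s2           # one session per registry until _clear_session runs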
quiltdata/quilt | compiler/quilt/tools/command.py | _check_team_login | def _check_team_login(team):
"""
Disallow simultaneous public cloud and team logins.
"""
contents = _load_auth()
for auth in itervalues(contents):
existing_team = auth.get('team')
if team and team != existing_team:
raise CommandException(
"Can't log in as team %r; log out first." % team
)
elif not team and existing_team:
raise CommandException(
"Can't log in as a public user; log out from team %r first." % existing_team
) | python | def _check_team_login(team):
"""
Disallow simultaneous public cloud and team logins.
"""
contents = _load_auth()
for auth in itervalues(contents):
existing_team = auth.get('team')
if team and team != existing_team:
raise CommandException(
"Can't log in as team %r; log out first." % team
)
elif not team and existing_team:
raise CommandException(
"Can't log in as a public user; log out from team %r first." % existing_team
) | [
"def",
"_check_team_login",
"(",
"team",
")",
":",
"contents",
"=",
"_load_auth",
"(",
")",
"for",
"auth",
"in",
"itervalues",
"(",
"contents",
")",
":",
"existing_team",
"=",
"auth",
".",
"get",
"(",
"'team'",
")",
"if",
"team",
"and",
"team",
"!=",
"existing_team",
":",
"raise",
"CommandException",
"(",
"\"Can't log in as team %r; log out first.\"",
"%",
"team",
")",
"elif",
"not",
"team",
"and",
"existing_team",
":",
"raise",
"CommandException",
"(",
"\"Can't log in as a public user; log out from team %r first.\"",
"%",
"existing_team",
")"
]
| Disallow simultaneous public cloud and team logins. | [
"Disallow",
"simultaneous",
"public",
"cloud",
"and",
"team",
"logins",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/command.py#L351-L366 | train |
quiltdata/quilt | compiler/quilt/tools/command.py | _check_team_exists | def _check_team_exists(team):
"""
Check that the team registry actually exists.
"""
if team is None:
return
hostname = urlparse(get_registry_url(team)).hostname
try:
socket.gethostbyname(hostname)
except IOError:
try:
# Do we have internet?
socket.gethostbyname('quiltdata.com')
except IOError:
message = "Can't find quiltdata.com. Check your internet connection."
else:
message = "Unable to connect to registry. Is the team name %r correct?" % team
raise CommandException(message) | python | def _check_team_exists(team):
"""
Check that the team registry actually exists.
"""
if team is None:
return
hostname = urlparse(get_registry_url(team)).hostname
try:
socket.gethostbyname(hostname)
except IOError:
try:
# Do we have internet?
socket.gethostbyname('quiltdata.com')
except IOError:
message = "Can't find quiltdata.com. Check your internet connection."
else:
message = "Unable to connect to registry. Is the team name %r correct?" % team
raise CommandException(message) | [
"def",
"_check_team_exists",
"(",
"team",
")",
":",
"if",
"team",
"is",
"None",
":",
"return",
"hostname",
"=",
"urlparse",
"(",
"get_registry_url",
"(",
"team",
")",
")",
".",
"hostname",
"try",
":",
"socket",
".",
"gethostbyname",
"(",
"hostname",
")",
"except",
"IOError",
":",
"try",
":",
"# Do we have internet?",
"socket",
".",
"gethostbyname",
"(",
"'quiltdata.com'",
")",
"except",
"IOError",
":",
"message",
"=",
"\"Can't find quiltdata.com. Check your internet connection.\"",
"else",
":",
"message",
"=",
"\"Unable to connect to registry. Is the team name %r correct?\"",
"%",
"team",
"raise",
"CommandException",
"(",
"message",
")"
]
| Check that the team registry actually exists. | [
"Check",
"that",
"the",
"team",
"registry",
"actually",
"exists",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/command.py#L375-L393 | train |
quiltdata/quilt | compiler/quilt/tools/command.py | login_with_token | def login_with_token(refresh_token, team=None):
"""
Authenticate using an existing token.
"""
# Get an access token and a new refresh token.
_check_team_id(team)
auth = _update_auth(team, refresh_token)
url = get_registry_url(team)
contents = _load_auth()
contents[url] = auth
_save_auth(contents)
_clear_session(team) | python | def login_with_token(refresh_token, team=None):
"""
Authenticate using an existing token.
"""
# Get an access token and a new refresh token.
_check_team_id(team)
auth = _update_auth(team, refresh_token)
url = get_registry_url(team)
contents = _load_auth()
contents[url] = auth
_save_auth(contents)
_clear_session(team) | [
"def",
"login_with_token",
"(",
"refresh_token",
",",
"team",
"=",
"None",
")",
":",
"# Get an access token and a new refresh token.",
"_check_team_id",
"(",
"team",
")",
"auth",
"=",
"_update_auth",
"(",
"team",
",",
"refresh_token",
")",
"url",
"=",
"get_registry_url",
"(",
"team",
")",
"contents",
"=",
"_load_auth",
"(",
")",
"contents",
"[",
"url",
"]",
"=",
"auth",
"_save_auth",
"(",
"contents",
")",
"_clear_session",
"(",
"team",
")"
]
| Authenticate using an existing token. | [
"Authenticate",
"using",
"an",
"existing",
"token",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/command.py#L417-L430 | train |
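Usage note: hypothetical calls, with a placeholder where the refresh token (copied from the registry's login page) would go:

    from quilt.tools.command import login_with_token

    login_with_token('<refresh-token>')               # public registry
    login_with_token('<refresh-token>', team='acme')  # 'acme' is a made-up team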
quiltdata/quilt | compiler/quilt/tools/command.py | generate | def generate(directory, outfilename=DEFAULT_BUILDFILE):
"""
Generate a build-file for quilt build from a directory of
source files.
"""
try:
buildfilepath = generate_build_file(directory, outfilename=outfilename)
except BuildException as builderror:
raise CommandException(str(builderror))
print("Generated build-file %s." % (buildfilepath)) | python | def generate(directory, outfilename=DEFAULT_BUILDFILE):
"""
Generate a build-file for quilt build from a directory of
source files.
"""
try:
buildfilepath = generate_build_file(directory, outfilename=outfilename)
except BuildException as builderror:
raise CommandException(str(builderror))
print("Generated build-file %s." % (buildfilepath)) | [
"def",
"generate",
"(",
"directory",
",",
"outfilename",
"=",
"DEFAULT_BUILDFILE",
")",
":",
"try",
":",
"buildfilepath",
"=",
"generate_build_file",
"(",
"directory",
",",
"outfilename",
"=",
"outfilename",
")",
"except",
"BuildException",
"as",
"builderror",
":",
"raise",
"CommandException",
"(",
"str",
"(",
"builderror",
")",
")",
"print",
"(",
"\"Generated build-file %s.\"",
"%",
"(",
"buildfilepath",
")",
")"
]
| Generate a build-file for quilt build from a directory of
source files. | [
"Generate",
"a",
"build",
"-",
"file",
"for",
"quilt",
"build",
"from",
"a",
"directory",
"of",
"source",
"files",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/command.py#L445-L455 | train |
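Usage note: a sketch of generating a build file for a directory of source files ('raw_data/' is hypothetical; `outfilename` defaults to DEFAULT_BUILDFILE, typically build.yml):

    from quilt.tools.command import generate

    # Scans raw_data/ and writes a build file describing its contents.
    generate('raw_data/')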
quiltdata/quilt | compiler/quilt/tools/command.py | build | def build(package, path=None, dry_run=False, env='default', force=False, build_file=False):
"""
Compile a Quilt data package, either from a build file or an existing package node.
:param package: short package specifier, i.e. 'team:user/pkg'
:param path: file path, git url, or existing package node
"""
# TODO: rename 'path' param to 'target'?
team, _, _, subpath = parse_package(package, allow_subpath=True)
_check_team_id(team)
logged_in_team = _find_logged_in_team()
if logged_in_team is not None and team is None and force is False:
answer = input("You're logged in as a team member, but you aren't specifying "
"a team for the package you're currently building. Maybe you meant:\n"
"quilt build {team}:{package}\n"
"Are you sure you want to continue? (y/N) ".format(
team=logged_in_team, package=package))
if answer.lower() != 'y':
return
# Backward compatibility: if there's no subpath, we're building a top-level package,
# so treat `path` as a build file, not as a data node.
if not subpath:
build_file = True
package_hash = hashlib.md5(package.encode('utf-8')).hexdigest()
try:
_build_internal(package, path, dry_run, env, build_file)
except Exception as ex:
_log(team, type='build', package=package_hash, dry_run=dry_run, env=env, error=str(ex))
raise
_log(team, type='build', package=package_hash, dry_run=dry_run, env=env) | python | def build(package, path=None, dry_run=False, env='default', force=False, build_file=False):
"""
Compile a Quilt data package, either from a build file or an existing package node.
:param package: short package specifier, i.e. 'team:user/pkg'
:param path: file path, git url, or existing package node
"""
# TODO: rename 'path' param to 'target'?
team, _, _, subpath = parse_package(package, allow_subpath=True)
_check_team_id(team)
logged_in_team = _find_logged_in_team()
if logged_in_team is not None and team is None and force is False:
answer = input("You're logged in as a team member, but you aren't specifying "
"a team for the package you're currently building. Maybe you meant:\n"
"quilt build {team}:{package}\n"
"Are you sure you want to continue? (y/N) ".format(
team=logged_in_team, package=package))
if answer.lower() != 'y':
return
# Backward compatibility: if there's no subpath, we're building a top-level package,
# so treat `path` as a build file, not as a data node.
if not subpath:
build_file = True
package_hash = hashlib.md5(package.encode('utf-8')).hexdigest()
try:
_build_internal(package, path, dry_run, env, build_file)
except Exception as ex:
_log(team, type='build', package=package_hash, dry_run=dry_run, env=env, error=str(ex))
raise
_log(team, type='build', package=package_hash, dry_run=dry_run, env=env) | [
"def",
"build",
"(",
"package",
",",
"path",
"=",
"None",
",",
"dry_run",
"=",
"False",
",",
"env",
"=",
"'default'",
",",
"force",
"=",
"False",
",",
"build_file",
"=",
"False",
")",
":",
"# TODO: rename 'path' param to 'target'?",
"team",
",",
"_",
",",
"_",
",",
"subpath",
"=",
"parse_package",
"(",
"package",
",",
"allow_subpath",
"=",
"True",
")",
"_check_team_id",
"(",
"team",
")",
"logged_in_team",
"=",
"_find_logged_in_team",
"(",
")",
"if",
"logged_in_team",
"is",
"not",
"None",
"and",
"team",
"is",
"None",
"and",
"force",
"is",
"False",
":",
"answer",
"=",
"input",
"(",
"\"You're logged in as a team member, but you aren't specifying \"",
"\"a team for the package you're currently building. Maybe you meant:\\n\"",
"\"quilt build {team}:{package}\\n\"",
"\"Are you sure you want to continue? (y/N) \"",
".",
"format",
"(",
"team",
"=",
"logged_in_team",
",",
"package",
"=",
"package",
")",
")",
"if",
"answer",
".",
"lower",
"(",
")",
"!=",
"'y'",
":",
"return",
"# Backward compatibility: if there's no subpath, we're building a top-level package,",
"# so treat `path` as a build file, not as a data node.",
"if",
"not",
"subpath",
":",
"build_file",
"=",
"True",
"package_hash",
"=",
"hashlib",
".",
"md5",
"(",
"package",
".",
"encode",
"(",
"'utf-8'",
")",
")",
".",
"hexdigest",
"(",
")",
"try",
":",
"_build_internal",
"(",
"package",
",",
"path",
",",
"dry_run",
",",
"env",
",",
"build_file",
")",
"except",
"Exception",
"as",
"ex",
":",
"_log",
"(",
"team",
",",
"type",
"=",
"'build'",
",",
"package",
"=",
"package_hash",
",",
"dry_run",
"=",
"dry_run",
",",
"env",
"=",
"env",
",",
"error",
"=",
"str",
"(",
"ex",
")",
")",
"raise",
"_log",
"(",
"team",
",",
"type",
"=",
"'build'",
",",
"package",
"=",
"package_hash",
",",
"dry_run",
"=",
"dry_run",
",",
"env",
"=",
"env",
")"
]
| Compile a Quilt data package, either from a build file or an existing package node.
:param package: short package specifier, i.e. 'team:user/pkg'
:param path: file path, git url, or existing package node | [
"Compile",
"a",
"Quilt",
"data",
"package",
"either",
"from",
"a",
"build",
"file",
"or",
"an",
"existing",
"package",
"node",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/command.py#L502-L533 | train |
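Usage note: illustrative calls with made-up names; a directory path is handled by build_from_path further below, which generates a build file first:

    from quilt.tools.command import build

    build('user/wine', 'build.yml')        # compile from an explicit build file
    build('acme:user/wine', 'raw_data/')   # team package built from a directory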
quiltdata/quilt | compiler/quilt/tools/command.py | build_from_node | def build_from_node(package, node):
"""
Compile a Quilt data package from an existing package node.
"""
team, owner, pkg, subpath = parse_package(package, allow_subpath=True)
_check_team_id(team)
store = PackageStore()
pkg_root = get_or_create_package(store, team, owner, pkg, subpath)
if not subpath and not isinstance(node, nodes.GroupNode):
raise CommandException("Top-level node must be a group")
def _process_node(node, path):
if not isinstance(node._meta, dict):
raise CommandException(
"Error in %s: value must be a dictionary" % '.'.join(path + ['_meta'])
)
meta = dict(node._meta)
system_meta = meta.pop(SYSTEM_METADATA, {})
if not isinstance(system_meta, dict):
raise CommandException(
"Error in %s: %s overwritten. %s is a reserved metadata key. Try a different key." %
('.'.join(path + ['_meta']), SYSTEM_METADATA, SYSTEM_METADATA)
)
if isinstance(node, nodes.GroupNode):
store.add_to_package_group(pkg_root, path, meta)
for key, child in node._items():
_process_node(child, path + [key])
elif isinstance(node, nodes.DataNode):
# TODO: Reuse existing fragments if we have them.
data = node._data()
filepath = system_meta.get('filepath')
transform = system_meta.get('transform')
if isinstance(data, pd.DataFrame):
store.add_to_package_df(pkg_root, data, path, TargetType.PANDAS, filepath, transform, meta)
elif isinstance(data, np.ndarray):
store.add_to_package_numpy(pkg_root, data, path, TargetType.NUMPY, filepath, transform, meta)
elif isinstance(data, string_types):
store.add_to_package_file(pkg_root, data, path, TargetType.FILE, filepath, transform, meta)
else:
assert False, "Unexpected data type: %r" % data
else:
assert False, "Unexpected node type: %r" % node
try:
_process_node(node, subpath)
except StoreException as ex:
raise CommandException("Failed to build the package: %s" % ex)
store.save_package_contents(pkg_root, team, owner, pkg) | python | def build_from_node(package, node):
"""
Compile a Quilt data package from an existing package node.
"""
team, owner, pkg, subpath = parse_package(package, allow_subpath=True)
_check_team_id(team)
store = PackageStore()
pkg_root = get_or_create_package(store, team, owner, pkg, subpath)
if not subpath and not isinstance(node, nodes.GroupNode):
raise CommandException("Top-level node must be a group")
def _process_node(node, path):
if not isinstance(node._meta, dict):
raise CommandException(
"Error in %s: value must be a dictionary" % '.'.join(path + ['_meta'])
)
meta = dict(node._meta)
system_meta = meta.pop(SYSTEM_METADATA, {})
if not isinstance(system_meta, dict):
raise CommandException(
"Error in %s: %s overwritten. %s is a reserved metadata key. Try a different key." %
('.'.join(path + ['_meta']), SYSTEM_METADATA, SYSTEM_METADATA)
)
if isinstance(node, nodes.GroupNode):
store.add_to_package_group(pkg_root, path, meta)
for key, child in node._items():
_process_node(child, path + [key])
elif isinstance(node, nodes.DataNode):
# TODO: Reuse existing fragments if we have them.
data = node._data()
filepath = system_meta.get('filepath')
transform = system_meta.get('transform')
if isinstance(data, pd.DataFrame):
store.add_to_package_df(pkg_root, data, path, TargetType.PANDAS, filepath, transform, meta)
elif isinstance(data, np.ndarray):
store.add_to_package_numpy(pkg_root, data, path, TargetType.NUMPY, filepath, transform, meta)
elif isinstance(data, string_types):
store.add_to_package_file(pkg_root, data, path, TargetType.FILE, filepath, transform, meta)
else:
assert False, "Unexpected data type: %r" % data
else:
assert False, "Unexpected node type: %r" % node
try:
_process_node(node, subpath)
except StoreException as ex:
raise CommandException("Failed to build the package: %s" % ex)
store.save_package_contents(pkg_root, team, owner, pkg) | [
"def",
"build_from_node",
"(",
"package",
",",
"node",
")",
":",
"team",
",",
"owner",
",",
"pkg",
",",
"subpath",
"=",
"parse_package",
"(",
"package",
",",
"allow_subpath",
"=",
"True",
")",
"_check_team_id",
"(",
"team",
")",
"store",
"=",
"PackageStore",
"(",
")",
"pkg_root",
"=",
"get_or_create_package",
"(",
"store",
",",
"team",
",",
"owner",
",",
"pkg",
",",
"subpath",
")",
"if",
"not",
"subpath",
"and",
"not",
"isinstance",
"(",
"node",
",",
"nodes",
".",
"GroupNode",
")",
":",
"raise",
"CommandException",
"(",
"\"Top-level node must be a group\"",
")",
"def",
"_process_node",
"(",
"node",
",",
"path",
")",
":",
"if",
"not",
"isinstance",
"(",
"node",
".",
"_meta",
",",
"dict",
")",
":",
"raise",
"CommandException",
"(",
"\"Error in %s: value must be a dictionary\"",
"%",
"'.'",
".",
"join",
"(",
"path",
"+",
"[",
"'_meta'",
"]",
")",
")",
"meta",
"=",
"dict",
"(",
"node",
".",
"_meta",
")",
"system_meta",
"=",
"meta",
".",
"pop",
"(",
"SYSTEM_METADATA",
",",
"{",
"}",
")",
"if",
"not",
"isinstance",
"(",
"system_meta",
",",
"dict",
")",
":",
"raise",
"CommandException",
"(",
"\"Error in %s: %s overwritten. %s is a reserved metadata key. Try a different key.\"",
"%",
"(",
"'.'",
".",
"join",
"(",
"path",
"+",
"[",
"'_meta'",
"]",
")",
",",
"SYSTEM_METADATA",
",",
"SYSTEM_METADATA",
")",
")",
"if",
"isinstance",
"(",
"node",
",",
"nodes",
".",
"GroupNode",
")",
":",
"store",
".",
"add_to_package_group",
"(",
"pkg_root",
",",
"path",
",",
"meta",
")",
"for",
"key",
",",
"child",
"in",
"node",
".",
"_items",
"(",
")",
":",
"_process_node",
"(",
"child",
",",
"path",
"+",
"[",
"key",
"]",
")",
"elif",
"isinstance",
"(",
"node",
",",
"nodes",
".",
"DataNode",
")",
":",
"# TODO: Reuse existing fragments if we have them.",
"data",
"=",
"node",
".",
"_data",
"(",
")",
"filepath",
"=",
"system_meta",
".",
"get",
"(",
"'filepath'",
")",
"transform",
"=",
"system_meta",
".",
"get",
"(",
"'transform'",
")",
"if",
"isinstance",
"(",
"data",
",",
"pd",
".",
"DataFrame",
")",
":",
"store",
".",
"add_to_package_df",
"(",
"pkg_root",
",",
"data",
",",
"path",
",",
"TargetType",
".",
"PANDAS",
",",
"filepath",
",",
"transform",
",",
"meta",
")",
"elif",
"isinstance",
"(",
"data",
",",
"np",
".",
"ndarray",
")",
":",
"store",
".",
"add_to_package_numpy",
"(",
"pkg_root",
",",
"data",
",",
"path",
",",
"TargetType",
".",
"NUMPY",
",",
"filepath",
",",
"transform",
",",
"meta",
")",
"elif",
"isinstance",
"(",
"data",
",",
"string_types",
")",
":",
"store",
".",
"add_to_package_file",
"(",
"pkg_root",
",",
"data",
",",
"path",
",",
"TargetType",
".",
"FILE",
",",
"filepath",
",",
"transform",
",",
"meta",
")",
"else",
":",
"assert",
"False",
",",
"\"Unexpected data type: %r\"",
"%",
"data",
"else",
":",
"assert",
"False",
",",
"\"Unexpected node type: %r\"",
"%",
"node",
"try",
":",
"_process_node",
"(",
"node",
",",
"subpath",
")",
"except",
"StoreException",
"as",
"ex",
":",
"raise",
"CommandException",
"(",
"\"Failed to build the package: %s\"",
"%",
"ex",
")",
"store",
".",
"save_package_contents",
"(",
"pkg_root",
",",
"team",
",",
"owner",
",",
"pkg",
")"
]
| Compile a Quilt data package from an existing package node. | [
"Compile",
"a",
"Quilt",
"data",
"package",
"from",
"an",
"existing",
"package",
"node",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/command.py#L568-L618 | train |
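Usage note: a sketch of snapshotting an existing package node under a new name; the data import assumes 'user/wine' is already installed locally:

    from quilt.data.user import wine                 # hypothetical installed package
    from quilt.tools.command import build_from_node

    # Persists the node tree (groups, data, metadata) as a new local package.
    build_from_node('user/wine_copy', wine)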
quiltdata/quilt | compiler/quilt/tools/command.py | build_from_path | def build_from_path(package, path, dry_run=False, env='default', outfilename=DEFAULT_BUILDFILE):
"""
Compile a Quilt data package from a build file.
Path can be a directory, in which case the build file will be generated automatically.
"""
team, owner, pkg, subpath = parse_package(package, allow_subpath=True)
if not os.path.exists(path):
raise CommandException("%s does not exist." % path)
try:
if os.path.isdir(path):
buildpath = os.path.join(path, outfilename)
if os.path.exists(buildpath):
raise CommandException(
"Build file already exists. Run `quilt build %r` instead." % buildpath
)
contents = generate_contents(path, outfilename)
build_package_from_contents(team, owner, pkg, subpath, path, contents, dry_run=dry_run, env=env)
else:
build_package(team, owner, pkg, subpath, path, dry_run=dry_run, env=env)
if not dry_run:
print("Built %s successfully." % package)
except BuildException as ex:
raise CommandException("Failed to build the package: %s" % ex) | python | def build_from_path(package, path, dry_run=False, env='default', outfilename=DEFAULT_BUILDFILE):
"""
Compile a Quilt data package from a build file.
Path can be a directory, in which case the build file will be generated automatically.
"""
team, owner, pkg, subpath = parse_package(package, allow_subpath=True)
if not os.path.exists(path):
raise CommandException("%s does not exist." % path)
try:
if os.path.isdir(path):
buildpath = os.path.join(path, outfilename)
if os.path.exists(buildpath):
raise CommandException(
"Build file already exists. Run `quilt build %r` instead." % buildpath
)
contents = generate_contents(path, outfilename)
build_package_from_contents(team, owner, pkg, subpath, path, contents, dry_run=dry_run, env=env)
else:
build_package(team, owner, pkg, subpath, path, dry_run=dry_run, env=env)
if not dry_run:
print("Built %s successfully." % package)
except BuildException as ex:
raise CommandException("Failed to build the package: %s" % ex) | [
"def",
"build_from_path",
"(",
"package",
",",
"path",
",",
"dry_run",
"=",
"False",
",",
"env",
"=",
"'default'",
",",
"outfilename",
"=",
"DEFAULT_BUILDFILE",
")",
":",
"team",
",",
"owner",
",",
"pkg",
",",
"subpath",
"=",
"parse_package",
"(",
"package",
",",
"allow_subpath",
"=",
"True",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
":",
"raise",
"CommandException",
"(",
"\"%s does not exist.\"",
"%",
"path",
")",
"try",
":",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
":",
"buildpath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"outfilename",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"buildpath",
")",
":",
"raise",
"CommandException",
"(",
"\"Build file already exists. Run `quilt build %r` instead.\"",
"%",
"buildpath",
")",
"contents",
"=",
"generate_contents",
"(",
"path",
",",
"outfilename",
")",
"build_package_from_contents",
"(",
"team",
",",
"owner",
",",
"pkg",
",",
"subpath",
",",
"path",
",",
"contents",
",",
"dry_run",
"=",
"dry_run",
",",
"env",
"=",
"env",
")",
"else",
":",
"build_package",
"(",
"team",
",",
"owner",
",",
"pkg",
",",
"subpath",
",",
"path",
",",
"dry_run",
"=",
"dry_run",
",",
"env",
"=",
"env",
")",
"if",
"not",
"dry_run",
":",
"print",
"(",
"\"Built %s successfully.\"",
"%",
"package",
")",
"except",
"BuildException",
"as",
"ex",
":",
"raise",
"CommandException",
"(",
"\"Failed to build the package: %s\"",
"%",
"ex",
")"
]
| Compile a Quilt data package from a build file.
Path can be a directory, in which case the build file will be generated automatically. | [
"Compile",
"a",
"Quilt",
"data",
"package",
"from",
"a",
"build",
"file",
".",
"Path",
"can",
"be",
"a",
"directory",
"in",
"which",
"case",
"the",
"build",
"file",
"will",
"be",
"generated",
"automatically",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/command.py#L620-L646 | train |
quiltdata/quilt | compiler/quilt/tools/command.py | log | def log(package):
"""
List all of the changes to a package on the server.
"""
team, owner, pkg = parse_package(package)
session = _get_session(team)
response = session.get(
"{url}/api/log/{owner}/{pkg}/".format(
url=get_registry_url(team),
owner=owner,
pkg=pkg
)
)
table = [("Hash", "Pushed", "Author", "Tags", "Versions")]
for entry in reversed(response.json()['logs']):
ugly = datetime.fromtimestamp(entry['created'])
nice = ugly.strftime("%Y-%m-%d %H:%M:%S")
table.append((entry['hash'], nice, entry['author'],
str(entry.get('tags', [])), str(entry.get('versions', []))))
_print_table(table) | python | def log(package):
"""
List all of the changes to a package on the server.
"""
team, owner, pkg = parse_package(package)
session = _get_session(team)
response = session.get(
"{url}/api/log/{owner}/{pkg}/".format(
url=get_registry_url(team),
owner=owner,
pkg=pkg
)
)
table = [("Hash", "Pushed", "Author", "Tags", "Versions")]
for entry in reversed(response.json()['logs']):
ugly = datetime.fromtimestamp(entry['created'])
nice = ugly.strftime("%Y-%m-%d %H:%M:%S")
table.append((entry['hash'], nice, entry['author'],
str(entry.get('tags', [])), str(entry.get('versions', []))))
_print_table(table) | [
"def",
"log",
"(",
"package",
")",
":",
"team",
",",
"owner",
",",
"pkg",
"=",
"parse_package",
"(",
"package",
")",
"session",
"=",
"_get_session",
"(",
"team",
")",
"response",
"=",
"session",
".",
"get",
"(",
"\"{url}/api/log/{owner}/{pkg}/\"",
".",
"format",
"(",
"url",
"=",
"get_registry_url",
"(",
"team",
")",
",",
"owner",
"=",
"owner",
",",
"pkg",
"=",
"pkg",
")",
")",
"table",
"=",
"[",
"(",
"\"Hash\"",
",",
"\"Pushed\"",
",",
"\"Author\"",
",",
"\"Tags\"",
",",
"\"Versions\"",
")",
"]",
"for",
"entry",
"in",
"reversed",
"(",
"response",
".",
"json",
"(",
")",
"[",
"'logs'",
"]",
")",
":",
"ugly",
"=",
"datetime",
".",
"fromtimestamp",
"(",
"entry",
"[",
"'created'",
"]",
")",
"nice",
"=",
"ugly",
".",
"strftime",
"(",
"\"%Y-%m-%d %H:%M:%S\"",
")",
"table",
".",
"append",
"(",
"(",
"entry",
"[",
"'hash'",
"]",
",",
"nice",
",",
"entry",
"[",
"'author'",
"]",
",",
"str",
"(",
"entry",
".",
"get",
"(",
"'tags'",
",",
"[",
"]",
")",
")",
",",
"str",
"(",
"entry",
".",
"get",
"(",
"'versions'",
",",
"[",
"]",
")",
")",
")",
")",
"_print_table",
"(",
"table",
")"
]
| List all of the changes to a package on the server. | [
"List",
"all",
"of",
"the",
"changes",
"to",
"a",
"package",
"on",
"the",
"server",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/command.py#L648-L669 | train |
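Usage note: example with a hypothetical package name:

    from quilt.tools.command import log

    # Prints a table with columns: Hash, Pushed, Author, Tags, Versions.
    log('user/wine')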
quiltdata/quilt | compiler/quilt/tools/command.py | push | def push(package, is_public=False, is_team=False, reupload=False, hash=None):
"""
Push a Quilt data package to the server
"""
team, owner, pkg, subpath = parse_package(package, allow_subpath=True)
_check_team_id(team)
session = _get_session(team)
store, pkgroot = PackageStore.find_package(team, owner, pkg, pkghash=hash)
if pkgroot is None:
raise CommandException("Package {package} not found.".format(package=package))
pkghash = hash_contents(pkgroot)
if hash is not None:
assert pkghash == hash
contents = pkgroot
for component in subpath:
try:
contents = contents.children[component]
except (AttributeError, KeyError):
raise CommandException("Invalid subpath: %r" % component)
def _push_package(dry_run=False, sizes=dict()):
data = json.dumps(dict(
dry_run=dry_run,
is_public=is_public,
is_team=is_team,
contents=contents,
description="", # TODO
sizes=sizes
), default=encode_node)
compressed_data = gzip_compress(data.encode('utf-8'))
if subpath:
return session.post(
"{url}/api/package_update/{owner}/{pkg}/{subpath}".format(
url=get_registry_url(team),
owner=owner,
pkg=pkg,
subpath='/'.join(subpath)
),
data=compressed_data,
headers={
'Content-Encoding': 'gzip'
}
)
else:
return session.put(
"{url}/api/package/{owner}/{pkg}/{hash}".format(
url=get_registry_url(team),
owner=owner,
pkg=pkg,
hash=pkghash
),
data=compressed_data,
headers={
'Content-Encoding': 'gzip'
}
)
print("Fetching upload URLs from the registry...")
resp = _push_package(dry_run=True)
obj_urls = resp.json()['upload_urls']
assert set(obj_urls) == set(find_object_hashes(contents))
obj_sizes = {
obj_hash: os.path.getsize(store.object_path(obj_hash)) for obj_hash in obj_urls
}
success = upload_fragments(store, obj_urls, obj_sizes, reupload=reupload)
if not success:
raise CommandException("Failed to upload fragments")
print("Uploading package metadata...")
resp = _push_package(sizes=obj_sizes)
package_url = resp.json()['package_url']
if not subpath:
# Update the latest tag.
print("Updating the 'latest' tag...")
session.put(
"{url}/api/tag/{owner}/{pkg}/{tag}".format(
url=get_registry_url(team),
owner=owner,
pkg=pkg,
tag=LATEST_TAG
),
data=json.dumps(dict(
hash=pkghash
))
)
print("Push complete. %s is live:\n%s" % (package, package_url)) | python | def push(package, is_public=False, is_team=False, reupload=False, hash=None):
"""
Push a Quilt data package to the server
"""
team, owner, pkg, subpath = parse_package(package, allow_subpath=True)
_check_team_id(team)
session = _get_session(team)
store, pkgroot = PackageStore.find_package(team, owner, pkg, pkghash=hash)
if pkgroot is None:
raise CommandException("Package {package} not found.".format(package=package))
pkghash = hash_contents(pkgroot)
if hash is not None:
assert pkghash == hash
contents = pkgroot
for component in subpath:
try:
contents = contents.children[component]
except (AttributeError, KeyError):
raise CommandException("Invalid subpath: %r" % component)
def _push_package(dry_run=False, sizes=dict()):
data = json.dumps(dict(
dry_run=dry_run,
is_public=is_public,
is_team=is_team,
contents=contents,
description="", # TODO
sizes=sizes
), default=encode_node)
compressed_data = gzip_compress(data.encode('utf-8'))
if subpath:
return session.post(
"{url}/api/package_update/{owner}/{pkg}/{subpath}".format(
url=get_registry_url(team),
owner=owner,
pkg=pkg,
subpath='/'.join(subpath)
),
data=compressed_data,
headers={
'Content-Encoding': 'gzip'
}
)
else:
return session.put(
"{url}/api/package/{owner}/{pkg}/{hash}".format(
url=get_registry_url(team),
owner=owner,
pkg=pkg,
hash=pkghash
),
data=compressed_data,
headers={
'Content-Encoding': 'gzip'
}
)
print("Fetching upload URLs from the registry...")
resp = _push_package(dry_run=True)
obj_urls = resp.json()['upload_urls']
assert set(obj_urls) == set(find_object_hashes(contents))
obj_sizes = {
obj_hash: os.path.getsize(store.object_path(obj_hash)) for obj_hash in obj_urls
}
success = upload_fragments(store, obj_urls, obj_sizes, reupload=reupload)
if not success:
raise CommandException("Failed to upload fragments")
print("Uploading package metadata...")
resp = _push_package(sizes=obj_sizes)
package_url = resp.json()['package_url']
if not subpath:
# Update the latest tag.
print("Updating the 'latest' tag...")
session.put(
"{url}/api/tag/{owner}/{pkg}/{tag}".format(
url=get_registry_url(team),
owner=owner,
pkg=pkg,
tag=LATEST_TAG
),
data=json.dumps(dict(
hash=pkghash
))
)
print("Push complete. %s is live:\n%s" % (package, package_url)) | [
"def",
"push",
"(",
"package",
",",
"is_public",
"=",
"False",
",",
"is_team",
"=",
"False",
",",
"reupload",
"=",
"False",
",",
"hash",
"=",
"None",
")",
":",
"team",
",",
"owner",
",",
"pkg",
",",
"subpath",
"=",
"parse_package",
"(",
"package",
",",
"allow_subpath",
"=",
"True",
")",
"_check_team_id",
"(",
"team",
")",
"session",
"=",
"_get_session",
"(",
"team",
")",
"store",
",",
"pkgroot",
"=",
"PackageStore",
".",
"find_package",
"(",
"team",
",",
"owner",
",",
"pkg",
",",
"pkghash",
"=",
"hash",
")",
"if",
"pkgroot",
"is",
"None",
":",
"raise",
"CommandException",
"(",
"\"Package {package} not found.\"",
".",
"format",
"(",
"package",
"=",
"package",
")",
")",
"pkghash",
"=",
"hash_contents",
"(",
"pkgroot",
")",
"if",
"hash",
"is",
"not",
"None",
":",
"assert",
"pkghash",
"==",
"hash",
"contents",
"=",
"pkgroot",
"for",
"component",
"in",
"subpath",
":",
"try",
":",
"contents",
"=",
"contents",
".",
"children",
"[",
"component",
"]",
"except",
"(",
"AttributeError",
",",
"KeyError",
")",
":",
"raise",
"CommandException",
"(",
"\"Invalid subpath: %r\"",
"%",
"component",
")",
"def",
"_push_package",
"(",
"dry_run",
"=",
"False",
",",
"sizes",
"=",
"dict",
"(",
")",
")",
":",
"data",
"=",
"json",
".",
"dumps",
"(",
"dict",
"(",
"dry_run",
"=",
"dry_run",
",",
"is_public",
"=",
"is_public",
",",
"is_team",
"=",
"is_team",
",",
"contents",
"=",
"contents",
",",
"description",
"=",
"\"\"",
",",
"# TODO",
"sizes",
"=",
"sizes",
")",
",",
"default",
"=",
"encode_node",
")",
"compressed_data",
"=",
"gzip_compress",
"(",
"data",
".",
"encode",
"(",
"'utf-8'",
")",
")",
"if",
"subpath",
":",
"return",
"session",
".",
"post",
"(",
"\"{url}/api/package_update/{owner}/{pkg}/{subpath}\"",
".",
"format",
"(",
"url",
"=",
"get_registry_url",
"(",
"team",
")",
",",
"owner",
"=",
"owner",
",",
"pkg",
"=",
"pkg",
",",
"subpath",
"=",
"'/'",
".",
"join",
"(",
"subpath",
")",
")",
",",
"data",
"=",
"compressed_data",
",",
"headers",
"=",
"{",
"'Content-Encoding'",
":",
"'gzip'",
"}",
")",
"else",
":",
"return",
"session",
".",
"put",
"(",
"\"{url}/api/package/{owner}/{pkg}/{hash}\"",
".",
"format",
"(",
"url",
"=",
"get_registry_url",
"(",
"team",
")",
",",
"owner",
"=",
"owner",
",",
"pkg",
"=",
"pkg",
",",
"hash",
"=",
"pkghash",
")",
",",
"data",
"=",
"compressed_data",
",",
"headers",
"=",
"{",
"'Content-Encoding'",
":",
"'gzip'",
"}",
")",
"print",
"(",
"\"Fetching upload URLs from the registry...\"",
")",
"resp",
"=",
"_push_package",
"(",
"dry_run",
"=",
"True",
")",
"obj_urls",
"=",
"resp",
".",
"json",
"(",
")",
"[",
"'upload_urls'",
"]",
"assert",
"set",
"(",
"obj_urls",
")",
"==",
"set",
"(",
"find_object_hashes",
"(",
"contents",
")",
")",
"obj_sizes",
"=",
"{",
"obj_hash",
":",
"os",
".",
"path",
".",
"getsize",
"(",
"store",
".",
"object_path",
"(",
"obj_hash",
")",
")",
"for",
"obj_hash",
"in",
"obj_urls",
"}",
"success",
"=",
"upload_fragments",
"(",
"store",
",",
"obj_urls",
",",
"obj_sizes",
",",
"reupload",
"=",
"reupload",
")",
"if",
"not",
"success",
":",
"raise",
"CommandException",
"(",
"\"Failed to upload fragments\"",
")",
"print",
"(",
"\"Uploading package metadata...\"",
")",
"resp",
"=",
"_push_package",
"(",
"sizes",
"=",
"obj_sizes",
")",
"package_url",
"=",
"resp",
".",
"json",
"(",
")",
"[",
"'package_url'",
"]",
"if",
"not",
"subpath",
":",
"# Update the latest tag.",
"print",
"(",
"\"Updating the 'latest' tag...\"",
")",
"session",
".",
"put",
"(",
"\"{url}/api/tag/{owner}/{pkg}/{tag}\"",
".",
"format",
"(",
"url",
"=",
"get_registry_url",
"(",
"team",
")",
",",
"owner",
"=",
"owner",
",",
"pkg",
"=",
"pkg",
",",
"tag",
"=",
"LATEST_TAG",
")",
",",
"data",
"=",
"json",
".",
"dumps",
"(",
"dict",
"(",
"hash",
"=",
"pkghash",
")",
")",
")",
"print",
"(",
"\"Push complete. %s is live:\\n%s\"",
"%",
"(",
"package",
",",
"package_url",
")",
")"
]
| Push a Quilt data package to the server | [
"Push",
"a",
"Quilt",
"data",
"package",
"to",
"the",
"server"
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/command.py#L671-L766 | train |
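Usage note: typical calls (names hypothetical). Only a full-package push moves the 'latest' tag; a subpath push updates just that subtree:

    from quilt.tools.command import push

    push('user/wine', is_public=True)   # upload fragments, then metadata, then tag
    push('user/wine/raw')               # update only the 'raw' subtree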
quiltdata/quilt | compiler/quilt/tools/command.py | version_list | def version_list(package):
"""
List the versions of a package.
"""
team, owner, pkg = parse_package(package)
session = _get_session(team)
response = session.get(
"{url}/api/version/{owner}/{pkg}/".format(
url=get_registry_url(team),
owner=owner,
pkg=pkg
)
)
for version in response.json()['versions']:
print("%s: %s" % (version['version'], version['hash'])) | python | def version_list(package):
"""
List the versions of a package.
"""
team, owner, pkg = parse_package(package)
session = _get_session(team)
response = session.get(
"{url}/api/version/{owner}/{pkg}/".format(
url=get_registry_url(team),
owner=owner,
pkg=pkg
)
)
for version in response.json()['versions']:
print("%s: %s" % (version['version'], version['hash'])) | [
"def",
"version_list",
"(",
"package",
")",
":",
"team",
",",
"owner",
",",
"pkg",
"=",
"parse_package",
"(",
"package",
")",
"session",
"=",
"_get_session",
"(",
"team",
")",
"response",
"=",
"session",
".",
"get",
"(",
"\"{url}/api/version/{owner}/{pkg}/\"",
".",
"format",
"(",
"url",
"=",
"get_registry_url",
"(",
"team",
")",
",",
"owner",
"=",
"owner",
",",
"pkg",
"=",
"pkg",
")",
")",
"for",
"version",
"in",
"response",
".",
"json",
"(",
")",
"[",
"'versions'",
"]",
":",
"print",
"(",
"\"%s: %s\"",
"%",
"(",
"version",
"[",
"'version'",
"]",
",",
"version",
"[",
"'hash'",
"]",
")",
")"
]
| List the versions of a package. | [
"List",
"the",
"versions",
"of",
"a",
"package",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/command.py#L768-L784 | train |
quiltdata/quilt | compiler/quilt/tools/command.py | version_add | def version_add(package, version, pkghash, force=False):
"""
Add a new version for a given package hash.
Version format needs to follow PEP 440.
Versions are permanent - once created, they cannot be modified or deleted.
"""
team, owner, pkg = parse_package(package)
session = _get_session(team)
try:
Version(version)
except ValueError:
url = "https://www.python.org/dev/peps/pep-0440/#examples-of-compliant-version-schemes"
raise CommandException(
"Invalid version format; see %s" % url
)
if not force:
answer = input("Versions cannot be modified or deleted; are you sure? (y/n) ")
if answer.lower() != 'y':
return
session.put(
"{url}/api/version/{owner}/{pkg}/{version}".format(
url=get_registry_url(team),
owner=owner,
pkg=pkg,
version=version
),
data=json.dumps(dict(
hash=_match_hash(package, pkghash)
))
) | python | def version_add(package, version, pkghash, force=False):
"""
Add a new version for a given package hash.
Version format needs to follow PEP 440.
Versions are permanent - once created, they cannot be modified or deleted.
"""
team, owner, pkg = parse_package(package)
session = _get_session(team)
try:
Version(version)
except ValueError:
url = "https://www.python.org/dev/peps/pep-0440/#examples-of-compliant-version-schemes"
raise CommandException(
"Invalid version format; see %s" % url
)
if not force:
answer = input("Versions cannot be modified or deleted; are you sure? (y/n) ")
if answer.lower() != 'y':
return
session.put(
"{url}/api/version/{owner}/{pkg}/{version}".format(
url=get_registry_url(team),
owner=owner,
pkg=pkg,
version=version
),
data=json.dumps(dict(
hash=_match_hash(package, pkghash)
))
) | [
"def",
"version_add",
"(",
"package",
",",
"version",
",",
"pkghash",
",",
"force",
"=",
"False",
")",
":",
"team",
",",
"owner",
",",
"pkg",
"=",
"parse_package",
"(",
"package",
")",
"session",
"=",
"_get_session",
"(",
"team",
")",
"try",
":",
"Version",
"(",
"version",
")",
"except",
"ValueError",
":",
"url",
"=",
"\"https://www.python.org/dev/peps/pep-0440/#examples-of-compliant-version-schemes\"",
"raise",
"CommandException",
"(",
"\"Invalid version format; see %s\"",
"%",
"url",
")",
"if",
"not",
"force",
":",
"answer",
"=",
"input",
"(",
"\"Versions cannot be modified or deleted; are you sure? (y/n) \"",
")",
"if",
"answer",
".",
"lower",
"(",
")",
"!=",
"'y'",
":",
"return",
"session",
".",
"put",
"(",
"\"{url}/api/version/{owner}/{pkg}/{version}\"",
".",
"format",
"(",
"url",
"=",
"get_registry_url",
"(",
"team",
")",
",",
"owner",
"=",
"owner",
",",
"pkg",
"=",
"pkg",
",",
"version",
"=",
"version",
")",
",",
"data",
"=",
"json",
".",
"dumps",
"(",
"dict",
"(",
"hash",
"=",
"_match_hash",
"(",
"package",
",",
"pkghash",
")",
")",
")",
")"
]
| Add a new version for a given package hash.
Version format needs to follow PEP 440.
Versions are permanent - once created, they cannot be modified or deleted. | [
"Add",
"a",
"new",
"version",
"for",
"a",
"given",
"package",
"hash",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/command.py#L786-L819 | train |
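Usage note: a sketch pairing the two version commands; the hash placeholder must be (a prefix of) a real package hash, e.g. one printed by `log`:

    from quilt.tools.command import version_add, version_list

    version_add('user/wine', '1.0.0', '<pkghash>', force=True)  # force skips the prompt
    version_list('user/wine')   # prints lines like "1.0.0: <full hash>"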
quiltdata/quilt | compiler/quilt/tools/command.py | tag_list | def tag_list(package):
"""
List the tags of a package.
"""
team, owner, pkg = parse_package(package)
session = _get_session(team)
response = session.get(
"{url}/api/tag/{owner}/{pkg}/".format(
url=get_registry_url(team),
owner=owner,
pkg=pkg
)
)
for tag in response.json()['tags']:
print("%s: %s" % (tag['tag'], tag['hash'])) | python | def tag_list(package):
"""
List the tags of a package.
"""
team, owner, pkg = parse_package(package)
session = _get_session(team)
response = session.get(
"{url}/api/tag/{owner}/{pkg}/".format(
url=get_registry_url(team),
owner=owner,
pkg=pkg
)
)
for tag in response.json()['tags']:
print("%s: %s" % (tag['tag'], tag['hash'])) | [
"def",
"tag_list",
"(",
"package",
")",
":",
"team",
",",
"owner",
",",
"pkg",
"=",
"parse_package",
"(",
"package",
")",
"session",
"=",
"_get_session",
"(",
"team",
")",
"response",
"=",
"session",
".",
"get",
"(",
"\"{url}/api/tag/{owner}/{pkg}/\"",
".",
"format",
"(",
"url",
"=",
"get_registry_url",
"(",
"team",
")",
",",
"owner",
"=",
"owner",
",",
"pkg",
"=",
"pkg",
")",
")",
"for",
"tag",
"in",
"response",
".",
"json",
"(",
")",
"[",
"'tags'",
"]",
":",
"print",
"(",
"\"%s: %s\"",
"%",
"(",
"tag",
"[",
"'tag'",
"]",
",",
"tag",
"[",
"'hash'",
"]",
")",
")"
]
| List the tags of a package. | [
"List",
"the",
"tags",
"of",
"a",
"package",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/command.py#L821-L837 | train |
quiltdata/quilt | compiler/quilt/tools/command.py | tag_add | def tag_add(package, tag, pkghash):
"""
Add a new tag for a given package hash.
Unlike versions, tags can have an arbitrary format, and can be modified
and deleted.
When a package is pushed, it gets the "latest" tag.
"""
team, owner, pkg = parse_package(package)
session = _get_session(team)
session.put(
"{url}/api/tag/{owner}/{pkg}/{tag}".format(
url=get_registry_url(team),
owner=owner,
pkg=pkg,
tag=tag
),
data=json.dumps(dict(
hash=_match_hash(package, pkghash)
))
) | python | def tag_add(package, tag, pkghash):
"""
Add a new tag for a given package hash.
Unlike versions, tags can have an arbitrary format, and can be modified
and deleted.
When a package is pushed, it gets the "latest" tag.
"""
team, owner, pkg = parse_package(package)
session = _get_session(team)
session.put(
"{url}/api/tag/{owner}/{pkg}/{tag}".format(
url=get_registry_url(team),
owner=owner,
pkg=pkg,
tag=tag
),
data=json.dumps(dict(
hash=_match_hash(package, pkghash)
))
) | [
"def",
"tag_add",
"(",
"package",
",",
"tag",
",",
"pkghash",
")",
":",
"team",
",",
"owner",
",",
"pkg",
"=",
"parse_package",
"(",
"package",
")",
"session",
"=",
"_get_session",
"(",
"team",
")",
"session",
".",
"put",
"(",
"\"{url}/api/tag/{owner}/{pkg}/{tag}\"",
".",
"format",
"(",
"url",
"=",
"get_registry_url",
"(",
"team",
")",
",",
"owner",
"=",
"owner",
",",
"pkg",
"=",
"pkg",
",",
"tag",
"=",
"tag",
")",
",",
"data",
"=",
"json",
".",
"dumps",
"(",
"dict",
"(",
"hash",
"=",
"_match_hash",
"(",
"package",
",",
"pkghash",
")",
")",
")",
")"
]
| Add a new tag for a given package hash.
Unlike versions, tags can have an arbitrary format, and can be modified
and deleted.
When a package is pushed, it gets the "latest" tag. | [
"Add",
"a",
"new",
"tag",
"for",
"a",
"given",
"package",
"hash",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/command.py#L839-L861 | train |
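Usage note: tags are movable labels, unlike versions; hypothetical usage:

    from quilt.tools.command import tag_add, tag_list

    tag_add('user/wine', 'stable', '<pkghash>')  # can be re-pointed or deleted later
    tag_list('user/wine')                        # also shows 'latest' set by push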
quiltdata/quilt | compiler/quilt/tools/command.py | install_via_requirements | def install_via_requirements(requirements_str, force=False):
"""
Download multiple Quilt data packages via quilt.yml requirements file.
"""
if requirements_str[0] == '@':
path = requirements_str[1:]
if os.path.isfile(path):
yaml_data = load_yaml(path)
if 'packages' not in yaml_data.keys():
raise CommandException('Error in {filename}: missing "packages" node'.format(filename=path))
else:
raise CommandException("Requirements file not found: {filename}".format(filename=path))
else:
yaml_data = yaml.safe_load(requirements_str)
for pkginfo in yaml_data['packages']:
info = parse_package_extended(pkginfo)
install(info.full_name, info.hash, info.version, info.tag, force=force) | python | def install_via_requirements(requirements_str, force=False):
"""
Download multiple Quilt data packages via quilt.yml requirements file.
"""
if requirements_str[0] == '@':
path = requirements_str[1:]
if os.path.isfile(path):
yaml_data = load_yaml(path)
if 'packages' not in yaml_data.keys():
raise CommandException('Error in {filename}: missing "packages" node'.format(filename=path))
else:
raise CommandException("Requirements file not found: {filename}".format(filename=path))
else:
yaml_data = yaml.safe_load(requirements_str)
for pkginfo in yaml_data['packages']:
info = parse_package_extended(pkginfo)
install(info.full_name, info.hash, info.version, info.tag, force=force) | [
"def",
"install_via_requirements",
"(",
"requirements_str",
",",
"force",
"=",
"False",
")",
":",
"if",
"requirements_str",
"[",
"0",
"]",
"==",
"'@'",
":",
"path",
"=",
"requirements_str",
"[",
"1",
":",
"]",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"path",
")",
":",
"yaml_data",
"=",
"load_yaml",
"(",
"path",
")",
"if",
"'packages'",
"not",
"in",
"yaml_data",
".",
"keys",
"(",
")",
":",
"raise",
"CommandException",
"(",
"'Error in {filename}: missing \"packages\" node'",
".",
"format",
"(",
"filename",
"=",
"path",
")",
")",
"else",
":",
"raise",
"CommandException",
"(",
"\"Requirements file not found: {filename}\"",
".",
"format",
"(",
"filename",
"=",
"path",
")",
")",
"else",
":",
"yaml_data",
"=",
"yaml",
".",
"safe_load",
"(",
"requirements_str",
")",
"for",
"pkginfo",
"in",
"yaml_data",
"[",
"'packages'",
"]",
":",
"info",
"=",
"parse_package_extended",
"(",
"pkginfo",
")",
"install",
"(",
"info",
".",
"full_name",
",",
"info",
".",
"hash",
",",
"info",
".",
"version",
",",
"info",
".",
"tag",
",",
"force",
"=",
"force",
")"
]
| Download multiple Quilt data packages via quilt.yml requirements file. | [
"Download",
"multiple",
"Quilt",
"data",
"packages",
"via",
"quilt",
".",
"xml",
"requirements",
"file",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/command.py#L879-L895 | train |
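A sketch of both input forms the function accepts; the package name is illustrative and a reachable registry is assumed:

    from quilt.tools.command import install_via_requirements

    # Inline YAML string: must carry a top-level "packages" list
    install_via_requirements("packages:\n  - uciml/iris\n", force=True)

    # "@path" form: the same structure is loaded from a YAML file on disk
    install_via_requirements("@quilt.yml")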
quiltdata/quilt | compiler/quilt/tools/command.py | access_list | def access_list(package):
"""
Print list of users who can access a package.
"""
team, owner, pkg = parse_package(package)
session = _get_session(team)
lookup_url = "{url}/api/access/{owner}/{pkg}/".format(url=get_registry_url(team), owner=owner, pkg=pkg)
response = session.get(lookup_url)
data = response.json()
users = data['users']
print('\n'.join(users)) | python | def access_list(package):
"""
Print list of users who can access a package.
"""
team, owner, pkg = parse_package(package)
session = _get_session(team)
lookup_url = "{url}/api/access/{owner}/{pkg}/".format(url=get_registry_url(team), owner=owner, pkg=pkg)
response = session.get(lookup_url)
data = response.json()
users = data['users']
print('\n'.join(users)) | [
"def",
"access_list",
"(",
"package",
")",
":",
"team",
",",
"owner",
",",
"pkg",
"=",
"parse_package",
"(",
"package",
")",
"session",
"=",
"_get_session",
"(",
"team",
")",
"lookup_url",
"=",
"\"{url}/api/access/{owner}/{pkg}/\"",
".",
"format",
"(",
"url",
"=",
"get_registry_url",
"(",
"team",
")",
",",
"owner",
"=",
"owner",
",",
"pkg",
"=",
"pkg",
")",
"response",
"=",
"session",
".",
"get",
"(",
"lookup_url",
")",
"data",
"=",
"response",
".",
"json",
"(",
")",
"users",
"=",
"data",
"[",
"'users'",
"]",
"print",
"(",
"'\\n'",
".",
"join",
"(",
"users",
")",
")"
]
| Print list of users who can access a package. | [
"Print",
"list",
"of",
"users",
"who",
"can",
"access",
"a",
"package",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/command.py#L1061-L1074 | train |
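A one-line usage sketch (same assumptions as above; the handle is illustrative):

    from quilt.tools.command import access_list

    # Prints one username per line, as returned by the registry's access endpoint
    access_list("uciml/iris")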
quiltdata/quilt | compiler/quilt/tools/command.py | delete | def delete(package):
"""
Delete a package from the server.
Irreversibly deletes the package along with its history, tags, versions, etc.
"""
team, owner, pkg = parse_package(package)
answer = input(
"Are you sure you want to delete this package and its entire history? "
"Type '%s' to confirm: " % package
)
if answer != package:
print("Not deleting.")
return 1
session = _get_session(team)
session.delete("%s/api/package/%s/%s/" % (get_registry_url(team), owner, pkg))
print("Deleted.") | python | def delete(package):
"""
Delete a package from the server.
Irreversibly deletes the package along with its history, tags, versions, etc.
"""
team, owner, pkg = parse_package(package)
answer = input(
"Are you sure you want to delete this package and its entire history? "
"Type '%s' to confirm: " % package
)
if answer != package:
print("Not deleting.")
return 1
session = _get_session(team)
session.delete("%s/api/package/%s/%s/" % (get_registry_url(team), owner, pkg))
print("Deleted.") | [
"def",
"delete",
"(",
"package",
")",
":",
"team",
",",
"owner",
",",
"pkg",
"=",
"parse_package",
"(",
"package",
")",
"answer",
"=",
"input",
"(",
"\"Are you sure you want to delete this package and its entire history? \"",
"\"Type '%s' to confirm: \"",
"%",
"package",
")",
"if",
"answer",
"!=",
"package",
":",
"print",
"(",
"\"Not deleting.\"",
")",
"return",
"1",
"session",
"=",
"_get_session",
"(",
"team",
")",
"session",
".",
"delete",
"(",
"\"%s/api/package/%s/%s/\"",
"%",
"(",
"get_registry_url",
"(",
"team",
")",
",",
"owner",
",",
"pkg",
")",
")",
"print",
"(",
"\"Deleted.\"",
")"
]
| Delete a package from the server.
Irreversibly deletes the package along with its history, tags, versions, etc. | [
"Delete",
"a",
"package",
"from",
"the",
"server",
"."
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/command.py#L1096-L1116 | train |
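A usage sketch; note the call is interactive, so it is unsuitable for scripts as written (the handle is illustrative):

    from quilt.tools.command import delete

    # You must retype the exact handle at the prompt; anything else aborts and returns 1
    delete("me/scratch")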
quiltdata/quilt | compiler/quilt/tools/command.py | search | def search(query, team=None):
"""
Search for packages
"""
if team is None:
team = _find_logged_in_team()
if team is not None:
session = _get_session(team)
response = session.get("%s/api/search/" % get_registry_url(team), params=dict(q=query))
print("* Packages in team %s" % team)
packages = response.json()['packages']
for pkg in packages:
print(("%s:" % team) + ("%(owner)s/%(name)s" % pkg))
if len(packages) == 0:
print("(No results)")
print("* Packages in public cloud")
public_session = _get_session(None)
response = public_session.get("%s/api/search/" % get_registry_url(None), params=dict(q=query))
packages = response.json()['packages']
for pkg in packages:
print("%(owner)s/%(name)s" % pkg)
if len(packages) == 0:
print("(No results)") | python | def search(query, team=None):
"""
Search for packages
"""
if team is None:
team = _find_logged_in_team()
if team is not None:
session = _get_session(team)
response = session.get("%s/api/search/" % get_registry_url(team), params=dict(q=query))
print("* Packages in team %s" % team)
packages = response.json()['packages']
for pkg in packages:
print(("%s:" % team) + ("%(owner)s/%(name)s" % pkg))
if len(packages) == 0:
print("(No results)")
print("* Packages in public cloud")
public_session = _get_session(None)
response = public_session.get("%s/api/search/" % get_registry_url(None), params=dict(q=query))
packages = response.json()['packages']
for pkg in packages:
print("%(owner)s/%(name)s" % pkg)
if len(packages) == 0:
print("(No results)") | [
"def",
"search",
"(",
"query",
",",
"team",
"=",
"None",
")",
":",
"if",
"team",
"is",
"None",
":",
"team",
"=",
"_find_logged_in_team",
"(",
")",
"if",
"team",
"is",
"not",
"None",
":",
"session",
"=",
"_get_session",
"(",
"team",
")",
"response",
"=",
"session",
".",
"get",
"(",
"\"%s/api/search/\"",
"%",
"get_registry_url",
"(",
"team",
")",
",",
"params",
"=",
"dict",
"(",
"q",
"=",
"query",
")",
")",
"print",
"(",
"\"* Packages in team %s\"",
"%",
"team",
")",
"packages",
"=",
"response",
".",
"json",
"(",
")",
"[",
"'packages'",
"]",
"for",
"pkg",
"in",
"packages",
":",
"print",
"(",
"(",
"\"%s:\"",
"%",
"team",
")",
"+",
"(",
"\"%(owner)s/%(name)s\"",
"%",
"pkg",
")",
")",
"if",
"len",
"(",
"packages",
")",
"==",
"0",
":",
"print",
"(",
"\"(No results)\"",
")",
"print",
"(",
"\"* Packages in public cloud\"",
")",
"public_session",
"=",
"_get_session",
"(",
"None",
")",
"response",
"=",
"public_session",
".",
"get",
"(",
"\"%s/api/search/\"",
"%",
"get_registry_url",
"(",
"None",
")",
",",
"params",
"=",
"dict",
"(",
"q",
"=",
"query",
")",
")",
"packages",
"=",
"response",
".",
"json",
"(",
")",
"[",
"'packages'",
"]",
"for",
"pkg",
"in",
"packages",
":",
"print",
"(",
"\"%(owner)s/%(name)s\"",
"%",
"pkg",
")",
"if",
"len",
"(",
"packages",
")",
"==",
"0",
":",
"print",
"(",
"\"(No results)\"",
")"
]
| Search for packages | [
"Search",
"for",
"packages"
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/command.py#L1118-L1142 | train |
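A usage sketch; the team name is hypothetical:

    from quilt.tools.command import search

    search("iris")                 # team results first (if logged in), then public cloud
    search("iris", team="myteam")  # query a specific team registry, then public cloud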
quiltdata/quilt | compiler/quilt/tools/command.py | ls | def ls(): # pylint:disable=C0103
"""
List all installed Quilt data packages
"""
for pkg_dir in PackageStore.find_store_dirs():
print("%s" % pkg_dir)
packages = PackageStore(pkg_dir).ls_packages()
for package, tag, pkghash in sorted(packages):
print("{0:30} {1:20} {2}".format(package, tag, pkghash)) | python | def ls(): # pylint:disable=C0103
"""
List all installed Quilt data packages
"""
for pkg_dir in PackageStore.find_store_dirs():
print("%s" % pkg_dir)
packages = PackageStore(pkg_dir).ls_packages()
for package, tag, pkghash in sorted(packages):
print("{0:30} {1:20} {2}".format(package, tag, pkghash)) | [
"def",
"ls",
"(",
")",
":",
"# pylint:disable=C0103",
"for",
"pkg_dir",
"in",
"PackageStore",
".",
"find_store_dirs",
"(",
")",
":",
"print",
"(",
"\"%s\"",
"%",
"pkg_dir",
")",
"packages",
"=",
"PackageStore",
"(",
"pkg_dir",
")",
".",
"ls_packages",
"(",
")",
"for",
"package",
",",
"tag",
",",
"pkghash",
"in",
"sorted",
"(",
"packages",
")",
":",
"print",
"(",
"\"{0:30} {1:20} {2}\"",
".",
"format",
"(",
"package",
",",
"tag",
",",
"pkghash",
")",
")"
]
| List all installed Quilt data packages | [
"List",
"all",
"installed",
"Quilt",
"data",
"packages"
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/command.py#L1144-L1152 | train |
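A usage sketch; ls takes no arguments and only reads local package stores:

    from quilt.tools.command import ls

    # One block per store directory; rows are "package  tag  hash" in fixed-width columns
    ls()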
quiltdata/quilt | compiler/quilt/tools/command.py | inspect | def inspect(package):
"""
Inspect package details
"""
team, owner, pkg = parse_package(package)
store, pkgroot = PackageStore.find_package(team, owner, pkg)
if pkgroot is None:
raise CommandException("Package {package} not found.".format(package=package))
def _print_children(children, prefix, path):
for idx, (name, child) in enumerate(children):
if idx == len(children) - 1:
new_prefix = u"└─"
new_child_prefix = u"  "
else:
new_prefix = u"├─"
new_child_prefix = u"│ "
_print_node(child, prefix + new_prefix, prefix + new_child_prefix, name, path)
def _print_node(node, prefix, child_prefix, name, path):
name_prefix = u"─ "
if isinstance(node, GroupNode):
children = list(node.children.items())
if children:
name_prefix = u"┬ "
print(prefix + name_prefix + name)
_print_children(children, child_prefix, path + name)
elif node.metadata['q_target'] == TargetType.PANDAS.value:
df = store.load_dataframe(node.hashes)
assert isinstance(df, pd.DataFrame)
types = ", ".join("%r: %s" % (name, dtype) for name, dtype in df.dtypes.items())
if len(types) > 64:
types = types[:63] + u"…"
info = "shape %s, types %s" % (df.shape, types)
print(prefix + name_prefix + name + ": " + info)
else:
print(prefix + name_prefix + name)
print(store.package_path(team, owner, pkg))
_print_children(children=pkgroot.children.items(), prefix='', path='') | python | def inspect(package):
"""
Inspect package details
"""
team, owner, pkg = parse_package(package)
store, pkgroot = PackageStore.find_package(team, owner, pkg)
if pkgroot is None:
raise CommandException("Package {package} not found.".format(package=package))
def _print_children(children, prefix, path):
for idx, (name, child) in enumerate(children):
if idx == len(children) - 1:
new_prefix = u"└─"
new_child_prefix = u"  "
else:
new_prefix = u"├─"
new_child_prefix = u"│ "
_print_node(child, prefix + new_prefix, prefix + new_child_prefix, name, path)
def _print_node(node, prefix, child_prefix, name, path):
name_prefix = u"─ "
if isinstance(node, GroupNode):
children = list(node.children.items())
if children:
name_prefix = u"┬ "
print(prefix + name_prefix + name)
_print_children(children, child_prefix, path + name)
elif node.metadata['q_target'] == TargetType.PANDAS.value:
df = store.load_dataframe(node.hashes)
assert isinstance(df, pd.DataFrame)
types = ", ".join("%r: %s" % (name, dtype) for name, dtype in df.dtypes.items())
if len(types) > 64:
types = types[:63] + u"…"
info = "shape %s, types %s" % (df.shape, types)
print(prefix + name_prefix + name + ": " + info)
else:
print(prefix + name_prefix + name)
print(store.package_path(team, owner, pkg))
_print_children(children=pkgroot.children.items(), prefix='', path='') | [
"def",
"inspect",
"(",
"package",
")",
":",
"team",
",",
"owner",
",",
"pkg",
"=",
"parse_package",
"(",
"package",
")",
"store",
",",
"pkgroot",
"=",
"PackageStore",
".",
"find_package",
"(",
"team",
",",
"owner",
",",
"pkg",
")",
"if",
"pkgroot",
"is",
"None",
":",
"raise",
"CommandException",
"(",
"\"Package {package} not found.\"",
".",
"format",
"(",
"package",
"=",
"package",
")",
")",
"def",
"_print_children",
"(",
"children",
",",
"prefix",
",",
"path",
")",
":",
"for",
"idx",
",",
"(",
"name",
",",
"child",
")",
"in",
"enumerate",
"(",
"children",
")",
":",
"if",
"idx",
"==",
"len",
"(",
"children",
")",
"-",
"1",
":",
"new_prefix",
"=",
"u\"ββ\"",
"new_child_prefix",
"=",
"u\" \"",
"else",
":",
"new_prefix",
"=",
"u\"ββ\"",
"new_child_prefix",
"=",
"u\"β \"",
"_print_node",
"(",
"child",
",",
"prefix",
"+",
"new_prefix",
",",
"prefix",
"+",
"new_child_prefix",
",",
"name",
",",
"path",
")",
"def",
"_print_node",
"(",
"node",
",",
"prefix",
",",
"child_prefix",
",",
"name",
",",
"path",
")",
":",
"name_prefix",
"=",
"u\"β \"",
"if",
"isinstance",
"(",
"node",
",",
"GroupNode",
")",
":",
"children",
"=",
"list",
"(",
"node",
".",
"children",
".",
"items",
"(",
")",
")",
"if",
"children",
":",
"name_prefix",
"=",
"u\"β¬ \"",
"print",
"(",
"prefix",
"+",
"name_prefix",
"+",
"name",
")",
"_print_children",
"(",
"children",
",",
"child_prefix",
",",
"path",
"+",
"name",
")",
"elif",
"node",
".",
"metadata",
"[",
"'q_target'",
"]",
"==",
"TargetType",
".",
"PANDAS",
".",
"value",
":",
"df",
"=",
"store",
".",
"load_dataframe",
"(",
"node",
".",
"hashes",
")",
"assert",
"isinstance",
"(",
"df",
",",
"pd",
".",
"DataFrame",
")",
"types",
"=",
"\", \"",
".",
"join",
"(",
"\"%r: %s\"",
"%",
"(",
"name",
",",
"dtype",
")",
"for",
"name",
",",
"dtype",
"in",
"df",
".",
"dtypes",
".",
"items",
"(",
")",
")",
"if",
"len",
"(",
"types",
")",
">",
"64",
":",
"types",
"=",
"types",
"[",
":",
"63",
"]",
"+",
"u\"β¦\"",
"info",
"=",
"\"shape %s, types %s\"",
"%",
"(",
"df",
".",
"shape",
",",
"types",
")",
"print",
"(",
"prefix",
"+",
"name_prefix",
"+",
"name",
"+",
"\": \"",
"+",
"info",
")",
"else",
":",
"print",
"(",
"prefix",
"+",
"name_prefix",
"+",
"name",
")",
"print",
"(",
"store",
".",
"package_path",
"(",
"team",
",",
"owner",
",",
"pkg",
")",
")",
"_print_children",
"(",
"children",
"=",
"pkgroot",
".",
"children",
".",
"items",
"(",
")",
",",
"prefix",
"=",
"''",
",",
"path",
"=",
"''",
")"
]
| Inspect package details | [
"Inspect",
"package",
"details"
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/command.py#L1154-L1194 | train |
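A usage sketch (handle illustrative; the package must already be installed locally):

    from quilt.tools.command import inspect

    # Prints the local package path, then the node tree using ┬/├─/└─ prefixes;
    # pandas leaves also report their shape and column dtypes
    inspect("uciml/iris")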
quiltdata/quilt | compiler/quilt/tools/command.py | load | def load(pkginfo, hash=None):
"""
functional interface to "from quilt.data.USER import PKG"
"""
node, pkgroot, info = _load(pkginfo, hash)
for subnode_name in info.subpath:
node = node[subnode_name]
return node | python | def load(pkginfo, hash=None):
"""
functional interface to "from quilt.data.USER import PKG"
"""
node, pkgroot, info = _load(pkginfo, hash)
for subnode_name in info.subpath:
node = node[subnode_name]
return node | [
"def",
"load",
"(",
"pkginfo",
",",
"hash",
"=",
"None",
")",
":",
"node",
",",
"pkgroot",
",",
"info",
"=",
"_load",
"(",
"pkginfo",
",",
"hash",
")",
"for",
"subnode_name",
"in",
"info",
".",
"subpath",
":",
"node",
"=",
"node",
"[",
"subnode_name",
"]",
"return",
"node"
]
| functional interface to "from quilt.data.USER import PKG" | [
"functional",
"interface",
"to",
"from",
"quilt",
".",
"data",
".",
"USER",
"import",
"PKG"
]
| 651853e7e89a8af86e0ff26167e752efa5878c12 | https://github.com/quiltdata/quilt/blob/651853e7e89a8af86e0ff26167e752efa5878c12/compiler/quilt/tools/command.py#L1351-L1359 | train |
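A usage sketch; the subpath tables/iris is hypothetical and is resolved one child node at a time:

    from quilt.tools.command import load

    pkg = load("uciml/iris")               # root node, like "from quilt.data.uciml import iris"
    node = load("uciml/iris/tables/iris")  # descend into a subpath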
CalebBell/fluids | fluids/core.py | c_ideal_gas | def c_ideal_gas(T, k, MW):
r'''Calculates speed of sound `c` in an ideal gas at temperature T.
.. math::
c = \sqrt{kR_{specific}T}
Parameters
----------
T : float
Temperature of fluid, [K]
k : float
Isentropic exponent of fluid, [-]
MW : float
Molecular weight of fluid, [g/mol]
Returns
-------
c : float
Speed of sound in fluid, [m/s]
Notes
-----
Used in compressible flow calculations.
Note that the gas constant used is the specific gas constant:
.. math::
R_{specific} = R\frac{1000}{MW}
Examples
--------
>>> c_ideal_gas(T=303, k=1.4, MW=28.96)
348.9820953185441
References
----------
.. [1] Green, Don, and Robert Perry. Perry's Chemical Engineers' Handbook,
Eighth Edition. McGraw-Hill Professional, 2007.
.. [2] Cengel, Yunus, and John Cimbala. Fluid Mechanics: Fundamentals and
Applications. Boston: McGraw Hill Higher Education, 2006.
'''
Rspecific = R*1000./MW
return (k*Rspecific*T)**0.5 | python | def c_ideal_gas(T, k, MW):
r'''Calculates speed of sound `c` in an ideal gas at temperature T.
.. math::
c = \sqrt{kR_{specific}T}
Parameters
----------
T : float
Temperature of fluid, [K]
k : float
Isentropic exponent of fluid, [-]
MW : float
Molecular weight of fluid, [g/mol]
Returns
-------
c : float
Speed of sound in fluid, [m/s]
Notes
-----
Used in compressible flow calculations.
Note that the gas constant used is the specific gas constant:
.. math::
R_{specific} = R\frac{1000}{MW}
Examples
--------
>>> c_ideal_gas(T=303, k=1.4, MW=28.96)
348.9820953185441
References
----------
.. [1] Green, Don, and Robert Perry. Perry's Chemical Engineers' Handbook,
Eighth Edition. McGraw-Hill Professional, 2007.
.. [2] Cengel, Yunus, and John Cimbala. Fluid Mechanics: Fundamentals and
Applications. Boston: McGraw Hill Higher Education, 2006.
'''
Rspecific = R*1000./MW
return (k*Rspecific*T)**0.5 | [
"def",
"c_ideal_gas",
"(",
"T",
",",
"k",
",",
"MW",
")",
":",
"Rspecific",
"=",
"R",
"*",
"1000.",
"/",
"MW",
"return",
"(",
"k",
"*",
"Rspecific",
"*",
"T",
")",
"**",
"0.5"
]
| r'''Calculates speed of sound `c` in an ideal gas at temperature T.
.. math::
c = \sqrt{kR_{specific}T}
Parameters
----------
T : float
Temperature of fluid, [K]
k : float
Isentropic exponent of fluid, [-]
MW : float
Molecular weight of fluid, [g/mol]
Returns
-------
c : float
Speed of sound in fluid, [m/s]
Notes
-----
Used in compressible flow calculations.
Note that the gas constant used is the specific gas constant:
.. math::
R_{specific} = R\frac{1000}{MW}
Examples
--------
>>> c_ideal_gas(T=303, k=1.4, MW=28.96)
348.9820953185441
References
----------
.. [1] Green, Don, and Robert Perry. Perry's Chemical Engineers' Handbook,
Eighth Edition. McGraw-Hill Professional, 2007.
.. [2] Cengel, Yunus, and John Cimbala. Fluid Mechanics: Fundamentals and
Applications. Boston: McGraw Hill Higher Education, 2006. | [
"r",
"Calculates",
"speed",
"of",
"sound",
"c",
"in",
"an",
"ideal",
"gas",
"at",
"temperature",
"T",
"."
]
| 57f556752e039f1d3e5a822f408c184783db2828 | https://github.com/CalebBell/fluids/blob/57f556752e039f1d3e5a822f408c184783db2828/fluids/core.py#L82-L123 | train |
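A worked check of the docstring example, done by hand so the unit conversion in the Notes section is explicit (R here is the molar gas constant in J/(mol*K), close to the constant fluids imports):

    from math import sqrt

    R = 8.3145                      # J/(mol*K)
    Rspecific = R * 1000.0 / 28.96  # J/(kg*K), per R_specific = R*1000/MW
    print(sqrt(1.4 * Rspecific * 303))  # ~348.98 m/s, matching c_ideal_gas(T=303, k=1.4, MW=28.96)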
CalebBell/fluids | fluids/core.py | Reynolds | def Reynolds(V, D, rho=None, mu=None, nu=None):
r'''Calculates Reynolds number or `Re` for a fluid with the given
properties for the specified velocity and diameter.
.. math::
Re = \frac{D \cdot V}{\nu} = \frac{\rho V D}{\mu}
Inputs either of any of the following sets:
* V, D, density `rho` and dynamic viscosity `mu`
* V, D, and kinematic viscosity `nu`
Parameters
----------
V : float
Velocity [m/s]
D : float
Diameter [m]
rho : float, optional
Density, [kg/m^3]
mu : float, optional
Dynamic viscosity, [Pa*s]
nu : float, optional
Kinematic viscosity, [m^2/s]
Returns
-------
Re : float
Reynolds number []
Notes
-----
.. math::
Re = \frac{\text{Momentum}}{\text{Viscosity}}
An error is raised if none of the required input sets are provided.
Examples
--------
>>> Reynolds(2.5, 0.25, 1.1613, 1.9E-5)
38200.65789473684
>>> Reynolds(2.5, 0.25, nu=1.636e-05)
38202.93398533008
References
----------
.. [1] Green, Don, and Robert Perry. Perry's Chemical Engineers' Handbook,
Eighth Edition. McGraw-Hill Professional, 2007.
.. [2] Cengel, Yunus, and John Cimbala. Fluid Mechanics: Fundamentals and
Applications. Boston: McGraw Hill Higher Education, 2006.
'''
if rho and mu:
nu = mu/rho
elif not nu:
raise Exception('Either density and viscosity, or kinematic viscosity, \
is needed')
return V*D/nu | python | def Reynolds(V, D, rho=None, mu=None, nu=None):
r'''Calculates Reynolds number or `Re` for a fluid with the given
properties for the specified velocity and diameter.
.. math::
Re = \frac{D \cdot V}{\nu} = \frac{\rho V D}{\mu}
Inputs either of any of the following sets:
* V, D, density `rho` and dynamic viscosity `mu`
* V, D, and kinematic viscosity `nu`
Parameters
----------
V : float
Velocity [m/s]
D : float
Diameter [m]
rho : float, optional
Density, [kg/m^3]
mu : float, optional
Dynamic viscosity, [Pa*s]
nu : float, optional
Kinematic viscosity, [m^2/s]
Returns
-------
Re : float
Reynolds number []
Notes
-----
.. math::
Re = \frac{\text{Momentum}}{\text{Viscosity}}
An error is raised if none of the required input sets are provided.
Examples
--------
>>> Reynolds(2.5, 0.25, 1.1613, 1.9E-5)
38200.65789473684
>>> Reynolds(2.5, 0.25, nu=1.636e-05)
38202.93398533008
References
----------
.. [1] Green, Don, and Robert Perry. Perry's Chemical Engineers' Handbook,
Eighth Edition. McGraw-Hill Professional, 2007.
.. [2] Cengel, Yunus, and John Cimbala. Fluid Mechanics: Fundamentals and
Applications. Boston: McGraw Hill Higher Education, 2006.
'''
if rho and mu:
nu = mu/rho
elif not nu:
raise Exception('Either density and viscosity, or kinematic viscosity, \
is needed')
return V*D/nu | [
"def",
"Reynolds",
"(",
"V",
",",
"D",
",",
"rho",
"=",
"None",
",",
"mu",
"=",
"None",
",",
"nu",
"=",
"None",
")",
":",
"if",
"rho",
"and",
"mu",
":",
"nu",
"=",
"mu",
"/",
"rho",
"elif",
"not",
"nu",
":",
"raise",
"Exception",
"(",
"'Either density and viscosity, or dynamic viscosity, \\\n is needed'",
")",
"return",
"V",
"*",
"D",
"/",
"nu"
]
| r'''Calculates Reynolds number or `Re` for a fluid with the given
properties for the specified velocity and diameter.
.. math::
Re = \frac{D \cdot V}{\nu} = \frac{\rho V D}{\mu}
Inputs either of any of the following sets:
* V, D, density `rho` and dynamic viscosity `mu`
* V, D, and kinematic viscosity `nu`
Parameters
----------
V : float
Velocity [m/s]
D : float
Diameter [m]
rho : float, optional
Density, [kg/m^3]
mu : float, optional
Dynamic viscosity, [Pa*s]
nu : float, optional
Kinematic viscosity, [m^2/s]
Returns
-------
Re : float
Reynolds number []
Notes
-----
.. math::
Re = \frac{\text{Momentum}}{\text{Viscosity}}
An error is raised if none of the required input sets are provided.
Examples
--------
>>> Reynolds(2.5, 0.25, 1.1613, 1.9E-5)
38200.65789473684
>>> Reynolds(2.5, 0.25, nu=1.636e-05)
38202.93398533008
References
----------
.. [1] Green, Don, and Robert Perry. Perry's Chemical Engineers' Handbook,
Eighth Edition. McGraw-Hill Professional, 2007.
.. [2] Cengel, Yunus, and John Cimbala. Fluid Mechanics: Fundamentals and
Applications. Boston: McGraw Hill Higher Education, 2006. | [
"r",
"Calculates",
"Reynolds",
"number",
"or",
"Re",
"for",
"a",
"fluid",
"with",
"the",
"given",
"properties",
"for",
"the",
"specified",
"velocity",
"and",
"diameter",
"."
]
| 57f556752e039f1d3e5a822f408c184783db2828 | https://github.com/CalebBell/fluids/blob/57f556752e039f1d3e5a822f408c184783db2828/fluids/core.py#L128-L184 | train |
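A usage sketch exercising both accepted input sets; the numbers are the docstring's own examples:

    from fluids.core import Reynolds

    Reynolds(2.5, 0.25, rho=1.1613, mu=1.9e-5)  # rho/mu route -> ~38200.66
    Reynolds(2.5, 0.25, nu=1.636e-5)            # nu route     -> ~38202.93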