repo (string, 7-55 chars) | path (string, 4-127 chars) | func_name (string, 1-88 chars) | original_string (string, 75-19.8k chars) | language (string, 1 class) | code (string, 75-19.8k chars) | code_tokens (sequence) | docstring (string, 3-17.3k chars) | docstring_tokens (sequence) | sha (string, 40 chars) | url (string, 87-242 chars) | partition (string, 1 class)
---|---|---|---|---|---|---|---|---|---|---|---|
thanethomson/statik | statik/autogen.py | generate_model_file | def generate_model_file(filename, project, model, fields):
"""Creates a webpage for a given instance of a model."""
for field in fields:
field.type = field.__class__.__name__
content = open(os.path.join(os.path.dirname(__file__), 'templates/model_page.html'), 'r').read()
engine = StatikTemplateEngine(project)
template = engine.create_template(content)
# create context and update from project.config
context = {'model': model,
'fields': fields}
context.update(dict(project.config.context_static))
string = template.render(context)
with open(filename, 'w') as file:
file.write(string) | python | def generate_model_file(filename, project, model, fields):
"""Creates a webpage for a given instance of a model."""
for field in fields:
field.type = field.__class__.__name__
content = open(os.path.join(os.path.dirname(__file__), 'templates/model_page.html'), 'r').read()
engine = StatikTemplateEngine(project)
template = engine.create_template(content)
# create context and update from project.config
context = {'model': model,
'fields': fields}
context.update(dict(project.config.context_static))
string = template.render(context)
with open(filename, 'w') as file:
file.write(string) | [
"def",
"generate_model_file",
"(",
"filename",
",",
"project",
",",
"model",
",",
"fields",
")",
":",
"for",
"field",
"in",
"fields",
":",
"field",
".",
"type",
"=",
"field",
".",
"__class__",
".",
"__name__",
"content",
"=",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
",",
"'templates/model_page.html'",
")",
",",
"'r'",
")",
".",
"read",
"(",
")",
"engine",
"=",
"StatikTemplateEngine",
"(",
"project",
")",
"template",
"=",
"engine",
".",
"create_template",
"(",
"content",
")",
"# create context and update from project.config",
"context",
"=",
"{",
"'model'",
":",
"model",
",",
"'fields'",
":",
"fields",
"}",
"context",
".",
"update",
"(",
"dict",
"(",
"project",
".",
"config",
".",
"context_static",
")",
")",
"string",
"=",
"template",
".",
"render",
"(",
"context",
")",
"with",
"open",
"(",
"filename",
",",
"'w'",
")",
"as",
"file",
":",
"file",
".",
"write",
"(",
"string",
")"
] | Creates a webpage for a given instance of a model. | [
"Creates",
"a",
"webpage",
"for",
"a",
"given",
"instance",
"of",
"a",
"model",
"."
] | 56b1b5a2cb05a97afa81f428bfcefc833e935b8d | https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/autogen.py#L67-L84 | train |
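A side note on the row above: `generate_model_file` reads the bundled template via a bare `open(...).read()`, leaving it to garbage collection to close the handle. Below is a minimal sketch of the same read using a context manager; the path layout mirrors the original call, but the helper name is ours, not part of Statik.

```python
import os

# Minimal sketch, not Statik API: read the bundled model-page template
# with a context manager so the handle is closed deterministically.
def read_model_page_template(base_dir):
    template_path = os.path.join(base_dir, 'templates/model_page.html')
    with open(template_path, 'r') as f:
        return f.read()
```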
thanethomson/statik | statik/context.py | StatikContext.build_dynamic | def build_dynamic(self, db, extra=None, safe_mode=False):
"""Builds the dynamic context based on our current dynamic context entity and the given
database."""
result = dict()
for var, query in iteritems(self.dynamic):
result[var] = db.query(query, safe_mode=safe_mode, additional_locals=extra)
return result | python | def build_dynamic(self, db, extra=None, safe_mode=False):
"""Builds the dynamic context based on our current dynamic context entity and the given
database."""
result = dict()
for var, query in iteritems(self.dynamic):
result[var] = db.query(query, safe_mode=safe_mode, additional_locals=extra)
return result | [
"def",
"build_dynamic",
"(",
"self",
",",
"db",
",",
"extra",
"=",
"None",
",",
"safe_mode",
"=",
"False",
")",
":",
"result",
"=",
"dict",
"(",
")",
"for",
"var",
",",
"query",
"in",
"iteritems",
"(",
"self",
".",
"dynamic",
")",
":",
"result",
"[",
"var",
"]",
"=",
"db",
".",
"query",
"(",
"query",
",",
"safe_mode",
"=",
"safe_mode",
",",
"additional_locals",
"=",
"extra",
")",
"return",
"result"
] | Builds the dynamic context based on our current dynamic context entity and the given
database. | [
"Builds",
"the",
"dynamic",
"context",
"based",
"on",
"our",
"current",
"dynamic",
"context",
"entity",
"and",
"the",
"given",
"database",
"."
] | 56b1b5a2cb05a97afa81f428bfcefc833e935b8d | https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/context.py#L47-L53 | train |
thanethomson/statik | statik/context.py | StatikContext.build_for_each | def build_for_each(self, db, safe_mode=False, extra=None):
"""Builds the for-each context."""
result = dict()
for var, query in iteritems(self.for_each):
result[var] = db.query(
query,
additional_locals=extra,
safe_mode=safe_mode
)
return result | python | def build_for_each(self, db, safe_mode=False, extra=None):
"""Builds the for-each context."""
result = dict()
for var, query in iteritems(self.for_each):
result[var] = db.query(
query,
additional_locals=extra,
safe_mode=safe_mode
)
return result | [
"def",
"build_for_each",
"(",
"self",
",",
"db",
",",
"safe_mode",
"=",
"False",
",",
"extra",
"=",
"None",
")",
":",
"result",
"=",
"dict",
"(",
")",
"for",
"var",
",",
"query",
"in",
"iteritems",
"(",
"self",
".",
"for_each",
")",
":",
"result",
"[",
"var",
"]",
"=",
"db",
".",
"query",
"(",
"query",
",",
"additional_locals",
"=",
"extra",
",",
"safe_mode",
"=",
"safe_mode",
")",
"return",
"result"
] | Builds the for-each context. | [
"Builds",
"the",
"for",
"-",
"each",
"context",
"."
] | 56b1b5a2cb05a97afa81f428bfcefc833e935b8d | https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/context.py#L55-L64 | train |
thanethomson/statik | statik/context.py | StatikContext.build | def build(self, db=None, safe_mode=False, for_each_inst=None, extra=None):
"""Builds a dictionary that can be used as context for template rendering."""
result = copy(self.initial)
result.update(self.static)
if self.dynamic:
result.update(self.build_dynamic(db, extra=extra, safe_mode=safe_mode))
if self.for_each and for_each_inst:
result.update(self.build_for_each(db, safe_mode=safe_mode, extra=extra))
if isinstance(extra, dict):
result.update(extra)
return result | python | def build(self, db=None, safe_mode=False, for_each_inst=None, extra=None):
"""Builds a dictionary that can be used as context for template rendering."""
result = copy(self.initial)
result.update(self.static)
if self.dynamic:
result.update(self.build_dynamic(db, extra=extra, safe_mode=safe_mode))
if self.for_each and for_each_inst:
result.update(self.build_for_each(db, safe_mode=safe_mode, extra=extra))
if isinstance(extra, dict):
result.update(extra)
return result | [
"def",
"build",
"(",
"self",
",",
"db",
"=",
"None",
",",
"safe_mode",
"=",
"False",
",",
"for_each_inst",
"=",
"None",
",",
"extra",
"=",
"None",
")",
":",
"result",
"=",
"copy",
"(",
"self",
".",
"initial",
")",
"result",
".",
"update",
"(",
"self",
".",
"static",
")",
"if",
"self",
".",
"dynamic",
":",
"result",
".",
"update",
"(",
"self",
".",
"build_dynamic",
"(",
"db",
",",
"extra",
"=",
"extra",
",",
"safe_mode",
"=",
"safe_mode",
")",
")",
"if",
"self",
".",
"for_each",
"and",
"for_each_inst",
":",
"result",
".",
"update",
"(",
"self",
".",
"build_for_each",
"(",
"db",
",",
"safe_mode",
"=",
"safe_mode",
",",
"extra",
"=",
"extra",
")",
")",
"if",
"isinstance",
"(",
"extra",
",",
"dict",
")",
":",
"result",
".",
"update",
"(",
"extra",
")",
"return",
"result"
] | Builds a dictionary that can be used as context for template rendering. | [
"Builds",
"a",
"dictionary",
"that",
"can",
"be",
"used",
"as",
"context",
"for",
"template",
"rendering",
"."
] | 56b1b5a2cb05a97afa81f428bfcefc833e935b8d | https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/context.py#L66-L76 | train |
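Taken together, the three `StatikContext` rows above define a fixed merge order: the initial context, then static entries, then dynamic query results, then for-each results, then the `extra` dict, with later updates overriding earlier keys. A toy illustration of that precedence with plain dicts follows; all values are made up, and `StatikContext`'s constructor is not included in this sample.

```python
from copy import copy

# Toy illustration of StatikContext.build() precedence:
# initial -> static -> dynamic -> extra; later keys win.
initial = {'site_name': 'demo', 'page_title': 'Home'}
static = {'page_title': 'About'}            # overrides initial
dynamic = {'posts': ['post-1', 'post-2']}   # would come from db.query(...)
extra = {'page_title': 'About (draft)'}     # overrides everything before it

result = copy(initial)
result.update(static)
result.update(dynamic)
result.update(extra)
print(result)
# -> {'site_name': 'demo', 'page_title': 'About (draft)', 'posts': ['post-1', 'post-2']}
```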
thanethomson/statik | statik/templating.py | template_exception_handler | def template_exception_handler(fn, error_context, filename=None):
"""Calls the given function, attempting to catch any template-related errors, and
converts the error to a Statik TemplateError instance. Returns the result returned
by the function itself."""
error_message = None
if filename:
error_context.update(filename=filename)
try:
return fn()
except jinja2.TemplateSyntaxError as exc:
error_context.update(filename=exc.filename, line_no=exc.lineno)
error_message = exc.message
except jinja2.TemplateError as exc:
error_message = exc.message
except Exception as exc:
error_message = "%s" % exc
raise TemplateError(message=error_message, context=error_context) | python | def template_exception_handler(fn, error_context, filename=None):
"""Calls the given function, attempting to catch any template-related errors, and
converts the error to a Statik TemplateError instance. Returns the result returned
by the function itself."""
error_message = None
if filename:
error_context.update(filename=filename)
try:
return fn()
except jinja2.TemplateSyntaxError as exc:
error_context.update(filename=exc.filename, line_no=exc.lineno)
error_message = exc.message
except jinja2.TemplateError as exc:
error_message = exc.message
except Exception as exc:
error_message = "%s" % exc
raise TemplateError(message=error_message, context=error_context) | [
"def",
"template_exception_handler",
"(",
"fn",
",",
"error_context",
",",
"filename",
"=",
"None",
")",
":",
"error_message",
"=",
"None",
"if",
"filename",
":",
"error_context",
".",
"update",
"(",
"filename",
"=",
"filename",
")",
"try",
":",
"return",
"fn",
"(",
")",
"except",
"jinja2",
".",
"TemplateSyntaxError",
"as",
"exc",
":",
"error_context",
".",
"update",
"(",
"filename",
"=",
"exc",
".",
"filename",
",",
"line_no",
"=",
"exc",
".",
"lineno",
")",
"error_message",
"=",
"exc",
".",
"message",
"except",
"jinja2",
".",
"TemplateError",
"as",
"exc",
":",
"error_message",
"=",
"exc",
".",
"message",
"except",
"Exception",
"as",
"exc",
":",
"error_message",
"=",
"\"%s\"",
"%",
"exc",
"raise",
"TemplateError",
"(",
"message",
"=",
"error_message",
",",
"context",
"=",
"error_context",
")"
] | Calls the given function, attempting to catch any template-related errors, and
converts the error to a Statik TemplateError instance. Returns the result returned
by the function itself. | [
"Calls",
"the",
"given",
"function",
"attempting",
"to",
"catch",
"any",
"template",
"-",
"related",
"errors",
"and",
"converts",
"the",
"error",
"to",
"a",
"Statik",
"TemplateError",
"instance",
".",
"Returns",
"the",
"result",
"returned",
"by",
"the",
"function",
"itself",
"."
] | 56b1b5a2cb05a97afa81f428bfcefc833e935b8d | https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/templating.py#L53-L70 | train |
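Usage shape for the handler above: the caller wraps the risky template operation in a zero-argument callable so that any Jinja2 (or other) exception is normalised into a single `TemplateError`. In this sketch `engine`, `template_str`, and `error_context` are assumed to exist in the caller's scope, as they do in `StatikTemplateEngine.create_template` further down.

```python
# Hedged usage sketch; `engine`, `template_str` and `error_context`
# are assumed to be defined by the caller.
template = template_exception_handler(
    lambda: engine.create_template(template_str),
    error_context,
    filename='model_page.html',
)
```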
thanethomson/statik | statik/templating.py | StatikTemplateEngine.create_template | def create_template(self, s, provider_name=None):
"""Creates a template from the given string based on the specified provider or the provider with
highest precedence.
Args:
s: The string to convert to a template.
provider_name: The name of the provider to use to create the template.
"""
if provider_name is None:
provider_name = self.supported_providers[0]
return template_exception_handler(
lambda: self.get_provider(provider_name).create_template(s),
self.error_context
) | python | def create_template(self, s, provider_name=None):
"""Creates a template from the given string based on the specified provider or the provider with
highest precedence.
Args:
s: The string to convert to a template.
provider_name: The name of the provider to use to create the template.
"""
if provider_name is None:
provider_name = self.supported_providers[0]
return template_exception_handler(
lambda: self.get_provider(provider_name).create_template(s),
self.error_context
) | [
"def",
"create_template",
"(",
"self",
",",
"s",
",",
"provider_name",
"=",
"None",
")",
":",
"if",
"provider_name",
"is",
"None",
":",
"provider_name",
"=",
"self",
".",
"supported_providers",
"[",
"0",
"]",
"return",
"template_exception_handler",
"(",
"lambda",
":",
"self",
".",
"get_provider",
"(",
"provider_name",
")",
".",
"create_template",
"(",
"s",
")",
",",
"self",
".",
"error_context",
")"
] | Creates a template from the given string based on the specified provider or the provider with
highest precedence.
Args:
s: The string to convert to a template.
provider_name: The name of the provider to use to create the template. | [
"Creates",
"a",
"template",
"from",
"the",
"given",
"string",
"based",
"on",
"the",
"specified",
"provider",
"or",
"the",
"provider",
"with",
"highest",
"precedence",
"."
] | 56b1b5a2cb05a97afa81f428bfcefc833e935b8d | https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/templating.py#L197-L210 | train |
thanethomson/statik | statik/fields.py | construct_field | def construct_field(model_name, field_name, field_type, all_models, **kwargs):
"""Helper function to build a field from the given field name and
type.
Args:
model_name: The name of the model for which we're building this field.
field_name: The name of the field to build.
field_type: A string indicator as to which field type must be built.
all_models: A list containing the names of all of the models, which
will help us when building foreign key lookups.
"""
field_type_parts = field_type.split('->')
_field_type = field_type_parts[0].strip().split('[]')[0].strip()
back_populates = field_type_parts[1].strip() if len(field_type_parts) > 1 else None
error_context = kwargs.pop('error_context', StatikErrorContext())
_kwargs = copy(kwargs)
_kwargs['back_populates'] = back_populates
if _field_type not in FIELD_TYPES and _field_type not in all_models:
raise InvalidFieldTypeError(
model_name,
field_name,
context=error_context
)
if _field_type in FIELD_TYPES:
return FIELD_TYPES[_field_type](field_name, **_kwargs)
if field_type_parts[0].strip().endswith('[]'):
return StatikManyToManyField(field_name, _field_type, **_kwargs)
return StatikForeignKeyField(field_name, _field_type, **_kwargs) | python | def construct_field(model_name, field_name, field_type, all_models, **kwargs):
"""Helper function to build a field from the given field name and
type.
Args:
model_name: The name of the model for which we're building this field.
field_name: The name of the field to build.
field_type: A string indicator as to which field type must be built.
all_models: A list containing the names of all of the models, which
will help us when building foreign key lookups.
"""
field_type_parts = field_type.split('->')
_field_type = field_type_parts[0].strip().split('[]')[0].strip()
back_populates = field_type_parts[1].strip() if len(field_type_parts) > 1 else None
error_context = kwargs.pop('error_context', StatikErrorContext())
_kwargs = copy(kwargs)
_kwargs['back_populates'] = back_populates
if _field_type not in FIELD_TYPES and _field_type not in all_models:
raise InvalidFieldTypeError(
model_name,
field_name,
context=error_context
)
if _field_type in FIELD_TYPES:
return FIELD_TYPES[_field_type](field_name, **_kwargs)
if field_type_parts[0].strip().endswith('[]'):
return StatikManyToManyField(field_name, _field_type, **_kwargs)
return StatikForeignKeyField(field_name, _field_type, **_kwargs) | [
"def",
"construct_field",
"(",
"model_name",
",",
"field_name",
",",
"field_type",
",",
"all_models",
",",
"*",
"*",
"kwargs",
")",
":",
"field_type_parts",
"=",
"field_type",
".",
"split",
"(",
"'->'",
")",
"_field_type",
"=",
"field_type_parts",
"[",
"0",
"]",
".",
"strip",
"(",
")",
".",
"split",
"(",
"'[]'",
")",
"[",
"0",
"]",
".",
"strip",
"(",
")",
"back_populates",
"=",
"field_type_parts",
"[",
"1",
"]",
".",
"strip",
"(",
")",
"if",
"len",
"(",
"field_type_parts",
")",
">",
"1",
"else",
"None",
"error_context",
"=",
"kwargs",
".",
"pop",
"(",
"'error_context'",
",",
"StatikErrorContext",
"(",
")",
")",
"_kwargs",
"=",
"copy",
"(",
"kwargs",
")",
"_kwargs",
"[",
"'back_populates'",
"]",
"=",
"back_populates",
"if",
"_field_type",
"not",
"in",
"FIELD_TYPES",
"and",
"_field_type",
"not",
"in",
"all_models",
":",
"raise",
"InvalidFieldTypeError",
"(",
"model_name",
",",
"field_name",
",",
"context",
"=",
"error_context",
")",
"if",
"_field_type",
"in",
"FIELD_TYPES",
":",
"return",
"FIELD_TYPES",
"[",
"_field_type",
"]",
"(",
"field_name",
",",
"*",
"*",
"_kwargs",
")",
"if",
"field_type_parts",
"[",
"0",
"]",
".",
"strip",
"(",
")",
".",
"endswith",
"(",
"'[]'",
")",
":",
"return",
"StatikManyToManyField",
"(",
"field_name",
",",
"_field_type",
",",
"*",
"*",
"_kwargs",
")",
"return",
"StatikForeignKeyField",
"(",
"field_name",
",",
"_field_type",
",",
"*",
"*",
"_kwargs",
")"
] | Helper function to build a field from the given field name and
type.
Args:
model_name: The name of the model for which we're building this field.
field_name: The name of the field to build.
field_type: A string indicator as to which field type must be built.
all_models: A list containing the names of all of the models, which
will help us when building foreign key lookups. | [
"Helper",
"function",
"to",
"build",
"a",
"field",
"from",
"the",
"given",
"field",
"name",
"and",
"type",
"."
] | 56b1b5a2cb05a97afa81f428bfcefc833e935b8d | https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/fields.py#L90-L121 | train |
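The field-type grammar that `construct_field` parses is worth spelling out: `Post[] -> author` means a many-to-many relation to the `Post` model that back-populates an `author` attribute, while a bare `string` maps straight to a built-in field type. A standalone sketch of just the parsing step, with illustrative example strings:

```python
# Standalone sketch of the type-string parsing in construct_field().
def parse_field_type(field_type):
    parts = field_type.split('->')
    base = parts[0].strip().split('[]')[0].strip()
    back_populates = parts[1].strip() if len(parts) > 1 else None
    many = parts[0].strip().endswith('[]')
    return base, many, back_populates

print(parse_field_type('string'))            # ('string', False, None)
print(parse_field_type('Post[] -> author'))  # ('Post', True, 'author')
print(parse_field_type('Author -> posts'))   # ('Author', False, 'posts')
```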
thanethomson/statik | statik/pagination.py | paginate | def paginate(db_query, items_per_page, offset=0, start_page=1):
"""Instantiates a Paginator instance for database queries.
Args:
db_query: The SQLAlchemy database query to paginate.
items_per_page: The desired number of items per page.
offset: The number of items to skip when paginating.
start_page: The number of the first page when reporting on page numbers.
"""
return Paginator(db_query, items_per_page, offset=offset, start_page=start_page) | python | def paginate(db_query, items_per_page, offset=0, start_page=1):
"""Instantiates a Paginator instance for database queries.
Args:
db_query: The SQLAlchemy database query to paginate.
items_per_page: The desired number of items per page.
offset: The number of items to skip when paginating.
start_page: The number of the first page when reporting on page numbers.
"""
return Paginator(db_query, items_per_page, offset=offset, start_page=start_page) | [
"def",
"paginate",
"(",
"db_query",
",",
"items_per_page",
",",
"offset",
"=",
"0",
",",
"start_page",
"=",
"1",
")",
":",
"return",
"Paginator",
"(",
"db_query",
",",
"items_per_page",
",",
"offset",
"=",
"offset",
",",
"start_page",
"=",
"start_page",
")"
] | Instantiates a Paginator instance for database queries.
Args:
db_query: The SQLAlchemy database query to paginate.
items_per_page: The desired number of items per page.
offset: The number of items to skip when paginating.
start_page: The number of the first page when reporting on page numbers. | [
"Instantiates",
"a",
"Paginator",
"instance",
"for",
"database",
"queries",
"."
] | 56b1b5a2cb05a97afa81f428bfcefc833e935b8d | https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/pagination.py#L132-L141 | train |
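A usage sketch for `paginate`, hedged: `session` is assumed to be a SQLAlchemy session with a mapped `Post` model, and the `Paginator` class itself is not included in this sample, so only the documented call shape is exercised.

```python
# Hypothetical call; Post and session are assumptions, not Statik API.
pages = paginate(session.query(Post), items_per_page=10, offset=0, start_page=1)
```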
thanethomson/statik | statik/views.py | StatikViewPath.render_reverse | def render_reverse(self, inst=None, context=None):
"""Renders the reverse URL for this path."""
rendered = self.render(inst=inst, context=context)
parts = rendered.split('/')
# we only prettify URLs for these files
if parts[-1] in ['index.html', 'index.htm']:
return ('/'.join(parts[:-1])) + '/'
return rendered | python | def render_reverse(self, inst=None, context=None):
"""Renders the reverse URL for this path."""
rendered = self.render(inst=inst, context=context)
parts = rendered.split('/')
# we only prettify URLs for these files
if parts[-1] in ['index.html', 'index.htm']:
return ('/'.join(parts[:-1])) + '/'
return rendered | [
"def",
"render_reverse",
"(",
"self",
",",
"inst",
"=",
"None",
",",
"context",
"=",
"None",
")",
":",
"rendered",
"=",
"self",
".",
"render",
"(",
"inst",
"=",
"inst",
",",
"context",
"=",
"context",
")",
"parts",
"=",
"rendered",
".",
"split",
"(",
"'/'",
")",
"# we only prettify URLs for these files",
"if",
"parts",
"[",
"-",
"1",
"]",
"in",
"[",
"'index.html'",
",",
"'index.htm'",
"]",
":",
"return",
"(",
"'/'",
".",
"join",
"(",
"parts",
"[",
":",
"-",
"1",
"]",
")",
")",
"+",
"'/'",
"return",
"rendered"
] | Renders the reverse URL for this path. | [
"Renders",
"the",
"reverse",
"URL",
"for",
"this",
"path",
"."
] | 56b1b5a2cb05a97afa81f428bfcefc833e935b8d | https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/views.py#L61-L68 | train |
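The prettification rule in `render_reverse`, reduced to a pure function so its behaviour is easy to see: index files collapse to their directory URL with a trailing slash, everything else passes through unchanged.

```python
# Pure-function restatement of the URL prettification above.
def prettify(rendered):
    parts = rendered.split('/')
    if parts[-1] in ['index.html', 'index.htm']:
        return '/'.join(parts[:-1]) + '/'
    return rendered

print(prettify('posts/2019/index.html'))  # -> 'posts/2019/'
print(prettify('feed.xml'))               # -> 'feed.xml'
```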
thanethomson/statik | statik/views.py | StatikViewPath.create | def create(
cls,
path,
template_engine=None,
output_filename=None,
output_ext=None,
view_name=None
):
"""Create the relevant subclass of StatikView based on the given path variable and
parameters."""
# if it's a complex view
if isinstance(path, dict):
return StatikViewComplexPath(
path,
template_engine,
output_filename=output_filename,
output_ext=output_ext,
view_name=view_name
)
elif isinstance(path, basestring):
return StatikViewSimplePath(
path,
output_filename=output_filename,
output_ext=output_ext,
view_name=view_name
)
else:
raise ValueError(
"Unrecognised structure for \"path\" configuration in view: %s" % view_name
) | python | def create(
cls,
path,
template_engine=None,
output_filename=None,
output_ext=None,
view_name=None
):
"""Create the relevant subclass of StatikView based on the given path variable and
parameters."""
# if it's a complex view
if isinstance(path, dict):
return StatikViewComplexPath(
path,
template_engine,
output_filename=output_filename,
output_ext=output_ext,
view_name=view_name
)
elif isinstance(path, basestring):
return StatikViewSimplePath(
path,
output_filename=output_filename,
output_ext=output_ext,
view_name=view_name
)
else:
raise ValueError(
"Unrecognised structure for \"path\" configuration in view: %s" % view_name
) | [
"def",
"create",
"(",
"cls",
",",
"path",
",",
"template_engine",
"=",
"None",
",",
"output_filename",
"=",
"None",
",",
"output_ext",
"=",
"None",
",",
"view_name",
"=",
"None",
")",
":",
"# if it's a complex view",
"if",
"isinstance",
"(",
"path",
",",
"dict",
")",
":",
"return",
"StatikViewComplexPath",
"(",
"path",
",",
"template_engine",
",",
"output_filename",
"=",
"output_filename",
",",
"output_ext",
"=",
"output_ext",
",",
"view_name",
"=",
"view_name",
")",
"elif",
"isinstance",
"(",
"path",
",",
"basestring",
")",
":",
"return",
"StatikViewSimplePath",
"(",
"path",
",",
"output_filename",
"=",
"output_filename",
",",
"output_ext",
"=",
"output_ext",
",",
"view_name",
"=",
"view_name",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"Unrecognised structure for \\\"path\\\" configuration in view: %s\"",
"%",
"view_name",
")"
] | Create the relevant subclass of StatikView based on the given path variable and
parameters. | [
"Create",
"the",
"relevant",
"subclass",
"of",
"StatikView",
"based",
"on",
"the",
"given",
"path",
"variable",
"and",
"parameters",
"."
] | 56b1b5a2cb05a97afa81f428bfcefc833e935b8d | https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/views.py#L71-L100 | train |
thanethomson/statik | statik/views.py | StatikComplexViewRenderer.render | def render(self, context, db=None, safe_mode=False, extra_context=None):
"""Renders the given context using the specified database, returning a dictionary
containing path segments and rendered view contents."""
if not db:
raise MissingParameterError(
"db",
context=self.error_context
)
rendered_views = dict()
path_instances = db.query(self.path.query, safe_mode=safe_mode)
extra_ctx = copy(extra_context) if extra_context else dict()
for inst in path_instances:
extra_ctx.update({
self.path.variable: inst
})
ctx = context.build(
db=db,
safe_mode=safe_mode,
for_each_inst=inst,
extra=extra_ctx
)
inst_path = self.path.render(inst=inst, context=ctx)
rendered_view = self.template.render(ctx)
rendered_views = deep_merge_dict(
rendered_views,
dict_from_path(inst_path, final_value=rendered_view)
)
return rendered_views | python | def render(self, context, db=None, safe_mode=False, extra_context=None):
"""Renders the given context using the specified database, returning a dictionary
containing path segments and rendered view contents."""
if not db:
raise MissingParameterError(
"db",
context=self.error_context
)
rendered_views = dict()
path_instances = db.query(self.path.query, safe_mode=safe_mode)
extra_ctx = copy(extra_context) if extra_context else dict()
for inst in path_instances:
extra_ctx.update({
self.path.variable: inst
})
ctx = context.build(
db=db,
safe_mode=safe_mode,
for_each_inst=inst,
extra=extra_ctx
)
inst_path = self.path.render(inst=inst, context=ctx)
rendered_view = self.template.render(ctx)
rendered_views = deep_merge_dict(
rendered_views,
dict_from_path(inst_path, final_value=rendered_view)
)
return rendered_views | [
"def",
"render",
"(",
"self",
",",
"context",
",",
"db",
"=",
"None",
",",
"safe_mode",
"=",
"False",
",",
"extra_context",
"=",
"None",
")",
":",
"if",
"not",
"db",
":",
"raise",
"MissingParameterError",
"(",
"\"db\"",
",",
"context",
"=",
"self",
".",
"error_context",
")",
"rendered_views",
"=",
"dict",
"(",
")",
"path_instances",
"=",
"db",
".",
"query",
"(",
"self",
".",
"path",
".",
"query",
",",
"safe_mode",
"=",
"safe_mode",
")",
"extra_ctx",
"=",
"copy",
"(",
"extra_context",
")",
"if",
"extra_context",
"else",
"dict",
"(",
")",
"for",
"inst",
"in",
"path_instances",
":",
"extra_ctx",
".",
"update",
"(",
"{",
"self",
".",
"path",
".",
"variable",
":",
"inst",
"}",
")",
"ctx",
"=",
"context",
".",
"build",
"(",
"db",
"=",
"db",
",",
"safe_mode",
"=",
"safe_mode",
",",
"for_each_inst",
"=",
"inst",
",",
"extra",
"=",
"extra_ctx",
")",
"inst_path",
"=",
"self",
".",
"path",
".",
"render",
"(",
"inst",
"=",
"inst",
",",
"context",
"=",
"ctx",
")",
"rendered_view",
"=",
"self",
".",
"template",
".",
"render",
"(",
"ctx",
")",
"rendered_views",
"=",
"deep_merge_dict",
"(",
"rendered_views",
",",
"dict_from_path",
"(",
"inst_path",
",",
"final_value",
"=",
"rendered_view",
")",
")",
"return",
"rendered_views"
] | Renders the given context using the specified database, returning a dictionary
containing path segments and rendered view contents. | [
"Renders",
"the",
"given",
"context",
"using",
"the",
"specified",
"database",
"returning",
"a",
"dictionary",
"containing",
"path",
"segments",
"and",
"rendered",
"view",
"contents",
"."
] | 56b1b5a2cb05a97afa81f428bfcefc833e935b8d | https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/views.py#L282-L310 | train |
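`dict_from_path` and `deep_merge_dict`, which the render loop above leans on, are imported helpers not included in this sample. The sketch below is our assumption of the nesting behaviour the loop relies on: a rendered path becomes nested dict keys ending in the rendered page, so per-instance results can be deep-merged into one output tree.

```python
# Assumed behaviour of the dict_from_path() helper (not Statik's actual code).
def dict_from_path(path, final_value):
    result = final_value
    for segment in reversed([s for s in path.split('/') if s]):
        result = {segment: result}
    return result

print(dict_from_path('posts/hello/index.html', '<html>...</html>'))
# -> {'posts': {'hello': {'index.html': '<html>...</html>'}}}
```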
thanethomson/statik | statik/views.py | StatikView.render | def render(self, db, safe_mode=False, extra_context=None):
"""Renders this view, given the specified StatikDatabase instance."""
return self.renderer.render(
self.context,
db,
safe_mode=safe_mode,
extra_context=extra_context
) | python | def render(self, db, safe_mode=False, extra_context=None):
"""Renders this view, given the specified StatikDatabase instance."""
return self.renderer.render(
self.context,
db,
safe_mode=safe_mode,
extra_context=extra_context
) | [
"def",
"render",
"(",
"self",
",",
"db",
",",
"safe_mode",
"=",
"False",
",",
"extra_context",
"=",
"None",
")",
":",
"return",
"self",
".",
"renderer",
".",
"render",
"(",
"self",
".",
"context",
",",
"db",
",",
"safe_mode",
"=",
"safe_mode",
",",
"extra_context",
"=",
"extra_context",
")"
] | Renders this view, given the specified StatikDatabase instance. | [
"Renders",
"this",
"view",
"given",
"the",
"specified",
"StatikDatabase",
"instance",
"."
] | 56b1b5a2cb05a97afa81f428bfcefc833e935b8d | https://github.com/thanethomson/statik/blob/56b1b5a2cb05a97afa81f428bfcefc833e935b8d/statik/views.py#L399-L406 | train |
goldmann/docker-squash | docker_squash/image.py | Image._validate_number_of_layers | def _validate_number_of_layers(self, number_of_layers):
"""
Makes sure that the specified number of layers to squash
is a valid number
"""
# Only positive numbers are correct
if number_of_layers <= 0:
raise SquashError(
"Number of layers to squash cannot be less or equal 0, provided: %s" % number_of_layers)
# Do not squash if provided number of layer to squash is bigger
# than number of actual layers in the image
if number_of_layers > len(self.old_image_layers):
raise SquashError(
"Cannot squash %s layers, the %s image contains only %s layers" % (number_of_layers, self.image, len(self.old_image_layers))) | python | def _validate_number_of_layers(self, number_of_layers):
"""
Makes sure that the specified number of layers to squash
is a valid number
"""
# Only positive numbers are correct
if number_of_layers <= 0:
raise SquashError(
"Number of layers to squash cannot be less or equal 0, provided: %s" % number_of_layers)
# Do not squash if provided number of layer to squash is bigger
# than number of actual layers in the image
if number_of_layers > len(self.old_image_layers):
raise SquashError(
"Cannot squash %s layers, the %s image contains only %s layers" % (number_of_layers, self.image, len(self.old_image_layers))) | [
"def",
"_validate_number_of_layers",
"(",
"self",
",",
"number_of_layers",
")",
":",
"# Only positive numbers are correct",
"if",
"number_of_layers",
"<=",
"0",
":",
"raise",
"SquashError",
"(",
"\"Number of layers to squash cannot be less or equal 0, provided: %s\"",
"%",
"number_of_layers",
")",
"# Do not squash if provided number of layer to squash is bigger",
"# than number of actual layers in the image",
"if",
"number_of_layers",
">",
"len",
"(",
"self",
".",
"old_image_layers",
")",
":",
"raise",
"SquashError",
"(",
"\"Cannot squash %s layers, the %s image contains only %s layers\"",
"%",
"(",
"number_of_layers",
",",
"self",
".",
"image",
",",
"len",
"(",
"self",
".",
"old_image_layers",
")",
")",
")"
] | Makes sure that the specified number of layers to squash
is a valid number | [
"Makes",
"sure",
"that",
"the",
"specified",
"number",
"of",
"layers",
"to",
"squash",
"is",
"a",
"valid",
"number"
] | 89e0297942be268791aff2098b7ebfa50d82f8e8 | https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/image.py#L125-L140 | train |
goldmann/docker-squash | docker_squash/image.py | Image._files_in_layers | def _files_in_layers(self, layers, directory):
"""
Prepare a list of files in all layers
"""
files = {}
for layer in layers:
self.log.debug("Generating list of files in layer '%s'..." % layer)
tar_file = os.path.join(directory, layer, "layer.tar")
with tarfile.open(tar_file, 'r', format=tarfile.PAX_FORMAT) as tar:
files[layer] = [self._normalize_path(
x) for x in tar.getnames()]
self.log.debug("Done, found %s files" % len(files[layer]))
return files | python | def _files_in_layers(self, layers, directory):
"""
Prepare a list of files in all layers
"""
files = {}
for layer in layers:
self.log.debug("Generating list of files in layer '%s'..." % layer)
tar_file = os.path.join(directory, layer, "layer.tar")
with tarfile.open(tar_file, 'r', format=tarfile.PAX_FORMAT) as tar:
files[layer] = [self._normalize_path(
x) for x in tar.getnames()]
self.log.debug("Done, found %s files" % len(files[layer]))
return files | [
"def",
"_files_in_layers",
"(",
"self",
",",
"layers",
",",
"directory",
")",
":",
"files",
"=",
"{",
"}",
"for",
"layer",
"in",
"layers",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"Generating list of files in layer '%s'...\"",
"%",
"layer",
")",
"tar_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"directory",
",",
"layer",
",",
"\"layer.tar\"",
")",
"with",
"tarfile",
".",
"open",
"(",
"tar_file",
",",
"'r'",
",",
"format",
"=",
"tarfile",
".",
"PAX_FORMAT",
")",
"as",
"tar",
":",
"files",
"[",
"layer",
"]",
"=",
"[",
"self",
".",
"_normalize_path",
"(",
"x",
")",
"for",
"x",
"in",
"tar",
".",
"getnames",
"(",
")",
"]",
"self",
".",
"log",
".",
"debug",
"(",
"\"Done, found %s files\"",
"%",
"len",
"(",
"files",
"[",
"layer",
"]",
")",
")",
"return",
"files"
] | Prepare a list of files in all layers | [
"Prepare",
"a",
"list",
"of",
"files",
"in",
"all",
"layers"
] | 89e0297942be268791aff2098b7ebfa50d82f8e8 | https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/image.py#L260-L274 | train |
goldmann/docker-squash | docker_squash/image.py | Image._prepare_tmp_directory | def _prepare_tmp_directory(self, tmp_dir):
""" Creates temporary directory that is used to work on layers """
if tmp_dir:
if os.path.exists(tmp_dir):
raise SquashError(
"The '%s' directory already exists, please remove it before you proceed" % tmp_dir)
os.makedirs(tmp_dir)
else:
tmp_dir = tempfile.mkdtemp(prefix="docker-squash-")
self.log.debug("Using %s as the temporary directory" % tmp_dir)
return tmp_dir | python | def _prepare_tmp_directory(self, tmp_dir):
""" Creates temporary directory that is used to work on layers """
if tmp_dir:
if os.path.exists(tmp_dir):
raise SquashError(
"The '%s' directory already exists, please remove it before you proceed" % tmp_dir)
os.makedirs(tmp_dir)
else:
tmp_dir = tempfile.mkdtemp(prefix="docker-squash-")
self.log.debug("Using %s as the temporary directory" % tmp_dir)
return tmp_dir | [
"def",
"_prepare_tmp_directory",
"(",
"self",
",",
"tmp_dir",
")",
":",
"if",
"tmp_dir",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"tmp_dir",
")",
":",
"raise",
"SquashError",
"(",
"\"The '%s' directory already exists, please remove it before you proceed\"",
"%",
"tmp_dir",
")",
"os",
".",
"makedirs",
"(",
"tmp_dir",
")",
"else",
":",
"tmp_dir",
"=",
"tempfile",
".",
"mkdtemp",
"(",
"prefix",
"=",
"\"docker-squash-\"",
")",
"self",
".",
"log",
".",
"debug",
"(",
"\"Using %s as the temporary directory\"",
"%",
"tmp_dir",
")",
"return",
"tmp_dir"
] | Creates temporary directory that is used to work on layers | [
"Creates",
"temporary",
"directory",
"that",
"is",
"used",
"to",
"work",
"on",
"layers"
] | 89e0297942be268791aff2098b7ebfa50d82f8e8 | https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/image.py#L276-L289 | train |
goldmann/docker-squash | docker_squash/image.py | Image._layers_to_squash | def _layers_to_squash(self, layers, from_layer):
""" Prepares a list of layer IDs that should be squashed """
to_squash = []
to_leave = []
should_squash = True
for l in reversed(layers):
if l == from_layer:
should_squash = False
if should_squash:
to_squash.append(l)
else:
to_leave.append(l)
to_squash.reverse()
to_leave.reverse()
return to_squash, to_leave | python | def _layers_to_squash(self, layers, from_layer):
""" Prepares a list of layer IDs that should be squashed """
to_squash = []
to_leave = []
should_squash = True
for l in reversed(layers):
if l == from_layer:
should_squash = False
if should_squash:
to_squash.append(l)
else:
to_leave.append(l)
to_squash.reverse()
to_leave.reverse()
return to_squash, to_leave | [
"def",
"_layers_to_squash",
"(",
"self",
",",
"layers",
",",
"from_layer",
")",
":",
"to_squash",
"=",
"[",
"]",
"to_leave",
"=",
"[",
"]",
"should_squash",
"=",
"True",
"for",
"l",
"in",
"reversed",
"(",
"layers",
")",
":",
"if",
"l",
"==",
"from_layer",
":",
"should_squash",
"=",
"False",
"if",
"should_squash",
":",
"to_squash",
".",
"append",
"(",
"l",
")",
"else",
":",
"to_leave",
".",
"append",
"(",
"l",
")",
"to_squash",
".",
"reverse",
"(",
")",
"to_leave",
".",
"reverse",
"(",
")",
"return",
"to_squash",
",",
"to_leave"
] | Prepares a list of layer IDs that should be squashed | [
"Prepares",
"a",
"list",
"of",
"layer",
"IDs",
"that",
"should",
"be",
"squashed"
] | 89e0297942be268791aff2098b7ebfa50d82f8e8 | https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/image.py#L319-L337 | train |
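The split performed by `_layers_to_squash` on a toy layer list, ordered base to top as in the image metadata: everything above `from_layer` is squashed, while `from_layer` itself and everything below it is left alone.

```python
# Toy run of the _layers_to_squash() logic with made-up layer IDs.
layers = ['base', 'deps', 'app', 'config']
from_layer = 'deps'

to_squash, to_leave, should_squash = [], [], True
for l in reversed(layers):
    if l == from_layer:
        should_squash = False
    if should_squash:
        to_squash.append(l)
    else:
        to_leave.append(l)
to_squash.reverse()
to_leave.reverse()
print(to_squash)  # -> ['app', 'config']
print(to_leave)   # -> ['base', 'deps']
```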
goldmann/docker-squash | docker_squash/image.py | Image._save_image | def _save_image(self, image_id, directory):
""" Saves the image as a tar archive under specified name """
for x in [0, 1, 2]:
self.log.info("Saving image %s to %s directory..." %
(image_id, directory))
self.log.debug("Try #%s..." % (x + 1))
try:
image = self.docker.get_image(image_id)
if docker.version_info[0] < 3:
# Docker library prior to 3.0.0 returned the requests
# object directly which could be used to read from
self.log.debug("Extracting image using HTTPResponse object directly")
self._extract_tar(image, directory)
else:
# Docker library >=3.0.0 returns iterator over raw data
self.log.debug("Extracting image using iterator over raw data")
fd_r, fd_w = os.pipe()
r = os.fdopen(fd_r, 'rb')
w = os.fdopen(fd_w, 'wb')
extracter = threading.Thread(target=self._extract_tar, args=(r,directory))
extracter.start()
for chunk in image:
w.write(chunk)
w.flush()
w.close()
extracter.join()
r.close()
self.log.info("Image saved!")
return True
except Exception as e:
self.log.exception(e)
self.log.warn(
"An error occured while saving the %s image, retrying..." % image_id)
raise SquashError("Couldn't save %s image!" % image_id) | python | def _save_image(self, image_id, directory):
""" Saves the image as a tar archive under specified name """
for x in [0, 1, 2]:
self.log.info("Saving image %s to %s directory..." %
(image_id, directory))
self.log.debug("Try #%s..." % (x + 1))
try:
image = self.docker.get_image(image_id)
if docker.version_info[0] < 3:
# Docker library prior to 3.0.0 returned the requests
# object directly which could be used to read from
self.log.debug("Extracting image using HTTPResponse object directly")
self._extract_tar(image, directory)
else:
# Docker library >=3.0.0 returns iterator over raw data
self.log.debug("Extracting image using iterator over raw data")
fd_r, fd_w = os.pipe()
r = os.fdopen(fd_r, 'rb')
w = os.fdopen(fd_w, 'wb')
extracter = threading.Thread(target=self._extract_tar, args=(r,directory))
extracter.start()
for chunk in image:
w.write(chunk)
w.flush()
w.close()
extracter.join()
r.close()
self.log.info("Image saved!")
return True
except Exception as e:
self.log.exception(e)
self.log.warn(
"An error occured while saving the %s image, retrying..." % image_id)
raise SquashError("Couldn't save %s image!" % image_id) | [
"def",
"_save_image",
"(",
"self",
",",
"image_id",
",",
"directory",
")",
":",
"for",
"x",
"in",
"[",
"0",
",",
"1",
",",
"2",
"]",
":",
"self",
".",
"log",
".",
"info",
"(",
"\"Saving image %s to %s directory...\"",
"%",
"(",
"image_id",
",",
"directory",
")",
")",
"self",
".",
"log",
".",
"debug",
"(",
"\"Try #%s...\"",
"%",
"(",
"x",
"+",
"1",
")",
")",
"try",
":",
"image",
"=",
"self",
".",
"docker",
".",
"get_image",
"(",
"image_id",
")",
"if",
"docker",
".",
"version_info",
"[",
"0",
"]",
"<",
"3",
":",
"# Docker library prior to 3.0.0 returned the requests",
"# object directly which cold be used to read from",
"self",
".",
"log",
".",
"debug",
"(",
"\"Extracting image using HTTPResponse object directly\"",
")",
"self",
".",
"_extract_tar",
"(",
"image",
",",
"directory",
")",
"else",
":",
"# Docker library >=3.0.0 returns iterator over raw data",
"self",
".",
"log",
".",
"debug",
"(",
"\"Extracting image using iterator over raw data\"",
")",
"fd_r",
",",
"fd_w",
"=",
"os",
".",
"pipe",
"(",
")",
"r",
"=",
"os",
".",
"fdopen",
"(",
"fd_r",
",",
"'rb'",
")",
"w",
"=",
"os",
".",
"fdopen",
"(",
"fd_w",
",",
"'wb'",
")",
"extracter",
"=",
"threading",
".",
"Thread",
"(",
"target",
"=",
"self",
".",
"_extract_tar",
",",
"args",
"=",
"(",
"r",
",",
"directory",
")",
")",
"extracter",
".",
"start",
"(",
")",
"for",
"chunk",
"in",
"image",
":",
"w",
".",
"write",
"(",
"chunk",
")",
"w",
".",
"flush",
"(",
")",
"w",
".",
"close",
"(",
")",
"extracter",
".",
"join",
"(",
")",
"r",
".",
"close",
"(",
")",
"self",
".",
"log",
".",
"info",
"(",
"\"Image saved!\"",
")",
"return",
"True",
"except",
"Exception",
"as",
"e",
":",
"self",
".",
"log",
".",
"exception",
"(",
"e",
")",
"self",
".",
"log",
".",
"warn",
"(",
"\"An error occured while saving the %s image, retrying...\"",
"%",
"image_id",
")",
"raise",
"SquashError",
"(",
"\"Couldn't save %s image!\"",
"%",
"image_id",
")"
] | Saves the image as a tar archive under specified name | [
"Saves",
"the",
"image",
"as",
"a",
"tar",
"archive",
"under",
"specified",
"name"
] | 89e0297942be268791aff2098b7ebfa50d82f8e8 | https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/image.py#L343-L386 | train |
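The pipe-plus-thread pattern in `_save_image` exists because docker-py >= 3.0.0 returns a chunk iterator rather than a file-like object, while the tar extractor wants a stream. Below is a self-contained sketch of the same plumbing; `chunks` stands in for the iterator returned by `docker.get_image()`, and `consume` for `_extract_tar`.

```python
import os
import threading

def consume(stream):
    # Stand-in for _extract_tar(): read the stream until EOF.
    data = stream.read()
    print('received %d bytes' % len(data))

chunks = [b'layer-', b'data-', b'example']   # fake image chunks
fd_r, fd_w = os.pipe()
reader = os.fdopen(fd_r, 'rb')
writer = os.fdopen(fd_w, 'wb')

worker = threading.Thread(target=consume, args=(reader,))
worker.start()
for chunk in chunks:
    writer.write(chunk)
writer.close()    # closing the write end signals EOF to the reader
worker.join()
reader.close()
```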
goldmann/docker-squash | docker_squash/image.py | Image._unpack | def _unpack(self, tar_file, directory):
""" Unpacks tar archive to selected directory """
self.log.info("Unpacking %s tar file to %s directory" %
(tar_file, directory))
with tarfile.open(tar_file, 'r') as tar:
tar.extractall(path=directory)
self.log.info("Archive unpacked!") | python | def _unpack(self, tar_file, directory):
""" Unpacks tar archive to selected directory """
self.log.info("Unpacking %s tar file to %s directory" %
(tar_file, directory))
with tarfile.open(tar_file, 'r') as tar:
tar.extractall(path=directory)
self.log.info("Archive unpacked!") | [
"def",
"_unpack",
"(",
"self",
",",
"tar_file",
",",
"directory",
")",
":",
"self",
".",
"log",
".",
"info",
"(",
"\"Unpacking %s tar file to %s directory\"",
"%",
"(",
"tar_file",
",",
"directory",
")",
")",
"with",
"tarfile",
".",
"open",
"(",
"tar_file",
",",
"'r'",
")",
"as",
"tar",
":",
"tar",
".",
"extractall",
"(",
"path",
"=",
"directory",
")",
"self",
".",
"log",
".",
"info",
"(",
"\"Archive unpacked!\"",
")"
] | Unpacks tar archive to selected directory | [
"Unpacks",
"tar",
"archive",
"to",
"selected",
"directory"
] | 89e0297942be268791aff2098b7ebfa50d82f8e8 | https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/image.py#L388-L397 | train |
goldmann/docker-squash | docker_squash/image.py | Image._parse_image_name | def _parse_image_name(self, image):
"""
Parses the provided image name and splits it in the
name and tag part, if possible. If no tag is provided
'latest' is used.
"""
if ':' in image and '/' not in image.split(':')[-1]:
image_tag = image.split(':')[-1]
image_name = image[:-(len(image_tag) + 1)]
else:
image_tag = "latest"
image_name = image
return (image_name, image_tag) | python | def _parse_image_name(self, image):
"""
Parses the provided image name and splits it in the
name and tag part, if possible. If no tag is provided
'latest' is used.
"""
if ':' in image and '/' not in image.split(':')[-1]:
image_tag = image.split(':')[-1]
image_name = image[:-(len(image_tag) + 1)]
else:
image_tag = "latest"
image_name = image
return (image_name, image_tag) | [
"def",
"_parse_image_name",
"(",
"self",
",",
"image",
")",
":",
"if",
"':'",
"in",
"image",
"and",
"'/'",
"not",
"in",
"image",
".",
"split",
"(",
"':'",
")",
"[",
"-",
"1",
"]",
":",
"image_tag",
"=",
"image",
".",
"split",
"(",
"':'",
")",
"[",
"-",
"1",
"]",
"image_name",
"=",
"image",
"[",
":",
"-",
"(",
"len",
"(",
"image_tag",
")",
"+",
"1",
")",
"]",
"else",
":",
"image_tag",
"=",
"\"latest\"",
"image_name",
"=",
"image",
"return",
"(",
"image_name",
",",
"image_tag",
")"
] | Parses the provided image name and splits it in the
name and tag part, if possible. If no tag is provided
'latest' is used. | [
"Parses",
"the",
"provided",
"image",
"name",
"and",
"splits",
"it",
"in",
"the",
"name",
"and",
"tag",
"part",
"if",
"possible",
".",
"If",
"no",
"tag",
"is",
"provided",
"latest",
"is",
"used",
"."
] | 89e0297942be268791aff2098b7ebfa50d82f8e8 | https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/image.py#L405-L418 | train |
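The guard in `_parse_image_name` (`'/' not in image.split(':')[-1]`) is what keeps a registry port from being mistaken for a tag. Restated as a standalone function and applied to a few representative names:

```python
# Standalone restatement of the _parse_image_name() rule.
def parse_image_name(image):
    if ':' in image and '/' not in image.split(':')[-1]:
        tag = image.split(':')[-1]
        return image[:-(len(tag) + 1)], tag
    return image, 'latest'

print(parse_image_name('busybox'))             # ('busybox', 'latest')
print(parse_image_name('busybox:1.36'))        # ('busybox', '1.36')
print(parse_image_name('localhost:5000/app'))  # ('localhost:5000/app', 'latest')
```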
goldmann/docker-squash | docker_squash/image.py | Image._dump_json | def _dump_json(self, data, new_line=False):
"""
Helper function to marshal object into JSON string.
Additionally a sha256sum of the created JSON string is generated.
"""
# We do not want any spaces between keys and values in JSON
json_data = json.dumps(data, separators=(',', ':'))
if new_line:
json_data = "%s\n" % json_data
# Generate sha256sum of the JSON data, may be handy
sha = hashlib.sha256(json_data.encode('utf-8')).hexdigest()
return json_data, sha | python | def _dump_json(self, data, new_line=False):
"""
Helper function to marshal object into JSON string.
Additionally a sha256sum of the created JSON string is generated.
"""
# We do not want any spaces between keys and values in JSON
json_data = json.dumps(data, separators=(',', ':'))
if new_line:
json_data = "%s\n" % json_data
# Generate sha256sum of the JSON data, may be handy
sha = hashlib.sha256(json_data.encode('utf-8')).hexdigest()
return json_data, sha | [
"def",
"_dump_json",
"(",
"self",
",",
"data",
",",
"new_line",
"=",
"False",
")",
":",
"# We do not want any spaces between keys and values in JSON",
"json_data",
"=",
"json",
".",
"dumps",
"(",
"data",
",",
"separators",
"=",
"(",
"','",
",",
"':'",
")",
")",
"if",
"new_line",
":",
"json_data",
"=",
"\"%s\\n\"",
"%",
"json_data",
"# Generate sha256sum of the JSON data, may be handy",
"sha",
"=",
"hashlib",
".",
"sha256",
"(",
"json_data",
".",
"encode",
"(",
"'utf-8'",
")",
")",
".",
"hexdigest",
"(",
")",
"return",
"json_data",
",",
"sha"
] | Helper function to marshal object into JSON string.
Additionally a sha256sum of the created JSON string is generated. | [
"Helper",
"function",
"to",
"marshal",
"object",
"into",
"JSON",
"string",
".",
"Additionally",
"a",
"sha256sum",
"of",
"the",
"created",
"JSON",
"string",
"is",
"generated",
"."
] | 89e0297942be268791aff2098b7ebfa50d82f8e8 | https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/image.py#L420-L435 | train |
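The compact separators in `_dump_json` matter because the sha256 is taken over the exact byte string: any whitespace change would change the digest and, with it, anything derived from it. A quick demonstration:

```python
import hashlib
import json

data = {'id': 'abc', 'size': 123}
json_data = json.dumps(data, separators=(',', ':'))
sha = hashlib.sha256(json_data.encode('utf-8')).hexdigest()
print(json_data)  # {"id":"abc","size":123}
print(sha)        # hex digest of exactly those bytes
```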
goldmann/docker-squash | docker_squash/image.py | Image._move_layers | def _move_layers(self, layers, src, dest):
"""
This moves all the layers that should be copied as-is.
In other words - all layers that are not meant to be squashed will be
moved from the old image to the new image untouched.
"""
for layer in layers:
layer_id = layer.replace('sha256:', '')
self.log.debug("Moving unmodified layer '%s'..." % layer_id)
shutil.move(os.path.join(src, layer_id), dest) | python | def _move_layers(self, layers, src, dest):
"""
This moves all the layers that should be copied as-is.
In other words - all layers that are not meant to be squashed will be
moved from the old image to the new image untouched.
"""
for layer in layers:
layer_id = layer.replace('sha256:', '')
self.log.debug("Moving unmodified layer '%s'..." % layer_id)
shutil.move(os.path.join(src, layer_id), dest) | [
"def",
"_move_layers",
"(",
"self",
",",
"layers",
",",
"src",
",",
"dest",
")",
":",
"for",
"layer",
"in",
"layers",
":",
"layer_id",
"=",
"layer",
".",
"replace",
"(",
"'sha256:'",
",",
"''",
")",
"self",
".",
"log",
".",
"debug",
"(",
"\"Moving unmodified layer '%s'...\"",
"%",
"layer_id",
")",
"shutil",
".",
"move",
"(",
"os",
".",
"path",
".",
"join",
"(",
"src",
",",
"layer_id",
")",
",",
"dest",
")"
] | This moves all the layers that should be copied as-is.
In other words - all layers that are not meant to be squashed will be
moved from the old image to the new image untouched. | [
"This",
"moves",
"all",
"the",
"layers",
"that",
"should",
"be",
"copied",
"as",
"-",
"is",
".",
"In",
"other",
"words",
"-",
"all",
"layers",
"that",
"are",
"not",
"meant",
"to",
"be",
"squashed",
"will",
"be",
"moved",
"from",
"the",
"old",
"image",
"to",
"the",
"new",
"image",
"untouched",
"."
] | 89e0297942be268791aff2098b7ebfa50d82f8e8 | https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/image.py#L475-L485 | train |
goldmann/docker-squash | docker_squash/image.py | Image._marker_files | def _marker_files(self, tar, members):
"""
Searches for marker files in the specified archive.
Docker marker files are files that have the .wh. prefix in the name.
These files mark the corresponding file to be removed (hidden) when
we start a container from the image.
"""
marker_files = {}
self.log.debug(
"Searching for marker files in '%s' archive..." % tar.name)
for member in members:
if '.wh.' in member.name:
self.log.debug("Found '%s' marker file" % member.name)
marker_files[member] = tar.extractfile(member)
self.log.debug("Done, found %s files" % len(marker_files))
return marker_files | python | def _marker_files(self, tar, members):
"""
Searches for marker files in the specified archive.
Docker marker files are files that have the .wh. prefix in the name.
These files mark the corresponding file to be removed (hidden) when
we start a container from the image.
"""
marker_files = {}
self.log.debug(
"Searching for marker files in '%s' archive..." % tar.name)
for member in members:
if '.wh.' in member.name:
self.log.debug("Found '%s' marker file" % member.name)
marker_files[member] = tar.extractfile(member)
self.log.debug("Done, found %s files" % len(marker_files))
return marker_files | [
"def",
"_marker_files",
"(",
"self",
",",
"tar",
",",
"members",
")",
":",
"marker_files",
"=",
"{",
"}",
"self",
".",
"log",
".",
"debug",
"(",
"\"Searching for marker files in '%s' archive...\"",
"%",
"tar",
".",
"name",
")",
"for",
"member",
"in",
"members",
":",
"if",
"'.wh.'",
"in",
"member",
".",
"name",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"Found '%s' marker file\"",
"%",
"member",
".",
"name",
")",
"marker_files",
"[",
"member",
"]",
"=",
"tar",
".",
"extractfile",
"(",
"member",
")",
"self",
".",
"log",
".",
"debug",
"(",
"\"Done, found %s files\"",
"%",
"len",
"(",
"marker_files",
")",
")",
"return",
"marker_files"
] | Searches for marker files in the specified archive.
Docker marker files are files that have the .wh. prefix in the name.
These files mark the corresponding file to be removed (hidden) when
we start a container from the image. | [
"Searches",
"for",
"marker",
"files",
"in",
"the",
"specified",
"archive",
"."
] | 89e0297942be268791aff2098b7ebfa50d82f8e8 | https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/image.py#L501-L521 | train |
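For readers unfamiliar with AUFS whiteouts, the `.wh.` convention that `_marker_files` searches for works like this: a marker `<dir>/.wh.<name>` hides `<dir>/<name>` from the layers below it, which is why the search above simply looks for `.wh.` in the member name.

```python
# The marker-to-hidden-file mapping used throughout the squashing code.
marker = 'etc/yum.repos.d/.wh.extras.repo'
hidden = marker.replace('.wh.', '')
print(hidden)  # -> 'etc/yum.repos.d/extras.repo'
```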
goldmann/docker-squash | docker_squash/image.py | Image._add_markers | def _add_markers(self, markers, tar, files_in_layers, added_symlinks):
"""
This method is responsible for adding back all markers that were not
added to the squashed layer AND files they refer to can be found in layers
we do not squash.
"""
if markers:
self.log.debug("Marker files to add: %s" %
[o.name for o in markers.keys()])
else:
# No marker files to add
return
# https://github.com/goldmann/docker-squash/issues/108
# Some tar archives do have the filenames prefixed with './'
# which does not have any effect when we unpack the tar archive,
# but when processing tar content - we see this.
tar_files = [self._normalize_path(x) for x in tar.getnames()]
for marker, marker_file in six.iteritems(markers):
actual_file = marker.name.replace('.wh.', '')
normalized_file = self._normalize_path(actual_file)
should_be_added_back = False
if self._file_should_be_skipped(normalized_file, added_symlinks):
self.log.debug(
"Skipping '%s' marker file, this file is on a symlink path" % normalized_file)
continue
if normalized_file in tar_files:
self.log.debug(
"Skipping '%s' marker file, this file was added earlier for some reason..." % normalized_file)
continue
if files_in_layers:
for files in files_in_layers.values():
if normalized_file in files:
should_be_added_back = True
break
else:
# There are no previous layers, so we need to add it back
# In fact this shouldn't happen since having a marker file
# where there is no previous layer does not make sense.
should_be_added_back = True
if should_be_added_back:
self.log.debug(
"Adding '%s' marker file back..." % marker.name)
# Marker files on AUFS are hardlinks, we need to create
# regular files, therefore we need to recreate the tarinfo
# object
tar.addfile(tarfile.TarInfo(name=marker.name), marker_file)
# Add the file name to the list too to avoid re-reading all files
# in tar archive
tar_files.append(normalized_file)
else:
self.log.debug(
"Skipping '%s' marker file..." % marker.name) | python | def _add_markers(self, markers, tar, files_in_layers, added_symlinks):
"""
This method is responsible for adding back all markers that were not
added to the squashed layer AND files they refer to can be found in layers
we do not squash.
"""
if markers:
self.log.debug("Marker files to add: %s" %
[o.name for o in markers.keys()])
else:
# No marker files to add
return
# https://github.com/goldmann/docker-squash/issues/108
# Some tar archives do have the filenames prefixed with './'
# which does not have any effect when we unpack the tar archive,
# but when processing tar content - we see this.
tar_files = [self._normalize_path(x) for x in tar.getnames()]
for marker, marker_file in six.iteritems(markers):
actual_file = marker.name.replace('.wh.', '')
normalized_file = self._normalize_path(actual_file)
should_be_added_back = False
if self._file_should_be_skipped(normalized_file, added_symlinks):
self.log.debug(
"Skipping '%s' marker file, this file is on a symlink path" % normalized_file)
continue
if normalized_file in tar_files:
self.log.debug(
"Skipping '%s' marker file, this file was added earlier for some reason..." % normalized_file)
continue
if files_in_layers:
for files in files_in_layers.values():
if normalized_file in files:
should_be_added_back = True
break
else:
# There are no previous layers, so we need to add it back
# In fact this shouldn't happen since having a marker file
# where there is no previous layer does not make sense.
should_be_added_back = True
if should_be_added_back:
self.log.debug(
"Adding '%s' marker file back..." % marker.name)
# Marker files on AUFS are hardlinks, we need to create
# regular files, therefore we need to recreate the tarinfo
# object
tar.addfile(tarfile.TarInfo(name=marker.name), marker_file)
# Add the file name to the list too to avoid re-reading all files
# in tar archive
tar_files.append(normalized_file)
else:
self.log.debug(
"Skipping '%s' marker file..." % marker.name) | [
"def",
"_add_markers",
"(",
"self",
",",
"markers",
",",
"tar",
",",
"files_in_layers",
",",
"added_symlinks",
")",
":",
"if",
"markers",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"Marker files to add: %s\"",
"%",
"[",
"o",
".",
"name",
"for",
"o",
"in",
"markers",
".",
"keys",
"(",
")",
"]",
")",
"else",
":",
"# No marker files to add",
"return",
"# https://github.com/goldmann/docker-squash/issues/108",
"# Some tar archives do have the filenames prefixed with './'",
"# which does not have any effect when we unpack the tar achive,",
"# but when processing tar content - we see this.",
"tar_files",
"=",
"[",
"self",
".",
"_normalize_path",
"(",
"x",
")",
"for",
"x",
"in",
"tar",
".",
"getnames",
"(",
")",
"]",
"for",
"marker",
",",
"marker_file",
"in",
"six",
".",
"iteritems",
"(",
"markers",
")",
":",
"actual_file",
"=",
"marker",
".",
"name",
".",
"replace",
"(",
"'.wh.'",
",",
"''",
")",
"normalized_file",
"=",
"self",
".",
"_normalize_path",
"(",
"actual_file",
")",
"should_be_added_back",
"=",
"False",
"if",
"self",
".",
"_file_should_be_skipped",
"(",
"normalized_file",
",",
"added_symlinks",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"Skipping '%s' marker file, this file is on a symlink path\"",
"%",
"normalized_file",
")",
"continue",
"if",
"normalized_file",
"in",
"tar_files",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"Skipping '%s' marker file, this file was added earlier for some reason...\"",
"%",
"normalized_file",
")",
"continue",
"if",
"files_in_layers",
":",
"for",
"files",
"in",
"files_in_layers",
".",
"values",
"(",
")",
":",
"if",
"normalized_file",
"in",
"files",
":",
"should_be_added_back",
"=",
"True",
"break",
"else",
":",
"# There are no previous layers, so we need to add it back",
"# In fact this shouldn't happen since having a marker file",
"# where there is no previous layer does not make sense.",
"should_be_added_back",
"=",
"True",
"if",
"should_be_added_back",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"Adding '%s' marker file back...\"",
"%",
"marker",
".",
"name",
")",
"# Marker files on AUFS are hardlinks, we need to create",
"# regular files, therefore we need to recreate the tarinfo",
"# object",
"tar",
".",
"addfile",
"(",
"tarfile",
".",
"TarInfo",
"(",
"name",
"=",
"marker",
".",
"name",
")",
",",
"marker_file",
")",
"# Add the file name to the list too to avoid re-reading all files",
"# in tar archive",
"tar_files",
".",
"append",
"(",
"normalized_file",
")",
"else",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"Skipping '%s' marker file...\"",
"%",
"marker",
".",
"name",
")"
] | This method is responsible for adding back all markers that were not
added to the squashed layer AND files they refer to can be found in layers
we do not squash. | [
"This",
"method",
"is",
"responsible",
"for",
"adding",
"back",
"all",
"markers",
"that",
"were",
"not",
"added",
"to",
"the",
"squashed",
"layer",
"AND",
"files",
"they",
"refer",
"to",
"can",
"be",
"found",
"in",
"layers",
"we",
"do",
"not",
"squash",
"."
] | 89e0297942be268791aff2098b7ebfa50d82f8e8 | https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/image.py#L523-L582 | train |
goldmann/docker-squash | docker_squash/lib/xtarfile.py | _proc_pax | def _proc_pax(self, filetar):
"""Process an extended or global header as described in POSIX.1-2001."""
# Read the header information.
buf = filetar.fileobj.read(self._block(self.size))
# A pax header stores supplemental information for either
# the following file (extended) or all following files
# (global).
if self.type == tarfile.XGLTYPE:
pax_headers = filetar.pax_headers
else:
pax_headers = filetar.pax_headers.copy()
    # Parse pax header information. A record looks like this:
# "%d %s=%s\n" % (length, keyword, value). length is the size
# of the complete record including the length field itself and
# the newline. keyword and value are both UTF-8 encoded strings.
regex = re.compile(r"(\d+) ([^=]+)=", re.U)
pos = 0
while True:
match = regex.match(buf, pos)
if not match:
break
length, keyword = match.groups()
length = int(length)
value = buf[match.end(2) + 1:match.start(1) + length - 1]
try:
keyword = keyword.decode("utf8")
except Exception:
pass
try:
value = value.decode("utf8")
except Exception:
pass
pax_headers[keyword] = value
pos += length
# Fetch the next header.
try:
next = self.fromtarfile(filetar)
except tarfile.HeaderError:
raise tarfile.SubsequentHeaderError("missing or bad subsequent header")
if self.type in (tarfile.XHDTYPE, tarfile.SOLARIS_XHDTYPE):
# Patch the TarInfo object with the extended header info.
next._apply_pax_info(pax_headers, filetar.encoding, filetar.errors)
next.offset = self.offset
if "size" in pax_headers:
# If the extended header replaces the size field,
# we need to recalculate the offset where the next
# header starts.
offset = next.offset_data
if next.isreg() or next.type not in tarfile.SUPPORTED_TYPES:
offset += next._block(next.size)
filetar.offset = offset
return next | python | def _proc_pax(self, filetar):
"""Process an extended or global header as described in POSIX.1-2001."""
# Read the header information.
buf = filetar.fileobj.read(self._block(self.size))
# A pax header stores supplemental information for either
# the following file (extended) or all following files
# (global).
if self.type == tarfile.XGLTYPE:
pax_headers = filetar.pax_headers
else:
pax_headers = filetar.pax_headers.copy()
    # Parse pax header information. A record looks like this:
# "%d %s=%s\n" % (length, keyword, value). length is the size
# of the complete record including the length field itself and
# the newline. keyword and value are both UTF-8 encoded strings.
regex = re.compile(r"(\d+) ([^=]+)=", re.U)
pos = 0
while True:
match = regex.match(buf, pos)
if not match:
break
length, keyword = match.groups()
length = int(length)
value = buf[match.end(2) + 1:match.start(1) + length - 1]
try:
keyword = keyword.decode("utf8")
except Exception:
pass
try:
value = value.decode("utf8")
except Exception:
pass
pax_headers[keyword] = value
pos += length
# Fetch the next header.
try:
next = self.fromtarfile(filetar)
except tarfile.HeaderError:
raise tarfile.SubsequentHeaderError("missing or bad subsequent header")
if self.type in (tarfile.XHDTYPE, tarfile.SOLARIS_XHDTYPE):
# Patch the TarInfo object with the extended header info.
next._apply_pax_info(pax_headers, filetar.encoding, filetar.errors)
next.offset = self.offset
if "size" in pax_headers:
# If the extended header replaces the size field,
# we need to recalculate the offset where the next
# header starts.
offset = next.offset_data
if next.isreg() or next.type not in tarfile.SUPPORTED_TYPES:
offset += next._block(next.size)
filetar.offset = offset
return next | [
"def",
"_proc_pax",
"(",
"self",
",",
"filetar",
")",
":",
"# Read the header information.",
"buf",
"=",
"filetar",
".",
"fileobj",
".",
"read",
"(",
"self",
".",
"_block",
"(",
"self",
".",
"size",
")",
")",
"# A pax header stores supplemental information for either",
"# the following file (extended) or all following files",
"# (global).",
"if",
"self",
".",
"type",
"==",
"tarfile",
".",
"XGLTYPE",
":",
"pax_headers",
"=",
"filetar",
".",
"pax_headers",
"else",
":",
"pax_headers",
"=",
"filetar",
".",
"pax_headers",
".",
"copy",
"(",
")",
"# Parse pax header information. A record looks like that:",
"# \"%d %s=%s\\n\" % (length, keyword, value). length is the size",
"# of the complete record including the length field itself and",
"# the newline. keyword and value are both UTF-8 encoded strings.",
"regex",
"=",
"re",
".",
"compile",
"(",
"r\"(\\d+) ([^=]+)=\"",
",",
"re",
".",
"U",
")",
"pos",
"=",
"0",
"while",
"True",
":",
"match",
"=",
"regex",
".",
"match",
"(",
"buf",
",",
"pos",
")",
"if",
"not",
"match",
":",
"break",
"length",
",",
"keyword",
"=",
"match",
".",
"groups",
"(",
")",
"length",
"=",
"int",
"(",
"length",
")",
"value",
"=",
"buf",
"[",
"match",
".",
"end",
"(",
"2",
")",
"+",
"1",
":",
"match",
".",
"start",
"(",
"1",
")",
"+",
"length",
"-",
"1",
"]",
"try",
":",
"keyword",
"=",
"keyword",
".",
"decode",
"(",
"\"utf8\"",
")",
"except",
"Exception",
":",
"pass",
"try",
":",
"value",
"=",
"value",
".",
"decode",
"(",
"\"utf8\"",
")",
"except",
"Exception",
":",
"pass",
"pax_headers",
"[",
"keyword",
"]",
"=",
"value",
"pos",
"+=",
"length",
"# Fetch the next header.",
"try",
":",
"next",
"=",
"self",
".",
"fromtarfile",
"(",
"filetar",
")",
"except",
"tarfile",
".",
"HeaderError",
":",
"raise",
"tarfile",
".",
"SubsequentHeaderError",
"(",
"\"missing or bad subsequent header\"",
")",
"if",
"self",
".",
"type",
"in",
"(",
"tarfile",
".",
"XHDTYPE",
",",
"tarfile",
".",
"SOLARIS_XHDTYPE",
")",
":",
"# Patch the TarInfo object with the extended header info.",
"next",
".",
"_apply_pax_info",
"(",
"pax_headers",
",",
"filetar",
".",
"encoding",
",",
"filetar",
".",
"errors",
")",
"next",
".",
"offset",
"=",
"self",
".",
"offset",
"if",
"\"size\"",
"in",
"pax_headers",
":",
"# If the extended header replaces the size field,",
"# we need to recalculate the offset where the next",
"# header starts.",
"offset",
"=",
"next",
".",
"offset_data",
"if",
"next",
".",
"isreg",
"(",
")",
"or",
"next",
".",
"type",
"not",
"in",
"tarfile",
".",
"SUPPORTED_TYPES",
":",
"offset",
"+=",
"next",
".",
"_block",
"(",
"next",
".",
"size",
")",
"filetar",
".",
"offset",
"=",
"offset",
"return",
"next"
] | Process an extended or global header as described in POSIX.1-2001. | [
"Process",
"an",
"extended",
"or",
"global",
"header",
"as",
"described",
"in",
"POSIX",
".",
"1",
"-",
"2001",
"."
] | 89e0297942be268791aff2098b7ebfa50d82f8e8 | https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/lib/xtarfile.py#L20-L81 | train |
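
A self-contained sketch of the pax record format parsed above: each record is "%d %s=%s\n" where the length counts the whole record, its own digits included. The sample records are made up.

import re

# Two pax records in the "<length> <keyword>=<value>\n" format.
buf = "30 mtime=1434139012.345678901\n17 path=file.txt\n"

regex = re.compile(r"(\d+) ([^=]+)=")
pos = 0
headers = {}
while True:
    match = regex.match(buf, pos)
    if not match:
        break
    length = int(match.group(1))
    keyword = match.group(2)
    # The value runs from just after '=' up to the trailing newline.
    value = buf[match.end(2) + 1:match.start(1) + length - 1]
    headers[keyword] = value
    pos += length

print(headers)  # {'mtime': '1434139012.345678901', 'path': 'file.txt'}
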
goldmann/docker-squash | docker_squash/lib/xtarfile.py | _create_pax_generic_header | def _create_pax_generic_header(cls, pax_headers, type=tarfile.XHDTYPE):
"""Return a POSIX.1-2001 extended or global header sequence
that contains a list of keyword, value pairs. The values
must be unicode objects.
"""
records = []
for keyword, value in pax_headers.iteritems():
try:
keyword = keyword.encode("utf8")
except Exception:
pass
try:
value = value.encode("utf8")
except Exception:
pass
l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n'
n = p = 0
while True:
n = l + len(str(p))
if n == p:
break
p = n
records.append("%d %s=%s\n" % (p, keyword, value))
records = "".join(records)
# We use a hardcoded "././@PaxHeader" name like star does
# instead of the one that POSIX recommends.
info = {}
info["name"] = "././@PaxHeader"
info["type"] = type
info["size"] = len(records)
info["magic"] = tarfile.POSIX_MAGIC
# Create pax header + record blocks.
return cls._create_header(info, tarfile.USTAR_FORMAT) + \
cls._create_payload(records) | python | def _create_pax_generic_header(cls, pax_headers, type=tarfile.XHDTYPE):
"""Return a POSIX.1-2001 extended or global header sequence
that contains a list of keyword, value pairs. The values
must be unicode objects.
"""
records = []
for keyword, value in pax_headers.iteritems():
try:
keyword = keyword.encode("utf8")
except Exception:
pass
try:
value = value.encode("utf8")
except Exception:
pass
l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n'
n = p = 0
while True:
n = l + len(str(p))
if n == p:
break
p = n
records.append("%d %s=%s\n" % (p, keyword, value))
records = "".join(records)
# We use a hardcoded "././@PaxHeader" name like star does
# instead of the one that POSIX recommends.
info = {}
info["name"] = "././@PaxHeader"
info["type"] = type
info["size"] = len(records)
info["magic"] = tarfile.POSIX_MAGIC
# Create pax header + record blocks.
return cls._create_header(info, tarfile.USTAR_FORMAT) + \
cls._create_payload(records) | [
"def",
"_create_pax_generic_header",
"(",
"cls",
",",
"pax_headers",
",",
"type",
"=",
"tarfile",
".",
"XHDTYPE",
")",
":",
"records",
"=",
"[",
"]",
"for",
"keyword",
",",
"value",
"in",
"pax_headers",
".",
"iteritems",
"(",
")",
":",
"try",
":",
"keyword",
"=",
"keyword",
".",
"encode",
"(",
"\"utf8\"",
")",
"except",
"Exception",
":",
"pass",
"try",
":",
"value",
"=",
"value",
".",
"encode",
"(",
"\"utf8\"",
")",
"except",
"Exception",
":",
"pass",
"l",
"=",
"len",
"(",
"keyword",
")",
"+",
"len",
"(",
"value",
")",
"+",
"3",
"# ' ' + '=' + '\\n'",
"n",
"=",
"p",
"=",
"0",
"while",
"True",
":",
"n",
"=",
"l",
"+",
"len",
"(",
"str",
"(",
"p",
")",
")",
"if",
"n",
"==",
"p",
":",
"break",
"p",
"=",
"n",
"records",
".",
"append",
"(",
"\"%d %s=%s\\n\"",
"%",
"(",
"p",
",",
"keyword",
",",
"value",
")",
")",
"records",
"=",
"\"\"",
".",
"join",
"(",
"records",
")",
"# We use a hardcoded \"././@PaxHeader\" name like star does",
"# instead of the one that POSIX recommends.",
"info",
"=",
"{",
"}",
"info",
"[",
"\"name\"",
"]",
"=",
"\"././@PaxHeader\"",
"info",
"[",
"\"type\"",
"]",
"=",
"type",
"info",
"[",
"\"size\"",
"]",
"=",
"len",
"(",
"records",
")",
"info",
"[",
"\"magic\"",
"]",
"=",
"tarfile",
".",
"POSIX_MAGIC",
"# Create pax header + record blocks.",
"return",
"cls",
".",
"_create_header",
"(",
"info",
",",
"tarfile",
".",
"USTAR_FORMAT",
")",
"+",
"cls",
".",
"_create_payload",
"(",
"records",
")"
] | Return a POSIX.1-2001 extended or global header sequence
that contains a list of keyword, value pairs. The values
must be unicode objects. | [
"Return",
"a",
"POSIX",
".",
"1",
"-",
"2001",
"extended",
"or",
"global",
"header",
"sequence",
"that",
"contains",
"a",
"list",
"of",
"keyword",
"value",
"pairs",
".",
"The",
"values",
"must",
"be",
"unicode",
"objects",
"."
] | 89e0297942be268791aff2098b7ebfa50d82f8e8 | https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/lib/xtarfile.py#L84-L122 | train |
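
The while loop above solves a small self-reference: the record length must include the digits of the length field itself. A worked sketch for one keyword/value pair:

# For 'path=file.txt': l counts ' ' + keyword + '=' + value + '\n'.
keyword, value = "path", "file.txt"
l = len(keyword) + len(value) + 3   # 4 + 8 + 3 = 15
n = p = 0
while True:
    n = l + len(str(p))             # total length once the digits are included
    if n == p:
        break                       # fixed point reached
    p = n
record = "%d %s=%s\n" % (p, keyword, value)
assert len(record) == p             # '17 path=file.txt\n' is 17 bytes
print(record, end="")
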
goldmann/docker-squash | docker_squash/v2_image.py | V2Image._read_json_file | def _read_json_file(self, json_file):
""" Helper function to read JSON file as OrderedDict """
self.log.debug("Reading '%s' JSON file..." % json_file)
with open(json_file, 'r') as f:
return json.load(f, object_pairs_hook=OrderedDict) | python | def _read_json_file(self, json_file):
""" Helper function to read JSON file as OrderedDict """
self.log.debug("Reading '%s' JSON file..." % json_file)
with open(json_file, 'r') as f:
return json.load(f, object_pairs_hook=OrderedDict) | [
"def",
"_read_json_file",
"(",
"self",
",",
"json_file",
")",
":",
"self",
".",
"log",
".",
"debug",
"(",
"\"Reading '%s' JSON file...\"",
"%",
"json_file",
")",
"with",
"open",
"(",
"json_file",
",",
"'r'",
")",
"as",
"f",
":",
"return",
"json",
".",
"load",
"(",
"f",
",",
"object_pairs_hook",
"=",
"OrderedDict",
")"
] | Helper function to read JSON file as OrderedDict | [
"Helper",
"function",
"to",
"read",
"JSON",
"file",
"as",
"OrderedDict"
] | 89e0297942be268791aff2098b7ebfa50d82f8e8 | https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/v2_image.py#L122-L128 | train |
goldmann/docker-squash | docker_squash/v2_image.py | V2Image._read_layer_paths | def _read_layer_paths(self, old_image_config, old_image_manifest, layers_to_move):
"""
In case of v2 format, layer id's are not the same as the id's
used in the exported tar archive to name directories for layers.
These id's can be found in the configuration files saved with
the image - we need to read them.
"""
# In manifest.json we do not have listed all layers
# but only layers that do contain some data.
current_manifest_layer = 0
layer_paths_to_move = []
layer_paths_to_squash = []
# Iterate over image history, from base image to top layer
for i, layer in enumerate(old_image_config['history']):
# If it's not an empty layer get the id
# (directory name) where the layer's data is
# stored
if not layer.get('empty_layer', False):
layer_id = old_image_manifest['Layers'][
current_manifest_layer].rsplit('/')[0]
# Check if this layer should be moved or squashed
if len(layers_to_move) > i:
layer_paths_to_move.append(layer_id)
else:
layer_paths_to_squash.append(layer_id)
current_manifest_layer += 1
return layer_paths_to_squash, layer_paths_to_move | python | def _read_layer_paths(self, old_image_config, old_image_manifest, layers_to_move):
"""
In case of v2 format, layer id's are not the same as the id's
used in the exported tar archive to name directories for layers.
These id's can be found in the configuration files saved with
the image - we need to read them.
"""
# In manifest.json we do not have listed all layers
# but only layers that do contain some data.
current_manifest_layer = 0
layer_paths_to_move = []
layer_paths_to_squash = []
# Iterate over image history, from base image to top layer
for i, layer in enumerate(old_image_config['history']):
# If it's not an empty layer get the id
# (directory name) where the layer's data is
# stored
if not layer.get('empty_layer', False):
layer_id = old_image_manifest['Layers'][
current_manifest_layer].rsplit('/')[0]
# Check if this layer should be moved or squashed
if len(layers_to_move) > i:
layer_paths_to_move.append(layer_id)
else:
layer_paths_to_squash.append(layer_id)
current_manifest_layer += 1
return layer_paths_to_squash, layer_paths_to_move | [
"def",
"_read_layer_paths",
"(",
"self",
",",
"old_image_config",
",",
"old_image_manifest",
",",
"layers_to_move",
")",
":",
"# In manifest.json we do not have listed all layers",
"# but only layers that do contain some data.",
"current_manifest_layer",
"=",
"0",
"layer_paths_to_move",
"=",
"[",
"]",
"layer_paths_to_squash",
"=",
"[",
"]",
"# Iterate over image history, from base image to top layer",
"for",
"i",
",",
"layer",
"in",
"enumerate",
"(",
"old_image_config",
"[",
"'history'",
"]",
")",
":",
"# If it's not an empty layer get the id",
"# (directory name) where the layer's data is",
"# stored",
"if",
"not",
"layer",
".",
"get",
"(",
"'empty_layer'",
",",
"False",
")",
":",
"layer_id",
"=",
"old_image_manifest",
"[",
"'Layers'",
"]",
"[",
"current_manifest_layer",
"]",
".",
"rsplit",
"(",
"'/'",
")",
"[",
"0",
"]",
"# Check if this layer should be moved or squashed",
"if",
"len",
"(",
"layers_to_move",
")",
">",
"i",
":",
"layer_paths_to_move",
".",
"append",
"(",
"layer_id",
")",
"else",
":",
"layer_paths_to_squash",
".",
"append",
"(",
"layer_id",
")",
"current_manifest_layer",
"+=",
"1",
"return",
"layer_paths_to_squash",
",",
"layer_paths_to_move"
] | In case of v2 format, layer id's are not the same as the id's
used in the exported tar archive to name directories for layers.
These id's can be found in the configuration files saved with
the image - we need to read them. | [
"In",
"case",
"of",
"v2",
"format",
"layer",
"id",
"s",
"are",
"not",
"the",
"same",
"as",
"the",
"id",
"s",
"used",
"in",
"the",
"exported",
"tar",
"archive",
"to",
"name",
"directories",
"for",
"layers",
".",
"These",
"id",
"s",
"can",
"be",
"found",
"in",
"the",
"configuration",
"files",
"saved",
"with",
"the",
"image",
"-",
"we",
"need",
"to",
"read",
"them",
"."
] | 89e0297942be268791aff2098b7ebfa50d82f8e8 | https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/v2_image.py#L130-L163 | train |
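
A runnable sketch of the index mapping above, with made-up data: 'history' lists every layer, including empty metadata-only ones, while the manifest's 'Layers' lists only the directories of layers that contain data, so the two are walked with separate counters.

# Illustrative config history and manifest for four layers.
history = [
    {},                        # base layer, has data
    {'empty_layer': True},     # e.g. an ENV instruction, no filesystem data
    {},                        # has data
    {},                        # has data
]
layers = ['aaa/layer.tar', 'bbb/layer.tar', 'ccc/layer.tar']  # non-empty only

layers_to_move = history[:2]   # pretend the first two history entries are kept

current, to_move, to_squash = 0, [], []
for i, layer in enumerate(history):
    if not layer.get('empty_layer', False):
        layer_id = layers[current].rsplit('/')[0]
        if len(layers_to_move) > i:
            to_move.append(layer_id)
        else:
            to_squash.append(layer_id)
        current += 1

print(to_move, to_squash)  # ['aaa'] ['bbb', 'ccc']
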
goldmann/docker-squash | docker_squash/v2_image.py | V2Image._generate_squashed_layer_path_id | def _generate_squashed_layer_path_id(self):
"""
This function generates the id used to name the directory to
store the squashed layer content in the archive.
This mimics what Docker does here: https://github.com/docker/docker/blob/v1.10.0-rc1/image/v1/imagev1.go#L42
    To make it simpler we reuse the old image metadata and
    modify it to what it should look like, matching exactly
    https://github.com/docker/docker/blob/v1.10.0-rc1/image/v1/imagev1.go#L64
"""
# Using OrderedDict, because order of JSON elements is important
v1_metadata = OrderedDict(self.old_image_config)
# Update image creation date
v1_metadata['created'] = self.date
# Remove unnecessary elements
# Do not fail if key is not found
for key in 'history', 'rootfs', 'container':
v1_metadata.pop(key, None)
# Docker internally changes the order of keys between
# exported metadata (why oh why?!). We need to add 'os'
# element after 'layer_id'
operating_system = v1_metadata.pop('os', None)
# The 'layer_id' element is the chain_id of the
# squashed layer
v1_metadata['layer_id'] = "sha256:%s" % self.chain_ids[-1]
# Add back 'os' element
if operating_system:
v1_metadata['os'] = operating_system
# The 'parent' element is the name of the directory (inside the
# exported tar archive) of the last layer that we move
# (layer below squashed layer)
if self.layer_paths_to_move:
if self.layer_paths_to_squash:
parent = self.layer_paths_to_move[-1]
else:
parent = self.layer_paths_to_move[0]
v1_metadata['parent'] = "sha256:%s" % parent
# The 'Image' element is the id of the layer from which we squash
if self.squash_id:
# Update image id, should be one layer below squashed layer
v1_metadata['config']['Image'] = self.squash_id
else:
v1_metadata['config']['Image'] = ""
# Get the sha256sum of the JSON exported metadata,
# we do not care about the metadata anymore
sha = self._dump_json(v1_metadata)[1]
return sha | python | def _generate_squashed_layer_path_id(self):
"""
This function generates the id used to name the directory to
store the squashed layer content in the archive.
This mimics what Docker does here: https://github.com/docker/docker/blob/v1.10.0-rc1/image/v1/imagev1.go#L42
    To make it simpler we reuse the old image metadata and
    modify it to what it should look like, matching exactly
    https://github.com/docker/docker/blob/v1.10.0-rc1/image/v1/imagev1.go#L64
"""
# Using OrderedDict, because order of JSON elements is important
v1_metadata = OrderedDict(self.old_image_config)
# Update image creation date
v1_metadata['created'] = self.date
# Remove unnecessary elements
# Do not fail if key is not found
for key in 'history', 'rootfs', 'container':
v1_metadata.pop(key, None)
# Docker internally changes the order of keys between
# exported metadata (why oh why?!). We need to add 'os'
# element after 'layer_id'
operating_system = v1_metadata.pop('os', None)
# The 'layer_id' element is the chain_id of the
# squashed layer
v1_metadata['layer_id'] = "sha256:%s" % self.chain_ids[-1]
# Add back 'os' element
if operating_system:
v1_metadata['os'] = operating_system
# The 'parent' element is the name of the directory (inside the
# exported tar archive) of the last layer that we move
# (layer below squashed layer)
if self.layer_paths_to_move:
if self.layer_paths_to_squash:
parent = self.layer_paths_to_move[-1]
else:
parent = self.layer_paths_to_move[0]
v1_metadata['parent'] = "sha256:%s" % parent
# The 'Image' element is the id of the layer from which we squash
if self.squash_id:
# Update image id, should be one layer below squashed layer
v1_metadata['config']['Image'] = self.squash_id
else:
v1_metadata['config']['Image'] = ""
# Get the sha256sum of the JSON exported metadata,
# we do not care about the metadata anymore
sha = self._dump_json(v1_metadata)[1]
return sha | [
"def",
"_generate_squashed_layer_path_id",
"(",
"self",
")",
":",
"# Using OrderedDict, because order of JSON elements is important",
"v1_metadata",
"=",
"OrderedDict",
"(",
"self",
".",
"old_image_config",
")",
"# Update image creation date",
"v1_metadata",
"[",
"'created'",
"]",
"=",
"self",
".",
"date",
"# Remove unnecessary elements",
"# Do not fail if key is not found",
"for",
"key",
"in",
"'history'",
",",
"'rootfs'",
",",
"'container'",
":",
"v1_metadata",
".",
"pop",
"(",
"key",
",",
"None",
")",
"# Docker internally changes the order of keys between",
"# exported metadata (why oh why?!). We need to add 'os'",
"# element after 'layer_id'",
"operating_system",
"=",
"v1_metadata",
".",
"pop",
"(",
"'os'",
",",
"None",
")",
"# The 'layer_id' element is the chain_id of the",
"# squashed layer",
"v1_metadata",
"[",
"'layer_id'",
"]",
"=",
"\"sha256:%s\"",
"%",
"self",
".",
"chain_ids",
"[",
"-",
"1",
"]",
"# Add back 'os' element",
"if",
"operating_system",
":",
"v1_metadata",
"[",
"'os'",
"]",
"=",
"operating_system",
"# The 'parent' element is the name of the directory (inside the",
"# exported tar archive) of the last layer that we move",
"# (layer below squashed layer)",
"if",
"self",
".",
"layer_paths_to_move",
":",
"if",
"self",
".",
"layer_paths_to_squash",
":",
"parent",
"=",
"self",
".",
"layer_paths_to_move",
"[",
"-",
"1",
"]",
"else",
":",
"parent",
"=",
"self",
".",
"layer_paths_to_move",
"[",
"0",
"]",
"v1_metadata",
"[",
"'parent'",
"]",
"=",
"\"sha256:%s\"",
"%",
"parent",
"# The 'Image' element is the id of the layer from which we squash",
"if",
"self",
".",
"squash_id",
":",
"# Update image id, should be one layer below squashed layer",
"v1_metadata",
"[",
"'config'",
"]",
"[",
"'Image'",
"]",
"=",
"self",
".",
"squash_id",
"else",
":",
"v1_metadata",
"[",
"'config'",
"]",
"[",
"'Image'",
"]",
"=",
"\"\"",
"# Get the sha256sum of the JSON exported metadata,",
"# we do not care about the metadata anymore",
"sha",
"=",
"self",
".",
"_dump_json",
"(",
"v1_metadata",
")",
"[",
"1",
"]",
"return",
"sha"
] | This function generates the id used to name the directory to
store the squashed layer content in the archive.
This mimics what Docker does here: https://github.com/docker/docker/blob/v1.10.0-rc1/image/v1/imagev1.go#L42
To make it simpler we reuse the old image metadata and
modify it to what it should look like, matching exactly
https://github.com/docker/docker/blob/v1.10.0-rc1/image/v1/imagev1.go#L64 | [
"This",
"function",
"generates",
"the",
"id",
"used",
"to",
"name",
"the",
"directory",
"to",
"store",
"the",
"squashed",
"layer",
"content",
"in",
"the",
"archive",
"."
] | 89e0297942be268791aff2098b7ebfa50d82f8e8 | https://github.com/goldmann/docker-squash/blob/89e0297942be268791aff2098b7ebfa50d82f8e8/docker_squash/v2_image.py#L215-L273 | train |
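
_dump_json is not shown in this excerpt; presumably it serializes the metadata and returns the JSON text together with its sha256, roughly as in this hypothetical stand-in:

import hashlib
import json
from collections import OrderedDict

def dump_json(data):
    # Hypothetical stand-in for the _dump_json helper used above:
    # serialize with a stable key order, then hash the exact bytes.
    text = json.dumps(data, separators=(',', ':'))
    return text, hashlib.sha256(text.encode('utf-8')).hexdigest()

v1_metadata = OrderedDict([('created', '1970-01-01T00:00:00Z'),
                           ('layer_id', 'sha256:abc')])
print(dump_json(v1_metadata)[1])  # 64 hex digits naming the layer directory
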
django-dbbackup/django-dbbackup | dbbackup/management/commands/_base.py | BaseDbBackupCommand.write_local_file | def write_local_file(self, outputfile, path):
"""Write file to the desired path."""
self.logger.info("Writing file to %s", path)
outputfile.seek(0)
with open(path, 'wb') as fd:
copyfileobj(outputfile, fd) | python | def write_local_file(self, outputfile, path):
"""Write file to the desired path."""
self.logger.info("Writing file to %s", path)
outputfile.seek(0)
with open(path, 'wb') as fd:
copyfileobj(outputfile, fd) | [
"def",
"write_local_file",
"(",
"self",
",",
"outputfile",
",",
"path",
")",
":",
"self",
".",
"logger",
".",
"info",
"(",
"\"Writing file to %s\"",
",",
"path",
")",
"outputfile",
".",
"seek",
"(",
"0",
")",
"with",
"open",
"(",
"path",
",",
"'wb'",
")",
"as",
"fd",
":",
"copyfileobj",
"(",
"outputfile",
",",
"fd",
")"
] | Write file to the desired path. | [
"Write",
"file",
"to",
"the",
"desired",
"path",
"."
] | 77de209e2d5317e51510d0f888e085ee0c400d66 | https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/management/commands/_base.py#L94-L99 | train |
django-dbbackup/django-dbbackup | dbbackup/management/commands/_base.py | BaseDbBackupCommand._cleanup_old_backups | def _cleanup_old_backups(self, database=None, servername=None):
"""
Cleanup old backups, keeping the number of backups specified by
DBBACKUP_CLEANUP_KEEP and any backups that occur on first of the month.
"""
self.storage.clean_old_backups(encrypted=self.encrypt,
compressed=self.compress,
content_type=self.content_type,
database=database,
servername=servername) | python | def _cleanup_old_backups(self, database=None, servername=None):
"""
Cleanup old backups, keeping the number of backups specified by
DBBACKUP_CLEANUP_KEEP and any backups that occur on first of the month.
"""
self.storage.clean_old_backups(encrypted=self.encrypt,
compressed=self.compress,
content_type=self.content_type,
database=database,
servername=servername) | [
"def",
"_cleanup_old_backups",
"(",
"self",
",",
"database",
"=",
"None",
",",
"servername",
"=",
"None",
")",
":",
"self",
".",
"storage",
".",
"clean_old_backups",
"(",
"encrypted",
"=",
"self",
".",
"encrypt",
",",
"compressed",
"=",
"self",
".",
"compress",
",",
"content_type",
"=",
"self",
".",
"content_type",
",",
"database",
"=",
"database",
",",
"servername",
"=",
"servername",
")"
] | Cleanup old backups, keeping the number of backups specified by
DBBACKUP_CLEANUP_KEEP and any backups that occur on first of the month. | [
"Cleanup",
"old",
"backups",
"keeping",
"the",
"number",
"of",
"backups",
"specified",
"by",
"DBBACKUP_CLEANUP_KEEP",
"and",
"any",
"backups",
"that",
"occur",
"on",
"first",
"of",
"the",
"month",
"."
] | 77de209e2d5317e51510d0f888e085ee0c400d66 | https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/management/commands/_base.py#L123-L132 | train |
django-dbbackup/django-dbbackup | dbbackup/management/commands/dbbackup.py | Command._save_new_backup | def _save_new_backup(self, database):
"""
Save a new backup file.
"""
self.logger.info("Backing Up Database: %s", database['NAME'])
# Get backup and name
filename = self.connector.generate_filename(self.servername)
outputfile = self.connector.create_dump()
# Apply trans
if self.compress:
compressed_file, filename = utils.compress_file(outputfile, filename)
outputfile = compressed_file
if self.encrypt:
encrypted_file, filename = utils.encrypt_file(outputfile, filename)
outputfile = encrypted_file
# Set file name
filename = self.filename if self.filename else filename
self.logger.debug("Backup size: %s", utils.handle_size(outputfile))
# Store backup
outputfile.seek(0)
if self.path is None:
self.write_to_storage(outputfile, filename)
else:
self.write_local_file(outputfile, self.path) | python | def _save_new_backup(self, database):
"""
Save a new backup file.
"""
self.logger.info("Backing Up Database: %s", database['NAME'])
# Get backup and name
filename = self.connector.generate_filename(self.servername)
outputfile = self.connector.create_dump()
# Apply trans
if self.compress:
compressed_file, filename = utils.compress_file(outputfile, filename)
outputfile = compressed_file
if self.encrypt:
encrypted_file, filename = utils.encrypt_file(outputfile, filename)
outputfile = encrypted_file
# Set file name
filename = self.filename if self.filename else filename
self.logger.debug("Backup size: %s", utils.handle_size(outputfile))
# Store backup
outputfile.seek(0)
if self.path is None:
self.write_to_storage(outputfile, filename)
else:
self.write_local_file(outputfile, self.path) | [
"def",
"_save_new_backup",
"(",
"self",
",",
"database",
")",
":",
"self",
".",
"logger",
".",
"info",
"(",
"\"Backing Up Database: %s\"",
",",
"database",
"[",
"'NAME'",
"]",
")",
"# Get backup and name",
"filename",
"=",
"self",
".",
"connector",
".",
"generate_filename",
"(",
"self",
".",
"servername",
")",
"outputfile",
"=",
"self",
".",
"connector",
".",
"create_dump",
"(",
")",
"# Apply trans",
"if",
"self",
".",
"compress",
":",
"compressed_file",
",",
"filename",
"=",
"utils",
".",
"compress_file",
"(",
"outputfile",
",",
"filename",
")",
"outputfile",
"=",
"compressed_file",
"if",
"self",
".",
"encrypt",
":",
"encrypted_file",
",",
"filename",
"=",
"utils",
".",
"encrypt_file",
"(",
"outputfile",
",",
"filename",
")",
"outputfile",
"=",
"encrypted_file",
"# Set file name",
"filename",
"=",
"self",
".",
"filename",
"if",
"self",
".",
"filename",
"else",
"filename",
"self",
".",
"logger",
".",
"debug",
"(",
"\"Backup size: %s\"",
",",
"utils",
".",
"handle_size",
"(",
"outputfile",
")",
")",
"# Store backup",
"outputfile",
".",
"seek",
"(",
"0",
")",
"if",
"self",
".",
"path",
"is",
"None",
":",
"self",
".",
"write_to_storage",
"(",
"outputfile",
",",
"filename",
")",
"else",
":",
"self",
".",
"write_local_file",
"(",
"outputfile",
",",
"self",
".",
"path",
")"
] | Save a new backup file. | [
"Save",
"a",
"new",
"backup",
"file",
"."
] | 77de209e2d5317e51510d0f888e085ee0c400d66 | https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/management/commands/dbbackup.py#L67-L90 | train |
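
The transform order above matters: dump first, then compress, then encrypt, because encrypted bytes look random and would compress poorly. A condensed sketch with a stand-in encrypt step (the real one calls out to GPG via utils.encrypt_file):

import gzip
import io

def compress(fileobj, name):
    out = io.BytesIO()
    fileobj.seek(0)
    with gzip.GzipFile(fileobj=out, mode='wb') as gz:
        gz.write(fileobj.read())
    return out, name + '.gz'

def encrypt(fileobj, name):            # toy stand-in, NOT real encryption
    fileobj.seek(0)
    return io.BytesIO(fileobj.read()[::-1]), name + '.gpg'

dump, name = io.BytesIO(b'-- SQL dump --'), 'default.dump'
dump, name = compress(dump, name)      # compress before encrypting
dump, name = encrypt(dump, name)
print(name)                            # default.dump.gz.gpg
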
django-dbbackup/django-dbbackup | dbbackup/management/commands/mediabackup.py | Command._explore_storage | def _explore_storage(self):
"""Generator of all files contained in media storage."""
path = ''
dirs = [path]
while dirs:
path = dirs.pop()
subdirs, files = self.media_storage.listdir(path)
for media_filename in files:
yield os.path.join(path, media_filename)
dirs.extend([os.path.join(path, subdir) for subdir in subdirs]) | python | def _explore_storage(self):
"""Generator of all files contained in media storage."""
path = ''
dirs = [path]
while dirs:
path = dirs.pop()
subdirs, files = self.media_storage.listdir(path)
for media_filename in files:
yield os.path.join(path, media_filename)
dirs.extend([os.path.join(path, subdir) for subdir in subdirs]) | [
"def",
"_explore_storage",
"(",
"self",
")",
":",
"path",
"=",
"''",
"dirs",
"=",
"[",
"path",
"]",
"while",
"dirs",
":",
"path",
"=",
"dirs",
".",
"pop",
"(",
")",
"subdirs",
",",
"files",
"=",
"self",
".",
"media_storage",
".",
"listdir",
"(",
"path",
")",
"for",
"media_filename",
"in",
"files",
":",
"yield",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"media_filename",
")",
"dirs",
".",
"extend",
"(",
"[",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"subdir",
")",
"for",
"subdir",
"in",
"subdirs",
"]",
")"
] | Generator of all files contained in media storage. | [
"Generator",
"of",
"all",
"files",
"contained",
"in",
"media",
"storage",
"."
] | 77de209e2d5317e51510d0f888e085ee0c400d66 | https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/management/commands/mediabackup.py#L59-L68 | train |
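
Django's storage API only exposes listdir(), so the generator above does its own traversal with a stack of pending directories. A self-contained version against a fake in-memory storage:

import os

class FakeStorage:
    # In-memory stand-in for a Django storage backend (illustrative).
    tree = {'': (['img'], ['a.txt']), 'img': ([], ['logo.png'])}

    def listdir(self, path):
        return self.tree[path]

def explore(storage):
    dirs = ['']
    while dirs:                        # stack-based traversal, no recursion
        path = dirs.pop()
        subdirs, files = storage.listdir(path)
        for name in files:
            yield os.path.join(path, name)
        dirs.extend(os.path.join(path, d) for d in subdirs)

print(list(explore(FakeStorage())))    # ['a.txt', 'img/logo.png']
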
django-dbbackup/django-dbbackup | dbbackup/management/commands/mediabackup.py | Command._create_tar | def _create_tar(self, name):
"""Create TAR file."""
fileobj = utils.create_spooled_temporary_file()
mode = 'w:gz' if self.compress else 'w'
tar_file = tarfile.open(name=name, fileobj=fileobj, mode=mode)
for media_filename in self._explore_storage():
tarinfo = tarfile.TarInfo(media_filename)
media_file = self.media_storage.open(media_filename)
tarinfo.size = len(media_file)
tar_file.addfile(tarinfo, media_file)
# Close the TAR for writing
tar_file.close()
return fileobj | python | def _create_tar(self, name):
"""Create TAR file."""
fileobj = utils.create_spooled_temporary_file()
mode = 'w:gz' if self.compress else 'w'
tar_file = tarfile.open(name=name, fileobj=fileobj, mode=mode)
for media_filename in self._explore_storage():
tarinfo = tarfile.TarInfo(media_filename)
media_file = self.media_storage.open(media_filename)
tarinfo.size = len(media_file)
tar_file.addfile(tarinfo, media_file)
# Close the TAR for writing
tar_file.close()
return fileobj | [
"def",
"_create_tar",
"(",
"self",
",",
"name",
")",
":",
"fileobj",
"=",
"utils",
".",
"create_spooled_temporary_file",
"(",
")",
"mode",
"=",
"'w:gz'",
"if",
"self",
".",
"compress",
"else",
"'w'",
"tar_file",
"=",
"tarfile",
".",
"open",
"(",
"name",
"=",
"name",
",",
"fileobj",
"=",
"fileobj",
",",
"mode",
"=",
"mode",
")",
"for",
"media_filename",
"in",
"self",
".",
"_explore_storage",
"(",
")",
":",
"tarinfo",
"=",
"tarfile",
".",
"TarInfo",
"(",
"media_filename",
")",
"media_file",
"=",
"self",
".",
"media_storage",
".",
"open",
"(",
"media_filename",
")",
"tarinfo",
".",
"size",
"=",
"len",
"(",
"media_file",
")",
"tar_file",
".",
"addfile",
"(",
"tarinfo",
",",
"media_file",
")",
"# Close the TAR for writing",
"tar_file",
".",
"close",
"(",
")",
"return",
"fileobj"
] | Create TAR file. | [
"Create",
"TAR",
"file",
"."
] | 77de209e2d5317e51510d0f888e085ee0c400d66 | https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/management/commands/mediabackup.py#L70-L82 | train |
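
Two details above carry the weight: tarfile.open() can write into any file-like object via fileobj, and a streamed entry needs tarinfo.size set before addfile(). A minimal sketch:

import io
import tarfile

payload = b'hello media'
fileobj = io.BytesIO()                      # stands in for the spooled temp file
with tarfile.open(fileobj=fileobj, mode='w:gz') as tar:
    info = tarfile.TarInfo('media/hello.txt')
    info.size = len(payload)                # must be set before addfile
    tar.addfile(info, io.BytesIO(payload))

fileobj.seek(0)
with tarfile.open(fileobj=fileobj, mode='r:gz') as tar:
    print(tar.getnames())                   # ['media/hello.txt']
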
django-dbbackup/django-dbbackup | dbbackup/management/commands/mediabackup.py | Command.backup_mediafiles | def backup_mediafiles(self):
"""
Create backup file and write it to storage.
"""
# Create file name
extension = "tar%s" % ('.gz' if self.compress else '')
filename = utils.filename_generate(extension,
servername=self.servername,
content_type=self.content_type)
tarball = self._create_tar(filename)
# Apply trans
if self.encrypt:
encrypted_file = utils.encrypt_file(tarball, filename)
tarball, filename = encrypted_file
self.logger.debug("Backup size: %s", utils.handle_size(tarball))
# Store backup
tarball.seek(0)
if self.path is None:
self.write_to_storage(tarball, filename)
else:
self.write_local_file(tarball, self.path) | python | def backup_mediafiles(self):
"""
Create backup file and write it to storage.
"""
# Create file name
extension = "tar%s" % ('.gz' if self.compress else '')
filename = utils.filename_generate(extension,
servername=self.servername,
content_type=self.content_type)
tarball = self._create_tar(filename)
# Apply trans
if self.encrypt:
encrypted_file = utils.encrypt_file(tarball, filename)
tarball, filename = encrypted_file
self.logger.debug("Backup size: %s", utils.handle_size(tarball))
# Store backup
tarball.seek(0)
if self.path is None:
self.write_to_storage(tarball, filename)
else:
self.write_local_file(tarball, self.path) | [
"def",
"backup_mediafiles",
"(",
"self",
")",
":",
"# Create file name",
"extension",
"=",
"\"tar%s\"",
"%",
"(",
"'.gz'",
"if",
"self",
".",
"compress",
"else",
"''",
")",
"filename",
"=",
"utils",
".",
"filename_generate",
"(",
"extension",
",",
"servername",
"=",
"self",
".",
"servername",
",",
"content_type",
"=",
"self",
".",
"content_type",
")",
"tarball",
"=",
"self",
".",
"_create_tar",
"(",
"filename",
")",
"# Apply trans",
"if",
"self",
".",
"encrypt",
":",
"encrypted_file",
"=",
"utils",
".",
"encrypt_file",
"(",
"tarball",
",",
"filename",
")",
"tarball",
",",
"filename",
"=",
"encrypted_file",
"self",
".",
"logger",
".",
"debug",
"(",
"\"Backup size: %s\"",
",",
"utils",
".",
"handle_size",
"(",
"tarball",
")",
")",
"# Store backup",
"tarball",
".",
"seek",
"(",
"0",
")",
"if",
"self",
".",
"path",
"is",
"None",
":",
"self",
".",
"write_to_storage",
"(",
"tarball",
",",
"filename",
")",
"else",
":",
"self",
".",
"write_local_file",
"(",
"tarball",
",",
"self",
".",
"path",
")"
] | Create backup file and write it to storage. | [
"Create",
"backup",
"file",
"and",
"write",
"it",
"to",
"storage",
"."
] | 77de209e2d5317e51510d0f888e085ee0c400d66 | https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/management/commands/mediabackup.py#L84-L105 | train |
django-dbbackup/django-dbbackup | dbbackup/utils.py | bytes_to_str | def bytes_to_str(byteVal, decimals=1):
"""
Convert bytes to a human readable string.
:param byteVal: Value to convert in bytes
:type byteVal: int or float
    :param decimals: Number of decimals to display
    :type decimals: int
    :returns: Number of bytes with the best unit of measure
:rtype: str
"""
for unit, byte in BYTES:
if (byteVal >= byte):
if decimals == 0:
return '%s %s' % (int(round(byteVal / byte, 0)), unit)
return '%s %s' % (round(byteVal / byte, decimals), unit)
return '%s B' % byteVal | python | def bytes_to_str(byteVal, decimals=1):
"""
Convert bytes to a human readable string.
:param byteVal: Value to convert in bytes
:type byteVal: int or float
    :param decimals: Number of decimals to display
    :type decimals: int
    :returns: Number of bytes with the best unit of measure
:rtype: str
"""
for unit, byte in BYTES:
if (byteVal >= byte):
if decimals == 0:
return '%s %s' % (int(round(byteVal / byte, 0)), unit)
return '%s %s' % (round(byteVal / byte, decimals), unit)
return '%s B' % byteVal | [
"def",
"bytes_to_str",
"(",
"byteVal",
",",
"decimals",
"=",
"1",
")",
":",
"for",
"unit",
",",
"byte",
"in",
"BYTES",
":",
"if",
"(",
"byteVal",
">=",
"byte",
")",
":",
"if",
"decimals",
"==",
"0",
":",
"return",
"'%s %s'",
"%",
"(",
"int",
"(",
"round",
"(",
"byteVal",
"/",
"byte",
",",
"0",
")",
")",
",",
"unit",
")",
"return",
"'%s %s'",
"%",
"(",
"round",
"(",
"byteVal",
"/",
"byte",
",",
"decimals",
")",
",",
"unit",
")",
"return",
"'%s B'",
"%",
"byteVal"
] | Convert bytes to a human readable string.
:param byteVal: Value to convert in bytes
:type byteVal: int or float
:param decimals: Number of decimals to display
:type decimals: int
:returns: Number of bytes with the best unit of measure
:rtype: str | [
"Convert",
"bytes",
"to",
"a",
"human",
"readable",
"string",
"."
] | 77de209e2d5317e51510d0f888e085ee0c400d66 | https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/utils.py#L58-L76 | train |
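
The BYTES table is defined elsewhere in dbbackup/utils.py and is not shown in this excerpt. Assuming it maps unit names to sizes from largest to smallest, the loop picks the first unit the value reaches:

# Assumed shape of the BYTES table (largest unit first).
BYTES = (('PiB', 1125899906842624.0), ('TiB', 1099511627776.0),
         ('GiB', 1073741824.0), ('MiB', 1048576.0),
         ('KiB', 1024.0), ('B', 1.0))

def bytes_to_str(byteVal, decimals=1):
    for unit, byte in BYTES:
        if byteVal >= byte:
            if decimals == 0:
                return '%s %s' % (int(round(byteVal / byte, 0)), unit)
            return '%s %s' % (round(byteVal / byte, decimals), unit)
    return '%s B' % byteVal

print(bytes_to_str(2048))            # 2.0 KiB
print(bytes_to_str(3 * 1048576, 0))  # 3 MiB
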
django-dbbackup/django-dbbackup | dbbackup/utils.py | mail_admins | def mail_admins(subject, message, fail_silently=False, connection=None,
html_message=None):
"""Sends a message to the admins, as defined by the DBBACKUP_ADMINS setting."""
if not settings.ADMINS:
return
mail = EmailMultiAlternatives('%s%s' % (settings.EMAIL_SUBJECT_PREFIX, subject),
message, settings.SERVER_EMAIL, [a[1] for a in settings.ADMINS],
connection=connection)
if html_message:
mail.attach_alternative(html_message, 'text/html')
mail.send(fail_silently=fail_silently) | python | def mail_admins(subject, message, fail_silently=False, connection=None,
html_message=None):
"""Sends a message to the admins, as defined by the DBBACKUP_ADMINS setting."""
if not settings.ADMINS:
return
mail = EmailMultiAlternatives('%s%s' % (settings.EMAIL_SUBJECT_PREFIX, subject),
message, settings.SERVER_EMAIL, [a[1] for a in settings.ADMINS],
connection=connection)
if html_message:
mail.attach_alternative(html_message, 'text/html')
mail.send(fail_silently=fail_silently) | [
"def",
"mail_admins",
"(",
"subject",
",",
"message",
",",
"fail_silently",
"=",
"False",
",",
"connection",
"=",
"None",
",",
"html_message",
"=",
"None",
")",
":",
"if",
"not",
"settings",
".",
"ADMINS",
":",
"return",
"mail",
"=",
"EmailMultiAlternatives",
"(",
"'%s%s'",
"%",
"(",
"settings",
".",
"EMAIL_SUBJECT_PREFIX",
",",
"subject",
")",
",",
"message",
",",
"settings",
".",
"SERVER_EMAIL",
",",
"[",
"a",
"[",
"1",
"]",
"for",
"a",
"in",
"settings",
".",
"ADMINS",
"]",
",",
"connection",
"=",
"connection",
")",
"if",
"html_message",
":",
"mail",
".",
"attach_alternative",
"(",
"html_message",
",",
"'text/html'",
")",
"mail",
".",
"send",
"(",
"fail_silently",
"=",
"fail_silently",
")"
] | Sends a message to the admins, as defined by the DBBACKUP_ADMINS setting. | [
"Sends",
"a",
"message",
"to",
"the",
"admins",
"as",
"defined",
"by",
"the",
"DBBACKUP_ADMINS",
"setting",
"."
] | 77de209e2d5317e51510d0f888e085ee0c400d66 | https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/utils.py#L93-L103 | train |
django-dbbackup/django-dbbackup | dbbackup/utils.py | create_spooled_temporary_file | def create_spooled_temporary_file(filepath=None, fileobj=None):
"""
    Create a spooled temporary file. If ``filepath`` or ``fileobj`` is
    defined, its content will be copied into the temporary file.
:param filepath: Path of input file
:type filepath: str
:param fileobj: Input file object
:type fileobj: file
:returns: Spooled temporary file
:rtype: :class:`tempfile.SpooledTemporaryFile`
"""
spooled_file = tempfile.SpooledTemporaryFile(
max_size=settings.TMP_FILE_MAX_SIZE,
dir=settings.TMP_DIR)
if filepath:
fileobj = open(filepath, 'r+b')
if fileobj is not None:
fileobj.seek(0)
copyfileobj(fileobj, spooled_file, settings.TMP_FILE_READ_SIZE)
return spooled_file | python | def create_spooled_temporary_file(filepath=None, fileobj=None):
"""
    Create a spooled temporary file. If ``filepath`` or ``fileobj`` is
    defined, its content will be copied into the temporary file.
:param filepath: Path of input file
:type filepath: str
:param fileobj: Input file object
:type fileobj: file
:returns: Spooled temporary file
:rtype: :class:`tempfile.SpooledTemporaryFile`
"""
spooled_file = tempfile.SpooledTemporaryFile(
max_size=settings.TMP_FILE_MAX_SIZE,
dir=settings.TMP_DIR)
if filepath:
fileobj = open(filepath, 'r+b')
if fileobj is not None:
fileobj.seek(0)
copyfileobj(fileobj, spooled_file, settings.TMP_FILE_READ_SIZE)
return spooled_file | [
"def",
"create_spooled_temporary_file",
"(",
"filepath",
"=",
"None",
",",
"fileobj",
"=",
"None",
")",
":",
"spooled_file",
"=",
"tempfile",
".",
"SpooledTemporaryFile",
"(",
"max_size",
"=",
"settings",
".",
"TMP_FILE_MAX_SIZE",
",",
"dir",
"=",
"settings",
".",
"TMP_DIR",
")",
"if",
"filepath",
":",
"fileobj",
"=",
"open",
"(",
"filepath",
",",
"'r+b'",
")",
"if",
"fileobj",
"is",
"not",
"None",
":",
"fileobj",
".",
"seek",
"(",
"0",
")",
"copyfileobj",
"(",
"fileobj",
",",
"spooled_file",
",",
"settings",
".",
"TMP_FILE_READ_SIZE",
")",
"return",
"spooled_file"
] | Create a spooled temporary file. If ``filepath`` or ``fileobj`` is
defined, its content will be copied into the temporary file.
:param filepath: Path of input file
:type filepath: str
:param fileobj: Input file object
:type fileobj: file
:returns: Spooled temporary file
:rtype: :class:`tempfile.SpooledTemporaryFile` | [
"Create",
"a",
"spooled",
"temporary",
"file",
".",
"if",
"filepath",
"or",
"fileobj",
"is",
"defined",
"its",
"content",
"will",
"be",
"copied",
"into",
"temporary",
"file",
"."
] | 77de209e2d5317e51510d0f888e085ee0c400d66 | https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/utils.py#L129-L151 | train |
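
A SpooledTemporaryFile stays in memory until it grows past max_size, then transparently rolls over to a real temporary file, which is why it suits backups of unknown size. A small sketch (_rolled is a private attribute, read here only for illustration):

import tempfile

spooled = tempfile.SpooledTemporaryFile(max_size=10)
spooled.write(b'small')
print(spooled._rolled)        # False: still an in-memory buffer
spooled.write(b' but now too large')
print(spooled._rolled)        # True: spilled to an on-disk temp file
spooled.seek(0)
print(spooled.read())         # b'small but now too large'
spooled.close()
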
django-dbbackup/django-dbbackup | dbbackup/utils.py | compress_file | def compress_file(inputfile, filename):
"""
Compress input file using gzip and change its name.
:param inputfile: File to compress
:type inputfile: ``file`` like object
:param filename: File's name
:type filename: ``str``
:returns: Tuple with compressed file and new file's name
:rtype: :class:`tempfile.SpooledTemporaryFile`, ``str``
"""
outputfile = create_spooled_temporary_file()
new_filename = filename + '.gz'
zipfile = gzip.GzipFile(filename=filename, fileobj=outputfile, mode="wb")
try:
inputfile.seek(0)
copyfileobj(inputfile, zipfile, settings.TMP_FILE_READ_SIZE)
finally:
zipfile.close()
return outputfile, new_filename | python | def compress_file(inputfile, filename):
"""
Compress input file using gzip and change its name.
:param inputfile: File to compress
:type inputfile: ``file`` like object
:param filename: File's name
:type filename: ``str``
:returns: Tuple with compressed file and new file's name
:rtype: :class:`tempfile.SpooledTemporaryFile`, ``str``
"""
outputfile = create_spooled_temporary_file()
new_filename = filename + '.gz'
zipfile = gzip.GzipFile(filename=filename, fileobj=outputfile, mode="wb")
try:
inputfile.seek(0)
copyfileobj(inputfile, zipfile, settings.TMP_FILE_READ_SIZE)
finally:
zipfile.close()
return outputfile, new_filename | [
"def",
"compress_file",
"(",
"inputfile",
",",
"filename",
")",
":",
"outputfile",
"=",
"create_spooled_temporary_file",
"(",
")",
"new_filename",
"=",
"filename",
"+",
"'.gz'",
"zipfile",
"=",
"gzip",
".",
"GzipFile",
"(",
"filename",
"=",
"filename",
",",
"fileobj",
"=",
"outputfile",
",",
"mode",
"=",
"\"wb\"",
")",
"try",
":",
"inputfile",
".",
"seek",
"(",
"0",
")",
"copyfileobj",
"(",
"inputfile",
",",
"zipfile",
",",
"settings",
".",
"TMP_FILE_READ_SIZE",
")",
"finally",
":",
"zipfile",
".",
"close",
"(",
")",
"return",
"outputfile",
",",
"new_filename"
] | Compress input file using gzip and change its name.
:param inputfile: File to compress
:type inputfile: ``file`` like object
:param filename: File's name
:type filename: ``str``
:returns: Tuple with compressed file and new file's name
:rtype: :class:`tempfile.SpooledTemporaryFile`, ``str`` | [
"Compress",
"input",
"file",
"using",
"gzip",
"and",
"change",
"its",
"name",
"."
] | 77de209e2d5317e51510d0f888e085ee0c400d66 | https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/utils.py#L234-L255 | train |
django-dbbackup/django-dbbackup | dbbackup/utils.py | uncompress_file | def uncompress_file(inputfile, filename):
"""
Uncompress this file using gzip and change its name.
    :param inputfile: File to uncompress
:type inputfile: ``file`` like object
:param filename: File's name
:type filename: ``str``
:returns: Tuple with file and new file's name
:rtype: :class:`tempfile.SpooledTemporaryFile`, ``str``
"""
zipfile = gzip.GzipFile(fileobj=inputfile, mode="rb")
try:
outputfile = create_spooled_temporary_file(fileobj=zipfile)
finally:
zipfile.close()
new_basename = os.path.basename(filename).replace('.gz', '')
return outputfile, new_basename | python | def uncompress_file(inputfile, filename):
"""
Uncompress this file using gzip and change its name.
    :param inputfile: File to uncompress
:type inputfile: ``file`` like object
:param filename: File's name
:type filename: ``str``
:returns: Tuple with file and new file's name
:rtype: :class:`tempfile.SpooledTemporaryFile`, ``str``
"""
zipfile = gzip.GzipFile(fileobj=inputfile, mode="rb")
try:
outputfile = create_spooled_temporary_file(fileobj=zipfile)
finally:
zipfile.close()
new_basename = os.path.basename(filename).replace('.gz', '')
return outputfile, new_basename | [
"def",
"uncompress_file",
"(",
"inputfile",
",",
"filename",
")",
":",
"zipfile",
"=",
"gzip",
".",
"GzipFile",
"(",
"fileobj",
"=",
"inputfile",
",",
"mode",
"=",
"\"rb\"",
")",
"try",
":",
"outputfile",
"=",
"create_spooled_temporary_file",
"(",
"fileobj",
"=",
"zipfile",
")",
"finally",
":",
"zipfile",
".",
"close",
"(",
")",
"new_basename",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"filename",
")",
".",
"replace",
"(",
"'.gz'",
",",
"''",
")",
"return",
"outputfile",
",",
"new_basename"
] | Uncompress this file using gzip and change its name.
:param inputfile: File to uncompress
:type inputfile: ``file`` like object
:param filename: File's name
:type filename: ``str``
:returns: Tuple with file and new file's name
:rtype: :class:`tempfile.SpooledTemporaryFile`, ``str`` | [
"Uncompress",
"this",
"file",
"using",
"gzip",
"and",
"change",
"its",
"name",
"."
] | 77de209e2d5317e51510d0f888e085ee0c400d66 | https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/utils.py#L258-L277 | train |
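
compress_file and uncompress_file above are inverses of each other. A roundtrip sketch with BytesIO standing in for the spooled temporary files:

import gzip
import io
import os

original = io.BytesIO(b'backup payload')
name = 'default.dump'

# Compress (mirrors compress_file): gzip-wrap into a new file object.
compressed = io.BytesIO()
with gzip.GzipFile(filename=name, fileobj=compressed, mode='wb') as gz:
    original.seek(0)
    gz.write(original.read())
new_name = name + '.gz'

# Uncompress (mirrors uncompress_file): inflate and strip the suffix.
compressed.seek(0)
with gzip.GzipFile(fileobj=compressed, mode='rb') as gz:
    restored = io.BytesIO(gz.read())
restored_name = os.path.basename(new_name).replace('.gz', '')

assert restored.getvalue() == b'backup payload' and restored_name == name
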
django-dbbackup/django-dbbackup | dbbackup/utils.py | timestamp | def timestamp(value):
"""
Return the timestamp of a datetime.datetime object.
:param value: a datetime object
:type value: datetime.datetime
:return: the timestamp
:rtype: str
"""
value = value if timezone.is_naive(value) else timezone.localtime(value)
return value.strftime(settings.DATE_FORMAT) | python | def timestamp(value):
"""
Return the timestamp of a datetime.datetime object.
:param value: a datetime object
:type value: datetime.datetime
:return: the timestamp
:rtype: str
"""
value = value if timezone.is_naive(value) else timezone.localtime(value)
return value.strftime(settings.DATE_FORMAT) | [
"def",
"timestamp",
"(",
"value",
")",
":",
"value",
"=",
"value",
"if",
"timezone",
".",
"is_naive",
"(",
"value",
")",
"else",
"timezone",
".",
"localtime",
"(",
"value",
")",
"return",
"value",
".",
"strftime",
"(",
"settings",
".",
"DATE_FORMAT",
")"
] | Return the timestamp of a datetime.datetime object.
:param value: a datetime object
:type value: datetime.datetime
:return: the timestamp
:rtype: str | [
"Return",
"the",
"timestamp",
"of",
"a",
"datetime",
".",
"datetime",
"object",
"."
] | 77de209e2d5317e51510d0f888e085ee0c400d66 | https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/utils.py#L280-L291 | train |
django-dbbackup/django-dbbackup | dbbackup/utils.py | datefmt_to_regex | def datefmt_to_regex(datefmt):
"""
Convert a strftime format string to a regex.
:param datefmt: strftime format string
:type datefmt: ``str``
:returns: Equivalent regex
    :rtype: ``re.compile``
"""
new_string = datefmt
for pat, reg in PATTERN_MATCHNG:
new_string = new_string.replace(pat, reg)
return re.compile(r'(%s)' % new_string) | python | def datefmt_to_regex(datefmt):
"""
Convert a strftime format string to a regex.
:param datefmt: strftime format string
:type datefmt: ``str``
:returns: Equivalent regex
    :rtype: ``re.compile``
"""
new_string = datefmt
for pat, reg in PATTERN_MATCHNG:
new_string = new_string.replace(pat, reg)
return re.compile(r'(%s)' % new_string) | [
"def",
"datefmt_to_regex",
"(",
"datefmt",
")",
":",
"new_string",
"=",
"datefmt",
"for",
"pat",
",",
"reg",
"in",
"PATTERN_MATCHNG",
":",
"new_string",
"=",
"new_string",
".",
"replace",
"(",
"pat",
",",
"reg",
")",
"return",
"re",
".",
"compile",
"(",
"r'(%s)'",
"%",
"new_string",
")"
] | Convert a strftime format string to a regex.
:param datefmt: strftime format string
:type datefmt: ``str``
:returns: Equivalent regex
:rtype: ``re.compile`` | [
"Convert",
"a",
"strftime",
"format",
"string",
"to",
"a",
"regex",
"."
] | 77de209e2d5317e51510d0f888e085ee0c400d66 | https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/utils.py#L327-L340 | train |
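
PATTERN_MATCHNG is defined elsewhere in dbbackup/utils.py and is not shown here. Assuming it maps strftime directives to character classes, the substitution behaves like this sketch:

import re

# Assumed directive-to-regex table (illustrative values).
PATTERN_MATCHNG = (('%Y', r'[0-9]{4}'), ('%m', r'[0-9]{2}'),
                   ('%d', r'[0-9]{2}'), ('%H', r'[0-9]{2}'),
                   ('%M', r'[0-9]{2}'), ('%S', r'[0-9]{2}'))

def datefmt_to_regex(datefmt):
    new_string = datefmt
    for pat, reg in PATTERN_MATCHNG:
        new_string = new_string.replace(pat, reg)
    return re.compile(r'(%s)' % new_string)

rx = datefmt_to_regex('%Y-%m-%d-%H%M%S')
print(rx.search('default-2024-01-31-120000.dump').group(1))
# -> 2024-01-31-120000
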
django-dbbackup/django-dbbackup | dbbackup/utils.py | filename_to_date | def filename_to_date(filename, datefmt=None):
"""
Return a datetime from a file name.
:param datefmt: strftime format string, ``settings.DATE_FORMAT`` is used
    if it is ``None``
:type datefmt: ``str`` or ``NoneType``
:returns: Date guessed or nothing if no date found
:rtype: ``datetime.datetime`` or ``NoneType``
"""
datefmt = datefmt or settings.DATE_FORMAT
datestring = filename_to_datestring(filename, datefmt)
if datestring is not None:
return datetime.strptime(datestring, datefmt) | python | def filename_to_date(filename, datefmt=None):
"""
Return a datetime from a file name.
:param datefmt: strftime format string, ``settings.DATE_FORMAT`` is used
    if it is ``None``
:type datefmt: ``str`` or ``NoneType``
:returns: Date guessed or nothing if no date found
:rtype: ``datetime.datetime`` or ``NoneType``
"""
datefmt = datefmt or settings.DATE_FORMAT
datestring = filename_to_datestring(filename, datefmt)
if datestring is not None:
return datetime.strptime(datestring, datefmt) | [
"def",
"filename_to_date",
"(",
"filename",
",",
"datefmt",
"=",
"None",
")",
":",
"datefmt",
"=",
"datefmt",
"or",
"settings",
".",
"DATE_FORMAT",
"datestring",
"=",
"filename_to_datestring",
"(",
"filename",
",",
"datefmt",
")",
"if",
"datestring",
"is",
"not",
"None",
":",
"return",
"datetime",
".",
"strptime",
"(",
"datestring",
",",
"datefmt",
")"
] | Return a datetime from a file name.
:param datefmt: strftime format string, ``settings.DATE_FORMAT`` is used
if it is ``None``
:type datefmt: ``str`` or ``NoneType``
:returns: Date guessed or nothing if no date found
:rtype: ``datetime.datetime`` or ``NoneType`` | [
"Return",
"a",
"datetime",
"from",
"a",
"file",
"name",
"."
] | 77de209e2d5317e51510d0f888e085ee0c400d66 | https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/utils.py#L361-L375 | train |
django-dbbackup/django-dbbackup | dbbackup/utils.py | filename_generate | def filename_generate(extension, database_name='', servername=None, content_type='db', wildcard=None):
"""
Create a new backup filename.
:param extension: Extension of backup file
:type extension: ``str``
:param database_name: If it is database backup specify its name
:type database_name: ``str``
:param servername: Specify server name or by default ``settings.DBBACKUP_HOSTNAME``
:type servername: ``str``
:param content_type: Content type to backup, ``'media'`` or ``'db'``
:type content_type: ``str``
    :param wildcard: Replace datetime with this wildcard regex
    :type wildcard: ``str``
    :returns: Computed file name
    :rtype: ``str``
"""
if content_type == 'db':
if '/' in database_name:
database_name = os.path.basename(database_name)
if '.' in database_name:
database_name = database_name.split('.')[0]
template = settings.FILENAME_TEMPLATE
elif content_type == 'media':
template = settings.MEDIA_FILENAME_TEMPLATE
else:
template = settings.FILENAME_TEMPLATE
params = {
'servername': servername or settings.HOSTNAME,
'datetime': wildcard or datetime.now().strftime(settings.DATE_FORMAT),
'databasename': database_name,
'extension': extension,
'content_type': content_type
}
if callable(template):
filename = template(**params)
else:
filename = template.format(**params)
filename = REG_FILENAME_CLEAN.sub('-', filename)
filename = filename[1:] if filename.startswith('-') else filename
return filename | python | def filename_generate(extension, database_name='', servername=None, content_type='db', wildcard=None):
"""
Create a new backup filename.
:param extension: Extension of backup file
:type extension: ``str``
:param database_name: If it is database backup specify its name
:type database_name: ``str``
:param servername: Specify server name or by default ``settings.DBBACKUP_HOSTNAME``
:type servername: ``str``
:param content_type: Content type to backup, ``'media'`` or ``'db'``
:type content_type: ``str``
    :param wildcard: Replace datetime with this wildcard regex
    :type wildcard: ``str``
    :returns: Computed file name
    :rtype: ``str``
"""
if content_type == 'db':
if '/' in database_name:
database_name = os.path.basename(database_name)
if '.' in database_name:
database_name = database_name.split('.')[0]
template = settings.FILENAME_TEMPLATE
elif content_type == 'media':
template = settings.MEDIA_FILENAME_TEMPLATE
else:
template = settings.FILENAME_TEMPLATE
params = {
'servername': servername or settings.HOSTNAME,
'datetime': wildcard or datetime.now().strftime(settings.DATE_FORMAT),
'databasename': database_name,
'extension': extension,
'content_type': content_type
}
if callable(template):
filename = template(**params)
else:
filename = template.format(**params)
filename = REG_FILENAME_CLEAN.sub('-', filename)
filename = filename[1:] if filename.startswith('-') else filename
return filename | [
"def",
"filename_generate",
"(",
"extension",
",",
"database_name",
"=",
"''",
",",
"servername",
"=",
"None",
",",
"content_type",
"=",
"'db'",
",",
"wildcard",
"=",
"None",
")",
":",
"if",
"content_type",
"==",
"'db'",
":",
"if",
"'/'",
"in",
"database_name",
":",
"database_name",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"database_name",
")",
"if",
"'.'",
"in",
"database_name",
":",
"database_name",
"=",
"database_name",
".",
"split",
"(",
"'.'",
")",
"[",
"0",
"]",
"template",
"=",
"settings",
".",
"FILENAME_TEMPLATE",
"elif",
"content_type",
"==",
"'media'",
":",
"template",
"=",
"settings",
".",
"MEDIA_FILENAME_TEMPLATE",
"else",
":",
"template",
"=",
"settings",
".",
"FILENAME_TEMPLATE",
"params",
"=",
"{",
"'servername'",
":",
"servername",
"or",
"settings",
".",
"HOSTNAME",
",",
"'datetime'",
":",
"wildcard",
"or",
"datetime",
".",
"now",
"(",
")",
".",
"strftime",
"(",
"settings",
".",
"DATE_FORMAT",
")",
",",
"'databasename'",
":",
"database_name",
",",
"'extension'",
":",
"extension",
",",
"'content_type'",
":",
"content_type",
"}",
"if",
"callable",
"(",
"template",
")",
":",
"filename",
"=",
"template",
"(",
"*",
"*",
"params",
")",
"else",
":",
"filename",
"=",
"template",
".",
"format",
"(",
"*",
"*",
"params",
")",
"filename",
"=",
"REG_FILENAME_CLEAN",
".",
"sub",
"(",
"'-'",
",",
"filename",
")",
"filename",
"=",
"filename",
"[",
"1",
":",
"]",
"if",
"filename",
".",
"startswith",
"(",
"'-'",
")",
"else",
"filename",
"return",
"filename"
] | Create a new backup filename.
:param extension: Extension of backup file
:type extension: ``str``
:param database_name: If it is database backup specify its name
:type database_name: ``str``
:param servername: Specify server name or by default ``settings.DBBACKUP_HOSTNAME``
:type servername: ``str``
:param content_type: Content type to backup, ``'media'`` or ``'db'``
:type content_type: ``str``
:param wildcard: Replace datetime with this wildcard regex
:type wildcard: ``str``
:returns: Computed file name
:rtype: ``str`` | [
"Create",
"a",
"new",
"backup",
"filename",
"."
] | 77de209e2d5317e51510d0f888e085ee0c400d66 | https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/utils.py#L378-L424 | train |
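
The live template and date format come from dbbackup settings and are not shown in this excerpt. Assuming the documented defaults below, str.format fills the template as follows:

import re
from datetime import datetime

# Assumed defaults; the real values come from dbbackup settings.
FILENAME_TEMPLATE = '{databasename}-{servername}-{datetime}.{extension}'
DATE_FORMAT = '%Y-%m-%d-%H%M%S'
REG_FILENAME_CLEAN = re.compile(r'-+')

params = {
    'servername': 'web1.example.com',
    'datetime': datetime(2024, 1, 31, 12, 0, 0).strftime(DATE_FORMAT),
    'databasename': 'default',
    'extension': 'dump',
    'content_type': 'db',
}
filename = FILENAME_TEMPLATE.format(**params)
filename = REG_FILENAME_CLEAN.sub('-', filename)   # collapse runs of '-'
print(filename)   # default-web1.example.com-2024-01-31-120000.dump
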
django-dbbackup/django-dbbackup | dbbackup/storage.py | get_storage | def get_storage(path=None, options=None):
"""
Get the specified storage configured with options.
:param path: Path in Python dot style to module containing the storage
class. If empty settings.DBBACKUP_STORAGE will be used.
:type path: ``str``
:param options: Parameters to configure the storage, if empty
settings.DBBACKUP_STORAGE_OPTIONS will be used.
:type options: ``dict``
:return: Storage configured
:rtype: :class:`.Storage`
"""
path = path or settings.STORAGE
options = options or settings.STORAGE_OPTIONS
if not path:
raise ImproperlyConfigured('You must specify a storage class using '
'DBBACKUP_STORAGE settings.')
return Storage(path, **options) | python | def get_storage(path=None, options=None):
"""
Get the specified storage configured with options.
:param path: Path in Python dot style to module containing the storage
class. If empty settings.DBBACKUP_STORAGE will be used.
:type path: ``str``
:param options: Parameters to configure the storage, if empty
settings.DBBACKUP_STORAGE_OPTIONS will be used.
:type options: ``dict``
:return: Storage configured
:rtype: :class:`.Storage`
"""
path = path or settings.STORAGE
options = options or settings.STORAGE_OPTIONS
if not path:
raise ImproperlyConfigured('You must specify a storage class using '
'DBBACKUP_STORAGE settings.')
return Storage(path, **options) | [
"def",
"get_storage",
"(",
"path",
"=",
"None",
",",
"options",
"=",
"None",
")",
":",
"path",
"=",
"path",
"or",
"settings",
".",
"STORAGE",
"options",
"=",
"options",
"or",
"settings",
".",
"STORAGE_OPTIONS",
"if",
"not",
"path",
":",
"raise",
"ImproperlyConfigured",
"(",
"'You must specify a storage class using '",
"'DBBACKUP_STORAGE settings.'",
")",
"return",
"Storage",
"(",
"path",
",",
"*",
"*",
"options",
")"
] | Get the specified storage configured with options.
:param path: Path in Python dot style to module containing the storage
class. If empty settings.DBBACKUP_STORAGE will be used.
:type path: ``str``
:param options: Parameters to configure the storage, if empty
settings.DBBACKUP_STORAGE_OPTIONS will be used.
:type options: ``dict``
:return: Storage configured
:rtype: :class:`.Storage` | [
"Get",
"the",
"specified",
"storage",
"configured",
"with",
"options",
"."
] | 77de209e2d5317e51510d0f888e085ee0c400d66 | https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/storage.py#L10-L30 | train |
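A minimal sketch of `get_storage`; the dotted path and options dict below are illustrative assumptions, since any storage class path accepted by the `Storage` wrapper works and the option keys are whatever that class's constructor takes.

```python
# Hypothetical usage of get_storage (dbbackup/storage.py).
from dbbackup.storage import get_storage

# With no arguments, DBBACKUP_STORAGE / DBBACKUP_STORAGE_OPTIONS apply.
storage = get_storage()

# Or pass an explicit dotted path plus constructor options (illustrative values).
storage = get_storage(
    'django.core.files.storage.FileSystemStorage',
    {'location': '/var/backups'},
)
```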
django-dbbackup/django-dbbackup | dbbackup/storage.py | Storage.list_backups | def list_backups(self, encrypted=None, compressed=None, content_type=None,
database=None, servername=None):
"""
List stored files matching given filter. If filter is None, it won't be
used. ``content_type`` must be ``'db'`` for database backups or
``'media'`` for media backups.
:param encrypted: Filter by encrypted or not
:type encrypted: ``bool`` or ``None``
:param compressed: Filter by compressed or not
:type compressed: ``bool`` or ``None``
:param content_type: Filter by media or database backup, must be
``'db'`` or ``'media'``
:type content_type: ``str`` or ``None``
:param database: Filter by source database's name
:type: ``str`` or ``None``
:param servername: Filter by source server's name
:type: ``str`` or ``None``
:returns: List of files
:rtype: ``list`` of ``str``
"""
if content_type not in ('db', 'media', None):
msg = "Bad content_type %s, must be 'db', 'media', or None" % (
content_type)
raise TypeError(msg)
# TODO: Make better filter for include only backups
files = [f for f in self.list_directory() if utils.filename_to_datestring(f)]
if encrypted is not None:
files = [f for f in files if ('.gpg' in f) == encrypted]
if compressed is not None:
files = [f for f in files if ('.gz' in f) == compressed]
if content_type == 'media':
files = [f for f in files if '.tar' in f]
elif content_type == 'db':
files = [f for f in files if '.tar' not in f]
if database:
files = [f for f in files if database in f]
if servername:
files = [f for f in files if servername in f]
return files | python | def list_backups(self, encrypted=None, compressed=None, content_type=None,
database=None, servername=None):
"""
List stored files matching given filter. If filter is None, it won't be
used. ``content_type`` must be ``'db'`` for database backups or
``'media'`` for media backups.
:param encrypted: Filter by encrypted or not
:type encrypted: ``bool`` or ``None``
:param compressed: Filter by compressed or not
:type compressed: ``bool`` or ``None``
:param content_type: Filter by media or database backup, must be
``'db'`` or ``'media'``
:type content_type: ``str`` or ``None``
:param database: Filter by source database's name
:type: ``str`` or ``None``
:param servername: Filter by source server's name
:type: ``str`` or ``None``
:returns: List of files
:rtype: ``list`` of ``str``
"""
if content_type not in ('db', 'media', None):
msg = "Bad content_type %s, must be 'db', 'media', or None" % (
content_type)
raise TypeError(msg)
# TODO: Make better filter for include only backups
files = [f for f in self.list_directory() if utils.filename_to_datestring(f)]
if encrypted is not None:
files = [f for f in files if ('.gpg' in f) == encrypted]
if compressed is not None:
files = [f for f in files if ('.gz' in f) == compressed]
if content_type == 'media':
files = [f for f in files if '.tar' in f]
elif content_type == 'db':
files = [f for f in files if '.tar' not in f]
if database:
files = [f for f in files if database in f]
if servername:
files = [f for f in files if servername in f]
return files | [
"def",
"list_backups",
"(",
"self",
",",
"encrypted",
"=",
"None",
",",
"compressed",
"=",
"None",
",",
"content_type",
"=",
"None",
",",
"database",
"=",
"None",
",",
"servername",
"=",
"None",
")",
":",
"if",
"content_type",
"not",
"in",
"(",
"'db'",
",",
"'media'",
",",
"None",
")",
":",
"msg",
"=",
"\"Bad content_type %s, must be 'db', 'media', or None\"",
"%",
"(",
"content_type",
")",
"raise",
"TypeError",
"(",
"msg",
")",
"# TODO: Make better filter for include only backups",
"files",
"=",
"[",
"f",
"for",
"f",
"in",
"self",
".",
"list_directory",
"(",
")",
"if",
"utils",
".",
"filename_to_datestring",
"(",
"f",
")",
"]",
"if",
"encrypted",
"is",
"not",
"None",
":",
"files",
"=",
"[",
"f",
"for",
"f",
"in",
"files",
"if",
"(",
"'.gpg'",
"in",
"f",
")",
"==",
"encrypted",
"]",
"if",
"compressed",
"is",
"not",
"None",
":",
"files",
"=",
"[",
"f",
"for",
"f",
"in",
"files",
"if",
"(",
"'.gz'",
"in",
"f",
")",
"==",
"compressed",
"]",
"if",
"content_type",
"==",
"'media'",
":",
"files",
"=",
"[",
"f",
"for",
"f",
"in",
"files",
"if",
"'.tar'",
"in",
"f",
"]",
"elif",
"content_type",
"==",
"'db'",
":",
"files",
"=",
"[",
"f",
"for",
"f",
"in",
"files",
"if",
"'.tar'",
"not",
"in",
"f",
"]",
"if",
"database",
":",
"files",
"=",
"[",
"f",
"for",
"f",
"in",
"files",
"if",
"database",
"in",
"f",
"]",
"if",
"servername",
":",
"files",
"=",
"[",
"f",
"for",
"f",
"in",
"files",
"if",
"servername",
"in",
"f",
"]",
"return",
"files"
] | List stored files matching given filter. If filter is None, it won't be
used. ``content_type`` must be ``'db'`` for database backups or
``'media'`` for media backups.
:param encrypted: Filter by encrypted or not
:type encrypted: ``bool`` or ``None``
:param compressed: Filter by compressed or not
:type compressed: ``bool`` or ``None``
:param content_type: Filter by media or database backup, must be
``'db'`` or ``'media'``
:type content_type: ``str`` or ``None``
:param database: Filter by source database's name
:type: ``str`` or ``None``
:param servername: Filter by source server's name
:type: ``str`` or ``None``
:returns: List of files
:rtype: ``list`` of ``str`` | [
"List",
"stored",
"files",
"except",
"given",
"filter",
".",
"If",
"filter",
"is",
"None",
"it",
"won",
"t",
"be",
"used",
".",
"content_type",
"must",
"be",
"db",
"for",
"database",
"backups",
"or",
"media",
"for",
"media",
"backups",
"."
] | 77de209e2d5317e51510d0f888e085ee0c400d66 | https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/storage.py#L91-L136 | train |
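Example filter combinations, assuming `storage` came from `get_storage()`. Note the filters are substring checks on the filename (`.gpg`, `.gz`, `.tar`), so a compressed, unencrypted database dump matches `compressed=True, encrypted=False, content_type='db'`.

```python
# Hypothetical filter combinations for Storage.list_backups.
from dbbackup.storage import get_storage

storage = get_storage()  # assumes dbbackup settings are configured

# Unencrypted, compressed database dumps for the 'default' alias:
db_files = storage.list_backups(
    encrypted=False, compressed=True,
    content_type='db', database='default',
)

# All media archives produced on a given server (illustrative name):
media_files = storage.list_backups(content_type='media', servername='web1')
```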
django-dbbackup/django-dbbackup | dbbackup/storage.py | Storage.get_older_backup | def get_older_backup(self, encrypted=None, compressed=None,
content_type=None, database=None, servername=None):
"""
Return the older backup's file name.
:param encrypted: Filter by encrypted or not
:type encrypted: ``bool`` or ``None``
:param compressed: Filter by compressed or not
:type compressed: ``bool`` or ``None``
:param content_type: Filter by media or database backup, must be
``'db'`` or ``'media'``
:type content_type: ``str`` or ``None``
:param database: Filter by source database's name
:type: ``str`` or ``None``
:param servername: Filter by source server's name
:type: ``str`` or ``None``
:returns: Older file
:rtype: ``str``
:raises: FileNotFound: If no backup file is found
"""
files = self.list_backups(encrypted=encrypted, compressed=compressed,
content_type=content_type, database=database,
servername=servername)
if not files:
raise FileNotFound("There's no backup file available.")
return min(files, key=utils.filename_to_date) | python | def get_older_backup(self, encrypted=None, compressed=None,
content_type=None, database=None, servername=None):
"""
Return the older backup's file name.
:param encrypted: Filter by encrypted or not
:type encrypted: ``bool`` or ``None``
:param compressed: Filter by compressed or not
:type compressed: ``bool`` or ``None``
:param content_type: Filter by media or database backup, must be
``'db'`` or ``'media'``
:type content_type: ``str`` or ``None``
:param database: Filter by source database's name
:type: ``str`` or ``None``
:param servername: Filter by source server's name
:type: ``str`` or ``None``
:returns: Older file
:rtype: ``str``
:raises: FileNotFound: If no backup file is found
"""
files = self.list_backups(encrypted=encrypted, compressed=compressed,
content_type=content_type, database=database,
servername=servername)
if not files:
raise FileNotFound("There's no backup file available.")
return min(files, key=utils.filename_to_date) | [
"def",
"get_older_backup",
"(",
"self",
",",
"encrypted",
"=",
"None",
",",
"compressed",
"=",
"None",
",",
"content_type",
"=",
"None",
",",
"database",
"=",
"None",
",",
"servername",
"=",
"None",
")",
":",
"files",
"=",
"self",
".",
"list_backups",
"(",
"encrypted",
"=",
"encrypted",
",",
"compressed",
"=",
"compressed",
",",
"content_type",
"=",
"content_type",
",",
"database",
"=",
"database",
",",
"servername",
"=",
"servername",
")",
"if",
"not",
"files",
":",
"raise",
"FileNotFound",
"(",
"\"There's no backup file available.\"",
")",
"return",
"min",
"(",
"files",
",",
"key",
"=",
"utils",
".",
"filename_to_date",
")"
] | Return the older backup's file name.
:param encrypted: Filter by encrypted or not
:type encrypted: ``bool`` or ``None``
:param compressed: Filter by compressed or not
:type compressed: ``bool`` or ``None``
:param content_type: Filter by media or database backup, must be
``'db'`` or ``'media'``
:type content_type: ``str`` or ``None``
:param database: Filter by source database's name
:type: ``str`` or ``None``
:param servername: Filter by source server's name
:type: ``str`` or ``None``
:returns: Older file
:rtype: ``str``
:raises: FileNotFound: If no backup file is found | [
"Return",
"the",
"older",
"backup",
"s",
"file",
"name",
"."
] | 77de209e2d5317e51510d0f888e085ee0c400d66 | https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/storage.py#L172-L204 | train |
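Because `get_older_backup` raises when nothing matches, callers typically guard it; `FileNotFound` is used unqualified in this module, so importing it from `dbbackup.storage` is assumed to work.

```python
# Hypothetical guard around Storage.get_older_backup.
from dbbackup.storage import get_storage, FileNotFound  # FileNotFound assumed exported here

storage = get_storage()
try:
    oldest = storage.get_older_backup(content_type='db', database='default')
except FileNotFound:
    oldest = None  # nothing stored yet
```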
django-dbbackup/django-dbbackup | dbbackup/storage.py | Storage.clean_old_backups | def clean_old_backups(self, encrypted=None, compressed=None,
content_type=None, database=None, servername=None,
keep_number=None):
"""
Delete older backups and keep the number defined.
:param encrypted: Filter by encrypted or not
:type encrypted: ``bool`` or ``None``
:param compressed: Filter by compressed or not
:type compressed: ``bool`` or ``None``
:param content_type: Filter by media or database backup, must be
``'db'`` or ``'media'``
:type content_type: ``str`` or ``None``
:param database: Filter by source database's name
:type: ``str`` or ``None``
:param servername: Filter by source server's name
:type: ``str`` or ``None``
:param keep_number: Number of files to keep, others will be deleted
:type keep_number: ``int`` or ``None``
"""
if keep_number is None:
keep_number = settings.CLEANUP_KEEP if content_type == 'db' \
else settings.CLEANUP_KEEP_MEDIA
keep_filter = settings.CLEANUP_KEEP_FILTER
files = self.list_backups(encrypted=encrypted, compressed=compressed,
content_type=content_type, database=database,
servername=servername)
files = sorted(files, key=utils.filename_to_date, reverse=True)
files_to_delete = [fi for i, fi in enumerate(files) if i >= keep_number]
for filename in files_to_delete:
if keep_filter(filename):
continue
self.delete_file(filename) | python | def clean_old_backups(self, encrypted=None, compressed=None,
content_type=None, database=None, servername=None,
keep_number=None):
"""
Delete older backups and keep the number defined.
:param encrypted: Filter by encrypted or not
:type encrypted: ``bool`` or ``None``
:param compressed: Filter by compressed or not
:type compressed: ``bool`` or ``None``
:param content_type: Filter by media or database backup, must be
``'db'`` or ``'media'``
:type content_type: ``str`` or ``None``
:param database: Filter by source database's name
:type: ``str`` or ``None``
:param servername: Filter by source server's name
:type: ``str`` or ``None``
:param keep_number: Number of files to keep, others will be deleted
:type keep_number: ``int`` or ``None``
"""
if keep_number is None:
keep_number = settings.CLEANUP_KEEP if content_type == 'db' \
else settings.CLEANUP_KEEP_MEDIA
keep_filter = settings.CLEANUP_KEEP_FILTER
files = self.list_backups(encrypted=encrypted, compressed=compressed,
content_type=content_type, database=database,
servername=servername)
files = sorted(files, key=utils.filename_to_date, reverse=True)
files_to_delete = [fi for i, fi in enumerate(files) if i >= keep_number]
for filename in files_to_delete:
if keep_filter(filename):
continue
self.delete_file(filename) | [
"def",
"clean_old_backups",
"(",
"self",
",",
"encrypted",
"=",
"None",
",",
"compressed",
"=",
"None",
",",
"content_type",
"=",
"None",
",",
"database",
"=",
"None",
",",
"servername",
"=",
"None",
",",
"keep_number",
"=",
"None",
")",
":",
"if",
"keep_number",
"is",
"None",
":",
"keep_number",
"=",
"settings",
".",
"CLEANUP_KEEP",
"if",
"content_type",
"==",
"'db'",
"else",
"settings",
".",
"CLEANUP_KEEP_MEDIA",
"keep_filter",
"=",
"settings",
".",
"CLEANUP_KEEP_FILTER",
"files",
"=",
"self",
".",
"list_backups",
"(",
"encrypted",
"=",
"encrypted",
",",
"compressed",
"=",
"compressed",
",",
"content_type",
"=",
"content_type",
",",
"database",
"=",
"database",
",",
"servername",
"=",
"servername",
")",
"files",
"=",
"sorted",
"(",
"files",
",",
"key",
"=",
"utils",
".",
"filename_to_date",
",",
"reverse",
"=",
"True",
")",
"files_to_delete",
"=",
"[",
"fi",
"for",
"i",
",",
"fi",
"in",
"enumerate",
"(",
"files",
")",
"if",
"i",
">=",
"keep_number",
"]",
"for",
"filename",
"in",
"files_to_delete",
":",
"if",
"keep_filter",
"(",
"filename",
")",
":",
"continue",
"self",
".",
"delete_file",
"(",
"filename",
")"
] | Delete older backups and keep the number defined.
:param encrypted: Filter by encrypted or not
:type encrypted: ``bool`` or ``None``
:param compressed: Filter by compressed or not
:type compressed: ``bool`` or ``None``
:param content_type: Filter by media or database backup, must be
``'db'`` or ``'media'``
:type content_type: ``str`` or ``None``
:param database: Filter by source database's name
:type: ``str`` or ``None``
:param servername: Filter by source server's name
:type: ``str`` or ``None``
:param keep_number: Number of files to keep, others will be deleted
:type keep_number: ``int`` or ``None`` | [
"Delete",
"olders",
"backups",
"and",
"hold",
"the",
"number",
"defined",
"."
] | 77de209e2d5317e51510d0f888e085ee0c400d66 | https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/storage.py#L206-L244 | train |
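The retention rule (sort newest first, delete everything at index >= `keep_number`, but skip anything the keep filter claims) can be shown standalone; the filenames and the filter below are made up.

```python
# Self-contained sketch of the retention logic in clean_old_backups.
files = ['db-2024-03-01.dump', 'db-2024-03-02.dump',
         'db-2024-03-03.dump', 'db-2024-03-04.dump']
keep_number = 2
keep_filter = lambda name: '2024-03-01' in name  # e.g. pin a known-good backup

newest_first = sorted(files, reverse=True)  # stands in for filename_to_date sorting
to_delete = [f for i, f in enumerate(newest_first) if i >= keep_number]
deleted = [f for f in to_delete if not keep_filter(f)]
print(deleted)  # ['db-2024-03-02.dump']; 03-01 survives via keep_filter
```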
django-dbbackup/django-dbbackup | dbbackup/management/commands/dbrestore.py | Command._get_database | def _get_database(self, options):
"""Get the database to restore."""
database_name = options.get('database')
if not database_name:
if len(settings.DATABASES) > 1:
errmsg = "Because this project contains more than one database, you"\
" must specify the --database option."
raise CommandError(errmsg)
database_name = list(settings.DATABASES.keys())[0]
if database_name not in settings.DATABASES:
raise CommandError("Database %s does not exist." % database_name)
return database_name, settings.DATABASES[database_name] | python | def _get_database(self, options):
"""Get the database to restore."""
database_name = options.get('database')
if not database_name:
if len(settings.DATABASES) > 1:
errmsg = "Because this project contains more than one database, you"\
" must specify the --database option."
raise CommandError(errmsg)
database_name = list(settings.DATABASES.keys())[0]
if database_name not in settings.DATABASES:
raise CommandError("Database %s does not exist." % database_name)
return database_name, settings.DATABASES[database_name] | [
"def",
"_get_database",
"(",
"self",
",",
"options",
")",
":",
"database_name",
"=",
"options",
".",
"get",
"(",
"'database'",
")",
"if",
"not",
"database_name",
":",
"if",
"len",
"(",
"settings",
".",
"DATABASES",
")",
">",
"1",
":",
"errmsg",
"=",
"\"Because this project contains more than one database, you\"",
"\" must specify the --database option.\"",
"raise",
"CommandError",
"(",
"errmsg",
")",
"database_name",
"=",
"list",
"(",
"settings",
".",
"DATABASES",
".",
"keys",
"(",
")",
")",
"[",
"0",
"]",
"if",
"database_name",
"not",
"in",
"settings",
".",
"DATABASES",
":",
"raise",
"CommandError",
"(",
"\"Database %s does not exist.\"",
"%",
"database_name",
")",
"return",
"database_name",
",",
"settings",
".",
"DATABASES",
"[",
"database_name",
"]"
] | Get the database to restore. | [
"Get",
"the",
"database",
"to",
"restore",
"."
] | 77de209e2d5317e51510d0f888e085ee0c400d66 | https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/management/commands/dbrestore.py#L57-L68 | train |
django-dbbackup/django-dbbackup | dbbackup/management/commands/dbrestore.py | Command._restore_backup | def _restore_backup(self):
"""Restore the specified database."""
input_filename, input_file = self._get_backup_file(database=self.database_name,
servername=self.servername)
self.logger.info("Restoring backup for database '%s' and server '%s'",
self.database_name, self.servername)
self.logger.info("Restoring: %s" % input_filename)
if self.decrypt:
unencrypted_file, input_filename = utils.unencrypt_file(input_file, input_filename,
self.passphrase)
input_file.close()
input_file = unencrypted_file
if self.uncompress:
uncompressed_file, input_filename = utils.uncompress_file(input_file, input_filename)
input_file.close()
input_file = uncompressed_file
self.logger.info("Restore tempfile created: %s", utils.handle_size(input_file))
if self.interactive:
self._ask_confirmation()
input_file.seek(0)
self.connector = get_connector(self.database_name)
self.connector.restore_dump(input_file) | python | def _restore_backup(self):
"""Restore the specified database."""
input_filename, input_file = self._get_backup_file(database=self.database_name,
servername=self.servername)
self.logger.info("Restoring backup for database '%s' and server '%s'",
self.database_name, self.servername)
self.logger.info("Restoring: %s" % input_filename)
if self.decrypt:
unencrypted_file, input_filename = utils.unencrypt_file(input_file, input_filename,
self.passphrase)
input_file.close()
input_file = unencrypted_file
if self.uncompress:
uncompressed_file, input_filename = utils.uncompress_file(input_file, input_filename)
input_file.close()
input_file = uncompressed_file
self.logger.info("Restore tempfile created: %s", utils.handle_size(input_file))
if self.interactive:
self._ask_confirmation()
input_file.seek(0)
self.connector = get_connector(self.database_name)
self.connector.restore_dump(input_file) | [
"def",
"_restore_backup",
"(",
"self",
")",
":",
"input_filename",
",",
"input_file",
"=",
"self",
".",
"_get_backup_file",
"(",
"database",
"=",
"self",
".",
"database_name",
",",
"servername",
"=",
"self",
".",
"servername",
")",
"self",
".",
"logger",
".",
"info",
"(",
"\"Restoring backup for database '%s' and server '%s'\"",
",",
"self",
".",
"database_name",
",",
"self",
".",
"servername",
")",
"self",
".",
"logger",
".",
"info",
"(",
"\"Restoring: %s\"",
"%",
"input_filename",
")",
"if",
"self",
".",
"decrypt",
":",
"unencrypted_file",
",",
"input_filename",
"=",
"utils",
".",
"unencrypt_file",
"(",
"input_file",
",",
"input_filename",
",",
"self",
".",
"passphrase",
")",
"input_file",
".",
"close",
"(",
")",
"input_file",
"=",
"unencrypted_file",
"if",
"self",
".",
"uncompress",
":",
"uncompressed_file",
",",
"input_filename",
"=",
"utils",
".",
"uncompress_file",
"(",
"input_file",
",",
"input_filename",
")",
"input_file",
".",
"close",
"(",
")",
"input_file",
"=",
"uncompressed_file",
"self",
".",
"logger",
".",
"info",
"(",
"\"Restore tempfile created: %s\"",
",",
"utils",
".",
"handle_size",
"(",
"input_file",
")",
")",
"if",
"self",
".",
"interactive",
":",
"self",
".",
"_ask_confirmation",
"(",
")",
"input_file",
".",
"seek",
"(",
"0",
")",
"self",
".",
"connector",
"=",
"get_connector",
"(",
"self",
".",
"database_name",
")",
"self",
".",
"connector",
".",
"restore_dump",
"(",
"input_file",
")"
] | Restore the specified database. | [
"Restore",
"the",
"specified",
"database",
"."
] | 77de209e2d5317e51510d0f888e085ee0c400d66 | https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/management/commands/dbrestore.py#L70-L94 | train |
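In practice the restore runs through the management command defined in this file; a hedged invocation via Django's `call_command` is sketched below. The `database` option is confirmed by the code above; mapping `interactive=False` to the command's no-prompt behaviour is an assumption.

```python
# Hypothetical invocation of the dbrestore management command.
from django.core.management import call_command

# Restore the latest backup of the 'default' alias without prompting
# (interactive=False is assumed to map to the command's --noinput flag).
call_command('dbrestore', database='default', interactive=False)
```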
django-dbbackup/django-dbbackup | dbbackup/db/base.py | get_connector | def get_connector(database_name=None):
"""
Get a connector from its database key in settings.
"""
from django.db import connections, DEFAULT_DB_ALIAS
# Get DB
database_name = database_name or DEFAULT_DB_ALIAS
connection = connections[database_name]
engine = connection.settings_dict['ENGINE']
connector_settings = settings.CONNECTORS.get(database_name, {})
connector_path = connector_settings.get('CONNECTOR', CONNECTOR_MAPPING[engine])
connector_module_path = '.'.join(connector_path.split('.')[:-1])
module = import_module(connector_module_path)
connector_name = connector_path.split('.')[-1]
connector = getattr(module, connector_name)
return connector(database_name, **connector_settings) | python | def get_connector(database_name=None):
"""
Get a connector from its database key in settings.
"""
from django.db import connections, DEFAULT_DB_ALIAS
# Get DB
database_name = database_name or DEFAULT_DB_ALIAS
connection = connections[database_name]
engine = connection.settings_dict['ENGINE']
connector_settings = settings.CONNECTORS.get(database_name, {})
connector_path = connector_settings.get('CONNECTOR', CONNECTOR_MAPPING[engine])
connector_module_path = '.'.join(connector_path.split('.')[:-1])
module = import_module(connector_module_path)
connector_name = connector_path.split('.')[-1]
connector = getattr(module, connector_name)
return connector(database_name, **connector_settings) | [
"def",
"get_connector",
"(",
"database_name",
"=",
"None",
")",
":",
"from",
"django",
".",
"db",
"import",
"connections",
",",
"DEFAULT_DB_ALIAS",
"# Get DB",
"database_name",
"=",
"database_name",
"or",
"DEFAULT_DB_ALIAS",
"connection",
"=",
"connections",
"[",
"database_name",
"]",
"engine",
"=",
"connection",
".",
"settings_dict",
"[",
"'ENGINE'",
"]",
"connector_settings",
"=",
"settings",
".",
"CONNECTORS",
".",
"get",
"(",
"database_name",
",",
"{",
"}",
")",
"connector_path",
"=",
"connector_settings",
".",
"get",
"(",
"'CONNECTOR'",
",",
"CONNECTOR_MAPPING",
"[",
"engine",
"]",
")",
"connector_module_path",
"=",
"'.'",
".",
"join",
"(",
"connector_path",
".",
"split",
"(",
"'.'",
")",
"[",
":",
"-",
"1",
"]",
")",
"module",
"=",
"import_module",
"(",
"connector_module_path",
")",
"connector_name",
"=",
"connector_path",
".",
"split",
"(",
"'.'",
")",
"[",
"-",
"1",
"]",
"connector",
"=",
"getattr",
"(",
"module",
",",
"connector_name",
")",
"return",
"connector",
"(",
"database_name",
",",
"*",
"*",
"connector_settings",
")"
] | Get a connector from its database key in settings. | [
"Get",
"a",
"connector",
"from",
"its",
"database",
"key",
"in",
"setttings",
"."
] | 77de209e2d5317e51510d0f888e085ee0c400d66 | https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/db/base.py#L31-L46 | train |
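A sketch of connector resolution. Calling with no argument resolves `DEFAULT_DB_ALIAS` as shown above; the `CONNECTOR` override key matches the lookup in the code, while the Django-level setting name and the dotted connector path in the comment are assumptions.

```python
# Hypothetical usage of get_connector (dbbackup/db/base.py).
from dbbackup.db.base import get_connector

connector = get_connector()             # resolves DEFAULT_DB_ALIAS
analytics = get_connector('analytics')  # illustrative second alias

# Per the lookup above, a per-database override would look like this
# (setting name and dotted path are assumptions):
# DBBACKUP_CONNECTORS = {
#     'default': {'CONNECTOR': 'dbbackup.db.postgresql.PgDumpConnector'},
# }
```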
django-dbbackup/django-dbbackup | dbbackup/db/base.py | BaseDBConnector.settings | def settings(self):
"""Mix of database and connector settings."""
if not hasattr(self, '_settings'):
sett = self.connection.settings_dict.copy()
sett.update(settings.CONNECTORS.get(self.database_name, {}))
self._settings = sett
return self._settings | python | def settings(self):
"""Mix of database and connector settings."""
if not hasattr(self, '_settings'):
sett = self.connection.settings_dict.copy()
sett.update(settings.CONNECTORS.get(self.database_name, {}))
self._settings = sett
return self._settings | [
"def",
"settings",
"(",
"self",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'_settings'",
")",
":",
"sett",
"=",
"self",
".",
"connection",
".",
"settings_dict",
".",
"copy",
"(",
")",
"sett",
".",
"update",
"(",
"settings",
".",
"CONNECTORS",
".",
"get",
"(",
"self",
".",
"database_name",
",",
"{",
"}",
")",
")",
"self",
".",
"_settings",
"=",
"sett",
"return",
"self",
".",
"_settings"
] | Mix of database and connector settings. | [
"Mix",
"of",
"database",
"and",
"connector",
"settings",
"."
] | 77de209e2d5317e51510d0f888e085ee0c400d66 | https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/db/base.py#L65-L71 | train |
django-dbbackup/django-dbbackup | dbbackup/db/base.py | BaseCommandDBConnector.run_command | def run_command(self, command, stdin=None, env=None):
"""
Launch a shell command line.
:param command: Command line to launch
:type command: str
:param stdin: Standard input of command
:type stdin: file
:param env: Environment variables used in the command
:type env: dict
:return: Standard output and standard error of the command
:rtype: (file, file)
"""
cmd = shlex.split(command)
stdout = SpooledTemporaryFile(max_size=settings.TMP_FILE_MAX_SIZE,
dir=settings.TMP_DIR)
stderr = SpooledTemporaryFile(max_size=settings.TMP_FILE_MAX_SIZE,
dir=settings.TMP_DIR)
full_env = os.environ.copy() if self.use_parent_env else {}
full_env.update(self.env)
full_env.update(env or {})
try:
if isinstance(stdin, (ContentFile, SFTPStorageFile)):
process = Popen(cmd, stdin=PIPE, stdout=stdout, stderr=stderr, env=full_env)
process.communicate(input=stdin.read())
else:
process = Popen(cmd, stdin=stdin, stdout=stdout, stderr=stderr, env=full_env)
process.wait()
if process.poll():
stderr.seek(0)
raise exceptions.CommandConnectorError(
"Error running: {}\n{}".format(command, stderr.read().decode('utf-8')))
stdout.seek(0)
stderr.seek(0)
return stdout, stderr
except OSError as err:
raise exceptions.CommandConnectorError(
"Error running: {}\n{}".format(command, str(err))) | python | def run_command(self, command, stdin=None, env=None):
"""
Launch a shell command line.
:param command: Command line to launch
:type command: str
:param stdin: Standard input of command
:type stdin: file
:param env: Environment variables used in the command
:type env: dict
:return: Standard output and standard error of the command
:rtype: (file, file)
"""
cmd = shlex.split(command)
stdout = SpooledTemporaryFile(max_size=settings.TMP_FILE_MAX_SIZE,
dir=settings.TMP_DIR)
stderr = SpooledTemporaryFile(max_size=settings.TMP_FILE_MAX_SIZE,
dir=settings.TMP_DIR)
full_env = os.environ.copy() if self.use_parent_env else {}
full_env.update(self.env)
full_env.update(env or {})
try:
if isinstance(stdin, (ContentFile, SFTPStorageFile)):
process = Popen(cmd, stdin=PIPE, stdout=stdout, stderr=stderr, env=full_env)
process.communicate(input=stdin.read())
else:
process = Popen(cmd, stdin=stdin, stdout=stdout, stderr=stderr, env=full_env)
process.wait()
if process.poll():
stderr.seek(0)
raise exceptions.CommandConnectorError(
"Error running: {}\n{}".format(command, stderr.read().decode('utf-8')))
stdout.seek(0)
stderr.seek(0)
return stdout, stderr
except OSError as err:
raise exceptions.CommandConnectorError(
"Error running: {}\n{}".format(command, str(err))) | [
"def",
"run_command",
"(",
"self",
",",
"command",
",",
"stdin",
"=",
"None",
",",
"env",
"=",
"None",
")",
":",
"cmd",
"=",
"shlex",
".",
"split",
"(",
"command",
")",
"stdout",
"=",
"SpooledTemporaryFile",
"(",
"max_size",
"=",
"settings",
".",
"TMP_FILE_MAX_SIZE",
",",
"dir",
"=",
"settings",
".",
"TMP_DIR",
")",
"stderr",
"=",
"SpooledTemporaryFile",
"(",
"max_size",
"=",
"settings",
".",
"TMP_FILE_MAX_SIZE",
",",
"dir",
"=",
"settings",
".",
"TMP_DIR",
")",
"full_env",
"=",
"os",
".",
"environ",
".",
"copy",
"(",
")",
"if",
"self",
".",
"use_parent_env",
"else",
"{",
"}",
"full_env",
".",
"update",
"(",
"self",
".",
"env",
")",
"full_env",
".",
"update",
"(",
"env",
"or",
"{",
"}",
")",
"try",
":",
"if",
"isinstance",
"(",
"stdin",
",",
"(",
"ContentFile",
",",
"SFTPStorageFile",
")",
")",
":",
"process",
"=",
"Popen",
"(",
"cmd",
",",
"stdin",
"=",
"PIPE",
",",
"stdout",
"=",
"stdout",
",",
"stderr",
"=",
"stderr",
",",
"env",
"=",
"full_env",
")",
"process",
".",
"communicate",
"(",
"input",
"=",
"stdin",
".",
"read",
"(",
")",
")",
"else",
":",
"process",
"=",
"Popen",
"(",
"cmd",
",",
"stdin",
"=",
"stdin",
",",
"stdout",
"=",
"stdout",
",",
"stderr",
"=",
"stderr",
",",
"env",
"=",
"full_env",
")",
"process",
".",
"wait",
"(",
")",
"if",
"process",
".",
"poll",
"(",
")",
":",
"stderr",
".",
"seek",
"(",
"0",
")",
"raise",
"exceptions",
".",
"CommandConnectorError",
"(",
"\"Error running: {}\\n{}\"",
".",
"format",
"(",
"command",
",",
"stderr",
".",
"read",
"(",
")",
".",
"decode",
"(",
"'utf-8'",
")",
")",
")",
"stdout",
".",
"seek",
"(",
"0",
")",
"stderr",
".",
"seek",
"(",
"0",
")",
"return",
"stdout",
",",
"stderr",
"except",
"OSError",
"as",
"err",
":",
"raise",
"exceptions",
".",
"CommandConnectorError",
"(",
"\"Error running: {}\\n{}\"",
".",
"format",
"(",
"command",
",",
"str",
"(",
"err",
")",
")",
")"
] | Launch a shell command line.
:param command: Command line to launch
:type command: str
:param stdin: Standard input of command
:type stdin: file
:param env: Environment variables used in the command
:type env: dict
:return: Standard output and standard error of the command
:rtype: (file, file) | [
"Launch",
"a",
"shell",
"command",
"line",
"."
] | 77de209e2d5317e51510d0f888e085ee0c400d66 | https://github.com/django-dbbackup/django-dbbackup/blob/77de209e2d5317e51510d0f888e085ee0c400d66/dbbackup/db/base.py#L118-L155 | train |
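A sketch assuming the resolved connector is an instance of a `BaseCommandDBConnector` subclass; per the code above, the call returns two rewound spooled temp files and raises `CommandConnectorError` on a non-zero exit. The command line and env values are made up.

```python
# Hypothetical usage of BaseCommandDBConnector.run_command.
from dbbackup.db.base import get_connector

connector = get_connector('default')  # assumed to be a command-based connector
stdout, stderr = connector.run_command(
    'pg_dump --dbname=mydb',       # made-up command line
    env={'PGPASSWORD': 'secret'},  # merged over self.env and the parent env
)
dump_bytes = stdout.read()  # both handles are SpooledTemporaryFile, seeked to 0
```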
tchellomello/raincloudy | raincloudy/faucet.py | RainCloudyFaucetCore._assign_zones | def _assign_zones(self):
"""Assign all RainCloudyFaucetZone managed by faucet."""
for zone_id in range(1, 5):
zone = \
RainCloudyFaucetZone(
parent=self._parent,
controller=self._controller,
faucet=self,
zone_id=zone_id)
if zone not in self.zones:
self.zones.append(zone) | python | def _assign_zones(self):
"""Assign all RainCloudyFaucetZone managed by faucet."""
for zone_id in range(1, 5):
zone = \
RainCloudyFaucetZone(
parent=self._parent,
controller=self._controller,
faucet=self,
zone_id=zone_id)
if zone not in self.zones:
self.zones.append(zone) | [
"def",
"_assign_zones",
"(",
"self",
")",
":",
"for",
"zone_id",
"in",
"range",
"(",
"1",
",",
"5",
")",
":",
"zone",
"=",
"RainCloudyFaucetZone",
"(",
"parent",
"=",
"self",
".",
"_parent",
",",
"controller",
"=",
"self",
".",
"_controller",
",",
"faucet",
"=",
"self",
",",
"zone_id",
"=",
"zone_id",
")",
"if",
"zone",
"not",
"in",
"self",
".",
"zones",
":",
"self",
".",
"zones",
".",
"append",
"(",
"zone",
")"
] | Assign all RainCloudyFaucetZone managed by faucet. | [
"Assign",
"all",
"RainCloudyFaucetZone",
"managed",
"by",
"faucet",
"."
] | 1847fa913e5ba79645d51bf23637860d68c67dbf | https://github.com/tchellomello/raincloudy/blob/1847fa913e5ba79645d51bf23637860d68c67dbf/raincloudy/faucet.py#L38-L49 | train |
tchellomello/raincloudy | raincloudy/faucet.py | RainCloudyFaucetCore._find_zone_by_id | def _find_zone_by_id(self, zone_id):
"""Return zone by id."""
if not self.zones:
return None
zone = list(filter(
lambda zone: zone.id == zone_id, self.zones))
return zone[0] if zone else None | python | def _find_zone_by_id(self, zone_id):
"""Return zone by id."""
if not self.zones:
return None
zone = list(filter(
lambda zone: zone.id == zone_id, self.zones))
return zone[0] if zone else None | [
"def",
"_find_zone_by_id",
"(",
"self",
",",
"zone_id",
")",
":",
"if",
"not",
"self",
".",
"zones",
":",
"return",
"None",
"zone",
"=",
"list",
"(",
"filter",
"(",
"lambda",
"zone",
":",
"zone",
".",
"id",
"==",
"zone_id",
",",
"self",
".",
"zones",
")",
")",
"return",
"zone",
"[",
"0",
"]",
"if",
"zone",
"else",
"None"
] | Return zone by id. | [
"Return",
"zone",
"by",
"id",
"."
] | 1847fa913e5ba79645d51bf23637860d68c67dbf | https://github.com/tchellomello/raincloudy/blob/1847fa913e5ba79645d51bf23637860d68c67dbf/raincloudy/faucet.py#L114-L122 | train |
tchellomello/raincloudy | raincloudy/faucet.py | RainCloudyFaucetZone._set_zone_name | def _set_zone_name(self, zoneid, name):
"""Private method to override zone name."""
# zone starts with index 0
zoneid -= 1
data = {
'_set_zone_name': 'Set Name',
'select_zone': str(zoneid),
'zone_name': name,
}
self._controller.post(data) | python | def _set_zone_name(self, zoneid, name):
"""Private method to override zone name."""
# zone starts with index 0
zoneid -= 1
data = {
'_set_zone_name': 'Set Name',
'select_zone': str(zoneid),
'zone_name': name,
}
self._controller.post(data) | [
"def",
"_set_zone_name",
"(",
"self",
",",
"zoneid",
",",
"name",
")",
":",
"# zone starts with index 0",
"zoneid",
"-=",
"1",
"data",
"=",
"{",
"'_set_zone_name'",
":",
"'Set Name'",
",",
"'select_zone'",
":",
"str",
"(",
"zoneid",
")",
",",
"'zone_name'",
":",
"name",
",",
"}",
"self",
".",
"_controller",
".",
"post",
"(",
"data",
")"
] | Private method to override zone name. | [
"Private",
"method",
"to",
"override",
"zone",
"name",
"."
] | 1847fa913e5ba79645d51bf23637860d68c67dbf | https://github.com/tchellomello/raincloudy/blob/1847fa913e5ba79645d51bf23637860d68c67dbf/raincloudy/faucet.py#L181-L190 | train |
tchellomello/raincloudy | raincloudy/faucet.py | RainCloudyFaucetZone._set_watering_time | def _set_watering_time(self, zoneid, value):
"""Private method to set watering_time per zone."""
if value not in MANUAL_WATERING_ALLOWED:
raise ValueError(
'Valid options are: {}'.format(
', '.join(map(str, MANUAL_WATERING_ALLOWED)))
)
if isinstance(value, int) and value == 0:
value = 'OFF'
elif isinstance(value, str):
value = value.upper()
if value == 'ON':
value = MAX_WATERING_MINUTES
ddata = self.preupdate()
attr = 'zone{}_select_manual_mode'.format(zoneid)
ddata[attr] = value
self.submit_action(ddata) | python | def _set_watering_time(self, zoneid, value):
"""Private method to set watering_time per zone."""
if value not in MANUAL_WATERING_ALLOWED:
raise ValueError(
'Valid options are: {}'.format(
', '.join(map(str, MANUAL_WATERING_ALLOWED)))
)
if isinstance(value, int) and value == 0:
value = 'OFF'
elif isinstance(value, str):
value = value.upper()
if value == 'ON':
value = MAX_WATERING_MINUTES
ddata = self.preupdate()
attr = 'zone{}_select_manual_mode'.format(zoneid)
ddata[attr] = value
self.submit_action(ddata) | [
"def",
"_set_watering_time",
"(",
"self",
",",
"zoneid",
",",
"value",
")",
":",
"if",
"value",
"not",
"in",
"MANUAL_WATERING_ALLOWED",
":",
"raise",
"ValueError",
"(",
"'Valid options are: {}'",
".",
"format",
"(",
"', '",
".",
"join",
"(",
"map",
"(",
"str",
",",
"MANUAL_WATERING_ALLOWED",
")",
")",
")",
")",
"if",
"isinstance",
"(",
"value",
",",
"int",
")",
"and",
"value",
"==",
"0",
":",
"value",
"=",
"'OFF'",
"elif",
"isinstance",
"(",
"value",
",",
"str",
")",
":",
"value",
"=",
"value",
".",
"upper",
"(",
")",
"if",
"value",
"==",
"'ON'",
":",
"value",
"=",
"MAX_WATERING_MINUTES",
"ddata",
"=",
"self",
".",
"preupdate",
"(",
")",
"attr",
"=",
"'zone{}_select_manual_mode'",
".",
"format",
"(",
"zoneid",
")",
"ddata",
"[",
"attr",
"]",
"=",
"value",
"self",
".",
"submit_action",
"(",
"ddata",
")"
] | Private method to set watering_time per zone. | [
"Private",
"method",
"to",
"set",
"watering_time",
"per",
"zone",
"."
] | 1847fa913e5ba79645d51bf23637860d68c67dbf | https://github.com/tchellomello/raincloudy/blob/1847fa913e5ba79645d51bf23637860d68c67dbf/raincloudy/faucet.py#L202-L220 | train |
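The value normalization is easiest to see in isolation; below is a self-contained sketch of the same rules (0 becomes 'OFF', strings are upper-cased, 'ON' becomes the maximum). The allowed set and the cap are illustrative assumptions, since `MANUAL_WATERING_ALLOWED` and `MAX_WATERING_MINUTES` are not shown here.

```python
# Standalone sketch of the watering-time normalization in _set_watering_time.
MAX_WATERING_MINUTES = 60  # assumed cap
ALLOWED = [0, 5, 10, 15, 30, 45, 60, 'on', 'off', 'ON', 'OFF']  # illustrative

def normalize(value):
    if value not in ALLOWED:
        raise ValueError('Valid options are: {}'.format(ALLOWED))
    if isinstance(value, int) and value == 0:
        return 'OFF'
    if isinstance(value, str):
        value = value.upper()
        if value == 'ON':
            return MAX_WATERING_MINUTES
    return value

print(normalize(0), normalize('on'), normalize(15))  # OFF 60 15
```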
tchellomello/raincloudy | raincloudy/faucet.py | RainCloudyFaucetZone.watering_time | def watering_time(self):
"""Return watering_time from zone."""
# zone starts with index 0
index = self.id - 1
auto_watering_time =\
self._attributes['rain_delay_mode'][index]['auto_watering_time']
manual_watering_time =\
self._attributes['rain_delay_mode'][index]['manual_watering_time']
if auto_watering_time > manual_watering_time:
watering_time = auto_watering_time
else:
watering_time = manual_watering_time
return watering_time | python | def watering_time(self):
"""Return watering_time from zone."""
# zone starts with index 0
index = self.id - 1
auto_watering_time =\
self._attributes['rain_delay_mode'][index]['auto_watering_time']
manual_watering_time =\
self._attributes['rain_delay_mode'][index]['manual_watering_time']
if auto_watering_time > manual_watering_time:
watering_time = auto_watering_time
else:
watering_time = manual_watering_time
return watering_time | [
"def",
"watering_time",
"(",
"self",
")",
":",
"# zone starts with index 0",
"index",
"=",
"self",
".",
"id",
"-",
"1",
"auto_watering_time",
"=",
"self",
".",
"_attributes",
"[",
"'rain_delay_mode'",
"]",
"[",
"index",
"]",
"[",
"'auto_watering_time'",
"]",
"manual_watering_time",
"=",
"self",
".",
"_attributes",
"[",
"'rain_delay_mode'",
"]",
"[",
"index",
"]",
"[",
"'manual_watering_time'",
"]",
"if",
"auto_watering_time",
">",
"manual_watering_time",
":",
"watering_time",
"=",
"auto_watering_time",
"else",
":",
"watering_time",
"=",
"manual_watering_time",
"return",
"watering_time"
] | Return watering_time from zone. | [
"Return",
"watering_time",
"from",
"zone",
"."
] | 1847fa913e5ba79645d51bf23637860d68c67dbf | https://github.com/tchellomello/raincloudy/blob/1847fa913e5ba79645d51bf23637860d68c67dbf/raincloudy/faucet.py#L224-L239 | train |
tchellomello/raincloudy | raincloudy/faucet.py | RainCloudyFaucetZone._set_rain_delay | def _set_rain_delay(self, zoneid, value):
"""Generic method to set auto_watering program."""
# current index for rain_delay starts in 0
zoneid -= 1
if isinstance(value, int):
if value > MAX_RAIN_DELAY_DAYS or value < 0:
return None
elif value == 0:
value = 'off'
elif value == 1:
value = '1day'
elif value >= 2:
value = str(value) + 'days'
elif isinstance(value, str):
if value.lower() != 'off':
return None
ddata = self.preupdate()
attr = 'zone{}_rain_delay_select'.format(zoneid)
ddata[attr] = value
self.submit_action(ddata)
return True | python | def _set_rain_delay(self, zoneid, value):
"""Generic method to set auto_watering program."""
# current index for rain_delay starts in 0
zoneid -= 1
if isinstance(value, int):
if value > MAX_RAIN_DELAY_DAYS or value < 0:
return None
elif value == 0:
value = 'off'
elif value == 1:
value = '1day'
elif value >= 2:
value = str(value) + 'days'
elif isinstance(value, str):
if value.lower() != 'off':
return None
ddata = self.preupdate()
attr = 'zone{}_rain_delay_select'.format(zoneid)
ddata[attr] = value
self.submit_action(ddata)
return True | [
"def",
"_set_rain_delay",
"(",
"self",
",",
"zoneid",
",",
"value",
")",
":",
"# current index for rain_delay starts in 0",
"zoneid",
"-=",
"1",
"if",
"isinstance",
"(",
"value",
",",
"int",
")",
":",
"if",
"value",
">",
"MAX_RAIN_DELAY_DAYS",
"or",
"value",
"<",
"0",
":",
"return",
"None",
"elif",
"value",
"==",
"0",
":",
"value",
"=",
"'off'",
"elif",
"value",
"==",
"1",
":",
"value",
"=",
"'1day'",
"elif",
"value",
">=",
"2",
":",
"value",
"=",
"str",
"(",
"value",
")",
"+",
"'days'",
"elif",
"isinstance",
"(",
"value",
",",
"str",
")",
":",
"if",
"value",
".",
"lower",
"(",
")",
"!=",
"'off'",
":",
"return",
"None",
"ddata",
"=",
"self",
".",
"preupdate",
"(",
")",
"attr",
"=",
"'zone{}_rain_delay_select'",
".",
"format",
"(",
"zoneid",
")",
"ddata",
"[",
"attr",
"]",
"=",
"value",
"self",
".",
"submit_action",
"(",
"ddata",
")",
"return",
"True"
] | Generic method to set rain_delay option. | [
"Generic",
"method",
"to",
"set",
"auto_watering",
"program",
"."
] | 1847fa913e5ba79645d51bf23637860d68c67dbf | https://github.com/tchellomello/raincloudy/blob/1847fa913e5ba79645d51bf23637860d68c67dbf/raincloudy/faucet.py#L250-L272 | train |
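A standalone sketch of the day-count mapping above (0 to 'off', 1 to '1day', N to 'Ndays', rejecting values outside the range); the cap constant is an assumption since `MAX_RAIN_DELAY_DAYS` is not shown here.

```python
# Self-contained sketch of the rain-delay value mapping in _set_rain_delay.
MAX_RAIN_DELAY_DAYS = 7  # assumed cap

def rain_delay_value(value):
    if isinstance(value, int):
        if value > MAX_RAIN_DELAY_DAYS or value < 0:
            return None
        if value == 0:
            return 'off'
        if value == 1:
            return '1day'
        return '{}days'.format(value)
    if isinstance(value, str) and value.lower() == 'off':
        return 'off'
    return None

print(rain_delay_value(0), rain_delay_value(1), rain_delay_value(3))
# off 1day 3days
```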
tchellomello/raincloudy | raincloudy/faucet.py | RainCloudyFaucetZone._set_auto_watering | def _set_auto_watering(self, zoneid, value):
"""Private method to set auto_watering program."""
if not isinstance(value, bool):
return None
ddata = self.preupdate()
attr = 'zone{}_program_toggle'.format(zoneid)
try:
if not value:
ddata.pop(attr)
else:
ddata[attr] = 'on'
except KeyError:
pass
self.submit_action(ddata)
return True | python | def _set_auto_watering(self, zoneid, value):
"""Private method to set auto_watering program."""
if not isinstance(value, bool):
return None
ddata = self.preupdate()
attr = 'zone{}_program_toggle'.format(zoneid)
try:
if not value:
ddata.pop(attr)
else:
ddata[attr] = 'on'
except KeyError:
pass
self.submit_action(ddata)
return True | [
"def",
"_set_auto_watering",
"(",
"self",
",",
"zoneid",
",",
"value",
")",
":",
"if",
"not",
"isinstance",
"(",
"value",
",",
"bool",
")",
":",
"return",
"None",
"ddata",
"=",
"self",
".",
"preupdate",
"(",
")",
"attr",
"=",
"'zone{}_program_toggle'",
".",
"format",
"(",
"zoneid",
")",
"try",
":",
"if",
"not",
"value",
":",
"ddata",
".",
"pop",
"(",
"attr",
")",
"else",
":",
"ddata",
"[",
"attr",
"]",
"=",
"'on'",
"except",
"KeyError",
":",
"pass",
"self",
".",
"submit_action",
"(",
"ddata",
")",
"return",
"True"
] | Private method to set auto_watering program. | [
"Private",
"method",
"to",
"set",
"auto_watering",
"program",
"."
] | 1847fa913e5ba79645d51bf23637860d68c67dbf | https://github.com/tchellomello/raincloudy/blob/1847fa913e5ba79645d51bf23637860d68c67dbf/raincloudy/faucet.py#L291-L306 | train |
tchellomello/raincloudy | raincloudy/faucet.py | RainCloudyFaucetZone.auto_watering | def auto_watering(self):
"""Return if zone is configured to automatic watering."""
value = "zone{}".format(self.id)
return find_program_status(self._parent.html['home'], value) | python | def auto_watering(self):
"""Return if zone is configured to automatic watering."""
value = "zone{}".format(self.id)
return find_program_status(self._parent.html['home'], value) | [
"def",
"auto_watering",
"(",
"self",
")",
":",
"value",
"=",
"\"zone{}\"",
".",
"format",
"(",
"self",
".",
"id",
")",
"return",
"find_program_status",
"(",
"self",
".",
"_parent",
".",
"html",
"[",
"'home'",
"]",
",",
"value",
")"
] | Return if zone is configured for automatic watering. | [
"Return",
"if",
"zone",
"is",
"configured",
"to",
"automatic",
"watering",
"."
] | 1847fa913e5ba79645d51bf23637860d68c67dbf | https://github.com/tchellomello/raincloudy/blob/1847fa913e5ba79645d51bf23637860d68c67dbf/raincloudy/faucet.py#L309-L312 | train |
tchellomello/raincloudy | raincloudy/faucet.py | RainCloudyFaucetZone._to_dict | def _to_dict(self):
"""Method to build zone dict."""
return {
'auto_watering':
getattr(self, "auto_watering"),
'droplet':
getattr(self, "droplet"),
'is_watering':
getattr(self, "is_watering"),
'name':
getattr(self, "name"),
'next_cycle':
getattr(self, "next_cycle"),
'rain_delay':
getattr(self, "rain_delay"),
'watering_time':
getattr(self, "watering_time"),
} | python | def _to_dict(self):
"""Method to build zone dict."""
return {
'auto_watering':
getattr(self, "auto_watering"),
'droplet':
getattr(self, "droplet"),
'is_watering':
getattr(self, "is_watering"),
'name':
getattr(self, "name"),
'next_cycle':
getattr(self, "next_cycle"),
'rain_delay':
getattr(self, "rain_delay"),
'watering_time':
getattr(self, "watering_time"),
} | [
"def",
"_to_dict",
"(",
"self",
")",
":",
"return",
"{",
"'auto_watering'",
":",
"getattr",
"(",
"self",
",",
"\"auto_watering\"",
")",
",",
"'droplet'",
":",
"getattr",
"(",
"self",
",",
"\"droplet\"",
")",
",",
"'is_watering'",
":",
"getattr",
"(",
"self",
",",
"\"is_watering\"",
")",
",",
"'name'",
":",
"getattr",
"(",
"self",
",",
"\"name\"",
")",
",",
"'next_cycle'",
":",
"getattr",
"(",
"self",
",",
"\"next_cycle\"",
")",
",",
"'rain_delay'",
":",
"getattr",
"(",
"self",
",",
"\"rain_delay\"",
")",
",",
"'watering_time'",
":",
"getattr",
"(",
"self",
",",
"\"watering_time\"",
")",
",",
"}"
] | Method to build zone dict. | [
"Method",
"to",
"build",
"zone",
"dict",
"."
] | 1847fa913e5ba79645d51bf23637860d68c67dbf | https://github.com/tchellomello/raincloudy/blob/1847fa913e5ba79645d51bf23637860d68c67dbf/raincloudy/faucet.py#L324-L341 | train |
tchellomello/raincloudy | raincloudy/faucet.py | RainCloudyFaucetZone.preupdate | def preupdate(self, force_refresh=True):
"""Return a dict with all current options prior submitting request."""
ddata = MANUAL_OP_DATA.copy()
# force update to make sure status is accurate
if force_refresh:
self.update()
# select current controller and faucet
ddata['select_controller'] = \
self._parent.controllers.index(self._controller)
ddata['select_faucet'] = \
self._controller.faucets.index(self._faucet)
# check if zone is scheduled automatically (zone1_program_toggle)
# only add zoneX_program_toggle to ddata when needed,
# otherwise the field will be always on
for zone in self._faucet.zones:
attr = 'zone{}_program_toggle'.format(zone.id)
if zone.auto_watering:
ddata[attr] = 'on'
# check if zone is currently watering manually (zone1_select_manual_mode)
for zone in self._faucet.zones:
attr = 'zone{}_select_manual_mode'.format(zone.id)
if zone.watering_time and attr in ddata.keys():
ddata[attr] = zone.watering_time
# check if rain delay is selected (zone0_rain_delay_select)
for zone in self._faucet.zones:
attr = 'zone{}_rain_delay_select'.format(zone.id - 1)
value = zone.rain_delay
if value and attr in ddata.keys():
if int(value) >= 2 and int(value) <= 7:
value = str(value) + 'days'
else:
value = str(value) + 'day'
ddata[attr] = value
return ddata | python | def preupdate(self, force_refresh=True):
"""Return a dict with all current options prior submitting request."""
ddata = MANUAL_OP_DATA.copy()
# force update to make sure status is accurate
if force_refresh:
self.update()
# select current controller and faucet
ddata['select_controller'] = \
self._parent.controllers.index(self._controller)
ddata['select_faucet'] = \
self._controller.faucets.index(self._faucet)
# check if zone is scheduled automatically (zone1_program_toggle)
# only add zoneX_program_toggle to ddata when needed,
# otherwise the field will be always on
for zone in self._faucet.zones:
attr = 'zone{}_program_toggle'.format(zone.id)
if zone.auto_watering:
ddata[attr] = 'on'
# check if zone is currently watering manually (zone1_select_manual_mode)
for zone in self._faucet.zones:
attr = 'zone{}_select_manual_mode'.format(zone.id)
if zone.watering_time and attr in ddata.keys():
ddata[attr] = zone.watering_time
# check if rain delay is selected (zone0_rain_delay_select)
for zone in self._faucet.zones:
attr = 'zone{}_rain_delay_select'.format(zone.id - 1)
value = zone.rain_delay
if value and attr in ddata.keys():
if int(value) >= 2 and int(value) <= 7:
value = str(value) + 'days'
else:
value = str(value) + 'day'
ddata[attr] = value
return ddata | [
"def",
"preupdate",
"(",
"self",
",",
"force_refresh",
"=",
"True",
")",
":",
"ddata",
"=",
"MANUAL_OP_DATA",
".",
"copy",
"(",
")",
"# force update to make sure status is accurate",
"if",
"force_refresh",
":",
"self",
".",
"update",
"(",
")",
"# select current controller and faucet",
"ddata",
"[",
"'select_controller'",
"]",
"=",
"self",
".",
"_parent",
".",
"controllers",
".",
"index",
"(",
"self",
".",
"_controller",
")",
"ddata",
"[",
"'select_faucet'",
"]",
"=",
"self",
".",
"_controller",
".",
"faucets",
".",
"index",
"(",
"self",
".",
"_faucet",
")",
"# check if zone is scheduled automatically (zone1_program_toggle)",
"# only add zoneX_program_toogle to ddata when needed,",
"# otherwise the field will be always on",
"for",
"zone",
"in",
"self",
".",
"_faucet",
".",
"zones",
":",
"attr",
"=",
"'zone{}_program_toggle'",
".",
"format",
"(",
"zone",
".",
"id",
")",
"if",
"zone",
".",
"auto_watering",
":",
"ddata",
"[",
"attr",
"]",
"=",
"'on'",
"# check if zone current watering manually (zone1_select_manual_mode)",
"for",
"zone",
"in",
"self",
".",
"_faucet",
".",
"zones",
":",
"attr",
"=",
"'zone{}_select_manual_mode'",
".",
"format",
"(",
"zone",
".",
"id",
")",
"if",
"zone",
".",
"watering_time",
"and",
"attr",
"in",
"ddata",
".",
"keys",
"(",
")",
":",
"ddata",
"[",
"attr",
"]",
"=",
"zone",
".",
"watering_time",
"# check if rain delay is selected (zone0_rain_delay_select)",
"for",
"zone",
"in",
"self",
".",
"_faucet",
".",
"zones",
":",
"attr",
"=",
"'zone{}_rain_delay_select'",
".",
"format",
"(",
"zone",
".",
"id",
"-",
"1",
")",
"value",
"=",
"zone",
".",
"rain_delay",
"if",
"value",
"and",
"attr",
"in",
"ddata",
".",
"keys",
"(",
")",
":",
"if",
"int",
"(",
"value",
")",
">=",
"2",
"and",
"int",
"(",
"value",
")",
"<=",
"7",
":",
"value",
"=",
"str",
"(",
"value",
")",
"+",
"'days'",
"else",
":",
"value",
"=",
"str",
"(",
"value",
")",
"+",
"'day'",
"ddata",
"[",
"attr",
"]",
"=",
"value",
"return",
"ddata"
] | Return a dict with all current options prior to submitting request. | [
"Return",
"a",
"dict",
"with",
"all",
"current",
"options",
"prior",
"submitting",
"request",
"."
] | 1847fa913e5ba79645d51bf23637860d68c67dbf | https://github.com/tchellomello/raincloudy/blob/1847fa913e5ba79645d51bf23637860d68c67dbf/raincloudy/faucet.py#L347-L386 | train |
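The key subtlety in `preupdate` is that `zoneX_program_toggle` is only present in the payload when auto-watering is enabled; absence means off, mirroring HTML checkbox semantics. A self-contained sketch with made-up zone states:

```python
# Standalone sketch of the checkbox-style payload assembly in preupdate().
zones = {1: True, 2: False, 3: True, 4: False}  # made-up auto_watering states

ddata = {'select_controller': 0, 'select_faucet': 0}
for zone_id, auto_on in zones.items():
    if auto_on:  # field present only when enabled, like an HTML checkbox
        ddata['zone{}_program_toggle'.format(zone_id)] = 'on'

print(ddata)
# {'select_controller': 0, 'select_faucet': 0,
#  'zone1_program_toggle': 'on', 'zone3_program_toggle': 'on'}
```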
tchellomello/raincloudy | raincloudy/faucet.py | RainCloudyFaucetZone.submit_action | def submit_action(self, ddata):
"""Post data."""
self._controller.post(ddata,
url=HOME_ENDPOINT,
referer=HOME_ENDPOINT) | python | def submit_action(self, ddata):
"""Post data."""
self._controller.post(ddata,
url=HOME_ENDPOINT,
referer=HOME_ENDPOINT) | [
"def",
"submit_action",
"(",
"self",
",",
"ddata",
")",
":",
"self",
".",
"_controller",
".",
"post",
"(",
"ddata",
",",
"url",
"=",
"HOME_ENDPOINT",
",",
"referer",
"=",
"HOME_ENDPOINT",
")"
] | Post data. | [
"Post",
"data",
"."
] | 1847fa913e5ba79645d51bf23637860d68c67dbf | https://github.com/tchellomello/raincloudy/blob/1847fa913e5ba79645d51bf23637860d68c67dbf/raincloudy/faucet.py#L388-L392 | train |
tchellomello/raincloudy | raincloudy/core.py | RainCloudy.controller | def controller(self):
"""Show current linked controllers."""
if hasattr(self, 'controllers'):
if len(self.controllers) > 1:
# in the future, we should support more controllers
raise TypeError("Only one controller per account.")
return self.controllers[0]
raise AttributeError("There is no controller assigned.") | python | def controller(self):
"""Show current linked controllers."""
if hasattr(self, 'controllers'):
if len(self.controllers) > 1:
# in the future, we should support more controllers
raise TypeError("Only one controller per account.")
return self.controllers[0]
raise AttributeError("There is no controller assigned.") | [
"def",
"controller",
"(",
"self",
")",
":",
"if",
"hasattr",
"(",
"self",
",",
"'controllers'",
")",
":",
"if",
"len",
"(",
"self",
".",
"controllers",
")",
">",
"1",
":",
"# in the future, we should support more controllers",
"raise",
"TypeError",
"(",
"\"Only one controller per account.\"",
")",
"return",
"self",
".",
"controllers",
"[",
"0",
"]",
"raise",
"AttributeError",
"(",
"\"There is no controller assigned.\"",
")"
] | Show current linked controllers. | [
"Show",
"current",
"linked",
"controllers",
"."
] | 1847fa913e5ba79645d51bf23637860d68c67dbf | https://github.com/tchellomello/raincloudy/blob/1847fa913e5ba79645d51bf23637860d68c67dbf/raincloudy/core.py#L123-L130 | train |
tchellomello/raincloudy | raincloudy/controller.py | RainCloudyController._assign_faucets | def _assign_faucets(self, faucets):
"""Assign RainCloudyFaucet objects to self.faucets."""
if not faucets:
raise TypeError("Controller does not have a faucet assigned.")
for faucet_id in faucets:
self.faucets.append(
RainCloudyFaucet(self._parent, self, faucet_id)) | python | def _assign_faucets(self, faucets):
"""Assign RainCloudyFaucet objects to self.faucets."""
if not faucets:
raise TypeError("Controller does not have a faucet assigned.")
for faucet_id in faucets:
self.faucets.append(
RainCloudyFaucet(self._parent, self, faucet_id)) | [
"def",
"_assign_faucets",
"(",
"self",
",",
"faucets",
")",
":",
"if",
"not",
"faucets",
":",
"raise",
"TypeError",
"(",
"\"Controller does not have a faucet assigned.\"",
")",
"for",
"faucet_id",
"in",
"faucets",
":",
"self",
".",
"faucets",
".",
"append",
"(",
"RainCloudyFaucet",
"(",
"self",
".",
"_parent",
",",
"self",
",",
"faucet_id",
")",
")"
] | Assign RainCloudyFaucet objects to self.faucets. | [
"Assign",
"RainCloudyFaucet",
"objects",
"to",
"self",
".",
"faucets",
"."
] | 1847fa913e5ba79645d51bf23637860d68c67dbf | https://github.com/tchellomello/raincloudy/blob/1847fa913e5ba79645d51bf23637860d68c67dbf/raincloudy/controller.py#L48-L55 | train |
tchellomello/raincloudy | raincloudy/controller.py | RainCloudyController.post | def post(self, ddata, url=SETUP_ENDPOINT, referer=SETUP_ENDPOINT):
"""Method to update some attributes on namespace."""
headers = HEADERS.copy()
if referer is None:
headers.pop('Referer')
else:
headers['Referer'] = referer
# append csrftoken
if 'csrfmiddlewaretoken' not in ddata.keys():
ddata['csrfmiddlewaretoken'] = self._parent.csrftoken
req = self._parent.client.post(url, headers=headers, data=ddata)
if req.status_code == 200:
self.update() | python | def post(self, ddata, url=SETUP_ENDPOINT, referer=SETUP_ENDPOINT):
"""Method to update some attributes on namespace."""
headers = HEADERS.copy()
if referer is None:
headers.pop('Referer')
else:
headers['Referer'] = referer
# append csrftoken
if 'csrfmiddlewaretoken' not in ddata.keys():
ddata['csrfmiddlewaretoken'] = self._parent.csrftoken
req = self._parent.client.post(url, headers=headers, data=ddata)
if req.status_code == 200:
self.update() | [
"def",
"post",
"(",
"self",
",",
"ddata",
",",
"url",
"=",
"SETUP_ENDPOINT",
",",
"referer",
"=",
"SETUP_ENDPOINT",
")",
":",
"headers",
"=",
"HEADERS",
".",
"copy",
"(",
")",
"if",
"referer",
"is",
"None",
":",
"headers",
".",
"pop",
"(",
"'Referer'",
")",
"else",
":",
"headers",
"[",
"'Referer'",
"]",
"=",
"referer",
"# append csrftoken",
"if",
"'csrfmiddlewaretoken'",
"not",
"in",
"ddata",
".",
"keys",
"(",
")",
":",
"ddata",
"[",
"'csrfmiddlewaretoken'",
"]",
"=",
"self",
".",
"_parent",
".",
"csrftoken",
"req",
"=",
"self",
".",
"_parent",
".",
"client",
".",
"post",
"(",
"url",
",",
"headers",
"=",
"headers",
",",
"data",
"=",
"ddata",
")",
"if",
"req",
".",
"status_code",
"==",
"200",
":",
"self",
".",
"update",
"(",
")"
] | Method to update some attributes on namespace. | [
"Method",
"to",
"update",
"some",
"attributes",
"on",
"namespace",
"."
] | 1847fa913e5ba79645d51bf23637860d68c67dbf | https://github.com/tchellomello/raincloudy/blob/1847fa913e5ba79645d51bf23637860d68c67dbf/raincloudy/controller.py#L64-L78 | train |
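A standalone sketch of the header and CSRF handling in `post` (copy the base headers, set or drop `Referer`, inject `csrfmiddlewaretoken` only when absent); the base header values and the URL are placeholders.

```python
# Self-contained sketch of the header/token handling in RainCloudyController.post.
HEADERS = {'User-Agent': 'raincloudy', 'Referer': 'placeholder'}  # placeholder base

def prepare(ddata, csrftoken, referer='https://example.invalid/setup/'):
    headers = HEADERS.copy()
    if referer is None:
        headers.pop('Referer')  # caller explicitly suppressed the Referer
    else:
        headers['Referer'] = referer
    if 'csrfmiddlewaretoken' not in ddata:
        ddata['csrfmiddlewaretoken'] = csrftoken
    return headers, ddata

headers, data = prepare({'controller_name': 'backyard'}, 'token123')
```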
tchellomello/raincloudy | raincloudy/controller.py | RainCloudyController._get_cu_and_fu_status | def _get_cu_and_fu_status(self):
"""Submit GET request to update information."""
# adjust headers
headers = HEADERS.copy()
headers['Accept'] = '*/*'
headers['X-Requested-With'] = 'XMLHttpRequest'
headers['X-CSRFToken'] = self._parent.csrftoken
args = '?controller_serial=' + self.serial \
+ '&faucet_serial=' + self.faucet.serial
req = self._parent.client.get(STATUS_ENDPOINT + args,
headers=headers)
# token probably expired, then try again
if req.status_code == 403:
self._parent.login()
self.update()
elif req.status_code == 200:
self.attributes = req.json()
else:
req.raise_for_status() | python | def _get_cu_and_fu_status(self):
"""Submit GET request to update information."""
# adjust headers
headers = HEADERS.copy()
headers['Accept'] = '*/*'
headers['X-Requested-With'] = 'XMLHttpRequest'
headers['X-CSRFToken'] = self._parent.csrftoken
args = '?controller_serial=' + self.serial \
+ '&faucet_serial=' + self.faucet.serial
req = self._parent.client.get(STATUS_ENDPOINT + args,
headers=headers)
# token probably expired, then try again
if req.status_code == 403:
self._parent.login()
self.update()
elif req.status_code == 200:
self.attributes = req.json()
else:
req.raise_for_status() | [
"def",
"_get_cu_and_fu_status",
"(",
"self",
")",
":",
"# adjust headers",
"headers",
"=",
"HEADERS",
".",
"copy",
"(",
")",
"headers",
"[",
"'Accept'",
"]",
"=",
"'*/*'",
"headers",
"[",
"'X-Requested-With'",
"]",
"=",
"'XMLHttpRequest'",
"headers",
"[",
"'X-CSRFToken'",
"]",
"=",
"self",
".",
"_parent",
".",
"csrftoken",
"args",
"=",
"'?controller_serial='",
"+",
"self",
".",
"serial",
"+",
"'&faucet_serial='",
"+",
"self",
".",
"faucet",
".",
"serial",
"req",
"=",
"self",
".",
"_parent",
".",
"client",
".",
"get",
"(",
"STATUS_ENDPOINT",
"+",
"args",
",",
"headers",
"=",
"headers",
")",
"# token probably expired, then try again",
"if",
"req",
".",
"status_code",
"==",
"403",
":",
"self",
".",
"_parent",
".",
"login",
"(",
")",
"self",
".",
"update",
"(",
")",
"elif",
"req",
".",
"status_code",
"==",
"200",
":",
"self",
".",
"attributes",
"=",
"req",
".",
"json",
"(",
")",
"else",
":",
"req",
".",
"raise_for_status",
"(",
")"
] | Submit GET request to update information. | [
"Submit",
"GET",
"request",
"to",
"update",
"information",
"."
] | 1847fa913e5ba79645d51bf23637860d68c67dbf | https://github.com/tchellomello/raincloudy/blob/1847fa913e5ba79645d51bf23637860d68c67dbf/raincloudy/controller.py#L80-L101 | train |
tchellomello/raincloudy | raincloudy/controller.py | RainCloudyController.name | def name(self, value):
"""Set a new name to controller."""
data = {
'_set_controller_name': 'Set Name',
'controller_name': value,
}
self.post(data, url=SETUP_ENDPOINT, referer=SETUP_ENDPOINT) | python | def name(self, value):
"""Set a new name to controller."""
data = {
'_set_controller_name': 'Set Name',
'controller_name': value,
}
self.post(data, url=SETUP_ENDPOINT, referer=SETUP_ENDPOINT) | [
"def",
"name",
"(",
"self",
",",
"value",
")",
":",
"data",
"=",
"{",
"'_set_controller_name'",
":",
"'Set Name'",
",",
"'controller_name'",
":",
"value",
",",
"}",
"self",
".",
"post",
"(",
"data",
",",
"url",
"=",
"SETUP_ENDPOINT",
",",
"referer",
"=",
"SETUP_ENDPOINT",
")"
] | Set a new name for the controller. | [
"Set",
"a",
"new",
"name",
"to",
"controller",
"."
] | 1847fa913e5ba79645d51bf23637860d68c67dbf | https://github.com/tchellomello/raincloudy/blob/1847fa913e5ba79645d51bf23637860d68c67dbf/raincloudy/controller.py#L148-L154 | train |
tchellomello/raincloudy | raincloudy/controller.py | RainCloudyController.faucet | def faucet(self):
"""Show current linked faucet."""
if hasattr(self, 'faucets'):
if len(self.faucets) > 1:
# in the future, we should support more faucets
raise TypeError("Only one faucet per account.")
return self.faucets[0]
raise AttributeError("There is no faucet assigned.") | python | def faucet(self):
"""Show current linked faucet."""
if hasattr(self, 'faucets'):
if len(self.faucets) > 1:
# in the future, we should support more faucets
raise TypeError("Only one faucet per account.")
return self.faucets[0]
raise AttributeError("There is no faucet assigned.") | [
"def",
"faucet",
"(",
"self",
")",
":",
"if",
"hasattr",
"(",
"self",
",",
"'faucets'",
")",
":",
"if",
"len",
"(",
"self",
".",
"faucets",
")",
">",
"1",
":",
"# in the future, we should support more faucets",
"raise",
"TypeError",
"(",
"\"Only one faucet per account.\"",
")",
"return",
"self",
".",
"faucets",
"[",
"0",
"]",
"raise",
"AttributeError",
"(",
"\"There is no faucet assigned.\"",
")"
] | Show current linked faucet. | [
"Show",
"current",
"linked",
"faucet",
"."
] | 1847fa913e5ba79645d51bf23637860d68c67dbf | https://github.com/tchellomello/raincloudy/blob/1847fa913e5ba79645d51bf23637860d68c67dbf/raincloudy/controller.py#L167-L174 | train |
tchellomello/raincloudy | raincloudy/helpers.py | serial_finder | def serial_finder(data):
"""
Find controller serial and faucet_serial from the setup page.
<select id="id_select_controller2" name="select_controller" >
<option value='0' selected='selected'>1 - Controller001</option>
</select>
:param data: text to be parsed
:type data: BeautifulSoup object
:return: a dict with controller_serial and faucet_serial
:rtype: dict
:raises RainCloudyException: if no valid controller or faucet serial was found in the data
"""
if not isinstance(data, BeautifulSoup):
raise TypeError("Function requires BeautifulSoup HTML element.")
try:
# The setup page contains a select box for each controller and each
# faucet
controllersElement = data.find_all('select',
{'id': 'id_select_controller2'})
faucetsElement = data.find_all('select',
{'id': 'id_select_faucet2'})
controllerSerial = controllersElement[0].text.split('-')[1].strip()
faucetSerial = faucetsElement[0].text.split('-')[1].strip()
# currently only one faucet is supported on the code
# we have plans to support it in the future
parsed_dict = {}
parsed_dict['controller_serial'] = controllerSerial
parsed_dict['faucet_serial'] = [faucetSerial]
return parsed_dict
except (AttributeError, IndexError, ValueError):
raise RainCloudyException(
'Could not find any valid controller or faucet') | python | def serial_finder(data):
"""
Find controller serial and faucet_serial from the setup page.
<select id="id_select_controller2" name="select_controller" >
<option value='0' selected='selected'>1 - Controller001</option>
</select>
:param data: text to be parsed
:type data: BeautifulSoup object
:return: a dict with controller_serial and faucet_serial
:rtype: dict
:raises RainCloudyException: if no valid controller or faucet serial was found in the data
"""
if not isinstance(data, BeautifulSoup):
raise TypeError("Function requires BeautifulSoup HTML element.")
try:
# The setup page contains a select box for each controller and each
# faucet
controllersElement = data.find_all('select',
{'id': 'id_select_controller2'})
faucetsElement = data.find_all('select',
{'id': 'id_select_faucet2'})
controllerSerial = controllersElement[0].text.split('-')[1].strip()
faucetSerial = faucetsElement[0].text.split('-')[1].strip()
# currently only one faucet is supported on the code
# we have plans to support it in the future
parsed_dict = {}
parsed_dict['controller_serial'] = controllerSerial
parsed_dict['faucet_serial'] = [faucetSerial]
return parsed_dict
except (AttributeError, IndexError, ValueError):
raise RainCloudyException(
'Could not find any valid controller or faucet') | [
"def",
"serial_finder",
"(",
"data",
")",
":",
"if",
"not",
"isinstance",
"(",
"data",
",",
"BeautifulSoup",
")",
":",
"raise",
"TypeError",
"(",
"\"Function requires BeautifulSoup HTML element.\"",
")",
"try",
":",
"# The setup page contains a select box for each controller and each",
"# faucet",
"controllersElement",
"=",
"data",
".",
"find_all",
"(",
"'select'",
",",
"{",
"'id'",
":",
"'id_select_controller2'",
"}",
")",
"faucetsElement",
"=",
"data",
".",
"find_all",
"(",
"'select'",
",",
"{",
"'id'",
":",
"'id_select_faucet2'",
"}",
")",
"controllerSerial",
"=",
"controllersElement",
"[",
"0",
"]",
".",
"text",
".",
"split",
"(",
"'-'",
")",
"[",
"1",
"]",
".",
"strip",
"(",
")",
"faucetSerial",
"=",
"faucetsElement",
"[",
"0",
"]",
".",
"text",
".",
"split",
"(",
"'-'",
")",
"[",
"1",
"]",
".",
"strip",
"(",
")",
"# currently only one faucet is supported on the code",
"# we have plans to support it in the future",
"parsed_dict",
"=",
"{",
"}",
"parsed_dict",
"[",
"'controller_serial'",
"]",
"=",
"controllerSerial",
"parsed_dict",
"[",
"'faucet_serial'",
"]",
"=",
"[",
"faucetSerial",
"]",
"return",
"parsed_dict",
"except",
"(",
"AttributeError",
",",
"IndexError",
",",
"ValueError",
")",
":",
"raise",
"RainCloudyException",
"(",
"'Could not find any valid controller or faucet'",
")"
] | Find controller serial and faucet_serial from the setup page.
<select id="id_select_controller2" name="select_controller" >
<option value='0' selected='selected'>1 - Controller001</option>
</select>
:param data: text to be parsed
:type data: BeautifulSoup object
:return: a dict with controller_serial and faucet_serial
:rtype: dict
:raises RainCloudyException: if no valid controller or faucet serial was found in the data | [
"Find",
"controller",
"serial",
"and",
"faucet_serial",
"from",
"the",
"setup",
"page",
"."
] | 1847fa913e5ba79645d51bf23637860d68c67dbf | https://github.com/tchellomello/raincloudy/blob/1847fa913e5ba79645d51bf23637860d68c67dbf/raincloudy/helpers.py#L15-L54 | train |
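A quick usage sketch for serial_finder, feeding it markup shaped like the snippet quoted in the docstring; the serial values are made up.

from bs4 import BeautifulSoup
from raincloudy.helpers import serial_finder

html = """
<select id="id_select_controller2"><option>1 - CTRL0001</option></select>
<select id="id_select_faucet2"><option>1 - FCT0001</option></select>
"""
soup = BeautifulSoup(html, 'html.parser')
print(serial_finder(soup))
# {'controller_serial': 'CTRL0001', 'faucet_serial': ['FCT0001']}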
tchellomello/raincloudy | raincloudy/helpers.py | find_controller_or_faucet_name | def find_controller_or_faucet_name(data, p_type):
"""
Find on the HTML document the controller name.
# expected result
<label for="select_controller">
<span class="more_info" id="#styling-type-light" data-hasqtip="26" \
title="Select Control Unit to display." >Control Unit:</span></label><br/>
<select class="simpleselect" id="id_select_controller" \
name="select_controller" onchange="submit()" >
<option value="0" selected="selected">HERE_IS_CONTROLLER_NAME
:param data: BeautifulSoup object
:param p_type: parameter type. (controller or faucet)
:return: controller or valve name
:rtype: string.
:raises TypeError: if data is not a BeautifulSoup object
Returns None if the controller name was not found.
"""
if not isinstance(data, BeautifulSoup):
raise TypeError("Function requires BeautilSoup HTML element.")
if not (p_type == 'controller' or p_type == 'faucet'):
raise TypeError("Function p_type must be controller or faucet")
try:
search_field = 'id_select_{0}'.format(p_type)
child = data.find('select', {'id': search_field})
return child.get_text().strip()
except AttributeError:
return None | python | def find_controller_or_faucet_name(data, p_type):
"""
Find on the HTML document the controller name.
# expected result
<label for="select_controller">
<span class="more_info" id="#styling-type-light" data-hasqtip="26" \
title="Select Control Unit to display." >Control Unit:</span></label><br/>
<select class="simpleselect" id="id_select_controller" \
name="select_controller" onchange="submit()" >
<option value="0" selected="selected">HERE_IS_CONTROLLER_NAME
:param data: BeautifulSoup object
:param p_type: parameter type. (controller or faucet)
:return: controller or valve name
:rtype: string.
:raises TypeError: if data is not a BeautifulSoup object
Returns None if the controller name was not found.
"""
if not isinstance(data, BeautifulSoup):
raise TypeError("Function requires BeautilSoup HTML element.")
if not (p_type == 'controller' or p_type == 'faucet'):
raise TypeError("Function p_type must be controller or faucet")
try:
search_field = 'id_select_{0}'.format(p_type)
child = data.find('select', {'id': search_field})
return child.get_text().strip()
except AttributeError:
return None | [
"def",
"find_controller_or_faucet_name",
"(",
"data",
",",
"p_type",
")",
":",
"if",
"not",
"isinstance",
"(",
"data",
",",
"BeautifulSoup",
")",
":",
"raise",
"TypeError",
"(",
"\"Function requires BeautilSoup HTML element.\"",
")",
"if",
"not",
"(",
"p_type",
"==",
"'controller'",
"or",
"p_type",
"==",
"'faucet'",
")",
":",
"raise",
"TypeError",
"(",
"\"Function p_type must be controller or faucet\"",
")",
"try",
":",
"search_field",
"=",
"'id_select_{0}'",
".",
"format",
"(",
"p_type",
")",
"child",
"=",
"data",
".",
"find",
"(",
"'select'",
",",
"{",
"'id'",
":",
"search_field",
"}",
")",
"return",
"child",
".",
"get_text",
"(",
")",
".",
"strip",
"(",
")",
"except",
"AttributeError",
":",
"return",
"None"
] | Find on the HTML document the controller name.
# expected result
<label for="select_controller">
<span class="more_info" id="#styling-type-light" data-hasqtip="26" \
title="Select Control Unit to display." >Control Unit:</span></label><br/>
<select class="simpleselect" id="id_select_controller" \
name="select_controller" onchange="submit()" >
<option value="0" selected="selected">HERE_IS_CONTROLLER_NAME
:param data: BeautifulSoup object
:param p_type: parameter type. (controller or faucet)
:return: controller or valve name
:rtype: string.
:raises TypeError: if data is not a BeautifulSoup object
Returns None if the controller name was not found. | [
"Find",
"on",
"the",
"HTML",
"document",
"the",
"controller",
"name",
"."
] | 1847fa913e5ba79645d51bf23637860d68c67dbf | https://github.com/tchellomello/raincloudy/blob/1847fa913e5ba79645d51bf23637860d68c67dbf/raincloudy/helpers.py#L93-L123 | train |
tchellomello/raincloudy | raincloudy/helpers.py | find_zone_name | def find_zone_name(data, zone_id):
"""
Find on the HTML document the zone name.
# expected result
<span class="more_info" \
title="Zone can be renamed on Setup tab">1 - zone1</span>,
:param data: BeautifulSoup object
:param zone_id: zone id
:return: zone name
:rtype: string
:raises TypeError: if data is not a BeautifulSoup object
Returns None if the zone name was not found.
"""
if not isinstance(data, BeautifulSoup):
raise TypeError("Function requires BeautilSoup HTML element.")
table = data.find('table', {'class': 'zone_table'})
table_body = table.find('tbody')
rows = table_body.find_all('span', {'class': 'more_info'})
for row in rows:
if row.get_text().startswith(str(zone_id)):
return row.get_text()[4:].strip()
return None | python | def find_zone_name(data, zone_id):
"""
Find on the HTML document the zone name.
# expected result
<span class="more_info" \
title="Zone can be renamed on Setup tab">1 - zone1</span>,
:param data: BeautifulSoup object
:param zone_id: zone id
:return: zone name
:rtype: string
:raises TypeError: if data is not a BeautifulSoup object
:raises IndexError: return None because controller name was not found
"""
if not isinstance(data, BeautifulSoup):
raise TypeError("Function requires BeautilSoup HTML element.")
table = data.find('table', {'class': 'zone_table'})
table_body = table.find('tbody')
rows = table_body.find_all('span', {'class': 'more_info'})
for row in rows:
if row.get_text().startswith(str(zone_id)):
return row.get_text()[4:].strip()
return None | [
"def",
"find_zone_name",
"(",
"data",
",",
"zone_id",
")",
":",
"if",
"not",
"isinstance",
"(",
"data",
",",
"BeautifulSoup",
")",
":",
"raise",
"TypeError",
"(",
"\"Function requires BeautilSoup HTML element.\"",
")",
"table",
"=",
"data",
".",
"find",
"(",
"'table'",
",",
"{",
"'class'",
":",
"'zone_table'",
"}",
")",
"table_body",
"=",
"table",
".",
"find",
"(",
"'tbody'",
")",
"rows",
"=",
"table_body",
".",
"find_all",
"(",
"'span'",
",",
"{",
"'class'",
":",
"'more_info'",
"}",
")",
"for",
"row",
"in",
"rows",
":",
"if",
"row",
".",
"get_text",
"(",
")",
".",
"startswith",
"(",
"str",
"(",
"zone_id",
")",
")",
":",
"return",
"row",
".",
"get_text",
"(",
")",
"[",
"4",
":",
"]",
".",
"strip",
"(",
")",
"return",
"None"
] | Find on the HTML document the zone name.
# expected result
<span class="more_info" \
title="Zone can be renamed on Setup tab">1 - zone1</span>,
:param data: BeautifulSoup object
:param zone_id: zone id
:return: zone name
:rtype: string
:raises TypeError: if data is not a BeautifulSoup object
Returns None if the zone name was not found. | [
"Find",
"on",
"the",
"HTML",
"document",
"the",
"zone",
"name",
"."
] | 1847fa913e5ba79645d51bf23637860d68c67dbf | https://github.com/tchellomello/raincloudy/blob/1847fa913e5ba79645d51bf23637860d68c67dbf/raincloudy/helpers.py#L126-L150 | train |
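Usage sketch for find_zone_name with minimal markup matching what the function scans (the class names come from the record; the zone name is invented).

from bs4 import BeautifulSoup
from raincloudy.helpers import find_zone_name

html = """
<table class="zone_table"><tbody>
<tr><td><span class="more_info">1 - front lawn</span></td></tr>
</tbody></table>
"""
soup = BeautifulSoup(html, 'html.parser')
print(find_zone_name(soup, 1))  # 'front lawn'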
django-getpaid/django-getpaid | example/orders/listeners.py | new_payment_query_listener | def new_payment_query_listener(sender, order=None, payment=None, **kwargs):
"""
Here we fill only the two obligatory payment fields and then leave the signal handler.
"""
payment.amount = order.total
payment.currency = order.currency
logger.debug("new_payment_query_listener, amount=%s, currency=%s",
payment.amount, payment.currency) | python | def new_payment_query_listener(sender, order=None, payment=None, **kwargs):
"""
Here we fill only the two obligatory payment fields and then leave the signal handler.
"""
payment.amount = order.total
payment.currency = order.currency
logger.debug("new_payment_query_listener, amount=%s, currency=%s",
payment.amount, payment.currency) | [
"def",
"new_payment_query_listener",
"(",
"sender",
",",
"order",
"=",
"None",
",",
"payment",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"payment",
".",
"amount",
"=",
"order",
".",
"total",
"payment",
".",
"currency",
"=",
"order",
".",
"currency",
"logger",
".",
"debug",
"(",
"\"new_payment_query_listener, amount=%s, currency=%s\"",
",",
"payment",
".",
"amount",
",",
"payment",
".",
"currency",
")"
] | Here we fill only the two obligatory payment fields and then leave the signal handler. | [
"Here",
"we",
"fill",
"only",
"two",
"obligatory",
"fields",
"of",
"payment",
"and",
"leave",
"signal",
"handler"
] | f32badcd0ebc28d24adceb4f649c0c2b84c03987 | https://github.com/django-getpaid/django-getpaid/blob/f32badcd0ebc28d24adceb4f649c0c2b84c03987/example/orders/listeners.py#L6-L14 | train |
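A listener like this only takes effect once connected to getpaid's signal; a hookup sketch, assuming the signal is exposed as getpaid.signals.new_payment_query (the name matches the listener, but is an assumption here).

from getpaid.signals import new_payment_query

new_payment_query.connect(new_payment_query_listener)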
django-getpaid/django-getpaid | example/orders/listeners.py | payment_status_changed_listener | def payment_status_changed_listener(sender, instance, old_status, new_status, **kwargs):
"""
Here we will actually do something, when payment is accepted.
E.g. lets change an order status.
"""
logger.debug("payment_status_changed_listener, old=%s, new=%s", old_status,
new_status)
if old_status != 'paid' and new_status == 'paid':
# Ensures that we process the order only once
instance.order.status = 'P'
instance.order.save() | python | def payment_status_changed_listener(sender, instance, old_status, new_status, **kwargs):
"""
Here we will actually do something, when payment is accepted.
E.g. lets change an order status.
"""
logger.debug("payment_status_changed_listener, old=%s, new=%s", old_status,
new_status)
if old_status != 'paid' and new_status == 'paid':
# Ensures that we process the order only once
instance.order.status = 'P'
instance.order.save() | [
"def",
"payment_status_changed_listener",
"(",
"sender",
",",
"instance",
",",
"old_status",
",",
"new_status",
",",
"*",
"*",
"kwargs",
")",
":",
"logger",
".",
"debug",
"(",
"\"payment_status_changed_listener, old=%s, new=%s\"",
",",
"old_status",
",",
"new_status",
")",
"if",
"old_status",
"!=",
"'paid'",
"and",
"new_status",
"==",
"'paid'",
":",
"# Ensures that we process order only one",
"instance",
".",
"order",
".",
"status",
"=",
"'P'",
"instance",
".",
"order",
".",
"save",
"(",
")"
] | Here we will actually do something, when payment is accepted.
E.g. lets change an order status. | [
"Here",
"we",
"will",
"actually",
"do",
"something",
"when",
"payment",
"is",
"accepted",
".",
"E",
".",
"g",
".",
"lets",
"change",
"an",
"order",
"status",
"."
] | f32badcd0ebc28d24adceb4f649c0c2b84c03987 | https://github.com/django-getpaid/django-getpaid/blob/f32badcd0ebc28d24adceb4f649c0c2b84c03987/example/orders/listeners.py#L17-L27 | train |
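The same hookup pattern applies to the status-change listener; the old_status guard above also makes the handler safe to run on duplicate notifications. The signal name is assumed by analogy.

from getpaid.signals import payment_status_changed

payment_status_changed.connect(payment_status_changed_listener)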
django-getpaid/django-getpaid | getpaid/models.py | register_to_payment | def register_to_payment(order_class, **kwargs):
"""
A function for registering unaware order class to ``getpaid``. This will
generate a ``Payment`` model class that will store payments with
ForeignKey to original order class
This will also build a model class for every enabled backend.
"""
global Payment
global Order
class Payment(PaymentFactory.construct(order=order_class, **kwargs)):
objects = PaymentManager()
class Meta:
ordering = ('-created_on',)
verbose_name = _("Payment")
verbose_name_plural = _("Payments")
Order = order_class
# Now build models for backends
backend_models_modules = import_backend_modules('models')
for backend_name, models_module in backend_models_modules.items():
for model in models_module.build_models(Payment):
apps.register_model(backend_name, model)
return Payment | python | def register_to_payment(order_class, **kwargs):
"""
A function for registering unaware order class to ``getpaid``. This will
generate a ``Payment`` model class that will store payments with
ForeignKey to original order class
This will also build a model class for every enabled backend.
"""
global Payment
global Order
class Payment(PaymentFactory.construct(order=order_class, **kwargs)):
objects = PaymentManager()
class Meta:
ordering = ('-created_on',)
verbose_name = _("Payment")
verbose_name_plural = _("Payments")
Order = order_class
# Now build models for backends
backend_models_modules = import_backend_modules('models')
for backend_name, models_module in backend_models_modules.items():
for model in models_module.build_models(Payment):
apps.register_model(backend_name, model)
return Payment | [
"def",
"register_to_payment",
"(",
"order_class",
",",
"*",
"*",
"kwargs",
")",
":",
"global",
"Payment",
"global",
"Order",
"class",
"Payment",
"(",
"PaymentFactory",
".",
"construct",
"(",
"order",
"=",
"order_class",
",",
"*",
"*",
"kwargs",
")",
")",
":",
"objects",
"=",
"PaymentManager",
"(",
")",
"class",
"Meta",
":",
"ordering",
"=",
"(",
"'-created_on'",
",",
")",
"verbose_name",
"=",
"_",
"(",
"\"Payment\"",
")",
"verbose_name_plural",
"=",
"_",
"(",
"\"Payments\"",
")",
"Order",
"=",
"order_class",
"# Now build models for backends",
"backend_models_modules",
"=",
"import_backend_modules",
"(",
"'models'",
")",
"for",
"backend_name",
",",
"models_module",
"in",
"backend_models_modules",
".",
"items",
"(",
")",
":",
"for",
"model",
"in",
"models_module",
".",
"build_models",
"(",
"Payment",
")",
":",
"apps",
".",
"register_model",
"(",
"backend_name",
",",
"model",
")",
"return",
"Payment"
] | A function for registering unaware order class to ``getpaid``. This will
generate a ``Payment`` model class that will store payments with
ForeignKey to original order class
This will also build a model class for every enabled backend. | [
"A",
"function",
"for",
"registering",
"unaware",
"order",
"class",
"to",
"getpaid",
".",
"This",
"will",
"generate",
"a",
"Payment",
"model",
"class",
"that",
"will",
"store",
"payments",
"with",
"ForeignKey",
"to",
"original",
"order",
"class"
] | f32badcd0ebc28d24adceb4f649c0c2b84c03987 | https://github.com/django-getpaid/django-getpaid/blob/f32badcd0ebc28d24adceb4f649c0c2b84c03987/getpaid/models.py#L128-L155 | train |
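A registration sketch. Extra kwargs are forwarded to PaymentFactory.construct, so the accepted keys depend on that factory; unique and related_name below are assumptions, and Order stands for any order model.

from getpaid.models import register_to_payment
from orders.models import Order  # hypothetical order model

Payment = register_to_payment(Order, unique=False, related_name='payments')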
django-getpaid/django-getpaid | getpaid/utils.py | get_backend_choices | def get_backend_choices(currency=None):
"""
Get active backends modules. Backend list can be filtered by
supporting given currency.
"""
choices = []
backends_names = getattr(settings, 'GETPAID_BACKENDS', [])
for backend_name in backends_names:
backend = import_module(backend_name)
if currency:
if currency in backend.PaymentProcessor.BACKEND_ACCEPTED_CURRENCY:
choices.append(
(backend_name, backend.PaymentProcessor.BACKEND_NAME)
)
else:
choices.append(
(backend_name, backend.PaymentProcessor.BACKEND_NAME)
)
return choices | python | def get_backend_choices(currency=None):
"""
Get active backends modules. Backend list can be filtered by
supporting given currency.
"""
choices = []
backends_names = getattr(settings, 'GETPAID_BACKENDS', [])
for backend_name in backends_names:
backend = import_module(backend_name)
if currency:
if currency in backend.PaymentProcessor.BACKEND_ACCEPTED_CURRENCY:
choices.append(
(backend_name, backend.PaymentProcessor.BACKEND_NAME)
)
else:
choices.append(
(backend_name, backend.PaymentProcessor.BACKEND_NAME)
)
return choices | [
"def",
"get_backend_choices",
"(",
"currency",
"=",
"None",
")",
":",
"choices",
"=",
"[",
"]",
"backends_names",
"=",
"getattr",
"(",
"settings",
",",
"'GETPAID_BACKENDS'",
",",
"[",
"]",
")",
"for",
"backend_name",
"in",
"backends_names",
":",
"backend",
"=",
"import_module",
"(",
"backend_name",
")",
"if",
"currency",
":",
"if",
"currency",
"in",
"backend",
".",
"PaymentProcessor",
".",
"BACKEND_ACCEPTED_CURRENCY",
":",
"choices",
".",
"append",
"(",
"(",
"backend_name",
",",
"backend",
".",
"PaymentProcessor",
".",
"BACKEND_NAME",
")",
")",
"else",
":",
"choices",
".",
"append",
"(",
"(",
"backend_name",
",",
"backend",
".",
"PaymentProcessor",
".",
"BACKEND_NAME",
")",
")",
"return",
"choices"
] | Get active backends modules. Backend list can be filtered by
supporting given currency. | [
"Get",
"active",
"backends",
"modules",
".",
"Backend",
"list",
"can",
"be",
"filtered",
"by",
"supporting",
"given",
"currency",
"."
] | f32badcd0ebc28d24adceb4f649c0c2b84c03987 | https://github.com/django-getpaid/django-getpaid/blob/f32badcd0ebc28d24adceb4f649c0c2b84c03987/getpaid/utils.py#L29-L48 | train |
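A typical consumer is a payment-method form that offers only backends supporting the order's currency; a sketch, with an illustrative currency code.

from django import forms
from getpaid.utils import get_backend_choices

class PaymentMethodForm(forms.Form):
    backend = forms.ChoiceField(choices=get_backend_choices(currency='PLN'))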
django-getpaid/django-getpaid | getpaid/backends/payu_rest/__init__.py | PaymentProcessor.online | def online(cls, payload, ip, req_sig):
"""
Receive and analyze request from payment service with information on payment status change.
"""
from getpaid.models import Payment
params = json.loads(payload)
order_data = params.get('order', {})
pos_id = order_data.get('merchantPosId')
payment_id = order_data.get('extOrderId')
key2 = cls.get_backend_setting('key2')
if pos_id != cls.get_backend_setting('pos_id'):
logger.warning('Received message for different pos: {}'.format(pos_id))
return 'ERROR'
req_sig_dict = cls.parse_payu_sig(req_sig)
sig = cls.compute_sig(payload, key2, algorithm=req_sig_dict.get('algorithm', 'md5'))
if sig != req_sig_dict['signature']:
logger.warning('Received message with malformed signature. Payload: {}'.format(payload))
return 'ERROR'
try:
payment = Payment.objects.get(id=payment_id)
except Payment.DoesNotExist:
logger.warning('Received message for nonexistent payment: {}.\nPayload: {}'.format(payment_id, payload))
return 'ERROR'
status = order_data['status']
if payment.status != 'paid':
if status == 'COMPLETED':
payment.external_id = order_data['orderId']
payment.amount = Decimal(order_data['totalAmount']) / Decimal(100)
payment.amount_paid = payment.amount
payment.currency = order_data['currencyCode']
payment.paid_on = pendulum.parse(params['localReceiptDateTime']).in_tz('utc')
payment.description = order_data['description']
payment.change_status('paid')
elif status == 'PENDING':
payment.change_status('in_progress')
elif status in ['CANCELED', 'REJECTED']:
payment.change_status('cancelled')
return 'OK' | python | def online(cls, payload, ip, req_sig):
"""
Receive and analyze request from payment service with information on payment status change.
"""
from getpaid.models import Payment
params = json.loads(payload)
order_data = params.get('order', {})
pos_id = order_data.get('merchantPosId')
payment_id = order_data.get('extOrderId')
key2 = cls.get_backend_setting('key2')
if pos_id != cls.get_backend_setting('pos_id'):
logger.warning('Received message for different pos: {}'.format(pos_id))
return 'ERROR'
req_sig_dict = cls.parse_payu_sig(req_sig)
sig = cls.compute_sig(payload, key2, algorithm=req_sig_dict.get('algorithm', 'md5'))
if sig != req_sig_dict['signature']:
logger.warning('Received message with malformed signature. Payload: {}'.format(payload))
return 'ERROR'
try:
payment = Payment.objects.get(id=payment_id)
except Payment.DoesNotExist:
logger.warning('Received message for nonexistent payment: {}.\nPayload: {}'.format(payment_id, payload))
return 'ERROR'
status = order_data['status']
if payment.status != 'paid':
if status == 'COMPLETED':
payment.external_id = order_data['orderId']
payment.amount = Decimal(order_data['totalAmount']) / Decimal(100)
payment.amount_paid = payment.amount
payment.currency = order_data['currencyCode']
payment.paid_on = pendulum.parse(params['localReceiptDateTime']).in_tz('utc')
payment.description = order_data['description']
payment.change_status('paid')
elif status == 'PENDING':
payment.change_status('in_progress')
elif status in ['CANCELED', 'REJECTED']:
payment.change_status('cancelled')
return 'OK' | [
"def",
"online",
"(",
"cls",
",",
"payload",
",",
"ip",
",",
"req_sig",
")",
":",
"from",
"getpaid",
".",
"models",
"import",
"Payment",
"params",
"=",
"json",
".",
"loads",
"(",
"payload",
")",
"order_data",
"=",
"params",
".",
"get",
"(",
"'order'",
",",
"{",
"}",
")",
"pos_id",
"=",
"order_data",
".",
"get",
"(",
"'merchantPosId'",
")",
"payment_id",
"=",
"order_data",
".",
"get",
"(",
"'extOrderId'",
")",
"key2",
"=",
"cls",
".",
"get_backend_setting",
"(",
"'key2'",
")",
"if",
"pos_id",
"!=",
"cls",
".",
"get_backend_setting",
"(",
"'pos_id'",
")",
":",
"logger",
".",
"warning",
"(",
"'Received message for different pos: {}'",
".",
"format",
"(",
"pos_id",
")",
")",
"return",
"'ERROR'",
"req_sig_dict",
"=",
"cls",
".",
"parse_payu_sig",
"(",
"req_sig",
")",
"sig",
"=",
"cls",
".",
"compute_sig",
"(",
"payload",
",",
"key2",
",",
"algorithm",
"=",
"req_sig_dict",
".",
"get",
"(",
"'algorithm'",
",",
"'md5'",
")",
")",
"if",
"sig",
"!=",
"req_sig_dict",
"[",
"'signature'",
"]",
":",
"logger",
".",
"warning",
"(",
"'Received message with malformed signature. Payload: {}'",
".",
"format",
"(",
"payload",
")",
")",
"return",
"'ERROR'",
"try",
":",
"payment",
"=",
"Payment",
".",
"objects",
".",
"get",
"(",
"id",
"=",
"payment_id",
")",
"except",
"Payment",
".",
"DoesNotExist",
":",
"logger",
".",
"warning",
"(",
"'Received message for nonexistent payment: {}.\\nPayload: {}'",
".",
"format",
"(",
"payment_id",
",",
"payload",
")",
")",
"return",
"'ERROR'",
"status",
"=",
"order_data",
"[",
"'status'",
"]",
"if",
"payment",
".",
"status",
"!=",
"'paid'",
":",
"if",
"status",
"==",
"'COMPLETED'",
":",
"payment",
".",
"external_id",
"=",
"order_data",
"[",
"'orderId'",
"]",
"payment",
".",
"amount",
"=",
"Decimal",
"(",
"order_data",
"[",
"'totalAmount'",
"]",
")",
"/",
"Decimal",
"(",
"100",
")",
"payment",
".",
"amount_paid",
"=",
"payment",
".",
"amount",
"payment",
".",
"currenct",
"=",
"order_data",
"[",
"'currencyCode'",
"]",
"payment",
".",
"paid_on",
"=",
"pendulum",
".",
"parse",
"(",
"params",
"[",
"'localReceiptDateTime'",
"]",
")",
".",
"in_tz",
"(",
"'utc'",
")",
"payment",
".",
"description",
"=",
"order_data",
"[",
"'description'",
"]",
"payment",
".",
"change_status",
"(",
"'paid'",
")",
"elif",
"status",
"==",
"'PENDING'",
":",
"payment",
".",
"change_status",
"(",
"'in_progress'",
")",
"elif",
"status",
"in",
"[",
"'CANCELED'",
",",
"'REJECTED'",
"]",
":",
"payment",
".",
"change_status",
"(",
"'cancelled'",
")",
"return",
"'OK'"
] | Receive and analyze request from payment service with information on payment status change. | [
"Receive",
"and",
"analyze",
"request",
"from",
"payment",
"service",
"with",
"information",
"on",
"payment",
"status",
"change",
"."
] | f32badcd0ebc28d24adceb4f649c0c2b84c03987 | https://github.com/django-getpaid/django-getpaid/blob/f32badcd0ebc28d24adceb4f649c0c2b84c03987/getpaid/backends/payu_rest/__init__.py#L87-L132 | train |
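This record leans on two helpers not shown here, parse_payu_sig and compute_sig. A plausible sketch of both, assuming PayU's 'signature=...;algorithm=...' header grammar; treat the details as assumptions rather than the project's actual implementation.

import hashlib

def parse_payu_sig(header_value):
    # 'sender=x;signature=abc;algorithm=MD5' -> {'sender': 'x', 'signature': 'abc', 'algorithm': 'md5'}
    pairs = (part.split('=', 1) for part in header_value.split(';') if '=' in part)
    return {key.strip().lower(): value.strip().lower() for key, value in pairs}

def compute_sig(payload, key, algorithm='md5'):
    # PayU-style signature: hash of the raw JSON body concatenated with the second key.
    digest = hashlib.new(algorithm)
    digest.update((payload + key).encode('utf-8'))
    return digest.hexdigest()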
django-getpaid/django-getpaid | getpaid/backends/__init__.py | PaymentProcessorBase.get_order_description | def get_order_description(self, payment, order):
"""
Renders order description using django template provided in ``settings.GETPAID_ORDER_DESCRIPTION``
or if not provided return unicode representation of ``Order object``.
"""
template = getattr(settings, 'GETPAID_ORDER_DESCRIPTION', None)
if template:
return Template(template).render(Context({"payment": payment, "order": order}))
else:
return six.text_type(order) | python | def get_order_description(self, payment, order):
"""
Renders order description using django template provided in ``settings.GETPAID_ORDER_DESCRIPTION``
or if not provided return unicode representation of ``Order object``.
"""
template = getattr(settings, 'GETPAID_ORDER_DESCRIPTION', None)
if template:
return Template(template).render(Context({"payment": payment, "order": order}))
else:
return six.text_type(order) | [
"def",
"get_order_description",
"(",
"self",
",",
"payment",
",",
"order",
")",
":",
"template",
"=",
"getattr",
"(",
"settings",
",",
"'GETPAID_ORDER_DESCRIPTION'",
",",
"None",
")",
"if",
"template",
":",
"return",
"Template",
"(",
"template",
")",
".",
"render",
"(",
"Context",
"(",
"{",
"\"payment\"",
":",
"payment",
",",
"\"order\"",
":",
"order",
"}",
")",
")",
"else",
":",
"return",
"six",
".",
"text_type",
"(",
"order",
")"
] | Renders order description using django template provided in ``settings.GETPAID_ORDER_DESCRIPTION``
or if not provided return unicode representation of ``Order object``. | [
"Renders",
"order",
"description",
"using",
"django",
"template",
"provided",
"in",
"settings",
".",
"GETPAID_ORDER_DESCRIPTION",
"or",
"if",
"not",
"provided",
"return",
"unicode",
"representation",
"of",
"Order",
"object",
"."
] | f32badcd0ebc28d24adceb4f649c0c2b84c03987 | https://github.com/django-getpaid/django-getpaid/blob/f32badcd0ebc28d24adceb4f649c0c2b84c03987/getpaid/backends/__init__.py#L54-L63 | train |
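On the settings side, any Django template string works here, with payment and order available in the context. The string below is an example, not a project default.

# settings.py
GETPAID_ORDER_DESCRIPTION = "Order #{{ order.pk }} for {{ payment.amount }} {{ payment.currency }}"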
django-getpaid/django-getpaid | getpaid/backends/__init__.py | PaymentProcessorBase.get_backend_setting | def get_backend_setting(cls, name, default=None):
"""
Reads ``name`` setting from backend settings dictionary.
If `default` value is omitted, raises ``ImproperlyConfigured`` when
setting ``name`` is not available.
"""
backend_settings = get_backend_settings(cls.BACKEND)
if default is not None:
return backend_settings.get(name, default)
else:
try:
return backend_settings[name]
except KeyError:
raise ImproperlyConfigured("getpaid '%s' requires backend '%s' setting" % (cls.BACKEND, name)) | python | def get_backend_setting(cls, name, default=None):
"""
Reads ``name`` setting from backend settings dictionary.
If `default` value is omitted, raises ``ImproperlyConfigured`` when
setting ``name`` is not available.
"""
backend_settings = get_backend_settings(cls.BACKEND)
if default is not None:
return backend_settings.get(name, default)
else:
try:
return backend_settings[name]
except KeyError:
raise ImproperlyConfigured("getpaid '%s' requires backend '%s' setting" % (cls.BACKEND, name)) | [
"def",
"get_backend_setting",
"(",
"cls",
",",
"name",
",",
"default",
"=",
"None",
")",
":",
"backend_settings",
"=",
"get_backend_settings",
"(",
"cls",
".",
"BACKEND",
")",
"if",
"default",
"is",
"not",
"None",
":",
"return",
"backend_settings",
".",
"get",
"(",
"name",
",",
"default",
")",
"else",
":",
"try",
":",
"return",
"backend_settings",
"[",
"name",
"]",
"except",
"KeyError",
":",
"raise",
"ImproperlyConfigured",
"(",
"\"getpaid '%s' requires backend '%s' setting\"",
"%",
"(",
"cls",
".",
"BACKEND",
",",
"name",
")",
")"
] | Reads ``name`` setting from backend settings dictionary.
If `default` value is omitted, raises ``ImproperlyConfigured`` when
setting ``name`` is not available. | [
"Reads",
"name",
"setting",
"from",
"backend",
"settings",
"dictionary",
"."
] | f32badcd0ebc28d24adceb4f649c0c2b84c03987 | https://github.com/django-getpaid/django-getpaid/blob/f32badcd0ebc28d24adceb4f649c0c2b84c03987/getpaid/backends/__init__.py#L83-L97 | train |
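A sketch of the settings shape this reads from. The per-backend dict is resolved by get_backend_settings, which is not shown in this record, so the setting name and keys below are assumptions.

# settings.py
GETPAID_BACKENDS_SETTINGS = {
    'getpaid.backends.dotpay': {'id': 123456, 'method': 'post'},
}

# inside a PaymentProcessor classmethod:
#   cls.get_backend_setting('id')             # -> 123456 (required key)
#   cls.get_backend_setting('method', 'get')  # -> 'post' (optional, with default)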
django-getpaid/django-getpaid | getpaid/backends/dotpay/__init__.py | PaymentProcessor.get_gateway_url | def get_gateway_url(self, request):
"""
Routes a payment to Gateway, should return URL for redirection.
"""
params = {
'id': self.get_backend_setting('id'),
'description': self.get_order_description(self.payment, self.payment.order),
'amount': self.payment.amount,
'currency': self.payment.currency,
'type': 0, # 0 = show "return" button after finished payment
'control': self.payment.pk,
'URL': self.get_URL(self.payment.pk),
'URLC': self.get_URLC(),
'api_version': 'dev',
}
user_data = {
'email': None,
'lang': None,
}
signals.user_data_query.send(sender=None, order=self.payment.order, user_data=user_data)
if user_data['email']:
params['email'] = user_data['email']
if user_data['lang'] and user_data['lang'].lower() in self._ACCEPTED_LANGS:
params['lang'] = user_data['lang'].lower()
elif self.get_backend_setting('lang', False
) and self.get_backend_setting('lang').lower() in self._ACCEPTED_LANGS:
params['lang'] = self.get_backend_setting('lang').lower()
if self.get_backend_setting('onlinetransfer', False):
params['onlinetransfer'] = 1
if self.get_backend_setting('p_email', False):
params['p_email'] = self.get_backend_setting('p_email')
if self.get_backend_setting('p_info', False):
params['p_info'] = self.get_backend_setting('p_info')
if self.get_backend_setting('tax', False):
params['tax'] = 1
gateway_url = self.get_backend_setting('gateway_url', self._GATEWAY_URL)
if self.get_backend_setting('method', 'get').lower() == 'post':
return gateway_url, 'POST', params
elif self.get_backend_setting('method', 'get').lower() == 'get':
for key in params.keys():
params[key] = six.text_type(params[key]).encode('utf-8')
return gateway_url + '?' + urlencode(params), "GET", {}
else:
raise ImproperlyConfigured('Dotpay payment backend accepts only GET or POST') | python | def get_gateway_url(self, request):
"""
Routes a payment to Gateway, should return URL for redirection.
"""
params = {
'id': self.get_backend_setting('id'),
'description': self.get_order_description(self.payment, self.payment.order),
'amount': self.payment.amount,
'currency': self.payment.currency,
'type': 0, # 0 = show "return" button after finished payment
'control': self.payment.pk,
'URL': self.get_URL(self.payment.pk),
'URLC': self.get_URLC(),
'api_version': 'dev',
}
user_data = {
'email': None,
'lang': None,
}
signals.user_data_query.send(sender=None, order=self.payment.order, user_data=user_data)
if user_data['email']:
params['email'] = user_data['email']
if user_data['lang'] and user_data['lang'].lower() in self._ACCEPTED_LANGS:
params['lang'] = user_data['lang'].lower()
elif self.get_backend_setting('lang', False
) and self.get_backend_setting('lang').lower() in self._ACCEPTED_LANGS:
params['lang'] = self.get_backend_setting('lang').lower()
if self.get_backend_setting('onlinetransfer', False):
params['onlinetransfer'] = 1
if self.get_backend_setting('p_email', False):
params['p_email'] = self.get_backend_setting('p_email')
if self.get_backend_setting('p_info', False):
params['p_info'] = self.get_backend_setting('p_info')
if self.get_backend_setting('tax', False):
params['tax'] = 1
gateway_url = self.get_backend_setting('gateway_url', self._GATEWAY_URL)
if self.get_backend_setting('method', 'get').lower() == 'post':
return gateway_url, 'POST', params
elif self.get_backend_setting('method', 'get').lower() == 'get':
for key in params.keys():
params[key] = six.text_type(params[key]).encode('utf-8')
return gateway_url + '?' + urlencode(params), "GET", {}
else:
raise ImproperlyConfigured('Dotpay payment backend accepts only GET or POST') | [
"def",
"get_gateway_url",
"(",
"self",
",",
"request",
")",
":",
"params",
"=",
"{",
"'id'",
":",
"self",
".",
"get_backend_setting",
"(",
"'id'",
")",
",",
"'description'",
":",
"self",
".",
"get_order_description",
"(",
"self",
".",
"payment",
",",
"self",
".",
"payment",
".",
"order",
")",
",",
"'amount'",
":",
"self",
".",
"payment",
".",
"amount",
",",
"'currency'",
":",
"self",
".",
"payment",
".",
"currency",
",",
"'type'",
":",
"0",
",",
"# 0 = show \"return\" button after finished payment",
"'control'",
":",
"self",
".",
"payment",
".",
"pk",
",",
"'URL'",
":",
"self",
".",
"get_URL",
"(",
"self",
".",
"payment",
".",
"pk",
")",
",",
"'URLC'",
":",
"self",
".",
"get_URLC",
"(",
")",
",",
"'api_version'",
":",
"'dev'",
",",
"}",
"user_data",
"=",
"{",
"'email'",
":",
"None",
",",
"'lang'",
":",
"None",
",",
"}",
"signals",
".",
"user_data_query",
".",
"send",
"(",
"sender",
"=",
"None",
",",
"order",
"=",
"self",
".",
"payment",
".",
"order",
",",
"user_data",
"=",
"user_data",
")",
"if",
"user_data",
"[",
"'email'",
"]",
":",
"params",
"[",
"'email'",
"]",
"=",
"user_data",
"[",
"'email'",
"]",
"if",
"user_data",
"[",
"'lang'",
"]",
"and",
"user_data",
"[",
"'lang'",
"]",
".",
"lower",
"(",
")",
"in",
"self",
".",
"_ACCEPTED_LANGS",
":",
"params",
"[",
"'lang'",
"]",
"=",
"user_data",
"[",
"'lang'",
"]",
".",
"lower",
"(",
")",
"elif",
"self",
".",
"get_backend_setting",
"(",
"'lang'",
",",
"False",
")",
"and",
"self",
".",
"get_backend_setting",
"(",
"'lang'",
")",
".",
"lower",
"(",
")",
"in",
"self",
".",
"_ACCEPTED_LANGS",
":",
"params",
"[",
"'lang'",
"]",
"=",
"self",
".",
"get_backend_setting",
"(",
"'lang'",
")",
".",
"lower",
"(",
")",
"if",
"self",
".",
"get_backend_setting",
"(",
"'onlinetransfer'",
",",
"False",
")",
":",
"params",
"[",
"'onlinetransfer'",
"]",
"=",
"1",
"if",
"self",
".",
"get_backend_setting",
"(",
"'p_email'",
",",
"False",
")",
":",
"params",
"[",
"'p_email'",
"]",
"=",
"self",
".",
"get_backend_setting",
"(",
"'p_email'",
")",
"if",
"self",
".",
"get_backend_setting",
"(",
"'p_info'",
",",
"False",
")",
":",
"params",
"[",
"'p_info'",
"]",
"=",
"self",
".",
"get_backend_setting",
"(",
"'p_info'",
")",
"if",
"self",
".",
"get_backend_setting",
"(",
"'tax'",
",",
"False",
")",
":",
"params",
"[",
"'tax'",
"]",
"=",
"1",
"gateway_url",
"=",
"self",
".",
"get_backend_setting",
"(",
"'gateway_url'",
",",
"self",
".",
"_GATEWAY_URL",
")",
"if",
"self",
".",
"get_backend_setting",
"(",
"'method'",
",",
"'get'",
")",
".",
"lower",
"(",
")",
"==",
"'post'",
":",
"return",
"gateway_url",
",",
"'POST'",
",",
"params",
"elif",
"self",
".",
"get_backend_setting",
"(",
"'method'",
",",
"'get'",
")",
".",
"lower",
"(",
")",
"==",
"'get'",
":",
"for",
"key",
"in",
"params",
".",
"keys",
"(",
")",
":",
"params",
"[",
"key",
"]",
"=",
"six",
".",
"text_type",
"(",
"params",
"[",
"key",
"]",
")",
".",
"encode",
"(",
"'utf-8'",
")",
"return",
"gateway_url",
"+",
"'?'",
"+",
"urlencode",
"(",
"params",
")",
",",
"\"GET\"",
",",
"{",
"}",
"else",
":",
"raise",
"ImproperlyConfigured",
"(",
"'Dotpay payment backend accepts only GET or POST'",
")"
] | Routes a payment to Gateway, should return URL for redirection. | [
"Routes",
"a",
"payment",
"to",
"Gateway",
"should",
"return",
"URL",
"for",
"redirection",
"."
] | f32badcd0ebc28d24adceb4f649c0c2b84c03987 | https://github.com/django-getpaid/django-getpaid/blob/f32badcd0ebc28d24adceb4f649c0c2b84c03987/getpaid/backends/dotpay/__init__.py#L114-L163 | train |
hubo1016/aiogrpc | aiogrpc/channel.py | channel_ready_future | def channel_ready_future(channel):
"""Creates a Future that tracks when a Channel is ready.
Cancelling the Future does not affect the channel's state machine.
It merely decouples the Future from the channel's state machine.
Args:
channel: A Channel object.
Returns:
A Future object that matures when the channel connectivity is
ChannelConnectivity.READY.
"""
fut = channel._loop.create_future()
def _set_result(state):
if not fut.done() and state is _grpc.ChannelConnectivity.READY:
fut.set_result(None)
fut.add_done_callback(lambda f: channel.unsubscribe(_set_result))
channel.subscribe(_set_result, try_to_connect=True)
return fut | python | def channel_ready_future(channel):
"""Creates a Future that tracks when a Channel is ready.
Cancelling the Future does not affect the channel's state machine.
It merely decouples the Future from the channel's state machine.
Args:
channel: A Channel object.
Returns:
A Future object that matures when the channel connectivity is
ChannelConnectivity.READY.
"""
fut = channel._loop.create_future()
def _set_result(state):
if not fut.done() and state is _grpc.ChannelConnectivity.READY:
fut.set_result(None)
fut.add_done_callback(lambda f: channel.unsubscribe(_set_result))
channel.subscribe(_set_result, try_to_connect=True)
return fut | [
"def",
"channel_ready_future",
"(",
"channel",
")",
":",
"fut",
"=",
"channel",
".",
"_loop",
".",
"create_future",
"(",
")",
"def",
"_set_result",
"(",
"state",
")",
":",
"if",
"not",
"fut",
".",
"done",
"(",
")",
"and",
"state",
"is",
"_grpc",
".",
"ChannelConnectivity",
".",
"READY",
":",
"fut",
".",
"set_result",
"(",
"None",
")",
"fut",
".",
"add_done_callback",
"(",
"lambda",
"f",
":",
"channel",
".",
"unsubscribe",
"(",
"_set_result",
")",
")",
"channel",
".",
"subscribe",
"(",
"_set_result",
",",
"try_to_connect",
"=",
"True",
")",
"return",
"fut"
] | Creates a Future that tracks when a Channel is ready.
Cancelling the Future does not affect the channel's state machine.
It merely decouples the Future from the channel's state machine.
Args:
channel: A Channel object.
Returns:
A Future object that matures when the channel connectivity is
ChannelConnectivity.READY. | [
"Creates",
"a",
"Future",
"that",
"tracks",
"when",
"a",
"Channel",
"is",
"ready",
"."
] | 5bc98bfbe9f2e11dd0eab8e93b8aeefbcc2ccd4b | https://github.com/hubo1016/aiogrpc/blob/5bc98bfbe9f2e11dd0eab8e93b8aeefbcc2ccd4b/aiogrpc/channel.py#L395-L414 | train |
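A usage sketch: wait for readiness before issuing RPCs, using the insecure_channel helper defined in the next record. The address is a placeholder.

import asyncio
from aiogrpc.channel import channel_ready_future, insecure_channel

async def main():
    channel = insecure_channel('localhost:50051')
    await channel_ready_future(channel)  # resolves once connectivity is READY
    # ... issue RPCs through generated stubs here ...

asyncio.get_event_loop().run_until_complete(main())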
hubo1016/aiogrpc | aiogrpc/channel.py | insecure_channel | def insecure_channel(target, options=None, *, loop=None, executor=None,
standalone_pool_for_streaming=False):
"""Creates an insecure Channel to a server.
Args:
target: The server address
options: An optional list of key-value pairs (channel args in gRPC runtime)
to configure the channel.
Returns:
A Channel object.
"""
return Channel(_grpc.insecure_channel(target, options), loop, executor, standalone_pool_for_streaming) | python | def insecure_channel(target, options=None, *, loop=None, executor=None,
standalone_pool_for_streaming=False):
"""Creates an insecure Channel to a server.
Args:
target: The server address
options: An optional list of key-value pairs (channel args in gRPC runtime)
to configure the channel.
Returns:
A Channel object.
"""
return Channel(_grpc.insecure_channel(target, options), loop, executor, standalone_pool_for_streaming) | [
"def",
"insecure_channel",
"(",
"target",
",",
"options",
"=",
"None",
",",
"*",
",",
"loop",
"=",
"None",
",",
"executor",
"=",
"None",
",",
"standalone_pool_for_streaming",
"=",
"False",
")",
":",
"return",
"Channel",
"(",
"_grpc",
".",
"insecure_channel",
"(",
"target",
",",
"options",
")",
",",
"loop",
",",
"executor",
",",
"standalone_pool_for_streaming",
")"
] | Creates an insecure Channel to a server.
Args:
target: The server address
options: An optional list of key-value pairs (channel args in gRPC runtime)
to configure the channel.
Returns:
A Channel object. | [
"Creates",
"an",
"insecure",
"Channel",
"to",
"a",
"server",
"."
] | 5bc98bfbe9f2e11dd0eab8e93b8aeefbcc2ccd4b | https://github.com/hubo1016/aiogrpc/blob/5bc98bfbe9f2e11dd0eab8e93b8aeefbcc2ccd4b/aiogrpc/channel.py#L417-L429 | train |
hubo1016/aiogrpc | aiogrpc/channel.py | secure_channel | def secure_channel(target, credentials, options=None, *, loop=None, executor=None,
standalone_pool_for_streaming=False):
"""Creates a secure Channel to a server.
Args:
target: The server address.
credentials: A ChannelCredentials instance.
options: An optional list of key-value pairs (channel args in gRPC runtime)
to configure the channel.
Returns:
A Channel object.
"""
return Channel(_grpc.secure_channel(target, credentials, options),
loop, executor, standalone_pool_for_streaming) | python | def secure_channel(target, credentials, options=None, *, loop=None, executor=None,
standalone_pool_for_streaming=False):
"""Creates a secure Channel to a server.
Args:
target: The server address.
credentials: A ChannelCredentials instance.
options: An optional list of key-value pairs (channel args in gRPC runtime)
to configure the channel.
Returns:
A Channel object.
"""
return Channel(_grpc.secure_channel(target, credentials, options),
loop, executor, standalone_pool_for_streaming) | [
"def",
"secure_channel",
"(",
"target",
",",
"credentials",
",",
"options",
"=",
"None",
",",
"*",
",",
"loop",
"=",
"None",
",",
"executor",
"=",
"None",
",",
"standalone_pool_for_streaming",
"=",
"False",
")",
":",
"return",
"Channel",
"(",
"_grpc",
".",
"secure_channel",
"(",
"target",
",",
"credentials",
",",
"options",
")",
",",
"loop",
",",
"executor",
",",
"standalone_pool_for_streaming",
")"
] | Creates a secure Channel to a server.
Args:
target: The server address.
credentials: A ChannelCredentials instance.
options: An optional list of key-value pairs (channel args in gRPC runtime)
to configure the channel.
Returns:
A Channel object. | [
"Creates",
"a",
"secure",
"Channel",
"to",
"a",
"server",
"."
] | 5bc98bfbe9f2e11dd0eab8e93b8aeefbcc2ccd4b | https://github.com/hubo1016/aiogrpc/blob/5bc98bfbe9f2e11dd0eab8e93b8aeefbcc2ccd4b/aiogrpc/channel.py#L432-L446 | train |
hubo1016/aiogrpc | aiogrpc/channel.py | _UnaryUnaryMultiCallable.future | def future(self, request, timeout=None, metadata=None, credentials=None):
"""Asynchronously invokes the underlying RPC.
Args:
request: The request value for the RPC.
timeout: An optional duration of time in seconds to allow for the RPC.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC.
Returns:
An object that is both a Call for the RPC and a Future. In the event of
RPC completion, the return Call-Future's result value will be the
response message of the RPC. Should the event terminate with non-OK
status, the returned Call-Future's exception value will be an RpcError.
"""
return _utils.wrap_future_call(self._inner.future(request, timeout, metadata, credentials),
self._loop, self._executor) | python | def future(self, request, timeout=None, metadata=None, credentials=None):
"""Asynchronously invokes the underlying RPC.
Args:
request: The request value for the RPC.
timeout: An optional duration of time in seconds to allow for the RPC.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC.
Returns:
An object that is both a Call for the RPC and a Future. In the event of
RPC completion, the return Call-Future's result value will be the
response message of the RPC. Should the event terminate with non-OK
status, the returned Call-Future's exception value will be an RpcError.
"""
return _utils.wrap_future_call(self._inner.future(request, timeout, metadata, credentials),
self._loop, self._executor) | [
"def",
"future",
"(",
"self",
",",
"request",
",",
"timeout",
"=",
"None",
",",
"metadata",
"=",
"None",
",",
"credentials",
"=",
"None",
")",
":",
"return",
"_utils",
".",
"wrap_future_call",
"(",
"self",
".",
"_inner",
".",
"future",
"(",
"request",
",",
"timeout",
",",
"metadata",
",",
"credentials",
")",
",",
"self",
".",
"_loop",
",",
"self",
".",
"_executor",
")"
] | Asynchronously invokes the underlying RPC.
Args:
request: The request value for the RPC.
timeout: An optional duration of time in seconds to allow for the RPC.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC.
Returns:
An object that is both a Call for the RPC and a Future. In the event of
RPC completion, the return Call-Future's result value will be the
response message of the RPC. Should the event terminate with non-OK
status, the returned Call-Future's exception value will be an RpcError. | [
"Asynchronously",
"invokes",
"the",
"underlying",
"RPC",
"."
] | 5bc98bfbe9f2e11dd0eab8e93b8aeefbcc2ccd4b | https://github.com/hubo1016/aiogrpc/blob/5bc98bfbe9f2e11dd0eab8e93b8aeefbcc2ccd4b/aiogrpc/channel.py#L72-L89 | train |
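Because the wrapped Call-Future is awaitable, a client-side call reduces to one await; the stub and message names below are hypothetical generated-code names.

async def get_status(stub):
    # `stub` and `StatusRequest` are hypothetical names from generated gRPC code.
    response = await stub.GetStatus.future(StatusRequest(), timeout=5)
    return response  # on non-OK status an RpcError is raised instead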
hubo1016/aiogrpc | aiogrpc/channel.py | _StreamUnaryMultiCallable.with_call | async def with_call(self,
request_iterator,
timeout=None,
metadata=None,
credentials=None):
"""Synchronously invokes the underlying RPC on the client.
Args:
request_iterator: An ASYNC iterator that yields request values for the RPC.
timeout: An optional duration of time in seconds to allow for the RPC.
If None, the timeout is considered infinite.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC.
Returns:
The response value for the RPC and a Call object for the RPC.
Raises:
RpcError: Indicating that the RPC terminated with non-OK status. The
raised RpcError will also be a Call for the RPC affording the RPC's
metadata, status code, and details.
"""
fut = self.future(request_iterator, timeout, metadata, credentials)
try:
result = await fut
return (result, fut)
finally:
if not fut.done():
fut.cancel() | python | async def with_call(self,
request_iterator,
timeout=None,
metadata=None,
credentials=None):
"""Synchronously invokes the underlying RPC on the client.
Args:
request_iterator: An ASYNC iterator that yields request values for the RPC.
timeout: An optional duration of time in seconds to allow for the RPC.
If None, the timeout is considered infinite.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC.
Returns:
The response value for the RPC and a Call object for the RPC.
Raises:
RpcError: Indicating that the RPC terminated with non-OK status. The
raised RpcError will also be a Call for the RPC affording the RPC's
metadata, status code, and details.
"""
fut = self.future(request_iterator, timeout, metadata, credentials)
try:
result = await fut
return (result, fut)
finally:
if not fut.done():
fut.cancel() | [
"async",
"def",
"with_call",
"(",
"self",
",",
"request_iterator",
",",
"timeout",
"=",
"None",
",",
"metadata",
"=",
"None",
",",
"credentials",
"=",
"None",
")",
":",
"fut",
"=",
"self",
".",
"future",
"(",
"request_iterator",
",",
"timeout",
",",
"metadata",
",",
"credentials",
")",
"try",
":",
"result",
"=",
"await",
"fut",
"return",
"(",
"result",
",",
"fut",
")",
"finally",
":",
"if",
"not",
"fut",
".",
"done",
"(",
")",
":",
"fut",
".",
"cancel",
"(",
")"
] | Synchronously invokes the underlying RPC on the client.
Args:
request_iterator: An ASYNC iterator that yields request values for the RPC.
timeout: An optional duration of time in seconds to allow for the RPC.
If None, the timeout is considered infinite.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC.
Returns:
The response value for the RPC and a Call object for the RPC.
Raises:
RpcError: Indicating that the RPC terminated with non-OK status. The
raised RpcError will also be a Call for the RPC affording the RPC's
metadata, status code, and details. | [
"Synchronously",
"invokes",
"the",
"underlying",
"RPC",
"on",
"the",
"client",
"."
] | 5bc98bfbe9f2e11dd0eab8e93b8aeefbcc2ccd4b | https://github.com/hubo1016/aiogrpc/blob/5bc98bfbe9f2e11dd0eab8e93b8aeefbcc2ccd4b/aiogrpc/channel.py#L177-L206 | train |
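Note that the request iterator here is an async iterator, unlike plain grpc. A caller sketch with hypothetical generated names:

async def upload(stub):
    async def records():
        for chunk in (b'a', b'b', b'c'):
            yield Chunk(data=chunk)  # hypothetical message type

    summary, call = await stub.Upload.with_call(records(), timeout=10)
    return summary  # `call` also exposes the RPC's metadata and status code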
hubo1016/aiogrpc | aiogrpc/channel.py | _StreamUnaryMultiCallable.future | def future(self,
request_iterator,
timeout=None,
metadata=None,
credentials=None):
"""Asynchronously invokes the underlying RPC on the client.
Args:
request_iterator: An ASYNC iterator that yields request values for the RPC.
timeout: An optional duration of time in seconds to allow for the RPC.
If None, the timeout is considered infinite.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC.
Returns:
An object that is both a Call for the RPC and a Future. In the event of
RPC completion, the return Call-Future's result value will be the
response message of the RPC. Should the event terminate with non-OK
status, the returned Call-Future's exception value will be an RpcError.
"""
return _utils.wrap_future_call(
self._inner.future(
_utils.WrappedAsyncIterator(request_iterator, self._loop),
timeout,
metadata,
credentials
),
self._loop,
self._executor) | python | def future(self,
request_iterator,
timeout=None,
metadata=None,
credentials=None):
"""Asynchronously invokes the underlying RPC on the client.
Args:
request_iterator: An ASYNC iterator that yields request values for the RPC.
timeout: An optional duration of time in seconds to allow for the RPC.
If None, the timeout is considered infinite.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC.
Returns:
An object that is both a Call for the RPC and a Future. In the event of
RPC completion, the return Call-Future's result value will be the
response message of the RPC. Should the event terminate with non-OK
status, the returned Call-Future's exception value will be an RpcError.
"""
return _utils.wrap_future_call(
self._inner.future(
_utils.WrappedAsyncIterator(request_iterator, self._loop),
timeout,
metadata,
credentials
),
self._loop,
self._executor) | [
"def",
"future",
"(",
"self",
",",
"request_iterator",
",",
"timeout",
"=",
"None",
",",
"metadata",
"=",
"None",
",",
"credentials",
"=",
"None",
")",
":",
"return",
"_utils",
".",
"wrap_future_call",
"(",
"self",
".",
"_inner",
".",
"future",
"(",
"_utils",
".",
"WrappedAsyncIterator",
"(",
"request_iterator",
",",
"self",
".",
"_loop",
")",
",",
"timeout",
",",
"metadata",
",",
"credentials",
")",
",",
"self",
".",
"_loop",
",",
"self",
".",
"_executor",
")"
] | Asynchronously invokes the underlying RPC on the client.
Args:
request_iterator: An ASYNC iterator that yields request values for the RPC.
timeout: An optional duration of time in seconds to allow for the RPC.
If None, the timeout is considered infinite.
metadata: Optional :term:`metadata` to be transmitted to the
service-side of the RPC.
credentials: An optional CallCredentials for the RPC.
Returns:
An object that is both a Call for the RPC and a Future. In the event of
RPC completion, the return Call-Future's result value will be the
response message of the RPC. Should the event terminate with non-OK
status, the returned Call-Future's exception value will be an RpcError. | [
"Asynchronously",
"invokes",
"the",
"underlying",
"RPC",
"on",
"the",
"client",
"."
] | 5bc98bfbe9f2e11dd0eab8e93b8aeefbcc2ccd4b | https://github.com/hubo1016/aiogrpc/blob/5bc98bfbe9f2e11dd0eab8e93b8aeefbcc2ccd4b/aiogrpc/channel.py#L208-L237 | train |
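For context, a minimal usage sketch of the stream-unary `future` call in the record above. The `demo_pb2`/`demo_pb2_grpc` modules, the `CalcStub` stub, and the `SumRequest` message are hypothetical stand-ins for protoc-generated code, so this will not run without a server and real stubs; only `aiogrpc.insecure_channel` comes from the library itself, and awaiting the wrapped Call-Future is assumed to work because asyncio integration is the point of the wrapper.

```python
import asyncio
import aiogrpc
import demo_pb2, demo_pb2_grpc  # hypothetical protoc-generated modules

async def request_gen():
    # The "ASYNC iterator that yields request values" the docstring asks for.
    for v in (1, 2, 3):
        yield demo_pb2.SumRequest(value=v)  # hypothetical message type

async def main():
    channel = aiogrpc.insecure_channel("ipv4:///127.0.0.1:50051")
    stub = demo_pb2_grpc.CalcStub(channel)  # hypothetical generated stub
    call = stub.Sum.future(request_gen(), timeout=5)
    print(await call)  # the wrapped Call-Future is awaitable

asyncio.get_event_loop().run_until_complete(main())
```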
Cymmetria/honeycomb | honeycomb/utils/config_utils.py | config_field_type | def config_field_type(field, cls):
"""Validate a config field against a type.
Similar functionality to :func:`validate_field_matches_type` but returns :obj:`honeycomb.defs.ConfigField`
"""
return defs.ConfigField(lambda _: isinstance(_, cls),
lambda: CONFIG_FIELD_TYPE_ERROR.format(field, cls.__name__)) | python | def config_field_type(field, cls):
"""Validate a config field against a type.
Similar functionality to :func:`validate_field_matches_type` but returns :obj:`honeycomb.defs.ConfigField`
"""
return defs.ConfigField(lambda _: isinstance(_, cls),
lambda: CONFIG_FIELD_TYPE_ERROR.format(field, cls.__name__)) | [
"def",
"config_field_type",
"(",
"field",
",",
"cls",
")",
":",
"return",
"defs",
".",
"ConfigField",
"(",
"lambda",
"_",
":",
"isinstance",
"(",
"_",
",",
"cls",
")",
",",
"lambda",
":",
"CONFIG_FIELD_TYPE_ERROR",
".",
"format",
"(",
"field",
",",
"cls",
".",
"__name__",
")",
")"
] | Validate a config field against a type.
Similar functionality to :func:`validate_field_matches_type` but returns :obj:`honeycomb.defs.ConfigField` | [
"Validate",
"a",
"config",
"field",
"against",
"a",
"type",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/utils/config_utils.py#L21-L27 | train |
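A standalone sketch of the validator/error-message pair this factory returns. The `ConfigField` namedtuple and `CONFIG_FIELD_TYPE_ERROR` string below are stand-ins for `honeycomb.defs.ConfigField` and the module-level error template, whose real field names and wording may differ.

```python
from collections import namedtuple

ConfigField = namedtuple("ConfigField", ["validate", "get_error_message"])  # stand-in
CONFIG_FIELD_TYPE_ERROR = "field {} must be of type {}"                     # stand-in

def config_field_type(field, cls):
    # Same shape as the record above: a type check plus a lazy error message.
    return ConfigField(lambda v: isinstance(v, cls),
                       lambda: CONFIG_FIELD_TYPE_ERROR.format(field, cls.__name__))

port = config_field_type("port", int)
print(port.validate(8080))       # True
print(port.validate("8080"))     # False
print(port.get_error_message())  # field port must be of type int
```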
Cymmetria/honeycomb | honeycomb/utils/config_utils.py | get_config_parameters | def get_config_parameters(plugin_path):
"""Return the parameters section from config.json."""
json_config_path = os.path.join(plugin_path, defs.CONFIG_FILE_NAME)
with open(json_config_path, "r") as f:
config = json.load(f)
return config.get(defs.PARAMETERS, []) | python | def get_config_parameters(plugin_path):
"""Return the parameters section from config.json."""
json_config_path = os.path.join(plugin_path, defs.CONFIG_FILE_NAME)
with open(json_config_path, "r") as f:
config = json.load(f)
return config.get(defs.PARAMETERS, []) | [
"def",
"get_config_parameters",
"(",
"plugin_path",
")",
":",
"json_config_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"plugin_path",
",",
"defs",
".",
"CONFIG_FILE_NAME",
")",
"with",
"open",
"(",
"json_config_path",
",",
"\"r\"",
")",
"as",
"f",
":",
"config",
"=",
"json",
".",
"load",
"(",
"f",
")",
"return",
"config",
".",
"get",
"(",
"defs",
".",
"PARAMETERS",
",",
"[",
"]",
")"
] | Return the parameters section from config.json. | [
"Return",
"the",
"parameters",
"section",
"from",
"config",
".",
"json",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/utils/config_utils.py#L41-L46 | train |
Cymmetria/honeycomb | honeycomb/utils/config_utils.py | validate_config_parameters | def validate_config_parameters(config_json, allowed_keys, allowed_types):
"""Validate parameters in config file."""
custom_fields = config_json.get(defs.PARAMETERS, [])
for field in custom_fields:
validate_field(field, allowed_keys, allowed_types)
default = field.get(defs.DEFAULT)
field_type = field.get(defs.TYPE)
if default:
validate_field_matches_type(field[defs.VALUE], default, field_type) | python | def validate_config_parameters(config_json, allowed_keys, allowed_types):
"""Validate parameters in config file."""
custom_fields = config_json.get(defs.PARAMETERS, [])
for field in custom_fields:
validate_field(field, allowed_keys, allowed_types)
default = field.get(defs.DEFAULT)
field_type = field.get(defs.TYPE)
if default:
validate_field_matches_type(field[defs.VALUE], default, field_type) | [
"def",
"validate_config_parameters",
"(",
"config_json",
",",
"allowed_keys",
",",
"allowed_types",
")",
":",
"custom_fields",
"=",
"config_json",
".",
"get",
"(",
"defs",
".",
"PARAMETERS",
",",
"[",
"]",
")",
"for",
"field",
"in",
"custom_fields",
":",
"validate_field",
"(",
"field",
",",
"allowed_keys",
",",
"allowed_types",
")",
"default",
"=",
"field",
".",
"get",
"(",
"defs",
".",
"DEFAULT",
")",
"field_type",
"=",
"field",
".",
"get",
"(",
"defs",
".",
"TYPE",
")",
"if",
"default",
":",
"validate_field_matches_type",
"(",
"field",
"[",
"defs",
".",
"VALUE",
"]",
",",
"default",
",",
"field_type",
")"
] | Validate parameters in config file. | [
"Validate",
"parameters",
"in",
"config",
"file",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/utils/config_utils.py#L49-L57 | train |
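An illustrative `parameters` section that this function would iterate over. The key names assume `defs.PARAMETERS`, `defs.TYPE`, `defs.VALUE`, and `defs.DEFAULT` resolve to the lowercase strings shown, and `label` is just a plausible extra key; note the code validates each field's *default* against its declared type.

```python
config_json = {
    "parameters": [
        # each field is checked by validate_field, then its default vs. its type
        {"type": "integer", "value": "port", "default": 8888,
         "label": "Port to listen on"},
        {"type": "boolean", "value": "verbose", "default": False,
         "label": "Enable verbose logging"},
    ]
}

print([f["value"] for f in config_json["parameters"]])  # ['port', 'verbose']
```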
Cymmetria/honeycomb | honeycomb/utils/config_utils.py | validate_field_matches_type | def validate_field_matches_type(field, value, field_type, select_items=None, _min=None, _max=None):
"""Validate a config field against a specific type."""
if (field_type == defs.TEXT_TYPE and not isinstance(value, six.string_types)) or \
(field_type == defs.STRING_TYPE and not isinstance(value, six.string_types)) or \
(field_type == defs.BOOLEAN_TYPE and not isinstance(value, bool)) or \
(field_type == defs.INTEGER_TYPE and not isinstance(value, int)):
raise exceptions.ConfigFieldTypeMismatch(field, value, field_type)
if field_type == defs.INTEGER_TYPE:
if _min and value < _min:
raise exceptions.ConfigFieldTypeMismatch(field, value, "must be higher than {}".format(_min))
if _max and value > _max:
raise exceptions.ConfigFieldTypeMismatch(field, value, "must be lower than {}".format(_max))
if field_type == defs.SELECT_TYPE:
from honeycomb.utils.plugin_utils import get_select_items
items = get_select_items(select_items)
if value not in items:
raise exceptions.ConfigFieldTypeMismatch(field, value, "one of: {}".format(", ".join(items))) | python | def validate_field_matches_type(field, value, field_type, select_items=None, _min=None, _max=None):
"""Validate a config field against a specific type."""
if (field_type == defs.TEXT_TYPE and not isinstance(value, six.string_types)) or \
(field_type == defs.STRING_TYPE and not isinstance(value, six.string_types)) or \
(field_type == defs.BOOLEAN_TYPE and not isinstance(value, bool)) or \
(field_type == defs.INTEGER_TYPE and not isinstance(value, int)):
raise exceptions.ConfigFieldTypeMismatch(field, value, field_type)
if field_type == defs.INTEGER_TYPE:
if _min and value < _min:
raise exceptions.ConfigFieldTypeMismatch(field, value, "must be higher than {}".format(_min))
if _max and value > _max:
raise exceptions.ConfigFieldTypeMismatch(field, value, "must be lower than {}".format(_max))
if field_type == defs.SELECT_TYPE:
from honeycomb.utils.plugin_utils import get_select_items
items = get_select_items(select_items)
if value not in items:
raise exceptions.ConfigFieldTypeMismatch(field, value, "one of: {}".format(", ".join(items))) | [
"def",
"validate_field_matches_type",
"(",
"field",
",",
"value",
",",
"field_type",
",",
"select_items",
"=",
"None",
",",
"_min",
"=",
"None",
",",
"_max",
"=",
"None",
")",
":",
"if",
"(",
"field_type",
"==",
"defs",
".",
"TEXT_TYPE",
"and",
"not",
"isinstance",
"(",
"value",
",",
"six",
".",
"string_types",
")",
")",
"or",
"(",
"field_type",
"==",
"defs",
".",
"STRING_TYPE",
"and",
"not",
"isinstance",
"(",
"value",
",",
"six",
".",
"string_types",
")",
")",
"or",
"(",
"field_type",
"==",
"defs",
".",
"BOOLEAN_TYPE",
"and",
"not",
"isinstance",
"(",
"value",
",",
"bool",
")",
")",
"or",
"(",
"field_type",
"==",
"defs",
".",
"INTEGER_TYPE",
"and",
"not",
"isinstance",
"(",
"value",
",",
"int",
")",
")",
":",
"raise",
"exceptions",
".",
"ConfigFieldTypeMismatch",
"(",
"field",
",",
"value",
",",
"field_type",
")",
"if",
"field_type",
"==",
"defs",
".",
"INTEGER_TYPE",
":",
"if",
"_min",
"and",
"value",
"<",
"_min",
":",
"raise",
"exceptions",
".",
"ConfigFieldTypeMismatch",
"(",
"field",
",",
"value",
",",
"\"must be higher than {}\"",
".",
"format",
"(",
"_min",
")",
")",
"if",
"_max",
"and",
"value",
">",
"_max",
":",
"raise",
"exceptions",
".",
"ConfigFieldTypeMismatch",
"(",
"field",
",",
"value",
",",
"\"must be lower than {}\"",
".",
"format",
"(",
"_max",
")",
")",
"if",
"field_type",
"==",
"defs",
".",
"SELECT_TYPE",
":",
"from",
"honeycomb",
".",
"utils",
".",
"plugin_utils",
"import",
"get_select_items",
"items",
"=",
"get_select_items",
"(",
"select_items",
")",
"if",
"value",
"not",
"in",
"items",
":",
"raise",
"exceptions",
".",
"ConfigFieldTypeMismatch",
"(",
"field",
",",
"value",
",",
"\"one of: {}\"",
".",
"format",
"(",
"\", \"",
".",
"join",
"(",
"items",
")",
")",
")"
] | Validate a config field against a specific type. | [
"Validate",
"a",
"config",
"field",
"against",
"a",
"specific",
"type",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/utils/config_utils.py#L60-L78 | train |
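The same bounds logic in a standalone form, with a plain `ValueError` standing in for `exceptions.ConfigFieldTypeMismatch`. One caveat worth noting: the original guards with `if _min and ...`, so a bound of 0 is silently ignored; this sketch uses `is not None` so zero bounds are honored.

```python
def check_integer(field, value, _min=None, _max=None):
    # mirrors the defs.INTEGER_TYPE branch of the record above
    if not isinstance(value, int):
        raise ValueError("{}={!r} is not an integer".format(field, value))
    if _min is not None and value < _min:
        raise ValueError("{} must be higher than {}".format(field, _min))
    if _max is not None and value > _max:
        raise ValueError("{} must be lower than {}".format(field, _max))

check_integer("port", 8080, _min=1, _max=65535)  # passes silently
try:
    check_integer("port", 70000, _min=1, _max=65535)
except ValueError as exc:
    print(exc)  # port must be lower than 65535
```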
Cymmetria/honeycomb | honeycomb/utils/config_utils.py | get_truetype | def get_truetype(value):
"""Convert a string to a pythonized parameter."""
if value in ["true", "True", "y", "Y", "yes"]:
return True
if value in ["false", "False", "n", "N", "no"]:
return False
if value.isdigit():
return int(value)
return str(value) | python | def get_truetype(value):
"""Convert a string to a pythonized parameter."""
if value in ["true", "True", "y", "Y", "yes"]:
return True
if value in ["false", "False", "n", "N", "no"]:
return False
if value.isdigit():
return int(value)
return str(value) | [
"def",
"get_truetype",
"(",
"value",
")",
":",
"if",
"value",
"in",
"[",
"\"true\"",
",",
"\"True\"",
",",
"\"y\"",
",",
"\"Y\"",
",",
"\"yes\"",
"]",
":",
"return",
"True",
"if",
"value",
"in",
"[",
"\"false\"",
",",
"\"False\"",
",",
"\"n\"",
",",
"\"N\"",
",",
"\"no\"",
"]",
":",
"return",
"False",
"if",
"value",
".",
"isdigit",
"(",
")",
":",
"return",
"int",
"(",
"value",
")",
"return",
"str",
"(",
"value",
")"
] | Convert a string to a pythonized parameter. | [
"Convert",
"a",
"string",
"to",
"a",
"pythonized",
"parameter",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/utils/config_utils.py#L81-L89 | train |
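The conversions above, exercised directly; the function is copied verbatim so the snippet runs on its own.

```python
def get_truetype(value):
    if value in ["true", "True", "y", "Y", "yes"]:
        return True
    if value in ["false", "False", "n", "N", "no"]:
        return False
    if value.isdigit():
        return int(value)
    return str(value)

print(get_truetype("yes"))    # True
print(get_truetype("False"))  # False
print(get_truetype("8080"))   # 8080 (int)
print(get_truetype("-1"))     # '-1' (isdigit() is False for a leading minus)
```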
Cymmetria/honeycomb | honeycomb/utils/config_utils.py | validate_field | def validate_field(field, allowed_keys, allowed_types):
"""Validate field is allowed and valid."""
for key, value in field.items():
if key not in allowed_keys:
raise exceptions.ParametersFieldError(key, "property")
if key == defs.TYPE:
if value not in allowed_types:
raise exceptions.ParametersFieldError(value, key)
if key == defs.VALUE:
if not is_valid_field_name(value):
raise exceptions.ParametersFieldError(value, "field name") | python | def validate_field(field, allowed_keys, allowed_types):
"""Validate field is allowed and valid."""
for key, value in field.items():
if key not in allowed_keys:
raise exceptions.ParametersFieldError(key, "property")
if key == defs.TYPE:
if value not in allowed_types:
raise exceptions.ParametersFieldError(value, key)
if key == defs.VALUE:
if not is_valid_field_name(value):
raise exceptions.ParametersFieldError(value, "field name") | [
"def",
"validate_field",
"(",
"field",
",",
"allowed_keys",
",",
"allowed_types",
")",
":",
"for",
"key",
",",
"value",
"in",
"field",
".",
"items",
"(",
")",
":",
"if",
"key",
"not",
"in",
"allowed_keys",
":",
"raise",
"exceptions",
".",
"ParametersFieldError",
"(",
"key",
",",
"\"property\"",
")",
"if",
"key",
"==",
"defs",
".",
"TYPE",
":",
"if",
"value",
"not",
"in",
"allowed_types",
":",
"raise",
"exceptions",
".",
"ParametersFieldError",
"(",
"value",
",",
"key",
")",
"if",
"key",
"==",
"defs",
".",
"VALUE",
":",
"if",
"not",
"is_valid_field_name",
"(",
"value",
")",
":",
"raise",
"exceptions",
".",
"ParametersFieldError",
"(",
"value",
",",
"\"field name\"",
")"
] | Validate field is allowed and valid. | [
"Validate",
"field",
"is",
"allowed",
"and",
"valid",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/utils/config_utils.py#L92-L102 | train |
Cymmetria/honeycomb | honeycomb/utils/config_utils.py | is_valid_field_name | def is_valid_field_name(value):
"""Ensure field name is valid."""
leftovers = re.sub(r"\w", "", value)
leftovers = re.sub(r"-", "", leftovers)
if leftovers != "" or value[0].isdigit() or value[0] in ["-", "_"] or " " in value:
return False
return True | python | def is_valid_field_name(value):
"""Ensure field name is valid."""
leftovers = re.sub(r"\w", "", value)
leftovers = re.sub(r"-", "", leftovers)
if leftovers != "" or value[0].isdigit() or value[0] in ["-", "_"] or " " in value:
return False
return True | [
"def",
"is_valid_field_name",
"(",
"value",
")",
":",
"leftovers",
"=",
"re",
".",
"sub",
"(",
"r\"\\w\"",
",",
"\"\"",
",",
"value",
")",
"leftovers",
"=",
"re",
".",
"sub",
"(",
"r\"-\"",
",",
"\"\"",
",",
"leftovers",
")",
"if",
"leftovers",
"!=",
"\"\"",
"or",
"value",
"[",
"0",
"]",
".",
"isdigit",
"(",
")",
"or",
"value",
"[",
"0",
"]",
"in",
"[",
"\"-\"",
",",
"\"_\"",
"]",
"or",
"\" \"",
"in",
"value",
":",
"return",
"False",
"return",
"True"
] | Ensure field name is valid. | [
"Ensure",
"field",
"name",
"is",
"valid",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/utils/config_utils.py#L105-L111 | train |
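A verbatim copy of `is_valid_field_name` with sample inputs. The effective rules: word characters and dashes only, must not start with a digit, `-`, or `_`, and no spaces.

```python
import re

def is_valid_field_name(value):
    leftovers = re.sub(r"\w", "", value)
    leftovers = re.sub(r"-", "", leftovers)
    if leftovers != "" or value[0].isdigit() or value[0] in ["-", "_"] or " " in value:
        return False
    return True

print(is_valid_field_name("max-connections"))  # True
print(is_valid_field_name("retry_count"))      # True (underscore allowed mid-name)
print(is_valid_field_name("2fast"))            # False (starts with a digit)
print(is_valid_field_name("-flag"))            # False (starts with a dash)
print(is_valid_field_name("has space"))        # False (contains a space)
```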
Cymmetria/honeycomb | honeycomb/utils/config_utils.py | process_config | def process_config(ctx, configfile):
"""Process a yaml config with instructions.
This is a heavy method that loads lots of content, so we only run the imports if it's called.
"""
from honeycomb.commands.service.run import run as service_run
# from honeycomb.commands.service.logs import logs as service_logs
from honeycomb.commands.service.install import install as service_install
from honeycomb.commands.integration.install import install as integration_install
from honeycomb.commands.integration.configure import configure as integration_configure
VERSION = "version"
SERVICES = defs.SERVICES
INTEGRATIONS = defs.INTEGRATIONS
required_top_keys = [VERSION, SERVICES]
supported_versions = [1]
def validate_yml(config):
for key in required_top_keys:
if key not in config:
raise exceptions.ConfigFieldMissing(key)
version = config.get(VERSION)
if version not in supported_versions:
raise exceptions.ConfigFieldTypeMismatch(VERSION, version,
"one of: {}".format(repr(supported_versions)))
def install_plugins(services, integrations):
for cmd, kwargs in [(service_install, {SERVICES: services}),
(integration_install, {INTEGRATIONS: integrations})]:
try:
ctx.invoke(cmd, **kwargs)
except SystemExit:
# If a plugin is already installed honeycomb will exit abnormally
pass
def parameters_to_string(parameters_dict):
return ["{}={}".format(k, v) for k, v in parameters_dict.items()]
def configure_integrations(integrations):
for integration in integrations:
args_list = parameters_to_string(config[INTEGRATIONS][integration].get(defs.PARAMETERS, dict()))
ctx.invoke(integration_configure, integration=integration, args=args_list)
def run_services(services, integrations):
# TODO: Enable support with multiple services as daemon, and run service.logs afterwards
# tricky part is that services launched as daemon are exited with os._exit(0) so you
# can't catch it.
for service in services:
args_list = parameters_to_string(config[SERVICES][service].get(defs.PARAMETERS, dict()))
ctx.invoke(service_run, service=service, integration=integrations, args=args_list)
# TODO: Silence normal stdout and follow honeycomb.debug.json instead
# This would make monitoring containers and collecting logs easier
with open(configfile, "rb") as fh:
config = yaml.load(fh.read())
validate_yml(config)
services = config.get(SERVICES).keys()
integrations = config.get(INTEGRATIONS).keys() if config.get(INTEGRATIONS) else []
install_plugins(services, integrations)
configure_integrations(integrations)
run_services(services, integrations) | python | def process_config(ctx, configfile):
"""Process a yaml config with instructions.
This is a heavy method that loads lots of content, so we only run the imports if it's called.
"""
from honeycomb.commands.service.run import run as service_run
# from honeycomb.commands.service.logs import logs as service_logs
from honeycomb.commands.service.install import install as service_install
from honeycomb.commands.integration.install import install as integration_install
from honeycomb.commands.integration.configure import configure as integration_configure
VERSION = "version"
SERVICES = defs.SERVICES
INTEGRATIONS = defs.INTEGRATIONS
required_top_keys = [VERSION, SERVICES]
supported_versions = [1]
def validate_yml(config):
for key in required_top_keys:
if key not in config:
raise exceptions.ConfigFieldMissing(key)
version = config.get(VERSION)
if version not in supported_versions:
raise exceptions.ConfigFieldTypeMismatch(VERSION, version,
"one of: {}".format(repr(supported_versions)))
def install_plugins(services, integrations):
for cmd, kwargs in [(service_install, {SERVICES: services}),
(integration_install, {INTEGRATIONS: integrations})]:
try:
ctx.invoke(cmd, **kwargs)
except SystemExit:
# If a plugin is already installed honeycomb will exit abnormally
pass
def parameters_to_string(parameters_dict):
return ["{}={}".format(k, v) for k, v in parameters_dict.items()]
def configure_integrations(integrations):
for integration in integrations:
args_list = parameters_to_string(config[INTEGRATIONS][integration].get(defs.PARAMETERS, dict()))
ctx.invoke(integration_configure, integration=integration, args=args_list)
def run_services(services, integrations):
# TODO: Enable support with multiple services as daemon, and run service.logs afterwards
# tricky part is that services launched as daemon are exited with os._exit(0) so you
# can't catch it.
for service in services:
args_list = parameters_to_string(config[SERVICES][service].get(defs.PARAMETERS, dict()))
ctx.invoke(service_run, service=service, integration=integrations, args=args_list)
# TODO: Silence normal stdout and follow honeycomb.debug.json instead
# This would make monitoring containers and collecting logs easier
with open(configfile, "rb") as fh:
config = yaml.load(fh.read())
validate_yml(config)
services = config.get(SERVICES).keys()
integrations = config.get(INTEGRATIONS).keys() if config.get(INTEGRATIONS) else []
install_plugins(services, integrations)
configure_integrations(integrations)
run_services(services, integrations) | [
"def",
"process_config",
"(",
"ctx",
",",
"configfile",
")",
":",
"from",
"honeycomb",
".",
"commands",
".",
"service",
".",
"run",
"import",
"run",
"as",
"service_run",
"# from honeycomb.commands.service.logs import logs as service_logs",
"from",
"honeycomb",
".",
"commands",
".",
"service",
".",
"install",
"import",
"install",
"as",
"service_install",
"from",
"honeycomb",
".",
"commands",
".",
"integration",
".",
"install",
"import",
"install",
"as",
"integration_install",
"from",
"honeycomb",
".",
"commands",
".",
"integration",
".",
"configure",
"import",
"configure",
"as",
"integration_configure",
"VERSION",
"=",
"\"version\"",
"SERVICES",
"=",
"defs",
".",
"SERVICES",
"INTEGRATIONS",
"=",
"defs",
".",
"INTEGRATIONS",
"required_top_keys",
"=",
"[",
"VERSION",
",",
"SERVICES",
"]",
"supported_versions",
"=",
"[",
"1",
"]",
"def",
"validate_yml",
"(",
"config",
")",
":",
"for",
"key",
"in",
"required_top_keys",
":",
"if",
"key",
"not",
"in",
"config",
":",
"raise",
"exceptions",
".",
"ConfigFieldMissing",
"(",
"key",
")",
"version",
"=",
"config",
".",
"get",
"(",
"VERSION",
")",
"if",
"version",
"not",
"in",
"supported_versions",
":",
"raise",
"exceptions",
".",
"ConfigFieldTypeMismatch",
"(",
"VERSION",
",",
"version",
",",
"\"one of: {}\"",
".",
"format",
"(",
"repr",
"(",
"supported_versions",
")",
")",
")",
"def",
"install_plugins",
"(",
"services",
",",
"integrations",
")",
":",
"for",
"cmd",
",",
"kwargs",
"in",
"[",
"(",
"service_install",
",",
"{",
"SERVICES",
":",
"services",
"}",
")",
",",
"(",
"integration_install",
",",
"{",
"INTEGRATIONS",
":",
"integrations",
"}",
")",
"]",
":",
"try",
":",
"ctx",
".",
"invoke",
"(",
"cmd",
",",
"*",
"*",
"kwargs",
")",
"except",
"SystemExit",
":",
"# If a plugin is already installed honeycomb will exit abnormally",
"pass",
"def",
"parameters_to_string",
"(",
"parameters_dict",
")",
":",
"return",
"[",
"\"{}={}\"",
".",
"format",
"(",
"k",
",",
"v",
")",
"for",
"k",
",",
"v",
"in",
"parameters_dict",
".",
"items",
"(",
")",
"]",
"def",
"configure_integrations",
"(",
"integrations",
")",
":",
"for",
"integration",
"in",
"integrations",
":",
"args_list",
"=",
"parameters_to_string",
"(",
"config",
"[",
"INTEGRATIONS",
"]",
"[",
"integration",
"]",
".",
"get",
"(",
"defs",
".",
"PARAMETERS",
",",
"dict",
"(",
")",
")",
")",
"ctx",
".",
"invoke",
"(",
"integration_configure",
",",
"integration",
"=",
"integration",
",",
"args",
"=",
"args_list",
")",
"def",
"run_services",
"(",
"services",
",",
"integrations",
")",
":",
"# TODO: Enable support with multiple services as daemon, and run service.logs afterwards",
"# tricky part is that services launched as daemon are exited with os._exit(0) so you",
"# can't catch it.",
"for",
"service",
"in",
"services",
":",
"args_list",
"=",
"parameters_to_string",
"(",
"config",
"[",
"SERVICES",
"]",
"[",
"service",
"]",
".",
"get",
"(",
"defs",
".",
"PARAMETERS",
",",
"dict",
"(",
")",
")",
")",
"ctx",
".",
"invoke",
"(",
"service_run",
",",
"service",
"=",
"service",
",",
"integration",
"=",
"integrations",
",",
"args",
"=",
"args_list",
")",
"# TODO: Silence normal stdout and follow honeycomb.debug.json instead",
"# This would make monitoring containers and collecting logs easier",
"with",
"open",
"(",
"configfile",
",",
"\"rb\"",
")",
"as",
"fh",
":",
"config",
"=",
"yaml",
".",
"load",
"(",
"fh",
".",
"read",
"(",
")",
")",
"validate_yml",
"(",
"config",
")",
"services",
"=",
"config",
".",
"get",
"(",
"SERVICES",
")",
".",
"keys",
"(",
")",
"integrations",
"=",
"config",
".",
"get",
"(",
"INTEGRATIONS",
")",
".",
"keys",
"(",
")",
"if",
"config",
".",
"get",
"(",
"INTEGRATIONS",
")",
"else",
"[",
"]",
"install_plugins",
"(",
"services",
",",
"integrations",
")",
"configure_integrations",
"(",
"integrations",
")",
"run_services",
"(",
"services",
",",
"integrations",
")"
] | Process a yaml config with instructions.
This is a heavy method that loads lots of content, so we only run the imports if it's called. | [
"Process",
"a",
"yaml",
"config",
"with",
"instructions",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/utils/config_utils.py#L114-L178 | train |
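A hedged sketch of a config file `process_config` would accept. The service and integration names are illustrative, and the `services`/`integrations`/`parameters` key spellings are assumptions about what `defs.SERVICES`, `defs.INTEGRATIONS`, and `defs.PARAMETERS` resolve to; the snippet replays the `validate_yml` checks on it.

```python
import yaml  # pip install pyyaml

CONFIG = """
version: 1
services:
  simple_http:
    parameters:
      port: 8888
integrations:
  syslog:
    parameters:
      address: 127.0.0.1
      port: 514
"""

config = yaml.safe_load(CONFIG)
assert config["version"] == 1  # supported_versions == [1]
assert "services" in config    # required top-level key
print(list(config["services"]))              # ['simple_http']
print(list(config.get("integrations", {})))  # ['syslog']
```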
Cymmetria/honeycomb | honeycomb/utils/plugin_utils.py | get_plugin_path | def get_plugin_path(home, plugin_type, plugin_name, editable=False):
"""Return path to plugin.
:param home: Path to honeycomb home
:param plugin_type: Type of plugin (:obj:`honeycomb.defs.SERVICES` or :obj:`honeycomb.defs.INTEGRATIONS`)
:param plugin_name: Name of plugin
:param editable: Use plugin_name as direct path instead of loading from honeycomb home folder
"""
if editable:
plugin_path = plugin_name
else:
plugin_path = os.path.join(home, plugin_type, plugin_name)
return os.path.realpath(plugin_path) | python | def get_plugin_path(home, plugin_type, plugin_name, editable=False):
"""Return path to plugin.
:param home: Path to honeycomb home
:param plugin_type: Type of plugin (:obj:`honeycomb.defs.SERVICES` or :obj:`honeycomb.defs.INTEGRATIONS`)
:param plugin_name: Name of plugin
:param editable: Use plugin_name as direct path instead of loading from honeycomb home folder
"""
if editable:
plugin_path = plugin_name
else:
plugin_path = os.path.join(home, plugin_type, plugin_name)
return os.path.realpath(plugin_path) | [
"def",
"get_plugin_path",
"(",
"home",
",",
"plugin_type",
",",
"plugin_name",
",",
"editable",
"=",
"False",
")",
":",
"if",
"editable",
":",
"plugin_path",
"=",
"plugin_name",
"else",
":",
"plugin_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"home",
",",
"plugin_type",
",",
"plugin_name",
")",
"return",
"os",
".",
"path",
".",
"realpath",
"(",
"plugin_path",
")"
] | Return path to plugin.
:param home: Path to honeycomb home
:param plugin_type: Type of plugin (:obj:`honeycomb.defs.SERVICES` or :obj:`honeycomb.defs.INTEGRATIONS`)
:param plugin_name: Name of plugin
:param editable: Use plugin_name as direct path instead of loading from honeycomb home folder | [
"Return",
"path",
"to",
"plugin",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/utils/plugin_utils.py#L44-L57 | train |
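The two modes of `get_plugin_path`, exercised with a verbatim copy of the function; the paths are illustrative and need not exist, since `os.path.realpath` resolves them either way.

```python
import os

def get_plugin_path(home, plugin_type, plugin_name, editable=False):
    if editable:
        plugin_path = plugin_name
    else:
        plugin_path = os.path.join(home, plugin_type, plugin_name)
    return os.path.realpath(plugin_path)

print(get_plugin_path("/opt/honeycomb", "services", "simple_http"))
# -> /opt/honeycomb/services/simple_http
print(get_plugin_path("/opt/honeycomb", "services", "./my_dev_service", editable=True))
# -> absolute path of ./my_dev_service under the current working directory
```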
Cymmetria/honeycomb | honeycomb/utils/plugin_utils.py | install_plugin | def install_plugin(pkgpath, plugin_type, install_path, register_func):
"""Install specified plugin.
:param pkgpath: Name of plugin to be downloaded from online repo or path to plugin folder or zip file.
:param install_path: Path where plugin will be installed.
:param register_func: Method used to register and validate plugin.
"""
service_name = os.path.basename(pkgpath)
if os.path.exists(os.path.join(install_path, service_name)):
raise exceptions.PluginAlreadyInstalled(pkgpath)
if os.path.exists(pkgpath):
logger.debug("%s exists in filesystem", pkgpath)
if os.path.isdir(pkgpath):
pip_status = install_dir(pkgpath, install_path, register_func)
else: # pkgpath is file
pip_status = install_from_zip(pkgpath, install_path, register_func)
else:
logger.debug("cannot find %s locally, checking github repo", pkgpath)
click.secho("Collecting {}..".format(pkgpath))
pip_status = install_from_repo(pkgpath, plugin_type, install_path, register_func)
if pip_status == 0:
click.secho("[+] Great success!")
else:
# TODO: rephrase
click.secho("[-] Service installed but something was odd with dependency install, please review debug logs") | python | def install_plugin(pkgpath, plugin_type, install_path, register_func):
"""Install specified plugin.
:param pkgpath: Name of plugin to be downloaded from online repo or path to plugin folder or zip file.
:param install_path: Path where plugin will be installed.
:param register_func: Method used to register and validate plugin.
"""
service_name = os.path.basename(pkgpath)
if os.path.exists(os.path.join(install_path, service_name)):
raise exceptions.PluginAlreadyInstalled(pkgpath)
if os.path.exists(pkgpath):
logger.debug("%s exists in filesystem", pkgpath)
if os.path.isdir(pkgpath):
pip_status = install_dir(pkgpath, install_path, register_func)
else: # pkgpath is file
pip_status = install_from_zip(pkgpath, install_path, register_func)
else:
logger.debug("cannot find %s locally, checking github repo", pkgpath)
click.secho("Collecting {}..".format(pkgpath))
pip_status = install_from_repo(pkgpath, plugin_type, install_path, register_func)
if pip_status == 0:
click.secho("[+] Great success!")
else:
# TODO: rephrase
click.secho("[-] Service installed but something was odd with dependency install, please review debug logs") | [
"def",
"install_plugin",
"(",
"pkgpath",
",",
"plugin_type",
",",
"install_path",
",",
"register_func",
")",
":",
"service_name",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"pkgpath",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"os",
".",
"path",
".",
"join",
"(",
"install_path",
",",
"service_name",
")",
")",
":",
"raise",
"exceptions",
".",
"PluginAlreadyInstalled",
"(",
"pkgpath",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"pkgpath",
")",
":",
"logger",
".",
"debug",
"(",
"\"%s exists in filesystem\"",
",",
"pkgpath",
")",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"pkgpath",
")",
":",
"pip_status",
"=",
"install_dir",
"(",
"pkgpath",
",",
"install_path",
",",
"register_func",
")",
"else",
":",
"# pkgpath is file",
"pip_status",
"=",
"install_from_zip",
"(",
"pkgpath",
",",
"install_path",
",",
"register_func",
")",
"else",
":",
"logger",
".",
"debug",
"(",
"\"cannot find %s locally, checking github repo\"",
",",
"pkgpath",
")",
"click",
".",
"secho",
"(",
"\"Collecting {}..\"",
".",
"format",
"(",
"pkgpath",
")",
")",
"pip_status",
"=",
"install_from_repo",
"(",
"pkgpath",
",",
"plugin_type",
",",
"install_path",
",",
"register_func",
")",
"if",
"pip_status",
"==",
"0",
":",
"click",
".",
"secho",
"(",
"\"[+] Great success!\"",
")",
"else",
":",
"# TODO: rephrase",
"click",
".",
"secho",
"(",
"\"[-] Service installed but something was odd with dependency install, please review debug logs\"",
")"
] | Install specified plugin.
:param pkgpath: Name of plugin to be downloaded from online repo or path to plugin folder or zip file.
:param install_path: Path where plugin will be installed.
:param register_func: Method used to register and validate plugin. | [
"Install",
"specified",
"plugin",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/utils/plugin_utils.py#L60-L86 | train |
Cymmetria/honeycomb | honeycomb/utils/plugin_utils.py | install_deps | def install_deps(pkgpath):
"""Install plugin dependencies using pip.
We import pip here to reduce load time for when it's not needed.
"""
if os.path.exists(os.path.join(pkgpath, "requirements.txt")):
logger.debug("installing dependencies")
click.secho("[*] Installing dependencies")
pipargs = ["install", "--target", os.path.join(pkgpath, defs.DEPS_DIR), "--ignore-installed",
"-r", os.path.join(pkgpath, "requirements.txt")]
logger.debug("running pip %s", pipargs)
return subprocess.check_call([sys.executable, "-m", "pip"] + pipargs)
return 0 | python | def install_deps(pkgpath):
"""Install plugin dependencies using pip.
We import pip here to reduce load time for when it's not needed.
"""
if os.path.exists(os.path.join(pkgpath, "requirements.txt")):
logger.debug("installing dependencies")
click.secho("[*] Installing dependencies")
pipargs = ["install", "--target", os.path.join(pkgpath, defs.DEPS_DIR), "--ignore-installed",
"-r", os.path.join(pkgpath, "requirements.txt")]
logger.debug("running pip %s", pipargs)
return subprocess.check_call([sys.executable, "-m", "pip"] + pipargs)
return 0 | [
"def",
"install_deps",
"(",
"pkgpath",
")",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"os",
".",
"path",
".",
"join",
"(",
"pkgpath",
",",
"\"requirements.txt\"",
")",
")",
":",
"logger",
".",
"debug",
"(",
"\"installing dependencies\"",
")",
"click",
".",
"secho",
"(",
"\"[*] Installing dependencies\"",
")",
"pipargs",
"=",
"[",
"\"install\"",
",",
"\"--target\"",
",",
"os",
".",
"path",
".",
"join",
"(",
"pkgpath",
",",
"defs",
".",
"DEPS_DIR",
")",
",",
"\"--ignore-installed\"",
",",
"\"-r\"",
",",
"os",
".",
"path",
".",
"join",
"(",
"pkgpath",
",",
"\"requirements.txt\"",
")",
"]",
"logger",
".",
"debug",
"(",
"\"running pip %s\"",
",",
"pipargs",
")",
"return",
"subprocess",
".",
"check_call",
"(",
"[",
"sys",
".",
"executable",
",",
"\"-m\"",
",",
"\"pip\"",
"]",
"+",
"pipargs",
")",
"return",
"0"
] | Install plugin dependencies using pip.
We import pip here to reduce load time for when it's not needed. | [
"Install",
"plugin",
"dependencies",
"using",
"pip",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/utils/plugin_utils.py#L89-L101 | train |
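For reference, the `subprocess` call above amounts to a plain pip invocation. The plugin path is illustrative and `DEPS_DIR = "venv"` is an assumption standing in for `defs.DEPS_DIR`, whose actual value lives in `honeycomb.defs`.

```python
import os
import sys

pkgpath = "/opt/honeycomb/services/simple_http"  # illustrative plugin path
DEPS_DIR = "venv"  # assumption: stand-in for defs.DEPS_DIR

pipargs = ["install", "--target", os.path.join(pkgpath, DEPS_DIR), "--ignore-installed",
           "-r", os.path.join(pkgpath, "requirements.txt")]
print(" ".join([sys.executable, "-m", "pip"] + pipargs))
# e.g. /usr/bin/python3 -m pip install --target .../venv --ignore-installed -r .../requirements.txt
```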
Cymmetria/honeycomb | honeycomb/utils/plugin_utils.py | copy_file | def copy_file(src, dst):
"""Copy a single file.
:param src: Source name
:param dst: Destination name
"""
try:
fin = os.open(src, READ_FLAGS)
stat = os.fstat(fin)
fout = os.open(dst, WRITE_FLAGS, stat.st_mode)
for x in iter(lambda: os.read(fin, BUFFER_SIZE), b""):
os.write(fout, x)
finally:
try:
os.close(fin)
except Exception as exc:
logger.debug("Failed to close file handle when copying: {}".format(exc))
try:
os.close(fout)
except Exception as exc:
logger.debug("Failed to close file handle when copying: {}".format(exc)) | python | def copy_file(src, dst):
"""Copy a single file.
:param src: Source name
:param dst: Destination name
"""
try:
fin = os.open(src, READ_FLAGS)
stat = os.fstat(fin)
fout = os.open(dst, WRITE_FLAGS, stat.st_mode)
for x in iter(lambda: os.read(fin, BUFFER_SIZE), b""):
os.write(fout, x)
finally:
try:
os.close(fin)
except Exception as exc:
logger.debug("Failed to close file handle when copying: {}".format(exc))
try:
os.close(fout)
except Exception as exc:
logger.debug("Failed to close file handle when copying: {}".format(exc)) | [
"def",
"copy_file",
"(",
"src",
",",
"dst",
")",
":",
"try",
":",
"fin",
"=",
"os",
".",
"open",
"(",
"src",
",",
"READ_FLAGS",
")",
"stat",
"=",
"os",
".",
"fstat",
"(",
"fin",
")",
"fout",
"=",
"os",
".",
"open",
"(",
"dst",
",",
"WRITE_FLAGS",
",",
"stat",
".",
"st_mode",
")",
"for",
"x",
"in",
"iter",
"(",
"lambda",
":",
"os",
".",
"read",
"(",
"fin",
",",
"BUFFER_SIZE",
")",
",",
"b\"\"",
")",
":",
"os",
".",
"write",
"(",
"fout",
",",
"x",
")",
"finally",
":",
"try",
":",
"os",
".",
"close",
"(",
"fin",
")",
"except",
"Exception",
"as",
"exc",
":",
"logger",
".",
"debug",
"(",
"\"Failed to close file handle when copying: {}\"",
".",
"format",
"(",
"exc",
")",
")",
"try",
":",
"os",
".",
"close",
"(",
"fout",
")",
"except",
"Exception",
"as",
"exc",
":",
"logger",
".",
"debug",
"(",
"\"Failed to close file handle when copying: {}\"",
".",
"format",
"(",
"exc",
")",
")"
] | Copy a single file.
:param src: Source name
:param dst: Destination name | [
"Copy",
"a",
"single",
"file",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/utils/plugin_utils.py#L104-L124 | train |
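`copy_file` leans on three module-level constants (`READ_FLAGS`, `WRITE_FLAGS`, `BUFFER_SIZE`) defined elsewhere in `plugin_utils`. A plausible definition, hedged as an assumption, with the Windows-only `O_BINARY` flag handled defensively:

```python
import os

O_BINARY = getattr(os, "O_BINARY", 0)  # only exists on Windows
READ_FLAGS = os.O_RDONLY | O_BINARY
WRITE_FLAGS = os.O_WRONLY | os.O_CREAT | os.O_TRUNC | O_BINARY
BUFFER_SIZE = 128 * 1024  # copy in 128 KiB chunks
```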
Cymmetria/honeycomb | honeycomb/utils/plugin_utils.py | copy_tree | def copy_tree(src, dst, symlinks=False, ignore=[]):
"""Copy a full directory structure.
:param src: Source path
:param dst: Destination path
:param symlinks: Copy symlinks
:param ignore: Subdirs/filenames to ignore
"""
names = os.listdir(src)
if not os.path.exists(dst):
os.makedirs(dst)
errors = []
for name in names:
if name in ignore:
continue
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
try:
if symlinks and os.path.islink(srcname):
linkto = os.readlink(srcname)
os.symlink(linkto, dstname)
elif os.path.isdir(srcname):
copy_tree(srcname, dstname, symlinks, ignore)
else:
copy_file(srcname, dstname)
except (IOError, os.error) as exc:
errors.append((srcname, dstname, str(exc)))
except CTError as exc:
errors.extend(exc.errors)
if errors:
raise CTError(errors) | python | def copy_tree(src, dst, symlinks=False, ignore=[]):
"""Copy a full directory structure.
:param src: Source path
:param dst: Destination path
:param symlinks: Copy symlinks
:param ignore: Subdirs/filenames to ignore
"""
names = os.listdir(src)
if not os.path.exists(dst):
os.makedirs(dst)
errors = []
for name in names:
if name in ignore:
continue
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
try:
if symlinks and os.path.islink(srcname):
linkto = os.readlink(srcname)
os.symlink(linkto, dstname)
elif os.path.isdir(srcname):
copy_tree(srcname, dstname, symlinks, ignore)
else:
copy_file(srcname, dstname)
except (IOError, os.error) as exc:
errors.append((srcname, dstname, str(exc)))
except CTError as exc:
errors.extend(exc.errors)
if errors:
raise CTError(errors) | [
"def",
"copy_tree",
"(",
"src",
",",
"dst",
",",
"symlinks",
"=",
"False",
",",
"ignore",
"=",
"[",
"]",
")",
":",
"names",
"=",
"os",
".",
"listdir",
"(",
"src",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"dst",
")",
":",
"os",
".",
"makedirs",
"(",
"dst",
")",
"errors",
"=",
"[",
"]",
"for",
"name",
"in",
"names",
":",
"if",
"name",
"in",
"ignore",
":",
"continue",
"srcname",
"=",
"os",
".",
"path",
".",
"join",
"(",
"src",
",",
"name",
")",
"dstname",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dst",
",",
"name",
")",
"try",
":",
"if",
"symlinks",
"and",
"os",
".",
"path",
".",
"islink",
"(",
"srcname",
")",
":",
"linkto",
"=",
"os",
".",
"readlink",
"(",
"srcname",
")",
"os",
".",
"symlink",
"(",
"linkto",
",",
"dstname",
")",
"elif",
"os",
".",
"path",
".",
"isdir",
"(",
"srcname",
")",
":",
"copy_tree",
"(",
"srcname",
",",
"dstname",
",",
"symlinks",
",",
"ignore",
")",
"else",
":",
"copy_file",
"(",
"srcname",
",",
"dstname",
")",
"except",
"(",
"IOError",
",",
"os",
".",
"error",
")",
"as",
"exc",
":",
"errors",
".",
"append",
"(",
"(",
"srcname",
",",
"dstname",
",",
"str",
"(",
"exc",
")",
")",
")",
"except",
"CTError",
"as",
"exc",
":",
"errors",
".",
"extend",
"(",
"exc",
".",
"errors",
")",
"if",
"errors",
":",
"raise",
"CTError",
"(",
"errors",
")"
] | Copy a full directory structure.
:param src: Source path
:param dst: Destination path
:param symlinks: Copy symlinks
:param ignore: Subdirs/filenames to ignore | [
"Copy",
"a",
"full",
"directory",
"structure",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/utils/plugin_utils.py#L130-L161 | train |
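A hedged usage sketch for `copy_tree` as defined in the record above (assumes honeycomb is installed so the function is importable; the paths are illustrative):

```python
from honeycomb.utils.plugin_utils import copy_tree  # the function above

copy_tree(src="/opt/honeycomb/services/simple_http",
          dst="/tmp/simple_http_backup",
          symlinks=False,
          ignore=[".git", "__pycache__"])  # names skipped at every level
```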
Cymmetria/honeycomb | honeycomb/utils/plugin_utils.py | install_dir | def install_dir(pkgpath, install_path, register_func, delete_after_install=False):
"""Install plugin from specified directory.
install_path and register_func are same as :func:`install_plugin`.
:param delete_after_install: Delete pkgpath after install (used in :func:`install_from_zip`).
"""
logger.debug("%s is a directory, attempting to validate", pkgpath)
plugin = register_func(pkgpath)
logger.debug("%s looks good, copying to %s", pkgpath, install_path)
try:
copy_tree(pkgpath, os.path.join(install_path, plugin.name))
if delete_after_install:
logger.debug("deleting %s", pkgpath)
shutil.rmtree(pkgpath)
pkgpath = os.path.join(install_path, plugin.name)
except (OSError, CTError) as exc:
# TODO: handle package name exists (upgrade? overwrite?)
logger.debug(str(exc), exc_info=True)
raise exceptions.PluginAlreadyInstalled(plugin.name)
return install_deps(pkgpath) | python | def install_dir(pkgpath, install_path, register_func, delete_after_install=False):
"""Install plugin from specified directory.
install_path and register_func are same as :func:`install_plugin`.
:param delete_after_install: Delete pkgpath after install (used in :func:`install_from_zip`).
"""
logger.debug("%s is a directory, attempting to validate", pkgpath)
plugin = register_func(pkgpath)
logger.debug("%s looks good, copying to %s", pkgpath, install_path)
try:
copy_tree(pkgpath, os.path.join(install_path, plugin.name))
if delete_after_install:
logger.debug("deleting %s", pkgpath)
shutil.rmtree(pkgpath)
pkgpath = os.path.join(install_path, plugin.name)
except (OSError, CTError) as exc:
# TODO: handle package name exists (upgrade? overwrite?)
logger.debug(str(exc), exc_info=True)
raise exceptions.PluginAlreadyInstalled(plugin.name)
return install_deps(pkgpath) | [
"def",
"install_dir",
"(",
"pkgpath",
",",
"install_path",
",",
"register_func",
",",
"delete_after_install",
"=",
"False",
")",
":",
"logger",
".",
"debug",
"(",
"\"%s is a directory, attempting to validate\"",
",",
"pkgpath",
")",
"plugin",
"=",
"register_func",
"(",
"pkgpath",
")",
"logger",
".",
"debug",
"(",
"\"%s looks good, copying to %s\"",
",",
"pkgpath",
",",
"install_path",
")",
"try",
":",
"copy_tree",
"(",
"pkgpath",
",",
"os",
".",
"path",
".",
"join",
"(",
"install_path",
",",
"plugin",
".",
"name",
")",
")",
"if",
"delete_after_install",
":",
"logger",
".",
"debug",
"(",
"\"deleting %s\"",
",",
"pkgpath",
")",
"shutil",
".",
"rmtree",
"(",
"pkgpath",
")",
"pkgpath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"install_path",
",",
"plugin",
".",
"name",
")",
"except",
"(",
"OSError",
",",
"CTError",
")",
"as",
"exc",
":",
"# TODO: handle package name exists (upgrade? overwrite?)",
"logger",
".",
"debug",
"(",
"str",
"(",
"exc",
")",
",",
"exc_info",
"=",
"True",
")",
"raise",
"exceptions",
".",
"PluginAlreadyInstalled",
"(",
"plugin",
".",
"name",
")",
"return",
"install_deps",
"(",
"pkgpath",
")"
] | Install plugin from specified directory.
install_path and register_func are same as :func:`install_plugin`.
:param delete_after_install: Delete pkgpath after install (used in :func:`install_from_zip`). | [
"Install",
"plugin",
"from",
"specified",
"directory",
"."
] | 33ea91b5cf675000e4e85dd02efe580ea6e95c86 | https://github.com/Cymmetria/honeycomb/blob/33ea91b5cf675000e4e85dd02efe580ea6e95c86/honeycomb/utils/plugin_utils.py#L164-L184 | train |