"""test staticmethod and classmethod as decorator"""
__revision__ = None
class StaticMethod1(object):
"""staticmethod test"""
def __init__(self):
pass
@staticmethod
def do_work():
"Working..."
@staticmethod
def do_work_with_arg(job):
"Working on something"
print "Working on %s..." % job
class ClassMethod2(object):
"""classmethod test"""
def __init__(self):
pass
@classmethod
def do_work(cls):
"Working..."
@classmethod
def do_work_with_arg(cls, job):
"Working on something"
print "Working on %s..." % job
| {
"content_hash": "4a8a19f31e62c7ee2ba0b1a6624e9987",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 52,
"avg_line_length": 19.59375,
"alnum_prop": 0.5661881977671451,
"repo_name": "willemneal/Docky",
"id": "7884cbde741cb6903c5348d37370c04b01dd09b6",
"size": "668",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "lib/pylint/test/input/func_noerror_staticmethod_as_decorator_py24.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "APL",
"bytes": "587"
},
{
"name": "ASP",
"bytes": "636"
},
{
"name": "ActionScript",
"bytes": "5686"
},
{
"name": "Ada",
"bytes": "5145"
},
{
"name": "Agda",
"bytes": "3154"
},
{
"name": "Alloy",
"bytes": "6579"
},
{
"name": "AppleScript",
"bytes": "421"
},
{
"name": "Assembly",
"bytes": "3168"
},
{
"name": "AutoHotkey",
"bytes": "3733"
},
{
"name": "AutoIt",
"bytes": "667"
},
{
"name": "Awk",
"bytes": "4528"
},
{
"name": "BlitzBasic",
"bytes": "1730"
},
{
"name": "BlitzMax",
"bytes": "2387"
},
{
"name": "Boo",
"bytes": "1111"
},
{
"name": "Bro",
"bytes": "7337"
},
{
"name": "C",
"bytes": "109073"
},
{
"name": "C#",
"bytes": "17784"
},
{
"name": "C++",
"bytes": "79372"
},
{
"name": "COBOL",
"bytes": "114812"
},
{
"name": "CSS",
"bytes": "26952"
},
{
"name": "Ceylon",
"bytes": "1387"
},
{
"name": "Chapel",
"bytes": "4366"
},
{
"name": "Cirru",
"bytes": "2574"
},
{
"name": "Clean",
"bytes": "2878"
},
{
"name": "Clojure",
"bytes": "23871"
},
{
"name": "CoffeeScript",
"bytes": "20149"
},
{
"name": "ColdFusion",
"bytes": "9006"
},
{
"name": "Common Lisp",
"bytes": "91743"
},
{
"name": "Coq",
"bytes": "66"
},
{
"name": "Cuda",
"bytes": "776"
},
{
"name": "D",
"bytes": "5475"
},
{
"name": "Dart",
"bytes": "591"
},
{
"name": "Dylan",
"bytes": "6343"
},
{
"name": "Ecl",
"bytes": "2599"
},
{
"name": "Eiffel",
"bytes": "2145"
},
{
"name": "Elixir",
"bytes": "4340"
},
{
"name": "Emacs Lisp",
"bytes": "5709"
},
{
"name": "Erlang",
"bytes": "5746"
},
{
"name": "F#",
"bytes": "19156"
},
{
"name": "FORTRAN",
"bytes": "27879"
},
{
"name": "Factor",
"bytes": "10194"
},
{
"name": "Fancy",
"bytes": "2581"
},
{
"name": "Fantom",
"bytes": "25331"
},
{
"name": "GAP",
"bytes": "15760"
},
{
"name": "Gnuplot",
"bytes": "10376"
},
{
"name": "Go",
"bytes": "172"
},
{
"name": "Golo",
"bytes": "1649"
},
{
"name": "Gosu",
"bytes": "2853"
},
{
"name": "Groovy",
"bytes": "2586"
},
{
"name": "Haskell",
"bytes": "49593"
},
{
"name": "Haxe",
"bytes": "16812"
},
{
"name": "Hy",
"bytes": "7237"
},
{
"name": "IDL",
"bytes": "2098"
},
{
"name": "Idris",
"bytes": "2771"
},
{
"name": "Inform 7",
"bytes": "1944"
},
{
"name": "Ioke",
"bytes": "469"
},
{
"name": "Isabelle",
"bytes": "21392"
},
{
"name": "Jasmin",
"bytes": "9428"
},
{
"name": "Java",
"bytes": "81613"
},
{
"name": "JavaScript",
"bytes": "14143"
},
{
"name": "Julia",
"bytes": "27687"
},
{
"name": "Kotlin",
"bytes": "971"
},
{
"name": "LSL",
"bytes": "160"
},
{
"name": "Lasso",
"bytes": "18650"
},
{
"name": "LiveScript",
"bytes": "972"
},
{
"name": "Logos",
"bytes": "306"
},
{
"name": "Logtalk",
"bytes": "7260"
},
{
"name": "Lua",
"bytes": "8677"
},
{
"name": "Makefile",
"bytes": "76274"
},
{
"name": "Mathematica",
"bytes": "191"
},
{
"name": "Monkey",
"bytes": "2587"
},
{
"name": "Moocode",
"bytes": "3343"
},
{
"name": "MoonScript",
"bytes": "14862"
},
{
"name": "Nemerle",
"bytes": "1517"
},
{
"name": "Nimrod",
"bytes": "37191"
},
{
"name": "Nit",
"bytes": "55581"
},
{
"name": "Nix",
"bytes": "2448"
},
{
"name": "OCaml",
"bytes": "42416"
},
{
"name": "Objective-C",
"bytes": "3385"
},
{
"name": "Objective-J",
"bytes": "15340"
},
{
"name": "Opa",
"bytes": "172"
},
{
"name": "OpenEdge ABL",
"bytes": "318"
},
{
"name": "PAWN",
"bytes": "6555"
},
{
"name": "PHP",
"bytes": "17354"
},
{
"name": "Pan",
"bytes": "1241"
},
{
"name": "Pascal",
"bytes": "84519"
},
{
"name": "Perl",
"bytes": "3611"
},
{
"name": "Perl6",
"bytes": "49676"
},
{
"name": "PigLatin",
"bytes": "6657"
},
{
"name": "Pike",
"bytes": "8479"
},
{
"name": "PowerShell",
"bytes": "6932"
},
{
"name": "Prolog",
"bytes": "738"
},
{
"name": "Puppet",
"bytes": "130"
},
{
"name": "Python",
"bytes": "6272729"
},
{
"name": "R",
"bytes": "4057"
},
{
"name": "Racket",
"bytes": "11341"
},
{
"name": "Rebol",
"bytes": "1887"
},
{
"name": "Red",
"bytes": "10536"
},
{
"name": "Ruby",
"bytes": "91403"
},
{
"name": "Rust",
"bytes": "6788"
},
{
"name": "Scala",
"bytes": "730"
},
{
"name": "Scheme",
"bytes": "47137"
},
{
"name": "Scilab",
"bytes": "943"
},
{
"name": "Shell",
"bytes": "121510"
},
{
"name": "ShellSession",
"bytes": "320"
},
{
"name": "Smalltalk",
"bytes": "156665"
},
{
"name": "SourcePawn",
"bytes": "130"
},
{
"name": "Standard ML",
"bytes": "36869"
},
{
"name": "Swift",
"bytes": "2035"
},
{
"name": "SystemVerilog",
"bytes": "265"
},
{
"name": "TypeScript",
"bytes": "535"
},
{
"name": "VHDL",
"bytes": "4446"
},
{
"name": "VimL",
"bytes": "16922"
},
{
"name": "Visual Basic",
"bytes": "17210"
},
{
"name": "XQuery",
"bytes": "4289"
},
{
"name": "XSLT",
"bytes": "755"
},
{
"name": "Xtend",
"bytes": "727"
},
{
"name": "Zephir",
"bytes": "485"
},
{
"name": "eC",
"bytes": "26388"
},
{
"name": "nesC",
"bytes": "23697"
},
{
"name": "xBase",
"bytes": "3349"
}
],
"symlink_target": ""
} |
"""Provides a way to memoize pure functions with the cache stored to the state.
For usage examples, check memoize in the examples folder.
Written by Peter Duerr
"""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from toolz import memoize
from inspect import getsource
from pyexperiment import state
class PersistentCache(object): # pylint: disable=too-few-public-methods
"""Persistent cache object that redirects to the state
"""
KEY_PREFIX = '__persistent_cache_'
"""Prefix for the keys of the persistent cache in the state
"""
def __init__(self, key):
"""Initializer
"""
self.key = self.KEY_PREFIX + str(key)
if self.key not in state:
state[self.key] = {}
def __getitem__(self, key):
"""Get cache entry
"""
return state[self.key][key]
def __setitem__(self, key, value):
"""Set cache entry
"""
state[self.key][key] = value
state.changed.add(self.key)
def __iter__(self):
"""Iterator over the cache
"""
return state[self.key].__iter__()
def persistent_memoize(target):
"""Memoize target function, keep persistent cache in state
"""
target_hash = hash(getsource(target))
cache = PersistentCache(target_hash)
return memoize(target, cache=cache)
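# A minimal usage sketch (illustrative; assumes pyexperiment's state has been
# initialized elsewhere, e.g. by an experiment's setup code, and that the
# decorated function is pure):
#
#     @persistent_memoize
#     def expensive_square(x):
#         return x ** 2
#
#     expensive_square(4)  # computed once, then served from the state cache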
| {
"content_hash": "1eee8cce8148254dfac8e2f85c3924a7",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 79,
"avg_line_length": 25.714285714285715,
"alnum_prop": 0.6340277777777777,
"repo_name": "duerrp/pyexperiment",
"id": "6fb74353c824a846a870f674c78aa545921f04fd",
"size": "1440",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyexperiment/utils/persistent_memoize.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "253715"
},
{
"name": "Ruby",
"bytes": "2506"
},
{
"name": "Shell",
"bytes": "1066"
}
],
"symlink_target": ""
} |
"""
sentry.runner.initializer
~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2015 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
import click
import os
import six
from sentry.utils import warnings
from sentry.utils.warnings import DeprecatedSettingWarning
def install_plugin_apps(settings):
# entry_points={
# 'sentry.apps': [
# 'phabricator = sentry_phabricator'
# ],
# },
from pkg_resources import iter_entry_points
installed_apps = list(settings.INSTALLED_APPS)
for ep in iter_entry_points('sentry.apps'):
installed_apps.append(ep.module_name)
settings.INSTALLED_APPS = tuple(installed_apps)
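# Hypothetical setup.py sketch (illustrative package name, mirroring the
# comment above): a plugin distribution exposes its Django app through the
# 'sentry.apps' entry point, which install_plugin_apps folds into
# INSTALLED_APPS.
#
#     from setuptools import setup
#
#     setup(
#         name='sentry-phabricator',
#         entry_points={
#             'sentry.apps': [
#                 'phabricator = sentry_phabricator',
#             ],
#         },
#     )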
def register_plugins(settings):
    from pkg_resources import iter_entry_points
    from sentry.plugins import register
    # entry_points={
    #    'sentry.plugins': [
    #        'phabricator = sentry_phabricator.plugins:PhabricatorPlugin'
    #    ],
    # },
    for ep in iter_entry_points('sentry.plugins'):
        try:
            plugin = ep.load()
        except Exception:
            import traceback
            click.echo("Failed to load plugin %r:\n%s" % (ep.name, traceback.format_exc()), err=True)
        else:
            register(plugin)


def initialize_receivers():
    # force signal registration
    import sentry.receivers  # NOQA


def get_asset_version(settings):
    path = os.path.join(settings.STATIC_ROOT, 'version')
    try:
        with open(path) as fp:
            return fp.read().strip()
    except IOError:
        from time import time
        return int(time())


# Options which must get extracted into Django settings while
# bootstrapping. Everything else will get validated and used
# as a part of OptionsManager.
options_mapper = {
    # 'cache.backend': 'SENTRY_CACHE',
    # 'cache.options': 'SENTRY_CACHE_OPTIONS',
    # 'system.databases': 'DATABASES',
    # 'system.debug': 'DEBUG',
    'system.secret-key': 'SECRET_KEY',
    'mail.backend': 'EMAIL_BACKEND',
    'mail.host': 'EMAIL_HOST',
    'mail.port': 'EMAIL_PORT',
    'mail.username': 'EMAIL_HOST_USER',
    'mail.password': 'EMAIL_HOST_PASSWORD',
    'mail.use-tls': 'EMAIL_USE_TLS',
    'mail.from': 'SERVER_EMAIL',
    'mail.subject-prefix': 'EMAIL_SUBJECT_PREFIX',
}


def bootstrap_options(settings, config=None):
    """
    Quickly bootstrap options that come in from a config file
    and convert options into Django settings that are
    required to even initialize the rest of the app.
    """
    # Make sure our options have gotten registered
    from sentry.options import load_defaults
    load_defaults()

    options = {}
    if config is not None:
        # Attempt to load our config yaml file
        from sentry.utils.yaml import safe_load
        from yaml.parser import ParserError
        from yaml.scanner import ScannerError
        try:
            with open(config, 'rb') as fp:
                options = safe_load(fp)
        except IOError:
            # Gracefully fail if yaml file doesn't exist
            pass
        except (AttributeError, ParserError, ScannerError) as e:
            from .importer import ConfigurationError
            raise ConfigurationError('Malformed config.yml file: %s' % six.text_type(e))

        # Empty options file, so fail gracefully
        if options is None:
            options = {}
        # Options needs to be a dict
        elif not isinstance(options, dict):
            from .importer import ConfigurationError
            raise ConfigurationError('Malformed config.yml file')

    from sentry.conf.server import DEAD

    # First move options from settings into options
    for k, v in six.iteritems(options_mapper):
        if getattr(settings, v, DEAD) is not DEAD and k not in options:
            warnings.warn(
                DeprecatedSettingWarning(
                    options_mapper[k],
                    "SENTRY_OPTIONS['%s']" % k,
                )
            )
            options[k] = getattr(settings, v)

    # Stuff everything else into SENTRY_OPTIONS
    # these will be validated later after bootstrapping
    for k, v in six.iteritems(options):
        settings.SENTRY_OPTIONS[k] = v

    # Now go back through all of SENTRY_OPTIONS and promote
    # back into settings. This catches the case when values are defined
    # only in SENTRY_OPTIONS and no config.yml file
    for o in (settings.SENTRY_DEFAULT_OPTIONS, settings.SENTRY_OPTIONS):
        for k, v in six.iteritems(o):
            if k in options_mapper:
                # Map the mail.backend aliases to something Django understands
                if k == 'mail.backend':
                    try:
                        v = settings.SENTRY_EMAIL_BACKEND_ALIASES[v]
                    except KeyError:
                        pass
                # Escalate the few needed to actually get the app bootstrapped into settings
                setattr(settings, options_mapper[k], v)
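# Minimal sketch of the promotion above (hypothetical values): an option that
# only exists in SENTRY_OPTIONS ends up mirrored back into the corresponding
# Django setting via options_mapper.
#
#     settings.SENTRY_OPTIONS = {'mail.host': 'smtp.example.com'}
#     bootstrap_options(settings)
#     assert settings.EMAIL_HOST == 'smtp.example.com'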
def configure_structlog():
    """
    Make structlog comply with all of our options.
    """
    from django.conf import settings
    import logging
    import structlog
    from sentry import options
    from sentry.logging import LoggingFormat
    WrappedDictClass = structlog.threadlocal.wrap_dict(dict)
    kwargs = {
        'context_class': WrappedDictClass,
        'wrapper_class': structlog.stdlib.BoundLogger,
        'cache_logger_on_first_use': True,
        'processors': [
            structlog.stdlib.add_log_level,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.format_exc_info,
            structlog.processors.StackInfoRenderer(),
        ]
    }

    fmt_from_env = os.environ.get('SENTRY_LOG_FORMAT')
    if fmt_from_env:
        settings.SENTRY_OPTIONS['system.logging-format'] = fmt_from_env.lower()

    fmt = options.get('system.logging-format')

    if fmt == LoggingFormat.HUMAN:
        from sentry.logging.handlers import HumanRenderer
        kwargs['processors'].extend([
            structlog.processors.ExceptionPrettyPrinter(),
            HumanRenderer(),
        ])
    elif fmt == LoggingFormat.MACHINE:
        from sentry.logging.handlers import JSONRenderer
        kwargs['processors'].append(JSONRenderer())

    structlog.configure(**kwargs)

    lvl = os.environ.get('SENTRY_LOG_LEVEL')

    if lvl and lvl not in logging._levelNames:
        raise AttributeError('%s is not a valid logging level.' % lvl)

    settings.LOGGING['root'].update({
        'level': lvl or settings.LOGGING['default_level']
    })

    if lvl:
        for logger in settings.LOGGING['overridable']:
            try:
                settings.LOGGING['loggers'][logger].update({
                    'level': lvl
                })
            except KeyError:
                raise KeyError('%s is not a defined logger.' % logger)

    logging.config.dictConfig(settings.LOGGING)


def initialize_app(config, skip_backend_validation=False):
    settings = config['settings']

    bootstrap_options(settings, config['options'])
    configure_structlog()
    fix_south(settings)

    apply_legacy_settings(settings)

    bind_cache_to_option_store()

    install_plugin_apps(settings)

    # Commonly, setups don't correctly configure themselves for production
    # environments, so let's try to provide a bit more guidance.
    if settings.CELERY_ALWAYS_EAGER and not settings.DEBUG:
        warnings.warn('Sentry is configured to run asynchronous tasks in-process. '
                      'This is not recommended within production environments. '
                      'See https://docs.sentry.io/on-premise/server/queue/ for more information.')

    if settings.SENTRY_SINGLE_ORGANIZATION:
        settings.SENTRY_FEATURES['organizations:create'] = False

    if not hasattr(settings, 'SUDO_COOKIE_SECURE'):
        settings.SUDO_COOKIE_SECURE = getattr(settings, 'SESSION_COOKIE_SECURE', False)
    if not hasattr(settings, 'SUDO_COOKIE_DOMAIN'):
        settings.SUDO_COOKIE_DOMAIN = getattr(settings, 'SESSION_COOKIE_DOMAIN', None)
    if not hasattr(settings, 'SUDO_COOKIE_PATH'):
        settings.SUDO_COOKIE_PATH = getattr(settings, 'SESSION_COOKIE_PATH', '/')

    if not hasattr(settings, 'CSRF_COOKIE_SECURE'):
        settings.CSRF_COOKIE_SECURE = getattr(settings, 'SESSION_COOKIE_SECURE', False)
    if not hasattr(settings, 'CSRF_COOKIE_DOMAIN'):
        settings.CSRF_COOKIE_DOMAIN = getattr(settings, 'SESSION_COOKIE_DOMAIN', None)
    if not hasattr(settings, 'CSRF_COOKIE_PATH'):
        settings.CSRF_COOKIE_PATH = getattr(settings, 'SESSION_COOKIE_PATH', '/')

    settings.CACHES['default']['VERSION'] = settings.CACHE_VERSION

    settings.ASSET_VERSION = get_asset_version(settings)
    settings.STATIC_URL = settings.STATIC_URL.format(
        version=settings.ASSET_VERSION,
    )

    register_plugins(settings)

    initialize_receivers()

    validate_options(settings)

    if not skip_backend_validation:
        validate_backends()

    from django.utils import timezone
    from sentry.app import env
    from sentry.runner.settings import get_sentry_conf
    env.data['config'] = get_sentry_conf()
    env.data['start_date'] = timezone.now()


def validate_backends():
    from sentry import app

    backends = (
        app.buffer,
        app.digests,
        app.nodestore,
        app.quotas,
        app.ratelimiter,
        app.search,
        app.tsdb,
    )

    for backend in backends:
        backend.validate()


def validate_options(settings):
    from sentry.options import default_manager
    default_manager.validate(settings.SENTRY_OPTIONS, warn=True)


def fix_south(settings):
    settings.SOUTH_DATABASE_ADAPTERS = {}

    # South needs an adapter defined conditionally
    for key, value in six.iteritems(settings.DATABASES):
        if value['ENGINE'] != 'sentry.db.postgres':
            continue
        settings.SOUTH_DATABASE_ADAPTERS[key] = 'south.db.postgresql_psycopg2'


def bind_cache_to_option_store():
    # The default ``OptionsStore`` instance is initialized without the cache
    # backend attached. The store itself utilizes the cache during normal
    # operation, but can't use the cache before the options (which typically
    # includes the cache configuration) have been bootstrapped from the legacy
    # settings and/or configuration values. Those options should have been
    # loaded at this point, so we can plug in the cache backend before
    # continuing to initialize the remainder of the application.
    from sentry.cache import default_cache
    from sentry.options import default_store

    default_store.cache = default_cache


def show_big_error(message):
    if isinstance(message, six.string_types):
        lines = message.splitlines()
    else:
        lines = message
    maxline = max(map(len, lines))
    click.echo('', err=True)
    click.secho('!! %s !!' % ('!' * min(maxline, 80),), err=True, fg='red')
    for line in lines:
        click.secho('!! %s !!' % line.center(maxline), err=True, fg='red')
    click.secho('!! %s !!' % ('!' * min(maxline, 80),), err=True, fg='red')
    click.echo('', err=True)


def apply_legacy_settings(settings):
    from sentry import options

    # SENTRY_USE_QUEUE used to determine if Celery was eager or not
    if hasattr(settings, 'SENTRY_USE_QUEUE'):
        warnings.warn(
            DeprecatedSettingWarning(
                'SENTRY_USE_QUEUE',
                'CELERY_ALWAYS_EAGER',
                'https://docs.sentry.io/on-premise/server/queue/',
            )
        )
        settings.CELERY_ALWAYS_EAGER = (not settings.SENTRY_USE_QUEUE)

    for old, new in (
        ('SENTRY_ADMIN_EMAIL', 'system.admin-email'),
        ('SENTRY_URL_PREFIX', 'system.url-prefix'),
        ('SENTRY_SYSTEM_MAX_EVENTS_PER_MINUTE', 'system.rate-limit'),
        ('SENTRY_ENABLE_EMAIL_REPLIES', 'mail.enable-replies'),
        ('SENTRY_SMTP_HOSTNAME', 'mail.reply-hostname'),
        ('MAILGUN_API_KEY', 'mail.mailgun-api-key'),
    ):
        if new not in settings.SENTRY_OPTIONS and hasattr(settings, old):
            warnings.warn(
                DeprecatedSettingWarning(old, "SENTRY_OPTIONS['%s']" % new))
            settings.SENTRY_OPTIONS[new] = getattr(settings, old)

    if hasattr(settings, 'SENTRY_REDIS_OPTIONS'):
        if 'redis.clusters' in settings.SENTRY_OPTIONS:
            raise Exception("Cannot specify both SENTRY_OPTIONS['redis.clusters'] option and SENTRY_REDIS_OPTIONS setting.")
        else:
            warnings.warn(
                DeprecatedSettingWarning(
                    'SENTRY_REDIS_OPTIONS',
                    'SENTRY_OPTIONS["redis.clusters"]',
                    removed_in_version='8.5',
                )
            )
            settings.SENTRY_OPTIONS['redis.clusters'] = {
                'default': settings.SENTRY_REDIS_OPTIONS,
            }
    else:
        # Provide backwards compatibility to plugins expecting there to be a
        # ``SENTRY_REDIS_OPTIONS`` setting by using the ``default`` cluster.
        # This should be removed when ``SENTRY_REDIS_OPTIONS`` is officially
        # deprecated. (This also assumes ``FLAG_NOSTORE`` on the configuration
        # option.)
        settings.SENTRY_REDIS_OPTIONS = options.get('redis.clusters')['default']

    if not hasattr(settings, 'SENTRY_URL_PREFIX'):
        url_prefix = options.get('system.url-prefix', silent=True)
        if not url_prefix:
            # HACK: We need to have some value here for backwards compatibility
            url_prefix = 'http://sentry.example.com'
        settings.SENTRY_URL_PREFIX = url_prefix

    if settings.TIME_ZONE != 'UTC':
        # non-UTC timezones are not supported
        show_big_error('TIME_ZONE should be set to UTC')

    # Set ALLOWED_HOSTS if it's not already available
    if not settings.ALLOWED_HOSTS:
        settings.ALLOWED_HOSTS = ['*']

    if hasattr(settings, 'SENTRY_ALLOW_REGISTRATION'):
        warnings.warn(DeprecatedSettingWarning('SENTRY_ALLOW_REGISTRATION', 'SENTRY_FEATURES["auth:register"]'))
        settings.SENTRY_FEATURES['auth:register'] = settings.SENTRY_ALLOW_REGISTRATION

    settings.DEFAULT_FROM_EMAIL = settings.SENTRY_OPTIONS.get(
        'mail.from', settings.SENTRY_DEFAULT_OPTIONS.get('mail.from'))

    # HACK(mattrobenolt): This is a one-off assertion for a system.secret-key value.
    # If this becomes a pattern, we could add another flag to the OptionsManager to cover this, but for now
    # this is the only value that should prevent the app from booting up. Currently FLAG_REQUIRED is used to
    # trigger the Installation Wizard, not abort startup.
    if not settings.SENTRY_OPTIONS.get('system.secret-key'):
        from .importer import ConfigurationError
        raise ConfigurationError("`system.secret-key` MUST be set. Use 'sentry config generate-secret-key' to get one.")


def skip_migration_if_applied(settings, app_name, table_name,
                              name='0001_initial'):
    from south.migration import Migrations
    from sentry.utils.db import table_exists
    import types

    if app_name not in settings.INSTALLED_APPS:
        return

    migration = Migrations(app_name)[name]

    def skip_if_table_exists(original):
        def wrapped(self):
            # TODO: look into why we're having to return some ridiculous
            # lambda
            if table_exists(table_name):
                return lambda x=None: None
            return original()
        wrapped.__name__ = original.__name__
        return wrapped

    migration.forwards = types.MethodType(
        skip_if_table_exists(migration.forwards), migration)


def on_configure(config):
    """
    Executes after settings are fully installed and configured.

    At this point we can force import on various things such as models
    as all of settings should be correctly configured.
    """
    settings = config['settings']
    skip_migration_if_applied(
        settings, 'social_auth', 'social_auth_association')
| {
"content_hash": "235db805381a23cae7d80d3a2c90fbec",
"timestamp": "",
"source": "github",
"line_count": 451,
"max_line_length": 124,
"avg_line_length": 35.60088691796009,
"alnum_prop": 0.6420029895366218,
"repo_name": "alexm92/sentry",
"id": "d132422295167a90a6bf8488bd82f559e8d920da",
"size": "16056",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/sentry/runner/initializer.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "156715"
},
{
"name": "HTML",
"bytes": "191265"
},
{
"name": "JavaScript",
"bytes": "457236"
},
{
"name": "Makefile",
"bytes": "4689"
},
{
"name": "Python",
"bytes": "7262450"
}
],
"symlink_target": ""
} |
import time
import Netmaxiot

# Connect the Netmaxiot SPDT Relay to digital port D4
# SIG,NC,VCC,GND
relay = 4

Netmaxiot.pinMode(relay, "OUTPUT")

while True:
    try:
        # switch on for 5 seconds
        Netmaxiot.digitalWrite(relay, 1)
        print("on")
        time.sleep(5)

        # switch off for 5 seconds
        Netmaxiot.digitalWrite(relay, 0)
        print("off")
        time.sleep(5)

    except KeyboardInterrupt:
        Netmaxiot.digitalWrite(relay, 0)
        break
    except IOError:
        print("Error")
| {
"content_hash": "0db536fc752ba595dd0fc6659a64c423",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 53,
"avg_line_length": 21.32,
"alnum_prop": 0.6191369606003753,
"repo_name": "NetmaxIOT/Netmaxiot-Shield",
"id": "61c2d87e7bb5989c776cb870157472b58dd16fc9",
"size": "886",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "Software/Python/NetmaxIOT_spdt_relay.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "116885"
},
{
"name": "Makefile",
"bytes": "7061"
},
{
"name": "Perl",
"bytes": "96047"
},
{
"name": "Python",
"bytes": "39609"
},
{
"name": "Roff",
"bytes": "14045"
},
{
"name": "Shell",
"bytes": "479"
}
],
"symlink_target": ""
} |
"""Tests the engine."""
import unittest
from dfvfs.helpers import fake_file_system_builder
from dfvfs.lib import definitions as dfvfs_definitions
from dfvfs.path import factory as path_spec_factory
from dfvfs.path import path_spec
from dfvfs.resolver import context
from dfvfs.vfs import file_system as dfvfs_file_system
from plaso.engine import configurations
from plaso.engine import engine
from plaso.storage.fake import writer as fake_writer
from tests import test_lib as shared_test_lib
class TestEngine(engine.BaseEngine):
"""Class that defines the processing engine for testing."""
def __init__(self):
"""Initialize a test engine object."""
file_system_builder = fake_file_system_builder.FakeFileSystemBuilder()
test_file_path = shared_test_lib.GetTestFilePath(['SOFTWARE'])
file_system_builder.AddFileReadData(
'/Windows/System32/config/SOFTWARE', test_file_path)
test_file_path = shared_test_lib.GetTestFilePath(['SYSTEM'])
file_system_builder.AddFileReadData(
'/Windows/System32/config/SYSTEM', test_file_path)
super(TestEngine, self).__init__()
self._file_system = file_system_builder.file_system
self._mount_point = path_spec_factory.Factory.NewPathSpec(
dfvfs_definitions.TYPE_INDICATOR_FAKE, location='/')
def GetSourceFileSystem(self, source_path_spec, resolver_context=None):
"""Retrieves the file system of the source.
Args:
source_path_spec (dfvfs.PathSpec): path specifications of the sources
to process.
resolver_context (dfvfs.Context): resolver context.
Returns:
tuple: containing:
dfvfs.FileSystem: file system
path.PathSpec: mount point path specification. The mount point path
specification refers to either a directory or a volume on a storage
media device or image. It is needed by the dfVFS file system
searcher (FileSystemSearcher) to indicate the base location of
the file system
"""
return self._file_system, self._mount_point
class BaseEngineTest(shared_test_lib.BaseTestCase):
"""Tests for the engine object."""
# pylint: disable=protected-access
def testStartStopProfiling(self):
"""Tests the _StartProfiling and _StopProfiling functions."""
with shared_test_lib.TempDirectory() as temp_directory:
configuration = configurations.ProcessingConfiguration()
configuration.profiling.directory = temp_directory
configuration.profiling.profilers = set([
'memory', 'parsers', 'processing', 'serializers', 'storage',
'task_queue'])
test_engine = engine.BaseEngine()
test_engine._StartProfiling(None)
test_engine._StartProfiling(configuration.profiling)
test_engine._StopProfiling()
def testCreateSession(self):
"""Tests the CreateSession function."""
test_engine = engine.BaseEngine()
session = test_engine.CreateSession()
self.assertIsNotNone(session)
def testGetSourceFileSystem(self):
"""Tests the GetSourceFileSystem function."""
test_engine = engine.BaseEngine()
test_file_path = self._GetTestFilePath(['ímynd.dd'])
self._SkipIfPathNotExists(test_file_path)
os_path_spec = path_spec_factory.Factory.NewPathSpec(
dfvfs_definitions.TYPE_INDICATOR_OS, location=test_file_path)
source_path_spec = path_spec_factory.Factory.NewPathSpec(
dfvfs_definitions.TYPE_INDICATOR_TSK, location='/',
parent=os_path_spec)
resolver_context = context.Context()
test_file_system, test_mount_point = test_engine.GetSourceFileSystem(
source_path_spec, resolver_context=resolver_context)
self.assertIsNotNone(test_file_system)
self.assertIsInstance(test_file_system, dfvfs_file_system.FileSystem)
self.assertIsNotNone(test_mount_point)
self.assertIsInstance(test_mount_point, path_spec.PathSpec)
with self.assertRaises(RuntimeError):
test_engine.GetSourceFileSystem(None)
def testPreprocessSources(self):
"""Tests the PreprocessSources function."""
test_file_path = self._GetTestFilePath(['SOFTWARE'])
self._SkipIfPathNotExists(test_file_path)
test_file_path = self._GetTestFilePath(['SYSTEM'])
self._SkipIfPathNotExists(test_file_path)
test_artifacts_path = shared_test_lib.GetTestFilePath(['artifacts'])
self._SkipIfPathNotExists(test_artifacts_path)
test_engine = TestEngine()
source_path_spec = path_spec_factory.Factory.NewPathSpec(
dfvfs_definitions.TYPE_INDICATOR_FAKE, location='/')
session = test_engine.CreateSession()
storage_writer = fake_writer.FakeStorageWriter()
storage_writer.Open()
test_engine.PreprocessSources(
test_artifacts_path, None, [source_path_spec], session, storage_writer)
operating_system = test_engine.knowledge_base.GetValue('operating_system')
self.assertEqual(operating_system, 'Windows NT')
test_engine.PreprocessSources(
test_artifacts_path, None, [None], session, storage_writer)
if __name__ == '__main__':
unittest.main()
| {
"content_hash": "7545bad3a27c67f9c0f9c1dfd234740b",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 79,
"avg_line_length": 35.173611111111114,
"alnum_prop": 0.717670286278381,
"repo_name": "log2timeline/plaso",
"id": "1f669ab71f2c1c69a5b8f23e183f13342dc85525",
"size": "5113",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "tests/engine/engine.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "4301"
},
{
"name": "Makefile",
"bytes": "122"
},
{
"name": "PowerShell",
"bytes": "1305"
},
{
"name": "Python",
"bytes": "5345186"
},
{
"name": "Shell",
"bytes": "27279"
},
{
"name": "YARA",
"bytes": "507"
}
],
"symlink_target": ""
} |
"""
.. moduleauthor:: Gabriel Martin Becedillas Ruiz <[email protected]>
"""
import os
import sys
sys.path.append(os.path.join("..", "symbols"))
sys.path.append(os.path.join("..", "..")) # For pyalgotrade
import pyalgotrade.logger
pyalgotrade.logger.file_log = "download_data.log"
logger = pyalgotrade.logger.getLogger("download_data")
from pyalgotrade.tools import yahoofinance
import symbolsxml
storage = "data"
def get_csv_filename(symbol, year):
return os.path.join(storage, "%s-%d-yahoofinance.csv" % (symbol, year))
def download_files_for_symbol(symbol, fromYear, toYear):
if not os.path.exists(storage):
logger.info("Creating %s directory" % (storage))
os.mkdir(storage)
status = ""
for year in range(fromYear, toYear+1):
fileName = get_csv_filename(symbol, year)
if not os.path.exists(fileName):
logger.info("Downloading %s %d to %s" % (symbol, year, fileName))
try:
yahoofinance.download_daily_bars(symbol, year, fileName)
status += "1"
except Exception, e:
logger.error(str(e))
status += "0"
else:
status += "1"
if status.find("1") == -1:
logger.fatal("No data found for %s" % (symbol))
elif status.lstrip("0").find("0") != -1:
logger.fatal("Some bars are missing for %s" % (symbol))
def main():
fromYear = 2000
toYear = 2013
try:
symbolsFile = os.path.join("..", "symbols", "merval.xml")
callback = lambda stock: download_files_for_symbol(stock.getTicker(), fromYear, toYear)
symbolsxml.parse(symbolsFile, callback, callback)
except Exception, e:
logger.error(str(e))
main()
| {
"content_hash": "106727a298ad3fc5b564f333bd0ba35f",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 95,
"avg_line_length": 27.857142857142858,
"alnum_prop": 0.6148148148148148,
"repo_name": "cgqyh/pyalgotrade-mod",
"id": "91cd4f45d577c0ef5a3ff8fd33457aecdd2c6fa5",
"size": "2369",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/yahoodbfeed/download_data.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1066824"
},
{
"name": "Shell",
"bytes": "504"
}
],
"symlink_target": ""
} |
import logging, threading, time, socket, httplib, os
from urlparse import urlparse

from py2030.base_component import BaseComponent
from BaseHTTPServer import HTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler


def createRequestHandler(event_manager = None, _options = {}):
    class CustomHandler(SimpleHTTPRequestHandler, object):
        def __init__(self, *args, **kwargs):
            # do_stuff_with(self, init_args)
            self.options = _options
            self.root_path = self.options['serve'] if 'serve' in _options else '.'
            self.event_manager = event_manager
            self.logger = logging.getLogger(__name__)
            self.response_code = None
            self.response_type = None
            self.response_content = None

            if 'verbose' in self.options and self.options['verbose']:
                self.logger.setLevel(logging.DEBUG)

            if 'response_content_event' in self.options and self.event_manager:
                self.event_manager.get(self.options['response_content_event']).subscribe(self._onResponseContent)

            super(CustomHandler, self).__init__(*args, **kwargs)

        def process_request(self):
            # print("PATH: " + self.path)
            urlParseResult = urlparse(self.path)
            # print("URLPARSERESULT:", urlParseResult)

            if self.event_manager != None and 'output_events' in self.options:
                if urlParseResult.path in self.options['output_events']:
                    self.event_manager.fire(self.options['output_events'][urlParseResult.path])
                    self.response_code = 200

            if self.event_manager != None and 'output_options' in self.options:
                if urlParseResult.path in self.options['output_options']:
                    event_name = self.options['output_options'][urlParseResult.path]
                    event = self.event_manager.get(event_name)
                    opts = {}

                    try:
                        opts = dict(qc.split("=") for qc in urlParseResult.query.split("&"))
                    except ValueError as err:
                        opts = {}

                    # self.logger.warn('triggering options event `'+event_name+'` with: '+str(opts))
                    self.response_code = 200
                    event.fire(opts)

            if 'responses' in self.options and urlParseResult.path in self.options['responses']:
                self.response_content = self.options['responses'][urlParseResult.path]
                # self.send_response(200)
                # self.send_header("Content-type", "text/plain")
                # self.end_headers()
                # # print('headers done')
                # self.wfile.write()
                # self.wfile.close()

            if self.response_code == None:
                self.send_response(404)
                self.end_headers()
                self.wfile.close()
                return False

            self.send_response(self.response_code)
            self.send_header("Content-type", self.response_type if self.response_type else "text/plain")
            self.end_headers()
            if self.response_content:
                self.wfile.write(self.response_content)
            self.wfile.close()
            return True

        def do_HEAD(self):
            if self.process_request():
                return
            super(CustomHandler, self).do_HEAD()

        def do_GET(self):
            if self.process_request():
                return
            super(CustomHandler, self).do_GET()

        def do_POST(self):
            if self.process_request():
                return
            super(CustomHandler, self).do_POST()

        def translate_path(self, path):
            if self.event_manager != None and 'output_events' in self.options:
                if path in self.options['output_events']:
                    self.event_manager.fire(self.options['output_events'][path])
                    # self.send_error(204)
                    self.send_response(200)
                    self.wfile.write('OK')
                    self.wfile.close()
                    return ''

            relative_path = path[1:] if path.startswith('/') else path
            return SimpleHTTPRequestHandler.translate_path(self, os.path.join(self.root_path, relative_path))

        def _onResponseContent(self, json):
            # self.logger.warn('response CONTENT: '+str(json))
            self.response_type = "application/json"
            self.response_content = json

    return CustomHandler


class WebServer(BaseComponent, threading.Thread):
    config_name = 'web_servers'

    def __init__(self, options = {}):
        threading.Thread.__init__(self)
        self.options = options
        self.http_server = None
        self.event_manager = None
        self.threading_event = None
        self.daemon = True

        # attributes
        self.logger = logging.getLogger(__name__)
        if 'verbose' in options and options['verbose']:
            self.logger.setLevel(logging.DEBUG)

    def __del__(self):
        self.destroy()

    def setup(self, event_manager=None):
        self.event_manager = event_manager
        self.logger.debug("Starting http server thread")
        self.threading_event = threading.Event()
        self.threading_event.set()
        self.start()  # start thread

    def destroy(self):
        self.event_manager = None

        if not self.isAlive():
            return

        self.threading_event.clear()
        self.logger.debug('Sending dummy HTTP request to stop HTTP server from blocking...')

        try:
            connection = httplib.HTTPConnection('127.0.0.1', self.port())
            connection.request('HEAD', '/')
            connection.getresponse()
        except socket.error:
            pass

        self.join()

    # thread function
    def run(self):
        self.logger.info('Starting HTTP server on port {0}'.format(self.port()))
        HandlerClass = createRequestHandler(self.event_manager, self.options)
        self.http_server = HTTPServer(('', self.port()), HandlerClass)

        # self.httpd.serve_forever()
        # self.httpd.server_activate()
        while self.threading_event.is_set():  # not self.kill:
            try:
                self.http_server.handle_request()
            except Exception as exc:
                print('http exception:')
                print(exc)

        self.logger.info('Closing HTTP server at port {0}'.format(self.port()))
        self.http_server.server_close()
        self.http_server = None

    def port(self):
        return self.options['port'] if 'port' in self.options else 2031


# for testing
if __name__ == '__main__':
    logging.basicConfig()
    ws = WebServer({'verbose': True, 'serve': 'examples'})
    try:
        ws.setup()
        while True:
            time.sleep(.1)
    except KeyboardInterrupt:
        print('KeyboardInterrupt. Quitting.')
        ws.destroy()
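# Hypothetical configuration sketch (illustrative option values): the handler
# above maps request paths to py2030 events and canned response bodies.
#
#     ws = WebServer({
#         'port': 2031,
#         'serve': 'public',
#         'output_events': {'/start': 'startEvent'},     # GET /start fires 'startEvent'
#         'output_options': {'/effect': 'effectEvent'},  # query params become event options
#         'responses': {'/ping': 'pong'},                # GET /ping returns 'pong'
#     })
#     ws.setup(event_manager)  # event_manager assumed to come from py2030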
| {
"content_hash": "4b911bdafad727c488bb5c95b99afcca",
"timestamp": "",
"source": "github",
"line_count": 188,
"max_line_length": 113,
"avg_line_length": 37.23936170212766,
"alnum_prop": 0.5693472361091273,
"repo_name": "markkorput/py2030",
"id": "18d6d9d9f6a7528f5faf0412dab68e48b8444c57",
"size": "7001",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "py2030/components/web_server.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "229253"
},
{
"name": "Shell",
"bytes": "506"
}
],
"symlink_target": ""
} |
"""Special pages such as the recent changes page."""
from .actions import page_missing
from .database import Page
from .database import RevisionedPage
from .utils import generate_template
from .utils import Pagination
from .utils import Response
def page_index(request):
"""Index of all pages."""
letters = {}
for page in Page.query.order_by(Page.name):
letters.setdefault(page.name.capitalize()[0], []).append(page)
return Response(
generate_template("page_index.html", letters=sorted(letters.items()))
)
def recent_changes(request):
"""Display the recent changes."""
page = max(1, request.args.get("page", type=int))
query = RevisionedPage.query.order_by(RevisionedPage.revision_id.desc())
return Response(
generate_template(
"recent_changes.html",
pagination=Pagination(query, 20, page, "Special:Recent_Changes"),
)
)
def page_not_found(request, page_name):
"""
Displays an error message if a user tried to access
a not existing special page.
"""
return page_missing(request, page_name, True)
pages = {"Index": page_index, "Recent_Changes": recent_changes}
| {
"content_hash": "f707f659e4d1a64ecfed831627cfd90b",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 77,
"avg_line_length": 29.6,
"alnum_prop": 0.674831081081081,
"repo_name": "pallets/werkzeug",
"id": "2c286f5b562c72d5fb1064b4d5eade01b8fc5e9e",
"size": "1184",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "examples/simplewiki/specialpages.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "6078"
},
{
"name": "HTML",
"bytes": "124"
},
{
"name": "JavaScript",
"bytes": "10521"
},
{
"name": "Python",
"bytes": "1095568"
}
],
"symlink_target": ""
} |
"""Util for modifying the GRR server configuration."""
import argparse
import ConfigParser
import getpass
import json
import os
import re
# importing readline enables the raw_input calls to have history etc.
import readline # pylint: disable=unused-import
import socket
import sys
import urlparse
# pylint: disable=unused-import,g-bad-import-order
from grr.lib import server_plugins
# pylint: enable=g-bad-import-order,unused-import
from grr.lib import access_control
from grr.lib import aff4
from grr.lib import artifact
from grr.lib import artifact_registry
from grr.lib import config_lib
from grr.lib import flags
from grr.lib import key_utils
from grr.lib import maintenance_utils
from grr.lib import rdfvalue
from grr.lib import startup
from grr.lib import utils
from grr.lib.aff4_objects import users
class Error(Exception):
"""Base error class."""
pass
class UserError(Error):
pass
parser = flags.PARSER
parser.description = ("Set configuration parameters for the GRR Server."
"\nThis script has numerous subcommands to perform "
"various actions. When you are first setting up, you "
"probably only care about 'initialize'.")
# Generic arguments.
parser.add_argument(
"--share_dir", default="/usr/share/grr",
help="Path to the directory containing grr data.")
subparsers = parser.add_subparsers(
title="subcommands", dest="subparser_name", description="valid subcommands")
# Subparsers.
parser_memory = subparsers.add_parser(
"load_memory_drivers", help="Load memory drivers from disk to database.")
parser_generate_keys = subparsers.add_parser(
"generate_keys", help="Generate crypto keys in the configuration.")
parser_repack_clients = subparsers.add_parser(
"repack_clients",
help="Repack the clients binaries with the current configuration.")
parser_initialize = subparsers.add_parser(
"initialize",
help="Run all the required steps to setup a new GRR install.")
parser_set_var = subparsers.add_parser(
"set_var", help="Set a config variable.")
# Update an existing user.
parser_update_user = subparsers.add_parser(
"update_user", help="Update user settings.")
parser_update_user.add_argument("username", help="Username to update.")
parser_update_user.add_argument(
"--password", default=False, action="store_true",
help="Reset the password for this user (will prompt for password).")
parser_update_user.add_argument(
"--add_labels", default=[], action="append",
help="Add labels to the user object. These are used to control access.")
parser_update_user.add_argument(
"--delete_labels", default=[], action="append",
help="Delete labels from the user object. These are used to control access."
)
parser_add_user = subparsers.add_parser(
"add_user", help="Add a new user.")
parser_add_user.add_argument("username", help="Username to create.")
parser_add_user.add_argument("--password", default=None, help="Set password.")
parser_add_user.add_argument(
"--labels", default=[], action="append",
help="Create user with labels. These are used to control access.")
parser_add_user.add_argument(
"--noadmin", default=False, action="store_true",
help="Don't create the user as an administrator.")
parser_initialize.add_argument(
"--external_hostname", default=None,
help="External hostname to use.")
parser_initialize.add_argument(
"--admin_password", default=None,
help="Admin password for web interface.")
parser_initialize.add_argument(
"--noprompt", default=False, action="store_true",
help="Set to avoid prompting during initialize.")
parser_set_var.add_argument("var", help="Variable to set.")
parser_set_var.add_argument("val", help="Value to set.")
def AddUser(username, password=None, labels=None, token=None):
"""Implementation of the add_user command."""
try:
if aff4.FACTORY.Open("aff4:/users/%s" % username, "GRRUser",
token=token):
raise UserError("Cannot add user %s: User already exists." % username)
except aff4.InstantiationError:
pass
fd = aff4.FACTORY.Create("aff4:/users/%s" % username,
"GRRUser", mode="rw", token=token)
# Note this accepts blank passwords as valid.
if password is None:
password = getpass.getpass(
prompt="Please enter password for user '%s': " % username)
fd.SetPassword(password)
if labels:
fd.AddLabels(*set(labels), owner="GRR")
fd.Close()
print "Added user %s." % username
def UpdateUser(username, password, add_labels=None, delete_labels=None,
token=None):
"""Implementation of the update_user command."""
try:
fd = aff4.FACTORY.Open("aff4:/users/%s" % username,
"GRRUser", mode="rw", token=token)
except aff4.InstantiationError:
raise UserError("User %s does not exist." % username)
# Note this accepts blank passwords as valid.
if password:
if not isinstance(password, basestring):
password = getpass.getpass(
prompt="Please enter password for user '%s': " % username)
fd.SetPassword(password)
# Use sets to dedup input.
current_labels = set()
# Build a list of existing labels.
for label in fd.GetLabels():
current_labels.add(label.name)
# Build a list of labels to be added.
expanded_add_labels = set()
if add_labels:
for label in add_labels:
# Split up any space or comma separated labels in the list.
labels = label.split(",")
expanded_add_labels.update(labels)
# Build a list of labels to be removed.
expanded_delete_labels = set()
if delete_labels:
for label in delete_labels:
# Split up any space or comma separated labels in the list.
labels = label.split(",")
expanded_delete_labels.update(labels)
# Set subtraction to remove labels being added and deleted at the same time.
clean_add_labels = expanded_add_labels - expanded_delete_labels
clean_del_labels = expanded_delete_labels - expanded_add_labels
# Create final list using difference to only add new labels.
final_add_labels = clean_add_labels - current_labels
# Create final list using intersection to only remove existing labels.
final_del_labels = clean_del_labels & current_labels
if final_add_labels:
fd.AddLabels(*final_add_labels, owner="GRR")
if final_del_labels:
fd.RemoveLabels(*final_del_labels, owner="GRR")
fd.Close()
print "Updated user %s" % username
ShowUser(username, token=token)
# Delete an existing user.
parser_update_user = subparsers.add_parser(
"delete_user", help="Delete an user account.")
parser_update_user.add_argument("username", help="Username to update.")
def DeleteUser(username, token=None):
try:
aff4.FACTORY.Open("aff4:/users/%s" % username, "GRRUser", token=token)
except aff4.InstantiationError:
print "User %s not found." % username
return
aff4.FACTORY.Delete("aff4:/users/%s" % username, token=token)
print "User %s has been deleted." % username
# Show user account.
parser_show_user = subparsers.add_parser(
"show_user", help="Display user settings or list all users.")
parser_show_user.add_argument(
"username", default=None, nargs="?",
help="Username to display. If not specified, list all users.")
def ShowUser(username, token=None):
"""Implementation of the show_user command."""
if username is None:
fd = aff4.FACTORY.Open("aff4:/users", token=token)
for user in fd.OpenChildren():
if isinstance(user, users.GRRUser):
print user.Describe()
else:
user = aff4.FACTORY.Open("aff4:/users/%s" % username, token=token)
if isinstance(user, users.GRRUser):
print user.Describe()
else:
print "User %s not found" % username
# Generate Keys Arguments
parser_generate_keys.add_argument(
"--overwrite", default=False, action="store_true",
help="Required to overwrite existing keys.")
# Repack arguments.
parser_repack_clients.add_argument(
"--upload", default=True, action="store_false",
help="Upload the client binaries to the datastore.")
# Parent parser used in other upload based parsers.
parser_upload_args = argparse.ArgumentParser(add_help=False)
parser_upload_signed_args = argparse.ArgumentParser(add_help=False)
# Upload arguments.
parser_upload_args.add_argument(
"--file", help="The file to upload", required=True)
parser_upload_args.add_argument(
"--dest_path", required=False, default=None,
help="The destination path to upload the file to, specified in aff4: form,"
"e.g. aff4:/config/test.raw")
parser_upload_args.add_argument(
"--overwrite", default=False, action="store_true",
help="Required to overwrite existing files.")
parser_upload_signed_args.add_argument(
"--platform", required=True, choices=maintenance_utils.SUPPORTED_PLATFORMS,
default="windows",
help="The platform the file will be used on. This determines which signing"
" keys to use, and the path on the server the file will be uploaded to.")
parser_upload_signed_args.add_argument(
"--arch", required=True, choices=maintenance_utils.SUPPORTED_ARCHITECTURES,
default="amd64",
help="The architecture the file will be used on. This determines "
" the path on the server the file will be uploaded to.")
# Upload parsers.
parser_upload_raw = subparsers.add_parser(
"upload_raw", parents=[parser_upload_args],
help="Upload a raw file to an aff4 path.")
parser_upload_artifact = subparsers.add_parser(
"upload_artifact", parents=[parser_upload_args],
help="Upload a raw json artifact file.")
parser_upload_python = subparsers.add_parser(
"upload_python", parents=[parser_upload_args, parser_upload_signed_args],
help="Sign and upload a 'python hack' which can be used to execute code on "
"a client.")
parser_upload_exe = subparsers.add_parser(
"upload_exe", parents=[parser_upload_args, parser_upload_signed_args],
help="Sign and upload an executable which can be used to execute code on "
"a client.")
parser_upload_memory_driver = subparsers.add_parser(
"upload_memory_driver",
parents=[parser_upload_args, parser_upload_signed_args],
help="Sign and upload a memory driver for a specific platform.")
def LoadMemoryDrivers(grr_dir, token=None):
"""Load memory drivers from disk to database."""
for client_context in [["Platform:Darwin", "Arch:amd64"],
["Platform:Windows", "Arch:i386"],
["Platform:Windows", "Arch:amd64"]]:
file_paths = config_lib.CONFIG.Get(
"MemoryDriver.driver_files", context=client_context)
aff4_paths = config_lib.CONFIG.Get(
"MemoryDriver.aff4_paths", context=client_context)
if len(file_paths) != len(aff4_paths):
print "Length mismatch:"
print "%s.", file_paths
print "%s.", aff4_paths
raise RuntimeError("Could not find all files/aff4 paths.")
for file_path, aff4_path in zip(file_paths, aff4_paths):
f_path = os.path.join(grr_dir, file_path)
print "Signing and uploading %s to %s" % (f_path, aff4_path)
up_path = maintenance_utils.UploadSignedDriverBlob(
open(f_path).read(), aff4_path=aff4_path,
client_context=client_context, token=token)
print "uploaded %s" % up_path
def ImportConfig(filename, config):
"""Reads an old config file and imports keys and user accounts."""
sections_to_import = ["PrivateKeys"]
entries_to_import = ["Client.driver_signing_public_key",
"Client.executable_signing_public_key",
"CA.certificate",
"Frontend.certificate"]
options_imported = 0
old_config = config_lib.CONFIG.MakeNewConfig()
old_config.Initialize(filename)
for entry in old_config.raw_data.keys():
try:
section = entry.split(".")[0]
if section in sections_to_import or entry in entries_to_import:
config.Set(entry, old_config.Get(entry))
print "Imported %s." % entry
options_imported += 1
except Exception as e: # pylint: disable=broad-except
print "Exception during import of %s: %s" % (entry, e)
return options_imported
def GenerateDjangoKey(config):
"""Update a config with a random django key."""
try:
secret_key = config["AdminUI.django_secret_key"]
except ConfigParser.NoOptionError:
secret_key = "CHANGE_ME" # This is the config file default.
if not secret_key or secret_key.strip().upper() == "CHANGE_ME":
key = utils.GeneratePassphrase(length=100)
config.Set("AdminUI.django_secret_key", key)
else:
print "Not updating django_secret_key as it is already set."
def GenerateKeys(config):
"""Generate the keys we need for a GRR server."""
if not hasattr(key_utils, "MakeCACert"):
parser.error("Generate keys can only run with open source key_utils.")
if (config.Get("PrivateKeys.server_key", default=None) and
not flags.FLAGS.overwrite):
raise RuntimeError("Config %s already has keys, use --overwrite to "
"override." % config.parser)
length = config_lib.CONFIG["Server.rsa_key_length"]
print "All keys will have a bit length of %d." % length
print "Generating executable signing key"
priv_key, pub_key = key_utils.GenerateRSAKey(key_length=length)
config.Set("PrivateKeys.executable_signing_private_key", priv_key)
config.Set("Client.executable_signing_public_key", pub_key)
print "Generating driver signing key"
priv_key, pub_key = key_utils.GenerateRSAKey(key_length=length)
config.Set("PrivateKeys.driver_signing_private_key", priv_key)
config.Set("Client.driver_signing_public_key", pub_key)
print "Generating CA keys"
ca_cert, ca_pk, _ = key_utils.MakeCACert(bits=length)
cipher = None
config.Set("CA.certificate", ca_cert.as_pem())
config.Set("PrivateKeys.ca_key", ca_pk.as_pem(cipher))
print "Generating Server keys"
server_cert, server_key = key_utils.MakeCASignedCert(
"grr", ca_pk, bits=length)
config.Set("Frontend.certificate", server_cert.as_pem())
config.Set("PrivateKeys.server_key", server_key.as_pem(cipher))
print "Generating Django Secret key (used for xsrf protection etc)"
GenerateDjangoKey(config)
def RetryQuestion(question_text, output_re="", default_val=None):
"""Continually ask a question until the output_re is matched."""
while True:
if default_val is not None:
new_text = "%s [%s]: " % (question_text, default_val)
else:
new_text = "%s: " % question_text
output = raw_input(new_text) or str(default_val)
output = output.strip()
if not output_re or re.match(output_re, output):
break
else:
print "Invalid input, must match %s" % output_re
return output
def ConfigureHostnames(config):
"""This configures the hostnames stored in the config."""
if flags.FLAGS.external_hostname:
hostname = flags.FLAGS.external_hostname
else:
try:
hostname = socket.gethostname()
except (OSError, IOError):
print "Sorry, we couldn't guess your hostname.\n"
hostname = RetryQuestion("Please enter your hostname e.g. "
"grr.example.com", "^[\\.A-Za-z0-9-]+$", hostname)
print """\n\n-=Server URL=-
The Server URL specifies the URL that the clients will connect to
communicate with the server. For best results this should be publicly
accessible. By default this will be port 8080 with the URL ending in /control.
"""
location = RetryQuestion("Frontend URL", "^http://.*/control$",
"http://%s:8080/control" % hostname)
config.Set("Client.control_urls", [location])
frontend_port = urlparse.urlparse(location).port or config_lib.CONFIG.Get(
"Frontend.bind_port")
config.Set("Frontend.bind_port", frontend_port)
print """\n\n-=AdminUI URL=-:
The UI URL specifies where the Administrative Web Interface can be found.
"""
ui_url = RetryQuestion("AdminUI URL", "^http[s]*://.*$",
"http://%s:8000" % hostname)
config.Set("AdminUI.url", ui_url)
ui_port = urlparse.urlparse(ui_url).port or config_lib.CONFIG.Get(
"AdminUI.port")
config.Set("AdminUI.port", ui_port)
def ConfigureDatastore(config):
"""Set the datastore to use by prompting the user to choose."""
print """
1. SQLite (Default) - This datastore is stored on the local file system. If you
configure GRR to run as non-root be sure to allow that user access to the files.
2. MySQL - This datastore uses MySQL and requires MySQL 5.6 server or later
to be running and a user with the ability to create the GRR database and tables.
The MySQL client binaries are required for use with the MySQLdb python module as
well.
"""
datastore = RetryQuestion("Datastore", "^[1-2]$", "1")
if datastore == "1":
config.Set("Datastore.implementation", "SqliteDataStore")
datastore_location = RetryQuestion(
"Datastore Location", "^/[A-Za-z0-9/.-]+$",
config_lib.CONFIG.Get("Datastore.location"))
config.Set("Datastore.location", datastore_location)
if datastore == "2":
config.Set("Datastore.implementation", "MySQLAdvancedDataStore")
mysql_host = RetryQuestion("MySQL Host", "^[\\.A-Za-z0-9-]+$",
config_lib.CONFIG.Get("Mysql.host"))
config.Set("Mysql.host", mysql_host)
mysql_port = RetryQuestion("MySQL Port (0 for local socket)",
"^[0-9]+$",
config_lib.CONFIG.Get("Mysql.port"))
config.Set("Mysql.port", mysql_port)
mysql_database = RetryQuestion("MySQL Database", "^[A-Za-z0-9-]+$",
config_lib.CONFIG.Get("Mysql.database_name"))
config.Set("Mysql.database_name", mysql_database)
mysql_username = RetryQuestion(
"MySQL Username", "[A-Za-z0-9-]+$",
config_lib.CONFIG.Get("Mysql.database_username"))
config.Set("Mysql.database_username", mysql_username)
mysql_password = getpass.getpass(
prompt="Please enter password for database user %s: " % mysql_username)
config.Set("Mysql.database_password", mysql_password)
def ConfigureEmails(config):
"""Configure email notification addresses."""
print """\n\n-=Monitoring/Email Domain=-
Emails concerning alerts or updates must be sent to this domain.
"""
domain = RetryQuestion("Email Domain e.g example.com",
"^([\\.A-Za-z0-9-]+)*$",
config_lib.CONFIG.Get("Logging.domain"))
config.Set("Logging.domain", domain)
print """\n\n-=Alert Email Address=-
Address where monitoring events get sent, e.g. crashed clients, broken server
etc.
"""
email = RetryQuestion("Alert Email Address", "", "grr-monitoring@%s" % domain)
config.Set("Monitoring.alert_email", email)
print """\n\n-=Emergency Email Address=-
Address where high priority events such as an emergency ACL bypass are sent.
"""
emergency_email = RetryQuestion("Emergency Access Email Address", "",
"grr-emergency@%s" % domain)
config.Set("Monitoring.emergency_access_email", emergency_email)
def ConfigureBaseOptions(config):
"""Configure the basic options required to run the server."""
print "We are now going to configure the server using a bunch of questions."
print """\n\n-=GRR Datastore=-
For GRR to work each GRR server has to be able to communicate with the
datastore. To do this we need to configure a datastore.\n"""
existing_datastore = config_lib.CONFIG.Get("Datastore.implementation")
if not existing_datastore or existing_datastore == "FakeDataStore":
ConfigureDatastore(config)
else:
print """Found existing settings:
Datastore: %s""" % existing_datastore
if existing_datastore == "SqliteDataStore":
print """ Datastore Location: %s
""" % config_lib.CONFIG.Get("Datastore.location")
if existing_datastore == "MySQLAdvancedDataStore":
print """ MySQL Host: %s
MySQL Port: %s
MySQL Database: %s
MySQL Username: %s
""" % (config_lib.CONFIG.Get("Mysql.host"),
config_lib.CONFIG.Get("Mysql.port"),
config_lib.CONFIG.Get("Mysql.database_name"),
config_lib.CONFIG.Get("Mysql.database_username"))
if existing_datastore == "MongoDataStore":
print """ Mongo Host: %s
Mongo Port: %s
Mongo Database: %s
""" % (config_lib.CONFIG.Get("Mongo.server"),
config_lib.CONFIG.Get("Mongo.port"),
config_lib.CONFIG.Get("Mongo.db_name"))
if raw_input("Do you want to keep this configuration?"
" [Yn]: ").upper() == "N":
ConfigureDatastore(config)
print """\n\n-=GRR URLs=-
For GRR to work each client has to be able to communicate with the
server. To do this we normally need a public dns name or IP address to
communicate with. In the standard configuration this will be used to host both
the client facing server and the admin user interface.\n"""
existing_ui_urn = config_lib.CONFIG.Get("AdminUI.url", default=None)
existing_frontend_urn = config_lib.CONFIG.Get("Client.control_urls",
default=None)
if not existing_frontend_urn or not existing_ui_urn:
ConfigureHostnames(config)
else:
print """Found existing settings:
AdminUI URL: %s
Frontend URL(s): %s
""" % (existing_ui_urn, existing_frontend_urn)
if raw_input(
"Do you want to keep this configuration? [Yn]: ").upper() == "N":
ConfigureHostnames(config)
print """\n\n-=GRR Emails=-
GRR needs to be able to send emails for various logging and
alerting functions. The email domain will be appended to GRR user names
when sending emails to users.\n"""
existing_log_domain = config_lib.CONFIG.Get("Logging.domain", default=None)
existing_al_email = config_lib.CONFIG.Get("Monitoring.alert_email",
default=None)
existing_em_email = config_lib.CONFIG.Get("Monitoring.emergency_access_email",
default=None)
if not existing_log_domain or not existing_al_email or not existing_em_email:
ConfigureEmails(config)
else:
print """Found existing settings:
Email Domain: %s
Alert Email Address: %s
Emergency Access Email Address: %s
""" % (existing_log_domain, existing_al_email, existing_em_email)
if raw_input("Do you want to keep this configuration?"
" [Yn]: ").upper() == "N":
ConfigureEmails(config)
config.Write()
print ("Configuration parameters set. You can edit these in %s" %
config_lib.CONFIG.Get("Config.writeback"))
def AddUsers(token=None):
  """Add the initial admin user."""
  # Now initialize with our modified config.
startup.Init()
print "\nStep 3: Adding Admin User"
try:
AddUser("admin", labels=["admin"], token=token,
password=flags.FLAGS.admin_password)
except UserError:
if flags.FLAGS.noprompt:
UpdateUser("admin", password=flags.FLAGS.admin_password,
add_labels=["admin"], token=token)
else:
if ((raw_input("User 'admin' already exists, do you want to "
"reset the password? [yN]: ").upper() or "N") == "Y"):
UpdateUser("admin", password=True, add_labels=["admin"], token=token)
def ManageBinaries(config=None, token=None):
"""Load memory drivers and repack templates into installers."""
print "\nStep 4: Uploading Memory Drivers to the Database"
LoadMemoryDrivers(flags.FLAGS.share_dir, token=token)
print "\nStep 5: Repackaging clients with new configuration."
# We need to update the config to point to the installed templates now.
config.Set("ClientBuilder.executables_path", os.path.join(
flags.FLAGS.share_dir, "executables"))
# Build debug binaries, then build release binaries.
maintenance_utils.RepackAllBinaries(upload=True, debug_build=True,
token=token)
maintenance_utils.RepackAllBinaries(upload=True, token=token)
print "\nInitialization complete, writing configuration."
config.Write()
print "Please restart the service for it to take effect.\n\n"
def Initialize(config=None, token=None):
"""Initialize or update a GRR configuration."""
print "Checking write access on config %s" % config.parser
if not os.access(config.parser.filename, os.W_OK):
raise IOError("Config not writeable (need sudo?)")
print "\nStep 0: Importing Configuration from previous installation."
options_imported = 0
prev_config_file = config.Get("ConfigUpdater.old_config", default=None)
if prev_config_file and os.access(prev_config_file, os.R_OK):
print "Found config file %s." % prev_config_file
if raw_input("Do you want to import this configuration?"
" [yN]: ").upper() == "Y":
options_imported = ImportConfig(prev_config_file, config)
else:
print "No old config file found."
print "\nStep 1: Key Generation"
if config.Get("PrivateKeys.server_key", default=None):
if options_imported > 0:
print ("Since you have imported keys from another installation in the "
"last step,\nyou probably do not want to generate new keys now.")
if (raw_input("You already have keys in your config, do you want to"
" overwrite them? [yN]: ").upper() or "N") == "Y":
flags.FLAGS.overwrite = True
GenerateKeys(config)
else:
GenerateKeys(config)
print "\nStep 2: Setting Basic Configuration Parameters"
ConfigureBaseOptions(config)
AddUsers(token=token)
ManageBinaries(config, token=token)
def InitializeNoPrompt(config=None, token=None):
  """Initialize GRR with no prompts, assuming a SQLite datastore.
  This method does the minimum work necessary to configure GRR without any user
  prompting, relying heavily on config default values. The user must supply the
  external hostname and admin password; everything else is set automatically.
  Args:
    config: config object
    token: auth token
  Raises:
    ValueError: if hostname and password are not supplied.
    IOError: if the config is not writeable.
  """
if not (flags.FLAGS.external_hostname and flags.FLAGS.admin_password):
raise ValueError(
"If interactive prompting is disabled, external_hostname and "
"admin_password must be set.")
print "Checking write access on config %s" % config.parser
if not os.access(config.parser.filename, os.W_OK):
raise IOError("Config not writeable (need sudo?)")
config_dict = {}
GenerateKeys(config)
config_dict["Datastore.implementation"] = "SqliteDataStore"
hostname = flags.FLAGS.external_hostname
config_dict["Client.control_urls"] = ["http://%s:%s/control" % (
hostname, config.Get("Frontend.bind_port"))]
config_dict["AdminUI.url"] = "http://%s:%s" % (
hostname, config.Get("AdminUI.port"))
config_dict["Logging.domain"] = hostname
config_dict["Monitoring.alert_email"] = "grr-monitoring@%s" % hostname
config_dict["Monitoring.emergency_access_email"] = (
"grr-emergency@%s" % hostname)
print "Setting configuration as:\n\n%s" % config_dict
for key, value in config_dict.iteritems():
config.Set(key, value)
config.Write()
print ("Configuration parameters set. You can edit these in %s" %
config_lib.CONFIG.Get("Config.writeback"))
AddUsers(token=token)
ManageBinaries(config, token=token)
def UploadRaw(file_path, aff4_path, token=None):
"""Upload a file to the datastore."""
full_path = rdfvalue.RDFURN(aff4_path).Add(os.path.basename(file_path))
fd = aff4.FACTORY.Create(full_path, "AFF4Image", mode="w", token=token)
fd.Write(open(file_path).read(1024 * 1024 * 30))
fd.Close()
return str(fd.urn)
def GetToken():
  """Build an ACL token for maintenance operations."""
  # Extend for user authorization
return access_control.ACLToken(username="GRRConsole").SetUID()
def main(unused_argv):
"""Main."""
token = GetToken()
config_lib.CONFIG.AddContext("Commandline Context")
config_lib.CONFIG.AddContext("ConfigUpdater Context")
if flags.FLAGS.subparser_name == "initialize":
startup.ConfigInit()
if flags.FLAGS.noprompt:
InitializeNoPrompt(config_lib.CONFIG, token=token)
else:
Initialize(config_lib.CONFIG, token=token)
return
else:
startup.Init()
try:
print "Using configuration %s" % config_lib.CONFIG.parser
except AttributeError:
raise RuntimeError("No valid config specified.")
if flags.FLAGS.subparser_name == "load_memory_drivers":
LoadMemoryDrivers(flags.FLAGS.share_dir, token=token)
elif flags.FLAGS.subparser_name == "generate_keys":
try:
GenerateKeys(config_lib.CONFIG)
except RuntimeError, e:
# GenerateKeys will raise if keys exist and --overwrite is not set.
print "ERROR: %s" % e
sys.exit(1)
config_lib.CONFIG.Write()
elif flags.FLAGS.subparser_name == "repack_clients":
maintenance_utils.RepackAllBinaries(upload=flags.FLAGS.upload,
token=token)
maintenance_utils.RepackAllBinaries(upload=flags.FLAGS.upload,
debug_build=True,
token=token)
elif flags.FLAGS.subparser_name == "show_user":
ShowUser(flags.FLAGS.username, token=token)
elif flags.FLAGS.subparser_name == "update_user":
try:
UpdateUser(flags.FLAGS.username, flags.FLAGS.password,
flags.FLAGS.add_labels, flags.FLAGS.delete_labels, token=token)
except UserError as e:
print e
elif flags.FLAGS.subparser_name == "delete_user":
DeleteUser(flags.FLAGS.username, token=token)
elif flags.FLAGS.subparser_name == "add_user":
labels = []
if not flags.FLAGS.noadmin:
labels.append("admin")
if flags.FLAGS.labels:
labels.extend(flags.FLAGS.labels)
try:
AddUser(flags.FLAGS.username, flags.FLAGS.password, labels, token=token)
except UserError as e:
print e
elif flags.FLAGS.subparser_name == "upload_python":
content = open(flags.FLAGS.file).read(1024 * 1024 * 30)
aff4_path = flags.FLAGS.dest_path
if not aff4_path:
python_hack_root_urn = config_lib.CONFIG.Get("Config.python_hack_root")
aff4_path = python_hack_root_urn.Add(os.path.basename(flags.FLAGS.file))
context = ["Platform:%s" % flags.FLAGS.platform.title(),
"Client"]
maintenance_utils.UploadSignedConfigBlob(content, aff4_path=aff4_path,
client_context=context,
token=token)
elif flags.FLAGS.subparser_name == "upload_exe":
content = open(flags.FLAGS.file).read(1024 * 1024 * 30)
context = ["Platform:%s" % flags.FLAGS.platform.title(),
"Client"]
if flags.FLAGS.dest_path:
dest_path = rdfvalue.RDFURN(flags.FLAGS.dest_path)
else:
dest_path = config_lib.CONFIG.Get(
"Executables.aff4_path", context=context).Add(
os.path.basename(flags.FLAGS.file))
# Now upload to the destination.
maintenance_utils.UploadSignedConfigBlob(content, aff4_path=dest_path,
client_context=context,
token=token)
print "Uploaded to %s" % dest_path
elif flags.FLAGS.subparser_name == "upload_memory_driver":
client_context = ["Platform:%s" % flags.FLAGS.platform.title(),
"Arch:%s" % flags.FLAGS.arch]
content = open(flags.FLAGS.file).read(1024 * 1024 * 30)
if flags.FLAGS.dest_path:
uploaded = maintenance_utils.UploadSignedDriverBlob(
content, aff4_path=flags.FLAGS.dest_path,
client_context=client_context, token=token)
else:
uploaded = maintenance_utils.UploadSignedDriverBlob(
content, client_context=client_context, token=token)
print "Uploaded to %s" % uploaded
elif flags.FLAGS.subparser_name == "set_var":
config = config_lib.CONFIG
print "Setting %s to %s" % (flags.FLAGS.var, flags.FLAGS.val)
if flags.FLAGS.val.startswith("["): # Allow setting of basic lists.
flags.FLAGS.val = flags.FLAGS.val[1:-1].split(",")
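      # Example (illustrative): --val "[a,b,c]" becomes ['a', 'b', 'c'].
      # Note this simple parser keeps any whitespace after commas and does
      # not handle nested or quoted values.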
config.Set(flags.FLAGS.var, flags.FLAGS.val)
config.Write()
elif flags.FLAGS.subparser_name == "upload_raw":
if not flags.FLAGS.dest_path:
flags.FLAGS.dest_path = aff4.ROOT_URN.Add("config").Add("raw")
uploaded = UploadRaw(flags.FLAGS.file, flags.FLAGS.dest_path, token=token)
print "Uploaded to %s" % uploaded
elif flags.FLAGS.subparser_name == "upload_artifact":
json.load(open(flags.FLAGS.file)) # Check it will parse.
base_urn = aff4.ROOT_URN.Add("artifact_store")
try:
artifact.UploadArtifactYamlFile(
open(flags.FLAGS.file).read(1000000), base_urn=base_urn, token=None,
overwrite=flags.FLAGS.overwrite)
except artifact_registry.ArtifactDefinitionError as e:
print "Error %s. You may need to set --overwrite." % e
if __name__ == "__main__":
flags.StartMain(main)
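# Example invocations (illustrative; exact flag spellings depend on the flag
# definitions registered elsewhere in GRR):
#   sudo python config_updater.py initialize
#   sudo python config_updater.py initialize --noprompt \
#       --external_hostname=grr.example.com --admin_password=hunter2
#   python config_updater.py set_var --var=Frontend.bind_port --val=8080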
| {
"content_hash": "87928afa14b60d4c20b5db74f367eb9c",
"timestamp": "",
"source": "github",
"line_count": 911,
"max_line_length": 80,
"avg_line_length": 36.18441273326015,
"alnum_prop": 0.6738259919912633,
"repo_name": "ahojjati/grr",
"id": "5bf3c8901e23baef2396ae9bf5cd6d570da8eb48",
"size": "32986",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tools/config_updater.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "227"
},
{
"name": "Batchfile",
"bytes": "14896"
},
{
"name": "C",
"bytes": "10598"
},
{
"name": "C++",
"bytes": "276081"
},
{
"name": "CMake",
"bytes": "3044"
},
{
"name": "CSS",
"bytes": "12677"
},
{
"name": "Groff",
"bytes": "444"
},
{
"name": "HTML",
"bytes": "71683"
},
{
"name": "JavaScript",
"bytes": "228300"
},
{
"name": "Makefile",
"bytes": "6232"
},
{
"name": "Protocol Buffer",
"bytes": "198203"
},
{
"name": "Python",
"bytes": "5181684"
},
{
"name": "Ruby",
"bytes": "5103"
},
{
"name": "Shell",
"bytes": "43112"
},
{
"name": "Standard ML",
"bytes": "8172"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import frappe, json
class User:
"""
	A user object is created at the beginning of every request with details of the user.
The global user object is `frappe.user`
"""
def __init__(self, name=''):
self.defaults = None
self.name = name or frappe.session.get('user')
self.roles = []
self.all_read = []
self.can_create = []
self.can_read = []
self.can_write = []
self.can_cancel = []
self.can_delete = []
self.can_search = []
self.can_get_report = []
self.can_import = []
self.can_export = []
self.can_print = []
self.can_email = []
self.can_set_user_permissions = []
self.allow_modules = []
self.in_create = []
def get_roles(self):
"""get list of roles"""
if not self.roles:
self.roles = get_roles(self.name)
return self.roles
def build_doctype_map(self):
"""build map of special doctype properties"""
self.doctype_map = {}
for r in frappe.db.sql("""select name, in_create, issingle, istable,
read_only, module from tabDocType""", as_dict=1):
self.doctype_map[r['name']] = r
def build_perm_map(self):
"""build map of permissions at level 0"""
self.perm_map = {}
roles = self.get_roles()
for r in frappe.db.sql("""select * from tabDocPerm where docstatus=0
and ifnull(permlevel,0)=0
and role in ({roles})""".format(roles=", ".join(["%s"]*len(roles))), tuple(roles), as_dict=1):
dt = r['parent']
if not dt in self.perm_map:
self.perm_map[dt] = {}
for k in frappe.permissions.rights:
if not self.perm_map[dt].get(k):
self.perm_map[dt][k] = r.get(k)
def build_permissions(self):
"""build lists of what the user can read / write / create
quirks:
read_only => Not in Search
in_create => Not in create
"""
self.build_doctype_map()
self.build_perm_map()
for dt in self.doctype_map:
dtp = self.doctype_map[dt]
p = self.perm_map.get(dt, {})
if not dtp.get('istable'):
if p.get('create') and not dtp.get('issingle'):
if dtp.get('in_create'):
self.in_create.append(dt)
else:
self.can_create.append(dt)
elif p.get('write'):
self.can_write.append(dt)
elif p.get('read'):
if dtp.get('read_only'):
self.all_read.append(dt)
else:
self.can_read.append(dt)
if p.get('cancel'):
self.can_cancel.append(dt)
if p.get('delete'):
self.can_delete.append(dt)
if (p.get('read') or p.get('write') or p.get('create')):
if p.get('report'):
self.can_get_report.append(dt)
for key in ("import", "export", "print", "email", "set_user_permissions"):
if p.get(key):
getattr(self, "can_" + key).append(dt)
if not dtp.get('istable'):
if not dtp.get('issingle') and not dtp.get('read_only'):
self.can_search.append(dt)
if not dtp.get('module') in self.allow_modules:
self.allow_modules.append(dtp.get('module'))
self.can_write += self.can_create
self.can_write += self.in_create
self.can_read += self.can_write
self.all_read += self.can_read
def get_defaults(self):
import frappe.defaults
self.defaults = frappe.defaults.get_defaults(self.name)
return self.defaults
# update recent documents
def update_recent(self, dt, dn):
rdl = frappe.cache().get_value("recent:" + self.name) or []
new_rd = [dt, dn]
# clear if exists
for i in range(len(rdl)):
rd = rdl[i]
if rd==new_rd:
del rdl[i]
break
if len(rdl) > 19:
rdl = rdl[:19]
rdl = [new_rd] + rdl
		frappe.cache().set_value("recent:" + self.name, rdl)
def _get(self, key):
if not self.can_read:
self.build_permissions()
return getattr(self, key)
def get_can_read(self):
"""return list of doctypes that the user can read"""
if not self.can_read:
self.build_permissions()
return self.can_read
def load_user(self):
d = frappe.db.sql("""select email, first_name, last_name, time_zone,
email_signature, background_image, background_style, user_type, language
from tabUser where name = %s""", (self.name,), as_dict=1)[0]
if not self.can_read:
self.build_permissions()
d.name = self.name
d.recent = json.dumps(frappe.cache().get_value("recent:" + self.name) or [])
d['roles'] = self.get_roles()
d['defaults'] = self.get_defaults()
for key in ("can_create", "can_write", "can_read", "can_cancel", "can_delete",
"can_get_report", "allow_modules", "all_read", "can_search",
"in_create", "can_export", "can_import", "can_print", "can_email",
"can_set_user_permissions"):
d[key] = list(set(getattr(self, key)))
return d
def get_user_fullname(user):
fullname = frappe.db.sql("SELECT CONCAT_WS(' ', first_name, last_name) FROM `tabUser` WHERE name=%s", (user,))
return fullname and fullname[0][0] or ''
def get_system_managers(only_name=False):
"""returns all system manager's user details"""
import email.utils
from frappe.core.doctype.user.user import STANDARD_USERS
system_managers = frappe.db.sql("""select distinct name,
concat_ws(" ", if(first_name="", null, first_name), if(last_name="", null, last_name))
as fullname from tabUser p
where docstatus < 2 and enabled = 1
and name not in ({})
and exists (select * from tabUserRole ur
where ur.parent = p.name and ur.role="System Manager")""".format(", ".join(["%s"]*len(STANDARD_USERS))),
STANDARD_USERS, as_dict=True)
if only_name:
return [p.name for p in system_managers]
else:
return [email.utils.formataddr((p.fullname, p.name)) for p in system_managers]
def add_role(user, role):
	frappe.get_doc("User", user).add_roles(role)
def add_system_manager(email, first_name=None, last_name=None):
# add user
user = frappe.new_doc("User")
user.update({
"name": email,
"email": email,
"enabled": 1,
"first_name": first_name or email,
"last_name": last_name,
"user_type": "System User"
})
user.insert()
# add roles
roles = frappe.db.sql_list("""select name from `tabRole`
where name not in ("Administrator", "Guest", "All")""")
user.add_roles(*roles)
def get_roles(username=None, with_standard=True):
"""get roles of current user"""
if not username:
username = frappe.session.user
if username=='Guest':
return ['Guest']
roles = frappe.cache().get_value("roles:" + username)
if not roles:
roles = [r[0] for r in frappe.db.sql("""select role from tabUserRole
where parent=%s and role!='All'""", (username,))] + ['All']
frappe.cache().set_value("roles:" + username, roles)
# filter standard if required
if not with_standard:
roles = filter(lambda x: x not in ['All', 'Guest', 'Administrator'], roles)
return roles
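# A minimal usage sketch of the User permission API above (not called at
# import; requires an initialized frappe site, and the user name below is
# hypothetical).
def _example_usage():
	user = User('test@example.com')
	print(user.get_roles())
	print(user.load_user()['can_read'][:5])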
| {
"content_hash": "276c3855dd40c81e18cc14d81b2a7551",
"timestamp": "",
"source": "github",
"line_count": 230,
"max_line_length": 111,
"avg_line_length": 28.61304347826087,
"alnum_prop": 0.6435192220027351,
"repo_name": "gangadharkadam/office_frappe",
"id": "9095a4422a9f07bc56b0c8f569a9b30a3f03c09d",
"size": "6685",
"binary": false,
"copies": "8",
"ref": "refs/heads/develop",
"path": "frappe/utils/user.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "80527"
},
{
"name": "HTML",
"bytes": "60452"
},
{
"name": "JavaScript",
"bytes": "1182079"
},
{
"name": "Python",
"bytes": "906331"
}
],
"symlink_target": ""
} |
class SignOnException(Exception):
pass
class AppProfileException(Exception):
pass
class TransactionRequestException(Exception):
pass
class ApplicationRequestException(Exception):
pass
| {
"content_hash": "23ff1be65962f53882c9283d00823dee",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 45,
"avg_line_length": 14.714285714285714,
"alnum_prop": 0.7766990291262136,
"repo_name": "Zertifica/evosnap",
"id": "4377675c0782abdebe53eb5fe923ccacbf060815",
"size": "206",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "evosnap/exceptions.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "72821"
}
],
"symlink_target": ""
} |
import sys
import petsc4py
petsc4py.init(sys.argv)
import numpy as np
from time import time
from scipy.io import savemat
# from src.stokes_flow import problem_dic, obj_dic
from petsc4py import PETSc
from src import stokes_flow as sf
from src.myio import *
from src.objComposite import *
# from src.myvtk import save_singleEcoli_vtk
import codeStore.ecoli_common as ec
import pickle
# import import_my_lib
def get_problem_kwargs(**main_kwargs):
OptDB = PETSc.Options()
fileHandle = OptDB.getString('f', 'loop_table_FatHelix')
OptDB.setValue('f', fileHandle)
problem_kwargs = ec.get_problem_kwargs()
problem_kwargs['fileHandle'] = fileHandle
n_norm_theta = OptDB.getInt('n_norm_theta', 2)
n_norm_phi = OptDB.getInt('n_norm_phi', 2)
norm_psi = OptDB.getReal('norm_psi', 0)
problem_kwargs['n_norm_theta'] = n_norm_theta
problem_kwargs['n_norm_phi'] = n_norm_phi
problem_kwargs['norm_psi'] = norm_psi
kwargs_list = (get_shearFlow_kwargs(), main_kwargs,)
for t_kwargs in kwargs_list:
for key in t_kwargs:
problem_kwargs[key] = t_kwargs[key]
pickle_name = '%s_kwargs.pickle' % fileHandle
with open(pickle_name, 'wb') as handle:
pickle.dump(problem_kwargs, handle, protocol=4)
PETSc.Sys.Print('---->save kwargs to %s' % pickle_name)
return problem_kwargs
def print_case_info(**problem_kwargs):
caseIntro = '-->Passive fat helix in infinite shear flow case, make table. '
ec.print_case_info(caseIntro, **problem_kwargs)
n_norm_theta = problem_kwargs['n_norm_theta']
n_norm_phi = problem_kwargs['n_norm_phi']
norm_psi = problem_kwargs['norm_psi']
PETSc.Sys.Print('Loop parameter space: n_norm_theta %d, n_norm_phi %d. norm_psi %f' %
(n_norm_theta, n_norm_phi, norm_psi))
print_shearFlow_info(**problem_kwargs)
return True
def do_solve_once(problem_ff: sf.ShearFlowForceFreeProblem,
problem: sf.ShearFlowForceFreeIterateProblem,
ecoli_comp: sf.ForceFreeComposite,
fileHandle, norm_theta, norm_phi, norm_psi, planeShearRate, rank, idx, N,
iter_tor):
PETSc.Sys.Print()
PETSc.Sys.Print('%s %05d / %05d theta=%f, phi=%f, psi=%f %s' %
('#' * 25, idx, N, norm_theta, norm_phi, norm_psi, '#' * 25,))
# 1) ini guess
ref_U0 = ecoli_comp.get_ref_U()
problem_ff.create_matrix()
problem_ff.solve()
ref_U1 = ecoli_comp.get_ref_U()
PETSc.Sys.Print(' ini ref_U0 in shear flow %s' % str(ref_U0))
PETSc.Sys.Print(' ini ref_U1 in shear flow %s' % str(ref_U1))
# 2) optimize force and torque free
problem.create_matrix()
ref_U = problem.do_iterate3(ini_refU0=ref_U0, ini_refU1=ref_U1, rtol=iter_tor)
ecoli_comp.set_ref_U(ref_U)
PETSc.Sys.Print(' true ref_U in shear flow', ref_U)
tU = np.linalg.norm(ref_U[:3])
tW = np.linalg.norm(ref_U[3:])
terr = (ref_U1 - ref_U) / [tU, tU, tU, tW, tW, tW]
PETSc.Sys.Print(' error of direct method', terr)
if rank == 0:
mat_name = '%s_th%f_phi%f_psi_%f.mat' % (fileHandle, norm_theta, norm_phi, norm_psi)
savemat(mat_name, {
'norm_theta': norm_theta,
'norm_phi': norm_phi,
'norm_psi': norm_psi,
'planeShearRate': planeShearRate,
'ecoli_center': np.vstack(ecoli_comp.get_center()),
'ecoli_nodes': np.vstack([tobj.get_u_nodes() for tobj in ecoli_comp.get_obj_list()]),
'ecoli_f': np.hstack([np.zeros_like(tobj.get_force())
for tobj in ecoli_comp.get_obj_list()]).reshape(-1, 3),
'ecoli_u': np.hstack([np.zeros_like(tobj.get_re_velocity())
for tobj in ecoli_comp.get_obj_list()]).reshape(-1, 3),
'ecoli_norm': np.vstack(ecoli_comp.get_norm()),
'ecoli_U': np.vstack(ecoli_comp.get_ref_U()), }, oned_as='column', )
return True
def do_solve_once_noIter(problem_ff: sf.ShearFlowForceFreeProblem,
ecoli_comp: sf.ForceFreeComposite,
fileHandle, norm_theta, norm_phi, norm_psi, planeShearRate, rank, idx, N):
PETSc.Sys.Print()
PETSc.Sys.Print('%s %05d / %05d theta=%f, phi=%f, psi=%f %s' %
('#' * 25, idx, N, norm_theta, norm_phi, norm_psi, '#' * 25,))
problem_ff.create_matrix()
problem_ff.solve()
ref_U = ecoli_comp.get_ref_U()
PETSc.Sys.Print(' ref_U in shear flow', ref_U)
if rank == 0:
mat_name = '%s_th%f_phi%f_psi_%f.mat' % (fileHandle, norm_theta, norm_phi, norm_psi)
savemat(mat_name, {
'norm_theta': norm_theta,
'norm_phi': norm_phi,
'norm_psi': norm_psi,
'planeShearRate': planeShearRate,
'ecoli_center': np.vstack(ecoli_comp.get_center()),
'ecoli_nodes': np.vstack([tobj.get_u_nodes() for tobj in ecoli_comp.get_obj_list()]),
'ecoli_f': np.hstack([np.zeros_like(tobj.get_force())
for tobj in ecoli_comp.get_obj_list()]).reshape(-1, 3),
'ecoli_u': np.hstack([np.zeros_like(tobj.get_re_velocity())
for tobj in ecoli_comp.get_obj_list()]).reshape(-1, 3),
'ecoli_norm': np.vstack(ecoli_comp.get_norm()),
'ecoli_U': np.vstack(ecoli_comp.get_ref_U()), }, oned_as='column', )
return True
def main_fun(**main_kwargs):
comm = PETSc.COMM_WORLD.tompi4py()
rank = comm.Get_rank()
problem_kwargs = get_problem_kwargs(**main_kwargs)
print_case_info(**problem_kwargs)
fileHandle = problem_kwargs['fileHandle']
n_norm_theta = problem_kwargs['n_norm_theta']
n_norm_phi = problem_kwargs['n_norm_phi']
norm_psi = problem_kwargs['norm_psi']
N = n_norm_phi * n_norm_theta
iter_tor = 1e-3
if not problem_kwargs['restart']:
# create helix
_, tail_obj_list = createEcoli_ellipse(name='ecoli0', **problem_kwargs)
tail_obj = sf.StokesFlowObj()
tail_obj.set_name('tail_obj')
tail_obj.combine(tail_obj_list)
tail_obj.move(-tail_obj.get_u_geo().get_center())
t_norm = tail_obj.get_u_geo().get_geo_norm()
helix_comp = sf.ForceFreeComposite(center=np.zeros(3), norm=t_norm, name='helix_0')
helix_comp.add_obj(obj=tail_obj, rel_U=np.zeros(6))
helix_comp.node_rotation(helix_comp.get_norm(), norm_psi)
problem_ff = sf.ShearFlowForceFreeProblem(**problem_kwargs)
problem_ff.add_obj(helix_comp)
problem_ff.print_info()
problem_ff.create_matrix()
problem = sf.ShearFlowForceFreeIterateProblem(**problem_kwargs)
problem.add_obj(helix_comp)
problem.set_iterate_comp(helix_comp)
planeShearRate = problem_ff.get_planeShearRate()
# 1). theta=0, ecoli_norm=(0, 0, 1)
norm_theta, norm_phi = 0, 0
t2 = time()
do_solve_once(problem_ff, problem, helix_comp, fileHandle, norm_theta, norm_phi, norm_psi,
planeShearRate, rank, 0, N, iter_tor)
ref_U000 = helix_comp.get_ref_U().copy()
t3 = time()
PETSc.Sys.Print(' Current process uses: %07.3fs' % (t3 - t2))
# 2). loop over parameter space
        # With the new orientation definition, the point (norm_theta=0, norm_phi=0) is no longer singular.
for i0, norm_theta in enumerate(np.linspace(0, np.pi, n_norm_theta)):
helix_comp.set_ref_U(ref_U000)
helix_comp.node_rotation(np.array((0, 1, 0)), norm_theta)
for i1, norm_phi in enumerate(np.linspace(0, np.pi, n_norm_phi)):
t2 = time()
idx = i0 * n_norm_phi + i1 + 1
helix_comp.node_rotation(np.array((0, 0, 1)), norm_phi)
do_solve_once(problem_ff, problem, helix_comp, fileHandle, norm_theta, norm_phi,
norm_psi,
planeShearRate, rank, idx, N, iter_tor)
helix_comp.node_rotation(np.array((0, 0, 1)), -norm_phi) # rotate back
t3 = time()
PETSc.Sys.Print(' Current process uses: %07.3fs' % (t3 - t2))
helix_comp.node_rotation(np.array((0, 1, 0)), -norm_theta) # rotate back
else:
pass
return True
def main_fun_noIter(**main_kwargs):
comm = PETSc.COMM_WORLD.tompi4py()
rank = comm.Get_rank()
problem_kwargs = get_problem_kwargs(**main_kwargs)
print_case_info(**problem_kwargs)
fileHandle = problem_kwargs['fileHandle']
n_norm_theta = problem_kwargs['n_norm_theta']
n_norm_phi = problem_kwargs['n_norm_phi']
norm_psi = problem_kwargs['norm_psi']
N = n_norm_phi * n_norm_theta
if not problem_kwargs['restart']:
# create helix
_, tail_obj_list = createEcoli_ellipse(name='ecoli0', **problem_kwargs)
tail_obj = sf.StokesFlowObj()
tail_obj.set_name('tail_obj')
tail_obj.combine(tail_obj_list)
tail_obj.move(-tail_obj.get_u_geo().get_center())
t_norm = tail_obj.get_u_geo().get_geo_norm()
helix_comp = sf.ForceFreeComposite(center=np.zeros(3), norm=t_norm, name='helix_0')
helix_comp.add_obj(obj=tail_obj, rel_U=np.zeros(6))
helix_comp.node_rotation(helix_comp.get_norm(), norm_psi)
problem_ff = sf.ShearFlowForceFreeProblem(**problem_kwargs)
problem_ff.add_obj(helix_comp)
problem_ff.print_info()
problem_ff.create_matrix()
planeShearRate = problem_ff.get_planeShearRate()
# 2). loop over parameter space
        # With the new orientation definition, the point (norm_theta=0, norm_phi=0) is no longer singular.
for i0, norm_theta in enumerate(np.linspace(0, np.pi, n_norm_theta)):
helix_comp.node_rotation(np.array((0, 1, 0)), norm_theta)
for i1, norm_phi in enumerate(np.linspace(0, np.pi, n_norm_phi)):
t2 = time()
idx = i0 * n_norm_phi + i1 + 1
helix_comp.node_rotation(np.array((0, 0, 1)), norm_phi)
do_solve_once_noIter(problem_ff, helix_comp, fileHandle, norm_theta, norm_phi,
norm_psi, planeShearRate, rank, idx, N)
helix_comp.node_rotation(np.array((0, 0, 1)), -norm_phi) # rotate back
t3 = time()
PETSc.Sys.Print(' Current process uses: %07.3fs' % (t3 - t2))
helix_comp.node_rotation(np.array((0, 1, 0)), -norm_theta) # rotate back
else:
pass
return True
def test_location(**main_kwargs):
comm = PETSc.COMM_WORLD.tompi4py()
rank = comm.Get_rank()
problem_kwargs = get_problem_kwargs(**main_kwargs)
print_case_info(**problem_kwargs)
fileHandle = problem_kwargs['fileHandle']
norm_psi = problem_kwargs['norm_psi']
norm_theta = problem_kwargs['norm_theta']
norm_phi = problem_kwargs['norm_phi']
PETSc.Sys.Print('-->norm_theta=%f, norm_phi=%f' % (norm_theta, norm_phi))
iter_tor = 1e-3
if not problem_kwargs['restart']:
# create helix
_, tail_obj_list = createEcoli_ellipse(name='ecoli0', **problem_kwargs)
tail_obj = sf.StokesFlowObj()
tail_obj.set_name('tail_obj')
tail_obj.combine(tail_obj_list)
tail_obj.move(-tail_obj.get_u_geo().get_center())
t_norm = tail_obj.get_u_geo().get_geo_norm()
helix_comp = sf.ForceFreeComposite(center=np.zeros(3), norm=t_norm, name='helix_0')
helix_comp.add_obj(obj=tail_obj, rel_U=np.zeros(6))
helix_comp.node_rotation(helix_comp.get_norm(), norm_psi)
problem_ff = sf.ShearFlowForceFreeProblem(**problem_kwargs)
problem_ff.add_obj(helix_comp)
problem_ff.print_info()
problem_ff.create_matrix()
problem = sf.ShearFlowForceFreeIterateProblem(**problem_kwargs)
problem.add_obj(helix_comp)
problem.set_iterate_comp(helix_comp)
planeShearRate = problem_ff.get_planeShearRate()
helix_comp.node_rotation(np.array((0, 1, 0)), norm_theta)
helix_comp.node_rotation(np.array((0, 0, 1)), norm_phi)
do_solve_once(problem_ff, problem, helix_comp, fileHandle, norm_theta, norm_phi, norm_psi,
planeShearRate, rank, 0, 0, iter_tor)
ref_U = helix_comp.get_ref_U()
PETSc.Sys.Print(
'-->norm_theta=%f, norm_phi=%f, norm_psi=%f' % (norm_theta, norm_phi, norm_psi))
PETSc.Sys.Print('--> ref_U=%s' %
np.array2string(ref_U, separator=', ',
formatter={'float': lambda x: "%f" % x}))
else:
pass
return True
def test_location_noIter(**main_kwargs):
comm = PETSc.COMM_WORLD.tompi4py()
rank = comm.Get_rank()
problem_kwargs = get_problem_kwargs(**main_kwargs)
print_case_info(**problem_kwargs)
fileHandle = problem_kwargs['fileHandle']
norm_psi = problem_kwargs['norm_psi']
norm_theta = problem_kwargs['norm_theta']
norm_phi = problem_kwargs['norm_phi']
PETSc.Sys.Print('-->norm_theta=%f, norm_phi=%f' % (norm_theta, norm_phi))
iter_tor = 1e-3
if not problem_kwargs['restart']:
# create helix
_, tail_obj_list = createEcoli_ellipse(name='ecoli0', **problem_kwargs)
tail_obj = sf.StokesFlowObj()
tail_obj.set_name('tail_obj')
tail_obj.combine(tail_obj_list)
tail_obj.move(-tail_obj.get_u_geo().get_center())
t_norm = tail_obj.get_u_geo().get_geo_norm()
helix_comp = sf.ForceFreeComposite(center=np.zeros(3), norm=t_norm, name='helix_0')
helix_comp.add_obj(obj=tail_obj, rel_U=np.zeros(6))
helix_comp.node_rotation(helix_comp.get_norm(), norm_psi)
problem_ff = sf.ShearFlowForceFreeProblem(**problem_kwargs)
problem_ff.add_obj(helix_comp)
problem_ff.print_info()
problem_ff.create_matrix()
planeShearRate = problem_ff.get_planeShearRate()
helix_comp.node_rotation(np.array((0, 1, 0)), norm_theta)
helix_comp.node_rotation(np.array((0, 0, 1)), norm_phi)
do_solve_once_noIter(problem_ff, helix_comp, fileHandle, norm_theta, norm_phi, norm_psi,
planeShearRate, rank, 0, 0, iter_tor)
ref_U = helix_comp.get_ref_U()
PETSc.Sys.Print(
'-->norm_theta=%f, norm_phi=%f, norm_psi=%f' % (norm_theta, norm_phi, norm_psi))
PETSc.Sys.Print('--> ref_U=%s' %
np.array2string(ref_U, separator=', ',
formatter={'float': lambda x: "%f" % x}))
else:
pass
return True
if __name__ == '__main__':
OptDB = PETSc.Options()
if OptDB.getBool('main_fun_noIter', False):
OptDB.setValue('main_fun', False)
main_fun_noIter()
if OptDB.getBool('test_location', False):
OptDB.setValue('main_fun', False)
norm_theta = OptDB.getReal('norm_theta', 0)
norm_phi = OptDB.getReal('norm_phi', 0)
test_location(norm_theta=norm_theta, norm_phi=norm_phi)
if OptDB.getBool('test_location_noIter', False):
OptDB.setValue('main_fun', False)
norm_theta = OptDB.getReal('norm_theta', 0)
norm_phi = OptDB.getReal('norm_phi', 0)
test_location_noIter(norm_theta=norm_theta, norm_phi=norm_phi)
if OptDB.getBool('main_fun', True):
main_fun()
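# Example invocations (illustrative; the PETSc options mirror the OptDB keys
# read above, while remaining geometry/solver options come from
# ec.get_problem_kwargs and get_shearFlow_kwargs):
#   mpirun -n 4 python loop_table_FatHelix.py -f my_table \
#       -n_norm_theta 11 -n_norm_phi 11 -norm_psi 0.5
#   python loop_table_FatHelix.py -test_location true -norm_theta 0.3 -norm_phi 1.2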
| {
"content_hash": "1df06676009b050adcb7ae93d006812b",
"timestamp": "",
"source": "github",
"line_count": 348,
"max_line_length": 110,
"avg_line_length": 44.695402298850574,
"alnum_prop": 0.5878230680210879,
"repo_name": "pcmagic/stokes_flow",
"id": "78457a840bfff267dc8acd477b18a88d0218e249",
"size": "15570",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "head_Force/loop_table_FatHelix.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "32833"
},
{
"name": "C++",
"bytes": "221"
},
{
"name": "CSS",
"bytes": "1645"
},
{
"name": "Fortran",
"bytes": "12772"
},
{
"name": "Gnuplot",
"bytes": "2957"
},
{
"name": "HTML",
"bytes": "22464"
},
{
"name": "JavaScript",
"bytes": "9553"
},
{
"name": "Jupyter Notebook",
"bytes": "326253745"
},
{
"name": "MATLAB",
"bytes": "82969"
},
{
"name": "Makefile",
"bytes": "6488"
},
{
"name": "Mathematica",
"bytes": "765914"
},
{
"name": "Objective-C",
"bytes": "793"
},
{
"name": "Python",
"bytes": "1404660"
}
],
"symlink_target": ""
} |
"""Module for handling consistent state storage.
Attributes:
state_file(str): file path where state storage is kept. By default XDG conventions are used. (Most likely ~/.config/python-cozify/python-cozify.cfg)
state(configparser.ConfigParser): State object used for in-memory state. By default initialized with _initState.
"""
import configparser
import os
import datetime
from absl import logging
def _initXDG():
"""Initialize config path per XDG basedir-spec and resolve the final location of state file storage.
Returns:
str: file path to state file as per XDG spec and current env.
"""
# per the XDG basedir-spec we adhere to $XDG_CONFIG_HOME if it's set, otherwise assume $HOME/.config
xdg_config_home = ''
if 'XDG_CONFIG_HOME' in os.environ:
xdg_config_home = os.environ['XDG_CONFIG_HOME']
        logging.debug('XDG basedir overridden: {0}'.format(xdg_config_home))
else:
xdg_config_home = "%s/.config" % os.path.expanduser('~')
    # XDG base-dir: "If, when attempting to write a file, the destination directory is non-existent an attempt should be made to create it with permission 0700. If the destination directory exists already the permissions should not be changed."
if not os.path.isdir(xdg_config_home):
logging.debug('XDG basedir does not exist, creating: {0}'.format(xdg_config_home))
os.mkdir(xdg_config_home, 0o0700)
# finally create our own config dir
config_dir = "%s/%s" % (xdg_config_home, 'python-cozify')
if not os.path.isdir(config_dir):
logging.debug('XDG local dir does not exist, creating: {0}'.format(config_dir))
os.mkdir(config_dir, 0o0700)
state_file = "%s/python-cozify.cfg" % config_dir
logging.debug('state_file determined to be: {0}'.format(state_file))
return state_file
def stateWrite(tmpstate=None):
"""Write current state to file storage.
Args:
tmpstate(configparser.ConfigParser): State object to store instead of default state.
"""
global state_file
if tmpstate is None:
global state
tmpstate = state
with open(state_file, 'w') as cf:
tmpstate.write(cf)
def setStatePath(filepath=None, copy_current=False):
    """Set state storage path. Useful for example for testing without affecting your normal state. Call with no arguments to reset back to autoconfigured location.
    Args:
        filepath(str): file path to use as new storage location. Defaults to XDG defined path.
        copy_current(bool): Instead of initializing target file, dump previous state into it.
    """
    global state_file
    global state
    # Resolve the default lazily; a default of _initXDG() in the signature
    # would be evaluated only once, at import time.
    if filepath is None:
        filepath = _initXDG()
    state_file = filepath
    if copy_current:
        stateWrite()
    else:
        state = _initState(state_file)
def dump_state():
"""Print out current state file to stdout. Long values are truncated since this is only for visualization.
"""
for section in state.sections():
print('[{!s:.10}]'.format(section))
for option in state.options(section):
print(' {!s:<13.13} = {!s:>10.100}'.format(option, state[section][option]))
def _initState(state_file):
"""Initialize state on cold start. Any stored state is read in or a new basic state is initialized.
Args:
state_file(str): State storage filepath to attempt to read from.
Returns:
configparser.ConfigParser: State object.
"""
# if we can read it, read it in, otherwise create empty file
state = configparser.ConfigParser(allow_no_value=True)
try:
cf = open(state_file, 'r')
except IOError:
cf = open(state_file, 'w+')
os.chmod(state_file, 0o600) # set to user readwrite only to protect tokens
else:
state.read_file(cf)
# make sure config is in roughly a valid state
for key in ['Cloud', 'Hubs']:
if key not in state:
state[key] = {}
stateWrite(state)
return state
state_file = _initXDG()
state = _initState(state_file)
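# A minimal usage sketch of the API above (not called at import; the file
# path is hypothetical).
def _example_usage():
    setStatePath('/tmp/python-cozify-test.cfg')  # redirect state storage
    state['Cloud']['email'] = 'user@example.com'
    stateWrite()                                 # persist to the new path
    setStatePath()                               # reset to the XDG default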
| {
"content_hash": "f8f749a11a63c11f6a10a88d8fb96d66",
"timestamp": "",
"source": "github",
"line_count": 111,
"max_line_length": 244,
"avg_line_length": 36.054054054054056,
"alnum_prop": 0.6714142928535732,
"repo_name": "Artanicus/python-cozify",
"id": "198cf619e101b7c6e68a589ca297c780c16ef295",
"size": "4002",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "cozify/config.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "106920"
},
{
"name": "Shell",
"bytes": "50"
}
],
"symlink_target": ""
} |
"""Tests for the generate-timezone-mask-ancillary CLI."""
import pytest
from . import acceptance as acc
pytest.importorskip("timezonefinder")
pytest.importorskip("numba")
pytestmark = [pytest.mark.acc, acc.skip_if_kgo_missing]
CLI = acc.cli_name_with_dashes(__file__)
run_cli = acc.run_cli(CLI)
GRIDS = ["uk", "global"]
TIMES = ["20210615T1200Z", "20211215T1200Z"]
@pytest.mark.parametrize("time", TIMES)
@pytest.mark.parametrize("grid", GRIDS)
def test_ignoring_dst(tmp_path, time, grid):
"""Test masks generated ignoring daylight savings time. The time of year
should have no impact on the result, which is demonstrated here by use of a
common kgo for both summer and winter. The kgo is checked excluding the
validity time as this is necessarily different between summer and winter,
whilst everything else remains unchanged."""
kgo_dir = acc.kgo_root() / f"generate-timezone-mask-ancillary/{grid}/"
kgo_path = kgo_dir / "ignore_dst_kgo.nc"
input_path = kgo_dir / "input.nc"
output_path = tmp_path / "output.nc"
args = [input_path, "--time", f"{time}", "--output", output_path]
run_cli(args)
acc.compare(output_path, kgo_path, exclude_vars=["time"])
@pytest.mark.parametrize("time", TIMES)
@pytest.mark.parametrize("grid", GRIDS)
def test_with_dst(tmp_path, time, grid):
"""Test masks generated including daylight savings time. In this case the
time of year chosen will give different results."""
kgo_dir = acc.kgo_root() / f"generate-timezone-mask-ancillary/{grid}/"
kgo_path = kgo_dir / f"{time}_with_dst_kgo.nc"
input_path = kgo_dir / "input.nc"
output_path = tmp_path / "output.nc"
args = [
input_path,
"--time",
f"{time}",
"--include-dst",
"--output",
output_path,
]
run_cli(args)
acc.compare(output_path, kgo_path)
@pytest.mark.parametrize("grid", GRIDS)
def test_grouping(tmp_path, grid):
"""Test masks generated with grouping produce the expected output."""
kgo_dir = acc.kgo_root() / f"generate-timezone-mask-ancillary/{grid}/"
kgo_path = kgo_dir / "grouped_kgo.nc"
input_path = kgo_dir / "input.nc"
groups = kgo_dir / "group_config.json"
output_path = tmp_path / "output.nc"
args = [
input_path,
"--time",
"20210615T1200Z",
"--groupings",
groups,
"--output",
output_path,
]
run_cli(args)
acc.compare(output_path, kgo_path)
| {
"content_hash": "82dce2b1d2dcc9f2dcd691e902fa1c42",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 79,
"avg_line_length": 32.20779220779221,
"alnum_prop": 0.6479838709677419,
"repo_name": "fionaRust/improver",
"id": "085015d8497da10482f72d4abb99c391924a0dca",
"size": "4137",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "improver_tests/acceptance/test_generate_timezone_mask_ancillary.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "5026255"
},
{
"name": "Shell",
"bytes": "9493"
}
],
"symlink_target": ""
} |
import numpy as np
import pytest
import pandas.util.testing as tm
from pandas import date_range
from pandas._libs.tslib import iNaT
from pandas._libs.tslibs import conversion, timezones
def compare_utc_to_local(tz_didx, utc_didx):
f = lambda x: conversion.tz_convert_single(x, 'UTC', tz_didx.tz)
result = conversion.tz_convert(tz_didx.asi8, 'UTC', tz_didx.tz)
result_single = np.vectorize(f)(tz_didx.asi8)
tm.assert_numpy_array_equal(result, result_single)
def compare_local_to_utc(tz_didx, utc_didx):
f = lambda x: conversion.tz_convert_single(x, tz_didx.tz, 'UTC')
result = conversion.tz_convert(utc_didx.asi8, tz_didx.tz, 'UTC')
result_single = np.vectorize(f)(utc_didx.asi8)
tm.assert_numpy_array_equal(result, result_single)
class TestTZConvert(object):
@pytest.mark.parametrize('tz', ['UTC', 'Asia/Tokyo',
'US/Eastern', 'Europe/Moscow'])
def test_tz_convert_single_matches_tz_convert_hourly(self, tz):
# US: 2014-03-09 - 2014-11-11
# MOSCOW: 2014-10-26 / 2014-12-31
tz_didx = date_range('2014-03-01', '2015-01-10', freq='H', tz=tz)
utc_didx = date_range('2014-03-01', '2015-01-10', freq='H')
compare_utc_to_local(tz_didx, utc_didx)
# local tz to UTC can be differ in hourly (or higher) freqs because
# of DST
compare_local_to_utc(tz_didx, utc_didx)
@pytest.mark.parametrize('tz', ['UTC', 'Asia/Tokyo',
'US/Eastern', 'Europe/Moscow'])
@pytest.mark.parametrize('freq', ['D', 'A'])
def test_tz_convert_single_matches_tz_convert(self, tz, freq):
tz_didx = date_range('2000-01-01', '2020-01-01', freq=freq, tz=tz)
utc_didx = date_range('2000-01-01', '2020-01-01', freq=freq)
compare_utc_to_local(tz_didx, utc_didx)
compare_local_to_utc(tz_didx, utc_didx)
@pytest.mark.parametrize('arr', [
pytest.param(np.array([], dtype=np.int64), id='empty'),
pytest.param(np.array([iNaT], dtype=np.int64), id='all_nat')])
def test_tz_convert_corner(self, arr):
result = conversion.tz_convert(arr,
timezones.maybe_get_tz('US/Eastern'),
timezones.maybe_get_tz('Asia/Tokyo'))
tm.assert_numpy_array_equal(result, arr)
| {
"content_hash": "3122dcd5ea02a0e695c1f62c162db864",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 76,
"avg_line_length": 42.763636363636365,
"alnum_prop": 0.6113945578231292,
"repo_name": "harisbal/pandas",
"id": "76038136c26cb6d3899b437ceac9c98e17a9f924",
"size": "2377",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "pandas/tests/tslibs/test_conversion.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "4907"
},
{
"name": "C",
"bytes": "404689"
},
{
"name": "C++",
"bytes": "17194"
},
{
"name": "HTML",
"bytes": "551714"
},
{
"name": "Makefile",
"bytes": "574"
},
{
"name": "Python",
"bytes": "14298777"
},
{
"name": "Shell",
"bytes": "28914"
},
{
"name": "Smarty",
"bytes": "2069"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
import random
import nose
from nose.tools import raises
from xvalidator.validators import Token, Name, NCName, Language, \
NMTOKEN, IntegerValidator, NonNegativeInteger, PositiveInteger, \
NegativeInteger, FloatValidator, NonNegativeFloat, BooleanValidator, \
EnumValidator, ValidationException
__author__ = 'bernd'
def test_validation_exception__str__pass():
iv = ValidationException('message', 42)
nose.tools.eq_(str(iv), 'message, value:42')
def test_token_build_value_pass():
value = ' Test string with collapsed whitespace.'
actual = Token().build(value)
nose.tools.eq_(actual, value)
def test_token_str_pass():
actual = str(Token())
nose.tools.eq_(actual, 'Token')
def test_token_build_pass():
actual = Token().build()
nose.tools.eq_(actual, 'Token')
def test_token_pass():
tk = Token()
value = ' Test string with collapsed whitespace.'
actual = tk.to_python(value)
nose.tools.eq_(actual, value)
@raises(ValidationException)
def test_token_whitespace_fail():
tk = Token()
value = 'Test string with uncollapsed whitespace.'
tk.to_python(value)
@raises(ValidationException)
def test_token_value_type_fail():
tk = Token()
value = 21
tk.to_python(value)
@raises(ValidationException)
def test_token_min_length_fail():
tk = Token(minLength=8)
value = 'short'
tk.to_python(value)
@raises(ValidationException)
def test_token_max_length_fail():
tk = Token(maxLength=8)
value = ' value is too long'
tk.to_python(value)
def test_name_pass():
validator_inst = Name()
value = 'ns:Test-Name_0'
actual = validator_inst.to_python(value)
nose.tools.eq_(actual, value)
def test_build_name_pass():
actual = Name().build()
nose.tools.eq_(actual, 'prefix:Name')
@raises(ValidationException)
def test_name_two_colons_fail():
validator_inst = Name()
value = 'ns:Test-Name:0'
validator_inst.to_python(value)
@raises(ValidationException)
def test_name_whitespace_fail():
validator_inst = Name()
value = 'ns:Test-Name 0'
validator_inst.to_python(value)
def test_ncname_pass():
validator_inst = NCName()
value = 'Test-NCName_0'
actual = validator_inst.to_python(value)
nose.tools.eq_(actual, value)
def test_build_ncname_pass():
actual = NCName().build()
nose.tools.eq_(actual, 'NCName')
@raises(ValidationException)
def test_ncname_whitespace_fail():
validator_inst = NCName()
value = 'ns:Test-NCName 0'
validator_inst.to_python(value)
def test_language_pass():
validator_inst = Language()
value = 'En-US'
actual = validator_inst.to_python(value)
nose.tools.eq_(actual, value)
def test_build_language_value_pass():
value = 'En-US'
actual = Language().build(value)
nose.tools.eq_(actual, value)
def test_build_language_pass():
actual = Language().build()
nose.tools.eq_(actual, 'en-us')
@raises(ValidationException)
def test_language_whitespace_fail():
validator_inst = Language()
value = 'En-US lang'
validator_inst.to_python(value)
@raises(ValidationException)
def test_language_part_too_long_fail():
validator_inst = Language()
value = 'En-US-partIsTooLong'
validator_inst.to_python(value)
def test_nmtoken_pass():
validator_inst = NMTOKEN()
value = '.:-_Test-NMTOKEN_0'
actual = validator_inst.to_python(value)
nose.tools.eq_(actual, value)
def test_nmtoken_build_pass():
actual = NMTOKEN().build()
nose.tools.eq_(actual, 'NMTOKEN')
def test_nmtoken_build_value_pass():
value = '.:-_Test-NMTOKEN_0'
actual = NMTOKEN().build(value)
nose.tools.eq_(actual, value)
@raises(ValidationException)
def test_nmtoken_whitespace_fail():
value = 'Test-NMTOKEN 0'
NMTOKEN().to_python(value)
def test_integer_pass():
validator_inst = IntegerValidator()
value = '00033'
actual = validator_inst.to_python(value)
nose.tools.eq_(actual, 33)
def test_build_integer_33_pass():
value = '00033'
actual = IntegerValidator().build(value)
nose.tools.eq_(actual, 33)
def test_build_integer_pass():
actual = IntegerValidator().build()
nose.tools.eq_(actual, 0)
@raises(ValidationException)
def test_integer_range_fail():
value = '00abc'
IntegerValidator().to_python(value)
def test_non_negative_integer_pass():
validator_inst = NonNegativeInteger()
value = '00033'
actual = validator_inst.to_python(value)
nose.tools.eq_(actual, 33)
def test_non_negative_integer_build_pass():
actual = NonNegativeInteger().build()
nose.tools.eq_(actual, 0)
def test_non_negative_integer_build_value_pass():
value = '00033'
actual = NonNegativeInteger().build(value)
nose.tools.eq_(actual, 33)
@raises(ValidationException)
def test_non_negative_integer_range_fail():
value = '-33'
NonNegativeInteger().to_python(value)
def test_positive_integer_pass():
validator_inst = PositiveInteger()
value = '00033'
actual = validator_inst.to_python(value)
nose.tools.eq_(actual, 33)
def test_positive_integer_build_pass():
actual = PositiveInteger().build()
nose.tools.eq_(actual, 1)
def test_positive_integer_build_value_pass():
value = '00033'
actual = PositiveInteger().build(value)
nose.tools.eq_(actual, 33)
@raises(ValidationException)
def test_positive_integer_range_fail():
value = '0'
PositiveInteger().to_python(value)
def test_negative_integer_pass():
validator_inst = NegativeInteger()
value = '-00033'
actual = validator_inst.to_python(value)
nose.tools.eq_(actual, -33)
def test_negative_integer_build_pass():
actual = NegativeInteger().build()
nose.tools.eq_(actual, -1)
def test_negative_integer_build_value_pass():
value = '-00033'
actual = NegativeInteger().build(value)
nose.tools.eq_(actual, -33)
@raises(ValidationException)
def test_negative_integer_range_fail():
value = '0'
NegativeInteger().to_python(value)
def test_float_pass():
validator_inst = FloatValidator()
value = '00033'
actual = validator_inst.to_python(value)
nose.tools.eq_(actual, 33)
def test_float_build_pass():
actual = FloatValidator().build(0)
nose.tools.eq_(actual, 0.0)
def test_float_build_value_pass():
value = '00033'
actual = FloatValidator().build(value)
nose.tools.eq_(actual, 33)
@raises(ValidationException)
def test_float_range_fail():
value = '00abc'
FloatValidator().to_python(value)
def test_non_negative_float_pass():
validator_inst = NonNegativeFloat()
value = '00033'
actual = validator_inst.to_python(value)
nose.tools.eq_(actual, 33)
def test_non_negative_float_build_pass():
actual = NonNegativeFloat().build()
nose.tools.eq_(actual, 0.0)
def test_non_negative_float_build_value_pass():
value = '00033'
actual = NonNegativeFloat().build(value)
nose.tools.eq_(actual, 33)
@raises(ValidationException)
def test_non_negative_float_range_fail():
validator_inst = NonNegativeFloat()
value = '-33'
validator_inst.to_python(value)
def test_boolean_validator_true_pass():
validator_inst = BooleanValidator()
actual = validator_inst.to_python('true')
nose.tools.eq_(actual, True)
def test_boolean_validator_false_pass():
validator_inst = BooleanValidator()
actual = validator_inst.to_python('false')
nose.tools.eq_(actual, False)
def test_build_boolean_validator_pass():
random.seed(1)
actual = BooleanValidator().build()
nose.tools.eq_(actual, True)
def test_build_boolean_validator_value_pass():
actual = BooleanValidator().build('true')
nose.tools.eq_(actual, True)
def test_build_boolean_validator_false_pass():
actual = BooleanValidator().build('false')
nose.tools.eq_(actual, False)
def test_boolean_validator_bool_true_pass():
validator_inst = BooleanValidator()
actual = validator_inst.to_python(True)
nose.tools.eq_(actual, True)
def test_boolean_validator_bool_false_pass():
validator_inst = BooleanValidator()
actual = validator_inst.to_python(False)
nose.tools.eq_(actual, False)
@raises(ValidationException)
def test_boolean_validator_wrong_type_fail():
value = '-33'
BooleanValidator().to_python(value)
@raises(ValidationException)
def test_boolean_validator_wrong_value_fail():
value = 'not boolean'
BooleanValidator().to_python(value)
class DriverType(EnumValidator):
options = [
"clock",
"singleShot",
"any",
]
def test_driver_type_pass():
validator_inst = DriverType()
actual = validator_inst.to_python('singleShot')
nose.tools.eq_(actual, 'singleShot')
def test_build_driver_type_pass():
random.seed(41)
actual = DriverType().build()
nose.tools.eq_(actual, 'singleShot')
def test_build_driver_type_value_pass():
actual = DriverType().build('any')
nose.tools.eq_(actual, 'any')
def test_driver_type_case_warning_pass():
validator_inst = DriverType()
actual = validator_inst.to_python('CLOCK')
nose.tools.eq_(actual, 'clock')
@raises(ValidationException)
def test_driver_type_value_fail():
DriverType().to_python('myCLOCK')
@raises(ValidationException)
def test_driver_type_value_type_fail():
DriverType().to_python(22)
@raises(ValidationException)
def test_driver_type_value_unicode_error_fail():
DriverType().to_python('\x81myCLOCK')
def test_enum_init_pass():
validator_inst = EnumValidator(options=[
"clock",
"singleShot",
"any",
])
actual = validator_inst.to_python('singleShot')
nose.tools.eq_(actual, 'singleShot')
| {
"content_hash": "4495e22cedc0249df287edea5c05c79f",
"timestamp": "",
"source": "github",
"line_count": 423,
"max_line_length": 74,
"avg_line_length": 23.03309692671395,
"alnum_prop": 0.6769988709842965,
"repo_name": "berndca/xvalidator",
"id": "286be97056a584e295fd443da55c568509a39bfa",
"size": "9743",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_validators.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "127029"
},
{
"name": "Shell",
"bytes": "6467"
}
],
"symlink_target": ""
} |
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
import django
if django.VERSION < (1, 6):
extra_settings = {
'TEST_RUNNER': 'discover_runner.DiscoverRunner',
}
else:
extra_settings = {}
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
INSTALLED_APPS=(
'tests',
),
MIDDLEWARE_CLASSES=[],
ROOT_URLCONF='tests.urls',
**extra_settings
)
def runtests():
argv = sys.argv[:1] + ['test', 'tests']
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
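# Typical usage is simply ``python runtests.py`` from the repository root;
# the settings.configure() call above removes the need to set
# DJANGO_SETTINGS_MODULE.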
| {
"content_hash": "6b45ea7a5b4aaa4d33b0363c9aa5a9fc",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 60,
"avg_line_length": 19.526315789473685,
"alnum_prop": 0.5592991913746631,
"repo_name": "MarkusH/django-nap",
"id": "fcc01a72768c76c70142000d8f02002bfbfbfb1c",
"size": "854",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "runtests.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "91846"
}
],
"symlink_target": ""
} |
import os
from .. import run_python_module
from .base import BaseTestApp
class TestNbGraderFetch(BaseTestApp):
def _release(self, assignment, exchange):
self._copy_file("files/test.ipynb", "release/ps1/p1.ipynb")
run_python_module([
"nbgrader", "release", assignment,
"--course", "abc101",
"--TransferApp.exchange_directory={}".format(exchange)
])
def _fetch(self, assignment, exchange, flags=None, retcode=0):
cmd = [
"nbgrader", "fetch", assignment,
"--course", "abc101",
"--TransferApp.exchange_directory={}".format(exchange)
]
if flags is not None:
cmd.extend(flags)
run_python_module(cmd, retcode=retcode)
def test_help(self):
"""Does the help display without error?"""
run_python_module(["nbgrader", "fetch", "--help-all"])
def test_no_course_id(self, exchange):
"""Does releasing without a course id thrown an error?"""
self._release("ps1", exchange)
cmd = [
"nbgrader", "fetch", "ps1",
"--TransferApp.exchange_directory={}".format(exchange)
]
run_python_module(cmd, retcode=1)
def test_fetch(self, exchange):
self._release("ps1", exchange)
self._fetch("ps1", exchange)
assert os.path.isfile("ps1/p1.ipynb")
# make sure it fails if the assignment already exists
self._fetch("ps1", exchange, retcode=1)
# make sure it fails even if the assignment is incomplete
os.remove("ps1/p1.ipynb")
self._fetch("ps1", exchange, retcode=1)
| {
"content_hash": "3549155660d132407e21c0e3b5de2383",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 67,
"avg_line_length": 31.71153846153846,
"alnum_prop": 0.5858095815645846,
"repo_name": "dementrock/nbgrader",
"id": "e272bf018d3cfadb3483edb9f4a2e35182c2aeb0",
"size": "1649",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nbgrader/tests/apps/test_nbgrader_fetch.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "5780"
},
{
"name": "JavaScript",
"bytes": "192320"
},
{
"name": "Python",
"bytes": "571756"
},
{
"name": "Smarty",
"bytes": "25754"
}
],
"symlink_target": ""
} |
"""Flux database.
"""
import os
import psycopg2
import psycopg2.extras
from astropy import log
try:
from StringIO import StringIO # Python 2
except ImportError:
from io import StringIO
from . import config
class FluxDB(object):
def __init__(self,
dbinfo=config.DBINFO,
prefix='',
autocommit=True):
"""Constructor
Parameters
----------
dbname : string
Connection settings passed on to the psycopg2 module.
autocommit : boolean
If true, changes will be commited on each operation.
"""
self.conn = psycopg2.connect(dbinfo)
self.cur = self.conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
self.prefix = prefix
self.autocommit = autocommit
self.fluxtable = self.prefix+'flux'
def __del__(self):
"""Destructor"""
self.close()
def close(self):
"""Close the database connection."""
self.cur.close()
self.conn.close()
################
# DATA QUERYING
################
def query(self, sql, arguments=()):
try:
self.cur.execute(sql, arguments)
return self.cur.fetchall()
except psycopg2.ProgrammingError as e:
log.error('Query failed [{0}] with error message[{1}]'.format(
self.cur.query, e))
self.rollback()
return None
#def query_json(self, sql, arguments=(,)):
# result = self.query(sql, arguments)
# for key in result[0].keys():
# "{0}: {}"
################
# DATA INGESTION
################
def commit(self):
"""Commits changes."""
self.conn.commit()
def rollback(self):
"""Undo changes or reset error."""
self.conn.rollback()
def ingest_json(self, json):
"""
Parameters
----------
json : a list of dictionaries
"""
lines = [self._json2csv(row) for row in json]
csvfile = StringIO("\n".join(lines))
self.ingest_csv(csvfile)
def ingest_csv(self, csvfile):
"""
Parameters
----------
csvfile : a file-like object which supports read() and readline()
"""
        self.cur.copy_expert(
            'COPY {0} FROM STDIN WITH CSV'.format(self.fluxtable), csvfile)
if self.autocommit:
self.commit()
def _json2csv(self, json):
"""Converts a Python dictionary to a CSV line for database ingestion
Parameters
----------
json : Python dictionary object
"""
# The magnitude array requires special formatting: "{1,2,3}"
magnitudes = [str(m) for m in json['mag']]
json['mag'] = '"{' + (','.join(magnitudes)) + '}"'
if json['dist'] is None:
json['dist'] = ''
csv = "{dataset_id},{format},{station},{shower},{time},{sollong}," + \
"{teff},{lmstar},{alt},{dist},{vel},{mlalt},{lmmet},{eca},{met}," + \
"{mag},{added}"
return csv.format(**json)
def remove_dataset(self, dataset_id):
"""Removes a single dataset from the database.
Parameters
----------
dataset_id : string
Unique identifier of the dataset, e.g. "20120723_ORION1".
"""
self.cur.execute("DELETE FROM flux WHERE dataset_id = %s",
(dataset_id,))
log.debug(self.cur.query)
if self.autocommit:
self.commit()
#############################################
# DATABASE SETUP (TABLES, INDEXES, FUNCTIONS)
#############################################
def setup(self):
"""Setup the database tables and indexes."""
self.create_tables()
self.create_indexes()
self.create_functions()
def drop(self):
"""Drops the tables."""
log.info('DROP TABLE {0}'.format(self.fluxtable))
self.cur.execute("""DROP TABLE {0}""".format(self.fluxtable))
if self.autocommit:
self.commit()
def create_tables(self):
"""Setup the database. Should not commonly be used.
"""
log.info('CREATE TABLE {0}'.format(self.fluxtable))
self.cur.execute("DROP TABLE IF EXISTS {0};".format(self.fluxtable))
self.cur.execute("""CREATE TABLE {0} (
dataset_id text,
format text,
station text,
shower text,
time timestamp,
sollong real,
teff real,
lmstar real,
alt real,
dist real,
vel real,
mlalt real,
lmmet real,
eca real,
met int,
mag real[],
added timestamp
);""".format(self.fluxtable))
if self.autocommit:
self.commit()
def create_indexes(self):
"""Creates the indexes needed.
"""
log.info('Creating indexes on {0}'.format(self.fluxtable))
log.info('Creating index on dataset_id')
self.cur.execute("""CREATE INDEX {0}_dataset_idx ON {0}
USING btree (dataset_id);""".format(
self.fluxtable))
log.info('Creating index on (time,shower)')
self.cur.execute("""CREATE INDEX {0}_time_shower_idx ON {0}
USING btree (time, shower);""".format(
self.fluxtable))
log.info('Creating index on (sollong, shower)')
self.cur.execute("""CREATE INDEX {0}_sollong_shower_idx ON {0}
USING btree (sollong, shower);""".format(
self.fluxtable))
if self.autocommit:
self.commit()
def create_functions(self):
"""Create the stored procedures.
"""
PATH = os.path.dirname(os.path.realpath(__file__))
filename = os.path.join(PATH, 'lib', 'functions.sql')
with open(filename, 'r') as myfile:
sql = "".join(myfile.readlines())
self.cur.execute(sql)
if self.autocommit:
self.commit()
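if __name__ == '__main__':
    # Usage sketch, not part of the original module: it assumes a reachable
    # PostgreSQL server configured in config.DBINFO and the lib/functions.sql
    # file shipped with the package; the dataset id below is made up.
    db = FluxDB(prefix='demo_')
    db.setup()                      # creates demo_flux, its indexes and functions
    rows = db.query("SELECT COUNT(*) FROM {0}".format(db.fluxtable))
    log.info('demo_flux rows: {0}'.format(rows[0][0]))
    db.remove_dataset('20120723_ORION1')
    db.close()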
| {
"content_hash": "c669ef65d5a2fdc7cedeed2749eba040",
"timestamp": "",
"source": "github",
"line_count": 205,
"max_line_length": 83,
"avg_line_length": 32.36585365853659,
"alnum_prop": 0.47445365486058777,
"repo_name": "barentsen/meteor-flux",
"id": "4d370d17379a38815fdf008b4a71a59186f6c230",
"size": "6635",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "meteorflux/db.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1374"
},
{
"name": "HTML",
"bytes": "12282"
},
{
"name": "JavaScript",
"bytes": "10366"
},
{
"name": "PLpgSQL",
"bytes": "14724"
},
{
"name": "Python",
"bytes": "50158"
},
{
"name": "Shell",
"bytes": "829"
}
],
"symlink_target": ""
} |
from typing import Dict, Optional
from appium.options.common.supports_capabilities import SupportsCapabilities
AVD_ENV = 'avdEnv'
class AvdEnvOption(SupportsCapabilities):
@property
def avd_env(self) -> Optional[Dict[str, str]]:
"""
Mapping of emulator environment variables.
"""
return self.get_capability(AVD_ENV)
@avd_env.setter
def avd_env(self, value: Dict[str, str]) -> None:
"""
Set the mapping of emulator environment variables.
"""
self.set_capability(AVD_ENV, value)
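# Usage sketch (illustrative): in the Appium Python client this mixin is
# consumed through an options class such as UiAutomator2Options; the
# environment values below are made up.
#
#     from appium.options.android import UiAutomator2Options
#     options = UiAutomator2Options()
#     options.avd_env = {'LD_LIBRARY_PATH': '/opt/emulator/lib'}
#     assert options.avd_env['LD_LIBRARY_PATH'] == '/opt/emulator/lib'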
| {
"content_hash": "d5a07fe52d4efe218b382efbc1ea303d",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 76,
"avg_line_length": 26.761904761904763,
"alnum_prop": 0.6512455516014235,
"repo_name": "appium/python-client",
"id": "a673a95fd9af5d8f379ff0aa486450f6cc02ded1",
"size": "1350",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "appium/options/android/common/avd/avd_env_option.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "835"
},
{
"name": "Python",
"bytes": "801497"
},
{
"name": "Shell",
"bytes": "3195"
}
],
"symlink_target": ""
} |
''' Migration for transaction dates '''
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    ''' Automatic date substitution in transactions '''
dependencies = [
('lawe', '0002_auto_20161227_2005'),
]
operations = [
migrations.AlterField(
model_name='transaction',
name='date',
field=models.DateTimeField(auto_now=True, verbose_name='date'),
),
]
| {
"content_hash": "a15b1391fbdd10e1cb31a19575c413a3",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 66,
"avg_line_length": 23,
"alnum_prop": 0.7025171624713958,
"repo_name": "DronMDF/laweb",
"id": "5bf48e4a7e7fe61333671d040accaa3fb780bbd0",
"size": "573",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lawe/migrations/0003_auto_20170102_1715.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "662"
},
{
"name": "HTML",
"bytes": "1876"
},
{
"name": "Python",
"bytes": "37441"
},
{
"name": "Shell",
"bytes": "199"
},
{
"name": "XSLT",
"bytes": "6310"
}
],
"symlink_target": ""
} |
"""
Given a collection of candidate numbers (candidates) and a target number
(target), find all unique combinations in candidates where the candidate numbers
sums to target.
Each number in candidates may only be used once in the combination.
Note:
All numbers (including target) will be positive integers.
The solution set must not contain duplicate combinations.
Example 1:
Input: candidates = [10,1,2,7,6,1,5], target = 8,
A solution set is:
[
[1, 7],
[1, 2, 5],
[2, 6],
[1, 1, 6]
]
Example 2:
Input: candidates = [2,5,2,1,2], target = 5,
A solution set is:
[
[1,2,2],
[5]
]
"""
from typing import List
class Solution:
def combinationSum2(self, candidates: List[int], target: int) -> List[List[int]]:
ret = []
candidates.sort()
self.dfs(candidates, 0, [], 0, target, ret)
return ret
def dfs(self, candidates, i, cur, cur_sum, target, ret):
if cur_sum == target:
ret.append(list(cur))
return
if cur_sum > target or i >= len(candidates):
return
# not choose A_i
# to de-dup, need to jump
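        # (e.g. with sorted candidates [1, 1, 2, 5]: skipping index i must
        # also skip every equal duplicate, otherwise "skip the first 1, take
        # the second 1" would re-emit the same subset as "take the first 1,
        # skip the second 1")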
j = i + 1
while j < len(candidates) and candidates[j] == candidates[i]:
j += 1
self.dfs(candidates, j, cur, cur_sum, target, ret)
# choose A_i
cur.append(candidates[i])
cur_sum += candidates[i]
self.dfs(candidates, i + 1, cur, cur_sum, target, ret)
cur.pop()
cur_sum -= candidates[i]
if __name__ == "__main__":
assert Solution().combinationSum2([2,5,2,1,2], 5) == [[5], [1,2,2]]
| {
"content_hash": "600da75b86def9a5d0e51f6d3a06996c",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 85,
"avg_line_length": 25.075757575757574,
"alnum_prop": 0.5637462235649546,
"repo_name": "algorhythms/LeetCode",
"id": "7658b27ba54591a8544b80a501d02d4d999bbd76",
"size": "1675",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "038 Combination Sum II py3.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1444167"
}
],
"symlink_target": ""
} |
"""
Deployment for SUMO in production.
Requires commander (https://github.com/oremj/commander) which is installed on
the systems that need it.
"""
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
from commander.deploy import task, hostgroups # noqa
import commander_settings as settings # noqa
# Setup virtualenv path.
venv_bin_path = os.path.join(settings.SRC_DIR, 'virtualenv', 'bin')
os.environ['PATH'] = venv_bin_path + os.pathsep + os.environ['PATH']
os.environ['DJANGO_SETTINGS_MODULE'] = 'kitsune.settings_local'
@task
def update_code(ctx, tag):
with ctx.lcd(settings.SRC_DIR):
ctx.local("git fetch")
ctx.local("git checkout -f %s" % tag)
ctx.local("find -name '*.pyc' -delete")
@task
def update_locales(ctx):
with ctx.lcd(os.path.join(settings.SRC_DIR, 'locale')):
ctx.local("svn up")
# Run the script that lints the .po files and compiles to .mo the
# the ones that don't have egregious errors in them. This prints
# stdout to the deploy log and also to media/postatus.txt so
# others can see what happened.
with ctx.lcd(settings.SRC_DIR):
ctx.local('date > media/postatus.txt')
ctx.local('./scripts/compile-linted-mo.sh | /usr/bin/tee -a media/postatus.txt')
ctx.local('python2.7 manage.py compilejsi18n')
@task
def update_assets(ctx):
with ctx.lcd(settings.SRC_DIR):
ctx.local("git clean -fxd -- static")
ctx.local("python2.7 manage.py nunjucks_precompile")
ctx.local("./node_modules/.bin/bower install --allow-root")
ctx.local("python2.7 manage.py collectstatic --noinput")
@task
def db_migrations(ctx):
with ctx.lcd(settings.SRC_DIR):
ctx.local('python2.7 manage.py migrate --noinput')
@task
def install_cron(ctx):
with ctx.lcd(settings.SRC_DIR):
ctx.local("python2.7 ./scripts/crontab/gen-crons.py -k %s -u apache > /etc/cron.d/.%s" %
(settings.WWW_DIR, settings.CRON_NAME))
ctx.local("mv /etc/cron.d/.%s /etc/cron.d/%s" % (settings.CRON_NAME, settings.CRON_NAME))
@task
def checkin_changes(ctx):
# Touching the wsgi file forces the app to reload.
with ctx.lcd(settings.SRC_DIR):
ctx.local('touch wsgi/kitsune.wsgi')
ctx.local(settings.DEPLOY_SCRIPT)
@hostgroups(settings.WEB_HOSTGROUP, remote_kwargs={'ssh_key': settings.SSH_KEY})
def deploy_app(ctx):
ctx.remote(settings.REMOTE_UPDATE_SCRIPT)
# Instead of restarting apache, we just touch the wsgi file in `checkin_changes()`
# ctx.remote('service httpd graceful')
@hostgroups(settings.CELERY_HOSTGROUP, remote_kwargs={'ssh_key': settings.SSH_KEY})
def update_celery(ctx):
ctx.remote(settings.REMOTE_UPDATE_SCRIPT)
ctx.remote('/sbin/service %s restart' % settings.CELERY_SERVICE)
@task
def update_info(ctx):
with ctx.lcd(settings.SRC_DIR):
ctx.local("date")
ctx.local("git branch")
ctx.local("git log -3")
ctx.local("git status")
ctx.local("git submodule status")
ctx.local("python2.7 manage.py migrate --list")
with ctx.lcd("locale"):
ctx.local("svn info")
ctx.local("svn status")
ctx.local("git rev-parse HEAD > media/revision.txt")
@task
def setup_dependencies(ctx):
with ctx.lcd(settings.SRC_DIR):
# Creating a virtualenv tries to open virtualenv/bin/python for
# writing, but because virtualenv is using it, it fails.
# So we delete it and let virtualenv create a new one.
ctx.local('rm -f virtualenv/bin/python virtualenv/bin/python2.7')
ctx.local('virtualenv-2.7 --no-site-packages virtualenv')
# Activate virtualenv to append to the correct path to $PATH.
activate_env = os.path.join(settings.SRC_DIR, 'virtualenv', 'bin', 'activate_this.py')
execfile(activate_env, dict(__file__=activate_env))
ctx.local('pip --version')
ctx.local('./peep.sh install -r requirements/default.txt')
# Make the virtualenv relocatable
ctx.local('virtualenv-2.7 --relocatable virtualenv')
# Fix lib64 symlink to be relative instead of absolute.
ctx.local('rm -f virtualenv/lib64')
with ctx.lcd('virtualenv'):
ctx.local('ln -s lib lib64')
# Install Node dependencies
ctx.local('npm install --production --unsafe-perm')
@task
def pre_update(ctx, ref=settings.UPDATE_REF):
update_code(ref)
setup_dependencies()
update_info()
@task
def update(ctx):
update_assets()
update_locales()
db_migrations()
@task
def deploy(ctx):
install_cron()
checkin_changes()
deploy_app()
update_celery()
@task
def update_sumo(ctx, tag):
"""Do typical sumo update"""
pre_update(tag)
update()
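# Invocation sketch (illustrative; the exact entry point depends on how
# commander is installed): something like `commander deploy.py update_sumo
# <tag>` runs pre_update (code checkout, virtualenv, info dump) followed by
# update (assets, locales, migrations); `deploy` is a separate task that
# installs the crontab and rolls the web and celery host groups.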
| {
"content_hash": "1efa723f7d911ff893cb0b809c2db09e",
"timestamp": "",
"source": "github",
"line_count": 161,
"max_line_length": 97,
"avg_line_length": 29.819875776397517,
"alnum_prop": 0.6556967298479484,
"repo_name": "MziRintu/kitsune",
"id": "07f1b6a82c6968fbd0635797a90d61a2d89b4203",
"size": "4801",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "scripts/update/deploy.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "2694"
},
{
"name": "CSS",
"bytes": "282192"
},
{
"name": "HTML",
"bytes": "625684"
},
{
"name": "JavaScript",
"bytes": "728519"
},
{
"name": "Python",
"bytes": "2717690"
},
{
"name": "Shell",
"bytes": "10281"
},
{
"name": "Smarty",
"bytes": "2062"
}
],
"symlink_target": ""
} |
"""
The :mod:`bolt.model` module contains classes which represent
parametric models supported by Bolt.
Currently, the following models are supported:
:class:`bolt.model.LinearModel`: a linear model for binary
classification and regression.
:class:`bolt.model.GeneralizedLinearModel`: a linear model for multi-class
classification.
"""
__authors__ = [
'"Peter Prettenhofer" <[email protected]>'
]
import numpy as np
from io import sparsedtype, densedtype, dense2sparse
try:
from trainer.sgd import predict
except ImportError:
def predict(x, w, b):
return np.dot(x, w) + b
class LinearModel(object):
"""A linear model of the form
:math:`z = \operatorname{sign}(\mathbf{w}^T \mathbf{x} + b)`.
"""
def __init__(self, m, biasterm=False):
"""Create a linear model with an
m-dimensional vector :math:`w = [0,..,0]` and `b = 0`.
:arg m: The dimensionality of the classification problem
(i.e. the number of features).
:type m: positive integer
:arg biasterm: Whether or not a bias term (aka offset or intercept)
is incorporated.
:type biasterm: True or False
"""
if m <= 0:
raise ValueError("Number of dimensions must be larger than 0.")
self.m = m
"""The number of features.
"""
self.w = np.zeros((m), dtype=np.float64, order = "c")
"""A vector of size `m` which parameterizes the model. """
self.bias = 0.0
"""The value of the bias term."""
self.biasterm = biasterm
"""Whether or not the biasterm is used."""
def __call__(self, x, confidence=False):
"""Predicts the target value for the given example.
:arg x: An instance in dense or sparse representation.
:arg confidence: whether to output confidence scores.
:returns: The class assignment and optionally a confidence score.
"""
if x.dtype != sparsedtype:
x = dense2sparse(x)
p = predict(x, self.w, self.bias)
if confidence:
return np.sign(p), 1.0/(1.0+np.exp(-p))
else:
return np.sign(p)
def predict(self, instances, confidence=False):
"""Evaluates :math:`y = sign(w^T \mathbf{x} + b)` for each
instance x in `instances`.
Optionally, gives confidence score to each prediction
if `confidence` is `True`.
This method yields :meth:`LinearModel.__call__` for each instance
in `instances`.
:arg instances: a sequence of instances.
:arg confidence: whether to output confidence scores.
:returns: a generator over the class assignments and
optionally a confidence score.
"""
for x in instances:
yield self.__call__(x, confidence)
class GeneralizedLinearModel(object):
"""A generalized linear model of the form
:math:`z = \operatorname*{arg\,max}_y \mathbf{w}^T \Phi(\mathbf{x},y) + b_y`.
"""
def __init__(self, m, k, biasterm=False):
"""Create a generalized linear model for
classification problems with `k` classes.
:arg m: The dimensionality of the input data (i.e., the number of features).
:arg k: The number of classes.
"""
if m <= 0:
raise ValueError("Number of dimensions must be larger than 0.")
if k <= 1:
raise ValueError("Number of classes must be larger than 2 "\
"(if 2 use `LinearModel`.)")
self.m = m
"""The number of features."""
self.k = k
"""The number of classes."""
self.W = np.zeros((k,m), dtype=np.float64, order = "c")
"""A matrix which contains a `m`-dimensional weight vector for each
class.
Use `W[i]` to access the `i`-th weight vector."""
self.biasterm = biasterm
"""Whether or not the bias term is used. """
self.b = np.zeros((k,), dtype=np.float64, order = "c")
"""A vector of bias terms. """
def __call__(self,x, confidence=False):
"""Predicts the class for the instance `x`.
        Evaluates :math:`z = \operatorname*{arg\,max}_y \mathbf{w}^T \Phi(\mathbf{x},y) + b_y`.
:arg confidence: whether to output confidence scores.
:return: the class index of the predicted class and optionally a confidence value.
"""
return self._predict(x, confidence)
def predict(self, instances, confidence=False):
"""Predicts class of each instances in
`instances`. Optionally, gives confidence score to each prediction
if `confidence` is `True`.
This method yields :meth:`GeneralizedLinearModel.__call__`
for each instance in `instances`.
:arg confidence: whether to output confidence scores.
:arg instances: a sequence of instances.
:return: a generator over the class assignments and
optionally a confidence score.
"""
for x in instances:
yield self.__call__(x, confidence)
def _predict(self, x, confidence=False):
ps = np.array([predict(x, self.W[i], self.b[i]) for i in range(self.k)])
c = np.argmax(ps)
if confidence:
return c, ps[c]
else:
return c
def probdist(self, x):
"""The probability distribution of class assignment.
        Transforms the confidence scores into probabilities via a softmax,
        :math:`\exp(\mathbf{w}^T \mathbf{x} + b_y) / Z`.
:return: a `k`-dimensional probability vector.
"""
ps = np.array([np.exp(predict(x, self.W[i], self.b[i]))
for i in range(self.k)])
Z = np.sum(ps)
return ps / Z
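if __name__ == "__main__":
    # Smoke-test sketch, not part of the original module. It relies on the
    # pure-numpy fallback `predict` defined above (dense inputs); with the
    # compiled trainer.sgd.predict, instances must be sparse instead.
    glm = GeneralizedLinearModel(m=3, k=2)
    glm.W[1] = np.ones(3)
    x = np.array([1.0, 2.0, 3.0], dtype=np.float64)
    print(glm(x))            # index of the highest-scoring class
    print(glm.probdist(x))   # softmax-normalized scores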
| {
"content_hash": "789c56564d7e3329aa56265792b68fc5",
"timestamp": "",
"source": "github",
"line_count": 165,
"max_line_length": 91,
"avg_line_length": 34.981818181818184,
"alnum_prop": 0.5885308385308385,
"repo_name": "pprett/bolt",
"id": "295f2fadd7d07ee4036e200f5c36079c27185da1",
"size": "5874",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bolt/model.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1169663"
},
{
"name": "Python",
"bytes": "92459"
}
],
"symlink_target": ""
} |
from translate.convert import po2tmx
from translate.convert import test_convert
from translate.misc import wStringIO
from translate.storage import tmx
from translate.storage import lisa
class TestPO2TMX:
def po2tmx(self, posource, sourcelanguage='en', targetlanguage='af'):
"""helper that converts po source to tmx source without requiring files"""
inputfile = wStringIO.StringIO(posource)
outputfile = wStringIO.StringIO()
outputfile.tmxfile = tmx.tmxfile(inputfile=None, sourcelanguage=sourcelanguage)
po2tmx.convertpo(inputfile, outputfile, templatefile=None, sourcelanguage=sourcelanguage, targetlanguage=targetlanguage)
return outputfile.tmxfile
def test_basic(self):
minipo = r"""# Afrikaans translation of program ABC
#
msgid ""
msgstr ""
"Project-Id-Version: program 2.1-branch\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2006-01-09 07:15+0100\n"
"PO-Revision-Date: 2004-03-30 17:02+0200\n"
"Last-Translator: Zuza Software Foundation <[email protected]>\n"
"Language-Team: Afrikaans <[email protected]>\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
# Please remember to do something
#: ../dir/file.xml.in.h:1 ../dir/file2.xml.in.h:4
msgid "Applications"
msgstr "Toepassings"
"""
tmx = self.po2tmx(minipo)
print "The generated xml:"
print str(tmx)
assert tmx.translate("Applications") == "Toepassings"
assert tmx.translate("bla") is None
xmltext = str(tmx)
assert xmltext.index('creationtool="Translate Toolkit - po2tmx"')
assert xmltext.index('adminlang')
assert xmltext.index('creationtoolversion')
assert xmltext.index('datatype')
assert xmltext.index('o-tmf')
assert xmltext.index('segtype')
assert xmltext.index('srclang')
def test_sourcelanguage(self):
minipo = 'msgid "String"\nmsgstr "String"\n'
tmx = self.po2tmx(minipo, sourcelanguage="xh")
print "The generated xml:"
print str(tmx)
header = tmx.document.find("header")
assert header.get("srclang") == "xh"
def test_targetlanguage(self):
minipo = 'msgid "String"\nmsgstr "String"\n'
tmx = self.po2tmx(minipo, targetlanguage="xh")
print "The generated xml:"
print str(tmx)
tuv = tmx.document.findall(".//%s" % tmx.namespaced("tuv"))[1]
        # tuv[0] would be the source; we want the target tuv
assert tuv.get("{%s}lang" % lisa.XML_NS) == "xh"
def test_multiline(self):
"""Test multiline po entry"""
minipo = r'''msgid "First part "
"and extra"
msgstr "Eerste deel "
"en ekstra"'''
tmx = self.po2tmx(minipo)
print "The generated xml:"
print str(tmx)
assert tmx.translate('First part and extra') == 'Eerste deel en ekstra'
def test_escapednewlines(self):
"""Test the escaping of newlines"""
minipo = r'''msgid "First line\nSecond line"
msgstr "Eerste lyn\nTweede lyn"
'''
tmx = self.po2tmx(minipo)
print "The generated xml:"
print str(tmx)
assert tmx.translate("First line\nSecond line") == "Eerste lyn\nTweede lyn"
def test_escapedtabs(self):
"""Test the escaping of tabs"""
minipo = r'''msgid "First column\tSecond column"
msgstr "Eerste kolom\tTweede kolom"
'''
tmx = self.po2tmx(minipo)
print "The generated xml:"
print str(tmx)
assert tmx.translate("First column\tSecond column") == "Eerste kolom\tTweede kolom"
def test_escapedquotes(self):
"""Test the escaping of quotes (and slash)"""
minipo = r'''msgid "Hello \"Everyone\""
msgstr "Good day \"All\""
msgid "Use \\\"."
msgstr "Gebruik \\\"."
'''
tmx = self.po2tmx(minipo)
print "The generated xml:"
print str(tmx)
assert tmx.translate('Hello "Everyone"') == 'Good day "All"'
assert tmx.translate(r'Use \".') == r'Gebruik \".'
def test_exclusions(self):
"""Test that empty and fuzzy messages are excluded"""
minipo = r'''#, fuzzy
msgid "One"
msgstr "Een"
msgid "Two"
msgstr ""
msgid ""
msgstr "Drie"
'''
tmx = self.po2tmx(minipo)
print "The generated xml:"
print str(tmx)
assert "<tu" not in str(tmx)
assert len(tmx.units) == 0
def test_nonascii(self):
"""Tests that non-ascii conversion works."""
minipo = r'''msgid "Bézier curve"
msgstr "Bézier-kurwe"
'''
tmx = self.po2tmx(minipo)
print str(tmx)
assert tmx.translate(u"Bézier curve") == u"Bézier-kurwe"
class TestPO2TMXCommand(test_convert.TestConvertCommand, TestPO2TMX):
"""Tests running actual po2tmx commands on files"""
convertmodule = po2tmx
def test_help(self):
"""tests getting help"""
options = test_convert.TestConvertCommand.test_help(self)
options = self.help_check(options, "-l LANG, --language=LANG")
options = self.help_check(options, "--source-language=LANG", last=True)
| {
"content_hash": "76a224462cc7f22d54d75e84b9f7189d",
"timestamp": "",
"source": "github",
"line_count": 151,
"max_line_length": 128,
"avg_line_length": 34.01324503311258,
"alnum_prop": 0.6388239875389408,
"repo_name": "dbbhattacharya/kitsune",
"id": "7cb439e7ed9a5e950d6cf894c40e5a62043d06e9",
"size": "5187",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "vendor/packages/translate-toolkit/translate/convert/test_po2tmx.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ApacheConf",
"bytes": "2694"
},
{
"name": "CSS",
"bytes": "276585"
},
{
"name": "HTML",
"bytes": "600145"
},
{
"name": "JavaScript",
"bytes": "800276"
},
{
"name": "Python",
"bytes": "2762831"
},
{
"name": "Shell",
"bytes": "6720"
},
{
"name": "Smarty",
"bytes": "1752"
}
],
"symlink_target": ""
} |
from siconos.mechanics.collision.tools import Contactor
from siconos.io.mechanics_io import Hdf5
import siconos.numerics as Numerics
# Creation of the hdf5 file for input/output
with Hdf5() as io:
# Definition of a cube as a convex shape
io.addConvexShape('Cube', [
(-1.0, 1.0, -1.0),
(-1.0, -1.0, -1.0),
(-1.0, -1.0, 1.0),
(-1.0, 1.0, 1.0),
(1.0, 1.0, 1.0),
(1.0, 1.0, -1.0),
(1.0, -1.0, -1.0),
(1.0, -1.0, 1.0)])
# Definition of the ground shape
io.addPrimitiveShape('Ground', 'Box', (100, 100, .5))
    # Definition of a non-smooth law. As no group ids are specified, it
    # applies between contactors of group id 0.
io.addNewtonImpactFrictionNSL('contact', mu=0.3)
    # The cube objects are made with a unique Contactor: the cube shape.
    # As a mass is given, they are dynamic systems involved in contact
    # detection and in the simulation. With no group id specified the
    # Contactor belongs to group 0.
    # A first cube is introduced at the beginning of the simulation
io.addObject('cube0', [Contactor('Cube')], translation=[0, 0, 2],
velocity=[10, 0, 0, 1, 1, 1],
mass=1)
    # The second cube's introduction is delayed: it is created in the
    # simulation at time 0.5.
io.addObject('cube1', [Contactor('Cube')], translation=[0, 0, 2],
velocity=[10, 0, 0, 1, 1, 1],
mass=1, time_of_birth=0.5)
# the ground object made with the ground shape. As the mass is
# not given, it is a static object only involved in contact
# detection.
io.addObject('ground', [Contactor('Ground')],
translation=[0, 0, 0])
# Run the simulation from the inputs previously defined and add
# results to the hdf5 file. The visualisation of the output may be done
# with the vview command.
with Hdf5(mode='r+') as io:
# By default earth gravity is applied and the units are those
# of the International System of Units.
# Because of fixed collision margins used in the collision detection,
# sizes of small objects may need to be expressed in cm or mm.
io.run(with_timer=False,
gravity_scale=1,
t0=0,
T=10,
h=0.0005,
multipoints_iterations=True,
theta=0.50001,
Newton_max_iter=20,
set_external_forces=None,
solver=Numerics.SICONOS_FRICTION_3D_NSGS,
itermax=100000,
tolerance=1e-8,
numerics_verbose=False,
output_frequency=None)
| {
"content_hash": "3b7e6c1476e7bc5992bed9db03e7c2ca",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 79,
"avg_line_length": 36.33802816901409,
"alnum_prop": 0.6089147286821706,
"repo_name": "bremond/siconos",
"id": "c810dafb094f44b98380669e1e986b6d18741efd",
"size": "2678",
"binary": false,
"copies": "1",
"ref": "refs/heads/oldmaster",
"path": "examples/Mechanics/GeometricPrimitives/delayed_cubes.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "28"
},
{
"name": "Batchfile",
"bytes": "2725"
},
{
"name": "C",
"bytes": "4581481"
},
{
"name": "C++",
"bytes": "9341609"
},
{
"name": "CMake",
"bytes": "416755"
},
{
"name": "CSS",
"bytes": "12791"
},
{
"name": "Dockerfile",
"bytes": "233"
},
{
"name": "Fortran",
"bytes": "2539066"
},
{
"name": "GAMS",
"bytes": "5614"
},
{
"name": "HTML",
"bytes": "4344542"
},
{
"name": "Makefile",
"bytes": "11474"
},
{
"name": "Nix",
"bytes": "1207"
},
{
"name": "Python",
"bytes": "1378490"
},
{
"name": "SWIG",
"bytes": "140629"
},
{
"name": "Shell",
"bytes": "51216"
}
],
"symlink_target": ""
} |
""" A guide renderer for displaying grid lines on Bokeh plots.
"""
from __future__ import absolute_import
from ..core.properties import Int, String, Float, Auto, Instance, Tuple, Either, Include, Override
from ..core.property_mixins import FillProps, LineProps
from .renderers import GuideRenderer
from .tickers import Ticker
class Grid(GuideRenderer):
""" Display horizontal or vertical grid lines at locations
given by a supplied ``Ticker``.
"""
dimension = Int(0, help="""
Which dimension the Axis Grid lines will intersect. The
x-axis is dimension 0 (vertical Grid lines) and the y-axis
is dimension 1 (horizontal Grid lines).
""")
bounds = Either(Auto, Tuple(Float, Float), help="""
Bounds for the rendered grid lines. If unset, the grid
lines will span the entire plot in the given dimension.
""")
    # Note: we must allow the possibility of setting both
    # range names because if a grid line is "traced" along
    # a path, ranges in both dimensions will matter.
x_range_name = String('default', help="""
A particular (named) x-range to use for computing screen
locations when rendering a grid on the plot. If unset, use the
default x-range.
""")
y_range_name = String('default', help="""
A particular (named) y-range to use for computing screen
locations when rendering a grid on the plot. If unset, use the
default y-range.
""")
ticker = Instance(Ticker, help="""
The Ticker to use for computing locations for the Grid lines.
""")
grid_props = Include(LineProps, help="""
The %s of the Grid lines.
""")
grid_line_color = Override(default='#cccccc')
minor_grid_props = Include(LineProps, help="""
The %s of the minor Grid lines.
""")
minor_grid_line_color = Override(default=None)
band_props = Include(FillProps, help="""
The %s of alternating bands between Grid lines.
""")
band_fill_alpha = Override(default=0)
band_fill_color = Override(default=None)
level = Override(default="underlay")
| {
"content_hash": "ad25971a02a9d95d6a74df52d36a465b",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 98,
"avg_line_length": 30.558823529411764,
"alnum_prop": 0.6742059672762272,
"repo_name": "quasiben/bokeh",
"id": "21d859d8a4f971bdef4cf858e541b4c6249c678c",
"size": "2078",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "bokeh/models/grids.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "448001"
},
{
"name": "CoffeeScript",
"bytes": "2130601"
},
{
"name": "JavaScript",
"bytes": "2530410"
},
{
"name": "Python",
"bytes": "1056239"
},
{
"name": "Scala",
"bytes": "28977"
},
{
"name": "Shell",
"bytes": "13082"
}
],
"symlink_target": ""
} |
import unittest
from prime_factors import prime_factors
class PrimeFactorsTest(unittest.TestCase):
def test_1(self):
self.assertEqual([], prime_factors(1))
def test_2(self):
self.assertEqual([2], prime_factors(2))
def test_3(self):
self.assertEqual([3], prime_factors(3))
def test_4(self):
self.assertEqual([2, 2], prime_factors(4))
def test_6(self):
self.assertEqual([2, 3], prime_factors(6))
def test_8(self):
self.assertEqual([2, 2, 2], prime_factors(8))
def test_9(self):
self.assertEqual([3, 3], prime_factors(9))
def test_27(self):
self.assertEqual([3, 3, 3], prime_factors(27))
def test_625(self):
self.assertEqual([5, 5, 5, 5], prime_factors(625))
def test_901255(self):
self.assertEqual([5, 17, 23, 461], prime_factors(901255))
def test_93819012551(self):
self.assertEqual([11, 9539, 894119], prime_factors(93819012551))
if __name__ == '__main__':
unittest.main()
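# A trial-division implementation that satisfies the tests above
# (illustrative; the module under test lives in prime_factors.py):
#
#     def prime_factors(n):
#         factors, d = [], 2
#         while d * d <= n:
#             while n % d == 0:
#                 factors.append(d)
#                 n //= d
#             d += 1
#         if n > 1:
#             factors.append(n)
#         return factors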
| {
"content_hash": "c8afb92fde983548cb12126d7ffd864f",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 72,
"avg_line_length": 24.38095238095238,
"alnum_prop": 0.6064453125,
"repo_name": "rootulp/xpython",
"id": "02524dce8477520f652462add96ea699e232707c",
"size": "1024",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "exercises/prime-factors/prime_factors_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "209553"
},
{
"name": "Shell",
"bytes": "640"
}
],
"symlink_target": ""
} |
import importlib
import json
from typing import Any, Callable, Dict, Optional
from django.conf import settings
from django.utils.translation import gettext as _
from zulip_bots.lib import BotIdentity, RateLimit
from zerver.actions.message_send import (
internal_send_huddle_message,
internal_send_private_message,
internal_send_stream_message_by_name,
)
from zerver.lib.bot_config import ConfigError, get_bot_config
from zerver.lib.bot_storage import (
get_bot_storage,
is_key_in_bot_storage,
remove_bot_storage,
set_bot_storage,
)
from zerver.lib.integrations import EMBEDDED_BOTS
from zerver.lib.topic import get_topic_from_message_info
from zerver.models import UserProfile, get_active_user
def get_bot_handler(service_name: str) -> Any:
    # Check that this service is present in EMBEDDED_BOTS; return None if not.
configured_service = ""
for embedded_bot_service in EMBEDDED_BOTS:
if service_name == embedded_bot_service.name:
configured_service = embedded_bot_service.name
if not configured_service:
return None
bot_module_name = f"zulip_bots.bots.{configured_service}.{configured_service}"
bot_module: Any = importlib.import_module(bot_module_name)
return bot_module.handler_class()
class StateHandler:
storage_size_limit: int = settings.USER_STATE_SIZE_LIMIT
def __init__(self, user_profile: UserProfile) -> None:
self.user_profile = user_profile
self.marshal: Callable[[object], str] = lambda obj: json.dumps(obj)
self.demarshal: Callable[[str], object] = lambda obj: json.loads(obj)
def get(self, key: str) -> object:
return self.demarshal(get_bot_storage(self.user_profile, key))
def put(self, key: str, value: object) -> None:
set_bot_storage(self.user_profile, [(key, self.marshal(value))])
def remove(self, key: str) -> None:
remove_bot_storage(self.user_profile, [key])
def contains(self, key: str) -> bool:
return is_key_in_bot_storage(self.user_profile, key)
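# Storage contract sketch (illustrative; needs a real UserProfile and the
# bot-storage tables):
#
#     storage = StateHandler(user_profile)
#     storage.put("counter", 1)       # marshalled to JSON on write
#     storage.get("counter")          # -> 1
#     storage.contains("counter")     # -> True
#     storage.remove("counter")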
class EmbeddedBotQuitException(Exception):
pass
class EmbeddedBotEmptyRecipientsList(Exception):
pass
class EmbeddedBotHandler:
def __init__(self, user_profile: UserProfile) -> None:
# Only expose a subset of our UserProfile's functionality
self.user_profile = user_profile
self._rate_limit = RateLimit(20, 5)
self.full_name = user_profile.full_name
self.email = user_profile.email
self.storage = StateHandler(user_profile)
self.user_id = user_profile.id
def identity(self) -> BotIdentity:
return BotIdentity(self.full_name, self.email)
def react(self, message: Dict[str, Any], emoji_name: str) -> Dict[str, Any]:
return {} # Not implemented
def send_message(self, message: Dict[str, Any]) -> Dict[str, Any]:
if not self._rate_limit.is_legal():
self._rate_limit.show_error_and_exit()
if message["type"] == "stream":
message_id = internal_send_stream_message_by_name(
self.user_profile.realm,
self.user_profile,
message["to"],
message["topic"],
message["content"],
)
return {"id": message_id}
assert message["type"] == "private"
# Ensure that it's a comma-separated list, even though the
# usual 'to' field could be either a List[str] or a str.
recipients = ",".join(message["to"]).split(",")
if len(message["to"]) == 0:
raise EmbeddedBotEmptyRecipientsList(_("Message must have recipients!"))
elif len(message["to"]) == 1:
recipient_user = get_active_user(recipients[0], self.user_profile.realm)
message_id = internal_send_private_message(
self.user_profile, recipient_user, message["content"]
)
else:
message_id = internal_send_huddle_message(
self.user_profile.realm, self.user_profile, recipients, message["content"]
)
return {"id": message_id}
def send_reply(
self, message: Dict[str, Any], response: str, widget_content: Optional[str] = None
) -> Dict[str, Any]:
if message["type"] == "private":
result = self.send_message(
dict(
type="private",
to=[x["email"] for x in message["display_recipient"]],
content=response,
sender_email=message["sender_email"],
)
)
else:
result = self.send_message(
dict(
type="stream",
to=message["display_recipient"],
topic=get_topic_from_message_info(message),
content=response,
sender_email=message["sender_email"],
)
)
return {"id": result["id"]}
def update_message(self, message: Dict[str, Any]) -> None:
pass # Not implemented
# The bot_name argument exists only to comply with ExternalBotHandler.get_config_info().
def get_config_info(self, bot_name: str, optional: bool = False) -> Dict[str, str]:
try:
return get_bot_config(self.user_profile)
except ConfigError:
if optional:
return {}
raise
def quit(self, message: str = "") -> None:
raise EmbeddedBotQuitException(message)
| {
"content_hash": "665b0c028773760acc0342abcb89d3b8",
"timestamp": "",
"source": "github",
"line_count": 154,
"max_line_length": 92,
"avg_line_length": 35.90909090909091,
"alnum_prop": 0.6092224231464738,
"repo_name": "rht/zulip",
"id": "47750d9e2b9ff4e7edb443ae7588fefed626cec8",
"size": "5530",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "zerver/lib/bot_lib.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "489438"
},
{
"name": "Dockerfile",
"bytes": "4025"
},
{
"name": "Emacs Lisp",
"bytes": "157"
},
{
"name": "HTML",
"bytes": "743287"
},
{
"name": "Handlebars",
"bytes": "374049"
},
{
"name": "JavaScript",
"bytes": "4000260"
},
{
"name": "Perl",
"bytes": "10163"
},
{
"name": "Puppet",
"bytes": "112128"
},
{
"name": "Python",
"bytes": "10160680"
},
{
"name": "Ruby",
"bytes": "3459"
},
{
"name": "Shell",
"bytes": "146797"
},
{
"name": "TypeScript",
"bytes": "284836"
}
],
"symlink_target": ""
} |
"""
Convenience routines for creating non-trivial Field subclasses, as well as
backwards compatibility utilities.
Add SubfieldBase as the __metaclass__ for your Field subclass, implement
to_python() and the other necessary methods and everything will work seamlessly.
"""
from inspect import getargspec
from warnings import warn
def call_with_connection(func):
arg_names, varargs, varkwargs, defaults = getargspec(func)
updated = ('connection' in arg_names or varkwargs)
if not updated:
warn("A Field class whose %s method hasn't been updated to take a "
"`connection` argument." % func.__name__,
DeprecationWarning, stacklevel=3)
def inner(*args, **kwargs):
if 'connection' not in kwargs:
from django.db import connection
kwargs['connection'] = connection
warn("%s has been called without providing a connection argument. " %
func.__name__, DeprecationWarning,
stacklevel=2)
if updated:
return func(*args, **kwargs)
if 'connection' in kwargs:
del kwargs['connection']
return func(*args, **kwargs)
return inner
def call_with_connection_and_prepared(func):
arg_names, varargs, varkwargs, defaults = getargspec(func)
updated = (
('connection' in arg_names or varkwargs) and
('prepared' in arg_names or varkwargs)
)
if not updated:
warn("A Field class whose %s method hasn't been updated to take "
"`connection` and `prepared` arguments." % func.__name__,
DeprecationWarning, stacklevel=3)
def inner(*args, **kwargs):
if 'connection' not in kwargs:
from django.db import connection
kwargs['connection'] = connection
warn("%s has been called without providing a connection argument. " %
func.__name__, DeprecationWarning,
stacklevel=2)
if updated:
return func(*args, **kwargs)
if 'connection' in kwargs:
del kwargs['connection']
if 'prepared' in kwargs:
del kwargs['prepared']
return func(*args, **kwargs)
return inner
class LegacyConnection(type):
"""
A metaclass to normalize arguments give to the get_db_prep_* and db_type
methods on fields.
"""
def __new__(cls, names, bases, attrs):
new_cls = super(LegacyConnection, cls).__new__(cls, names, bases, attrs)
for attr in ('db_type', 'get_db_prep_save'):
setattr(new_cls, attr, call_with_connection(getattr(new_cls, attr)))
for attr in ('get_db_prep_lookup', 'get_db_prep_value'):
setattr(new_cls, attr, call_with_connection_and_prepared(getattr(new_cls, attr)))
return new_cls
class SubfieldBase(LegacyConnection):
"""
A metaclass for custom Field subclasses. This ensures the model's attribute
has the descriptor protocol attached to it.
"""
def __new__(cls, base, name, attrs):
new_class = super(SubfieldBase, cls).__new__(cls, base, name, attrs)
new_class.contribute_to_class = make_contrib(
attrs.get('contribute_to_class'))
return new_class
class Creator(object):
"""
A placeholder class that provides a way to set the attribute on the model.
"""
def __init__(self, field):
self.field = field
def __get__(self, obj, type=None):
if obj is None:
raise AttributeError('Can only be accessed via an instance.')
return obj.__dict__[self.field.name]
def __set__(self, obj, value):
obj.__dict__[self.field.name] = self.field.to_python(value)
def make_contrib(func=None):
"""
Returns a suitable contribute_to_class() method for the Field subclass.
If 'func' is passed in, it is the existing contribute_to_class() method on
the subclass and it is called before anything else. It is assumed in this
case that the existing contribute_to_class() calls all the necessary
superclass methods.
"""
def contribute_to_class(self, cls, name):
if func:
func(self, cls, name)
else:
super(self.__class__, self).contribute_to_class(cls, name)
setattr(cls, self.name, Creator(self))
return contribute_to_class
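# Illustrative field following the module docstring (the field class and its
# to_python() conversion are made up):
#
#     class CommaSeparatedField(models.TextField):
#         __metaclass__ = SubfieldBase
#
#         def to_python(self, value):
#             if isinstance(value, list):
#                 return value
#             return value.split(',')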
| {
"content_hash": "ebfafbe32fb4534580b58bbe0cb7951e",
"timestamp": "",
"source": "github",
"line_count": 116,
"max_line_length": 93,
"avg_line_length": 37.3448275862069,
"alnum_prop": 0.6255771006463527,
"repo_name": "alex/django-old",
"id": "148a998f1c8462fb3cd7e5303d163bae4f5fa457",
"size": "4332",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django/db/models/fields/subclassing.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "91750"
},
{
"name": "Python",
"bytes": "6425033"
},
{
"name": "Shell",
"bytes": "3519"
}
],
"symlink_target": ""
} |
"""Package contenant un objet destiné à montrer des informations.
Par exemple, vous trouverez dans ce package le module score et la classe
MontrerScore, destinée à montrer le score d'un personnage. L'intérêt de
passer par des classes (simple conteneurs, méthodes statiques) est que l'on
peut utiliser la même classe pour afficher différents scores (celui d'un
PNJ, d'un joueur, d'un familier, de soi-même).
"""
| {
"content_hash": "021c77dc9f0eea55b3740a70399ad006",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 75,
"avg_line_length": 45.888888888888886,
"alnum_prop": 0.7820823244552058,
"repo_name": "vlegoff/tsunami",
"id": "5982687de629501273bb89b816f4870f9b8659cd",
"size": "1990",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/primaires/perso/montrer/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "7930908"
},
{
"name": "Ruby",
"bytes": "373"
}
],
"symlink_target": ""
} |
from app import app, swagger
from flask import Blueprint, jsonify
from config import BUILD_VERSION
swagger_bp = Blueprint('swagger_bp', __name__)
@app.route('/')
def show_swagger():
res = swagger.spec.to_dict()
for hidden_views in ('/signup',):
if hidden_views in res['paths']:
del res['paths'][hidden_views]
return jsonify(res)
@app.route('/version')
def show_version():
return BUILD_VERSION, 200
| {
"content_hash": "9eb0ca5b35e49811d78c26dada3a6dd6",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 46,
"avg_line_length": 23.105263157894736,
"alnum_prop": 0.6560364464692483,
"repo_name": "giubil/trackit",
"id": "3c3b3425007e375947628197ef6c07c4c86dcf63",
"size": "439",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "api/files/api/app/views/swagger.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "44532"
},
{
"name": "HTML",
"bytes": "255830"
},
{
"name": "JavaScript",
"bytes": "512621"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "PHP",
"bytes": "1097"
},
{
"name": "Python",
"bytes": "450877"
},
{
"name": "Shell",
"bytes": "5999"
},
{
"name": "Smarty",
"bytes": "5571"
}
],
"symlink_target": ""
} |
import os.path
from typing import Dict, List
from .variable import Variable
from .subprogram import Subprogram
class CompilationUnit:
"""
CompilationUnit for DWARF
See http://dwarfstd.org/doc/DWARF5.pdf page 60
"""
def __init__(self, name, comp_dir, low_pc, high_pc, language):
self.name = name
self.comp_dir = comp_dir
self.file_path = os.path.join(self.comp_dir, self.name)
self.low_pc = low_pc
self.high_pc = high_pc
self.language = language
self.functions: Dict[int, Subprogram] = {}
self.global_variables: List[Variable] = []
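# Construction sketch (illustrative; real instances are built by cle's DWARF
# loader and the values below are made up):
#
#     cu = CompilationUnit('main.c', '/src/app', 0x400000, 0x400fff, 'C99')
#     cu.file_path   # -> '/src/app/main.c'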
| {
"content_hash": "790964356310da951584796cccf17420",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 66,
"avg_line_length": 28.136363636363637,
"alnum_prop": 0.6365105008077544,
"repo_name": "angr/cle",
"id": "8a8c57c439cc97d10674bfa06b54ae58f1098a99",
"size": "619",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cle/backends/elf/compilation_unit.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "609361"
}
],
"symlink_target": ""
} |
__author__ = "Christian Kongsgaard"
__license__ = 'MIT'
# -------------------------------------------------------------------------------------------------------------------- #
# IMPORTS
# Modules
# RiBuild Modules
from delphin_6_automation.database_interactions import mongo_setup
from delphin_6_automation.database_interactions.auth import auth_dict
from delphin_6_automation.database_interactions import general_interactions
from delphin_6_automation.database_interactions import weather_interactions
from delphin_6_automation.database_interactions import delphin_interactions
from delphin_6_automation.database_interactions.db_templates import material_entry
# -------------------------------------------------------------------------------------------------------------------- #
# RIBuild
mongo_setup.global_init(auth_dict)
delphin_file0 = r'U:\RIBuild\Material_Sensitivity\Delphin Projects\4A_36cm_brick_1D.d6p'
delphin_file1 = r'U:\RIBuild\Material_Sensitivity\Delphin Projects\4A_36cm_brick_ins_1D.d6p'
delphin_file2 = r'U:\RIBuild\Material_Sensitivity\Delphin Projects\4A_36cm_2D.d6p'
delphin_file3 = r'U:\RIBuild\Material_Sensitivity\Delphin Projects\4A_36cm_ins_2D.d6p'
priority = 'high'
climate_class = 'a'
location_name = 'KobenhavnTaastrup'
years = [2020, 2020, 2021, 2022]
bricks = []
not_hydrolic_computable_bricks = [53, 92, 93, 94, 99, 100, 101, 102, 104, 105, 106, 118, 176, 203, 204, 205, 213, 214,
215, 216, 217, 234, 237, 259, 260, 312, 313, 320, 321, 352, 353, 354, 355, 356, 357,
358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 374, 385,
386, 387, 388, 389, 390, 391]
for material in material_entry.Material.objects():
if material.material_data['IDENTIFICATION-CATEGORY'] == 'BRICK' and not material.material_id == 504 \
and material.material_id not in not_hydrolic_computable_bricks:
bricks.append(material.material_id)
print(f'{material.material_id} - {material.material_name}')
def permutate_uploads(materials):
print('Uploading')
sim_id = general_interactions.add_to_simulation_queue(delphin_file0, priority)
weather_interactions.assign_indoor_climate_to_project(sim_id, climate_class)
weather_interactions.assign_weather_by_name_and_years(sim_id, location_name, years)
delphin_interactions.change_entry_simulation_length(sim_id, len(years), 'a')
layer_material = 'Old Building Brick Dresden ZP [504]'
priority_ = 1
modified_ids = delphin_interactions.permutate_entry_layer_material(sim_id, layer_material, materials, priority_)
for modified_id in modified_ids:
weather_interactions.assign_weather_by_name_and_years(modified_id, location_name, years)
weather_interactions.assign_indoor_climate_to_project(modified_id, climate_class)
delphin_interactions.change_entry_simulation_length(modified_id, len(years), 'a')
print(len(bricks))
#permutate_uploads(bricks)
mongo_setup.global_end_ssh(auth_dict)
| {
"content_hash": "e8bbae82b66af35810f2e540e8e6640b",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 120,
"avg_line_length": 47.13846153846154,
"alnum_prop": 0.6622062663185379,
"repo_name": "thp44/delphin_6_automation",
"id": "bbbbca2c143a105103d70633e7914ddaeba2b029",
"size": "3064",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "data_process/sensitivity/material/upload_project.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "137563"
}
],
"symlink_target": ""
} |
import optproblems.cec2005
import numpy as np
import time
from ASO import *
import os
if __name__ == "__main__":
dim = 30
repeats = 10
evaluations = 10000*dim
population = 30
external_rate = 3
internal_rate = 1
fickleness_rate = 0.5
if not os.path.exists('results'):
os.makedirs('results')
if not os.path.exists('convergence'):
os.makedirs('convergence')
np.random.seed(10)
f5 = optproblems.cec2005.F5(dim)
time1 = time.time()
results = np.array([ASO(f5, dim=dim, max_eval=evaluations, external_rate=external_rate,
internal_rate=internal_rate, lower_bound=-100, upper_bound=100,
fickleness_rate=fickleness_rate) for _ in range(repeats)])
total_time = time.time() - time1
means = results.mean(axis=0)
solutions = results[:,-1]
mean_best = means[-1]
min_sol = np.min(solutions)
max_sol = np.max(solutions)
marks = means[0:-1]
with open("results/ASO-resultsd-30-5.txt", "w") as file:
print("F5: Schwefel's Problem 2.6 with Global Optimum on Bounds", file=file)
print("Min\t Max\t Mean\t Mean time", file=file)
print("_______________________________________________", file=file)
print("{} {} {} {}".format(min_sol, max_sol, mean_best, total_time / repeats), file=file)
with open("convergence/ASO-convergenced-30-5.csv", "w") as file:
for i in range(len(marks)):
print("{},{}".format(10000*i, marks[i]), file=file)
np.random.seed(10)
f10 = optproblems.cec2005.F10(dim)
time1 = time.time()
results = np.array([ASO(f10, dim=dim, max_eval=evaluations, external_rate=external_rate,
internal_rate=internal_rate, lower_bound=-5, upper_bound=5,
fickleness_rate=fickleness_rate) for _ in range(repeats)])
total_time = time.time() - time1
means = results.mean(axis=0)
solutions = results[:,-1]
mean_best = means[-1]
min_sol = np.min(solutions)
max_sol = np.max(solutions)
marks = means[0:-1]
with open("results/ASO-resultsd-30-10.txt", "w") as file:
print("F10: Shifted Rotated Rastrigin's Function", file=file)
print("Min\t Max\t Mean\t Mean time", file=file)
print("_______________________________________________", file=file)
print("{} {} {} {}".format(min_sol, max_sol, mean_best, total_time / repeats), file=file)
with open("convergence/ASO-convergenced-30-10.csv", "w") as file:
for i in range(len(marks)):
print("{},{}".format(10000*i, marks[i]), file=file)
np.random.seed(10)
f23 = optproblems.cec2005.F23(dim)
time1 = time.time()
results = np.array([ASO(f23, dim=dim, max_eval=evaluations, external_rate=external_rate,
internal_rate=internal_rate, lower_bound=-np.pi, upper_bound=np.pi,
fickleness_rate=fickleness_rate) for _ in range(repeats)])
total_time = time.time() - time1
means = results.mean(axis=0)
solutions = results[:,-1]
mean_best = means[-1]
min_sol = np.min(solutions)
max_sol = np.max(solutions)
marks = means[0:-1]
with open("results/ASO-resultsd-30-23.txt", "w") as file:
print("F23: Non-Continuous Rotated Hybrid Composition Function", file=file)
print("Min\t Max\t Mean\t Mean time", file=file)
print("_______________________________________________", file=file)
print("{} {} {} {}".format(min_sol, max_sol, mean_best, total_time / repeats), file=file)
with open("convergence/ASO-convergenced-30-23.csv", "w") as file:
for i in range(len(marks)):
print("{},{}".format(10000*i, marks[i]), file=file)
np.random.seed(10)
f24 = optproblems.cec2005.F24(dim)
time1 = time.time()
results = np.array([ASO(f24, dim=dim, max_eval=evaluations, external_rate=external_rate,
internal_rate=internal_rate, lower_bound=-5, upper_bound=5,
fickleness_rate=fickleness_rate) for _ in range(repeats)])
total_time = time.time() - time1
means = results.mean(axis=0)
solutions = results[:,-1]
mean_best = means[-1]
min_sol = np.min(solutions)
max_sol = np.max(solutions)
marks = means[0:-1]
with open("results/ASO-resultsd-30-24.txt", "w") as file:
print("F24: Rotated Hybrid Composition Function", file=file)
print("Min\t Max\t Mean\t Mean time", file=file)
print("_______________________________________________", file=file)
print("{} {} {} {}".format(min_sol, max_sol, mean_best, total_time / repeats), file=file)
with open("convergence/ASO-convergenced-30-24.csv", "w") as file:
for i in range(len(marks)):
print("{},{}".format(10000*i, marks[i]), file=file)
np.random.seed(10)
f25 = optproblems.cec2005.F25(dim)
time1 = time.time()
results = np.array([ASO(f25, dim=dim, max_eval=evaluations, external_rate=external_rate,
internal_rate=internal_rate, lower_bound=-10, upper_bound=10,
fickleness_rate=fickleness_rate, initial_population_lower_bound=2,
initial_population_upper_bound=5) for _ in range(repeats)])
total_time = time.time() - time1
means = results.mean(axis=0)
solutions = results[:,-1]
mean_best = means[-1]
min_sol = np.min(solutions)
max_sol = np.max(solutions)
marks = means[0:-1]
with open("results/ASO-resultsd-30-25.txt", "w") as file:
print("F25: Rotated Hybrid Composition Function without Bounds", file=file)
print("Min\t Max\t Mean\t Mean time", file=file)
print("_______________________________________________", file=file)
print("{} {} {} {}".format(min_sol, max_sol, mean_best, total_time / repeats), file=file)
with open("convergence/ASO-convergenced-30-25.csv", "w") as file:
for i in range(len(marks)):
print("{},{}".format(10000*i, marks[i]), file=file)
| {
"content_hash": "bdc8f47458b085b6c622fa3030df54ff",
"timestamp": "",
"source": "github",
"line_count": 160,
"max_line_length": 97,
"avg_line_length": 36.76875,
"alnum_prop": 0.5934047254801972,
"repo_name": "JJSrra/Research-SocioinspiredAlgorithms",
"id": "91372c47ed40a5a7a848d95ab476894c361c6ad7",
"size": "5883",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ASO/ASObenchmark30-5.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "357836"
},
{
"name": "Shell",
"bytes": "23920"
}
],
"symlink_target": ""
} |
import os
import sys
argv = sys.argv
sys.argv = [sys.argv[0]]
import ROOT
import CrombieTools # Needed for KinematicFunctions.h
import logging
logging.basicConfig(level=logging.INFO)
def main(in_file, out_file, cut, *args, **kwargs):
dump = args or ['run', 'luminosityBlock', 'event']
to_scan = ':'.join(dump)
logging.info('Input file %s', in_file)
logging.info('Output file %s', out_file)
logging.info('Cut %s', cut)
logging.info('Scanning %s', to_scan)
fh = ROOT.TFile(in_file)
in_tree = fh.Get(kwargs.get('tree', 'events'))
in_tree.GetPlayer().SetScanRedirect(True)
in_tree.GetPlayer().SetScanFileName(out_file)
in_tree.Scan(to_scan, cut, 'colsize=20')
fh.Close()
if __name__ == '__main__':
if len(argv) < 4:
print 'USAGE %s INFILE OUTFILE CUT [BRANCH [BRANCH ...]]' % argv[0]
exit(0)
main(*argv[1:], tree=os.environ.get('tree', 'events'))
| {
"content_hash": "7225935af40905082d86dfc20a437d4d",
"timestamp": "",
"source": "github",
"line_count": 38,
"max_line_length": 75,
"avg_line_length": 24.42105263157895,
"alnum_prop": 0.6282327586206896,
"repo_name": "dabercro/CrombieTools",
"id": "8a928c28c7310f05d1ecb8c54b32d541a99ecdd7",
"size": "952",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/eventdump.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "6339"
},
{
"name": "C++",
"bytes": "744000"
},
{
"name": "HTML",
"bytes": "4719"
},
{
"name": "JavaScript",
"bytes": "1783"
},
{
"name": "Makefile",
"bytes": "723"
},
{
"name": "Objective-C",
"bytes": "184882"
},
{
"name": "PHP",
"bytes": "6820"
},
{
"name": "Perl",
"bytes": "8637"
},
{
"name": "Python",
"bytes": "155475"
},
{
"name": "R",
"bytes": "12216"
},
{
"name": "Shell",
"bytes": "55692"
},
{
"name": "TeX",
"bytes": "2664"
}
],
"symlink_target": ""
} |
from django.contrib import admin
from models import Message, Report
class MessageAdmin(admin.ModelAdmin):
list_display = ('destination', 'message_type')
search_fields = ('destination',)
fieldsets = [
(None, {'fields': ['destination', 'message_type']}),
('Optional', {'fields': ['source_name',
'source',
'service',
'header',
'wap_text',
'_class',
'concatenate',
'unicode',
'validity',
'delivery',
'report'],
'classes': ['collapse']}),
]
class ReportAdmin(admin.ModelAdmin):
raw_id_fields = ('message',)
list_display = ('message', 'status', 'timestamp')
list_filter = ('status',)
admin.site.register(Message, MessageAdmin)
admin.site.register(Report, ReportAdmin)
| {
"content_hash": "3051aa315ae4c72e28ebac70e10fe932",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 60,
"avg_line_length": 33.25,
"alnum_prop": 0.43609022556390975,
"repo_name": "MagicSolutions/django-labyrinpy",
"id": "a05fa811a4fa612ee717b1e22b6a64cf95d24c91",
"size": "1064",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django_labyrinpy/admin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "12549"
}
],
"symlink_target": ""
} |
import os
import webapp2
import logging
import utils
from google.appengine.ext import db
from google.appengine.api import users
from gpx import SaveToDB
from models import GPXheader, ToroamUsers
from usermgmt import UserMgmt
from views import BaseHandler
class LoginPage(BaseHandler):
def get(self):
user = users.get_current_user()
cuser = UserMgmt()
if user:
reglist = None
logouturl = cuser.getlogouturl()
else:
reglist = cuser.getloginlist()
logouturl = None
self.render_template('login.html', {'user': user, 'reglist' : reglist, 'logout' : logouturl})
def post(self):
cont = self.request.get('continue')
logging.info('OpenIDLogin handler called, cont: %s' % cont)
openid = self.request.get('openid_url')
if openid:
logging.info('creating login url for openid: %s' % openid)
login_url = users.create_login_url(cont, None, openid)
logging.info('redirecting to url: %s' % login_url)
self.redirect(login_url)
else:
self.error(400)
class Settings(BaseHandler):
def get(self):
cuser = UserMgmt()
user = cuser.get()
if user:
if user.gpxuser:
gpxs = user.gpxuser
else:
gpxs = None
if user.admin:
allusers = ToroamUsers.all()
else:
allusers = None
self.render_template('settings.html', {'dbuser': user, 'gpxs': gpxs, 'allusers': allusers})
else:
return webapp2.redirect('/login')
| {
"content_hash": "eef62f95ad4f95a8032ebeed805f8d6c",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 103,
"avg_line_length": 32.27450980392157,
"alnum_prop": 0.5820170109356014,
"repo_name": "yafraorg/yafra-toroam",
"id": "53bcabea102b10655645378e472006f508dc854d",
"size": "1646",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "com.toroam.appengine/views_admin.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "367"
},
{
"name": "HTML",
"bytes": "87961"
},
{
"name": "Python",
"bytes": "67200"
}
],
"symlink_target": ""
} |
from corehq.fluff.calculators.xform import FormPropertyFilter
from couchforms.models import XFormInstance
import fluff
from custom.tdh import TDH_DOMAINS, ENROLL_CHILD_XMLNSES, INFANT_CLASSIFICATION_XMLNSES, INFANT_TREATMENT_XMLNSES, \
NEWBORN_CLASSIFICATION_XMLNSES, NEWBORN_TREATMENT_XMLNSES, CHILD_CLASSIFICATION_XMLNSES, \
CHILD_TREATMENT_XMLNSES
from custom.utils.utils import flat_field
class TDHNullEmitter(fluff.Calculator):
@fluff.null_emitter
def numerator(self, case):
yield None
class TDHDateEmiiter(fluff.Calculator):
@fluff.date_emitter
def total(self, form):
yield {
'date': form.received_on,
'value': 0
}
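
# A minimal illustrative calculator (an addition for clarity, not used by any
# IndicatorDocument below) built from the same fluff API as the two
# calculators above; it emits one countable unit per form instead of a zero:
class TDHExampleFormCounter(fluff.Calculator):

    @fluff.date_emitter
    def total(self, form):
        # bucket by the form's server receipt date, count 1 per form
        yield {
            'date': form.received_on,
            'value': 1
        }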
class TDHEnrollChildFluff(fluff.IndicatorDocument):
document_class = XFormInstance
document_filter = FormPropertyFilter(xmlns=ENROLL_CHILD_XMLNSES[0])
domains = TDH_DOMAINS
save_direct_to_sql = True
case_id = flat_field(lambda f: f.form['case_rec_child']['case']['@case_id'])
sex = flat_field(lambda f: f.form.get('demographics', {}).get('sex', ''))
village = flat_field(lambda f: f.form.get('demographics', {}).get('village', ''))
last_visit_date = flat_field(lambda f: f.form['case_rec_child']['case']['update'].get('last_visit_date', ''))
dob = flat_field(lambda f: f.form.get('age_questions', {}).get('dob', ''))
dob_known = flat_field(lambda f: f.form.get('age_questions', {}).get('dob_known', ''))
age_in_years = flat_field(lambda f: f.form.get('age_in_years', ''))
numerator = TDHNullEmitter()
class TDHInfantClassificationFluff(fluff.IndicatorDocument):
document_class = XFormInstance
document_filter = FormPropertyFilter(xmlns=INFANT_CLASSIFICATION_XMLNSES[0])
domains = TDH_DOMAINS
save_direct_to_sql = True
group_by = ('domain', )
case_id = flat_field(lambda f: f.form['case']['@case_id'])
user_id = flat_field(lambda f: f.form['case']['@user_id'])
tablet_login_id = flat_field(lambda f: f.form['meta']['username'])
author_id = flat_field(lambda f: f.form.get('selected_user_pin', ''))
author_name = flat_field(lambda f: f.form.get('selected_username', ''))
consultation_type = flat_field(lambda f: f.form.get('consultation_type', ''))
number = flat_field(lambda f: f.form.get('consult_count', ''))
other_comments = flat_field(lambda f: f.form.get('other_comments', ''))
L_hfa = flat_field(lambda f: f.form.get('L_hfa', ''))
L_wfa = flat_field(lambda f: f.form.get('L_wfa', ''))
L_wfh = flat_field(lambda f: f.form.get('L_wfh', ''))
M_hfa = flat_field(lambda f: f.form.get('M_hfa', ''))
M_wfa = flat_field(lambda f: f.form.get('M_wfa', ''))
M_wfh = flat_field(lambda f: f.form.get('M_wfh', ''))
S_hfa = flat_field(lambda f: f.form.get('S_hfa', ''))
S_wfa = flat_field(lambda f: f.form.get('S_wfa', ''))
S_wfh = flat_field(lambda f: f.form.get('S_wfh', ''))
age_in_months = flat_field(lambda f: f.form.get('age_in_months', ''))
bad_height = flat_field(lambda f: f.form.get('bad_height', ''))
dob = flat_field(lambda f: f.form.get('dob', ''))
au_moins_deux_signes_vih = flat_field(lambda f: f.form.get('au_moins_deux_signes_vih', ''))
incapable_nourir = flat_field(lambda f: f.form.get('incapable_nourir', ''))
infection_bac_grave = flat_field(lambda f: f.form.get('infection_bac_grave', ''))
infection_bac_locale = flat_field(lambda f: f.form.get('infection_bac_locale', ''))
mean_hfa = flat_field(lambda f: f.form.get('mean_hfa', ''))
mean_wfa = flat_field(lambda f: f.form.get('mean_wfa', ''))
mean_wfh = flat_field(lambda f: f.form.get('mean_wfh', ''))
alimentation_low = flat_field(lambda f: f.form.get('alimentation', {}).get('alimentation_low', ''))
alimentation_medium = flat_field(lambda f: f.form.get('alimentation', {}).get('alimentation_medium', ''))
alimentation_qa = flat_field(lambda f: f.form.get('alimentation', {}).get('alimentation_qa', ''))
alimentation_qb = flat_field(lambda f: f.form.get('alimentation', {}).get('alimentation_qb', ''))
alimentation_qc = flat_field(lambda f: f.form.get('alimentation', {}).get('alimentation_qc', ''))
alimentation_qd = flat_field(lambda f: f.form.get('alimentation', {}).get('alimentation_qd', ''))
alimentation_qe = flat_field(lambda f: f.form.get('alimentation', {}).get('alimentation_qe', ''))
alimentation_qf = flat_field(lambda f: f.form.get('alimentation', {}).get('alimentation_qf', ''))
alimentation_qg = flat_field(lambda f: f.form.get('alimentation', {}).get('alimentation_qg', ''))
alimentation_qh = flat_field(lambda f: f.form.get('alimentation', {}).get('alimentation_qh', ''))
centile_hfa = flat_field(lambda f: f.form.get('centile_hfa', ''))
centile_wfa = flat_field(lambda f: f.form.get('centile_wfa', ''))
centile_wfh = flat_field(lambda f: f.form.get('centile_wfh', ''))
classification_deshydratation = flat_field(lambda f: f.form.get('classification_deshydratation', ''))
classification_diahree = flat_field(lambda f: f.form.get('classification_diahree', ''))
classification_infection = flat_field(lambda f: f.form.get('classification_infection', ''))
classification_malnutrition = flat_field(lambda f: f.form.get('classification_malnutrition', ''))
classification_vih = flat_field(lambda f: f.form.get('classification_vih', ''))
diarrhee_non = flat_field(lambda f: f.form.get('diarrhee', {}).get('diarrhee_non', ''))
diarrhee_qa = flat_field(lambda f: f.form.get('diarrhee', {}).get('diarrhee_qa', ''))
inf_bac_freq_resp = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_freq_resp', ''))
inf_bac_grave = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_grave', ''))
inf_bac_non = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_non', ''))
inf_bac_qa = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_qa', ''))
inf_bac_qc = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_qc', ''))
inf_bac_qd = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_qd', ''))
inf_bac_qe = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_qe', ''))
inf_bac_qf = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_qf', ''))
inf_bac_qg = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_qg', ''))
inf_bac_qh = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_qh', ''))
inf_bac_qi = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_qi', ''))
inf_bac_qj = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_qj', ''))
inf_bac_qk = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_qk', ''))
inf_bac_ql = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_ql', ''))
inf_bac_qm = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_qm', ''))
muac_change = flat_field(lambda f: f.form.get('muac_change', ''))
muac_change_status = flat_field(lambda f: f.form.get('muac_change_status', ''))
muac_grading = flat_field(lambda f: f.form.get('muac_grading', ''))
pas_signes_vih = flat_field(lambda f: f.form.get('pas_signes_vih', ''))
sd2neg_hfa = flat_field(lambda f: f.form.get('sd2neg_hfa', ''))
sd2neg_wfa = flat_field(lambda f: f.form.get('sd2neg_wfa', ''))
sd2neg_wfh = flat_field(lambda f: f.form.get('sd2neg_wfh', ''))
sd2pos_hfa = flat_field(lambda f: f.form.get('sd2pos_hfa', ''))
sd2pos_wfa = flat_field(lambda f: f.form.get('sd2pos_wfa', ''))
sd2pos_wfh = flat_field(lambda f: f.form.get('sd2pos_wfh', ''))
sd3neg_hfa = flat_field(lambda f: f.form.get('sd3neg_hfa', ''))
sd3neg_wfa = flat_field(lambda f: f.form.get('sd3neg_wfa', ''))
sd3neg_wfh = flat_field(lambda f: f.form.get('sd3neg_wfh', ''))
sd3pos_hfa = flat_field(lambda f: f.form.get('sd3pos_hfa', ''))
sd3pos_wfa = flat_field(lambda f: f.form.get('sd3pos_wfa', ''))
sd3pos_wfh = flat_field(lambda f: f.form.get('sd3pos_wfh', ''))
selected_user_id_and_name = flat_field(lambda f: f.form.get('selected_user_id_and_name', ''))
seriousness_alimentation = flat_field(lambda f: f.form.get('seriousness_alimentation', ''))
seriousness_diarrhee = flat_field(lambda f: f.form.get('seriousness_diarrhee', ''))
seriousness_inf_bac = flat_field(lambda f: f.form.get('seriousness_inf_bac', ''))
seriousness_vih = flat_field(lambda f: f.form.get('seriousness_vih', ''))
sex = flat_field(lambda f: f.form.get('sex', ''))
sex_loaded = flat_field(lambda f: f.form.get('sex_loaded', ''))
signes_deshy_evident = flat_field(lambda f: f.form.get('signes_deshy_evident', ''))
signes_deshy_severe = flat_field(lambda f: f.form.get('signes_deshy_severe', ''))
signes_pb_alim = flat_field(lambda f: f.form.get('signes_pb_alim', ''))
update_vaccines = flat_field(lambda f: f.form.get('update_vaccines', ''))
vaccines = flat_field(lambda f: ', '.join([k for k, v in f.form.get('vaccines', {}).iteritems()
if v == 'yes']))
bcg = flat_field(lambda f: f.form.get('vaccines', {}).get('bcg', ''))
vih_non = flat_field(lambda f: f.form.get('vih', {}).get('vih_non', ''))
vih_qa = flat_field(lambda f: f.form.get('vih', {}).get('vih_qa', ''))
vih_qb = flat_field(lambda f: f.form.get('vih', {}).get('vih_qb', ''))
vih_qc = flat_field(lambda f: f.form.get('vih', {}).get('vih_qc', ''))
vih_qd = flat_field(lambda f: f.form.get('vih', {}).get('vih_qd', ''))
vih_qe = flat_field(lambda f: f.form.get('vih', {}).get('vih_qe', ''))
vih_qf = flat_field(lambda f: f.form.get('vih', {}).get('vih_qf', ''))
vih_qg = flat_field(lambda f: f.form.get('vih', {}).get('vih_qg', ''))
vih_qh = flat_field(lambda f: f.form.get('vih', {}).get('vih_qh', ''))
vih_qi = flat_field(lambda f: f.form.get('vih', {}).get('vih_qi', ''))
vih_qj = flat_field(lambda f: f.form.get('vih', {}).get('vih_qj', ''))
vih_qk = flat_field(lambda f: f.form.get('vih', {}).get('vih_qk', ''))
vih_ql = flat_field(lambda f: f.form.get('vih', {}).get('vih_ql', ''))
vih_symp_possible = flat_field(lambda f: f.form.get('vih', {}).get('vih_symp_possible', ''))
visit_date = flat_field(lambda f: f.form.get('visit_date', ''))
visit_type = flat_field(lambda f: f.form.get('visit_type', ''))
height = flat_field(lambda f: f.form.get('vitals', {}).get('height', ''))
muac = flat_field(lambda f: f.form.get('vitals', {}).get('muac', ''))
temp = flat_field(lambda f: f.form.get('vitals', {}).get('temp', ''))
weight = flat_field(lambda f: f.form.get('vitals', {}).get('weight', ''))
zscore_grading_hfa = flat_field(lambda f: f.form.get('zscore_grading_hfa', ''))
zscore_grading_wfa = flat_field(lambda f: f.form.get('zscore_grading_wfa', ''))
zscore_grading_wfh = flat_field(lambda f: f.form.get('zscore_grading_wfh', ''))
zscore_hfa = flat_field(lambda f: f.form.get('zscore_hfa', ''))
zscore_hfa_change = flat_field(lambda f: f.form.get('zscore_hfa_change', ''))
zscore_hfa_change_status = flat_field(lambda f: f.form.get('zscore_hfa_change_status', ''))
zscore_wfa = flat_field(lambda f: f.form.get('zscore_wfa', ''))
zscore_wfa_change = flat_field(lambda f: f.form.get('zscore_wfa_change', ''))
zscore_wfa_change_status = flat_field(lambda f: f.form.get('zscore_wfa_change_status', ''))
zscore_wfh = flat_field(lambda f: f.form.get('zscore_wfh', ''))
zscore_wfh_change = flat_field(lambda f: f.form.get('zscore_wfh_change', ''))
zscore_wfh_change_status = flat_field(lambda f: f.form.get('zscore_wfh_change_status', ''))
last_height = flat_field(lambda f: f.form['case']['update'].get('last_height', ''))
last_weight = flat_field(lambda f: f.form['case']['update'].get('last_weight', ''))
numerator = TDHDateEmiiter()
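
# Worked example of the `vaccines` join used above (a plain dict stands in
# for the parsed form block): only keys flagged 'yes' survive, so
#
#   block = {'bcg': 'yes', 'opv_0': 'no', 'penta_1': 'yes'}
#   ', '.join([k for k, v in block.iteritems() if v == 'yes'])
#
# yields the 'yes' keys joined, e.g. 'bcg, penta_1' (Python 2 dict iteration
# order is arbitrary, so the ordering of the joined keys is not guaranteed).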
class TDHNewbornClassificationFluff(fluff.IndicatorDocument):
document_class = XFormInstance
document_filter = FormPropertyFilter(xmlns=NEWBORN_CLASSIFICATION_XMLNSES[0])
domains = TDH_DOMAINS
save_direct_to_sql = True
group_by = ('domain', )
case_id = flat_field(lambda f: f.form['case']['@case_id'])
user_id = flat_field(lambda f: f.form['case']['@user_id'])
tablet_login_id = flat_field(lambda f: f.form['meta']['username'])
author_id = flat_field(lambda f: f.form.get('selected_user_pin', ''))
author_name = flat_field(lambda f: f.form.get('selected_username', ''))
consultation_type = flat_field(lambda f: f.form.get('consultation_type', ''))
number = flat_field(lambda f: f.form.get('consult_count', ''))
other_comments = flat_field(lambda f: f.form.get('other_comments', ''))
L_hfa = flat_field(lambda f: f.form.get('L_hfa', ''))
L_wfa = flat_field(lambda f: f.form.get('L_wfa', ''))
L_wfh = flat_field(lambda f: f.form.get('L_wfh', ''))
M_hfa = flat_field(lambda f: f.form.get('M_hfa', ''))
M_wfa = flat_field(lambda f: f.form.get('M_wfa', ''))
M_wfh = flat_field(lambda f: f.form.get('M_wfh', ''))
S_hfa = flat_field(lambda f: f.form.get('S_hfa', ''))
S_wfa = flat_field(lambda f: f.form.get('S_wfa', ''))
S_wfh = flat_field(lambda f: f.form.get('S_wfh', ''))
age_in_months = flat_field(lambda f: f.form.get('age_in_months', ''))
age_in_weeks = flat_field(lambda f: f.form.get('age_in_weeks', ''))
bad_height = flat_field(lambda f: f.form.get('bad_height', ''))
dob = flat_field(lambda f: f.form.get('dob', ''))
infection_locale = flat_field(lambda f: f.form.get('infection_locale', ''))
maladie_grave = flat_field(lambda f: f.form.get('maladie_grave', ''))
maladie_grave_alim = flat_field(lambda f: f.form.get('maladie_grave_alim', ''))
mean_hfa = flat_field(lambda f: f.form.get('mean_hfa', ''))
mean_wfa = flat_field(lambda f: f.form.get('mean_wfa', ''))
mean_wfh = flat_field(lambda f: f.form.get('mean_wfh', ''))
alimentation_low = flat_field(lambda f: f.form.get('alimentation', {}).get('alimentation_low', ''))
alimentation_medium = flat_field(lambda f: f.form.get('alimentation', {}).get('alimentation_medium', ''))
alimentation_high = flat_field(lambda f: f.form.get('alimentation', {}).get('alimentation_high', ''))
alimentation_qa = flat_field(lambda f: f.form.get('alimentation', {}).get('alimentation_qa', ''))
alimentation_qb = flat_field(lambda f: f.form.get('alimentation', {}).get('alimentation_qb', ''))
alimentation_qc = flat_field(lambda f: f.form.get('alimentation', {}).get('alimentation_qc', ''))
alimentation_qd = flat_field(lambda f: f.form.get('alimentation', {}).get('alimentation_qd', ''))
alimentation_qe = flat_field(lambda f: f.form.get('alimentation', {}).get('alimentation_qe', ''))
alimentation_qf = flat_field(lambda f: f.form.get('alimentation', {}).get('alimentation_qf', ''))
alimentation_qg = flat_field(lambda f: f.form.get('alimentation', {}).get('alimentation_qg', ''))
alimentation_qh = flat_field(lambda f: f.form.get('alimentation', {}).get('alimentation_qh', ''))
alimentation_qi = flat_field(lambda f: f.form.get('alimentation', {}).get('alimentation_qi', ''))
centile_hfa = flat_field(lambda f: f.form.get('centile_hfa', ''))
centile_wfa = flat_field(lambda f: f.form.get('centile_wfa', ''))
centile_wfh = flat_field(lambda f: f.form.get('centile_wfh', ''))
classification_infection = flat_field(lambda f: f.form.get('classification_infection', ''))
classification_malnutrition = flat_field(lambda f: f.form.get('classification_malnutrition', ''))
classification_occular = flat_field(lambda f: f.form.get('classification_occular', ''))
classification_poids = flat_field(lambda f: f.form.get('classification_poids', ''))
classification_vih = flat_field(lambda f: f.form.get('classification_vih', ''))
diarrhee_non = flat_field(lambda f: f.form.get('diarrhee', {}).get('diarrhee_non', ''))
diarrhee_qa = flat_field(lambda f: f.form.get('diarrhee', {}).get('diarrhee_qa', ''))
inf_bac_freq_resp = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_freq_resp', ''))
inf_bac_grave = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_grave', ''))
inf_bac_hypo = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_hypo', ''))
inf_bac_locale = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_locale', ''))
inf_bac_peu_probable = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_peu_probable', ''))
inf_bac_qa = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_qa', ''))
inf_bac_qb = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_qb', ''))
inf_bac_qd = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_qd', ''))
inf_bac_qe = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_qe', ''))
inf_bac_qf = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_qf', ''))
inf_bac_qg = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_qg', ''))
inf_bac_qh = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_qh', ''))
inf_bac_qi = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_qi', ''))
inf_bac_qj = flat_field(lambda f: f.form.get('inf_bac', {}).get('inf_bac_qj', ''))
inf_occ_low = flat_field(lambda f: f.form.get('inf_occ', {}).get('inf_occ_low', ''))
inf_occ_medium = flat_field(lambda f: f.form.get('inf_occ', {}).get('inf_occ_medium', ''))
    inf_occ_qa = flat_field(lambda f: f.form.get('inf_occ', {}).get('inf_occ_qa', ''))
muac_change = flat_field(lambda f: f.form.get('muac_change', ''))
muac_change_status = flat_field(lambda f: f.form.get('muac_change_status', ''))
muac_grading = flat_field(lambda f: f.form.get('muac_grading', ''))
pb_alim = flat_field(lambda f: f.form.get('pb_alim', ''))
prev_muac = flat_field(lambda f: f.form.get('prev_muac', ''))
    prev_zscore_hfa = flat_field(lambda f: f.form.get('prev_zscore_hfa', ''))
    prev_zscore_wfa = flat_field(lambda f: f.form.get('prev_zscore_wfa', ''))
    prev_zscore_wfh = flat_field(lambda f: f.form.get('prev_zscore_wfh', ''))
poids_high = flat_field(lambda f: f.form.get('poids', {}).get('poids_high', ''))
poids_medium = flat_field(lambda f: f.form.get('poids', {}).get('poids_medium', ''))
poids_low = flat_field(lambda f: f.form.get('poids', {}).get('poids_low', ''))
poids_qa = flat_field(lambda f: f.form.get('poids', {}).get('poids_qa', ''))
sd2neg_hfa = flat_field(lambda f: f.form.get('sd2neg_hfa', ''))
sd2neg_wfa = flat_field(lambda f: f.form.get('sd2neg_wfa', ''))
sd2neg_wfh = flat_field(lambda f: f.form.get('sd2neg_wfh', ''))
sd2pos_hfa = flat_field(lambda f: f.form.get('sd2pos_hfa', ''))
sd2pos_wfa = flat_field(lambda f: f.form.get('sd2pos_wfa', ''))
sd2pos_wfh = flat_field(lambda f: f.form.get('sd2pos_wfh', ''))
sd3neg_hfa = flat_field(lambda f: f.form.get('sd3neg_hfa', ''))
sd3neg_wfa = flat_field(lambda f: f.form.get('sd3neg_wfa', ''))
sd3neg_wfh = flat_field(lambda f: f.form.get('sd3neg_wfh', ''))
sd3pos_hfa = flat_field(lambda f: f.form.get('sd3pos_hfa', ''))
sd3pos_wfa = flat_field(lambda f: f.form.get('sd3pos_wfa', ''))
sd3pos_wfh = flat_field(lambda f: f.form.get('sd3pos_wfh', ''))
selected_user_id_and_name = flat_field(lambda f: f.form.get('selected_user_id_and_name', ''))
seriousness_alimentation = flat_field(lambda f: f.form.get('seriousness_alimentation', ''))
seriousness_inf_bac = flat_field(lambda f: f.form.get('seriousness_inf_bac', ''))
seriousness_inf_occ = flat_field(lambda f: f.form.get('seriousness_inf_occ', ''))
seriousness_poids = flat_field(lambda f: f.form.get('seriousness_poids', ''))
seriousness_vih = flat_field(lambda f: f.form.get('seriousness_vih', ''))
sex = flat_field(lambda f: f.form.get('sex', ''))
sex_loaded = flat_field(lambda f: f.form.get('sex_loaded', ''))
signes_hiv = flat_field(lambda f: f.form.get('signes_hiv', ''))
update_vaccines = flat_field(lambda f: f.form.get('update_vaccines', ''))
vaccines = flat_field(lambda f: ', '.join([k for k, v in f.form.get('vaccines', {}).iteritems()
if v == 'yes']))
bcg = flat_field(lambda f: f.form.get('vaccines', {}).get('bcg', ''))
opv_0 = flat_field(lambda f: f.form.get('vaccines', {}).get('opv_0', ''))
vih_peu_probable = flat_field(lambda f: f.form.get('vih', {}).get('vih_peu_probable', ''))
vih_possible = flat_field(lambda f: f.form.get('vih', {}).get('vih_possible', ''))
vih_probable = flat_field(lambda f: f.form.get('vih', {}).get('vih_probable', ''))
vih_qa = flat_field(lambda f: f.form.get('vih', {}).get('vih_qa', ''))
vih_qb = flat_field(lambda f: f.form.get('vih', {}).get('vih_qb', ''))
vih_qc = flat_field(lambda f: f.form.get('vih', {}).get('vih_qc', ''))
vih_qd = flat_field(lambda f: f.form.get('vih', {}).get('vih_qd', ''))
vih_qe = flat_field(lambda f: f.form.get('vih', {}).get('vih_qe', ''))
vih_qf = flat_field(lambda f: f.form.get('vih', {}).get('vih_qf', ''))
vih_qg = flat_field(lambda f: f.form.get('vih', {}).get('vih_qg', ''))
visit_date = flat_field(lambda f: f.form.get('visit_date', ''))
visit_type = flat_field(lambda f: f.form.get('visit_type', ''))
height = flat_field(lambda f: f.form.get('vitals', {}).get('height', ''))
muac = flat_field(lambda f: f.form.get('vitals', {}).get('muac', ''))
temp = flat_field(lambda f: f.form.get('vitals', {}).get('temp', ''))
weight = flat_field(lambda f: f.form.get('vitals', {}).get('weight', ''))
zscore_grading_hfa = flat_field(lambda f: f.form.get('zscore_grading_hfa', ''))
zscore_grading_wfa = flat_field(lambda f: f.form.get('zscore_grading_wfa', ''))
zscore_grading_wfh = flat_field(lambda f: f.form.get('zscore_grading_wfh', ''))
zscore_hfa = flat_field(lambda f: f.form.get('zscore_hfa', ''))
zscore_hfa_change = flat_field(lambda f: f.form.get('zscore_hfa_change', ''))
zscore_hfa_change_status = flat_field(lambda f: f.form.get('zscore_hfa_change_status', ''))
zscore_wfa = flat_field(lambda f: f.form.get('zscore_wfa', ''))
zscore_wfa_change = flat_field(lambda f: f.form.get('zscore_wfa_change', ''))
zscore_wfa_change_status = flat_field(lambda f: f.form.get('zscore_wfa_change_status', ''))
zscore_wfh = flat_field(lambda f: f.form.get('zscore_wfh', ''))
zscore_wfh_change = flat_field(lambda f: f.form.get('zscore_wfh_change', ''))
zscore_wfh_change_status = flat_field(lambda f: f.form.get('zscore_wfh_change_status', ''))
last_height = flat_field(lambda f: f.form['case']['update'].get('last_height', ''))
last_weight = flat_field(lambda f: f.form['case']['update'].get('last_weight', ''))
numerator = TDHDateEmiiter()
class TDHChildClassificationFluff(fluff.IndicatorDocument):
document_class = XFormInstance
document_filter = FormPropertyFilter(xmlns=CHILD_CLASSIFICATION_XMLNSES[0])
domains = TDH_DOMAINS
save_direct_to_sql = True
group_by = ('domain', )
case_id = flat_field(lambda f: f.form['case']['@case_id'])
user_id = flat_field(lambda f: f.form['case']['@user_id'])
tablet_login_id = flat_field(lambda f: f.form['meta']['username'])
author_id = flat_field(lambda f: f.form.get('selected_user_pin', ''))
author_name = flat_field(lambda f: f.form.get('selected_username', ''))
consultation_type = flat_field(lambda f: f.form.get('consultation_type', ''))
number = flat_field(lambda f: f.form.get('consult_count', ''))
other_comments = flat_field(lambda f: f.form.get('other_comments', ''))
L_hfa = flat_field(lambda f: f.form.get('L_hfa', ''))
L_wfa = flat_field(lambda f: f.form.get('L_wfa', ''))
L_wfh = flat_field(lambda f: f.form.get('L_wfh', ''))
M_hfa = flat_field(lambda f: f.form.get('M_hfa', ''))
M_wfa = flat_field(lambda f: f.form.get('M_wfa', ''))
M_wfh = flat_field(lambda f: f.form.get('M_wfh', ''))
S_hfa = flat_field(lambda f: f.form.get('S_hfa', ''))
S_wfa = flat_field(lambda f: f.form.get('S_wfa', ''))
S_wfh = flat_field(lambda f: f.form.get('S_wfh', ''))
age_in_months = flat_field(lambda f: f.form.get('age_in_months', ''))
age_in_weeks = flat_field(lambda f: f.form.get('age_in_weeks', ''))
bad_height = flat_field(lambda f: f.form.get('bad_height', ''))
dob = flat_field(lambda f: f.form.get('dob', ''))
anemie_grave = flat_field(lambda f: f.form.get('anemie_grave', ''))
au_moins_deux_maladies = flat_field(lambda f: f.form.get('au_moins_deux_maladies', ''))
au_plus_une_maladie = flat_field(lambda f: f.form.get('au_plus_une_maladie', ''))
mastoidite = flat_field(lambda f: f.form.get('mastoidite', ''))
mean_hfa = flat_field(lambda f: f.form.get('mean_hfa', ''))
mean_wfa = flat_field(lambda f: f.form.get('mean_wfa', ''))
mean_wfh = flat_field(lambda f: f.form.get('mean_wfh', ''))
anemia_none = flat_field(lambda f: f.form.get('anemie', {}).get('anemia_none', ''))
anemia_normal = flat_field(lambda f: f.form.get('anemie', {}).get('anemia_normal', ''))
anemia_serious = flat_field(lambda f: f.form.get('anemie', {}).get('anemia_serious', ''))
paleur_palmaire = flat_field(lambda f: f.form.get('anemie', {}).get('paleur_palmaire', ''))
centile_hfa = flat_field(lambda f: f.form.get('centile_hfa', ''))
centile_wfa = flat_field(lambda f: f.form.get('centile_wfa', ''))
centile_wfh = flat_field(lambda f: f.form.get('centile_wfh', ''))
classification_anemie = flat_field(lambda f: f.form.get('classification_anemie', ''))
classification_deshydratation = flat_field(lambda f: f.form.get('classification_deshydratation', ''))
classification_diahree = flat_field(lambda f: f.form.get('classification_diahree', ''))
classification_dysenterie = flat_field(lambda f: f.form.get('classification_dysenterie', ''))
classification_malnutrition = flat_field(lambda f: f.form.get('classification_malnutrition', ''))
classification_oreille = flat_field(lambda f: f.form.get('classification_oreille', ''))
classification_paludisme = flat_field(lambda f: f.form.get('classification_paludisme', ''))
classification_pneumonie = flat_field(lambda f: f.form.get('classification_pneumonie', ''))
classification_rougeole = flat_field(lambda f: f.form.get('classification_rougeole', ''))
classification_vih = flat_field(lambda f: f.form.get('classification_vih', ''))
classifications_graves = flat_field(lambda f: f.form.get('classifications_graves', ''))
deshydratation_evident = flat_field(lambda f: f.form.get('deshydratation_evident', ''))
deshydratation_severe = flat_field(lambda f: f.form.get('deshydratation_severe', ''))
boire = flat_field(lambda f: f.form.get('danger', {}).get('boire', ''))
convulsions_passe = flat_field(lambda f: f.form.get('danger', {}).get('convulsions_passe', ''))
convulsions_present = flat_field(lambda f: f.form.get('danger', {}).get('convulsions_present', ''))
high_danger = flat_field(lambda f: f.form.get('danger', {}).get('high_danger', ''))
lethargie = flat_field(lambda f: f.form.get('danger', {}).get('lethargie', ''))
low_danger = flat_field(lambda f: f.form.get('danger', {}).get('low_danger', ''))
vomit = flat_field(lambda f: f.form.get('danger', {}).get('vomit', ''))
conscience_agitation = flat_field(lambda f: f.form.get('diarrhee', {}).get('conscience_agitation', ''))
diarrhee_presence = flat_field(lambda f: f.form.get('diarrhee', {}).get('diarrhee_presence', ''))
diarrhee_presence_duree = flat_field(lambda f: f.form.get('diarrhee', {}).get('diarrhee_presence_duree', ''))
dysentery = flat_field(lambda f: f.form.get('diarrhee', {}).get('dysentery', ''))
no_dehydration = flat_field(lambda f: f.form.get('diarrhee', {}).get('no_dehydration', ''))
persistent = flat_field(lambda f: f.form.get('diarrhee', {}).get('persistent', ''))
pli_cutane = flat_field(lambda f: f.form.get('diarrhee', {}).get('pli_cutane', ''))
sang_selles = flat_field(lambda f: f.form.get('diarrhee', {}).get('sang_selles', ''))
severe_dehydration = flat_field(lambda f: f.form.get('diarrhee', {}).get('severe_dehydration', ''))
severe_persistent = flat_field(lambda f: f.form.get('diarrhee', {}).get('severe_persistent', ''))
soif = flat_field(lambda f: f.form.get('diarrhee', {}).get('soif', ''))
some_dehydration = flat_field(lambda f: f.form.get('diarrhee', {}).get('some_dehydration', ''))
yeux_enfonces = flat_field(lambda f: f.form.get('diarrhee', {}).get('yeux_enfonces', ''))
diarrhee_persistente = flat_field(lambda f: f.form.get('diarrhee_persistente', ''))
diarrhee_persistente_severe = flat_field(lambda f: f.form.get('diarrhee_persistente_severe', ''))
choc = flat_field(lambda f: f.form.get('fievre', {}).get('choc', ''))
cornee = flat_field(lambda f: f.form.get('fievre', {}).get('cornee', ''))
ecoulement_nasal = flat_field(lambda f: f.form.get('fievre', {}).get('ecoulement_nasal', ''))
ecoulement_oculaire = flat_field(lambda f: f.form.get('fievre', {}).get('ecoulement_oculaire', ''))
eruption_cutanee = flat_field(lambda f: f.form.get('fievre', {}).get('eruption_cutanee', ''))
fievre_presence = flat_field(lambda f: f.form.get('fievre', {}).get('fievre_presence', ''))
fievre_presence_duree = flat_field(lambda f: f.form.get('fievre', {}).get('fievre_presence_duree', ''))
fievre_presence_longue = flat_field(lambda f: f.form.get('fievre', {}).get('fievre_presence_longue', ''))
history_measles = flat_field(lambda f: f.form.get('fievre', {}).get('history_measles', ''))
ictere = flat_field(lambda f: f.form.get('fievre', {}).get('ictere', ''))
malaria = flat_field(lambda f: f.form.get('fievre', {}).get('malaria', ''))
malaria_severe = flat_field(lambda f: f.form.get('fievre', {}).get('malaria_severe', ''))
malaria_severe_neg_tdr = flat_field(lambda f: f.form.get('fievre', {}).get('malaria_severe_neg_tdr', ''))
measles = flat_field(lambda f: f.form.get('fievre', {}).get('measles', ''))
measles_complex = flat_field(lambda f: f.form.get('fievre', {}).get('measles_complex', ''))
measles_severe = flat_field(lambda f: f.form.get('fievre', {}).get('measles_severe', ''))
raideur_nuque = flat_field(lambda f: f.form.get('fievre', {}).get('raideur_nuque', ''))
saignements_anormaux = flat_field(lambda f: f.form.get('fievre', {}).get('saignements_anormaux', ''))
tdr = flat_field(lambda f: f.form.get('fievre', {}).get('tdr', ''))
tdr_negative = flat_field(lambda f: f.form.get('fievre', {}).get('tdr_negative', ''))
ulcerations = flat_field(lambda f: f.form.get('fievre', {}).get('ulcerations', ''))
urines_foncees = flat_field(lambda f: f.form.get('fievre', {}).get('urines_foncees', ''))
yeux_rouge = flat_field(lambda f: f.form.get('fievre', {}).get('yeux_rouge', ''))
frequence_elevee = flat_field(lambda f: f.form.get('frequence_elevee', ''))
height_rounded = flat_field(lambda f: f.form.get('height_rounded', ''))
ma_mam = flat_field(lambda f: f.form.get('ma_mam', ''))
ma_mas = flat_field(lambda f: f.form.get('ma_mas', ''))
ma_normal = flat_field(lambda f: f.form.get('ma_normal', ''))
malnutrition_mam = flat_field(lambda f: f.form.get('malnutrition', {}).get('malnutrition_mam', ''))
malnutrition_masc = flat_field(lambda f: f.form.get('malnutrition', {}).get('malnutrition_masc', ''))
malnutrition_mass = flat_field(lambda f: f.form.get('malnutrition', {}).get('malnutrition_mass', ''))
malnutrition_na = flat_field(lambda f: f.form.get('malnutrition', {}).get('malnutrition_na', ''))
no_malnutrition = flat_field(lambda f: f.form.get('malnutrition', {}).get('no_malnutrition', ''))
oedemes = flat_field(lambda f: f.form.get('malnutrition', {}).get('oedemes', ''))
test_appetit = flat_field(lambda f: f.form.get('malnutrition', {}).get('test_appetit', ''))
muac_change = flat_field(lambda f: f.form.get('muac_change', ''))
muac_change_status = flat_field(lambda f: f.form.get('muac_change_status', ''))
muac_grading = flat_field(lambda f: f.form.get('muac_grading', ''))
paludisme_grave = flat_field(lambda f: f.form.get('paludisme_grave', ''))
pas_de_deshydratation = flat_field(lambda f: f.form.get('pas_de_deshydratation', ''))
pneumonie_grave = flat_field(lambda f: f.form.get('pneumonie_grave', ''))
prev_muac = flat_field(lambda f: f.form.get('prev_muac', ''))
    prev_zscore_hfa = flat_field(lambda f: f.form.get('prev_zscore_hfa', ''))
    prev_zscore_wfa = flat_field(lambda f: f.form.get('prev_zscore_wfa', ''))
    prev_zscore_wfh = flat_field(lambda f: f.form.get('prev_zscore_wfh', ''))
rougeole_compliquee = flat_field(lambda f: f.form.get('rougeole_compliquee', ''))
rougeole_ou_antecedent = flat_field(lambda f: f.form.get('rougeole_ou_antecedent', ''))
ear_infection_acute = flat_field(lambda f: f.form.get('oreille', {}).get('ear_infection_acute', ''))
ear_mastoiditis = flat_field(lambda f: f.form.get('oreille', {}).get('ear_mastoiditis', ''))
oreille_douleur = flat_field(lambda f: f.form.get('oreille', {}).get('oreille_douleur', ''))
oreille_ecoulement = flat_field(lambda f: f.form.get('oreille', {}).get('oreille_ecoulement', ''))
oreille_ecoulement_duree = flat_field(lambda f: f.form.get('oreille', {}).get('oreille_ecoulement_duree', ''))
oreille_gonflement = flat_field(lambda f: f.form.get('oreille', {}).get('oreille_gonflement', ''))
oreille_probleme = flat_field(lambda f: f.form.get('oreille', {}).get('oreille_probleme', ''))
sd2neg_hfa = flat_field(lambda f: f.form.get('sd2neg_hfa', ''))
sd2neg_wfa = flat_field(lambda f: f.form.get('sd2neg_wfa', ''))
sd2neg_wfh = flat_field(lambda f: f.form.get('sd2neg_wfh', ''))
sd2pos_hfa = flat_field(lambda f: f.form.get('sd2pos_hfa', ''))
sd2pos_wfa = flat_field(lambda f: f.form.get('sd2pos_wfa', ''))
sd2pos_wfh = flat_field(lambda f: f.form.get('sd2pos_wfh', ''))
sd3neg_hfa = flat_field(lambda f: f.form.get('sd3neg_hfa', ''))
sd3neg_wfa = flat_field(lambda f: f.form.get('sd3neg_wfa', ''))
sd3neg_wfh = flat_field(lambda f: f.form.get('sd3neg_wfh', ''))
sd3pos_hfa = flat_field(lambda f: f.form.get('sd3pos_hfa', ''))
sd3pos_wfa = flat_field(lambda f: f.form.get('sd3pos_wfa', ''))
sd3pos_wfh = flat_field(lambda f: f.form.get('sd3pos_wfh', ''))
selected_user_id_and_name = flat_field(lambda f: f.form.get('selected_user_id_and_name', ''))
anemie = flat_field(lambda f: f.form.get('seriousness_anemie', ''))
danger = flat_field(lambda f: f.form.get('seriousness_danger', ''))
diarrhee = flat_field(lambda f: f.form.get('seriousness_diarrhee', ''))
fievre = flat_field(lambda f: f.form.get('seriousness_fievre', ''))
malnutrition = flat_field(lambda f: f.form.get('seriousness_malnutrition', ''))
oreille = flat_field(lambda f: f.form.get('seriousness_oreille', ''))
toux = flat_field(lambda f: f.form.get('seriousness_toux', ''))
vih = flat_field(lambda f: f.form.get('seriousness_vih', ''))
sex = flat_field(lambda f: f.form.get('sex', ''))
sex_loaded = flat_field(lambda f: f.form.get('sex_loaded', ''))
signes_danger = flat_field(lambda f: f.form.get('signes_danger', ''))
signes_rougeole = flat_field(lambda f: f.form.get('signes_rougeole', ''))
tdr_ok = flat_field(lambda f: f.form.get('tdr_ok', ''))
freq_resp = flat_field(lambda f: f.form.get('toux', {}).get('freq_resp', ''))
high_toux = flat_field(lambda f: f.form.get('toux', {}).get('high_toux', ''))
low_toux = flat_field(lambda f: f.form.get('toux', {}).get('low_toux', ''))
medium_toux = flat_field(lambda f: f.form.get('toux', {}).get('medium_toux', ''))
stridor = flat_field(lambda f: f.form.get('toux', {}).get('stridor', ''))
tirage = flat_field(lambda f: f.form.get('toux', {}).get('tirage', ''))
toux_presence = flat_field(lambda f: f.form.get('toux', {}).get('toux_presence', ''))
toux_presence_duree = flat_field(lambda f: f.form.get('toux', {}).get('toux_presence_duree', ''))
update_vaccines = flat_field(lambda f: f.form.get('update_vaccines', ''))
vaccines = flat_field(lambda f: ', '.join([k for k, v in f.form.get('vaccines', {}).iteritems()
if v == 'yes']))
bcg = flat_field(lambda f: f.form.get('vaccines', {}).get('bcg', ''))
measles_1 = flat_field(lambda f: f.form.get('vaccines', {}).get('measles_1', ''))
measles_2 = flat_field(lambda f: f.form.get('vaccines', {}).get('measles_2', ''))
opv_0 = flat_field(lambda f: f.form.get('vaccines', {}).get('opv_0', ''))
opv_1 = flat_field(lambda f: f.form.get('vaccines', {}).get('opv_1', ''))
opv_2 = flat_field(lambda f: f.form.get('vaccines', {}).get('opv_2', ''))
opv_3 = flat_field(lambda f: f.form.get('vaccines', {}).get('opv_3', ''))
penta_1 = flat_field(lambda f: f.form.get('vaccines', {}).get('penta_1', ''))
penta_2 = flat_field(lambda f: f.form.get('vaccines', {}).get('penta_2', ''))
penta_3 = flat_field(lambda f: f.form.get('vaccines', {}).get('penta_3', ''))
pneumo_1 = flat_field(lambda f: f.form.get('vaccines', {}).get('pneumo_1', ''))
pneumo_2 = flat_field(lambda f: f.form.get('vaccines', {}).get('pneumo_2', ''))
pneumo_3 = flat_field(lambda f: f.form.get('vaccines', {}).get('pneumo_3', ''))
rotavirus_1 = flat_field(lambda f: f.form.get('vaccines', {}).get('rotavirus_1', ''))
rotavirus_2 = flat_field(lambda f: f.form.get('vaccines', {}).get('rotavirus_2', ''))
rotavirus_3 = flat_field(lambda f: f.form.get('vaccines', {}).get('rotavirus_3', ''))
yf = flat_field(lambda f: f.form.get('vaccines', {}).get('yf', ''))
augmentation_glande_parotide = flat_field(
lambda f: f.form.get('vih', {}).get('augmentation_glande_parotide', ''))
candidose_buccale = flat_field(lambda f: f.form.get('vih', {}).get('candidose_buccale', ''))
diarrhee_dernierement = flat_field(lambda f: f.form.get('vih', {}).get('diarrhee_dernierement', ''))
hypertrophie_ganglions_lymphatiques = flat_field(
lambda f: f.form.get('vih', {}).get('hypertrophie_ganglions_lymphatiques', ''))
pneumonie_recidivante = flat_field(lambda f: f.form.get('vih', {}).get('pneumonie_recidivante', ''))
serologie_enfant = flat_field(lambda f: f.form.get('vih', {}).get('serologie_enfant', ''))
serologie_mere = flat_field(lambda f: f.form.get('vih', {}).get('serologie_mere', ''))
test_enfant = flat_field(lambda f: f.form.get('vih', {}).get('test_enfant', ''))
test_mere = flat_field(lambda f: f.form.get('vih', {}).get('test_mere', ''))
vih_confirmee = flat_field(lambda f: f.form.get('vih', {}).get('vih_confirmee', ''))
vih_pas = flat_field(lambda f: f.form.get('vih', {}).get('vih_pas', ''))
vih_peu_probable = flat_field(lambda f: f.form.get('vih', {}).get('vih_peu_probable', ''))
vih_possible = flat_field(lambda f: f.form.get('vih', {}).get('vih_possible', ''))
vih_symp_confirmee = flat_field(lambda f: f.form.get('vih', {}).get('vih_symp_confirmee', ''))
vih_symp_probable = flat_field(lambda f: f.form.get('vih', {}).get('vih_symp_probable', ''))
vih_symp_suspecte = flat_field(lambda f: f.form.get('vih', {}).get('vih_symp_suspecte', ''))
visit_date = flat_field(lambda f: f.form.get('visit_date', ''))
visit_type = flat_field(lambda f: f.form.get('visit_type', ''))
height = flat_field(lambda f: f.form.get('vitals', {}).get('height', ''))
muac = flat_field(lambda f: f.form.get('vitals', {}).get('muac', ''))
temp = flat_field(lambda f: f.form.get('vitals', {}).get('temp', ''))
weight = flat_field(lambda f: f.form.get('vitals', {}).get('weight', ''))
zscore_grading_hfa = flat_field(lambda f: f.form.get('zscore_grading_hfa', ''))
zscore_grading_wfa = flat_field(lambda f: f.form.get('zscore_grading_wfa', ''))
zscore_grading_wfh = flat_field(lambda f: f.form.get('zscore_grading_wfh', ''))
zscore_hfa = flat_field(lambda f: f.form.get('zscore_hfa', ''))
zscore_hfa_change = flat_field(lambda f: f.form.get('zscore_hfa_change', ''))
zscore_hfa_change_status = flat_field(lambda f: f.form.get('zscore_hfa_change_status', ''))
zscore_wfa = flat_field(lambda f: f.form.get('zscore_wfa', ''))
zscore_wfa_change = flat_field(lambda f: f.form.get('zscore_wfa_change', ''))
zscore_wfa_change_status = flat_field(lambda f: f.form.get('zscore_wfa_change_status', ''))
zscore_wfh = flat_field(lambda f: f.form.get('zscore_wfh', ''))
zscore_wfh_change = flat_field(lambda f: f.form.get('zscore_wfh_change', ''))
zscore_wfh_change_status = flat_field(lambda f: f.form.get('zscore_wfh_change_status', ''))
show_muac_status = flat_field(lambda f: f.form.get('zscore_results', {}).get('show_muac_status', ''))
show_zscore_hfa = flat_field(lambda f: f.form.get('zscore_results', {}).get('show_zscore_hfa', ''))
show_zscore_wfa = flat_field(lambda f: f.form.get('zscore_results', {}).get('show_zscore_wfa', ''))
show_zscore_wfh = flat_field(lambda f: f.form.get('zscore_results', {}).get('show_zscore_wfh', ''))
warn_bad_height = flat_field(lambda f: f.form.get('zscore_results', {}).get('warn_bad_height', ''))
last_height = flat_field(lambda f: f.form['case']['update'].get('last_height', ''))
last_weight = flat_field(lambda f: f.form['case']['update'].get('last_weight', ''))
numerator = TDHDateEmiiter()
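
# The treatment classes below repeat long guarded chains such as
# f.form.get('treatments', {}).get('incapable_nourrir', {}).get(key, '').
# A minimal equivalent helper (hypothetical name; the original inlines the
# chain in every lambda instead of factoring it out):
def _deep_get(form, *keys):
    node = form
    for key in keys:
        if not node:
            return ''
        node = node.get(key, {})
    return node or ''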
class TDHInfantTreatmentFluff(fluff.IndicatorDocument):
document_class = XFormInstance
document_filter = FormPropertyFilter(xmlns=INFANT_TREATMENT_XMLNSES[0])
domains = TDH_DOMAINS
save_direct_to_sql = True
    case_id = flat_field(lambda f: f.form.get('child_case_id', ''))
antibio_valid_meds = flat_field(lambda f: f.form.get('antibio_valid_meds', ''))
child_age = flat_field(lambda f: f.form.get('child_age', ''))
child_age_loaded = flat_field(lambda f: f.form.get('child_age_loaded', ''))
child_weight = flat_field(lambda f: f.form.get('child_weight', ''))
child_weight_loaded = flat_field(lambda f: f.form.get('child_weight_loaded', ''))
classification_deshydratation = flat_field(lambda f: f.form.get('classification_deshydratation', ''))
classification_deshydratation_loaded = flat_field(
lambda f: f.form.get('classification_deshydratation_loaded', ''))
classification_diahree = flat_field(lambda f: f.form.get('classification_diahree', ''))
classification_diahree_loaded = flat_field(lambda f: f.form.get('classification_diahree_loaded', ''))
classification_infection = flat_field(lambda f: f.form.get('classification_infection', ''))
classification_infection_loaded = flat_field(lambda f: f.form.get('classification_infection_loaded', ''))
    classification_malnutrition = flat_field(lambda f: f.form.get('classification_malnutrition', ''))
classification_malnutrition_loaded = flat_field(lambda f: f.form.get('classification_malnutrition_loaded', ''))
classification_vih = flat_field(lambda f: f.form.get('classification_vih', ''))
classification_vih_loaded = flat_field(lambda f: f.form.get('classification_vih_loaded', ''))
other_treatments = flat_field(lambda f: f.form.get('other_treatments', ''))
vitamine_a_valid_meds = flat_field(lambda f: f.form.get('vitamine_a_valid_meds', ''))
antibio = flat_field(
lambda f: f.form.get('select_meds', {}).get('antibio', '') if f.form.get('select_meds', {}) else '')
deshydratation_severe = flat_field(
lambda f: f.form.get('select_treatments', {}).get('deshydratation_severe', '') if f.form.get(
'select_treatments', {}) else '')
infection_grave = flat_field(lambda f: f.form.get('select_treatments', {})
.get('infection_grave', '') if f.form.get('select_treatments', {}) else '')
signe_deshydratation = flat_field(
lambda f: f.form.get('select_treatments', {}).get('signe_deshydratation', '') if f.form.get(
'select_treatments', {}) else '')
deshydratation_severe_sans_infection_title = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_sans_infection', {})
.get('deshydratation_severe_sans_infection_title', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('deshydratation_severe_sans_infection', {}) else '')
deshydratation_severe_sans_infection_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_sans_infection', {})
.get('deshydratation_severe_sans_infection_treat_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('deshydratation_severe_sans_infection', {}) else '')
incapable_nourrir_title = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {}).get('incapable_nourrir_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('incapable_nourrir', {}) else '')
incapable_nourrir_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {}).get('incapable_nourrir_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('incapable_nourrir', {}) else '')
incapable_nourrir_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {}).get('incapable_nourrir_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('incapable_nourrir', {}) else '')
incapable_nourrir_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {}).get('incapable_nourrir_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('incapable_nourrir', {}) else '')
incapable_nourrir_treat_2_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {})
.get('incapable_nourrir_treat_2_help_0', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('incapable_nourrir', {}) else '')
incapable_nourrir_treat_2_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {})
.get('incapable_nourrir_treat_2_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('incapable_nourrir', {}) else '')
incapable_nourrir_treat_2_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {})
.get('incapable_nourrir_treat_2_help_1', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('incapable_nourrir', {}) else '')
incapable_nourrir_treat_2_help_1_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {})
.get('incapable_nourrir_treat_2_help_1_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('incapable_nourrir', {}) else '')
incapable_nourrir_treat_2_help_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {})
.get('incapable_nourrir_treat_2_help_2', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('incapable_nourrir', {}) else '')
incapable_nourrir_treat_2_help_2_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {})
.get('incapable_nourrir_treat_2_help_2_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('incapable_nourrir', {}) else '')
incapable_nourrir_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {}).get('incapable_nourrir_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('incapable_nourrir', {}) else '')
incapable_nourrir_treat_3_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {})
.get('incapable_nourrir_treat_3_help_0', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('incapable_nourrir', {}) else '')
incapable_nourrir_treat_3_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {})
.get('incapable_nourrir_treat_3_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('incapable_nourrir', {}) else '')
incapable_nourrir_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {}).get('incapable_nourrir_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('incapable_nourrir', {}) else '')
infection_grave_title = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_3_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_3_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_4_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_4_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_5 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_5', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_5_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_5_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_6 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_6', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_7 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_7', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_no_ref_title = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
.get('infection_grave_no_ref_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave_no_ref', {}) else '')
    infection_grave_no_ref_treat_0 = flat_field(
        lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
        .get('infection_grave_no_ref_treat_0', '')
        if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave_no_ref', {}) else '')
    infection_grave_no_ref_treat_1 = flat_field(
        lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
        .get('infection_grave_no_ref_treat_1', '')
        if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave_no_ref', {}) else '')
    infection_grave_no_ref_treat_2 = flat_field(
        lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
        .get('infection_grave_no_ref_treat_2', '')
        if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave_no_ref', {}) else '')
    infection_grave_no_ref_treat_5 = flat_field(
        lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
        .get('infection_grave_no_ref_treat_5', '')
        if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave_no_ref', {}) else '')
infection_locale_title = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_locale', {}).get('infection_locale_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_locale', {}) else '')
infection_locale_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_locale', {}).get('infection_locale_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_locale', {}) else '')
infection_locale_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_locale', {}).get('infection_locale_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_locale', {}) else '')
infection_locale_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_locale', {}).get('infection_locale_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_locale', {}) else '')
infection_locale_treat_2_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_locale', {})
.get('infection_locale_treat_2_help_0', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('infection_locale', {}) else '')
infection_locale_treat_2_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_locale', {})
.get('infection_locale_treat_2_help_1', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('infection_locale', {}) else '')
infection_locale_treat_2_help_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_locale', {})
.get('infection_locale_treat_2_help_2', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('infection_locale', {}) else '')
infection_locale_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_locale', {}).get('infection_locale_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_locale', {}) else '')
infection_locale_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_locale', {}).get('infection_locale_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_locale', {}) else '')
pas_infection_title = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_infection', {}).get('pas_infection_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_infection', {}) else '')
pas_infection_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_infection', {}).get('pas_infection_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_infection', {}) else '')
pas_infection_treat_0_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_infection', {}).get('pas_infection_treat_0_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_infection', {}) else '')
pas_infection_treat_0_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_infection', {})
.get('pas_infection_treat_0_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_infection', {}) else '')
pas_infection_treat_0_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_infection', {}).get('pas_infection_treat_0_help_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_infection', {}) else '')
pas_infection_treat_0_help_1_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_infection', {})
.get('pas_infection_treat_0_help_1_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_infection', {}) else '')
pas_infection_treat_0_help_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_infection', {}).get('pas_infection_treat_0_help_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_infection', {}) else '')
pas_infection_treat_0_help_2_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_infection', {})
.get('pas_infection_treat_0_help_2_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_infection', {}) else '')
pas_infection_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_infection', {}).get('pas_infection_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_infection', {}) else '')
pas_infection_treat_1_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_infection', {}).get('pas_infection_treat_1_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_infection', {}) else '')
pas_infection_treat_1_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_infection', {})
.get('pas_infection_treat_1_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_infection', {}) else '')
maladie_grave_title = flat_field(
lambda f: f.form.get('treatments', {}).get('maladie_grave', {})
.get('maladie_grave_title', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('maladie_grave', {}) else '')
maladie_grave_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('maladie_grave', {})
.get('maladie_grave_treat_0', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('maladie_grave', {}) else '')
maladie_grave_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('maladie_grave', {})
.get('maladie_grave_treat_1', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('maladie_grave', {}) else '')
probleme_alimentation_title = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_title', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_0', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_0_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_0_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_1', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_1_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_1_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_2', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_2_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_2_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_3', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_4', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_4_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_4_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_5 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_5', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_6 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_6', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_6_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_6_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_6_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_6_help_1', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_7 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_7', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_8 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_8', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
signe_deshydratation_infection_title = flat_field(
lambda f: f.form.get('treatments', {}).get('signe_deshydratation_infection', {})
.get('signe_deshydratation_infection_title', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('signe_deshydratation_infection', {}) else '')
signe_deshydratation_infection_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('signe_deshydratation_infection', {})
.get('signe_deshydratation_infection_treat_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('signe_deshydratation_infection', {}) else '')
signe_deshydratation_infection_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('signe_deshydratation_infection', {})
.get('signe_deshydratation_infection_treat_1', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('signe_deshydratation_infection', {}) else '')
signe_deshydratation_infection_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('signe_deshydratation_infection', {})
.get('signe_deshydratation_infection_treat_2', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('signe_deshydratation_infection', {}) else '')
vih_pas_infection_title = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_pas_infection', {}).get('vih_pas_infection_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_pas_infection', {}) else '')
vih_pas_infection_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_pas_infection', {}).get('vih_pas_infection_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_pas_infection', {}) else '')
vih_pas_infection_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_pas_infection', {}).get('vih_pas_infection_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_pas_infection', {}) else '')
vih_possible_title = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
numerator = TDHNullEmitter()
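
# Every treatment field below follows the same guarded shape: read
# form['treatments'][section][key] and fall back to '' whenever the
# 'treatments' group or the section itself is missing. A minimal factory
# capturing that pattern could look like the sketch below; it is offered
# for reference only, and the classes that follow spell each lookup out
# explicitly.
def _treatment_field(section, key):
    return flat_field(
        lambda f: f.form.get('treatments', {}).get(section, {}).get(key, '')
        if f.form.get('treatments', {}) and f.form.get('treatments', {}).get(section, {}) else '')
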
class TDHNewbornTreatmentFluff(fluff.IndicatorDocument):
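"""Flattened indicators over newborn treatment form submissions
(xmlns NEWBORN_TREATMENT_XMLNSES[0]), saved directly to SQL."""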
document_class = XFormInstance
document_filter = FormPropertyFilter(xmlns=NEWBORN_TREATMENT_XMLNSES[0])
domains = TDH_DOMAINS
save_direct_to_sql = True
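# Top-level scalar values copied straight off the form; the *_loaded
# variants presumably mirror the values preloaded into the form session.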
case_id = flat_field(lambda f: f.form.get('child_case_id'))
antibio_valid_meds = flat_field(lambda f: f.form.get('antibio_valid_meds', ''))
child_age = flat_field(lambda f: f.form.get('child_age', ''))
child_age_loaded = flat_field(lambda f: f.form.get('child_age_loaded', ''))
child_weight = flat_field(lambda f: f.form.get('child_weight', ''))
child_weight_loaded = flat_field(lambda f: f.form.get('child_weight_loaded', ''))
classification_occular = flat_field(lambda f: f.form.get('classification_occular', ''))
classification_occular_loaded = flat_field(lambda f: f.form.get('classification_occular_loaded', ''))
classification_poids = flat_field(lambda f: f.form.get('classification_poids', ''))
classification_poids_loaded = flat_field(lambda f: f.form.get('classification_poids_loaded', ''))
classification_infection = flat_field(lambda f: f.form.get('classification_infection', ''))
classification_infection_loaded = flat_field(lambda f: f.form.get('classification_infection_loaded', ''))
classification_malnutrition = flat_field(lambda f: f.form.get('classification_malnutrition', ''))
classification_malnutrition_loaded = flat_field(lambda f: f.form.get('classification_malnutrition_loaded', ''))
classification_vih = flat_field(lambda f: f.form.get('classification_vih', ''))
classification_vih_loaded = flat_field(lambda f: f.form.get('classification_vih_loaded', ''))
other_treatments = flat_field(lambda f: f.form.get('other_treatments', ''))
antibio = flat_field(
lambda f: f.form.get('select_meds', {}).get('antibio', '') if f.form.get('select_meds', {}) else '')
infection_grave = flat_field(
lambda f: f.form.get('select_treatments', {}).get('infection_grave', '') if f.form.get(
'select_treatments', {}) else '')
conjonctivite_title = flat_field(
lambda f: f.form.get('treatments', {}).get('conjonctivite', {}).get('conjonctivite_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('conjonctivite', {}) else '')
conjonctivite_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('conjonctivite', {}).get('conjonctivite_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('conjonctivite', {}) else '')
conjonctivite_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('conjonctivite', {}).get('conjonctivite_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('conjonctivite', {}) else '')
conjonctivite_treat_1_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('conjonctivite', {}).get('conjonctivite_treat_1_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('conjonctivite', {}) else '')
conjonctivite_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('conjonctivite', {}).get('conjonctivite_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('conjonctivite', {}) else '')
conjonctivite_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('conjonctivite', {}).get('conjonctivite_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('conjonctivite', {}) else '')
conjonctivite_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('conjonctivite', {}).get('conjonctivite_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('conjonctivite', {}) else '')
fable_poids_title = flat_field(
lambda f: f.form.get('treatments', {}).get('fable_poids', {}).get('fable_poids_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('fable_poids', {}) else '')
fable_poids_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('fable_poids', {}).get('fable_poids_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('fable_poids', {}) else '')
fable_poids_treat_0_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('fable_poids', {}).get('fable_poids_treat_0_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('fable_poids', {}) else '')
fable_poids_treat_0_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('fable_poids', {}).get('fable_poids_treat_0_help_0_prompt', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('fable_poids', {}) else '')
fable_poids_treat_0_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('fable_poids', {}).get('fable_poids_treat_0_help_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('fable_poids', {}) else '')
fable_poids_treat_0_help_1_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('fable_poids', {}).get('fable_poids_treat_0_help_1_prompt', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('fable_poids', {}) else '')
fable_poids_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('fable_poids', {}).get('fable_poids_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('fable_poids', {}) else '')
fable_poids_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('fable_poids', {}).get('fable_poids_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('fable_poids', {}) else '')
fable_poids_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('fable_poids', {}).get('fable_poids_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('fable_poids', {}) else '')
fable_poids_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('fable_poids', {}).get('fable_poids_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('fable_poids', {}) else '')
incapable_nourrir_title = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {}).get('incapable_nourrir_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('incapable_nourrir', {}) else '')
incapable_nourrir_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {}).get('incapable_nourrir_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('incapable_nourrir', {}) else '')
incapable_nourrir_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {}).get('incapable_nourrir_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('incapable_nourrir', {}) else '')
incapable_nourrir_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {}).get('incapable_nourrir_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('incapable_nourrir', {}) else '')
incapable_nourrir_treat_2_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {})
.get('incapable_nourrir_treat_2_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('incapable_nourrir', {}) else '')
incapable_nourrir_treat_2_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {})
.get('incapable_nourrir_treat_2_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('incapable_nourrir', {}) else '')
incapable_nourrir_treat_2_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {})
.get('incapable_nourrir_treat_2_help_1', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('incapable_nourrir', {}) else '')
incapable_nourrir_treat_2_help_1_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {})
.get('incapable_nourrir_treat_2_help_1_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('incapable_nourrir', {}) else '')
incapable_nourrir_treat_2_help_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {})
.get('incapable_nourrir_treat_2_help_2', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('incapable_nourrir', {}) else '')
incapable_nourrir_treat_2_help_2_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {})
.get('incapable_nourrir_treat_2_help_2_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('incapable_nourrir', {}) else '')
incapable_nourrir_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {}).get('incapable_nourrir_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('incapable_nourrir', {}) else '')
incapable_nourrir_treat_3_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {})
.get('incapable_nourrir_treat_3_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('incapable_nourrir', {}) else '')
incapable_nourrir_treat_3_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {})
.get('incapable_nourrir_treat_3_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('incapable_nourrir', {}) else '')
incapable_nourrir_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('incapable_nourrir', {}).get('incapable_nourrir_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('incapable_nourrir', {}) else '')
hypothermie_moderee_title = flat_field(
lambda f: f.form.get('treatments', {}).get('hypothermie_moderee', {}).get('hypothermie_moderee_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('hypothermie_moderee', {}) else '')
hypothermie_moderee_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('hypothermie_moderee', {})
.get('hypothermie_moderee_treat_0', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('hypothermie_moderee', {}) else '')
hypothermie_moderee_treat_0_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('hypothermie_moderee', {})
.get('hypothermie_moderee_treat_0_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('hypothermie_moderee', {}) else '')
hypothermie_moderee_treat_0_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('hypothermie_moderee', {})
.get('hypothermie_moderee_treat_0_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('hypothermie_moderee', {}) else '')
hypothermie_moderee_treat_0_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('hypothermie_moderee', {})
.get('hypothermie_moderee_treat_0_help_1', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('hypothermie_moderee', {}) else '')
hypothermie_moderee_treat_0_help_1_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('hypothermie_moderee', {})
.get('hypothermie_moderee_treat_0_help_1_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('hypothermie_moderee', {}) else '')
hypothermie_moderee_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('hypothermie_moderee', {})
.get('hypothermie_moderee_treat_1', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('hypothermie_moderee', {}) else '')
hypothermie_moderee_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('hypothermie_moderee', {})
.get('hypothermie_moderee_treat_2', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('hypothermie_moderee', {}) else '')
hypothermie_moderee_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('hypothermie_moderee', {})
.get('hypothermie_moderee_treat_3', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('hypothermie_moderee', {}) else '')
infection_grave_title = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_2_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_2_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_2_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {})
.get('infection_grave_treat_2_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_4_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_4_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_5 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_5', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_5_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_5_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_5_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {})
.get('infection_grave_treat_5_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_6 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_6', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_6_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_6_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_6_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {})
.get('infection_grave_treat_6_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_7 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_7', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_7_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_7_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_7_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {})
.get('infection_grave_treat_7_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_8 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_8', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_8_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_8_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_8_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {})
.get('infection_grave_treat_8_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_8_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_8_help_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_8_help_1_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {})
.get('infection_grave_treat_8_help_1_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_grave', {}) else '')
infection_grave_treat_9 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave', {}).get('infection_grave_treat_9', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_grave', {}) else '')
infection_grave_no_ref_title = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
.get('infection_grave_no_ref_title', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('infection_grave_no_ref', {}) else '')
infection_grave_no_ref_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
.get('infection_grave_no_ref_treat_0', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('infection_grave_no_ref', {}) else '')
infection_grave_no_ref_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
.get('infection_grave_no_ref_treat_1', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('infection_grave_no_ref', {}) else '')
infection_grave_no_ref_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
.get('infection_grave_no_ref_treat_2', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('infection_grave_no_ref', {}) else '')
infection_grave_no_ref_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
.get('infection_grave_no_ref_treat_3', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('infection_grave_no_ref', {}) else '')
infection_grave_no_ref_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
.get('infection_grave_no_ref_treat_4', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('infection_grave_no_ref', {}) else '')
infection_grave_no_ref_treat_4_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
.get('infection_grave_no_ref_treat_4_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_grave_no_ref', {}) else '')
infection_grave_no_ref_treat_5 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
.get('infection_grave_no_ref_treat_5', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('infection_grave_no_ref', {}) else '')
infection_grave_no_ref_treat_5_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
.get('infection_grave_no_ref_treat_5_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_grave_no_ref', {}) else '')
infection_grave_no_ref_treat_5_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
.get('infection_grave_no_ref_treat_5_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_grave_no_ref', {}) else '')
infection_grave_no_ref_treat_6 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
.get('infection_grave_no_ref_treat_6', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('infection_grave_no_ref', {}) else '')
infection_grave_no_ref_treat_6_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
.get('infection_grave_no_ref_treat_6_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_grave_no_ref', {}) else '')
infection_grave_no_ref_treat_6_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
.get('infection_grave_no_ref_treat_6_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_grave_no_ref', {}) else '')
infection_grave_no_ref_treat_7 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
.get('infection_grave_no_ref_treat_7', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('infection_grave_no_ref', {}) else '')
infection_grave_no_ref_treat_7_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
.get('infection_grave_no_ref_treat_7_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_grave_no_ref', {}) else '')
infection_grave_no_ref_treat_7_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
.get('infection_grave_no_ref_treat_7_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_grave_no_ref', {}) else '')
infection_grave_no_ref_treat_8 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
.get('infection_grave_no_ref_treat_8', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('infection_grave_no_ref', {}) else '')
infection_grave_no_ref_treat_8_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
.get('infection_grave_no_ref_treat_8_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_grave_no_ref', {}) else '')
infection_grave_no_ref_treat_8_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
.get('infection_grave_no_ref_treat_8_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_grave_no_ref', {}) else '')
infection_grave_no_ref_treat_9 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
.get('infection_grave_no_ref_treat_9', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('infection_grave_no_ref', {}) else '')
infection_grave_no_ref_treat_10 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
.get('infection_grave_no_ref_treat_10', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_grave_no_ref', {}) else '')
infection_grave_no_ref_treat_10_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
.get('infection_grave_no_ref_treat_10_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_grave_no_ref', {}) else '')
infection_grave_no_ref_treat_10_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
.get('infection_grave_no_ref_treat_10_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_grave_no_ref', {}) else '')
infection_grave_no_ref_treat_10_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
.get('infection_grave_no_ref_treat_10_help_1', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_grave_no_ref', {}) else '')
infection_grave_no_ref_treat_10_help_1_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_grave_no_ref', {})
.get('infection_grave_no_ref_treat_10_help_1_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_grave_no_ref', {}) else '')
infection_locale_title = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_locale', {}).get('infection_locale_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_locale', {}) else '')
infection_locale_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_locale', {}).get('infection_locale_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_locale', {}) else '')
infection_locale_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_locale', {}).get('infection_locale_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_locale', {}) else '')
infection_locale_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_locale', {}).get('infection_locale_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_locale', {}) else '')
infection_locale_treat_2_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_locale', {})
.get('infection_locale_treat_2_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_locale', {}) else '')
infection_locale_treat_2_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_locale', {})
.get('infection_locale_treat_2_help_1', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_locale', {}) else '')
infection_locale_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_locale', {}).get('infection_locale_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_locale', {}) else '')
infection_locale_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_locale', {}).get('infection_locale_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_locale', {}) else '')
infection_locale_treat_4_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_locale', {})
.get('infection_locale_treat_4_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_locale', {}) else '')
infection_locale_treat_5 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_locale', {}).get('infection_locale_treat_5', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_locale', {}) else '')
show_antibio_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_locale', {}).get('show_antibio_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('infection_locale', {}) else '')
infection_peu_probable_title = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_peu_probable', {})
.get('infection_peu_probable_title', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('infection_peu_probable', {}) else '')
infection_peu_probable_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_peu_probable', {})
.get('infection_peu_probable_treat_0', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('infection_peu_probable', {}) else '')
infection_peu_probable_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_peu_probable', {})
.get('infection_peu_probable_treat_1', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('infection_peu_probable', {}) else '')
infection_peu_probable_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_peu_probable', {})
.get('infection_peu_probable_treat_2', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('infection_peu_probable', {}) else '')
maladie_grave_title = flat_field(
lambda f: f.form.get('treatments', {}).get('maladie_grave', {}).get('maladie_grave_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('maladie_grave', {}) else '')
maladie_grave_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('maladie_grave', {}).get('maladie_grave_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('maladie_grave', {}) else '')
maladie_grave_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('maladie_grave', {}).get('maladie_grave_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('maladie_grave', {}) else '')
maladie_grave_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('maladie_grave', {}).get('maladie_grave_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('maladie_grave', {}) else '')
maladie_grave_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('maladie_grave', {}).get('maladie_grave_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('maladie_grave', {}) else '')
pas_de_faible_poids_title = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_de_faible_poids', {}).get('pas_de_faible_poids_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_de_faible_poids', {}) else '')
pas_de_faible_poids_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_de_faible_poids', {})
.get('pas_de_faible_poids_treat_0', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('pas_de_faible_poids', {}) else '')
pas_de_faible_poids_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_de_faible_poids', {})
.get('pas_de_faible_poids_treat_1', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_de_faible_poids', {}) else '')
pas_de_faible_poids_treat_1_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_de_faible_poids', {})
.get('pas_de_faible_poids_treat_1_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_de_faible_poids', {}) else '')
pas_de_faible_poids_treat_1_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_de_faible_poids', {})
.get('pas_de_faible_poids_treat_1_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_de_faible_poids', {}) else '')
pas_de_faible_poids_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_de_faible_poids', {})
.get('pas_de_faible_poids_treat_2', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('pas_de_faible_poids', {}) else '')
pas_de_probleme_alimentation_title = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_de_probleme_alimentation', {})
.get('pas_de_probleme_alimentation_title', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_de_probleme_alimentation', {}) else '')
pas_de_probleme_alimentation_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_de_probleme_alimentation', {})
.get('pas_de_probleme_alimentation_treat_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_de_probleme_alimentation', {}) else '')
pas_de_probleme_alimentation_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_de_probleme_alimentation', {})
.get('pas_de_probleme_alimentation_treat_1', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_de_probleme_alimentation', {}) else '')
pas_de_probleme_alimentation_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_de_probleme_alimentation', {})
.get('pas_de_probleme_alimentation_treat_2', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_de_probleme_alimentation', {}) else '')
pas_infection_occulaire_title = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_infection_occulaire', {})
.get('pas_infection_occulaire_title', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_infection_occulaire', {}) else '')
pas_infection_occulaire_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_infection_occulaire', {})
.get('pas_infection_occulaire_treat_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_infection_occulaire', {}) else '')
pas_infection_occulaire_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_infection_occulaire', {})
.get('pas_infection_occulaire_treat_1', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_infection_occulaire', {}) else '')
pas_infection_occulaire_treat_1_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_infection_occulaire', {})
.get('pas_infection_occulaire_treat_1_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_infection_occulaire', {}) else '')
pas_infection_occulaire_treat_1_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_infection_occulaire', {})
.get('pas_infection_occulaire_treat_1_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_infection_occulaire', {}) else '')
pas_infection_occulaire_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_infection_occulaire', {})
.get('pas_infection_occulaire_treat_2', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_infection_occulaire', {}) else '')
poids_tres_faible_title = flat_field(
lambda f: f.form.get('treatments', {}).get('poids_tres_faible', {}).get('poids_tres_faible_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('poids_tres_faible', {}) else '')
poids_tres_faible_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('poids_tres_faible', {}).get('poids_tres_faible_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('poids_tres_faible', {}) else '')
poids_tres_faible_treat_0_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('poids_tres_faible', {})
.get('poids_tres_faible_treat_0_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('poids_tres_faible', {}) else '')
poids_tres_faible_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('poids_tres_faible', {}).get('poids_tres_faible_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('poids_tres_faible', {}) else '')
poids_tres_faible_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('poids_tres_faible', {}).get('poids_tres_faible_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('poids_tres_faible', {}) else '')
poids_tres_faible_treat_2_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('poids_tres_faible', {})
.get('poids_tres_faible_treat_2_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('poids_tres_faible', {}) else '')
poids_tres_faible_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('poids_tres_faible', {}).get('poids_tres_faible_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('poids_tres_faible', {}) else '')
poids_tres_faible_treat_3_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('poids_tres_faible', {})
.get('poids_tres_faible_treat_3_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('poids_tres_faible', {}) else '')
poids_tres_faible_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('poids_tres_faible', {}).get('poids_tres_faible_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('poids_tres_faible', {}) else '')
poids_tres_faible_treat_4_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('poids_tres_faible', {})
.get('poids_tres_faible_treat_4_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('poids_tres_faible', {}) else '')
poids_tres_faible_treat_5 = flat_field(
lambda f: f.form.get('treatments', {}).get('poids_tres_faible', {}).get('poids_tres_faible_treat_5', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('poids_tres_faible', {}) else '')
probleme_alimentation_title = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_title', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_0', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_1', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_1_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_1_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_1_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_1_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_1_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_1_help_1', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_1_help_1_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_1_help_1_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_2', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_3', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_4', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_5 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_5', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_6 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_6', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_7 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_7', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_8 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_8', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_9 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_9', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_10 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_10', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_11 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_11', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_12 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_12', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_13 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_13', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_14 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_14', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_15 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_15', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_16 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_16', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
probleme_alimentation_treat_17 = flat_field(
lambda f: f.form.get('treatments', {}).get('probleme_alimentation', {})
.get('probleme_alimentation_treat_17', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('probleme_alimentation', {}) else '')
vih_peu_probable_title = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_peu_probable', {}).get('vih_peu_probable_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_peu_probable', {}) else '')
vih_peu_probable_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_peu_probable', {}).get('vih_peu_probable_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_peu_probable', {}) else '')
vih_peu_probable_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_peu_probable', {}).get('vih_peu_probable_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_peu_probable', {}) else '')
vih_peu_probable_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_peu_probable', {}).get('vih_peu_probable_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_peu_probable', {}) else '')
vih_possible_title = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_0_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_treat_0_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_0_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_treat_0_help_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_probable_title = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_probable', {}).get('vih_probable_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_probable', {}) else '')
vih_probable_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_probable', {}).get('vih_probable_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_probable', {}) else '')
vih_probable_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_probable', {}).get('vih_probable_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_probable', {}) else '')
vih_probable_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_probable', {}).get('vih_probable_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_probable', {}) else '')
vih_probable_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_probable', {}).get('vih_probable_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_probable', {}) else '')
vih_probable_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_probable', {}).get('vih_probable_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_probable', {}) else '')
numerator = TDHNullEmitter()
class TDHChildTreatmentFluff(fluff.IndicatorDocument):
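"""Flattened indicators over child treatment form submissions
(xmlns CHILD_TREATMENT_XMLNSES[0]), saved directly to SQL."""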
document_class = XFormInstance
document_filter = FormPropertyFilter(xmlns=CHILD_TREATMENT_XMLNSES[0])
domains = TDH_DOMAINS
save_direct_to_sql = True
case_id = flat_field(lambda f: f.form.get('child_case_id'))
antibio_valid_meds = flat_field(lambda f: f.form.get('antibio_valid_meds', ''))
artemether_valid_meds = flat_field(lambda f: f.form.get('artemether_valid_meds', ''))
child_age = flat_field(lambda f: f.form.get('child_age', ''))
child_age_loaded = flat_field(lambda f: f.form.get('child_age_loaded', ''))
child_weight = flat_field(lambda f: f.form.get('child_weight', ''))
child_weight_loaded = flat_field(lambda f: f.form.get('child_weight_loaded', ''))
classification_anemie = flat_field(lambda f: f.form.get('classification_anemie', ''))
classification_anemie_loaded = flat_field(lambda f: f.form.get('classification_anemie_loaded', ''))
classification_deshydratation = flat_field(lambda f: f.form.get('classification_deshydratation', ''))
classification_deshydratation_loaded = flat_field(
lambda f: f.form.get('classification_deshydratation_loaded', ''))
classification_diahree = flat_field(lambda f: f.form.get('classification_diahree', ''))
classification_diahree_loaded = flat_field(lambda f: f.form.get('classification_diahree_loaded', ''))
classification_dysenterie = flat_field(lambda f: f.form.get('classification_dysenterie', ''))
classification_dysenterie_loaded = flat_field(lambda f: f.form.get('classification_dysenterie_loaded', ''))
classification_malnutrition = flat_field(lambda f: f.form.get('classification_malnutrition', ''))
classification_malnutrition_loaded = flat_field(lambda f: f.form.get('classification_malnutrition_loaded', ''))
classification_oreille = flat_field(lambda f: f.form.get('classification_oreille', ''))
classification_oreille_loaded = flat_field(lambda f: f.form.get('classification_oreille_loaded', ''))
classification_paludisme = flat_field(lambda f: f.form.get('classification_paludisme', ''))
classification_paludisme_loaded = flat_field(lambda f: f.form.get('classification_paludisme_loaded', ''))
classification_pneumonie = flat_field(lambda f: f.form.get('classification_pneumonie', ''))
classification_pneumonie_loaded = flat_field(lambda f: f.form.get('classification_pneumonie_loaded', ''))
classification_rougeole = flat_field(lambda f: f.form.get('classification_rougeole', ''))
classification_rougeole_loaded = flat_field(lambda f: f.form.get('classification_rougeole_loaded', ''))
classification_vih = flat_field(lambda f: f.form.get('classification_vih', ''))
classification_vih_loaded = flat_field(lambda f: f.form.get('classification_vih_loaded', ''))
deparasitage_valid_meds = flat_field(lambda f: f.form.get('deparasitage_valid_meds', ''))
other_treatments = flat_field(lambda f: f.form.get('other_treatments', ''))
perfusion_p1_a_valid_meds = flat_field(lambda f: f.form.get('perfusion_p1_a_valid_meds', ''))
perfusion_p1_b_valid_meds = flat_field(lambda f: f.form.get('perfusion_p1_b_valid_meds', ''))
perfusion_p2_a_valid_meds = flat_field(lambda f: f.form.get('perfusion_p2_a_valid_meds', ''))
perfusion_p2_b_valid_meds = flat_field(lambda f: f.form.get('perfusion_p2_b_valid_meds', ''))
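    # Medication selections, nested one level down under form['select_meds'].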
antibio = flat_field(
lambda f: f.form.get('select_meds', {}).get('antibio', '') if f.form.get('select_meds', {}) else '')
artemether = flat_field(
lambda f: f.form.get('select_meds', {}).get('artemether', '') if f.form.get('select_meds', {}) else '')
deparasitage = flat_field(
lambda f: f.form.get('select_meds', {}).get('deparasitage', '') if f.form.get('select_meds', {}) else '')
perfusion_p1_b = flat_field(
lambda f: f.form.get('select_meds', {}).get('perfusion_p1_b', '') if f.form.get('select_meds', {}) else '')
perfusion_p2_b = flat_field(
lambda f: f.form.get('select_meds', {}).get('perfusion_p2_b', '') if f.form.get('select_meds', {}) else '')
vitamine_a = flat_field(
lambda f: f.form.get('select_meds', {}).get('vitamine_a', '') if f.form.get('select_meds', {}) else '')
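    # Severe-case treatment selections, nested under form['select_treatments'].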
deshydratation_severe_grave = flat_field(
lambda f: f.form.get('select_treatments', {}).get('deshydratation_severe_grave', '') if f.form.get(
'select_treatments', {}) else '')
diahree_persistante_severe_grave = flat_field(
lambda f: f.form.get('select_treatments', {}).get('diahree_persistante_severe_grave', '') if f.form.get(
'select_treatments', {}) else '')
paludisme_grave = flat_field(
lambda f: f.form.get('select_treatments', {}).get('paludisme_grave', '') if f.form.get(
'select_treatments', {}) else '')
pneumonie_grave = flat_field(
lambda f: f.form.get('select_treatments', {}).get('pneumonie_grave', '') if f.form.get(
'select_treatments', {}) else '')
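    # Per-classification treatment detail fields, nested two levels down under
    # form['treatments'][<classification>] (anemie, anemie_grave, dysenterie, ...).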
anemie_title = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie', {}).get('anemie_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
anemie_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie', {}).get('anemie_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
anemie_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie', {}).get('anemie_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
anemie_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie', {}).get('anemie_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
anemie_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie', {}).get('anemie_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
anemie_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie', {}).get('anemie_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
anemie_treat_5 = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie', {}).get('anemie_treat_5', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
anemie_treat_6 = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie', {}).get('anemie_treat_6', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
anemie_treat_7 = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie', {}).get('anemie_treat_7', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
anemie_treat_7_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie', {}).get('anemie_treat_7_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
anemie_treat_8 = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie', {}).get('anemie_treat_8', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
anemie_treat_8_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie', {}).get('anemie_treat_8_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
anemie_treat_9 = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie', {}).get('anemie_treat_9', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
anemie_treat_9_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie', {}).get('anemie_treat_9_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
anemie_treat_9_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie', {}).get('anemie_treat_9_help_0_prompt', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
anemie_treat_10 = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie', {}).get('anemie_treat_10', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
anemie_treat_10_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie', {}).get('anemie_treat_10_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
anemie_treat_10_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie', {}).get('anemie_treat_10_help_0_prompt', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
anemie_treat_11 = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie', {}).get('anemie_treat_11', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
anemie_treat_11_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie', {}).get('anemie_treat_11_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
anemie_treat_11_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie', {}).get('anemie_treat_11_help_0_prompt', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
anemie_treat_12 = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie', {}).get('anemie_treat_12', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
anemie_treat_12_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie', {}).get('anemie_treat_12_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
anemie_treat_12_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie', {}).get('anemie_treat_12_help_0_prompt', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
show_artemether_amod_enf = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie', {}).get('show_artemether_amod_enf', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
    show_artemether_amod_nour = flat_field(
        lambda f: f.form.get('treatments', {}).get('anemie', {}).get('show_artemether_amod_nour', '')
        if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
    show_artemether_lum_enf = flat_field(
        lambda f: f.form.get('treatments', {}).get('anemie', {}).get('show_artemether_lum_enf', '')
        if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
    show_deparasitage_meb = flat_field(
        lambda f: f.form.get('treatments', {}).get('anemie', {}).get('show_deparasitage_meb', '')
        if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie', {}) else '')
anemie_grave_title = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie_grave', {}).get('anemie_grave_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie_grave', {}) else '')
anemie_grave_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie_grave', {}).get('anemie_grave_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie_grave', {}) else '')
anemie_grave_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie_grave', {}).get('anemie_grave_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie_grave', {}) else '')
anemie_grave_treat_1_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie_grave', {}).get('anemie_grave_treat_1_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie_grave', {}) else '')
anemie_grave_treat_1_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie_grave', {}).get('anemie_grave_treat_1_help_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie_grave', {}) else '')
anemie_grave_treat_1_help_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie_grave', {}).get('anemie_grave_treat_1_help_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie_grave', {}) else '')
anemie_grave_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('anemie_grave', {}).get('anemie_grave_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('anemie_grave', {}) else '')
antecedent_rougeole_title = flat_field(
lambda f: f.form.get('treatments', {}).get('antecedent_rougeole', {}).get('antecedent_rougeole_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('antecedent_rougeole', {}) else '')
antecedent_rougeole_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('antecedent_rougeole', {})
.get('antecedent_rougeole_treat_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('antecedent_rougeole', {}) else '')
antecedent_rougeole_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('antecedent_rougeole', {})
.get('antecedent_rougeole_treat_1', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('antecedent_rougeole', {}) else '')
antecedent_rougeole_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('antecedent_rougeole', {})
.get('antecedent_rougeole_treat_2', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('antecedent_rougeole', {}) else '')
antecedent_rougeole_treat_2_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('antecedent_rougeole', {})
.get('antecedent_rougeole_treat_2_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('antecedent_rougeole', {}) else '')
antecedent_rougeole_treat_2_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('antecedent_rougeole', {})
.get('antecedent_rougeole_treat_2_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('antecedent_rougeole', {}) else '')
deshydratation_severe_grave_title = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_grave', {})
.get('deshydratation_severe_grave_title', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('deshydratation_severe_grave', {}) else '')
deshydratation_severe_grave_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_grave', {})
.get('deshydratation_severe_grave_treat_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('deshydratation_severe_grave', {}) else '')
deshydratation_severe_grave_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_grave', {})
.get('deshydratation_severe_grave_treat_1', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('deshydratation_severe_grave', {}) else '')
deshydratation_severe_grave_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_grave', {})
.get('deshydratation_severe_grave_treat_2', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('deshydratation_severe_grave', {}) else '')
deshydratation_severe_pas_grave_perfusion_title = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {})
.get('deshydratation_severe_pas_grave_perfusion_title', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {}) else '')
deshydratation_severe_pas_grave_perfusion_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {})
.get('deshydratation_severe_pas_grave_perfusion_treat_0', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {}) else '')
deshydratation_severe_pas_grave_perfusion_treat_0_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {})
.get('deshydratation_severe_pas_grave_perfusion_treat_0_help_0', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {}) else '')
deshydratation_severe_pas_grave_perfusion_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {})
.get('deshydratation_severe_pas_grave_perfusion_treat_1', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {}) else '')
deshydratation_severe_pas_grave_perfusion_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {})
.get('deshydratation_severe_pas_grave_perfusion_treat_2', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {}) else '')
deshydratation_severe_pas_grave_perfusion_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {})
.get('deshydratation_severe_pas_grave_perfusion_treat_3', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {}) else '')
deshydratation_severe_pas_grave_perfusion_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {})
.get('deshydratation_severe_pas_grave_perfusion_treat_4', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {}) else '')
deshydratation_severe_pas_grave_perfusion_treat_5 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {})
.get('deshydratation_severe_pas_grave_perfusion_treat_5', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {}) else '')
deshydratation_severe_pas_grave_perfusion_treat_6 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {})
.get('deshydratation_severe_pas_grave_perfusion_treat_6', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {}) else '')
deshydratation_severe_pas_grave_perfusion_treat_7 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {})
.get('deshydratation_severe_pas_grave_perfusion_treat_7', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {}) else '')
deshydratation_severe_pas_grave_perfusion_treat_8 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {})
.get('deshydratation_severe_pas_grave_perfusion_treat_8', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {}) else '')
deshydratation_severe_pas_grave_perfusion_treat_9 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {})
.get('deshydratation_severe_pas_grave_perfusion_treat_9', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {}) else '')
deshydratation_severe_pas_grave_perfusion_treat_10 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {})
.get('deshydratation_severe_pas_grave_perfusion_treat_10', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {}) else '')
deshydratation_severe_pas_grave_perfusion_treat_11 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {})
.get('deshydratation_severe_pas_grave_perfusion_treat_11', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {}) else '')
deshydratation_severe_pas_grave_perfusion_treat_12 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {})
.get('deshydratation_severe_pas_grave_perfusion_treat_12', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {}) else '')
deshydratation_severe_pas_grave_perfusion_treat_14 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {})
.get('deshydratation_severe_pas_grave_perfusion_treat_14', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {}) else '')
deshydratation_severe_pas_grave_perfusion_treat_15 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {})
.get('deshydratation_severe_pas_grave_perfusion_treat_15', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {}) else '')
deshydratation_severe_pas_grave_perfusion_treat_16 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {})
.get('deshydratation_severe_pas_grave_perfusion_treat_16', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {}) else '')
deshydratation_severe_pas_grave_perfusion_treat_17 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {})
.get('deshydratation_severe_pas_grave_perfusion_treat_17', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {}) else '')
deshydratation_severe_pas_grave_perfusion_treat_18 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {})
.get('deshydratation_severe_pas_grave_perfusion_treat_18', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {}) else '')
deshydratation_severe_pas_grave_perfusion_treat_18_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {})
.get('deshydratation_severe_pas_grave_perfusion_treat_18_help_0', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {}) else '')
show_antibio_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {})
.get('show_antibio_1', '') if f.form.get('treatments', {}) and f.form.get('treatments', {}).get(
'deshydratation_severe_pas_grave_perfusion', {}) else '')
show_perfusion_p1_b_ringer = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {})
.get('show_perfusion_p1_b_ringer', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('deshydratation_severe_pas_grave_perfusion', {}) else '')
show_perfusion_p2_b_iso = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_perfusion', {})
.get('show_perfusion_p2_b_iso', '') if f.form.get('treatments', {}) and f.form.get('treatments', {}).get(
'deshydratation_severe_pas_grave_perfusion', {}) else '')
deshydratation_severe_pas_grave_sng_title = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_sng', {})
.get('deshydratation_severe_pas_grave_sng_title', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_sng', {}) else '')
deshydratation_severe_pas_grave_sng_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_sng', {})
.get('deshydratation_severe_pas_grave_sng_treat_2', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_sng', {}) else '')
deshydratation_severe_pas_grave_sng_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_sng', {})
.get('deshydratation_severe_pas_grave_sng_treat_3', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_sng', {}) else '')
deshydratation_severe_pas_grave_sans_sng_sans_perfusion_title = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_sans_sng_sans_perfusion', {})
.get('deshydratation_severe_pas_grave_sans_sng_sans_perfusion_title', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_sans_sng_sans_perfusion', {}) else ''
)
deshydratation_severe_pas_grave_sans_sng_sans_perfusion_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_sans_sng_sans_perfusion', {})
.get('deshydratation_severe_pas_grave_sans_sng_sans_perfusion_treat_3', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_sans_sng_sans_perfusion', {}) else ''
)
deshydratation_severe_pas_grave_sans_sng_sans_perfusion_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_sans_sng_sans_perfusion', {})
.get('deshydratation_severe_pas_grave_sans_sng_sans_perfusion_treat_4', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('deshydratation_severe_pas_grave_sans_sng_sans_perfusion', {}) else ''
)
diahree_persistante_title = flat_field(
lambda f: f.form.get('treatments', {}).get('diahree_persistante', {}).get('diahree_persistante_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('diahree_persistante', {}) else '')
diahree_persistante_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('diahree_persistante', {})
.get('diahree_persistante_treat_0', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('diahree_persistante', {}) else '')
diahree_persistante_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('diahree_persistante', {})
.get('diahree_persistante_treat_1', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('diahree_persistante', {}) else '')
diahree_persistante_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('diahree_persistante', {})
.get('diahree_persistante_treat_2', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('diahree_persistante', {}) else '')
diahree_persistante_treat_2_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('diahree_persistante', {})
.get('diahree_persistante_treat_2_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('diahree_persistante', {}) else '')
diahree_persistante_treat_2_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('diahree_persistante', {})
.get('diahree_persistante_treat_2_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('diahree_persistante', {}) else '')
diahree_persistante_treat_2_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('diahree_persistante', {})
.get('diahree_persistante_treat_2_help_1', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('diahree_persistante', {}) else '')
diahree_persistante_treat_2_help_1_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('diahree_persistante', {})
.get('diahree_persistante_treat_2_help_1_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('diahree_persistante', {}) else '')
diahree_persistante_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('diahree_persistante', {})
.get('diahree_persistante_treat_3', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('diahree_persistante', {}) else '')
diahree_persistante_severe_grave_title = flat_field(
lambda f: f.form.get('treatments', {}).get('diahree_persistante_severe_grave', {})
.get('diahree_persistante_severe_grave_title', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('diahree_persistante_severe_grave', {}) else '')
diahree_persistante_severe_grave_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('diahree_persistante_severe_grave', {})
.get('diahree_persistante_severe_grave_treat_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('diahree_persistante_severe_grave', {}) else '')
    dysenterie_title = flat_field(
        lambda f: f.form.get('treatments', {}).get('dysenterie', {}).get('dysenterie_title', '')
        if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('dysenterie', {}) else '')
dysenterie_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('dysenterie', {}).get('dysenterie_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('dysenterie', {}) else '')
dysenterie_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('dysenterie', {}).get('dysenterie_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('dysenterie', {}) else '')
dysenterie_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('dysenterie', {}).get('dysenterie_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('dysenterie', {}) else '')
dysenterie_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('dysenterie', {}).get('dysenterie_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('dysenterie', {}) else '')
dysenterie_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('dysenterie', {}).get('dysenterie_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('dysenterie', {}) else '')
dysenterie_treat_4_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('dysenterie', {}).get('dysenterie_treat_4_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('dysenterie', {}) else '')
dysenterie_treat_4_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('dysenterie', {}).get('dysenterie_treat_4_help_0_prompt', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('dysenterie', {}) else '')
dysenterie_treat_5 = flat_field(
lambda f: f.form.get('treatments', {}).get('dysenterie', {}).get('dysenterie_treat_5', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('dysenterie', {}) else '')
infection_aigue_oreille_title = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_aigue_oreille', {})
.get('infection_aigue_oreille_title', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_aigue_oreille', {}) else '')
    infection_aigue_oreille_treat_0 = flat_field(
        lambda f: f.form.get('treatments', {}).get('infection_aigue_oreille', {})
        .get('infection_aigue_oreille_treat_0', '') if f.form.get('treatments', {}) and f.form.get(
            'treatments', {}).get('infection_aigue_oreille', {}) else '')
infection_aigue_oreille_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_aigue_oreille', {})
.get('infection_aigue_oreille_treat_1', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_aigue_oreille', {}) else '')
infection_aigue_oreille_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_aigue_oreille', {})
.get('infection_aigue_oreille_treat_2', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_aigue_oreille', {}) else '')
infection_aigue_oreille_treat_2_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_aigue_oreille', {})
.get('infection_aigue_oreille_treat_2_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_aigue_oreille', {}) else '')
infection_aigue_oreille_treat_2_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_aigue_oreille', {})
.get('infection_aigue_oreille_treat_2_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_aigue_oreille', {}) else '')
infection_aigue_oreille_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_aigue_oreille', {})
.get('infection_aigue_oreille_treat_3', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('infection_aigue_oreille', {}) else '')
infection_aigue_oreille_show_antibio_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('infection_aigue_oreille', {}).get('show_antibio_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get(
'infection_aigue_oreille', {}) else '')
mam_title = flat_field(
lambda f: f.form.get('treatments', {}).get('mam', {}).get('mam_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mam', {}) else '')
mam_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('mam', {}).get('mam_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mam', {}) else '')
mam_treat_0_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('mam', {}).get('mam_treat_0_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mam', {}) else '')
mam_treat_0_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('mam', {}).get('mam_treat_0_help_0_prompt', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mam', {}) else '')
mam_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('mam', {}).get('mam_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mam', {}) else '')
mam_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('mam', {}).get('mam_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mam', {}) else '')
mam_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('mam', {}).get('mam_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mam', {}) else '')
mam_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('mam', {}).get('mam_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mam', {}) else '')
mam_treat_5 = flat_field(
lambda f: f.form.get('treatments', {}).get('mam', {}).get('mam_treat_5', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mam', {}) else '')
mam_treat_6 = flat_field(
lambda f: f.form.get('treatments', {}).get('mam', {}).get('mam_treat_6', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mam', {}) else '')
mam_treat_7 = flat_field(
lambda f: f.form.get('treatments', {}).get('mam', {}).get('mam_treat_7', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mam', {}) else '')
mam_treat_8 = flat_field(
lambda f: f.form.get('treatments', {}).get('mam', {}).get('mam_treat_8', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mam', {}) else '')
mam_treat_9 = flat_field(
lambda f: f.form.get('treatments', {}).get('mam', {}).get('mam_treat_9', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mam', {}) else '')
mam_treat_10 = flat_field(
lambda f: f.form.get('treatments', {}).get('mam', {}).get('mam_treat_10', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mam', {}) else '')
masc_title = flat_field(
lambda f: f.form.get('treatments', {}).get('masc', {}).get('masc_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('masc', {}) else '')
masc_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('masc', {}).get('masc_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('masc', {}) else '')
masc_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('masc', {}).get('masc_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('masc', {}) else '')
masc_treat_1_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('masc', {}).get('masc_treat_1_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('masc', {}) else '')
masc_treat_1_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('masc', {}).get('masc_treat_1_help_0_prompt', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('masc', {}) else '')
masc_treat_1_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('masc', {}).get('masc_treat_1_help_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('masc', {}) else '')
masc_treat_1_help_1_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('masc', {}).get('masc_treat_1_help_1_prompt', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('masc', {}) else '')
masc_treat_1_help_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('masc', {}).get('masc_treat_1_help_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('masc', {}) else '')
masc_treat_1_help_2_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('masc', {}).get('masc_treat_1_help_2_prompt', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('masc', {}) else '')
masc_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('masc', {}).get('masc_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('masc', {}) else '')
masc_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('masc', {}).get('masc_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('masc', {}) else '')
masc_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('masc', {}).get('masc_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('masc', {}) else '')
mass_title = flat_field(
lambda f: f.form.get('treatments', {}).get('mass', {}).get('mass_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mass', {}) else '')
mass_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('mass', {}).get('mass_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mass', {}) else '')
mass_treat_0_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('mass', {}).get('mass_treat_0_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mass', {}) else '')
mass_treat_0_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('mass', {}).get('mass_treat_0_help_0_prompt', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mass', {}) else '')
mass_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('mass', {}).get('mass_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mass', {}) else '')
mass_treat_1_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('mass', {}).get('mass_treat_1_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mass', {}) else '')
mass_treat_1_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('mass', {}).get('mass_treat_1_help_0_prompt', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mass', {}) else '')
mass_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('mass', {}).get('mass_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mass', {}) else '')
mass_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('mass', {}).get('mass_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mass', {}) else '')
mass_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('mass', {}).get('mass_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mass', {}) else '')
mass_treat_5 = flat_field(
lambda f: f.form.get('treatments', {}).get('mass', {}).get('mass_treat_5', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mass', {}) else '')
mass_treat_6 = flat_field(
lambda f: f.form.get('treatments', {}).get('mass', {}).get('mass_treat_6', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mass', {}) else '')
mass_treat_7 = flat_field(
lambda f: f.form.get('treatments', {}).get('mass', {}).get('mass_treat_7', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mass', {}) else '')
mass_treat_8 = flat_field(
lambda f: f.form.get('treatments', {}).get('mass', {}).get('mass_treat_8', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mass', {}) else '')
mass_treat_9 = flat_field(
lambda f: f.form.get('treatments', {}).get('mass', {}).get('mass_treat_9', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mass', {}) else '')
mass_treat_10 = flat_field(
lambda f: f.form.get('treatments', {}).get('mass', {}).get('mass_treat_10', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mass', {}) else '')
mass_treat_11 = flat_field(
lambda f: f.form.get('treatments', {}).get('mass', {}).get('mass_treat_11', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mass', {}) else '')
mass_treat_11_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('mass', {}).get('mass_treat_11_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mass', {}) else '')
mass_treat_11_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('mass', {}).get('mass_treat_11_help_0_prompt', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mass', {}) else '')
    mass_treat_12 = flat_field(
        lambda f: f.form.get('treatments', {}).get('mass', {}).get('mass_treat_12', '')
        if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mass', {}) else '')
mass_treat_12_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('mass', {}).get('mass_treat_12_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mass', {}) else '')
mass_treat_12_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('mass', {}).get('mass_treat_12_help_0_prompt', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mass', {}) else '')
mass_treat_13 = flat_field(
lambda f: f.form.get('treatments', {}).get('mass', {}).get('mass_treat_13', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mass', {}) else '')
mastoidite_title = flat_field(
lambda f: f.form.get('treatments', {}).get('mastoidite', {}).get('mastoidite_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mastoidite', {}) else '')
mastoidite_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('mastoidite', {}).get('mastoidite_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mastoidite', {}) else '')
mastoidite_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('mastoidite', {}).get('mastoidite_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mastoidite', {}) else '')
mastoidite_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('mastoidite', {}).get('mastoidite_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mastoidite', {}) else '')
mastoidite_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('mastoidite', {}).get('mastoidite_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mastoidite', {}) else '')
mastoidite_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('mastoidite', {}).get('mastoidite_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('mastoidite', {}) else '')
paludisme_grave_title = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave', {}).get('paludisme_grave_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_grave', {}) else '')
paludisme_grave_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave', {}).get('paludisme_grave_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_grave', {}) else '')
paludisme_grave_treat_0_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave', {}).get('paludisme_grave_treat_0_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_grave', {}) else '')
paludisme_grave_treat_0_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave', {})
.get('paludisme_grave_treat_0_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('paludisme_grave', {}) else '')
paludisme_grave_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave', {}).get('paludisme_grave_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_grave', {}) else '')
paludisme_grave_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave', {}).get('paludisme_grave_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_grave', {}) else '')
paludisme_grave_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave', {}).get('paludisme_grave_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_grave', {}) else '')
paludisme_grave_treat_3_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave', {}).get('paludisme_grave_treat_3_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_grave', {}) else '')
paludisme_grave_treat_3_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave', {})
.get('paludisme_grave_treat_3_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('paludisme_grave', {}) else '')
paludisme_grave_treat_3_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave', {}).get('paludisme_grave_treat_3_help_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_grave', {}) else '')
paludisme_grave_treat_3_help_1_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave', {})
.get('paludisme_grave_treat_3_help_1_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('paludisme_grave', {}) else '')
paludisme_grave_treat_3_help_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave', {}).get('paludisme_grave_treat_3_help_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_grave', {}) else '')
paludisme_grave_treat_3_help_2_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave', {})
.get('paludisme_grave_treat_3_help_2_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('paludisme_grave', {}) else '')
paludisme_grave_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave', {}).get('paludisme_grave_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_grave', {}) else '')
paludisme_grave_treat_5 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave', {}).get('paludisme_grave_treat_5', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_grave', {}) else '')
paludisme_grave_treat_6 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave', {}).get('paludisme_grave_treat_6', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_grave', {}) else '')
paludisme_grave_treat_7 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave', {}).get('paludisme_grave_treat_7', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_grave', {}) else '')
paludisme_grave_treat_7_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave', {}).get('paludisme_grave_treat_7_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_grave', {}) else '')
paludisme_grave_treat_7_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave', {})
.get('paludisme_grave_treat_7_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('paludisme_grave', {}) else '')
paludisme_grave_no_ref_title = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_no_ref', {})
.get('paludisme_grave_no_ref_title', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('paludisme_grave_no_ref', {}) else '')
paludisme_grave_no_ref_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_no_ref', {})
.get('paludisme_grave_no_ref_treat_0', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('paludisme_grave_no_ref', {}) else '')
paludisme_grave_no_ref_treat_0_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_no_ref', {})
.get('paludisme_grave_no_ref_treat_0_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('paludisme_grave_no_ref', {}) else '')
paludisme_grave_no_ref_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_no_ref', {})
.get('paludisme_grave_no_ref_treat_1', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('paludisme_grave_no_ref', {}) else '')
paludisme_grave_no_ref_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_no_ref', {})
.get('paludisme_grave_no_ref_treat_2', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('paludisme_grave_no_ref', {}) else '')
paludisme_grave_no_ref_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_no_ref', {})
.get('paludisme_grave_no_ref_treat_3', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('paludisme_grave_no_ref', {}) else '')
paludisme_grave_no_ref_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_no_ref', {})
.get('paludisme_grave_no_ref_treat_4', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('paludisme_grave_no_ref', {}) else '')
paludisme_grave_no_ref_treat_4_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_no_ref', {})
.get('paludisme_grave_no_ref_treat_4_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('paludisme_grave_no_ref', {}) else '')
paludisme_grave_no_ref_treat_4_help_1_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_no_ref', {})
.get('paludisme_grave_no_ref_treat_4_help_1_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('paludisme_grave_no_ref', {}) else '')
paludisme_grave_no_ref_treat_4_help_2_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_no_ref', {})
.get('paludisme_grave_no_ref_treat_4_help_2_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('paludisme_grave_no_ref', {}) else '')
    paludisme_grave_no_ref_treat_5 = flat_field(
        lambda f: f.form.get('treatments', {}).get('paludisme_grave_no_ref', {})
        .get('paludisme_grave_no_ref_treat_5', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
        .get('paludisme_grave_no_ref', {}) else '')
paludisme_grave_no_ref_treat_6 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_no_ref', {})
.get('paludisme_grave_no_ref_treat_6', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('paludisme_grave_no_ref', {}) else '')
paludisme_grave_no_ref_treat_6_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_no_ref', {})
.get('paludisme_grave_no_ref_treat_6_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('paludisme_grave_no_ref', {}) else '')
paludisme_grave_tdr_negatif_title = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_tdr_negatif', {})
.get('paludisme_grave_tdr_negatif_title', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('paludisme_grave_tdr_negatif', {}) else '')
paludisme_grave_tdr_negatif_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_tdr_negatif', {})
.get('paludisme_grave_tdr_negatif_treat_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('paludisme_grave_tdr_negatif', {}) else '')
paludisme_grave_tdr_negatif_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_tdr_negatif', {})
.get('paludisme_grave_tdr_negatif_treat_1', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('paludisme_grave_tdr_negatif', {}) else '')
paludisme_grave_tdr_negatif_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_tdr_negatif', {})
.get('paludisme_grave_tdr_negatif_treat_2', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('paludisme_grave_tdr_negatif', {}) else '')
paludisme_grave_tdr_negatif_treat_2_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_tdr_negatif', {})
.get('paludisme_grave_tdr_negatif_treat_2_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('paludisme_grave_tdr_negatif', {}) else '')
paludisme_grave_tdr_negatif_treat_2_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_tdr_negatif', {})
.get('paludisme_grave_tdr_negatif_treat_2_help_0_prompt', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('paludisme_grave_tdr_negatif', {}) else '')
paludisme_grave_tdr_negatif_treat_2_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_tdr_negatif', {})
.get('paludisme_grave_tdr_negatif_treat_2_help_1', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('paludisme_grave_tdr_negatif', {}) else '')
paludisme_grave_tdr_negatif_treat_2_help_1_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_tdr_negatif', {})
.get('paludisme_grave_tdr_negatif_treat_2_help_1_prompt', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('paludisme_grave_tdr_negatif', {}) else '')
paludisme_grave_tdr_negatif_treat_2_help_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_tdr_negatif', {})
.get('paludisme_grave_tdr_negatif_treat_2_help_2', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('paludisme_grave_tdr_negatif', {}) else '')
paludisme_grave_tdr_negatif_treat_2_help_2_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_tdr_negatif', {})
.get('paludisme_grave_tdr_negatif_treat_2_help_2_prompt', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('paludisme_grave_tdr_negatif', {}) else '')
paludisme_grave_tdr_negatif_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_tdr_negatif', {})
.get('paludisme_grave_tdr_negatif_treat_3', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('paludisme_grave_tdr_negatif', {}) else '')
paludisme_grave_tdr_negatif_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_tdr_negatif', {})
.get('paludisme_grave_tdr_negatif_treat_4', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('paludisme_grave_tdr_negatif', {}) else '')
paludisme_grave_tdr_negatif_treat_5 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_tdr_negatif', {})
.get('paludisme_grave_tdr_negatif_treat_5', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('paludisme_grave_tdr_negatif', {}) else '')
paludisme_grave_tdr_negatif_treat_6 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_tdr_negatif', {})
.get('paludisme_grave_tdr_negatif_treat_6', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('paludisme_grave_tdr_negatif', {}) else '')
paludisme_grave_tdr_negatif_treat_6_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_tdr_negatif', {})
.get('paludisme_grave_tdr_negatif_treat_6_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('paludisme_grave_tdr_negatif', {}) else '')
paludisme_grave_tdr_negatif_treat_6_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_grave_tdr_negatif', {})
.get('paludisme_grave_tdr_negatif_treat_6_help_0_prompt', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('paludisme_grave_tdr_negatif', {}) else '')
paludisme_simple_title = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_simple', {}).get('paludisme_simple_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_simple', {}) else '')
paludisme_simple_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_simple', {}).get('paludisme_simple_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_simple', {}) else '')
paludisme_simple_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_simple', {}).get('paludisme_simple_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_simple', {}) else '')
paludisme_simple_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_simple', {}).get('paludisme_simple_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_simple', {}) else '')
paludisme_simple_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_simple', {}).get('paludisme_simple_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_simple', {}) else '')
paludisme_simple_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_simple', {}).get('paludisme_simple_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_simple', {}) else '')
paludisme_simple_treat_5 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_simple', {}).get('paludisme_simple_treat_5', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_simple', {}) else '')
paludisme_simple_treat_5_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_simple', {})
.get('paludisme_simple_treat_5_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('paludisme_simple', {}) else '')
paludisme_simple_treat_5_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_simple', {})
.get('paludisme_simple_treat_5_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('paludisme_simple', {}) else '')
paludisme_simple_treat_6 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_simple', {}).get('paludisme_simple_treat_6', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_simple', {}) else '')
paludisme_simple_treat_7 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_simple', {}).get('paludisme_simple_treat_7', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_simple', {}) else '')
paludisme_simple_treat_8 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_simple', {}).get('paludisme_simple_treat_8', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_simple', {}) else '')
paludisme_simple_treat_8_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_simple', {})
.get('paludisme_simple_treat_8_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('paludisme_simple', {}) else '')
paludisme_simple_treat_8_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_simple', {})
.get('paludisme_simple_treat_8_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('paludisme_simple', {}) else '')
paludisme_simple_treat_9 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_simple', {}).get('paludisme_simple_treat_9', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_simple', {}) else '')
paludisme_simple_treat_10 = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_simple', {}).get('paludisme_simple_treat_10', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_simple', {}) else '')
paludisme_show_artemether_amod_enf = flat_field(
lambda f: f.form.get('treatments', {}).get('paludisme_simple', {}).get('show_artemether_amod_enf', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('paludisme_simple', {}) else '')
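    # A minimal sketch: every field in this block follows one template, so a
    # small factory could generate them. `_treatment_field` is a hypothetical
    # helper, not part of this module; it is left commented out because the
    # fluff machinery may only expect flat_field attributes on this class.
    #
    # def _treatment_field(section, key):
    #     return flat_field(
    #         lambda f: f.form.get('treatments', {}).get(section, {}).get(key, '')
    #         if f.form.get('treatments', {})
    #         and f.form.get('treatments', {}).get(section, {}) else '')
    #
    # Usage would then read, e.g.:
    # pas_deshydratation_title = _treatment_field(
    #     'pas_deshydratation', 'pas_deshydratation_title')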
pas_deshydratation_title = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_deshydratation', {}).get('pas_deshydratation_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_deshydratation', {}) else '')
pas_deshydratation_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_deshydratation', {}).get('pas_deshydratation_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_deshydratation', {}) else '')
pas_deshydratation_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_deshydratation', {}).get('pas_deshydratation_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_deshydratation', {}) else '')
pas_deshydratation_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_deshydratation', {}).get('pas_deshydratation_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_deshydratation', {}) else '')
pas_deshydratation_treat_2_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_deshydratation', {})
.get('pas_deshydratation_treat_2_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_deshydratation', {}) else '')
pas_deshydratation_treat_2_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_deshydratation', {})
.get('pas_deshydratation_treat_2_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_deshydratation', {}) else '')
pas_deshydratation_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_deshydratation', {}).get('pas_deshydratation_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_deshydratation', {}) else '')
pas_deshydratation_treat_3_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_deshydratation', {})
.get('pas_deshydratation_treat_3_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_deshydratation', {}) else '')
pas_deshydratation_treat_3_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_deshydratation', {})
.get('pas_deshydratation_treat_3_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_deshydratation', {}) else '')
pas_deshydratation_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_deshydratation', {})
.get('pas_deshydratation_treat_4', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_deshydratation', {}) else '')
pas_malnutrition_title = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_malnutrition', {}).get('pas_malnutrition_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_malnutrition', {}) else '')
pas_malnutrition_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_malnutrition', {}).get('pas_malnutrition_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_malnutrition', {}) else '')
pas_malnutrition_treat_0_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_malnutrition', {})
.get('pas_malnutrition_treat_0_help_0', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('pas_malnutrition', {}) else '')
pas_malnutrition_treat_0_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_malnutrition', {})
.get('pas_malnutrition_treat_0_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_malnutrition', {}) else '')
pas_malnutrition_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_malnutrition', {}).get('pas_malnutrition_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_malnutrition', {}) else '')
pas_malnutrition_treat_1_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_malnutrition', {})
.get('pas_malnutrition_treat_1_help_0', '') if f.form.get(
'treatments', {}) and f.form.get('treatments', {}).get('pas_malnutrition', {}) else '')
pas_malnutrition_treat_1_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_malnutrition', {})
.get('pas_malnutrition_treat_1_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_malnutrition', {}) else '')
pas_malnutrition_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_malnutrition', {}).get('pas_malnutrition_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_malnutrition', {}) else '')
pas_malnutrition_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_malnutrition', {}).get('pas_malnutrition_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_malnutrition', {}) else '')
pas_malnutrition_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_malnutrition', {}).get('pas_malnutrition_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_malnutrition', {}) else '')
pas_malnutrition_treat_5 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_malnutrition', {}).get('pas_malnutrition_treat_5', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_malnutrition', {}) else '')
pas_malnutrition_treat_5_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_malnutrition', {})
.get('pas_malnutrition_treat_5_help_0', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('pas_malnutrition', {}) else '')
pas_malnutrition_treat_5_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_malnutrition', {})
.get('pas_malnutrition_treat_5_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_malnutrition', {}) else '')
pas_malnutrition_show_vitamine_a_100 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_malnutrition', {}).get('show_vitamine_a_100', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_malnutrition', {}) else '')
pas_malnutrition_show_vitamine_a_200 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_malnutrition', {}).get('show_vitamine_a_200', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_malnutrition', {}) else '')
pas_pneumonie_title = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_pneumonie', {}).get('pas_pneumonie_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_pneumonie', {}) else '')
pas_pneumonie_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_pneumonie', {}).get('pas_pneumonie_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_pneumonie', {}) else '')
pas_pneumonie_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_pneumonie', {}).get('pas_pneumonie_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_pneumonie', {}) else '')
pas_pneumonie_treat_1_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_pneumonie', {})
.get('pas_pneumonie_treat_1_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_pneumonie', {}) else '')
pas_pneumonie_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_pneumonie', {}).get('pas_pneumonie_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_pneumonie', {}) else '')
pas_pneumonie_treat_2_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_pneumonie', {}).get('pas_pneumonie_treat_2_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_pneumonie', {}) else '')
pas_pneumonie_treat_2_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_pneumonie', {})
.get('pas_pneumonie_treat_2_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_pneumonie', {}) else '')
pas_pneumonie_treat_2_help_1_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_pneumonie', {})
.get('pas_pneumonie_treat_2_help_1_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pas_pneumonie', {}) else '')
pas_pneumonie_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_pneumonie', {}).get('pas_pneumonie_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_pneumonie', {}) else '')
pas_pneumonie_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('pas_pneumonie', {}).get('pas_pneumonie_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pas_pneumonie', {}) else '')
pneumonie_title = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie', {}).get('pneumonie_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie', {}) else '')
pneumonie_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie', {}).get('pneumonie_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie', {}) else '')
pneumonie_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie', {}).get('pneumonie_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie', {}) else '')
pneumonie_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie', {}).get('pneumonie_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie', {}) else '')
pneumonie_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie', {}).get('pneumonie_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie', {}) else '')
pneumonie_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie', {}).get('pneumonie_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie', {}) else '')
pneumonie_treat_4_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie', {}).get('pneumonie_treat_4_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie', {}) else '')
pneumonie_treat_4_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie', {}).get('pneumonie_treat_4_help_0_prompt', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie', {}) else '')
pneumonie_treat_5 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie', {}).get('pneumonie_treat_5', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie', {}) else '')
pneumonie_treat_5_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie', {}).get('pneumonie_treat_5_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie', {}) else '')
pneumonie_treat_5_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie', {}).get('pneumonie_treat_5_help_0_prompt', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie', {}) else '')
pneumonie_treat_6 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie', {}).get('pneumonie_treat_6', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie', {}) else '')
pneumonie_treat_6_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie', {}).get('pneumonie_treat_6_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie', {}) else '')
pneumonie_treat_6_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie', {}).get('pneumonie_treat_6_help_0_prompt', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie', {}) else '')
pneumonie_treat_7 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie', {}).get('pneumonie_treat_7', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie', {}) else '')
pneumonie_show_antibio_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie', {}).get('show_antibio_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie', {}) else '')
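    # The show_* fields appear to capture the form's display flags (which
    # antibiotic/vitamin dose block was shown) rather than treatment text; the
    # source key is bare here ('show_antibio_1') but prefixed in some other
    # sections, mirroring the underlying form.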
pneumonie_grave_title = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave', {}).get('pneumonie_grave_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie_grave', {}) else '')
pneumonie_grave_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave', {}).get('pneumonie_grave_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie_grave', {}) else '')
pneumonie_grave_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave', {}).get('pneumonie_grave_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie_grave', {}) else '')
pneumonie_grave_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave', {}).get('pneumonie_grave_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie_grave', {}) else '')
pneumonie_grave_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave', {}).get('pneumonie_grave_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie_grave', {}) else '')
pneumonie_grave_treat_3_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave', {}).get('pneumonie_grave_treat_3_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie_grave', {}) else '')
pneumonie_grave_treat_3_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave', {})
.get('pneumonie_grave_treat_3_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pneumonie_grave', {}) else '')
pneumonie_grave_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave', {}).get('pneumonie_grave_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie_grave', {}) else '')
pneumonie_grave_treat_4_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave', {})
.get('pneumonie_grave_treat_4_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pneumonie_grave', {}) else '')
pneumonie_grave_treat_5 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave', {}).get('pneumonie_grave_treat_5', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie_grave', {}) else '')
pneumonie_grave_treat_5_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave', {}).get('pneumonie_grave_treat_5_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie_grave', {}) else '')
pneumonie_grave_treat_5_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave', {})
.get('pneumonie_grave_treat_5_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pneumonie_grave', {}) else '')
pneumonie_grave_treat_6 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave', {})
.get('pneumonie_grave_treat_6', '') if f.form.get('treatments', {}) and f.form.get('treatments', {})
.get('pneumonie_grave', {}) else '')
pneumonie_grave_treat_6_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave', {}).get('pneumonie_grave_treat_6_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie_grave', {}) else '')
pneumonie_grave_treat_6_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave', {})
.get('pneumonie_grave_treat_6_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pneumonie_grave', {}) else '')
pneumonie_grave_treat_7 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave', {}).get('pneumonie_grave_treat_7', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie_grave', {}) else '')
pneumonie_grave_treat_7_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave', {})
.get('pneumonie_grave_treat_7_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pneumonie_grave', {}) else '')
pneumonie_grave_treat_8 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave', {}).get('pneumonie_grave_treat_8', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('pneumonie_grave', {}) else '')
pneumonie_grave_no_ref_title = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {})
.get('pneumonie_grave_no_ref_title', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {}) else '')
pneumonie_grave_no_ref_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {})
.get('pneumonie_grave_no_ref_treat_0', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {}) else '')
pneumonie_grave_no_ref_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {})
.get('pneumonie_grave_no_ref_treat_1', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {}) else '')
pneumonie_grave_no_ref_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {})
.get('pneumonie_grave_no_ref_treat_2', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {}) else '')
pneumonie_grave_no_ref_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {})
.get('pneumonie_grave_no_ref_treat_3', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {}) else '')
pneumonie_grave_no_ref_treat_3_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {})
.get('pneumonie_grave_no_ref_treat_3_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pneumonie_grave_no_ref', {}) else '')
pneumonie_grave_no_ref_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {})
.get('pneumonie_grave_no_ref_treat_4', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {}) else '')
pneumonie_grave_no_ref_treat_4_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {})
.get('pneumonie_grave_no_ref_treat_4_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pneumonie_grave_no_ref', {}) else '')
pneumonie_grave_no_ref_treat_5 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {})
.get('pneumonie_grave_no_ref_treat_5', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {}) else '')
pneumonie_grave_no_ref_treat_5_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {})
.get('pneumonie_grave_no_ref_treat_5_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pneumonie_grave_no_ref', {}) else '')
pneumonie_grave_no_ref_treat_5_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {})
.get('pneumonie_grave_no_ref_treat_5_help_0_prompt', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {}) else '')
pneumonie_grave_no_ref_treat_6 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {})
.get('pneumonie_grave_no_ref_treat_6', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {}) else '')
pneumonie_grave_no_ref_treat_6_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {})
.get('pneumonie_grave_no_ref_treat_6_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pneumonie_grave_no_ref', {}) else '')
pneumonie_grave_no_ref_treat_7 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {})
.get('pneumonie_grave_no_ref_treat_7', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {}) else '')
pneumonie_grave_no_ref_treat_7_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {})
.get('pneumonie_grave_no_ref_treat_7_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('pneumonie_grave_no_ref', {}) else '')
pneumonie_grave_no_ref_treat_8 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {})
.get('pneumonie_grave_no_ref_treat_8', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {}) else '')
pneumonie_grave_no_ref_treat_10 = flat_field(
lambda f: f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {})
.get('pneumonie_grave_no_ref_treat_10', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('pneumonie_grave_no_ref', {}) else '')
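    # Note: there is no pneumonie_grave_no_ref_treat_9 field; presumably the
    # source form skips that index, so the gap is preserved rather than filled.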
rougeole_title = flat_field(
lambda f: f.form.get('treatments', {}).get('rougeole', {}).get('rougeole_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('rougeole', {}) else '')
rougeole_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('rougeole', {}).get('rougeole_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('rougeole', {}) else '')
    rougeole_treat_1 = flat_field(
        lambda f: f.form.get('treatments', {}).get('rougeole', {}).get('rougeole_treat_1', '')
        if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('rougeole', {}) else '')
    rougeole_treat_2 = flat_field(
        lambda f: f.form.get('treatments', {}).get('rougeole', {}).get('rougeole_treat_2', '')
        if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('rougeole', {}) else '')
    rougeole_treat_3 = flat_field(
        lambda f: f.form.get('treatments', {}).get('rougeole', {}).get('rougeole_treat_3', '')
        if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('rougeole', {}) else '')
rougeole_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('rougeole', {}).get('rougeole_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('rougeole', {}) else '')
rougeole_treat_4_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('rougeole', {}).get('rougeole_treat_4_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('rougeole', {}) else '')
rougeole_treat_5 = flat_field(
lambda f: f.form.get('treatments', {}).get('rougeole', {}).get('rougeole_treat_5', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('rougeole', {}) else '')
rougeole_show_antibio_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('rougeole', {}).get('rougeole_show_antibio_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('rougeole', {}) else '')
rougeole_show_vitamine_a_200 = flat_field(
lambda f: f.form.get('treatments', {}).get('rougeole', {}).get('show_vitamine_a_200', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('rougeole', {}) else '')
rougeole_complications_title = flat_field(
lambda f: f.form.get('treatments', {}).get('rougeole_complications', {})
.get('rougeole_complications_title', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('rougeole_complications', {}) else '')
rougeole_complications_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('rougeole_complications', {})
.get('rougeole_complications_treat_0', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('rougeole_complications', {}) else '')
    rougeole_complications_treat_1 = flat_field(
        lambda f: f.form.get('treatments', {}).get('rougeole_complications', {})
        .get('rougeole_complications_treat_1', '') if f.form.get('treatments', {})
        and f.form.get('treatments', {}).get('rougeole_complications', {}) else '')
    rougeole_complications_treat_2 = flat_field(
        lambda f: f.form.get('treatments', {}).get('rougeole_complications', {})
        .get('rougeole_complications_treat_2', '') if f.form.get('treatments', {})
        and f.form.get('treatments', {}).get('rougeole_complications', {}) else '')
rougeole_complications_treat_2_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('rougeole_complications', {})
.get('rougeole_complications_treat_2_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('rougeole_complications', {}) else '')
    rougeole_complications_treat_3 = flat_field(
        lambda f: f.form.get('treatments', {}).get('rougeole_complications', {})
        .get('rougeole_complications_treat_3', '') if f.form.get('treatments', {})
        and f.form.get('treatments', {}).get('rougeole_complications', {}) else '')
rougeole_complications_treat_3_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('rougeole_complications', {})
.get('rougeole_complications_treat_3_help_0', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('rougeole_complications', {}) else '')
rougeole_complications_treat_3_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('rougeole_complications', {})
.get('rougeole_complications_treat_3_help_1', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('rougeole_complications', {}) else '')
rougeole_complications_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('rougeole_complications', {})
.get('rougeole_complications_treat_4', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('rougeole_complications', {}) else '')
rougeole_compliquee_title = flat_field(
lambda f: f.form.get('treatments', {}).get('rougeole_compliquee', {}).get('rougeole_compliquee_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('rougeole_compliquee', {}) else '')
rougeole_compliquee_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('rougeole_compliquee', {})
.get('rougeole_compliquee_treat_0', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('rougeole_compliquee', {}) else '')
    rougeole_compliquee_treat_1 = flat_field(
        lambda f: f.form.get('treatments', {}).get('rougeole_compliquee', {})
        .get('rougeole_compliquee_treat_1', '') if f.form.get('treatments', {})
        and f.form.get('treatments', {}).get('rougeole_compliquee', {}) else '')
    rougeole_compliquee_treat_2 = flat_field(
        lambda f: f.form.get('treatments', {}).get('rougeole_compliquee', {})
        .get('rougeole_compliquee_treat_2', '') if f.form.get('treatments', {})
        and f.form.get('treatments', {}).get('rougeole_compliquee', {}) else '')
    rougeole_compliquee_treat_3 = flat_field(
        lambda f: f.form.get('treatments', {}).get('rougeole_compliquee', {})
        .get('rougeole_compliquee_treat_3', '') if f.form.get('treatments', {})
        and f.form.get('treatments', {}).get('rougeole_compliquee', {}) else '')
rougeole_compliquee_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('rougeole_compliquee', {})
.get('rougeole_compliquee_treat_4', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('rougeole_compliquee', {}) else '')
rougeole_compliquee_treat_5 = flat_field(
lambda f: f.form.get('treatments', {}).get('rougeole_compliquee', {})
.get('rougeole_compliquee_treat_5', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('rougeole_compliquee', {}) else '')
rougeole_compliquee_treat_6 = flat_field(
lambda f: f.form.get('treatments', {}).get('rougeole_compliquee', {})
.get('rougeole_compliquee_treat_6', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('rougeole_compliquee', {}) else '')
rougeole_compliquee_show_antibio_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('rougeole_compliquee', {})
.get('rougeole_compliquee_show_antibio_1', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('rougeole_compliquee', {}) else '')
rougeole_compliquee_show_vitamine_a_100 = flat_field(
lambda f: f.form.get('treatments', {}).get('rougeole_compliquee', {}).get('show_vitamine_a_100', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('rougeole_compliquee', {}) else '')
signes_deshydratation_title = flat_field(
lambda f: f.form.get('treatments', {}).get('signes_deshydratation', {})
.get('signes_deshydratation_title', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('signes_deshydratation', {}) else '')
signes_deshydratation_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('signes_deshydratation', {})
.get('signes_deshydratation_treat_0', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('signes_deshydratation', {}) else '')
    signes_deshydratation_treat_1 = flat_field(
        lambda f: f.form.get('treatments', {}).get('signes_deshydratation', {})
        .get('signes_deshydratation_treat_1', '') if f.form.get('treatments', {})
        and f.form.get('treatments', {}).get('signes_deshydratation', {}) else '')
    signes_deshydratation_treat_2 = flat_field(
        lambda f: f.form.get('treatments', {}).get('signes_deshydratation', {})
        .get('signes_deshydratation_treat_2', '') if f.form.get('treatments', {})
        and f.form.get('treatments', {}).get('signes_deshydratation', {}) else '')
    signes_deshydratation_treat_3 = flat_field(
        lambda f: f.form.get('treatments', {}).get('signes_deshydratation', {})
        .get('signes_deshydratation_treat_3', '') if f.form.get('treatments', {})
        and f.form.get('treatments', {}).get('signes_deshydratation', {}) else '')
signes_deshydratation_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('signes_deshydratation', {})
.get('signes_deshydratation_treat_4', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('signes_deshydratation', {}) else '')
signes_deshydratation_treat_5 = flat_field(
lambda f: f.form.get('treatments', {}).get('signes_deshydratation', {})
.get('signes_deshydratation_treat_5', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('signes_deshydratation', {}) else '')
    signes_deshydratation_treat_5_help_0 = flat_field(
        lambda f: f.form.get('treatments', {}).get('signes_deshydratation', {})
        .get('signes_deshydratation_treat_5_help_0', '') if f.form.get('treatments', {}) and f.form.get(
            'treatments', {}).get('signes_deshydratation', {}) else '')
    signes_deshydratation_treat_5_help_1 = flat_field(
        lambda f: f.form.get('treatments', {}).get('signes_deshydratation', {})
        .get('signes_deshydratation_treat_5_help_1', '') if f.form.get('treatments', {}) and f.form.get(
            'treatments', {}).get('signes_deshydratation', {}) else '')
signes_deshydratation_treat_6 = flat_field(
lambda f: f.form.get('treatments', {}).get('signes_deshydratation', {})
.get('signes_deshydratation_treat_6', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('signes_deshydratation', {}) else '')
signes_deshydratation_treat_7 = flat_field(
lambda f: f.form.get('treatments', {}).get('signes_deshydratation', {})
.get('signes_deshydratation_treat_7', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('signes_deshydratation', {}) else '')
    signes_deshydratation_treat_7_help_0 = flat_field(
        lambda f: f.form.get('treatments', {}).get('signes_deshydratation', {})
        .get('signes_deshydratation_treat_7_help_0', '') if f.form.get('treatments', {}) and f.form.get(
            'treatments', {}).get('signes_deshydratation', {}) else '')
    signes_deshydratation_treat_8 = flat_field(
        lambda f: f.form.get('treatments', {}).get('signes_deshydratation', {})
        .get('signes_deshydratation_treat_8', '') if f.form.get('treatments', {})
        and f.form.get('treatments', {}).get('signes_deshydratation', {}) else '')
    signes_deshydratation_treat_8_help_0 = flat_field(
        lambda f: f.form.get('treatments', {}).get('signes_deshydratation', {})
        .get('signes_deshydratation_treat_8_help_0', '') if f.form.get('treatments', {}) and f.form.get(
            'treatments', {}).get('signes_deshydratation', {}) else '')
    signes_deshydratation_treat_8_help_0_prompt = flat_field(
        lambda f: f.form.get('treatments', {}).get('signes_deshydratation', {})
        .get('signes_deshydratation_treat_8_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
            'treatments', {}).get('signes_deshydratation', {}) else '')
    signes_deshydratation_treat_8_help_1 = flat_field(
        lambda f: f.form.get('treatments', {}).get('signes_deshydratation', {})
        .get('signes_deshydratation_treat_8_help_1', '') if f.form.get('treatments', {}) and f.form.get(
            'treatments', {}).get('signes_deshydratation', {}) else '')
    signes_deshydratation_treat_8_help_1_prompt = flat_field(
        lambda f: f.form.get('treatments', {}).get('signes_deshydratation', {})
        .get('signes_deshydratation_treat_8_help_1_prompt', '') if f.form.get('treatments', {}) and f.form.get(
            'treatments', {}).get('signes_deshydratation', {}) else '')
    signes_deshydratation_treat_9 = flat_field(
        lambda f: f.form.get('treatments', {}).get('signes_deshydratation', {})
        .get('signes_deshydratation_treat_9', '') if f.form.get('treatments', {})
        and f.form.get('treatments', {}).get('signes_deshydratation', {}) else '')
    signes_deshydratation_treat_10 = flat_field(
        lambda f: f.form.get('treatments', {}).get('signes_deshydratation', {})
        .get('signes_deshydratation_treat_10', '') if f.form.get('treatments', {})
        and f.form.get('treatments', {}).get('signes_deshydratation', {}) else '')
    signes_deshydratation_treat_10_help_0 = flat_field(
        lambda f: f.form.get('treatments', {}).get('signes_deshydratation', {})
        .get('signes_deshydratation_treat_10_help_0', '') if f.form.get('treatments', {}) and f.form.get(
            'treatments', {}).get('signes_deshydratation', {}) else '')
    signes_deshydratation_treat_10_help_0_prompt = flat_field(
        lambda f: f.form.get('treatments', {}).get('signes_deshydratation', {})
        .get('signes_deshydratation_treat_10_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
            'treatments', {}).get('signes_deshydratation', {}) else '')
    signes_deshydratation_treat_11 = flat_field(
        lambda f: f.form.get('treatments', {}).get('signes_deshydratation', {})
        .get('signes_deshydratation_treat_11', '') if f.form.get('treatments', {})
        and f.form.get('treatments', {}).get('signes_deshydratation', {}) else '')
    signes_deshydratation_treat_11_help_0_prompt = flat_field(
        lambda f: f.form.get('treatments', {}).get('signes_deshydratation', {})
        .get('signes_deshydratation_treat_11_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
            'treatments', {}).get('signes_deshydratation', {}) else '')
tdr_negatif_title = flat_field(
lambda f: f.form.get('treatments', {}).get('tdr_negatif', {}).get('tdr_negatif_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('tdr_negatif', {}) else '')
tdr_negatif_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('tdr_negatif', {}).get('tdr_negatif_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('tdr_negatif', {}) else '')
vih_confirmee_title = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {}).get('vih_confirmee_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_confirmee', {}) else '')
vih_confirmee_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {}).get('vih_confirmee_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_confirmee', {}) else '')
vih_confirmee_treat_0_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {}).get('vih_confirmee_treat_0_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_confirmee', {}) else '')
vih_confirmee_treat_0_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {})
.get('vih_confirmee_treat_0_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_confirmee', {}) else '')
vih_confirmee_treat_0_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {}).get('vih_confirmee_treat_0_help_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_confirmee', {}) else '')
vih_confirmee_treat_0_help_1_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {})
.get('vih_confirmee_treat_0_help_1_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_confirmee', {}) else '')
vih_confirmee_treat_0_help_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {}).get('vih_confirmee_treat_0_help_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_confirmee', {}) else '')
vih_confirmee_treat_0_help_2_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {})
.get('vih_confirmee_treat_0_help_2_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_confirmee', {}) else '')
vih_confirmee_treat_0_help_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {}).get('vih_confirmee_treat_0_help_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_confirmee', {}) else '')
vih_confirmee_treat_0_help_3_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {})
.get('vih_confirmee_treat_0_help_3_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_confirmee', {}) else '')
vih_confirmee_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {}).get('vih_confirmee_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_confirmee', {}) else '')
vih_confirmee_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {}).get('vih_confirmee_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_confirmee', {}) else '')
vih_confirmee_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {}).get('vih_confirmee_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_confirmee', {}) else '')
vih_confirmee_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {}).get('vih_confirmee_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_confirmee', {}) else '')
vih_confirmee_treat_5 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {}).get('vih_confirmee_treat_5', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_confirmee', {}) else '')
vih_confirmee_treat_5_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {}).get('vih_confirmee_treat_5_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_confirmee', {}) else '')
vih_confirmee_treat_5_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {})
.get('vih_confirmee_treat_5_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_confirmee', {}) else '')
vih_confirmee_treat_6 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {}).get('vih_confirmee_treat_6', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_confirmee', {}) else '')
vih_confirmee_treat_6_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {}).get('vih_confirmee_treat_6_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_confirmee', {}) else '')
vih_confirmee_treat_6_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {})
.get('vih_confirmee_treat_6_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_confirmee', {}) else '')
vih_confirmee_treat_6_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {}).get('vih_confirmee_treat_6_help_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_confirmee', {}) else '')
vih_confirmee_treat_6_help_1_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {})
.get('vih_confirmee_treat_6_help_1_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_confirmee', {}) else '')
vih_confirmee_treat_7 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {}).get('vih_confirmee_treat_7', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_confirmee', {}) else '')
vih_confirmee_treat_8 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {}).get('vih_confirmee_treat_8', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_confirmee', {}) else '')
vih_confirmee_treat_8_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {}).get('vih_confirmee_treat_8_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_confirmee', {}) else '')
vih_confirmee_treat_8_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {})
.get('vih_confirmee_treat_8_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_confirmee', {}) else '')
vih_confirmee_treat_9 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_confirmee', {}).get('vih_confirmee_treat_9', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_confirmee', {}) else '')
vih_pas_title = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_pas', {}).get('vih_pas_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_pas', {}) else '')
vih_pas_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_pas', {}).get('vih_pas_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_pas', {}) else '')
vih_pas_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_pas', {}).get('vih_pas_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_pas', {}) else '')
vih_pas_treat_1_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_pas', {}).get('vih_pas_treat_1_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_pas', {}) else '')
vih_pas_treat_1_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_pas', {}).get('vih_pas_treat_1_help_0_prompt', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_pas', {}) else '')
vih_pas_treat_1_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_pas', {}).get('vih_pas_treat_1_help_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_pas', {}) else '')
vih_pas_treat_1_help_1_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_pas', {}).get('vih_pas_treat_1_help_1_prompt', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_pas', {}) else '')
vih_pas_treat_1_help_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_pas', {}).get('vih_pas_treat_1_help_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_pas', {}) else '')
vih_pas_treat_1_help_2_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_pas', {}).get('vih_pas_treat_1_help_2_prompt', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_pas', {}) else '')
vih_pas_treat_1_help_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_pas', {}).get('vih_pas_treat_1_help_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_pas', {}) else '')
vih_pas_treat_1_help_3_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_pas', {}).get('vih_pas_treat_1_help_3_prompt', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_pas', {}) else '')
vih_pas_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_pas', {}).get('vih_pas_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_pas', {}) else '')
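    # A worked illustration of how one of these emitters evaluates, against a
    # hypothetical payload (not data from this module):
    #   f.form == {'treatments': {'vih_pas': {'vih_pas_treat_0': 'SRO'}}}
    #     -> vih_pas_treat_0 emits 'SRO'
    #   f.form == {'treatments': ''}   # empty group serialized as a string
    #     -> the guard is falsy, so the field emits '' without calling .get()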
vih_peu_probable_title = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_peu_probable', {}).get('vih_peu_probable_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_peu_probable', {}) else '')
vih_peu_probable_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_peu_probable', {}).get('vih_peu_probable_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_peu_probable', {}) else '')
vih_peu_probable_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_peu_probable', {}).get('vih_peu_probable_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_peu_probable', {}) else '')
vih_peu_probable_treat_1_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_peu_probable', {})
.get('vih_peu_probable_treat_1_help_0', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_peu_probable', {}) else '')
vih_peu_probable_treat_1_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_peu_probable', {})
.get('vih_peu_probable_treat_1_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_peu_probable', {}) else '')
vih_peu_probable_treat_1_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_peu_probable', {})
.get('vih_peu_probable_treat_1_help_1', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_peu_probable', {}) else '')
vih_peu_probable_treat_1_help_1_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_peu_probable', {})
.get('vih_peu_probable_treat_1_help_1_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_peu_probable', {}) else '')
vih_peu_probable_treat_1_help_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_peu_probable', {})
.get('vih_peu_probable_treat_1_help_2', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_peu_probable', {}) else '')
vih_peu_probable_treat_1_help_2_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_peu_probable', {})
.get('vih_peu_probable_treat_1_help_2_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_peu_probable', {}) else '')
vih_peu_probable_treat_1_help_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_peu_probable', {})
.get('vih_peu_probable_treat_1_help_3', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_peu_probable', {}) else '')
vih_peu_probable_treat_1_help_3_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_peu_probable', {})
.get('vih_peu_probable_treat_1_help_3_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_peu_probable', {}) else '')
vih_peu_probable_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_peu_probable', {}).get('vih_peu_probable_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_peu_probable', {}) else '')
vih_possible_title = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_0_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_treat_0_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_0_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {})
.get('vih_possible_treat_0_help_0_prompt', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_0_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_treat_0_help_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_0_help_1_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {})
.get('vih_possible_treat_0_help_1_prompt', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_0_help_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_treat_0_help_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_0_help_2_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {})
.get('vih_possible_treat_0_help_2_prompt', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_4_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_treat_4_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_4_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {})
.get('vih_possible_treat_4_help_0_prompt', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_5 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_treat_5', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_5_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_treat_5_help_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_5_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {})
.get('vih_possible_treat_5_help_0_prompt', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_5_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_treat_5_help_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_5_help_1_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {})
.get('vih_possible_treat_5_help_1_prompt', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_6 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_treat_6', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_7 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_treat_7', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_treat_8 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_treat_8', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_possible_show_antibio_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_possible', {}).get('vih_possible_show_antibio_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_possible', {}) else '')
vih_symp_confirmee_title = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {}).get('vih_symp_confirmee_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {}).get('vih_symp_confirmee_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_treat_0_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {})
.get('vih_symp_confirmee_treat_0_help_0', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_treat_0_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {})
.get('vih_symp_confirmee_treat_0_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_treat_0_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {})
.get('vih_symp_confirmee_treat_0_help_1', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_treat_0_help_1_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {})
.get('vih_symp_confirmee_treat_0_help_1_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_treat_0_help_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {})
.get('vih_symp_confirmee_treat_0_help_2', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_treat_0_help_2_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {})
.get('vih_symp_confirmee_treat_0_help_2_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_treat_0_help_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {})
.get('vih_symp_confirmee_treat_0_help_3', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_treat_0_help_3_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {})
.get('vih_symp_confirmee_treat_0_help_3_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {}).get('vih_symp_confirmee_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {}).get('vih_symp_confirmee_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {}).get('vih_symp_confirmee_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_treat_4 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {}).get('vih_symp_confirmee_treat_4', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_treat_5 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {}).get('vih_symp_confirmee_treat_5', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_treat_5_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {})
.get('vih_symp_confirmee_treat_5_help_0', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_treat_5_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {})
.get('vih_symp_confirmee_treat_5_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_treat_6 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {}).get('vih_symp_confirmee_treat_6', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_treat_6_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {})
.get('vih_symp_confirmee_treat_6_help_0', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_treat_6_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {})
.get('vih_symp_confirmee_treat_6_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_treat_6_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {})
.get('vih_symp_confirmee_treat_6_help_1', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_treat_6_help_1_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {})
.get('vih_symp_confirmee_treat_6_help_1_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_treat_7 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {}).get('vih_symp_confirmee_treat_7', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_treat_8 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {}).get('vih_symp_confirmee_treat_8', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_treat_8_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {})
.get('vih_symp_confirmee_treat_8_help_0', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_treat_8_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {})
.get('vih_symp_confirmee_treat_8_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_treat_9 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {}).get('vih_symp_confirmee_treat_9', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_confirmee_show_antibio_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_confirmee', {})
.get('vih_symp_confirmee_show_antibio_1', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_symp_confirmee', {}) else '')
vih_symp_probable_title = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_probable', {}).get('vih_symp_probable_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_symp_probable', {}) else '')
vih_symp_probable_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_probable', {}).get('vih_symp_probable_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_symp_probable', {}) else '')
vih_symp_probable_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_probable', {}).get('vih_symp_probable_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_symp_probable', {}) else '')
vih_symp_probable_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_probable', {}).get('vih_symp_probable_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_symp_probable', {}) else '')
vih_symp_suspecte_title = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_suspecte', {}).get('vih_symp_suspecte_title', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_symp_suspecte', {}) else '')
vih_symp_suspecte_treat_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_suspecte', {}).get('vih_symp_suspecte_treat_0', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_symp_suspecte', {}) else '')
vih_symp_suspecte_treat_0_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_suspecte', {})
.get('vih_symp_suspecte_treat_0_help_0', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_symp_suspecte', {}) else '')
vih_symp_suspecte_treat_0_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_suspecte', {})
.get('vih_symp_suspecte_treat_0_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_symp_suspecte', {}) else '')
vih_symp_suspecte_treat_0_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_suspecte', {})
.get('vih_symp_suspecte_treat_0_help_1', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_symp_suspecte', {}) else '')
vih_symp_suspecte_treat_0_help_1_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_suspecte', {})
.get('vih_symp_suspecte_treat_0_help_1_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_symp_suspecte', {}) else '')
vih_symp_suspecte_treat_0_help_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_suspecte', {})
.get('vih_symp_suspecte_treat_0_help_2', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_symp_suspecte', {}) else '')
vih_symp_suspecte_treat_0_help_2_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_suspecte', {})
.get('vih_symp_suspecte_treat_0_help_2_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_symp_suspecte', {}) else '')
vih_symp_suspecte_treat_0_help_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_suspecte', {})
.get('vih_symp_suspecte_treat_0_help_3', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_symp_suspecte', {}) else '')
vih_symp_suspecte_treat_0_help_3_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_suspecte', {})
.get('vih_symp_suspecte_treat_0_help_3_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_symp_suspecte', {}) else '')
vih_symp_suspecte_treat_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_suspecte', {}).get('vih_symp_suspecte_treat_1', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_symp_suspecte', {}) else '')
vih_symp_suspecte_treat_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_suspecte', {}).get('vih_symp_suspecte_treat_2', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_symp_suspecte', {}) else '')
vih_symp_suspecte_treat_2_help_0 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_suspecte', {})
.get('vih_symp_suspecte_treat_2_help_0', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_symp_suspecte', {}) else '')
vih_symp_suspecte_treat_2_help_0_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_suspecte', {})
.get('vih_symp_suspecte_treat_2_help_0_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_symp_suspecte', {}) else '')
vih_symp_suspecte_treat_2_help_1 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_suspecte', {})
.get('vih_symp_suspecte_treat_2_help_1', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_symp_suspecte', {}) else '')
vih_symp_suspecte_treat_2_help_1_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_suspecte', {})
.get('vih_symp_suspecte_treat_2_help_1_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_symp_suspecte', {}) else '')
vih_symp_suspecte_treat_2_help_2 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_suspecte', {})
.get('vih_symp_suspecte_treat_2_help_2', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_symp_suspecte', {}) else '')
vih_symp_suspecte_treat_2_help_2_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_suspecte', {})
.get('vih_symp_suspecte_treat_2_help_2_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_symp_suspecte', {}) else '')
vih_symp_suspecte_treat_2_help_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_suspecte', {})
.get('vih_symp_suspecte_treat_2_help_3', '') if f.form.get('treatments', {})
and f.form.get('treatments', {}).get('vih_symp_suspecte', {}) else '')
vih_symp_suspecte_treat_2_help_3_prompt = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_suspecte', {})
.get('vih_symp_suspecte_treat_2_help_3_prompt', '') if f.form.get('treatments', {}) and f.form.get(
'treatments', {}).get('vih_symp_suspecte', {}) else '')
vih_symp_suspecte_treat_3 = flat_field(
lambda f: f.form.get('treatments', {}).get('vih_symp_suspecte', {}).get('vih_symp_suspecte_treat_3', '')
if f.form.get('treatments', {}) and f.form.get('treatments', {}).get('vih_symp_suspecte', {}) else '')
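    # Note: every flat_field above repeats the same nested-dict guard. A
    # helper along these lines (illustrative, not part of the original
    # module) would express the lookup once:
    #
    #     def _treatment_field(section, key):
    #         return flat_field(
    #             lambda f: f.form.get('treatments', {}).get(section, {}).get(key, '')
    #             if f.form.get('treatments', {})
    #             and f.form.get('treatments', {}).get(section, {}) else '')
    #
    #     vih_symp_suspecte_treat_3 = _treatment_field(
    #         'vih_symp_suspecte', 'vih_symp_suspecte_treat_3')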
numerator = TDHNullEmitter()
TDHEnrollChildFluffPillow = TDHEnrollChildFluff.pillow()
TDHInfantClassificationFluffPillow = TDHInfantClassificationFluff.pillow()
TDHInfantTreatmentFluffPillow = TDHInfantTreatmentFluff.pillow()
TDHNewbornClassificationFluffPillow = TDHNewbornClassificationFluff.pillow()
TDHNewbornTreatmentFluffPillow = TDHNewbornTreatmentFluff.pillow()
TDHChildClassificationFluffPillow = TDHChildClassificationFluff.pillow()
TDHChildTreatmentFluffPillow = TDHChildTreatmentFluff.pillow()
| {
"content_hash": "fb5f5495adbf097aaef22c1ac51bea1e",
"timestamp": "",
"source": "github",
"line_count": 3448,
"max_line_length": 116,
"avg_line_length": 76.00493039443155,
"alnum_prop": 0.6047202030030717,
"repo_name": "puttarajubr/commcare-hq",
"id": "38c3af5ef6f9c6d66da55662a0777df3ca1512d7",
"size": "262065",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "custom/tdh/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "ActionScript",
"bytes": "15950"
},
{
"name": "CSS",
"bytes": "581878"
},
{
"name": "HTML",
"bytes": "2790361"
},
{
"name": "JavaScript",
"bytes": "2572023"
},
{
"name": "Makefile",
"bytes": "3999"
},
{
"name": "Python",
"bytes": "11275678"
},
{
"name": "Shell",
"bytes": "23890"
}
],
"symlink_target": ""
} |
import numpy as np
import scipy.sparse as sp
from scipy.sparse import csr_matrix
from sklearn import datasets
from sklearn.utils.testing import assert_false
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_raises_regexp
from sklearn.utils.testing import assert_raise_message
from sklearn.utils.testing import assert_greater
from sklearn.metrics.cluster import silhouette_score
from sklearn.metrics.cluster import calinski_harabaz_score
from sklearn.metrics import pairwise_distances
def test_silhouette():
# Tests the Silhouette Coefficient.
dataset = datasets.load_iris()
X_dense = dataset.data
X_csr = csr_matrix(X_dense)
X_dok = sp.dok_matrix(X_dense)
X_lil = sp.lil_matrix(X_dense)
y = dataset.target
for X in [X_dense, X_csr, X_dok, X_lil]:
D = pairwise_distances(X, metric='euclidean')
# Given that the actual labels are used, we can assume that S would be
# positive.
score_precomputed = silhouette_score(D, y, metric='precomputed')
assert_greater(score_precomputed, 0)
# Test without calculating D
score_euclidean = silhouette_score(X, y, metric='euclidean')
assert_almost_equal(score_precomputed, score_euclidean)
if X is X_dense:
score_dense_without_sampling = score_precomputed
else:
assert_almost_equal(score_euclidean,
score_dense_without_sampling)
# Test with sampling
score_precomputed = silhouette_score(D, y, metric='precomputed',
sample_size=int(X.shape[0] / 2),
random_state=0)
score_euclidean = silhouette_score(X, y, metric='euclidean',
sample_size=int(X.shape[0] / 2),
random_state=0)
assert_greater(score_precomputed, 0)
assert_greater(score_euclidean, 0)
assert_almost_equal(score_euclidean, score_precomputed)
if X is X_dense:
score_dense_with_sampling = score_precomputed
else:
assert_almost_equal(score_euclidean, score_dense_with_sampling)
def test_no_nan():
# Assert Silhouette Coefficient != nan when there is 1 sample in a class.
# This tests for the condition that caused issue 960.
# Note that there is only one sample in cluster 0. This used to cause the
# silhouette_score to return nan (see bug #960).
labels = np.array([1, 0, 1, 1, 1])
# The distance matrix doesn't actually matter.
D = np.random.RandomState(0).rand(len(labels), len(labels))
silhouette = silhouette_score(D, labels, metric='precomputed')
assert_false(np.isnan(silhouette))
def test_correct_labelsize():
# Assert 1 < n_labels < n_samples
dataset = datasets.load_iris()
X = dataset.data
# n_labels = n_samples
y = np.arange(X.shape[0])
assert_raises_regexp(ValueError,
                         r'Number of labels is %d\. Valid values are 2 '
                         r'to n_samples - 1 \(inclusive\)' % len(np.unique(y)),
silhouette_score, X, y)
# n_labels = 1
y = np.zeros(X.shape[0])
assert_raises_regexp(ValueError,
                         r'Number of labels is %d\. Valid values are 2 '
                         r'to n_samples - 1 \(inclusive\)' % len(np.unique(y)),
silhouette_score, X, y)
def test_non_encoded_labels():
dataset = datasets.load_iris()
X = dataset.data
labels = dataset.target
assert_equal(
silhouette_score(X, labels + 10), silhouette_score(X, labels))
def test_non_numpy_labels():
dataset = datasets.load_iris()
X = dataset.data
y = dataset.target
assert_equal(
silhouette_score(list(X), list(y)), silhouette_score(X, y))
def test_calinski_harabaz_score():
rng = np.random.RandomState(seed=0)
# Assert message when there is only one label
assert_raise_message(ValueError, "Number of labels is",
calinski_harabaz_score,
rng.rand(10, 2), np.zeros(10))
# Assert message when all point are in different clusters
assert_raise_message(ValueError, "Number of labels is",
calinski_harabaz_score,
rng.rand(10, 2), np.arange(10))
    # Assert the value is 1. when all samples are equal
assert_equal(1., calinski_harabaz_score(np.ones((10, 2)),
[0] * 5 + [1] * 5))
    # Assert the value is 0. when all cluster means are equal
assert_equal(0., calinski_harabaz_score([[-1, -1], [1, 1]] * 10,
[0] * 10 + [1] * 10))
    # General case (with non-numpy arrays)
X = ([[0, 0], [1, 1]] * 5 + [[3, 3], [4, 4]] * 5 +
[[0, 4], [1, 3]] * 5 + [[3, 1], [4, 0]] * 5)
labels = [0] * 10 + [1] * 10 + [2] * 10 + [3] * 10
assert_almost_equal(calinski_harabaz_score(X, labels),
45 * (40 - 4) / (5 * (4 - 1)))
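    # The expected value above can be checked by hand (a sketch of the
    # arithmetic, not part of the original test): each cluster mean sits at
    # distance sqrt(4.5) from the overall mean (2, 2), so the between-group
    # dispersion is 4 * 10 * 4.5 = 180; each point sits at distance sqrt(0.5)
    # from its cluster mean, so the within-group dispersion is 40 * 0.5 = 20.
    # CH = (180 / (4 - 1)) / (20 / (40 - 4)) = 60 / (5 / 9) = 108, which
    # equals 45 * (40 - 4) / (5 * (4 - 1)).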
| {
"content_hash": "023721c811d3affadd18c0f937e974d8",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 78,
"avg_line_length": 38.99248120300752,
"alnum_prop": 0.5887003470883146,
"repo_name": "YinongLong/scikit-learn",
"id": "04bdf8b6f60fe44ed966bb691680b2c5829e752f",
"size": "5186",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "sklearn/metrics/cluster/tests/test_unsupervised.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1786"
},
{
"name": "C",
"bytes": "385829"
},
{
"name": "C++",
"bytes": "139482"
},
{
"name": "Makefile",
"bytes": "1370"
},
{
"name": "PowerShell",
"bytes": "13427"
},
{
"name": "Python",
"bytes": "5905768"
},
{
"name": "Shell",
"bytes": "3952"
}
],
"symlink_target": ""
} |
from datetime import date
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http.response import HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.urls import reverse
from django.views import View
from django.views.generic import ListView
from swiftwind.billing_cycle.models import BillingCycle
class BillingCycleListView(LoginRequiredMixin, ListView):
template_name = 'billing_cycle/list.html'
context_object_name = 'billing_cycles'
def get_queryset(self):
return BillingCycle.objects.filter(
start_date__lte=date.today()
).order_by('-date_range')
class CreateTransactionsView(LoginRequiredMixin, View):
def post(self, request, uuid):
billing_cycle = get_object_or_404(BillingCycle, uuid=uuid)
if billing_cycle.can_create_transactions():
billing_cycle.enact_all_costs()
return HttpResponseRedirect(reverse('billing_cycles:list'))
class RecreateTransactionsView(LoginRequiredMixin, View):
"""For those times when you realise you're costs were not setup correctly"""
def post(self, request, uuid):
billing_cycle = get_object_or_404(BillingCycle, uuid=uuid)
billing_cycle.reenact_all_costs()
return HttpResponseRedirect(reverse('billing_cycles:list'))
class DeleteTransactionsView(LoginRequiredMixin, View):
"""For those times when you need to delete the transactions and faff about some more"""
def post(self, request, uuid):
billing_cycle = get_object_or_404(BillingCycle, uuid=uuid)
billing_cycle.unenact_all_costs()
return HttpResponseRedirect(reverse('billing_cycles:list'))
class SendNotificationsView(LoginRequiredMixin, View):
def post(self, request, uuid):
billing_cycle = get_object_or_404(BillingCycle, uuid=uuid)
if billing_cycle.can_send_statements():
billing_cycle.send_statements(force=True)
return HttpResponseRedirect(reverse('billing_cycles:list'))
| {
"content_hash": "8b022532dc50b7ed5d77ccf4092fd4e9",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 91,
"avg_line_length": 35.578947368421055,
"alnum_prop": 0.7292899408284024,
"repo_name": "adamcharnock/swiftwind",
"id": "2e35cab5c4783bfe76bbb15375726be8d67de5c5",
"size": "2028",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "swiftwind/billing_cycle/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "695"
},
{
"name": "Dockerfile",
"bytes": "946"
},
{
"name": "HTML",
"bytes": "72934"
},
{
"name": "Python",
"bytes": "255974"
},
{
"name": "Smarty",
"bytes": "536"
}
],
"symlink_target": ""
} |
from django.core.management.base import BaseCommand
from django.conf import settings
from starthinker_ui.recipe.scripts import Script
from starthinker.tool.example import recipe_to_python
class Command(BaseCommand):
help = 'Generate Templates For Python'
def handle(self, *args, **kwargs):
for script in Script.get_scripts():
if script.get_tag() in ['airflow']: continue
if script.get_open_source():
print('Writing: %s_example.py' % script.get_tag())
with open(
'%s/examples/%s_example.py' % (
settings.UI_ROOT,
script.get_tag()
),
'w'
) as py_file:
py_file.write(
recipe_to_python(
script.get_tag(),
script.get_description(),
script.get_instructions(),
script.get_tasks()
)
)
| {
"content_hash": "ea23497836a0e4fdb379e2efb7e7c487",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 65,
"avg_line_length": 29.766666666666666,
"alnum_prop": 0.5778275475923852,
"repo_name": "google/starthinker",
"id": "2b9ce94029dd2ac5236b5c0bb264c59420c0ec36",
"size": "1635",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "starthinker_ui/website/management/commands/example.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "89775"
},
{
"name": "Jupyter Notebook",
"bytes": "1088964"
},
{
"name": "Python",
"bytes": "2356647"
},
{
"name": "Shell",
"bytes": "89492"
}
],
"symlink_target": ""
} |
import random
import collections
from getpass import getpass as get_keyword
class GuessWord(object):
def __init__(self):
self.word_list = ["jack", "back", "cock", "luck", "bang", "tool", "dogs", "bags", "life", "kick"]
self.key = True
self.last_word = []
self.word = self.grab_word()
def start_game(self):
        while self.key:
self.star = 0
self.exc = 0
self.guess_word_length = 0
            # Grab the guess word from the user and check its length
while self.guess_word_length != len(self.word):
if self.guess_word_length != 0:
print "Please enter a %d letter word" % len(self.word)
self.guess_word = raw_input(
"Enter your guess that must be containing "+ str(len(self.word)) +" letters: "
)
self.guess_word_length = len(self.guess_word)
            # Exact word match
if self.word == self.guess_word:
self.new_key = int(raw_input("Congrats! you've got the right word. To continue playing the game please enter 1 and to quit enter 2: \n 1. play \n 2. quit \n"))
if self.new_key == 1:
self.last_word.append(self.word)
self.word = self.grab_word()
continue
else:
self.star = 0
self.exc = 0
self.key = False
break
            # Star calculation
self.calculate_star()
            # Exclamation calculation
self.calculate_exclamation()
            # Guess output
print ' '.join(['_' for i in range(len(self.word))]) + '\t' + ' '.join(['*' for i in range(self.star)]) + ' '.join([' !' for i in range(self.exc)])
    def grab_word(self):
        # Reset the history once every word has been used; otherwise this
        # loop would never terminate after all ten words have been played.
        if len(self.last_word) >= len(self.word_list):
            self.last_word = []
        while True:
            word_no = random.randint(0, len(self.word_list)-1)
            word = self.word_list[word_no]
            if word not in self.last_word:
                return word
def calculate_star(self):
self.new_word = ""
self.new_guess_word = ""
for i, char in enumerate(self.word):
if self.guess_word[i] == char:
self.star += 1
continue
self.new_word += char
self.new_guess_word += self.guess_word[i]
def calculate_exclamation(self):
new_word_count = dict(collections.Counter(self.new_word))
new_guess_word_count = dict(collections.Counter(self.new_guess_word))
for char in new_word_count:
            if char in new_guess_word_count:
self.exc += min(new_guess_word_count[char], new_word_count[char])
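    # Worked example of the scoring above (illustrative values): with
    # word='jack' and guess='back', positions 2-4 match ('a', 'c', 'k'),
    # so star = 3; the leftovers are 'j' vs 'b', which share nothing, so
    # exc = 0. With guess='kcab' instead, no position matches (star = 0)
    # but 'a', 'c' and 'k' all occur somewhere in 'jack', so exc = 3.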
| {
"content_hash": "454e42ab126b60757f7f0409b7a33466",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 175,
"avg_line_length": 37.5,
"alnum_prop": 0.5196396396396397,
"repo_name": "praba230890/guesswordgame",
"id": "4d486b25da4941754240aa977badb56807b1dffd",
"size": "2775",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "game/main.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "8918"
}
],
"symlink_target": ""
} |
import logging
from .engine import SimEngine
l = logging.getLogger("angr.engines.hook")
# pylint: disable=abstract-method,unused-argument
class SimEngineHook(SimEngine):
def __init__(self, project):
super(SimEngineHook, self).__init__()
self.project = project
def _check(self, state, procedure=None, **kwargs):
# we have not yet entered the next step - we should check the "current" jumpkind
if state.history.jumpkind == 'Ijk_NoHook':
return False
if state._ip.symbolic:
# symbolic IP is not supported
return False
if procedure is None:
if state.addr not in self.project._sim_procedures:
return False
return True
def process(self, state, procedure=None, force_addr=None, **kwargs):
"""
Perform execution with a state.
:param state: The state with which to execute
:param procedure: An instance of a SimProcedure to run, optional
:param ret_to: The address to return to when this procedure is finished
:param inline: This is an inline execution. Do not bother copying the state.
:param force_addr: Force execution to pretend that we're working at this concrete address
:returns: A SimSuccessors object categorizing the execution's successor states
"""
addr = state.addr if force_addr is None else force_addr
if procedure is None:
if addr not in self.project._sim_procedures:
return
else:
procedure = self.project._sim_procedures[addr]
l.debug("Running %s (originally at %#x)", repr(procedure), addr)
return self.project.factory.procedure_engine.process(state, procedure, force_addr=force_addr, **kwargs)
#
# Pickling
#
def __setstate__(self, state):
super(SimEngineHook, self).__setstate__(state)
self.project = state['project']
def __getstate__(self):
s = super(SimEngineHook, self).__getstate__()
s['project'] = self.project
return s
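# Illustrative use of this engine (an assumed call pattern inferred from the
# docstring above, not taken from angr's own documentation):
#
#     engine = SimEngineHook(project)
#     if engine._check(state):
#         successors = engine.process(state)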
| {
"content_hash": "6eb664cb0295138b60f34008916e7756",
"timestamp": "",
"source": "github",
"line_count": 62,
"max_line_length": 111,
"avg_line_length": 34.37096774193548,
"alnum_prop": 0.6189582355701548,
"repo_name": "chubbymaggie/angr",
"id": "e9624b7236d2ac1264eeb63e4b961cb930532d4f",
"size": "2131",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "angr/engines/hook.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "6375"
},
{
"name": "C++",
"bytes": "38545"
},
{
"name": "Makefile",
"bytes": "617"
},
{
"name": "Python",
"bytes": "2762600"
}
],
"symlink_target": ""
} |
from __future__ import annotations
from typing import Any, Mapping, Optional, Sequence, Union
from pydantic import BaseModel, root_validator, validator
from datadog_checks.base.utils.functions import identity
from datadog_checks.base.utils.models import validation
from . import defaults, validators
class AuthToken(BaseModel):
class Config:
allow_mutation = False
reader: Optional[Mapping[str, Any]]
writer: Optional[Mapping[str, Any]]
class IgnoreMetricsByLabels(BaseModel):
class Config:
allow_mutation = False
target_label_key: Optional[str]
target_label_value_list: Optional[Sequence[str]]
class TargetMetric(BaseModel):
class Config:
allow_mutation = False
label_to_match: Optional[str]
labels_to_get: Optional[Sequence[str]]
class LabelJoins(BaseModel):
class Config:
allow_mutation = False
target_metric: Optional[TargetMetric]
class MetricPatterns(BaseModel):
class Config:
allow_mutation = False
exclude: Optional[Sequence[str]]
include: Optional[Sequence[str]]
class Proxy(BaseModel):
class Config:
allow_mutation = False
http: Optional[str]
https: Optional[str]
no_proxy: Optional[Sequence[str]]
class InstanceConfig(BaseModel):
class Config:
allow_mutation = False
allow_redirects: Optional[bool]
auth_token: Optional[AuthToken]
auth_type: Optional[str]
aws_host: Optional[str]
aws_region: Optional[str]
aws_service: Optional[str]
bearer_token_auth: Optional[Union[bool, str]]
bearer_token_path: Optional[str]
bearer_token_refresh_interval: Optional[int]
connect_timeout: Optional[float]
disable_generic_tags: Optional[bool]
empty_default_hostname: Optional[bool]
exclude_labels: Optional[Sequence[str]]
extra_headers: Optional[Mapping[str, Any]]
gitlab_url: str
headers: Optional[Mapping[str, Any]]
health_service_check: Optional[bool]
ignore_metrics: Optional[Sequence[str]]
ignore_metrics_by_labels: Optional[IgnoreMetricsByLabels]
ignore_tags: Optional[Sequence[str]]
include_labels: Optional[Sequence[str]]
kerberos_auth: Optional[str]
kerberos_cache: Optional[str]
kerberos_delegate: Optional[bool]
kerberos_force_initiate: Optional[bool]
kerberos_hostname: Optional[str]
kerberos_keytab: Optional[str]
kerberos_principal: Optional[str]
label_joins: Optional[LabelJoins]
label_to_hostname: Optional[str]
labels_mapper: Optional[Mapping[str, Any]]
log_requests: Optional[bool]
metric_patterns: Optional[MetricPatterns]
metrics: Optional[Sequence[Union[str, Mapping[str, str]]]]
min_collection_interval: Optional[float]
namespace: Optional[str]
ntlm_domain: Optional[str]
password: Optional[str]
persist_connections: Optional[bool]
prometheus_endpoint: str
prometheus_metrics_prefix: Optional[str]
prometheus_url: str
proxy: Optional[Proxy]
read_timeout: Optional[float]
request_size: Optional[float]
send_distribution_buckets: Optional[bool]
send_distribution_counts_as_monotonic: Optional[bool]
send_distribution_sums_as_monotonic: Optional[bool]
send_histograms_buckets: Optional[bool]
send_monotonic_counter: Optional[bool]
send_monotonic_with_gauge: Optional[bool]
service: Optional[str]
skip_proxy: Optional[bool]
tags: Optional[Sequence[str]]
timeout: Optional[float]
tls_ca_cert: Optional[str]
tls_cert: Optional[str]
tls_ignore_warning: Optional[bool]
tls_private_key: Optional[str]
tls_protocols_allowed: Optional[Sequence[str]]
tls_use_host_header: Optional[bool]
tls_verify: Optional[bool]
type_overrides: Optional[Mapping[str, Any]]
use_legacy_auth_encoding: Optional[bool]
use_process_start_time: Optional[bool]
username: Optional[str]
@root_validator(pre=True)
def _initial_validation(cls, values):
return validation.core.initialize_config(getattr(validators, 'initialize_instance', identity)(values))
@validator('*', pre=True, always=True)
def _ensure_defaults(cls, v, field):
if v is not None or field.required:
return v
return getattr(defaults, f'instance_{field.name}')(field, v)
@validator('*')
def _run_validations(cls, v, field):
if not v:
return v
return getattr(validators, f'instance_{field.name}', identity)(v, field=field)
@root_validator(pre=False)
def _final_validation(cls, values):
return validation.core.finalize_config(getattr(validators, 'finalize_instance', identity)(values))
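# Minimal construction sketch (the URLs below are placeholders; gitlab_url,
# prometheus_endpoint and prometheus_url are the only non-Optional fields
# declared above, so they are the only required arguments):
#
#     InstanceConfig(
#         gitlab_url='http://gitlab.example.com',
#         prometheus_endpoint='http://gitlab.example.com:9090/metrics',
#         prometheus_url='http://gitlab.example.com:9090/metrics',
#     )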
| {
"content_hash": "e0083e149b570b43a6f13e187b19602f",
"timestamp": "",
"source": "github",
"line_count": 152,
"max_line_length": 110,
"avg_line_length": 30.657894736842106,
"alnum_prop": 0.703862660944206,
"repo_name": "DataDog/integrations-core",
"id": "b924b6cdecdf71e2e9ffa3b2af778d4704b33f7c",
"size": "5017",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gitlab_runner/datadog_checks/gitlab_runner/config_models/instance.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "578"
},
{
"name": "COBOL",
"bytes": "12312"
},
{
"name": "Dockerfile",
"bytes": "22998"
},
{
"name": "Erlang",
"bytes": "15518"
},
{
"name": "Go",
"bytes": "6988"
},
{
"name": "HCL",
"bytes": "4080"
},
{
"name": "HTML",
"bytes": "1318"
},
{
"name": "JavaScript",
"bytes": "1817"
},
{
"name": "Kotlin",
"bytes": "430"
},
{
"name": "Lua",
"bytes": "3489"
},
{
"name": "PHP",
"bytes": "20"
},
{
"name": "PowerShell",
"bytes": "2398"
},
{
"name": "Python",
"bytes": "13020828"
},
{
"name": "Roff",
"bytes": "359"
},
{
"name": "Ruby",
"bytes": "241"
},
{
"name": "Scala",
"bytes": "7000"
},
{
"name": "Shell",
"bytes": "83227"
},
{
"name": "Swift",
"bytes": "203"
},
{
"name": "TSQL",
"bytes": "29972"
},
{
"name": "TypeScript",
"bytes": "1019"
}
],
"symlink_target": ""
} |
"""Tests for git compatibility utilities."""
from dulwich.tests import (
SkipTest,
TestCase,
)
from dulwich.tests.compat import utils
class GitVersionTests(TestCase):
def setUp(self):
super(GitVersionTests, self).setUp()
self._orig_run_git = utils.run_git
self._version_str = None # tests can override to set stub version
def run_git(args, **unused_kwargs):
self.assertEqual(['--version'], args)
return 0, self._version_str
utils.run_git = run_git
def tearDown(self):
super(GitVersionTests, self).tearDown()
utils.run_git = self._orig_run_git
def test_git_version_none(self):
self._version_str = 'not a git version'
self.assertEqual(None, utils.git_version())
def test_git_version_3(self):
self._version_str = 'git version 1.6.6'
self.assertEqual((1, 6, 6, 0), utils.git_version())
def test_git_version_4(self):
self._version_str = 'git version 1.7.0.2'
self.assertEqual((1, 7, 0, 2), utils.git_version())
def test_git_version_extra(self):
self._version_str = 'git version 1.7.0.3.295.gd8fa2'
self.assertEqual((1, 7, 0, 3), utils.git_version())
def assertRequireSucceeds(self, required_version):
try:
utils.require_git_version(required_version)
except SkipTest:
self.fail()
def assertRequireFails(self, required_version):
self.assertRaises(SkipTest, utils.require_git_version,
required_version)
def test_require_git_version(self):
try:
self._version_str = 'git version 1.6.6'
self.assertRequireSucceeds((1, 6, 6))
self.assertRequireSucceeds((1, 6, 6, 0))
self.assertRequireSucceeds((1, 6, 5))
self.assertRequireSucceeds((1, 6, 5, 99))
self.assertRequireFails((1, 7, 0))
self.assertRequireFails((1, 7, 0, 2))
self.assertRaises(ValueError, utils.require_git_version,
(1, 6, 6, 0, 0))
self._version_str = 'git version 1.7.0.2'
self.assertRequireSucceeds((1, 6, 6))
self.assertRequireSucceeds((1, 6, 6, 0))
self.assertRequireSucceeds((1, 7, 0))
self.assertRequireSucceeds((1, 7, 0, 2))
self.assertRequireFails((1, 7, 0, 3))
self.assertRequireFails((1, 7, 1))
        except SkipTest as e:
# This test is designed to catch all SkipTest exceptions.
self.fail('Test unexpectedly skipped: %s' % e)
| {
"content_hash": "67c8eedf3f990ebe30b83dcf6bd72cb8",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 74,
"avg_line_length": 35.76712328767123,
"alnum_prop": 0.585982382229031,
"repo_name": "johndbritton/gitviz",
"id": "6ecd449dea1431b8957e3f163867b449dc34e11a",
"size": "3414",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "dulwich/tests/compat/test_utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Awk",
"bytes": "1916"
},
{
"name": "Batchfile",
"bytes": "3268"
},
{
"name": "C",
"bytes": "48666"
},
{
"name": "CSS",
"bytes": "5054"
},
{
"name": "Groff",
"bytes": "82"
},
{
"name": "HTML",
"bytes": "4369"
},
{
"name": "Handlebars",
"bytes": "2883"
},
{
"name": "JavaScript",
"bytes": "168931"
},
{
"name": "Makefile",
"bytes": "8587"
},
{
"name": "PHP",
"bytes": "4954"
},
{
"name": "Python",
"bytes": "1603823"
},
{
"name": "Shell",
"bytes": "1455"
}
],
"symlink_target": ""
} |
import sys
import json
from subprocess import Popen, PIPE
try:
import yaml
except ImportError:
print('Unable to import YAML module: please install PyYAML', file=sys.stderr)
sys.exit(1)
class Reporter(object):
"""Collect and report errors."""
def __init__(self):
"""Constructor."""
super(Reporter, self).__init__()
self.messages = []
def check_field(self, filename, name, values, key, expected):
"""Check that a dictionary has an expected value."""
if key not in values:
self.add(filename, '{0} does not contain {1}', name, key)
elif values[key] != expected:
self.add(filename, '{0} {1} is {2} not {3}', name, key, values[key], expected)
def check(self, condition, location, fmt, *args):
"""Append error if condition not met."""
if not condition:
self.add(location, fmt, *args)
def add(self, location, fmt, *args):
"""Append error unilaterally."""
        if location is None:
            coords = ''
        elif isinstance(location, str):
            coords = '{0}: '.format(location)
        elif isinstance(location, tuple):
            coords = '{0}:{1}: '.format(*location)
        else:
            assert False, 'Unknown location "{0}"/{1}'.format(location, type(location))
self.messages.append(coords + fmt.format(*args))
def report(self, stream=sys.stdout):
"""Report all messages."""
if not self.messages:
return
for m in sorted(self.messages):
print(m, file=stream)
def read_markdown(parser, path):
"""
Get YAML and AST for Markdown file, returning
{'metadata':yaml, 'metadata_len':N, 'text':text, 'lines':[(i, line, len)], 'doc':doc}.
"""
# Split and extract YAML (if present).
with open(path, 'r') as reader:
body = reader.read()
metadata_raw, metadata_yaml, body = split_metadata(path, body)
# Split into lines.
metadata_len = 0 if metadata_raw is None else metadata_raw.count('\n')
lines = [(metadata_len+i+1, line, len(line)) for (i, line) in enumerate(body.split('\n'))]
# Parse Markdown.
cmd = 'ruby {0}'.format(parser)
p = Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE, close_fds=True, universal_newlines=True)
stdout_data, stderr_data = p.communicate(body)
doc = json.loads(stdout_data)
return {
'metadata': metadata_yaml,
'metadata_len': metadata_len,
'text': body,
'lines': lines,
'doc': doc
}
def split_metadata(path, text):
"""
Get raw (text) metadata, metadata as YAML, and rest of body.
If no metadata, return (None, None, body).
"""
metadata_raw = None
metadata_yaml = None
metadata_len = None
pieces = text.split('---', 2)
if len(pieces) == 3:
metadata_raw = pieces[1]
text = pieces[2]
try:
            metadata_yaml = yaml.safe_load(metadata_raw)
except yaml.YAMLError as e:
print('Unable to parse YAML header in {0}:\n{1}'.format(path, e), file=sys.stderr)
sys.exit(1)
return metadata_raw, metadata_yaml, text
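# Illustrative round trip through split_metadata (the path and content are
# made up):
#
#     raw, meta, body = split_metadata('lesson.md', '---\ntitle: X\n---\nbody')
#     # raw == '\ntitle: X\n', meta == {'title': 'X'}, body == '\nbody'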
def load_yaml(filename):
"""
Wrapper around YAML loading so that 'import yaml' and error
handling is only needed in one place.
"""
with open(filename, 'r') as reader:
        return yaml.safe_load(reader)
| {
"content_hash": "9f6adacdd74810f88413347e056ddc55",
"timestamp": "",
"source": "github",
"line_count": 122,
"max_line_length": 96,
"avg_line_length": 28.360655737704917,
"alnum_prop": 0.588150289017341,
"repo_name": "easyreporting/fmri_reporting",
"id": "6af0a3317061d8663dafad9de41aec2156b7e597",
"size": "3460",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bin/util.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "3451"
},
{
"name": "HTML",
"bytes": "15745"
},
{
"name": "JavaScript",
"bytes": "271837"
},
{
"name": "Makefile",
"bytes": "3027"
},
{
"name": "Python",
"bytes": "57742"
},
{
"name": "R",
"bytes": "2226"
},
{
"name": "Ruby",
"bytes": "238"
},
{
"name": "Shell",
"bytes": "220"
}
],
"symlink_target": ""
} |
"""Package contenant le paramètre 'créer' de la commande 'banc'."""
from primaires.interpreteur.masque.parametre import Parametre
from primaires.interpreteur.editeur.presentation import Presentation
class PrmCreer(Parametre):
"""Commande 'banc créer'"""
def __init__(self):
"""Constructeur du paramètre."""
Parametre.__init__(self, "creer", "create")
self.schema = "<cle>"
self.aide_courte = "crée un banc de poisson"
self.aide_longue = \
"Cette commande permet de créer un nouveau banc de " \
"poisson. Vous devez préciser en argument la clé identifiant " \
"le banc."
def interpreter(self, personnage, dic_masques):
"""Méthode d'interprétation de commande"""
cle = dic_masques["cle"].cle
if cle in importeur.peche.bancs:
personnage << "|err|Ce banc existe déjà.|ff|"
return
banc = importeur.peche.creer_banc(cle)
editeur = importeur.interpreteur.construire_editeur(
"schooledit", personnage, banc)
personnage.contextes.ajouter(editeur)
editeur.actualiser()
| {
"content_hash": "4e41474819d3032ce466e25a13e84f16",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 76,
"avg_line_length": 37.70967741935484,
"alnum_prop": 0.6193327630453379,
"repo_name": "stormi/tsunami",
"id": "9db1aca935197ace9952c2d4012be69d954f95ff",
"size": "2746",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/secondaires/peche/commandes/banc/creer.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "7188300"
},
{
"name": "Ruby",
"bytes": "373"
}
],
"symlink_target": ""
} |
"""Basic example of feeding files into a directory being watched by spark streaming.
This script will monitor a directory tree passed as an argument. When new files are added inside this tree
(provided that they are lexicographically later than the last file fed into the stream - see updating_walk.py)
they will be first copied into a temporary directory, then moved into the passed output directory. After a specified
lag time, they will be automatically deleted from the output directory.
The temporary directory must be on the same filesystem as the output directory, or else errors will likely be
thrown by os.rename. The root of the temporary directory tree can be set on most linux systems by the TMP environment
variable.
"""
import logging
import sys
from thunder_streaming.feeder.core import build_filecheck_generators, runloop
from thunder_streaming.feeder.feeders import CopyAndMoveFeeder
from thunder_streaming.feeder.utils.logger import global_logger
from thunder_streaming.feeder.utils.regex import RegexMatchToPredicate
def parse_options():
import optparse
parser = optparse.OptionParser(usage="%prog indir outdir [options]")
parser.add_option("-p", "--poll-time", type="float", default=1.0,
help="Time between checks of indir in s, default %default")
parser.add_option("-m", "--mod-buffer-time", type="float", default=1.0,
help="Time to wait after last file modification time before feeding file into stream, "
"default %default")
parser.add_option("-l", "--linger-time", type="float", default=5.0,
help="Time to wait after feeding into stream before deleting intermediate file "
"(negative time disables), default %default")
parser.add_option("--max-files", type="int", default=-1,
help="Max files to copy in one iteration "
"(negative disables), default %default")
parser.add_option("--filter-regex-file", default=None,
help="File containing python regular expression. If passed, only move files for which " +
"the base filename matches the given regex.")
opts, args = parser.parse_args()
if len(args) != 2:
print >> sys.stderr, parser.get_usage()
sys.exit(1)
setattr(opts, "indir", args[0])
setattr(opts, "outdir", args[1])
return opts
def main():
_handler = logging.StreamHandler(sys.stdout)
_handler.setFormatter(logging.Formatter('%(levelname)s:%(name)s:%(asctime)s:%(message)s'))
global_logger.get().addHandler(_handler)
global_logger.get().setLevel(logging.INFO)
opts = parse_options()
if opts.filter_regex_file:
pred_fcn = RegexMatchToPredicate.fromFile(opts.filter_regex_file).predicate
else:
pred_fcn = None
feeder = CopyAndMoveFeeder.fromOptions(opts)
file_checkers = build_filecheck_generators(opts.indir, opts.mod_buffer_time,
max_files=opts.max_files, filename_predicate=pred_fcn)
runloop(file_checkers, feeder, opts.poll_time)
if __name__ == "__main__":
main() | {
"content_hash": "d721880e2234344ffdef7911d72b617f",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 117,
"avg_line_length": 45.65714285714286,
"alnum_prop": 0.6733416770963705,
"repo_name": "andrewosh/thunder-streaming",
"id": "e49a4a31a0eaa798c9f891a63b117b12c32da796",
"size": "3218",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "python/thunder_streaming/feeder/bin/stream_feeder.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "117931"
},
{
"name": "Scala",
"bytes": "81769"
},
{
"name": "Shell",
"bytes": "3712"
}
],
"symlink_target": ""
} |
"""
Name server control module. Slightly enhanced for QA package.
"""
from pycopia.remote import pyro
def print_listing(listing):
for name, uri in sorted(listing.items()):
if len(uri) > 45:
print(("{:>35.35s} --> \n{:>79.79s}".format(name, uri)))
else:
print(("{:>35.35s} --> {}".format(name, uri)))
_DOC = """nsc [-h?]
Control or query the name server.
Subcommands:
list - show current objects.
ping - No error if server is reachable.
remove <name> - remove the named agent entry.
"""
def nsc(argv):
import getopt
try:
optlist, args = getopt.getopt(argv[1:], "h?")
except getopt.GetoptError:
print(_DOC)
return 2
for opt, optarg in optlist:
if opt in ("-h", "-?"):
print(_DOC)
return
try:
subcmd = args[0]
except IndexError:
print(_DOC)
return 2
args = args[1:]
nameserver = pyro.locate_nameserver()
if subcmd.startswith("li"):
if args:
print_listing(nameserver.list(prefix=args[0]))
else:
print_listing(nameserver.list())
elif subcmd.startswith("pi"):
nameserver.ping()
print("Name server is alive.")
if subcmd.startswith("rem"):
if args:
nameserver.remove(name=args[0])
else:
print(_DOC)
return 2
if __name__ == "__main__":
import sys
from pycopia import autodebug
nsc(sys.argv)
| {
"content_hash": "5193305893e6f6f6dafaf752e16bdfea",
"timestamp": "",
"source": "github",
"line_count": 68,
"max_line_length": 68,
"avg_line_length": 22.014705882352942,
"alnum_prop": 0.5497661990647963,
"repo_name": "kdart/pycopia3",
"id": "aead9e051ff1fbb6d20a4845db8c34a909dd3070",
"size": "2110",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "QA/pycopia/remote/nsc.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "82876"
},
{
"name": "CSS",
"bytes": "22279"
},
{
"name": "HTML",
"bytes": "11125"
},
{
"name": "JavaScript",
"bytes": "70505"
},
{
"name": "Makefile",
"bytes": "5417"
},
{
"name": "Python",
"bytes": "1557130"
},
{
"name": "Roff",
"bytes": "7289"
},
{
"name": "Shell",
"bytes": "11157"
}
],
"symlink_target": ""
} |
""" Defines miscellaneous Qt-related helper classes and functions.
"""
# Standard library imports.
import inspect
# System library imports.
from IPython.external.qt import QtCore, QtGui
# IPython imports.
from IPython.utils.traitlets import HasTraits, TraitType
#-----------------------------------------------------------------------------
# Metaclasses
#-----------------------------------------------------------------------------
MetaHasTraits = type(HasTraits)
MetaQObject = type(QtCore.QObject)
class MetaQObjectHasTraits(MetaQObject, MetaHasTraits):
""" A metaclass that inherits from the metaclasses of HasTraits and QObject.
Using this metaclass allows a class to inherit from both HasTraits and
QObject. Using SuperQObject instead of QObject is highly recommended. See
QtKernelManager for an example.
"""
def __new__(mcls, name, bases, classdict):
# FIXME: this duplicates the code from MetaHasTraits.
# I don't think a super() call will help me here.
for k,v in classdict.iteritems():
if isinstance(v, TraitType):
v.name = k
elif inspect.isclass(v):
if issubclass(v, TraitType):
vinst = v()
vinst.name = k
classdict[k] = vinst
cls = MetaQObject.__new__(mcls, name, bases, classdict)
return cls
def __init__(mcls, name, bases, classdict):
# Note: super() did not work, so we explicitly call these.
MetaQObject.__init__(mcls, name, bases, classdict)
MetaHasTraits.__init__(mcls, name, bases, classdict)
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class SuperQObject(QtCore.QObject):
""" Permits the use of super() in class hierarchies that contain QObject.
Unlike QObject, SuperQObject does not accept a QObject parent. If it did,
    super could not be emulated properly (all other classes in the hierarchy
would have to accept the parent argument--they don't, of course, because
they don't inherit QObject.)
This class is primarily useful for attaching signals to existing non-Qt
classes. See QtKernelManager for an example.
"""
def __new__(cls, *args, **kw):
# We initialize QObject as early as possible. Without this, Qt complains
# if SuperQObject is not the first class in the super class list.
inst = QtCore.QObject.__new__(cls)
QtCore.QObject.__init__(inst)
return inst
def __init__(self, *args, **kw):
# Emulate super by calling the next method in the MRO, if there is one.
mro = self.__class__.mro()
for qt_class in QtCore.QObject.mro():
mro.remove(qt_class)
next_index = mro.index(SuperQObject) + 1
if next_index < len(mro):
mro[next_index].__init__(self, *args, **kw)
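# A minimal sketch of combining the two bases through the metaclass above
# (the class name is illustrative; IPython's QtKernelManager is the real
# example referenced in the docstring):
#
#     class TraitedQObject(SuperQObject, HasTraits):
#         __metaclass__ = MetaQObjectHasTraits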
#-----------------------------------------------------------------------------
# Functions
#-----------------------------------------------------------------------------
def get_font(family, fallback=None):
"""Return a font of the requested family, using fallback as alternative.
If a fallback is provided, it is used in case the requested family isn't
found. If no fallback is given, no alternative is chosen and Qt's internal
algorithms may automatically choose a fallback font.
Parameters
----------
family : str
A font name.
fallback : str
A font name.
Returns
-------
font : QFont object
"""
font = QtGui.QFont(family)
# Check whether we got what we wanted using QFontInfo, since exactMatch()
# is overly strict and returns false in too many cases.
font_info = QtGui.QFontInfo(font)
if fallback is not None and font_info.family() != family:
font = QtGui.QFont(fallback)
return font
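# Illustrative usage (the family names are placeholders, and QFontInfo needs
# a running QApplication to resolve families):
#
#     font = get_font('Consolas', fallback='Courier New')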
| {
"content_hash": "ce935f40c8ed115e5747fde82e5f11f7",
"timestamp": "",
"source": "github",
"line_count": 106,
"max_line_length": 80,
"avg_line_length": 37.367924528301884,
"alnum_prop": 0.5740974501388538,
"repo_name": "sodafree/backend",
"id": "096c27ea1aba7f945bb0ca20763cc1abfb2e1e3d",
"size": "3961",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "build/ipython/IPython/frontend/qt/util.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Emacs Lisp",
"bytes": "21800"
},
{
"name": "JavaScript",
"bytes": "1050184"
},
{
"name": "Python",
"bytes": "21215906"
},
{
"name": "Shell",
"bytes": "7557"
},
{
"name": "VimL",
"bytes": "25012"
}
],
"symlink_target": ""
} |
from django.shortcuts import render
from django.shortcuts import redirect
from django.views.decorators.csrf import csrf_exempt
from openid.consumer import discover
from openid.server import server
from openid.extensions import sreg, ax
# Create your views here.
from django.http import HttpResponse
from openid.store.filestore import FileOpenIDStore
def addAttributeExchangeResponse(oidrequest, response, request):
ax_req = ax.FetchRequest.fromOpenIDRequest(oidrequest)
if ax_req:
required = ax_req.getRequiredAttrs()
if len(required) == 1 and 'http://axschema.org/contact/username' in required:
ax_resp = ax.FetchResponse(request=ax_req)
ax_resp.addValue('http://axschema.org/contact/username', request.session['username'])
response.addExtension(ax_resp)
def addSRegResponse(oidrequest, response, request):
sreg_req = sreg.SRegRequest.fromOpenIDRequest(oidrequest)
    # FIELDS ACCEPTED BY THE SREG SPECIFICATION:
# 'fullname':'Full Name',
# 'nickname':'Nickname',
# 'dob':'Date of Birth',
# 'email':'E-mail Address',
# 'gender':'Gender',
# 'postcode':'Postal Code',
# 'country':'Country',
# 'language':'Language',
# 'timezone':'Time Zone',
sreg_data = {
'nickname':request.session['username']
}
sreg_resp = sreg.SRegResponse.extractResponse(sreg_req, sreg_data)
response.addExtension(sreg_resp)
@csrf_exempt
def openid(request):
    # ALL 3 CODE SECTIONS MUST NECESSARILY LIVE AT THIS SAME URL, BECAUSE THE CONSUMER CHECKS
    # THAT THE URL THAT ANSWERS IS THE SAME URL IT SENT THE REQUEST TO, SO THE RENDERS BELOW MUST NOT CHANGE URL
endpoint_url = 'http://' + request.environ['HTTP_HOST'] + '/openid'
oidserver = server.Server(FileOpenIDStore('/tmp/openid_session_store_server'), endpoint_url)
    # SECTION 1
    # THIS CODE SECTION CREATES THE ASSOCIATION NEEDED ON THE FIRST INTERACTION WITH THE SERVER
    # THIS ASSOCIATION MAKES UP THE ASSOC_HANDLE PARAMETER IN THE FIRST FORM BUILT BY THE CONSUMER
if 'openid.mode' in request.POST:
if request.POST['openid.mode'] in ['associate', 'check_authentication']:
oidrequest = oidserver.decodeRequest(request.POST)
oidresponse = oidserver.handleRequest(oidrequest)
webresponse = oidserver.encodeResponse(oidresponse)
return HttpResponse(webresponse.body)
    # SECTION 2
    # IF THE USER IS NOT LOGGED IN, RENDER THE LOGIN SCREEN,
    # THEN REDIRECT BACK TO THIS SAME URL, WHICH HANDLES THE REQUEST AGAIN
if not request.session.get('username', False):
request.session['save_post'] = request.POST
return render(request, 'login.html', {'next':'/openid'})
    # SECTION 3
    # IN THIS CODE SECTION THE SERVER ATTACHES USER DATA TO THE RESPONSE OBJECT AND SENDS IT TO THE CONSUMER
if request.session.get('save_post', False):
saved_post_data = request.session['save_post']
del request.session['save_post']
else:
saved_post_data = request.POST
openid_request = oidserver.decodeRequest(saved_post_data)
openid_response = openid_request.answer(True, identity=None)
    # addSRegResponse(openid_request, openid_response, request)  # SREG PROTOCOL
addAttributeExchangeResponse(openid_request, openid_response, request)
webresponse = oidserver.encodeResponse(openid_response)
    # BUILD THE URL WITH ITS QUERY STRING TO REDIRECT THE DATA TO THE CONSUMER
location = None
for header, value in webresponse.headers.iteritems():
if header == 'location':
location = value
return redirect(location)
def loginform(request):
    # BASIC LOGIN SCREEN
    # IF THE USER IS ALREADY LOGGED IN, SIMPLY IDENTIFY THE LOGGED-IN USER AND OFFER THE OPTION TO LOG OUT
if request.session.get('username', False):
username = request.session['username']
return render(request, 'login.html', {'logged':True, 'name':username})
else:
return render(request, 'login.html', {'logged':False})
def login(request):
    # METHOD THAT PERFORMS LOGIN; SAVES THE USER IN THE SESSION IF THE LOGIN DETAILS ARE CONSISTENT
    # HERE THE ONLY 'USER' IS MOCKED OUTSIDE ANY DATABASE: USER 'MarRib', PASS '123456'
username = request.POST['username']
password = request.POST['password']
if username == 'MarRib' and password == '123456':
request.session['username'] = 'MarRib'
request.session['email'] = u'''[email protected]'''
if request.POST.get('next', False):
next = request.POST['next']
if next:
return redirect(next)
return redirect('/')
else:
        return render(request, 'login.html', {'logged':False, 'error':'The username and password do not match.'})
def logout(request):
    # METHOD THAT PERFORMS LOGOUT: REMOVES THE USER FROM THE SESSION AND REDIRECTS TO THE LOGIN SCREEN
try:
del request.session['username']
except KeyError:
pass
return redirect('/loginform')
def xrds(request):
    # BUILD THE XRDS DOCUMENT FOR THE CONSUMER
endpoint_url = 'http://' + request.environ['HTTP_HOST'] + '/openid'
xrds_doc = """\
<?xml version="1.0" encoding="UTF-8"?>
<xrds:XRDS
xmlns:xrds="xri://$xrds"
xmlns="xri://$xrd*($v*2.0)">
<XRD>
<Service priority="0">
<Type>%s</Type>
<Type>%s</Type>
<URI>%s</URI>
</Service>
</XRD>
</xrds:XRDS>
"""%(discover.OPENID_2_0_TYPE, discover.OPENID_1_0_TYPE, endpoint_url)
return HttpResponse(xrds_doc, content_type='application/xrds+xml') | {
"content_hash": "a7b962d88f90736e1ee9fb871d7468b2",
"timestamp": "",
"source": "github",
"line_count": 143,
"max_line_length": 120,
"avg_line_length": 39.46853146853147,
"alnum_prop": 0.67487597448618,
"repo_name": "qmagico/openid_server",
"id": "f8803991a595b9d95749570dac0b0332bfbb7b68",
"size": "5666",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "server/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1112369"
},
{
"name": "Shell",
"bytes": "167"
}
],
"symlink_target": ""
} |
import inspect
import sys
import time
import pipes.pipe
from pipes import *
from writers import VideoServer
from sensors.pisensor import PiSensor
def get_pipes():
classes = inspect.getmembers(sys.modules[__name__], inspect.isclass)
return [c[1]() for c in classes
if pipes.pipe.Pipe in c[1].__bases__
and c[0] != "Pipeline"]
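# A sketch of what get_pipes() returns (the pipe class names here are
# hypothetical): with `from pipes import *` pulling in e.g. GrayscalePipe and
# BlurPipe, get_pipes() would return [GrayscalePipe(), BlurPipe()] -- one
# fresh instance of every direct Pipe subclass, excluding the Pipeline
# container itself.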
using_pipes = []
def main():
pipes = get_pipes()
using_pipes.append(pipes[0])
with \
PiSensor() as cam, \
Pipeline(using_pipes) as pipeline, \
VideoServer(port=8080, pipes=pipes, using_pipes=using_pipes) \
as video_server:
while True:
try:
frame = cam.read()
frame = pipeline.pipe(frame)
video_server.write(frame)
time.sleep(.05)
except KeyboardInterrupt:
break
if __name__ == '__main__':
main()
| {
"content_hash": "61ae3518c0bc5ead64987550520229d2",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 72,
"avg_line_length": 25.61111111111111,
"alnum_prop": 0.5672451193058569,
"repo_name": "jstriebel/webcam-effects",
"id": "64badc9b65a30328de0a4634d45289afd1a96afc",
"size": "922",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "main_pi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1187"
},
{
"name": "Python",
"bytes": "12888"
}
],
"symlink_target": ""
} |
from overkill import manager
from overkill.extra.mail import MailNotifySink
from overkill.extra.notify import Notify
from overkill.sinks import SimpleSink
from subprocess import Popen
manager.add_sink(MailNotifySink())
class BatteryNotifySink(SimpleSink, Notify):
summary = "Battery Warning"
subscription = "battery_percent"
urgent_percent = 5
critical_percent = 10
low_percent = 15
_current = 100
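    # Falling-level behavior, per the thresholds above: at <=15% show a
    # normal-urgency notification, at <=10% a critical one, and at <=5%
    # suspend the machine via systemctl.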
def handle_update(self, update):
        # FIXME: the sensor reports 0 as its first reading on startup; skip it
if update == "0":
return
previous = self._current
self._current = int(update)
if previous > self.urgent_percent >= self._current:
Popen(["/usr/bin/systemctl", "suspend"])
if previous > self.critical_percent >= self._current:
self.message = "Critical Battery: %d%%" % self._current
self.urgency = self.notify.URGENCY_CRITICAL
self.show()
elif previous > self.low_percent >= self._current:
self.message = "Low Battery: %d%%" % self._current
self.urgency = self.notify.URGENCY_NORMAL
self.show()
manager.add_sink(BatteryNotifySink())
| {
"content_hash": "37e89c3c9d018b4a6e7f83da2a1748ad",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 67,
"avg_line_length": 32.638888888888886,
"alnum_prop": 0.6408510638297872,
"repo_name": "Stebalien/overkill-config",
"id": "c9f3f11243c676d143a41b8d26d256d291144ee4",
"size": "1175",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "notify.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2620"
}
],
"symlink_target": ""
} |
import asyncio
import atexit
import contextlib
import importlib
import inspect
import os
import types
from collections import defaultdict
from functools import partial
from statistics import mean, median
from time import sleep
from timeit import default_timer
from unittest.mock import patch
from urllib.parse import urlparse
import pytest
import geopy.geocoders
from geopy.adapters import AdapterHTTPError, BaseAsyncAdapter, BaseSyncAdapter
from geopy.geocoders.base import _DEFAULT_ADAPTER_CLASS
# pytest-aiohttp calls `inspect.isasyncgenfunction` to detect
# async generators in fixtures.
# To support Python 3.5 we use `async_generator` library.
# However:
# - Since Python 3.6 there is a native implementation of
# `inspect.isasyncgenfunction`, but it returns False
# for `async_generator`'s functions.
# - The stock `async_generator.isasyncgenfunction` doesn't detect
# generators wrapped in `@pytest.fixture`.
#
# Thus we resort to monkey-patching it (for now).
if getattr(inspect, "isasyncgenfunction", None) is not None:
# >=py36
original_isasyncgenfunction = inspect.isasyncgenfunction
else:
# ==py35
original_isasyncgenfunction = lambda func: False # noqa
def isasyncgenfunction(obj):
if original_isasyncgenfunction(obj):
return True
# Detect async_generator function, possibly wrapped in `@pytest.fixture`:
# See https://github.com/python-trio/async_generator/blob/v1.10/async_generator/_impl.py#L451-L455 # noqa
return bool(getattr(obj, "_async_gen_function", None))
inspect.isasyncgenfunction = isasyncgenfunction
def load_adapter_cls(adapter_ref):
actual_adapter_class = _DEFAULT_ADAPTER_CLASS
if adapter_ref:
module_s, cls_s = adapter_ref.rsplit(".", 1)
module = importlib.import_module(module_s)
actual_adapter_class = getattr(module, cls_s)
return actual_adapter_class
max_retries = int(os.getenv('GEOPY_TEST_RETRIES', 2))
error_wait_seconds = float(os.getenv('GEOPY_TEST_ERROR_WAIT_SECONDS', 3))
no_retries_for_hosts = set(os.getenv('GEOPY_TEST_NO_RETRIES_FOR_HOSTS', '').split(','))
default_adapter = load_adapter_cls(os.getenv('GEOPY_TEST_ADAPTER'))
default_adapter_is_async = issubclass(default_adapter, BaseAsyncAdapter)
retry_status_codes = (
403, # Forbidden (probably due to a rate limit)
429, # Too Many Requests (definitely a rate limit)
502, # Bad Gateway
)
def pytest_report_header(config):
internet_access = "allowed" if _is_internet_access_allowed(config) else "disabled"
adapter_type = "async" if default_adapter_is_async else "sync"
return (
"geopy:\n"
" internet access: %s\n"
" adapter: %r\n"
" adapter type: %s"
% (internet_access, default_adapter, adapter_type)
)
def pytest_addoption(parser):
# This option will probably be used in downstream packages,
# thus it should be considered a public interface.
parser.addoption(
"--skip-tests-requiring-internet",
action="store_true",
help="Skip tests requiring Internet access.",
)
def _is_internet_access_allowed(config):
return not config.getoption("--skip-tests-requiring-internet")
@pytest.fixture(scope='session')
def is_internet_access_allowed(request):
return _is_internet_access_allowed(request.config)
@pytest.fixture
def skip_if_internet_access_is_not_allowed(is_internet_access_allowed):
# Used in test_adapters.py, which doesn't use the injected adapter below.
if not is_internet_access_allowed:
pytest.skip("Skipping a test requiring Internet access")
@pytest.fixture(autouse=True, scope="session")
def loop():
# Geocoder instances have class scope, so the event loop
# should have session scope.
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
return loop
def netloc_from_url(url):
return urlparse(url).netloc
def pretty_dict_format(heading, dict_to_format,
item_prefix=' ', legend='',
value_mapper=lambda v: v):
s = [heading]
if not dict_to_format:
s.append(item_prefix + '-- empty --')
else:
max_key_len = max(len(k) for k in dict_to_format.keys())
for k, v in sorted(dict_to_format.items()):
s.append('%s%s%s' % (item_prefix, k.ljust(max_key_len + 2),
value_mapper(v)))
if legend:
s.append('')
s.append('* %s' % legend)
s.append('') # trailing newline
return '\n'.join(s)
class RequestsMonitor:
"""RequestsMonitor holds statistics of Adapter requests."""
def __init__(self):
self.host_stats = defaultdict(lambda: dict(count=0, retries=0, times=[]))
def record_request(self, url):
hostname = netloc_from_url(url)
self.host_stats[hostname]['count'] += 1
def record_retry(self, url):
hostname = netloc_from_url(url)
self.host_stats[hostname]['retries'] += 1
@contextlib.contextmanager
def record_response(self, url):
start = default_timer()
try:
yield
finally:
end = default_timer()
hostname = netloc_from_url(url)
self.host_stats[hostname]['times'].append(end - start)
def __str__(self):
def value_mapper(v):
tv = v['times']
times_format = (
"min:%5.2fs, median:%5.2fs, max:%5.2fs, mean:%5.2fs, total:%5.2fs"
)
if tv:
# min/max require a non-empty sequence.
times = times_format % (min(tv), median(tv), max(tv), mean(tv), sum(tv))
else:
nan = float("nan")
times = times_format % (nan, nan, nan, nan, 0)
count = "count:%3d" % v['count']
retries = "retries:%3d" % v['retries'] if v['retries'] else ""
return "; ".join(s for s in (count, times, retries) if s)
legend = (
"count – number of requests (excluding retries); "
"min, median, max, mean, total – request duration statistics "
"(excluding failed requests); retries – number of retries."
)
return pretty_dict_format('Request statistics per hostname',
self.host_stats,
legend=legend,
value_mapper=value_mapper)
@pytest.fixture(scope='session')
def requests_monitor():
return RequestsMonitor()
@pytest.fixture(autouse=True, scope='session')
def print_requests_monitor_report(requests_monitor):
yield
def report():
print(str(requests_monitor))
# https://github.com/pytest-dev/pytest/issues/2704
# https://stackoverflow.com/a/38806934
atexit.register(report)
@pytest.fixture(scope='session')
def retries_enabled_session():
return types.SimpleNamespace(value=True)
@pytest.fixture
def disable_adapter_retries(retries_enabled_session):
retries_enabled_session.value = False
yield
retries_enabled_session.value = True
@pytest.fixture(autouse=True, scope='session')
def patch_adapter(
requests_monitor, retries_enabled_session, is_internet_access_allowed
):
"""
Patch the default Adapter to provide the following features:
- Retry failed requests. Makes test runs more stable.
- Track statistics with RequestsMonitor.
- Skip tests requiring Internet access when Internet access is not allowed.
"""
if default_adapter_is_async:
class AdapterProxy(BaseAdapterProxy, BaseAsyncAdapter):
async def __aenter__(self):
assert await self.adapter.__aenter__() is self.adapter
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
return await self.adapter.__aexit__(exc_type, exc_val, exc_tb)
async def _wrapped_get(self, url, do_request):
res = None
gen = self._retries(url)
while True:
try:
next(gen)
except StopIteration:
break
assert res is None
try:
res = await do_request()
except Exception as e:
error_wait_seconds = gen.throw(e)
await asyncio.sleep(error_wait_seconds)
else:
assert gen.send(res) is None
assert res is not None
return res
else:
class AdapterProxy(BaseAdapterProxy, BaseSyncAdapter):
def _wrapped_get(self, url, do_request):
res = None
gen = self._retries(url)
while True:
try:
next(gen)
except StopIteration:
break
assert res is None
try:
res = do_request()
except Exception as e:
error_wait_seconds = gen.throw(e)
sleep(error_wait_seconds)
else:
assert gen.send(res) is None
assert res is not None
return res
# In order to take advantage of Keep-Alives in tests, the actual Adapter
# should be persisted between the test runs, so this fixture must be
# in the "session" scope.
adapter_factory = partial(
AdapterProxy,
adapter_factory=default_adapter,
requests_monitor=requests_monitor,
retries_enabled_session=retries_enabled_session,
is_internet_access_allowed=is_internet_access_allowed,
)
with patch.object(
geopy.geocoders.options, "default_adapter_factory", adapter_factory
):
yield
class BaseAdapterProxy:
def __init__(
self,
*,
proxies,
ssl_context,
adapter_factory,
requests_monitor,
retries_enabled_session,
is_internet_access_allowed
):
self.adapter = adapter_factory(
proxies=proxies,
ssl_context=ssl_context,
)
self.requests_monitor = requests_monitor
self.retries_enabled_session = retries_enabled_session
self.is_internet_access_allowed = is_internet_access_allowed
def get_json(self, url, *, timeout, headers):
return self._wrapped_get(
url,
partial(self.adapter.get_json, url, timeout=timeout, headers=headers),
)
def get_text(self, url, *, timeout, headers):
return self._wrapped_get(
url,
partial(self.adapter.get_text, url, timeout=timeout, headers=headers),
)
def _retries(self, url):
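        # Generator protocol (driven by _wrapped_get):
        #   next(gen)         -> arm one request attempt
        #   gen.send(result)  -> attempt succeeded; yields None, and the
        #                        following next() finishes the generator
        #   gen.throw(exc)    -> attempt failed; re-raises when retries are
        #                        exhausted, otherwise yields the seconds to
        #                        sleep before the next attempt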
if not self.is_internet_access_allowed:
# Assume that *all* geocoders require Internet access
pytest.skip("Skipping a test requiring Internet access")
self.requests_monitor.record_request(url)
netloc = netloc_from_url(url)
retries = max_retries
if netloc in no_retries_for_hosts:
retries = 0
for i in range(retries + 1):
try:
with self.requests_monitor.record_response(url):
yield
except AdapterHTTPError as error:
if not self.retries_enabled_session.value:
raise
if i == retries or error.status_code not in retry_status_codes:
# Note: we shouldn't blindly retry on any >=400 code,
# because some of them are actually expected in tests
# (like input validation verification).
# TODO Retry failures with the 200 code?
# Some geocoders return failures with 200 code
# (like GoogleV3 for Quota Exceeded).
# Should we detect this somehow to restart such requests?
#
# Re-raise -- don't retry this request
raise
else:
# Swallow the error and retry the request
pass
except Exception:
if i == retries:
raise
else:
yield None
return
self.requests_monitor.record_retry(url)
yield error_wait_seconds
raise RuntimeError("Should not have been reached")
| {
"content_hash": "3cf4742abca2a57665d4afedca8d46b3",
"timestamp": "",
"source": "github",
"line_count": 377,
"max_line_length": 110,
"avg_line_length": 33.389920424403186,
"alnum_prop": 0.5927867810613282,
"repo_name": "jmb/geopy",
"id": "42889bfd2659139564f0d1c36b6206d058ea44ce",
"size": "12594",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/conftest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1334"
},
{
"name": "Python",
"bytes": "477174"
}
],
"symlink_target": ""
} |
from gala import classify
datas = []
labels = []
import numpy as np
for i in range(4):
data, label = classify.load_training_data_from_disk('training-data-%i.h5' % i,
names=['data', 'labels'])
datas.append(data)
labels.append(label[:, 0])
X0 = np.concatenate(datas, axis=0)
y0 = np.concatenate(labels)
# runtime was 5min for 3000 samples, expect ~2h for 72,000
# for 280,000, expect ~8h (took 10h)
idx = np.random.choice(len(y0), size=280000, replace=False)
X, y = X0[idx], y0[idx]
param_dist = {'n_estimators': [20, 100, 200, 500],
'max_depth': [3, 5, 20, None],
'max_features': ['auto', 5, 10, 20],
'bootstrap': [True, False],
'criterion': ['gini', 'entropy']}
from sklearn import grid_search as gs
from time import time
from sklearn import ensemble
rf = ensemble.RandomForestClassifier()
grid_search = gs.GridSearchCV(rf, param_grid=param_dist, refit=False,
                              verbose=2, n_jobs=12)
start = time(); grid_search.fit(X, y); stop = time()
print('took %s seconds' % (stop - start))
| {
"content_hash": "22496b758087820f2904f233e692a796",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 82,
"avg_line_length": 39.241379310344826,
"alnum_prop": 0.5949033391915641,
"repo_name": "jni/gala-scripts",
"id": "a53efad364f19720cb45cd178c674665d60f73c7",
"size": "1159",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "gala-training-crossval-sub.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "12253"
}
],
"symlink_target": ""
} |
from sys import maxsize
class Group:
def __init__(self, name=None, header=None, footer=None, id=None):
self.name = name
self.header = header
self.footer = footer
self.id = id
    def __repr__(self):  # shows the group lists in the console
return "%s:%s:%s:%s" % (self.id, self.name, self.header, self.footer)
    def __eq__(self, other):  # object list comparison; a None id matches any id
return (self.id is None or other.id is None or self.id == other.id) and self.name == other.name
def id_or_max(self):
if self.id:
return int(self.id)
else:
return maxsize
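# A minimal usage sketch (not part of the original module): id_or_max lets
# mixed saved/unsaved groups be sorted deterministically, e.g.
#   sorted([Group(name="b", id="2"), Group(name="a")], key=Group.id_or_max)
# puts groups without a database id last.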
| {
"content_hash": "c580a204075ffc1a8013d0cdbffb0fb9",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 103,
"avg_line_length": 30,
"alnum_prop": 0.580952380952381,
"repo_name": "zr4x/pythonTests",
"id": "e7996b47f8c201a35e70ee9f4b5252569e93ab9c",
"size": "678",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "model/group.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "34679"
}
],
"symlink_target": ""
} |
from __future__ import annotations
from dynaconf import settings
print("EXAMPLE_ prefix")
settings.configure(ENVVAR_PREFIX_FOR_DYNACONF="EXAMPLE")
print(settings.VAR1)
print(settings.VAR2)
print("_ prefix")
settings.configure(ENVVAR_PREFIX_FOR_DYNACONF="")
print(settings.VAR1)
print(settings.VAR2)
print("no prefix at all")
settings.configure(ENVVAR_PREFIX_FOR_DYNACONF=False)
print(settings.VAR1)
print(settings.VAR2)
# test issue 166 (renamed GLOBAL_ENV_)
print("using GLOBAL_ENV_")
settings.configure(GLOBAL_ENV_FOR_DYNACONF=False)
print(settings.VAR1)
print(settings.VAR2)
| {
"content_hash": "1b628f6110e492168c822a4cfd5450f0",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 56,
"avg_line_length": 23.36,
"alnum_prop": 0.7825342465753424,
"repo_name": "rochacbruno/dynaconf",
"id": "eb88dd647af7af0cb8f36216e4bff98f79fa594a",
"size": "584",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "example/envvar_prefix/app.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "2867"
},
{
"name": "Makefile",
"bytes": "11505"
},
{
"name": "Python",
"bytes": "1438471"
},
{
"name": "Shell",
"bytes": "14740"
}
],
"symlink_target": ""
} |
import os
import sys
import unittest
classifiers = """\
Development Status :: 5 - Production/Stable
Environment :: Console
Intended Audience :: Developers
Intended Audience :: Education
Intended Audience :: Information Technology
Intended Audience :: System Administrators
Intended Audience :: Telecommunications Industry
License :: OSI Approved :: BSD License
Natural Language :: English
Operating System :: OS Independent
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.5
Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7
Programming Language :: Python :: 3.8
Topic :: Communications
Topic :: Software Development :: Libraries :: Python Modules
"""
def howto_install_setuptools():
print("""
Error: You need setuptools Python package!
It's very easy to install it, just type:
wget https://bootstrap.pypa.io/ez_setup.py
python ez_setup.py
Then you could make eggs from this package.
""")
if sys.version_info[:2] < (2, 7):
print("ERROR: this package requires Python 2.7 or later!")
sys.exit(1)
try:
from setuptools import setup, Command
params = {
'zip_safe': True
}
except ImportError:
for arg in sys.argv:
if 'egg' in arg:
howto_install_setuptools()
sys.exit(1)
from distutils.core import setup, Command
params = {}
params.update({
'name': 'pyasn1',
'version': open(os.path.join('pyasn1', '__init__.py')).read().split('\'')[1],
'description': 'ASN.1 types and codecs',
'long_description': 'Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)',
'maintainer': 'Ilya Etingof <[email protected]>',
'author': 'Ilya Etingof',
'author_email': '[email protected]',
'url': 'https://github.com/etingof/pyasn1',
'platforms': ['any'],
'classifiers': [x for x in classifiers.split('\n') if x],
'license': 'BSD',
'packages': ['pyasn1',
'pyasn1.type',
'pyasn1.compat',
'pyasn1.codec',
'pyasn1.codec.ber',
'pyasn1.codec.cer',
'pyasn1.codec.der',
'pyasn1.codec.native'],
'python_requires': '>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*'})
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
suite = unittest.TestLoader().loadTestsFromNames(
['tests.__main__.suite']
)
unittest.TextTestRunner(verbosity=2).run(suite)
params['cmdclass'] = {
'test': PyTest,
'tests': PyTest,
}
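# With the commands registered above, the test suite can be run with
# `python setup.py test` (the `tests` alias behaves the same).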
setup(**params)
| {
"content_hash": "9c4a1d3f6ebafa8103c3ecc487b16aa6",
"timestamp": "",
"source": "github",
"line_count": 105,
"max_line_length": 99,
"avg_line_length": 26.247619047619047,
"alnum_prop": 0.6186502177068215,
"repo_name": "etingof/pyasn1",
"id": "dc5d7042a1a3617d31148ff4b0d30fcbbb63d762",
"size": "2935",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "684863"
}
],
"symlink_target": ""
} |
"""Common profiles are defined here to be easily used within a project using --profile {name}"""
from typing import Any, Dict
black = {
"multi_line_output": 3,
"include_trailing_comma": True,
"force_grid_wrap": 0,
"use_parentheses": True,
"ensure_newline_before_comments": True,
"line_length": 88,
}
django = {
"combine_as_imports": True,
"include_trailing_comma": True,
"multi_line_output": 5,
"line_length": 79,
}
pycharm = {
"multi_line_output": 3,
"force_grid_wrap": 2,
"lines_after_imports": 2,
}
google = {
"force_single_line": True,
"force_sort_within_sections": True,
"lexicographical": True,
"single_line_exclusions": ("typing",),
"order_by_type": False,
"group_by_package": True,
}
open_stack = {
"force_single_line": True,
"force_sort_within_sections": True,
"lexicographical": True,
}
plone = {
"force_alphabetical_sort": True,
"force_single_line": True,
"lines_after_imports": 2,
"line_length": 200,
}
attrs = {
"atomic": True,
"force_grid_wrap": 0,
"include_trailing_comma": True,
"lines_after_imports": 2,
"lines_between_types": 1,
"multi_line_output": 3,
"use_parentheses": True,
}
hug = {
"multi_line_output": 3,
"include_trailing_comma": True,
"force_grid_wrap": 0,
"use_parentheses": True,
"line_length": 100,
}
wemake = {
"multi_line_output": 3,
"include_trailing_comma": True,
"use_parentheses": True,
"line_length": 80,
}
appnexus = {
**black,
"force_sort_within_sections": True,
"order_by_type": False,
"case_sensitive": False,
"reverse_relative": True,
"sort_relative_in_force_sorted_sections": True,
"sections": ["FUTURE", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "APPLICATION", "LOCALFOLDER"],
"no_lines_before": "LOCALFOLDER",
}
profiles: Dict[str, Dict[str, Any]] = {
"black": black,
"django": django,
"pycharm": pycharm,
"google": google,
"open_stack": open_stack,
"plone": plone,
"attrs": attrs,
"hug": hug,
"wemake": wemake,
"appnexus": appnexus,
}
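# Usage sketch (the pyproject.toml form is an assumption about isort's
# configuration interface):
#   isort --profile black myfile.py
# or in pyproject.toml:
#   [tool.isort]
#   profile = "black"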
| {
"content_hash": "6b5c6ac08f1a7ef2843744bc0880d0fc",
"timestamp": "",
"source": "github",
"line_count": 86,
"max_line_length": 96,
"avg_line_length": 24.63953488372093,
"alnum_prop": 0.6045304388862671,
"repo_name": "glenngillen/dotfiles",
"id": "21d064630068fec7e69cbf9c4089327f786e88d1",
"size": "2119",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": ".vscode/extensions/ms-python.python-2022.2.1924087327/pythonFiles/lib/python/isort/profiles.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Ruby",
"bytes": "3634"
},
{
"name": "Shell",
"bytes": "4225"
},
{
"name": "Vim script",
"bytes": "16306"
}
],
"symlink_target": ""
} |
import numpy as np
import argparse
import os
import sys
import signal
import time
import socket
from contextlib import closing
import math
import paddle
import paddle.fluid as fluid
import paddle.fluid.profiler as profiler
import paddle.fluid.unique_name as nameGen
from paddle.fluid import core
import unittest
from multiprocessing import Process
import paddle.fluid.layers as layers
from functools import reduce
from test_collective_api_base_mlu import (
TestCollectiveAPIRunnerBase,
runtime_main,
)
paddle.enable_static()
class TestCollectiveAllreduceAPI(TestCollectiveAPIRunnerBase):
def __init__(self):
self.global_ring_id = 0
def get_model(self, main_prog, startup_program, rank):
with fluid.program_guard(main_prog, startup_program):
tindata = layers.data(
name="tindata", shape=[10, 1000], dtype='float32'
)
paddle.distributed.all_reduce(tindata)
return [tindata]
if __name__ == "__main__":
runtime_main(TestCollectiveAllreduceAPI, "allreduce")
| {
"content_hash": "9122c188099444e3a3e19566ee5fa9d1",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 65,
"avg_line_length": 25.829268292682926,
"alnum_prop": 0.7214353163361662,
"repo_name": "luotao1/Paddle",
"id": "22ca990c55afde93e2b28ab1167785e6e0559f49",
"size": "1670",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "python/paddle/fluid/tests/unittests/mlu/collective_allreduce_api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "58544"
},
{
"name": "C",
"bytes": "210300"
},
{
"name": "C++",
"bytes": "36771446"
},
{
"name": "CMake",
"bytes": "903079"
},
{
"name": "Cuda",
"bytes": "5200715"
},
{
"name": "Dockerfile",
"bytes": "4361"
},
{
"name": "Go",
"bytes": "49796"
},
{
"name": "Java",
"bytes": "16630"
},
{
"name": "Jinja",
"bytes": "23852"
},
{
"name": "MLIR",
"bytes": "39982"
},
{
"name": "Python",
"bytes": "36248258"
},
{
"name": "R",
"bytes": "1332"
},
{
"name": "Shell",
"bytes": "553175"
}
],
"symlink_target": ""
} |
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
import json
def main():
asset_results = []
incident = demisto.incident()
if not incident:
raise ValueError("Error - demisto.incident() expected to return current incident "
"from context but returned None")
labels = incident.get('labels', [])
for label in labels:
if label.get('type') == 'successful_asset_enrichment':
is_successful = label.get('value')
if is_successful == 'false':
return CommandResults(readable_output='Asset enrichment failed.')
if label.get('type') == 'Asset':
asset_results = json.loads(label.get('value', []))
if not asset_results:
return CommandResults(readable_output='No assets were found in the notable')
markdown = tableToMarkdown("", asset_results, headers=asset_results[0].keys())
return {'ContentsFormat': formats['markdown'], 'Type': entryTypes['note'], 'Contents': markdown}
if __name__ in ('__main__', '__builtin__', 'builtins'):
try:
return_results(main())
except Exception as e:
return_error(f'Got an error while parsing Splunk events: {e}', error=e)
| {
"content_hash": "3d34ba6bac431dfaa9267d07eaeb1032",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 100,
"avg_line_length": 36.794117647058826,
"alnum_prop": 0.6258992805755396,
"repo_name": "demisto/content",
"id": "32e2d1dcfdce6751e3c469e2510a8130a6820904",
"size": "1251",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Packs/SplunkPy/Scripts/SplunkShowAsset/SplunkShowAsset.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "2146"
},
{
"name": "HTML",
"bytes": "205901"
},
{
"name": "JavaScript",
"bytes": "1584075"
},
{
"name": "PowerShell",
"bytes": "442288"
},
{
"name": "Python",
"bytes": "47881712"
},
{
"name": "Rich Text Format",
"bytes": "480911"
},
{
"name": "Shell",
"bytes": "108066"
},
{
"name": "YARA",
"bytes": "1185"
}
],
"symlink_target": ""
} |
"""
This module is for internal use only; no backwards-compatibility guarantees.
The classes in this file keep shared state, and organize metrics information.
Available classes:
- MetricKey - Internal key for a metric.
- MetricResult - Current status of a metric's updates/commits.
- _MetricsEnvironment - Keeps track of MetricsContainer and other metrics
information for every single execution working thread.
- MetricsContainer - Holds the metrics of a single step and a single
unit-of-commit (bundle).
"""
from __future__ import absolute_import
from builtins import object
from apache_beam.metrics import monitoring_infos
from apache_beam.metrics.cells import CounterCell
from apache_beam.metrics.cells import DistributionCell
from apache_beam.metrics.cells import GaugeCell
from apache_beam.runners.worker import statesampler
from apache_beam.runners.worker.statesampler import get_current_tracker
class MetricKey(object):
"""Key used to identify instance of metric cell.
Metrics are internally keyed by the name of the step they're associated with,
the name and namespace (if it is a user defined metric) of the metric,
and any extra label metadata added by the runner specific metric collection
service.
"""
def __init__(self, step, metric, labels=None):
"""Initializes ``MetricKey``.
Args:
step: A string with the step this metric cell is part of.
metric: A ``MetricName`` namespace+name that identifies a metric.
labels: An arbitrary set of labels that also identifies the metric.
"""
self.step = step
self.metric = metric
self.labels = labels if labels else dict()
def __eq__(self, other):
return (self.step == other.step and
self.metric == other.metric and
self.labels == other.labels)
def __ne__(self, other):
# TODO(BEAM-5949): Needed for Python 2 compatibility.
return not self == other
def __hash__(self):
return hash((self.step, self.metric, frozenset(self.labels)))
def __repr__(self):
return 'MetricKey(step={}, metric={}, labels={})'.format(
self.step, self.metric, self.labels)
class MetricResult(object):
"""Keeps track of the status of a metric within a single bundle.
It contains the physical and logical updates to the metric. Physical updates
are updates that have not necessarily been committed, but that have been made
during pipeline execution. Logical updates are updates that have been
committed.
Attributes:
key: A ``MetricKey`` that identifies the metric and bundle of this result.
    committed: The committed (logical) updates of the metric. This attribute's
      type is that of the metric type result (e.g. int, DistributionResult,
      GaugeResult).
    attempted: The attempted (physical) updates of the metric. This attribute's
      type is that of the metric type result (e.g. int, DistributionResult,
      GaugeResult).
"""
def __init__(self, key, committed, attempted):
"""Initializes ``MetricResult``.
Args:
key: A ``MetricKey`` object.
committed: Metric data that has been committed (e.g. logical updates)
attempted: Metric data that has been attempted (e.g. physical updates)
"""
self.key = key
self.committed = committed
self.attempted = attempted
def __eq__(self, other):
return (self.key == other.key and
self.committed == other.committed and
self.attempted == other.attempted)
def __ne__(self, other):
# TODO(BEAM-5949): Needed for Python 2 compatibility.
return not self == other
def __hash__(self):
return hash((self.key, self.committed, self.attempted))
def __repr__(self):
return 'MetricResult(key={}, committed={}, attempted={})'.format(
self.key, str(self.committed), str(self.attempted))
def __str__(self):
return repr(self)
@property
def result(self):
"""Short-hand for falling back to attempted metrics if it seems that
committed was not populated (e.g. due to not being supported on a given
runner"""
return self.committed if self.committed else self.attempted
class _MetricsEnvironment(object):
"""Holds the MetricsContainer for every thread and other metric information.
This class is not meant to be instantiated, instead being used to keep
track of global state.
"""
def current_container(self):
"""Returns the current MetricsContainer."""
sampler = statesampler.get_current_tracker()
if sampler is None:
return None
return sampler.current_state().metrics_container
MetricsEnvironment = _MetricsEnvironment()
class _TypedMetricName(object):
"""Like MetricName, but also stores the cell type of the metric."""
def __init__(self, cell_type, metric_name):
self.cell_type = cell_type
self.metric_name = metric_name
if isinstance(metric_name, str):
self.fast_name = metric_name
else:
self.fast_name = '%d_%s%s' % (
len(metric_name.name), metric_name.name, metric_name.namespace)
# Cached for speed, as this is used as a key for every counter update.
self._hash = hash((cell_type, self.fast_name))
def __eq__(self, other):
return self is other or (
self.cell_type == other.cell_type and self.fast_name == other.fast_name)
def __ne__(self, other):
return not self == other
def __hash__(self):
return self._hash
def __reduce__(self):
return _TypedMetricName, (self.cell_type, self.metric_name)
_DEFAULT = None
class MetricUpdater(object):
"""A callable that updates the metric as quickly as possible."""
def __init__(self, cell_type, metric_name, default=None):
self.typed_metric_name = _TypedMetricName(cell_type, metric_name)
self.default = default
def __call__(self, value=_DEFAULT):
if value is _DEFAULT:
if self.default is _DEFAULT:
raise ValueError(
            'Missing value for update of %s' % self.typed_metric_name.metric_name)
value = self.default
tracker = get_current_tracker()
if tracker is not None:
tracker.update_metric(self.typed_metric_name, value)
def __reduce__(self):
return MetricUpdater, (
self.typed_metric_name.cell_type,
self.typed_metric_name.metric_name,
self.default)
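# A minimal usage sketch (the names below are hypothetical; how updaters are
# handed out to user code is not shown in this module):
#
#   inc = MetricUpdater(CounterCell, some_metric_name, default=1)
#   inc()   # bumps the counter on the current tracker by the default of 1
#   inc(5)  # bumps it by 5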
class MetricsContainer(object):
"""Holds the metrics of a single step and a single bundle."""
def __init__(self, step_name):
self.step_name = step_name
self.metrics = dict()
def get_counter(self, metric_name):
return self.get_metric_cell(_TypedMetricName(CounterCell, metric_name))
def get_distribution(self, metric_name):
return self.get_metric_cell(_TypedMetricName(DistributionCell, metric_name))
def get_gauge(self, metric_name):
return self.get_metric_cell(_TypedMetricName(GaugeCell, metric_name))
def get_metric_cell(self, typed_metric_name):
cell = self.metrics.get(typed_metric_name, None)
if cell is None:
cell = self.metrics[typed_metric_name] = typed_metric_name.cell_type()
return cell
def get_cumulative(self):
"""Return MetricUpdates with cumulative values of all metrics in container.
This returns all the cumulative values for all metrics.
"""
counters = {MetricKey(self.step_name, k.metric_name): v.get_cumulative()
for k, v in self.metrics.items()
if k.cell_type == CounterCell}
distributions = {
MetricKey(self.step_name, k.metric_name): v.get_cumulative()
for k, v in self.metrics.items()
if k.cell_type == DistributionCell}
gauges = {MetricKey(self.step_name, k.metric_name): v.get_cumulative()
for k, v in self.metrics.items()
if k.cell_type == GaugeCell}
return MetricUpdates(counters, distributions, gauges)
def to_runner_api(self):
return [cell.to_runner_api_user_metric(key.metric_name)
for key, cell in self.metrics.items()]
def to_runner_api_monitoring_infos(self, transform_id):
"""Returns a list of MonitoringInfos for the metrics in this container."""
all_user_metrics = [
cell.to_runner_api_monitoring_info(key.metric_name, transform_id)
for key, cell in self.metrics.items()]
return {monitoring_infos.to_key(mi) : mi for mi in all_user_metrics}
def reset(self):
for metric in self.metrics.values():
metric.reset()
def __reduce__(self):
raise NotImplementedError
class MetricUpdates(object):
"""Contains updates for several metrics.
A metric update is an object containing information to update a metric.
For Distribution metrics, it is DistributionData, and for Counter metrics,
it's an int.
"""
def __init__(self, counters=None, distributions=None, gauges=None):
"""Create a MetricUpdates object.
Args:
counters: Dictionary of MetricKey:MetricUpdate updates.
distributions: Dictionary of MetricKey:MetricUpdate objects.
gauges: Dictionary of MetricKey:MetricUpdate objects.
"""
self.counters = counters or {}
self.distributions = distributions or {}
self.gauges = gauges or {}
| {
"content_hash": "4c1659ecd57dabb19663fdaa8bf2bb85",
"timestamp": "",
"source": "github",
"line_count": 265,
"max_line_length": 80,
"avg_line_length": 34.0188679245283,
"alnum_prop": 0.6891846921797005,
"repo_name": "RyanSkraba/beam",
"id": "691891484e43a0c8fa3b9a54608908bfc7398b54",
"size": "9800",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sdks/python/apache_beam/metrics/execution.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1597"
},
{
"name": "CSS",
"bytes": "40963"
},
{
"name": "Dockerfile",
"bytes": "16638"
},
{
"name": "FreeMarker",
"bytes": "7428"
},
{
"name": "Go",
"bytes": "2683402"
},
{
"name": "Groovy",
"bytes": "517560"
},
{
"name": "HTML",
"bytes": "183330"
},
{
"name": "Java",
"bytes": "28609011"
},
{
"name": "JavaScript",
"bytes": "16595"
},
{
"name": "Jupyter Notebook",
"bytes": "56365"
},
{
"name": "Python",
"bytes": "6191025"
},
{
"name": "Ruby",
"bytes": "4159"
},
{
"name": "Shell",
"bytes": "235061"
},
{
"name": "TSQL",
"bytes": "841"
}
],
"symlink_target": ""
} |
from collections.abc import Mapping
import re
import warnings
import pytest
from scipy import sparse
from sklearn.feature_extraction.text import strip_tags
from sklearn.feature_extraction.text import strip_accents_unicode
from sklearn.feature_extraction.text import strip_accents_ascii
from sklearn.feature_extraction.text import HashingVectorizer
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.feature_extraction.text import ENGLISH_STOP_WORDS
from sklearn.model_selection import train_test_split
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import GridSearchCV
from sklearn.pipeline import Pipeline
from sklearn.svm import LinearSVC
from sklearn.base import clone
import numpy as np
from numpy.testing import assert_array_almost_equal
from numpy.testing import assert_array_equal
from sklearn.utils import IS_PYPY
from sklearn.exceptions import ChangedBehaviorWarning
from sklearn.utils.testing import (assert_almost_equal,
assert_warns_message, assert_raise_message,
clean_warning_registry,
SkipTest, assert_raises, assert_no_warnings,
fails_if_pypy, assert_allclose_dense_sparse,
skip_if_32bit)
from collections import defaultdict
from functools import partial
import pickle
from io import StringIO
JUNK_FOOD_DOCS = (
"the pizza pizza beer copyright",
"the pizza burger beer copyright",
"the the pizza beer beer copyright",
"the burger beer beer copyright",
"the coke burger coke copyright",
"the coke burger burger",
)
NOTJUNK_FOOD_DOCS = (
"the salad celeri copyright",
"the salad salad sparkling water copyright",
"the the celeri celeri copyright",
"the tomato tomato salad water",
"the tomato salad water copyright",
)
ALL_FOOD_DOCS = JUNK_FOOD_DOCS + NOTJUNK_FOOD_DOCS
def uppercase(s):
return strip_accents_unicode(s).upper()
def strip_eacute(s):
return s.replace('é', 'e')
def split_tokenize(s):
return s.split()
def lazy_analyze(s):
return ['the_ultimate_feature']
def test_strip_accents():
# check some classical latin accentuated symbols
a = 'àáâãäåçèéêë'
expected = 'aaaaaaceeee'
assert strip_accents_unicode(a) == expected
a = 'ìíîïñòóôõöùúûüý'
expected = 'iiiinooooouuuuy'
assert strip_accents_unicode(a) == expected
# check some arabic
a = '\u0625' # alef with a hamza below: إ
expected = '\u0627' # simple alef: ا
assert strip_accents_unicode(a) == expected
# mix letters accentuated and not
a = "this is à test"
expected = 'this is a test'
assert strip_accents_unicode(a) == expected
def test_to_ascii():
# check some classical latin accentuated symbols
a = 'àáâãäåçèéêë'
expected = 'aaaaaaceeee'
assert strip_accents_ascii(a) == expected
a = "ìíîïñòóôõöùúûüý"
expected = 'iiiinooooouuuuy'
assert strip_accents_ascii(a) == expected
# check some arabic
    a = '\u0625' # alef with a hamza below
    expected = '' # the alef with hamza has no direct ascii match
assert strip_accents_ascii(a) == expected
# mix letters accentuated and not
a = "this is à test"
expected = 'this is a test'
assert strip_accents_ascii(a) == expected
@pytest.mark.parametrize('Vectorizer', (CountVectorizer, HashingVectorizer))
def test_word_analyzer_unigrams(Vectorizer):
wa = Vectorizer(strip_accents='ascii').build_analyzer()
text = ("J'ai mangé du kangourou ce midi, "
"c'était pas très bon.")
expected = ['ai', 'mange', 'du', 'kangourou', 'ce', 'midi',
'etait', 'pas', 'tres', 'bon']
assert wa(text) == expected
text = "This is a test, really.\n\n I met Harry yesterday."
expected = ['this', 'is', 'test', 'really', 'met', 'harry',
'yesterday']
assert wa(text) == expected
wa = Vectorizer(input='file').build_analyzer()
text = StringIO("This is a test with a file-like object!")
expected = ['this', 'is', 'test', 'with', 'file', 'like',
'object']
assert wa(text) == expected
# with custom preprocessor
wa = Vectorizer(preprocessor=uppercase).build_analyzer()
text = ("J'ai mangé du kangourou ce midi, "
" c'était pas très bon.")
expected = ['AI', 'MANGE', 'DU', 'KANGOUROU', 'CE', 'MIDI',
'ETAIT', 'PAS', 'TRES', 'BON']
assert wa(text) == expected
# with custom tokenizer
wa = Vectorizer(tokenizer=split_tokenize,
strip_accents='ascii').build_analyzer()
text = ("J'ai mangé du kangourou ce midi, "
"c'était pas très bon.")
expected = ["j'ai", 'mange', 'du', 'kangourou', 'ce', 'midi,',
"c'etait", 'pas', 'tres', 'bon.']
assert wa(text) == expected
def test_word_analyzer_unigrams_and_bigrams():
wa = CountVectorizer(analyzer="word", strip_accents='unicode',
ngram_range=(1, 2)).build_analyzer()
text = "J'ai mangé du kangourou ce midi, c'était pas très bon."
expected = ['ai', 'mange', 'du', 'kangourou', 'ce', 'midi',
'etait', 'pas', 'tres', 'bon', 'ai mange', 'mange du',
'du kangourou', 'kangourou ce', 'ce midi', 'midi etait',
'etait pas', 'pas tres', 'tres bon']
assert wa(text) == expected
def test_unicode_decode_error():
    # decode_error defaults to 'strict', so this should fail
# First, encode (as bytes) a unicode string.
text = "J'ai mangé du kangourou ce midi, c'était pas très bon."
text_bytes = text.encode('utf-8')
# Then let the Analyzer try to decode it as ascii. It should fail,
# because we have given it an incorrect encoding.
wa = CountVectorizer(ngram_range=(1, 2), encoding='ascii').build_analyzer()
assert_raises(UnicodeDecodeError, wa, text_bytes)
ca = CountVectorizer(analyzer='char', ngram_range=(3, 6),
encoding='ascii').build_analyzer()
assert_raises(UnicodeDecodeError, ca, text_bytes)
def test_char_ngram_analyzer():
cnga = CountVectorizer(analyzer='char', strip_accents='unicode',
ngram_range=(3, 6)).build_analyzer()
text = "J'ai mangé du kangourou ce midi, c'était pas très bon"
expected = ["j'a", "'ai", 'ai ', 'i m', ' ma']
assert cnga(text)[:5] == expected
expected = ['s tres', ' tres ', 'tres b', 'res bo', 'es bon']
assert cnga(text)[-5:] == expected
text = "This \n\tis a test, really.\n\n I met Harry yesterday"
expected = ['thi', 'his', 'is ', 's i', ' is']
assert cnga(text)[:5] == expected
expected = [' yeste', 'yester', 'esterd', 'sterda', 'terday']
assert cnga(text)[-5:] == expected
cnga = CountVectorizer(input='file', analyzer='char',
ngram_range=(3, 6)).build_analyzer()
text = StringIO("This is a test with a file-like object!")
expected = ['thi', 'his', 'is ', 's i', ' is']
assert cnga(text)[:5] == expected
def test_char_wb_ngram_analyzer():
cnga = CountVectorizer(analyzer='char_wb', strip_accents='unicode',
ngram_range=(3, 6)).build_analyzer()
text = "This \n\tis a test, really.\n\n I met Harry yesterday"
expected = [' th', 'thi', 'his', 'is ', ' thi']
assert cnga(text)[:5] == expected
expected = ['yester', 'esterd', 'sterda', 'terday', 'erday ']
assert cnga(text)[-5:] == expected
cnga = CountVectorizer(input='file', analyzer='char_wb',
ngram_range=(3, 6)).build_analyzer()
text = StringIO("A test with a file-like object!")
expected = [' a ', ' te', 'tes', 'est', 'st ', ' tes']
assert cnga(text)[:6] == expected
def test_word_ngram_analyzer():
cnga = CountVectorizer(analyzer='word', strip_accents='unicode',
ngram_range=(3, 6)).build_analyzer()
text = "This \n\tis a test, really.\n\n I met Harry yesterday"
expected = ['this is test', 'is test really', 'test really met']
assert cnga(text)[:3] == expected
expected = ['test really met harry yesterday',
'this is test really met harry',
'is test really met harry yesterday']
assert cnga(text)[-3:] == expected
cnga_file = CountVectorizer(input='file', analyzer='word',
ngram_range=(3, 6)).build_analyzer()
file = StringIO(text)
assert cnga_file(file) == cnga(text)
def test_countvectorizer_custom_vocabulary():
vocab = {"pizza": 0, "beer": 1}
terms = set(vocab.keys())
# Try a few of the supported types.
for typ in [dict, list, iter, partial(defaultdict, int)]:
v = typ(vocab)
vect = CountVectorizer(vocabulary=v)
vect.fit(JUNK_FOOD_DOCS)
if isinstance(v, Mapping):
assert vect.vocabulary_ == vocab
else:
assert set(vect.vocabulary_) == terms
X = vect.transform(JUNK_FOOD_DOCS)
assert X.shape[1] == len(terms)
def test_countvectorizer_custom_vocabulary_pipeline():
what_we_like = ["pizza", "beer"]
pipe = Pipeline([
('count', CountVectorizer(vocabulary=what_we_like)),
('tfidf', TfidfTransformer())])
X = pipe.fit_transform(ALL_FOOD_DOCS)
assert (set(pipe.named_steps['count'].vocabulary_) ==
set(what_we_like))
assert X.shape[1] == len(what_we_like)
def test_countvectorizer_custom_vocabulary_repeated_indices():
vocab = {"pizza": 0, "beer": 0}
try:
CountVectorizer(vocabulary=vocab)
except ValueError as e:
assert "vocabulary contains repeated indices" in str(e).lower()
def test_countvectorizer_custom_vocabulary_gap_index():
vocab = {"pizza": 1, "beer": 2}
try:
CountVectorizer(vocabulary=vocab)
except ValueError as e:
assert "doesn't contain index" in str(e).lower()
def test_countvectorizer_stop_words():
cv = CountVectorizer()
cv.set_params(stop_words='english')
assert cv.get_stop_words() == ENGLISH_STOP_WORDS
cv.set_params(stop_words='_bad_str_stop_')
assert_raises(ValueError, cv.get_stop_words)
cv.set_params(stop_words='_bad_unicode_stop_')
assert_raises(ValueError, cv.get_stop_words)
stoplist = ['some', 'other', 'words']
cv.set_params(stop_words=stoplist)
assert cv.get_stop_words() == set(stoplist)
def test_countvectorizer_empty_vocabulary():
try:
vect = CountVectorizer(vocabulary=[])
vect.fit(["foo"])
assert False, "we shouldn't get here"
except ValueError as e:
assert "empty vocabulary" in str(e).lower()
try:
v = CountVectorizer(max_df=1.0, stop_words="english")
# fit on stopwords only
v.fit(["to be or not to be", "and me too", "and so do you"])
assert False, "we shouldn't get here"
except ValueError as e:
assert "empty vocabulary" in str(e).lower()
def test_fit_countvectorizer_twice():
cv = CountVectorizer()
X1 = cv.fit_transform(ALL_FOOD_DOCS[:5])
X2 = cv.fit_transform(ALL_FOOD_DOCS[5:])
assert X1.shape[1] != X2.shape[1]
def test_tf_idf_smoothing():
X = [[1, 1, 1],
[1, 1, 0],
[1, 0, 0]]
tr = TfidfTransformer(smooth_idf=True, norm='l2')
tfidf = tr.fit_transform(X).toarray()
assert (tfidf >= 0).all()
# check normalization
assert_array_almost_equal((tfidf ** 2).sum(axis=1), [1., 1., 1.])
# this is robust to features with only zeros
X = [[1, 1, 0],
[1, 1, 0],
[1, 0, 0]]
tr = TfidfTransformer(smooth_idf=True, norm='l2')
tfidf = tr.fit_transform(X).toarray()
assert (tfidf >= 0).all()
def test_tfidf_no_smoothing():
X = [[1, 1, 1],
[1, 1, 0],
[1, 0, 0]]
tr = TfidfTransformer(smooth_idf=False, norm='l2')
tfidf = tr.fit_transform(X).toarray()
assert (tfidf >= 0).all()
# check normalization
assert_array_almost_equal((tfidf ** 2).sum(axis=1), [1., 1., 1.])
    # the lack of smoothing makes IDF fragile in the presence of features
    # with only zeros
X = [[1, 1, 0],
[1, 1, 0],
[1, 0, 0]]
tr = TfidfTransformer(smooth_idf=False, norm='l2')
clean_warning_registry()
with warnings.catch_warnings(record=True) as w:
1. / np.array([0.])
numpy_provides_div0_warning = len(w) == 1
in_warning_message = 'divide by zero'
tfidf = assert_warns_message(RuntimeWarning, in_warning_message,
tr.fit_transform, X).toarray()
if not numpy_provides_div0_warning:
raise SkipTest("Numpy does not provide div 0 warnings.")
def test_sublinear_tf():
X = [[1], [2], [3]]
tr = TfidfTransformer(sublinear_tf=True, use_idf=False, norm=None)
tfidf = tr.fit_transform(X).toarray()
assert tfidf[0] == 1
assert tfidf[1] > tfidf[0]
assert tfidf[2] > tfidf[1]
assert tfidf[1] < 2
assert tfidf[2] < 3
def test_vectorizer():
# raw documents as an iterator
train_data = iter(ALL_FOOD_DOCS[:-1])
test_data = [ALL_FOOD_DOCS[-1]]
n_train = len(ALL_FOOD_DOCS) - 1
# test without vocabulary
v1 = CountVectorizer(max_df=0.5)
counts_train = v1.fit_transform(train_data)
if hasattr(counts_train, 'tocsr'):
counts_train = counts_train.tocsr()
assert counts_train[0, v1.vocabulary_["pizza"]] == 2
    # build a vectorizer v2 with the same vocabulary as the one fitted by v1
v2 = CountVectorizer(vocabulary=v1.vocabulary_)
    # compare that the two vectorizers give the same output on the test sample
for v in (v1, v2):
counts_test = v.transform(test_data)
if hasattr(counts_test, 'tocsr'):
counts_test = counts_test.tocsr()
vocabulary = v.vocabulary_
assert counts_test[0, vocabulary["salad"]] == 1
assert counts_test[0, vocabulary["tomato"]] == 1
assert counts_test[0, vocabulary["water"]] == 1
# stop word from the fixed list
assert "the" not in vocabulary
# stop word found automatically by the vectorizer DF thresholding
        # words that are highly frequent across the complete corpus are
        # likely to be uninformative (either real stop words or extraction
        # artifacts)
assert "copyright" not in vocabulary
# not present in the sample
assert counts_test[0, vocabulary["coke"]] == 0
assert counts_test[0, vocabulary["burger"]] == 0
assert counts_test[0, vocabulary["beer"]] == 0
assert counts_test[0, vocabulary["pizza"]] == 0
# test tf-idf
t1 = TfidfTransformer(norm='l1')
tfidf = t1.fit(counts_train).transform(counts_train).toarray()
assert len(t1.idf_) == len(v1.vocabulary_)
assert tfidf.shape == (n_train, len(v1.vocabulary_))
# test tf-idf with new data
tfidf_test = t1.transform(counts_test).toarray()
assert tfidf_test.shape == (len(test_data), len(v1.vocabulary_))
# test tf alone
t2 = TfidfTransformer(norm='l1', use_idf=False)
tf = t2.fit(counts_train).transform(counts_train).toarray()
assert not hasattr(t2, "idf_")
# test idf transform with unlearned idf vector
t3 = TfidfTransformer(use_idf=True)
assert_raises(ValueError, t3.transform, counts_train)
# test idf transform with incompatible n_features
X = [[1, 1, 5],
[1, 1, 0]]
t3.fit(X)
X_incompt = [[1, 3],
[1, 3]]
assert_raises(ValueError, t3.transform, X_incompt)
# L1-normalized term frequencies sum to one
assert_array_almost_equal(np.sum(tf, axis=1), [1.0] * n_train)
# test the direct tfidf vectorizer
# (equivalent to term count vectorizer + tfidf transformer)
train_data = iter(ALL_FOOD_DOCS[:-1])
tv = TfidfVectorizer(norm='l1')
tv.max_df = v1.max_df
tfidf2 = tv.fit_transform(train_data).toarray()
assert not tv.fixed_vocabulary_
assert_array_almost_equal(tfidf, tfidf2)
# test the direct tfidf vectorizer with new data
tfidf_test2 = tv.transform(test_data).toarray()
assert_array_almost_equal(tfidf_test, tfidf_test2)
# test transform on unfitted vectorizer with empty vocabulary
v3 = CountVectorizer(vocabulary=None)
assert_raises(ValueError, v3.transform, train_data)
    # strip_accents='ascii' should select the ascii-stripping preprocessor
v3.set_params(strip_accents='ascii', lowercase=False)
assert v3.build_preprocessor() == strip_accents_ascii
# error on bad strip_accents param
v3.set_params(strip_accents='_gabbledegook_', preprocessor=None)
assert_raises(ValueError, v3.build_preprocessor)
# error with bad analyzer type
    v3.set_params(analyzer='_invalid_analyzer_type_')
assert_raises(ValueError, v3.build_analyzer)
def test_tfidf_vectorizer_setters():
tv = TfidfVectorizer(norm='l2', use_idf=False, smooth_idf=False,
sublinear_tf=False)
tv.norm = 'l1'
assert tv._tfidf.norm == 'l1'
tv.use_idf = True
assert tv._tfidf.use_idf
tv.smooth_idf = True
assert tv._tfidf.smooth_idf
tv.sublinear_tf = True
assert tv._tfidf.sublinear_tf
@fails_if_pypy
def test_hashing_vectorizer():
v = HashingVectorizer()
X = v.transform(ALL_FOOD_DOCS)
token_nnz = X.nnz
assert X.shape == (len(ALL_FOOD_DOCS), v.n_features)
assert X.dtype == v.dtype
# By default the hashed values receive a random sign and l2 normalization
# makes the feature values bounded
assert np.min(X.data) > -1
assert np.min(X.data) < 0
assert np.max(X.data) > 0
assert np.max(X.data) < 1
# Check that the rows are normalized
for i in range(X.shape[0]):
        assert_almost_equal(np.linalg.norm(X[i].data, 2), 1.0)
# Check vectorization with some non-default parameters
v = HashingVectorizer(ngram_range=(1, 2), norm='l1')
X = v.transform(ALL_FOOD_DOCS)
assert X.shape == (len(ALL_FOOD_DOCS), v.n_features)
assert X.dtype == v.dtype
# ngrams generate more non zeros
ngrams_nnz = X.nnz
assert ngrams_nnz > token_nnz
assert ngrams_nnz < 2 * token_nnz
# makes the feature values bounded
assert np.min(X.data) > -1
assert np.max(X.data) < 1
# Check that the rows are normalized
for i in range(X.shape[0]):
        assert_almost_equal(np.linalg.norm(X[i].data, 1), 1.0)
def test_feature_names():
cv = CountVectorizer(max_df=0.5)
# test for Value error on unfitted/empty vocabulary
assert_raises(ValueError, cv.get_feature_names)
assert not cv.fixed_vocabulary_
# test for vocabulary learned from data
X = cv.fit_transform(ALL_FOOD_DOCS)
n_samples, n_features = X.shape
assert len(cv.vocabulary_) == n_features
feature_names = cv.get_feature_names()
assert len(feature_names) == n_features
assert_array_equal(['beer', 'burger', 'celeri', 'coke', 'pizza',
'salad', 'sparkling', 'tomato', 'water'],
feature_names)
for idx, name in enumerate(feature_names):
assert idx == cv.vocabulary_.get(name)
# test for custom vocabulary
vocab = ['beer', 'burger', 'celeri', 'coke', 'pizza',
'salad', 'sparkling', 'tomato', 'water']
cv = CountVectorizer(vocabulary=vocab)
feature_names = cv.get_feature_names()
assert_array_equal(['beer', 'burger', 'celeri', 'coke', 'pizza', 'salad',
'sparkling', 'tomato', 'water'], feature_names)
assert cv.fixed_vocabulary_
for idx, name in enumerate(feature_names):
assert idx == cv.vocabulary_.get(name)
@pytest.mark.parametrize('Vectorizer', (CountVectorizer, TfidfVectorizer))
def test_vectorizer_max_features(Vectorizer):
expected_vocabulary = {'burger', 'beer', 'salad', 'pizza'}
expected_stop_words = {'celeri', 'tomato', 'copyright', 'coke',
'sparkling', 'water', 'the'}
# test bounded number of extracted features
vectorizer = Vectorizer(max_df=0.6, max_features=4)
vectorizer.fit(ALL_FOOD_DOCS)
assert set(vectorizer.vocabulary_) == expected_vocabulary
assert vectorizer.stop_words_ == expected_stop_words
def test_count_vectorizer_max_features():
# Regression test: max_features didn't work correctly in 0.14.
cv_1 = CountVectorizer(max_features=1)
cv_3 = CountVectorizer(max_features=3)
cv_None = CountVectorizer(max_features=None)
counts_1 = cv_1.fit_transform(JUNK_FOOD_DOCS).sum(axis=0)
counts_3 = cv_3.fit_transform(JUNK_FOOD_DOCS).sum(axis=0)
counts_None = cv_None.fit_transform(JUNK_FOOD_DOCS).sum(axis=0)
features_1 = cv_1.get_feature_names()
features_3 = cv_3.get_feature_names()
features_None = cv_None.get_feature_names()
# The most common feature is "the", with frequency 7.
assert 7 == counts_1.max()
assert 7 == counts_3.max()
assert 7 == counts_None.max()
# The most common feature should be the same
assert "the" == features_1[np.argmax(counts_1)]
assert "the" == features_3[np.argmax(counts_3)]
assert "the" == features_None[np.argmax(counts_None)]
def test_vectorizer_max_df():
test_data = ['abc', 'dea', 'eat']
vect = CountVectorizer(analyzer='char', max_df=1.0)
vect.fit(test_data)
assert 'a' in vect.vocabulary_.keys()
assert len(vect.vocabulary_.keys()) == 6
assert len(vect.stop_words_) == 0
vect.max_df = 0.5 # 0.5 * 3 documents -> max_doc_count == 1.5
vect.fit(test_data)
assert 'a' not in vect.vocabulary_.keys() # {ae} ignored
assert len(vect.vocabulary_.keys()) == 4 # {bcdt} remain
assert 'a' in vect.stop_words_
assert len(vect.stop_words_) == 2
vect.max_df = 1
vect.fit(test_data)
assert 'a' not in vect.vocabulary_.keys() # {ae} ignored
assert len(vect.vocabulary_.keys()) == 4 # {bcdt} remain
assert 'a' in vect.stop_words_
assert len(vect.stop_words_) == 2
def test_vectorizer_min_df():
test_data = ['abc', 'dea', 'eat']
vect = CountVectorizer(analyzer='char', min_df=1)
vect.fit(test_data)
assert 'a' in vect.vocabulary_.keys()
assert len(vect.vocabulary_.keys()) == 6
assert len(vect.stop_words_) == 0
vect.min_df = 2
vect.fit(test_data)
assert 'c' not in vect.vocabulary_.keys() # {bcdt} ignored
assert len(vect.vocabulary_.keys()) == 2 # {ae} remain
assert 'c' in vect.stop_words_
assert len(vect.stop_words_) == 4
vect.min_df = 0.8 # 0.8 * 3 documents -> min_doc_count == 2.4
vect.fit(test_data)
assert 'c' not in vect.vocabulary_.keys() # {bcdet} ignored
assert len(vect.vocabulary_.keys()) == 1 # {a} remains
assert 'c' in vect.stop_words_
assert len(vect.stop_words_) == 5
def test_count_binary_occurrences():
# by default multiple occurrences are counted as longs
test_data = ['aaabc', 'abbde']
vect = CountVectorizer(analyzer='char', max_df=1.0)
X = vect.fit_transform(test_data).toarray()
assert_array_equal(['a', 'b', 'c', 'd', 'e'], vect.get_feature_names())
assert_array_equal([[3, 1, 1, 0, 0],
[1, 2, 0, 1, 1]], X)
# using boolean features, we can fetch the binary occurrence info
# instead.
vect = CountVectorizer(analyzer='char', max_df=1.0, binary=True)
X = vect.fit_transform(test_data).toarray()
assert_array_equal([[1, 1, 1, 0, 0],
[1, 1, 0, 1, 1]], X)
# check the ability to change the dtype
vect = CountVectorizer(analyzer='char', max_df=1.0,
binary=True, dtype=np.float32)
X_sparse = vect.fit_transform(test_data)
assert X_sparse.dtype == np.float32
@fails_if_pypy
def test_hashed_binary_occurrences():
# by default multiple occurrences are counted as longs
test_data = ['aaabc', 'abbde']
vect = HashingVectorizer(alternate_sign=False, analyzer='char', norm=None)
X = vect.transform(test_data)
assert np.max(X[0:1].data) == 3
assert np.max(X[1:2].data) == 2
assert X.dtype == np.float64
# using boolean features, we can fetch the binary occurrence info
# instead.
vect = HashingVectorizer(analyzer='char', alternate_sign=False,
binary=True, norm=None)
X = vect.transform(test_data)
assert np.max(X.data) == 1
assert X.dtype == np.float64
# check the ability to change the dtype
vect = HashingVectorizer(analyzer='char', alternate_sign=False,
binary=True, norm=None, dtype=np.float64)
X = vect.transform(test_data)
assert X.dtype == np.float64
@pytest.mark.parametrize('Vectorizer', (CountVectorizer, TfidfVectorizer))
def test_vectorizer_inverse_transform(Vectorizer):
# raw documents
data = ALL_FOOD_DOCS
vectorizer = Vectorizer()
transformed_data = vectorizer.fit_transform(data)
inversed_data = vectorizer.inverse_transform(transformed_data)
analyze = vectorizer.build_analyzer()
for doc, inversed_terms in zip(data, inversed_data):
terms = np.sort(np.unique(analyze(doc)))
inversed_terms = np.sort(np.unique(inversed_terms))
assert_array_equal(terms, inversed_terms)
# Test that inverse_transform also works with numpy arrays
transformed_data = transformed_data.toarray()
inversed_data2 = vectorizer.inverse_transform(transformed_data)
for terms, terms2 in zip(inversed_data, inversed_data2):
assert_array_equal(np.sort(terms), np.sort(terms2))
def test_count_vectorizer_pipeline_grid_selection():
# raw documents
data = JUNK_FOOD_DOCS + NOTJUNK_FOOD_DOCS
# label junk food as -1, the others as +1
target = [-1] * len(JUNK_FOOD_DOCS) + [1] * len(NOTJUNK_FOOD_DOCS)
# split the dataset for model development and final evaluation
train_data, test_data, target_train, target_test = train_test_split(
data, target, test_size=.2, random_state=0)
pipeline = Pipeline([('vect', CountVectorizer()),
('svc', LinearSVC())])
parameters = {
'vect__ngram_range': [(1, 1), (1, 2)],
'svc__loss': ('hinge', 'squared_hinge')
}
# find the best parameters for both the feature extraction and the
# classifier
grid_search = GridSearchCV(pipeline, parameters, n_jobs=1, cv=3)
# Check that the best model found by grid search is 100% correct on the
# held out evaluation set.
pred = grid_search.fit(train_data, target_train).predict(test_data)
assert_array_equal(pred, target_test)
    # on this toy dataset all candidate models converge to 100% accuracy, so
    # the tie is broken in favor of the first candidate in the grid: the
    # unigram representation
assert grid_search.best_score_ == 1.0
best_vectorizer = grid_search.best_estimator_.named_steps['vect']
assert best_vectorizer.ngram_range == (1, 1)
def test_vectorizer_pipeline_grid_selection():
# raw documents
data = JUNK_FOOD_DOCS + NOTJUNK_FOOD_DOCS
# label junk food as -1, the others as +1
target = [-1] * len(JUNK_FOOD_DOCS) + [1] * len(NOTJUNK_FOOD_DOCS)
# split the dataset for model development and final evaluation
train_data, test_data, target_train, target_test = train_test_split(
data, target, test_size=.1, random_state=0)
pipeline = Pipeline([('vect', TfidfVectorizer()),
('svc', LinearSVC())])
parameters = {
'vect__ngram_range': [(1, 1), (1, 2)],
'vect__norm': ('l1', 'l2'),
'svc__loss': ('hinge', 'squared_hinge'),
}
# find the best parameters for both the feature extraction and the
# classifier
grid_search = GridSearchCV(pipeline, parameters, n_jobs=1)
# Check that the best model found by grid search is 100% correct on the
# held out evaluation set.
pred = grid_search.fit(train_data, target_train).predict(test_data)
assert_array_equal(pred, target_test)
    # on this toy dataset all candidate models converge to 100% accuracy, so
    # the tie is broken in favor of the first candidate in the grid: the
    # unigram representation
assert grid_search.best_score_ == 1.0
best_vectorizer = grid_search.best_estimator_.named_steps['vect']
assert best_vectorizer.ngram_range == (1, 1)
assert best_vectorizer.norm == 'l2'
assert not best_vectorizer.fixed_vocabulary_
def test_vectorizer_pipeline_cross_validation():
# raw documents
data = JUNK_FOOD_DOCS + NOTJUNK_FOOD_DOCS
# label junk food as -1, the others as +1
target = [-1] * len(JUNK_FOOD_DOCS) + [1] * len(NOTJUNK_FOOD_DOCS)
pipeline = Pipeline([('vect', TfidfVectorizer()),
('svc', LinearSVC())])
cv_scores = cross_val_score(pipeline, data, target, cv=3)
assert_array_equal(cv_scores, [1., 1., 1.])
@fails_if_pypy
def test_vectorizer_unicode():
    # tests that CountVectorizer and HashingVectorizer work with Cyrillic text
document = (
"Машинное обучение — обширный подраздел искусственного "
"интеллекта, изучающий методы построения алгоритмов, "
"способных обучаться."
)
vect = CountVectorizer()
X_counted = vect.fit_transform([document])
assert X_counted.shape == (1, 12)
vect = HashingVectorizer(norm=None, alternate_sign=False)
X_hashed = vect.transform([document])
assert X_hashed.shape == (1, 2 ** 20)
# No collisions on such a small dataset
assert X_counted.nnz == X_hashed.nnz
    # When norm is None and alternate_sign is False, the token counts are
    # preserved, up to hash collisions
assert_array_equal(np.sort(X_counted.data), np.sort(X_hashed.data))
def test_tfidf_vectorizer_with_fixed_vocabulary():
    # non-regression smoke test for inheritance issues
vocabulary = ['pizza', 'celeri']
vect = TfidfVectorizer(vocabulary=vocabulary)
X_1 = vect.fit_transform(ALL_FOOD_DOCS)
X_2 = vect.transform(ALL_FOOD_DOCS)
assert_array_almost_equal(X_1.toarray(), X_2.toarray())
assert vect.fixed_vocabulary_
def test_pickling_vectorizer():
instances = [
HashingVectorizer(),
HashingVectorizer(norm='l1'),
HashingVectorizer(binary=True),
HashingVectorizer(ngram_range=(1, 2)),
CountVectorizer(),
CountVectorizer(preprocessor=strip_tags),
CountVectorizer(analyzer=lazy_analyze),
CountVectorizer(preprocessor=strip_tags).fit(JUNK_FOOD_DOCS),
CountVectorizer(strip_accents=strip_eacute).fit(JUNK_FOOD_DOCS),
TfidfVectorizer(),
TfidfVectorizer(analyzer=lazy_analyze),
TfidfVectorizer().fit(JUNK_FOOD_DOCS),
]
for orig in instances:
s = pickle.dumps(orig)
copy = pickle.loads(s)
assert type(copy) == orig.__class__
assert copy.get_params() == orig.get_params()
if IS_PYPY and isinstance(orig, HashingVectorizer):
continue
else:
assert_array_equal(
copy.fit_transform(JUNK_FOOD_DOCS).toarray(),
orig.fit_transform(JUNK_FOOD_DOCS).toarray())
def test_countvectorizer_vocab_sets_when_pickling():
# ensure that vocabulary of type set is coerced to a list to
# preserve iteration ordering after deserialization
rng = np.random.RandomState(0)
vocab_words = np.array(['beer', 'burger', 'celeri', 'coke', 'pizza',
'salad', 'sparkling', 'tomato', 'water'])
for x in range(0, 100):
vocab_set = set(rng.choice(vocab_words, size=5, replace=False))
cv = CountVectorizer(vocabulary=vocab_set)
unpickled_cv = pickle.loads(pickle.dumps(cv))
cv.fit(ALL_FOOD_DOCS)
unpickled_cv.fit(ALL_FOOD_DOCS)
assert cv.get_feature_names() == unpickled_cv.get_feature_names()
def test_countvectorizer_vocab_dicts_when_pickling():
rng = np.random.RandomState(0)
vocab_words = np.array(['beer', 'burger', 'celeri', 'coke', 'pizza',
'salad', 'sparkling', 'tomato', 'water'])
for x in range(0, 100):
vocab_dict = dict()
words = rng.choice(vocab_words, size=5, replace=False)
for y in range(0, 5):
vocab_dict[words[y]] = y
cv = CountVectorizer(vocabulary=vocab_dict)
unpickled_cv = pickle.loads(pickle.dumps(cv))
cv.fit(ALL_FOOD_DOCS)
unpickled_cv.fit(ALL_FOOD_DOCS)
assert cv.get_feature_names() == unpickled_cv.get_feature_names()
def test_stop_words_removal():
# Ensure that deleting the stop_words_ attribute doesn't affect transform
fitted_vectorizers = (
TfidfVectorizer().fit(JUNK_FOOD_DOCS),
CountVectorizer(preprocessor=strip_tags).fit(JUNK_FOOD_DOCS),
CountVectorizer(strip_accents=strip_eacute).fit(JUNK_FOOD_DOCS)
)
for vect in fitted_vectorizers:
vect_transform = vect.transform(JUNK_FOOD_DOCS).toarray()
vect.stop_words_ = None
stop_None_transform = vect.transform(JUNK_FOOD_DOCS).toarray()
delattr(vect, 'stop_words_')
stop_del_transform = vect.transform(JUNK_FOOD_DOCS).toarray()
assert_array_equal(stop_None_transform, vect_transform)
assert_array_equal(stop_del_transform, vect_transform)
def test_pickling_transformer():
X = CountVectorizer().fit_transform(JUNK_FOOD_DOCS)
orig = TfidfTransformer().fit(X)
s = pickle.dumps(orig)
copy = pickle.loads(s)
assert type(copy) == orig.__class__
assert_array_equal(
copy.fit_transform(X).toarray(),
orig.fit_transform(X).toarray())
def test_transformer_idf_setter():
X = CountVectorizer().fit_transform(JUNK_FOOD_DOCS)
orig = TfidfTransformer().fit(X)
copy = TfidfTransformer()
copy.idf_ = orig.idf_
assert_array_equal(
copy.transform(X).toarray(),
orig.transform(X).toarray())
def test_tfidf_vectorizer_setter():
orig = TfidfVectorizer(use_idf=True)
orig.fit(JUNK_FOOD_DOCS)
copy = TfidfVectorizer(vocabulary=orig.vocabulary_, use_idf=True)
copy.idf_ = orig.idf_
assert_array_equal(
copy.transform(JUNK_FOOD_DOCS).toarray(),
orig.transform(JUNK_FOOD_DOCS).toarray())
def test_tfidfvectorizer_invalid_idf_attr():
vect = TfidfVectorizer(use_idf=True)
vect.fit(JUNK_FOOD_DOCS)
copy = TfidfVectorizer(vocabulary=vect.vocabulary_, use_idf=True)
expected_idf_len = len(vect.idf_)
invalid_idf = [1.0] * (expected_idf_len + 1)
assert_raises(ValueError, setattr, copy, 'idf_', invalid_idf)
def test_non_unique_vocab():
vocab = ['a', 'b', 'c', 'a', 'a']
vect = CountVectorizer(vocabulary=vocab)
assert_raises(ValueError, vect.fit, [])
@fails_if_pypy
def test_hashingvectorizer_nan_in_docs():
# np.nan can appear when using pandas to load text fields from a csv file
# with missing values.
message = "np.nan is an invalid document, expected byte or unicode string."
exception = ValueError
def func():
hv = HashingVectorizer()
hv.fit_transform(['hello world', np.nan, 'hello hello'])
assert_raise_message(exception, message, func)
def test_tfidfvectorizer_binary():
# Non-regression test: TfidfVectorizer used to ignore its "binary" param.
v = TfidfVectorizer(binary=True, use_idf=False, norm=None)
assert v.binary
X = v.fit_transform(['hello world', 'hello hello']).toarray()
assert_array_equal(X.ravel(), [1, 1, 1, 0])
X2 = v.transform(['hello world', 'hello hello']).toarray()
assert_array_equal(X2.ravel(), [1, 1, 1, 0])
def test_tfidfvectorizer_export_idf():
vect = TfidfVectorizer(use_idf=True)
vect.fit(JUNK_FOOD_DOCS)
assert_array_almost_equal(vect.idf_, vect._tfidf.idf_)
def test_vectorizer_vocab_clone():
vect_vocab = TfidfVectorizer(vocabulary=["the"])
vect_vocab_clone = clone(vect_vocab)
vect_vocab.fit(ALL_FOOD_DOCS)
vect_vocab_clone.fit(ALL_FOOD_DOCS)
assert vect_vocab_clone.vocabulary_ == vect_vocab.vocabulary_
@pytest.mark.parametrize('Vectorizer',
(CountVectorizer, TfidfVectorizer, HashingVectorizer))
def test_vectorizer_string_object_as_input(Vectorizer):
message = ("Iterable over raw text documents expected, "
"string object received.")
vec = Vectorizer()
assert_raise_message(
ValueError, message, vec.fit_transform, "hello world!")
assert_raise_message(ValueError, message, vec.fit, "hello world!")
assert_raise_message(ValueError, message, vec.transform, "hello world!")
@pytest.mark.parametrize("X_dtype", [np.float32, np.float64])
def test_tfidf_transformer_type(X_dtype):
X = sparse.rand(10, 20000, dtype=X_dtype, random_state=42)
X_trans = TfidfTransformer().fit_transform(X)
assert X_trans.dtype == X.dtype
def test_tfidf_transformer_sparse():
X = sparse.rand(10, 20000, dtype=np.float64, random_state=42)
X_csc = sparse.csc_matrix(X)
X_csr = sparse.csr_matrix(X)
X_trans_csc = TfidfTransformer().fit_transform(X_csc)
X_trans_csr = TfidfTransformer().fit_transform(X_csr)
assert_allclose_dense_sparse(X_trans_csc, X_trans_csr)
assert X_trans_csc.format == X_trans_csr.format
@pytest.mark.parametrize(
"vectorizer_dtype, output_dtype, warning_expected",
[(np.int32, np.float64, True),
(np.int64, np.float64, True),
(np.float32, np.float32, False),
(np.float64, np.float64, False)]
)
def test_tfidf_vectorizer_type(vectorizer_dtype, output_dtype,
warning_expected):
X = np.array(["numpy", "scipy", "sklearn"])
vectorizer = TfidfVectorizer(dtype=vectorizer_dtype)
warning_msg_match = "'dtype' should be used."
warning_cls = UserWarning
expected_warning_cls = warning_cls if warning_expected else None
with pytest.warns(expected_warning_cls,
match=warning_msg_match) as record:
X_idf = vectorizer.fit_transform(X)
if expected_warning_cls is None:
relevant_warnings = [w for w in record
if isinstance(w, warning_cls)]
assert len(relevant_warnings) == 0
assert X_idf.dtype == output_dtype
@pytest.mark.parametrize("vec", [
HashingVectorizer(ngram_range=(2, 1)),
CountVectorizer(ngram_range=(2, 1)),
TfidfVectorizer(ngram_range=(2, 1))
])
def test_vectorizers_invalid_ngram_range(vec):
    # vectorizers can be initialized with an invalid ngram_range;
    # check that fitting raises an informative error message
invalid_range = vec.ngram_range
message = ("Invalid value for ngram_range=%s "
"lower boundary larger than the upper boundary."
% str(invalid_range))
if isinstance(vec, HashingVectorizer):
pytest.xfail(reason='HashingVectorizer is not supported on PyPy')
assert_raise_message(
ValueError, message, vec.fit, ["good news everyone"])
assert_raise_message(
ValueError, message, vec.fit_transform, ["good news everyone"])
if isinstance(vec, HashingVectorizer):
assert_raise_message(
ValueError, message, vec.transform, ["good news everyone"])
def _check_stop_words_consistency(estimator):
stop_words = estimator.get_stop_words()
tokenize = estimator.build_tokenizer()
preprocess = estimator.build_preprocessor()
return estimator._check_stop_words_consistency(stop_words, preprocess,
tokenize)
@fails_if_pypy
def test_vectorizer_stop_words_inconsistent():
lstr = "['and', 'll', 've']"
message = ('Your stop_words may be inconsistent with your '
'preprocessing. Tokenizing the stop words generated '
'tokens %s not in stop_words.' % lstr)
for vec in [CountVectorizer(),
TfidfVectorizer(), HashingVectorizer()]:
vec.set_params(stop_words=["you've", "you", "you'll", 'AND'])
assert_warns_message(UserWarning, message, vec.fit_transform,
['hello world'])
# reset stop word validation
del vec._stop_words_id
assert _check_stop_words_consistency(vec) is False
# Only one warning per stop list
assert_no_warnings(vec.fit_transform, ['hello world'])
assert _check_stop_words_consistency(vec) is None
# Test caching of inconsistency assessment
vec.set_params(stop_words=["you've", "you", "you'll", 'blah', 'AND'])
assert_warns_message(UserWarning, message, vec.fit_transform,
['hello world'])
@skip_if_32bit
def test_countvectorizer_sort_features_64bit_sparse_indices():
"""
Check that CountVectorizer._sort_features preserves the dtype of its sparse
feature matrix.
This test is skipped on 32bit platforms, see:
https://github.com/scikit-learn/scikit-learn/pull/11295
for more details.
"""
X = sparse.csr_matrix((5, 5), dtype=np.int64)
# force indices and indptr to int64.
INDICES_DTYPE = np.int64
X.indices = X.indices.astype(INDICES_DTYPE)
X.indptr = X.indptr.astype(INDICES_DTYPE)
vocabulary = {
"scikit-learn": 0,
"is": 1,
"great!": 2
}
Xs = CountVectorizer()._sort_features(X, vocabulary)
assert INDICES_DTYPE == Xs.indices.dtype
@fails_if_pypy
@pytest.mark.parametrize('Estimator',
[CountVectorizer, TfidfVectorizer, HashingVectorizer])
def test_stop_word_validation_custom_preprocessor(Estimator):
data = [{'text': 'some text'}]
vec = Estimator()
assert _check_stop_words_consistency(vec) is True
vec = Estimator(preprocessor=lambda x: x['text'],
stop_words=['and'])
assert _check_stop_words_consistency(vec) == 'error'
# checks are cached
assert _check_stop_words_consistency(vec) is None
vec.fit_transform(data)
class CustomEstimator(Estimator):
def build_preprocessor(self):
return lambda x: x['text']
vec = CustomEstimator(stop_words=['and'])
assert _check_stop_words_consistency(vec) == 'error'
vec = Estimator(tokenizer=lambda doc: re.compile(r'\w{1,}')
.findall(doc),
stop_words=['and'])
assert _check_stop_words_consistency(vec) is True
@pytest.mark.parametrize(
'Estimator',
[CountVectorizer,
TfidfVectorizer,
HashingVectorizer]
)
@pytest.mark.parametrize(
'input_type, err_type, err_msg',
[('filename', FileNotFoundError, ''),
('file', AttributeError, "'str' object has no attribute 'read'")]
)
def test_callable_analyzer_error(Estimator, input_type, err_type, err_msg):
if issubclass(Estimator, HashingVectorizer):
pytest.xfail('HashingVectorizer is not supported on PyPy')
data = ['this is text, not file or filename']
with pytest.raises(err_type, match=err_msg):
Estimator(analyzer=lambda x: x.split(),
input=input_type).fit_transform(data)
@pytest.mark.parametrize(
'Estimator',
[CountVectorizer,
TfidfVectorizer,
pytest.param(HashingVectorizer, marks=fails_if_pypy)]
)
@pytest.mark.parametrize(
'analyzer', [lambda doc: open(doc, 'r'), lambda doc: doc.read()]
)
@pytest.mark.parametrize('input_type', ['file', 'filename'])
def test_callable_analyzer_change_behavior(Estimator, analyzer, input_type):
data = ['this is text, not file or filename']
warn_msg = 'Since v0.21, vectorizer'
with pytest.raises((FileNotFoundError, AttributeError)):
with pytest.warns(ChangedBehaviorWarning, match=warn_msg) as records:
Estimator(analyzer=analyzer, input=input_type).fit_transform(data)
assert len(records) == 1
assert warn_msg in str(records[0])
@pytest.mark.parametrize(
'Estimator',
[CountVectorizer,
TfidfVectorizer,
HashingVectorizer]
)
def test_callable_analyzer_reraise_error(tmpdir, Estimator):
# check if a custom exception from the analyzer is shown to the user
def analyzer(doc):
raise Exception("testing")
if issubclass(Estimator, HashingVectorizer):
pytest.xfail('HashingVectorizer is not supported on PyPy')
f = tmpdir.join("file.txt")
f.write("sample content\n")
with pytest.raises(Exception, match="testing"):
Estimator(analyzer=analyzer, input='file').fit_transform([f])
| {
"content_hash": "0f8640a2b1cda85e9a7b93dab84beda7",
"timestamp": "",
"source": "github",
"line_count": 1253,
"max_line_length": 79,
"avg_line_length": 35.44213886671987,
"alnum_prop": 0.6387894345740728,
"repo_name": "chrsrds/scikit-learn",
"id": "59ddcb5fa1ac351866f76f1572137d3d91c5cd18",
"size": "44620",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "sklearn/feature_extraction/tests/test_text.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "1786"
},
{
"name": "C",
"bytes": "385829"
},
{
"name": "C++",
"bytes": "139482"
},
{
"name": "Makefile",
"bytes": "1388"
},
{
"name": "PowerShell",
"bytes": "13427"
},
{
"name": "Python",
"bytes": "5255814"
},
{
"name": "Shell",
"bytes": "4031"
}
],
"symlink_target": ""
} |
import proto # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v11.errors",
marshal="google.ads.googleads.v11",
manifest={"CustomInterestErrorEnum",},
)
class CustomInterestErrorEnum(proto.Message):
r"""Container for enum describing possible custom interest
errors.
"""
class CustomInterestError(proto.Enum):
r"""Enum describing possible custom interest errors."""
UNSPECIFIED = 0
UNKNOWN = 1
NAME_ALREADY_USED = 2
CUSTOM_INTEREST_MEMBER_ID_AND_TYPE_PARAMETER_NOT_PRESENT_IN_REMOVE = 3
TYPE_AND_PARAMETER_NOT_FOUND = 4
TYPE_AND_PARAMETER_ALREADY_EXISTED = 5
INVALID_CUSTOM_INTEREST_MEMBER_TYPE = 6
CANNOT_REMOVE_WHILE_IN_USE = 7
CANNOT_CHANGE_TYPE = 8
__all__ = tuple(sorted(__protobuf__.manifest))
| {
"content_hash": "748e30575981548f32fda4d728a10a09",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 78,
"avg_line_length": 28,
"alnum_prop": 0.6607142857142857,
"repo_name": "googleads/google-ads-python",
"id": "bbf5b64d012ec4df1d7b999269221df040f8cd77",
"size": "1440",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "google/ads/googleads/v11/errors/types/custom_interest_error.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "23399881"
}
],
"symlink_target": ""
} |
import gzip
import json
import zlib
import mock
import pytest
from s4 import utils
class TestTryDecompress:
def test_bad_value(self):
with pytest.raises(ValueError) as exc:
utils.try_decompress(b"Not compressed data")
assert str(exc.value) == "Unknown compression format"
def test_gzip(self):
body = gzip.compress(b"some data")
assert utils.try_decompress(body) == b"some data"
def test_zlib(self):
body = zlib.compress(b"some data")
assert utils.try_decompress(body) == b"some data"
@mock.patch("getpass.getpass")
@mock.patch("builtins.input")
class TestGetInput:
def test_required(self, input_fn, getpass):
input_fn.side_effect = ["", "", "something"]
result = utils.get_input("give me some info", required=True)
assert result == "something"
assert input_fn.call_count == 3
assert getpass.call_count == 0
def test_not_secret(self, input_fn, getpass):
input_fn.return_value = "foo"
result = utils.get_input("give me some info", secret=False)
assert result == "foo"
assert getpass.call_count == 0
assert input_fn.call_count == 1
def test_blank(self, input_fn, getpass):
input_fn.return_value = ""
result = utils.get_input("give me some info", blank=True)
assert result is None
assert getpass.call_count == 0
assert input_fn.call_count == 1
def test_secret(self, input_fn, getpass):
getpass.return_value = "bar"
result = utils.get_input("give me some secret info", secret=True)
assert result == "bar"
assert getpass.call_count == 1
assert input_fn.call_count == 0
class TestGetConfigFile(object):
@mock.patch("s4.utils.CONFIG_FILE_PATH", "/i/dont/exist")
def test_no_file(self):
assert utils.get_config() == {"targets": {}}
def test_correct_output(self, config_file):
with open(config_file, "w") as fp:
json.dump({"local_folder": "/home/someone/something"}, fp)
assert utils.get_config() == {"local_folder": "/home/someone/something"}
| {
"content_hash": "91e8cf459cf278985b9c6c76222c042b",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 80,
"avg_line_length": 27.641025641025642,
"alnum_prop": 0.6224489795918368,
"repo_name": "MichaelAquilina/s3backup",
"id": "8d0362a2d4c7ce7af5bb2976bcee7ba76e218cfa",
"size": "2181",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "101356"
},
{
"name": "Shell",
"bytes": "42"
}
],
"symlink_target": ""
} |
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from rq import Connection, Queue, Worker
if __name__ == '__main__':
# Tell rq what Redis connection to use
with Connection():
q = Queue()
Worker(q).work()
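# A companion producer sketch (hedged: `tasks.add` is a hypothetical, importable
# job function, and a Redis server is assumed on the default localhost port):
#
#     from rq import Connection, Queue
#
#     with Connection():
#         Queue().enqueue('tasks.add', 2, 3)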
| {
"content_hash": "215474a84805c1aeb813bbc89b819aea",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 66,
"avg_line_length": 29,
"alnum_prop": 0.5862068965517241,
"repo_name": "jclee81/sktacc",
"id": "4feb2179ebfcbe2f12794bc7ed789e3fecc8433c",
"size": "314",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sktps/sandbox/run_worker.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "7311"
},
{
"name": "JavaScript",
"bytes": "4042"
},
{
"name": "Jupyter Notebook",
"bytes": "1021"
},
{
"name": "Python",
"bytes": "91818"
},
{
"name": "Shell",
"bytes": "4252"
}
],
"symlink_target": ""
} |
from botocore.exceptions import DataNotFoundError
from botocore.docs.utils import get_official_service_name
from botocore.docs.client import ClientDocumenter
from botocore.docs.waiter import WaiterDocumenter
from botocore.docs.paginator import PaginatorDocumenter
from botocore.docs.bcdoc.restdoc import DocumentStructure
class ServiceDocumenter(object):
def __init__(self, service_name, session):
self._session = session
self._service_name = service_name
self._client = self._session.create_client(
service_name, region_name='us-east-1', aws_access_key_id='foo',
aws_secret_access_key='bar')
self.sections = [
'title',
'table-of-contents',
'client-api',
'paginator-api',
'waiter-api'
]
def document_service(self):
"""Documents an entire service.
        :returns: The reStructuredText of the documented service.
"""
doc_structure = DocumentStructure(
self._service_name, section_names=self.sections,
target='html')
self.title(doc_structure.get_section('title'))
self.table_of_contents(doc_structure.get_section('table-of-contents'))
self.client_api(doc_structure.get_section('client-api'))
self.paginator_api(doc_structure.get_section('paginator-api'))
self.waiter_api(doc_structure.get_section('waiter-api'))
return doc_structure.flush_structure()
def title(self, section):
section.style.h1(self._client.__class__.__name__)
def table_of_contents(self, section):
section.style.table_of_contents(title='Table of Contents', depth=2)
def client_api(self, section):
examples = None
try:
examples = self.get_examples(self._service_name)
except DataNotFoundError:
pass
ClientDocumenter(self._client, examples).document_client(section)
def paginator_api(self, section):
try:
service_paginator_model = self._session.get_paginator_model(
self._service_name)
except DataNotFoundError:
return
paginator_documenter = PaginatorDocumenter(
self._client, service_paginator_model)
paginator_documenter.document_paginators(section)
def waiter_api(self, section):
if self._client.waiter_names:
service_waiter_model = self._session.get_waiter_model(
self._service_name)
waiter_documenter = WaiterDocumenter(
self._client, service_waiter_model)
waiter_documenter.document_waiters(section)
def get_examples(self, service_name, api_version=None):
loader = self._session.get_component('data_loader')
examples = loader.load_service_model(
service_name, 'examples-1', api_version)
return examples['examples']
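# A minimal usage sketch (an assumption, not part of this module): render the
# reStructuredText docs for one service from a botocore Session.
#
#     from botocore.session import Session
#
#     rst_bytes = ServiceDocumenter('s3', Session()).document_service()
#     with open('s3.rst', 'wb') as f:
#         f.write(rst_bytes)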
| {
"content_hash": "9fcaf7cbe8b16a9065ab375d341f6991",
"timestamp": "",
"source": "github",
"line_count": 78,
"max_line_length": 78,
"avg_line_length": 37.42307692307692,
"alnum_prop": 0.6413155190133607,
"repo_name": "VirtueSecurity/aws-extender",
"id": "c9b5d7b31e1cc5c5d454100c0b5abe63d80069d9",
"size": "3480",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "BappModules/botocore/docs/service.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "46260"
}
],
"symlink_target": ""
} |
from JumpScale import j
import sys
j.application.debug = (sys.argv[1] == 'True')
# Start debugging if 1+1 == 2
if 1 + 1 == 2:
j.application.break_into_jshell("DEBUG STARTED")
| {
"content_hash": "b35a6f72cebed493af6806ff99d9251e",
"timestamp": "",
"source": "github",
"line_count": 7,
"max_line_length": 52,
"avg_line_length": 25.714285714285715,
"alnum_prop": 0.6722222222222223,
"repo_name": "Jumpscale/jumpscale_core8",
"id": "7a16545195c4cadd75d1bad4c3651e02b1a141ac",
"size": "180",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/JumpScale/core/main/tests/embed_debugger.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1113"
},
{
"name": "Cap'n Proto",
"bytes": "9033"
},
{
"name": "Lua",
"bytes": "12538"
},
{
"name": "Python",
"bytes": "4343122"
},
{
"name": "Shell",
"bytes": "7091"
}
],
"symlink_target": ""
} |
def undeploy_model(project_id, model_id):
"""Undeploy a model."""
# [START automl_undeploy_model]
from google.cloud import automl
# TODO(developer): Uncomment and set the following variables
# project_id = "YOUR_PROJECT_ID"
# model_id = "YOUR_MODEL_ID"
client = automl.AutoMlClient()
# Get the full path of the model.
model_full_id = client.model_path(project_id, "us-central1", model_id)
response = client.undeploy_model(name=model_full_id)
print("Model undeployment finished. {}".format(response.result()))
# [END automl_undeploy_model]
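# A hypothetical invocation (both IDs are placeholders), assuming application
# default credentials are configured:
#
#     undeploy_model("my-project-id", "TEN0123456789012345678")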
| {
"content_hash": "03d4bdaeeb47ddea06e1e28bb6d8bd39",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 74,
"avg_line_length": 36.875,
"alnum_prop": 0.6779661016949152,
"repo_name": "googleapis/python-automl",
"id": "25b5cdb7c507832464daafa6595bdded52086fa6",
"size": "1165",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "samples/snippets/undeploy_model.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2050"
},
{
"name": "Python",
"bytes": "2347989"
},
{
"name": "Shell",
"bytes": "30660"
}
],
"symlink_target": ""
} |
"""Run interop (cross-language) tests in parallel."""
from __future__ import print_function
import argparse
import atexit
import itertools
import json
import multiprocessing
import os
import re
import subprocess
import sys
import tempfile
import time
import uuid
import six
import traceback
import python_utils.dockerjob as dockerjob
import python_utils.jobset as jobset
import python_utils.report_utils as report_utils
# Docker runs can leave terminal echo disabled; restore it on exit.
atexit.register(lambda: subprocess.call(['stty', 'echo']))
ROOT = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '../..'))
os.chdir(ROOT)
_DEFAULT_SERVER_PORT = 8080
_SKIP_CLIENT_COMPRESSION = ['client_compressed_unary',
'client_compressed_streaming']
_SKIP_SERVER_COMPRESSION = ['server_compressed_unary',
'server_compressed_streaming']
_SKIP_COMPRESSION = _SKIP_CLIENT_COMPRESSION + _SKIP_SERVER_COMPRESSION
_SKIP_ADVANCED = ['status_code_and_message',
'custom_metadata',
'unimplemented_method',
'unimplemented_service']
_TEST_TIMEOUT = 3*60
# disable this test on core-based languages,
# see https://github.com/grpc/grpc/issues/9779
_SKIP_DATA_FRAME_PADDING = ['data_frame_padding']
# report suffix is important for reports to get picked up by internal CI
_INTERNAL_CL_XML_REPORT = 'sponge_log.xml'
# report suffix is important for reports to get picked up by internal CI
_XML_REPORT = 'report.xml'
class CXXLanguage:
def __init__(self):
self.client_cwd = None
self.server_cwd = None
self.http2_cwd = None
self.safename = 'cxx'
def client_cmd(self, args):
return ['bins/opt/interop_client'] + args
def client_cmd_http2interop(self, args):
return ['bins/opt/http2_client'] + args
def cloud_to_prod_env(self):
return {}
def server_cmd(self, args):
return ['bins/opt/interop_server'] + args
def global_env(self):
return {}
def unimplemented_test_cases(self):
return _SKIP_DATA_FRAME_PADDING
def unimplemented_test_cases_server(self):
return []
def __str__(self):
return 'c++'
class CSharpLanguage:
def __init__(self):
self.client_cwd = 'src/csharp/Grpc.IntegrationTesting.Client/bin/Debug/net45'
self.server_cwd = 'src/csharp/Grpc.IntegrationTesting.Server/bin/Debug/net45'
self.safename = str(self)
def client_cmd(self, args):
return ['mono', 'Grpc.IntegrationTesting.Client.exe'] + args
def cloud_to_prod_env(self):
return {}
def server_cmd(self, args):
return ['mono', 'Grpc.IntegrationTesting.Server.exe'] + args
def global_env(self):
return {}
def unimplemented_test_cases(self):
return _SKIP_SERVER_COMPRESSION + _SKIP_DATA_FRAME_PADDING
def unimplemented_test_cases_server(self):
return _SKIP_COMPRESSION
def __str__(self):
return 'csharp'
class CSharpCoreCLRLanguage:
def __init__(self):
self.client_cwd = 'src/csharp/Grpc.IntegrationTesting.Client/bin/Debug/netcoreapp1.0'
self.server_cwd = 'src/csharp/Grpc.IntegrationTesting.Server/bin/Debug/netcoreapp1.0'
self.safename = str(self)
def client_cmd(self, args):
return ['dotnet', 'exec', 'Grpc.IntegrationTesting.Client.dll'] + args
def cloud_to_prod_env(self):
return {}
def server_cmd(self, args):
return ['dotnet', 'exec', 'Grpc.IntegrationTesting.Server.dll'] + args
def global_env(self):
return {}
def unimplemented_test_cases(self):
return _SKIP_SERVER_COMPRESSION + _SKIP_DATA_FRAME_PADDING
def unimplemented_test_cases_server(self):
return _SKIP_COMPRESSION
def __str__(self):
return 'csharpcoreclr'
class JavaLanguage:
def __init__(self):
self.client_cwd = '../grpc-java'
self.server_cwd = '../grpc-java'
self.http2_cwd = '../grpc-java'
self.safename = str(self)
def client_cmd(self, args):
return ['./run-test-client.sh'] + args
def client_cmd_http2interop(self, args):
return ['./interop-testing/build/install/grpc-interop-testing/bin/http2-client'] + args
def cloud_to_prod_env(self):
return {}
def server_cmd(self, args):
return ['./run-test-server.sh'] + args
def global_env(self):
return {}
def unimplemented_test_cases(self):
return _SKIP_COMPRESSION
def unimplemented_test_cases_server(self):
return _SKIP_COMPRESSION
def __str__(self):
return 'java'
class JavaOkHttpClient:
def __init__(self):
self.client_cwd = '../grpc-java'
self.safename = 'java'
def client_cmd(self, args):
return ['./run-test-client.sh', '--use_okhttp=true'] + args
def cloud_to_prod_env(self):
return {}
def global_env(self):
return {}
def unimplemented_test_cases(self):
return _SKIP_COMPRESSION + _SKIP_DATA_FRAME_PADDING
def __str__(self):
return 'javaokhttp'
class GoLanguage:
def __init__(self):
# TODO: this relies on running inside docker
self.client_cwd = '/go/src/google.golang.org/grpc/interop/client'
self.server_cwd = '/go/src/google.golang.org/grpc/interop/server'
self.http2_cwd = '/go/src/google.golang.org/grpc/interop/http2'
self.safename = str(self)
def client_cmd(self, args):
return ['go', 'run', 'client.go'] + args
def client_cmd_http2interop(self, args):
return ['go', 'run', 'negative_http2_client.go'] + args
def cloud_to_prod_env(self):
return {}
def server_cmd(self, args):
return ['go', 'run', 'server.go'] + args
def global_env(self):
return {}
def unimplemented_test_cases(self):
return _SKIP_COMPRESSION
def unimplemented_test_cases_server(self):
return _SKIP_COMPRESSION
def __str__(self):
return 'go'
class Http2Server:
"""Represents the HTTP/2 Interop Test server
This pretends to be a language in order to be built and run, but really it
isn't.
"""
def __init__(self):
self.server_cwd = None
self.safename = str(self)
def server_cmd(self, args):
return ['python test/http2_test/http2_test_server.py']
def cloud_to_prod_env(self):
return {}
def global_env(self):
return {}
def unimplemented_test_cases(self):
return _TEST_CASES + _SKIP_DATA_FRAME_PADDING
def unimplemented_test_cases_server(self):
return _TEST_CASES
def __str__(self):
return 'http2'
class Http2Client:
"""Represents the HTTP/2 Interop Test
This pretends to be a language in order to be built and run, but really it
isn't.
"""
def __init__(self):
self.client_cwd = None
self.safename = str(self)
def client_cmd(self, args):
return ['tools/http2_interop/http2_interop.test', '-test.v'] + args
def cloud_to_prod_env(self):
return {}
def global_env(self):
return {}
def unimplemented_test_cases(self):
return _TEST_CASES
def unimplemented_test_cases_server(self):
return _TEST_CASES
def __str__(self):
return 'http2'
class NodeLanguage:
def __init__(self):
self.client_cwd = None
self.server_cwd = None
self.safename = str(self)
def client_cmd(self, args):
return ['tools/run_tests/interop/with_nvm.sh',
'node', 'src/node/interop/interop_client.js'] + args
def cloud_to_prod_env(self):
return {}
def server_cmd(self, args):
return ['tools/run_tests/interop/with_nvm.sh',
'node', 'src/node/interop/interop_server.js'] + args
def global_env(self):
return {}
def unimplemented_test_cases(self):
return _SKIP_COMPRESSION + _SKIP_DATA_FRAME_PADDING
def unimplemented_test_cases_server(self):
return _SKIP_COMPRESSION
def __str__(self):
return 'node'
class PHPLanguage:
def __init__(self):
self.client_cwd = None
self.safename = str(self)
def client_cmd(self, args):
return ['src/php/bin/interop_client.sh'] + args
def cloud_to_prod_env(self):
return {}
def global_env(self):
return {}
def unimplemented_test_cases(self):
return _SKIP_COMPRESSION + _SKIP_DATA_FRAME_PADDING
def unimplemented_test_cases_server(self):
return []
def __str__(self):
return 'php'
class PHP7Language:
def __init__(self):
self.client_cwd = None
self.safename = str(self)
def client_cmd(self, args):
return ['src/php/bin/interop_client.sh'] + args
def cloud_to_prod_env(self):
return {}
def global_env(self):
return {}
def unimplemented_test_cases(self):
return _SKIP_COMPRESSION + _SKIP_DATA_FRAME_PADDING
def unimplemented_test_cases_server(self):
return []
def __str__(self):
return 'php7'
class ObjcLanguage:
def __init__(self):
self.client_cwd = 'src/objective-c/tests'
self.safename = str(self)
def client_cmd(self, args):
# from args, extract the server port and craft xcodebuild command out of it
for arg in args:
      port = re.search(r'--server_port=(\d+)', arg)
if port:
portnum = port.group(1)
cmdline = 'pod install && xcodebuild -workspace Tests.xcworkspace -scheme InteropTestsLocalSSL -destination name="iPhone 6" HOST_PORT_LOCALSSL=localhost:%s test'%portnum
return [cmdline]
def cloud_to_prod_env(self):
return {}
def global_env(self):
return {}
def unimplemented_test_cases(self):
# ObjC test runs all cases with the same command. It ignores the testcase
    # cmdline argument. Here we return all but one test case as unimplemented,
# and depend upon ObjC test's behavior that it runs all cases even when
# we tell it to run just one.
return _TEST_CASES[1:] + _SKIP_COMPRESSION + _SKIP_DATA_FRAME_PADDING
def unimplemented_test_cases_server(self):
return _SKIP_COMPRESSION
def __str__(self):
return 'objc'
class RubyLanguage:
def __init__(self):
self.client_cwd = None
self.server_cwd = None
self.safename = str(self)
def client_cmd(self, args):
return ['tools/run_tests/interop/with_rvm.sh',
'ruby', 'src/ruby/pb/test/client.rb'] + args
def cloud_to_prod_env(self):
return {}
def server_cmd(self, args):
return ['tools/run_tests/interop/with_rvm.sh',
'ruby', 'src/ruby/pb/test/server.rb'] + args
def global_env(self):
return {}
def unimplemented_test_cases(self):
return _SKIP_SERVER_COMPRESSION + _SKIP_DATA_FRAME_PADDING
def unimplemented_test_cases_server(self):
return _SKIP_COMPRESSION
def __str__(self):
return 'ruby'
class PythonLanguage:
def __init__(self):
self.client_cwd = None
self.server_cwd = None
self.http2_cwd = None
self.safename = str(self)
def client_cmd(self, args):
return [
'py27/bin/python',
'src/python/grpcio_tests/setup.py',
'run_interop',
'--client',
'--args="{}"'.format(' '.join(args))
]
def client_cmd_http2interop(self, args):
return [ 'py27/bin/python',
'src/python/grpcio_tests/tests/http2/negative_http2_client.py',
] + args
def cloud_to_prod_env(self):
return {}
def server_cmd(self, args):
return [
'py27/bin/python',
'src/python/grpcio_tests/setup.py',
'run_interop',
'--server',
'--args="{}"'.format(' '.join(args))
]
def global_env(self):
return {'LD_LIBRARY_PATH': '{}/libs/opt'.format(DOCKER_WORKDIR_ROOT),
'PYTHONPATH': '{}/src/python/gens'.format(DOCKER_WORKDIR_ROOT)}
def unimplemented_test_cases(self):
return _SKIP_COMPRESSION + _SKIP_DATA_FRAME_PADDING
def unimplemented_test_cases_server(self):
return _SKIP_COMPRESSION
def __str__(self):
return 'python'
_LANGUAGES = {
'c++' : CXXLanguage(),
'csharp' : CSharpLanguage(),
'csharpcoreclr' : CSharpCoreCLRLanguage(),
'go' : GoLanguage(),
'java' : JavaLanguage(),
'javaokhttp' : JavaOkHttpClient(),
'node' : NodeLanguage(),
'php' : PHPLanguage(),
'php7' : PHP7Language(),
'objc' : ObjcLanguage(),
'ruby' : RubyLanguage(),
'python' : PythonLanguage(),
}
# languages supported as cloud_to_cloud servers
_SERVERS = ['c++', 'node', 'csharp', 'csharpcoreclr', 'java', 'go', 'ruby', 'python']
_TEST_CASES = ['large_unary', 'empty_unary', 'ping_pong',
'empty_stream', 'client_streaming', 'server_streaming',
'cancel_after_begin', 'cancel_after_first_response',
'timeout_on_sleeping_server', 'custom_metadata',
'status_code_and_message', 'unimplemented_method',
'client_compressed_unary', 'server_compressed_unary',
'client_compressed_streaming', 'server_compressed_streaming',
'unimplemented_service']
_AUTH_TEST_CASES = ['compute_engine_creds', 'jwt_token_creds',
'oauth2_auth_token', 'per_rpc_creds']
_HTTP2_TEST_CASES = ['tls', 'framing']
_HTTP2_SERVER_TEST_CASES = ['rst_after_header', 'rst_after_data', 'rst_during_data',
'goaway', 'ping', 'max_streams', 'data_frame_padding', 'no_df_padding_sanity_test']
_GRPC_CLIENT_TEST_CASES_FOR_HTTP2_SERVER_TEST_CASES = { 'data_frame_padding': 'large_unary', 'no_df_padding_sanity_test': 'large_unary' }
_HTTP2_SERVER_TEST_CASES_THAT_USE_GRPC_CLIENTS = _GRPC_CLIENT_TEST_CASES_FOR_HTTP2_SERVER_TEST_CASES.keys()
_LANGUAGES_WITH_HTTP2_CLIENTS_FOR_HTTP2_SERVER_TEST_CASES = ['java', 'go', 'python', 'c++']
DOCKER_WORKDIR_ROOT = '/var/local/git/grpc'
def docker_run_cmdline(cmdline, image, docker_args=[], cwd=None, environ=None):
"""Wraps given cmdline array to create 'docker run' cmdline from it."""
docker_cmdline = ['docker', 'run', '-i', '--rm=true']
# turn environ into -e docker args
if environ:
for k,v in environ.items():
docker_cmdline += ['-e', '%s=%s' % (k,v)]
# set working directory
workdir = DOCKER_WORKDIR_ROOT
if cwd:
workdir = os.path.join(workdir, cwd)
docker_cmdline += ['-w', workdir]
docker_cmdline += docker_args + [image] + cmdline
return docker_cmdline
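# For illustration (values here are made up), wrapping cmdline=['echo', 'hi']
# with environ={'FOO': 'bar'} and cwd='src' yields roughly:
#   ['docker', 'run', '-i', '--rm=true', '-e', 'FOO=bar',
#    '-w', '/var/local/git/grpc/src', <image>, 'echo', 'hi']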
def manual_cmdline(docker_cmdline, docker_image):
"""Returns docker cmdline adjusted for manual invocation."""
print_cmdline = []
for item in docker_cmdline:
if item.startswith('--name='):
continue
if item == docker_image:
item = "$docker_image"
# add quotes when necessary
if any(character.isspace() for character in item):
item = "\"%s\"" % item
print_cmdline.append(item)
return ' '.join(print_cmdline)
def write_cmdlog_maybe(cmdlog, filename):
"""Returns docker cmdline adjusted for manual invocation."""
if cmdlog:
with open(filename, 'w') as logfile:
logfile.write('#!/bin/bash\n')
logfile.writelines("%s\n" % line for line in cmdlog)
print('Command log written to file %s' % filename)
def bash_cmdline(cmdline):
"""Creates bash -c cmdline from args list."""
# Use login shell:
# * makes error messages clearer if executables are missing
return ['bash', '-c', ' '.join(cmdline)]
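# For example, bash_cmdline(['echo', 'hi']) returns ['bash', '-c', 'echo hi'].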
def auth_options(language, test_case):
"""Returns (cmdline, env) tuple with cloud_to_prod_auth test options."""
language = str(language)
cmdargs = []
env = {}
# TODO(jtattermusch): this file path only works inside docker
key_filepath = '/root/service_account/GrpcTesting-726eb1347f15.json'
oauth_scope_arg = '--oauth_scope=https://www.googleapis.com/auth/xapi.zoo'
key_file_arg = '--service_account_key_file=%s' % key_filepath
default_account_arg = '--default_service_account=830293263384-compute@developer.gserviceaccount.com'
if test_case in ['jwt_token_creds', 'per_rpc_creds', 'oauth2_auth_token']:
if language in ['csharp', 'csharpcoreclr', 'node', 'php', 'php7', 'python', 'ruby']:
env['GOOGLE_APPLICATION_CREDENTIALS'] = key_filepath
else:
cmdargs += [key_file_arg]
if test_case in ['per_rpc_creds', 'oauth2_auth_token']:
cmdargs += [oauth_scope_arg]
if test_case == 'oauth2_auth_token' and language == 'c++':
# C++ oauth2 test uses GCE creds and thus needs to know the default account
cmdargs += [default_account_arg]
if test_case == 'compute_engine_creds':
cmdargs += [oauth_scope_arg, default_account_arg]
return (cmdargs, env)
def _job_kill_handler(job):
if job._spec.container_name:
dockerjob.docker_kill(job._spec.container_name)
    # When the job times out and we decide to kill it,
    # we need to wait a moment before restarting the job
    # to prevent a "container name already in use" error.
    # TODO(jtattermusch): figure out a cleaner way to do this.
time.sleep(2)
def cloud_to_prod_jobspec(language, test_case, server_host_name,
server_host_detail, docker_image=None, auth=False,
manual_cmd_log=None):
"""Creates jobspec for cloud-to-prod interop test"""
container_name = None
cmdargs = [
'--server_host=%s' % server_host_detail[0],
'--server_host_override=%s' % server_host_detail[1],
'--server_port=443',
'--use_tls=true',
'--test_case=%s' % test_case]
environ = dict(language.cloud_to_prod_env(), **language.global_env())
if auth:
auth_cmdargs, auth_env = auth_options(language, test_case)
cmdargs += auth_cmdargs
environ.update(auth_env)
cmdline = bash_cmdline(language.client_cmd(cmdargs))
cwd = language.client_cwd
if docker_image:
container_name = dockerjob.random_name('interop_client_%s' %
language.safename)
cmdline = docker_run_cmdline(cmdline,
image=docker_image,
cwd=cwd,
environ=environ,
docker_args=['--net=host',
'--name=%s' % container_name])
if manual_cmd_log is not None:
if manual_cmd_log == []:
manual_cmd_log.append('echo "Testing ${docker_image:=%s}"' % docker_image)
manual_cmd_log.append(manual_cmdline(cmdline, docker_image))
cwd = None
environ = None
suite_name='cloud_to_prod_auth' if auth else 'cloud_to_prod'
test_job = jobset.JobSpec(
cmdline=cmdline,
cwd=cwd,
environ=environ,
shortname='%s:%s:%s:%s' % (suite_name, server_host_name, language,
test_case),
timeout_seconds=_TEST_TIMEOUT,
flake_retries=5 if args.allow_flakes else 0,
timeout_retries=2 if args.allow_flakes else 0,
kill_handler=_job_kill_handler)
if docker_image:
test_job.container_name = container_name
return test_job
def cloud_to_cloud_jobspec(language, test_case, server_name, server_host,
server_port, docker_image=None, insecure=False,
manual_cmd_log=None):
"""Creates jobspec for cloud-to-cloud interop test"""
interop_only_options = [
'--server_host_override=foo.test.google.fr',
'--use_tls=%s' % ('false' if insecure else 'true'),
'--use_test_ca=true',
]
client_test_case = test_case
if test_case in _HTTP2_SERVER_TEST_CASES_THAT_USE_GRPC_CLIENTS:
client_test_case = _GRPC_CLIENT_TEST_CASES_FOR_HTTP2_SERVER_TEST_CASES[test_case]
if client_test_case in language.unimplemented_test_cases():
print('asking client %s to run unimplemented test case %s' % (repr(language), client_test_case))
sys.exit(1)
common_options = [
'--test_case=%s' % client_test_case,
'--server_host=%s' % server_host,
'--server_port=%s' % server_port,
]
if test_case in _HTTP2_SERVER_TEST_CASES:
if test_case in _HTTP2_SERVER_TEST_CASES_THAT_USE_GRPC_CLIENTS:
client_options = interop_only_options + common_options
cmdline = bash_cmdline(language.client_cmd(client_options))
cwd = language.client_cwd
else:
cmdline = bash_cmdline(language.client_cmd_http2interop(common_options))
cwd = language.http2_cwd
else:
cmdline = bash_cmdline(language.client_cmd(common_options+interop_only_options))
cwd = language.client_cwd
environ = language.global_env()
if docker_image and language.safename != 'objc':
# we can't run client in docker for objc.
container_name = dockerjob.random_name('interop_client_%s' % language.safename)
cmdline = docker_run_cmdline(cmdline,
image=docker_image,
environ=environ,
cwd=cwd,
docker_args=['--net=host',
'--name=%s' % container_name])
if manual_cmd_log is not None:
if manual_cmd_log == []:
manual_cmd_log.append('echo "Testing ${docker_image:=%s}"' % docker_image)
      manual_cmd_log.append(manual_cmdline(cmdline, docker_image))
cwd = None
test_job = jobset.JobSpec(
cmdline=cmdline,
cwd=cwd,
environ=environ,
shortname='cloud_to_cloud:%s:%s_server:%s' % (language, server_name,
test_case),
timeout_seconds=_TEST_TIMEOUT,
flake_retries=5 if args.allow_flakes else 0,
timeout_retries=2 if args.allow_flakes else 0,
kill_handler=_job_kill_handler)
if docker_image:
test_job.container_name = container_name
return test_job
def server_jobspec(language, docker_image, insecure=False, manual_cmd_log=None):
"""Create jobspec for running a server"""
container_name = dockerjob.random_name('interop_server_%s' % language.safename)
cmdline = bash_cmdline(
language.server_cmd(['--port=%s' % _DEFAULT_SERVER_PORT,
'--use_tls=%s' % ('false' if insecure else 'true')]))
environ = language.global_env()
docker_args = ['--name=%s' % container_name]
if language.safename == 'http2':
    # we are running the http2 interop server. Expose the next N ports
    # beginning with the server port; these ports are used by the http2
    # interop tests (one test case per port).
docker_args += list(
itertools.chain.from_iterable(('-p', str(_DEFAULT_SERVER_PORT + i))
for i in range(
len(_HTTP2_SERVER_TEST_CASES))))
# Enable docker's healthcheck mechanism.
# This runs a Python script inside the container every second. The script
# pings the http2 server to verify it is ready. The 'health-retries' flag
# specifies the number of consecutive failures before docker will report
    # the container's status as 'unhealthy'. Prior to the first 'health-retries'
# failures or the first success, the status will be 'starting'. 'docker ps'
# or 'docker inspect' can be used to see the health of the container on the
# command line.
docker_args += [
'--health-cmd=python test/http2_test/http2_server_health_check.py '
'--server_host=%s --server_port=%d'
% ('localhost', _DEFAULT_SERVER_PORT),
'--health-interval=1s',
'--health-retries=5',
'--health-timeout=10s',
]
else:
docker_args += ['-p', str(_DEFAULT_SERVER_PORT)]
docker_cmdline = docker_run_cmdline(cmdline,
image=docker_image,
cwd=language.server_cwd,
environ=environ,
docker_args=docker_args)
if manual_cmd_log is not None:
if manual_cmd_log == []:
manual_cmd_log.append('echo "Testing ${docker_image:=%s}"' % docker_image)
    manual_cmd_log.append(manual_cmdline(docker_cmdline, docker_image))
server_job = jobset.JobSpec(
cmdline=docker_cmdline,
environ=environ,
shortname='interop_server_%s' % language,
timeout_seconds=30*60)
server_job.container_name = container_name
return server_job
def build_interop_image_jobspec(language, tag=None):
"""Creates jobspec for building interop docker image for a language"""
if not tag:
tag = 'grpc_interop_%s:%s' % (language.safename, uuid.uuid4())
env = {'INTEROP_IMAGE': tag,
'BASE_NAME': 'grpc_interop_%s' % language.safename}
if not args.travis:
env['TTY_FLAG'] = '-t'
# This env variable is used to get around the github rate limit
# error when running the PHP `composer install` command
host_file = '%s/.composer/auth.json' % os.environ['HOME']
if language.safename == 'php' and os.path.exists(host_file):
env['BUILD_INTEROP_DOCKER_EXTRA_ARGS'] = \
'-v %s:/root/.composer/auth.json:ro' % host_file
build_job = jobset.JobSpec(
cmdline=['tools/run_tests/dockerize/build_interop_image.sh'],
environ=env,
shortname='build_docker_%s' % (language),
timeout_seconds=30*60)
build_job.tag = tag
return build_job
def aggregate_http2_results(stdout):
match = re.search(r'\{"cases[^\]]*\]\}', stdout)
if not match:
return None
results = json.loads(match.group(0))
skipped = 0
passed = 0
failed = 0
failed_cases = []
for case in results['cases']:
if case.get('skipped', False):
skipped += 1
else:
if case.get('passed', False):
passed += 1
else:
failed += 1
failed_cases.append(case.get('name', "NONAME"))
return {
'passed': passed,
'failed': failed,
'skipped': skipped,
'failed_cases': ', '.join(failed_cases),
'percent': 1.0 * passed / (passed + failed)
}
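# For example, given stdout containing
#   {"cases": [{"name": "tls", "passed": true}, {"name": "framing", "skipped": true}]}
# this returns:
#   {'passed': 1, 'failed': 0, 'skipped': 1, 'failed_cases': '', 'percent': 1.0}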
# A dictionary of prod servers to test.
# Format: server_name: (server_host, server_host_override, errors_allowed)
# TODO(adelez): implement logic for errors_allowed where if the indicated tests
# fail, they don't impact the overall test result.
prod_servers = {
'default': ('216.239.32.254',
'grpc-test.sandbox.googleapis.com', False),
'gateway_v2': ('216.239.32.254',
'grpc-test2.sandbox.googleapis.com', True),
'cloud_gateway': ('216.239.32.255', 'grpc-test.sandbox.googleapis.com',
False),
'cloud_gateway_v2': ('216.239.32.255', 'grpc-test2.sandbox.googleapis.com',
True),
'gateway_v4': ('216.239.32.254',
'grpc-test4.sandbox.googleapis.com', True),
'cloud_gateway_v4': ('216.239.32.255', 'grpc-test4.sandbox.googleapis.com',
True),
}
argp = argparse.ArgumentParser(description='Run interop tests.')
argp.add_argument('-l', '--language',
choices=['all'] + sorted(_LANGUAGES),
nargs='+',
default=['all'],
help='Clients to run. Objc client can be only run on OSX.')
argp.add_argument('-j', '--jobs', default=multiprocessing.cpu_count(), type=int)
argp.add_argument('--cloud_to_prod',
default=False,
action='store_const',
const=True,
help='Run cloud_to_prod tests.')
argp.add_argument('--cloud_to_prod_auth',
default=False,
action='store_const',
const=True,
help='Run cloud_to_prod_auth tests.')
argp.add_argument('--prod_servers',
choices=prod_servers.keys(),
default=['default'],
nargs='+',
help=('The servers to run cloud_to_prod and '
'cloud_to_prod_auth tests against.'))
argp.add_argument('-s', '--server',
choices=['all'] + sorted(_SERVERS),
nargs='+',
help='Run cloud_to_cloud servers in a separate docker ' +
'image. Servers can only be started automatically if ' +
'--use_docker option is enabled.',
default=[])
argp.add_argument('--override_server',
action='append',
type=lambda kv: kv.split('='),
help='Use servername=HOST:PORT to explicitly specify a server. E.g. csharp=localhost:50000',
default=[])
argp.add_argument('-t', '--travis',
default=False,
action='store_const',
const=True)
argp.add_argument('-v', '--verbose',
default=False,
action='store_const',
const=True)
argp.add_argument('--use_docker',
default=False,
action='store_const',
const=True,
help='Run all the interop tests under docker. That provides ' +
'additional isolation and prevents the need to install ' +
'language specific prerequisites. Only available on Linux.')
argp.add_argument('--allow_flakes',
default=False,
action='store_const',
const=True,
help='Allow flaky tests to show as passing (re-runs failed tests up to five times)')
argp.add_argument('--manual_run',
default=False,
action='store_const',
const=True,
help='Prepare things for running interop tests manually. ' +
'Preserve docker images after building them and skip '
'actually running the tests. Only print commands to run by ' +
'hand.')
argp.add_argument('--http2_interop',
default=False,
action='store_const',
const=True,
help='Enable HTTP/2 client edge case testing. (Bad client, good server)')
argp.add_argument('--http2_server_interop',
default=False,
action='store_const',
const=True,
                  help='Enable HTTP/2 server edge case testing. (Includes positive and negative tests)')
argp.add_argument('--insecure',
default=False,
action='store_const',
const=True,
                  help='Whether to use an insecure channel (no TLS).')
argp.add_argument('--internal_ci',
default=False,
action='store_const',
const=True,
help=('Put reports into subdirectories to improve '
'presentation of results by Internal CI.'))
args = argp.parse_args()
servers = set(s for s in itertools.chain.from_iterable(_SERVERS
if x == 'all' else [x]
for x in args.server))
if args.use_docker:
if not args.travis:
print('Seen --use_docker flag, will run interop tests under docker.')
print('')
print('IMPORTANT: The changes you are testing need to be locally committed')
print('because only the committed changes in the current branch will be')
print('copied to the docker environment.')
time.sleep(5)
if args.manual_run and not args.use_docker:
print('--manual_run is only supported with --use_docker option enabled.')
sys.exit(1)
if not args.use_docker and servers:
print('Running interop servers is only supported with --use_docker option enabled.')
sys.exit(1)
# we want to include everything but objc in 'all'
# because objc won't run on non-mac platforms
all_but_objc = set(six.iterkeys(_LANGUAGES)) - set(['objc'])
languages = set(_LANGUAGES[l]
for l in itertools.chain.from_iterable(
all_but_objc if x == 'all' else [x]
for x in args.language))
languages_http2_clients_for_http2_server_interop = set()
if args.http2_server_interop:
languages_http2_clients_for_http2_server_interop = set(
_LANGUAGES[l] for l in _LANGUAGES_WITH_HTTP2_CLIENTS_FOR_HTTP2_SERVER_TEST_CASES
if 'all' in args.language or l in args.language)
http2Interop = Http2Client() if args.http2_interop else None
http2InteropServer = Http2Server() if args.http2_server_interop else None
docker_images={}
if args.use_docker:
# languages for which to build docker images
languages_to_build = set(
_LANGUAGES[k] for k in set([str(l) for l in languages] + [s for s in servers]))
languages_to_build = languages_to_build | languages_http2_clients_for_http2_server_interop
if args.http2_interop:
languages_to_build.add(http2Interop)
if args.http2_server_interop:
languages_to_build.add(http2InteropServer)
build_jobs = []
for l in languages_to_build:
if str(l) == 'objc':
# we don't need to build a docker image for objc
continue
job = build_interop_image_jobspec(l)
docker_images[str(l)] = job.tag
build_jobs.append(job)
if build_jobs:
jobset.message('START', 'Building interop docker images.', do_newline=True)
if args.verbose:
print('Jobs to run: \n%s\n' % '\n'.join(str(j) for j in build_jobs))
num_failures, _ = jobset.run(
build_jobs, newline_on_success=True, maxjobs=args.jobs)
if num_failures == 0:
jobset.message('SUCCESS', 'All docker images built successfully.',
do_newline=True)
else:
jobset.message('FAILED', 'Failed to build interop docker images.',
do_newline=True)
for image in six.itervalues(docker_images):
dockerjob.remove_image(image, skip_nonexistent=True)
sys.exit(1)
server_manual_cmd_log = [] if args.manual_run else None
client_manual_cmd_log = [] if args.manual_run else None
# Start interop servers.
server_jobs = {}
server_addresses = {}
try:
for s in servers:
lang = str(s)
spec = server_jobspec(_LANGUAGES[lang], docker_images.get(lang),
args.insecure, manual_cmd_log=server_manual_cmd_log)
if not args.manual_run:
job = dockerjob.DockerJob(spec)
server_jobs[lang] = job
server_addresses[lang] = ('localhost', job.mapped_port(_DEFAULT_SERVER_PORT))
else:
# don't run the server, set server port to a placeholder value
server_addresses[lang] = ('localhost', '${SERVER_PORT}')
http2_server_job = None
if args.http2_server_interop:
    # launch an HTTP2 server emulator that creates edge cases
lang = str(http2InteropServer)
spec = server_jobspec(http2InteropServer, docker_images.get(lang),
manual_cmd_log=server_manual_cmd_log)
if not args.manual_run:
http2_server_job = dockerjob.DockerJob(spec)
server_jobs[lang] = http2_server_job
else:
# don't run the server, set server port to a placeholder value
server_addresses[lang] = ('localhost', '${SERVER_PORT}')
jobs = []
if args.cloud_to_prod:
if args.insecure:
print('TLS is always enabled for cloud_to_prod scenarios.')
for server_host_name in args.prod_servers:
for language in languages:
for test_case in _TEST_CASES:
if not test_case in language.unimplemented_test_cases():
if not test_case in _SKIP_ADVANCED + _SKIP_COMPRESSION:
test_job = cloud_to_prod_jobspec(
language, test_case, server_host_name,
prod_servers[server_host_name],
docker_image=docker_images.get(str(language)),
manual_cmd_log=client_manual_cmd_log)
jobs.append(test_job)
if args.http2_interop:
for test_case in _HTTP2_TEST_CASES:
test_job = cloud_to_prod_jobspec(
http2Interop, test_case, server_host_name,
prod_servers[server_host_name],
docker_image=docker_images.get(str(http2Interop)),
manual_cmd_log=client_manual_cmd_log)
jobs.append(test_job)
if args.cloud_to_prod_auth:
if args.insecure:
print('TLS is always enabled for cloud_to_prod scenarios.')
for server_host_name in args.prod_servers:
for language in languages:
for test_case in _AUTH_TEST_CASES:
if not test_case in language.unimplemented_test_cases():
test_job = cloud_to_prod_jobspec(
language, test_case, server_host_name,
prod_servers[server_host_name],
docker_image=docker_images.get(str(language)), auth=True,
manual_cmd_log=client_manual_cmd_log)
jobs.append(test_job)
for server in args.override_server:
server_name = server[0]
(server_host, server_port) = server[1].split(':')
server_addresses[server_name] = (server_host, server_port)
for server_name, server_address in server_addresses.items():
(server_host, server_port) = server_address
server_language = _LANGUAGES.get(server_name, None)
skip_server = [] # test cases unimplemented by server
if server_language:
skip_server = server_language.unimplemented_test_cases_server()
for language in languages:
for test_case in _TEST_CASES:
if not test_case in language.unimplemented_test_cases():
if not test_case in skip_server:
test_job = cloud_to_cloud_jobspec(language,
test_case,
server_name,
server_host,
server_port,
docker_image=docker_images.get(str(language)),
insecure=args.insecure,
manual_cmd_log=client_manual_cmd_log)
jobs.append(test_job)
if args.http2_interop:
for test_case in _HTTP2_TEST_CASES:
if server_name == "go":
# TODO(carl-mastrangelo): Reenable after https://github.com/grpc/grpc-go/issues/434
continue
test_job = cloud_to_cloud_jobspec(http2Interop,
test_case,
server_name,
server_host,
server_port,
docker_image=docker_images.get(str(http2Interop)),
insecure=args.insecure,
manual_cmd_log=client_manual_cmd_log)
jobs.append(test_job)
if args.http2_server_interop:
if not args.manual_run:
http2_server_job.wait_for_healthy(timeout_seconds=600)
for language in languages_http2_clients_for_http2_server_interop:
for test_case in set(_HTTP2_SERVER_TEST_CASES) - set(_HTTP2_SERVER_TEST_CASES_THAT_USE_GRPC_CLIENTS):
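        # Each http2-server test case is assigned its own port, offset from
        # the default server port by the case's index in the sorted list.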
offset = sorted(_HTTP2_SERVER_TEST_CASES).index(test_case)
server_port = _DEFAULT_SERVER_PORT+offset
if not args.manual_run:
server_port = http2_server_job.mapped_port(server_port)
test_job = cloud_to_cloud_jobspec(language,
test_case,
str(http2InteropServer),
'localhost',
server_port,
docker_image=docker_images.get(str(language)),
manual_cmd_log=client_manual_cmd_log)
jobs.append(test_job)
for language in languages:
      # HTTP2_SERVER_TEST_CASES_THAT_USE_GRPC_CLIENTS is a subset of
      # HTTP2_SERVER_TEST_CASES, in which clients use their gRPC interop clients rather
      # than specialized http2 clients, reusing existing test implementations.
      # For example, in the "data_frame_padding" test, we use each language's gRPC
      # interop clients and make them think that they're running the "large_unary"
      # test case. This avoids implementing a new test case in each language.
for test_case in _HTTP2_SERVER_TEST_CASES_THAT_USE_GRPC_CLIENTS:
if test_case not in language.unimplemented_test_cases():
offset = sorted(_HTTP2_SERVER_TEST_CASES).index(test_case)
server_port = _DEFAULT_SERVER_PORT+offset
if not args.manual_run:
server_port = http2_server_job.mapped_port(server_port)
if not args.insecure:
          print(('Creating grpc client to http2 server test case with insecure connection, even though'
                 ' args.insecure is False. Http2 test server only supports insecure connections.'))
test_job = cloud_to_cloud_jobspec(language,
test_case,
str(http2InteropServer),
'localhost',
server_port,
docker_image=docker_images.get(str(language)),
insecure=True,
manual_cmd_log=client_manual_cmd_log)
jobs.append(test_job)
if not jobs:
print('No jobs to run.')
for image in six.itervalues(docker_images):
dockerjob.remove_image(image, skip_nonexistent=True)
sys.exit(1)
if args.manual_run:
    print('All tests will be skipped since the --manual_run option is active.')
if args.verbose:
print('Jobs to run: \n%s\n' % '\n'.join(str(job) for job in jobs))
num_failures, resultset = jobset.run(jobs, newline_on_success=True,
maxjobs=args.jobs,
skip_jobs=args.manual_run)
if num_failures:
jobset.message('FAILED', 'Some tests failed', do_newline=True)
else:
jobset.message('SUCCESS', 'All tests passed', do_newline=True)
write_cmdlog_maybe(server_manual_cmd_log, 'interop_server_cmds.sh')
write_cmdlog_maybe(client_manual_cmd_log, 'interop_client_cmds.sh')
xml_report_name = _XML_REPORT
if args.internal_ci:
xml_report_name = _INTERNAL_CL_XML_REPORT
report_utils.render_junit_xml_report(resultset, xml_report_name)
for name, job in resultset.items():
if "http2" in name:
job[0].http2results = aggregate_http2_results(job[0].message)
http2_server_test_cases = (
_HTTP2_SERVER_TEST_CASES if args.http2_server_interop else [])
report_utils.render_interop_html_report(
set([str(l) for l in languages]), servers, _TEST_CASES, _AUTH_TEST_CASES,
_HTTP2_TEST_CASES, http2_server_test_cases, resultset, num_failures,
args.cloud_to_prod_auth or args.cloud_to_prod, args.prod_servers,
args.http2_interop)
except Exception as e:
print('exception occurred:')
traceback.print_exc(file=sys.stdout)
finally:
# Check if servers are still running.
for server, job in server_jobs.items():
if not job.is_running():
print('Server "%s" has exited prematurely.' % server)
dockerjob.finish_jobs([j for j in six.itervalues(server_jobs)])
for image in six.itervalues(docker_images):
if not args.manual_run:
print('Removing docker image %s' % image)
dockerjob.remove_image(image)
else:
print('Preserving docker image: %s' % image)
| {
"content_hash": "1bf3f11a1b2bb6638fdab13164747902",
"timestamp": "",
"source": "github",
"line_count": 1234,
"max_line_length": 177,
"avg_line_length": 34.79011345218801,
"alnum_prop": 0.6093265938366216,
"repo_name": "kskalski/grpc",
"id": "1e702a863651b82b738a0aeebc808e3f47f36ade",
"size": "43531",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/run_tests/run_interop_tests.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "14215"
},
{
"name": "C",
"bytes": "6593116"
},
{
"name": "C#",
"bytes": "1419650"
},
{
"name": "C++",
"bytes": "1994429"
},
{
"name": "CMake",
"bytes": "449240"
},
{
"name": "DTrace",
"bytes": "147"
},
{
"name": "JavaScript",
"bytes": "370036"
},
{
"name": "M4",
"bytes": "42266"
},
{
"name": "Makefile",
"bytes": "852253"
},
{
"name": "Objective-C",
"bytes": "269113"
},
{
"name": "PHP",
"bytes": "275617"
},
{
"name": "Protocol Buffer",
"bytes": "93110"
},
{
"name": "PureBasic",
"bytes": "147"
},
{
"name": "Python",
"bytes": "1263861"
},
{
"name": "Ruby",
"bytes": "625583"
},
{
"name": "Shell",
"bytes": "35036"
},
{
"name": "Swift",
"bytes": "3486"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
def get_jenv():
import frappe
if not getattr(frappe.local, 'jenv', None):
from jinja2 import Environment, DebugUndefined
# frappe will be loaded last, so app templates will get precedence
jenv = Environment(loader = get_jloader(),
undefined=DebugUndefined)
set_filters(jenv)
jenv.globals.update(get_allowed_functions_for_jenv())
frappe.local.jenv = jenv
return frappe.local.jenv
def get_template(path):
return get_jenv().get_template(path)
def get_email_from_template(name, args):
from jinja2 import TemplateNotFound
args = args or {}
try:
message = get_template('templates/emails/' + name + '.html').render(args)
	except TemplateNotFound:
		raise
try:
text_content = get_template('templates/emails/' + name + '.txt').render(args)
except TemplateNotFound:
text_content = None
return (message, text_content)
def validate_template(html):
"""Throws exception if there is a syntax error in the Jinja Template"""
import frappe
from jinja2 import TemplateSyntaxError
jenv = get_jenv()
try:
jenv.from_string(html)
except TemplateSyntaxError as e:
frappe.msgprint('Line {}: {}'.format(e.lineno, e.message))
frappe.throw(frappe._("Syntax error in template"))
def render_template(template, context, is_path=None, safe_render=True):
'''Render a template using Jinja
:param template: path or HTML containing the jinja template
:param context: dict of properties to pass to the template
:param is_path: (optional) assert that the `template` parameter is a path
:param safe_render: (optional) prevent server side scripting via jinja templating
'''
from frappe import throw
if not template:
return ""
	# if it ends with .html then it's a path, not inline html
if (is_path
or template.startswith("templates/")
or (template.endswith('.html') and '\n' not in template)):
return get_jenv().get_template(template).render(context)
else:
if safe_render and ".__" in template:
throw("Illegal template")
return get_jenv().from_string(template).render(context)
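# Illustrative usage of render_template (template names and context keys are
# placeholders):
#   render_template("templates/emails/birthday.html", {"name": "Jane"})  # by path
#   render_template("Hello {{ name }}!", {"name": "Jane"})               # inline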
def get_allowed_functions_for_jenv():
import os, json
import frappe
import frappe.utils
import frappe.utils.data
from frappe.utils.autodoc import automodule, get_version
from frappe.model.document import get_controller
from frappe.website.utils import (get_shade, get_toc, get_next_link)
from frappe.modules import scrub
import mimetypes
from html2text import html2text
from frappe.www.printview import get_visible_columns
datautils = {}
if frappe.db:
date_format = frappe.db.get_default("date_format") or "yyyy-mm-dd"
else:
date_format = 'yyyy-mm-dd'
for key, obj in frappe.utils.data.__dict__.items():
if key.startswith("_"):
# ignore
continue
if hasattr(obj, "__call__"):
# only allow functions
datautils[key] = obj
if "_" in getattr(frappe.local, 'form_dict', {}):
del frappe.local.form_dict["_"]
user = getattr(frappe.local, "session", None) and frappe.local.session.user or "Guest"
out = {
# make available limited methods of frappe
"frappe": {
"_": frappe._,
"get_url": frappe.utils.get_url,
'format': frappe.format_value,
"format_value": frappe.format_value,
'date_format': date_format,
"format_date": frappe.utils.data.global_date_format,
"form_dict": getattr(frappe.local, 'form_dict', {}),
"local": frappe.local,
"get_hooks": frappe.get_hooks,
"get_meta": frappe.get_meta,
"get_doc": frappe.get_doc,
"get_list": frappe.get_list,
"get_all": frappe.get_all,
"utils": datautils,
"user": user,
"get_fullname": frappe.utils.get_fullname,
"get_gravatar": frappe.utils.get_gravatar_url,
"full_name": frappe.local.session.data.full_name if getattr(frappe.local, "session", None) else "Guest",
"render_template": frappe.render_template,
'session': {
'user': user,
'csrf_token': frappe.local.session.data.csrf_token if getattr(frappe.local, "session", None) else ''
},
},
'style': {
'border_color': '#d1d8dd'
},
"autodoc": {
"get_version": get_version,
"automodule": automodule,
"get_controller": get_controller
},
'get_toc': get_toc,
'get_next_link': get_next_link,
"_": frappe._,
"get_shade": get_shade,
"scrub": scrub,
"guess_mimetype": mimetypes.guess_type,
'html2text': html2text,
'json': json,
"dev_server": 1 if os.environ.get('DEV_SERVER', False) else 0
}
if not frappe.flags.in_setup_help:
out['get_visible_columns'] = get_visible_columns
out['frappe']['date_format'] = date_format
out['frappe']["db"] = {
"get_value": frappe.db.get_value,
"get_default": frappe.db.get_default,
"escape": frappe.db.escape,
}
# load jenv methods from hooks.py
for method_name, method_definition in get_jenv_customization("methods"):
out[method_name] = frappe.get_attr(method_definition)
return out
def get_jloader():
import frappe
if not getattr(frappe.local, 'jloader', None):
from jinja2 import ChoiceLoader, PackageLoader, PrefixLoader
if frappe.local.flags.in_setup_help:
apps = ['frappe']
else:
apps = frappe.get_hooks('template_apps')
if not apps:
apps = frappe.local.flags.web_pages_apps or frappe.get_installed_apps(sort=True)
apps.reverse()
if not "frappe" in apps:
apps.append('frappe')
frappe.local.jloader = ChoiceLoader(
# search for something like app/templates/...
[PrefixLoader(dict(
(app, PackageLoader(app, ".")) for app in apps
))]
# search for something like templates/...
+ [PackageLoader(app, ".") for app in apps]
)
return frappe.local.jloader
def set_filters(jenv):
import frappe
from frappe.utils import global_date_format, cint, cstr, flt, markdown
from frappe.website.utils import get_shade, abs_url
jenv.filters["global_date_format"] = global_date_format
jenv.filters["markdown"] = markdown
jenv.filters["json"] = frappe.as_json
jenv.filters["get_shade"] = get_shade
jenv.filters["len"] = len
jenv.filters["int"] = cint
jenv.filters["str"] = cstr
jenv.filters["flt"] = flt
jenv.filters["abs_url"] = abs_url
if frappe.flags.in_setup_help: return
# load jenv_filters from hooks.py
for filter_name, filter_function in get_jenv_customization("filters"):
jenv.filters[filter_name] = frappe.get_attr(filter_function)
def get_jenv_customization(customizable_type):
import frappe
if getattr(frappe.local, "site", None):
for app in frappe.get_installed_apps():
for jenv_customizable, jenv_customizable_definition in frappe.get_hooks(app_name=app).get("jenv", {}).items():
if customizable_type == jenv_customizable:
for data in jenv_customizable_definition:
split_data = data.split(":")
yield split_data[0], split_data[1]
| {
"content_hash": "b9d816b5e2f9cbc544bfa253e45c6f02",
"timestamp": "",
"source": "github",
"line_count": 227,
"max_line_length": 113,
"avg_line_length": 29.52863436123348,
"alnum_prop": 0.6981948381321796,
"repo_name": "paurosello/frappe",
"id": "dca6d8a066d4f1bc9e16a867ef540b188652d008",
"size": "6803",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "frappe/utils/jinja.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "422931"
},
{
"name": "HTML",
"bytes": "202357"
},
{
"name": "JavaScript",
"bytes": "1858011"
},
{
"name": "Makefile",
"bytes": "29"
},
{
"name": "Python",
"bytes": "2042290"
},
{
"name": "Shell",
"bytes": "517"
}
],
"symlink_target": ""
} |
"""This code example archives ad units.
The parent ad unit and all ad units underneath it will be archived. To create ad
units, run create_ad_units.py.
Tags: InventoryService.getAdUnitsByStatement
"""
__author__ = '[email protected] (Jeff Sham)'
# Locate the client library. If module was installed via "setup.py" script, then
# the following two lines are not needed.
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..', '..'))
# Import appropriate classes from the client library.
from adspygoogle import DfpClient
from adspygoogle.dfp import DfpUtils
def main(client, parent_id):
# Initialize appropriate service.
inventory_service = client.GetService('InventoryService', version='v201306')
  # Create a query to select the parent ad unit and the ad units
  # directly under it.
values = [{
'key': 'parentId',
'value': {
'xsi_type': 'NumberValue',
'value': parent_id
}
}]
query = 'WHERE parentId = :parentId or id = :parentId'
# Get ad units by statement.
ad_units = DfpUtils.GetAllEntitiesByStatementWithService(
inventory_service, query=query, bind_vars=values)
for ad_unit in ad_units:
print ('Ad unit with ID \'%s\' and name \'%s\' will be archived.'
% (ad_unit['id'], ad_unit['name']))
print 'Number of ad units to be archived: %s' % len(ad_units)
# Perform action.
result = inventory_service.PerformAdUnitAction(
{'type': 'ArchiveAdUnits'}, {'query': query, 'values': values})[0]
# Display results.
if result and int(result['numChanges']) > 0:
print 'Number of ad units archived: %s' % result['numChanges']
else:
print 'No ad units were archived.'
if __name__ == '__main__':
# Initialize client object.
dfp_client = DfpClient(path=os.path.join('..', '..', '..', '..', '..'))
# Get the Network Service.
network_service = dfp_client.GetService('NetworkService', version='v201306')
# Set the parent ad unit's ID for all children ad units to be fetched from.
root_id = network_service.GetCurrentNetwork()[0]['effectiveRootAdUnitId']
main(dfp_client, root_id)
| {
"content_hash": "6b01c20079792eacea01280e6c23851d",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 80,
"avg_line_length": 33.203125,
"alnum_prop": 0.6705882352941176,
"repo_name": "donspaulding/adspygoogle",
"id": "3261f760e176bba818bddb7cd414aa2ca4ac527f",
"size": "2743",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "examples/adspygoogle/dfp/v201306/inventory_service/archive_ad_units.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "3734067"
},
{
"name": "Shell",
"bytes": "603"
}
],
"symlink_target": ""
} |
from Tkinter import *
class App:
    def __init__(self, master):
        frame = Frame(master)
        frame.pack()
        scale = Scale(frame, from_=0, to=180,
              orient=HORIZONTAL, command=self.update)
        scale.grid(row=0)
        # Pwm is a hardware PWM driver assumed to be available in the
        # original environment; it is not defined in this snippet.
        self.pwm = Pwm()
    def update(self, angle):
        # Map the 0-180 degree slider onto a 2.5-20.5% servo duty cycle;
        # the duty value would be handed to the PWM driver from here.
        duty = float(angle) / 10.0 + 2.5
root = Tk()
root.wm_title('Servo Control')
app = App(root)
root.geometry("200x50+0+0")
root.mainloop()
| {
"content_hash": "f66f21d108ab2d2f74da946d273b2bf0",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 53,
"avg_line_length": 20.545454545454547,
"alnum_prop": 0.5796460176991151,
"repo_name": "eugenekolo/kololib",
"id": "e3b1d5620389e10a81018ddae8f4235284b593b0",
"size": "476",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/tkinter.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "3786"
},
{
"name": "C++",
"bytes": "13888"
},
{
"name": "CSS",
"bytes": "110"
},
{
"name": "Go",
"bytes": "1378"
},
{
"name": "HTML",
"bytes": "739"
},
{
"name": "Java",
"bytes": "30385"
},
{
"name": "JavaScript",
"bytes": "2848"
},
{
"name": "Makefile",
"bytes": "6562"
},
{
"name": "Python",
"bytes": "17836"
},
{
"name": "Shell",
"bytes": "8809"
}
],
"symlink_target": ""
} |
"""Load data from the specified paths and format them for training."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import numpy as np
import tensorflow as tf
from data_augmentation import augment_data
LABEL_NAME = "gesture"
DATA_NAME = "accel_ms2_xyz"
class DataLoader(object):
"""Loads data and prepares for training."""
def __init__(self, train_data_path, valid_data_path, test_data_path,
seq_length):
self.dim = 3
self.seq_length = seq_length
self.label2id = {"wing": 0, "ring": 1, "slope": 2, "negative": 3}
self.train_data, self.train_label, self.train_len = self.get_data_file(
train_data_path, "train")
self.valid_data, self.valid_label, self.valid_len = self.get_data_file(
valid_data_path, "valid")
self.test_data, self.test_label, self.test_len = self.get_data_file(
test_data_path, "test")
def get_data_file(self, data_path, data_type): # pylint: disable=no-self-use
"""Get train, valid and test data from files."""
data = []
label = []
with open(data_path, "r") as f:
lines = f.readlines()
for idx, line in enumerate(lines): # pylint: disable=unused-variable
dic = json.loads(line)
data.append(dic[DATA_NAME])
label.append(dic[LABEL_NAME])
if data_type == "train":
data, label = augment_data(data, label)
length = len(label)
print(data_type + "_data_length:" + str(length))
return data, label, length
def pad(self, data, seq_length, dim): # pylint: disable=no-self-use
"""Get neighbour padding."""
noise_level = 20
padded_data = []
    # Pad before the data with uniform noise around its first value
tmp_data = (np.random.rand(seq_length, dim) - 0.5) * noise_level + data[0]
tmp_data[(seq_length -
min(len(data), seq_length)):] = data[:min(len(data), seq_length)]
padded_data.append(tmp_data)
    # Pad after the data with uniform noise around its last value
tmp_data = (np.random.rand(seq_length, dim) - 0.5) * noise_level + data[-1]
tmp_data[:min(len(data), seq_length)] = data[:min(len(data), seq_length)]
padded_data.append(tmp_data)
return padded_data
def format_support_func(self, padded_num, length, data, label):
"""Support function for format.(Helps format train, valid and test.)"""
# Add 2 padding, initialize data and label
length *= padded_num
features = np.zeros((length, self.seq_length, self.dim))
labels = np.zeros(length)
# Get padding for train, valid and test
for idx, (data, label) in enumerate(zip(data, label)): # pylint: disable=redefined-argument-from-local
padded_data = self.pad(data, self.seq_length, self.dim)
for num in range(padded_num):
features[padded_num * idx + num] = padded_data[num]
labels[padded_num * idx + num] = self.label2id[label]
# Turn into tf.data.Dataset
dataset = tf.data.Dataset.from_tensor_slices(
(features, labels.astype("int32")))
return length, dataset
def format(self):
"""Format data(including padding, etc.) and get the dataset for the model."""
padded_num = 2
self.train_len, self.train_data = self.format_support_func(
padded_num, self.train_len, self.train_data, self.train_label)
self.valid_len, self.valid_data = self.format_support_func(
padded_num, self.valid_len, self.valid_data, self.valid_label)
self.test_len, self.test_data = self.format_support_func(
padded_num, self.test_len, self.test_data, self.test_label)
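# Illustrative usage; the file paths and sizes below are placeholders:
#   loader = DataLoader("train.json", "valid.json", "test.json", seq_length=128)
#   loader.format()
#   train_batches = loader.train_data.batch(64)  # tf.data.Dataset of (data, label)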
| {
"content_hash": "0a3efb19fc8d0581134371b582c23769",
"timestamp": "",
"source": "github",
"line_count": 89,
"max_line_length": 113,
"avg_line_length": 43.57303370786517,
"alnum_prop": 0.5941206807632801,
"repo_name": "galak/zephyr",
"id": "35ff4825a8beb6fe3d95dc53960bdadf3dfe2fca",
"size": "4628",
"binary": false,
"copies": "4",
"ref": "refs/heads/main",
"path": "samples/modules/tflite-micro/magic_wand/train/data_load.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "445128"
},
{
"name": "Batchfile",
"bytes": "110"
},
{
"name": "C",
"bytes": "45081460"
},
{
"name": "C++",
"bytes": "29379"
},
{
"name": "CMake",
"bytes": "1396197"
},
{
"name": "Cadence",
"bytes": "1501"
},
{
"name": "EmberScript",
"bytes": "997"
},
{
"name": "Forth",
"bytes": "1648"
},
{
"name": "GDB",
"bytes": "1285"
},
{
"name": "Haskell",
"bytes": "722"
},
{
"name": "JetBrains MPS",
"bytes": "3152"
},
{
"name": "PLSQL",
"bytes": "281"
},
{
"name": "Perl",
"bytes": "215338"
},
{
"name": "Python",
"bytes": "2267025"
},
{
"name": "Shell",
"bytes": "173704"
},
{
"name": "SmPL",
"bytes": "36840"
},
{
"name": "Smalltalk",
"bytes": "1885"
},
{
"name": "SourcePawn",
"bytes": "14890"
},
{
"name": "Tcl",
"bytes": "7034"
},
{
"name": "VBA",
"bytes": "294"
},
{
"name": "Verilog",
"bytes": "6394"
}
],
"symlink_target": ""
} |
from ..Constant import Constant
from ..Minutes import Minutes
from ..Irradiance import Irradiance
from .BaseComponent import BaseComponent
class SolarIrradianceComponent(BaseComponent):
''' handle GM1 solar irradiance '''
# TODO: add support for codes from 10-97
DATA_FLAGS = {"00": "Untested",
"01": "Passed one-component test; data fall within max-min limit of Kt, Kn, or Kd",
"02": "Passed two-component test; data fall within 0.03 of the Gompertz boundaries",
"03": "Passed three-component test; data come within + 0.03 of satisfying Kt = Kn + Kd",
"04": "Passed visual inspection: not used by SERI_QC1",
"05": "Failed visual inspection: not used by SERI_QC1",
"06": "Value estimated; passes all pertinent SERI_QC tests",
"07": "Failed one-component test; lower than allowed minimum",
"08": "Failed one-component test; higher than allowed maximum",
"09": "Passed three-component test but failed two-component test by 0.05",
"98": "Not Used"}
def loads(self, string):
self.solar_irradiance = {'time_period': Minutes(string[0:4]),
'global_irradiance': Irradiance(string[4:8]),
'irradiance_data_flag': Constant(string[8:10], None,
string[10:11], self.DATA_FLAGS),
'direct_beam_irradiance': Irradiance(string[11:15]),
'direct_beam_irradiance_data_flag': Constant(string[15:17], None,
string[17:18], self.DATA_FLAGS),
'diffuse_irradiance': Irradiance(string[18:22]),
'diffuse_irradiance_data_flag': Constant(string[22:24], None,
string[24:25], self.DATA_FLAGS),
'uvb_global_irradiance': Irradiance(string[26:30])}
def __repr__(self):
return str(self.solar_irradiance)
def __str__(self):
return str(self.solar_irradiance)
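# Field layout implied by the slicing in loads() (character offsets within the
# GM1 component string): [0:4] time period in minutes, [4:8] global irradiance,
# [8:10]+[10:11] its data flag and quality code, [11:15] direct beam
# irradiance, [15:17]+[17:18] its flag, [18:22] diffuse irradiance,
# [22:24]+[24:25] its flag, [26:30] UVB global irradiance.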
| {
"content_hash": "9f3a6ea58bc22db3850444b714137fb5",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 104,
"avg_line_length": 56.23076923076923,
"alnum_prop": 0.5444596443228454,
"repo_name": "haydenth/ish_parser",
"id": "81503feefc98f40303685d9428ea5209b24ee4af",
"size": "2193",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ish_parser/Components/SolarIrradianceComponent.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "69295"
}
],
"symlink_target": ""
} |
import os
import json
import requests
import pkg_resources
from pkg_resources import DistributionNotFound
from types import ModuleType
from .constants import HTTP_STATUS_CODE, ERROR_CODE, URL
from . import resources, utility
from .errors import (BadRequestError, GatewayError,
ServerError)
def capitalize_camel_case(string):
return "".join(map(str.capitalize, string.split('_')))
# Create a dict of resource classes
RESOURCE_CLASSES = {}
for name, module in resources.__dict__.items():
if isinstance(module, ModuleType) and \
capitalize_camel_case(name) in module.__dict__:
RESOURCE_CLASSES[name] = module.__dict__[capitalize_camel_case(name)]
UTILITY_CLASSES = {}
for name, module in utility.__dict__.items():
if isinstance(module, ModuleType) and name.capitalize() in module.__dict__:
UTILITY_CLASSES[name] = module.__dict__[name.capitalize()]
class Client:
"""Razorpay client class"""
DEFAULTS = {
'base_url': URL.BASE_URL
}
def __init__(self, session=None, auth=None, **options):
"""
Initialize a Client object with session,
optional auth handler, and options
"""
self.session = session or requests.Session()
self.auth = auth
file_dir = os.path.dirname(__file__)
self.cert_path = file_dir + '/ca-bundle.crt'
self.base_url = self._set_base_url(**options)
self.app_details = []
        # initializes each resource
# injecting this client object into the constructor
for name, Klass in RESOURCE_CLASSES.items():
setattr(self, name, Klass(self))
for name, Klass in UTILITY_CLASSES.items():
setattr(self, name, Klass(self))
def _set_base_url(self, **options):
base_url = self.DEFAULTS['base_url']
if 'base_url' in options:
base_url = options['base_url']
del(options['base_url'])
return base_url
def _update_user_agent_header(self, options):
user_agent = "{}{} {}".format('Razorpay-Python/', self._get_version(),
self._get_app_details_ua())
if 'headers' in options:
options['headers']['User-Agent'] = user_agent
else:
options['headers'] = {'User-Agent': user_agent}
return options
def _get_version(self):
version = ""
try: # nosemgrep : gitlab.bandit.B110
version = pkg_resources.require("razorpay")[0].version
except DistributionNotFound: # pragma: no cover
pass
return version
def _get_app_details_ua(self):
app_details_ua = ""
app_details = self.get_app_details()
for app in app_details:
if 'title' in app:
app_ua = app['title']
if 'version' in app:
app_ua += "/{}".format(app['version'])
app_details_ua += "{} ".format(app_ua)
return app_details_ua
def set_app_details(self, app_details):
self.app_details.append(app_details)
def get_app_details(self):
return self.app_details
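    # Illustrative: details registered via set_app_details are folded into the
    # User-Agent header, e.g. {"title": "MyApp", "version": "1.0"} yields
    # "Razorpay-Python/<version> MyApp/1.0".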
def request(self, method, path, **options):
"""
Dispatches a request to the Razorpay HTTP API
"""
options = self._update_user_agent_header(options)
url = "{}{}".format(self.base_url, path)
response = getattr(self.session, method)(url, auth=self.auth,
verify=self.cert_path,
**options)
if ((response.status_code >= HTTP_STATUS_CODE.OK) and
(response.status_code < HTTP_STATUS_CODE.REDIRECT)):
            return json.dumps({}) if response.status_code == 204 else response.json()
else:
msg = ""
code = ""
json_response = response.json()
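            # Razorpay error payloads are expected to look like (illustrative):
            #   {"error": {"code": "BAD_REQUEST_ERROR", "description": "..."}}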
if 'error' in json_response:
if 'description' in json_response['error']:
msg = json_response['error']['description']
if 'code' in json_response['error']:
code = str(json_response['error']['code'])
if str.upper(code) == ERROR_CODE.BAD_REQUEST_ERROR:
raise BadRequestError(msg)
elif str.upper(code) == ERROR_CODE.GATEWAY_ERROR:
raise GatewayError(msg)
elif str.upper(code) == ERROR_CODE.SERVER_ERROR: # nosemgrep : python.lang.maintainability.useless-ifelse.useless-if-body
raise ServerError(msg)
else:
raise ServerError(msg)
def get(self, path, params, **options):
"""
Parses GET request options and dispatches a request
"""
return self.request('get', path, params=params, **options)
def post(self, path, data, **options):
"""
Parses POST request options and dispatches a request
"""
data, options = self._update_request(data, options)
return self.request('post', path, data=data, **options)
def patch(self, path, data, **options):
"""
Parses PATCH request options and dispatches a request
"""
data, options = self._update_request(data, options)
return self.request('patch', path, data=data, **options)
def delete(self, path, data, **options):
"""
Parses DELETE request options and dispatches a request
"""
data, options = self._update_request(data, options)
return self.request('delete', path, data=data, **options)
def put(self, path, data, **options):
"""
Parses PUT request options and dispatches a request
"""
data, options = self._update_request(data, options)
return self.request('put', path, data=data, **options)
def _update_request(self, data, options):
"""
        Updates the resource data and header options
"""
data = json.dumps(data)
if 'headers' not in options:
options['headers'] = {}
options['headers'].update({'Content-type': 'application/json'})
return data, options
| {
"content_hash": "0cc13b989dbd81e72258fad057b0d598",
"timestamp": "",
"source": "github",
"line_count": 190,
"max_line_length": 133,
"avg_line_length": 32.61052631578947,
"alnum_prop": 0.5714977404777276,
"repo_name": "razorpay/razorpay-python",
"id": "f835e1397e1e05c774ba9b83eeb6beee9a7a62e0",
"size": "6196",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "razorpay/client.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "122124"
}
],
"symlink_target": ""
} |
"""
===============================================================================
Selecting the number of clusters with silhouette analysis on KMeans clustering
===============================================================================
Silhouette analysis can be used to study the separation distance between the
resulting clusters. The silhouette plot displays a measure of how close each
point in one cluster is to points in the neighboring clusters and thus provides
a way to assess parameters like number of clusters visually. This measure has a
range of [-1, 1].
Silhouette coefficients (as these values are referred to) near +1 indicate
that the sample is far away from the neighboring clusters. A value of 0
indicates that the sample is on or very close to the decision boundary between
two neighboring clusters and negative values indicate that those samples might
have been assigned to the wrong cluster.
In this example the silhouette analysis is used to choose an optimal value for
``n_clusters``. The silhouette plot shows that ``n_clusters`` values of 3, 5
and 6 are bad picks for the given data due to the presence of clusters with
below average silhouette scores and also due to wide fluctuations in the size
of the silhouette plots. Silhouette analysis is more ambivalent in deciding
between 2 and 4.
The cluster sizes can also be gauged from the thickness of the silhouette
plots. The silhouette plot for cluster 0 when ``n_clusters`` is equal to 2
is bigger in size owing to the grouping of the 3 sub-clusters into one big
cluster. However, when ``n_clusters`` is equal to 4, all the plots are of
more or less similar thickness and hence of similar sizes, as can also be
verified from the labelled scatter plot on the right.
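For reference, the silhouette coefficient of a single sample ``i`` is
``s(i) = (b(i) - a(i)) / max(a(i), b(i))``, where ``a(i)`` is the mean
distance from ``i`` to the other points of its own cluster and ``b(i)`` is
the mean distance from ``i`` to the points of the nearest other cluster.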
"""
from __future__ import print_function
from sklearn.datasets import make_blobs
from sklearn.cluster import KMeans
from sklearn.metrics import silhouette_samples, silhouette_score
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import numpy as np
print(__doc__)
# Generating the sample data from make_blobs
# This particular setting has one distinct cluster and 3 clusters placed close
# together.
X, y = make_blobs(n_samples=500,
n_features=2,
centers=4,
cluster_std=1,
center_box=(-10.0, 10.0),
shuffle=True,
random_state=1) # For reproducibility
range_n_clusters = [2, 3, 4, 5, 6]
for n_clusters in range_n_clusters:
# Create a subplot with 1 row and 2 columns
fig, (ax1, ax2) = plt.subplots(1, 2)
fig.set_size_inches(18, 7)
# The 1st subplot is the silhouette plot
# The silhouette coefficient can range from -1, 1 but in this example all
# lie within [-0.1, 1]
ax1.set_xlim([-0.1, 1])
# The (n_clusters+1)*10 is for inserting blank space between silhouette
# plots of individual clusters, to demarcate them clearly.
ax1.set_ylim([0, len(X) + (n_clusters + 1) * 10])
# Initialize the clusterer with n_clusters value and a random generator
# seed of 10 for reproducibility.
clusterer = KMeans(n_clusters=n_clusters, random_state=10)
cluster_labels = clusterer.fit_predict(X)
# The silhouette_score gives the average value for all the samples.
# This gives a perspective into the density and separation of the formed
# clusters
silhouette_avg = silhouette_score(X, cluster_labels)
print("For n_clusters =", n_clusters,
"The average silhouette_score is :", silhouette_avg)
# Compute the silhouette scores for each sample
sample_silhouette_values = silhouette_samples(X, cluster_labels)
y_lower = 10
for i in range(n_clusters):
# Aggregate the silhouette scores for samples belonging to
# cluster i, and sort them
ith_cluster_silhouette_values = \
sample_silhouette_values[cluster_labels == i]
ith_cluster_silhouette_values.sort()
size_cluster_i = ith_cluster_silhouette_values.shape[0]
y_upper = y_lower + size_cluster_i
color = cm.spectral(float(i) / n_clusters)
ax1.fill_betweenx(np.arange(y_lower, y_upper),
0, ith_cluster_silhouette_values,
facecolor=color, edgecolor=color, alpha=0.7)
# Label the silhouette plots with their cluster numbers at the middle
ax1.text(-0.05, y_lower + 0.5 * size_cluster_i, str(i))
# Compute the new y_lower for next plot
y_lower = y_upper + 10 # 10 for the 0 samples
ax1.set_title("The silhouette plot for the various clusters.")
ax1.set_xlabel("The silhouette coefficient values")
ax1.set_ylabel("Cluster label")
# The vertical line for average silhouette score of all the values
ax1.axvline(x=silhouette_avg, color="red", linestyle="--")
ax1.set_yticks([]) # Clear the yaxis labels / ticks
ax1.set_xticks([-0.1, 0, 0.2, 0.4, 0.6, 0.8, 1])
# 2nd Plot showing the actual clusters formed
colors = cm.spectral(cluster_labels.astype(float) / n_clusters)
ax2.scatter(X[:, 0], X[:, 1], marker='.', s=30, lw=0, alpha=0.7,
c=colors, edgecolor='k')
# Labeling the clusters
centers = clusterer.cluster_centers_
# Draw white circles at cluster centers
ax2.scatter(centers[:, 0], centers[:, 1], marker='o',
c="white", alpha=1, s=200, edgecolor='k')
for i, c in enumerate(centers):
ax2.scatter(c[0], c[1], marker='$%d$' % i, alpha=1,
s=50, edgecolor='k')
ax2.set_title("The visualization of the clustered data.")
ax2.set_xlabel("Feature space for the 1st feature")
ax2.set_ylabel("Feature space for the 2nd feature")
plt.suptitle(("Silhouette analysis for KMeans clustering on sample data "
"with n_clusters = %d" % n_clusters),
fontsize=14, fontweight='bold')
plt.show()
| {
"content_hash": "60e6569bd732f9d6d858dd43e2fadbb3",
"timestamp": "",
"source": "github",
"line_count": 142,
"max_line_length": 79,
"avg_line_length": 41.92253521126761,
"alnum_prop": 0.6643709054258358,
"repo_name": "raghavrv/scikit-learn",
"id": "ac36bc1fe72e5f89af0084efc178733b6b50773d",
"size": "5953",
"binary": false,
"copies": "25",
"ref": "refs/heads/master",
"path": "examples/cluster/plot_kmeans_silhouette_analysis.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "3366"
},
{
"name": "C",
"bytes": "451996"
},
{
"name": "C++",
"bytes": "140322"
},
{
"name": "Makefile",
"bytes": "1512"
},
{
"name": "PowerShell",
"bytes": "17042"
},
{
"name": "Python",
"bytes": "7103595"
},
{
"name": "Shell",
"bytes": "19783"
}
],
"symlink_target": ""
} |
"""Write a sample config for a given directory containing fastq files
following SRA naming conventions
"""
#--- standard library imports
#
import os
import sys
import logging
import glob
import subprocess
import argparse
import re
#--- third-party imports
#
import yaml
# --- project specific imports
#
# add lib dir for this pipeline installation to PYTHONPATH
LIB_PATH = os.path.abspath(os.path.join(os.path.dirname(
os.path.realpath(__file__)), "..", "lib"))
if LIB_PATH not in sys.path:
sys.path.insert(0, LIB_PATH)
from pipelines import get_init_call
__author__ = "Andreas WILM"
__email__ = "[email protected]"
__copyright__ = "2017 Genome Institute of Singapore"
__license__ = "The MIT License (MIT)"
# global logger
logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter(
'[{asctime}] {levelname:8s} {filename} {message}', style='{'))
logger.addHandler(handler)
def check_expected_files(snakefile):
"""check expected files in snakefile dir"""
is_ok = True
pipeline_basedir = os.path.dirname(snakefile)
pipeline_name = snakefile.split("/")[-2]
expected_files = ['example-dag.pdf', 'README.md', 'Snakefile', 'tests.sh', 'cfg/modules.yaml', 'cfg/references.yaml']
expected_files.extend(['cfg/cluster.{}.yaml'.format(site)
for site in ["GIS", "NSCC", "AWS"]])
expected_files.append(pipeline_name + ".py")
for f in expected_files:
f = os.path.join(pipeline_basedir, f)
if not os.path.exists(f):
print("WARN: Missing file {}".format(f))
is_ok = False
return is_ok
def get_includes_from_snakefile(snakefile):
includes = []
with open(snakefile) as fh:
for line in fh:
if line.startswith("include: "):
f = line.split()[1].replace(":", "").replace('"', "").replace("'", "")
includes.append(os.path.relpath(
os.path.join(os.path.dirname(snakefile), f)))
return includes
def check_benchmark_naming(snakefile):
"""check benchmark file naming"""
is_ok = True
seen_rules = dict()
rules_with_benchmark = dict()
exclude_rules = ['report', 'final', 'prep_bed_files']
with open(snakefile) as fh:
rule = None
for line in fh:
if line.startswith("rule "):
rulename = line.split()[1].replace(":", "")
if rulename in exclude_rules:
continue
seen_rules[rulename] = 1
if line.rstrip().endswith("benchmark:") and '#' not in line:
line = next(fh)
while len(line.strip())==0 or line.strip().startswith("#"):
line = next(fh)
benchmarkout = line.strip()
benchmarkout = benchmarkout.replace("'", "").replace('"', "").replace(")", "")
rules_with_benchmark[rulename] = 1
expsuf = r'[^\w]{}.benchmark.log$'.format(rulename)
if not re.compile(expsuf).search(benchmarkout):
print("WARN: Mismatch in {} for rule {}: expected '{}' to end with '{}'".format(
snakefile, rulename, benchmarkout, expsuf))
is_ok = False
rules_without_benchmark = set(seen_rules.keys()) - set(rules_with_benchmark.keys()) - set(exclude_rules)
if len(rules_without_benchmark) > 0:
is_ok = False
print("WARN: Rules without benchmark in {}: {}".format(
snakefile, ', '.join(rules_without_benchmark)))
return is_ok
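# Illustrative Snakefile fragment that satisfies check_benchmark_naming for a
# rule named "bwa_mem" (the log path is a placeholder):
#   rule bwa_mem:
#       benchmark:
#           "logs/bwa_mem.benchmark.log"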
def check_modules(pipeline_dir):
"""FIXME"""
is_ok = True
module_cfgs = glob.glob(os.path.join(pipeline_dir, "cfg/modules.yaml"))
assert len(module_cfgs) > 0
modules = dict()
for cfg in module_cfgs:
with open(cfg) as fh:
d = yaml.safe_load(fh)
for p, v in d.items():
modules[p] = v
for p, v in modules.items():
m = "{}/{}".format(p, v)
cmd = ' '.join(get_init_call())
cmd += "; module load {}".format(m)
try:
_ = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError:
sys.stderr.write("FAILED: {}\n".format(cmd))
is_ok = False
#else:
# print("OK: {}".format(cmd))
return is_ok
def main(pipelinedirs,
no_modules_check=False, no_benchmark_check=False):
"""main function"""
logger.warning("include other existing tools here: check_cluster_conf.py...")
snakefiles = [os.path.join(d, "Snakefile") for d in pipelinedirs]
if not no_modules_check:
for d in pipelinedirs:
if not check_modules(d):
print("FAILED: Modules check for {}".format(d))
else:
print("OK: Modules check for {}".format(d))
includes = []
for f in snakefiles:
assert os.path.exists(f)
if not check_expected_files(f):
print("FAILED: Expected files for {}".format(f))
else:
print("OK: Expected files for {}".format(f))
includes.extend(get_includes_from_snakefile(f))
for f in list(set(includes)) + snakefiles:
if not no_benchmark_check:
if not check_benchmark_naming(f):
print("FAILED: Benchmarking naming for {}".format(f))
else:
print("OK: Benchmarking naming for {}".format(f))
if __name__ == "__main__":
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('-M', "--no-modules-check", action="store_true",
help="Skip modules check")
parser.add_argument('-B', "--no-benchmark-check", action="store_true",
help="Skip benchmark rule checks")
parser.add_argument('pipelinedir', nargs='*')
args = parser.parse_args()
main(args.pipelinedir,
no_modules_check=args.no_modules_check,
no_benchmark_check=args.no_benchmark_check)
| {
"content_hash": "9133632e37ef573b7c4cda67a19efea5",
"timestamp": "",
"source": "github",
"line_count": 180,
"max_line_length": 121,
"avg_line_length": 34.06111111111111,
"alnum_prop": 0.5728266188223781,
"repo_name": "gis-rpd/pipelines",
"id": "1bc419cc4887df29762d9dc6450b0962436a044e",
"size": "6154",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/pipelint.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "11229"
},
{
"name": "Python",
"bytes": "665053"
},
{
"name": "Shell",
"bytes": "106434"
}
],
"symlink_target": ""
} |
click(Pattern("Ilama.png").targetOffset(23,0))
exit(0) | {
"content_hash": "2ad10ae0ec705a22d59eb8e7c1471d05",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 46,
"avg_line_length": 27,
"alnum_prop": 0.7407407407407407,
"repo_name": "silverbulleters/vanessa-behavoir",
"id": "b70e6ec6b30254ec1870afd3f4c71846276fe7ae",
"size": "54",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "tools/Sikuli/OpenDialogClickFolderSelect.sikuli/OpenDialogClickFolder.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Cucumber",
"bytes": "161783"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('release', '0002_auto_20180815_2248'),
('build', '0014_auto_20180601_2003'),
]
operations = [
migrations.AddField(
model_name='build',
name='release',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='release.Release'),
),
migrations.AddField(
model_name='build',
name='release_relationship_type',
field=models.CharField(blank=True, choices=[('test', 'Release Test'), ('automation', 'Release Automation'), ('manual', 'Manual Release Activity')], max_length=50, null=True),
),
]
| {
"content_hash": "c7977882d9ec9a8a9f357304cd4012ba",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 186,
"avg_line_length": 33.48,
"alnum_prop": 0.6176821983273596,
"repo_name": "SalesforceFoundation/mrbelvedereci",
"id": "3f0377ee604aced1d962d32dd4df47b837bef656",
"size": "911",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "metaci/build/migrations/0015_auto_20180815_2300.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "2069"
},
{
"name": "HTML",
"bytes": "123214"
},
{
"name": "JavaScript",
"bytes": "3993"
},
{
"name": "Python",
"bytes": "245560"
},
{
"name": "Shell",
"bytes": "4590"
}
],
"symlink_target": ""
} |
from django.test import TestCase
from rest_framework.reverse import reverse
class CiviliansTestCase(TestCase):
fixtures = [
'civilians.json',
'series.json',
]
def test_civilian_list(self):
resp = self.client.get(reverse('api:v1:civilian-list'))
self.assertEqual(resp.status_code, 200)
self.assertIsInstance(resp.data, list)
def test_civilian_detail(self):
resp = self.client.get(reverse('api:v1:civilian-detail', kwargs=dict(pk=4)))
self.assertEqual(resp.status_code, 200)
self.assertIsInstance(resp.data, dict)
self.assertIn('id', resp.data)
self.assertEqual(resp.data['id'], 4)
self.assertIn('name', resp.data)
self.assertEqual(resp.data['name'], 'Farkas Bulkmeier')
self.assertIn('description', resp.data)
self.assertRegexpMatches(resp.data['description'], '^Farkas "Bulk" Bulkmeier is a fictional character')
def test_civilian_not_found(self):
resp = self.client.get(reverse('api:v1:civilian-detail', kwargs=dict(pk=9999)))
self.assertEqual(resp.status_code, 404)
| {
"content_hash": "8f7e38c1e9c89924a95b789135bc7cad",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 111,
"avg_line_length": 32.2,
"alnum_prop": 0.6601597160603372,
"repo_name": "reiniervdwindt/power-rangers-api",
"id": "d2272e48cfcbb9120bda68064be08161f90aa598",
"size": "1127",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/api/v1/civilians/tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "617"
},
{
"name": "Python",
"bytes": "70343"
}
],
"symlink_target": ""
} |
import numpy as np
import tensorflow as tf
def xavier_init(fan_in, fan_out, *, const=1.0, dtype=tf.dtypes.float32):
k = const * np.sqrt(6.0 / (fan_in + fan_out))
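    # sqrt(6 / (fan_in + fan_out)) is the standard Glorot/Xavier uniform
    # initialization bound; const=1.0 recovers the textbook form.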
return tf.random.uniform((fan_in, fan_out), minval=-k, maxval=k, dtype=dtype)
def sample_bernoulli(ps):
return tf.nn.relu(tf.sign(ps - tf.random.uniform(tf.shape(ps))))
def sample_gaussian(x, sigma):
    # Gaussian noise needs tf.random.normal; tf.random.uniform takes
    # minval/maxval rather than mean/stddev.
    return x + tf.random.normal(tf.shape(x), mean=0.0, stddev=sigma, dtype=tf.float32)
| {
"content_hash": "24ca19bfe76915e10c62a251d0d4ade0",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 87,
"avg_line_length": 31.2,
"alnum_prop": 0.6773504273504274,
"repo_name": "meownoid/tensorfow-rbm",
"id": "57392a9140466c0030da2228ed615075d8539ef3",
"size": "468",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tfrbm/util.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "11516"
}
],
"symlink_target": ""
} |
import base64
import os
import re
import random
import shutil
import socket
import string
import json
import ipaddress
import charms.leadership
from shutil import move
from shlex import split
from subprocess import check_call
from subprocess import check_output
from subprocess import CalledProcessError
from charms import layer
from charms.layer import snap
from charms.reactive import hook
from charms.reactive import remove_state
from charms.reactive import set_state
from charms.reactive import is_state
from charms.reactive import when, when_any, when_not
from charms.reactive.helpers import data_changed, any_file_changed
from charms.kubernetes.common import get_version
from charms.kubernetes.common import retry
from charms.layer import tls_client
from charmhelpers.core import hookenv
from charmhelpers.core import host
from charmhelpers.core import unitdata
from charmhelpers.core.host import service_stop
from charmhelpers.core.templating import render
from charmhelpers.fetch import apt_install
from charmhelpers.contrib.charmsupport import nrpe
# Override the default nagios shortname regex to allow periods, which we
# need because our bin names contain them (e.g. 'snap.foo.daemon'). The
# default regex in charmhelpers doesn't allow periods, but nagios itself does.
nrpe.Check.shortname_re = '[\.A-Za-z0-9-_]+$'
os.environ['PATH'] += os.pathsep + os.path.join(os.sep, 'snap', 'bin')
def set_upgrade_needed(forced=False):
set_state('kubernetes-master.upgrade-needed')
config = hookenv.config()
previous_channel = config.previous('channel')
require_manual = config.get('require-manual-upgrade')
hookenv.log('set upgrade needed')
if previous_channel is None or not require_manual or forced:
hookenv.log('forcing upgrade')
set_state('kubernetes-master.upgrade-specified')
@when('config.changed.channel')
def channel_changed():
set_upgrade_needed()
def service_cidr():
''' Return the charm's service-cidr config '''
db = unitdata.kv()
frozen_cidr = db.get('kubernetes-master.service-cidr')
return frozen_cidr or hookenv.config('service-cidr')
def freeze_service_cidr():
''' Freeze the service CIDR. Once the apiserver has started, we can no
longer safely change this value. '''
db = unitdata.kv()
db.set('kubernetes-master.service-cidr', service_cidr())
@hook('upgrade-charm')
def check_for_upgrade_needed():
'''An upgrade charm event was triggered by Juju, react to that here.'''
hookenv.status_set('maintenance', 'Checking resources')
migrate_from_pre_snaps()
add_rbac_roles()
set_state('reconfigure.authentication.setup')
remove_state('authentication.setup')
changed = snap_resources_changed()
if changed == 'yes':
set_upgrade_needed()
elif changed == 'unknown':
# We are here on an upgrade from non-rolling master
# Since this upgrade might also include resource updates eg
# juju upgrade-charm kubernetes-master --resource kube-any=my.snap
# we take no risk and forcibly upgrade the snaps.
# Forcibly means we do not prompt the user to call the upgrade action.
set_upgrade_needed(forced=True)
def snap_resources_changed():
'''
    Check if the snapped resources have changed. The first time this method is
    called it will report "unknown".
Returns: "yes" in case a snap resource file has changed,
"no" in case a snap resources are the same as last call,
"unknown" if it is the first time this method is called
'''
db = unitdata.kv()
resources = ['kubectl', 'kube-apiserver', 'kube-controller-manager',
'kube-scheduler', 'cdk-addons']
paths = [hookenv.resource_get(resource) for resource in resources]
if db.get('snap.resources.fingerprint.initialised'):
result = 'yes' if any_file_changed(paths) else 'no'
return result
else:
db.set('snap.resources.fingerprint.initialised', True)
any_file_changed(paths)
return 'unknown'
def add_rbac_roles():
'''Update the known_tokens file with proper groups.'''
tokens_fname = '/root/cdk/known_tokens.csv'
tokens_backup_fname = '/root/cdk/known_tokens.csv.backup'
move(tokens_fname, tokens_backup_fname)
with open(tokens_fname, 'w') as ftokens:
with open(tokens_backup_fname, 'r') as stream:
for line in stream:
record = line.strip().split(',')
# token, username, user, groups
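                # e.g. an old 3-field admin line 'abc123,admin,admin'
                # becomes 'abc123,admin,admin,"system:masters"'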
if record[2] == 'admin' and len(record) == 3:
towrite = '{0},{1},{2},"{3}"\n'.format(record[0],
record[1],
record[2],
'system:masters')
ftokens.write(towrite)
continue
if record[2] == 'kube_proxy':
towrite = '{0},{1},{2}\n'.format(record[0],
'system:kube-proxy',
'kube-proxy')
ftokens.write(towrite)
continue
if record[2] == 'kubelet' and record[1] == 'kubelet':
continue
ftokens.write('{}'.format(line))
def rename_file_idempotent(source, destination):
if os.path.isfile(source):
os.rename(source, destination)
def migrate_from_pre_snaps():
# remove old states
remove_state('kubernetes.components.installed')
remove_state('kubernetes.dashboard.available')
remove_state('kube-dns.available')
remove_state('kubernetes-master.app_version.set')
# disable old services
services = ['kube-apiserver',
'kube-controller-manager',
'kube-scheduler']
for service in services:
hookenv.log('Stopping {0} service.'.format(service))
host.service_stop(service)
# rename auth files
os.makedirs('/root/cdk', exist_ok=True)
rename_file_idempotent('/etc/kubernetes/serviceaccount.key',
'/root/cdk/serviceaccount.key')
rename_file_idempotent('/srv/kubernetes/basic_auth.csv',
'/root/cdk/basic_auth.csv')
rename_file_idempotent('/srv/kubernetes/known_tokens.csv',
'/root/cdk/known_tokens.csv')
# cleanup old files
files = [
"/lib/systemd/system/kube-apiserver.service",
"/lib/systemd/system/kube-controller-manager.service",
"/lib/systemd/system/kube-scheduler.service",
"/etc/default/kube-defaults",
"/etc/default/kube-apiserver.defaults",
"/etc/default/kube-controller-manager.defaults",
"/etc/default/kube-scheduler.defaults",
"/srv/kubernetes",
"/home/ubuntu/kubectl",
"/usr/local/bin/kubectl",
"/usr/local/bin/kube-apiserver",
"/usr/local/bin/kube-controller-manager",
"/usr/local/bin/kube-scheduler",
"/etc/kubernetes"
]
for file in files:
if os.path.isdir(file):
hookenv.log("Removing directory: " + file)
shutil.rmtree(file)
elif os.path.isfile(file):
hookenv.log("Removing file: " + file)
os.remove(file)
@when('kubernetes-master.upgrade-needed')
@when_not('kubernetes-master.upgrade-specified')
def upgrade_needed_status():
msg = 'Needs manual upgrade, run the upgrade action'
hookenv.status_set('blocked', msg)
@when('kubernetes-master.upgrade-specified')
def do_upgrade():
install_snaps()
remove_state('kubernetes-master.upgrade-needed')
remove_state('kubernetes-master.upgrade-specified')
def install_snaps():
channel = hookenv.config('channel')
hookenv.status_set('maintenance', 'Installing kubectl snap')
snap.install('kubectl', channel=channel, classic=True)
hookenv.status_set('maintenance', 'Installing kube-apiserver snap')
snap.install('kube-apiserver', channel=channel)
hookenv.status_set('maintenance',
'Installing kube-controller-manager snap')
snap.install('kube-controller-manager', channel=channel)
hookenv.status_set('maintenance', 'Installing kube-scheduler snap')
snap.install('kube-scheduler', channel=channel)
hookenv.status_set('maintenance', 'Installing cdk-addons snap')
snap.install('cdk-addons', channel=channel)
snap_resources_changed()
set_state('kubernetes-master.snaps.installed')
remove_state('kubernetes-master.components.started')
@when('config.changed.client_password', 'leadership.is_leader')
def password_changed():
"""Handle password change via the charms config."""
password = hookenv.config('client_password')
if password == "" and is_state('client.password.initialised'):
# password_changed is called during an upgrade. Nothing to do.
return
elif password == "":
# Password not initialised
password = token_generator()
setup_basic_auth(password, "admin", "admin")
set_state('reconfigure.authentication.setup')
remove_state('authentication.setup')
set_state('client.password.initialised')
@when('cni.connected')
@when_not('cni.configured')
def configure_cni(cni):
''' Set master configuration on the CNI relation. This lets the CNI
subordinate know that we're the master so it can respond accordingly. '''
cni.set_config(is_master=True, kubeconfig_path='')
@when('leadership.is_leader')
@when_not('authentication.setup')
def setup_leader_authentication():
'''Setup basic authentication and token access for the cluster.'''
service_key = '/root/cdk/serviceaccount.key'
basic_auth = '/root/cdk/basic_auth.csv'
known_tokens = '/root/cdk/known_tokens.csv'
hookenv.status_set('maintenance', 'Rendering authentication templates.')
keys = [service_key, basic_auth, known_tokens]
# Try first to fetch data from an old leadership broadcast.
if not get_keys_from_leader(keys) \
or is_state('reconfigure.authentication.setup'):
last_pass = get_password('basic_auth.csv', 'admin')
setup_basic_auth(last_pass, 'admin', 'admin', 'system:masters')
if not os.path.isfile(known_tokens):
touch(known_tokens)
# Generate the default service account token key
os.makedirs('/root/cdk', exist_ok=True)
if not os.path.isfile(service_key):
cmd = ['openssl', 'genrsa', '-out', service_key,
'2048']
check_call(cmd)
remove_state('reconfigure.authentication.setup')
# read service account key for syndication
leader_data = {}
for f in [known_tokens, basic_auth, service_key]:
with open(f, 'r') as fp:
leader_data[f] = fp.read()
    # this is slightly opaque, but we are sending each file's contents under
    # its file path as a key.
# eg:
# {'/root/cdk/serviceaccount.key': 'RSA:2471731...'}
charms.leadership.leader_set(leader_data)
remove_state('kubernetes-master.components.started')
set_state('authentication.setup')
@when_not('leadership.is_leader')
def setup_non_leader_authentication():
service_key = '/root/cdk/serviceaccount.key'
basic_auth = '/root/cdk/basic_auth.csv'
known_tokens = '/root/cdk/known_tokens.csv'
keys = [service_key, basic_auth, known_tokens]
# The source of truth for non-leaders is the leader.
# Therefore we overwrite_local with whatever the leader has.
if not get_keys_from_leader(keys, overwrite_local=True):
# the keys were not retrieved. Non-leaders have to retry.
return
if not any_file_changed(keys) and is_state('authentication.setup'):
# No change detected and we have already setup the authentication
return
hookenv.status_set('maintenance', 'Rendering authentication templates.')
remove_state('kubernetes-master.components.started')
set_state('authentication.setup')
def get_keys_from_leader(keys, overwrite_local=False):
"""
Gets the broadcasted keys from the leader and stores them in
the corresponding files.
Args:
keys: list of keys. Keys are actually files on the FS.
Returns: True if all key were fetched, False if not.
"""
# This races with other codepaths, and seems to require being created first
# This block may be extracted later, but for now seems to work as intended
os.makedirs('/root/cdk', exist_ok=True)
for k in keys:
# If the path does not exist, assume we need it
if not os.path.exists(k) or overwrite_local:
# Fetch data from leadership broadcast
contents = charms.leadership.leader_get(k)
# Default to logging the warning and wait for leader data to be set
if contents is None:
msg = "Waiting on leaders crypto keys."
hookenv.status_set('waiting', msg)
hookenv.log('Missing content for file {}'.format(k))
return False
# Write out the file and move on to the next item
with open(k, 'w+') as fp:
fp.write(contents)
fp.write('\n')
return True
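# Illustrative sketch (not part of the original charm): how the leadership
# broadcast round-trips. The leader stores each file's contents under its
# path as the key; followers read it back via get_keys_from_leader() and
# rewrite the file. The path and payload below are hypothetical, and
# leader_set/leader_get only work inside a Juju leadership context.
def _example_key_broadcast_roundtrip():
    demo_key = '/root/cdk/known_tokens.csv'
    # Leader side: broadcast the file contents keyed by path.
    charms.leadership.leader_set({demo_key: 'abc123,admin,admin\n'})
    # Follower side: fetch it back and write it to disk.
    assert get_keys_from_leader([demo_key], overwrite_local=True)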
@when('kubernetes-master.snaps.installed')
def set_app_version():
''' Declare the application version to juju '''
version = check_output(['kube-apiserver', '--version'])
hookenv.application_version_set(version.split(b' v')[-1].rstrip())
@when('cdk-addons.configured', 'kube-api-endpoint.available',
'kube-control.connected')
@when_not('kubernetes-master.upgrade-needed')
def idle_status(kube_api, kube_control):
''' Signal at the end of the run that we are running. '''
if not all_kube_system_pods_running():
hookenv.status_set('waiting', 'Waiting for kube-system pods to start')
elif hookenv.config('service-cidr') != service_cidr():
msg = 'WARN: cannot change service-cidr, still using ' + service_cidr()
hookenv.status_set('active', msg)
else:
# All services should be up and running at this point. Double-check...
failing_services = master_services_down()
if len(failing_services) == 0:
hookenv.status_set('active', 'Kubernetes master running.')
else:
msg = 'Stopped services: {}'.format(','.join(failing_services))
hookenv.status_set('blocked', msg)
def master_services_down():
"""Ensure master services are up and running.
Return: list of failing services"""
services = ['kube-apiserver',
'kube-controller-manager',
'kube-scheduler']
failing_services = []
for service in services:
daemon = 'snap.{}.daemon'.format(service)
if not host.service_running(daemon):
failing_services.append(service)
return failing_services
@when('etcd.available', 'tls_client.server.certificate.saved',
'authentication.setup')
@when_not('kubernetes-master.components.started')
def start_master(etcd):
'''Run the Kubernetes master components.'''
hookenv.status_set('maintenance',
'Configuring the Kubernetes master services.')
freeze_service_cidr()
if not etcd.get_connection_string():
# etcd is not returning a connection string. This happens when
# the master unit disconnects from etcd and is ready to terminate.
# No point in trying to start master services and fail. Just return.
return
# TODO: Make sure below relation is handled on change
# https://github.com/kubernetes/kubernetes/issues/43461
handle_etcd_relation(etcd)
# Add CLI options to all components
configure_apiserver(etcd)
configure_controller_manager()
configure_scheduler()
set_state('kubernetes-master.components.started')
hookenv.open_port(6443)
@when('etcd.available')
def etcd_data_change(etcd):
''' Etcd scale events block master reconfiguration due to the
kubernetes-master.components.started state. We need a way to
    handle these events consistently only when the number of etcd
units has actually changed '''
# key off of the connection string
connection_string = etcd.get_connection_string()
# If the connection string changes, remove the started state to trigger
# handling of the master components
if data_changed('etcd-connect', connection_string):
remove_state('kubernetes-master.components.started')
@when('kube-control.connected')
@when('cdk-addons.configured')
def send_cluster_dns_detail(kube_control):
''' Send cluster DNS info '''
enableKubeDNS = hookenv.config('enable-kube-dns')
dnsDomain = hookenv.config('dns_domain')
dns_ip = None
if enableKubeDNS:
try:
dns_ip = get_dns_ip()
except CalledProcessError:
hookenv.log("kubedns not ready yet")
return
kube_control.set_dns(53, dnsDomain, dns_ip, enableKubeDNS)
@when('kube-control.connected')
@when('snap.installed.kubectl')
@when('leadership.is_leader')
def create_service_configs(kube_control):
"""Create the users for kubelet"""
should_restart = False
# generate the username/pass for the requesting unit
proxy_token = get_token('system:kube-proxy')
if not proxy_token:
setup_tokens(None, 'system:kube-proxy', 'kube-proxy')
proxy_token = get_token('system:kube-proxy')
should_restart = True
client_token = get_token('admin')
if not client_token:
setup_tokens(None, 'admin', 'admin', "system:masters")
client_token = get_token('admin')
should_restart = True
requests = kube_control.auth_user()
for request in requests:
username = request[1]['user']
group = request[1]['group']
kubelet_token = get_token(username)
if not kubelet_token and username and group:
# Usernames have to be in the form of system:node:<nodeName>
userid = "kubelet-{}".format(request[0].split('/')[1])
setup_tokens(None, username, userid, group)
kubelet_token = get_token(username)
kube_control.sign_auth_request(request[0], username,
kubelet_token, proxy_token,
client_token)
should_restart = True
if should_restart:
host.service_restart('snap.kube-apiserver.daemon')
remove_state('authentication.setup')
@when_not('kube-control.connected')
def missing_kube_control():
"""Inform the operator master is waiting for a relation to workers.
    If deploying via bundle this won't happen, but if the operator is upgrading
    a charm in a deployment that pre-dates the kube-control relation, it'll be
missing.
"""
hookenv.status_set('blocked', 'Waiting for workers.')
@when('kube-api-endpoint.available')
def push_service_data(kube_api):
''' Send configuration to the load balancer, and close access to the
public interface '''
kube_api.configure(port=6443)
@when('certificates.available')
def send_data(tls):
'''Send the data that is required to create a server certificate for
this server.'''
# Use the public ip of this unit as the Common Name for the certificate.
common_name = hookenv.unit_public_ip()
# Get the SDN gateway based on the cidr address.
kubernetes_service_ip = get_kubernetes_service_ip()
domain = hookenv.config('dns_domain')
# Create SANs that the tls layer will add to the server cert.
sans = [
hookenv.unit_public_ip(),
hookenv.unit_private_ip(),
socket.gethostname(),
kubernetes_service_ip,
'kubernetes',
'kubernetes.{0}'.format(domain),
'kubernetes.default',
'kubernetes.default.svc',
'kubernetes.default.svc.{0}'.format(domain)
]
# maybe they have extra names they want as SANs
extra_sans = hookenv.config('extra_sans')
    if extra_sans:
sans.extend(extra_sans.split())
# Create a path safe name by removing path characters from the unit name.
certificate_name = hookenv.local_unit().replace('/', '_')
# Request a server cert with this information.
tls.request_server_cert(common_name, sans, certificate_name)
@when('config.changed.extra_sans', 'certificates.available')
def update_certificate(tls):
# Using the config.changed.extra_sans flag to catch changes.
# IP changes will take ~5 minutes or so to propagate, but
# it will update.
send_data(tls)
@when('certificates.server.cert.available',
'kubernetes-master.components.started',
'tls_client.server.certificate.written')
def kick_api_server(tls):
# need to be idempotent and don't want to kick the api server
# without need
if data_changed('cert', tls.get_server_cert()):
# certificate changed, so restart the api server
hookenv.log("Certificate information changed, restarting api server")
restart_apiserver()
tls_client.reset_certificate_write_flag('server')
@when('kubernetes-master.components.started')
def configure_cdk_addons():
''' Configure CDK addons '''
remove_state('cdk-addons.configured')
dbEnabled = str(hookenv.config('enable-dashboard-addons')).lower()
dnsEnabled = str(hookenv.config('enable-kube-dns')).lower()
args = [
'arch=' + arch(),
'dns-ip=' + get_deprecated_dns_ip(),
'dns-domain=' + hookenv.config('dns_domain'),
'enable-dashboard=' + dbEnabled,
'enable-kube-dns=' + dnsEnabled
]
check_call(['snap', 'set', 'cdk-addons'] + args)
if not addons_ready():
hookenv.status_set('waiting', 'Waiting to retry addon deployment')
remove_state('cdk-addons.configured')
return
set_state('cdk-addons.configured')
@retry(times=3, delay_secs=20)
def addons_ready():
"""
    Test if the addons got installed.
    Returns: True if the addons got applied
"""
try:
check_call(['cdk-addons.apply'])
return True
except CalledProcessError:
hookenv.log("Addons are not ready yet.")
return False
@when('loadbalancer.available', 'certificates.ca.available',
'certificates.client.cert.available', 'authentication.setup')
def loadbalancer_kubeconfig(loadbalancer, ca, client):
# Get the potential list of loadbalancers from the relation object.
hosts = loadbalancer.get_addresses_ports()
# Get the public address of loadbalancers so users can access the cluster.
address = hosts[0].get('public-address')
# Get the port of the loadbalancer so users can access the cluster.
port = hosts[0].get('port')
server = 'https://{0}:{1}'.format(address, port)
build_kubeconfig(server)
@when('certificates.ca.available', 'certificates.client.cert.available',
'authentication.setup')
@when_not('loadbalancer.available')
def create_self_config(ca, client):
'''Create a kubernetes configuration for the master unit.'''
server = 'https://{0}:{1}'.format(hookenv.unit_get('public-address'), 6443)
build_kubeconfig(server)
@when('ceph-storage.available')
def ceph_state_control(ceph_admin):
''' Determine if we should remove the state that controls the re-render
and execution of the ceph-relation-changed event because there
are changes in the relationship data, and we should re-render any
configs, keys, and/or service pre-reqs '''
ceph_relation_data = {
'mon_hosts': ceph_admin.mon_hosts(),
'fsid': ceph_admin.fsid(),
'auth_supported': ceph_admin.auth(),
'hostname': socket.gethostname(),
'key': ceph_admin.key()
}
# Re-execute the rendering if the data has changed.
if data_changed('ceph-config', ceph_relation_data):
remove_state('ceph-storage.configured')
@when('ceph-storage.available')
@when_not('ceph-storage.configured')
def ceph_storage(ceph_admin):
'''Ceph on kubernetes will require a few things - namely a ceph
configuration, and the ceph secret key file used for authentication.
    This method will install the client package, and render the requisite files
in order to consume the ceph-storage relation.'''
ceph_context = {
'mon_hosts': ceph_admin.mon_hosts(),
'fsid': ceph_admin.fsid(),
'auth_supported': ceph_admin.auth(),
'use_syslog': "true",
'ceph_public_network': '',
'ceph_cluster_network': '',
'loglevel': 1,
'hostname': socket.gethostname(),
}
# Install the ceph common utilities.
apt_install(['ceph-common'], fatal=True)
etc_ceph_directory = '/etc/ceph'
if not os.path.isdir(etc_ceph_directory):
os.makedirs(etc_ceph_directory)
charm_ceph_conf = os.path.join(etc_ceph_directory, 'ceph.conf')
# Render the ceph configuration from the ceph conf template
render('ceph.conf', charm_ceph_conf, ceph_context)
# The key can rotate independently of other ceph config, so validate it
admin_key = os.path.join(etc_ceph_directory,
'ceph.client.admin.keyring')
try:
with open(admin_key, 'w') as key_file:
key_file.write("[client.admin]\n\tkey = {}\n".format(
ceph_admin.key()))
except IOError as err:
hookenv.log("IOError writing admin.keyring: {}".format(err))
# Enlist the ceph-admin key as a kubernetes secret
if ceph_admin.key():
encoded_key = base64.b64encode(ceph_admin.key().encode('utf-8'))
else:
# We didn't have a key, and cannot proceed. Do not set state and
# allow this method to re-execute
return
context = {'secret': encoded_key.decode('ascii')}
render('ceph-secret.yaml', '/tmp/ceph-secret.yaml', context)
try:
# At first glance this is deceptive. The apply stanza will create if
# it doesn't exist, otherwise it will update the entry, ensuring our
# ceph-secret is always reflective of what we have in /etc/ceph
# assuming we have invoked this anytime that file would change.
cmd = ['kubectl', 'apply', '-f', '/tmp/ceph-secret.yaml']
check_call(cmd)
os.remove('/tmp/ceph-secret.yaml')
except: # NOQA
# the enlistment in kubernetes failed, return and prepare for re-exec
return
# when complete, set a state relating to configuration of the storage
# backend that will allow other modules to hook into this and verify we
# have performed the necessary pre-req steps to interface with a ceph
# deployment.
set_state('ceph-storage.configured')
@when('nrpe-external-master.available')
@when_not('nrpe-external-master.initial-config')
def initial_nrpe_config(nagios=None):
set_state('nrpe-external-master.initial-config')
update_nrpe_config(nagios)
@when('config.changed.authorization-mode',
'kubernetes-master.components.started')
def switch_auth_mode():
config = hookenv.config()
mode = config.get('authorization-mode')
if data_changed('auth-mode', mode):
remove_state('kubernetes-master.components.started')
@when('kubernetes-master.components.started')
@when('nrpe-external-master.available')
@when_any('config.changed.nagios_context',
'config.changed.nagios_servicegroups')
def update_nrpe_config(unused=None):
services = (
'snap.kube-apiserver.daemon',
'snap.kube-controller-manager.daemon',
'snap.kube-scheduler.daemon'
)
hostname = nrpe.get_nagios_hostname()
current_unit = nrpe.get_nagios_unit_name()
nrpe_setup = nrpe.NRPE(hostname=hostname)
nrpe.add_init_service_checks(nrpe_setup, services, current_unit)
nrpe_setup.write()
@when_not('nrpe-external-master.available')
@when('nrpe-external-master.initial-config')
def remove_nrpe_config(nagios=None):
remove_state('nrpe-external-master.initial-config')
# List of systemd services for which the checks will be removed
services = (
'snap.kube-apiserver.daemon',
'snap.kube-controller-manager.daemon',
'snap.kube-scheduler.daemon'
)
# The current nrpe-external-master interface doesn't handle a lot of logic,
# use the charm-helpers code for now.
hostname = nrpe.get_nagios_hostname()
nrpe_setup = nrpe.NRPE(hostname=hostname)
for service in services:
nrpe_setup.remove_check(shortname=service)
def is_privileged():
"""Return boolean indicating whether or not to set allow-privileged=true.
"""
privileged = hookenv.config('allow-privileged')
if privileged == 'auto':
return is_state('kubernetes-master.gpu.enabled')
else:
return privileged == 'true'
@when('config.changed.allow-privileged')
@when('kubernetes-master.components.started')
def on_config_allow_privileged_change():
"""React to changed 'allow-privileged' config value.
"""
remove_state('kubernetes-master.components.started')
remove_state('config.changed.allow-privileged')
@when('config.changed.api-extra-args')
@when('kubernetes-master.components.started')
@when('etcd.available')
def on_config_api_extra_args_change(etcd):
configure_apiserver(etcd)
@when('config.changed.controller-manager-extra-args')
@when('kubernetes-master.components.started')
def on_config_controller_manager_extra_args_change():
configure_controller_manager()
@when('config.changed.scheduler-extra-args')
@when('kubernetes-master.components.started')
def on_config_scheduler_extra_args_change():
configure_scheduler()
@when('kube-control.gpu.available')
@when('kubernetes-master.components.started')
@when_not('kubernetes-master.gpu.enabled')
def on_gpu_available(kube_control):
"""The remote side (kubernetes-worker) is gpu-enabled.
We need to run in privileged mode.
"""
config = hookenv.config()
if config['allow-privileged'] == "false":
hookenv.status_set(
'active',
'GPUs available. Set allow-privileged="auto" to enable.'
)
return
remove_state('kubernetes-master.components.started')
set_state('kubernetes-master.gpu.enabled')
@when('kubernetes-master.gpu.enabled')
@when_not('kubernetes-master.privileged')
def disable_gpu_mode():
"""We were in gpu mode, but the operator has set allow-privileged="false",
so we can't run in gpu mode anymore.
"""
remove_state('kubernetes-master.gpu.enabled')
@hook('stop')
def shutdown():
""" Stop the kubernetes master services
"""
service_stop('snap.kube-apiserver.daemon')
service_stop('snap.kube-controller-manager.daemon')
service_stop('snap.kube-scheduler.daemon')
def restart_apiserver():
prev_state, prev_msg = hookenv.status_get()
hookenv.status_set('maintenance', 'Restarting kube-apiserver')
host.service_restart('snap.kube-apiserver.daemon')
hookenv.status_set(prev_state, prev_msg)
def restart_controller_manager():
prev_state, prev_msg = hookenv.status_get()
hookenv.status_set('maintenance', 'Restarting kube-controller-manager')
host.service_restart('snap.kube-controller-manager.daemon')
hookenv.status_set(prev_state, prev_msg)
def restart_scheduler():
prev_state, prev_msg = hookenv.status_get()
hookenv.status_set('maintenance', 'Restarting kube-scheduler')
host.service_restart('snap.kube-scheduler.daemon')
hookenv.status_set(prev_state, prev_msg)
def arch():
'''Return the package architecture as a string. Raise an exception if the
architecture is not supported by kubernetes.'''
# Get the package architecture for this system.
architecture = check_output(['dpkg', '--print-architecture']).rstrip()
# Convert the binary result into a string.
architecture = architecture.decode('utf-8')
return architecture
def build_kubeconfig(server):
'''Gather the relevant data for Kubernetes configuration objects and create
a config object with that information.'''
# Get the options from the tls-client layer.
layer_options = layer.options('tls-client')
# Get all the paths to the tls information required for kubeconfig.
ca = layer_options.get('ca_certificate_path')
ca_exists = ca and os.path.isfile(ca)
client_pass = get_password('basic_auth.csv', 'admin')
# Do we have everything we need?
if ca_exists and client_pass:
# Create an absolute path for the kubeconfig file.
kubeconfig_path = os.path.join(os.sep, 'home', 'ubuntu', 'config')
# Create the kubeconfig on this system so users can access the cluster.
create_kubeconfig(kubeconfig_path, server, ca,
user='admin', password=client_pass)
# Make the config file readable by the ubuntu users so juju scp works.
cmd = ['chown', 'ubuntu:ubuntu', kubeconfig_path]
check_call(cmd)
def create_kubeconfig(kubeconfig, server, ca, key=None, certificate=None,
user='ubuntu', context='juju-context',
cluster='juju-cluster', password=None, token=None):
'''Create a configuration for Kubernetes based on path using the supplied
arguments for values of the Kubernetes server, CA, key, certificate, user
context and cluster.'''
if not key and not certificate and not password and not token:
raise ValueError('Missing authentication mechanism.')
# token and password are mutually exclusive. Error early if both are
# present. The developer has requested an impossible situation.
# see: kubectl config set-credentials --help
if token and password:
raise ValueError('Token and Password are mutually exclusive.')
# Create the config file with the address of the master server.
cmd = 'kubectl config --kubeconfig={0} set-cluster {1} ' \
'--server={2} --certificate-authority={3} --embed-certs=true'
check_call(split(cmd.format(kubeconfig, cluster, server, ca)))
# Delete old users
cmd = 'kubectl config --kubeconfig={0} unset users'
check_call(split(cmd.format(kubeconfig)))
# Create the credentials using the client flags.
cmd = 'kubectl config --kubeconfig={0} ' \
'set-credentials {1} '.format(kubeconfig, user)
if key and certificate:
cmd = '{0} --client-key={1} --client-certificate={2} '\
'--embed-certs=true'.format(cmd, key, certificate)
if password:
cmd = "{0} --username={1} --password={2}".format(cmd, user, password)
# This is mutually exclusive from password. They will not work together.
if token:
cmd = "{0} --token={1}".format(cmd, token)
check_call(split(cmd))
# Create a default context with the cluster.
cmd = 'kubectl config --kubeconfig={0} set-context {1} ' \
'--cluster={2} --user={3}'
check_call(split(cmd.format(kubeconfig, context, cluster, user)))
# Make the config use this new context.
cmd = 'kubectl config --kubeconfig={0} use-context {1}'
check_call(split(cmd.format(kubeconfig, context)))
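# Usage sketch (illustrative only): building a token-based kubeconfig with
# the helper above. The paths and token are hypothetical; exactly one
# credential mechanism (token, password, or a key/certificate pair) may be
# supplied per call.
def _example_create_kubeconfig():
    create_kubeconfig('/home/ubuntu/config',
                      'https://10.0.0.1:6443',
                      '/root/cdk/ca.crt',
                      user='admin',
                      token='abc123')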
def get_dns_ip():
cmd = "kubectl get service --namespace kube-system kube-dns --output json"
output = check_output(cmd, shell=True).decode()
svc = json.loads(output)
return svc['spec']['clusterIP']
def get_deprecated_dns_ip():
'''We previously hardcoded the dns ip. This function returns the old
hardcoded value for use with older versions of cdk_addons.'''
interface = ipaddress.IPv4Interface(service_cidr())
ip = interface.network.network_address + 10
return ip.exploded
def get_kubernetes_service_ip():
'''Get the IP address for the kubernetes service based on the cidr.'''
interface = ipaddress.IPv4Interface(service_cidr())
# Add .1 at the end of the network
ip = interface.network.network_address + 1
return ip.exploded
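# Worked example (illustrative only): with a service CIDR of
# 10.152.183.0/24, the kubernetes service IP resolves to .1 and the legacy
# hardcoded DNS IP to .10. Only the stdlib ipaddress module is used.
def _example_service_ip_math():
    interface = ipaddress.IPv4Interface('10.152.183.0/24')
    assert (interface.network.network_address + 1).exploded == '10.152.183.1'
    assert (interface.network.network_address + 10).exploded == '10.152.183.10'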
def handle_etcd_relation(reldata):
''' Save the client credentials and set appropriate daemon flags when
etcd declares itself as available'''
# Define where the etcd tls files will be kept.
etcd_dir = '/root/cdk/etcd'
# Create paths to the etcd client ca, key, and cert file locations.
ca = os.path.join(etcd_dir, 'client-ca.pem')
key = os.path.join(etcd_dir, 'client-key.pem')
cert = os.path.join(etcd_dir, 'client-cert.pem')
# Save the client credentials (in relation data) to the paths provided.
reldata.save_client_credentials(key, cert, ca)
def parse_extra_args(config_key):
elements = hookenv.config().get(config_key, '').split()
args = {}
for element in elements:
if '=' in element:
key, _, value = element.partition('=')
args[key] = value
else:
args[element] = 'true'
return args
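# Worked example (illustrative only): extra-args strings are split on
# whitespace; 'key=value' pairs keep their value and bare flags become
# 'true'. This mirrors parse_extra_args on a literal input.
def _example_parse_extra_args():
    args = {}
    for element in 'v=3 profiling'.split():
        if '=' in element:
            key, _, value = element.partition('=')
            args[key] = value
        else:
            args[element] = 'true'
    assert args == {'v': '3', 'profiling': 'true'}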
def configure_kubernetes_service(service, base_args, extra_args_key):
db = unitdata.kv()
prev_args_key = 'kubernetes-master.prev_args.' + service
prev_args = db.get(prev_args_key) or {}
extra_args = parse_extra_args(extra_args_key)
args = {}
for arg in prev_args:
# remove previous args by setting to null
args[arg] = 'null'
for k, v in base_args.items():
args[k] = v
for k, v in extra_args.items():
args[k] = v
cmd = ['snap', 'set', service] + ['%s=%s' % item for item in args.items()]
check_call(cmd)
db.set(prev_args_key, args)
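# Illustrative sketch: options set on a previous run that are absent from
# the new configuration are reset by passing 'null' to snap set, so stale
# flags do not survive a reconfiguration. The values here are hypothetical.
def _example_snap_arg_merge():
    prev_args = {'v': '4', 'profiling': 'true'}
    base_args = {'v': '2'}
    args = {arg: 'null' for arg in prev_args}
    args.update(base_args)
    # Equivalent to: snap set kube-apiserver v=2 profiling=null
    assert args == {'v': '2', 'profiling': 'null'}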
def configure_apiserver(etcd):
api_opts = {}
# Get the tls paths from the layer data.
layer_options = layer.options('tls-client')
ca_cert_path = layer_options.get('ca_certificate_path')
client_cert_path = layer_options.get('client_certificate_path')
client_key_path = layer_options.get('client_key_path')
server_cert_path = layer_options.get('server_certificate_path')
server_key_path = layer_options.get('server_key_path')
if is_privileged():
api_opts['allow-privileged'] = 'true'
set_state('kubernetes-master.privileged')
else:
api_opts['allow-privileged'] = 'false'
remove_state('kubernetes-master.privileged')
# Handle static options for now
api_opts['service-cluster-ip-range'] = service_cidr()
api_opts['min-request-timeout'] = '300'
api_opts['v'] = '4'
api_opts['tls-cert-file'] = server_cert_path
api_opts['tls-private-key-file'] = server_key_path
api_opts['kubelet-certificate-authority'] = ca_cert_path
api_opts['kubelet-client-certificate'] = client_cert_path
api_opts['kubelet-client-key'] = client_key_path
api_opts['logtostderr'] = 'true'
api_opts['insecure-bind-address'] = '127.0.0.1'
api_opts['insecure-port'] = '8080'
api_opts['storage-backend'] = 'etcd2' # FIXME: add etcd3 support
api_opts['basic-auth-file'] = '/root/cdk/basic_auth.csv'
api_opts['token-auth-file'] = '/root/cdk/known_tokens.csv'
api_opts['service-account-key-file'] = '/root/cdk/serviceaccount.key'
etcd_dir = '/root/cdk/etcd'
etcd_ca = os.path.join(etcd_dir, 'client-ca.pem')
etcd_key = os.path.join(etcd_dir, 'client-key.pem')
etcd_cert = os.path.join(etcd_dir, 'client-cert.pem')
api_opts['etcd-cafile'] = etcd_ca
api_opts['etcd-keyfile'] = etcd_key
api_opts['etcd-certfile'] = etcd_cert
api_opts['etcd-servers'] = etcd.get_connection_string()
admission_control = [
'Initializers',
'NamespaceLifecycle',
'LimitRanger',
'ServiceAccount',
'ResourceQuota',
'DefaultTolerationSeconds'
]
auth_mode = hookenv.config('authorization-mode')
if 'Node' in auth_mode:
admission_control.append('NodeRestriction')
api_opts['authorization-mode'] = auth_mode
if get_version('kube-apiserver') < (1, 6):
hookenv.log('Removing DefaultTolerationSeconds from admission-control')
admission_control.remove('DefaultTolerationSeconds')
if get_version('kube-apiserver') < (1, 7):
hookenv.log('Removing Initializers from admission-control')
admission_control.remove('Initializers')
api_opts['admission-control'] = ','.join(admission_control)
configure_kubernetes_service('kube-apiserver', api_opts, 'api-extra-args')
restart_apiserver()
def configure_controller_manager():
controller_opts = {}
# Get the tls paths from the layer data.
layer_options = layer.options('tls-client')
ca_cert_path = layer_options.get('ca_certificate_path')
    # Default to 3 minute resync. TODO: Make this configurable?
controller_opts['min-resync-period'] = '3m'
controller_opts['v'] = '2'
controller_opts['root-ca-file'] = ca_cert_path
controller_opts['logtostderr'] = 'true'
controller_opts['master'] = 'http://127.0.0.1:8080'
controller_opts['service-account-private-key-file'] = \
'/root/cdk/serviceaccount.key'
configure_kubernetes_service('kube-controller-manager', controller_opts,
'controller-manager-extra-args')
restart_controller_manager()
def configure_scheduler():
scheduler_opts = {}
scheduler_opts['v'] = '2'
scheduler_opts['logtostderr'] = 'true'
scheduler_opts['master'] = 'http://127.0.0.1:8080'
configure_kubernetes_service('kube-scheduler', scheduler_opts,
'scheduler-extra-args')
restart_scheduler()
def setup_basic_auth(password=None, username='admin', uid='admin',
groups=None):
    '''Create the htaccess file and the tokens.'''
root_cdk = '/root/cdk'
if not os.path.isdir(root_cdk):
os.makedirs(root_cdk)
htaccess = os.path.join(root_cdk, 'basic_auth.csv')
if not password:
password = token_generator()
with open(htaccess, 'w') as stream:
if groups:
stream.write('{0},{1},{2},"{3}"'.format(password,
username, uid, groups))
else:
stream.write('{0},{1},{2}'.format(password, username, uid))
def setup_tokens(token, username, user, groups=None):
'''Create a token file for kubernetes authentication.'''
root_cdk = '/root/cdk'
if not os.path.isdir(root_cdk):
os.makedirs(root_cdk)
known_tokens = os.path.join(root_cdk, 'known_tokens.csv')
if not token:
token = token_generator()
with open(known_tokens, 'a') as stream:
if groups:
stream.write('{0},{1},{2},"{3}"\n'.format(token,
username,
user,
groups))
else:
stream.write('{0},{1},{2}\n'.format(token, username, user))
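# Illustrative note: the rows written above are the CSV shapes consumed by
# kube-apiserver via --basic-auth-file and --token-auth-file. The
# credentials shown are hypothetical.
def _example_auth_csv_rows():
    basic_auth_row = 'secretpw,admin,admin,"system:masters"'
    known_tokens_row = 'abc123,system:kube-proxy,kube-proxy'
    return basic_auth_row, known_tokens_row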
def get_password(csv_fname, user):
'''Get the password of user within the csv file provided.'''
root_cdk = '/root/cdk'
tokens_fname = os.path.join(root_cdk, csv_fname)
if not os.path.isfile(tokens_fname):
return None
with open(tokens_fname, 'r') as stream:
for line in stream:
record = line.split(',')
if record[1] == user:
return record[0]
return None
def get_token(username):
"""Grab a token from the static file if present. """
return get_password('known_tokens.csv', username)
def set_token(password, save_salt):
''' Store a token so it can be recalled later by token_generator.
param: password - the password to be stored
param: save_salt - the key to store the value of the token.'''
db = unitdata.kv()
db.set(save_salt, password)
return db.get(save_salt)
def token_generator(length=32):
''' Generate a random token for use in passwords and account tokens.
param: length - the length of the token to generate'''
alpha = string.ascii_letters + string.digits
token = ''.join(random.SystemRandom().choice(alpha) for _ in range(length))
return token
@retry(times=3, delay_secs=10)
def all_kube_system_pods_running():
''' Check pod status in the kube-system namespace. Returns True if all
pods are running, False otherwise. '''
cmd = ['kubectl', 'get', 'po', '-n', 'kube-system', '-o', 'json']
try:
output = check_output(cmd).decode('utf-8')
except CalledProcessError:
hookenv.log('failed to get kube-system pod status')
return False
result = json.loads(output)
for pod in result['items']:
status = pod['status']['phase']
# Evicted nodes should re-spawn
if status != 'Running' and \
pod['status'].get('reason', '') != 'Evicted':
return False
return True
def apiserverVersion():
cmd = 'kube-apiserver --version'.split()
version_string = check_output(cmd).decode('utf-8')
return tuple(int(q) for q in re.findall("[0-9]+", version_string)[:3])
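# Worked example (illustrative only): the regex pulls the first three
# integer groups out of a version banner such as 'Kubernetes v1.7.4'.
def _example_version_parse():
    banner = 'Kubernetes v1.7.4'
    assert tuple(int(q) for q in re.findall("[0-9]+", banner)[:3]) == (1, 7, 4)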
def touch(fname):
try:
os.utime(fname, None)
except OSError:
open(fname, 'a').close()
| {
"content_hash": "4cf13292b20499d8ce04d971a5ff65e4",
"timestamp": "",
"source": "github",
"line_count": 1246,
"max_line_length": 79,
"avg_line_length": 36.08667736757624,
"alnum_prop": 0.6516768970732141,
"repo_name": "KarolKraskiewicz/kubernetes",
"id": "d27caf59f57e68ee6f606c397b87837188e40c91",
"size": "45575",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "cluster/juju/layers/kubernetes-master/reactive/kubernetes_master.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "2525"
},
{
"name": "Go",
"bytes": "42565153"
},
{
"name": "HTML",
"bytes": "2898596"
},
{
"name": "Makefile",
"bytes": "74967"
},
{
"name": "Nginx",
"bytes": "595"
},
{
"name": "PowerShell",
"bytes": "4261"
},
{
"name": "Protocol Buffer",
"bytes": "556257"
},
{
"name": "Python",
"bytes": "2354106"
},
{
"name": "Ruby",
"bytes": "1591"
},
{
"name": "SaltStack",
"bytes": "52409"
},
{
"name": "Shell",
"bytes": "1650423"
}
],
"symlink_target": ""
} |
from datapackage_pipelines_mojp.common.processors.base_processors import BaseProcessor
import logging, datetime
from copy import deepcopy
class Processor(BaseProcessor):
def _filter_resource_descriptor(self, resource_descriptor):
super(Processor, self)._filter_resource_descriptor(resource_descriptor)
for field in self._parameters["fields"]:
field = deepcopy(field)
field.pop("value")
resource_descriptor["schema"]["fields"].append(field)
def _filter_row(self, row):
for field in self._parameters["fields"]:
value = field["value"]
if field["type"] == "datetime" and value == "now":
value = datetime.datetime.now()
row[field["name"]] = value
return row
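# Illustrative sketch (not part of the processor): a hypothetical
# pipeline-spec parameters block this processor consumes. Each field
# declares a constant 'value'; a datetime field whose value is 'now' is
# stamped at processing time instead.
EXAMPLE_PARAMETERS = {
    "fields": [
        {"name": "source", "type": "string", "value": "example-source"},
        {"name": "fetched_at", "type": "datetime", "value": "now"},
    ]
}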
if __name__ == '__main__':
Processor.main()
| {
"content_hash": "b5acc49008fff220ada71dd3ff235921",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 86,
"avg_line_length": 33.4,
"alnum_prop": 0.6287425149700598,
"repo_name": "Beit-Hatfutsot/mojp-dbs-pipelines",
"id": "2302e8dafa86c7ddefd50116844604c68a13d17e",
"size": "835",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "datapackage_pipelines_mojp/common/processors/add_fields.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "195876"
},
{
"name": "Shell",
"bytes": "4705"
}
],
"symlink_target": ""
} |
import base64
import urllib.request, urllib.parse, urllib.error
import urllib.parse
from saml2.authn_context import INTERNETPROTOCOLPASSWORD
from saml2.response import LogoutResponse
from saml2.client import Saml2Client
from saml2 import samlp, BINDING_HTTP_POST, BINDING_HTTP_REDIRECT
from saml2 import saml, config, class_name
from saml2.config import SPConfig
from saml2.saml import NAMEID_FORMAT_PERSISTENT
from saml2.saml import NAMEID_FORMAT_TRANSIENT
from saml2.saml import NameID
from saml2.server import Server
from saml2.time_util import in_a_while
from py.test import raises
from fakeIDP import FakeIDP, unpack_form
AUTHN = {
"class_ref": INTERNETPROTOCOLPASSWORD,
"authn_auth": "http://www.example.com/login"
}
def for_me(condition, me):
for restriction in condition.audience_restriction:
audience = restriction.audience
if audience.text.strip() == me:
return True
def ava(attribute_statement):
result = {}
for attribute in attribute_statement.attribute:
# Check name_format ??
name = attribute.name.strip()
result[name] = []
for value in attribute.attribute_value:
result[name].append(value.text.strip())
return result
def _leq(l1, l2):
return set(l1) == set(l2)
# def test_parse_3():
# xml_response = open(XML_RESPONSE_FILE3).read()
# response = samlp.response_from_string(xml_response)
# client = Saml2Client({})
# (ava, name_id, real_uri) = \
# client.do_response(response, "xenosmilus.umdc.umu.se")
# print 40*"="
# print ava
# print 40*","
# print name_id
# assert False
REQ1 = {"1.2.14": """<?xml version='1.0' encoding='UTF-8'?>
<ns0:AttributeQuery Destination="https://idp.example.com/idp/" ID="id1"
IssueInstant="%s" Version="2.0" xmlns:ns0="urn:oasis:names:tc:SAML:2
.0:protocol"><ns1:Issuer Format="urn:oasis:names:tc:SAML:2
.0:nameid-format:entity" xmlns:ns1="urn:oasis:names:tc:SAML:2
.0:assertion">urn:mace:example.com:saml:roland:sp</ns1:Issuer><ns1:Subject
xmlns:ns1="urn:oasis:names:tc:SAML:2.0:assertion"><ns1:NameID
Format="urn:oasis:names:tc:SAML:2
.0:nameid-format:persistent">E8042FB4-4D5B-48C3-8E14-8EDD852790DD</ns1:NameID
></ns1:Subject></ns0:AttributeQuery>""",
"1.2.16": """<?xml version='1.0' encoding='UTF-8'?>
<ns0:AttributeQuery xmlns:ns0="urn:oasis:names:tc:SAML:2.0:protocol"
xmlns:ns1="urn:oasis:names:tc:SAML:2.0:assertion" Destination="https://idp
.example.com/idp/" ID="id1" IssueInstant="%s" Version="2.0"><ns1:Issuer
Format="urn:oasis:names:tc:SAML:2.0:nameid-format:entity">urn:mace:example
.com:saml:roland:sp</ns1:Issuer><ns1:Subject><ns1:NameID
Format="urn:oasis:names:tc:SAML:2
.0:nameid-format:persistent">E8042FB4-4D5B-48C3-8E14-8EDD852790DD</ns1:NameID
></ns1:Subject></ns0:AttributeQuery>"""}
nid = NameID(name_qualifier="foo", format=NAMEID_FORMAT_TRANSIENT,
text="123456")
class TestClient:
def setup_class(self):
self.server = Server("idp_conf")
conf = config.SPConfig()
conf.load_file("server_conf")
self.client = Saml2Client(conf)
def test_create_attribute_query1(self):
req_id, req = self.client.create_attribute_query(
"https://idp.example.com/idp/",
"E8042FB4-4D5B-48C3-8E14-8EDD852790DD",
format=saml.NAMEID_FORMAT_PERSISTENT,
message_id="id1")
reqstr = req.to_string()
assert req.destination == "https://idp.example.com/idp/"
assert req.id == "id1"
assert req.version == "2.0"
subject = req.subject
name_id = subject.name_id
assert name_id.format == saml.NAMEID_FORMAT_PERSISTENT
assert name_id.text == "E8042FB4-4D5B-48C3-8E14-8EDD852790DD"
issuer = req.issuer
assert issuer.text == "urn:mace:example.com:saml:roland:sp"
attrq = samlp.attribute_query_from_string(reqstr)
print((attrq.keyswv()))
assert _leq(attrq.keyswv(), ['destination', 'subject', 'issue_instant',
'version', 'id', 'issuer'])
assert attrq.destination == req.destination
assert attrq.id == req.id
assert attrq.version == req.version
assert attrq.issuer.text == issuer.text
assert attrq.issue_instant == req.issue_instant
assert attrq.subject.name_id.format == name_id.format
assert attrq.subject.name_id.text == name_id.text
def test_create_attribute_query2(self):
req_id, req = self.client.create_attribute_query(
"https://idp.example.com/idp/",
"E8042FB4-4D5B-48C3-8E14-8EDD852790DD",
attribute={
("urn:oid:2.5.4.42",
"urn:oasis:names:tc:SAML:2.0:attrname-format:uri",
"givenName"): None,
("urn:oid:2.5.4.4",
"urn:oasis:names:tc:SAML:2.0:attrname-format:uri",
"surname"): None,
("urn:oid:1.2.840.113549.1.9.1",
"urn:oasis:names:tc:SAML:2.0:attrname-format:uri"): None,
},
format=saml.NAMEID_FORMAT_PERSISTENT,
message_id="id1")
print((req.to_string()))
assert req.destination == "https://idp.example.com/idp/"
assert req.id == "id1"
assert req.version == "2.0"
subject = req.subject
name_id = subject.name_id
assert name_id.format == saml.NAMEID_FORMAT_PERSISTENT
assert name_id.text == "E8042FB4-4D5B-48C3-8E14-8EDD852790DD"
assert len(req.attribute) == 3
# one is givenName
seen = []
for attribute in req.attribute:
if attribute.name == "urn:oid:2.5.4.42":
assert attribute.name_format == saml.NAME_FORMAT_URI
assert attribute.friendly_name == "givenName"
seen.append("givenName")
elif attribute.name == "urn:oid:2.5.4.4":
assert attribute.name_format == saml.NAME_FORMAT_URI
assert attribute.friendly_name == "surname"
seen.append("surname")
elif attribute.name == "urn:oid:1.2.840.113549.1.9.1":
assert attribute.name_format == saml.NAME_FORMAT_URI
if getattr(attribute, "friendly_name"):
assert False
seen.append("email")
assert _leq(seen, ["givenName", "surname", "email"])
def test_create_attribute_query_3(self):
req_id, req = self.client.create_attribute_query(
"https://aai-demo-idp.switch.ch/idp/shibboleth",
"_e7b68a04488f715cda642fbdd90099f5",
format=saml.NAMEID_FORMAT_TRANSIENT,
message_id="id1")
assert isinstance(req, samlp.AttributeQuery)
assert req.destination == "https://aai-demo-idp.switch" \
".ch/idp/shibboleth"
assert req.id == "id1"
assert req.version == "2.0"
assert req.issue_instant
assert req.issuer.text == "urn:mace:example.com:saml:roland:sp"
nameid = req.subject.name_id
assert nameid.format == saml.NAMEID_FORMAT_TRANSIENT
assert nameid.text == "_e7b68a04488f715cda642fbdd90099f5"
def test_create_auth_request_0(self):
ar_id, areq = self.client.create_authn_request(
"http://www.example.com/sso", message_id="id1")
ar_str = areq.to_string()
ar = samlp.authn_request_from_string(ar_str)
#print(ar)
assert ar.assertion_consumer_service_url == ("http://lingon.catalogix"
".se:8087/")
assert ar.destination == "http://www.example.com/sso"
assert ar.protocol_binding == BINDING_HTTP_POST
assert ar.version == "2.0"
assert ar.provider_name == "urn:mace:example.com:saml:roland:sp"
assert ar.issuer.text == "urn:mace:example.com:saml:roland:sp"
nid_policy = ar.name_id_policy
assert nid_policy.allow_create == "false"
assert nid_policy.format == saml.NAMEID_FORMAT_TRANSIENT
def test_create_auth_request_vo(self):
assert list(self.client.config.vorg.keys()) == [
"urn:mace:example.com:it:tek"]
ar_id, areq = self.client.create_authn_request(
"http://www.example.com/sso",
"urn:mace:example.com:it:tek", # vo
nameid_format=NAMEID_FORMAT_PERSISTENT,
message_id="666")
ar_str = "%s" % areq
ar = samlp.authn_request_from_string(ar_str)
print(ar)
assert ar.id == "666"
assert ar.assertion_consumer_service_url == ("http://lingon.catalogix"
".se:8087/")
assert ar.destination == "http://www.example.com/sso"
assert ar.protocol_binding == BINDING_HTTP_POST
assert ar.version == "2.0"
assert ar.provider_name == "urn:mace:example.com:saml:roland:sp"
assert ar.issuer.text == "urn:mace:example.com:saml:roland:sp"
nid_policy = ar.name_id_policy
assert nid_policy.allow_create == "false"
assert nid_policy.format == saml.NAMEID_FORMAT_PERSISTENT
assert nid_policy.sp_name_qualifier == "urn:mace:example.com:it:tek"
def test_sign_auth_request_0(self):
#print self.client.config
req_id, a_req = self.client.create_authn_request(
"http://www.example.com/sso", sign=True, message_id="id1")
if isinstance(a_req, bytes):
ar_str = a_req
else:
ar_str = a_req.to_string()
ar = samlp.authn_request_from_string(ar_str)
assert ar
assert ar.signature
assert ar.signature.signature_value
signed_info = ar.signature.signed_info
#print signed_info
assert len(signed_info.reference) == 1
assert signed_info.reference[0].uri == "#id1"
assert signed_info.reference[0].digest_value
print("------------------------------------------------")
try:
assert self.client.sec.correctly_signed_authn_request(
ar_str, self.client.config.xmlsec_binary,
self.client.config.metadata)
except Exception: # missing certificate
self.client.sec.verify_signature(ar_str, node_name=class_name(ar))
def test_response(self):
IDP = "urn:mace:example.com:saml:roland:idp"
ava = {"givenName": ["Derek"], "surName": ["Jeter"],
"mail": ["[email protected]"], "title": ["The man"]}
nameid_policy = samlp.NameIDPolicy(allow_create="false",
format=saml.NAMEID_FORMAT_PERSISTENT)
resp = self.server.create_authn_response(
identity=ava,
in_response_to="id1",
destination="http://lingon.catalogix.se:8087/",
sp_entity_id="urn:mace:example.com:saml:roland:sp",
name_id_policy=nameid_policy,
userid="[email protected]",
authn=AUTHN)
resp_str = "%s" % resp
resp_str = base64.encodebytes(resp_str.encode("utf8"))
authn_response = self.client.parse_authn_request_response(
resp_str, BINDING_HTTP_POST,
{"id1": "http://foo.example.com/service"})
assert authn_response is not None
assert authn_response.issuer() == IDP
assert authn_response.response.assertion[0].issuer.text == IDP
session_info = authn_response.session_info()
print(session_info)
assert session_info["ava"] == {'mail': ['[email protected]'],
'givenName': ['Derek'],
'sn': ['Jeter'],
'title': ["The man"]}
assert session_info["issuer"] == IDP
assert session_info["came_from"] == "http://foo.example.com/service"
response = samlp.response_from_string(authn_response.xmlstr)
assert response.destination == "http://lingon.catalogix.se:8087/"
# One person in the cache
assert len(self.client.users.subjects()) == 1
subject_id = self.client.users.subjects()[0]
print(("||||", self.client.users.get_info_from(subject_id, IDP)))
# The information I have about the subject comes from one source
assert self.client.users.issuers_of_info(subject_id) == [IDP]
# --- authenticate another person
ava = {"givenName": ["Alfonson"], "surName": ["Soriano"],
"mail": ["[email protected]"], "title": ["outfielder"]}
resp_str = "%s" % self.server.create_authn_response(
identity=ava,
in_response_to="id2",
destination="http://lingon.catalogix.se:8087/",
sp_entity_id="urn:mace:example.com:saml:roland:sp",
name_id_policy=nameid_policy,
userid="[email protected]",
authn=AUTHN)
resp_str = base64.encodebytes(resp_str.encode("utf8"))
self.client.parse_authn_request_response(
resp_str, BINDING_HTTP_POST,
{"id2": "http://foo.example.com/service"})
# Two persons in the cache
assert len(self.client.users.subjects()) == 2
issuers = [self.client.users.issuers_of_info(s) for s in
self.client.users.subjects()]
# The information I have about the subjects comes from the same source
print(issuers)
assert issuers == [[IDP], [IDP]]
def test_init_values(self):
entityid = self.client.config.entityid
print(entityid)
assert entityid == "urn:mace:example.com:saml:roland:sp"
print((self.client.metadata.with_descriptor("idpsso")))
location = self.client._sso_location()
print(location)
assert location == 'http://localhost:8088/sso'
my_name = self.client._my_name()
print(my_name)
assert my_name == "urn:mace:example.com:saml:roland:sp"
# Below can only be done with dummy Server
IDP = "urn:mace:example.com:saml:roland:idp"
class TestClientWithDummy():
def setup_class(self):
self.server = FakeIDP("idp_all_conf")
conf = SPConfig()
conf.load_file("servera_conf")
self.client = Saml2Client(conf)
self.client.send = self.server.receive
def test_do_authn(self):
binding = BINDING_HTTP_REDIRECT
response_binding = BINDING_HTTP_POST
sid, http_args = self.client.prepare_for_authenticate(
IDP, "http://www.example.com/relay_state",
binding=binding, response_binding=response_binding)
assert isinstance(sid, str)
assert len(http_args) == 4
assert http_args["headers"][0][0] == "Location"
assert http_args["data"] == []
redirect_url = http_args["headers"][0][1]
_, _, _, _, qs, _ = urllib.parse.urlparse(redirect_url)
qs_dict = urllib.parse.parse_qs(qs)
req = self.server.parse_authn_request(qs_dict["SAMLRequest"][0],
binding)
resp_args = self.server.response_args(req.message, [response_binding])
assert resp_args["binding"] == response_binding
def test_do_attribute_query(self):
response = self.client.do_attribute_query(
IDP, "_e7b68a04488f715cda642fbdd90099f5",
attribute={"eduPersonAffiliation": None},
nameid_format=NAMEID_FORMAT_TRANSIENT)
def test_logout_1(self):
""" one IdP/AA logout from"""
# information about the user from an IdP
session_info = {
"name_id": nid,
"issuer": "urn:mace:example.com:saml:roland:idp",
"not_on_or_after": in_a_while(minutes=15),
"ava": {
"givenName": "Anders",
"surName": "Andersson",
"mail": "[email protected]"
}
}
self.client.users.add_information_about_person(session_info)
entity_ids = self.client.users.issuers_of_info(nid)
assert entity_ids == ["urn:mace:example.com:saml:roland:idp"]
resp = self.client.global_logout(nid, "Tired", in_a_while(minutes=5))
print(resp)
assert resp
assert len(resp) == 1
assert list(resp.keys()) == entity_ids
response = resp[entity_ids[0]]
assert isinstance(response, LogoutResponse)
def test_post_sso(self):
binding = BINDING_HTTP_POST
response_binding = BINDING_HTTP_POST
sid, http_args = self.client.prepare_for_authenticate(
"urn:mace:example.com:saml:roland:idp", relay_state="really",
binding=binding, response_binding=response_binding)
_dic = unpack_form(http_args["data"][3])
req = self.server.parse_authn_request(_dic["SAMLRequest"], binding)
resp_args = self.server.response_args(req.message, [response_binding])
assert resp_args["binding"] == response_binding
# Normally a response would now be sent back to the users web client
# Here I fake what the client will do
# create the form post
http_args["data"] = urllib.parse.urlencode(_dic)
http_args["method"] = "POST"
http_args["dummy"] = _dic["SAMLRequest"]
http_args["headers"] = [('Content-type',
'application/x-www-form-urlencoded')]
response = self.client.send(**http_args)
print((response.text))
_dic = unpack_form(response.text[3], "SAMLResponse")
resp = self.client.parse_authn_request_response(_dic["SAMLResponse"],
BINDING_HTTP_POST,
{sid: "/"})
ac = resp.assertion.authn_statement[0].authn_context
assert ac.authenticating_authority[0].text == \
'http://www.example.com/login'
assert ac.authn_context_class_ref.text == INTERNETPROTOCOLPASSWORD
# if __name__ == "__main__":
# tc = TestClient()
# tc.setup_class()
# tc.test_response()
if __name__ == "__main__":
tc = TestClient()
tc.setup_class()
tc.test_create_auth_request_0()
| {
"content_hash": "d2d110eb3912f6e785f0e82acf423e18",
"timestamp": "",
"source": "github",
"line_count": 453,
"max_line_length": 80,
"avg_line_length": 40.549668874172184,
"alnum_prop": 0.5900702270129021,
"repo_name": "rohe/pysaml2-3",
"id": "cb684c756870d6a08ca39d1b67a1822d8e546171",
"size": "18416",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_51_client.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "5367558"
},
{
"name": "Shell",
"bytes": "6973"
}
],
"symlink_target": ""
} |
import uuid
import sqlalchemy
from keystone.common import sql
from keystone import exception
class PolicyAssociation(sql.ModelBase, sql.ModelDictMixin):
__tablename__ = 'policy_association'
attributes = ['policy_id', 'endpoint_id', 'region_id', 'service_id']
# The id column is never exposed outside this module. It only exists to
# provide a primary key, given that the real columns we would like to use
# (endpoint_id, service_id, region_id) can be null
id = sql.Column(sql.String(64), primary_key=True)
policy_id = sql.Column(sql.String(64), nullable=False)
endpoint_id = sql.Column(sql.String(64), nullable=True)
service_id = sql.Column(sql.String(64), nullable=True)
region_id = sql.Column(sql.String(64), nullable=True)
__table_args__ = (sql.UniqueConstraint('endpoint_id', 'service_id',
'region_id'),)
def to_dict(self):
"""Returns the model's attributes as a dictionary.
We override the standard method in order to hide the id column,
since this only exists to provide the table with a primary key.
"""
d = {}
for attr in self.__class__.attributes:
d[attr] = getattr(self, attr)
return d
class EndpointPolicy(object):
def create_policy_association(self, policy_id, endpoint_id=None,
service_id=None, region_id=None):
with sql.session_for_write() as session:
try:
# See if there is already a row for this association, and if
# so, update it with the new policy_id
query = session.query(PolicyAssociation)
query = query.filter_by(endpoint_id=endpoint_id)
query = query.filter_by(service_id=service_id)
query = query.filter_by(region_id=region_id)
association = query.one()
association.policy_id = policy_id
except sql.NotFound:
association = PolicyAssociation(id=uuid.uuid4().hex,
policy_id=policy_id,
endpoint_id=endpoint_id,
service_id=service_id,
region_id=region_id)
session.add(association)
def check_policy_association(self, policy_id, endpoint_id=None,
service_id=None, region_id=None):
sql_constraints = sqlalchemy.and_(
PolicyAssociation.policy_id == policy_id,
PolicyAssociation.endpoint_id == endpoint_id,
PolicyAssociation.service_id == service_id,
PolicyAssociation.region_id == region_id)
# NOTE(henry-nash): Getting a single value to save object
# management overhead.
with sql.session_for_read() as session:
if session.query(PolicyAssociation.id).filter(
sql_constraints).distinct().count() == 0:
raise exception.PolicyAssociationNotFound()
def delete_policy_association(self, policy_id, endpoint_id=None,
service_id=None, region_id=None):
with sql.session_for_write() as session:
query = session.query(PolicyAssociation)
query = query.filter_by(policy_id=policy_id)
query = query.filter_by(endpoint_id=endpoint_id)
query = query.filter_by(service_id=service_id)
query = query.filter_by(region_id=region_id)
query.delete()
def get_policy_association(self, endpoint_id=None,
service_id=None, region_id=None):
sql_constraints = sqlalchemy.and_(
PolicyAssociation.endpoint_id == endpoint_id,
PolicyAssociation.service_id == service_id,
PolicyAssociation.region_id == region_id)
try:
with sql.session_for_read() as session:
policy_id = session.query(PolicyAssociation.policy_id).filter(
sql_constraints).distinct().one()
return {'policy_id': policy_id}
except sql.NotFound:
raise exception.PolicyAssociationNotFound()
def list_associations_for_policy(self, policy_id):
with sql.session_for_read() as session:
query = session.query(PolicyAssociation)
query = query.filter_by(policy_id=policy_id)
return [ref.to_dict() for ref in query.all()]
def delete_association_by_endpoint(self, endpoint_id):
with sql.session_for_write() as session:
query = session.query(PolicyAssociation)
query = query.filter_by(endpoint_id=endpoint_id)
query.delete()
def delete_association_by_service(self, service_id):
with sql.session_for_write() as session:
query = session.query(PolicyAssociation)
query = query.filter_by(service_id=service_id)
query.delete()
def delete_association_by_region(self, region_id):
with sql.session_for_write() as session:
query = session.query(PolicyAssociation)
query = query.filter_by(region_id=region_id)
query.delete()
def delete_association_by_policy(self, policy_id):
with sql.session_for_write() as session:
query = session.query(PolicyAssociation)
query = query.filter_by(policy_id=policy_id)
query.delete()
| {
"content_hash": "e1e54868ef74def2a98469859da79278",
"timestamp": "",
"source": "github",
"line_count": 126,
"max_line_length": 78,
"avg_line_length": 43.92063492063492,
"alnum_prop": 0.5898084568124322,
"repo_name": "himanshu-setia/keystone",
"id": "aacbb083216f363dd3a6d351999d5d638d8b5a50",
"size": "6109",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "keystone/endpoint_policy/backends/sql.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "665"
},
{
"name": "Python",
"bytes": "4415061"
}
],
"symlink_target": ""
} |
import unittest
from sqlalchemy.exc import IntegrityError
from agentserver.db.models import (models, User, UserAuthToken,
Agent, AgentAuthToken, AgentDetail)
class TestDb(unittest.TestCase):
@classmethod
def setUpClass(cls):
models.connect('sqlite:///:memory:')
@classmethod
def tearDownClass(cls):
models.session.rollback()
models.session.close()
def test_users_and_user_tokens(self):
# Generate users and tokens
user = User(name='User A',
email='[email protected]',
is_admin=True,
password='randompassworda')
tokens = [UserAuthToken(user=user),
UserAuthToken(user=User(name='User B',
email='[email protected]',
is_admin=False,
password='randompasswordb')),
UserAuthToken(user=User(name='User C',
email='[email protected]',
is_admin=True,
password='randompasswordc'))]
UserAuthToken.save_all(tokens)
with self.assertRaises(IntegrityError):
User(name='User D', email='[email protected]',
is_admin=False, password='randompasswordd').save()
models.session.rollback()
# Test authorize method
self.assertEqual(User.authorize(user.token.uuid), user)
self.assertEqual(User.authorize('non-existent token'), None)
self.assertEqual(User.count(), 3)
self.assertEqual(UserAuthToken.count(), 3)
self.assertEqual(User.get(id=user.id), user)
self.assertEqual(User.get(email=user.email), user)
self.assertEqual(User.get(id=user.id), User.get(email=user.email))
self.assertEqual(User.get(id=user.id, email=user.email), user)
self.assertEqual(User.get(id=1000), None)
self.assertEqual(User.get(), None)
# Test delete method
self.assertTrue(User.get(email=user.email).delete())
self.assertRaises(AttributeError, lambda: User.get(
'[email protected]').delete())
self.assertEqual(User.count(), 2)
self.assertEqual(UserAuthToken.count(), 2)
def test_agents_and_agent_tokens(self):
# Generate agents and tokens
agents_before = Agent.count()
agent_tokens_before = AgentAuthToken.count()
agent_details_before = AgentDetail.count()
agent = Agent(name='Agent 0')
AgentAuthToken.save_all([
AgentAuthToken(agent=agent),
AgentAuthToken(agent=Agent(name='Agent 1')),
AgentAuthToken(agent=Agent(name='Agent 2'))])
AgentDetail(agent=agent, dist_name='Debian', dist_version='7.0',
hostname='host', num_cores=8, memory=160000,
processor='x86_64').save()
self.assertEqual(Agent.count(),
agents_before + 3)
self.assertEqual(AgentAuthToken.count(),
agent_tokens_before + 3)
self.assertEqual(AgentDetail.count(),
agent_details_before + 1)
# Test authorize method
self.assertEqual(Agent.authorize(agent.token.uuid), agent)
self.assertEqual(Agent.authorize('non-existent token'), None)
# Test delete method
self.assertTrue(Agent.get(name=agent.name).delete())
self.assertRaises(AttributeError, lambda: Agent.get(
name='non-existent agent').delete())
self.assertEqual(Agent.get(), None)
self.assertEqual(Agent.count(),
agents_before + 2)
self.assertEqual(AgentAuthToken.count(),
agent_tokens_before + 2)
self.assertEqual(AgentDetail.count(),
agent_details_before)
def test_agent_detail(self):
agent = Agent(name='Agent')
models.session.add(agent)
models.session.commit()
self.assertEqual(AgentDetail.count(), 0)
args = {'dist_name': 'Ubuntu', 'dist_version': '15.10',
'hostname': 'client', 'num_cores': 3,
'memory': 1040834560, 'processor': 'x86_64'}
created = AgentDetail.update_or_create(agent.id, **args)
self.assertTrue(created)
self.assertEqual(AgentDetail.count(), 1)
args = {'dist_name': 'Debian', 'dist_version': '7.0',
'hostname': 'client2', 'num_cores': 6,
'memory': 8888888, 'processor': 'amd64'}
created = AgentDetail.update_or_create(agent.id, **args)
self.assertFalse(created)
self.assertEqual(AgentDetail.count(), 1)
self.assertEqual(
agent.detail.id, AgentDetail.detail_for_agent_id(agent.id).id)
| {
"content_hash": "dbfc20e280923d99e036800e75142cd1",
"timestamp": "",
"source": "github",
"line_count": 124,
"max_line_length": 74,
"avg_line_length": 39.70967741935484,
"alnum_prop": 0.5696588139723802,
"repo_name": "silverfernsys/agentserver",
"id": "39d9bbb475fe63e498ae7067710bcb2e246d7d6b",
"size": "4947",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_db.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "155561"
},
{
"name": "Shell",
"bytes": "84"
}
],
"symlink_target": ""
} |
from .essential import Essential
class Action(Essential):
def __init__(self, app, id_):
super().__init__(app, id_)
self.running = True
async def execute(self):
self.logger.info('action_run')
try:
result = await self.run()
except:
self.logger.error('action_crash', exc_info=True)
raise
else:
self.logger.info('action_end')
finally:
await self.cleanup()
return result
async def run(self):
pass
def stop(self):
self.logger.info('action_stop')
self.running = False
async def cleanup(self):
self.logger.info('action_cleanup')
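# Usage sketch (illustrative only): a minimal Action subclass. The app/id
# wiring comes from the surrounding framework; this just shows where the
# work goes and that self.running can be polled to honour stop().
class ExampleAction(Action):
    async def run(self):
        # Real work goes here; long-running loops should check self.running.
        return 'done'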
| {
"content_hash": "6af28e8a5b3afc6fd8bc8f2cc38f9a98",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 60,
"avg_line_length": 22.70967741935484,
"alnum_prop": 0.5426136363636364,
"repo_name": "insolite/alarme",
"id": "edbb4ede3d3242becc36e37b9568068ba51cf873",
"size": "704",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "alarme/core/action.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "702"
},
{
"name": "HTML",
"bytes": "8679"
},
{
"name": "Lua",
"bytes": "2394"
},
{
"name": "Python",
"bytes": "76113"
},
{
"name": "Shell",
"bytes": "62"
}
],
"symlink_target": ""
} |
from __future__ import print_function
from .charset import MBLENGTH
from ._compat import PY2, range_type
from .constants import FIELD_TYPE, SERVER_STATUS
from . import err
from .util import byte2int
import struct
import sys
DEBUG = False
NULL_COLUMN = 251
UNSIGNED_CHAR_COLUMN = 251
UNSIGNED_SHORT_COLUMN = 252
UNSIGNED_INT24_COLUMN = 253
UNSIGNED_INT64_COLUMN = 254
def dump_packet(data): # pragma: no cover
def printable(data):
if 32 <= byte2int(data) < 127:
if isinstance(data, int):
return chr(data)
return data
return '.'
try:
print("packet length:", len(data))
for i in range(1, 7):
f = sys._getframe(i)
print("call[%d]: %s (line %d)" % (i, f.f_code.co_name, f.f_lineno))
print("-" * 66)
except ValueError:
pass
dump_data = [data[i:i+16] for i in range_type(0, min(len(data), 256), 16)]
for d in dump_data:
print(' '.join("{:02X}".format(byte2int(x)) for x in d) +
' ' * (16 - len(d)) + ' ' * 2 +
''.join(printable(x) for x in d))
print("-" * 66)
print()
class MysqlPacket(object):
"""Representation of a MySQL response packet.
Provides an interface for reading/parsing the packet results.
"""
__slots__ = ('_position', '_data')
def __init__(self, data, encoding):
self._position = 0
self._data = data
def get_all_data(self):
return self._data
def read(self, size):
"""Read the first 'size' bytes in packet and advance cursor past them."""
result = self._data[self._position:(self._position+size)]
if len(result) != size:
error = ('Result length not requested length:\n'
'Expected=%s. Actual=%s. Position: %s. Data Length: %s'
% (size, len(result), self._position, len(self._data)))
if DEBUG:
print(error)
self.dump()
raise AssertionError(error)
self._position += size
return result
def read_all(self):
"""Read all remaining data in the packet.
(Subsequent read() will return errors.)
"""
result = self._data[self._position:]
self._position = None # ensure no subsequent read()
return result
def advance(self, length):
"""Advance the cursor in data buffer 'length' bytes."""
new_position = self._position + length
if new_position < 0 or new_position > len(self._data):
raise Exception('Invalid advance amount (%s) for cursor. '
'Position=%s' % (length, new_position))
self._position = new_position
def rewind(self, position=0):
"""Set the position of the data buffer cursor to 'position'."""
if position < 0 or position > len(self._data):
raise Exception("Invalid position to rewind cursor to: %s." % position)
self._position = position
def get_bytes(self, position, length=1):
"""Get 'length' bytes starting at 'position'.
Position is start of payload (first four packet header bytes are not
included) starting at index '0'.
No error checking is done. If requesting outside end of buffer
an empty string (or string shorter than 'length') may be returned!
"""
return self._data[position:(position+length)]
if PY2:
def read_uint8(self):
result = ord(self._data[self._position])
self._position += 1
return result
else:
def read_uint8(self):
result = self._data[self._position]
self._position += 1
return result
def read_uint16(self):
result = struct.unpack_from('<H', self._data, self._position)[0]
self._position += 2
return result
def read_uint24(self):
low, high = struct.unpack_from('<HB', self._data, self._position)
self._position += 3
return low + (high << 16)
def read_uint32(self):
result = struct.unpack_from('<I', self._data, self._position)[0]
self._position += 4
return result
def read_uint64(self):
result = struct.unpack_from('<Q', self._data, self._position)[0]
self._position += 8
return result
def read_string(self):
end_pos = self._data.find(b'\0', self._position)
if end_pos < 0:
return None
result = self._data[self._position:end_pos]
self._position = end_pos + 1
return result
def read_length_encoded_integer(self):
"""Read a 'Length Coded Binary' number from the data buffer.
Length coded numbers can be anywhere from 1 to 9 bytes depending
on the value of the first byte.
"""
c = self.read_uint8()
if c == NULL_COLUMN:
return None
if c < UNSIGNED_CHAR_COLUMN:
return c
elif c == UNSIGNED_SHORT_COLUMN:
return self.read_uint16()
elif c == UNSIGNED_INT24_COLUMN:
return self.read_uint24()
elif c == UNSIGNED_INT64_COLUMN:
return self.read_uint64()
def read_length_coded_string(self):
"""Read a 'Length Coded String' from the data buffer.
A 'Length Coded String' consists first of a length coded
(unsigned, positive) integer represented in 1-9 bytes followed by
that many bytes of binary data. (For example "cat" would be "3cat".)
"""
length = self.read_length_encoded_integer()
if length is None:
return None
return self.read(length)
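    # Worked byte-level examples of the length encodings above (illustrative
    # values; multi-byte lengths are little-endian):
    #   b'\x03cat'           -> prefix 3 (< 251), so the string is the next 3 bytes
    #   b'\xfb'              -> NULL_COLUMN (251), decoded as None
    #   b'\xfc\x00\x01' + s  -> prefix 252, 2-byte length 0x0100 = 256, then s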
def read_struct(self, fmt):
s = struct.Struct(fmt)
result = s.unpack_from(self._data, self._position)
self._position += s.size
return result
def is_ok_packet(self):
# https://dev.mysql.com/doc/internals/en/packet-OK_Packet.html
return self._data[0:1] == b'\0' and len(self._data) >= 7
def is_eof_packet(self):
# http://dev.mysql.com/doc/internals/en/generic-response-packets.html#packet-EOF_Packet
# Caution: \xFE may be LengthEncodedInteger.
# If \xFE is LengthEncodedInteger header, 8bytes followed.
return self._data[0:1] == b'\xfe' and len(self._data) < 9
def is_auth_switch_request(self):
# http://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::AuthSwitchRequest
return self._data[0:1] == b'\xfe'
def is_extra_auth_data(self):
# https://dev.mysql.com/doc/internals/en/successful-authentication.html
return self._data[0:1] == b'\x01'
def is_resultset_packet(self):
field_count = ord(self._data[0:1])
return 1 <= field_count <= 250
def is_load_local_packet(self):
return self._data[0:1] == b'\xfb'
def is_error_packet(self):
return self._data[0:1] == b'\xff'
def check_error(self):
if self.is_error_packet():
self.raise_for_error()
def raise_for_error(self):
self.rewind()
self.advance(1) # field_count == error (we already know that)
errno = self.read_uint16()
if DEBUG: print("errno =", errno)
err.raise_mysql_exception(self._data)
def dump(self):
dump_packet(self._data)
class FieldDescriptorPacket(MysqlPacket):
"""A MysqlPacket that represents a specific column's metadata in the result.
Parsing is automatically done and the results are exported via public
attributes on the class such as: db, table_name, name, length, type_code.
"""
def __init__(self, data, encoding):
MysqlPacket.__init__(self, data, encoding)
self._parse_field_descriptor(encoding)
def _parse_field_descriptor(self, encoding):
"""Parse the 'Field Descriptor' (Metadata) packet.
This is compatible with MySQL 4.1+ (not compatible with MySQL 4.0).
"""
self.catalog = self.read_length_coded_string()
self.db = self.read_length_coded_string()
self.table_name = self.read_length_coded_string().decode(encoding)
self.org_table = self.read_length_coded_string().decode(encoding)
self.name = self.read_length_coded_string().decode(encoding)
self.org_name = self.read_length_coded_string().decode(encoding)
self.charsetnr, self.length, self.type_code, self.flags, self.scale = (
self.read_struct('<xHIBHBxx'))
# 'default' is a length coded binary and is still in the buffer?
# not used for normal result sets...
def description(self):
"""Provides a 7-item tuple compatible with the Python PEP249 DB Spec."""
return (
self.name,
self.type_code,
None, # TODO: display_length; should this be self.length?
self.get_column_length(), # 'internal_size'
self.get_column_length(), # 'precision' # TODO: why!?!?
self.scale,
self.flags % 2 == 0)
def get_column_length(self):
if self.type_code == FIELD_TYPE.VAR_STRING:
mblen = MBLENGTH.get(self.charsetnr, 1)
return self.length // mblen
return self.length
def __str__(self):
return ('%s %r.%r.%r, type=%s, flags=%x'
% (self.__class__, self.db, self.table_name, self.name,
self.type_code, self.flags))
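# For reference: description() above returns the PEP 249 7-tuple
# (name, type_code, display_size, internal_size, precision, scale, null_ok).
# null_ok is derived from bit 0 of flags (MySQL's NOT_NULL flag), and
# get_column_length() divides the byte length by the charset's maximum bytes
# per character for VAR_STRING columns.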
class OKPacketWrapper(object):
"""
OK Packet Wrapper. It uses an existing packet object, and wraps
around it, exposing useful variables while still providing access
to the original packet objects variables and methods.
"""
def __init__(self, from_packet):
if not from_packet.is_ok_packet():
raise ValueError('Cannot create ' + str(self.__class__.__name__) +
' object from invalid packet type')
self.packet = from_packet
self.packet.advance(1)
self.affected_rows = self.packet.read_length_encoded_integer()
self.insert_id = self.packet.read_length_encoded_integer()
self.server_status, self.warning_count = self.read_struct('<HH')
self.message = self.packet.read_all()
self.has_next = self.server_status & SERVER_STATUS.SERVER_MORE_RESULTS_EXISTS
def __getattr__(self, key):
return getattr(self.packet, key)
class EOFPacketWrapper(object):
"""
EOF Packet Wrapper. It uses an existing packet object, and wraps
around it, exposing useful variables while still providing access
to the original packet objects variables and methods.
"""
def __init__(self, from_packet):
if not from_packet.is_eof_packet():
raise ValueError(
"Cannot create '{0}' object from invalid packet type".format(
self.__class__))
self.packet = from_packet
self.warning_count, self.server_status = self.packet.read_struct('<xhh')
if DEBUG: print("server_status=", self.server_status)
self.has_next = self.server_status & SERVER_STATUS.SERVER_MORE_RESULTS_EXISTS
def __getattr__(self, key):
return getattr(self.packet, key)
class LoadLocalPacketWrapper(object):
"""
Load Local Packet Wrapper. It uses an existing packet object, and wraps
around it, exposing useful variables while still providing access
to the original packet objects variables and methods.
"""
def __init__(self, from_packet):
if not from_packet.is_load_local_packet():
raise ValueError(
"Cannot create '{0}' object from invalid packet type".format(
self.__class__))
self.packet = from_packet
self.filename = self.packet.get_all_data()[1:]
if DEBUG: print("filename=", self.filename)
def __getattr__(self, key):
return getattr(self.packet, key)
| {
"content_hash": "b2b55e048fd84fda4a2acda782c0e96d",
"timestamp": "",
"source": "github",
"line_count": 341,
"max_line_length": 112,
"avg_line_length": 35.002932551319645,
"alnum_prop": 0.5961796246648794,
"repo_name": "sergey-dryabzhinsky/dedupsqlfs",
"id": "e302edab37549339499f458059142d39e769084a",
"size": "12071",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib-dynload/_pymysql/protocol.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5509796"
},
{
"name": "C++",
"bytes": "33360"
},
{
"name": "Cython",
"bytes": "107356"
},
{
"name": "Python",
"bytes": "1042676"
},
{
"name": "Shell",
"bytes": "1480"
}
],
"symlink_target": ""
} |
from __future__ import absolute_import
from zerver.lib.actions import check_send_message
from zerver.lib.response import json_success, json_error
from zerver.decorator import REQ, has_request_variables, api_key_only_webhook_view
import ujson
CIRCLECI_SUBJECT_TEMPLATE = '{repository_name}'
CIRCLECI_MESSAGE_TEMPLATE = '[Build]({build_url}) triggered by {username} on {branch} branch {status}.'
FAILED_STATUS = 'failed'
@api_key_only_webhook_view('CircleCI')
@has_request_variables
def api_circleci_webhook(request, user_profile, client, payload=REQ(argument_type='body'), stream=REQ(default='circleci')):
    payload = ujson.loads(payload)['payload']  # argument_type='body' yields the raw request body
subject = get_subject(payload)
body = get_body(payload)
check_send_message(user_profile, client, 'stream', [stream], subject, body)
return json_success()
def get_subject(payload):
return CIRCLECI_SUBJECT_TEMPLATE.format(repository_name=payload['reponame'])
def get_body(payload):
data = {
'build_url': payload['build_url'],
'username': payload['username'],
'branch': payload['branch'],
'status': get_status(payload)
}
return CIRCLECI_MESSAGE_TEMPLATE.format(**data)
def get_status(payload):
status = payload['status']
if payload['previous']['status'] == FAILED_STATUS and status == FAILED_STATUS:
return 'is still failing'
if status == 'success':
return 'succeeded'
return status
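# Rendered output for a hypothetical payload (illustrative values only):
#   subject: 'zulip'
#   body:    '[Build](https://circleci.com/gh/org/repo/42) triggered by sam '
#            'on master branch succeeded.'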
| {
"content_hash": "e3aaa5f4026b14abb65268a76f5e84e4",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 123,
"avg_line_length": 33.88095238095238,
"alnum_prop": 0.6985242445537596,
"repo_name": "peiwei/zulip",
"id": "670fd610b1f32d658b4f1640ce1fb0d929c9c40e",
"size": "1461",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "zerver/views/webhooks/circleci.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "164"
},
{
"name": "CSS",
"bytes": "183830"
},
{
"name": "CoffeeScript",
"bytes": "18435"
},
{
"name": "Groovy",
"bytes": "5516"
},
{
"name": "HTML",
"bytes": "397966"
},
{
"name": "JavaScript",
"bytes": "1588795"
},
{
"name": "Nginx",
"bytes": "1228"
},
{
"name": "PHP",
"bytes": "18930"
},
{
"name": "Pascal",
"bytes": "1113"
},
{
"name": "Perl",
"bytes": "383634"
},
{
"name": "Puppet",
"bytes": "96085"
},
{
"name": "Python",
"bytes": "2010761"
},
{
"name": "Ruby",
"bytes": "255867"
},
{
"name": "Shell",
"bytes": "33341"
}
],
"symlink_target": ""
} |
import json
from copy import copy
import requests
MOPIDY_API = '/mopidy/rpc'
_base_dict = {'jsonrpc': '2.0', 'id': 1, 'params': {}}
class Mopidy:
def __init__(self, url):
self.is_playing = False
self.url = url + MOPIDY_API
self.volume = None
self.clear_list(force=True)
self.volume_low = 3
self.volume_high = 100
def find_artist(self, artist):
d = copy(_base_dict)
d['method'] = 'core.library.search'
d['params'] = {'artist': [artist]}
r = requests.post(self.url, data=json.dumps(d))
return r.json()['result'][1]['artists']
def get_playlists(self, filter=None):
d = copy(_base_dict)
d['method'] = 'core.playlists.as_list'
r = requests.post(self.url, data=json.dumps(d))
if filter is None:
return r.json()['result']
else:
return [l for l in r.json()['result'] if filter + ':' in l['uri']]
def find_album(self, album, filter=None):
d = copy(_base_dict)
d['method'] = 'core.library.search'
d['params'] = {'album': [album]}
r = requests.post(self.url, data=json.dumps(d))
lst = [res['albums'] for res in r.json()['result'] if 'albums' in res]
if filter is None:
return lst
else:
return [i for sl in lst for i in sl if filter + ':' in i['uri']]
def find_exact(self, uris='null'):
d = copy(_base_dict)
d['method'] = 'core.library.find_exact'
d['params'] = {'uris': uris}
r = requests.post(self.url, data=json.dumps(d))
return r.json()
def browse(self, uri):
d = copy(_base_dict)
d['method'] = 'core.library.browse'
d['params'] = {'uri': uri}
r = requests.post(self.url, data=json.dumps(d))
if 'result' in r.json():
return r.json()['result']
else:
return None
def clear_list(self, force=False):
if self.is_playing or force:
d = copy(_base_dict)
d['method'] = 'core.tracklist.clear'
r = requests.post(self.url, data=json.dumps(d))
return r
def add_list(self, uri):
d = copy(_base_dict)
d['method'] = 'core.tracklist.add'
if isinstance(uri, str):
d['params'] = {'uri': uri}
        elif isinstance(uri, list):
d['params'] = {'uris': uri}
else:
return None
r = requests.post(self.url, data=json.dumps(d))
return r
def play(self):
self.is_playing = True
self.restore_volume()
d = copy(_base_dict)
d['method'] = 'core.playback.play'
r = requests.post(self.url, data=json.dumps(d))
def next(self):
if self.is_playing:
d = copy(_base_dict)
d['method'] = 'core.playback.next'
r = requests.post(self.url, data=json.dumps(d))
def previous(self):
if self.is_playing:
d = copy(_base_dict)
d['method'] = 'core.playback.previous'
r = requests.post(self.url, data=json.dumps(d))
def stop(self):
if self.is_playing:
d = copy(_base_dict)
d['method'] = 'core.playback.stop'
r = requests.post(self.url, data=json.dumps(d))
self.is_playing = False
def currently_playing(self):
if self.is_playing:
d = copy(_base_dict)
d['method'] = 'core.playback.get_current_track'
r = requests.post(self.url, data=json.dumps(d))
return r.json()['result']
else:
return None
def set_volume(self, percent):
if self.is_playing:
d = copy(_base_dict)
d['method'] = 'core.mixer.set_volume'
d['params'] = {'volume': percent}
r = requests.post(self.url, data=json.dumps(d))
def lower_volume(self):
self.set_volume(self.volume_low)
def restore_volume(self):
self.set_volume(self.volume_high)
def pause(self):
if self.is_playing:
d = copy(_base_dict)
d['method'] = 'core.playback.pause'
r = requests.post(self.url, data=json.dumps(d))
def resume(self):
if self.is_playing:
d = copy(_base_dict)
d['method'] = 'core.playback.resume'
r = requests.post(self.url, data=json.dumps(d))
def get_items(self, uri):
d = copy(_base_dict)
d['method'] = 'core.playlists.get_items'
d['params'] = {'uri': uri}
r = requests.post(self.url, data=json.dumps(d))
if 'result' in r.json():
return [e['uri'] for e in r.json()['result']]
else:
return None
def get_tracks(self, uri):
tracks = self.browse(uri)
ret = [t['uri'] for t in tracks if t['type'] == 'track']
sub_tracks = [t['uri'] for t in tracks if t['type'] != 'track']
for t in sub_tracks:
ret = ret + self.get_tracks(t)
return ret
def get_local_albums(self):
p = self.browse('local:directory?type=album')
return {e['name']: e for e in p if e['type'] == 'album'}
def get_local_artists(self):
p = self.browse('local:directory?type=artist')
return {e['name']: e for e in p if e['type'] == 'artist'}
def get_local_genres(self):
p = self.browse('local:directory?type=genre')
return {e['name']: e for e in p if e['type'] == 'directory'}
def get_local_playlists(self):
p = self.get_playlists('m3u')
return {e['name']: e for e in p}
def get_spotify_playlists(self):
p = self.get_playlists('spotify')
return {e['name'].split('(by')[0].strip().lower(): e for e in p}
def get_gmusic_albums(self):
p = self.browse('gmusic:album')
p = {e['name']: e for e in p if e['type'] == 'directory'}
return {e.split(' - ')[1]: p[e] for e in p}
def get_gmusic_artists(self):
p = self.browse('gmusic:artist')
return {e['name']: e for e in p if e['type'] == 'directory'}
def get_gmusic_radio(self):
p = self.browse('gmusic:radio')
return {e['name']: e for e in p if e['type'] == 'directory'}
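# Minimal usage sketch (an assumption; requires a running Mopidy server with
# the HTTP frontend enabled, e.g. at http://localhost:6680):
#
#     m = Mopidy('http://localhost:6680')
#     m.add_list(m.get_tracks('local:directory?type=album'))
#     m.play()
#     m.set_volume(50)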
| {
"content_hash": "b9238ce75e4b7cdcf54bbfa07bf2e288",
"timestamp": "",
"source": "github",
"line_count": 194,
"max_line_length": 78,
"avg_line_length": 32.24226804123711,
"alnum_prop": 0.5278976818545164,
"repo_name": "MycroftAI/mycroft-core",
"id": "9efd1dda43d65f5532d84b65642ed4117f0e25ea",
"size": "6835",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "mycroft/audio/services/mopidy/mopidypost.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "3791"
},
{
"name": "Python",
"bytes": "1370285"
},
{
"name": "QML",
"bytes": "18805"
},
{
"name": "Shell",
"bytes": "85326"
}
],
"symlink_target": ""
} |
""" This script loads the raw phone_brand_device_model phone set, creates the
features and deals with NaN values."""
import os
from os import path
import pandas as pd
import numpy as np
import pickle as pkl
from dotenv import load_dotenv, find_dotenv
from sklearn.preprocessing import LabelEncoder
from sklearn.preprocessing import StandardScaler
dotenv_path = find_dotenv()
load_dotenv(dotenv_path)
RAW_DATA_DIR = os.environ.get("RAW_DATA_DIR")
FEATURES_DATA_DIR = os.environ.get("FEATURES_DIR")
phone = pd.read_csv(path.join(RAW_DATA_DIR, 'phone_brand_device_model.csv'))
specs_table = pd.read_csv(path.join(FEATURES_DATA_DIR, 'specs_table.csv'))
model_mapping = pd.read_csv(path.join(FEATURES_DATA_DIR, 'model_mapping.csv'))
brand_mapping = pd.read_csv(path.join(FEATURES_DATA_DIR, 'brand_mapping.csv'))
gatrain = pd.read_csv(os.path.join(RAW_DATA_DIR,'gender_age_train.csv'),
index_col='device_id')
gatest = pd.read_csv(os.path.join(RAW_DATA_DIR,'gender_age_test.csv'),
index_col = 'device_id')
gatest.head()
gatrain['trainrow'] = np.arange(gatrain.shape[0])
gatest['testrow'] = np.arange(gatest.shape[0])
phone = phone.drop_duplicates('device_id')
# join phone to add the phone price feature
phone = phone.merge(brand_mapping, how='left', left_on='phone_brand',
right_on='phone_brand_chinese')
phone = phone.merge(model_mapping, how='left', left_on='device_model',
right_on='device_model_chinese')
phone = phone.drop(['phone_brand', 'device_model',
'phone_brand_chinese', 'device_model_chinese'], axis=1)
phone = phone.drop_duplicates('device_id')
phone = phone.rename( columns = {'phone_brand_latin': 'phone_brand',
'device_model_latin': 'device_model'})
phone = phone.merge(specs_table[['phone_brand', 'device_model', 'price_eur']],
on=['phone_brand', 'device_model'],
how='left',
suffixes=['', '_R'])
# not all models have the price stored, fill the NaN values
phone['price_eur'] = phone['price_eur'].fillna(-1)
phone = (phone.set_index('device_id').join(gatrain[['trainrow']], how='left')
.join(gatest[['testrow']], how='left'))
# encoding and scaling all features to a distribution with mean = 0
phone['device_model'] = phone['phone_brand'] + phone['device_model']
# encoding strings to numbers
brandencoder = LabelEncoder().fit(phone['phone_brand'])
modelencoder = LabelEncoder().fit(phone['device_model'])
phone['phone_brand'] = brandencoder.transform(phone['phone_brand'])
phone['device_model'] = modelencoder.transform(phone['device_model'])
# scale data to a distribution with 0 mean and variance 1
brand_scale = StandardScaler().fit(phone['phone_brand'].reshape(-1,1))
model_scale = StandardScaler().fit(phone['device_model'].reshape(-1,1))
price_scale = StandardScaler().fit(phone['price_eur'].reshape(-1,1))
phone['phone_brand'] = brand_scale.transform(phone['phone_brand'].reshape(-1,1))
phone['device_model'] = model_scale.transform(phone['device_model'].reshape(-1,1))
phone['price_eur'] = price_scale.transform(phone['price_eur'].reshape(-1,1))
# device_ids that belong to gatrain's rows make the training set; the test set is built the same way from gatest
phone_train = phone.dropna(subset=['trainrow']).drop(['testrow'],1)
phone_test = phone.dropna(subset=['testrow']).drop(['trainrow'],1)
#save
phone_train.to_csv(path.join(FEATURES_DATA_DIR, 'dense_brand_model_price_train.csv'))
phone_test.to_csv(path.join(FEATURES_DATA_DIR, 'dense_brand_model_price_test.csv'))
| {
"content_hash": "b04041a97f07d208ab70a6941116d7ac",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 88,
"avg_line_length": 46.935064935064936,
"alnum_prop": 0.6768123962368566,
"repo_name": "Kebniss/TalkingData-Mobile-User-Demographics",
"id": "127bf571260653ac1a54bd5bc14af180d1f34889",
"size": "3614",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/features/make_dense_brand_model_price.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "8078505"
},
{
"name": "Makefile",
"bytes": "1204"
},
{
"name": "Python",
"bytes": "80582"
}
],
"symlink_target": ""
} |
import os
import l20n.format.lol.parser as parser
import l20n.format.lol.serializer as serializer
import l20n.format.lol.ast as ast
import codecs
def read_file(path):
with file(path) as f:
return f.read()
def write_file(path, s):
f = codecs.open(path, encoding='utf_8', mode='w+')
f.write(s)
f.close()
def get_source_locale(path):
#from dulwich.client import TCPGitClient
#os.makedirs(path)
#client = TCPGitClient(server_address, server_port)
pass
repo_paths = {
'mozilla': {
'gaia': {
''
}
}
}
project = "mozilla/firefox"
ser = serializer.Serializer()
def bootstrap_lol(lol):
elems = len(lol.body)
i=0
while i<elems:
if isinstance(lol.body[i], ast.Entity):
#entity_str = ser.dump_entity(lol.body[i])
#c = ast.Comment(entity_str)
#c._template = '/*\n %(content)s\n*/'
#s = ast.String('')
#s._template = '"%(content)s"'
s = None
object.__setattr__(lol.body[i], 'value', s)
#lol.body.insert(i, c)
#lol._template_body.insert(i, '\n')
#i+=1
#elems+=1
i+=1
return lol
def bootstrap():
source_locale = 'en-US'
locale = 'pl'
try:
os.makedirs(os.path.join('data', locale))
except OSError:
pass
module = 'homescreen'
mpath = '/Users/zbraniecki/projects/mozilla/gaia/apps/homescreen'
f = read_file(os.path.join(mpath, 'locale', '%s.lol' % source_locale))
p = parser.Parser()
lol = p.parse(f)
s = ser.serialize(lol)
write_file(os.path.join('data', locale, '%s.lol.orig' % module), s)
lol = bootstrap_lol(lol)
s = ser.serialize(lol)
write_file(os.path.join('data', locale, '%s.lol' % module), s)
if __name__ == '__main__':
bootstrap()
| {
"content_hash": "73242eca50a30fdad9e56a26cf62eba9",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 74,
"avg_line_length": 25.315068493150687,
"alnum_prop": 0.5633116883116883,
"repo_name": "stasm/python-l20n",
"id": "a6b9b8b8ff77092031819164afb4d0c66442cbfb",
"size": "1848",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "tools/env/bootstrap-locale.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "90061"
}
],
"symlink_target": ""
} |
class ConfigurationError(Exception):
"""Raised when something is incorrectly configured."""
class ConnectionFailure(Exception):
"""Raised when a connection to the thrift source cannot be made os is lost."""
class ServerSelectionError(Exception):
"""Thrown when no thrift source is available for an operation."""
class OperationFailure(Exception):
"""Raised when a thrift operation fails."""
| {
"content_hash": "9b2acea869bf50dc248e5067b01cf911",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 82,
"avg_line_length": 29.5,
"alnum_prop": 0.7433414043583535,
"repo_name": "Free0xFF/flumelogger-for-python3",
"id": "71f00585dc35703d83ed041ec529687f7ac1c248",
"size": "414",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "flumelogger/errors.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "196029"
}
],
"symlink_target": ""
} |
"""
WSGI config for ExampleDjango project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ExampleDjango.settings")
application = get_wsgi_application()
| {
"content_hash": "b3347bdf7137e273ef65d614fbc73021",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 78,
"avg_line_length": 25.25,
"alnum_prop": 0.7772277227722773,
"repo_name": "GoberInfinity/ExampleDjango",
"id": "ea537b1bba99e010dcab502fc522fe6bd3e05b95",
"size": "404",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ExampleDjango/wsgi.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "187"
},
{
"name": "HTML",
"bytes": "2054"
},
{
"name": "JavaScript",
"bytes": "615"
},
{
"name": "Python",
"bytes": "16642"
}
],
"symlink_target": ""
} |
"""
Copyright (c) 2015 Michael Bright and Bamboo HR LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__version__ = '0.0.0'
class Version(object):
HEADER = 'X-RAPIDCI-VERSION'
@staticmethod
def get_version():
return __version__
| {
"content_hash": "a5f502c746cc185629cedab4cb09c505",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 73,
"avg_line_length": 30.833333333333332,
"alnum_prop": 0.7351351351351352,
"repo_name": "BambooHR/rapid",
"id": "0fa480f0d0354fd7bfa18139de508973a49a7ff3",
"size": "740",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rapid/lib/version.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1235"
},
{
"name": "Mako",
"bytes": "1069"
},
{
"name": "Python",
"bytes": "665011"
},
{
"name": "Shell",
"bytes": "6411"
}
],
"symlink_target": ""
} |
from StringIO import StringIO
from markdown import Markdown
from bleach import linkify, callbacks
markdown_extension_list = [
"extra",
"admonition",
"codehilite",
"nl2br",
"sane_lists",
"toc",
"wikilinks",
]
md = Markdown(extensions=markdown_extension_list)
def markdown_convert(source):
md.reset()
return md.convert(source)
def markdown_convert_file(file):
md.reset()
content = StringIO()
md.convertFile(file, content, "utf-8")
return content.getvalue().decode("utf-8")
def markdown_and_linkify(source):
source = markdown_convert(source)
source = linkify(source, parse_email=True,
callbacks=[callbacks.nofollow,
target_blank_except_footnote])
return source
def target_blank_except_footnote(attrs, new=False):
if "class" in attrs and \
attrs["class"] in ("footnote-backref", "footnote-ref"):
return attrs
else:
return callbacks.target_blank(attrs, new)
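# Illustrative round-trip (assumed input) through the helpers above; linkify
# applies the nofollow and target-blank callbacks, so the output is roughly:
#   markdown_and_linkify(u"See http://example.com")
#   -> u'<p>See <a href="http://example.com" rel="nofollow"
#          target="_blank">http://example.com</a></p>'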
| {
"content_hash": "3091d2a2acb64f673ad094302786262f",
"timestamp": "",
"source": "github",
"line_count": 43,
"max_line_length": 62,
"avg_line_length": 23.511627906976745,
"alnum_prop": 0.6439169139465876,
"repo_name": "PoolC/Yuzuki",
"id": "34b66d664c799af5417d514015d60d24e4df7f99",
"size": "1035",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "helper/content.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "10296"
},
{
"name": "HTML",
"bytes": "55201"
},
{
"name": "JavaScript",
"bytes": "23070"
},
{
"name": "Python",
"bytes": "96176"
}
],
"symlink_target": ""
} |
"""A Task logger that presents our DB interface,
but exists entirely in memory and implemented with dicts.
Authors:
* Min RK
TaskRecords are dicts of the form::
{
'msg_id' : str(uuid),
'client_uuid' : str(uuid),
'engine_uuid' : str(uuid) or None,
'header' : dict(header),
'content': dict(content),
'buffers': list(buffers),
'submitted': datetime or None,
'started': datetime or None,
'completed': datetime or None,
'received': datetime or None,
'resubmitted': str(uuid) or None,
'result_header' : dict(header) or None,
'result_content' : dict(content) or None,
'result_buffers' : list(buffers) or None,
}
With this info, many of the special categories of tasks can be defined by query,
e.g.:
* pending: completed is None
* client's outstanding: client_uuid = uuid && completed is None
* MIA: arrived is None (and completed is None)
DictDB supports a subset of mongodb operators::
$lt,$gt,$lte,$gte,$ne,$in,$nin,$all,$mod,$exists
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2010-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
from copy import deepcopy as copy
from datetime import datetime
from IPython.config.configurable import LoggingConfigurable
from IPython.utils.py3compat import iteritems, itervalues
from IPython.utils.traitlets import Dict, Unicode, Integer, Float
filters = {
'$lt' : lambda a,b: a < b,
 '$gt' : lambda a,b: a > b,
'$eq' : lambda a,b: a == b,
'$ne' : lambda a,b: a != b,
'$lte': lambda a,b: a <= b,
'$gte': lambda a,b: a >= b,
'$in' : lambda a,b: a in b,
'$nin': lambda a,b: a not in b,
 '$all': lambda a,b: all([ bb in a for bb in b ]),  # field contains every listed value
'$mod': lambda a,b: a%b[0] == b[1],
'$exists' : lambda a,b: (b and a is not None) or (a is None and not b)
}
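# Illustrative queries against this operator table (assumed names; see the
# TaskRecord schema in the module docstring):
#   db.find_records({'completed': None})                      # pending tasks
#   db.find_records({'client_uuid': uid, 'completed': None})  # one client's outstanding
#   db.find_records({'submitted': {'$gt': cutoff}})           # submitted after a datetime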
class CompositeFilter(object):
"""Composite filter for matching multiple properties."""
def __init__(self, dikt):
self.tests = []
self.values = []
for key, value in iteritems(dikt):
self.tests.append(filters[key])
self.values.append(value)
def __call__(self, value):
for test,check in zip(self.tests, self.values):
if not test(value, check):
return False
return True
class BaseDB(LoggingConfigurable):
"""Empty Parent class so traitlets work on DB."""
# base configurable traits:
session = Unicode("")
class DictDB(BaseDB):
"""Basic in-memory dict-based object for saving Task Records.
This is the first object to present the DB interface
for logging tasks out of memory.
The interface is based on MongoDB, so adding a MongoDB
backend should be straightforward.
"""
_records = Dict()
_culled_ids = set() # set of ids which have been culled
_buffer_bytes = Integer(0) # running total of the bytes in the DB
size_limit = Integer(1024**3, config=True,
help="""The maximum total size (in bytes) of the buffers stored in the db
When the db exceeds this size, the oldest records will be culled until
the total size is under size_limit * (1-cull_fraction).
default: 1 GB
"""
)
record_limit = Integer(1024, config=True,
help="""The maximum number of records in the db
When the history exceeds this size, the first record_limit * cull_fraction
records will be culled.
"""
)
cull_fraction = Float(0.1, config=True,
help="""The fraction by which the db should culled when one of the limits is exceeded
In general, the db size will spend most of its time with a size in the range:
[limit * (1-cull_fraction), limit]
for each of size_limit and record_limit.
"""
)
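    # Worked example of the bounds described above (illustrative numbers):
    # with size_limit = 1 GB and cull_fraction = 0.1, a size-triggered cull
    # evicts the oldest records until the buffers drop below
    # 1 GB * (1 - 0.1), i.e. roughly 0.9 GB.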
def _match_one(self, rec, tests):
"""Check if a specific record matches tests."""
for key,test in iteritems(tests):
if not test(rec.get(key, None)):
return False
return True
def _match(self, check):
"""Find all the matches for a check dict."""
matches = []
tests = {}
for k,v in iteritems(check):
if isinstance(v, dict):
tests[k] = CompositeFilter(v)
else:
                tests[k] = lambda o, v=v: o == v  # bind v at definition time (late binding would reuse the last value)
for rec in itervalues(self._records):
if self._match_one(rec, tests):
matches.append(copy(rec))
return matches
def _extract_subdict(self, rec, keys):
"""extract subdict of keys"""
d = {}
d['msg_id'] = rec['msg_id']
for key in keys:
d[key] = rec[key]
return copy(d)
# methods for monitoring size / culling history
def _add_bytes(self, rec):
for key in ('buffers', 'result_buffers'):
for buf in rec.get(key) or []:
self._buffer_bytes += len(buf)
self._maybe_cull()
def _drop_bytes(self, rec):
for key in ('buffers', 'result_buffers'):
for buf in rec.get(key) or []:
self._buffer_bytes -= len(buf)
def _cull_oldest(self, n=1):
"""cull the oldest N records"""
for msg_id in self.get_history()[:n]:
self.log.debug("Culling record: %r", msg_id)
self._culled_ids.add(msg_id)
self.drop_record(msg_id)
def _maybe_cull(self):
# cull by count:
if len(self._records) > self.record_limit:
to_cull = int(self.cull_fraction * self.record_limit)
self.log.info("%i records exceeds limit of %i, culling oldest %i",
len(self._records), self.record_limit, to_cull
)
self._cull_oldest(to_cull)
# cull by size:
if self._buffer_bytes > self.size_limit:
limit = self.size_limit * (1 - self.cull_fraction)
before = self._buffer_bytes
before_count = len(self._records)
culled = 0
while self._buffer_bytes > limit:
self._cull_oldest(1)
culled += 1
self.log.info("%i records with total buffer size %i exceeds limit: %i. Culled oldest %i records.",
before_count, before, self.size_limit, culled
)
def _check_dates(self, rec):
for key in ('submitted', 'started', 'completed'):
value = rec.get(key, None)
if value is not None and not isinstance(value, datetime):
raise ValueError("%s must be None or datetime, not %r" % (key, value))
# public API methods:
def add_record(self, msg_id, rec):
"""Add a new Task Record, by msg_id."""
if msg_id in self._records:
raise KeyError("Already have msg_id %r"%(msg_id))
self._check_dates(rec)
self._records[msg_id] = rec
self._add_bytes(rec)
self._maybe_cull()
def get_record(self, msg_id):
"""Get a specific Task Record, by msg_id."""
if msg_id in self._culled_ids:
raise KeyError("Record %r has been culled for size" % msg_id)
if not msg_id in self._records:
raise KeyError("No such msg_id %r"%(msg_id))
return copy(self._records[msg_id])
def update_record(self, msg_id, rec):
"""Update the data in an existing record."""
if msg_id in self._culled_ids:
raise KeyError("Record %r has been culled for size" % msg_id)
self._check_dates(rec)
_rec = self._records[msg_id]
self._drop_bytes(_rec)
_rec.update(rec)
self._add_bytes(_rec)
def drop_matching_records(self, check):
"""Remove a record from the DB."""
matches = self._match(check)
for rec in matches:
self._drop_bytes(rec)
del self._records[rec['msg_id']]
def drop_record(self, msg_id):
"""Remove a record from the DB."""
rec = self._records[msg_id]
self._drop_bytes(rec)
del self._records[msg_id]
def find_records(self, check, keys=None):
"""Find records matching a query dict, optionally extracting subset of keys.
        Returns a list of matching records.
Parameters
----------
check: dict
mongodb-style query argument
keys: list of strs [optional]
if specified, the subset of keys to extract. msg_id will *always* be
included.
"""
matches = self._match(check)
if keys:
return [ self._extract_subdict(rec, keys) for rec in matches ]
else:
return matches
def get_history(self):
"""get all msg_ids, ordered by time submitted."""
msg_ids = self._records.keys()
# Remove any that do not have a submitted timestamp.
# This is extremely unlikely to happen,
# but it seems to come up in some tests on VMs.
msg_ids = [ m for m in msg_ids if self._records[m]['submitted'] is not None ]
return sorted(msg_ids, key=lambda m: self._records[m]['submitted'])
class NoData(KeyError):
"""Special KeyError to raise when requesting data from NoDB"""
def __str__(self):
return "NoDB backend doesn't store any data. "
"Start the Controller with a DB backend to enable resubmission / result persistence."
class NoDB(BaseDB):
"""A blackhole db backend that actually stores no information.
Provides the full DB interface, but raises KeyErrors on any
method that tries to access the records. This can be used to
minimize the memory footprint of the Hub when its record-keeping
functionality is not required.
"""
def add_record(self, msg_id, record):
pass
def get_record(self, msg_id):
raise NoData()
def update_record(self, msg_id, record):
pass
def drop_matching_records(self, check):
pass
def drop_record(self, msg_id):
pass
def find_records(self, check, keys=None):
raise NoData()
def get_history(self):
raise NoData()
| {
"content_hash": "1c5bd263b78a22797ef29f4a74c71a70",
"timestamp": "",
"source": "github",
"line_count": 319,
"max_line_length": 110,
"avg_line_length": 32.81818181818182,
"alnum_prop": 0.5718788805043462,
"repo_name": "martydill/url_shortener",
"id": "8754c028b129227a0fd62db8c967c2304389c349",
"size": "10469",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "code/venv/lib/python2.7/site-packages/IPython/parallel/controller/dictdb.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "5939"
},
{
"name": "CSS",
"bytes": "8422"
},
{
"name": "HTML",
"bytes": "38632"
},
{
"name": "JavaScript",
"bytes": "1374725"
},
{
"name": "Python",
"bytes": "11917924"
},
{
"name": "Shell",
"bytes": "3769"
},
{
"name": "Smarty",
"bytes": "21425"
}
],
"symlink_target": ""
} |
from tulip import tlp
# The updateVisualization(centerViews = True) function can be called
# during script execution to update the opened views
# The pauseScript() function can be called to pause the script execution.
# To resume the script execution, you will have to click on the "Run script " button.
# The runGraphScript(scriptFile, graph) function can be called to launch
# another edited script on a tlp.Graph object.
# The scriptFile parameter defines the script name to call (in the form [a-zA-Z0-9_]+.py)
# The main(graph) function must be defined
# to run the script on the current graph
def main(graph):
KCore = graph.getDoubleProperty("K-Core")
viewLayout = graph.getLayoutProperty("viewLayout")
TentativeSIC = graph.getStringProperty("TentativeSIC")
acronym = graph.getStringProperty("acronym")
activityType = graph.getStringProperty("activityType")
barPower = graph.getDoubleProperty("barPower")
betwCentrality = graph.getDoubleProperty("betwCentrality")
birthDate = graph.getIntegerProperty("birthDate")
call = graph.getStringProperty("call")
city = graph.getStringProperty("city")
commDate = graph.getDoubleProperty("commDate")
country = graph.getStringProperty("country")
ecContribution = graph.getDoubleProperty("ecContribution")
ecMaxContribution = graph.getDoubleProperty("ecMaxContribution")
endDate = graph.getStringProperty("endDate")
endOfParticipation = graph.getBooleanProperty("endOfParticipation")
fundingScheme = graph.getStringProperty("fundingScheme")
intimacy = graph.getDoubleProperty("intimacy")
manager = graph.getBooleanProperty("manager")
moneyTogether = graph.getDoubleProperty("moneyTogether")
myMoney = graph.getDoubleProperty("myMoney")
name = graph.getStringProperty("name")
numPartners = graph.getDoubleProperty("numPartners")
numProjects = graph.getDoubleProperty("numProjects")
objective = graph.getStringProperty("objective")
orgId = graph.getStringProperty("orgId")
organizationUrl = graph.getStringProperty("organizationUrl")
postCode = graph.getStringProperty("postCode")
programme = graph.getStringProperty("programme")
projectNode = graph.getBooleanProperty("projectNode")
projectUrl = graph.getStringProperty("projectUrl")
projectsTogether = graph.getIntegerProperty("projectsTogether")
rcn = graph.getStringProperty("rcn")
relationshipValue = graph.getDoubleProperty("relationshipValue")
role = graph.getStringProperty("role")
shortName = graph.getStringProperty("shortName")
startDate = graph.getStringProperty("startDate")
status = graph.getStringProperty("status")
street = graph.getStringProperty("street")
topics = graph.getStringProperty("topics")
totMoney = graph.getDoubleProperty("totMoney")
totalCost = graph.getDoubleProperty("totalCost")
viewBorderColor = graph.getColorProperty("viewBorderColor")
viewBorderWidth = graph.getDoubleProperty("viewBorderWidth")
viewColor = graph.getColorProperty("viewColor")
viewFont = graph.getStringProperty("viewFont")
viewFontSize = graph.getIntegerProperty("viewFontSize")
viewIcon = graph.getStringProperty("viewIcon")
viewLabel = graph.getStringProperty("viewLabel")
viewLabelBorderColor = graph.getColorProperty("viewLabelBorderColor")
viewLabelBorderWidth = graph.getDoubleProperty("viewLabelBorderWidth")
viewLabelColor = graph.getColorProperty("viewLabelColor")
viewLabelPosition = graph.getIntegerProperty("viewLabelPosition")
viewMetric = graph.getDoubleProperty("viewMetric")
viewRotation = graph.getDoubleProperty("viewRotation")
viewSelection = graph.getBooleanProperty("viewSelection")
viewShape = graph.getIntegerProperty("viewShape")
viewSize = graph.getSizeProperty("viewSize")
viewSrcAnchorShape = graph.getIntegerProperty("viewSrcAnchorShape")
viewSrcAnchorSize = graph.getSizeProperty("viewSrcAnchorSize")
viewTexture = graph.getStringProperty("viewTexture")
viewTgtAnchorShape = graph.getIntegerProperty("viewTgtAnchorShape")
viewTgtAnchorSize = graph.getSizeProperty("viewTgtAnchorSize")
wBarPower = graph.getDoubleProperty("wBarPower")
weightedBarPower = graph.getDoubleProperty("weightedBarPower")
companies = 0
companiesMoney = 0
universities = 0
universitiesMoney = 0
researchCenters = 0
resCenMoney = 0
publicSector = 0
pubSecMoney = 0
other = 0
otherMoney = 0
missing = 0
missingMoney = 0
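  # CORDIS activity type codes (assumed meanings): PRC = private for-profit
  # company, HES = higher or secondary education establishment, REC = research
  # organisation, PUB = public body, OTH = other.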
for n in graph.getNodes():
if activityType[n] == 'PRC':
companies += 1
companiesMoney += myMoney[n]
elif activityType[n] == 'HES':
universities += 1
universitiesMoney += myMoney[n]
elif activityType[n] == 'OTH':
other += 1
otherMoney += myMoney[n]
elif activityType[n] == 'PUB':
publicSector += 1
pubSecMoney += myMoney[n]
elif activityType[n] == 'REC':
researchCenters += 1
resCenMoney += myMoney[n]
else:
missing += 1
missingMoney += myMoney[n]
total = companies + universities + other + publicSector + researchCenters + missing
totalMoney = companiesMoney + universitiesMoney + otherMoney + pubSecMoney + resCenMoney + missingMoney
print ('PRC: ' + str(companies) + ' (' + str(companies/float(total)) + ')')
print ('EUR ' + str(companiesMoney) + ' (' + str(float(companiesMoney)/totalMoney) + ')')
print ('HES: ' + str(universities) + ' (' + str(universities/float(total)) + ')')
print ('EUR ' + str(universitiesMoney) + ' (' + str(float(universitiesMoney)/totalMoney) + ')')
print ('OTH: ' + str(other) + ' (' + str(other/float(total)) + ')')
print ('EUR ' + str(otherMoney) + ' (' + str(float(otherMoney)/totalMoney) + ')')
print ('PUB: ' + str(publicSector) + ' (' + str(float(publicSector)/total) + ')')
print ('EUR ' + str(pubSecMoney) + ' (' + str(float(pubSecMoney)/totalMoney) + ')')
print ('REC: ' + str(researchCenters) + ' (' + str(float(researchCenters)/total) + ')')
print ('EUR ' + str(resCenMoney) + ' (' + str(float(resCenMoney)/totalMoney) + ')')
print ('missing: ' + str(missing) + ' (' + str(float(missing)/total) + ')')
print ('EUR ' + str(missingMoney) + ' (' + str(float(missingMoney)/totalMoney) + ')')
| {
"content_hash": "e039ab73f6861bcca1c3102069746449",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 105,
"avg_line_length": 46.26315789473684,
"alnum_prop": 0.7305379489679831,
"repo_name": "spaghetti-open-data/ODFest2017-horizon2020-network",
"id": "d62af449e1125fe1ed5bd255f7c8581458912ee7",
"size": "6670",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "H2020_Code_2017/byActivityType.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "104248"
}
],
"symlink_target": ""
} |