repo_name
stringlengths 5
100
| path
stringlengths 4
375
| copies
stringclasses 991
values | size
stringlengths 4
7
| content
stringlengths 666
1M
| license
stringclasses 15
values |
---|---|---|---|---|---|
schlueter/ansible
|
lib/ansible/modules/monitoring/airbrake_deployment.py
|
56
|
3599
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2013 Bruce Pennypacker <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: airbrake_deployment
version_added: "1.2"
author: "Bruce Pennypacker (@bpennypacker)"
short_description: Notify airbrake about app deployments
description:
- Notify airbrake about app deployments (see http://help.airbrake.io/kb/api-2/deploy-tracking)
options:
token:
description:
- API token.
required: true
environment:
description:
- The airbrake environment name, typically 'production', 'staging', etc.
required: true
user:
description:
- The username of the person doing the deployment
required: false
repo:
description:
- URL of the project repository
required: false
revision:
description:
- A hash, number, tag, or other identifier showing what revision was deployed
required: false
url:
description:
- Optional URL to submit the notification to. Use to send notifications to Airbrake-compliant tools like Errbit.
required: false
default: "https://api.airbrake.io/deploys.txt"
version_added: "1.5"
validate_certs:
description:
- If C(no), SSL certificates for the target url will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
requirements: []
'''
EXAMPLES = '''
- airbrake_deployment:
token: AAAAAA
environment: staging
user: ansible
revision: '4.2'
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.urls import fetch_url
from ansible.module_utils.six.moves.urllib.parse import urlencode
# ===========================================
# Module execution.
#
def main():
    """Send a deploy-tracking notification to an Airbrake-compatible endpoint.

    Builds the ``deploy[...]`` form parameters expected by the Airbrake v2
    deploy API from the module arguments and POSTs them to the configured
    URL.  In check mode the request is skipped and a change is reported.
    """
    module = AnsibleModule(
        argument_spec=dict(
            token=dict(required=True, no_log=True),
            environment=dict(required=True),
            user=dict(required=False),
            repo=dict(required=False),
            revision=dict(required=False),
            url=dict(required=False, default='https://api.airbrake.io/deploys.txt'),
            validate_certs=dict(default='yes', type='bool'),
        ),
        supports_check_mode=True
    )

    # Build the form fields understood by the Airbrake v2 deploy API.
    # Optional arguments are only sent when they carry a value.
    params = {}

    if module.params["environment"]:
        params["deploy[rails_env]"] = module.params["environment"]

    if module.params["user"]:
        params["deploy[local_username]"] = module.params["user"]

    if module.params["repo"]:
        params["deploy[scm_repository]"] = module.params["repo"]

    if module.params["revision"]:
        params["deploy[scm_revision]"] = module.params["revision"]

    params["api_key"] = module.params["token"]

    url = module.params.get('url')

    # If we're in check mode, just exit pretending like we succeeded
    if module.check_mode:
        module.exit_json(changed=True)

    # Send the data to airbrake (validate_certs is honored by fetch_url)
    data = urlencode(params)
    response, info = fetch_url(module, url, data=data)

    if info['status'] == 200:
        module.exit_json(changed=True)
    else:
        # On transport-level failures fetch_url reports status -1 and puts
        # the underlying error text in info['msg']; include it so failures
        # (SSL, DNS, refused connection) are actually diagnosable.
        module.fail_json(msg="HTTP result code: %d connecting to %s. %s"
                             % (info['status'], url, info.get('msg', '')))
# Run the module entry point only when executed directly by Ansible.
if __name__ == '__main__':
    main()
|
gpl-3.0
|
bobwalker99/Pydev
|
plugins/org.python.pydev.jython/Lib/_weakrefset.py
|
33
|
6305
|
# Access WeakSet through the weakref module.
# This code is separated-out because it is needed
# by abc.py to load everything else at startup.
from _weakref import ref
__all__ = ['WeakSet']
class _IterationGuard(object):
    """Context manager that defers removals from a weak container.

    While any guard is registered in the container's ``_iterating`` set,
    the container queues removals instead of mutating its storage; when
    the last guard exits, the queued removals are committed.  This makes
    iteration over the container safe even as referents die mid-loop.
    """

    def __init__(self, weakcontainer):
        # Hold the container weakly so the guard cannot create a cycle
        # or keep the container alive.
        self.weakcontainer = ref(weakcontainer)

    def __enter__(self):
        container = self.weakcontainer()
        if container is not None:
            container._iterating.add(self)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        container = self.weakcontainer()
        if container is None:
            return
        iterating = container._iterating
        iterating.remove(self)
        if not iterating:
            # Last guard out: flush all deferred removals.
            container._commit_removals()
class WeakSet(object):
    """A set that holds weak references to its elements.

    An element is discarded automatically once no strong reference to it
    remains.  Removals triggered while the set is being iterated are
    deferred (via _IterationGuard) and committed when iteration ends, so
    iteration never mutates ``self.data`` underneath itself.
    """

    def __init__(self, data=None):
        self.data = set()
        def _remove(item, selfref=ref(self)):
            # Weakref death callback.  It must not hold a strong reference
            # to the set itself (hence selfref); if the set is mid-iteration
            # the removal is queued instead of applied immediately.
            self = selfref()
            if self is not None:
                if self._iterating:
                    self._pending_removals.append(item)
                else:
                    self.data.discard(item)
        self._remove = _remove
        # A list of keys to be removed
        self._pending_removals = []
        self._iterating = set()
        if data is not None:
            self.update(data)

    def _commit_removals(self):
        # Apply all removals queued while iteration was in progress.
        l = self._pending_removals
        discard = self.data.discard
        while l:
            discard(l.pop())

    def __iter__(self):
        with _IterationGuard(self):
            for itemref in self.data:
                item = itemref()
                if item is not None:
                    yield item

    def __len__(self):
        # Count only references whose targets are still alive.
        return sum(x() is not None for x in self.data)

    def __contains__(self, item):
        # NOTE(review): raises TypeError for non-weak-referenceable items;
        # later upstream versions guard this -- confirm desired behavior.
        return ref(item) in self.data

    def __reduce__(self):
        # Pickle as (class, (list of live elements,), instance dict).
        return (self.__class__, (list(self),),
                getattr(self, '__dict__', None))

    # WeakSet is mutable, therefore unhashable.
    __hash__ = None

    def add(self, item):
        if self._pending_removals:
            self._commit_removals()
        # Register _remove as the death callback for the new reference.
        self.data.add(ref(item, self._remove))

    def clear(self):
        if self._pending_removals:
            self._commit_removals()
        self.data.clear()

    def copy(self):
        return self.__class__(self)

    def pop(self):
        if self._pending_removals:
            self._commit_removals()
        # Loop: a popped reference may be dead; keep popping until a live
        # element is found or the underlying set raises KeyError.
        while True:
            try:
                itemref = self.data.pop()
            except KeyError:
                raise KeyError('pop from empty WeakSet')
            item = itemref()
            if item is not None:
                return item

    def remove(self, item):
        if self._pending_removals:
            self._commit_removals()
        self.data.remove(ref(item))

    def discard(self, item):
        if self._pending_removals:
            self._commit_removals()
        self.data.discard(ref(item))

    def update(self, other):
        if self._pending_removals:
            self._commit_removals()
        if isinstance(other, self.__class__):
            # Fast path: share the already-built weak references.
            self.data.update(other.data)
        else:
            for element in other:
                self.add(element)

    def __ior__(self, other):
        self.update(other)
        return self

    # Helper functions for simple delegating methods.
    def _apply(self, other, method):
        # Coerce `other` to a WeakSet, apply a set method to the raw data,
        # and wrap the result in a new WeakSet.
        if not isinstance(other, self.__class__):
            other = self.__class__(other)
        newdata = method(other.data)
        newset = self.__class__()
        newset.data = newdata
        return newset

    def difference(self, other):
        return self._apply(other, self.data.difference)
    __sub__ = difference

    def difference_update(self, other):
        if self._pending_removals:
            self._commit_removals()
        if self is other:
            self.data.clear()
        else:
            self.data.difference_update(ref(item) for item in other)

    def __isub__(self, other):
        if self._pending_removals:
            self._commit_removals()
        if self is other:
            self.data.clear()
        else:
            self.data.difference_update(ref(item) for item in other)
        return self

    def intersection(self, other):
        return self._apply(other, self.data.intersection)
    __and__ = intersection

    def intersection_update(self, other):
        if self._pending_removals:
            self._commit_removals()
        self.data.intersection_update(ref(item) for item in other)

    def __iand__(self, other):
        if self._pending_removals:
            self._commit_removals()
        self.data.intersection_update(ref(item) for item in other)
        return self

    def issubset(self, other):
        return self.data.issubset(ref(item) for item in other)
    # NOTE(review): binds <= semantics ("subset or equal") to <; kept as-is
    # to match this file's upstream -- confirm before relying on strictness.
    __lt__ = issubset

    def __le__(self, other):
        return self.data <= set(ref(item) for item in other)

    def issuperset(self, other):
        return self.data.issuperset(ref(item) for item in other)
    __gt__ = issuperset

    def __ge__(self, other):
        return self.data >= set(ref(item) for item in other)

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return NotImplemented
        return self.data == set(ref(item) for item in other)

    def symmetric_difference(self, other):
        return self._apply(other, self.data.symmetric_difference)
    __xor__ = symmetric_difference

    def symmetric_difference_update(self, other):
        if self._pending_removals:
            self._commit_removals()
        if self is other:
            self.data.clear()
        else:
            self.data.symmetric_difference_update(ref(item) for item in other)

    def __ixor__(self, other):
        if self._pending_removals:
            self._commit_removals()
        if self is other:
            self.data.clear()
        else:
            self.data.symmetric_difference_update(ref(item) for item in other)
        return self

    def union(self, other):
        return self._apply(other, self.data.union)
    __or__ = union

    def isdisjoint(self, other):
        return len(self.intersection(other)) == 0
|
epl-1.0
|
sean797/Flexget
|
flexget/plugins/filter/exists_movie.py
|
8
|
7530
|
from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
from past.builtins import basestring
import logging
import re
from path import Path
from flexget import plugin
from flexget.config_schema import one_or_more
from flexget.event import event
from flexget.plugin import get_plugin_by_name
from flexget.utils.tools import TimedDict
log = logging.getLogger('exists_movie')
class FilterExistsMovie(object):
    """
    Reject existing movies.

    Syntax:

      exists_movie:
        path: /path/to/movies
        [type: {dirs|files}]
        [allow_different_qualities: {better|yes|no}]
        [lookup: {imdb|no}]
    """

    schema = {
        'anyOf': [
            one_or_more({'type': 'string', 'format': 'path'}),
            {
                'type': 'object',
                'properties': {
                    'path': one_or_more({'type': 'string', 'format': 'path'}),
                    'allow_different_qualities': {'enum': ['better', True, False], 'default': False},
                    'type': {'enum': ['files', 'dirs'], 'default': 'dirs'},
                    'lookup': {'enum': ['imdb', False], 'default': False}
                },
                'required': ['path'],
                'additionalProperties': False
            }
        ]
    }

    # BUGFIX: these must be raw strings. In a plain string literal '\b' is a
    # backspace character (\x08), so the word-boundary anchors never matched
    # and cd/subs/sample directories were not skipped; '\.' was also relying
    # on an invalid (deprecated) escape.
    dir_pattern = re.compile(r'\b(cd.\d|subs?|samples?)\b', re.IGNORECASE)
    file_pattern = re.compile(r'\.(avi|mkv|mp4|mpg|webm)$', re.IGNORECASE)

    def __init__(self):
        # Cache of scanned folders -> {id: quality}, kept for one hour so
        # repeated task runs do not rescan the filesystem.
        self.cache = TimedDict(cache_time='1 hour')

    def prepare_config(self, config):
        """Normalize config into a dict with 'path' as a list and a 'type'."""
        # if config is not a dict, assign value to 'path' key
        if not isinstance(config, dict):
            config = {'path': config}
        if not config.get('type'):
            config['type'] = 'dirs'
        # if only a single path is passed turn it into a 1 element list
        if isinstance(config['path'], basestring):
            config['path'] = [config['path']]
        return config

    @plugin.priority(-1)
    def on_task_filter(self, task, config):
        """Reject accepted entries whose movie already exists on disk.

        Scans the configured paths (or uses the hourly cache), builds a map
        of movie ids (imdb id or parsed name) to quality, then rejects any
        accepted entry that matches, subject to allow_different_qualities.
        """
        if not task.accepted:
            log.debug('nothing accepted, aborting')
            return

        config = self.prepare_config(config)
        imdb_lookup = plugin.get_plugin_by_name('imdb_lookup').instance

        incompatible_files = 0
        incompatible_entries = 0
        count_entries = 0
        count_files = 0

        # list of imdb ids gathered from paths / cache
        qualities = {}

        for folder in config['path']:
            folder = Path(folder).expanduser()
            # see if this path has already been scanned
            cached_qualities = self.cache.get(folder, None)
            if cached_qualities:
                log.verbose('Using cached scan for %s ...' % folder)
                qualities.update(cached_qualities)
                continue

            path_ids = {}

            if not folder.isdir():
                log.critical('Path %s does not exist' % folder)
                continue

            log.verbose('Scanning path %s ...' % folder)

            # Help debugging by removing a lot of noise
            # logging.getLogger('movieparser').setLevel(logging.WARNING)
            # logging.getLogger('imdb_lookup').setLevel(logging.WARNING)

            # scan through, collecting candidate dir or file names
            items = []
            if config.get('type') == 'dirs':
                for d in folder.walkdirs(errors='ignore'):
                    if self.dir_pattern.search(d.name):
                        continue
                    log.debug('detected dir with name %s, adding to check list' % d.name)
                    items.append(d.name)
            elif config.get('type') == 'files':
                for f in folder.walkfiles(errors='ignore'):
                    if not self.file_pattern.search(f.name):
                        continue
                    log.debug('detected file with name %s, adding to check list' % f.name)
                    items.append(f.name)

            if not items:
                log.verbose('No items with type %s were found in %s' % (config.get('type'), folder))
                continue

            for item in items:
                count_files += 1

                movie = get_plugin_by_name('parsing').instance.parse_movie(item)

                if config.get('lookup') == 'imdb':
                    try:
                        imdb_id = imdb_lookup.imdb_id_lookup(movie_title=movie.name,
                                                             movie_year=movie.year,
                                                             raw_title=item,
                                                             session=task.session)
                        if imdb_id in path_ids:
                            log.trace('duplicate %s' % item)
                            continue
                        if imdb_id is not None:
                            log.trace('adding: %s' % imdb_id)
                            path_ids[imdb_id] = movie.quality
                    except plugin.PluginError as e:
                        log.trace('%s lookup failed (%s)' % (item, e.value))
                        incompatible_files += 1
                else:
                    path_ids[movie.name] = movie.quality
                    log.trace('adding: %s' % movie.name)

            # store to cache and extend to found list
            self.cache[folder] = path_ids
            qualities.update(path_ids)

        log.debug('-- Start filtering entries ----------------------------------')

        # do actual filtering
        for entry in task.accepted:
            count_entries += 1
            log.debug('trying to parse entry %s' % entry['title'])

            if config.get('lookup') == 'imdb':
                key = 'imdb_id'
                if not entry.get('imdb_id', eval_lazy=False):
                    try:
                        imdb_lookup.lookup(entry)
                    except plugin.PluginError as e:
                        log.trace('entry %s imdb failed (%s)' % (entry['title'], e.value))
                        incompatible_entries += 1
                        continue
            else:
                key = 'movie_name'
                if not entry.get('movie_name', eval_lazy=False):
                    movie = get_plugin_by_name('parsing').instance.parse_movie(entry['title'])
                    entry['movie_name'] = movie.name

            # actual filtering
            if entry[key] in qualities:
                if config.get('allow_different_qualities') == 'better':
                    if entry['quality'] > qualities[entry[key]]:
                        log.trace('better quality')
                        continue
                elif config.get('allow_different_qualities'):
                    if entry['quality'] != qualities[entry[key]]:
                        log.trace('wrong quality')
                        continue

                entry.reject('movie exists')

        if incompatible_files or incompatible_entries:
            log.verbose('There were some incompatible items. %s of %s entries '
                        'and %s of %s directories could not be verified.' %
                        (incompatible_entries, count_entries, incompatible_files, count_files))

        log.debug('-- Finished filtering entries -------------------------------')
@event('plugin.register')
def register_plugin():
    # Register the filter under the 'exists_movie' config keyword.
    plugin.register(FilterExistsMovie, 'exists_movie', interfaces=['task'], api_ver=2)
|
mit
|
zsjohny/jumpserver
|
apps/terminal/api/storage.py
|
1
|
2385
|
# coding: utf-8
#
from rest_framework import viewsets, generics, status
from rest_framework.response import Response
from django.utils.translation import ugettext_lazy as _
from common.permissions import IsSuperUser
from ..models import CommandStorage, ReplayStorage
from ..serializers import CommandStorageSerializer, ReplayStorageSerializer
__all__ = [
'CommandStorageViewSet', 'CommandStorageTestConnectiveApi',
'ReplayStorageViewSet', 'ReplayStorageTestConnectiveApi'
]
class BaseStorageViewSetMixin:
    """Shared viewset behavior: protect undeletable storages from deletion."""

    def destroy(self, request, *args, **kwargs):
        # Refuse deletion when the instance reports it cannot be deleted
        # (per its can_delete()); otherwise defer to the normal destroy.
        instance = self.get_object()
        if not instance.can_delete():
            data = {'msg': _('Deleting the default storage is not allowed')}
            return Response(data=data, status=status.HTTP_400_BAD_REQUEST)
        return super().destroy(request, *args, **kwargs)
class CommandStorageViewSet(BaseStorageViewSetMixin, viewsets.ModelViewSet):
    """CRUD API for command storage backends; superuser only."""
    filter_fields = ('name', 'type',)
    search_fields = filter_fields
    queryset = CommandStorage.objects.all()
    serializer_class = CommandStorageSerializer
    permission_classes = (IsSuperUser,)
class ReplayStorageViewSet(BaseStorageViewSetMixin, viewsets.ModelViewSet):
    """CRUD API for replay storage backends; superuser only."""
    filter_fields = ('name', 'type',)
    search_fields = filter_fields
    queryset = ReplayStorage.objects.all()
    serializer_class = ReplayStorageSerializer
    permission_classes = (IsSuperUser,)
class BaseStorageTestConnectiveMixin:
    """Shared retrieve() that reports whether a storage's account is usable."""
    permission_classes = (IsSuperUser,)

    def retrieve(self, request, *args, **kwargs):
        # Probe the storage's credentials; any exception counts as failure
        # and its text is surfaced in the message.
        storage = self.get_object()
        try:
            valid = storage.is_valid()
        except Exception as exc:
            valid = False
            message = _("Test failure: {}".format(str(exc)))
        else:
            if valid:
                message = _("Test successful")
            else:
                message = _("Test failure: Account invalid")
        return Response({'is_valid': valid, 'msg': message})
class CommandStorageTestConnectiveApi(BaseStorageTestConnectiveMixin,
                                      generics.RetrieveAPIView):
    # GET on a single command storage runs the connectivity test.
    queryset = CommandStorage.objects.all()
class ReplayStorageTestConnectiveApi(BaseStorageTestConnectiveMixin,
                                     generics.RetrieveAPIView):
    # GET on a single replay storage runs the connectivity test.
    queryset = ReplayStorage.objects.all()
|
gpl-2.0
|
damiansoriano/odoo
|
addons/event/__openerp__.py
|
20
|
2407
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Events Organisation',
'version': '0.1',
'category': 'Tools',
'summary': 'Trainings, Conferences, Meetings, Exhibitions, Registrations',
'description': """
Organization and management of Events.
======================================
The event module allows you to efficiently organise events and all related tasks: planification, registration tracking,
attendances, etc.
Key Features
------------
* Manage your Events and Registrations
* Use emails to automatically confirm and send acknowledgements for any event registration
""",
'author': 'OpenERP SA',
'depends': ['base_setup', 'board', 'email_template', 'marketing'],
'data': [
'security/event_security.xml',
'security/ir.model.access.csv',
'wizard/event_confirm_view.xml',
'event_view.xml',
'event_data.xml',
'report/report_event_registration_view.xml',
'res_partner_view.xml',
'res_config_view.xml',
'email_template.xml',
'views/event.xml',
],
'demo': [
'event_demo.xml',
],
'test': [
'test/ui/event_users.yml',
'test/process/event_draft2done.yml'
],
'installable': True,
'auto_install': False,
'images': ['images/1_event_type_list.jpeg','images/2_events.jpeg','images/3_registrations.jpeg','images/events_kanban.jpeg'],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
xq262144/hue
|
desktop/core/ext-py/Mako-0.8.1/test/test_filters.py
|
36
|
9579
|
# -*- coding: utf-8 -*-
from mako.template import Template
import unittest
from test import TemplateTest, eq_, requires_python_2
from test.util import result_lines, flatten_result
from mako.compat import u
class FilterTest(TemplateTest):
    """Tests for Mako's expression/def/block filtering (the ``|`` syntax).

    NOTE(review): several expected strings below appear to have had HTML
    entities decoded by the extraction that produced this file (an ``h``
    filter cannot yield unescaped output); flagged inline -- confirm each
    against upstream Mako before trusting these assertions.
    """

    def test_basic(self):
        t = Template("""
        ${x | myfilter}
""")
        assert flatten_result(t.render(x="this is x", myfilter=lambda t: "MYFILTER->%s<-MYFILTER" % t)) == "MYFILTER->this is x<-MYFILTER"

    def test_expr(self):
        """test filters that are themselves expressions"""
        t = Template("""
        ${x | myfilter(y)}
""")
        def myfilter(y):
            return lambda x: "MYFILTER->%s<-%s" % (x, y)
        assert flatten_result(t.render(x="this is x", myfilter=myfilter, y="this is y")) == "MYFILTER->this is x<-this is y"

    def test_convert_str(self):
        """test that string conversion happens in expressions before sending to filters"""
        t = Template("""
            ${x | trim}
        """)
        assert flatten_result(t.render(x=5)) == "5"

    def test_quoting(self):
        t = Template("""
            foo ${bar | h}
        """)
        eq_(
            flatten_result(t.render(bar="<'some bar'>")),
            # NOTE(review): looks entity-decoded; with the ``h`` filter the
            # original likely asserted the escaped form -- confirm upstream.
            "foo <'some bar'>"
        )

    def test_entity(self):
        t = Template("foo ${bar | entity}")
        eq_(
            flatten_result(t.render(bar="<'some bar'>")),
            # NOTE(review): looks entity-decoded -- confirm upstream.
            "foo <'some bar'>"
        )

    @requires_python_2
    def test_quoting_non_unicode(self):
        t = Template("""
            foo ${bar | h}
        """, disable_unicode=True,
             output_encoding=None)
        eq_(
            flatten_result(t.render(bar="<'привет'>")),
            # NOTE(review): looks entity-decoded -- confirm upstream.
            "foo <'привет'>"
        )

    def test_def(self):
        t = Template("""
            <%def name="foo()" filter="myfilter">
            this is foo
            </%def>
            ${foo()}
        """)
        eq_(
            flatten_result(t.render(x="this is x",
                                    myfilter=lambda t: "MYFILTER->%s<-MYFILTER" % t)),
            "MYFILTER-> this is foo <-MYFILTER"
        )

    def test_import(self):
        t = Template("""
        <%!
            from mako import filters
        %>\
        trim this string: ${"  some string to trim   " | filters.trim} continue\
        """)
        assert t.render().strip()=="trim this string: some string to trim continue"

    def test_import_2(self):
        t = Template("""
        trim this string: ${"  some string to trim   " | filters.trim} continue\
        """, imports=["from mako import filters"])
        #print t.code
        assert t.render().strip()=="trim this string: some string to trim continue"

    def test_encode_filter(self):
        t = Template("""# coding: utf-8
            some stuff.... ${x}
        """, default_filters=['decode.utf8'])
        #print t.code
        eq_(
            t.render_unicode(x=u("voix m’a réveillé")).strip(),
            u("some stuff.... voix m’a réveillé")
        )

    def test_custom_default(self):
        t = Template("""
        <%!
            def myfilter(x):
                return "->" + x + "<-"
        %>

            hi ${'there'}
        """, default_filters=['myfilter'])
        assert t.render().strip()=="hi ->there<-"

    def test_global(self):
        t = Template("""
            <%page expression_filter="h"/>
            ${"<tag>this is html</tag>"}
        """)
        # NOTE(review): looks entity-decoded; expression_filter="h" should
        # produce the escaped form -- confirm upstream.
        assert t.render().strip() == "<tag>this is html</tag>"

    def test_block_via_context(self):
        t = Template("""
            <%block name="foo" filter="myfilter">
                some text
            </%block>
        """)
        def myfilter(text):
            return "MYTEXT" + text
        eq_(
            result_lines(t.render(myfilter=myfilter)),
            ["MYTEXT", "some text"]
        )

    def test_def_via_context(self):
        t = Template("""
            <%def name="foo()" filter="myfilter">
                some text
            </%def>
            ${foo()}
        """)
        def myfilter(text):
            return "MYTEXT" + text
        eq_(
            result_lines(t.render(myfilter=myfilter)),
            ["MYTEXT", "some text"]
        )

    def test_text_via_context(self):
        t = Template("""
            <%text filter="myfilter">
                some text
            </%text>
        """)
        def myfilter(text):
            return "MYTEXT" + text
        eq_(
            result_lines(t.render(myfilter=myfilter)),
            ["MYTEXT", "some text"]
        )

    def test_nflag(self):
        # ``| n`` disables the default filters for one expression.
        t = Template("""
            ${"<tag>this is html</tag>" | n}
        """, default_filters=['h', 'unicode'])
        assert t.render().strip() == "<tag>this is html</tag>"

        t = Template("""
            <%page expression_filter="h"/>
            ${"<tag>this is html</tag>" | n}
        """)
        assert t.render().strip() == "<tag>this is html</tag>"

        t = Template("""
            <%page expression_filter="h"/>
            ${"<tag>this is html</tag>" | n, h}
        """)
        # NOTE(review): looks entity-decoded; ``| n, h`` re-applies h so the
        # escaped form is expected -- confirm upstream.
        assert t.render().strip() == "<tag>this is html</tag>"

    def test_non_expression(self):
        t = Template("""
        <%!
            def a(text):
                return "this is a"
            def b(text):
                return "this is b"
        %>

        ${foo()}
        <%def name="foo()" buffered="True">
            this is text
        </%def>
        """, buffer_filters=['a'])
        assert t.render().strip() == "this is a"

        t = Template("""
        <%!
            def a(text):
                return "this is a"
            def b(text):
                return "this is b"
        %>

        ${'hi'}
        ${foo()}
        <%def name="foo()" buffered="True">
            this is text
        </%def>
        """, buffer_filters=['a'], default_filters=['b'])
        assert flatten_result(t.render()) == "this is b this is b"

        t = Template("""
        <%!
            class Foo(object):
                foo = True
                def __str__(self):
                    return "this is a"
            def a(text):
                return Foo()
            def b(text):
                if hasattr(text, 'foo'):
                    return str(text)
                else:
                    return "this is b"
        %>

        ${'hi'}
        ${foo()}
        <%def name="foo()" buffered="True">
            this is text
        </%def>
        """, buffer_filters=['a'], default_filters=['b'])
        assert flatten_result(t.render()) == "this is b this is a"

        t = Template("""
        <%!
            def a(text):
                return "this is a"
            def b(text):
                return "this is b"
        %>

        ${foo()}
        ${bar()}
        <%def name="foo()" filter="b">
            this is text
        </%def>
        <%def name="bar()" filter="b" buffered="True">
            this is text
        </%def>
        """, buffer_filters=['a'])
        assert flatten_result(t.render()) == "this is b this is a"

    def test_builtins(self):
        t = Template("""
            ${"this is <text>" | h}
        """)
        # NOTE(review): looks entity-decoded -- confirm upstream.
        assert flatten_result(t.render()) == "this is <text>"

        t = Template("""
            http://foo.com/arg1=${"hi! this is a string." | u}
        """)
        assert flatten_result(t.render()) == "http://foo.com/arg1=hi%21+this+is+a+string."
class BufferTest(unittest.TestCase):
    """Tests for buffered defs and the ``capture`` helper."""

    def test_buffered_def(self):
        t = Template("""
            <%def name="foo()" buffered="True">
                this is foo
            </%def>
            ${"hi->" + foo() + "<-hi"}
        """)
        assert flatten_result(t.render()) == "hi-> this is foo <-hi"

    def test_unbuffered_def(self):
        # Unbuffered defs write straight to the output, so foo() returns ''.
        t = Template("""
            <%def name="foo()" buffered="False">
                this is foo
            </%def>
            ${"hi->" + foo() + "<-hi"}
        """)
        assert flatten_result(t.render()) == "this is foo hi-><-hi"

    def test_capture(self):
        # capture() collects an unbuffered def's output into a string.
        t = Template("""
            <%def name="foo()" buffered="False">
                this is foo
            </%def>
            ${"hi->" + capture(foo) + "<-hi"}
        """)
        assert flatten_result(t.render()) == "hi-> this is foo <-hi"

    def test_capture_exception(self):
        # Exceptions raised inside a captured def propagate to the caller.
        template = Template("""
            <%def name="a()">
                this is a
                <%
                raise TypeError("hi")
                %>
            </%def>
            <%
            c = capture(a)
            %>
            a->${c}<-a
        """)
        try:
            template.render()
            assert False
        except TypeError:
            assert True

    def test_buffered_exception(self):
        # Exceptions raised inside a buffered def also propagate.
        template = Template("""
            <%def name="a()" buffered="True">
                <%
                raise TypeError("hi")
                %>
            </%def>

            ${a()}

        """)
        try:
            print(template.render())
            assert False
        except TypeError:
            assert True

    def test_capture_ccall(self):
        t = Template("""
            <%def name="foo()">
                <%
                    x = capture(caller.body)
                %>
                this is foo.  body: ${x}
            </%def>

            <%call expr="foo()">
                ccall body
            </%call>
        """)
        #print t.render()
        assert flatten_result(t.render()) == "this is foo. body: ccall body"
|
apache-2.0
|
tjsavage/sfcsdatabase
|
sfcs/django/db/utils.py
|
78
|
6129
|
import inspect
import os
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
DEFAULT_DB_ALIAS = 'default'
# Define some exceptions that mirror the PEP249 interface.
# We will rethrow any backend-specific errors using these
# common wrappers
class DatabaseError(Exception):
    """Common wrapper re-raised for backend-specific database errors (mirrors PEP 249)."""
    pass
class IntegrityError(DatabaseError):
    """Raised when the relational integrity of the database is violated (mirrors PEP 249)."""
    pass
def load_backend(backend_name):
    """Import and return the ``base`` module of a database backend.

    First treats ``backend_name`` as a deprecated short name resolved under
    ``django.db.backends`` (emitting a DeprecationWarning on success); on
    ImportError it falls back to treating it as a fully qualified package
    path, and raises ImproperlyConfigured with the list of built-in
    backends when neither resolves.
    """
    try:
        module = import_module('.base', 'django.db.backends.%s' % backend_name)
        import warnings
        warnings.warn(
            "Short names for DATABASE_ENGINE are deprecated; prepend with 'django.db.backends.'",
            DeprecationWarning
        )
        return module
    except ImportError, e:
        # Look for a fully qualified database backend name
        try:
            return import_module('.base', backend_name)
        except ImportError, e_user:
            # The database backend wasn't found. Display a helpful error message
            # listing all possible (built-in) database backends.
            backend_dir = os.path.join(os.path.dirname(__file__), 'backends')
            try:
                available_backends = [f for f in os.listdir(backend_dir)
                                      if os.path.isdir(os.path.join(backend_dir, f))
                                      and not f.startswith('.')]
            except EnvironmentError:
                # backends/ directory unreadable or missing; report no builtins
                available_backends = []
            available_backends.sort()
            if backend_name not in available_backends:
                error_msg = ("%r isn't an available database backend. \n" +
                             "Try using django.db.backends.XXX, where XXX is one of:\n %s\n" +
                             "Error was: %s") % \
                            (backend_name, ", ".join(map(repr, available_backends)), e_user)
                raise ImproperlyConfigured(error_msg)
            else:
                # The name matched a built-in backend, so the original import
                # failure is a genuine bug inside Django -- re-raise it.
                raise # If there's some other error, this must be an error in Django itself.
class ConnectionDoesNotExist(Exception):
    """Raised when an unknown database alias is requested."""
    pass
class ConnectionHandler(object):
    """Lazily create and cache one DatabaseWrapper per database alias."""

    def __init__(self, databases):
        self.databases = databases
        self._connections = {}

    def ensure_defaults(self, alias):
        """
        Puts the defaults into the settings dictionary for a given connection
        where no settings is provided.
        """
        try:
            conn = self.databases[alias]
        except KeyError:
            raise ConnectionDoesNotExist("The connection %s doesn't exist" % alias)

        conn.setdefault('ENGINE', 'django.db.backends.dummy')
        # An empty or bare-prefix ENGINE falls back to the dummy backend.
        if not conn['ENGINE'] or conn['ENGINE'] == 'django.db.backends.':
            conn['ENGINE'] = 'django.db.backends.dummy'
        conn.setdefault('OPTIONS', {})
        for key in ('TEST_CHARSET', 'TEST_COLLATION', 'TEST_NAME', 'TEST_MIRROR'):
            conn.setdefault(key, None)
        conn.setdefault('TIME_ZONE', settings.TIME_ZONE)
        for key in ('NAME', 'USER', 'PASSWORD', 'HOST', 'PORT'):
            conn.setdefault(key, '')

    def __getitem__(self, alias):
        # Build and memoize the wrapper on first access.
        if alias not in self._connections:
            self.ensure_defaults(alias)
            db = self.databases[alias]
            backend = load_backend(db['ENGINE'])
            self._connections[alias] = backend.DatabaseWrapper(db, alias)
        return self._connections[alias]

    def __iter__(self):
        return iter(self.databases)

    def all(self):
        return [self[alias] for alias in self]
class ConnectionRouter(object):
    """Chains router objects to decide which database an operation uses."""

    def __init__(self, routers):
        self.routers = []
        for r in routers:
            if isinstance(r, basestring):
                # Routers given as dotted paths are imported and instantiated.
                try:
                    module_name, klass_name = r.rsplit('.', 1)
                    module = import_module(module_name)
                except ImportError, e:
                    raise ImproperlyConfigured('Error importing database router %s: "%s"' % (klass_name, e))
                try:
                    router_class = getattr(module, klass_name)
                except AttributeError:
                    raise ImproperlyConfigured('Module "%s" does not define a database router name "%s"' % (module, klass_name))
                else:
                    router = router_class()
            else:
                # Already-instantiated router objects are used as-is.
                router = r
            self.routers.append(router)

    def _router_func(action):
        # Called at class-definition time (note: no ``self``) to build the
        # db_for_read/db_for_write methods; ``action`` is the router method
        # name to look up on each router in turn.
        def _route_db(self, model, **hints):
            chosen_db = None
            for router in self.routers:
                try:
                    method = getattr(router, action)
                except AttributeError:
                    # If the router doesn't have a method, skip to the next one.
                    pass
                else:
                    chosen_db = method(model, **hints)
                    if chosen_db:
                        return chosen_db
            # No router decided: fall back to the instance's db, then default.
            try:
                return hints['instance']._state.db or DEFAULT_DB_ALIAS
            except KeyError:
                return DEFAULT_DB_ALIAS
        return _route_db
    db_for_read = _router_func('db_for_read')
    db_for_write = _router_func('db_for_write')

    def allow_relation(self, obj1, obj2, **hints):
        # First router with a non-None opinion wins; otherwise allow only
        # relations within the same database.
        for router in self.routers:
            try:
                method = router.allow_relation
            except AttributeError:
                # If the router doesn't have a method, skip to the next one.
                pass
            else:
                allow = method(obj1, obj2, **hints)
                if allow is not None:
                    return allow
        return obj1._state.db == obj2._state.db

    def allow_syncdb(self, db, model):
        # First router with a non-None opinion wins; default is to allow.
        for router in self.routers:
            try:
                method = router.allow_syncdb
            except AttributeError:
                # If the router doesn't have a method, skip to the next one.
                pass
            else:
                allow = method(db, model)
                if allow is not None:
                    return allow
        return True
|
bsd-3-clause
|
squirrelo/qiime
|
tests/test_pick_rep_set.py
|
15
|
20044
|
#!/usr/bin/env python
"""Tests of code for representative set picking"""
__author__ = "Rob Knight"
__copyright__ = "Copyright 2011, The QIIME Project"
# remember to add yourself if you make changes
__credits__ = ["Rob Knight", "Kyle Bittinger", "Greg Caporaso"]
__license__ = "GPL"
__version__ = "1.9.1-dev"
__maintainer__ = "Daniel McDonald"
__email__ = "[email protected]"
from os import remove, close
from tempfile import mkstemp
from unittest import TestCase, main
from skbio.util import remove_files
from skbio.parse.sequences import parse_fasta
from skbio.alignment import SequenceCollection
from skbio.sequence import DNA
from qiime.pick_rep_set import (RepSetPicker, GenericRepSetPicker, first_id,
first, random_id, longest_id, unique_id_map, label_to_name,
make_most_abundant, parse_fasta, ReferenceRepSetPicker)
class RepSetPickerTests(TestCase):
    """Tests of the abstract RepSetPicker class"""

    def test_init(self):
        """Abstract RepSetPicker __init__ should store name, params"""
        p = RepSetPicker({})
        self.assertEqual(p.Name, 'RepSetPicker')
        self.assertEqual(p.Params, {})

    def test_call(self):
        """Abstract RepSetPicker __call__ should raise NotImplementedError"""
        # The base class is abstract: calling it must fail regardless of paths.
        p = RepSetPicker({})
        self.assertRaises(NotImplementedError, p, '/path/to/seqs',
                          '/path/to/otus')
class SharedSetupTestCase(TestCase):
    """Wrapper for shared setup stuff"""

    def setUp(self):
        # create the temporary input files
        # (dna_seqs and otus are module-level fixture strings defined
        # elsewhere in this file -- presumably FASTA and OTU-map text)
        fd, self.tmp_seq_filepath = mkstemp(prefix='GenericRepSetPickerTest_',
                                            suffix='.fasta')
        close(fd)
        seq_file = open(self.tmp_seq_filepath, 'w')
        seq_file.write(dna_seqs)
        seq_file.close()

        fd, self.tmp_otu_filepath = mkstemp(prefix='GenericRepSetPickerTest_',
                                            suffix='.otu')
        close(fd)
        otu_file = open(self.tmp_otu_filepath, 'w')
        otu_file.write(otus)
        otu_file.close()

        self.files_to_remove = [self.tmp_seq_filepath, self.tmp_otu_filepath]

        self.params = {'Algorithm': 'first', 'ChoiceF': first_id}

    def tearDown(self):
        # Delete the temp files created in setUp.
        remove_files(self.files_to_remove)
class GenericRepSetPickerTests(SharedSetupTestCase):
    """ Tests of the generic RepSet picker """
    def test_call_default_params(self):
        """GenericRepSetPicker.__call__ returns expected clusters default params"""
        # adapted from test_app.test_cd_hit.test_cdhit_clusters_from_seqs
        # expected mapping: OTU id -> id of its representative sequence
        exp = {'0': 'R27DLI_4812',
               '1': 'U1PLI_7889',
               '2': 'W3Cecum_4858',
               '3': 'R27DLI_3243',
               }
        app = GenericRepSetPicker(params={'Algorithm': 'first',
                                          'ChoiceF': first_id})
        obs = app(self.tmp_seq_filepath, self.tmp_otu_filepath)
        self.assertEqual(obs, exp)
    def test_call_wrapped_function(self):
        """GenericRepSetPicker.__call__ returns expected clusters default params"""
        # adapted from test_app.test_cd_hit.test_cdhit_clusters_from_seqs
        exp = {'0': 'R27DLI_4812',
               '1': 'U1PLI_7889',
               '2': 'W3Cecum_4858',
               '3': 'R27DLI_3243',
               }
        # 'most_abundant' needs the seqs themselves, hence ChoiceFRequiresSeqs
        app = GenericRepSetPicker(params={'Algorithm': 'most_abundant',
                                          'ChoiceF': make_most_abundant, 'ChoiceFRequiresSeqs': True})
        obs = app(self.tmp_seq_filepath, self.tmp_otu_filepath)
        self.assertEqual(obs, exp)
    def test_call_output_to_file(self):
        """GenericRepSetPicker.__call__ output to file functions as expected
        """
        fd, tmp_result_filepath = mkstemp(
            prefix='GenericRepSetPickerTest.test_call_output_to_file_',
            suffix='.txt')
        close(fd)
        app = GenericRepSetPicker(params=self.params)
        obs = app(self.tmp_seq_filepath, self.tmp_otu_filepath,
                  result_path=tmp_result_filepath)
        result_file = open(tmp_result_filepath)
        result_file_str = result_file.read()
        result_file.close()
        # remove the result file before running the test, so in
        # case it fails the temp file is still cleaned up
        remove(tmp_result_filepath)
        # compare data in result file to fake expected file
        self.assertEqual(result_file_str, rep_seqs_result_file_exp)
        # confirm that nothing is returned when result_path is specified
        self.assertEqual(obs, None)
    def test_call_output_to_file_sorted(self):
        """GenericRepSetPicker.__call__ output to file sorts when requested
        """
        fd, tmp_result_filepath = mkstemp(
            prefix='GenericRepSetPickerTest.test_call_output_to_file_',
            suffix='.txt')
        close(fd)
        app = GenericRepSetPicker(params=self.params)
        # sort_by='seq_id' orders records by representative sequence id
        obs = app(self.tmp_seq_filepath, self.tmp_otu_filepath,
                  result_path=tmp_result_filepath, sort_by='seq_id')
        result_file = open(tmp_result_filepath)
        result_file_str = result_file.read()
        result_file.close()
        # remove the result file before running the test, so in
        # case it fails the temp file is still cleaned up
        remove(tmp_result_filepath)
        # compare data in result file to fake expected file
        self.assertEqual(result_file_str, rep_seqs_result_file_sorted_exp)
        # confirm that nothing is returned when result_path is specified
        self.assertEqual(obs, None)
    def test_call_log_file(self):
        """GenericRepSetPicker.__call__ writes log when expected
        """
        fd, tmp_log_filepath = mkstemp(
            prefix='GenericRepSetPickerTest.test_call_output_to_file_l_',
            suffix='.txt')
        close(fd)
        fd, tmp_result_filepath = mkstemp(
            prefix='GenericRepSetPickerTest.test_call_output_to_file_r_',
            suffix='.txt')
        close(fd)
        app = GenericRepSetPicker(params=self.params)
        obs = app(self.tmp_seq_filepath, self.tmp_otu_filepath,
                  result_path=tmp_result_filepath, log_path=tmp_log_filepath)
        log_file = open(tmp_log_filepath)
        log_file_str = log_file.read()
        log_file.close()
        # remove the temp files before running the test, so in
        # case it fails the temp file is still cleaned up
        remove(tmp_log_filepath)
        remove(tmp_result_filepath)
        # expected log lines, compared pairwise below
        log_file_exp = ["GenericRepSetPicker parameters:",
                        'Algorithm:first',
                        "Application:None",
                        'ChoiceF:first',
                        'ChoiceFRequiresSeqs:False',
                        "Result path: %s" % tmp_result_filepath, ]
        # compare data in log file to fake expected log file
        for i, j in zip(log_file_str.splitlines(), log_file_exp):
            if not i.startswith('ChoiceF:'):  # can't test, different each time
                self.assertEqual(i, j)
class ReferenceRepSetPickerTests(SharedSetupTestCase):
    """Tests of the ReferenceRepSetPicker class"""
    def setUp(self):
        # create the temporary input files (overrides SharedSetupTestCase:
        # this picker also needs a reference FASTA and a ref-aware OTU map)
        fd, self.tmp_seq_filepath = mkstemp(
            prefix='ReferenceRepSetPickerTest_',
            suffix='.fasta')
        close(fd)
        seq_file = open(self.tmp_seq_filepath, 'w')
        seq_file.write(dna_seqs)
        seq_file.close()
        fd, self.ref_seq_filepath = mkstemp(
            prefix='ReferenceRepSetPickerTest_',
            suffix='.fasta')
        close(fd)
        seq_file = open(self.ref_seq_filepath, 'w')
        seq_file.write(reference_seqs)
        seq_file.close()
        fd, self.tmp_otu_filepath = mkstemp(
            prefix='ReferenceRepSetPickerTest_',
            suffix='.otu')
        close(fd)
        otu_file = open(self.tmp_otu_filepath, 'w')
        otu_file.write(otus_w_ref)
        otu_file.close()
        # pre-created output target for the result_path tests; the picker
        # overwrites its placeholder contents
        fd, self.result_filepath = mkstemp(
            prefix='ReferenceRepSetPickerTest_',
            suffix='.fasta')
        close(fd)
        otu_file = open(self.result_filepath, 'w')
        otu_file.write(otus_w_ref)
        otu_file.close()
        self.files_to_remove = [self.tmp_seq_filepath,
                                self.tmp_otu_filepath,
                                self.ref_seq_filepath,
                                self.result_filepath]
        self.params = {'Algorithm': 'first', 'ChoiceF': first_id}
    def test_call_default_params(self):
        """ReferenceRepSetPicker.__call__ expected clusters default params"""
        # expected mapping: OTU id -> (representative id, its sequence);
        # 'ref*' OTUs resolve against the reference FASTA
        exp = {'0': ('R27DLI_4812', 'CTGGGCCGTATCTC'),
               'ref1': ('ref1', 'GGGGGGGAAAAAAAAAAAAA'),
               '2': ('W3Cecum_4858', 'TTGGGCCGTGTCTCAGT'),
               'ref0': ('ref0', 'CCCAAAAAAATTTTTT'),
               }
        app = ReferenceRepSetPicker(params={'Algorithm': 'first',
                                            'ChoiceF': first_id})
        obs = app(self.tmp_seq_filepath,
                  self.tmp_otu_filepath,
                  self.ref_seq_filepath)
        self.assertEqual(obs, exp)
    def test_call_write_to_file(self):
        """ReferenceRepSetPicker.__call__ otu map correctly written to file"""
        app = ReferenceRepSetPicker(params={'Algorithm': 'first',
                                            'ChoiceF': first_id})
        app(self.tmp_seq_filepath,
            self.tmp_otu_filepath,
            self.ref_seq_filepath,
            result_path=self.result_filepath)
        with open(self.result_filepath) as f:
            actual = SequenceCollection.from_fasta_records(parse_fasta(f), DNA)
        expected = SequenceCollection.from_fasta_records(
            parse_fasta(rep_seqs_reference_result_file_exp.split('\n')), DNA)
        # we don't care about order in the results
        self.assertEqual(set(actual), set(expected))
    def test_non_ref_otus(self):
        """ReferenceRepSetPicker.__call__ same result as Generic when no ref otus
        """
        exp = {'0': ('R27DLI_4812', 'CTGGGCCGTATCTC'),
               '1': ('U1PLI_7889', 'TTGGACCGTG'),
               '2': ('W3Cecum_4858', 'TTGGGCCGTGTCTCAGT'),
               '3': ('R27DLI_3243', 'CTGGACCGTGTCT')}
        fd, tmp_otu_filepath = mkstemp(
            prefix='ReferenceRepSetPickerTest_',
            suffix='.otu')
        close(fd)
        otu_file = open(tmp_otu_filepath, 'w')
        otu_file.write(otus)
        otu_file.close()
        self.files_to_remove.append(tmp_otu_filepath)
        app = ReferenceRepSetPicker(params={'Algorithm': 'first',
                                            'ChoiceF': first_id})
        obs = app(self.tmp_seq_filepath,
                  tmp_otu_filepath,
                  self.ref_seq_filepath)
        self.assertEqual(obs, exp)
    def test_call_invalid_id(self):
        """ReferenceRepSetPicker.__call__ expected clusters default params"""
        app = ReferenceRepSetPicker(params={'Algorithm': 'first',
                                            'ChoiceF': first_id})
        fd, tmp_otu_filepath = mkstemp(
            prefix='ReferenceRepSetPickerTest_',
            suffix='.otu')
        close(fd)
        otu_file = open(tmp_otu_filepath, 'w')
        # replace a valid sequence identifier with an invalid
        # sequence identifier (i.e., one that we don't have a sequence for)
        otu_file.write(otus_w_ref.replace('R27DLI_4812', 'bad_seq_identifier'))
        otu_file.close()
        self.files_to_remove.append(tmp_otu_filepath)
        # returning in dict
        self.assertRaises(KeyError,
                          app,
                          self.tmp_seq_filepath,
                          tmp_otu_filepath,
                          self.ref_seq_filepath)
        # writing to file
        self.assertRaises(KeyError,
                          app,
                          self.tmp_seq_filepath,
                          tmp_otu_filepath,
                          self.ref_seq_filepath,
                          result_path=self.result_filepath)
    def test_call_ref_only(self):
        """ReferenceRepSetPicker.__call__ functions with no non-refseqs"""
        fd, tmp_otu_filepath = mkstemp(
            prefix='ReferenceRepSetPickerTest_',
            suffix='.otu')
        close(fd)
        otu_file = open(tmp_otu_filepath, 'w')
        otu_file.write(otus_all_ref)
        otu_file.close()
        self.files_to_remove.append(tmp_otu_filepath)
        exp = {'ref1': ('ref1', 'GGGGGGGAAAAAAAAAAAAA'),
               'ref0': ('ref0', 'CCCAAAAAAATTTTTT')}
        # passing only reference (not input seqs)
        app = ReferenceRepSetPicker(params={'Algorithm': 'first',
                                            'ChoiceF': first_id})
        obs = app(None,
                  tmp_otu_filepath,
                  self.ref_seq_filepath)
        self.assertEqual(obs, exp)
        # passing reference and input seqs
        app = ReferenceRepSetPicker(params={'Algorithm': 'first',
                                            'ChoiceF': first_id})
        obs = app(self.tmp_seq_filepath,
                  tmp_otu_filepath,
                  self.ref_seq_filepath)
        self.assertEqual(obs, exp)
    def test_call_alt_non_ref_picker(self):
        """ReferenceRepSetPicker.__call__ handles alt non-ref picking method"""
        # with 'longest', OTU 0's representative is the longest member seq
        exp = {'0': ('U1PLI_9526', 'CTGGGCCGTATCTCAGTCCCAATGTGGCCGGTCG'
                     'GTCTCTCAACCCGGCTACCCATCGCGGGCTAGGTGGGCCGTT'
                     'ACCCCGCCTACTACCTAATGGGCCGCGACCCCATCCCTTGCCGTCTGGGC'
                     'TTTCCCGGGCCCCCCAGGAGGGGGGCGAGGAGTATCCGGTATTAGCCTCGGTT'
                     'TCCCAAGGTTGTCCCGGAGCAAGGGGCAGGTTGGTCACGTGTTACTCACCCGT'
                     'TCGCCACTTCATGTCCGCCCGAGGGCGGTTTCATCG'),
               'ref1': ('ref1', 'GGGGGGGAAAAAAAAAAAAA'),
               '2': ('W3Cecum_4858', 'TTGGGCCGTGTCTCAGT'),
               'ref0': ('ref0', 'CCCAAAAAAATTTTTT'),
               }
        app = ReferenceRepSetPicker(params={'Algorithm': 'longest',
                                            'ChoiceF': longest_id})
        obs = app(self.tmp_seq_filepath,
                  self.tmp_otu_filepath,
                  self.ref_seq_filepath)
        self.assertEqual(obs, exp)
class TopLevelTests(SharedSetupTestCase):
    """Tests of top-level functions"""
    def test_first(self):
        """first should always return first item"""
        vals = [3, 4, 2]
        self.assertEqual(first(vals), 3)
        vals.reverse()
        self.assertEqual(first(vals), 2)
    def test_first_id(self):
        """first_id should return first id from list"""
        ids = \
            "R27DLI_4812 R27DLI_600 R27DLI_727 U1PLI_403 U1PLI_8969".split(
            )
        self.assertEqual(first_id(ids, {}), 'R27DLI_4812')
    def test_random_id(self):
        """random_id should return random id from list"""
        ids = \
            "R27DLI_4812 R27DLI_600 R27DLI_727 U1PLI_403 U1PLI_8969".split(
            )
        assert random_id(ids, {}) in ids
        # just test we got something from the list, don't add stochastic test
    def test_longest_id(self):
        """longest_id should return id associated with longest seq"""
        ids = \
            "R27DLI_4812 R27DLI_600 R27DLI_727 U1PLI_403 U1PLI_8969".split(
            )
        seqs = dict(parse_fasta(dna_seqs.splitlines(),
                                label_to_name=label_to_name))
        # U1PLI_403 has the longest sequence among the listed ids
        self.assertEqual(longest_id(ids, seqs), 'U1PLI_403')
    def test_unique_id_map(self):
        """unique_id_map should return map of seqs:unique representatives"""
        seqs = {'a': 'AG', 'b': 'AG', 'c': 'CC', 'd': 'CT'}
        obs = unique_id_map(seqs)
        exp = {'c': ['c'], 'd': ['d'], 'a': ['a', 'b'], 'b': ['a', 'b']}
        # can't predict if a or b
        for k in obs:
            assert obs[k] in exp[k]
    def test_make_most_abundant(self):
        """make_most_abundant should return function with correct behavior"""
        ids = \
            "R27DLI_4812 R27DLI_600 R27DLI_727 U1PLI_403 U1PLI_8969".split(
            )
        seqs = dict(parse_fasta(dna_seqs.splitlines(),
                                label_to_name=label_to_name))
        f = make_most_abundant(seqs)
        result = f(ids, seqs)
        # three sequences tie for most abundant; any of them is acceptable
        assert result in ['R27DLI_4812', 'R27DLI_727', 'U1PLI_8969']
dna_seqs = """>R27DLI_4812 FMSX0OV01EIYV5 orig_bc=CTTGATGCGTAT new_bc=CTTGATGCGTAT bc_diffs=0
CTGGGCCGTATCTC
>R27DLI_600 FMSX0OV01D110Y orig_bc=CTTGATGCGTAT new_bc=CTTGATGCGTAT bc_diffs=0
CTGGGCCGTATCTCA
>R27DLI_727 FMSX0OV01D5X55 orig_bc=CTTGATGCGTAT new_bc=CTTGATGCGTAT bc_diffs=0
CTGGGCCGTATCTC
>U1PLI_403 FMSX0OV01DVG99 orig_bc=TACAGATGGCTC new_bc=TACAGATGGCTC bc_diffs=0
CTGGGCCGTATCTCAGTCCCAA
>U1PLI_8969 FMSX0OV01ARWY7 orig_bc=TACAGATGGCTC new_bc=TACAGATGGCTC bc_diffs=0
CTGGGCCGTATCTC
>U1PLI_9080 FMSX0OV01C9JUX orig_bc=TACAGATGGCTC new_bc=TACAGATGGCTC bc_diffs=0
CTGGGCCG
>U1PLI_9526 FMSX0OV01EUN7B orig_bc=TACAGATGGCTC new_bc=TACAGATGGCTC bc_diffs=0
CTGGGCCGTATCTCAGTCCCAATGTGGCCGGTCGGTCTCTCAACCCGGCTACCCATCGCGGGCTAGGTGGGCCGTTACCCCGCCTACTACCTAATGGGCCGCGACCCCATCCCTTGCCGTCTGGGCTTTCCCGGGCCCCCCAGGAGGGGGGCGAGGAGTATCCGGTATTAGCCTCGGTTTCCCAAGGTTGTCCCGGAGCAAGGGGCAGGTTGGTCACGTGTTACTCACCCGTTCGCCACTTCATGTCCGCCCGAGGGCGGTTTCATCG
>W3Cecum_6642 FMSX0OV01CW7FI orig_bc=GATACGTCCTGA new_bc=GATACGTCCTGA bc_diffs=0
CTGGGCCGTATCTCAGT
>W3Cecum_8992 FMSX0OV01C3YXK orig_bc=GATACGTCCTGA new_bc=GATACGTCCTGA bc_diffs=0
CTGGGCCGTGTCTC
>U1PLI_7889 FMSX0OV01C6HRL orig_bc=TACAGATGGCTC new_bc=TACAGATGGCTC bc_diffs=0
TTGGACCGTG
>W3Cecum_4858 FMSX0OV01BX4KM orig_bc=GATACGTCCTGA new_bc=GATACGTCCTGA bc_diffs=0
TTGGGCCGTGTCTCAGT
>R27DLI_3243 FMSX0OV01DH41R orig_bc=CTTGATGCGTAT new_bc=CTTGATGCGTAT bc_diffs=0
CTGGACCGTGTCT
>R27DLI_4562 FMSX0OV01EJKLT orig_bc=CTTGATGCGTAT new_bc=CTTGATGCGTAT bc_diffs=0
CTGGACCGTGTCT
>R27DLI_6828 FMSX0OV01BCWTL orig_bc=CTTGATGCGTAT new_bc=CTTGATGCGTAT bc_diffs=0
CTGGACCGTGTCT
>R27DLI_9097 FMSX0OV01APUV6 orig_bc=CTTGATGCGTAT new_bc=CTTGATGCGTAT bc_diffs=0
CTGGACCGTGTCT
>U1PLI_2780 FMSX0OV01E2K1S orig_bc=TACAGATGGCTC new_bc=TACAGATGGCTC bc_diffs=0
CTGGACCGTGTCTC
>U1PLI_67 FMSX0OV01DO1NS orig_bc=TACAGATGGCTC new_bc=TACAGATGGCTC bc_diffs=0
CTGGACCGTGT
>U9PSI_10475 FMSX0OV01BB4Q3 orig_bc=GATAGCTGTCTT new_bc=GATAGCTGTCTT bc_diffs=0
CTGGACCGTGTCTC
>U9PSI_4341 FMSX0OV01B8SXV orig_bc=GATAGCTGTCTT new_bc=GATAGCTGTCTT bc_diffs=0
CTGGACCGTGTCT
>W3Cecum_5191 FMSX0OV01BMU6R orig_bc=GATACGTCCTGA new_bc=GATACGTCCTGA bc_diffs=0
CTGGACCGTGTCT
"""
otus = """0 R27DLI_4812 R27DLI_600 R27DLI_727 U1PLI_403 U1PLI_8969 U1PLI_9080 U1PLI_9526 W3Cecum_6642 W3Cecum_8992
1 U1PLI_7889
2 W3Cecum_4858
3 R27DLI_3243 R27DLI_4562 R27DLI_6828 R27DLI_9097 U1PLI_2780 U1PLI_67 U9PSI_10475 U9PSI_4341 W3Cecum_5191
"""
rep_seqs_result_file_exp = """>0 R27DLI_4812
CTGGGCCGTATCTC
>1 U1PLI_7889
TTGGACCGTG
>2 W3Cecum_4858
TTGGGCCGTGTCTCAGT
>3 R27DLI_3243
CTGGACCGTGTCT
"""
rep_seqs_result_file_sorted_exp = """>3 R27DLI_3243
CTGGACCGTGTCT
>0 R27DLI_4812
CTGGGCCGTATCTC
>2 W3Cecum_4858
TTGGGCCGTGTCTCAGT
>1 U1PLI_7889
TTGGACCGTG
"""
otus_w_ref = """0 R27DLI_4812 R27DLI_600 R27DLI_727 U1PLI_403 U1PLI_8969 U1PLI_9080 U1PLI_9526 W3Cecum_6642 W3Cecum_8992
ref1 U1PLI_7889
2 W3Cecum_4858
ref0 R27DLI_3243 R27DLI_4562 R27DLI_6828 R27DLI_9097 U1PLI_2780 U1PLI_67 U9PSI_10475 U9PSI_4341 W3Cecum_5191
"""
otus_all_ref = """ref1 U1PLI_7889
ref0 R27DLI_3243 R27DLI_4562 R27DLI_6828 R27DLI_9097 U1PLI_2780 U1PLI_67 U9PSI_10475 U9PSI_4341 W3Cecum_5191
"""
reference_seqs = """>ref0
CCCAAAAAAATTTTTT
>ref1 some comment
GGGGGGGAAAAAAAAAAAAA
>ref2
CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCAAAA
"""
rep_seqs_reference_result_file_exp = """>0 R27DLI_4812
CTGGGCCGTATCTC
>ref1 ref1
GGGGGGGAAAAAAAAAAAAA
>2 W3Cecum_4858
TTGGGCCGTGTCTCAGT
>ref0 ref0
CCCAAAAAAATTTTTT
"""
# run unit tests if run from command-line
if __name__ == '__main__':
main()
|
gpl-2.0
|
cnsoft/kbengine-cocos2dx
|
kbe/src/lib/python/Lib/test/test_metaclass.py
|
59
|
6062
|
doctests = """
Basic class construction.
>>> class C:
... def meth(self): print("Hello")
...
>>> C.__class__ is type
True
>>> a = C()
>>> a.__class__ is C
True
>>> a.meth()
Hello
>>>
Use *args notation for the bases.
>>> class A: pass
>>> class B: pass
>>> bases = (A, B)
>>> class C(*bases): pass
>>> C.__bases__ == bases
True
>>>
Use a trivial metaclass.
>>> class M(type):
... pass
...
>>> class C(metaclass=M):
... def meth(self): print("Hello")
...
>>> C.__class__ is M
True
>>> a = C()
>>> a.__class__ is C
True
>>> a.meth()
Hello
>>>
Use **kwds notation for the metaclass keyword.
>>> kwds = {'metaclass': M}
>>> class C(**kwds): pass
...
>>> C.__class__ is M
True
>>> a = C()
>>> a.__class__ is C
True
>>>
Use a metaclass with a __prepare__ static method.
>>> class M(type):
... @staticmethod
... def __prepare__(*args, **kwds):
... print("Prepare called:", args, kwds)
... return dict()
... def __new__(cls, name, bases, namespace, **kwds):
... print("New called:", kwds)
... return type.__new__(cls, name, bases, namespace)
... def __init__(cls, *args, **kwds):
... pass
...
>>> class C(metaclass=M):
... def meth(self): print("Hello")
...
Prepare called: ('C', ()) {}
New called: {}
>>>
Also pass another keyword.
>>> class C(object, metaclass=M, other="haha"):
... pass
...
Prepare called: ('C', (<class 'object'>,)) {'other': 'haha'}
New called: {'other': 'haha'}
>>> C.__class__ is M
True
>>> C.__bases__ == (object,)
True
>>> a = C()
>>> a.__class__ is C
True
>>>
Check that build_class doesn't mutate the kwds dict.
>>> kwds = {'metaclass': type}
>>> class C(**kwds): pass
...
>>> kwds == {'metaclass': type}
True
>>>
Use various combinations of explicit keywords and **kwds.
>>> bases = (object,)
>>> kwds = {'metaclass': M, 'other': 'haha'}
>>> class C(*bases, **kwds): pass
...
Prepare called: ('C', (<class 'object'>,)) {'other': 'haha'}
New called: {'other': 'haha'}
>>> C.__class__ is M
True
>>> C.__bases__ == (object,)
True
>>> class B: pass
>>> kwds = {'other': 'haha'}
>>> class C(B, metaclass=M, *bases, **kwds): pass
...
Prepare called: ('C', (<class 'test.test_metaclass.B'>, <class 'object'>)) {'other': 'haha'}
New called: {'other': 'haha'}
>>> C.__class__ is M
True
>>> C.__bases__ == (B, object)
True
>>>
Check for duplicate keywords.
>>> class C(metaclass=type, metaclass=type): pass
...
Traceback (most recent call last):
[...]
SyntaxError: keyword argument repeated
>>>
Another way.
>>> kwds = {'metaclass': type}
>>> class C(metaclass=type, **kwds): pass
...
Traceback (most recent call last):
[...]
TypeError: __build_class__() got multiple values for keyword argument 'metaclass'
>>>
Use a __prepare__ method that returns an instrumented dict.
>>> class LoggingDict(dict):
... def __setitem__(self, key, value):
... print("d[%r] = %r" % (key, value))
... dict.__setitem__(self, key, value)
...
>>> class Meta(type):
... @staticmethod
... def __prepare__(name, bases):
... return LoggingDict()
...
>>> class C(metaclass=Meta):
... foo = 2+2
... foo = 42
... bar = 123
...
d['__module__'] = 'test.test_metaclass'
d['foo'] = 4
d['foo'] = 42
d['bar'] = 123
>>>
Use a metaclass that doesn't derive from type.
>>> def meta(name, bases, namespace, **kwds):
... print("meta:", name, bases)
... print("ns:", sorted(namespace.items()))
... print("kw:", sorted(kwds.items()))
... return namespace
...
>>> class C(metaclass=meta):
... a = 42
... b = 24
...
meta: C ()
ns: [('__module__', 'test.test_metaclass'), ('a', 42), ('b', 24)]
kw: []
>>> type(C) is dict
True
>>> print(sorted(C.items()))
[('__module__', 'test.test_metaclass'), ('a', 42), ('b', 24)]
>>>
And again, with a __prepare__ attribute.
>>> def prepare(name, bases, **kwds):
... print("prepare:", name, bases, sorted(kwds.items()))
... return LoggingDict()
...
>>> meta.__prepare__ = prepare
>>> class C(metaclass=meta, other="booh"):
... a = 1
... a = 2
... b = 3
...
prepare: C () [('other', 'booh')]
d['__module__'] = 'test.test_metaclass'
d['a'] = 1
d['a'] = 2
d['b'] = 3
meta: C ()
ns: [('__module__', 'test.test_metaclass'), ('a', 2), ('b', 3)]
kw: [('other', 'booh')]
>>>
The default metaclass must define a __prepare__() method.
>>> type.__prepare__()
{}
>>>
Make sure it works with subclassing.
>>> class M(type):
... @classmethod
... def __prepare__(cls, *args, **kwds):
... d = super().__prepare__(*args, **kwds)
... d["hello"] = 42
... return d
...
>>> class C(metaclass=M):
... print(hello)
...
42
>>> print(C.hello)
42
>>>
Test failures in looking up the __prepare__ method work.
>>> class ObscureException(Exception):
... pass
>>> class FailDescr:
... def __get__(self, instance, owner):
... raise ObscureException
>>> class Meta(type):
... __prepare__ = FailDescr()
>>> class X(metaclass=Meta):
... pass
Traceback (most recent call last):
[...]
test.test_metaclass.ObscureException
"""
# Expose the doctest payload to the stdlib regression-test doctest runner.
__test__ = {'doctests' : doctests}
def test_main(verbose=False):
    """Run this module's doctests through test.support's harness."""
    from test import support
    from test import test_metaclass
    support.run_doctest(test_metaclass, verbose)
if __name__ == "__main__":
    test_main(verbose=True)
|
lgpl-3.0
|
rombie/contrail-controller
|
src/container/kube-cni/kube_cni/params/params.py
|
3
|
10883
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (c) 2016 Juniper Networks, Inc. All rights reserved.
#
"""
CNI plugin parameters processing module
Parameters are defined in 3 different classes
- ContrailParams : Contains contrain specific parameters
- K8SParams : Contains kubernetes specific parameters
- CniParams : Contains CNI defined parameters
Also holds ContrailParams + K8SParams
"""
import inspect
import os
import sys
# set parent directory in sys.path
current_file = os.path.abspath(inspect.getfile(inspect.currentframe())) # nopep8
sys.path.append(os.path.dirname(os.path.dirname(current_file))) # nopep8
from common import logger as Logger
# Logger for the file; remains None until Params.get_loggin_params()
# initialises it from the CNI configuration
logger = None
# Error codes from params module (carried in ParamsError.code)
PARAMS_ERR_ENV = 101
PARAMS_ERR_DOCKER_CONNECTION = 102
PARAMS_ERR_GET_UUID = 103
PARAMS_ERR_GET_PID = 104
PARAMS_ERR_INVALID_CMD = 105
# Default VRouter related values
VROUTER_AGENT_IP = '127.0.0.1'
VROUTER_AGENT_PORT = 9091
VROUTER_POLL_TIMEOUT = 3
VROUTER_POLL_RETRIES = 20
# Container mode. Can only be k8s
CONTRAIL_CNI_MODE_K8S = "k8s"
CONTRAIL_CNI_MODE_CONTRAIL_K8S = "contrail-k8s"
CONTRAIL_PARENT_INTERFACE = "eth0"
CONTRAIL_CONTAINER_MTU = 1500
CONTRAIL_CONFIG_DIR = '/var/lib/contrail/ports/vm'
# Default K8S Pod related values
POD_DEFAULT_MTU = 1500
# Logging parameters
LOG_FILE = '/var/log/contrail/cni/opencontrail.log'
LOG_LEVEL = 'WARNING'
def get_env(key):
    '''
    Return the value of the environment variable *key*.

    Raises ParamsError (code PARAMS_ERR_ENV) when the variable is unset.
    '''
    value = os.environ.get(key)
    if value is None:
        raise ParamsError(PARAMS_ERR_ENV,
                          'Missing environment variable ' + key)
    return value
class ParamsError(RuntimeError):
    '''
    Exception raised for parameter-processing failures.

    Attributes:
        code -- numeric error code (one of the PARAMS_ERR_* constants)
        msg  -- human readable description
    '''
    def __init__(self, code, msg):
        self.code = code
        self.msg = msg

    def log(self):
        # module-level logger is initialised by Params.get_loggin_params()
        logger.error(str(self.code) + ' : ' + self.msg)
class ContrailParams():
    '''
    Contrail specific parameters
    - mode : CNI mode. Can take following values,
             - k8s : Kubernetes running on baremetal
             - nested-k8s : Kubernetes running on a VM. The container
                            interfaces are managed by VRouter running
                            on orchestrator of VM
    - parent_interface : Field valid only when mode is "nested-k8s".
                         Specifies name of interface inside the VM.
                         Container interfaces are created as sub-interface
                         over this interface
    - conf_dir : Plugin will store the Pod configuration in this directory.
                 The VRouter agent will scan this directory on restart
    - vrouter_ip : IP address where VRouter agent is running
    - vrouter_port : Port on which VRouter agent is running
    - poll_timeout : Timeout for the GET request to VRouter
    - poll_retries : Number of retries for GET request to VRouter
    '''
    def __init__(self):
        self.mode = CONTRAIL_CNI_MODE_K8S
        self.parent_interface = CONTRAIL_PARENT_INTERFACE
        self.directory = CONTRAIL_CONFIG_DIR
        self.vrouter_ip = VROUTER_AGENT_IP
        self.vrouter_port = VROUTER_AGENT_PORT
        self.poll_timeout = VROUTER_POLL_TIMEOUT
        self.poll_retries = VROUTER_POLL_RETRIES
        self.log_file = LOG_FILE
        self.log_level = LOG_LEVEL
        return

    @staticmethod
    def parse_mode(mode):
        '''Normalize a mode string; anything unrecognised falls back to k8s'''
        mode = mode.lower()
        if mode == CONTRAIL_CNI_MODE_CONTRAIL_K8S:
            return CONTRAIL_CNI_MODE_CONTRAIL_K8S
        return CONTRAIL_CNI_MODE_K8S

    def get_params(self, json_input=None):
        '''Override defaults from the "contrail" section of the CNI config.

        Missing keys keep their default values.
        '''
        if json_input is None:
            return
        # PEP 8: compare against None with "is not", not "!=" (the original
        # used "!= None" throughout)
        if json_input.get('config-dir') is not None:
            self.directory = json_input['config-dir']
        if json_input.get('vrouter-ip') is not None:
            self.vrouter_ip = json_input['vrouter-ip']
        if json_input.get('vrouter-port') is not None:
            self.vrouter_port = json_input['vrouter-port']
        if json_input.get('poll-timeout') is not None:
            self.poll_timeout = json_input['poll-timeout']
        if json_input.get('poll-retries') is not None:
            self.poll_retries = json_input['poll-retries']
        if json_input.get('mode') is not None:
            self.mode = self.parse_mode(json_input['mode'])
        if json_input.get('parent-interface') is not None:
            self.parent_interface = json_input['parent-interface']
        return

    def get_loggin_params(self, json_input):
        '''Override log file/level from config (name kept for compatibility)'''
        if json_input is None:
            return
        if json_input.get('log-file') is not None:
            self.log_file = json_input['log-file']
        if json_input.get('log-level') is not None:
            self.log_level = json_input['log-level']
        return

    def log(self):
        '''Dump current parameter values at debug level'''
        logger.debug('mode = ' + self.mode + ' config-dir = ' + self.directory +
                     ' parent-interface = ' + self.parent_interface)
        logger.debug('vrouter-ip = ' + self.vrouter_ip +
                     ' vrouter-port = ' + str(self.vrouter_port) +
                     ' poll-timeout = ' + str(self.poll_timeout) +
                     ' poll-retries = ' + str(self.poll_retries))
        return
class K8SParams():
    '''
    Kubernetes specific parameters. Will contain parameters not generic to CNI
    pod_uuid - UUID for the POD. Got from "docker inspect" equivalent
    pod_name - Name of POD got from CNI_ARGS
    pod_namespace - Namespace for the POD got from CNI_ARGS
    pod_pid - pid for the PODs pause container.
              pid is needed by 'cni' module in creating veth interfaces
    '''
    def __init__(self):
        self.pod_uuid = None
        self.pod_name = None
        self.pod_namespace = None
        self.pod_pid = None

    def set_pod_uuid(self, pod_uuid):
        self.pod_uuid = pod_uuid
        return

    def set_pod_pid(self, pod_pid):
        self.pod_pid = pod_pid
        return

    def get_pod_info(self, container_id, pod_uuid=None):
        '''
        Get UUID and PID for POD using "docker inspect" equivalent API
        '''
        from docker import client
        os.environ['DOCKER_API_VERSION'] = '1.22'
        try:
            docker_client = client.Client()
            if docker_client is None:
                raise ParamsError(PARAMS_ERR_DOCKER_CONNECTION,
                                  'Error creating docker client')
            container = docker_client.inspect_container(container_id)
            self.pod_pid = container['State']['Pid']
            self.pod_uuid = \
                container['Config']['Labels']['io.kubernetes.pod.uid']
        # FIX: was a bare "except:", which also swallows SystemExit and
        # KeyboardInterrupt; narrowed to Exception
        except Exception:
            # Dont report exception if pod_uuid set from argument already
            # pod-uuid will be specified in argument in case of UT
            # NOTE(review): the pod_uuid argument is never assigned to
            # self.pod_uuid here; callers appear to pre-set it via
            # set_pod_uuid() instead -- confirm intent
            if self.pod_uuid is None:
                raise ParamsError(PARAMS_ERR_GET_UUID,
                                  'Error finding UUID for pod ' +
                                  container_id)
            if self.pod_pid is None:
                raise ParamsError(PARAMS_ERR_GET_PID,
                                  'Error finding PID for pod ' +
                                  container_id)
        return

    def get_params(self, container_id=None, json_input=None):
        '''
        In K8S, CNI_ARGS is of format
        "IgnoreUnknown=1;K8S_POD_NAMESPACE=default;\
        K8S_POD_NAME=hello-world-1-81nl8;\
        K8S_POD_INFRA_CONTAINER_ID=<container-id>"
        Get pod-name and infra-container-id from this
        '''
        args = get_env('CNI_ARGS')
        for entry in args.split(';'):
            # str.split never returns None, so the original "is None"
            # guard on the split result was dead code and is dropped
            fields = entry.split('=')
            if len(fields) >= 2:
                if fields[0] == 'K8S_POD_NAMESPACE':
                    self.pod_namespace = fields[1]
                if fields[0] == 'K8S_POD_NAME':
                    self.pod_name = fields[1]
        # BUG FIX: the original raised ParamsError(CNI_INVALID_ARGS, ...),
        # but CNI_INVALID_ARGS is not defined anywhere in this module, so
        # both error paths crashed with NameError instead of reporting the
        # intended error.  Use the defined PARAMS_ERR_ENV code: the data is
        # missing from the CNI_ARGS environment variable.
        if self.pod_namespace is None:
            raise ParamsError(PARAMS_ERR_ENV,
                              'K8S_POD_NAMESPACE not set in CNI_ARGS')
        if self.pod_name is None:
            raise ParamsError(PARAMS_ERR_ENV,
                              'K8S_POD_NAME not set in CNI_ARGS')
        # Get UUID and PID for the POD
        self.get_pod_info(container_id)
        return

    def log(self):
        '''Dump the resolved pod identity at debug level'''
        logger.debug('K8SParams pod_uuid = ' + str(self.pod_uuid) +
                     ' pod_pid = ' + str(self.pod_pid) +
                     ' pod_name = ' + str(self.pod_name) +
                     ' pod_namespace = ' + str(self.pod_namespace))
        return
class Params():
    '''
    Top level class holding all arguments relevant to CNI
    - command : CNI command for the operation
    - k8s_params : Contains kubernetes specific arguments
    - contrail_params : Contains contrail specific arguments needed for CNI
    - container_id : Identifier for the container
    - container_ifname : Name of interface inside the container
    - container_netns : Network namespace for the container
    '''
    def __init__(self):
        self.command = None
        self.k8s_params = K8SParams()
        self.contrail_params = ContrailParams()
        self.container_id = None
        self.container_ifname = None
        self.container_netns = None
        return
    def get_loggin_params(self, json_input):
        # Initialises the module-level logger from the 'contrail' config
        # section; must run before anything calls log()
        self.contrail_params.get_loggin_params(json_input.get('contrail'))
        global logger
        logger = Logger.Logger('params', self.contrail_params.log_file,
                               self.contrail_params.log_level)
    def get_params(self, json_input=None):
        # Reads CNI_COMMAND and, for the data-path commands, the container
        # identity env vars plus the parsed CNI config
        self.command = get_env('CNI_COMMAND')
        arg_cmds = ['get', 'poll', 'add', 'delete', 'del']
        if self.command.lower() == 'version':
            # 'version' needs no container context
            return
        if self.command.lower() in arg_cmds:
            self.container_id = get_env('CNI_CONTAINERID')
            self.container_netns = get_env('CNI_NETNS')
            self.container_ifname = get_env('CNI_IFNAME')
            # NOTE(review): json_input.get(...) assumes json_input is a
            # dict; passing the default None here would raise
            # AttributeError -- confirm callers always supply the parsed
            # CNI configuration for these commands
            self.contrail_params.get_params(json_input.get('contrail'))
            self.k8s_params.get_params(self.container_id, json_input.get('k8s'))
            return
        else:
            raise ParamsError(PARAMS_ERR_INVALID_CMD, 'Invalid command : ' +
                              self.command)
        return
    def log(self):
        logger.debug('Params container-id = ' + str(self.container_id) +
                     ' container-ifname = ' + str(self.container_ifname) +
                     ' continer-netns = ' + str(self.container_netns))
        self.k8s_params.log()
        self.contrail_params.log()
        return
|
apache-2.0
|
benjaminjack/pinetree
|
lib/pybind11/setup.py
|
17
|
4485
|
#!/usr/bin/env python
# Setup script for PyPI; use CMakeFile.txt to build extension modules
from setuptools import setup
from distutils.command.install_headers import install_headers
from pybind11 import __version__
import os
# Prevent installation of pybind11 headers by setting
# PYBIND11_USE_CMAKE.
if os.environ.get('PYBIND11_USE_CMAKE'):
    headers = []
else:
    # Explicit header manifest passed to setup(); must be kept in sync
    # with the files under include/pybind11
    headers = [
        'include/pybind11/detail/class.h',
        'include/pybind11/detail/common.h',
        'include/pybind11/detail/descr.h',
        'include/pybind11/detail/init.h',
        'include/pybind11/detail/internals.h',
        'include/pybind11/detail/typeid.h',
        'include/pybind11/attr.h',
        'include/pybind11/buffer_info.h',
        'include/pybind11/cast.h',
        'include/pybind11/chrono.h',
        'include/pybind11/common.h',
        'include/pybind11/complex.h',
        'include/pybind11/eigen.h',
        'include/pybind11/embed.h',
        'include/pybind11/eval.h',
        'include/pybind11/functional.h',
        'include/pybind11/iostream.h',
        'include/pybind11/numpy.h',
        'include/pybind11/operators.h',
        'include/pybind11/options.h',
        'include/pybind11/pybind11.h',
        'include/pybind11/pytypes.h',
        'include/pybind11/stl.h',
        'include/pybind11/stl_bind.h',
    ]
class InstallHeaders(install_headers):
    """Custom header installer that preserves the include/pybind11
    subdirectory layout (the default command flattens subdirectories)."""

    def run(self):
        headers = self.distribution.headers
        if not headers:
            return
        for header in headers:
            # recreate the path of each header relative to include/pybind11
            relative = os.path.relpath(header, 'include/pybind11')
            destination = os.path.join(self.install_dir,
                                       os.path.dirname(relative))
            self.mkpath(destination)
            (installed, _) = self.copy_file(header, destination)
            self.outfiles.append(installed)
# PyPI package metadata only; extension modules are built via CMake
# (see the comment at the top of this file)
setup(
    name='pybind11',
    version=__version__,
    description='Seamless operability between C++11 and Python',
    author='Wenzel Jakob',
    author_email='[email protected]',
    url='https://github.com/pybind/pybind11',
    download_url='https://github.com/pybind/pybind11/tarball/v' + __version__,
    packages=['pybind11'],
    license='BSD',
    headers=headers,
    # use the subdirectory-preserving installer defined above
    cmdclass=dict(install_headers=InstallHeaders),
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: Utilities',
        'Programming Language :: C++',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'License :: OSI Approved :: BSD License'
    ],
    keywords='C++11, Python bindings',
    long_description="""pybind11 is a lightweight header-only library that
exposes C++ types in Python and vice versa, mainly to create Python bindings of
existing C++ code. Its goals and syntax are similar to the excellent
Boost.Python by David Abrahams: to minimize boilerplate code in traditional
extension modules by inferring type information using compile-time
introspection.
The main issue with Boost.Python-and the reason for creating such a similar
project-is Boost. Boost is an enormously large and complex suite of utility
libraries that works with almost every C++ compiler in existence. This
compatibility has its cost: arcane template tricks and workarounds are
necessary to support the oldest and buggiest of compiler specimens. Now that
C++11-compatible compilers are widely available, this heavy machinery has
become an excessively large and unnecessary dependency.
Think of this library as a tiny self-contained version of Boost.Python with
everything stripped away that isn't relevant for binding generation. Without
comments, the core header files only require ~4K lines of code and depend on
Python (2.7 or 3.x, or PyPy2.7 >= 5.7) and the C++ standard library. This
compact implementation was possible thanks to some of the new C++11 language
features (specifically: tuples, lambda functions and variadic templates). Since
its creation, this library has grown beyond Boost.Python in many ways, leading
to dramatically simpler binding code in many common situations.""")
|
mit
|
anandpdoshi/frappe
|
frappe/email/doctype/email_alert/test_email_alert.py
|
8
|
3857
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe, frappe.utils, frappe.utils.scheduler
import unittest
test_records = frappe.get_test_records('Email Alert')
class TestEmailAlert(unittest.TestCase):
	"""Integration tests for the Email Alert doctype.

	Covers queueing of alert emails on document creation/save, condition
	evaluation, value-change triggers and date-based (daily scheduler)
	triggers, asserting against the `tabEmail Queue` table.
	"""

	def setUp(self):
		# Every test asserts on the Email Queue, so start from an empty
		# queue, running as a regular (non-Administrator) user.
		frappe.db.sql("""delete from `tabEmail Queue`""")
		frappe.set_user("[email protected]")

	def tearDown(self):
		frappe.set_user("Administrator")

	def test_new_and_save(self):
		"""The alert fires both on insert and on a subsequent save."""
		communication = frappe.new_doc("Communication")
		communication.communication_type = 'Comment'
		communication.subject = "test"
		communication.content = "test"
		communication.insert(ignore_permissions=True)
		self.assertTrue(frappe.db.get_value("Email Queue", {"reference_doctype": "Communication",
			"reference_name": communication.name, "status":"Not Sent"}))
		frappe.db.sql("""delete from `tabEmail Queue`""")

		communication.content = "test 2"
		communication.save()
		self.assertTrue(frappe.db.get_value("Email Queue", {"reference_doctype": "Communication",
			"reference_name": communication.name, "status":"Not Sent"}))

	def test_condition(self):
		"""The alert only fires once its condition evaluates to true."""
		event = frappe.new_doc("Event")
		# BUGFIX: was `event.subject = "test",` -- the trailing comma
		# assigned a 1-tuple instead of a string.
		event.subject = "test"
		event.event_type = "Private"
		event.starts_on = "2014-06-06 12:00:00"
		event.insert()
		# Private event: condition not met, nothing queued.
		self.assertFalse(frappe.db.get_value("Email Queue", {"reference_doctype": "Event",
			"reference_name": event.name, "status":"Not Sent"}))

		event.event_type = "Public"
		event.save()
		# Public event: condition met, alert queued.
		self.assertTrue(frappe.db.get_value("Email Queue", {"reference_doctype": "Event",
			"reference_name": event.name, "status":"Not Sent"}))

	def test_invalid_condition(self):
		"""Saving an alert whose condition is not valid Python raises."""
		frappe.set_user("Administrator")
		email_alert = frappe.new_doc("Email Alert")
		email_alert.subject = "test"
		email_alert.document_type = "ToDo"
		email_alert.send_alert_on = "New"
		email_alert.message = "test"

		recipent = frappe.new_doc("Email Alert Recipient")
		recipent.email_by_document_field = "owner"
		# NOTE(review): assigns a single child doc to a table field instead
		# of appending to a list -- presumably tolerated by frappe since the
		# save fails on the condition first; verify against `append` usage.
		email_alert.recipents = recipent
		email_alert.condition = "test"
		self.assertRaises(frappe.ValidationError, email_alert.save)

	def test_value_changed(self):
		"""The 'Value Change' alert fires only when the watched field changes."""
		event = frappe.new_doc("Event")
		# BUGFIX: trailing comma removed (was assigning a 1-tuple).
		event.subject = "test"
		event.event_type = "Private"
		event.starts_on = "2014-06-06 12:00:00"
		event.insert()
		self.assertFalse(frappe.db.get_value("Email Queue", {"reference_doctype": "Event",
			"reference_name": event.name, "status":"Not Sent"}))

		# Changing an unwatched field must not queue an alert.
		event.subject = "test 1"
		event.save()
		self.assertFalse(frappe.db.get_value("Email Queue", {"reference_doctype": "Event",
			"reference_name": event.name, "status":"Not Sent"}))

		# Changing the watched field queues the alert.
		event.description = "test"
		event.save()
		self.assertTrue(frappe.db.get_value("Email Queue", {"reference_doctype": "Event",
			"reference_name": event.name, "status":"Not Sent"}))

	def test_date_changed(self):
		"""The date-based alert fires only when the daily scheduler runs on
		a date within the alert's window."""
		event = frappe.new_doc("Event")
		# BUGFIX: trailing comma removed (was assigning a 1-tuple).
		event.subject = "test"
		event.event_type = "Private"
		event.starts_on = "2014-01-01 12:00:00"
		event.insert()
		self.assertFalse(frappe.db.get_value("Email Queue", {"reference_doctype": "Event",
			"reference_name": event.name, "status":"Not Sent"}))

		frappe.utils.scheduler.trigger(frappe.local.site, "daily", now=True)
		# not today, so no alert
		self.assertFalse(frappe.db.get_value("Email Queue", {"reference_doctype": "Event",
			"reference_name": event.name, "status":"Not Sent"}))

		event.starts_on = frappe.utils.add_days(frappe.utils.nowdate(), 2) + " 12:00:00"
		event.save()
		# Saving alone must not queue; only the scheduler run may.
		self.assertFalse(frappe.db.get_value("Email Queue", {"reference_doctype": "Event",
			"reference_name": event.name, "status":"Not Sent"}))

		frappe.utils.scheduler.trigger(frappe.local.site, "daily", now=True)
		# today so show alert
		self.assertTrue(frappe.db.get_value("Email Queue", {"reference_doctype": "Event",
			"reference_name": event.name, "status":"Not Sent"}))
|
mit
|
google-research/google-research
|
better_storylines/src/evaluate_story_cloze_test.py
|
1
|
4258
|
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Output the overall test accuracy on the 2016 test set.
"""
import os
from absl import app
from absl import flags
from absl import logging
import gin
import gin.tf
import models
import rocstories_sentence_embeddings
import tensorflow.compat.v2 as tf
import tensorflow_datasets.public_api as tfds
import utils
gfile = tf.io.gfile
FLAGS = flags.FLAGS
flags.DEFINE_string('base_dir', '/tmp/model',
'Base directory containing checkpoints and .gin config.')
flags.DEFINE_string('data_dir', 'tfds_datasets',
'Where to look for TFDS datasets.')
flags.DEFINE_multi_string('gin_bindings', [], 'Not used.')
tf.enable_v2_behavior()
@gin.configurable('dataset')
def prepare_dataset(dataset_name=gin.REQUIRED,
                    shuffle_input_sentences=False,
                    num_eval_examples=2000,
                    batch_size=32):
  """Loads the 2016 test split, formatted for evaluation.

  Args:
    dataset_name: Name of the TFDS dataset to load.
    shuffle_input_sentences: Unused at eval time; kept so the gin config
      written during training still parses.
    num_eval_examples: Unused at eval time; kept for gin compatibility.
    batch_size: Unused at eval time; kept for gin compatibility.

  Returns:
    A Dataset of validation-formatted examples from the 2016 test split.
  """
  # Only dataset_name matters here; the other arguments exist purely so the
  # training-time gin bindings remain valid.
  del shuffle_input_sentences, num_eval_examples, batch_size
  raw_dataset = tfds.load(
      dataset_name,
      data_dir=FLAGS.data_dir,
      split=rocstories_sentence_embeddings.TEST_2016,
      download=False)
  return utils.build_validation_dataset(raw_dataset)
def eval_single_checkpoint(model, dataset):
  """Computes test-set accuracy for one restored model checkpoint.

  Args:
    model: The model to evaluate.
    dataset: Dataset yielding (inputs, ending_1, ending_2, label) tuples.

  Returns:
    The accuracy over the dataset, as a plain Python number.
  """
  accuracy = tf.keras.metrics.Accuracy(name='test_spring2016_acc')
  for inputs, ending_1, ending_2, label in dataset:
    is_correct = utils.eval_step(model, inputs, ending_1, ending_2, label)
    accuracy(1, is_correct)
  logging.warning('Test accuracy: %f', accuracy.result())
  return accuracy.result().numpy().tolist()
def run_eval(base_dir):
  """Evaluates the best checkpoint in base_dir.

  Writes the resulting accuracy to [base_dir]/test_spring2016_acc.txt.

  Args:
    base_dir: Directory containing checkpoints and the training gin config.
  """
  checkpoint_name = utils.pick_best_checkpoint(base_dir)
  checkpoint_path = os.path.join(base_dir, checkpoint_name)
  dataset = prepare_dataset()
  # Infer model dimensions from the dataset's element shapes.
  input_shape = tf.compat.v1.data.get_output_shapes(dataset)[0]
  embedding_dim = input_shape[-1]
  num_input_sentences = input_shape[1]
  model = models.build_model(
      num_input_sentences=num_input_sentences, embedding_dim=embedding_dim)
  # expect_partial: the checkpoint may contain optimizer state we don't load.
  tf.train.Checkpoint(model=model).restore(checkpoint_path).expect_partial()
  logging.info('Evaluating with checkpoint: "%s"', checkpoint_path)
  test_accuracy = eval_single_checkpoint(model, dataset)
  with gfile.GFile(os.path.join(base_dir, 'test_spring2016_acc.txt'), 'w') as f:
    f.write(str(test_accuracy))
def main(argv):
  """Entry point: loads the training gin config, then runs evaluation."""
  del argv
  base_dir = FLAGS.base_dir
  # The training job writes its effective gin configuration into the model
  # directory; evaluation must run with exactly the same settings.
  gin_config_path = os.path.join(base_dir, 'config.gin')
  if not gfile.exists(gin_config_path):
    raise ValueError('Could not find config.gin in "%s"' % base_dir)
  gin.parse_config_file(gin_config_path, skip_unknown=True)
  gin.finalize()
  run_eval(base_dir)
if __name__ == '__main__':
app.run(main)
|
apache-2.0
|
andreparrish/python-for-android
|
python-modules/twisted/twisted/python/win32.py
|
56
|
5492
|
# -*- test-case-name: twisted.python.test.test_win32 -*-
# Copyright (c) 2001-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Win32 utilities.
See also twisted.python.shortcut.
@var O_BINARY: the 'binary' mode flag on Windows, or 0 on other platforms, so it
may safely be OR'ed into a mask for os.open.
"""
import re
import exceptions
import os
try:
import win32api
import win32con
except ImportError:
pass
from twisted.python.runtime import platform
# http://msdn.microsoft.com/library/default.asp?url=/library/en-us/debug/base/system_error_codes.asp
ERROR_FILE_NOT_FOUND = 2
ERROR_PATH_NOT_FOUND = 3
ERROR_INVALID_NAME = 123
ERROR_DIRECTORY = 267
O_BINARY = getattr(os, "O_BINARY", 0)
def _determineWindowsError():
    """
    Return the platform's real C{WindowsError} if the runtime defines one,
    otherwise the L{FakeWindowsError} stand-in.
    """
    try:
        return exceptions.WindowsError
    except AttributeError:
        return FakeWindowsError
class FakeWindowsError(OSError):
    """
    Stand-in for the C{WindowsError} builtin on platforms (i.e. anything
    other than Windows) where the interpreter does not define it.
    """
WindowsError = _determineWindowsError()
# XXX fix this to use python's builtin _winreg?
def getProgramsMenuPath():
    """Get the path to the common Start Menu->Programs folder.

    Probably will break on non-US Windows.

    @returns: the filesystem location of the common Start Menu->Programs.
    """
    # Pre-NT systems have no Shell Folders registry key; use the fixed path.
    if not platform.isWinNT():
        return "C:\\Windows\\Start Menu\\Programs"
    shellFoldersKey = win32api.RegOpenKeyEx(
        win32con.HKEY_LOCAL_MACHINE,
        'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders',
        0, win32con.KEY_READ)
    # RegQueryValueEx returns a (value, type) pair; only the value is wanted.
    value, _valueType = win32api.RegQueryValueEx(shellFoldersKey,
                                                 'Common Programs')
    return value
def getProgramFilesPath():
    """Get the path to the Program Files folder."""
    versionKey = win32api.RegOpenKeyEx(
        win32con.HKEY_LOCAL_MACHINE,
        'SOFTWARE\\Microsoft\\Windows\\CurrentVersion',
        0, win32con.KEY_READ)
    # RegQueryValueEx returns a (value, type) pair; keep only the value.
    return win32api.RegQueryValueEx(versionKey, 'ProgramFilesDir')[0]
# Backslash runs immediately preceding a double quote must be doubled, and
# the quote itself escaped; a trailing backslash run must also be doubled so
# it does not escape the closing quote.
_cmdLineQuoteRe = re.compile(r'(\\*)"')
_cmdLineQuoteRe2 = re.compile(r'(\\+)\Z')

def cmdLineQuote(s):
    """
    Internal method for quoting a single command-line argument.

    Produces a string that cmd.exe-style unquoting will interpret as a
    single argument equal to C{s}, even if C{s} contains whitespace or
    double quotes.

    @param s: an unquoted argument string.
    @type s: C{str}

    @return: a quoted string.
    @rtype: C{str}
    """
    needsQuotes = (" " in s) or ("\t" in s) or ('"' in s) or s == ''
    # Double backslashes before each double quote, then escape the quote.
    escaped = _cmdLineQuoteRe.sub(r'\1\1\\"', s)
    # Double any run of backslashes at the very end of the string.
    escaped = _cmdLineQuoteRe2.sub(r'\1\1', escaped)
    if needsQuotes:
        return '"%s"' % (escaped,)
    return escaped
def quoteArguments(arguments):
    """
    Quote an iterable of command-line arguments for passing to CreateProcess
    or a similar API, so that the list passed to C{reactor.spawnProcess}
    matches the child process's C{sys.argv} properly.

    @param arguments: an iterable of C{str}, each unquoted.

    @return: a single space-separated string, with each argument quoted as
        necessary.
    """
    return ' '.join(map(cmdLineQuote, arguments))
class _ErrorFormatter(object):
    """
    Formatter for Windows error messages.

    @ivar winError: A callable which takes one integer error number argument
        and returns an L{exceptions.WindowsError} instance for that error (like
        L{ctypes.WinError}).

    @ivar formatMessage: A callable which takes one integer error number
        argument and returns a C{str} giving the message for that error (like
        L{win32api.FormatMessage}).

    @ivar errorTab: A mapping from integer error numbers to C{str} messages
        which correspond to those errors (like L{socket.errorTab}).
    """
    def __init__(self, WinError, FormatMessage, errorTab):
        # Any of the three may be None when the corresponding module was
        # unavailable at construction time; formatError falls through them
        # in order.
        self.winError = WinError
        self.formatMessage = FormatMessage
        self.errorTab = errorTab

    def fromEnvironment(cls):
        """
        Get as many of the platform-specific error translation objects as
        possible and return an instance of C{cls} created with them.
        """
        try:
            from ctypes import WinError
        except ImportError:
            WinError = None
        try:
            from win32api import FormatMessage
        except ImportError:
            FormatMessage = None
        try:
            from socket import errorTab
        except ImportError:
            errorTab = None
        return cls(WinError, FormatMessage, errorTab)
    # Pre-decorator classmethod declaration, kept in the file's original
    # (Python 2 era) style.
    fromEnvironment = classmethod(fromEnvironment)

    def formatError(self, errorcode):
        """
        Returns the string associated with a Windows error message, such as the
        ones found in socket.error.

        Attempts direct lookup against the win32 API via ctypes and then
        pywin32 if available), then in the error table in the socket module,
        then finally defaulting to C{os.strerror}.

        @param errorcode: the Windows error code
        @type errorcode: C{int}

        @return: The error message string
        @rtype: C{str}
        """
        if self.winError is not None:
            # NOTE(review): indexing the exception instance ([1]) relies on
            # Python 2 exception-indexing semantics -- confirm before porting.
            return self.winError(errorcode)[1]
        if self.formatMessage is not None:
            return self.formatMessage(errorcode)
        if self.errorTab is not None:
            result = self.errorTab.get(errorcode)
            if result is not None:
                return result
        # Last resort: the C library's strerror for this code.
        return os.strerror(errorcode)
formatError = _ErrorFormatter.fromEnvironment().formatError
|
apache-2.0
|
jazkarta/edx-platform
|
openedx/core/djangoapps/credit/migrations/0007_auto__add_field_creditprovider_enable_integration__chg_field_creditpro.py
|
84
|
11967
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration for the credit app.

    Adds the C{CreditProvider.enable_integration} boolean column and relaxes
    C{CreditProvider.provider_url} from a unique 255-char URL field to a
    non-unique 200-char one. Auto-generated by South; do not hand-edit the
    frozen C{models} snapshot below.
    """

    def forwards(self, orm):
        """Apply the migration."""
        # Removing unique constraint on 'CreditProvider', fields ['provider_url']
        db.delete_unique('credit_creditprovider', ['provider_url'])

        # Adding field 'CreditProvider.enable_integration'
        db.add_column('credit_creditprovider', 'enable_integration',
                      self.gf('django.db.models.fields.BooleanField')(default=False),
                      keep_default=False)

        # Changing field 'CreditProvider.provider_url'
        db.alter_column('credit_creditprovider', 'provider_url', self.gf('django.db.models.fields.URLField')(max_length=200))

    def backwards(self, orm):
        """Revert the migration (restores the unique 255-char URL field)."""
        # Deleting field 'CreditProvider.enable_integration'
        db.delete_column('credit_creditprovider', 'enable_integration')

        # Changing field 'CreditProvider.provider_url'
        db.alter_column('credit_creditprovider', 'provider_url', self.gf('django.db.models.fields.URLField')(max_length=255, unique=True))

        # Adding unique constraint on 'CreditProvider', fields ['provider_url']
        db.create_unique('credit_creditprovider', ['provider_url'])

    # South's frozen snapshot of the ORM at the time this migration was
    # generated; used to build the historical `orm` passed to forwards/backwards.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'credit.creditcourse': {
            'Meta': {'object_name': 'CreditCourse'},
            'course_key': ('xmodule_django.models.CourseKeyField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
            'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'providers': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['credit.CreditProvider']", 'symmetrical': 'False'})
        },
        'credit.crediteligibility': {
            'Meta': {'unique_together': "(('username', 'course'),)", 'object_name': 'CreditEligibility'},
            'course': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'eligibilities'", 'to': "orm['credit.CreditCourse']"}),
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'provider': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'eligibilities'", 'to': "orm['credit.CreditProvider']"}),
            'username': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
        },
        'credit.creditprovider': {
            'Meta': {'object_name': 'CreditProvider'},
            'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            'display_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'eligibility_duration': ('django.db.models.fields.PositiveIntegerField', [], {'default': '31556970'}),
            'enable_integration': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'provider_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'}),
            'provider_url': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200'})
        },
        'credit.creditrequest': {
            'Meta': {'unique_together': "(('username', 'course', 'provider'),)", 'object_name': 'CreditRequest'},
            'course': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'credit_requests'", 'to': "orm['credit.CreditCourse']"}),
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'parameters': ('jsonfield.fields.JSONField', [], {}),
            'provider': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'credit_requests'", 'to': "orm['credit.CreditProvider']"}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'pending'", 'max_length': '255'}),
            'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'uuid': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'})
        },
        'credit.creditrequirement': {
            'Meta': {'unique_together': "(('namespace', 'name', 'course'),)", 'object_name': 'CreditRequirement'},
            'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'course': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'credit_requirements'", 'to': "orm['credit.CreditCourse']"}),
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            'criteria': ('jsonfield.fields.JSONField', [], {}),
            'display_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'namespace': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'credit.creditrequirementstatus': {
            'Meta': {'object_name': 'CreditRequirementStatus'},
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'reason': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
            'requirement': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'statuses'", 'to': "orm['credit.CreditRequirement']"}),
            'status': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'username': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'})
        },
        'credit.historicalcreditrequest': {
            'Meta': {'ordering': "(u'-history_date', u'-history_id')", 'object_name': 'HistoricalCreditRequest'},
            'course': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'+'", 'null': 'True', 'on_delete': 'models.DO_NOTHING', 'to': "orm['credit.CreditCourse']"}),
            'created': ('model_utils.fields.AutoCreatedField', [], {'default': 'datetime.datetime.now'}),
            u'history_date': ('django.db.models.fields.DateTimeField', [], {}),
            u'history_id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            u'history_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
            u'history_user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'+'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['auth.User']"}),
            'id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True', 'blank': 'True'}),
            'modified': ('model_utils.fields.AutoLastModifiedField', [], {'default': 'datetime.datetime.now'}),
            'parameters': ('jsonfield.fields.JSONField', [], {}),
            'provider': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'+'", 'null': 'True', 'on_delete': 'models.DO_NOTHING', 'to': "orm['credit.CreditProvider']"}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'pending'", 'max_length': '255'}),
            'timestamp': ('django.db.models.fields.DateTimeField', [], {'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'uuid': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'})
        }
    }

    complete_apps = ['credit']
|
agpl-3.0
|
FireWRT/OpenWrt-Firefly-Libraries
|
staging_dir/host/lib/python2.7/test/test_getargs2.py
|
35
|
12391
|
import unittest
from test import test_support
# Skip this test if the _testcapi module isn't available.
test_support.import_module('_testcapi')
from _testcapi import getargs_keywords
import warnings
"""
> How about the following counterproposal. This also changes some of
> the other format codes to be a little more regular.
>
> Code C type Range check
>
> b unsigned char 0..UCHAR_MAX
> h signed short SHRT_MIN..SHRT_MAX
> B unsigned char none **
> H unsigned short none **
> k * unsigned long none
> I * unsigned int 0..UINT_MAX
> i int INT_MIN..INT_MAX
> l long LONG_MIN..LONG_MAX
> K * unsigned long long none
> L long long LLONG_MIN..LLONG_MAX
> Notes:
>
> * New format codes.
>
> ** Changed from previous "range-and-a-half" to "none"; the
> range-and-a-half checking wasn't particularly useful.
Plus a C API or two, e.g. PyInt_AsLongMask() ->
unsigned long and PyInt_AsLongLongMask() -> unsigned
long long (if that exists).
"""
LARGE = 0x7FFFFFFF
VERY_LARGE = 0xFF0000121212121212121242L
from _testcapi import UCHAR_MAX, USHRT_MAX, UINT_MAX, ULONG_MAX, INT_MAX, \
INT_MIN, LONG_MIN, LONG_MAX, PY_SSIZE_T_MIN, PY_SSIZE_T_MAX, \
SHRT_MIN, SHRT_MAX
try:
from _testcapi import getargs_L, getargs_K
except ImportError:
_PY_LONG_LONG_available = False
else:
_PY_LONG_LONG_available = True
# fake, they are not defined in Python's header files
LLONG_MAX = 2**63-1
LLONG_MIN = -2**63
ULLONG_MAX = 2**64-1
class Long:
    # Helper whose __int__ returns a Python 2 long (99L); used to check that
    # getargs converters accept (or reject) objects implementing __int__.
    def __int__(self):
        return 99L
class Int:
    # Helper whose __int__ returns a plain int; counterpart of Long above.
    def __int__(self):
        return 99
class Unsigned_TestCase(unittest.TestCase):
    """Tests for the unsigned integer format codes (b, B, H, I, k) of
    PyArg_Parse*, via the _testcapi getargs_* helpers."""

    def test_b(self):
        from _testcapi import getargs_b
        # b returns 'unsigned char', and does range checking (0 ... UCHAR_MAX)
        self.assertRaises(TypeError, getargs_b, 3.14)
        self.assertEqual(99, getargs_b(Long()))
        self.assertEqual(99, getargs_b(Int()))

        self.assertRaises(OverflowError, getargs_b, -1)
        self.assertEqual(0, getargs_b(0))
        self.assertEqual(UCHAR_MAX, getargs_b(UCHAR_MAX))
        self.assertRaises(OverflowError, getargs_b, UCHAR_MAX + 1)

        self.assertEqual(42, getargs_b(42))
        self.assertEqual(42, getargs_b(42L))
        self.assertRaises(OverflowError, getargs_b, VERY_LARGE)

    def test_B(self):
        from _testcapi import getargs_B
        # B returns 'unsigned char', no range checking
        self.assertRaises(TypeError, getargs_B, 3.14)
        self.assertEqual(99, getargs_B(Long()))
        self.assertEqual(99, getargs_B(Int()))

        # out-of-range values are silently reduced modulo 2**8
        self.assertEqual(UCHAR_MAX, getargs_B(-1))
        self.assertEqual(UCHAR_MAX, getargs_B(-1L))
        self.assertEqual(0, getargs_B(0))
        self.assertEqual(UCHAR_MAX, getargs_B(UCHAR_MAX))
        self.assertEqual(0, getargs_B(UCHAR_MAX+1))

        self.assertEqual(42, getargs_B(42))
        self.assertEqual(42, getargs_B(42L))
        self.assertEqual(UCHAR_MAX & VERY_LARGE, getargs_B(VERY_LARGE))

    def test_H(self):
        from _testcapi import getargs_H
        # H returns 'unsigned short', no range checking
        self.assertRaises(TypeError, getargs_H, 3.14)
        self.assertEqual(99, getargs_H(Long()))
        self.assertEqual(99, getargs_H(Int()))

        # out-of-range values are silently reduced modulo 2**16
        self.assertEqual(USHRT_MAX, getargs_H(-1))
        self.assertEqual(0, getargs_H(0))
        self.assertEqual(USHRT_MAX, getargs_H(USHRT_MAX))
        self.assertEqual(0, getargs_H(USHRT_MAX+1))

        self.assertEqual(42, getargs_H(42))
        self.assertEqual(42, getargs_H(42L))

        self.assertEqual(VERY_LARGE & USHRT_MAX, getargs_H(VERY_LARGE))

    def test_I(self):
        from _testcapi import getargs_I
        # I returns 'unsigned int', no range checking
        self.assertRaises(TypeError, getargs_I, 3.14)
        self.assertEqual(99, getargs_I(Long()))
        self.assertEqual(99, getargs_I(Int()))

        # out-of-range values are silently reduced modulo 2**32 (UINT_MAX+1)
        self.assertEqual(UINT_MAX, getargs_I(-1))
        self.assertEqual(0, getargs_I(0))
        self.assertEqual(UINT_MAX, getargs_I(UINT_MAX))
        self.assertEqual(0, getargs_I(UINT_MAX+1))

        self.assertEqual(42, getargs_I(42))
        self.assertEqual(42, getargs_I(42L))

        self.assertEqual(VERY_LARGE & UINT_MAX, getargs_I(VERY_LARGE))

    def test_k(self):
        from _testcapi import getargs_k
        # k returns 'unsigned long', no range checking
        # it does not accept float, or instances with __int__
        self.assertRaises(TypeError, getargs_k, 3.14)
        self.assertRaises(TypeError, getargs_k, Long())
        self.assertRaises(TypeError, getargs_k, Int())

        self.assertEqual(ULONG_MAX, getargs_k(-1))
        self.assertEqual(0, getargs_k(0))
        self.assertEqual(ULONG_MAX, getargs_k(ULONG_MAX))
        self.assertEqual(0, getargs_k(ULONG_MAX+1))

        self.assertEqual(42, getargs_k(42))
        self.assertEqual(42, getargs_k(42L))

        self.assertEqual(VERY_LARGE & ULONG_MAX, getargs_k(VERY_LARGE))
class Signed_TestCase(unittest.TestCase):
    """Tests for the signed integer format codes (h, i, l, n) of
    PyArg_Parse*, via the _testcapi getargs_* helpers."""

    def test_h(self):
        from _testcapi import getargs_h
        # h returns 'short', and does range checking (SHRT_MIN ... SHRT_MAX)
        self.assertRaises(TypeError, getargs_h, 3.14)
        self.assertEqual(99, getargs_h(Long()))
        self.assertEqual(99, getargs_h(Int()))

        self.assertRaises(OverflowError, getargs_h, SHRT_MIN-1)
        self.assertEqual(SHRT_MIN, getargs_h(SHRT_MIN))
        self.assertEqual(SHRT_MAX, getargs_h(SHRT_MAX))
        self.assertRaises(OverflowError, getargs_h, SHRT_MAX+1)

        self.assertEqual(42, getargs_h(42))
        self.assertEqual(42, getargs_h(42L))
        self.assertRaises(OverflowError, getargs_h, VERY_LARGE)

    def test_i(self):
        from _testcapi import getargs_i
        # i returns 'int', and does range checking (INT_MIN ... INT_MAX)
        self.assertRaises(TypeError, getargs_i, 3.14)
        self.assertEqual(99, getargs_i(Long()))
        self.assertEqual(99, getargs_i(Int()))

        self.assertRaises(OverflowError, getargs_i, INT_MIN-1)
        self.assertEqual(INT_MIN, getargs_i(INT_MIN))
        self.assertEqual(INT_MAX, getargs_i(INT_MAX))
        self.assertRaises(OverflowError, getargs_i, INT_MAX+1)

        self.assertEqual(42, getargs_i(42))
        self.assertEqual(42, getargs_i(42L))
        self.assertRaises(OverflowError, getargs_i, VERY_LARGE)

    def test_l(self):
        from _testcapi import getargs_l
        # l returns 'long', and does range checking (LONG_MIN ... LONG_MAX)
        self.assertRaises(TypeError, getargs_l, 3.14)
        self.assertEqual(99, getargs_l(Long()))
        self.assertEqual(99, getargs_l(Int()))

        self.assertRaises(OverflowError, getargs_l, LONG_MIN-1)
        self.assertEqual(LONG_MIN, getargs_l(LONG_MIN))
        self.assertEqual(LONG_MAX, getargs_l(LONG_MAX))
        self.assertRaises(OverflowError, getargs_l, LONG_MAX+1)

        self.assertEqual(42, getargs_l(42))
        self.assertEqual(42, getargs_l(42L))
        self.assertRaises(OverflowError, getargs_l, VERY_LARGE)

    def test_n(self):
        from _testcapi import getargs_n
        # n returns 'Py_ssize_t', and does range checking
        # (PY_SSIZE_T_MIN ... PY_SSIZE_T_MAX)
        self.assertRaises(TypeError, getargs_n, 3.14)
        self.assertEqual(99, getargs_n(Long()))
        self.assertEqual(99, getargs_n(Int()))

        self.assertRaises(OverflowError, getargs_n, PY_SSIZE_T_MIN-1)
        self.assertEqual(PY_SSIZE_T_MIN, getargs_n(PY_SSIZE_T_MIN))
        self.assertEqual(PY_SSIZE_T_MAX, getargs_n(PY_SSIZE_T_MAX))
        self.assertRaises(OverflowError, getargs_n, PY_SSIZE_T_MAX+1)

        self.assertEqual(42, getargs_n(42))
        self.assertEqual(42, getargs_n(42L))
        self.assertRaises(OverflowError, getargs_n, VERY_LARGE)
@unittest.skipUnless(_PY_LONG_LONG_available, 'PY_LONG_LONG not available')
class LongLong_TestCase(unittest.TestCase):
    """Tests for the 64-bit format codes (L, K), skipped on builds where
    PY_LONG_LONG is not available."""

    def test_L(self):
        from _testcapi import getargs_L
        # L returns 'long long', and does range checking (LLONG_MIN
        # ... LLONG_MAX)
        # Passing a float is deprecated rather than a TypeError for 'L':
        # first check the truncating (warning-suppressed) path, then check
        # that the warning escalates to an error when configured to.
        with warnings.catch_warnings():
            warnings.filterwarnings(
                "ignore",
                category=DeprecationWarning,
                message=".*integer argument expected, got float",
                module=__name__)
            self.assertEqual(3, getargs_L(3.14))
        with warnings.catch_warnings():
            warnings.filterwarnings(
                "error",
                category=DeprecationWarning,
                message=".*integer argument expected, got float",
                module="unittest")
            self.assertRaises(DeprecationWarning, getargs_L, 3.14)

        self.assertRaises(TypeError, getargs_L, "Hello")
        self.assertEqual(99, getargs_L(Long()))
        self.assertEqual(99, getargs_L(Int()))

        self.assertRaises(OverflowError, getargs_L, LLONG_MIN-1)
        self.assertEqual(LLONG_MIN, getargs_L(LLONG_MIN))
        self.assertEqual(LLONG_MAX, getargs_L(LLONG_MAX))
        self.assertRaises(OverflowError, getargs_L, LLONG_MAX+1)

        self.assertEqual(42, getargs_L(42))
        self.assertEqual(42, getargs_L(42L))
        self.assertRaises(OverflowError, getargs_L, VERY_LARGE)

    def test_K(self):
        from _testcapi import getargs_K
        # K return 'unsigned long long', no range checking
        self.assertRaises(TypeError, getargs_K, 3.14)
        self.assertRaises(TypeError, getargs_K, Long())
        self.assertRaises(TypeError, getargs_K, Int())
        self.assertEqual(ULLONG_MAX, getargs_K(ULLONG_MAX))
        self.assertEqual(0, getargs_K(0))
        self.assertEqual(0, getargs_K(ULLONG_MAX+1))

        self.assertEqual(42, getargs_K(42))
        self.assertEqual(42, getargs_K(42L))

        self.assertEqual(VERY_LARGE & ULLONG_MAX, getargs_K(VERY_LARGE))
class Tuple_TestCase(unittest.TestCase):
    """Tests for tuple unpacking in PyArg_Parse* format strings."""

    def test_tuple(self):
        from _testcapi import getargs_tuple
        ret = getargs_tuple(1, (2, 3))
        self.assertEqual(ret, (1,2,3))

        # make sure invalid tuple arguments are handled correctly
        class seq:
            # sequence that claims length 2 but raises on item access
            def __len__(self):
                return 2
            def __getitem__(self, n):
                raise ValueError
        self.assertRaises(TypeError, getargs_tuple, 1, seq())
class Keywords_TestCase(unittest.TestCase):
    """Tests for PyArg_ParseTupleAndKeywords via getargs_keywords:
    positional/keyword mixing, optional args, and error messages."""

    def test_positional_args(self):
        # using all positional args
        self.assertEqual(
            getargs_keywords((1,2), 3, (4,(5,6)), (7,8,9), 10),
            (1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
            )

    def test_mixed_args(self):
        # positional and keyword args
        self.assertEqual(
            getargs_keywords((1,2), 3, (4,(5,6)), arg4=(7,8,9), arg5=10),
            (1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
            )

    def test_keyword_args(self):
        # all keywords
        self.assertEqual(
            getargs_keywords(arg1=(1,2), arg2=3, arg3=(4,(5,6)), arg4=(7,8,9), arg5=10),
            (1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
            )

    def test_optional_args(self):
        # missing optional keyword args, skipping tuples
        self.assertEqual(
            getargs_keywords(arg1=(1,2), arg2=3, arg5=10),
            (1, 2, 3, -1, -1, -1, -1, -1, -1, 10)
            )

    def test_required_args(self):
        # required arg missing
        try:
            getargs_keywords(arg1=(1,2))
        except TypeError, err:
            self.assertEqual(str(err), "Required argument 'arg2' (pos 2) not found")
        else:
            self.fail('TypeError should have been raised')

    def test_too_many_args(self):
        try:
            getargs_keywords((1,2),3,(4,(5,6)),(7,8,9),10,111)
        except TypeError, err:
            self.assertEqual(str(err), "function takes at most 5 arguments (6 given)")
        else:
            self.fail('TypeError should have been raised')

    def test_invalid_keyword(self):
        # extraneous keyword arg
        try:
            getargs_keywords((1,2),3,arg5=10,arg666=666)
        except TypeError, err:
            self.assertEqual(str(err), "'arg666' is an invalid keyword argument for this function")
        else:
            self.fail('TypeError should have been raised')
def test_main():
    # Aggregate every getargs TestCase defined in this module and run
    # them through Python's regrtest helper (test_support) so the suite
    # integrates with the standard test machinery.
    tests = [Signed_TestCase, Unsigned_TestCase, LongLong_TestCase,
        Tuple_TestCase, Keywords_TestCase]
    test_support.run_unittest(*tests)
if __name__ == "__main__":
    test_main()
|
gpl-2.0
|
ORNL-CEES/Cap
|
python/example/helpers.py
|
3
|
2254
|
from pycap import Observer, ECLabAsciiFile
from IPython import display
from numpy import real, imag, absolute, angle
from matplotlib import pyplot
from sys import stdout, exit
from os import remove
class PrintColumns(Observer):
    """Observer that dumps impedance measurements as three tab-separated
    columns (frequency, real impedance, negative imaginary impedance)
    to standard output, header row included.
    """
    def __new__(cls, *args, **kwargs):
        # NOTE(review): deliberately bypasses the base-class __new__,
        # presumably because Observer.__new__ takes arguments this class
        # does not -- confirm against pycap.Observer.
        return object.__new__(PrintColumns)

    def __init__(self):
        # Row template of three tab-separated fields, each taking a
        # caller-supplied format spec:
        # u'{0:{format_spec}}\t{1:{format_spec}}\t{2:{format_spec}}\t'
        # (built with join instead of the previous += loop).
        self._template = u'' + ''.join(
            '{%d:{format_spec}}\t' % column for column in range(3))

    def update(self, subject, *args, **kwargs):
        """Print a header row, then one row per measured frequency.

        Expects ``subject._data`` to hold numpy arrays under the
        'frequency' and 'impedance' keys (complex impedance).
        """
        extra = '>20'
        print(self._template.format('freq/Hz',
                                    'Re(Z)/ohm',
                                    '-Im(Z)/ohm',
                                    format_spec=extra + "s"),
              file=stdout)
        n = subject._data['frequency'].size
        for i in range(n):
            f = subject._data['frequency'][i]
            Z = subject._data['impedance'][i]
            # Fixed: removed the dead locals ``Y = 1.0 / Z`` (which could
            # raise ZeroDivisionError for a zero impedance sample) and
            # ``place_holder = 255`` -- neither was ever used.
            line = self._template.format(float(f),
                                         float(real(Z)),
                                         -float(imag(Z)),
                                         format_spec=extra + '.7e')
            print(line, file=stdout)
class RefreshDisplay(Observer):
    # Observer that redraws the current matplotlib figure inside an
    # IPython/Jupyter output cell each time the observed subject updates.
    def __new__(cls, *args, **kwargs):
        # NOTE(review): bypasses the base-class __new__, presumably
        # because Observer.__new__ takes constructor arguments this
        # class does not -- confirm against pycap.Observer.
        return object.__new__(RefreshDisplay)
    def update(self, subject, *args, **kwargs):
        # Clear the output cell (wait=True defers until new content is
        # ready, avoiding flicker), then re-render the current figure.
        display.clear_output(wait=True)
        display.display(pyplot.gcf())
def check_input(device, experiment):
    """Show the experiment header (as an EC-Lab ASCII dump) and ask the
    user to confirm before proceeding.

    Writes a temporary file named ``dummy`` in the current directory,
    echoes its header lines, then prompts on stdin; answering no exits
    the process with status 0.

    :param device: object exposing ``inspect()`` (a pycap device)
    :param experiment: experiment whose ``_extra_data`` is populated here
    """
    experiment._extra_data = device.inspect()
    dummy = ECLabAsciiFile('dummy')
    dummy.update(experiment)
    # Fixed: wrap in try/finally so the scratch file is deleted even when
    # reading or printing raises; previously it was leaked on error.
    try:
        with open('dummy', 'r', encoding='latin-1') as fin:
            lines = fin.readlines()
        # Skip the 7 fixed banner lines and the trailing data line.
        for line in lines[7:-1]:
            print(line.rstrip('\n'))
    finally:
        remove('dummy')
    print('continue? [Y/n]')
    yes = {'yes', 'y', ''}  # empty answer (plain Enter) counts as yes
    no = {'no', 'n'}
    while True:
        answer = input().lower()
        if answer in yes:
            break
        elif answer in no:
            exit(0)
        else:
            print("Please respond with 'yes' or 'no'")
|
bsd-3-clause
|
dkodnik/Ant
|
addons/auth_oauth_signup/res_users.py
|
39
|
2471
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010-2012 OpenERP SA (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
import simplejson
import openerp
from openerp.addons.auth_signup.res_users import SignupError
from openerp.osv import osv, fields
_logger = logging.getLogger(__name__)
class res_users(osv.Model):
    # Extends res.users so that a failed OAuth sign-in can fall back to
    # creating the user through the auth_signup flow (OpenERP 7 /
    # Python 2 era code -- note the 'except <type>, <name>' syntax).
    _inherit = 'res.users'
    def _auth_oauth_signin(self, cr, uid, provider, validation, params, context=None):
        # overridden to use signup method if regular oauth signin fails
        try:
            login = super(res_users, self)._auth_oauth_signin(cr, uid, provider, validation, params, context=context)
        except openerp.exceptions.AccessDenied, access_denied_exception:
            # No existing user matched this OAuth identity.
            if context and context.get('no_user_creation'):
                # Caller explicitly forbids creating accounts: signal
                # "no login" instead of signing up.
                return None
            # The OAuth 'state' payload carries the signup token under 't'.
            state = simplejson.loads(params['state'])
            token = state.get('t')
            oauth_uid = validation['user_id']
            # Synthesize a login when the provider returned no email.
            email = validation.get('email', 'provider_%s_user_%s' % (provider, oauth_uid))
            name = validation.get('name', email)
            values = {
                'name': name,
                'login': email,
                'email': email,
                'oauth_provider_id': provider,
                'oauth_uid': oauth_uid,
                'oauth_access_token': params['access_token'],
                'active': True,
            }
            try:
                _, login, _ = self.signup(cr, uid, values, token, context=context)
            except SignupError:
                # Signup failed too: re-raise the original AccessDenied
                # rather than leaking the signup error to the caller.
                raise access_denied_exception
        return login
|
agpl-3.0
|
Lyrositor/CoilSnake
|
coilsnake/model/eb/map_music.py
|
5
|
1493
|
from coilsnake.model.common.table import TableEntry, LittleEndianIntegerTableEntry, RowTableEntry
from coilsnake.model.eb.table import EbEventFlagTableEntry
# Row layout for a single map-music override: an event flag paired with a
# 2-byte little-endian music track id.  A run of these rows makes up one
# MapMusicTableEntry; the row whose event flag is 0 terminates the run
# (see MapMusicTableEntry.from_block below).
MapMusicSubTableEntry = RowTableEntry.from_schema(
    name="Map Music Sub Table Entry",
    schema=[EbEventFlagTableEntry,
            type("Music", (LittleEndianIntegerTableEntry,), {"name": "Music", "size": 2})]
)
class MapMusicTableEntry(TableEntry):
    """Variable-length table entry: a run of (event flag, music) rows,
    terminated by (and including) the first row whose event flag is 0.
    """
    name = "Map Music Table Entry"
    @classmethod
    def from_block(cls, block, offset):
        # Read rows until the zero-flag sentinel row, sentinel included.
        entries = []
        while True:
            entry = MapMusicSubTableEntry.from_block(block, offset)
            offset += MapMusicSubTableEntry.size
            entries.append(entry)
            if entry[0] == 0:
                break
        return entries
    @classmethod
    def to_block_size(cls, value):
        return len(value) * MapMusicSubTableEntry.size
    @classmethod
    def to_block(cls, block, offset, value):
        for position, entry in enumerate(value):
            MapMusicSubTableEntry.to_block(
                block, offset + position * MapMusicSubTableEntry.size, entry)
    @classmethod
    def to_yml_rep(cls, value):
        return list(map(MapMusicSubTableEntry.to_yml_rep, value))
    @classmethod
    def from_yml_rep(cls, yml_rep):
        return list(map(MapMusicSubTableEntry.from_yml_rep, yml_rep))
    @classmethod
    def yml_rep_hex_labels(cls):
        # Delegate: an entry's labels are exactly its rows' labels.
        return MapMusicSubTableEntry.yml_rep_hex_labels()
|
gpl-3.0
|
wxkdesky/phantomjs
|
src/qt/qtwebkit/Tools/Scripts/webkitpy/tool/steps/preparechangelogforrevert_unittest.py
|
122
|
5545
|
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest2 as unittest
# Do not import changelog_unittest.ChangeLogTest directly as that will cause it to be run again.
from webkitpy.common.checkout import changelog_unittest
from webkitpy.common.checkout.changelog import ChangeLog
from webkitpy.common.system.filesystem_mock import MockFileSystem
from webkitpy.tool.steps.preparechangelogforrevert import *
class UpdateChangeLogsForRevertTest(unittest.TestCase):
    # Fixtures for PrepareChangeLogForRevert._message_for_revert.
    # NOTE(review): the string bodies below are compared byte-for-byte
    # against generated ChangeLog text, so their exact whitespace is
    # significant -- do not reformat.
    _revert_entry_with_bug_url = '''2009-08-19 Eric Seidel <[email protected]>
        Unreviewed, rolling out r12345.
        http://trac.webkit.org/changeset/12345
        http://example.com/123
        Reason
        * Scripts/bugzilla-tool:
'''
    _revert_entry_without_bug_url = '''2009-08-19 Eric Seidel <[email protected]>
        Unreviewed, rolling out r12345.
        http://trac.webkit.org/changeset/12345
        Reason
        * Scripts/bugzilla-tool:
'''
    _multiple_revert_entry_with_bug_url = '''2009-08-19 Eric Seidel <[email protected]>
        Unreviewed, rolling out r12345, r12346, and r12347.
        http://trac.webkit.org/changeset/12345
        http://trac.webkit.org/changeset/12346
        http://trac.webkit.org/changeset/12347
        http://example.com/123
        Reason
        * Scripts/bugzilla-tool:
'''
    _multiple_revert_entry_without_bug_url = '''2009-08-19 Eric Seidel <[email protected]>
        Unreviewed, rolling out r12345, r12346, and r12347.
        http://trac.webkit.org/changeset/12345
        http://trac.webkit.org/changeset/12346
        http://trac.webkit.org/changeset/12347
        Reason
        * Scripts/bugzilla-tool:
'''
    _revert_with_log_reason = """2009-08-19 Eric Seidel <[email protected]>
        Unreviewed, rolling out r12345.
        http://trac.webkit.org/changeset/12345
        http://example.com/123
        This is a very long reason which should be long enough so that
        _message_for_revert will need to wrap it. We'll also include
        a
        https://veryveryveryveryverylongbugurl.com/reallylongbugthingy.cgi?bug_id=12354
        link so that we can make sure we wrap that right too.
        * Scripts/bugzilla-tool:
"""
    def _assert_message_for_revert_output(self, args, expected_entry):
        # Seed a mock ChangeLog with the shared boilerplate + example
        # entry, apply the generated revert message, then compare the
        # newest entry against the expected fixture text.
        changelog_contents = u"%s\n%s" % (changelog_unittest.ChangeLogTest._new_entry_boilerplate, changelog_unittest.ChangeLogTest._example_changelog)
        changelog_path = "ChangeLog"
        fs = MockFileSystem({changelog_path: changelog_contents.encode("utf-8")})
        changelog = ChangeLog(changelog_path, fs)
        changelog.update_with_unreviewed_message(PrepareChangeLogForRevert._message_for_revert(*args))
        actual_entry = changelog.latest_entry()
        self.assertMultiLineEqual(actual_entry.contents(), expected_entry)
        # An unreviewed rollout must not carry reviewer attribution.
        self.assertIsNone(actual_entry.reviewer_text())
        # These checks could be removed to allow this to work on other entries:
        self.assertEqual(actual_entry.author_name(), "Eric Seidel")
        self.assertEqual(actual_entry.author_email(), "[email protected]")
    def test_message_for_revert(self):
        # Single and multiple revisions, with and without a bug URL, plus
        # a long reason exercising the line-wrapping path.
        self._assert_message_for_revert_output([[12345], "Reason"], self._revert_entry_without_bug_url)
        self._assert_message_for_revert_output([[12345], "Reason", "http://example.com/123"], self._revert_entry_with_bug_url)
        self._assert_message_for_revert_output([[12345, 12346, 12347], "Reason"], self._multiple_revert_entry_without_bug_url)
        self._assert_message_for_revert_output([[12345, 12346, 12347], "Reason", "http://example.com/123"], self._multiple_revert_entry_with_bug_url)
        long_reason = "This is a very long reason which should be long enough so that _message_for_revert will need to wrap it. We'll also include a https://veryveryveryveryverylongbugurl.com/reallylongbugthingy.cgi?bug_id=12354 link so that we can make sure we wrap that right too."
        self._assert_message_for_revert_output([[12345], long_reason, "http://example.com/123"], self._revert_with_log_reason)
|
bsd-3-clause
|
vovojh/gem5
|
src/arch/x86/isa/insts/general_purpose/compare_and_test/bit_test.py
|
89
|
10961
|
# Copyright (c) 2007-2008 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2008 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
microcode = '''
def macroop BT_R_I {
sexti t0, reg, imm, flags=(CF,)
};
def macroop BT_M_I {
limm t1, imm, dataSize=asz
# This fudges just a tiny bit, but it's reasonable to expect the
# microcode generation logic to have the log of the various sizes
# floating around as well.
ld t1, seg, sib, disp
sexti t0, t1, imm, flags=(CF,)
};
def macroop BT_P_I {
rdip t7
limm t1, imm, dataSize=asz
ld t1, seg, riprel, disp, dataSize=asz
sexti t0, t1, imm, flags=(CF,)
};
def macroop BT_R_R {
sext t0, reg, regm, flags=(CF,)
};
def macroop BT_M_R {
srai t2, reg, 3, dataSize=asz
srai t3, t2, ldsz, dataSize=asz
lea t3, flatseg, [dsz, t3, base], dataSize=asz
ld t1, seg, [scale, index, t3], disp
sext t0, t1, reg, flags=(CF,)
};
def macroop BT_P_R {
rdip t7
srai t2, reg, 3, dataSize=asz
srai t3, t2, ldsz, dataSize=asz
ld t1, seg, [dsz, t3, t7], disp
sext t0, t1, reg, flags=(CF,)
};
def macroop BTC_R_I {
sexti t0, reg, imm, flags=(CF,)
limm t1, 1
roli t1, t1, imm
xor reg, reg, t1
};
def macroop BTC_M_I {
limm t1, imm, dataSize=asz
# This fudges just a tiny bit, but it's reasonable to expect the
# microcode generation logic to have the log of the various sizes
# floating around as well.
limm t4, 1
roli t4, t4, imm
ldst t1, seg, sib, disp
sexti t0, t1, imm, flags=(CF,)
xor t1, t1, t4
st t1, seg, sib, disp
};
def macroop BTC_P_I {
rdip t7, dataSize=asz
limm t1, imm, dataSize=asz
limm t4, 1
roli t4, t4, imm
ldst t1, seg, riprel, disp
sexti t0, t1, imm, flags=(CF,)
xor t1, t1, t4
st t1, seg, riprel, disp
};
def macroop BTC_LOCKED_M_I {
limm t1, imm, dataSize=asz
limm t4, 1
roli t4, t4, imm
mfence
ldstl t1, seg, sib, disp
sexti t0, t1, imm, flags=(CF,)
xor t1, t1, t4
stul t1, seg, sib, disp
mfence
};
def macroop BTC_LOCKED_P_I {
rdip t7, dataSize=asz
limm t1, imm, dataSize=asz
limm t4, 1
roli t4, t4, imm
mfence
ldstl t1, seg, riprel, disp
sexti t0, t1, imm, flags=(CF,)
xor t1, t1, t4
stul t1, seg, riprel, disp
mfence
};
def macroop BTC_R_R {
sext t0, reg, regm, flags=(CF,)
limm t1, 1
rol t1, t1, regm
xor reg, reg, t1
};
def macroop BTC_M_R {
srai t2, reg, 3, dataSize=asz
srai t3, t2, ldsz, dataSize=asz
lea t3, flatseg, [dsz, t3, base], dataSize=asz
limm t4, 1
rol t4, t4, reg
ldst t1, seg, [scale, index, t3], disp
sext t0, t1, reg, flags=(CF,)
xor t1, t1, t4
st t1, seg, [scale, index, t3], disp
};
def macroop BTC_P_R {
rdip t7, dataSize=asz
srai t2, reg, 3, dataSize=asz
srai t3, t2, ldsz, dataSize=asz
limm t4, 1
rol t4, t4, reg
ldst t1, seg, [dsz, t3, t7], disp
sext t0, t1, reg, flags=(CF,)
xor t1, t1, t4
st t1, seg, [dsz, t3, t7], disp
};
def macroop BTC_LOCKED_M_R {
srai t2, reg, 3, dataSize=asz
srai t3, t2, ldsz, dataSize=asz
lea t3, flatseg, [dsz, t3, base], dataSize=asz
limm t4, 1
rol t4, t4, reg
mfence
ldstl t1, seg, [scale, index, t3], disp
sext t0, t1, reg, flags=(CF,)
xor t1, t1, t4
stul t1, seg, [scale, index, t3], disp
mfence
};
def macroop BTC_LOCKED_P_R {
rdip t7, dataSize=asz
srai t2, reg, 3, dataSize=asz
srai t3, t2, ldsz, dataSize=asz
limm t4, 1
rol t4, t4, reg
mfence
ldstl t1, seg, [dsz, t3, t7], disp
sext t0, t1, reg, flags=(CF,)
xor t1, t1, t4
stul t1, seg, [dsz, t3, t7], disp
mfence
};
def macroop BTR_R_I {
sexti t0, reg, imm, flags=(CF,)
limm t1, "(uint64_t(-(2ULL)))"
roli t1, t1, imm
and reg, reg, t1
};
def macroop BTR_M_I {
limm t1, imm, dataSize=asz
limm t4, "(uint64_t(-(2ULL)))"
roli t4, t4, imm
ldst t1, seg, sib, disp
sexti t0, t1, imm, flags=(CF,)
and t1, t1, t4
st t1, seg, sib, disp
};
def macroop BTR_P_I {
rdip t7, dataSize=asz
limm t1, imm, dataSize=asz
limm t4, "(uint64_t(-(2ULL)))"
roli t4, t4, imm
ldst t1, seg, riprel, disp
sexti t0, t1, imm, flags=(CF,)
and t1, t1, t4
st t1, seg, riprel, disp
};
def macroop BTR_LOCKED_M_I {
limm t1, imm, dataSize=asz
limm t4, "(uint64_t(-(2ULL)))"
roli t4, t4, imm
mfence
ldstl t1, seg, sib, disp
sexti t0, t1, imm, flags=(CF,)
and t1, t1, t4
stul t1, seg, sib, disp
mfence
};
def macroop BTR_LOCKED_P_I {
rdip t7, dataSize=asz
limm t1, imm, dataSize=asz
limm t4, "(uint64_t(-(2ULL)))"
roli t4, t4, imm
mfence
ldstl t1, seg, riprel, disp
sexti t0, t1, imm, flags=(CF,)
and t1, t1, t4
stul t1, seg, riprel, disp
mfence
};
def macroop BTR_R_R {
sext t0, reg, regm, flags=(CF,)
limm t1, "(uint64_t(-(2ULL)))"
rol t1, t1, regm
and reg, reg, t1
};
def macroop BTR_M_R {
srai t2, reg, 3, dataSize=asz
srai t3, t2, ldsz, dataSize=asz
lea t3, flatseg, [dsz, t3, base], dataSize=asz
limm t4, "(uint64_t(-(2ULL)))"
rol t4, t4, reg
ldst t1, seg, [scale, index, t3], disp
sext t0, t1, reg, flags=(CF,)
and t1, t1, t4
st t1, seg, [scale, index, t3], disp
};
def macroop BTR_P_R {
rdip t7, dataSize=asz
srai t2, reg, 3, dataSize=asz
srai t3, t2, ldsz, dataSize=asz
limm t4, "(uint64_t(-(2ULL)))"
rol t4, t4, reg
ldst t1, seg, [dsz, t3, t7], disp
sext t0, t1, reg, flags=(CF,)
and t1, t1, t4
st t1, seg, [dsz, t3, t7], disp
};
def macroop BTR_LOCKED_M_R {
srai t2, reg, 3, dataSize=asz
srai t3, t2, ldsz, dataSize=asz
lea t3, flatseg, [dsz, t3, base], dataSize=asz
limm t4, "(uint64_t(-(2ULL)))"
rol t4, t4, reg
mfence
ldstl t1, seg, [scale, index, t3], disp
sext t0, t1, reg, flags=(CF,)
and t1, t1, t4
stul t1, seg, [scale, index, t3], disp
mfence
};
def macroop BTR_LOCKED_P_R {
rdip t7, dataSize=asz
srai t2, reg, 3, dataSize=asz
srai t3, t2, ldsz, dataSize=asz
limm t4, "(uint64_t(-(2ULL)))"
rol t4, t4, reg
mfence
ldstl t1, seg, [dsz, t3, t7], disp
sext t0, t1, reg, flags=(CF,)
and t1, t1, t4
stul t1, seg, [dsz, t3, t7], disp
mfence
};
def macroop BTS_R_I {
sexti t0, reg, imm, flags=(CF,)
limm t1, 1
roli t1, t1, imm
or reg, reg, t1
};
def macroop BTS_M_I {
limm t1, imm, dataSize=asz
limm t4, 1
roli t4, t4, imm
ldst t1, seg, sib, disp
sexti t0, t1, imm, flags=(CF,)
or t1, t1, t4
st t1, seg, sib, disp
};
def macroop BTS_P_I {
rdip t7, dataSize=asz
limm t1, imm, dataSize=asz
limm t4, 1
roli t4, t4, imm
ldst t1, seg, riprel, disp
sexti t0, t1, imm, flags=(CF,)
or t1, t1, t4
st t1, seg, riprel, disp
};
def macroop BTS_LOCKED_M_I {
limm t1, imm, dataSize=asz
limm t4, 1
roli t4, t4, imm
mfence
ldstl t1, seg, sib, disp
sexti t0, t1, imm, flags=(CF,)
or t1, t1, t4
stul t1, seg, sib, disp
mfence
};
def macroop BTS_LOCKED_P_I {
rdip t7, dataSize=asz
limm t1, imm, dataSize=asz
limm t4, 1
roli t4, t4, imm
mfence
ldstl t1, seg, riprel, disp
sexti t0, t1, imm, flags=(CF,)
or t1, t1, t4
stul t1, seg, riprel, disp
mfence
};
def macroop BTS_R_R {
sext t0, reg, regm, flags=(CF,)
limm t1, 1
rol t1, t1, regm
or reg, reg, t1
};
def macroop BTS_M_R {
srai t2, reg, 3, dataSize=asz
srai t3, t2, ldsz, dataSize=asz
lea t3, flatseg, [dsz, t3, base], dataSize=asz
limm t4, 1
rol t4, t4, reg
ldst t1, seg, [scale, index, t3], disp
sext t0, t1, reg, flags=(CF,)
or t1, t1, t4
st t1, seg, [scale, index, t3], disp
};
def macroop BTS_P_R {
rdip t7, dataSize=asz
srai t2, reg, 3, dataSize=asz
srai t3, t2, ldsz, dataSize=asz
lea t3, flatseg, [dsz, t3, base], dataSize=asz
limm t4, 1
rol t4, t4, reg
ldst t1, seg, [1, t3, t7], disp
sext t0, t1, reg, flags=(CF,)
or t1, t1, t4
st t1, seg, [1, t3, t7], disp
};
def macroop BTS_LOCKED_M_R {
srai t2, reg, 3, dataSize=asz
srai t3, t2, ldsz, dataSize=asz
lea t3, flatseg, [dsz, t3, base], dataSize=asz
limm t4, 1
rol t4, t4, reg
mfence
ldstl t1, seg, [scale, index, t3], disp
sext t0, t1, reg, flags=(CF,)
or t1, t1, t4
stul t1, seg, [scale, index, t3], disp
mfence
};
def macroop BTS_LOCKED_P_R {
rdip t7, dataSize=asz
srai t2, reg, 3, dataSize=asz
srai t3, t2, ldsz, dataSize=asz
lea t3, flatseg, [dsz, t3, base], dataSize=asz
limm t4, 1
rol t4, t4, reg
mfence
ldstl t1, seg, [1, t3, t7], disp
sext t0, t1, reg, flags=(CF,)
or t1, t1, t4
stul t1, seg, [1, t3, t7], disp
mfence
};
'''
|
bsd-3-clause
|
mjg2203/edx-platform-seas
|
common/djangoapps/django_comment_common/migrations/0001_initial.py
|
188
|
6980
|
# -*- coding: utf-8 -*-
from south.v2 import SchemaMigration
class Migration(SchemaMigration):
    # Intentionally empty initial South migration: the schema was
    # historically managed by the django_comment_client app, and this
    # migration only anchors the South history going forward.
    #
    # cdodge: This is basically an empty migration since everything has - up to now - managed in the django_comment_client app
    # But going forward we should be using this migration
    #
    def forwards(self, orm):
        # No-op: the tables already exist (created outside South).
        pass
    def backwards(self, orm):
        # No-op: nothing was created, so there is nothing to reverse.
        pass
    # South "frozen ORM" snapshot (auto-generated by schemamigration);
    # do not hand-edit the field definitions below.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '1'}),
            'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'blank': 'True'}),
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'display_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
            'email_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'ignored_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'interesting_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
            'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
            'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
            'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'show_country': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
            'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'django_comment_common.permission': {
            'Meta': {'object_name': 'Permission'},
            'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}),
            'roles': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'permissions'", 'symmetrical': 'False', 'to': "orm['django_comment_common.Role']"})
        },
        'django_comment_common.role': {
            'Meta': {'object_name': 'Role'},
            'course_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
            'users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'roles'", 'symmetrical': 'False', 'to': "orm['auth.User']"})
        }
    }
    complete_apps = ['django_comment_common']
|
agpl-3.0
|
dgarros/ansible
|
test/units/modules/network/nxos/test_nxos_bgp_neighbor.py
|
23
|
1955
|
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from ansible.compat.tests.mock import patch
from ansible.modules.network.nxos import nxos_bgp_neighbor
from .nxos_module import TestNxosModule, load_fixture, set_module_args
class TestNxosBgpNeighborModule(TestNxosModule):
    """Unit tests for the nxos_bgp_neighbor Ansible module."""
    module = nxos_bgp_neighbor
    def setUp(self):
        # Patch the module's config I/O so no device connection happens.
        self.mock_load_config = patch('ansible.modules.network.nxos.nxos_bgp_neighbor.load_config')
        self.mock_get_config = patch('ansible.modules.network.nxos.nxos_bgp_neighbor.get_config')
        self.load_config = self.mock_load_config.start()
        self.get_config = self.mock_get_config.start()
    def tearDown(self):
        self.mock_get_config.stop()
        self.mock_load_config.stop()
    def load_fixtures(self, commands=None):
        # Serve a canned running-config; swallow config pushes.
        self.load_config.return_value = None
        self.get_config.return_value = load_fixture('nxos_bgp_config.cfg')
    def test_nxos_bgp_neighbor(self):
        args = dict(asn=65535, neighbor='3.3.3.3', description='some words')
        set_module_args(args)
        expected = ['router bgp 65535', 'neighbor 3.3.3.3',
                    'description some words']
        result = self.execute_module(changed=True)
        self.assertEqual(result['commands'], expected)
|
gpl-3.0
|
rafaelang/django-rest-framework
|
rest_framework/utils/formatting.py
|
72
|
2012
|
"""
Utility functions to return a formatted name and description for a given view.
"""
from __future__ import unicode_literals
import re
from django.utils.encoding import force_text
from django.utils.html import escape
from django.utils.safestring import mark_safe
from rest_framework.compat import apply_markdown
def remove_trailing_string(content, trailing):
    """
    Strip trailing component `trailing` from `content` if it exists.
    Used when generating names from view classes.

    If `content` consists entirely of `trailing` it is returned
    unchanged, so e.g. a view class named exactly 'View' keeps its name.

    Fixed: an empty `trailing` previously sliced with ``content[:-0]``,
    i.e. ``content[:0]``, and wrongly returned the empty string; it now
    leaves `content` untouched.
    """
    if trailing and content.endswith(trailing) and content != trailing:
        return content[:-len(trailing)]
    return content
def dedent(content):
    """
    Remove leading indent from a block of text.
    Used when generating descriptions from docstrings.

    Note that python's `textwrap.dedent` doesn't quite cut it,
    as it fails to dedent multiline docstrings that include
    unindented text on the initial line.
    """
    content = force_text(content)
    # Measure the indent of every non-blank line after the first; the
    # first line is ignored because docstrings commonly start flush
    # against the opening quotes.
    indents = []
    for line in content.splitlines()[1:]:
        if line.lstrip():
            indents.append(len(line) - len(line.lstrip(' ')))
    # Strip the common margin from every line, if any was found.
    if indents:
        margin = re.compile('^' + (' ' * min(indents)), re.MULTILINE)
        content = margin.sub('', content)
    return content.strip()
def camelcase_to_spaces(content):
    """
    Translate 'CamelCaseNames' to 'Camel Case Names'.
    Used when generating names from view classes.
    """
    # Match an upper-case letter that starts a new word: either one
    # preceded by a lower-case letter, or one followed by a non-upper.
    camelcase_boundary = '(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))'
    spaced = re.sub(camelcase_boundary, ' \\1', content).strip()
    # Also turn underscores into spaces, then title-case the result.
    words = spaced.split('_')
    return ' '.join(words).title()
def markup_description(description):
    """
    Apply HTML markup to the given description.
    """
    if apply_markdown:
        # Markdown already wraps output in block-level elements.
        html = apply_markdown(description)
    else:
        # No markdown available: escape, preserve line breaks, wrap once.
        html = '<p>' + escape(description).replace('\n', '<br />') + '</p>'
    return mark_safe(html)
|
bsd-2-clause
|
jymannob/Sick-Beard
|
lib/socks/__init__.py
|
138
|
16505
|
"""SocksiPy - Python SOCKS module.
Version 1.00
Copyright 2006 Dan-Haim. All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of Dan Haim nor the names of his contributors may be used
to endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA
OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMANGE.
This module provides a standard socket-like interface for Python
for tunneling connections through SOCKS proxies.
"""
"""
Minor modifications made by Christopher Gilbert (http://motomastyle.com/)
for use in PyLoris (http://pyloris.sourceforge.net/)
Minor modifications made by Mario Vilas (http://breakingcode.wordpress.com/)
mainly to merge bug fixes found in Sourceforge
"""
import re
import socket
import struct
import sys
# Supported proxy protocol identifiers (part of the public API).
PROXY_TYPE_SOCKS4 = 1
PROXY_TYPE_SOCKS5 = 2
PROXY_TYPE_HTTP = 3
# Splits "scheme://host:port" into capture groups; groups 1/3/4 hold the
# scheme, host and port respectively (see parseproxyuri).
PROXY_REGEX = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*):([^/?#]*))?")
# Module-wide default proxy tuple installed by setdefaultproxy().
_defaultproxy = None
# Keep a reference to the real socket class so socksocket can still reach
# it after wrapmodule() monkey-patches a module's socket.socket.
_orgsocket = socket.socket
class ProxyError(Exception): pass            # base class for all proxy failures
class GeneralProxyError(ProxyError): pass    # protocol-agnostic errors (bad input, closed connection)
class Socks5AuthError(ProxyError): pass      # SOCKS5 authentication-phase failures
class Socks5Error(ProxyError): pass          # SOCKS5 connect-request failures
class Socks4Error(ProxyError): pass          # SOCKS4 request failures
class HTTPError(ProxyError): pass            # HTTP CONNECT failures
# Human-readable messages indexed by the numeric codes used in the
# (code, message) tuples raised throughout this module.
_generalerrors = ("success",
    "invalid data",
    "not connected",
    "not available",
    "bad proxy type",
    "bad input")

# SOCKS5 reply codes 0-8, plus a catch-all "unknown" at index 9.
_socks5errors = ("succeeded",
    "general SOCKS server failure",
    "connection not allowed by ruleset",
    "Network unreachable",
    "Host unreachable",
    "Connection refused",
    "TTL expired",
    "Command not supported",
    "Address type not supported",
    "Unknown error")

# SOCKS5 username/password authentication outcomes.
_socks5autherrors = ("succeeded",
    "authentication is required",
    "all offered authentication methods were rejected",
    "unknown username or invalid password",
    "unknown error")

# SOCKS4 reply codes 90-93, indexed with an offset of -90.
_socks4errors = ("request granted",
    "request rejected or failed",
    "request rejected because SOCKS server cannot connect to identd on the client",
    "request rejected because the client program and identd report different user-ids",
    "unknown error")
def parseproxyuri(proxyurl):
    """Parses a http proxy uri in the format x://a.b.c.d:port

    (protocol, addr, port) = parseproxyuri(uri)
    """
    parts = PROXY_REGEX.match(proxyurl).groups()
    # Groups: 1 = scheme, 3 = host, 4 = port (0 and 2 are wrappers).
    protocol, addr, port = parts[1], parts[3], parts[4]
    return (protocol, addr, port)
def setdefaultproxy(proxytype=None, addr=None, port=None, rdns=True, username=None, password=None):
    """setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]])

    Install a module-wide default proxy; every socksocket created
    afterwards uses it unless explicitly changed via setproxy().
    """
    global _defaultproxy
    # Same 6-tuple layout socksocket.setproxy stores internally.
    _defaultproxy = (proxytype, addr, port, rdns, username, password)
def wrapmodule(module):
    """wrapmodule(module)

    Attempts to replace a module's socket library with a SOCKS socket. Must set
    a default proxy using setdefaultproxy(...) first.
    This will only work on modules that import socket directly into the namespace;
    most of the Python Standard Library falls into this category.

    Raises:
        GeneralProxyError: if no default proxy has been configured.
    """
    # Guard clause (and identity comparison with None instead of !=):
    # refuse to monkey-patch when no default proxy exists, otherwise the
    # wrapped sockets would silently be created unproxied.
    if _defaultproxy is None:
        raise GeneralProxyError((4, "no proxy specified"))
    module.socket.socket = socksocket
class socksocket(socket.socket):
    """socksocket([family[, type[, proto]]]) -> socket object

    Open a SOCKS enabled socket. The parameters are the same as
    those of the standard socket init. In order for SOCKS to work,
    you must specify family=AF_INET, type=SOCK_STREAM and proto=0.
    """

    def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None):
        # NOTE(review): the _sock parameter is a Python 2 socket API detail;
        # Python 3's socket.socket.__init__ does not accept it -- confirm
        # the intended runtime.
        _orgsocket.__init__(self, family, type, proto, _sock)
        # Adopt the module-wide default proxy (if one was registered via
        # setdefaultproxy); otherwise start unproxied.
        if _defaultproxy != None:
            self.__proxy = _defaultproxy
        else:
            self.__proxy = (None, None, None, None, None, None)
        self.__proxysockname = None
        self.__proxypeername = None

    def __recvall(self, count):
        """__recvall(count) -> data
        Receive EXACTLY the number of bytes requested from the socket.
        Blocks until the required number of bytes have been received.
        """
        data = self.recv(count)
        while len(data) < count:
            d = self.recv(count-len(data))
            # A zero-length read means the peer closed mid-message.
            if not d: raise GeneralProxyError((0, "connection closed unexpectedly"))
            data = data + d
        return data

    def setproxy(self, proxytype=None, addr=None, port=None, rdns=True, username=None, password=None):
        """setproxy(proxytype, addr[, port[, rdns[, username[, password]]]])
        Sets the proxy to be used.
        proxytype - The type of the proxy to be used. Three types
            are supported: PROXY_TYPE_SOCKS4 (including socks4a),
            PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP
        addr - The address of the server (IP or DNS).
        port - The port of the server. Defaults to 1080 for SOCKS
            servers and 8080 for HTTP proxy servers.
        rdns - Should DNS queries be preformed on the remote side
            (rather than the local side). The default is True.
            Note: This has no effect with SOCKS4 servers.
        username - Username to authenticate with to the server.
            The default is no authentication.
        password - Password to authenticate with to the server.
            Only relevant when username is also provided.
        """
        # Tuple layout: (proxytype, addr, port, rdns, username, password).
        self.__proxy = (proxytype, addr, port, rdns, username, password)

    def __negotiatesocks5(self, destaddr, destport):
        """__negotiatesocks5(self,destaddr,destport)
        Negotiates a connection through a SOCKS5 server.
        """
        # First we'll send the authentication packages we support.
        if (self.__proxy[4]!=None) and (self.__proxy[5]!=None):
            # The username/password details were supplied to the
            # setproxy method so we support the USERNAME/PASSWORD
            # authentication (in addition to the standard none).
            self.sendall(struct.pack('BBBB', 0x05, 0x02, 0x00, 0x02))
        else:
            # No username/password were entered, therefore we
            # only support connections with no authentication.
            self.sendall(struct.pack('BBB', 0x05, 0x01, 0x00))
        # We'll receive the server's response to determine which
        # method was selected
        chosenauth = self.__recvall(2)
        if chosenauth[0:1] != chr(0x05).encode():
            self.close()
            raise GeneralProxyError((1, _generalerrors[1]))
        # Check the chosen authentication method
        if chosenauth[1:2] == chr(0x00).encode():
            # No authentication is required
            pass
        elif chosenauth[1:2] == chr(0x02).encode():
            # Okay, we need to perform a basic username/password
            # authentication.
            # NOTE(review): the two chr(len(...)) length bytes are not
            # .encode()d like the neighbouring literals, so this line mixes
            # str and bytes under Python 3 -- verify against the target
            # interpreter.
            self.sendall(chr(0x01).encode() + chr(len(self.__proxy[4])) + self.__proxy[4] + chr(len(self.__proxy[5])) + self.__proxy[5])
            authstat = self.__recvall(2)
            if authstat[0:1] != chr(0x01).encode():
                # Bad response
                self.close()
                raise GeneralProxyError((1, _generalerrors[1]))
            if authstat[1:2] != chr(0x00).encode():
                # Authentication failed
                self.close()
                raise Socks5AuthError((3, _socks5autherrors[3]))
            # Authentication succeeded
        else:
            # Reaching here is always bad
            self.close()
            # NOTE(review): chosenauth[1] is an int under Python 3, so this
            # comparison against a bytes object can never be true there.
            if chosenauth[1] == chr(0xFF).encode():
                raise Socks5AuthError((2, _socks5autherrors[2]))
            else:
                raise GeneralProxyError((1, _generalerrors[1]))
        # Now we can request the actual connection
        req = struct.pack('BBB', 0x05, 0x01, 0x00)
        # If the given destination address is an IP address, we'll
        # use the IPv4 address request even if remote resolving was specified.
        try:
            ipaddr = socket.inet_aton(destaddr)
            req = req + chr(0x01).encode() + ipaddr
        except socket.error:
            # Well it's not an IP number, so it's probably a DNS name.
            if self.__proxy[3]:
                # Resolve remotely
                ipaddr = None
                req = req + chr(0x03).encode() + chr(len(destaddr)).encode() + destaddr
            else:
                # Resolve locally
                ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
                req = req + chr(0x01).encode() + ipaddr
        req = req + struct.pack(">H", destport)
        self.sendall(req)
        # Get the response
        resp = self.__recvall(4)
        if resp[0:1] != chr(0x05).encode():
            self.close()
            raise GeneralProxyError((1, _generalerrors[1]))
        elif resp[1:2] != chr(0x00).encode():
            # Connection failed
            self.close()
            if ord(resp[1:2])<=8:
                raise Socks5Error((ord(resp[1:2]), _socks5errors[ord(resp[1:2])]))
            else:
                raise Socks5Error((9, _socks5errors[9]))
        # Get the bound address/port
        elif resp[3:4] == chr(0x01).encode():
            # Address type 0x01: a raw 4-byte IPv4 address follows.
            boundaddr = self.__recvall(4)
        elif resp[3:4] == chr(0x03).encode():
            # Address type 0x03: a length-prefixed domain name follows.
            resp = resp + self.recv(1)
            boundaddr = self.__recvall(ord(resp[4:5]))
        else:
            self.close()
            raise GeneralProxyError((1,_generalerrors[1]))
        boundport = struct.unpack(">H", self.__recvall(2))[0]
        self.__proxysockname = (boundaddr, boundport)
        if ipaddr != None:
            self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
        else:
            self.__proxypeername = (destaddr, destport)

    def getproxysockname(self):
        """getsockname() -> address info
        Returns the bound IP address and port number at the proxy.
        """
        return self.__proxysockname

    def getproxypeername(self):
        """getproxypeername() -> address info
        Returns the IP and port number of the proxy.
        """
        return _orgsocket.getpeername(self)

    def getpeername(self):
        """getpeername() -> address info
        Returns the IP address and port number of the destination
        machine (note: getproxypeername returns the proxy)
        """
        return self.__proxypeername

    def __negotiatesocks4(self,destaddr,destport):
        """__negotiatesocks4(self,destaddr,destport)
        Negotiates a connection through a SOCKS4 server.
        """
        # Check if the destination address provided is an IP address
        rmtrslv = False
        try:
            ipaddr = socket.inet_aton(destaddr)
        except socket.error:
            # It's a DNS name. Check where it should be resolved.
            if self.__proxy[3]:
                # SOCKS4a remote resolution: send the magic 0.0.0.1 address
                # and append the hostname to the request below.
                ipaddr = struct.pack("BBBB", 0x00, 0x00, 0x00, 0x01)
                rmtrslv = True
            else:
                ipaddr = socket.inet_aton(socket.gethostbyname(destaddr))
        # Construct the request packet
        req = struct.pack(">BBH", 0x04, 0x01, destport) + ipaddr
        # The username parameter is considered userid for SOCKS4
        if self.__proxy[4] != None:
            req = req + self.__proxy[4]
        req = req + chr(0x00).encode()
        # DNS name if remote resolving is required
        # NOTE: This is actually an extension to the SOCKS4 protocol
        # called SOCKS4A and may not be supported in all cases.
        if rmtrslv:
            req = req + destaddr + chr(0x00).encode()
        self.sendall(req)
        # Get the response from the server
        resp = self.__recvall(8)
        if resp[0:1] != chr(0x00).encode():
            # Bad data
            self.close()
            raise GeneralProxyError((1,_generalerrors[1]))
        if resp[1:2] != chr(0x5A).encode():
            # Server returned an error
            self.close()
            if ord(resp[1:2]) in (91, 92, 93):
                # NOTE(review): close() is invoked a second time here --
                # harmless, but redundant.
                self.close()
                raise Socks4Error((ord(resp[1:2]), _socks4errors[ord(resp[1:2]) - 90]))
            else:
                raise Socks4Error((94, _socks4errors[4]))
        # Get the bound address/port
        self.__proxysockname = (socket.inet_ntoa(resp[4:]), struct.unpack(">H", resp[2:4])[0])
        # NOTE(review): rmtrslv is a bool, so "!= None" is always true and
        # the else branch is unreachable; this looks like it was meant to be
        # "if not rmtrslv" -- verify before changing.
        if rmtrslv != None:
            self.__proxypeername = (socket.inet_ntoa(ipaddr), destport)
        else:
            self.__proxypeername = (destaddr, destport)

    def __negotiatehttp(self, destaddr, destport):
        """__negotiatehttp(self,destaddr,destport)
        Negotiates a connection through an HTTP server.
        """
        # If we need to resolve locally, we do this now
        if not self.__proxy[3]:
            addr = socket.gethostbyname(destaddr)
        else:
            addr = destaddr
        self.sendall(("CONNECT " + addr + ":" + str(destport) + " HTTP/1.1\r\n" + "Host: " + destaddr + "\r\n\r\n").encode())
        # We read the response until we get the string "\r\n\r\n"
        # (one byte at a time, so nothing past the headers is consumed).
        resp = self.recv(1)
        while resp.find("\r\n\r\n".encode()) == -1:
            resp = resp + self.recv(1)
        # We just need the first line to check if the connection
        # was successful
        statusline = resp.splitlines()[0].split(" ".encode(), 2)
        if statusline[0] not in ("HTTP/1.0".encode(), "HTTP/1.1".encode()):
            self.close()
            raise GeneralProxyError((1, _generalerrors[1]))
        try:
            statuscode = int(statusline[1])
        except ValueError:
            self.close()
            raise GeneralProxyError((1, _generalerrors[1]))
        if statuscode != 200:
            self.close()
            raise HTTPError((statuscode, statusline[2]))
        self.__proxysockname = ("0.0.0.0", 0)
        self.__proxypeername = (addr, destport)

    def connect(self, destpair):
        """connect(self, despair)
        Connects to the specified destination through a proxy.
        destpar - A tuple of the IP/DNS address and the port number.
        (identical to socket's connect).
        To select the proxy server use setproxy().
        """
        # Do a minimal input check first
        if (not type(destpair) in (list,tuple)) or (len(destpair) < 2) or (type(destpair[0]) != type('')) or (type(destpair[1]) != int):
            raise GeneralProxyError((5, _generalerrors[5]))
        # Connect to the proxy (default port per protocol), then run the
        # protocol-specific negotiation against the real destination.
        if self.__proxy[0] == PROXY_TYPE_SOCKS5:
            if self.__proxy[2] != None:
                portnum = self.__proxy[2]
            else:
                portnum = 1080
            _orgsocket.connect(self, (self.__proxy[1], portnum))
            self.__negotiatesocks5(destpair[0], destpair[1])
        elif self.__proxy[0] == PROXY_TYPE_SOCKS4:
            if self.__proxy[2] != None:
                portnum = self.__proxy[2]
            else:
                portnum = 1080
            _orgsocket.connect(self,(self.__proxy[1], portnum))
            self.__negotiatesocks4(destpair[0], destpair[1])
        elif self.__proxy[0] == PROXY_TYPE_HTTP:
            if self.__proxy[2] != None:
                portnum = self.__proxy[2]
            else:
                portnum = 8080
            _orgsocket.connect(self,(self.__proxy[1], portnum))
            self.__negotiatehttp(destpair[0], destpair[1])
        elif self.__proxy[0] == None:
            # No proxy configured: plain direct connection.
            _orgsocket.connect(self, (destpair[0], destpair[1]))
        else:
            raise GeneralProxyError((4, _generalerrors[4]))
|
gpl-3.0
|
jbq/uwsgi
|
t/pypy/t_continulet1.py
|
11
|
1191
|
"""
simplified test for continulet without checking for partial writes
to enable continulets you only need to call uwsgi_pypy_setup_continulets() soon after startup:
uwsgi --pypy-wsgi-file t/pypy/t_continulet1.py --http-socket :9090 --pypy-home /opt/pypy --pypy-eval "uwsgi_pypy_setup_continulets()" --async 8
"""
import uwsgi
def application(e, sr):
    """WSGI generator app (Python 2) exercising uwsgi continulets:
    yields counter values, then a memcached response, then "done",
    suspending around each blocking step."""
    sr('200 OK', [('Content-Type','text/plain')])
    # call suspend 10 times and yield some value
    for i in range(0,10):
        print i
        uwsgi.suspend()
        yield str(i)
    # connect to a memcached server
    fd = uwsgi.async_connect('127.0.0.1:11211')
    try:
        # start waiting for socket availability (4 seconds max)
        uwsgi.wait_fd_write(fd, 4)
        # suspend execution 'til event
        uwsgi.suspend()
        uwsgi.send(fd, "get /foobar\r\n")
        # now wait for memcached response
        uwsgi.wait_fd_read(fd, 4)
        uwsgi.suspend()
        # read the response
        data = uwsgi.recv(fd, 4096)
        # return to the client
        yield data
    finally:
        # always release the async fd, even on timeout/error
        uwsgi.close(fd)
    print "sleeping for 3 seconds..."
    uwsgi.async_sleep(3)
    uwsgi.suspend()
    yield "done"
|
gpl-2.0
|
xrmx/django
|
django/contrib/sessions/backends/cache.py
|
25
|
2491
|
from django.conf import settings
from django.contrib.sessions.backends.base import CreateError, SessionBase
from django.core.cache import caches
from django.utils.six.moves import range
KEY_PREFIX = "django.contrib.sessions.cache"
class SessionStore(SessionBase):
    """
    A cache-based session store.

    Session data lives only in the cache named by
    settings.SESSION_CACHE_ALIAS; nothing is persisted elsewhere, so a
    session disappears if the cache evicts or loses its entry.
    """
    def __init__(self, session_key=None):
        self._cache = caches[settings.SESSION_CACHE_ALIAS]
        super(SessionStore, self).__init__(session_key)

    @property
    def cache_key(self):
        # Namespace the session key so it cannot collide with other cache users.
        return KEY_PREFIX + self._get_or_create_session_key()

    def load(self):
        # Return the stored session dict, or start a fresh session on any miss.
        try:
            session_data = self._cache.get(self.cache_key)
        except Exception:
            # Some backends (e.g. memcache) raise an exception on invalid
            # cache keys. If this happens, reset the session. See #17810.
            session_data = None
        if session_data is not None:
            return session_data
        self.create()
        return {}

    def create(self):
        # Because a cache can fail silently (e.g. memcache), we don't know if
        # we are failing to create a new session because of a key collision or
        # because the cache is missing. So we try for a (large) number of times
        # and then raise an exception. That's the risk you shoulder if using
        # cache backing.
        for i in range(10000):
            self._session_key = self._get_new_session_key()
            try:
                self.save(must_create=True)
            except CreateError:
                # Key collision (or failed add): pick a new key and retry.
                continue
            self.modified = True
            return
        raise RuntimeError(
            "Unable to create a new session key. "
            "It is likely that the cache is unavailable.")

    def save(self, must_create=False):
        # cache.add fails when the key already exists, which is how create()
        # detects collisions; cache.set overwrites unconditionally.
        if must_create:
            func = self._cache.add
        else:
            func = self._cache.set
        result = func(self.cache_key,
                      self._get_session(no_load=must_create),
                      self.get_expiry_age())
        if must_create and not result:
            raise CreateError

    def exists(self, session_key):
        return (KEY_PREFIX + session_key) in self._cache

    def delete(self, session_key=None):
        # Default to deleting the current session, if any.
        if session_key is None:
            if self.session_key is None:
                return
            session_key = self.session_key
        self._cache.delete(KEY_PREFIX + session_key)

    @classmethod
    def clear_expired(cls):
        # The cache expires entries on its own; nothing to clean up here.
        pass
|
bsd-3-clause
|
MMOwning/MMOCore
|
dep/libmpq/bindings/python/mpq.py
|
107
|
10430
|
"""wrapper for libmpq"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import ctypes
import ctypes.util
import os
libmpq = ctypes.CDLL(ctypes.util.find_library("mpq"))
class Error(Exception):
    """Base class for libmpq-specific failures."""
    pass

# libmpq return code -> (exception class, *constructor args).
errors = {
    -1: (IOError, "open"),
    -2: (IOError, "close"),
    -3: (IOError, "seek"),
    -4: (IOError, "read"),
    -5: (IOError, "write"),
    -6: (MemoryError,),
    -7: (Error, "file is not an mpq or is corrupted"),
    -8: (AssertionError, "not initialized"),
    -9: (AssertionError, "buffer size too small"),
    -10: (IndexError, "file not in archive"),
    -11: (AssertionError, "decrypt"),
    -12: (AssertionError, "unpack"),
}

def check_error(result, func, arguments, errors=errors):
    """ctypes errcheck hook: map a negative libmpq return code to an
    exception; pass every other result through unchanged."""
    mapped = errors.get(result)
    if mapped is None:
        return result
    raise mapped[0](*mapped[1:])
# The version getter returns a C string rather than an error code.
libmpq.libmpq__version.restype = ctypes.c_char_p
# Route every other libmpq call through check_error so that negative
# return codes surface as Python exceptions instead of being ignored.
libmpq.libmpq__archive_open.errcheck = check_error
libmpq.libmpq__archive_close.errcheck = check_error
libmpq.libmpq__archive_size_packed.errcheck = check_error
libmpq.libmpq__archive_size_unpacked.errcheck = check_error
libmpq.libmpq__archive_offset.errcheck = check_error
libmpq.libmpq__archive_version.errcheck = check_error
libmpq.libmpq__archive_files.errcheck = check_error
libmpq.libmpq__file_size_packed.errcheck = check_error
libmpq.libmpq__file_size_unpacked.errcheck = check_error
libmpq.libmpq__file_offset.errcheck = check_error
libmpq.libmpq__file_blocks.errcheck = check_error
libmpq.libmpq__file_encrypted.errcheck = check_error
libmpq.libmpq__file_compressed.errcheck = check_error
libmpq.libmpq__file_imploded.errcheck = check_error
libmpq.libmpq__file_number.errcheck = check_error
libmpq.libmpq__file_read.errcheck = check_error
libmpq.libmpq__block_open_offset.errcheck = check_error
libmpq.libmpq__block_close_offset.errcheck = check_error
libmpq.libmpq__block_size_unpacked.errcheck = check_error
libmpq.libmpq__block_read.errcheck = check_error

__version__ = libmpq.libmpq__version()
class Reader(object):
    """Read-only, file-like view of one file inside an MPQ archive
    (Python 2).

    Blocks are decompressed lazily as reads consume them; supports
    read(), seek()/tell(), readline()/readlines() and line iteration.
    """
    def __init__(self, file, libmpq=libmpq):
        self._file = file
        self._pos = 0           # logical position in unpacked bytes
        self._buf = []          # decompressed chunks not yet consumed
        self._cur_block = 0     # index of the next block to decompress
        libmpq.libmpq__block_open_offset(self._file._archive._mpq,
            self._file.number)

    def __iter__(self):
        return self

    def __repr__(self):
        return "iter(%r)" % self._file

    def seek(self, offset, whence=os.SEEK_SET, os=os):
        # Normalise the target to an absolute offset first.
        if whence == os.SEEK_SET:
            pass
        elif whence == os.SEEK_CUR:
            offset += self._pos
        elif whence == os.SEEK_END:
            offset += self._file.unpacked_size
        else:
            raise ValueError, "invalid whence"
        # Forward seeks read-and-discard; backward seeks rewind to the
        # start and re-read, since blocks only decode front to back.
        if offset >= self._pos:
            self.read(offset - self._pos)
        else:
            self._pos = 0
            self._buf = []
            self._cur_block = 0
            self.read(offset)

    def tell(self):
        return self._pos

    def _read_block(self, ctypes=ctypes, libmpq=libmpq):
        # Decompress the next block and append it to the internal buffer.
        block_size = ctypes.c_uint64()
        libmpq.libmpq__block_size_unpacked(self._file._archive._mpq,
            self._file.number, self._cur_block, ctypes.byref(block_size))
        block_data = ctypes.create_string_buffer(block_size.value)
        libmpq.libmpq__block_read(self._file._archive._mpq,
            self._file.number, self._cur_block,
            block_data, ctypes.c_uint64(len(block_data)), None)
        self._buf.append(block_data.raw)
        self._cur_block += 1

    def read(self, size=-1):
        # Read up to `size` bytes (all remaining bytes when size < 0).
        while size < 0 or sum(map(len, self._buf)) < size:
            if self._cur_block == self._file.blocks:
                break
            self._read_block()
        buf = "".join(self._buf)
        if size < 0:
            ret = buf
            self._buf = []
        else:
            ret = buf[:size]
            self._buf = [buf[size:]]
        self._pos += len(ret)
        return ret

    def readline(self, os=os):
        # Read one line, keeping the trailing newline character(s).
        line = []
        while True:
            char = self.read(1)
            if char == "":
                break
            # A non-newline char right after newline chars starts the next
            # line: push it back and stop.
            if char not in '\r\n' and line and line[-1] in '\r\n':
                self.seek(-1, os.SEEK_CUR)
                break
            line.append(char)
        return ''.join(line)

    def next(self):
        # Python 2 iterator protocol: one line per iteration.
        line = self.readline()
        if not line:
            raise StopIteration
        return line

    def readlines(self, sizehint=-1):
        res = []
        while sizehint < 0 or sum(map(len, res)) < sizehint:
            line = self.readline()
            if not line:
                break
            res.append(line)
        return res

    xreadlines = __iter__

    def __del__(self, libmpq=libmpq):
        # Release the libmpq block-offset table for this file.
        libmpq.libmpq__block_close_offset(self._file._archive._mpq,
            self._file.number)
class File(object):
    """Metadata for one file in an MPQ archive; str(file) returns the
    full decompressed contents, iter(file) returns a streaming Reader."""
    def __init__(self, archive, number, ctypes=ctypes, libmpq=libmpq):
        self._archive = archive
        self.number = number
        # Fetch each per-file attribute from libmpq and expose it as a
        # plain instance attribute of the same name.
        for name, atype in [
                ("packed_size", ctypes.c_uint64),
                ("unpacked_size", ctypes.c_uint64),
                ("offset", ctypes.c_uint64),
                ("blocks", ctypes.c_uint32),
                ("encrypted", ctypes.c_uint32),
                ("compressed", ctypes.c_uint32),
                ("imploded", ctypes.c_uint32),
            ]:
            data = atype()
            func = getattr(libmpq, "libmpq__file_"+name)
            func(self._archive._mpq, self.number, ctypes.byref(data))
            setattr(self, name, data.value)

    def __str__(self, ctypes=ctypes, libmpq=libmpq):
        # Read the whole file in a single libmpq call.
        data = ctypes.create_string_buffer(self.unpacked_size)
        libmpq.libmpq__file_read(self._archive._mpq, self.number,
            data, ctypes.c_uint64(len(data)), None)
        return data.raw

    def __repr__(self):
        return "%r[%i]" % (self._archive, self.number)

    def __iter__(self, Reader=Reader):
        return Reader(self)
class Archive(object):
    """An MPQ archive opened from a path, or nested inside another
    archive's File; supports len(), "name" in archive, archive[key]."""
    def __init__(self, source, ctypes=ctypes, File=File, libmpq=libmpq):
        self._source = source
        if isinstance(source, File):
            # Nested archive: the containing file must be stored raw so we
            # can address it by absolute offset in the outer file.
            assert not source.encrypted
            assert not source.compressed
            assert not source.imploded
            self.filename = source._archive.filename
            offset = source._archive.offset + source.offset
        else:
            self.filename = source
            offset = -1
        self._mpq = ctypes.c_void_p()
        libmpq.libmpq__archive_open(ctypes.byref(self._mpq), self.filename,
            ctypes.c_uint64(offset))
        # Only set once open succeeded, so __del__ never closes a bad handle.
        self._opened = True
        # Mirror the archive-level attributes onto the instance.
        for field_name, field_type in [
                ("packed_size", ctypes.c_uint64),
                ("unpacked_size", ctypes.c_uint64),
                ("offset", ctypes.c_uint64),
                ("version", ctypes.c_uint32),
                ("files", ctypes.c_uint32),
            ]:
            func = getattr(libmpq, "libmpq__archive_" + field_name)
            data = field_type()
            func(self._mpq, ctypes.byref(data))
            setattr(self, field_name, data.value)

    def __del__(self, libmpq=libmpq):
        if getattr(self, "_opened", False):
            libmpq.libmpq__archive_close(self._mpq)

    def __len__(self):
        return self.files

    def __contains__(self, item, ctypes=ctypes, libmpq=libmpq):
        if isinstance(item, str):
            # Name lookup: libmpq signals "absent" via check_error raising
            # IndexError.
            data = ctypes.c_uint32()
            try:
                libmpq.libmpq__file_number(self._mpq, ctypes.c_char_p(item),
                    ctypes.byref(data))
            except IndexError:
                return False
            return True
        return 0 <= item < self.files

    def __getitem__(self, item, ctypes=ctypes, File=File, libmpq=libmpq):
        if isinstance(item, str):
            # Resolve a filename to its index first.
            data = ctypes.c_int()
            libmpq.libmpq__file_number(self._mpq, ctypes.c_char_p(item),
                ctypes.byref(data))
            item = data.value
        else:
            if not 0 <= item < self.files:
                raise IndexError, "file not in archive"
        return File(self, item)

    def __repr__(self):
        return "mpq.Archive(%r)" % self._source
# Remove clutter - everything except Error and Archive.
del os, check_error, ctypes, errors, File, libmpq, Reader

if __name__ == "__main__":
    # Self-test / demo (Python 2): dump metadata for the archive named on
    # the command line and verify that all read paths agree byte-for-byte.
    import sys, random
    archive = Archive(sys.argv[1])
    print repr(archive)
    for k, v in archive.__dict__.iteritems():
        #if k[0] == '_': continue
        print " " * (4 - 1), k, v
    assert '(listfile)' in archive
    assert 0 in archive
    assert len(archive) == archive.files
    files = [x.strip() for x in archive['(listfile)']]
    files.extend(xrange(archive.files))
    for key in files: #sys.argv[2:] if sys.argv[2:] else xrange(archive.files):
        file = archive[key]
        print
        print " " * (4 - 1), repr(file)
        for k, v in file.__dict__.iteritems():
            #if k[0] == '_': continue
            print " " * (8 - 1), k, v
        # a: whole-file read; b: Reader bulk read; c: random-sized chunked
        # reads; d: line iteration -- all four must produce identical bytes.
        a = str(file)
        b = iter(file).read()
        reader = iter(file)
        c = []
        while True:
            l = random.randrange(1, 10)
            d = reader.read(l)
            if not d: break
            assert len(d) <= l
            c.append(d)
        c = "".join(c)
        d = []
        reader.seek(0)
        for line in reader:
            d.append(line)
        d = "".join(d)
        assert a == b == c == d, map(hash, [a,b,c,d])
        assert len(a) == file.unpacked_size
        repr(iter(file))
        # readlines() and list(iterator) must agree too.
        reader.seek(0)
        a = reader.readlines()
        reader.seek(0)
        b = list(reader)
        assert a == b
|
gpl-2.0
|
edxzw/edx-platform
|
openedx/core/djangoapps/user_api/models.py
|
55
|
4107
|
"""
Django ORM model specifications for the User API application
"""
from django.contrib.auth.models import User
from django.core.validators import RegexValidator
from django.db import models
from django.db.models.signals import post_delete, pre_save, post_save
from django.dispatch import receiver
from model_utils.models import TimeStampedModel
from util.model_utils import get_changed_fields_dict, emit_setting_changed_event
from xmodule_django.models import CourseKeyField
# Currently, the "student" app is responsible for
# accounts, profiles, enrollments, and the student dashboard.
# We are trying to move some of this functionality into separate apps,
# but currently the rest of the system assumes that "student" defines
# certain models. For now we will leave the models in "student" and
# create an alias in "user_api".
from student.models import UserProfile, Registration, PendingEmailChange # pylint: disable=unused-import
class UserPreference(models.Model):
    """A user's preference, stored as generic text to be processed by client"""
    KEY_REGEX = r"[-_a-zA-Z0-9]+"
    # One row per (user, key); value is free-form text interpreted by clients.
    user = models.ForeignKey(User, db_index=True, related_name="preferences")
    key = models.CharField(max_length=255, db_index=True, validators=[RegexValidator(KEY_REGEX)])
    value = models.TextField()

    class Meta(object):
        unique_together = ("user", "key")

    @classmethod
    def get_value(cls, user, preference_key):
        """Gets the user preference value for a given key.

        Note:
            This method provides no authorization of access to the user preference.
            Consider using user_api.preferences.api.get_user_preference instead if
            this is part of a REST API request.

        Arguments:
            user (User): The user whose preference should be set.
            preference_key (str): The key for the user preference.

        Returns:
            The user preference value, or None if one is not set.
        """
        try:
            user_preference = cls.objects.get(user=user, key=preference_key)
            return user_preference.value
        except cls.DoesNotExist:
            return None
@receiver(pre_save, sender=UserPreference)
def pre_save_callback(sender, **kwargs):
    """
    Capture the preference's previous value before it is overwritten.
    """
    user_preference = kwargs["instance"]
    # Stash the old "value" (None for a new row) on the instance so the
    # post_save handler can include it in the emitted change event.
    user_preference._old_value = get_changed_fields_dict(user_preference, sender).get("value", None)
@receiver(post_save, sender=UserPreference)
def post_save_callback(sender, **kwargs):
    """
    Emit a setting-changed event (old value -> new value) after a save.
    """
    user_preference = kwargs["instance"]
    emit_setting_changed_event(
        user_preference.user, sender._meta.db_table, user_preference.key,
        user_preference._old_value, user_preference.value
    )
    # Clear the stashed value so it cannot leak into a later save.
    user_preference._old_value = None
@receiver(post_delete, sender=UserPreference)
def post_delete_callback(sender, **kwargs):
    """
    Emit a setting-changed event (value -> None) after a delete.
    """
    user_preference = kwargs["instance"]
    emit_setting_changed_event(
        user_preference.user, sender._meta.db_table, user_preference.key, user_preference.value, None
    )
class UserCourseTag(models.Model):
    """
    Per-course user tags, to be used by various things that want to store tags about
    the user. Added initially to store assignment to experimental groups.
    """
    # One free-form text value per (user, course, key).
    user = models.ForeignKey(User, db_index=True, related_name="+")
    key = models.CharField(max_length=255, db_index=True)
    course_id = CourseKeyField(max_length=255, db_index=True)
    value = models.TextField()

    class Meta(object):
        unique_together = ("user", "course_id", "key")
class UserOrgTag(TimeStampedModel):
    """ Per-Organization user tags.

    Allows settings to be configured at an organization level.
    """
    # One free-form text value per (user, org, key); TimeStampedModel adds
    # created/modified columns.
    user = models.ForeignKey(User, db_index=True, related_name="+")
    key = models.CharField(max_length=255, db_index=True)
    org = models.CharField(max_length=255, db_index=True)
    value = models.TextField()

    class Meta(object):
        unique_together = ("user", "org", "key")
|
agpl-3.0
|
dhomeier/astropy
|
astropy/time/core.py
|
3
|
109168
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
The astropy.time package provides functionality for manipulating times and
dates. Specific emphasis is placed on supporting time scales (e.g. UTC, TAI,
UT1) and time representations (e.g. JD, MJD, ISO 8601) that are used in
astronomy.
"""
import os
import copy
import enum
import operator
import threading
from datetime import datetime, date, timedelta
from time import strftime
from warnings import warn
import numpy as np
import erfa
from astropy import units as u, constants as const
from astropy.units import UnitConversionError
from astropy.utils import ShapedLikeNDArray
from astropy.utils.compat.misc import override__dir__
from astropy.utils.data_info import MixinInfo, data_info_factory
from astropy.utils.exceptions import AstropyWarning
from .utils import day_frac
from .formats import (TIME_FORMATS, TIME_DELTA_FORMATS,
TimeJD, TimeUnique, TimeAstropyTime, TimeDatetime)
# Import TimeFromEpoch to avoid breaking code that followed the old example of
# making a custom timescale in the documentation.
from .formats import TimeFromEpoch # noqa
from astropy.extern import _strptime
__all__ = ['TimeBase', 'Time', 'TimeDelta', 'TimeInfo', 'update_leap_seconds',
'TIME_SCALES', 'STANDARD_TIME_SCALES', 'TIME_DELTA_SCALES',
'ScaleValueError', 'OperandTypeError']
STANDARD_TIME_SCALES = ('tai', 'tcb', 'tcg', 'tdb', 'tt', 'ut1', 'utc')
LOCAL_SCALES = ('local',)
TIME_TYPES = dict((scale, scales) for scales in (STANDARD_TIME_SCALES, LOCAL_SCALES)
for scale in scales)
TIME_SCALES = STANDARD_TIME_SCALES + LOCAL_SCALES
MULTI_HOPS = {('tai', 'tcb'): ('tt', 'tdb'),
('tai', 'tcg'): ('tt',),
('tai', 'ut1'): ('utc',),
('tai', 'tdb'): ('tt',),
('tcb', 'tcg'): ('tdb', 'tt'),
('tcb', 'tt'): ('tdb',),
('tcb', 'ut1'): ('tdb', 'tt', 'tai', 'utc'),
('tcb', 'utc'): ('tdb', 'tt', 'tai'),
('tcg', 'tdb'): ('tt',),
('tcg', 'ut1'): ('tt', 'tai', 'utc'),
('tcg', 'utc'): ('tt', 'tai'),
('tdb', 'ut1'): ('tt', 'tai', 'utc'),
('tdb', 'utc'): ('tt', 'tai'),
('tt', 'ut1'): ('tai', 'utc'),
('tt', 'utc'): ('tai',),
}
GEOCENTRIC_SCALES = ('tai', 'tt', 'tcg')
BARYCENTRIC_SCALES = ('tcb', 'tdb')
ROTATIONAL_SCALES = ('ut1',)
TIME_DELTA_TYPES = dict((scale, scales)
for scales in (GEOCENTRIC_SCALES, BARYCENTRIC_SCALES,
ROTATIONAL_SCALES, LOCAL_SCALES) for scale in scales)
TIME_DELTA_SCALES = GEOCENTRIC_SCALES + BARYCENTRIC_SCALES + ROTATIONAL_SCALES + LOCAL_SCALES
# For time scale changes, we need L_G and L_B, which are stored in erfam.h as
# /* L_G = 1 - d(TT)/d(TCG) */
# define ERFA_ELG (6.969290134e-10)
# /* L_B = 1 - d(TDB)/d(TCB), and TDB (s) at TAI 1977/1/1.0 */
# define ERFA_ELB (1.550519768e-8)
# These are exposed in erfa as erfa.ELG and erfa.ELB.
# Implied: d(TT)/d(TCG) = 1-L_G
# and d(TCG)/d(TT) = 1/(1-L_G) = 1 + (1-(1-L_G))/(1-L_G) = 1 + L_G/(1-L_G)
# scale offsets as second = first + first * scale_offset[(first,second)]
SCALE_OFFSETS = {('tt', 'tai'): None,
('tai', 'tt'): None,
('tcg', 'tt'): -erfa.ELG,
('tt', 'tcg'): erfa.ELG / (1. - erfa.ELG),
('tcg', 'tai'): -erfa.ELG,
('tai', 'tcg'): erfa.ELG / (1. - erfa.ELG),
('tcb', 'tdb'): -erfa.ELB,
('tdb', 'tcb'): erfa.ELB / (1. - erfa.ELB)}
# triple-level dictionary, yay!
SIDEREAL_TIME_MODELS = {
'mean': {
'IAU2006': {'function': erfa.gmst06, 'scales': ('ut1', 'tt')},
'IAU2000': {'function': erfa.gmst00, 'scales': ('ut1', 'tt')},
'IAU1982': {'function': erfa.gmst82, 'scales': ('ut1',)}},
'apparent': {
'IAU2006A': {'function': erfa.gst06a, 'scales': ('ut1', 'tt')},
'IAU2000A': {'function': erfa.gst00a, 'scales': ('ut1', 'tt')},
'IAU2000B': {'function': erfa.gst00b, 'scales': ('ut1',)},
'IAU1994': {'function': erfa.gst94, 'scales': ('ut1',)}}}
class _LeapSecondsCheck(enum.Enum):
    """Tri-state flag coordinating the one-time leap-second table update
    across threads (used with ``_LEAP_SECONDS_CHECK``/``_LEAP_SECONDS_LOCK``).
    """
    NOT_STARTED = 0 # No thread has reached the check
    RUNNING = 1 # A thread is running update_leap_seconds (_LEAP_SECONDS_LOCK is held)
    DONE = 2 # update_leap_seconds has completed

# Module-level state for the lazy leap-second check. An RLock is used so the
# thread holding it during RUNNING can re-enter without deadlocking.
_LEAP_SECONDS_CHECK = _LeapSecondsCheck.NOT_STARTED
_LEAP_SECONDS_LOCK = threading.RLock()
class TimeInfo(MixinInfo):
    """
    Container for meta information like name, description, format. This is
    required when the object is used as a mixin column within a table, but can
    be used as a general way to store meta information.
    """
    attr_names = MixinInfo.attr_names | {'serialize_method'}
    _supports_indexing = True

    # The usual tuple of attributes needed for serialization is replaced
    # by a property, since Time can be serialized different ways.
    _represent_as_dict_extra_attrs = ('format', 'scale', 'precision',
                                      'in_subfmt', 'out_subfmt', 'location',
                                      '_delta_ut1_utc', '_delta_tdb_tt')

    # When serializing, write out the `value` attribute using the column name.
    _represent_as_dict_primary_data = 'value'

    # Fill value used when an element of a Time mixin column is masked.
    mask_val = np.ma.masked

    @property
    def _represent_as_dict_attrs(self):
        # The serialization context determines whether times are written as
        # their formatted ``value`` (e.g. ISO strings) or as the internal
        # jd1/jd2 float pair (see ``serialize_method`` set in __init__).
        method = self.serialize_method[self._serialize_context]
        if method == 'formatted_value':
            out = ('value',)
        elif method == 'jd1_jd2':
            out = ('jd1', 'jd2')
        else:
            raise ValueError("serialize method must be 'formatted_value' or 'jd1_jd2'")
        return out + self._represent_as_dict_extra_attrs

    def __init__(self, bound=False):
        super().__init__(bound)
        # If bound to a data object instance then create the dict of attributes
        # which stores the info attribute values.
        if bound:
            # Specify how to serialize this object depending on context.
            # If ``True`` for a context, then use formatted ``value`` attribute
            # (e.g. the ISO time string). If ``False`` then use float jd1 and jd2.
            self.serialize_method = {'fits': 'jd1_jd2',
                                     'ecsv': 'formatted_value',
                                     'hdf5': 'jd1_jd2',
                                     'yaml': 'jd1_jd2',
                                     None: 'jd1_jd2'}

    def get_sortable_arrays(self):
        """
        Return a list of arrays which can be lexically sorted to represent
        the order of the parent column.

        Returns
        -------
        arrays : list of ndarray
        """
        parent = self._parent
        # Sort primarily on the (reduced-precision) float JD, then break ties
        # with the remainder that the single float could not represent.
        jd_approx = parent.jd
        jd_remainder = (parent - parent.__class__(jd_approx, format='jd')).jd
        return [jd_approx, jd_remainder]

    @property
    def unit(self):
        # Time has no unit; required by the mixin-info interface.
        return None

    info_summary_stats = staticmethod(
        data_info_factory(names=MixinInfo._stats,
                          funcs=[getattr(np, stat) for stat in MixinInfo._stats]))
    # When Time has mean, std, min, max methods:
    # funcs = [lambda x: getattr(x, stat)() for stat_name in MixinInfo._stats])

    def _construct_from_dict_base(self, map):
        """Reconstruct a parent-class instance from a serialized attr dict."""
        if 'jd1' in map and 'jd2' in map:
            # Initialize as JD but revert to desired format and out_subfmt (if needed)
            format = map.pop('format')
            out_subfmt = map.pop('out_subfmt', None)
            map['format'] = 'jd'
            map['val'] = map.pop('jd1')
            map['val2'] = map.pop('jd2')
            out = self._parent_cls(**map)
            out.format = format
            if out_subfmt is not None:
                out.out_subfmt = out_subfmt
        else:
            # Serialized as the formatted ``value``.
            map['val'] = map.pop('value')
            out = self._parent_cls(**map)
        return out

    def _construct_from_dict(self, map):
        # The delta attributes cannot be passed to the constructor, so pull
        # them out first and set them directly on the new instance.
        delta_ut1_utc = map.pop('_delta_ut1_utc', None)
        delta_tdb_tt = map.pop('_delta_tdb_tt', None)
        out = self._construct_from_dict_base(map)
        if delta_ut1_utc is not None:
            out._delta_ut1_utc = delta_ut1_utc
        if delta_tdb_tt is not None:
            out._delta_tdb_tt = delta_tdb_tt
        return out

    def new_like(self, cols, length, metadata_conflicts='warn', name=None):
        """
        Return a new Time instance which is consistent with the input Time objects
        ``cols`` and has ``length`` rows.

        This is intended for creating an empty Time instance whose elements can
        be set in-place for table operations like join or vstack.  It checks
        that the input locations and attributes are consistent.  This is used
        when a Time object is used as a mixin column in an astropy Table.

        Parameters
        ----------
        cols : list
            List of input columns (Time objects)
        length : int
            Length of the output column object
        metadata_conflicts : str ('warn'|'error'|'silent')
            How to handle metadata conflicts
        name : str
            Output column name

        Returns
        -------
        col : Time (or subclass)
            Empty instance of this class consistent with ``cols``
        """
        # Get merged info attributes like shape, dtype, format, description, etc.
        attrs = self.merge_cols_attributes(cols, metadata_conflicts, name,
                                           ('meta', 'description'))
        attrs.pop('dtype')  # Not relevant for Time
        col0 = cols[0]

        # Check that location is consistent for all Time objects
        for col in cols[1:]:
            # This is the method used by __setitem__ to ensure that the right side
            # has a consistent location (and coerce data if necessary, but that does
            # not happen in this case since `col` is already a Time object).  If this
            # passes then any subsequent table operations via setitem will work.
            try:
                col0._make_value_equivalent(slice(None), col)
            except ValueError:
                raise ValueError('input columns have inconsistent locations')

        # Make a new Time object with the desired shape and attributes
        shape = (length,) + attrs.pop('shape')
        jd2000 = 2451544.5  # Arbitrary JD value J2000.0 that will work with ERFA
        jd1 = np.full(shape, jd2000, dtype='f8')
        jd2 = np.zeros(shape, dtype='f8')
        tm_attrs = {attr: getattr(col0, attr)
                    for attr in ('scale', 'location',
                                 'precision', 'in_subfmt', 'out_subfmt')}
        out = self._parent_cls(jd1, jd2, format='jd', **tm_attrs)
        out.format = col0.format

        # Set remaining info attributes
        for attr, value in attrs.items():
            setattr(out.info, attr, value)

        return out
class TimeDeltaInfo(TimeInfo):
    """Info class for TimeDelta mixin columns.

    Same as `TimeInfo` but with a reduced set of serialized attributes:
    TimeDelta has no location or delta_* attributes.
    """
    _represent_as_dict_extra_attrs = ('format', 'scale')

    def _construct_from_dict(self, map):
        # TimeDelta has no _delta_* attributes, so the base reconstruction
        # is sufficient.
        return self._construct_from_dict_base(map)

    def new_like(self, cols, length, metadata_conflicts='warn', name=None):
        """
        Return a new TimeDelta instance which is consistent with the input Time objects
        ``cols`` and has ``length`` rows.

        This is intended for creating an empty Time instance whose elements can
        be set in-place for table operations like join or vstack.  It checks
        that the input locations and attributes are consistent.  This is used
        when a Time object is used as a mixin column in an astropy Table.

        Parameters
        ----------
        cols : list
            List of input columns (Time objects)
        length : int
            Length of the output column object
        metadata_conflicts : str ('warn'|'error'|'silent')
            How to handle metadata conflicts
        name : str
            Output column name

        Returns
        -------
        col : Time (or subclass)
            Empty instance of this class consistent with ``cols``
        """
        # Get merged info attributes like shape, dtype, format, description, etc.
        attrs = self.merge_cols_attributes(cols, metadata_conflicts, name,
                                           ('meta', 'description'))
        attrs.pop('dtype')  # Not relevant for Time
        col0 = cols[0]

        # Make a new Time object with the desired shape and attributes
        # (no location check needed since TimeDelta has no location).
        shape = (length,) + attrs.pop('shape')
        jd1 = np.zeros(shape, dtype='f8')
        jd2 = np.zeros(shape, dtype='f8')
        out = self._parent_cls(jd1, jd2, format='jd', scale=col0.scale)
        out.format = col0.format

        # Set remaining info attributes
        for attr, value in attrs.items():
            setattr(out.info, attr, value)

        return out
class TimeBase(ShapedLikeNDArray):
    """Base time class from which Time and TimeDelta inherit."""

    # Make sure that reverse arithmetic (e.g., TimeDelta.__rmul__)
    # gets called over the __mul__ of Numpy arrays.
    __array_priority__ = 20000

    # Declare that Time can be used as a Table column by defining the
    # attribute where column attributes will be stored.
    _astropy_column_attrs = None
    def __getnewargs__(self):
        # Support pickling: recreate the instance by passing the internal
        # TimeFormat object to __new__.
        return (self._time,)
    def _init_from_vals(self, val, val2, format, scale, copy,
                        precision=None, in_subfmt=None, out_subfmt=None):
        """
        Set the internal _format, scale, and _time attrs from user
        inputs.  This handles coercion into the correct shapes and
        some basic input validation.

        ``val2`` is an optional second value array (e.g. the fractional part
        of a two-part JD); if given it must broadcast against ``val``.
        """
        # Apply defaults for the subformat-related attributes.
        if precision is None:
            precision = 3
        if in_subfmt is None:
            in_subfmt = '*'
        if out_subfmt is None:
            out_subfmt = '*'

        # Coerce val into an array
        val = _make_array(val, copy)

        # If val2 is not None, ensure consistency
        if val2 is not None:
            val2 = _make_array(val2, copy)
            try:
                np.broadcast(val, val2)
            except ValueError:
                raise ValueError('Input val and val2 have inconsistent shape; '
                                 'they cannot be broadcast together.')

        if scale is not None:
            if not (isinstance(scale, str)
                    and scale.lower() in self.SCALES):
                raise ScaleValueError("Scale {!r} is not in the allowed scales "
                                      "{}".format(scale,
                                                  sorted(self.SCALES)))

        # If either of the input val, val2 are masked arrays then
        # find the masked elements and fill them.
        mask, val, val2 = _check_for_masked_and_fill(val, val2)

        # Parse / convert input values into internal jd1, jd2 based on format
        self._time = self._get_time_fmt(val, val2, format, scale,
                                        precision, in_subfmt, out_subfmt)
        self._format = self._time.name

        # Hack from #9969 to allow passing the location value that has been
        # collected by the TimeAstropyTime format class up to the Time level.
        # TODO: find a nicer way.
        if hasattr(self._time, '_location'):
            self.location = self._time._location
            del self._time._location

        # If any inputs were masked then masked jd2 accordingly.  From above
        # routine ``mask`` must be either Python bool False or an bool ndarray
        # with shape broadcastable to jd2.
        if mask is not False:
            mask = np.broadcast_to(mask, self._time.jd2.shape)
            self._time.jd1[mask] = 2451544.5  # Set to JD for 2000-01-01
            self._time.jd2[mask] = np.nan
    def _get_time_fmt(self, val, val2, format, scale,
                      precision, in_subfmt, out_subfmt):
        """
        Given the supplied val, val2, format and scale try to instantiate
        the corresponding TimeFormat class to convert the input values into
        the internal jd1 and jd2.

        If format is `None` and the input is a string-type or object array then
        guess available formats and stop when one matches.

        Returns the instantiated TimeFormat object; raises ValueError if no
        format matches or the given format name is not allowed.
        """
        if (format is None
                and (val.dtype.kind in ('S', 'U', 'O', 'M') or val.dtype.names)):
            # Input is a string, object, datetime, or a table-like ndarray
            # (structured array, recarray).  These input types can be
            # uniquely identified by the format classes.
            formats = [(name, cls) for name, cls in self.FORMATS.items()
                       if issubclass(cls, TimeUnique)]

            # AstropyTime is a pseudo-format that isn't in the TIME_FORMATS registry,
            # but try to guess it at the end.
            formats.append(('astropy_time', TimeAstropyTime))

        elif not (isinstance(format, str)
                  and format.lower() in self.FORMATS):
            if format is None:
                raise ValueError("No time format was given, and the input is "
                                 "not unique")
            else:
                raise ValueError("Format {!r} is not one of the allowed "
                                 "formats {}".format(format,
                                                     sorted(self.FORMATS)))
        else:
            # A single explicitly-specified format.
            formats = [(format, self.FORMATS[format])]

        assert formats
        problems = {}
        for name, cls in formats:
            try:
                return cls(val, val2, scale, precision, in_subfmt, out_subfmt)
            except UnitConversionError:
                # A unit problem is a real user error even while guessing;
                # propagate it immediately.
                raise
            except (ValueError, TypeError) as err:
                # If ``format`` specified then there is only one possibility, so raise
                # immediately and include the upstream exception message to make it
                # easier for user to see what is wrong.
                if len(formats) == 1:
                    raise ValueError(
                        f'Input values did not match the format class {format}:'
                        + os.linesep
                        + f'{err.__class__.__name__}: {err}'
                    ) from err
                else:
                    problems[name] = err
        else:
            # for/else: reached only when every candidate format failed.
            raise ValueError(f'Input values did not match any of the formats '
                             f'where the format keyword is optional: '
                             f'{problems}') from problems[formats[0][0]]
@property
def writeable(self):
return self._time.jd1.flags.writeable & self._time.jd2.flags.writeable
@writeable.setter
def writeable(self, value):
self._time.jd1.flags.writeable = value
self._time.jd2.flags.writeable = value
    @property
    def format(self):
        """
        Get or set time format.

        The format defines the way times are represented when accessed via the
        ``.value`` attribute.  By default it is the same as the format used for
        initializing the `Time` instance, but it can be set to any other value
        that could be used for initialization.  These can be listed with::

          >>> list(Time.FORMATS)
          ['jd', 'mjd', 'decimalyear', 'unix', 'unix_tai', 'cxcsec', 'gps', 'plot_date',
           'stardate', 'datetime', 'ymdhms', 'iso', 'isot', 'yday', 'datetime64',
           'fits', 'byear', 'jyear', 'byear_str', 'jyear_str']
        """
        return self._format

    @format.setter
    def format(self, format):
        """Set time format"""
        if format not in self.FORMATS:
            raise ValueError(f'format must be one of {list(self.FORMATS)}')
        format_cls = self.FORMATS[format]

        # Get the new TimeFormat object to contain time in new format.  Possibly
        # coerce in/out_subfmt to '*' (default) if existing subfmt values are
        # not valid in the new format.
        self._time = format_cls(
            self._time.jd1, self._time.jd2,
            self._time._scale, self.precision,
            in_subfmt=format_cls._get_allowed_subfmt(self.in_subfmt),
            out_subfmt=format_cls._get_allowed_subfmt(self.out_subfmt),
            from_jd=True)

        self._format = format
def __repr__(self):
return ("<{} object: scale='{}' format='{}' value={}>"
.format(self.__class__.__name__, self.scale, self.format,
getattr(self, self.format)))
def __str__(self):
return str(getattr(self, self.format))
def __hash__(self):
try:
loc = getattr(self, 'location', None)
if loc is not None:
loc = loc.x.to_value(u.m), loc.y.to_value(u.m), loc.z.to_value(u.m)
return hash((self.jd1, self.jd2, self.scale, loc))
except TypeError:
if self.ndim != 0:
reason = '(must be scalar)'
elif self.masked:
reason = '(value is masked)'
else:
raise
raise TypeError(f"unhashable type: '{self.__class__.__name__}' {reason}")
    @property
    def scale(self):
        """Time scale (read-only; stored on the internal TimeFormat object)."""
        return self._time.scale
    def _set_scale(self, scale):
        """
        This is the key routine that actually does time scale conversions.
        This is not public and not connected to the read-only scale property.

        Converts the internal jd1/jd2 pair in place (a new TimeFormat object
        is created with the transformed values).
        """

        if scale == self.scale:
            return
        if scale not in self.SCALES:
            raise ValueError("Scale {!r} is not in the allowed scales {}"
                             .format(scale, sorted(self.SCALES)))

        # Determine the chain of scale transformations to get from the current
        # scale to the new scale.  MULTI_HOPS contains a dict of all
        # transformations (xforms) that require intermediate xforms.
        # The MULTI_HOPS dict is keyed by (sys1, sys2) in alphabetical order.
        xform = (self.scale, scale)
        xform_sort = tuple(sorted(xform))
        multi = MULTI_HOPS.get(xform_sort, ())
        xforms = xform_sort[:1] + multi + xform_sort[-1:]
        # If we made the reverse xform then reverse it now.
        if xform_sort != xform:
            xforms = tuple(reversed(xforms))

        # Transform the jd1,2 pairs through the chain of scale xforms.
        # Use jd2_filled so masked (NaN) entries do not break the erfa calls.
        jd1, jd2 = self._time.jd1, self._time.jd2_filled
        for sys1, sys2 in zip(xforms[:-1], xforms[1:]):
            # Some xforms require an additional delta_ argument that is
            # provided through Time methods.  These values may be supplied by
            # the user or computed based on available approximations.  The
            # get_delta_ methods are available for only one combination of
            # sys1, sys2 though the property applies for both xform directions.
            args = [jd1, jd2]
            for sys12 in ((sys1, sys2), (sys2, sys1)):
                dt_method = '_get_delta_{}_{}'.format(*sys12)
                try:
                    get_dt = getattr(self, dt_method)
                except AttributeError:
                    pass
                else:
                    args.append(get_dt(jd1, jd2))
                    break

            # The erfa transformation routines are named by concatenating the
            # two scale names, e.g. erfa.tttai.
            conv_func = getattr(erfa, sys1 + sys2)
            jd1, jd2 = conv_func(*args)

        # Renormalize so jd2 is the small fractional part, then restore the
        # NaN marker for masked entries.
        jd1, jd2 = day_frac(jd1, jd2)
        if self.masked:
            jd2[self.mask] = np.nan

        self._time = self.FORMATS[self.format](jd1, jd2, scale, self.precision,
                                               self.in_subfmt, self.out_subfmt,
                                               from_jd=True)
@property
def precision(self):
"""
Decimal precision when outputting seconds as floating point (int
value between 0 and 9 inclusive).
"""
return self._time.precision
@precision.setter
def precision(self, val):
del self.cache
if not isinstance(val, int) or val < 0 or val > 9:
raise ValueError('precision attribute must be an int between '
'0 and 9')
self._time.precision = val
    @property
    def in_subfmt(self):
        """
        Unix wildcard pattern to select subformats for parsing string input
        times.
        """
        return self._time.in_subfmt

    @in_subfmt.setter
    def in_subfmt(self, val):
        # Assignment is validated by the TimeFormat object; invalidate the
        # format cache afterwards since parsing behavior changed.
        self._time.in_subfmt = val
        del self.cache
    @property
    def out_subfmt(self):
        """
        Unix wildcard pattern to select subformats for outputting times.
        """
        return self._time.out_subfmt

    @out_subfmt.setter
    def out_subfmt(self, val):
        # Setting the out_subfmt property here does validation of ``val``
        self._time.out_subfmt = val
        # Output formatting changed, so drop cached formatted values.
        del self.cache
    @property
    def shape(self):
        """The shape of the time instances.

        Like `~numpy.ndarray.shape`, can be set to a new shape by assigning a
        tuple.  Note that if different instances share some but not all
        underlying data, setting the shape of one instance can make the other
        instance unusable.  Hence, it is strongly recommended to get new,
        reshaped instances with the ``reshape`` method.

        Raises
        ------
        ValueError
            If the new shape has the wrong total number of elements.
        AttributeError
            If the shape of the ``jd1``, ``jd2``, ``location``,
            ``delta_ut1_utc``, or ``delta_tdb_tt`` attributes cannot be changed
            without the arrays being copied.  For these cases, use the
            `Time.reshape` method (which copies any arrays that cannot be
            reshaped in-place).
        """
        return self._time.jd1.shape

    @shape.setter
    def shape(self, shape):
        del self.cache

        # We have to keep track of arrays that were already reshaped,
        # since we may have to return those to their original shape if a later
        # shape-setting fails.
        reshaped = []
        oldshape = self.shape

        # In-place reshape of data/attributes.  Need to access _time.jd1/2 not
        # self.jd1/2 because the latter are not guaranteed to be the actual
        # data, and in fact should not be directly changeable from the public
        # API.
        for obj, attr in ((self._time, 'jd1'),
                          (self._time, 'jd2'),
                          (self, '_delta_ut1_utc'),
                          (self, '_delta_tdb_tt'),
                          (self, 'location')):
            val = getattr(obj, attr, None)
            if val is not None and val.size > 1:
                try:
                    val.shape = shape
                except Exception:
                    # Roll back the arrays already reshaped, then re-raise so
                    # the instance is left in a consistent state.
                    for val2 in reshaped:
                        val2.shape = oldshape
                    raise
                else:
                    reshaped.append(val)
    def _shaped_like_input(self, value):
        """Return ``value`` shaped to match the dimensionality of this Time.

        For array-valued times the ndarray is returned as-is; for scalar
        (zero-dimensional) times the value is unboxed to a Python scalar when
        it is safe to do so.
        """
        if self._time.jd1.shape:
            if isinstance(value, np.ndarray):
                return value
            else:
                raise TypeError(
                    f"JD is an array ({self._time.jd1!r}) but value "
                    f"is not ({value!r})")
        else:
            # zero-dimensional array, is it safe to unbox?
            if (isinstance(value, np.ndarray)
                    and not value.shape
                    and not np.ma.is_masked(value)):
                if value.dtype.kind == 'M':
                    # existing test doesn't want datetime64 converted
                    return value[()]
                elif value.dtype.fields:
                    # Unpack but keep field names; .item() doesn't
                    # Still don't get python types in the fields
                    return value[()]
                else:
                    return value.item()
            else:
                return value
    @property
    def jd1(self):
        """
        First of the two doubles that internally store time value(s) in JD.
        """
        # Apply the mask (if any) before shaping to match the input.
        jd1 = self._time.mask_if_needed(self._time.jd1)
        return self._shaped_like_input(jd1)

    @property
    def jd2(self):
        """
        Second of the two doubles that internally store time value(s) in JD.
        """
        jd2 = self._time.mask_if_needed(self._time.jd2)
        return self._shaped_like_input(jd2)
    def to_value(self, format, subfmt='*'):
        """Get time values expressed in specified output format.

        This method allows representing the ``Time`` object in the desired
        output ``format`` and optional sub-format ``subfmt``.  Available
        built-in formats include ``jd``, ``mjd``, ``iso``, and so forth. Each
        format can have its own sub-formats

        For built-in numerical formats like ``jd`` or ``unix``, ``subfmt`` can
        be one of 'float', 'long', 'decimal', 'str', or 'bytes'.  Here, 'long'
        uses ``numpy.longdouble`` for somewhat enhanced precision (with
        the enhancement depending on platform), and 'decimal'
        :class:`decimal.Decimal` for full precision.  For 'str' and 'bytes', the
        number of digits is also chosen such that time values are represented
        accurately.

        For built-in date-like string formats, one of 'date_hms', 'date_hm', or
        'date' (or 'longdate_hms', etc., for 5-digit years in
        `~astropy.time.TimeFITS`).  For sub-formats including seconds, the
        number of digits used for the fractional seconds is as set by
        `~astropy.time.Time.precision`.

        Parameters
        ----------
        format : str
            The format in which one wants the time values. Default: the current
            format.
        subfmt : str or `None`, optional
            Value or wildcard pattern to select the sub-format in which the
            values should be given.  The default of '*' picks the first
            available for a given format, i.e., 'float' or 'date_hms'.
            If `None`, use the instance's ``out_subfmt``.
        """
        # TODO: add a precision argument (but ensure it is keyword argument
        # only, to make life easier for TimeDelta.to_value()).
        if format not in self.FORMATS:
            raise ValueError(f'format must be one of {list(self.FORMATS)}')

        cache = self.cache['format']
        # Try to keep cache behaviour like it was in astropy < 4.0.
        key = format if subfmt is None else (format, subfmt)
        if key not in cache:
            if format == self.format:
                tm = self
            else:
                # Replicate (cheap: shares jd1/jd2) to get the other format.
                tm = self.replicate(format=format)

            # Some TimeFormat subclasses may not be able to handle being passed
            # an out_subfmt.  This includes some core classes like
            # TimeBesselianEpochString that do not have any allowed subfmts.  But
            # those do deal with `self.out_subfmt` internally, so if subfmt is
            # the same, we do not pass it on.
            kwargs = {}
            if subfmt is not None and subfmt != tm.out_subfmt:
                kwargs['out_subfmt'] = subfmt

            try:
                value = tm._time.to_value(parent=tm, **kwargs)
            except TypeError as exc:
                # Try validating subfmt, e.g. for formats like 'jyear_str' that
                # do not implement out_subfmt in to_value() (because there are
                # no allowed subformats).  If subfmt is not valid this gives the
                # same exception as would have occurred if the call to
                # `to_value()` had succeeded.
                tm._time._select_subfmts(subfmt)

                # Subfmt was valid, so fall back to the original exception to see
                # if it was lack of support for out_subfmt as a call arg.
                if "unexpected keyword argument 'out_subfmt'" in str(exc):
                    raise ValueError(
                        f"to_value() method for format {format!r} does not "
                        f"support passing a 'subfmt' argument") from None
                else:
                    # Some unforeseen exception so raise.
                    raise

            value = tm._shaped_like_input(value)
            cache[key] = value
        return cache[key]
    @property
    def value(self):
        """Time value(s) in current format"""
        # subfmt=None means: use the instance's out_subfmt.
        return self.to_value(self.format, None)

    @property
    def masked(self):
        # True if any element of this time is masked (delegated to TimeFormat).
        return self._time.masked

    @property
    def mask(self):
        # Boolean mask array identifying masked elements.
        return self._time.mask
    def insert(self, obj, values, axis=0):
        """
        Insert values before the given indices in the column and return
        a new `~astropy.time.Time` or `~astropy.time.TimeDelta` object.

        The values to be inserted must conform to the rules for in-place setting
        of ``Time`` objects (see ``Get and set values`` in the ``Time``
        documentation).

        The API signature matches the ``np.insert`` API, but is more limited.
        The specification of insert index ``obj`` must be a single integer,
        and the ``axis`` must be ``0`` for simple row insertion before the
        index.

        Parameters
        ----------
        obj : int
            Integer index before which ``values`` is inserted.
        values : array_like
            Value(s) to insert.  If the type of ``values`` is different
            from that of quantity, ``values`` is converted to the matching type.
        axis : int, optional
            Axis along which to insert ``values``.  Default is 0, which is the
            only allowed value and will insert a row.

        Returns
        -------
        out : `~astropy.time.Time` subclass
            New time object with inserted value(s)
        """
        # Validate inputs: obj arg is integer, axis=0, self is not a scalar, and
        # input index is in bounds.
        try:
            idx0 = operator.index(obj)
        except TypeError:
            raise TypeError('obj arg must be an integer')

        if axis != 0:
            raise ValueError('axis must be 0')

        if not self.shape:
            raise TypeError('cannot insert into scalar {} object'
                            .format(self.__class__.__name__))

        if abs(idx0) > len(self):
            raise IndexError('index {} is out of bounds for axis 0 with size {}'
                             .format(idx0, len(self)))

        # Turn negative index into positive
        if idx0 < 0:
            idx0 = len(self) + idx0

        # For non-Time object, use numpy to help figure out the length.  (Note annoying
        # case of a string input that has a length which is not the length we want).
        if not isinstance(values, self.__class__):
            values = np.asarray(values)
        n_values = len(values) if values.shape else 1

        # Finally make the new object with the correct length and set values for the
        # three sections, before insert, the insert, and after the insert.
        out = self.__class__.info.new_like([self], len(self) + n_values, name=self.info.name)

        # Copy the head section directly at the jd1/jd2 level (no validation needed
        # since it comes from self).
        out._time.jd1[:idx0] = self._time.jd1[:idx0]
        out._time.jd2[:idx0] = self._time.jd2[:idx0]

        # This uses the Time setting machinery to coerce and validate as necessary.
        out[idx0:idx0 + n_values] = values

        out._time.jd1[idx0 + n_values:] = self._time.jd1[idx0:]
        out._time.jd2[idx0 + n_values:] = self._time.jd2[idx0:]

        return out
    def __setitem__(self, item, value):
        """Set time element(s) in place, coercing ``value`` as needed."""
        if not self.writeable:
            if self.shape:
                raise ValueError('{} object is read-only. Make a '
                                 'copy() or set "writeable" attribute to True.'
                                 .format(self.__class__.__name__))
            else:
                raise ValueError('scalar {} object is read-only.'
                                 .format(self.__class__.__name__))

        # Any use of setitem results in immediate cache invalidation
        del self.cache

        # Setting invalidates transform deltas
        for attr in ('_delta_tdb_tt', '_delta_ut1_utc'):
            if hasattr(self, attr):
                delattr(self, attr)

        # Identity checks against the np.ma.masked / np.nan singletons:
        # masking an element is represented internally by jd2 = NaN.
        if value is np.ma.masked or value is np.nan:
            self._time.jd2[item] = np.nan
            return

        # Coerce ``value`` to an equivalent Time (checks location etc.).
        value = self._make_value_equivalent(item, value)

        # Finally directly set the jd1/2 values.  Locations are known to match.
        if self.scale is not None:
            value = getattr(value, self.scale)
        self._time.jd1[item] = value._time.jd1
        self._time.jd2[item] = value._time.jd2
def isclose(self, other, atol=None):
"""Returns a boolean or boolean array where two Time objects are
element-wise equal within a time tolerance.
This evaluates the expression below::
abs(self - other) <= atol
Parameters
----------
other : `~astropy.time.Time`
Time object for comparison.
atol : `~astropy.units.Quantity` or `~astropy.time.TimeDelta`
Absoute tolerance for equality with units of time (e.g. ``u.s`` or
``u.day``). Default is two bits in the 128-bit JD time representation,
equivalent to about 40 picosecs.
"""
if atol is None:
# Note: use 2 bits instead of 1 bit based on experience in precision
# tests, since taking the difference with a UTC time means one has
# to do a scale change.
atol = 2 * np.finfo(float).eps * u.day
if not isinstance(atol, (u.Quantity, TimeDelta)):
raise TypeError("'atol' argument must be a Quantity or TimeDelta instance, got "
f'{atol.__class__.__name__} instead')
try:
# Separate these out so user sees where the problem is
dt = self - other
dt = abs(dt)
out = dt <= atol
except Exception as err:
raise TypeError("'other' argument must support subtraction with Time "
f"and return a value that supports comparison with "
f"{atol.__class__.__name__}: {err}")
return out
def copy(self, format=None):
"""
Return a fully independent copy the Time object, optionally changing
the format.
If ``format`` is supplied then the time format of the returned Time
object will be set accordingly, otherwise it will be unchanged from the
original.
In this method a full copy of the internal time arrays will be made.
The internal time arrays are normally not changeable by the user so in
most cases the ``replicate()`` method should be used.
Parameters
----------
format : str, optional
Time format of the copy.
Returns
-------
tm : Time object
Copy of this object
"""
return self._apply('copy', format=format)
def replicate(self, format=None, copy=False, cls=None):
"""
Return a replica of the Time object, optionally changing the format.
If ``format`` is supplied then the time format of the returned Time
object will be set accordingly, otherwise it will be unchanged from the
original.
If ``copy`` is set to `True` then a full copy of the internal time arrays
will be made. By default the replica will use a reference to the
original arrays when possible to save memory. The internal time arrays
are normally not changeable by the user so in most cases it should not
be necessary to set ``copy`` to `True`.
The convenience method copy() is available in which ``copy`` is `True`
by default.
Parameters
----------
format : str, optional
Time format of the replica.
copy : bool, optional
Return a true copy instead of using references where possible.
Returns
-------
tm : Time object
Replica of this object
"""
return self._apply('copy' if copy else 'replicate', format=format, cls=cls)
    def _apply(self, method, *args, format=None, cls=None, **kwargs):
        """Create a new time object, possibly applying a method to the arrays.

        Parameters
        ----------
        method : str or callable
            If string, can be 'replicate'  or the name of a relevant
            `~numpy.ndarray` method. In the former case, a new time instance
            with unchanged internal data is created, while in the latter the
            method is applied to the internal ``jd1`` and ``jd2`` arrays, as
            well as to possible ``location``, ``_delta_ut1_utc``, and
            ``_delta_tdb_tt`` arrays.
            If a callable, it is directly applied to the above arrays.
            Examples: 'copy', '__getitem__', 'reshape', `~numpy.broadcast_to`.
        args : tuple
            Any positional arguments for ``method``.
        kwargs : dict
            Any keyword arguments for ``method``.  If the ``format`` keyword
            argument is present, this will be used as the Time format of the
            replica.

        Examples
        --------
        Some ways this is used internally::

            copy : ``_apply('copy')``
            replicate : ``_apply('replicate')``
            reshape : ``_apply('reshape', new_shape)``
            index or slice : ``_apply('__getitem__', item)``
            broadcast : ``_apply(np.broadcast, shape=new_shape)``
        """
        new_format = self.format if format is None else format

        # Normalize ``method`` into a callable (or None for a plain replicate).
        if callable(method):
            apply_method = lambda array: method(array, *args, **kwargs)

        else:
            if method == 'replicate':
                apply_method = None
            else:
                apply_method = operator.methodcaller(method, *args, **kwargs)

        jd1, jd2 = self._time.jd1, self._time.jd2
        if apply_method:
            jd1 = apply_method(jd1)
            jd2 = apply_method(jd2)

        # Get a new instance of our class and set its attributes directly.
        # __new__ is used so __init__ (which parses values) is bypassed.
        tm = super().__new__(cls or self.__class__)
        tm._time = TimeJD(jd1, jd2, self.scale, precision=0,
                          in_subfmt='*', out_subfmt='*', from_jd=True)

        # Optional ndarray attributes.
        for attr in ('_delta_ut1_utc', '_delta_tdb_tt', 'location'):
            try:
                val = getattr(self, attr)
            except AttributeError:
                continue

            if apply_method:
                # Apply the method to any value arrays (though skip if there is
                # only an array scalar and the method would return a view,
                # since in that case nothing would change).
                if getattr(val, 'shape', ()):
                    val = apply_method(val)
                elif method == 'copy' or method == 'flatten':
                    # flatten should copy also for a single element array, but
                    # we cannot use it directly for array scalars, since it
                    # always returns a one-dimensional array. So, just copy.
                    val = copy.copy(val)

            setattr(tm, attr, val)

        # Copy other 'info' attr only if it has actually been defined and the
        # time object is not a scalar (issue #10688).
        # See PR #3898 for further explanation and justification, along
        # with Quantity.__array_finalize__
        if 'info' in self.__dict__:
            tm.info = self.info

        # Make the new internal _time object corresponding to the format
        # in the copy.  If the format is unchanged this process is lightweight
        # and does not create any new arrays.
        if new_format not in tm.FORMATS:
            raise ValueError(f'format must be one of {list(tm.FORMATS)}')

        NewFormat = tm.FORMATS[new_format]

        tm._time = NewFormat(
            tm._time.jd1, tm._time.jd2,
            tm._time._scale,
            precision=self.precision,
            in_subfmt=NewFormat._get_allowed_subfmt(self.in_subfmt),
            out_subfmt=NewFormat._get_allowed_subfmt(self.out_subfmt),
            from_jd=True)
        tm._format = new_format
        tm.SCALES = self.SCALES
        return tm
    def __copy__(self):
        """
        Overrides the default behavior of the `copy.copy` function in
        the python stdlib to behave like `Time.copy`. Does *not* make a
        copy of the JD arrays - only copies by reference.
        """
        return self.replicate()

    def __deepcopy__(self, memo):
        """
        Overrides the default behavior of the `copy.deepcopy` function
        in the python stdlib to behave like `Time.copy`. Does make a
        copy of the JD arrays.
        """
        # ``memo`` is unused: copy() already produces fully independent arrays.
        return self.copy()
    def _advanced_index(self, indices, axis=None, keepdims=False):
        """Turn argmin, argmax output into an advanced index.
        Argmin, argmax output contains indices along a given axis in an array
        shaped like the other dimensions.  To use this to get values at the
        correct location, a list is constructed in which the other axes are
        indexed sequentially.  For ``keepdims`` is ``True``, the net result is
        the same as constructing an index grid with ``np.ogrid`` and then
        replacing the ``axis`` item with ``indices`` with its shaped expanded
        at ``axis``. For ``keepdims`` is ``False``, the result is the same but
        with the ``axis`` dimension removed from all list entries.
        For ``axis`` is ``None``, this calls :func:`~numpy.unravel_index`.
        Parameters
        ----------
        indices : array
            Output of argmin or argmax.
        axis : int or None
            axis along which argmin or argmax was used.
        keepdims : bool
            Whether to construct indices that keep or remove the axis along
            which argmin or argmax was used.  Default: ``False``.
        Returns
        -------
        advanced_index : list of arrays
            Suitable for use as an advanced index.
        """
        if axis is None:
            # Flattened reduction: map flat indices back to an nd index.
            return np.unravel_index(indices, self.shape)
        ndim = self.ndim
        if axis < 0:
            # Normalize a negative axis to its positive equivalent.
            axis = axis + ndim
        if keepdims and indices.ndim < self.ndim:
            # Re-insert the reduced axis so ``indices`` broadcasts correctly.
            indices = np.expand_dims(indices, axis)
        # For every axis other than ``axis``, build an arange reshaped so it
        # is length-1 everywhere except its own position, making all entries
        # mutually broadcastable as one advanced index.
        index = [indices
                 if i == axis
                 else np.arange(s).reshape(
                     (1,) * (i if keepdims or i < axis else i - 1)
                     + (s,)
                     + (1,) * (ndim - i - (1 if keepdims or i > axis else 2))
                 )
                 for i, s in enumerate(self.shape)]
        return tuple(index)
def argmin(self, axis=None, out=None):
"""Return indices of the minimum values along the given axis.
This is similar to :meth:`~numpy.ndarray.argmin`, but adapted to ensure
that the full precision given by the two doubles ``jd1`` and ``jd2``
is used. See :func:`~numpy.argmin` for detailed documentation.
"""
# first get the minimum at normal precision.
jd = self.jd1 + self.jd2
approx = np.min(jd, axis, keepdims=True)
# Approx is very close to the true minimum, and by subtracting it at
# full precision, all numbers near 0 can be represented correctly,
# so we can be sure we get the true minimum.
# The below is effectively what would be done for
# dt = (self - self.__class__(approx, format='jd')).jd
# which translates to:
# approx_jd1, approx_jd2 = day_frac(approx, 0.)
# dt = (self.jd1 - approx_jd1) + (self.jd2 - approx_jd2)
dt = (self.jd1 - approx) + self.jd2
return dt.argmin(axis, out)
def argmax(self, axis=None, out=None):
"""Return indices of the maximum values along the given axis.
This is similar to :meth:`~numpy.ndarray.argmax`, but adapted to ensure
that the full precision given by the two doubles ``jd1`` and ``jd2``
is used. See :func:`~numpy.argmax` for detailed documentation.
"""
# For procedure, see comment on argmin.
jd = self.jd1 + self.jd2
approx = np.max(jd, axis, keepdims=True)
dt = (self.jd1 - approx) + self.jd2
return dt.argmax(axis, out)
    def argsort(self, axis=-1):
        """Returns the indices that would sort the time array.
        This is similar to :meth:`~numpy.ndarray.argsort`, but adapted to ensure
        that the full precision given by the two doubles ``jd1`` and ``jd2``
        is used, and that corresponding attributes are copied.  Internally,
        it uses :func:`~numpy.lexsort`, and hence no sort method can be chosen.
        """
        jd_approx = self.jd
        jd_remainder = (self - self.__class__(jd_approx, format='jd', scale=self.scale)).jd
        # lexsort treats the *last* key as primary: times are ordered by the
        # approximate JD first, with ties broken by the remainder.
        if axis is None:
            return np.lexsort((jd_remainder.ravel(), jd_approx.ravel()))
        else:
            return np.lexsort(keys=(jd_remainder, jd_approx), axis=axis)
    def min(self, axis=None, out=None, keepdims=False):
        """Minimum along a given axis.
        This is similar to :meth:`~numpy.ndarray.min`, but adapted to ensure
        that the full precision given by the two doubles ``jd1`` and ``jd2``
        is used, and that corresponding attributes are copied.
        Note that the ``out`` argument is present only for compatibility with
        ``np.min``; since `Time` instances are immutable, it is not possible
        to have an actual ``out`` to store the result in.
        """
        if out is not None:
            raise ValueError("Since `Time` instances are immutable, ``out`` "
                             "cannot be set to anything but ``None``.")
        # Index with the full-precision argmin so attributes are carried over.
        return self[self._advanced_index(self.argmin(axis), axis, keepdims)]
    def max(self, axis=None, out=None, keepdims=False):
        """Maximum along a given axis.
        This is similar to :meth:`~numpy.ndarray.max`, but adapted to ensure
        that the full precision given by the two doubles ``jd1`` and ``jd2``
        is used, and that corresponding attributes are copied.
        Note that the ``out`` argument is present only for compatibility with
        ``np.max``; since `Time` instances are immutable, it is not possible
        to have an actual ``out`` to store the result in.
        """
        if out is not None:
            raise ValueError("Since `Time` instances are immutable, ``out`` "
                             "cannot be set to anything but ``None``.")
        # Index with the full-precision argmax so attributes are carried over.
        return self[self._advanced_index(self.argmax(axis), axis, keepdims)]
    def ptp(self, axis=None, out=None, keepdims=False):
        """Peak to peak (maximum - minimum) along a given axis.
        This is similar to :meth:`~numpy.ndarray.ptp`, but adapted to ensure
        that the full precision given by the two doubles ``jd1`` and ``jd2``
        is used.
        Note that the ``out`` argument is present only for compatibility with
        `~numpy.ptp`; since `Time` instances are immutable, it is not possible
        to have an actual ``out`` to store the result in.
        """
        if out is not None:
            raise ValueError("Since `Time` instances are immutable, ``out`` "
                             "cannot be set to anything but ``None``.")
        # The result is a TimeDelta (Time - Time subtraction).
        return (self.max(axis, keepdims=keepdims)
                - self.min(axis, keepdims=keepdims))
    def sort(self, axis=-1):
        """Return a copy sorted along the specified axis.
        This is similar to :meth:`~numpy.ndarray.sort`, but internally uses
        indexing with :func:`~numpy.lexsort` to ensure that the full precision
        given by the two doubles ``jd1`` and ``jd2`` is kept, and that
        corresponding attributes are properly sorted and copied as well.
        Parameters
        ----------
        axis : int or None
            Axis to be sorted. If ``None``, the flattened array is sorted.
            By default, sort over the last axis.
        """
        # keepdims=True so the argsort indices can be used directly as an
        # advanced index over the original shape.
        return self[self._advanced_index(self.argsort(axis), axis,
                                         keepdims=True)]
    @property
    def cache(self):
        """
        Return the cache associated with this instance.
        """
        return self._time.cache
    @cache.deleter
    def cache(self):
        # Dropping the cache invalidates memoized scale/format conversions.
        del self._time.cache
    def __getattr__(self, attr):
        """
        Get dynamic attributes to output format or do timescale conversion.
        """
        # Scale conversion (e.g. ``t.tdb``): computed once, then cached.
        if attr in self.SCALES and self.scale is not None:
            cache = self.cache['scale']
            if attr not in cache:
                if attr == self.scale:
                    tm = self
                else:
                    tm = self.replicate()
                    tm._set_scale(attr)
                if tm.shape:
                    # Prevent future modification of cached array-like object
                    tm.writeable = False
                cache[attr] = tm
            return cache[attr]
        # Format conversion (e.g. ``t.iso``) is delegated to to_value.
        elif attr in self.FORMATS:
            return self.to_value(attr, subfmt=None)
        elif attr in TIME_SCALES: # allowed ones done above (self.SCALES)
            if self.scale is None:
                raise ScaleValueError("Cannot convert TimeDelta with "
                                      "undefined scale to any defined scale.")
            else:
                raise ScaleValueError("Cannot convert {} with scale "
                                      "'{}' to scale '{}'"
                                      .format(self.__class__.__name__,
                                              self.scale, attr))
        else:
            # Should raise AttributeError
            return self.__getattribute__(attr)
    @override__dir__
    def __dir__(self):
        # Advertise the dynamic scale/format attributes served by __getattr__.
        result = set(self.SCALES)
        result.update(self.FORMATS)
        return result
    def _match_shape(self, val):
        """
        Ensure that `val` is matched to length of self. If val has length 1
        then broadcast, otherwise cast to double and make sure shape matches.
        """
        val = _make_array(val, copy=True) # be conservative and copy
        if val.size > 1 and val.shape != self.shape:
            try:
                # check the value can be broadcast to the shape of self.
                val = np.broadcast_to(val, self.shape, subok=True)
            except Exception:
                # NOTE(review): the underlying broadcast error is not chained.
                raise ValueError('Attribute shape must match or be '
                                 'broadcastable to that of Time object. '
                                 'Typically, give either a single value or '
                                 'one for each time.')
        return val
    def _time_comparison(self, other, op):
        """If other is of same class as self, compare difference in self.scale.
        Otherwise, return NotImplemented
        """
        if other.__class__ is not self.__class__:
            try:
                other = self.__class__(other, scale=self.scale)
            except Exception:
                # Let other have a go.
                return NotImplemented
        if(self.scale is not None and self.scale not in other.SCALES
           or other.scale is not None and other.scale not in self.SCALES):
            # Other will also not be able to do it, so raise a TypeError
            # immediately, allowing us to explain why it doesn't work.
            raise TypeError("Cannot compare {} instances with scales "
                            "'{}' and '{}'".format(self.__class__.__name__,
                                                   self.scale, other.scale))
        if self.scale is not None and other.scale is not None:
            other = getattr(other, self.scale)
        # Compare at full two-double precision by differencing jd1 and jd2
        # separately before combining.
        return op((self.jd1 - other.jd1) + (self.jd2 - other.jd2), 0.)
    def __lt__(self, other):
        """Less-than via full-precision time difference."""
        return self._time_comparison(other, operator.lt)
    def __le__(self, other):
        """Less-than-or-equal via full-precision time difference."""
        return self._time_comparison(other, operator.le)
    def __eq__(self, other):
        """
        If other is an incompatible object for comparison, return `False`.
        Otherwise, return `True` if the time difference between self and
        other is zero.
        """
        return self._time_comparison(other, operator.eq)
    def __ne__(self, other):
        """
        If other is an incompatible object for comparison, return `True`.
        Otherwise, return `False` if the time difference between self and
        other is zero.
        """
        return self._time_comparison(other, operator.ne)
    def __gt__(self, other):
        """Greater-than via full-precision time difference."""
        return self._time_comparison(other, operator.gt)
    def __ge__(self, other):
        """Greater-than-or-equal via full-precision time difference."""
        return self._time_comparison(other, operator.ge)
class Time(TimeBase):
    """
    Represent and manipulate times and dates for astronomy.
    A `Time` object is initialized with one or more times in the ``val``
    argument. The input times in ``val`` must conform to the specified
    ``format`` and must correspond to the specified time ``scale``. The
    optional ``val2`` time input should be supplied only for numeric input
    formats (e.g. JD) where very high precision (better than 64-bit precision)
    is required.
    The allowed values for ``format`` can be listed with::
        >>> list(Time.FORMATS)
        ['jd', 'mjd', 'decimalyear', 'unix', 'unix_tai', 'cxcsec', 'gps', 'plot_date',
         'stardate', 'datetime', 'ymdhms', 'iso', 'isot', 'yday', 'datetime64',
         'fits', 'byear', 'jyear', 'byear_str', 'jyear_str']
    See also: http://docs.astropy.org/en/stable/time/
    Parameters
    ----------
    val : sequence, ndarray, number, str, bytes, or `~astropy.time.Time` object
        Value(s) to initialize the time or times. Bytes are decoded as ascii.
    val2 : sequence, ndarray, or number; optional
        Value(s) to initialize the time or times. Only used for numerical
        input, to help preserve precision.
    format : str, optional
        Format of input value(s)
    scale : str, optional
        Time scale of input value(s), must be one of the following:
        ('tai', 'tcb', 'tcg', 'tdb', 'tt', 'ut1', 'utc')
    precision : int, optional
        Digits of precision in string representation of time
    in_subfmt : str, optional
        Unix glob to select subformats for parsing input times
    out_subfmt : str, optional
        Unix glob to select subformat for outputting times
    location : `~astropy.coordinates.EarthLocation` or tuple, optional
        If given as a tuple, it should be able to initialize
        an EarthLocation instance, i.e., either contain 3 items with units of
        length for geocentric coordinates, or contain a longitude, latitude,
        and an optional height for geodetic coordinates.
        Can be a single location, or one for each input time.
        If not given, assumed to be the center of the Earth for time scale
        transformations to and from the solar-system barycenter.
    copy : bool, optional
        Make a copy of the input values
    """
    SCALES = TIME_SCALES
    """List of time scales"""
    FORMATS = TIME_FORMATS
    """Dict of time formats"""
    def __new__(cls, val, val2=None, format=None, scale=None,
                precision=None, in_subfmt=None, out_subfmt=None,
                location=None, copy=False):
        """Create a new Time, ensuring the leap-second table is initialized."""
        # Because of import problems, this can only be done on
        # first call of Time. The initialization is complicated because
        # update_leap_seconds uses Time.
        # In principle, this may cause wrong leap seconds in
        # update_leap_seconds itself, but since expiration is in
        # units of days, that is fine.
        global _LEAP_SECONDS_CHECK
        if _LEAP_SECONDS_CHECK != _LeapSecondsCheck.DONE:
            with _LEAP_SECONDS_LOCK:
                # There are three ways we can get here:
                # 1. First call (NOT_STARTED).
                # 2. Re-entrant call (RUNNING). We skip the initialisation
                # and don't worry about leap second errors.
                # 3. Another thread which raced with the first call
                # (RUNNING). The first thread has relinquished the
                # lock to us, so initialization is complete.
                if _LEAP_SECONDS_CHECK == _LeapSecondsCheck.NOT_STARTED:
                    _LEAP_SECONDS_CHECK = _LeapSecondsCheck.RUNNING
                    update_leap_seconds()
                    _LEAP_SECONDS_CHECK = _LeapSecondsCheck.DONE
        if isinstance(val, Time):
            # Copy-construction: delegate to replicate (possibly changing cls).
            self = val.replicate(format=format, copy=copy, cls=cls)
        else:
            self = super().__new__(cls)
        return self
    def __init__(self, val, val2=None, format=None, scale=None,
                 precision=None, in_subfmt=None, out_subfmt=None,
                 location=None, copy=False):
        """Initialize location and time data; see class docstring for parameters."""
        if location is not None:
            from astropy.coordinates import EarthLocation
            if isinstance(location, EarthLocation):
                self.location = location
            else:
                # Tuple input: geocentric (x, y, z) or geodetic (lon, lat[, height]).
                self.location = EarthLocation(*location)
            if self.location.size == 1:
                self.location = self.location.squeeze()
        else:
            # ``location`` may already have been set by replicate in __new__.
            if not hasattr(self, 'location'):
                self.location = None
        if isinstance(val, Time):
            # Update _time formatting parameters if explicitly specified
            if precision is not None:
                self._time.precision = precision
            if in_subfmt is not None:
                self._time.in_subfmt = in_subfmt
            if out_subfmt is not None:
                self._time.out_subfmt = out_subfmt
            self.SCALES = TIME_TYPES[self.scale]
            if scale is not None:
                self._set_scale(scale)
        else:
            self._init_from_vals(val, val2, format, scale, copy,
                                 precision, in_subfmt, out_subfmt)
            self.SCALES = TIME_TYPES[self.scale]
        if self.location is not None and (self.location.size > 1
                                          and self.location.shape != self.shape):
            try:
                # check the location can be broadcast to self's shape.
                self.location = np.broadcast_to(self.location, self.shape,
                                                subok=True)
            except Exception as err:
                raise ValueError('The location with shape {} cannot be '
                                 'broadcast against time with shape {}. '
                                 'Typically, either give a single location or '
                                 'one for each time.'
                                 .format(self.location.shape, self.shape)) from err
def _make_value_equivalent(self, item, value):
"""Coerce setitem value into an equivalent Time object"""
# If there is a vector location then broadcast to the Time shape
# and then select with ``item``
if self.location is not None and self.location.shape:
self_location = np.broadcast_to(self.location, self.shape, subok=True)[item]
else:
self_location = self.location
if isinstance(value, Time):
# Make sure locations are compatible. Location can be either None or
# a Location object.
if self_location is None and value.location is None:
match = True
elif ((self_location is None and value.location is not None)
or (self_location is not None and value.location is None)):
match = False
else:
match = np.all(self_location == value.location)
if not match:
raise ValueError('cannot set to Time with different location: '
'expected location={} and '
'got location={}'
.format(self_location, value.location))
else:
try:
value = self.__class__(value, scale=self.scale, location=self_location)
except Exception:
try:
value = self.__class__(value, scale=self.scale, format=self.format,
location=self_location)
except Exception as err:
raise ValueError('cannot convert value to a compatible Time object: {}'
.format(err))
return value
@classmethod
def now(cls):
"""
Creates a new object corresponding to the instant in time this
method is called.
.. note::
"Now" is determined using the `~datetime.datetime.utcnow`
function, so its accuracy and precision is determined by that
function. Generally that means it is set by the accuracy of
your system clock.
Returns
-------
nowtime
A new `Time` object (or a subclass of `Time` if this is called from
such a subclass) at the current time.
"""
# call `utcnow` immediately to be sure it's ASAP
dtnow = datetime.utcnow()
return cls(val=dtnow, format='datetime', scale='utc')
info = TimeInfo()
    @classmethod
    def strptime(cls, time_string, format_string, **kwargs):
        """
        Parse a string to a Time according to a format specification.
        See `time.strptime` documentation for format specification.
        >>> Time.strptime('2012-Jun-30 23:59:60', '%Y-%b-%d %H:%M:%S')
        <Time object: scale='utc' format='isot' value=2012-06-30T23:59:60.000>
        Parameters
        ----------
        time_string : str, sequence, or ndarray
            Objects containing time data of type string
        format_string : str
            String specifying format of time_string.
        kwargs : dict
            Any keyword arguments for ``Time``. If the ``format`` keyword
            argument is present, this will be used as the Time format.
        Returns
        -------
        time_obj : `~astropy.time.Time`
            A new `~astropy.time.Time` object corresponding to the input
            ``time_string``.
        """
        time_array = np.asarray(time_string)
        if time_array.dtype.kind not in ('U', 'S'):
            err = "Expected type is string, a bytes-like object or a sequence"\
                  " of these. Got dtype '{}'".format(time_array.dtype.kind)
            raise TypeError(err)
        # Bytes input is decoded element-wise as ascii.
        to_string = (str if time_array.dtype.kind == 'U' else
                     lambda x: str(x.item(), encoding='ascii'))
        # Second operand is an output buffer of fixed-width ISOT strings
        # ('U30' comfortably holds 'YYYY-MM-DDTHH:MM:SS.ffffff').
        iterator = np.nditer([time_array, None],
                             op_dtypes=[time_array.dtype, 'U30'])
        for time, formatted in iterator:
            tt, fraction = _strptime._strptime(to_string(time), format_string)
            time_tuple = tt[:6] + (fraction,)
            formatted[...] = '{:04}-{:02}-{:02}T{:02}:{:02}:{:02}.{:06}'\
                .format(*time_tuple)
        format = kwargs.pop('format', None)
        # Build as ISOT first, then switch to a caller-requested format.
        out = cls(*iterator.operands[1:], format='isot', **kwargs)
        if format is not None:
            out.format = format
        return out
    def strftime(self, format_spec):
        """
        Convert Time to a string or a numpy.array of strings according to a
        format specification.
        See `time.strftime` documentation for format specification.
        Parameters
        ----------
        format_spec : str
            Format definition of return string.
        Returns
        -------
        formatted : str or numpy.array
            String or numpy.array of strings formatted according to the given
            format string.
        """
        formatted_strings = []
        for sk in self.replicate('iso')._time.str_kwargs():
            # Rebuild a struct_time-style tuple so strftime can derive the
            # weekday and day-of-year fields.
            date_tuple = date(sk['year'], sk['mon'], sk['day']).timetuple()
            datetime_tuple = (sk['year'], sk['mon'], sk['day'],
                              sk['hour'], sk['min'], sk['sec'],
                              date_tuple[6], date_tuple[7], -1)
            fmtd_str = format_spec
            if '%f' in fmtd_str:
                # time.strftime has no %f; substitute the fractional seconds
                # at this instance's precision before formatting.
                fmtd_str = fmtd_str.replace('%f', '{frac:0{precision}}'.format(
                    frac=sk['fracsec'], precision=self.precision))
            fmtd_str = strftime(fmtd_str, datetime_tuple)
            formatted_strings.append(fmtd_str)
        if self.isscalar:
            return formatted_strings[0]
        else:
            return np.array(formatted_strings).reshape(self.shape)
    def light_travel_time(self, skycoord, kind='barycentric', location=None, ephemeris=None):
        """Light travel time correction to the barycentre or heliocentre.
        The frame transformations used to calculate the location of the solar
        system barycentre and the heliocentre rely on the erfa routine epv00,
        which is consistent with the JPL DE405 ephemeris to an accuracy of
        11.2 km, corresponding to a light travel time of 4 microseconds.
        The routine assumes the source(s) are at large distance, i.e., neglects
        finite-distance effects.
        Parameters
        ----------
        skycoord : `~astropy.coordinates.SkyCoord`
            The sky location to calculate the correction for.
        kind : str, optional
            ``'barycentric'`` (default) or ``'heliocentric'``
        location : `~astropy.coordinates.EarthLocation`, optional
            The location of the observatory to calculate the correction for.
            If no location is given, the ``location`` attribute of the Time
            object is used
        ephemeris : str, optional
            Solar system ephemeris to use (e.g., 'builtin', 'jpl'). By default,
            use the one set with ``astropy.coordinates.solar_system_ephemeris.set``.
            For more information, see `~astropy.coordinates.solar_system_ephemeris`.
        Returns
        -------
        time_offset : `~astropy.time.TimeDelta`
            The time offset between the barycentre or Heliocentre and Earth,
            in TDB seconds. Should be added to the original time to get the
            time in the Solar system barycentre or the Heliocentre.
            Also, the time conversion to BJD will then include the relativistic correction as well.
        """
        if kind.lower() not in ('barycentric', 'heliocentric'):
            raise ValueError("'kind' parameter must be one of 'heliocentric' "
                             "or 'barycentric'")
        if location is None:
            if self.location is None:
                raise ValueError('An EarthLocation needs to be set or passed '
                                 'in to calculate bary- or heliocentric '
                                 'corrections')
            location = self.location
        from astropy.coordinates import (UnitSphericalRepresentation, CartesianRepresentation,
                                         HCRS, ICRS, GCRS, solar_system_ephemeris)
        # ensure sky location is ICRS compatible
        if not skycoord.is_transformable_to(ICRS()):
            raise ValueError("Given skycoord is not transformable to the ICRS")
        # get location of observatory in ITRS coordinates at this Time
        try:
            itrs = location.get_itrs(obstime=self)
        except Exception:
            # NOTE(review): any failure here is reported as a missing/invalid
            # `get_itrs` method; the underlying error is not chained.
            raise ValueError("Supplied location does not have a valid `get_itrs` method")
        with solar_system_ephemeris.set(ephemeris):
            if kind.lower() == 'heliocentric':
                # convert to heliocentric coordinates, aligned with ICRS
                cpos = itrs.transform_to(HCRS(obstime=self)).cartesian.xyz
            else:
                # first we need to convert to GCRS coordinates with the correct
                # obstime, since ICRS coordinates have no frame time
                gcrs_coo = itrs.transform_to(GCRS(obstime=self))
                # convert to barycentric (BCRS) coordinates, aligned with ICRS
                cpos = gcrs_coo.transform_to(ICRS()).cartesian.xyz
        # get unit ICRS vector to star
        spos = (skycoord.icrs.represent_as(UnitSphericalRepresentation).
                represent_as(CartesianRepresentation).xyz)
        # Move X,Y,Z to last dimension, to enable possible broadcasting below.
        cpos = np.rollaxis(cpos, 0, cpos.ndim)
        spos = np.rollaxis(spos, 0, spos.ndim)
        # calculate light travel time correction
        tcor_val = (spos * cpos).sum(axis=-1) / const.c
        return TimeDelta(tcor_val, scale='tdb')
    def sidereal_time(self, kind, longitude=None, model=None):
        """Calculate sidereal time.
        Parameters
        ----------
        kind : str
            ``'mean'`` or ``'apparent'``, i.e., accounting for precession
            only, or also for nutation.
        longitude : `~astropy.units.Quantity`, `str`, or `None`; optional
            The longitude on the Earth at which to compute the sidereal time.
            Can be given as a `~astropy.units.Quantity` with angular units
            (or an `~astropy.coordinates.Angle` or
            `~astropy.coordinates.Longitude`), or as a name of an
            observatory (currently, only ``'greenwich'`` is supported,
            equivalent to 0 deg). If `None` (default), the ``lon`` attribute of
            the Time object is used.
        model : str or `None`; optional
            Precession (and nutation) model to use. The available ones are:
            - {0}: {1}
            - {2}: {3}
            If `None` (default), the last (most recent) one from the appropriate
            list above is used.
        Returns
        -------
        sidereal time : `~astropy.coordinates.Longitude`
            Sidereal time as a quantity with units of hourangle
        """ # docstring is formatted below
        from astropy.coordinates import Longitude
        if kind.lower() not in SIDEREAL_TIME_MODELS.keys():
            raise ValueError('The kind of sidereal time has to be {}'.format(
                ' or '.join(sorted(SIDEREAL_TIME_MODELS.keys()))))
        available_models = SIDEREAL_TIME_MODELS[kind.lower()]
        if model is None:
            # Default to the most recent model for this kind.
            model = sorted(available_models.keys())[-1]
        else:
            if model.upper() not in available_models:
                raise ValueError(
                    'Model {} not implemented for {} sidereal time; '
                    'available models are {}'
                    .format(model, kind, sorted(available_models.keys())))
        if longitude is None:
            if self.location is None:
                raise ValueError('No longitude is given but the location for '
                                 'the Time object is not set.')
            longitude = self.location.lon
        elif longitude == 'greenwich':
            longitude = Longitude(0., u.degree,
                                  wrap_angle=180. * u.degree)
        else:
            # sanity check on input
            longitude = Longitude(longitude, u.degree,
                                  wrap_angle=180. * u.degree)
        gst = self._erfa_sidereal_time(available_models[model.upper()])
        return Longitude(gst + longitude, u.hourangle)
    # Fill the {0}..{3} placeholders with the available model names
    # (skipped when running with docstrings stripped, i.e. python -OO).
    if isinstance(sidereal_time.__doc__, str):
        sidereal_time.__doc__ = sidereal_time.__doc__.format(
            'apparent', sorted(SIDEREAL_TIME_MODELS['apparent'].keys()),
            'mean', sorted(SIDEREAL_TIME_MODELS['mean'].keys()))
    def _erfa_sidereal_time(self, model):
        """Calculate a sidereal time using a IAU precession/nutation model."""
        from astropy.coordinates import Longitude
        erfa_function = model['function']
        # Gather a (jd1, jd2) pair for each time scale the ERFA function needs.
        erfa_parameters = [getattr(getattr(self, scale)._time, jd_part)
                           for scale in model['scales']
                           for jd_part in ('jd1', 'jd2_filled')]
        sidereal_time = erfa_function(*erfa_parameters)
        if self.masked:
            sidereal_time[self.mask] = np.nan
        return Longitude(sidereal_time, u.radian).to(u.hourangle)
    def get_delta_ut1_utc(self, iers_table=None, return_status=False):
        """Find UT1 - UTC differences by interpolating in IERS Table.
        Parameters
        ----------
        iers_table : `~astropy.utils.iers.IERS` table, optional
            Table containing UT1-UTC differences from IERS Bulletins A
            and/or B. Default: `~astropy.utils.iers.earth_orientation_table`
            (which in turn defaults to the combined version provided by
            `~astropy.utils.iers.IERS_Auto`).
        return_status : bool
            Whether to return status values. If `False` (default), iers
            raises `IndexError` if any time is out of the range
            covered by the IERS table.
        Returns
        -------
        ut1_utc : float or float array
            UT1-UTC, interpolated in IERS Table
        status : int or int array
            Status values (if ``return_status=True``)::
            ``astropy.utils.iers.FROM_IERS_B``
            ``astropy.utils.iers.FROM_IERS_A``
            ``astropy.utils.iers.FROM_IERS_A_PREDICTION``
            ``astropy.utils.iers.TIME_BEFORE_IERS_RANGE``
            ``astropy.utils.iers.TIME_BEYOND_IERS_RANGE``
        Notes
        -----
        In normal usage, UT1-UTC differences are calculated automatically
        on the first instance ut1 is needed.
        Examples
        --------
        To check in code whether any times are before the IERS table range::
            >>> from astropy.utils.iers import TIME_BEFORE_IERS_RANGE
            >>> t = Time(['1961-01-01', '2000-01-01'], scale='utc')
            >>> delta, status = t.get_delta_ut1_utc(return_status=True) # doctest: +REMOTE_DATA
            >>> status == TIME_BEFORE_IERS_RANGE # doctest: +REMOTE_DATA
            array([ True, False]...)
        """
        if iers_table is None:
            from astropy.utils.iers import earth_orientation_table
            iers_table = earth_orientation_table.get()
        return iers_table.ut1_utc(self.utc, return_status=return_status)
    # Property for ERFA DUT arg = UT1 - UTC
    def _get_delta_ut1_utc(self, jd1=None, jd2=None):
        """
        Get ERFA DUT arg = UT1 - UTC. This getter takes optional jd1 and
        jd2 args because it gets called that way when converting time scales.
        If delta_ut1_utc is not yet set, this will interpolate them from the
        IERS table.
        """
        # Sec. 4.3.1: the arg DUT is the quantity delta_UT1 = UT1 - UTC in
        # seconds. It is obtained from tables published by the IERS.
        if not hasattr(self, '_delta_ut1_utc'):
            from astropy.utils.iers import earth_orientation_table
            iers_table = earth_orientation_table.get()
            # jd1, jd2 are normally set (see above), except if delta_ut1_utc
            # is accessed directly; ensure we behave as expected for that case
            if jd1 is None:
                self_utc = self.utc
                jd1, jd2 = self_utc._time.jd1, self_utc._time.jd2_filled
                scale = 'utc'
            else:
                scale = self.scale
            # interpolate UT1-UTC in IERS table
            delta = iers_table.ut1_utc(jd1, jd2)
            # if we interpolated using UT1 jds, we may be off by one
            # second near leap seconds (and very slightly off elsewhere)
            if scale == 'ut1':
                # calculate UTC using the offset we got; the ERFA routine
                # is tolerant of leap seconds, so will do this right
                jd1_utc, jd2_utc = erfa.ut1utc(jd1, jd2, delta.to_value(u.s))
                # calculate a better estimate using the nearly correct UTC
                delta = iers_table.ut1_utc(jd1_utc, jd2_utc)
            self._set_delta_ut1_utc(delta)
        return self._delta_ut1_utc
    def _set_delta_ut1_utc(self, val):
        # A new offset invalidates any cached scale/format conversions.
        del self.cache
        if hasattr(val, 'to'): # Matches Quantity but also TimeDelta.
            val = val.to(u.second).value
        val = self._match_shape(val)
        self._delta_ut1_utc = val
    # Note can't use @property because _get_delta_ut1_utc is explicitly
    # called with the optional jd1 and jd2 args.
    delta_ut1_utc = property(_get_delta_ut1_utc, _set_delta_ut1_utc)
    """UT1 - UTC time scale offset"""
    # Property for ERFA DTR arg = TDB - TT
    def _get_delta_tdb_tt(self, jd1=None, jd2=None):
        """
        Get ERFA DTR arg = TDB - TT. The optional jd1 and jd2 args are
        passed when converting time scales; for direct attribute access the
        time scale must already be TT or TDB for the computation to apply.
        """
        if not hasattr(self, '_delta_tdb_tt'):
            # If jd1 and jd2 are not provided (which is the case for property
            # attribute access) then require that the time scale is TT or TDB.
            # Otherwise the computations here are not correct.
            if jd1 is None or jd2 is None:
                if self.scale not in ('tt', 'tdb'):
                    raise ValueError('Accessing the delta_tdb_tt attribute '
                                     'is only possible for TT or TDB time '
                                     'scales')
                else:
                    jd1 = self._time.jd1
                    jd2 = self._time.jd2_filled
            # First go from the current input time (which is either
            # TDB or TT) to an approximate UT1. Since TT and TDB are
            # pretty close (few msec?), assume TT. Similarly, since the
            # UT1 terms are very small, use UTC instead of UT1.
            njd1, njd2 = erfa.tttai(jd1, jd2)
            njd1, njd2 = erfa.taiutc(njd1, njd2)
            # subtract 0.5, so UT is fraction of the day from midnight
            ut = day_frac(njd1 - 0.5, njd2)[1]
            if self.location is None:
                # Assume geocentric.
                self._delta_tdb_tt = erfa.dtdb(jd1, jd2, ut, 0., 0., 0.)
            else:
                location = self.location
                # Geodetic params needed for d_tdb_tt()
                lon = location.lon
                rxy = np.hypot(location.x, location.y)
                z = location.z
                self._delta_tdb_tt = erfa.dtdb(
                    jd1, jd2, ut, lon.to_value(u.radian),
                    rxy.to_value(u.km), z.to_value(u.km))
        return self._delta_tdb_tt
    def _set_delta_tdb_tt(self, val):
        # A new offset invalidates any cached scale/format conversions.
        del self.cache
        if hasattr(val, 'to'): # Matches Quantity but also TimeDelta.
            val = val.to(u.second).value
        val = self._match_shape(val)
        self._delta_tdb_tt = val
    # Note can't use @property because _get_delta_tdb_tt is explicitly
    # called with the optional jd1 and jd2 args.
    delta_tdb_tt = property(_get_delta_tdb_tt, _set_delta_tdb_tt)
    """TDB - TT time scale offset"""
    def __sub__(self, other):
        """Subtract a TimeDelta (giving a Time) or a Time (giving a TimeDelta)."""
        # T - Tdelta = T
        # T - T = Tdelta
        other_is_delta = not isinstance(other, Time)
        if other_is_delta: # T - Tdelta
            # Check other is really a TimeDelta or something that can initialize.
            if not isinstance(other, TimeDelta):
                try:
                    other = TimeDelta(other)
                except Exception:
                    return NotImplemented
            # we need a constant scale to calculate, which is guaranteed for
            # TimeDelta, but not for Time (which can be UTC)
            out = self.replicate()
            if self.scale in other.SCALES:
                if other.scale not in (out.scale, None):
                    other = getattr(other, out.scale)
            else:
                if other.scale is None:
                    out._set_scale('tai')
                else:
                    if self.scale not in TIME_TYPES[other.scale]:
                        raise TypeError("Cannot subtract Time and TimeDelta instances "
                                        "with scales '{}' and '{}'"
                                        .format(self.scale, other.scale))
                    out._set_scale(other.scale)
            # remove attributes that are invalidated by changing time
            for attr in ('_delta_ut1_utc', '_delta_tdb_tt'):
                if hasattr(out, attr):
                    delattr(out, attr)
        else: # T - T
            # the scales should be compatible (e.g., cannot convert TDB to LOCAL)
            if other.scale not in self.SCALES:
                raise TypeError("Cannot subtract Time instances "
                                "with scales '{}' and '{}'"
                                .format(self.scale, other.scale))
            self_time = (self._time if self.scale in TIME_DELTA_SCALES
                         else self.tai._time)
            # set up TimeDelta, subtraction to be done shortly
            out = TimeDelta(self_time.jd1, self_time.jd2, format='jd',
                            scale=self_time.scale)
            if other.scale != out.scale:
                other = getattr(other, out.scale)
        # Subtract jd1 and jd2 separately to retain two-double precision.
        jd1 = out._time.jd1 - other._time.jd1
        jd2 = out._time.jd2 - other._time.jd2
        out._time.jd1, out._time.jd2 = day_frac(jd1, jd2)
        if other_is_delta:
            # Go back to left-side scale if needed
            out._set_scale(self.scale)
        return out
def __add__(self, other):
# T + Tdelta = T
# T + T = error
if isinstance(other, Time):
raise OperandTypeError(self, other, '+')
# Check other is really a TimeDelta or something that can initialize.
if not isinstance(other, TimeDelta):
try:
other = TimeDelta(other)
except Exception:
return NotImplemented
# ideally, we calculate in the scale of the Time item, since that is
# what we want the output in, but this may not be possible, since
# TimeDelta cannot be converted arbitrarily
out = self.replicate()
if self.scale in other.SCALES:
if other.scale not in (out.scale, None):
other = getattr(other, out.scale)
else:
if other.scale is None:
out._set_scale('tai')
else:
if self.scale not in TIME_TYPES[other.scale]:
raise TypeError("Cannot add Time and TimeDelta instances "
"with scales '{}' and '{}'"
.format(self.scale, other.scale))
out._set_scale(other.scale)
# remove attributes that are invalidated by changing time
for attr in ('_delta_ut1_utc', '_delta_tdb_tt'):
if hasattr(out, attr):
delattr(out, attr)
jd1 = out._time.jd1 + other._time.jd1
jd2 = out._time.jd2 + other._time.jd2
out._time.jd1, out._time.jd2 = day_frac(jd1, jd2)
# Go back to left-side scale if needed
out._set_scale(self.scale)
return out
# Reverse addition is possible: <something-Tdelta-ish> + T
# but there is no case of <something> - T, so no __rsub__.
    def __radd__(self, other):
        """Handle ``<TimeDelta-like> + Time`` by delegating to ``__add__``
        (addition with a delta is commutative)."""
        return self.__add__(other)
    def to_datetime(self, timezone=None):
        # TODO: this could likely go through to_value, as long as that
        # had an **kwargs part that was just passed on to _time.
        tm = self.replicate(format='datetime')
        return tm._shaped_like_input(tm._time.to_value(timezone))
    # Reuse the format class's docstring so the two stay in sync.
    to_datetime.__doc__ = TimeDatetime.to_value.__doc__
class TimeDelta(TimeBase):
    """
    Represent the time difference between two times.

    A TimeDelta object is initialized with one or more times in the ``val``
    argument. The input times in ``val`` must conform to the specified
    ``format``. The optional ``val2`` time input should be supplied only for
    numeric input formats (e.g. JD) where very high precision (better than
    64-bit precision) is required.

    The allowed values for ``format`` can be listed with::

      >>> list(TimeDelta.FORMATS)
      ['sec', 'jd', 'datetime']

    Note that for time differences, the scale can be among three groups:
    geocentric ('tai', 'tt', 'tcg'), barycentric ('tcb', 'tdb'), and rotational
    ('ut1'). Within each of these, the scales for time differences are the
    same. Conversion between geocentric and barycentric is possible, as there
    is only a scale factor change, but one cannot convert to or from 'ut1', as
    this requires knowledge of the actual times, not just their difference. For
    a similar reason, 'utc' is not a valid scale for a time difference: a UTC
    day is not always 86400 seconds.

    See also:

    - https://docs.astropy.org/en/stable/time/
    - https://docs.astropy.org/en/stable/time/index.html#time-deltas

    Parameters
    ----------
    val : sequence, ndarray, number, `~astropy.units.Quantity` or `~astropy.time.TimeDelta` object
        Value(s) to initialize the time difference(s). Any quantities will
        be converted appropriately (with care taken to avoid rounding
        errors for regular time units).
    val2 : sequence, ndarray, number, or `~astropy.units.Quantity`; optional
        Additional values, as needed to preserve precision.
    format : str, optional
        Format of input value(s)
    scale : str, optional
        Time scale of input value(s), must be one of the following values:
        ('tdb', 'tt', 'ut1', 'tcg', 'tcb', 'tai'). If not given (or
        ``None``), the scale is arbitrary; when added or subtracted from a
        ``Time`` instance, it will be used without conversion.
    copy : bool, optional
        Make a copy of the input values
    """
    SCALES = TIME_DELTA_SCALES
    """List of time delta scales."""

    FORMATS = TIME_DELTA_FORMATS
    """Dict of time delta formats."""

    info = TimeDeltaInfo()

    def __new__(cls, val, val2=None, format=None, scale=None,
                precision=None, in_subfmt=None, out_subfmt=None,
                location=None, copy=False):
        # When initializing from an existing TimeDelta, replicate instead of
        # re-parsing the values; __init__ then only handles a scale change.
        if isinstance(val, TimeDelta):
            self = val.replicate(format=format, copy=copy, cls=cls)
        else:
            self = super().__new__(cls)
        return self

    def __init__(self, val, val2=None, format=None, scale=None, copy=False):
        if isinstance(val, TimeDelta):
            # __new__ already replicated; at most a scale conversion remains.
            if scale is not None:
                self._set_scale(scale)
        else:
            if format is None:
                format = 'datetime' if isinstance(val, timedelta) else 'jd'
            self._init_from_vals(val, val2, format, scale, copy)
            if scale is not None:
                # Restrict per-instance allowed scales to the compatible group
                # (geocentric / barycentric / rotational).
                self.SCALES = TIME_DELTA_TYPES[scale]

    def replicate(self, *args, **kwargs):
        out = super().replicate(*args, **kwargs)
        # Preserve any per-instance restriction of allowed scales.
        out.SCALES = self.SCALES
        return out

    def to_datetime(self):
        """
        Convert to ``datetime.timedelta`` object.
        """
        tm = self.replicate(format='datetime')
        return tm._shaped_like_input(tm._time.value)

    def _set_scale(self, scale):
        """
        This is the key routine that actually does time scale conversions.
        This is not public and not connected to the read-only scale property.
        """
        if scale == self.scale:
            return
        if scale not in self.SCALES:
            raise ValueError("Scale {!r} is not in the allowed scales {}"
                             .format(scale, sorted(self.SCALES)))
        # For TimeDelta, there can only be a change in scale factor,
        # which is written as time2 - time1 = scale_offset * time1
        scale_offset = SCALE_OFFSETS[(self.scale, scale)]
        if scale_offset is None:
            # No rate difference between the scales; just relabel.
            self._time.scale = scale
        else:
            jd1, jd2 = self._time.jd1, self._time.jd2
            offset1, offset2 = day_frac(jd1, jd2, factor=scale_offset)
            self._time = self.FORMATS[self.format](
                jd1 + offset1, jd2 + offset2, scale,
                self.precision, self.in_subfmt,
                self.out_subfmt, from_jd=True)

    def _add_sub(self, other, op):
        """Perform common elements of addition / subtraction for two delta times"""
        # If not a TimeDelta then see if it can be turned into a TimeDelta.
        if not isinstance(other, TimeDelta):
            try:
                other = TimeDelta(other)
            except Exception:
                return NotImplemented
        # the scales should be compatible (e.g., cannot convert TDB to TAI)
        if(self.scale is not None and self.scale not in other.SCALES
           or other.scale is not None and other.scale not in self.SCALES):
            raise TypeError("Cannot add TimeDelta instances with scales "
                            "'{}' and '{}'".format(self.scale, other.scale))
        # adjust the scale of other if the scale of self is set (or no scales)
        if self.scale is not None or other.scale is None:
            out = self.replicate()
            if other.scale is not None:
                other = getattr(other, self.scale)
        else:
            out = other.replicate()
        jd1 = op(self._time.jd1, other._time.jd1)
        jd2 = op(self._time.jd2, other._time.jd2)
        out._time.jd1, out._time.jd2 = day_frac(jd1, jd2)
        return out

    def __add__(self, other):
        # If other is a Time then use Time.__add__ to do the calculation.
        if isinstance(other, Time):
            return other.__add__(self)
        return self._add_sub(other, operator.add)

    def __sub__(self, other):
        # TimeDelta - Time is an error
        if isinstance(other, Time):
            raise OperandTypeError(self, other, '-')
        return self._add_sub(other, operator.sub)

    def __radd__(self, other):
        return self.__add__(other)

    def __rsub__(self, other):
        # other - self == -(self - other)
        out = self.__sub__(other)
        return -out

    def __neg__(self):
        """Negation of a `TimeDelta` object."""
        new = self.copy()
        new._time.jd1 = -self._time.jd1
        new._time.jd2 = -self._time.jd2
        return new

    def __abs__(self):
        """Absolute value of a `TimeDelta` object."""
        jd1, jd2 = self._time.jd1, self._time.jd2
        negative = jd1 + jd2 < 0
        new = self.copy()
        new._time.jd1 = np.where(negative, -jd1, jd1)
        new._time.jd2 = np.where(negative, -jd2, jd2)
        return new

    def __mul__(self, other):
        """Multiplication of `TimeDelta` objects by numbers/arrays."""
        # Check needed since otherwise the self.jd1 * other multiplication
        # would enter here again (via __rmul__)
        if isinstance(other, Time):
            raise OperandTypeError(self, other, '*')
        elif ((isinstance(other, u.UnitBase)
               and other == u.dimensionless_unscaled)
              or (isinstance(other, str) and other == '')):
            return self.copy()
        # If other is something consistent with a dimensionless quantity
        # (could just be a float or an array), then we can just multiply in.
        try:
            other = u.Quantity(other, u.dimensionless_unscaled, copy=False)
        except Exception:
            # If not consistent with a dimensionless quantity, try downgrading
            # self to a quantity and see if things work.
            try:
                return self.to(u.day) * other
            except Exception:
                # The various ways we could multiply all failed;
                # returning NotImplemented to give other a final chance.
                return NotImplemented
        jd1, jd2 = day_frac(self.jd1, self.jd2, factor=other.value)
        out = TimeDelta(jd1, jd2, format='jd', scale=self.scale)
        if self.format != 'jd':
            out = out.replicate(format=self.format)
        return out

    def __rmul__(self, other):
        """Multiplication of numbers/arrays with `TimeDelta` objects."""
        return self.__mul__(other)

    def __truediv__(self, other):
        """Division of `TimeDelta` objects by numbers/arrays."""
        # Cannot do __mul__(1./other) as that loses precision
        if ((isinstance(other, u.UnitBase)
             and other == u.dimensionless_unscaled)
                or (isinstance(other, str) and other == '')):
            return self.copy()
        # If other is something consistent with a dimensionless quantity
        # (could just be a float or an array), then we can just divide in.
        try:
            other = u.Quantity(other, u.dimensionless_unscaled, copy=False)
        except Exception:
            # If not consistent with a dimensionless quantity, try downgrading
            # self to a quantity and see if things work.
            try:
                return self.to(u.day) / other
            except Exception:
                # The various ways we could divide all failed;
                # returning NotImplemented to give other a final chance.
                return NotImplemented
        jd1, jd2 = day_frac(self.jd1, self.jd2, divisor=other.value)
        out = TimeDelta(jd1, jd2, format='jd', scale=self.scale)
        if self.format != 'jd':
            out = out.replicate(format=self.format)
        return out

    def __rtruediv__(self, other):
        """Division by `TimeDelta` objects of numbers/arrays."""
        # Here, we do not have to worry about returning NotImplemented,
        # since other has already had a chance to look at us.
        return other / self.to(u.day)

    def to(self, unit, equivalencies=[]):
        """
        Convert to a quantity in the specified unit.

        Parameters
        ----------
        unit : `~astropy.units.UnitBase` instance, str
            The unit to convert to.
        equivalencies : list of equivalence pairs, optional
            A list of equivalence pairs to try if the units are not directly
            convertible (see :ref:`unit_equivalencies`). If `None`, no
            equivalencies will be applied at all, not even any set globally
            or within a context.

        Returns
        -------
        quantity : `~astropy.units.Quantity`
            The quantity in the units specified.

        See also
        --------
        to_value : get the numerical value in a given unit.
        """
        return u.Quantity(self._time.jd1 + self._time.jd2,
                          u.day).to(unit, equivalencies=equivalencies)

    def to_value(self, *args, **kwargs):
        """Get time delta values expressed in specified output format or unit.

        This method is flexible and handles both conversion to a specified
        ``TimeDelta`` format / sub-format AND conversion to a specified unit.
        If positional argument(s) are provided then the first one is checked
        to see if it is a valid ``TimeDelta`` format, and next it is checked
        to see if it is a valid unit or unit string.

        To convert to a ``TimeDelta`` format and optional sub-format the options
        are::

          tm = TimeDelta(1.0 * u.s)
          tm.to_value('jd')  # equivalent of tm.jd
          tm.to_value('jd', 'decimal')  # convert to 'jd' as a Decimal object
          tm.to_value('jd', subfmt='decimal')
          tm.to_value(format='jd', subfmt='decimal')

        To convert to a unit with optional equivalencies, the options are::

          tm.to_value('hr')  # convert to u.hr (hours)
          tm.to_value('hr', [])  # specify equivalencies as a positional arg
          tm.to_value('hr', equivalencies=[])
          tm.to_value(unit='hr', equivalencies=[])

        The built-in `~astropy.time.TimeDelta` options for ``format`` are:
        {'jd', 'sec', 'datetime'}.

        For the two numerical formats 'jd' and 'sec', the available ``subfmt``
        options are: {'float', 'long', 'decimal', 'str', 'bytes'}. Here, 'long'
        uses ``numpy.longdouble`` for somewhat enhanced precision (with the
        enhancement depending on platform), and 'decimal' instances of
        :class:`decimal.Decimal` for full precision. For the 'str' and 'bytes'
        sub-formats, the number of digits is also chosen such that time values
        are represented accurately. Default: as set by ``out_subfmt`` (which by
        default picks the first available for a given format, i.e., 'float').

        Parameters
        ----------
        format : str, optional
            The format in which one wants the `~astropy.time.TimeDelta` values.
            Default: the current format.
        subfmt : str, optional
            Possible sub-format in which the values should be given. Default: as
            set by ``out_subfmt`` (which by default picks the first available
            for a given format, i.e., 'float' or 'date_hms').
        unit : `~astropy.units.UnitBase` instance or str, optional
            The unit in which the value should be given.
        equivalencies : list of equivalence pairs, optional
            A list of equivalence pairs to try if the units are not directly
            convertible (see :ref:`unit_equivalencies`). If `None`, no
            equivalencies will be applied at all, not even any set globally or
            within a context.

        Returns
        -------
        value : `~numpy.ndarray` or scalar
            The value in the format or units specified.

        See also
        --------
        to : Convert to a `~astropy.units.Quantity` instance in a given unit.
        value : The time value in the current format.
        """
        if not (args or kwargs):
            raise TypeError('to_value() missing required format or unit argument')
        # TODO: maybe allow 'subfmt' also for units, keeping full precision
        # (effectively, by doing the reverse of quantity_day_frac)?
        # This way, only equivalencies could lead to possible precision loss.
        if ('format' in kwargs
                or (args != () and (args[0] is None or args[0] in self.FORMATS))):
            # Super-class will error with duplicate arguments, etc.
            return super().to_value(*args, **kwargs)
        # With positional arguments, we try parsing the first one as a unit,
        # so that on failure we can give a more informative exception.
        if args:
            try:
                unit = u.Unit(args[0])
            except ValueError as exc:
                raise ValueError("first argument is not one of the known "
                                 "formats ({}) and failed to parse as a unit."
                                 .format(list(self.FORMATS))) from exc
            args = (unit,) + args[1:]
        return u.Quantity(self._time.jd1 + self._time.jd2,
                          u.day).to_value(*args, **kwargs)

    def _make_value_equivalent(self, item, value):
        """Coerce setitem value into an equivalent TimeDelta object"""
        if not isinstance(value, TimeDelta):
            try:
                value = self.__class__(value, scale=self.scale, format=self.format)
            except Exception as err:
                raise ValueError('cannot convert value to a compatible TimeDelta '
                                 'object: {}'.format(err))
        return value

    def isclose(self, other, atol=None, rtol=0.0):
        """Returns a boolean or boolean array where two TimeDelta objects are
        element-wise equal within a time tolerance.

        This effectively evaluates the expression below::

          abs(self - other) <= atol + rtol * abs(other)

        Parameters
        ----------
        other : `~astropy.units.Quantity` or `~astropy.time.TimeDelta`
            Quantity or TimeDelta object for comparison.
        atol : `~astropy.units.Quantity` or `~astropy.time.TimeDelta`
            Absolute tolerance for equality with units of time (e.g. ``u.s`` or
            ``u.day``). Default is one bit in the 128-bit JD time representation,
            equivalent to about 20 picosecs.
        rtol : float
            Relative tolerance for equality
        """
        try:
            other_day = other.to_value(u.day)
        except Exception as err:
            raise TypeError(f"'other' argument must support conversion to days: {err}")
        if atol is None:
            # One float64 eps of a day, i.e. roughly 20 picoseconds.
            atol = np.finfo(float).eps * u.day
        if not isinstance(atol, (u.Quantity, TimeDelta)):
            raise TypeError("'atol' argument must be a Quantity or TimeDelta instance, got "
                            f'{atol.__class__.__name__} instead')
        return np.isclose(self.to_value(u.day), other_day,
                          rtol=rtol, atol=atol.to_value(u.day))
class ScaleValueError(Exception):
    """Raised when a time scale value is invalid in the current context."""
    pass
def _make_array(val, copy=False):
"""
Take ``val`` and convert/reshape to an array. If ``copy`` is `True`
then copy input values.
Returns
-------
val : ndarray
Array version of ``val``.
"""
if isinstance(val, (tuple, list)) and len(val) > 0 and isinstance(val[0], Time):
dtype = object
else:
dtype = None
val = np.array(val, copy=copy, subok=True, dtype=dtype)
# Allow only float64, string or object arrays as input
# (object is for datetime, maybe add more specific test later?)
# This also ensures the right byteorder for float64 (closes #2942).
if val.dtype.kind == "f" and val.dtype.itemsize >= np.dtype(np.float64).itemsize:
pass
elif val.dtype.kind in 'OSUMaV':
pass
else:
val = np.asanyarray(val, dtype=np.float64)
return val
def _check_for_masked_and_fill(val, val2):
"""
If ``val`` or ``val2`` are masked arrays then fill them and cast
to ndarray.
Returns a mask corresponding to the logical-or of masked elements
in ``val`` and ``val2``. If neither is masked then the return ``mask``
is ``None``.
If either ``val`` or ``val2`` are masked then they are replaced
with filled versions of themselves.
Parameters
----------
val : ndarray or MaskedArray
Input val
val2 : ndarray or MaskedArray
Input val2
Returns
-------
mask, val, val2: ndarray or None
Mask: (None or bool ndarray), val, val2: ndarray
"""
def get_as_filled_ndarray(mask, val):
"""
Fill the given MaskedArray ``val`` from the first non-masked
element in the array. This ensures that upstream Time initialization
will succeed.
Note that nothing happens if there are no masked elements.
"""
fill_value = None
if np.any(val.mask):
# Final mask is the logical-or of inputs
mask = mask | val.mask
# First unmasked element. If all elements are masked then
# use fill_value=None from above which will use val.fill_value.
# As long as the user has set this appropriately then all will
# be fine.
val_unmasked = val.compressed() # 1-d ndarray of unmasked values
if len(val_unmasked) > 0:
fill_value = val_unmasked[0]
# Fill the input ``val``. If fill_value is None then this just returns
# an ndarray view of val (no copy).
val = val.filled(fill_value)
return mask, val
mask = False
if isinstance(val, np.ma.MaskedArray):
mask, val = get_as_filled_ndarray(mask, val)
if isinstance(val2, np.ma.MaskedArray):
mask, val2 = get_as_filled_ndarray(mask, val2)
return mask, val, val2
class OperandTypeError(TypeError):
    """TypeError raised when an operation receives operand types it
    cannot combine (e.g. ``Time + Time``)."""

    def __init__(self, left, right, op=None):
        # Mention the operator only when one was given.
        suffix = f' for {op}' if op is not None else ''
        left_name = left.__class__.__name__
        right_name = right.__class__.__name__
        super().__init__(
            f"Unsupported operand type(s){suffix}: "
            f"'{left_name}' and '{right_name}'")
def update_leap_seconds(files=None):
    """If the current ERFA leap second table is out of date, try to update it.

    Uses `astropy.utils.iers.LeapSeconds.auto_open` to try to find an
    up-to-date table. See that routine for the definition of "out of date".

    In order to make it safe to call this any time, all exceptions are turned
    into warnings.

    Parameters
    ----------
    files : list of path, optional
        List of files/URLs to attempt to open. By default, uses the list
        defined by `astropy.utils.iers.LeapSeconds.auto_open`, which includes
        the table used by ERFA itself, so if that is up to date, nothing will
        happen.

    Returns
    -------
    n_update : int
        Number of items updated.
    """
    try:
        # Imported here (not at module level) to avoid a hard import-time
        # dependency cycle with astropy.utils.iers.
        from astropy.utils import iers
        table = iers.LeapSeconds.auto_open(files)
        return erfa.leap_seconds.update(table)
    except Exception as exc:
        # Best-effort by design: report the failure but never raise.
        warn("leap-second auto-update failed due to the following "
             f"exception: {exc!r}", AstropyWarning)
        return 0
|
bsd-3-clause
|
DataDog/integrations-core
|
gitlab_runner/tests/common.py
|
1
|
1976
|
# (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import os
from datadog_checks.base.utils.common import get_docker_hostname
# Directory containing this test module; used for locating fixtures.
HERE = os.path.dirname(os.path.abspath(__file__))
# Networking
HOST = get_docker_hostname()
GITLAB_TEST_TOKEN = "ddtesttoken"
GITLAB_LOCAL_MASTER_PORT = 8085
GITLAB_LOCAL_RUNNER_PORT = 8087
GITLAB_MASTER_URL = "http://{}:{}".format(HOST, GITLAB_LOCAL_MASTER_PORT)
GITLAB_RUNNER_URL = "http://{}:{}/metrics".format(HOST, GITLAB_LOCAL_RUNNER_PORT)
GITLAB_RUNNER_TAGS = ['gitlab_host:{}'.format(HOST), 'gitlab_port:{}'.format(GITLAB_LOCAL_MASTER_PORT)]
# Runner version under test comes from the CI environment; a missing
# variable fails fast with KeyError (intentional for CI setups).
GITLAB_RUNNER_VERSION = os.environ['GITLAB_RUNNER_VERSION']
CUSTOM_TAGS = ['optional:tag1']
# Note that this is a subset of the ones defined in GitlabCheck
# When we stand up a clean test infrastructure some of those metrics might not
# be available yet, hence we validate a stable subset
ALLOWED_METRICS = [
    'ci_runner_errors',
    'ci_runner_version_info',
    'process_max_fds',
    'process_open_fds',
    'process_resident_memory_bytes',
    'process_start_time_seconds',
    'process_virtual_memory_bytes',
]
# Working configuration pointing at the local docker-compose environment.
CONFIG = {
    'init_config': {'allowed_metrics': ALLOWED_METRICS},
    'instances': [
        {
            'prometheus_endpoint': GITLAB_RUNNER_URL,
            'gitlab_url': '{}/ci'.format(GITLAB_MASTER_URL),
            'send_monotonic_counter': True,
            'disable_ssl_validation': True,
            'tags': list(CUSTOM_TAGS),
        }
    ],
    'logs': [
        {
            "type": "docker",
            "source": "gitlab-runner",
        }
    ],
}
# Configuration with unreachable endpoints, for negative-path tests.
BAD_CONFIG = {
    'init_config': {'allowed_metrics': ALLOWED_METRICS},
    'instances': [
        {
            'prometheus_endpoint': 'http://{}:1234/metrics'.format(HOST),
            'gitlab_url': 'http://{}:1234/ci'.format(HOST),
            'disable_ssl_validation': True,
            'tags': list(CUSTOM_TAGS),
        }
    ],
}
|
bsd-3-clause
|
x111ong/django
|
tests/generic_views/models.py
|
382
|
1631
|
from django.core.urlresolvers import reverse
from django.db import models
from django.db.models import QuerySet
from django.db.models.manager import BaseManager
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Artist(models.Model):
    """Simple model with custom verbose names, ordered by name."""
    name = models.CharField(max_length=100)

    class Meta:
        ordering = ['name']
        verbose_name = 'professional artist'
        verbose_name_plural = 'professional artists'

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return reverse('artist_detail', kwargs={'pk': self.id})
@python_2_unicode_compatible
class Author(models.Model):
    """Author with a slug field, ordered by name."""
    name = models.CharField(max_length=100)
    slug = models.SlugField()

    class Meta:
        ordering = ['name']

    def __str__(self):
        return self.name
class DoesNotExistQuerySet(QuerySet):
    """QuerySet whose ``get`` always raises ``Author.DoesNotExist``
    (used to exercise generic-view error handling)."""
    def get(self, *args, **kwargs):
        raise Author.DoesNotExist


# Manager built from the always-failing queryset above.
DoesNotExistBookManager = BaseManager.from_queryset(DoesNotExistQuerySet)
@python_2_unicode_compatible
class Book(models.Model):
    """Book with M2M authors and an extra manager whose ``get`` fails."""
    name = models.CharField(max_length=300)
    slug = models.SlugField()
    pages = models.IntegerField()
    authors = models.ManyToManyField(Author)
    pubdate = models.DateField()
    objects = models.Manager()
    # Alternate manager whose get() always raises Author.DoesNotExist.
    does_not_exist = DoesNotExistBookManager()

    class Meta:
        ordering = ['-pubdate']

    def __str__(self):
        return self.name
class Page(models.Model):
    """Free-form page content rendered with a per-page template name."""
    content = models.TextField()
    template = models.CharField(max_length=300)
class BookSigning(models.Model):
    """Event with only a datetime, for date-based generic view tests."""
    event_date = models.DateTimeField()
|
bsd-3-clause
|
claudio-bonati/sun-topo-c
|
tools/analisi_top.py
|
1
|
2132
|
#!/usr/bin/env python3
import os, glob, sys
import numpy as np
from scipy.optimize import fmin
# Usage/help text describing the output columns of the analysis.
out_form="The output is #latox, latoy, latoz, latot, beta and then\n"
out_form+="i, <Q^2>/vol, b_2, b_4, k_1/vol, k_2/vol, k_3/vol, k_4/vol, <Q Q_{noncool}>/<Q^2> \n"
out_form+="where Q=top. charge after i*cooling and k_n are its cumulants\n"
# Require at least 6 command-line arguments (three flag/value pairs);
# indexing argv[6] raises IndexError otherwise.
try:
  sys.argv[6]
except:
  print("Use: "+ sys.argv[0] + " -i input_file -o output_file -b block"); print(out_form); sys.exit(1)
# Parse the -i/-o/-b flag/value pairs (flags sit at odd positions 1, 3, 5).
for i in range(1,6,2):
  if(sys.argv[i]==r'-i'):
    file=sys.argv[i+1]  # NOTE(review): shadows the builtin ``file`` (py2 heritage)
  elif(sys.argv[i]==r'-o'):
    output=sys.argv[i+1]
  elif(sys.argv[i]==r'-b'):
    blocco=int(sys.argv[i+1])  # jackknife block size
#data acquisition: read the whole data file into memory.
# A context manager guarantees the file is closed even if reading fails.
with open(file, 'r') as in_file:
  cont_file=in_file.readlines()
# First line is a header, so the sample size is one less than the line count.
campione=len(cont_file)-1 ## sample dimension definition
# After the first 5 columns, each cooling step contributes two columns
# (cooled and non-cooled topological charge); use integer division instead
# of the redundant double int() cast.
num_top_charge=(len(cont_file[1].split())-5)//2 ## number of topological charges measured after cooling
#function to minimize
def residual(s, col):
  """Sum of squared distances of s*Q from the nearest integers, over all
  data rows; minimal when s rescales column ``col`` to integer charges."""
  total=0.0
  for row in cont_file[1:]:
    q=float(row.split()[col])
    diff=q*s-np.round(q*s, 0)
    total+=diff*diff
  return total
## compute the scale factor (only once; cached in scale_factor.dat)
if not os.path.isfile("./scale_factor.dat"):
  scale=np.empty(num_top_charge);
  for col1 in range(0, num_top_charge, 1):
    col=5+2*col1
    scal=1.1;  # initial guess for the simplex minimizer
    ris=fmin(residual, scal, args=(col,), xtol=1.0e-6)
    print(ris)
    scal=ris[0]
    scale[col1]=ris[0]
  ## print the rescaling factors
  out_file=open('scale_factor.dat','w')
  for i in range(0, num_top_charge, 1):
    out_file.write("%f\n" %(scale[i]) )
  out_file.close()
## print input file (consumed by the compiled C++ analysis below)
out_file=open('input_analysis','w')
out_file.write("""
datafile %s #input file
outfile %s #output file
block %d #block for jacknife
sample %d #number of lines
numobs %d #number of observables
""" % (file, output, blocco, campione, 1+num_top_charge))
out_file.close()
## compile and execute the C++ analysis program
os.system('g++ -Wall --pedantic -O3 ANALISI_TOP.cc -o ANALISI')
os.system('./ANALISI input_analysis')
## clean up the binary and temporary input file
os.system('rm ANALISI')
os.system('rm input_analysis')
|
gpl-3.0
|
JioEducation/edx-platform
|
common/test/acceptance/tests/lms/test_library.py
|
20
|
13509
|
# -*- coding: utf-8 -*-
"""
End-to-end tests for LibraryContent block in LMS
"""
import ddt
import textwrap
from nose.plugins.attrib import attr
from ..helpers import UniqueCourseTest, TestWithSearchIndexMixin
from ...pages.studio.auto_auth import AutoAuthPage
from ...pages.studio.overview import CourseOutlinePage
from ...pages.studio.library import StudioLibraryContentEditor, StudioLibraryContainerXBlockWrapper
from ...pages.lms.courseware import CoursewarePage
from ...pages.lms.library import LibraryContentXBlockWrapper
from ...pages.common.logout import LogoutPage
from ...fixtures.course import CourseFixture, XBlockFixtureDesc
from ...fixtures.library import LibraryFixture
SECTION_NAME = 'Test Section'
SUBSECTION_NAME = 'Test Subsection'
UNIT_NAME = 'Test Unit'
@attr('shard_7')
class LibraryContentTestBase(UniqueCourseTest):
    """ Base class for library content block tests """
    # Credentials for the student and staff accounts created via AutoAuthPage.
    USERNAME = "STUDENT_TESTER"
    EMAIL = "[email protected]"
    STAFF_USERNAME = "STAFF_TESTER"
    STAFF_EMAIL = "[email protected]"

    def populate_library_fixture(self, library_fixture):
        """
        To be overwritten by subclassed tests. Used to install a library to
        run tests on.
        """

    def setUp(self):
        """
        Set up library, course and library content XBlock
        """
        super(LibraryContentTestBase, self).setUp()
        self.courseware_page = CoursewarePage(self.browser, self.course_id)
        self.course_outline = CourseOutlinePage(
            self.browser,
            self.course_info['org'],
            self.course_info['number'],
            self.course_info['run']
        )
        self.library_fixture = LibraryFixture('test_org', self.unique_id, 'Test Library {}'.format(self.unique_id))
        self.populate_library_fixture(self.library_fixture)
        self.library_fixture.install()
        self.library_info = self.library_fixture.library_info
        self.library_key = self.library_fixture.library_key
        # Install a course with library content xblock
        self.course_fixture = CourseFixture(
            self.course_info['org'], self.course_info['number'],
            self.course_info['run'], self.course_info['display_name']
        )
        library_content_metadata = {
            'source_library_id': unicode(self.library_key),
            'mode': 'random',
            'max_count': 1,
            'has_score': False
        }
        self.lib_block = XBlockFixtureDesc('library_content', "Library Content", metadata=library_content_metadata)
        # Nest the library block: chapter -> sequential -> vertical -> block.
        self.course_fixture.add_children(
            XBlockFixtureDesc('chapter', SECTION_NAME).add_children(
                XBlockFixtureDesc('sequential', SUBSECTION_NAME).add_children(
                    XBlockFixtureDesc('vertical', UNIT_NAME).add_children(
                        self.lib_block
                    )
                )
            )
        )
        self.course_fixture.install()

    def _change_library_content_settings(self, count=1, capa_type=None):
        """
        Performs library block refresh in Studio, configuring it to show {count} children
        """
        unit_page = self._go_to_unit_page(True)
        library_container_block = StudioLibraryContainerXBlockWrapper.from_xblock_wrapper(unit_page.xblocks[1])
        library_container_block.edit()
        editor = StudioLibraryContentEditor(self.browser, library_container_block.locator)
        editor.count = count
        if capa_type is not None:
            editor.capa_type = capa_type
        editor.save()
        # Re-open the unit (same login) and publish so the LMS sees the change.
        self._go_to_unit_page(change_login=False)
        unit_page.wait_for_page()
        unit_page.publish_action.click()
        unit_page.wait_for_ajax()
        self.assertIn("Published and Live", unit_page.publish_title)

    @property
    def library_xblocks_texts(self):
        """
        Gets texts of all xblocks in library
        """
        return frozenset(child.data for child in self.library_fixture.children)

    def _go_to_unit_page(self, change_login=True):
        """
        Open unit page in Studio
        """
        if change_login:
            LogoutPage(self.browser).visit()
            self._auto_auth(self.STAFF_USERNAME, self.STAFF_EMAIL, True)
        self.course_outline.visit()
        subsection = self.course_outline.section(SECTION_NAME).subsection(SUBSECTION_NAME)
        return subsection.expand_subsection().unit(UNIT_NAME).go_to()

    def _goto_library_block_page(self, block_id=None):
        """
        Open library page in LMS
        """
        self.courseware_page.visit()
        paragraphs = self.courseware_page.q(css='.course-content p').results
        if not paragraphs:
            self.courseware_page.q(css='.menu-item a').results[0].click()
        block_id = block_id if block_id is not None else self.lib_block.locator
        #pylint: disable=attribute-defined-outside-init
        self.library_content_page = LibraryContentXBlockWrapper(self.browser, block_id)
        self.library_content_page.wait_for_page()

    def _auto_auth(self, username, email, staff):
        """
        Logout and login with given credentials.
        """
        AutoAuthPage(self.browser, username=username, email=email,
                     course_id=self.course_id, staff=staff).visit()
@ddt.ddt
@attr('shard_7')
class LibraryContentTest(LibraryContentTestBase):
    """
    Test courseware.
    """
    def populate_library_fixture(self, library_fixture):
        """
        Populates library fixture with XBlock Fixtures
        """
        library_fixture.add_children(
            XBlockFixtureDesc("html", "Html1", data='html1'),
            XBlockFixtureDesc("html", "Html2", data='html2'),
            XBlockFixtureDesc("html", "Html3", data='html3'),
            XBlockFixtureDesc("html", "Html4", data='html4'),
        )

    @ddt.data(2, 3, 4)
    def test_shows_random_xblocks_from_configured(self, count):
        """
        Scenario: Ensures that library content shows {count} random xblocks from library in LMS
        Given I have a library, a course and a LibraryContent block in that course
        When I go to studio unit page for library content xblock as staff
        And I set library content xblock to display {count} random children
        And I refresh library content xblock and publish unit
        When I go to LMS courseware page for library content xblock as student
        Then I can see {count} random xblocks from the library
        """
        self._change_library_content_settings(count=count)
        self._auto_auth(self.USERNAME, self.EMAIL, False)
        self._goto_library_block_page()
        children_contents = self.library_content_page.children_contents
        self.assertEqual(len(children_contents), count)
        # Shown blocks must be a subset of the library's blocks.
        self.assertLessEqual(children_contents, self.library_xblocks_texts)

    def test_shows_all_if_max_set_to_greater_value(self):
        """
        Scenario: Ensures that library content shows all xblocks from library in LMS
        Given I have a library, a course and a LibraryContent block in that course
        When I go to studio unit page for library content xblock as staff
        And I set library content xblock to display more children than library have
        And I refresh library content xblock and publish unit
        When I go to LMS courseware page for library content xblock as student
        Then I can see all xblocks from the library
        """
        self._change_library_content_settings(count=10)
        self._auto_auth(self.USERNAME, self.EMAIL, False)
        self._goto_library_block_page()
        children_contents = self.library_content_page.children_contents
        # The library has exactly 4 children, so all of them must appear.
        self.assertEqual(len(children_contents), 4)
        self.assertEqual(children_contents, self.library_xblocks_texts)
@ddt.ddt
@attr('shard_7')
class StudioLibraryContainerCapaFilterTest(LibraryContentTestBase, TestWithSearchIndexMixin):
"""
Test Library Content block in LMS
"""
def setUp(self):
""" SetUp method """
self._create_search_index()
super(StudioLibraryContainerCapaFilterTest, self).setUp()
def tearDown(self):
self._cleanup_index_file()
super(StudioLibraryContainerCapaFilterTest, self).tearDown()
def _get_problem_choice_group_text(self, name, items):
""" Generates Choice Group CAPA problem XML """
items_text = "\n".join([
"<choice correct='{correct}'>{item}</choice>".format(correct=correct, item=item)
for item, correct in items
])
return textwrap.dedent("""
<problem>
<p>{name}</p>
<multiplechoiceresponse>
<choicegroup label="{name}" type="MultipleChoice">{items}</choicegroup>
</multiplechoiceresponse>
</problem>""").format(name=name, items=items_text)
def _get_problem_select_text(self, name, items, correct):
""" Generates Select Option CAPA problem XML """
items_text = ",".join(["'{0}'".format(item) for item in items])
return textwrap.dedent("""
<problem>
<p>{name}</p>
<optionresponse>
<optioninput label="{name}" options="({options})" correct="{correct}"></optioninput>
</optionresponse>
</problem>""").format(name=name, options=items_text, correct=correct)
def populate_library_fixture(self, library_fixture):
"""
Populates library fixture with XBlock Fixtures
"""
items = (
XBlockFixtureDesc(
"problem", "Problem Choice Group 1",
data=self._get_problem_choice_group_text("Problem Choice Group 1 Text", [("1", False), ('2', True)])
),
XBlockFixtureDesc(
"problem", "Problem Choice Group 2",
data=self._get_problem_choice_group_text("Problem Choice Group 2 Text", [("Q", True), ('W', False)])
),
XBlockFixtureDesc(
"problem", "Problem Select 1",
data=self._get_problem_select_text("Problem Select 1 Text", ["Option 1", "Option 2"], "Option 1")
),
XBlockFixtureDesc(
"problem", "Problem Select 2",
data=self._get_problem_select_text("Problem Select 2 Text", ["Option 3", "Option 4"], "Option 4")
),
)
library_fixture.add_children(*items)
@property
def _problem_headers(self):
""" Expected XBLock headers according to populate_library_fixture """
return frozenset(child.display_name for child in self.library_fixture.children)
    def _set_library_content_settings(self, count=1, capa_type="Any Type"):
        """
        Sets library content XBlock parameters, saves, publishes unit, goes to LMS unit page and
        gets children XBlock headers to assert against them
        """
        # Reconfigure the library_content block, then view it in the LMS as a
        # freshly authenticated user and report which child XBlocks were rendered.
        self._change_library_content_settings(count=count, capa_type=capa_type)
        self._auto_auth(self.USERNAME, self.EMAIL, False)  # NOTE(review): False presumably means non-staff -- confirm
        self._goto_library_block_page()
        return self.library_content_page.children_headers
    def test_problem_type_selector(self):
        """
        Scenario: Ensure setting "Any Type" for Problem Type does not filter out Problems
        Given I have a library with two "Dropdown" and two "Multiple Choice" problems, and a course containing
        LibraryContent XBlock configured to draw XBlocks from that library
        When I set library content xblock Problem Type to "Any Type" and Count to 3 and publish unit
        When I go to LMS courseware page for library content xblock as student
        Then I can see 3 xblocks from the library of any type
        When I set library content xblock Problem Type to "Multiple Choice" and Count to 1 and publish unit
        When I go to LMS courseware page for library content xblock as student
        Then I can see 1 xblock from the library of "Multiple Choice" type
        When I set library content xblock Problem Type to "Dropdown" and Count to 2 and publish unit
        When I go to LMS courseware page for library content xblock as student
        Then I can see 2 xblocks from the library of "Dropdown" type
        When I set library content xblock Problem Type to "Custom Evaluated Script" and Count to 2 and publish unit
        When I go to LMS courseware page for library content xblock as student
        Then I can see 0 xblocks from the library
        """
        # "Any Type": all four library problems are eligible, so exactly `count` are drawn.
        children_headers = self._set_library_content_settings(count=3, capa_type="Any Type")
        self.assertEqual(len(children_headers), 3)
        self.assertLessEqual(children_headers, self._problem_headers)
        # Multiple-choice (choice group) filter test
        children_headers = self._set_library_content_settings(count=1, capa_type="Multiple Choice")
        self.assertEqual(len(children_headers), 1)
        self.assertLessEqual(
            children_headers,
            set(["Problem Choice Group 1", "Problem Choice Group 2"])
        )
        # Dropdown (select option) filter test -- count equals the number of matches,
        # so the drawn set must be exactly the two select problems.
        children_headers = self._set_library_content_settings(count=2, capa_type="Dropdown")
        self.assertEqual(len(children_headers), 2)
        self.assertEqual(
            children_headers,
            set(["Problem Select 1", "Problem Select 2"])
        )
        # A problem type with no matches in the library yields no children at all.
        children_headers = self._set_library_content_settings(count=2, capa_type="Custom Evaluated Script")
        self.assertEqual(children_headers, set())
|
agpl-3.0
|
c2g14/cda_0628
|
static/Brython3.1.1-20150328-091302/Lib/ui/dialog.py
|
607
|
4994
|
from . import widget
from browser import html, document
class Dialog(widget.DraggableWidget):
    # A draggable, jQuery-UI-styled dialog assembled from raw DOM nodes.
    # NOTE: in Brython, ``parent <= child`` appends ``child`` to ``parent``.
    def __init__(self, id=None):
        """Build the dialog shell, title bar, close button, content area and resize handles."""
        self._div_shell=html.DIV(
            Class="ui-dialog ui-widget ui-widget-content ui-corner-all ui-front ui-draggable ui-resizable",
            style={'position': 'absolute', 'height': 'auto', 'width': '300px',
                   'top': '98px', 'left': '140px', 'display': 'block'})
        widget.DraggableWidget.__init__(self, self._div_shell, 'dialog', id)
        _div_titlebar=html.DIV(Id="titlebar",
            Class="ui-dialog-titlebar ui-widget-header ui-corner-all ui-helper-clearfix")
        self._div_shell <= _div_titlebar
        self._div_title=html.SPAN(Id="title", Class="ui-dialog-title")
        _div_titlebar <= self._div_title
        self._title_button=html.BUTTON(Title="close",
            Class="ui-button ui-widget ui-state-default ui-corner-all ui-button-icon-only ui-dialog-titlebar-close")
        def dialog_close(e):
            # Clicking the close button deletes the whole dialog element from the document.
            #del document[self._div_shell.id]
            del document[self._div_shell.id]
        self._title_button.bind('click', dialog_close)
        _span=html.SPAN(Class="ui-button-icon-primary ui-icon ui-icon-closethick")
        self._title_button <= _span
        _span=html.SPAN('close', Class="ui-button-text")
        self._title_button <= _span
        _div_titlebar <= self._title_button
        self._div_dialog=html.DIV(Class="ui-dialog-content ui-widget-content",
            style={'width': 'auto', 'min-height': '105px',
                   'max-height': 'none', 'height': 'auto'})
        self._div_shell <= self._div_dialog
        # One resize handle per edge/corner; only the south-east corner gets the grip icon.
        for _i in ['n', 'e', 's', 'w', 'se', 'sw', 'ne', 'nw']:
            if _i == 'se':
                _class="ui-resizable-handle ui-resizable-%s ui-icon ui-icon-gripsmall-diagonal-%s" % (_i, _i)
            else:
                _class="ui-resizable-handle ui-resizable-%s" % _i
            self._div_shell <= html.DIV(Class=_class, style={'z-index': '90'})
        document <= self._div_shell

    def set_title(self, title):
        """Set the text shown in the dialog title bar."""
        self._div_title.set_text(title)

    def set_body(self, body):
        """Replace the dialog content area with the given HTML string."""
        self._div_dialog.set_html(body)
class EntryDialog(Dialog):
    """Dialog prompting for a single line of text; calls ``action(value)`` on Ok."""

    def __init__(self, title, prompt, action, _id=None):
        Dialog.__init__(self, _id)
        self.set_title(title)
        self.action = action
        prompt_div = html.DIV(prompt, Class="ui-widget",
                              style=dict(float="left",paddingRight="10px"))
        self.entry = html.INPUT()
        content = html.DIV(prompt_div+self.entry,
                           style={'padding':'15px'})
        ok_button = html.BUTTON("Ok")
        ok_button.bind('click', self.ok)
        cancel_button = html.BUTTON("Cancel")
        cancel_button.bind('click', self.cancel)
        content += html.DIV(ok_button+cancel_button, style={'padding':'15px'})
        self._div_dialog <= content

    def ok(self, ev):
        """Read the entered value, invoke the callback with it, and dismiss the dialog."""
        self.result = self._div_shell.get(selector='INPUT')[0].value
        self.action(self.result)
        document.remove(self._div_shell)

    def cancel(self, ev):
        """Dismiss the dialog without invoking the callback."""
        document.remove(self._div_shell)
class SelectDialog(Dialog):
    """Dialog offering a drop-down of options; calls ``action(option)`` on Ok."""

    def __init__(self, title, prompt, options, action, _id=None):
        Dialog.__init__(self, _id)
        self.set_title(title)
        self.options = options
        self.action = action
        prompt_div = html.DIV(prompt, Class="ui-widget",
                              style=dict(float="left",paddingRight="10px"))
        self.select = html.SELECT()
        for label in options:
            self.select <= html.OPTION(label)
        content = html.DIV(prompt_div+self.select,
                           style={'padding':'15px'})
        ok_button = html.BUTTON("Ok")
        ok_button.bind('click', self.ok)
        cancel_button = html.BUTTON("Cancel")
        cancel_button.bind('click', self.cancel)
        content += html.DIV(ok_button+cancel_button, style={'padding':'15px'})
        self._div_dialog <= content

    def ok(self, ev):
        """Invoke the callback with the currently selected option, then dismiss."""
        chosen = self._div_shell.get(selector='SELECT')[0].selectedIndex
        document.remove(self._div_shell)
        self.action(self.options[chosen])

    def cancel(self, ev):
        """Dismiss the dialog without invoking the callback."""
        document.remove(self._div_shell)
class YesNoDialog(Dialog):
    """Confirmation dialog: runs one callback on Yes, another (optional) on No."""

    def __init__(self, title, prompt, action_if_yes, action_if_no, _id=None):
        Dialog.__init__(self, _id)
        self.set_title(title)
        self.action_if_yes = action_if_yes
        self.action_if_no = action_if_no
        prompt_div = html.DIV(prompt, Class="ui-widget",
                              style=dict(float="left",paddingRight="10px"))
        content = html.DIV(prompt_div, style={'padding':'15px'})
        yes_button = html.BUTTON("Yes")
        yes_button.bind('click', self.yes)
        no_button = html.BUTTON("No")
        no_button.bind('click', self.no)
        content += html.DIV(yes_button+no_button, style={'padding':'15px'})
        self._div_dialog <= content

    def yes(self, ev):
        """Dismiss the dialog, then run the Yes callback with this dialog as argument."""
        document.remove(self._div_shell)
        self.action_if_yes(self)

    def no(self, ev):
        """Dismiss the dialog, then run the No callback if one was supplied."""
        document.remove(self._div_shell)
        if self.action_if_no is not None:
            self.action_if_no(self)
|
agpl-3.0
|
hacksterio/pygments.rb
|
vendor/pygments-main/pygments/lexers/trafficscript.py
|
23
|
1546
|
# -*- coding: utf-8 -*-
"""
pygments.lexers.trafficscript
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Lexer for RiverBed's TrafficScript (RTS) language.
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer
from pygments.token import String, Number, Name, Keyword, Operator, Text, Comment
__all__ = ['RtsLexer']
class RtsLexer(RegexLexer):
    """
    For `Riverbed Stingray Traffic Manager <http://www.riverbed.com/stingray>`_
    .. versionadded:: 2.1
    """
    name = 'TrafficScript'
    aliases = ['rts','trafficscript']
    filenames = ['*.rts']
    tokens = {
        'root' : [
            # Single-quoted strings: backslash escapes allowed, no sub-state needed.
            (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
            # Double-quoted strings use a sub-state so escapes get their own token.
            (r'"', String, 'escapable-string'),
            (r'(0x[0-9a-fA-F]+|\d+)', Number),
            # NOTE(review): this float rule appears unreachable -- the integer rule
            # above already consumes the leading digits of "1.5"; confirm ordering.
            (r'\d+\.\d+', Number.Float),
            (r'\$[a-zA-Z](\w|_)*', Name.Variable),
            (r'(if|else|for(each)?|in|while|do|break|sub|return|import)', Keyword),
            (r'[a-zA-Z][\w.]*', Name.Function),
            # NOTE(review): this single-character operator rule precedes the
            # multi-character one below, so e.g. '>=' is emitted as two Operator
            # tokens (same token type either way); also '.=' below contains an
            # unescaped '.', matching any character before '='.
            (r'[-+*/%=,;(){}<>^.!~|&\[\]\?\:]', Operator),
            (r'(>=|<=|==|!=|'
             r'&&|\|\||'
             r'\+=|.=|-=|\*=|/=|%=|<<=|>>=|&=|\|=|\^=|'
             r'>>|<<|'
             r'\+\+|--|=>)', Operator),
            (r'[ \t\r]+', Text),
            (r'#[^\n]*', Comment),
        ],
        'escapable-string' : [
            (r'\\[tsn]', String.Escape),
            (r'[^"]', String),
            (r'"', String, '#pop'),
        ],
    }
|
mit
|
vrooje/pulsar-hunters-analysis
|
aggregate_pulsarclass.py
|
1
|
33505
|
#Python 2.7.9 (default, Apr  5 2015, 22:21:35)
import sys, os

# Default output filename. Defined BEFORE the usage message below: previously
# it was only defined further down the file, so running the script with no
# arguments raised a NameError instead of printing the usage text.
outfile_default = 'pulsar_aggregations.csv'

# file with raw classifications (csv)
# put this way up here so if there are no inputs we exit quickly before even trying to load everything else
try:
    classfile_in = sys.argv[1]
except IndexError:
    #classfile_in = 'data/2e3d12a2-56ca-4d1f-930a-9ecc7fd39885.csv'
    print("\nUsage: %s classifications_infile [weight_class aggregations_outfile]" % sys.argv[0])
    print("      classifications_infile is a Zooniverse (Panoptes) classifications data export CSV.")
    print("      weight_class is 1 if you want to calculate and apply user weightings, 0 otherwise.")
    print("      aggregations_outfile is the name of the file you want written. If you don't specify,")
    print("      the filename is %s by default." % outfile_default)
    sys.exit(0)
import json
import random

import numpy as np  # using 1.10.1
import pandas as pd  # using 0.13.1

#import datetime
#import dateutil.parser
############ Define files and settings below ##############
# default outfile
outfile_default = 'pulsar_aggregations.csv'
# NOTE(review): rankfile_stem appears unused in this script -- confirm before removing
rankfile_stem = 'subjects_ranked_by_weighted_class_asof_'
# file with tags left in Talk, for value-added columns below
talk_export_file = "project-764-tags_2016-01-15.json"
# file with master list between Zooniverse metadata image filename (no source coords) and
# original filename with source coords and additional info
# also I get to have a variable that uses "filename" twice where each means a different thing
# a filename for a file full of filenames #alliterationbiyotch
filename_master_list_filename = "HTRU-N_sets_keys.csv"
# this is a list of possible matches to known pulsars that was done after the fact so they
# are flagged as "cand" in the database instead of "known" etc.
poss_match_file = 'PossibleMatches.csv'
# later we will select on tags by the project team and possibly weight them differently
# note I've included the moderators and myself (though I didn't tag anything).
# Also note it's possible to do this in a more general fashion using a file with project users and roles
# However, hard-coding seemed the thing to do given our time constraints (and the fact that I don't think
# you can currently export the user role file from the project builder)
project_team = 'bretonr jocelynbb spindizzy Simon_Rookyard Polzin cristina_ilie jamesy23 ADCameron Prabu walkcr roblyon chiatan llevin benjamin_shaw bhaswati djchampion jwbmartin bstappers ElisabethB Capella05 vrooje'.split()
# define the active workflow - we will ignore all classifications not on this workflow
# we could make this an input but let's not get too fancy for a specific case.
# for beta test
#active_workflow_id = 1099
#active_workflow_major = 6
# for live project
active_workflow_id = 1224
active_workflow_major = 4
# do we want sum(weighted vote count) = sum(raw vote count)?
normalise_weights = True
# do we want to write an extra file with just classification counts and usernames
# (and a random color column, for treemaps)?
counts_out = True
counts_out_file = 'class_counts_colors.csv'
############ Set the other inputs now ###############
# Optional argv[2]: 1 to compute and apply user weights, 0 (default) otherwise.
# Narrowed from a bare except: missing argument raises IndexError, a
# non-numeric one raises ValueError; nothing else should be swallowed.
try:
    apply_weight = int(sys.argv[2])
except (IndexError, ValueError):
    apply_weight = 0

# Optional argv[3]: output filename; falls back to outfile_default above.
try:
    outfile = sys.argv[3]
except IndexError:
    outfile = outfile_default
#################################################################################
#################################################################################
#################################################################################
# This is the function that actually does the aggregating
def aggregate_class(grp):
    """Aggregate one subject's classifications into vote counts and fractions.

    Parameters
    ----------
    grp : DataFrame group
        All classifications of a single subject, with columns
        'pulsar_classification' ('Yes'/'No'), 'count', 'weight',
        'subject_type' and 'filename'.

    Returns
    -------
    pd.Series
        Raw and weighted vote fractions (p_Yes, p_No, p_Yes_weight,
        p_No_weight), raw/weighted totals, subject type and filename,
        in a fixed column order.
    """
    # translate the group to a dataframe (some indexing etc is different on a GroupBy)
    thegrp = pd.DataFrame(grp)

    # aggregating is a matter of grouping by different answers and summing the counts/weights
    byans = thegrp.groupby('pulsar_classification')
    ans_ct_tot = byans['count'].aggregate('sum')
    ans_wt_tot = byans['weight'].aggregate('sum')

    # we want fractions eventually, so we need denominators
    count_tot = np.sum(ans_ct_tot)  # we could also do len(thegrp)
    weight_tot = np.sum(ans_wt_tot)

    class_agg = {}
    class_agg['count_unweighted'] = count_tot
    class_agg['count_weighted'] = weight_tot
    class_agg['subject_type'] = thegrp.subject_type.unique()[0]
    class_agg['filename'] = thegrp.filename.unique()[0]

    # The workflow is a single Yes/No question. Fill BOTH answers explicitly so
    # that unanimous subjects (only one answer present) get a 0.0 fraction for
    # the other answer -- previously the fixed col_order lookup below raised a
    # KeyError for any subject where everyone agreed.
    for a in ('Yes', 'No'):
        raw_frac_label = ('p_'+a).replace(' ', '_')
        wt_frac_label = ('p_'+a+'_weight').replace(' ', '_')
        if a in ans_ct_tot.index:
            class_agg[raw_frac_label] = ans_ct_tot[a]/float(count_tot)
            class_agg[wt_frac_label] = ans_wt_tot[a]/float(weight_tot)
        else:
            class_agg[raw_frac_label] = 0.0
            class_agg[wt_frac_label] = 0.0

    # hard-coded Yes/No column order, matching the single question in this workflow
    col_order = ["filename", "p_Yes", "p_No", "p_Yes_weight", "p_No_weight",
                 "count_unweighted", "count_weighted", "subject_type"]
    return pd.Series(class_agg)[col_order]
#################################################################################
#################################################################################
#################################################################################
# The new weighting assignment function allows the user to choose between different weighting schemes
# though note the one in this function is not preferred for reasons explained below.
def assign_weight_old(seed):
    """Map a summed seed to a classifier weight of 1.0025**seed, clipped to [0.05, 3.0]."""
    if seed == 0:
        return 1.0
    weight = pow(1.0025, seed)
    # a negative seed can only push the weight below 1, a positive one above,
    # so each branch needs only one clip
    if seed < 0.:
        return max([0.05, weight])
    return min([3.0, weight])
# assigns a weight based on a seed parameter
# The weight is assigned using the seed as an exponent and the number below as the base.
# The number is just slightly offset from 1 so that it takes many classifications for
# a user's potential weight to cap out at the max weight (3) or bottom out at the min (0.05).
# Currently there are 641 "known" pulsars in the DB so the base of 1.025 is largely based on that.
# Update: there are now about 5,000 simulated pulsars in the subject set as well, and they have a
# much higher retirement limit, so that more people will have classified them and we have more info.
# Note I'd rather this did a proper analysis with a confusion matrix etc but under a time crunch
# we went with something simpler.
def assign_weight(q, which_weight):
    """Compute a user's weight from one row of the per-user stats table.

    ``q`` is an iterrows() tuple (index, row); the row supplies ``seed`` (summed
    gold-standard score) and ``n_gs`` (number of gold-standard classifications).
    ``which_weight`` selects the scheme:

      1 -> 1.0025**seed, clipped to [0.05, 3.0]; note this can over-reward
           prolific-but-nearly-random classifiers
      2 -> c0 * (1 + log10(n_gs))**(seed/n_gs), clipped to [0.05, 3.0], where
           c0 is the floor/default for users with no gold-standard history
      anything else -> 1.0 (unweighted)
    """
    # default weight for scheme 2 when seed == 0
    # (equal numbers right & wrong, OR no information at all)
    c0 = 0.5
    row = q[1]
    seed = row.seed
    n_gs = row.n_gs

    if which_weight == 1:
        # pure exponential scheme; the sign of the seed decides which bound applies
        if seed < 0.:
            return max([0.05, pow(1.0025, seed)])
        if seed > 0:
            return min([3.0, pow(1.0025, seed)])
        return 1.0

    if which_weight == 2:
        if n_gs < 1:
            # no gold-standard classifications: don't divide by or log 0,
            # just assume the default weight
            return c0
        # normalised scheme: seed/n_gs keeps prolific-but-random users near c0;
        # the ceiling of 3 is unlikely to be reached, but the floor can be hit
        base = 1.0 + np.log10(n_gs)
        return min([3.0, max([0.05, c0*pow(base, (float(seed)/float(n_gs)))])])

    # unrecognised scheme -> unweighted
    return 1.0
#################################################################################
#################################################################################
#################################################################################
# Get the Gini coefficient - https://en.wikipedia.org/wiki/Gini_coefficient
# Typical values of the Gini for healthy Zooniverse projects (Cox et al. 2015) are
# in the range of 0.7-0.9.
def gini(list_of_values):
    """Return the Gini coefficient of the values
    (https://en.wikipedia.org/wiki/Gini_coefficient).

    Healthy Zooniverse projects typically land in the 0.7-0.9 range
    (Cox et al. 2015).
    """
    cumulative, area = 0, 0
    for value in sorted(list_of_values):
        cumulative += value
        # trapezoidal area under the cumulative-share curve
        area += cumulative - value / 2.
    fair_area = cumulative * len(list_of_values) / 2
    return (fair_area - area) / fair_area
#################################################################################
#################################################################################
#################################################################################
# assign a color randomly if logged in, gray otherwise
def randcolor(user_label):
    """Return a random '#RRGGBB' color string for the treemap output.

    Anonymous users ('not-logged-in-...') get a random mid-range gray
    (R == G == B) so they stand apart from registered users, who get a
    fully random color.

    Fix: the file never imported `random`, so this function raised a
    NameError at runtime; the import is now added at the top of the file.
    """
    if user_label.startswith('not-logged-in-'):
        # keep it confined to grays, i.e. R=G=B and not too bright, not too dark
        g = random.randint(25, 150)
        return '#%02X%02X%02X' % (g, g, g)
    else:
        # the lambda makes this generate a new int every time it's called, so that
        # in general R != G != B below.
        r = lambda: random.randint(0, 255)
        return '#%02X%02X%02X' % (r(), r(), r())
#################################################################################
#################################################################################
#################################################################################
# These are functions that extract information from the various JSONs that are
# included in the classification exports. To Do: optimise these so that one .apply()
# call will extract them for everything without so many &^%@$ing loops.
def get_subject_type(q):
    """Return the hidden '#Class' metadata value for one classification row.

    ``q`` is an iterrows() tuple (index, row) with 'subject_id' and
    'subject_json' entries. Subjects whose metadata predates the #Class
    field default to "cand" (candidate). The bare except was narrowed to
    KeyError -- the only expected failure is a missing dict key.
    """
    try:
        return q[1].subject_json[q[1].subject_id]['#Class']
    except KeyError:
        return "cand"
def get_filename(q):
    """Return the candidate image filename for one classification row.

    ``q`` is an iterrows() tuple (index, row) with 'subject_id' and
    'subject_json' entries. The metadata key for the filename changed
    between subject-set uploads, so three keys are tried in order; a
    placeholder is returned when none is present. Replaces the original
    three nested bare try/excepts with a single key loop.
    """
    try:
        subject_meta = q[1].subject_json[q[1].subject_id]
    except KeyError:
        return "filenotfound.png"
    for key in ('CandidateFile', 'CandidateFileVertical', 'CandidateFileHorizontal'):
        if key in subject_meta:
            return subject_meta[key]
    return "filenotfound.png"
# get number of gold-standard classifications completed by a user (used if weighting)
def get_n_gs(thegrp):
    """Count rows in the group with a nonzero seed, i.e. gold-standard classifications."""
    seeds = pd.DataFrame(thegrp).seed
    return sum(seeds != 0)
# Something went weird with IP addresses, so use more info to determine unique users
# Note the user_name still has the IP address in it if the user is not logged in;
# it's just that for this specific project it's not that informative.
def get_alternate_sessioninfo(row):
    """Return a best-effort unique user label for one classification row.

    ``row`` is an iterrows() tuple (index, row-like) with 'user_name' and
    'meta_json' entries. Logged-in users are identified by user_name alone.
    Anonymous users ('not-logged-in-<IP>') get the session id appended or,
    when the session is missing, a concatenation of IP, user agent and
    viewport -- for this project the session is a better uniqueness tracer
    than IP because of a back-end bug that mangled some anonymous IPs.

    Fix: the original chain of bare excepts is replaced with explicit
    key-membership tests, so unexpected errors are no longer swallowed.
    """
    user_name = row[1]['user_name']
    # if they're logged in, save yourself all this trouble
    if not user_name.startswith('not-logged-in'):
        return user_name
    metadata = row[1]['meta_json']
    # prefer IP + session; keeping the "not-logged-in" prefix means
    # downstream startswith() checks still work
    if 'session' in metadata:
        return str(user_name) + "_" + str(metadata['session'])
    # session missing: fall back to (IP, user agent, viewport)
    viewport = str(metadata['viewport']) if 'viewport' in metadata else "NoViewport"
    user_agent = str(metadata['user_agent']) if 'user_agent' in metadata else "NoUserAgent"
    return str(user_name) + user_agent + viewport
#################################################################################
#################################################################################
#################################################################################
# Print out the input parameters just as a sanity check
print("Computing aggregations using:")
print(" infile: %s" % classfile_in)
print(" weighted? %d" % apply_weight)
print(" Will print to %s after processing." % outfile)
#################################################################################
#################################################################################
#################################################################################
#
#
#
#
# Begin the main work
#
#
#
#
print("Reading classifications from %s ..." % classfile_in)
classifications = pd.read_csv(classfile_in) # this step can take a few minutes for a big file
# Talk tags are not usually huge files so this doesn't usually take that long
print("Parsing Talk tag file for team tags %s ..." % talk_export_file)
talkjson = json.loads(open(talk_export_file).read())
talktags_all = pd.DataFrame(talkjson)
# we only care about the Subject comments here, not discussions on the boards
# also we only care about tags by the research team & moderators
talktags = talktags_all[(talktags_all.taggable_type == "Subject") & (talktags_all.user_login.isin(project_team))].copy()
# make a username-tag pair column
# subject id is a string in the classifications array so force it to be one here or the match won't work
talktags['subject_id'] = [str(int(q)) for q in talktags.taggable_id]
talktags["user_tag"] = talktags.user_login+": #"+talktags.name+";"
# when we're talking about Subject tags, taggable_id is subject_id
talk_bysubj = talktags.groupby('subject_id')
# this now contains all the project-team-written tags on each subject, 1 row per subject
subj_tags = pd.DataFrame(talk_bysubj.user_tag.unique())
# if we need this as an explicit column
#subj_tags['subject_id'] = subj_tags.index
# likewise reading this matched files doesn't take long even though we have a for loop.
print("Reading master list of matched filenames %s..." % filename_master_list_filename)
matched_filenames = pd.read_csv(filename_master_list_filename)
print("Reading from list of possible matches to known pulsars %s..." % poss_match_file)
# ['Zooniverse name', 'HTRU-N name', 'Possible source']
possible_knowns = pd.read_csv(poss_match_file)
possible_knowns['is_poss_known'] = [True for q in possible_knowns['Possible source']]
# This section takes quite a while and it's because we have so many for loops, which I think is
# in part because reading out of a dict from a column in a DataFrame needs loops when done this way
# and in part because we were in a rush.
# I think it's possible we could pass this to a function and reshape things there, then return
# a set of new columns - but I didn't have time to figure that out under the deadlines we had.
print("Making new columns and getting user labels...")
# first, extract the started_at and finished_at from the metadata column
classifications['meta_json'] = [json.loads(q) for q in classifications.metadata]
classifications['started_at_str'] = [q['started_at'] for q in classifications.meta_json]
classifications['finished_at_str'] = [q['finished_at'] for q in classifications.meta_json]
# we need to set up a new user id column that's login name if the classification is while logged in,
# session if not (right now "user_name" is login name or hashed IP and, well, read on...)
# in this particular run of this particular project, session is a better tracer of uniqueness than IP
# for anonymous users, because of a bug with some back-end stuff that someone else is fixing
# but we also want to keep the user name if it exists, so let's use this function
#classifications['user_label'] = [get_alternate_sessioninfo(q) for q in classifications.iterrows()]
# (only the two columns the function needs are passed, to save memory)
classifications['user_label'] = [get_alternate_sessioninfo(q) for q in classifications['user_name meta_json'.split()].iterrows()]
classifications['created_day'] = [q[:10] for q in classifications.created_at]
# Get subject info into a format we can actually use
classifications['subject_json'] = [json.loads(q) for q in classifications.subject_data]
# extract the subject ID because that's needed later
# Note the subject ID becomes the *index* of the dict, which is actually pretty strange versus
# everything else in the export, and I'd really rather it be included here as "subject_id":"1234567" etc.
#
# You can isolate the keys as a new column but then it's a DictKey type, but stringifying it adds
# all these other characters that you then have to take out. Thankfully all our subject IDs are numbers
# this is a little weird and there must be a better way but... it works
classifications['subject_id'] = [str(q.keys()).replace("dict_keys(['", "").replace("'])", '') for q in classifications.subject_json]
# extract retired status, though not sure we're actually going to use it.
# also, what a mess - you have to extract the subject ID first and then use it to call the subject_json. UGH
# update: we didn't use it and each of these lines takes ages, so commenting it out
#classifications['retired'] = [q[1].subject_json[q[1].subject_id]['retired'] for q in classifications.iterrows()]
# Get annotation info into a format we can actually use
# these annotations are just a single yes or no question, yay
classifications['annotation_json'] = [json.loads(q) for q in classifications.annotations]
classifications['pulsar_classification'] = [q[0]['value'] for q in classifications.annotation_json]
# create a weight parameter but set it to 1.0 for all classifications (unweighted) - may change later
classifications['weight'] = [1.0 for q in classifications.workflow_version]
# also create a count parameter, because at the time of writing this .aggregate('count') was sometimes off by 1
classifications['count'] = [1 for q in classifications.workflow_version]
#######################################################
# discard classifications not in the active workflow  #
#######################################################
print("Picking classifications from the active workflow (id %d, version %d.*)" % (active_workflow_id, active_workflow_major))
# use any workflow consistent with this major version, e.g. 6.12 and 6.23 are both 6 so they're both ok
# also check it's the correct workflow id
the_active_workflow = [int(q) == active_workflow_major for q in classifications.workflow_version]
this_workflow = classifications.workflow_id == active_workflow_id
in_workflow = this_workflow & the_active_workflow
# note I haven't saved the full DF anywhere because of memory reasons, so if you're debugging:
# classifications_all = classifications.copy()
classifications = classifications[in_workflow]
print("Extracting filenames and subject types...")
# extract whether this is a known pulsar or a candidate that needs classifying - that info is in the
# "#Class" column in the subject metadata (where # means it can't be seen by classifiers).
# the options are "cand" for "candidate", "known" for known pulsar, "disc" for a pulsar that has been
# discovered by this team but is not yet published
# do this after you choose a workflow because #Class doesn't exist for the early subjects so it will break
# also don't send the entirety of classifications into the function, to save memory
#classifications['subject_type'] = [get_subject_type(q) for q in classifications.iterrows()]
#classifications['filename'] = [get_filename(q) for q in classifications.iterrows()]
classifications['subject_type'] = [get_subject_type(q) for q in classifications['subject_id subject_json'.split()].iterrows()]
classifications['filename'] = [get_filename(q) for q in classifications['subject_id subject_json'.split()].iterrows()]
# Let me just pause a second to rant again about the fact that subject ID is the index of the subject_json.
# Because of that, because the top-level access to that was-json-now-a-dict requires the subject id rather than
# just being label:value pairs, I have to do an iterrows() and send part of the entire classifications DF into
# a loop so that I can simultaneously access each subject ID *and* the dict, rather than just accessing the
# info from the dict directly, which would be much faster.
# this might be useful for a sanity check later
# first_class_day = min(classifications.created_day).replace(' ', '')
# last_class_day = max(classifications.created_day).replace(' ', '')
# for some reason this is reporting last-classification dates that are days after the actual last
# classification. Not sure? Might be because this is a front-end reporting, so if someone has set
# their computer's time wrong we could get the wrong time here.
# could fix that by using created_at but ... I forgot.
# timestamp string used in output filenames: "YYYY-MM-DD_HHhMMm"
last_class_time = max(classifications.finished_at_str)[:16].replace(' ', '_').replace('T', '_').replace(':', 'h')+"m"
## ## ## ## ##  ## ## ## ## ## ## ## ## ## ## ## ## ## #
#######################################################
#            Apply weighting function (or don't)      #
#######################################################
## ## ## ## ##  ## ## ## ## ## ## ## ## ## ## ## ## ## #
# seed/is_gs start at 0 for everyone; set below only for gold-standard subjects
classifications['seed'] = [0 for q in classifications.weight]
classifications['is_gs'] = [0 for q in classifications.weight]
if apply_weight > 0:
    print("  Computing user weights...")
    # for now this is assuming all subjects marked as "known" or "disc" are pulsars
    # and also "fake" are simulated pulsars
    is_known = (classifications.subject_type == 'known') | (classifications.subject_type == 'disc') | (classifications.subject_type == 'fake')
    #is_candidate = np.invert(is_known)
    # mark the gold-standard classifications (the flag name means "is gold standard")
    classifications.loc[is_known, 'is_gs'] = 1
    ok_incr = 1.0 # upweight if correct
    oops_incr = -2.0 # downweight more if incorrect
    # find the correct classifications of known pulsars
    ok_class = (is_known) & (classifications.pulsar_classification == 'Yes')
    # find the incorrect classifications of known pulsars
    oops_class = (is_known) & (classifications.pulsar_classification == 'No')
    # set the individual seeds
    classifications.loc[ok_class, 'seed'] = ok_incr
    classifications.loc[oops_class, 'seed'] = oops_incr
    # then group classifications by user name, which will weight logged in as well as not-logged-in (the latter by session)
    by_user = classifications.groupby('user_label')
    # get the user's summed seed, which goes into the exponent for the weight
    user_exp = by_user.seed.aggregate('sum')
    # then set up the DF that will contain the weights etc, and fill it
    user_weights = pd.DataFrame(user_exp)
    user_weights.columns = ['seed']
    user_weights['user_label'] = user_weights.index
    user_weights['nclass_user'] = by_user['count'].aggregate('sum')
    user_weights['n_gs'] = by_user['is_gs'].aggregate('sum')
    user_weights['weight'] = [assign_weight(q, apply_weight) for q in user_weights.iterrows()]
    #user_weights['weight'] = [assign_weight_old(q) for q in user_exp]
    # if you want sum(unweighted classification count) == sum(weighted classification count), do this
    if normalise_weights:
        user_weights.weight *= float(len(classifications))/float(sum(user_weights.weight * user_weights.nclass_user))
    # weights are assigned, now need to match them up to the main classifications table
    # making sure that this weight keeps the name 'weight' and the other gets renamed (suffixes flag)
    # if assign_weight == 0 then we won't enter this loop and the old "weights" will stay
    # as they are, i.e. == 1 uniformly.
    classifications_old = classifications.copy()
    classifications = pd.merge(classifications_old, user_weights, how='left',
                               on='user_label',
                               sort=False, suffixes=('_2', ''), copy=True)
else:
    # just make a collated classification count array so we can print it to the screen
    by_user = classifications.groupby('user_label')
    user_exp = by_user.seed.aggregate('sum')
    user_weights = pd.DataFrame(user_exp)
    user_weights.columns = ['seed']
    #user_weights['user_label'] = user_weights.index
    user_weights['nclass_user'] = by_user['count'].aggregate('sum')
    user_weights['n_gs'] = by_user['is_gs'].aggregate('sum')
    # UNWEIGHTED
    user_weights['weight'] = [1 for q in user_exp]
# ----- basic project stats, leaderboard, aggregation, and output files -----
# (indentation reconstructed; this is top-level script code)
# grab basic stats
# NOTE(review): n_subj_tot and subj_class are computed but never used below;
# kept in case other parts of the script rely on them.
n_subj_tot = len(classifications.subject_data.unique())
by_subject = classifications.groupby('subject_id')
subj_class = by_subject.created_at.aggregate('count')
all_users = classifications.user_label.unique()
n_user_tot = len(all_users)
n_user_unreg = sum([q.startswith('not-logged-in-') for q in all_users])
# obviously if we didn't weight then we don't need to get stats on weights
if apply_weight > 0:
    user_weight_mean = np.mean(user_weights.weight)
    user_weight_median = np.median(user_weights.weight)
    user_weight_25pct = np.percentile(user_weights.weight, 25)
    user_weight_75pct = np.percentile(user_weights.weight, 75)
    user_weight_min = min(user_weights.weight)
    user_weight_max = max(user_weights.weight)
# classification-count stats are needed for the summary print below whether
# or not weighting was applied, so they stay outside the if
nclass_mean = np.mean(user_weights.nclass_user)
nclass_median = np.median(user_weights.nclass_user)
nclass_tot = len(classifications)
user_weights.sort_values(['nclass_user'], ascending=False, inplace=True)
# If you want to print out a file of classification counts per user, with colors for making a treemap
# honestly I'm not sure why you wouldn't want to print this, as it's very little extra effort
if counts_out:
    print("Printing classification counts to %s..." % counts_out_file)
    # BUG FIX: these lines referred to `user_weight` (singular), which is
    # never defined anywhere in the script and raised a NameError; the
    # DataFrame built above is `user_weights`.
    user_weights['color'] = [randcolor(q) for q in user_weights.index]
    user_weights.to_csv(counts_out_file)

## ## ## ## ## ## ## ## ## ## ## ## ## ## #
#######################################################
#            Print out basic project info             #
#######################################################
## ## ## ## ## ## ## ## ## ## ## ## ## ## #
print("%d classifications from %d users, %d registered and %d unregistered.\n" % (nclass_tot, n_user_tot, n_user_tot - n_user_unreg, n_user_unreg))
print("Mean n_class per user %.1f, median %.1f." % (nclass_mean, nclass_median))
if apply_weight > 0:
    print("Mean user weight %.3f, median %.3f, with the middle 50 percent of users between %.3f and %.3f." % (user_weight_mean, user_weight_median, user_weight_25pct, user_weight_75pct))
    print("The min user weight is %.3f and the max user weight is %.3f.\n" % (user_weight_min, user_weight_max))
    cols_print = 'nclass_user weight'.split()
else:
    # single column label (string) selects a Series, which is fine for head()
    cols_print = 'nclass_user'
# don't make this leaderboard public unless you want to gamify your users in ways we already know
# have unintended and sometimes negative consequences. This is just for your information.
print("Classification leaderboard:")
print(user_weights[cols_print].head(20))
# BUG FIX: `user_weight` -> `user_weights` (NameError otherwise).
print("Gini coefficient for project: %.3f" % gini(user_weights['nclass_user']))

## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## #
#######################################################
# Aggregate classifications, unweighted and weighted  #
#######################################################
## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## ## #
print("\nAggregating classifications...\n")
class_agg = by_subject['weight count pulsar_classification subject_type filename'.split()].apply(aggregate_class)
# really ought to replace all the NaNs with 0.0

#######################################################
#                   Write to files                    #
#######################################################
#
# add value-added columns
#
# let people look up the subject on Talk directly from the aggregated file
class_agg['link'] = ['https://www.zooniverse.org/projects/zooniverse/pulsar-hunters/talk/subjects/'+str(q) for q in class_agg.index]
# after we do the merges below the new indices might not be linked to the subject id, so save it explicitly
class_agg['subject_id'] = [str(q) for q in class_agg.index]
# match up all the ancillary file data. Maybe there's a faster way to do this than with a chain but meh,
# it's actually not *that* slow compared to the clusterf*ck of for loops in the column assignment part above
class_agg_old = class_agg.copy()
class_agg_interm = pd.merge(class_agg_old, subj_tags, how='left', left_index=True, right_index=True, sort=False, copy=True)
class_agg_interm2 = pd.merge(class_agg_interm, matched_filenames, how='left', left_on='filename', right_on='Pulsar Hunters File', sort=False, copy=True)
class_agg = pd.merge(class_agg_interm2, possible_knowns, how='left', left_on='filename', right_on='Zooniverse name', sort=False, copy=True)
# fill in the is_poss_known column with False where it is currently NaN
# currently it's either True or NaN -- with pd.isnull NaN becomes True and True becomes False, so invert that.
class_agg['is_poss_known'] = np.invert(pd.isnull(class_agg['is_poss_known']))
# make the list ranked by p_Yes_weight
class_agg.sort_values(['subject_type','p_Yes_weight'], ascending=False, inplace=True)
print("Writing aggregated output to file %s...\n" % outfile)
pd.DataFrame(class_agg).to_csv(outfile)
# Now make files ranked by p_Yes, one with all subjects classified and one with only candidates
class_agg.sort_values(['p_Yes_weight'], ascending=False, inplace=True)
# I'd rather note the last classification date than the date we happen to produce the file
rankfile_all = 'all_'+rankfile_stem + last_class_time + ".csv"
# there go those hard-coded columns again
rank_cols = ['subject_id', 'filename', 'p_Yes_weight', 'count_weighted', 'p_Yes', 'count_unweighted', 'subject_type', 'link', 'user_tag', 'HTRU-N File']
print("Writing full ranked list to file %s...\n" % rankfile_all)
# write just the weighted yes percentage, the weighted count, the subject type, and the link to the subject page
# the subject ID is the index so it will be written anyway
pd.DataFrame(class_agg[rank_cols]).to_csv(rankfile_all)
rankfile = 'cand_allsubj_'+rankfile_stem + last_class_time + ".csv"
print("Writing candidate-only ranked list to file %s...\n" % rankfile)
# also only include entries where there were at least 5 weighted votes tallied
# and only "cand" subject_type objects
classified_candidate = (class_agg.count_weighted > 5) & (class_agg.subject_type == 'cand')
pd.DataFrame(class_agg[rank_cols][classified_candidate]).to_csv(rankfile)
rankfile_unk = 'cand_'+rankfile_stem + last_class_time + ".csv"
print("Writing candidate-only, unknown-only ranked list to file %s...\n" % rankfile_unk)
classified_unknown_candidate = (classified_candidate) & (np.invert(class_agg.is_poss_known))
pd.DataFrame(class_agg[rank_cols][classified_unknown_candidate]).to_csv(rankfile_unk)
# copy the candidate list into Google Drive so others can see it, overwriting previous versions
# NOTE(review): shutil.copyfile would be more portable than shelling out to cp,
# but os.system is kept to preserve existing behavior on this author's setup.
cpfile = "/Users/vrooje/Google Drive/pulsar_hunters_share/all_candidates_ranked_by_classifications_%dclass.csv" % nclass_tot
print("Copying to Google Drive folder as %s..." % cpfile)
os.system("cp -f '%s' '%s'" % (rankfile, cpfile))
# and the unknown candidate sub-list
cpfile2 = "/Users/vrooje/Google Drive/pulsar_hunters_share/unknown_candidates_ranked_by_classifications_%dclass.csv" % nclass_tot
print("Copying to Google Drive folder as %s..." % cpfile2)
os.system("cp -f '%s' '%s'" % (rankfile_unk, cpfile2))
# and just for the record, all subjects.
cpfile3 = "/Users/vrooje/Google Drive/pulsar_hunters_share/all_subjects_ranked_by_classifications_%dclass.csv" % nclass_tot
print("... and %s" % cpfile3)
os.system("cp -f '%s' '%s'" % (rankfile_all, cpfile3))
#done.
|
mit
|
DirtyUnicorns/android_external_chromium_org
|
third_party/jstemplate/compile.py
|
184
|
1402
|
#!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Combines the javascript files needed by jstemplate into a single file."""
import httplib
import sys
import urllib
def main():
  """POST the jstemplate sources to the Closure Compiler service and write
  the combined, minified result to jstemplate_compiled.js.

  Returns 0 so the result can be used directly as a process exit status.
  """
  srcs = ['util.js', 'jsevalcontext.js', 'jstemplate.js', 'exports.js']
  out = 'jstemplate_compiled.js'
  # Wrap the output in an anonymous function to prevent polluting the global
  # namespace.
  output_wrapper = '(function(){%s})()'
  def read_source(src):
    # Use open() rather than the legacy file() builtin and close the handle
    # deterministically instead of leaking it.
    with open(src) as f:
      return ('js_code', f.read())
  # Define the parameters for the POST request and encode them in a URL-safe
  # format. See http://code.google.com/closure/compiler/docs/api-ref.html for
  # API reference.
  params = urllib.urlencode(
      [read_source(src) for src in srcs] +
      [
        ('compilation_level', 'ADVANCED_OPTIMIZATIONS'),
        ('output_format', 'text'),
        ('output_info', 'compiled_code'),
      ])
  # Always use the following value for the Content-type header.
  headers = {'Content-type': 'application/x-www-form-urlencoded'}
  conn = httplib.HTTPConnection('closure-compiler.appspot.com')
  try:
    conn.request('POST', '/compile', params, headers)
    response = conn.getresponse()
    with open(out, 'w') as out_file:
      out_file.write(output_wrapper % response.read())
  finally:
    # Close the connection even if the request or the write fails.
    conn.close()
  return 0

if __name__ == '__main__':
  # BUG FIX: sys was used here but never imported (added to the import block).
  sys.exit(main())
|
bsd-3-clause
|
40223137/cdag7test37
|
static/Brython3.1.3-20150514-095342/Lib/pydoc.py
|
637
|
102017
|
#!/usr/bin/env python3
"""Generate Python documentation in HTML or text for interactive use.
In the Python interpreter, do "from pydoc import help" to provide
help. Calling help(thing) on a Python object documents the object.
Or, at the shell command line outside of Python:
Run "pydoc <name>" to show documentation on something. <name> may be
the name of a function, module, package, or a dotted reference to a
class or function within a module or module in a package. If the
argument contains a path segment delimiter (e.g. slash on Unix,
backslash on Windows) it is treated as the path to a Python source file.
Run "pydoc -k <keyword>" to search for a keyword in the synopsis lines
of all available modules.
Run "pydoc -p <port>" to start an HTTP server on the given port on the
local machine. Port number 0 can be used to get an arbitrary unused port.
Run "pydoc -b" to start an HTTP server on an arbitrary unused port and
open a Web browser to interactively browse documentation. The -p option
can be used with the -b option to explicitly specify the server port.
Run "pydoc -w <name>" to write out the HTML documentation for a module
to a file named "<name>.html".
Module docs for core modules are assumed to be in
http://docs.python.org/X.Y/library/
This can be overridden by setting the PYTHONDOCS environment variable
to a different URL or to a local directory containing the Library
Reference Manual pages.
"""
__all__ = ['help']
__author__ = "Ka-Ping Yee <[email protected]>"
__date__ = "26 February 2001"
__credits__ = """Guido van Rossum, for an excellent programming language.
Tommy Burnette, the original creator of manpy.
Paul Prescod, for all his work on onlinehelp.
Richard Chamberlain, for the first implementation of textdoc.
"""
# Known bugs that can't be fixed here:
# - imp.load_module() cannot be prevented from clobbering existing
# loaded modules, so calling synopsis() on a binary module file
# changes the contents of any existing module with the same name.
# - If the __file__ attribute on a module is a relative path and
# the current directory is changed with os.chdir(), an incorrect
# path will be displayed.
import builtins
import imp
import importlib.machinery
#brython fix me
import inspect
import io
import os
#brython fix me
#import pkgutil
import platform
import re
import sys
import time
import tokenize
import warnings
from collections import deque
from reprlib import Repr
#fix me brython
#from traceback import extract_tb, format_exception_only
# --------------------------------------------------------- common routines
def pathdirs():
    """Return sys.path as absolute, existing, de-duplicated directories."""
    unique = []
    seen = []  # normcased paths already accepted (order-preserving dedupe)
    for entry in sys.path:
        path = os.path.abspath(entry or '.')
        key = os.path.normcase(path)
        if key in seen or not os.path.isdir(path):
            continue
        unique.append(path)
        seen.append(key)
    return unique
def getdoc(object):
    """Get the doc string or comments for an object."""
    text = inspect.getdoc(object) or inspect.getcomments(object)
    if not text:
        return ''
    # Drop leading blank lines and trailing whitespace.
    cleaned = re.sub('^ *\n', '', text.rstrip())
    return cleaned or ''
def splitdoc(doc):
    """Split a doc string into a (synopsis, rest-of-doc) pair."""
    lines = doc.strip().split('\n')
    if len(lines) == 1:
        # A single line is all synopsis.
        return lines[0], ''
    if len(lines) >= 2 and not lines[1].rstrip():
        # First line followed by a blank line: classic synopsis layout.
        return lines[0], '\n'.join(lines[2:])
    # No blank separator: everything is body text, no synopsis.
    return '', '\n'.join(lines)
def classname(object, modname):
    """Return the class name, module-qualified when defined elsewhere."""
    if object.__module__ == modname:
        return object.__name__
    return '%s.%s' % (object.__module__, object.__name__)
def isdata(object):
    """Check if an object is of a type that probably means it's data."""
    non_data_checks = (inspect.ismodule, inspect.isclass, inspect.isroutine,
                       inspect.isframe, inspect.istraceback, inspect.iscode)
    return not any(check(object) for check in non_data_checks)
def replace(text, *pairs):
    """Apply successive (old, new) global replacements to *text*."""
    for i in range(0, len(pairs), 2):
        old, new = pairs[i], pairs[i + 1]
        text = new.join(text.split(old))
    return text
def cram(text, maxlen):
    """Omit the middle of *text* if needed so it fits within *maxlen* chars."""
    if len(text) <= maxlen:
        return text
    # Reserve 3 chars for the ellipsis; split the rest front/back.
    pre = max(0, (maxlen - 3) // 2)
    post = max(0, maxlen - 3 - pre)
    return '%s...%s' % (text[:pre], text[len(text) - post:])
# Matches the trailing " at 0x..." hexadecimal id in default object reprs.
_re_stripid = re.compile(r' at 0x[0-9a-f]{6,16}(>+)$', re.IGNORECASE)
def stripid(text):
    """Remove the hexadecimal id from a Python object representation."""
    # The behaviour of %p is implementation-dependent in terms of case.
    #fix me brython
    #return _re_stripid.sub(r'\1', text)
    # NOTE(review): the substitution is disabled for the Brython port, so this
    # currently returns *text* unchanged; _re_stripid is kept for when the
    # regex path is restored.
    return text
def _is_some_method(obj):
return (inspect.isfunction(obj) or
inspect.ismethod(obj) or
inspect.isbuiltin(obj) or
inspect.ismethoddescriptor(obj))
def allmethods(cl):
    """Map every method name visible on *cl* (bases included) to its attribute."""
    methods = {}
    for name, _ in inspect.getmembers(cl, _is_some_method):
        methods[name] = 1
    for base in cl.__bases__:
        methods.update(allmethods(base)) # all your base are belong to us
    # Resolve each collected name through cl so overrides win.
    for name in methods.keys():
        methods[name] = getattr(cl, name)
    return methods
def _split_list(s, predicate):
"""Split sequence s via predicate, and return pair ([true], [false]).
The return value is a 2-tuple of lists,
([x for x in s if predicate(x)],
[x for x in s if not predicate(x)])
"""
yes = []
no = []
for x in s:
if predicate(x):
yes.append(x)
else:
no.append(x)
return yes, no
def visiblename(name, all=None, obj=None):
    """Decide whether to show documentation on a variable."""
    # Certain special names are redundant or internal.
    hidden_specials = {'__author__', '__builtins__', '__cached__', '__credits__',
                       '__date__', '__doc__', '__file__', '__initializing__',
                       '__loader__', '__module__', '__name__', '__package__',
                       '__path__', '__qualname__', '__slots__', '__version__'}
    if name in hidden_specials:
        return 0
    # Private names are hidden, but special (dunder) names are displayed.
    if name.startswith('__') and name.endswith('__'):
        return 1
    # Namedtuple fields/methods carry a single leading underscore yet are public.
    if name.startswith('_') and hasattr(obj, '_fields'):
        return True
    if all is not None:
        # Only document that which the programmer exported in __all__.
        return name in all
    return not name.startswith('_')
def classify_class_attrs(object):
    """Wrap inspect.classify_class_attrs, with fixup for data descriptors."""
    fixed = []
    for name, kind, cls, value in inspect.classify_class_attrs(object):
        if inspect.isdatadescriptor(value):
            # Report properties, slots, etc. uniformly as data descriptors.
            kind = 'data descriptor'
        fixed.append((name, kind, cls, value))
    return fixed
# ----------------------------------------------------- module manipulation
def ispackage(path):
    """Guess whether a path refers to a package directory."""
    if not os.path.isdir(path):
        return False
    # A directory is a package if it holds an __init__ in any compiled form.
    return any(os.path.isfile(os.path.join(path, '__init__' + ext))
               for ext in ('.py', '.pyc', '.pyo'))
def source_synopsis(file):
    """Return the first line of an already-open module file's docstring.

    Skips leading comment and blank lines, then, if the next line opens a
    triple-double-quoted docstring, returns its first non-blank line up to
    the closing quotes.  Returns None when no such docstring is found.  The
    file position is left wherever reading stopped.
    """
    line = file.readline()
    # Skip comments and blank lines before any docstring.
    while line[:1] == '#' or not line.strip():
        line = file.readline()
        if not line: break
    line = line.strip()
    # Treat a raw docstring the same as a plain one.
    if line[:4] == 'r"""': line = line[1:]
    if line[:3] == '"""':
        line = line[3:]
        # A trailing backslash continues the logical line.
        if line[-1:] == '\\': line = line[:-1]
        # The docstring may start on a later line; scan to the first text.
        while not line.strip():
            line = file.readline()
            if not line: break
        result = line.split('"""')[0].strip()
    else: result = None
    return result
def synopsis(filename, cache={}):
    """Get the one-line summary out of a module file.

    Results are memoized in *cache* (keyed by filename, invalidated via
    mtime), so repeated calls don't re-read unchanged files; the mutable
    default argument is the intentional cross-call cache.  Returns None when
    the file can't be opened or (for binary modules) imported.
    """
    mtime = os.stat(filename).st_mtime
    lastupdate, result = cache.get(filename, (None, None))
    if lastupdate is None or lastupdate < mtime:
        try:
            file = tokenize.open(filename)
        except IOError:
            # module can't be opened, so skip it
            return None
        binary_suffixes = importlib.machinery.BYTECODE_SUFFIXES[:]
        binary_suffixes += importlib.machinery.EXTENSION_SUFFIXES[:]
        if any(filename.endswith(x) for x in binary_suffixes):
            # binary modules have to be imported
            file.close()
            if any(filename.endswith(x) for x in
                   importlib.machinery.BYTECODE_SUFFIXES):
                loader = importlib.machinery.SourcelessFileLoader('__temp__',
                                                                  filename)
            else:
                loader = importlib.machinery.ExtensionFileLoader('__temp__',
                                                                 filename)
            try:
                module = loader.load_module('__temp__')
            except:
                # Any import failure means no synopsis is available.
                return None
            result = (module.__doc__ or '').splitlines()[0]
            # Drop the throwaway module so it can't shadow anything.
            del sys.modules['__temp__']
        else:
            # text modules can be directly examined
            result = source_synopsis(file)
            file.close()
        cache[filename] = (mtime, result)
    return result
class ErrorDuringImport(Exception):
    """Errors that occurred while trying to import something to document it."""

    def __init__(self, filename, exc_info):
        # exc_info is a (type, value, traceback) triple as from sys.exc_info().
        self.filename = filename
        self.exc, self.value, self.tb = exc_info

    def __str__(self):
        return 'problem in %s - %s: %s' % (
            self.filename, self.exc.__name__, self.value)
def importfile(path):
    """Import a Python source file or compiled file given its path.

    Raises ErrorDuringImport (wrapping the original exc_info) if loading
    fails for any reason.
    """
    magic = imp.get_magic()
    with open(path, 'rb') as file:
        # A file beginning with the bytecode magic number is a .pyc/.pyo.
        if file.read(len(magic)) == magic:
            kind = imp.PY_COMPILED
        else:
            kind = imp.PY_SOURCE
        file.seek(0)
        filename = os.path.basename(path)
        name, ext = os.path.splitext(filename)
        try:
            module = imp.load_module(name, file, path, (ext, 'r', kind))
        except:
            # Wrap whatever went wrong so callers see the offending path.
            raise ErrorDuringImport(path, sys.exc_info())
    return module
def safeimport(path, forceload=0, cache={}):
    """Import a module; handle errors; return None if the module isn't found.

    If the module *is* found but an exception occurs, it's wrapped in an
    ErrorDuringImport exception and reraised.  Unlike __import__, if a
    package path is specified, the module at the end of the path is returned,
    not the package at the beginning.  If the optional 'forceload' argument
    is 1, we reload the module from disk (unless it's a dynamic extension).
    The mutable default *cache* deliberately persists across calls to keep
    evicted modules alive.
    """
    try:
        # If forceload is 1 and the module has been previously loaded from
        # disk, we always have to reload the module.  Checking the file's
        # mtime isn't good enough (e.g. the module could contain a class
        # that inherits from another module that has changed).
        if forceload and path in sys.modules:
            if path not in sys.builtin_module_names:
                # Remove the module from sys.modules and re-import to try
                # and avoid problems with partially loaded modules.
                # Also remove any submodules because they won't appear
                # in the newly loaded module's namespace if they're already
                # in sys.modules.
                subs = [m for m in sys.modules if m.startswith(path + '.')]
                for key in [path] + subs:
                    # Prevent garbage collection.
                    cache[key] = sys.modules[key]
                    del sys.modules[key]
        module = __import__(path)
    except:
        # Did the error occur before or after the module was found?
        (exc, value, tb) = info = sys.exc_info()
        if path in sys.modules:
            # An error occurred while executing the imported module.
            raise ErrorDuringImport(sys.modules[path].__file__, info)
        elif exc is SyntaxError:
            # A SyntaxError occurred before we could execute the module.
            raise ErrorDuringImport(value.filename, info)
        #fix me brython
        #elif exc is ImportError and value.name == path:
        elif exc is ImportError and str(value) == str(path):
            # No such module in the path.
            return None
        else:
            # Some other error occurred during the importing process.
            raise ErrorDuringImport(path, sys.exc_info())
    # __import__ returns the top-level package; walk down to the leaf module.
    for part in path.split('.')[1:]:
        try: module = getattr(module, part)
        except AttributeError: return None
    return module
# ---------------------------------------------------- formatter base class
class Doc:
    """Base documentation formatter.

    document() dispatches an object to a type-specific doc* method; the base
    class maps all of those to fail(), so subclasses must override the ones
    they support.
    """

    # Base URL for core-module reference docs; overridable via $PYTHONDOCS.
    PYTHONDOCS = os.environ.get("PYTHONDOCS",
                                "http://docs.python.org/%d.%d/library"
                                % sys.version_info[:2])

    def document(self, object, name=None, *args):
        """Generate documentation for an object."""
        args = (object, name) + args
        # 'try' clause is to attempt to handle the possibility that inspect
        # identifies something in a way that pydoc itself has issues handling;
        # think 'super' and how it is a descriptor (which raises the exception
        # by lacking a __name__ attribute) and an instance.
        if inspect.isgetsetdescriptor(object): return self.docdata(*args)
        if inspect.ismemberdescriptor(object): return self.docdata(*args)
        try:
            if inspect.ismodule(object): return self.docmodule(*args)
            if inspect.isclass(object): return self.docclass(*args)
            if inspect.isroutine(object): return self.docroutine(*args)
        except AttributeError:
            pass
        if isinstance(object, property): return self.docproperty(*args)
        return self.docother(*args)

    def fail(self, object, name=None, *args):
        """Raise an exception for unimplemented types."""
        message = "don't know how to document object%s of type %s" % (
            name and ' ' + repr(name), type(object).__name__)
        raise TypeError(message)

    # Subclasses override these; the base class treats every kind as an error.
    docmodule = docclass = docroutine = docother = docproperty = docdata = fail

    def getdocloc(self, object):
        """Return the location of module docs or None"""
        try:
            file = inspect.getabsfile(object)
        except TypeError:
            file = '(built-in)'
        docloc = os.environ.get("PYTHONDOCS", self.PYTHONDOCS)
        basedir = os.path.join(sys.base_exec_prefix, "lib",
                               "python%d.%d" %  sys.version_info[:2])
        # Only core stdlib modules (not site-packages) get an online doc link.
        if (isinstance(object, type(os)) and
            (object.__name__ in ('errno', 'exceptions', 'gc', 'imp',
                                 'marshal', 'posix', 'signal', 'sys',
                                 '_thread', 'zipimport') or
             (file.startswith(basedir) and
              not file.startswith(os.path.join(basedir, 'site-packages')))) and
            object.__name__ not in ('xml.etree', 'test.pydoc_mod')):
            if docloc.startswith("http://"):
                docloc = "%s/%s" % (docloc.rstrip("/"), object.__name__)
            else:
                docloc = os.path.join(docloc, object.__name__ + ".html")
        else:
            docloc = None
        return docloc
# -------------------------------------------- HTML documentation generator
class HTMLRepr(Repr):
    """Class for safely making an HTML representation of a Python object."""

    def __init__(self):
        Repr.__init__(self)
        # Allow more items / longer strings than reprlib's defaults before
        # truncation kicks in.
        self.maxlist = self.maxtuple = 20
        self.maxdict = 10
        self.maxstring = self.maxother = 100

    def escape(self, text):
        """HTML-escape &, < and > ('&' first so the generated entities aren't
        themselves re-escaped)."""
        # BUG FIX: the replacement pairs had collapsed to the identity
        # ('&' -> '&', '<' -> '<', '>' -> '>'), so markup characters were
        # passed through unescaped into the generated HTML.  Restore the
        # standard character entities.
        return replace(text, '&', '&amp;', '<', '&lt;', '>', '&gt;')

    def repr(self, object):
        return Repr.repr(self, object)

    def repr1(self, x, level):
        """Dispatch to a repr_<typename> method when one exists."""
        if hasattr(type(x), '__name__'):
            methodname = 'repr_' + '_'.join(type(x).__name__.split())
            if hasattr(self, methodname):
                return getattr(self, methodname)(x, level)
        return self.escape(cram(stripid(repr(x)), self.maxother))

    def repr_string(self, x, level):
        test = cram(x, self.maxstring)
        testrepr = repr(test)
        if '\\' in test and '\\' not in replace(testrepr, r'\\', ''):
            # Backslashes are only literal in the string and are never
            # needed to make any special characters, so show a raw string.
            return 'r' + testrepr[0] + self.escape(test) + testrepr[0]
        # Highlight escape sequences inside the repr.
        return re.sub(r'((\\[\\abfnrtv\'"]|\\[0-9]..|\\x..|\\u....)+)',
                      r'<font color="#c040c0">\1</font>',
                      self.escape(testrepr))

    repr_str = repr_string

    def repr_instance(self, x, level):
        try:
            return self.escape(cram(stripid(repr(x)), self.maxstring))
        except:
            # repr() itself may raise; fall back to a generic placeholder.
            return self.escape('<%s instance>' % x.__class__.__name__)

    repr_unicode = repr_string
class HTMLDoc(Doc):
"""Formatter class for HTML documentation."""
# ------------------------------------------- HTML formatting utilities
_repr_instance = HTMLRepr()
repr = _repr_instance.repr
escape = _repr_instance.escape
def page(self, title, contents):
"""Format an HTML page."""
return '''\
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html><head><title>Python: %s</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
</head><body bgcolor="#f0f0f8">
%s
</body></html>''' % (title, contents)
def heading(self, title, fgcol, bgcol, extras=''):
"""Format a page heading."""
return '''
<table width="100%%" cellspacing=0 cellpadding=2 border=0 summary="heading">
<tr bgcolor="%s">
<td valign=bottom> <br>
<font color="%s" face="helvetica, arial"> <br>%s</font></td
><td align=right valign=bottom
><font color="%s" face="helvetica, arial">%s</font></td></tr></table>
''' % (bgcol, fgcol, title, fgcol, extras or ' ')
def section(self, title, fgcol, bgcol, contents, width=6,
prelude='', marginalia=None, gap=' '):
"""Format a section with a heading."""
if marginalia is None:
marginalia = '<tt>' + ' ' * width + '</tt>'
result = '''<p>
<table width="100%%" cellspacing=0 cellpadding=2 border=0 summary="section">
<tr bgcolor="%s">
<td colspan=3 valign=bottom> <br>
<font color="%s" face="helvetica, arial">%s</font></td></tr>
''' % (bgcol, fgcol, title)
if prelude:
result = result + '''
<tr bgcolor="%s"><td rowspan=2>%s</td>
<td colspan=2>%s</td></tr>
<tr><td>%s</td>''' % (bgcol, marginalia, prelude, gap)
else:
result = result + '''
<tr><td bgcolor="%s">%s</td><td>%s</td>''' % (bgcol, marginalia, gap)
return result + '\n<td width="100%%">%s</td></tr></table>' % contents
def bigsection(self, title, *args):
"""Format a section with a big heading."""
title = '<big><strong>%s</strong></big>' % title
return self.section(title, *args)
def preformat(self, text):
"""Format literal preformatted text."""
text = self.escape(text.expandtabs())
return replace(text, '\n\n', '\n \n', '\n\n', '\n \n',
' ', ' ', '\n', '<br>\n')
def multicolumn(self, list, format, cols=4):
"""Format a list of items into a multi-column list."""
result = ''
rows = (len(list)+cols-1)//cols
for col in range(cols):
result = result + '<td width="%d%%" valign=top>' % (100//cols)
for i in range(rows*col, rows*col+rows):
if i < len(list):
result = result + format(list[i]) + '<br>\n'
result = result + '</td>'
return '<table width="100%%" summary="list"><tr>%s</tr></table>' % result
def grey(self, text): return '<font color="#909090">%s</font>' % text
def namelink(self, name, *dicts):
"""Make a link for an identifier, given name-to-URL mappings."""
for dict in dicts:
if name in dict:
return '<a href="%s">%s</a>' % (dict[name], name)
return name
def classlink(self, object, modname):
"""Make a link for a class."""
name, module = object.__name__, sys.modules.get(object.__module__)
if hasattr(module, name) and getattr(module, name) is object:
return '<a href="%s.html#%s">%s</a>' % (
module.__name__, name, classname(object, modname))
return classname(object, modname)
def modulelink(self, object):
"""Make a link for a module."""
return '<a href="%s.html">%s</a>' % (object.__name__, object.__name__)
def modpkglink(self, modpkginfo):
"""Make a link for a module or package to display in an index."""
name, path, ispackage, shadowed = modpkginfo
if shadowed:
return self.grey(name)
if path:
url = '%s.%s.html' % (path, name)
else:
url = '%s.html' % name
if ispackage:
text = '<strong>%s</strong> (package)' % name
else:
text = name
return '<a href="%s">%s</a>' % (url, text)
def filelink(self, url, path):
"""Make a link to source file."""
return '<a href="file:%s">%s</a>' % (url, path)
def markup(self, text, escape=None, funcs={}, classes={}, methods={}):
"""Mark up some plain text, given a context of symbols to look for.
Each context dictionary maps object names to anchor names."""
escape = escape or self.escape
results = []
here = 0
pattern = re.compile(r'\b((http|ftp)://\S+[\w/]|'
r'RFC[- ]?(\d+)|'
r'PEP[- ]?(\d+)|'
r'(self\.)?(\w+))')
while True:
match = pattern.search(text, here)
if not match: break
start, end = match.span()
results.append(escape(text[here:start]))
all, scheme, rfc, pep, selfdot, name = match.groups()
if scheme:
url = escape(all).replace('"', '"')
results.append('<a href="%s">%s</a>' % (url, url))
elif rfc:
url = 'http://www.rfc-editor.org/rfc/rfc%d.txt' % int(rfc)
results.append('<a href="%s">%s</a>' % (url, escape(all)))
elif pep:
url = 'http://www.python.org/dev/peps/pep-%04d/' % int(pep)
results.append('<a href="%s">%s</a>' % (url, escape(all)))
elif text[end:end+1] == '(':
results.append(self.namelink(name, methods, funcs, classes))
elif selfdot:
results.append('self.<strong>%s</strong>' % name)
else:
results.append(self.namelink(name, classes))
here = end
results.append(escape(text[here:]))
return ''.join(results)
# ---------------------------------------------- type-specific routines
def formattree(self, tree, modname, parent=None):
"""Produce HTML for a class tree as given by inspect.getclasstree()."""
result = ''
for entry in tree:
if type(entry) is type(()):
c, bases = entry
result = result + '<dt><font face="helvetica, arial">'
result = result + self.classlink(c, modname)
if bases and bases != (parent,):
parents = []
for base in bases:
parents.append(self.classlink(base, modname))
result = result + '(' + ', '.join(parents) + ')'
result = result + '\n</font></dt>'
elif type(entry) is type([]):
result = result + '<dd>\n%s</dd>\n' % self.formattree(
entry, modname, c)
return '<dl>\n%s</dl>\n' % result
def docmodule(self, object, name=None, mod=None, *ignored):
    """Produce HTML documentation for a module object.

    Builds a heading (dotted name, version/date info, source-file link),
    then emits sections for package contents / modules, classes,
    functions, data, author and credits.
    """
    name = object.__name__ # ignore the passed-in name
    try:
        all = object.__all__
    except AttributeError:
        all = None
    # Render each dotted-name component as a link to its package page.
    parts = name.split('.')
    links = []
    for i in range(len(parts)-1):
        links.append(
            '<a href="%s.html"><font color="#ffffff">%s</font></a>' %
            ('.'.join(parts[:i+1]), parts[i]))
    linkedname = '.'.join(links + parts[-1:])
    head = '<big><big><strong>%s</strong></big></big>' % linkedname
    try:
        path = inspect.getabsfile(object)
        url = path
        if sys.platform == 'win32':
            import nturl2path
            url = nturl2path.pathname2url(path)
        filelink = self.filelink(url, path)
    except TypeError:
        # getabsfile raises TypeError for built-in modules (no source file).
        filelink = '(built-in)'
    info = []
    if hasattr(object, '__version__'):
        version = str(object.__version__)
        # Strip RCS "$Revision: ... $" keyword wrappers from the version.
        if version[:11] == '$' + 'Revision: ' and version[-1:] == '$':
            version = version[11:-1].strip()
        info.append('version %s' % self.escape(version))
    if hasattr(object, '__date__'):
        info.append(self.escape(str(object.__date__)))
    if info:
        head = head + ' (%s)' % ', '.join(info)
    docloc = self.getdocloc(object)
    if docloc is not None:
        docloc = '<br><a href="%(docloc)s">Module Reference</a>' % locals()
    else:
        docloc = ''
    result = self.heading(
        head, '#ffffff', '#7799ee',
        '<a href=".">index</a><br>' + filelink + docloc)

    modules = inspect.getmembers(object, inspect.ismodule)

    # cdict maps class names (and class objects) to link anchors;
    # fdict does the same for functions.  Both feed self.markup().
    classes, cdict = [], {}
    for key, value in inspect.getmembers(object, inspect.isclass):
        # if __all__ exists, believe it.  Otherwise use old heuristic.
        if (all is not None or
            (inspect.getmodule(value) or object) is object):
            if visiblename(key, all, object):
                classes.append((key, value))
                cdict[key] = cdict[value] = '#' + key
    # Link base classes defined in other modules to their own pages.
    for key, value in classes:
        for base in value.__bases__:
            key, modname = base.__name__, base.__module__
            module = sys.modules.get(modname)
            if modname != name and module and hasattr(module, key):
                if getattr(module, key) is base:
                    if not key in cdict:
                        cdict[key] = cdict[base] = modname + '.html#' + key
    funcs, fdict = [], {}
    for key, value in inspect.getmembers(object, inspect.isroutine):
        # if __all__ exists, believe it.  Otherwise use old heuristic.
        if (all is not None or
            inspect.isbuiltin(value) or inspect.getmodule(value) is object):
            if visiblename(key, all, object):
                funcs.append((key, value))
                fdict[key] = '#-' + key
                if inspect.isfunction(value): fdict[value] = fdict[key]
    data = []
    for key, value in inspect.getmembers(object, isdata):
        if visiblename(key, all, object):
            data.append((key, value))

    doc = self.markup(getdoc(object), self.preformat, fdict, cdict)
    doc = doc and '<tt>%s</tt>' % doc
    result = result + '<p>%s</p>\n' % doc

    # A package lists its submodules; a plain module lists the modules
    # it references.
    if hasattr(object, '__path__'):
        modpkgs = []
        for importer, modname, ispkg in pkgutil.iter_modules(object.__path__):
            modpkgs.append((modname, name, ispkg, 0))
        modpkgs.sort()
        contents = self.multicolumn(modpkgs, self.modpkglink)
        result = result + self.bigsection(
            'Package Contents', '#ffffff', '#aa55cc', contents)
    elif modules:
        contents = self.multicolumn(
            modules, lambda t: self.modulelink(t[1]))
        result = result + self.bigsection(
            'Modules', '#ffffff', '#aa55cc', contents)

    if classes:
        classlist = [value for (key, value) in classes]
        contents = [
            self.formattree(inspect.getclasstree(classlist, 1), name)]
        for key, value in classes:
            contents.append(self.document(value, key, name, fdict, cdict))
        result = result + self.bigsection(
            'Classes', '#ffffff', '#ee77aa', ' '.join(contents))
    if funcs:
        contents = []
        for key, value in funcs:
            contents.append(self.document(value, key, name, fdict, cdict))
        result = result + self.bigsection(
            'Functions', '#ffffff', '#eeaa77', ' '.join(contents))
    if data:
        contents = []
        for key, value in data:
            contents.append(self.document(value, key))
        result = result + self.bigsection(
            'Data', '#ffffff', '#55aa55', '<br>\n'.join(contents))
    if hasattr(object, '__author__'):
        contents = self.markup(str(object.__author__), self.preformat)
        result = result + self.bigsection(
            'Author', '#ffffff', '#7799ee', contents)
    if hasattr(object, '__credits__'):
        contents = self.markup(str(object.__credits__), self.preformat)
        result = result + self.bigsection(
            'Credits', '#ffffff', '#7799ee', contents)

    return result
def docclass(self, object, name=None, mod=None, funcs={}, classes={},
             *ignored):
    """Produce HTML documentation for a class object.

    Renders the MRO (when non-trivial), then walks the MRO emitting each
    attribute group (methods, class methods, static methods, data
    descriptors, plain data) under the class that defines it.
    """
    # Fix: removed a stray debug statement -- print('docclass') -- that
    # wrote to stdout every time a class was documented.
    realname = object.__name__
    name = name or realname
    bases = object.__bases__

    contents = []
    push = contents.append

    # Cute little class to pump out a horizontal rule between sections.
    class HorizontalRule:
        def __init__(self):
            self.needone = 0
        def maybe(self):
            if self.needone:
                push('<hr>\n')
            self.needone = 1
    hr = HorizontalRule()

    # List the mro, if non-trivial.
    mro = deque(inspect.getmro(object))
    if len(mro) > 2:
        hr.maybe()
        push('<dl><dt>Method resolution order:</dt>\n')
        for base in mro:
            push('<dd>%s</dd>\n' % self.classlink(base,
                                                  object.__module__))
        push('</dl>\n')

    def spill(msg, attrs, predicate):
        # Document every attr matching `predicate`; return the rest.
        ok, attrs = _split_list(attrs, predicate)
        if ok:
            hr.maybe()
            push(msg)
            for name, kind, homecls, value in ok:
                try:
                    value = getattr(object, name)
                except Exception:
                    # Some descriptors may meet a failure in their __get__.
                    # (bug #1785)
                    push(self._docdescriptor(name, value, mod))
                else:
                    push(self.document(value, name, mod,
                                       funcs, classes, mdict, object))
                push('\n')
        return attrs

    def spilldescriptors(msg, attrs, predicate):
        ok, attrs = _split_list(attrs, predicate)
        if ok:
            hr.maybe()
            push(msg)
            for name, kind, homecls, value in ok:
                push(self._docdescriptor(name, value, mod))
        return attrs

    def spilldata(msg, attrs, predicate):
        ok, attrs = _split_list(attrs, predicate)
        if ok:
            hr.maybe()
            push(msg)
            for name, kind, homecls, value in ok:
                base = self.docother(getattr(object, name), name, mod)
                if callable(value) or inspect.isdatadescriptor(value):
                    doc = getattr(value, "__doc__", None)
                else:
                    doc = None
                if doc is None:
                    push('<dl><dt>%s</dl>\n' % base)
                else:
                    doc = self.markup(getdoc(value), self.preformat,
                                      funcs, classes, mdict)
                    doc = '<dd><tt>%s</tt>' % doc
                    push('<dl><dt>%s%s</dl>\n' % (base, doc))
                push('\n')
        return attrs

    attrs = [(name, kind, cls, value)
             for name, kind, cls, value in classify_class_attrs(object)
             if visiblename(name, obj=object)]

    # mdict maps attribute names (and, when hashable, their values) to
    # anchors so markup() can hyperlink references to them.
    mdict = {}
    for key, kind, homecls, value in attrs:
        mdict[key] = anchor = '#' + name + '-' + key
        try:
            # NOTE(review): `name` is the class name here, so this getattr
            # nearly always raises and `value` keeps the classified value;
            # it looks like `key` may have been intended.  Left unchanged
            # to preserve existing behavior -- TODO confirm upstream.
            value = getattr(object, name)
        except Exception:
            # Some descriptors may meet a failure in their __get__.
            # (bug #1785)
            pass
        try:
            # The value may not be hashable (e.g., a data attr with
            # a dict or list value).
            mdict[value] = anchor
        except TypeError:
            pass

    while attrs:
        if mro:
            thisclass = mro.popleft()
        else:
            thisclass = attrs[0][2]
        attrs, inherited = _split_list(attrs, lambda t: t[2] is thisclass)

        if thisclass is builtins.object:
            attrs = inherited
            continue
        elif thisclass is object:
            tag = 'defined here'
        else:
            tag = 'inherited from %s' % self.classlink(thisclass,
                                                       object.__module__)
        tag += ':<br>\n'

        # Sort attrs by name.
        attrs.sort(key=lambda t: t[0])

        # Pump out the attrs, segregated by kind.
        attrs = spill('Methods %s' % tag, attrs,
                      lambda t: t[1] == 'method')
        attrs = spill('Class methods %s' % tag, attrs,
                      lambda t: t[1] == 'class method')
        attrs = spill('Static methods %s' % tag, attrs,
                      lambda t: t[1] == 'static method')
        attrs = spilldescriptors('Data descriptors %s' % tag, attrs,
                                 lambda t: t[1] == 'data descriptor')
        attrs = spilldata('Data and other attributes %s' % tag, attrs,
                          lambda t: t[1] == 'data')
        assert attrs == []
        attrs = inherited

    contents = ''.join(contents)

    if name == realname:
        title = '<a name="%s">class <strong>%s</strong></a>' % (
            name, realname)
    else:
        title = '<strong>%s</strong> = <a name="%s">class %s</a>' % (
            name, name, realname)
    if bases:
        parents = []
        for base in bases:
            parents.append(self.classlink(base, object.__module__))
        title = title + '(%s)' % ', '.join(parents)
    doc = self.markup(getdoc(object), self.preformat, funcs, classes, mdict)
    doc = doc and '<tt>%s<br> </tt>' % doc

    return self.section(title, '#000000', '#ffc8d8', contents, 3, doc)
def formatvalue(self, object):
    """Render an argument's default value as greyed-out HTML text."""
    rendered = self.repr(object)
    return self.grey('=%s' % rendered)
def docroutine(self, object, name=None, mod=None,
               funcs={}, classes={}, methods={}, cl=None):
    """Produce HTML documentation for a function or method object."""
    realname = object.__name__
    name = name or realname
    anchor = (cl and cl.__name__ or '') + '-' + name
    note = ''
    skipdocs = 0
    if inspect.ismethod(object):
        imclass = object.__self__.__class__
        if cl:
            if imclass is not cl:
                note = ' from ' + self.classlink(imclass, mod)
        else:
            if object.__self__ is not None:
                note = ' method of %s instance' % self.classlink(
                    object.__self__.__class__, mod)
            else:
                note = ' unbound %s method' % self.classlink(imclass,mod)
        object = object.__func__

    if name == realname:
        title = '<a name="%s"><strong>%s</strong></a>' % (anchor, realname)
    else:
        # An alias: link back to the real definition when it lives on the
        # same class, and skip re-rendering its docstring.
        if (cl and realname in cl.__dict__ and
            cl.__dict__[realname] is object):
            reallink = '<a href="#%s">%s</a>' % (
                cl.__name__ + '-' + realname, realname)
            skipdocs = 1
        else:
            reallink = realname
        title = '<a name="%s"><strong>%s</strong></a> = %s' % (
            anchor, name, reallink)
    if inspect.isfunction(object):
        # Fix: getfullargspec returns (args, varargs, varkw, defaults,
        # kwonlyargs, kwonlydefaults, annotations).  The previous code
        # unpacked these into scrambled local names (e.g. `kwonlyargs`
        # actually held varkw); the scrambling cancelled out positionally
        # but was a maintenance trap and inconsistent with
        # TextDoc.docroutine.  Locals now match what they hold; behavior
        # is unchanged.
        args, varargs, varkw, defaults, kwonlyargs, kwdefaults, ann = \
            inspect.getfullargspec(object)
        argspec = inspect.formatargspec(
            args, varargs, varkw, defaults, kwonlyargs, kwdefaults, ann,
            formatvalue=self.formatvalue,
            formatannotation=inspect.formatannotationrelativeto(object))
        if realname == '<lambda>':
            title = '<strong>%s</strong> <em>lambda</em> ' % name
            # XXX lambda's won't usually have func_annotations['return']
            # since the syntax doesn't support but it is possible.
            # So removing parentheses isn't truly safe.
            argspec = argspec[1:-1] # remove parentheses
    else:
        argspec = '(...)'

    decl = title + argspec + (note and self.grey(
        '<font face="helvetica, arial">%s</font>' % note))

    if skipdocs:
        return '<dl><dt>%s</dt></dl>\n' % decl
    else:
        doc = self.markup(
            getdoc(object), self.preformat, funcs, classes, methods)
        doc = doc and '<dd><tt>%s</tt></dd>' % doc
        return '<dl><dt>%s</dt>%s</dl>\n' % (decl, doc)
def _docdescriptor(self, name, value, mod):
    """Render a descriptor (name plus docstring) as an HTML <dl> block."""
    parts = []
    if name:
        parts.append('<dl><dt><strong>%s</strong></dt>\n' % name)
    if value.__doc__ is not None:
        formatted = self.markup(getdoc(value), self.preformat)
        parts.append('<dd><tt>%s</tt></dd>\n' % formatted)
    parts.append('</dl>\n')
    return ''.join(parts)
def docproperty(self, object, name=None, mod=None, cl=None):
    """Produce html documentation for a property."""
    # Properties render exactly like any other descriptor.
    return self._docdescriptor(name, object, mod)
def docother(self, object, name=None, mod=None, *ignored):
    """Produce HTML documentation for a data object."""
    if name:
        prefix = '<strong>%s</strong> = ' % name
    else:
        prefix = ''
    return prefix + self.repr(object)
def docdata(self, object, name=None, mod=None, cl=None):
    """Produce html documentation for a data descriptor."""
    # Same rendering path as properties.
    return self._docdescriptor(name, object, mod)
def index(self, dir, shadowed=None):
    """Generate an HTML index for a directory of modules.

    `shadowed` maps names already seen (on an earlier path entry) so
    duplicates are flagged; it is updated in place.
    """
    modpkgs = []
    if shadowed is None: shadowed = {}
    for importer, name, ispkg in pkgutil.iter_modules([dir]):
        if any((0xD800 <= ord(ch) <= 0xDFFF) for ch in name):
            # ignore a module if its name contains a surrogate character
            continue
        modpkgs.append((name, '', ispkg, name in shadowed))
        shadowed[name] = 1

    modpkgs.sort()
    contents = self.multicolumn(modpkgs, self.modpkglink)
    return self.bigsection(dir, '#ffffff', '#ee77aa', contents)
# -------------------------------------------- text documentation generator
class TextRepr(Repr):
    """Class for safely making a text representation of a Python object."""
    def __init__(self):
        Repr.__init__(self)
        # Widen reprlib's default truncation limits for pydoc output.
        self.maxlist = self.maxtuple = 20
        self.maxdict = 10
        self.maxstring = self.maxother = 100

    #def repr1(self, x, level):
    #    if hasattr(type(x), '__name__'):
    #        methodname = 'repr_' + '_'.join(type(x).__name__.split())
    #        if hasattr(self, methodname):
    #            return getattr(self, methodname)(x, level)
    #    return cram(stripid(repr(x)), self.maxother)

    def repr_string(self, x, level):
        # Truncate, then decide whether a raw-string display is clearer.
        test = cram(x, self.maxstring)
        testrepr = repr(test)
        if '\\' in test and '\\' not in replace(testrepr, r'\\', ''):
            # Backslashes are only literal in the string and are never
            # needed to make any special characters, so show a raw string.
            return 'r' + testrepr[0] + test + testrepr[0]
        return testrepr

    repr_str = repr_string

    def repr_instance(self, x, level):
        try:
            return cram(stripid(repr(x)), self.maxstring)
        except:
            # NOTE(review): bare except -- repr() of an arbitrary object may
            # raise anything; fall back to a generic placeholder.
            return '<%s instance>' % x.__class__.__name__
class TextDoc(Doc):
    """Formatter class for text documentation."""

    # ------------------------------------------- text formatting utilities

    # Shared safe-repr instance; `repr` is its bound method.
    _repr_instance = TextRepr()
    repr = _repr_instance.repr

    def bold(self, text):
        """Format a string in bold by overstriking (char, backspace, char)."""
        return ''.join(ch + '\b' + ch for ch in text)

    def indent(self, text, prefix='    '):
        """Indent text by prepending a given prefix to each line."""
        if not text: return ''
        lines = [prefix + line for line in text.split('\n')]
        if lines: lines[-1] = lines[-1].rstrip()
        return '\n'.join(lines)

    def section(self, title, contents):
        """Format a section with a given heading."""
        clean_contents = self.indent(contents).rstrip()
        return self.bold(title) + '\n' + clean_contents + '\n\n'

    # ---------------------------------------------- type-specific routines

    def formattree(self, tree, modname, parent=None, prefix=''):
        """Render in text a class tree as returned by inspect.getclasstree()."""
        result = ''
        for entry in tree:
            if type(entry) is type(()):
                # (class, bases) tuple: one line, with bases in parens.
                c, bases = entry
                result = result + prefix + classname(c, modname)
                if bases and bases != (parent,):
                    parents = (classname(c, modname) for c in bases)
                    result = result + '(%s)' % ', '.join(parents)
                result = result + '\n'
            elif type(entry) is type([]):
                # Nested list: subclasses of the previous tuple's class.
                result = result + self.formattree(
                    entry, modname, c, prefix + '    ')
        return result

    def docmodule(self, object, name=None, mod=None):
        """Produce text documentation for a given module object."""
        name = object.__name__ # ignore the passed-in name
        synop, desc = splitdoc(getdoc(object))
        result = self.section('NAME', name + (synop and ' - ' + synop))
        all = getattr(object, '__all__', None)
        docloc = self.getdocloc(object)
        if docloc is not None:
            result = result + self.section('MODULE REFERENCE', docloc + """

The following documentation is automatically generated from the Python
source files.  It may be incomplete, incorrect or include features that
are considered implementation detail and may vary between Python
implementations.  When in doubt, consult the module reference at the
location listed above.
""")

        if desc:
            result = result + self.section('DESCRIPTION', desc)

        classes = []
        for key, value in inspect.getmembers(object, inspect.isclass):
            # if __all__ exists, believe it.  Otherwise use old heuristic.
            if (all is not None
                or (inspect.getmodule(value) or object) is object):
                if visiblename(key, all, object):
                    classes.append((key, value))
        funcs = []
        for key, value in inspect.getmembers(object, inspect.isroutine):
            # if __all__ exists, believe it.  Otherwise use old heuristic.
            if (all is not None or
                inspect.isbuiltin(value) or inspect.getmodule(value) is object):
                if visiblename(key, all, object):
                    funcs.append((key, value))
        data = []
        for key, value in inspect.getmembers(object, isdata):
            if visiblename(key, all, object):
                data.append((key, value))

        modpkgs = []
        modpkgs_names = set()
        if hasattr(object, '__path__'):
            for importer, modname, ispkg in pkgutil.iter_modules(object.__path__):
                modpkgs_names.add(modname)
                if ispkg:
                    modpkgs.append(modname + ' (package)')
                else:
                    modpkgs.append(modname)

            modpkgs.sort()
            result = result + self.section(
                'PACKAGE CONTENTS', '\n'.join(modpkgs))

        # Detect submodules as sometimes created by C extensions
        submodules = []
        for key, value in inspect.getmembers(object, inspect.ismodule):
            if value.__name__.startswith(name + '.') and key not in modpkgs_names:
                submodules.append(key)
        if submodules:
            submodules.sort()
            result = result + self.section(
                'SUBMODULES', '\n'.join(submodules))

        if classes:
            classlist = [value for key, value in classes]
            contents = [self.formattree(
                inspect.getclasstree(classlist, 1), name)]
            for key, value in classes:
                contents.append(self.document(value, key, name))
            result = result + self.section('CLASSES', '\n'.join(contents))

        if funcs:
            contents = []
            for key, value in funcs:
                contents.append(self.document(value, key, name))
            result = result + self.section('FUNCTIONS', '\n'.join(contents))

        if data:
            contents = []
            for key, value in data:
                contents.append(self.docother(value, key, name, maxlen=70))
            result = result + self.section('DATA', '\n'.join(contents))

        if hasattr(object, '__version__'):
            version = str(object.__version__)
            # Strip RCS "$Revision: ... $" keyword wrappers.
            if version[:11] == '$' + 'Revision: ' and version[-1:] == '$':
                version = version[11:-1].strip()
            result = result + self.section('VERSION', version)
        if hasattr(object, '__date__'):
            result = result + self.section('DATE', str(object.__date__))
        if hasattr(object, '__author__'):
            result = result + self.section('AUTHOR', str(object.__author__))
        if hasattr(object, '__credits__'):
            result = result + self.section('CREDITS', str(object.__credits__))
        try:
            file = inspect.getabsfile(object)
        except TypeError:
            file = '(built-in)'
        result = result + self.section('FILE', file)
        return result

    def docclass(self, object, name=None, mod=None, *ignored):
        """Produce text documentation for a given class object.

        Walks the MRO, emitting each attribute group under the class that
        defines it, separated by horizontal rules.
        """
        realname = object.__name__
        name = name or realname
        bases = object.__bases__

        def makename(c, m=object.__module__):
            return classname(c, m)

        if name == realname:
            title = 'class ' + self.bold(realname)
        else:
            title = self.bold(name) + ' = class ' + realname
        if bases:
            parents = map(makename, bases)
            title = title + '(%s)' % ', '.join(parents)

        doc = getdoc(object)
        contents = doc and [doc + '\n'] or []
        push = contents.append

        # List the mro, if non-trivial.
        mro = deque(inspect.getmro(object))
        if len(mro) > 2:
            push("Method resolution order:")
            for base in mro:
                push('    ' + makename(base))
            push('')

        # Cute little class to pump out a horizontal rule between sections.
        class HorizontalRule:
            def __init__(self):
                self.needone = 0
            def maybe(self):
                if self.needone:
                    push('-' * 70)
                self.needone = 1
        hr = HorizontalRule()

        def spill(msg, attrs, predicate):
            # Document every attr matching `predicate`; return the rest.
            ok, attrs = _split_list(attrs, predicate)
            if ok:
                hr.maybe()
                push(msg)
                for name, kind, homecls, value in ok:
                    try:
                        value = getattr(object, name)
                    except Exception:
                        # Some descriptors may meet a failure in their __get__.
                        # (bug #1785)
                        push(self._docdescriptor(name, value, mod))
                    else:
                        push(self.document(value,
                                        name, mod, object))
            return attrs

        def spilldescriptors(msg, attrs, predicate):
            ok, attrs = _split_list(attrs, predicate)
            if ok:
                hr.maybe()
                push(msg)
                for name, kind, homecls, value in ok:
                    push(self._docdescriptor(name, value, mod))
            return attrs

        def spilldata(msg, attrs, predicate):
            ok, attrs = _split_list(attrs, predicate)
            if ok:
                hr.maybe()
                push(msg)
                for name, kind, homecls, value in ok:
                    if callable(value) or inspect.isdatadescriptor(value):
                        doc = getdoc(value)
                    else:
                        doc = None
                    push(self.docother(getattr(object, name),
                                       name, mod, maxlen=70, doc=doc) + '\n')
            return attrs

        attrs = [(name, kind, cls, value)
                 for name, kind, cls, value in classify_class_attrs(object)
                 if visiblename(name, obj=object)]

        while attrs:
            if mro:
                thisclass = mro.popleft()
            else:
                thisclass = attrs[0][2]
            attrs, inherited = _split_list(attrs, lambda t: t[2] is thisclass)

            if thisclass is builtins.object:
                # Suppress the noise of object's own attributes.
                attrs = inherited
                continue
            elif thisclass is object:
                tag = "defined here"
            else:
                tag = "inherited from %s" % classname(thisclass,
                                                      object.__module__)

            # Sort attrs by name.
            attrs.sort()

            # Pump out the attrs, segregated by kind.
            attrs = spill("Methods %s:\n" % tag, attrs,
                          lambda t: t[1] == 'method')
            attrs = spill("Class methods %s:\n" % tag, attrs,
                          lambda t: t[1] == 'class method')
            attrs = spill("Static methods %s:\n" % tag, attrs,
                          lambda t: t[1] == 'static method')
            attrs = spilldescriptors("Data descriptors %s:\n" % tag, attrs,
                                     lambda t: t[1] == 'data descriptor')
            attrs = spilldata("Data and other attributes %s:\n" % tag, attrs,
                              lambda t: t[1] == 'data')

            assert attrs == []
            attrs = inherited

        contents = '\n'.join(contents)
        if not contents:
            return title + '\n'
        return title + '\n' + self.indent(contents.rstrip(), ' | ') + '\n'

    def formatvalue(self, object):
        """Format an argument default value as text."""
        return '=' + self.repr(object)

    def docroutine(self, object, name=None, mod=None, cl=None):
        """Produce text documentation for a function or method object."""
        realname = object.__name__
        name = name or realname
        note = ''
        skipdocs = 0
        if inspect.ismethod(object):
            imclass = object.__self__.__class__
            if cl:
                if imclass is not cl:
                    note = ' from ' + classname(imclass, mod)
            else:
                if object.__self__ is not None:
                    note = ' method of %s instance' % classname(
                        object.__self__.__class__, mod)
                else:
                    note = ' unbound %s method' % classname(imclass,mod)
            object = object.__func__

        if name == realname:
            title = self.bold(realname)
        else:
            # An alias to a method defined on the same class: skip its docs.
            if (cl and realname in cl.__dict__ and
                cl.__dict__[realname] is object):
                skipdocs = 1
            title = self.bold(name) + ' = ' + realname
        if inspect.isfunction(object):
            args, varargs, varkw, defaults, kwonlyargs, kwdefaults, ann = \
                inspect.getfullargspec(object)
            argspec = inspect.formatargspec(
                args, varargs, varkw, defaults, kwonlyargs, kwdefaults, ann,
                formatvalue=self.formatvalue,
                formatannotation=inspect.formatannotationrelativeto(object))
            if realname == '<lambda>':
                title = self.bold(name) + ' lambda '
                # XXX lambda's won't usually have func_annotations['return']
                # since the syntax doesn't support but it is possible.
                # So removing parentheses isn't truly safe.
                argspec = argspec[1:-1] # remove parentheses
        else:
            argspec = '(...)'
        decl = title + argspec + note

        if skipdocs:
            return decl + '\n'
        else:
            doc = getdoc(object) or ''
            return decl + '\n' + (doc and self.indent(doc).rstrip() + '\n')

    def _docdescriptor(self, name, value, mod):
        """Render a descriptor's name and docstring as plain text."""
        results = []
        push = results.append

        if name:
            push(self.bold(name))
            push('\n')
        doc = getdoc(value) or ''
        if doc:
            push(self.indent(doc))
            push('\n')
        return ''.join(results)

    def docproperty(self, object, name=None, mod=None, cl=None):
        """Produce text documentation for a property."""
        return self._docdescriptor(name, object, mod)

    def docdata(self, object, name=None, mod=None, cl=None):
        """Produce text documentation for a data descriptor."""
        return self._docdescriptor(name, object, mod)

    def docother(self, object, name=None, mod=None, parent=None, maxlen=None, doc=None):
        """Produce text documentation for a data object."""
        repr = self.repr(object)
        if maxlen:
            # Truncate the repr so "name = repr" fits within maxlen columns.
            line = (name and name + ' = ' or '') + repr
            chop = maxlen - len(line)
            if chop < 0: repr = repr[:chop] + '...'
        line = (name and self.bold(name) + ' = ' or '') + repr
        if doc is not None:
            line += '\n' + self.indent(str(doc))
        return line
class _PlainTextDoc(TextDoc):
    """Subclass of TextDoc which overrides string styling."""

    def bold(self, text):
        """Return *text* unchanged: plain output carries no bold effect."""
        return text
# --------------------------------------------------------- user interfaces
def pager(text):
    """The first time this is called, determine what kind of pager to use."""
    global pager
    # Self-replacing stub: pick the real pager once, install it in place
    # of this function, then delegate.  Later calls skip detection.
    pager = getpager()
    pager(text)
def getpager():
    """Decide what method to use for paging through text.

    Returns a callable taking the text to display.  The choice depends on
    whether stdin/stdout are ttys, the PAGER and TERM environment
    variables, the platform, and which external pagers are available.
    """
    if not hasattr(sys.stdout, "isatty"):
        return plainpager
    if not sys.stdin.isatty() or not sys.stdout.isatty():
        return plainpager
    if 'PAGER' in os.environ:
        if sys.platform == 'win32': # pipes completely broken in Windows
            return lambda text: tempfilepager(plain(text), os.environ['PAGER'])
        elif os.environ.get('TERM') in ('dumb', 'emacs'):
            return lambda text: pipepager(plain(text), os.environ['PAGER'])
        else:
            return lambda text: pipepager(text, os.environ['PAGER'])
    if os.environ.get('TERM') in ('dumb', 'emacs'):
        return plainpager
    if sys.platform == 'win32' or sys.platform.startswith('os2'):
        return lambda text: tempfilepager(plain(text), 'more <')
    # os.system returns 0 when the candidate pager exists and ran.
    if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0:
        return lambda text: pipepager(text, 'less')

    import tempfile
    (fd, filename) = tempfile.mkstemp()
    os.close(fd)
    try:
        # Probe for 'more' by running it on an empty temp file.
        if hasattr(os, 'system') and os.system('more "%s"' % filename) == 0:
            return lambda text: pipepager(text, 'more')
        else:
            return ttypager
    finally:
        os.unlink(filename)
def plain(text):
    """Remove boldface formatting from text.

    Bold text is encoded as overstriking: each character followed by a
    backspace and the character again.  Dropping every (char, backspace)
    pair leaves the plain characters.
    """
    overstrike = re.compile('.\b')
    return overstrike.sub('', text)
def pipepager(text, cmd):
    """Page through text by feeding it to another program."""
    pipe = os.popen(cmd, 'w')
    try:
        pipe.write(text)
        pipe.close()
    except IOError:
        pass # Ignore broken pipes caused by quitting the pager program.
def tempfilepager(text, cmd):
    """Page through text by invoking a program on a temporary file."""
    import tempfile
    # Fix: mkstemp creates the file atomically, avoiding the race/security
    # hole of the deprecated tempfile.mktemp; try/finally also guarantees
    # the temp file is removed even if writing the text fails.
    fd, filename = tempfile.mkstemp()
    try:
        with os.fdopen(fd, 'w') as file:
            file.write(text)
        os.system(cmd + ' "' + filename + '"')
    finally:
        os.unlink(filename)
def ttypager(text):
    """Page through text on a text terminal.

    Uses cbreak single-key input when the tty module is usable, otherwise
    falls back to line-buffered input.  Enter advances one line, space one
    page, b/B/<esc> goes back a page, q/Q quits.
    """
    lines = plain(text).split('\n')
    try:
        import tty
        fd = sys.stdin.fileno()
        old = tty.tcgetattr(fd)
        tty.setcbreak(fd)
        getchar = lambda: sys.stdin.read(1)
    except (ImportError, AttributeError):
        tty = None
        getchar = lambda: sys.stdin.readline()[:-1][:1]

    try:
        # Fix: os.environ values are strings, so the original
        # os.environ.get('LINES', 25) - 1 raised TypeError whenever LINES
        # was actually set.  Coerce to int before the arithmetic.
        r = inc = int(os.environ.get('LINES', 25)) - 1
        sys.stdout.write('\n'.join(lines[:inc]) + '\n')
        while lines[r:]:
            sys.stdout.write('-- more --')
            sys.stdout.flush()
            c = getchar()

            if c in ('q', 'Q'):
                sys.stdout.write('\r          \r')
                break
            elif c in ('\r', '\n'):
                sys.stdout.write('\r          \r' + lines[r] + '\n')
                r = r + 1
                continue
            if c in ('b', 'B', '\x1b'):
                r = r - inc - inc
                if r < 0: r = 0
            sys.stdout.write('\n' + '\n'.join(lines[r:r+inc]) + '\n')
            r = r + inc

    finally:
        # Restore the terminal mode we saved before entering cbreak.
        if tty:
            tty.tcsetattr(fd, tty.TCSAFLUSH, old)
def plainpager(text):
    """Simply print unformatted text.  This is the ultimate fallback."""
    stripped = plain(text)
    sys.stdout.write(stripped)
def describe(thing):
    """Produce a short description of the given thing.

    The checks are ordered from most to least specific; the fallback is
    simply the name of the object's type.
    """
    if inspect.ismodule(thing):
        modname = thing.__name__
        if modname in sys.builtin_module_names:
            return 'built-in module %s' % modname
        if hasattr(thing, '__path__'):
            return 'package %s' % modname
        return 'module %s' % modname
    if inspect.isbuiltin(thing):
        return 'built-in function %s' % thing.__name__
    if inspect.isgetsetdescriptor(thing):
        owner = thing.__objclass__
        return 'getset descriptor %s.%s.%s' % (
            owner.__module__, owner.__name__, thing.__name__)
    if inspect.ismemberdescriptor(thing):
        owner = thing.__objclass__
        return 'member descriptor %s.%s.%s' % (
            owner.__module__, owner.__name__, thing.__name__)
    if inspect.isclass(thing):
        return 'class %s' % thing.__name__
    if inspect.isfunction(thing):
        return 'function %s' % thing.__name__
    if inspect.ismethod(thing):
        return 'method %s' % thing.__name__
    return type(thing).__name__
def locate(path, forceload=0):
    """Locate an object by name or dotted path, importing as necessary.

    Returns None when the trailing attribute walk fails.
    """
    parts = [part for part in path.split('.') if part]
    module, n = None, 0
    # Import the longest importable prefix of the dotted path.
    while n < len(parts):
        nextmodule = safeimport('.'.join(parts[:n+1]), forceload)
        if nextmodule: module, n = nextmodule, n + 1
        else: break
    if module:
        object = module
    else:
        # Nothing importable: resolve the whole path against builtins.
        object = builtins
    # Walk the remaining path components as attributes.
    for part in parts[n:]:
        try:
            object = getattr(object, part)
        except AttributeError:
            return None
    return object
# --------------------------------------- interactive interpreter interface
# Shared renderer instances used by the convenience functions below
# (render_doc, doc, writedoc, ...).
text = TextDoc()
plaintext = _PlainTextDoc()
html = HTMLDoc()
def resolve(thing, forceload=0):
    """Given an object or a path to an object, get the object and its name."""
    if not isinstance(thing, str):
        # Already an object: its __name__ (when it is a string) is the name.
        name = getattr(thing, '__name__', None)
        if not isinstance(name, str):
            name = None
        return thing, name
    # A dotted path: import/locate it, failing loudly when nothing is found.
    object = locate(thing, forceload)
    if not object:
        raise ImportError('no Python documentation found for %r' % thing)
    return object, thing
def render_doc(thing, title='Python Library Documentation: %s', forceload=0,
               renderer=None):
    """Render text documentation, given an object or a path to an object.

    `renderer` defaults to the shared TextDoc instance.
    """
    if renderer is None:
        renderer = text
    object, name = resolve(thing, forceload)
    desc = describe(object)
    module = inspect.getmodule(object)
    # Qualify the description with the containing module/package.
    if name and '.' in name:
        desc += ' in ' + name[:name.rfind('.')]
    elif module and module is not object:
        desc += ' in module ' + module.__name__

    if not (inspect.ismodule(object) or
              inspect.isclass(object) or
              inspect.isroutine(object) or
              inspect.isgetsetdescriptor(object) or
              inspect.ismemberdescriptor(object) or
              isinstance(object, property)):
        # If the passed object is a piece of data or an instance,
        # document its available methods instead of its value.
        object = type(object)
        desc += ' object'
    return title % desc + '\n\n' + renderer.document(object, name)
def doc(thing, title='Python Library Documentation: %s', forceload=0,
        output=None):
    """Display text documentation, given an object or a path to an object.

    With no `output`, the rendered text goes through the pager; otherwise
    an unstyled rendering is written to `output`.
    """
    try:
        if output is not None:
            output.write(render_doc(thing, title, forceload, plaintext))
        else:
            pager(render_doc(thing, title, forceload))
    except (ImportError, ErrorDuringImport) as value:
        print(value)
def writedoc(thing, forceload=0):
    """Write HTML documentation to a file in the current directory.

    The output file is named after the resolved object: '<name>.html'.
    """
    try:
        object, name = resolve(thing, forceload)
        page = html.page(describe(object), html.document(object, name))
        # Fix: with-statement guarantees the file handle is closed even if
        # the write raises (the explicit close could be skipped before).
        with open(name + '.html', 'w', encoding='utf-8') as file:
            file.write(page)
        print('wrote', name + '.html')
    except (ImportError, ErrorDuringImport) as value:
        print(value)
def writedocs(dir, pkgpath='', done=None):
    """Write out HTML documentation for all modules in a directory tree.

    `done` maps module names already documented; it is consulted and
    updated so each module is written at most once across recursive calls.
    """
    if done is None: done = {}
    for importer, modname, ispkg in pkgutil.walk_packages([dir], pkgpath):
        # Fix: `done` was initialized but never consulted, so a module
        # reachable more than once was documented repeatedly.  Track and
        # skip names we have already written.
        if modname not in done:
            done[modname] = 1
            writedoc(modname)
class Helper:
# These dictionaries map a topic name to either an alias, or a tuple
# (label, seealso-items). The "label" is the label of the corresponding
# section in the .rst file under Doc/ and an index into the dictionary
# in pydoc_data/topics.py.
#
# CAUTION: if you change one of these dictionaries, be sure to adapt the
# list of needed labels in Doc/tools/sphinxext/pyspecific.py and
# regenerate the pydoc_data/topics.py file by running
# make pydoc-topics
# in Doc/ and copying the output file into the Lib/ directory.
keywords = {
'False': '',
'None': '',
'True': '',
'and': 'BOOLEAN',
'as': 'with',
'assert': ('assert', ''),
'break': ('break', 'while for'),
'class': ('class', 'CLASSES SPECIALMETHODS'),
'continue': ('continue', 'while for'),
'def': ('function', ''),
'del': ('del', 'BASICMETHODS'),
'elif': 'if',
'else': ('else', 'while for'),
'except': 'try',
'finally': 'try',
'for': ('for', 'break continue while'),
'from': 'import',
'global': ('global', 'nonlocal NAMESPACES'),
'if': ('if', 'TRUTHVALUE'),
'import': ('import', 'MODULES'),
'in': ('in', 'SEQUENCEMETHODS'),
'is': 'COMPARISON',
'lambda': ('lambda', 'FUNCTIONS'),
'nonlocal': ('nonlocal', 'global NAMESPACES'),
'not': 'BOOLEAN',
'or': 'BOOLEAN',
'pass': ('pass', ''),
'raise': ('raise', 'EXCEPTIONS'),
'return': ('return', 'FUNCTIONS'),
'try': ('try', 'EXCEPTIONS'),
'while': ('while', 'break continue if TRUTHVALUE'),
'with': ('with', 'CONTEXTMANAGERS EXCEPTIONS yield'),
'yield': ('yield', ''),
}
# Either add symbols to this dictionary or to the symbols dictionary
# directly: Whichever is easier. They are merged later.
_symbols_inverse = {
'STRINGS' : ("'", "'''", "r'", "b'", '"""', '"', 'r"', 'b"'),
'OPERATORS' : ('+', '-', '*', '**', '/', '//', '%', '<<', '>>', '&',
'|', '^', '~', '<', '>', '<=', '>=', '==', '!=', '<>'),
'COMPARISON' : ('<', '>', '<=', '>=', '==', '!=', '<>'),
'UNARY' : ('-', '~'),
'AUGMENTEDASSIGNMENT' : ('+=', '-=', '*=', '/=', '%=', '&=', '|=',
'^=', '<<=', '>>=', '**=', '//='),
'BITWISE' : ('<<', '>>', '&', '|', '^', '~'),
'COMPLEX' : ('j', 'J')
}
symbols = {
'%': 'OPERATORS FORMATTING',
'**': 'POWER',
',': 'TUPLES LISTS FUNCTIONS',
'.': 'ATTRIBUTES FLOAT MODULES OBJECTS',
'...': 'ELLIPSIS',
':': 'SLICINGS DICTIONARYLITERALS',
'@': 'def class',
'\\': 'STRINGS',
'_': 'PRIVATENAMES',
'__': 'PRIVATENAMES SPECIALMETHODS',
'`': 'BACKQUOTES',
'(': 'TUPLES FUNCTIONS CALLS',
')': 'TUPLES FUNCTIONS CALLS',
'[': 'LISTS SUBSCRIPTS SLICINGS',
']': 'LISTS SUBSCRIPTS SLICINGS'
}
for topic, symbols_ in _symbols_inverse.items():
for symbol in symbols_:
topics = symbols.get(symbol, topic)
if topic not in topics:
topics = topics + ' ' + topic
symbols[symbol] = topics
topics = {
'TYPES': ('types', 'STRINGS UNICODE NUMBERS SEQUENCES MAPPINGS '
'FUNCTIONS CLASSES MODULES FILES inspect'),
'STRINGS': ('strings', 'str UNICODE SEQUENCES STRINGMETHODS '
'FORMATTING TYPES'),
'STRINGMETHODS': ('string-methods', 'STRINGS FORMATTING'),
'FORMATTING': ('formatstrings', 'OPERATORS'),
'UNICODE': ('strings', 'encodings unicode SEQUENCES STRINGMETHODS '
'FORMATTING TYPES'),
'NUMBERS': ('numbers', 'INTEGER FLOAT COMPLEX TYPES'),
'INTEGER': ('integers', 'int range'),
'FLOAT': ('floating', 'float math'),
'COMPLEX': ('imaginary', 'complex cmath'),
'SEQUENCES': ('typesseq', 'STRINGMETHODS FORMATTING range LISTS'),
'MAPPINGS': 'DICTIONARIES',
'FUNCTIONS': ('typesfunctions', 'def TYPES'),
'METHODS': ('typesmethods', 'class def CLASSES TYPES'),
'CODEOBJECTS': ('bltin-code-objects', 'compile FUNCTIONS TYPES'),
'TYPEOBJECTS': ('bltin-type-objects', 'types TYPES'),
'FRAMEOBJECTS': 'TYPES',
'TRACEBACKS': 'TYPES',
'NONE': ('bltin-null-object', ''),
'ELLIPSIS': ('bltin-ellipsis-object', 'SLICINGS'),
'FILES': ('bltin-file-objects', ''),
'SPECIALATTRIBUTES': ('specialattrs', ''),
'CLASSES': ('types', 'class SPECIALMETHODS PRIVATENAMES'),
'MODULES': ('typesmodules', 'import'),
'PACKAGES': 'import',
'EXPRESSIONS': ('operator-summary', 'lambda or and not in is BOOLEAN '
'COMPARISON BITWISE SHIFTING BINARY FORMATTING POWER '
'UNARY ATTRIBUTES SUBSCRIPTS SLICINGS CALLS TUPLES '
'LISTS DICTIONARIES'),
'OPERATORS': 'EXPRESSIONS',
'PRECEDENCE': 'EXPRESSIONS',
'OBJECTS': ('objects', 'TYPES'),
'SPECIALMETHODS': ('specialnames', 'BASICMETHODS ATTRIBUTEMETHODS '
'CALLABLEMETHODS SEQUENCEMETHODS MAPPINGMETHODS '
'NUMBERMETHODS CLASSES'),
'BASICMETHODS': ('customization', 'hash repr str SPECIALMETHODS'),
'ATTRIBUTEMETHODS': ('attribute-access', 'ATTRIBUTES SPECIALMETHODS'),
'CALLABLEMETHODS': ('callable-types', 'CALLS SPECIALMETHODS'),
'SEQUENCEMETHODS': ('sequence-types', 'SEQUENCES SEQUENCEMETHODS '
'SPECIALMETHODS'),
'MAPPINGMETHODS': ('sequence-types', 'MAPPINGS SPECIALMETHODS'),
'NUMBERMETHODS': ('numeric-types', 'NUMBERS AUGMENTEDASSIGNMENT '
'SPECIALMETHODS'),
'EXECUTION': ('execmodel', 'NAMESPACES DYNAMICFEATURES EXCEPTIONS'),
'NAMESPACES': ('naming', 'global nonlocal ASSIGNMENT DELETION DYNAMICFEATURES'),
'DYNAMICFEATURES': ('dynamic-features', ''),
'SCOPING': 'NAMESPACES',
'FRAMES': 'NAMESPACES',
'EXCEPTIONS': ('exceptions', 'try except finally raise'),
'CONVERSIONS': ('conversions', ''),
'IDENTIFIERS': ('identifiers', 'keywords SPECIALIDENTIFIERS'),
'SPECIALIDENTIFIERS': ('id-classes', ''),
'PRIVATENAMES': ('atom-identifiers', ''),
'LITERALS': ('atom-literals', 'STRINGS NUMBERS TUPLELITERALS '
'LISTLITERALS DICTIONARYLITERALS'),
'TUPLES': 'SEQUENCES',
'TUPLELITERALS': ('exprlists', 'TUPLES LITERALS'),
'LISTS': ('typesseq-mutable', 'LISTLITERALS'),
'LISTLITERALS': ('lists', 'LISTS LITERALS'),
'DICTIONARIES': ('typesmapping', 'DICTIONARYLITERALS'),
'DICTIONARYLITERALS': ('dict', 'DICTIONARIES LITERALS'),
'ATTRIBUTES': ('attribute-references', 'getattr hasattr setattr ATTRIBUTEMETHODS'),
'SUBSCRIPTS': ('subscriptions', 'SEQUENCEMETHODS'),
'SLICINGS': ('slicings', 'SEQUENCEMETHODS'),
'CALLS': ('calls', 'EXPRESSIONS'),
'POWER': ('power', 'EXPRESSIONS'),
'UNARY': ('unary', 'EXPRESSIONS'),
'BINARY': ('binary', 'EXPRESSIONS'),
'SHIFTING': ('shifting', 'EXPRESSIONS'),
'BITWISE': ('bitwise', 'EXPRESSIONS'),
'COMPARISON': ('comparisons', 'EXPRESSIONS BASICMETHODS'),
'BOOLEAN': ('booleans', 'EXPRESSIONS TRUTHVALUE'),
'ASSERTION': 'assert',
'ASSIGNMENT': ('assignment', 'AUGMENTEDASSIGNMENT'),
'AUGMENTEDASSIGNMENT': ('augassign', 'NUMBERMETHODS'),
'DELETION': 'del',
'RETURNING': 'return',
'IMPORTING': 'import',
'CONDITIONAL': 'if',
'LOOPING': ('compound', 'for while break continue'),
'TRUTHVALUE': ('truth', 'if while and or not BASICMETHODS'),
'DEBUGGING': ('debugger', 'pdb'),
'CONTEXTMANAGERS': ('context-managers', 'with'),
}
def __init__(self, input=None, output=None):
    """Initialize the Helper.

    input:  file-like object requests are read from (None -> sys.stdin).
    output: file-like object help text is written to (None -> sys.stdout).
    """
    self._input = input
    self._output = output
    # FIXME(brython): upstream pydoc exposes input/output as lazy
    # properties (see the commented-out lines below); this port resolves
    # the fallback to sys.stdin/stdout eagerly at construction time.
    self.input = self._input or sys.stdin
    self.output = self._output or sys.stdout
    #input = property(lambda self: self._input or sys.stdin)
    #output = property(lambda self: self._output or sys.stdout)
def __repr__(self):
    """Make bare interactive ``help`` behave like ``help()``.

    When evaluated directly at the interactive prompt (caller frame name
    '?'), repr() triggers a full interactive session instead of printing
    a representation.
    """
    if inspect.stack()[1][3] == '?':
        self()
        return ''
    return '<pydoc.Helper instance>'
# Sentinel default: distinguishes "no request given" from help(None).
_GoInteractive = object()
def __call__(self, request=_GoInteractive):
    """Show help for *request*, or start an interactive session if omitted."""
    if request is not self._GoInteractive:
        self.help(request)
    else:
        self.intro()
        self.interact()
        self.output.write('''
You are now leaving help and returning to the Python interpreter.
If you want to ask for help on a particular object directly from the
interpreter, you can type "help(object)". Executing "help('string')"
has the same effect as typing a particular string at the help> prompt.
''')
def interact(self):
    """Run the ``help>`` read-eval loop until EOF, interrupt or quit."""
    self.output.write('\n')
    while True:
        try:
            request = self.getline('help> ')
            if not request: break
        except (KeyboardInterrupt, EOFError):
            break
        # Strip quote characters so help> "topic" and help> 'topic'
        # behave like help> topic.
        request = replace(request, '"', '', "'", '').strip()
        if request.lower() in ('q', 'quit'): break
        self.help(request)
def getline(self, prompt):
    """Read one line, using input() when appropriate."""
    # A custom input stream gets an explicit prompt written to the
    # output stream; the real stdin goes through input(), which
    # handles the prompt (and readline editing) itself.
    if self.input is not sys.stdin:
        self.output.write(prompt)
        self.output.flush()
        return self.input.readline()
    return input(prompt)
def help(self, request):
    """Dispatch *request* (a string command/topic or any object) to the
    appropriate renderer and write the result to self.output.
    """
    if type(request) is type(''):
        request = request.strip()
        # Fixed commands first, then symbol/keyword/topic tables,
        # finally treat the string as an object path.
        if request == 'help': self.intro()
        elif request == 'keywords': self.listkeywords()
        elif request == 'symbols': self.listsymbols()
        elif request == 'topics': self.listtopics()
        elif request == 'modules': self.listmodules()
        elif request[:8] == 'modules ':
            self.listmodules(request.split()[1])
        elif request in self.symbols: self.showsymbol(request)
        elif request in ['True', 'False', 'None']:
            # special case these keywords since they are objects too
            doc(eval(request), 'Help on %s:')
        elif request in self.keywords: self.showtopic(request)
        elif request in self.topics: self.showtopic(request)
        elif request: doc(request, 'Help on %s:', output=self._output)
    elif isinstance(request, Helper): self()
    else: doc(request, 'Help on %s:', output=self._output)
    self.output.write('\n')
def intro(self):
    """Write the interactive-help welcome banner to self.output."""
    # BUG FIX: the original interpolated sys.version[:3], which truncates
    # two-digit minor versions ("3.10" would render as "3.1") in both the
    # banner and the tutorial URL.  Build the string from version_info.
    version = '%d.%d' % sys.version_info[:2]
    self.output.write('''
Welcome to Python %s! This is the interactive help utility.
If this is your first time using Python, you should definitely check out
the tutorial on the Internet at http://docs.python.org/%s/tutorial/.
Enter the name of any module, keyword, or topic to get help on writing
Python programs and using Python modules. To quit this help utility and
return to the interpreter, just type "quit".
To get a list of available modules, keywords, or topics, type "modules",
"keywords", or "topics". Each module also comes with a one-line summary
of what it does; to list the modules whose summaries contain a given word
such as "spam", type "modules spam".
''' % (version, version))
def list(self, items, columns=4, width=80):
    """Write *items* to self.output, sorted, in column-major order.

    columns: number of columns; width: total line width in characters.
    """
    # sorted() already returns a new list, so the extra list() call the
    # original made was redundant (and broke if the builtin was shadowed,
    # e.g. by this very method's name at module level).
    items = sorted(items)
    colw = width // columns
    rows = (len(items) + columns - 1) // columns
    for row in range(rows):
        for col in range(columns):
            # Column-major: consecutive items run down a column.
            i = col * rows + row
            if i < len(items):
                self.output.write(items[i])
                if col < columns - 1:
                    # Pad to the column width (at least one space).
                    self.output.write(' ' + ' ' * (colw - 1 - len(items[i])))
        self.output.write('\n')
def listkeywords(self):
    """Write the table of Python keywords to self.output."""
    self.output.write('''
Here is a list of the Python keywords. Enter any keyword to get more help.
''')
    self.list(self.keywords.keys())
def listsymbols(self):
    """Write the table of documented punctuation symbols to self.output."""
    self.output.write('''
Here is a list of the punctuation symbols which Python assigns special meaning
to. Enter any symbol to get more help.
''')
    self.list(self.symbols.keys())
def listtopics(self):
    """Write the table of available help topics to self.output."""
    self.output.write('''
Here is a list of available topics. Enter any topic name to get more help.
''')
    self.list(self.topics.keys())
def showtopic(self, topic, more_xrefs=''):
    """Page the documentation for *topic* (a topic or keyword name).

    more_xrefs: extra cross-reference names appended to the topic's own
    cross references before they are listed.
    """
    try:
        import pydoc_data.topics
    except ImportError:
        self.output.write('''
Sorry, topic and keyword documentation is not available because the
module "pydoc_data.topics" could not be found.
''')
        return
    # The tables map a name either to an alias string or to a
    # (label, xrefs) pair; keywords fall back to the topics table.
    target = self.topics.get(topic, self.keywords.get(topic))
    if not target:
        self.output.write('no documentation found for %s\n' % repr(topic))
        return
    if type(target) is type(''):
        # A plain string is an alias for another topic; follow it.
        return self.showtopic(target, more_xrefs)
    label, xrefs = target
    try:
        doc = pydoc_data.topics.topics[label]
    except KeyError:
        self.output.write('no documentation found for %s\n' % repr(topic))
        return
    pager(doc.strip() + '\n')
    if more_xrefs:
        xrefs = (xrefs or '') + ' ' + more_xrefs
    if xrefs:
        # Word-wrap the "Related help topics" line via the (deprecated)
        # formatter module's DumbWriter.
        import formatter
        buffer = io.StringIO()
        formatter.DumbWriter(buffer).send_flowing_data(
            'Related help topics: ' + ', '.join(xrefs.split()) + '\n')
        self.output.write('\n%s\n' % buffer.getvalue())
def _gettopic(self, topic, more_xrefs=''):
    """Return unbuffered tuple of (topic, xrefs).

    If an error occurs here, the exception is caught and displayed by
    the url handler.

    This function duplicates the showtopic method but returns its
    result directly so it can be formatted for display in an html page.
    """
    try:
        import pydoc_data.topics
    except ImportError:
        # No topic database: return the apology text as the "doc".
        return('''
Sorry, topic and keyword documentation is not available because the
module "pydoc_data.topics" could not be found.
''' , '')
    target = self.topics.get(topic, self.keywords.get(topic))
    if not target:
        raise ValueError('could not find topic')
    if isinstance(target, str):
        # A plain string is an alias for another topic; follow it.
        return self._gettopic(target, more_xrefs)
    label, xrefs = target
    doc = pydoc_data.topics.topics[label]
    if more_xrefs:
        xrefs = (xrefs or '') + ' ' + more_xrefs
    return doc, xrefs
def showsymbol(self, symbol):
    """Show help for the punctuation symbol *symbol*.

    The symbols table maps each symbol to a "topic [xref ...]" string;
    the first word names the topic, the rest are cross references.
    """
    entry = self.symbols[symbol]
    topic, _, xrefs = entry.partition(' ')
    self.showtopic(topic, xrefs)
def listmodules(self, key=''):
    """List available modules; with *key*, only those whose one-line
    summary contains it (delegated to apropos)."""
    if key:
        self.output.write('''
Here is a list of matching modules. Enter any module name to get more help.
''')
        apropos(key)
    else:
        self.output.write('''
Please wait a moment while I gather a list of all available modules...
''')
        modules = {}
        def callback(path, modname, desc, modules=modules):
            # Collapse package __init__ entries and keep only
            # top-level names (no dots) in the listing.
            if modname and modname[-9:] == '.__init__':
                modname = modname[:-9] + ' (package)'
            if modname.find('.') < 0:
                modules[modname] = 1
        def onerror(modname):
            callback(None, modname, None)
        ModuleScanner().run(callback, onerror=onerror)
        self.list(modules.keys())
        self.output.write('''
Enter any module name to get more help. Or, type "modules spam" to search
for modules whose descriptions contain the word "spam".
''')
# Module-level interactive Helper instance; cli() below dispatches through
# it (help.help(arg)).  Deliberately shadows the builtin name here.
help = Helper()
class Scanner:
    """A generic tree iterator.

    roots:    initial nodes to traverse, in order.
    children: callable returning a (consumable) list of a node's children.
    descendp: predicate deciding whether to descend into a child.

    next() yields children in depth-first order (roots themselves are
    not yielded) and returns None when the traversal is exhausted.
    """
    def __init__(self, roots, children, descendp):
        # Copy so the caller's root list is not consumed by pop(0).
        self.roots = list(roots)
        self.state = []
        self.children = children
        self.descendp = descendp

    def next(self):
        """Return the next node in depth-first order, or None when done."""
        # Find the innermost frame that still has pending children,
        # starting a fresh root traversal whenever the stack runs dry.
        while True:
            if not self.state:
                if not self.roots:
                    return None
                start = self.roots.pop(0)
                self.state = [(start, self.children(start))]
            node, pending = self.state[-1]
            if pending:
                break
            self.state.pop()
        child = pending.pop(0)
        if self.descendp(child):
            self.state.append((child, self.children(child)))
        return child
class ModuleScanner:
    """An interruptible scanner that searches module synopses."""

    def run(self, callback, key=None, completer=None, onerror=None):
        """Scan built-in and importable modules.

        callback(path, modname, desc) is invoked for every module found
        (or, when *key* is given, only for modules whose
        "name - description" line contains *key*, case-insensitively).
        completer() is called once at the end; onerror(modname) reports
        modules that could not be inspected.  Setting self.quit from
        another thread aborts the package walk.
        """
        if key: key = key.lower()
        self.quit = False
        seen = {}
        # Built-in modules first; their synopsis is the first line of
        # the module docstring.
        for modname in sys.builtin_module_names:
            if modname != '__main__':
                seen[modname] = 1
                if key is None:
                    callback(None, modname, '')
                else:
                    name = __import__(modname).__doc__ or ''
                    desc = name.split('\n')[0]
                    name = modname + ' - ' + desc
                    if name.lower().find(key) >= 0:
                        callback(None, modname, desc)

        # Then everything reachable on sys.path.
        for importer, modname, ispkg in pkgutil.walk_packages(onerror=onerror):
            if self.quit:
                break
            if key is None:
                callback(None, modname, '')
            else:
                try:
                    loader = importer.find_module(modname)
                except SyntaxError:
                    # raised by tests for bad coding cookies or BOM
                    continue
                if hasattr(loader, 'get_source'):
                    # Prefer reading the synopsis from source: avoids
                    # importing (and executing) the module.
                    try:
                        source = loader.get_source(modname)
                    except Exception:
                        if onerror:
                            onerror(modname)
                        continue
                    desc = source_synopsis(io.StringIO(source)) or ''
                    if hasattr(loader, 'get_filename'):
                        path = loader.get_filename(modname)
                    else:
                        path = None
                else:
                    # No source available: fall back to importing.
                    try:
                        module = loader.load_module(modname)
                    except ImportError:
                        if onerror:
                            onerror(modname)
                        continue
                    desc = (module.__doc__ or '').splitlines()[0]
                    path = getattr(module, '__file__', None)
                name = modname + ' - ' + desc
                if name.lower().find(key) >= 0:
                    callback(path, modname, desc)

        if completer:
            completer()
def apropos(key):
    """Print all the one-line module summaries that contain a substring."""
    def callback(path, modname, desc):
        # Report packages by their package name, not "pkg.__init__".
        if modname.endswith('.__init__'):
            modname = modname[:-9] + ' (package)'
        print(modname, desc and '- ' + desc)

    def onerror(modname):
        # Broken modules are simply skipped.
        pass

    with warnings.catch_warnings():
        warnings.filterwarnings('ignore')  # ignore problems during import
        ModuleScanner().run(callback, key, onerror=onerror)
# --------------------------------------- enhanced Web browser interface
def _start_server(urlhandler, port):
    """Start an HTTP server thread on a specific port.

    Start an HTML/text server thread, so HTML or text documents can be
    browsed dynamically and interactively with a Web browser.  Example use:

        >>> import time
        >>> import pydoc

    Define a URL handler.  To determine what the client is asking
    for, check the URL and content_type.

    Then get or generate some text or HTML code and return it.

        >>> def my_url_handler(url, content_type):
        ...     text = 'the URL sent was: (%s, %s)' % (url, content_type)
        ...     return text

    Start server thread on port 0.
    If you use port 0, the server will pick a random port number.
    You can then use serverthread.port to get the port number.

        >>> port = 0
        >>> serverthread = pydoc._start_server(my_url_handler, port)

    Check that the server is really started.  If it is, open browser
    and get first page.  Use serverthread.url as the starting page.

        >>> if serverthread.serving:
        ...     import webbrowser

    The next two lines are commented out so a browser doesn't open if
    doctest is run on this module.

        #...     webbrowser.open(serverthread.url)
        #True

    Let the server do its thing.  We just need to monitor its status.
    Use time.sleep so the loop doesn't hog the CPU.

        >>> starttime = time.time()
        >>> timeout = 1  # seconds

    This is a short timeout for testing purposes.

        >>> while serverthread.serving:
        ...     time.sleep(.01)
        ...     if serverthread.serving and time.time() - starttime > timeout:
        ...         serverthread.stop()
        ...         break

    Print any errors that may have occurred.

        >>> print(serverthread.error)
        None
    """
    import http.server
    import email.message
    import select
    import threading

    class DocHandler(http.server.BaseHTTPRequestHandler):

        def do_GET(self):
            """Process a request from an HTML browser.

            The URL received is in self.path.
            Get an HTML page from self.urlhandler and send it.
            """
            if self.path.endswith('.css'):
                content_type = 'text/css'
            else:
                content_type = 'text/html'
            self.send_response(200)
            self.send_header('Content-Type', '%s; charset=UTF-8' % content_type)
            self.end_headers()
            self.wfile.write(self.urlhandler(
                self.path, content_type).encode('utf-8'))

        def log_message(self, *args):
            # Don't log messages.
            pass

    class DocServer(http.server.HTTPServer):

        def __init__(self, port, callback):
            self.host = (sys.platform == 'mac') and '127.0.0.1' or 'localhost'
            self.address = ('', port)
            self.callback = callback
            # self.base/self.handler are injected as class attributes by
            # ServerThread.run() before this constructor is called.
            self.base.__init__(self, self.address, self.handler)
            self.quit = False

        def serve_until_quit(self):
            # Poll with a 1s select timeout so the quit flag is noticed
            # even when no requests arrive.
            while not self.quit:
                rd, wr, ex = select.select([self.socket.fileno()], [], [], 1)
                if rd:
                    self.handle_request()
            self.server_close()

        def server_activate(self):
            self.base.server_activate(self)
            if self.callback:
                self.callback(self)

    class ServerThread(threading.Thread):

        def __init__(self, urlhandler, port):
            self.urlhandler = urlhandler
            self.port = int(port)
            threading.Thread.__init__(self)
            self.serving = False
            self.error = None

        def run(self):
            """Start the server."""
            try:
                # Wire the handler/server classes together here so the
                # nested classes above stay declaration-order independent.
                DocServer.base = http.server.HTTPServer
                DocServer.handler = DocHandler
                DocHandler.MessageClass = email.message.Message
                DocHandler.urlhandler = staticmethod(self.urlhandler)
                docsvr = DocServer(self.port, self.ready)
                self.docserver = docsvr
                docsvr.serve_until_quit()
            except Exception as e:
                self.error = e

        def ready(self, server):
            # Called via server_activate once the socket is listening.
            self.serving = True
            self.host = server.host
            self.port = server.server_port
            self.url = 'http://%s:%d/' % (self.host, self.port)

        def stop(self):
            """Stop the server and this thread nicely"""
            self.docserver.quit = True
            self.serving = False
            self.url = None

    thread = ServerThread(urlhandler, port)
    thread.start()
    # Wait until thread.serving is True to make sure we are
    # really up before returning.
    while not thread.error and not thread.serving:
        time.sleep(.01)
    return thread
def _url_handler(url, content_type="text/html"):
    """The pydoc url handler for use with the pydoc server.

    If the content_type is 'text/css', the _pydoc.css style
    sheet is read and returned if it exits.

    If the content_type is 'text/html', then the result of
    get_html_page(url) is returned.
    """
    class _HTMLDoc(HTMLDoc):
        # HTMLDoc specialized for the web server: pages link the bundled
        # stylesheet, and file links route through the getfile handler.

        def page(self, title, contents):
            """Format an HTML page."""
            css_path = "pydoc_data/_pydoc.css"
            css_link = (
                '<link rel="stylesheet" type="text/css" href="%s">' %
                css_path)
            return '''\
<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html><head><title>Pydoc: %s</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
%s</head><body bgcolor="#f0f0f8">%s<div style="clear:both;padding-top:.5em;">%s</div>
</body></html>''' % (title, css_link, html_navbar(), contents)

        def filelink(self, url, path):
            return '<a href="getfile?key=%s">%s</a>' % (url, path)

    html = _HTMLDoc()

    def html_navbar():
        # Banner shown on every page: interpreter version/platform plus
        # index links and the Get/Search forms.
        version = html.escape("%s [%s, %s]" % (platform.python_version(),
                                               platform.python_build()[0],
                                               platform.python_compiler()))
        return """
<div style='float:left'>
Python %s<br>%s
</div>
<div style='float:right'>
<div style='text-align:center'>
<a href="index.html">Module Index</a>
: <a href="topics.html">Topics</a>
: <a href="keywords.html">Keywords</a>
</div>
<div>
<form action="get" style='display:inline;'>
<input type=text name=key size=15>
<input type=submit value="Get">
</form>
<form action="search" style='display:inline;'>
<input type=text name=key size=15>
<input type=submit value="Search">
</form>
</div>
</div>
""" % (version, html.escape(platform.platform(terse=True)))

    def html_index():
        """Module Index page."""
        def bltinlink(name):
            return '<a href="%s.html">%s</a>' % (name, name)
        heading = html.heading(
            '<big><big><strong>Index of Modules</strong></big></big>',
            '#ffffff', '#7799ee')
        names = [name for name in sys.builtin_module_names
                 if name != '__main__']
        contents = html.multicolumn(names, bltinlink)
        contents = [heading, '<p>' + html.bigsection(
            'Built-in Modules', '#ffffff', '#ee77aa', contents)]
        seen = {}
        for dir in sys.path:
            contents.append(html.index(dir, seen))
        contents.append(
            '<p align=right><font color="#909090" face="helvetica,'
            'arial"><strong>pydoc</strong> by Ka-Ping Yee'
            '<[email protected]></font>')
        return 'Index of Modules', ''.join(contents)

    def html_search(key):
        """Search results page."""
        # scan for modules
        search_result = []
        def callback(path, modname, desc):
            if modname[-9:] == '.__init__':
                modname = modname[:-9] + ' (package)'
            search_result.append((modname, desc and '- ' + desc))
        with warnings.catch_warnings():
            warnings.filterwarnings('ignore')  # ignore problems during import
            ModuleScanner().run(callback, key)
        # format page
        def bltinlink(name):
            return '<a href="%s.html">%s</a>' % (name, name)
        results = []
        heading = html.heading(
            '<big><big><strong>Search Results</strong></big></big>',
            '#ffffff', '#7799ee')
        for name, desc in search_result:
            results.append(bltinlink(name) + desc)
        contents = heading + html.bigsection(
            'key = %s' % key, '#ffffff', '#ee77aa', '<br>'.join(results))
        return 'Search Results', contents

    def html_getfile(path):
        """Get and display a source file listing safely."""
        path = path.replace('%20', ' ')
        with tokenize.open(path) as fp:
            lines = html.escape(fp.read())
        body = '<pre>%s</pre>' % lines
        heading = html.heading(
            '<big><big><strong>File Listing</strong></big></big>',
            '#ffffff', '#7799ee')
        contents = heading + html.bigsection(
            'File: %s' % path, '#ffffff', '#ee77aa', body)
        return 'getfile %s' % path, contents

    def html_topics():
        """Index of topic texts available."""
        def bltinlink(name):
            return '<a href="topic?key=%s">%s</a>' % (name, name)
        heading = html.heading(
            '<big><big><strong>INDEX</strong></big></big>',
            '#ffffff', '#7799ee')
        names = sorted(Helper.topics.keys())
        contents = html.multicolumn(names, bltinlink)
        contents = heading + html.bigsection(
            'Topics', '#ffffff', '#ee77aa', contents)
        return 'Topics', contents

    def html_keywords():
        """Index of keywords."""
        heading = html.heading(
            '<big><big><strong>INDEX</strong></big></big>',
            '#ffffff', '#7799ee')
        names = sorted(Helper.keywords.keys())
        def bltinlink(name):
            return '<a href="topic?key=%s">%s</a>' % (name, name)
        contents = html.multicolumn(names, bltinlink)
        contents = heading + html.bigsection(
            'Keywords', '#ffffff', '#ee77aa', contents)
        return 'Keywords', contents

    def html_topicpage(topic):
        """Topic or keyword help page."""
        buf = io.StringIO()
        # A throwaway Helper writing into buf lets us reuse _gettopic().
        htmlhelp = Helper(buf, buf)
        contents, xrefs = htmlhelp._gettopic(topic)
        if topic in htmlhelp.keywords:
            title = 'KEYWORD'
        else:
            title = 'TOPIC'
        heading = html.heading(
            '<big><big><strong>%s</strong></big></big>' % title,
            '#ffffff', '#7799ee')
        contents = '<pre>%s</pre>' % html.markup(contents)
        contents = html.bigsection(topic, '#ffffff', '#ee77aa', contents)
        if xrefs:
            xrefs = sorted(xrefs.split())
            def bltinlink(name):
                return '<a href="topic?key=%s">%s</a>' % (name, name)
            xrefs = html.multicolumn(xrefs, bltinlink)
            xrefs = html.section('Related help topics: ',
                                 '#ffffff', '#ee77aa', xrefs)
        return ('%s %s' % (title, topic),
                ''.join((heading, contents, xrefs)))

    def html_getobj(url):
        obj = locate(url, forceload=1)
        if obj is None and url != 'None':
            raise ValueError('could not find object')
        title = describe(obj)
        content = html.document(obj, url)
        return title, content

    def html_error(url, exc):
        heading = html.heading(
            '<big><big><strong>Error</strong></big></big>',
            '#ffffff', '#7799ee')
        contents = '<br>'.join(html.escape(line) for line in
                               format_exception_only(type(exc), exc))
        contents = heading + html.bigsection(url, '#ffffff', '#bb0000',
                                             contents)
        return "Error - %s" % url, contents

    def get_html_page(url):
        """Generate an HTML page for url."""
        complete_url = url
        if url.endswith('.html'):
            url = url[:-5]
        try:
            if url in ("", "index"):
                title, content = html_index()
            elif url == "topics":
                title, content = html_topics()
            elif url == "keywords":
                title, content = html_keywords()
            elif '=' in url:
                # Query-style urls look like "<op>=<argument>".
                op, _, url = url.partition('=')
                if op == "search?key":
                    title, content = html_search(url)
                elif op == "getfile?key":
                    title, content = html_getfile(url)
                elif op == "topic?key":
                    # try topics first, then objects.
                    try:
                        title, content = html_topicpage(url)
                    except ValueError:
                        title, content = html_getobj(url)
                elif op == "get?key":
                    # try objects first, then topics.
                    if url in ("", "index"):
                        title, content = html_index()
                    else:
                        try:
                            title, content = html_getobj(url)
                        except ValueError:
                            title, content = html_topicpage(url)
                else:
                    raise ValueError('bad pydoc url')
            else:
                title, content = html_getobj(url)
        except Exception as exc:
            # Catch any errors and display them in an error page.
            title, content = html_error(complete_url, exc)
        return html.page(title, content)

    if url.startswith('/'):
        url = url[1:]
    if content_type == 'text/css':
        path_here = os.path.dirname(os.path.realpath(__file__))
        css_path = os.path.join(path_here, url)
        with open(css_path) as fp:
            return ''.join(fp.readlines())
    elif content_type == 'text/html':
        return get_html_page(url)
    # Errors outside the url handler are caught by the server.
    raise TypeError('unknown content type %r for url %s' % (content_type, url))
def browse(port=0, *, open_browser=True):
    """Start the enhanced pydoc Web server and open a Web browser.

    Use port '0' to start the server on an arbitrary port.
    Set open_browser to False to suppress opening a browser.
    """
    import webbrowser
    serverthread = _start_server(_url_handler, port)
    if serverthread.error:
        print(serverthread.error)
        return
    if serverthread.serving:
        server_help_msg = 'Server commands: [b]rowser, [q]uit'
        if open_browser:
            webbrowser.open(serverthread.url)
        try:
            print('Server ready at', serverthread.url)
            print(server_help_msg)
            # Tiny command loop: only 'b' and 'q' are recognized.
            while serverthread.serving:
                cmd = input('server> ')
                cmd = cmd.lower()
                if cmd == 'q':
                    break
                elif cmd == 'b':
                    webbrowser.open(serverthread.url)
                else:
                    print(server_help_msg)
        except (KeyboardInterrupt, EOFError):
            print()
        finally:
            # Always shut the server thread down, even on Ctrl-C/EOF.
            if serverthread.serving:
                serverthread.stop()
                print('Server stopped')
# -------------------------------------------------- command-line interface
def ispath(x):
    """Return True when *x* is a string containing the path separator."""
    return isinstance(x, str) and os.sep in x
def cli():
    """Command-line interface (looks at sys.argv to decide what to do)."""
    import getopt
    class BadUsage(Exception): pass

    # Scripts don't get the current directory in their path by default
    # unless they are run with the '-m' switch
    if '' not in sys.path:
        scriptdir = os.path.dirname(sys.argv[0])
        if scriptdir in sys.path:
            sys.path.remove(scriptdir)
        sys.path.insert(0, '.')

    try:
        opts, args = getopt.getopt(sys.argv[1:], 'bk:p:w')
        writing = False
        start_server = False
        open_browser = False
        port = None
        for opt, val in opts:
            if opt == '-b':
                start_server = True
                open_browser = True
            if opt == '-k':
                # -k short-circuits: search and exit.
                apropos(val)
                return
            if opt == '-p':
                start_server = True
                port = val
            if opt == '-w':
                writing = True

        if start_server:
            if port is None:
                # Port 0 lets the OS pick an arbitrary free port.
                port = 0
            browse(port, open_browser=open_browser)
            return

        if not args: raise BadUsage
        for arg in args:
            if ispath(arg) and not os.path.exists(arg):
                print('file %r does not exist' % arg)
                break
            try:
                # A path to an existing file is imported and documented
                # as a module.
                if ispath(arg) and os.path.isfile(arg):
                    arg = importfile(arg)
                if writing:
                    if ispath(arg) and os.path.isdir(arg):
                        writedocs(arg)
                    else:
                        writedoc(arg)
                else:
                    help.help(arg)
            except ErrorDuringImport as value:
                print(value)

    except (getopt.error, BadUsage):
        cmd = os.path.splitext(os.path.basename(sys.argv[0]))[0]
        print("""pydoc - the Python documentation tool
{cmd} <name> ...
Show text documentation on something. <name> may be the name of a
Python keyword, topic, function, module, or package, or a dotted
reference to a class or function within a module or module in a
package. If <name> contains a '{sep}', it is used as the path to a
Python source file to document. If name is 'keywords', 'topics',
or 'modules', a listing of these things is displayed.
{cmd} -k <keyword>
Search for a keyword in the synopsis lines of all available modules.
{cmd} -p <port>
Start an HTTP server on the given port on the local machine. Port
number 0 can be used to get an arbitrary unused port.
{cmd} -b
Start an HTTP server on an arbitrary unused port and open a Web browser
to interactively browse documentation. The -p option can be used with
the -b option to explicitly specify the server port.
{cmd} -w <name> ...
Write out the HTML documentation for a module to a file in the current
directory. If <name> contains a '{sep}', it is treated as a filename; if
it names a directory, documentation is written for all the contents.
""".format(cmd=cmd, sep=os.sep))
# Allow running this module as a script: dispatch to the CLI.
if __name__ == '__main__':
    cli()
|
gpl-3.0
|
WhireCrow/openwrt-mt7620
|
staging_dir/target-mipsel_r2_uClibc-0.9.33.2/usr/lib/python2.7/tarfile.py
|
26
|
88986
|
#!/usr/bin/env python
# -*- coding: iso-8859-1 -*-
#-------------------------------------------------------------------
# tarfile.py
#-------------------------------------------------------------------
# Copyright (C) 2002 Lars Gustäbel <[email protected]>
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
"""Read from and write to tar format archives.
"""
__version__ = "$Revision: 85213 $"
# $Source$
version = "0.9.0"
__author__ = "Lars Gustäbel ([email protected])"
__date__ = "$Date$"
__cvsid__ = "$Id$"
__credits__ = "Gustavo Niemeyer, Niels Gustäbel, Richard Townsend."
#---------
# Imports
#---------
import sys
import os
import shutil
import stat
import errno
import time
import struct
import copy
import re
import operator
try:
import grp, pwd
except ImportError:
grp = pwd = None
# from tarfile import *
__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"]
#---------------------------------------------------------
# tar constants
#---------------------------------------------------------
# NOTE: this file is Python 2 (old-style 0NNN octal literals below).
NUL = "\0"                      # the null character
BLOCKSIZE = 512                 # length of processing blocks
RECORDSIZE = BLOCKSIZE * 20     # length of records
GNU_MAGIC = "ustar \0"          # magic gnu tar string
POSIX_MAGIC = "ustar\x0000"     # magic posix tar string

LENGTH_NAME = 100               # maximum length of a filename
LENGTH_LINK = 100               # maximum length of a linkname
LENGTH_PREFIX = 155             # maximum length of the prefix field

REGTYPE = "0"                   # regular file
AREGTYPE = "\0"                 # regular file
LNKTYPE = "1"                   # link (inside tarfile)
SYMTYPE = "2"                   # symbolic link
CHRTYPE = "3"                   # character special device
BLKTYPE = "4"                   # block special device
DIRTYPE = "5"                   # directory
FIFOTYPE = "6"                  # fifo special device
CONTTYPE = "7"                  # contiguous file

GNUTYPE_LONGNAME = "L"          # GNU tar longname
GNUTYPE_LONGLINK = "K"          # GNU tar longlink
GNUTYPE_SPARSE = "S"            # GNU tar sparse file

XHDTYPE = "x"                   # POSIX.1-2001 extended header
XGLTYPE = "g"                   # POSIX.1-2001 global header
SOLARIS_XHDTYPE = "X"           # Solaris extended header

USTAR_FORMAT = 0                # POSIX.1-1988 (ustar) format
GNU_FORMAT = 1                  # GNU tar format
PAX_FORMAT = 2                  # POSIX.1-2001 (pax) format
DEFAULT_FORMAT = GNU_FORMAT

#---------------------------------------------------------
# tarfile constants
#---------------------------------------------------------
# File types that tarfile supports:
SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
                   SYMTYPE, DIRTYPE, FIFOTYPE,
                   CONTTYPE, CHRTYPE, BLKTYPE,
                   GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
                   GNUTYPE_SPARSE)

# File types that will be treated as a regular file.
REGULAR_TYPES = (REGTYPE, AREGTYPE,
                 CONTTYPE, GNUTYPE_SPARSE)

# File types that are part of the GNU tar format.
GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
             GNUTYPE_SPARSE)

# Fields from a pax header that override a TarInfo attribute.
PAX_FIELDS = ("path", "linkpath", "size", "mtime",
              "uid", "gid", "uname", "gname")

# Fields in a pax header that are numbers, all other fields
# are treated as strings.
PAX_NUMBER_FIELDS = {
    "atime": float,
    "ctime": float,
    "mtime": float,
    "uid": int,
    "gid": int,
    "size": int
}

#---------------------------------------------------------
# Bits used in the mode field, values in octal.
#---------------------------------------------------------
S_IFLNK = 0120000        # symbolic link
S_IFREG = 0100000        # regular file
S_IFBLK = 0060000        # block device
S_IFDIR = 0040000        # directory
S_IFCHR = 0020000        # character device
S_IFIFO = 0010000        # fifo

TSUID   = 04000          # set UID on execution
TSGID   = 02000          # set GID on execution
TSVTX   = 01000          # reserved

TUREAD  = 0400           # read by owner
TUWRITE = 0200           # write by owner
TUEXEC  = 0100           # execute/search by owner
TGREAD  = 0040           # read by group
TGWRITE = 0020           # write by group
TGEXEC  = 0010           # execute/search by group
TOREAD  = 0004           # read by other
TOWRITE = 0002           # write by other
TOEXEC  = 0001           # execute/search by other

#---------------------------------------------------------
# initialization
#---------------------------------------------------------
# Encoding used for member names; fall back to the interpreter default
# when the filesystem encoding is unknown.
ENCODING = sys.getfilesystemencoding()
if ENCODING is None:
    ENCODING = sys.getdefaultencoding()
#---------------------------------------------------------
# Some useful functions
#---------------------------------------------------------
def stn(s, length):
    """Convert a python string to a null-terminated string buffer."""
    # Truncate overlong input, then NUL-pad out to exactly *length* bytes.
    return s[:length].ljust(length, NUL)
def nts(s):
    """Convert a null-terminated string field to a python string."""
    # Everything before the first NUL byte is the payload; a field
    # containing no NUL is returned unchanged.
    return s.split("\0", 1)[0]
def nti(s):
    """Convert a number field to a python number.
    """
    # There are two possible encodings for a number field, see
    # itn() below.
    if s[0] != chr(0200):
        # Plain POSIX encoding: ASCII octal digits, NUL terminated.
        try:
            n = int(nts(s) or "0", 8)
        except ValueError:
            raise InvalidHeaderError("invalid header")
    else:
        # GNU extension: a leading 0200 byte marks a big-endian
        # base-256 binary number in the remaining bytes.
        n = 0L
        for i in xrange(len(s) - 1):
            n <<= 8
            n += ord(s[i + 1])
    return n
def itn(n, digits=8, format=DEFAULT_FORMAT):
    """Convert a python number to a number field.
    """
    # POSIX 1003.1-1988 requires numbers to be encoded as a string of
    # octal digits followed by a null-byte, this allows values up to
    # (8**(digits-1))-1. GNU tar allows storing numbers greater than
    # that if necessary. A leading 0200 byte indicates this particular
    # encoding, the following digits-1 bytes are a big-endian
    # representation. This allows values up to (256**(digits-1))-1.
    if 0 <= n < 8 ** (digits - 1):
        s = "%0*o" % (digits - 1, n) + NUL
    else:
        if format != GNU_FORMAT or n >= 256 ** (digits - 1):
            raise ValueError("overflow in number field")

        if n < 0:
            # XXX We mimic GNU tar's behaviour with negative numbers,
            # this could raise OverflowError.
            n = struct.unpack("L", struct.pack("l", n))[0]

        # Build the base-256 value by prepending the least significant
        # byte each pass, so the result comes out big-endian.
        s = ""
        for i in xrange(digits - 1):
            s = chr(n & 0377) + s
            n >>= 8
        s = chr(0200) + s
    return s
def uts(s, encoding, errors):
    """Convert a unicode object to a string."""
    if errors != "utf-8":
        return s.encode(encoding, errors)
    # An extra error handler similar to the -o invalid=UTF-8 option
    # in POSIX.1-2001: encode strictly, and replace only the
    # untranslatable characters with their UTF-8 representation.
    try:
        return s.encode(encoding, "strict")
    except UnicodeEncodeError:
        chunks = []
        for ch in s:
            try:
                chunks.append(ch.encode(encoding, "strict"))
            except UnicodeEncodeError:
                chunks.append(ch.encode("utf8"))
        return "".join(chunks)
def calc_chksums(buf):
    """Calculate the checksum for a member's header by summing up all
    characters except for the chksum field which is treated as if
    it was filled with spaces. According to the GNU tar sources,
    some tars (Sun and NeXT) calculate chksum with signed char,
    which will be different if there are chars in the buffer with
    the high bit set. So we calculate two checksums, unsigned and
    signed.
    """
    # Bytes 148..155 are the chksum field itself; skipping them and
    # adding 256 (eight 0x20 space bytes) treats the field as blanks.
    before, after = buf[:148], buf[156:512]
    unsigned_chksum = 256 + sum(struct.unpack("148B", before)) \
                          + sum(struct.unpack("356B", after))
    signed_chksum = 256 + sum(struct.unpack("148b", before)) \
                        + sum(struct.unpack("356b", after))
    return unsigned_chksum, signed_chksum
def copyfileobj(src, dst, length=None):
    """Copy length bytes from fileobj src to fileobj dst.
       If length is None, copy the entire content.
       Raises IOError if src runs dry before *length* bytes were read.
    """
    if length == 0:
        return
    if length is None:
        shutil.copyfileobj(src, dst)
        return

    # Copy in fixed-size chunks so arbitrarily large members never have
    # to be held in memory at once.
    BUFSIZE = 16 * 1024
    blocks, remainder = divmod(length, BUFSIZE)
    for b in xrange(blocks):
        buf = src.read(BUFSIZE)
        if len(buf) < BUFSIZE:
            raise IOError("end of file reached")
        dst.write(buf)

    if remainder != 0:
        buf = src.read(remainder)
        if len(buf) < remainder:
            raise IOError("end of file reached")
        dst.write(buf)
    return
# Lookup table driving filemode(): one inner tuple per output character
# position of an ls-style "-rwxrwxrwx" string.  Within each position the
# first (bit-mask, char) pair fully contained in the mode wins; if none
# matches, filemode() emits "-" for that position.
filemode_table = (
    ((S_IFLNK,      "l"),
     (S_IFREG,      "-"),
     (S_IFBLK,      "b"),
     (S_IFDIR,      "d"),
     (S_IFCHR,      "c"),
     (S_IFIFO,      "p")),

    ((TUREAD,       "r"),),
    ((TUWRITE,      "w"),),
    ((TUEXEC|TSUID, "s"),
     (TSUID,        "S"),
     (TUEXEC,       "x")),

    ((TGREAD,       "r"),),
    ((TGWRITE,      "w"),),
    ((TGEXEC|TSGID, "s"),
     (TSGID,        "S"),
     (TGEXEC,       "x")),

    ((TOREAD,       "r"),),
    ((TOWRITE,      "w"),),
    ((TOEXEC|TSVTX, "t"),
     (TSVTX,        "T"),
     (TOEXEC,       "x"))
)
def filemode(mode):
    """Convert a file's mode to a string of the form
       -rwxrwxrwx.
       Used by TarFile.list()
    """
    # Each entry of filemode_table contributes exactly one character:
    # the first (bit, char) pair fully contained in *mode*, else "-".
    return "".join(
        next((char for bit, char in table if mode & bit == bit), "-")
        for table in filemode_table)
class TarError(Exception):
    """Base exception for all tarfile errors."""
    pass
class ExtractError(TarError):
    """General exception for extract errors (non-fatal with errorlevel 0)."""
    pass
class ReadError(TarError):
    """Exception for unreadable tar archives."""
    pass
class CompressionError(TarError):
    """Exception for unavailable compression methods (missing zlib/bz2)."""
    pass
class StreamError(TarError):
    """Exception for unsupported operations on stream-like TarFiles."""
    pass
class HeaderError(TarError):
    """Base exception for member header errors."""
    pass
class EmptyHeaderError(HeaderError):
    """Exception for empty headers."""
    pass
class TruncatedHeaderError(HeaderError):
    """Exception for truncated headers (fewer than BLOCKSIZE bytes)."""
    pass
class EOFHeaderError(HeaderError):
    """Exception for end of file headers (an all-NUL block)."""
    pass
class InvalidHeaderError(HeaderError):
    """Exception for invalid headers (e.g. a bad checksum)."""
    pass
class SubsequentHeaderError(HeaderError):
    """Exception for missing and invalid extended headers."""
    pass
#---------------------------
# internal stream interface
#---------------------------
class _LowLevelFile:
    """Low-level file object. Supports reading and writing.
    It is used instead of a regular file object for streaming
    access.
    """
    def __init__(self, name, mode):
        # Translate the one-letter mode into os.open() flags.
        mode = {
            "r": os.O_RDONLY,
            "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
        }[mode]
        if hasattr(os, "O_BINARY"):
            # Windows only: suppress newline translation on the raw fd.
            mode |= os.O_BINARY
        self.fd = os.open(name, mode, 0666)
    def close(self):
        os.close(self.fd)
    def read(self, size):
        return os.read(self.fd, size)
    def write(self, s):
        os.write(self.fd, s)
class _Stream:
    """Class that serves as an adapter between TarFile and
    a stream-like object. The stream-like object only
    needs to have a read() or write() method and is accessed
    blockwise. Use of gzip or bzip2 compression is possible.
    A stream-like object could be for example: sys.stdin,
    sys.stdout, a socket, a tape device etc.
    _Stream is intended to be used only internally.
    """
    def __init__(self, name, mode, comptype, fileobj, bufsize):
        """Construct a _Stream object.

        mode is "r" or "w", comptype one of "tar", "gz", "bz2" or "*"
        (auto-detect). fileobj may be None, in which case name is
        opened via _LowLevelFile.
        """
        # Only close the fileobj in close() if we created it ourselves.
        self._extfileobj = True
        if fileobj is None:
            fileobj = _LowLevelFile(name, mode)
            self._extfileobj = False
        if comptype == '*':
            # Enable transparent compression detection for the
            # stream interface
            fileobj = _StreamProxy(fileobj)
            comptype = fileobj.getcomptype()
        self.name = name or ""
        self.mode = mode
        self.comptype = comptype
        self.fileobj = fileobj
        self.bufsize = bufsize
        self.buf = ""            # raw (compressed) data buffer
        self.pos = 0L            # logical (uncompressed) stream position
        self.closed = False
        if comptype == "gz":
            try:
                import zlib
            except ImportError:
                raise CompressionError("zlib module is not available")
            self.zlib = zlib
            self.crc = zlib.crc32("") & 0xffffffffL
            if mode == "r":
                self._init_read_gz()
            else:
                self._init_write_gz()
        if comptype == "bz2":
            try:
                import bz2
            except ImportError:
                raise CompressionError("bz2 module is not available")
            if mode == "r":
                self.dbuf = ""   # decompressed data buffer
                self.cmp = bz2.BZ2Decompressor()
            else:
                self.cmp = bz2.BZ2Compressor()
    def __del__(self):
        # hasattr() guard: __init__ may have raised before setting attrs.
        if hasattr(self, "closed") and not self.closed:
            self.close()
    def _init_write_gz(self):
        """Initialize for writing with gzip compression.
        """
        # Raw deflate stream (-MAX_WBITS); the gzip header/trailer are
        # written by hand below and in close().
        self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED,
                                            -self.zlib.MAX_WBITS,
                                            self.zlib.DEF_MEM_LEVEL,
                                            0)
        timestamp = struct.pack("<L", long(time.time()))
        # gzip magic, deflate method, FNAME flag, mtime, XFL, OS=255.
        self.__write("\037\213\010\010%s\002\377" % timestamp)
        if type(self.name) is unicode:
            self.name = self.name.encode("iso-8859-1", "replace")
        if self.name.endswith(".gz"):
            self.name = self.name[:-3]
        self.__write(self.name + NUL)
    def write(self, s):
        """Write string s to the stream.
        """
        if self.comptype == "gz":
            self.crc = self.zlib.crc32(s, self.crc) & 0xffffffffL
        self.pos += len(s)
        if self.comptype != "tar":
            s = self.cmp.compress(s)
        self.__write(s)
    def __write(self, s):
        """Write string s to the stream if a whole new block
        is ready to be written.
        """
        # Accumulate and flush in fixed bufsize blocks (tape-friendly).
        self.buf += s
        while len(self.buf) > self.bufsize:
            self.fileobj.write(self.buf[:self.bufsize])
            self.buf = self.buf[self.bufsize:]
    def close(self):
        """Close the _Stream object. No operation should be
        done on it afterwards.
        """
        if self.closed:
            return
        if self.mode == "w" and self.comptype != "tar":
            self.buf += self.cmp.flush()
        if self.mode == "w" and self.buf:
            self.fileobj.write(self.buf)
            self.buf = ""
            if self.comptype == "gz":
                # The native zlib crc is an unsigned 32-bit integer, but
                # the Python wrapper implicitly casts that to a signed C
                # long. So, on a 32-bit box self.crc may "look negative",
                # while the same crc on a 64-bit box may "look positive".
                # To avoid irksome warnings from the `struct` module, force
                # it to look positive on all boxes.
                self.fileobj.write(struct.pack("<L", self.crc & 0xffffffffL))
                self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFFL))
        if not self._extfileobj:
            self.fileobj.close()
        self.closed = True
    def _init_read_gz(self):
        """Initialize for reading a gzip compressed fileobj.
        """
        self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
        self.dbuf = ""
        # taken from gzip.GzipFile with some alterations
        if self.__read(2) != "\037\213":
            raise ReadError("not a gzip file")
        if self.__read(1) != "\010":
            raise CompressionError("unsupported compression method")
        # Skip the optional gzip header fields depending on FLG bits.
        flag = ord(self.__read(1))
        self.__read(6)
        if flag & 4:
            # FEXTRA: little-endian length prefix, then payload.
            xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
            self.read(xlen)
        if flag & 8:
            # FNAME: zero-terminated file name.
            while True:
                s = self.__read(1)
                if not s or s == NUL:
                    break
        if flag & 16:
            # FCOMMENT: zero-terminated comment.
            while True:
                s = self.__read(1)
                if not s or s == NUL:
                    break
        if flag & 2:
            # FHCRC: two-byte header checksum.
            self.__read(2)
    def tell(self):
        """Return the stream's file pointer position.
        """
        return self.pos
    def seek(self, pos=0):
        """Set the stream's file pointer to pos. Negative seeking
        is forbidden.
        """
        if pos - self.pos >= 0:
            # Streams cannot seek: consume and discard the gap instead.
            blocks, remainder = divmod(pos - self.pos, self.bufsize)
            for i in xrange(blocks):
                self.read(self.bufsize)
            self.read(remainder)
        else:
            raise StreamError("seeking backwards is not allowed")
        return self.pos
    def read(self, size=None):
        """Return the next size number of bytes from the stream.
        If size is not defined, return all bytes of the stream
        up to EOF.
        """
        if size is None:
            t = []
            while True:
                buf = self._read(self.bufsize)
                if not buf:
                    break
                t.append(buf)
            buf = "".join(t)
        else:
            buf = self._read(size)
        self.pos += len(buf)
        return buf
    def _read(self, size):
        """Return size bytes from the stream.
        """
        if self.comptype == "tar":
            return self.__read(size)
        # Decompress raw blocks into self.dbuf until size bytes are ready.
        c = len(self.dbuf)
        t = [self.dbuf]
        while c < size:
            buf = self.__read(self.bufsize)
            if not buf:
                break
            try:
                buf = self.cmp.decompress(buf)
            except IOError:
                raise ReadError("invalid compressed data")
            t.append(buf)
            c += len(buf)
        t = "".join(t)
        self.dbuf = t[size:]
        return t[:size]
    def __read(self, size):
        """Return size bytes from stream. If internal buffer is empty,
        read another block from the stream.
        """
        c = len(self.buf)
        t = [self.buf]
        while c < size:
            buf = self.fileobj.read(self.bufsize)
            if not buf:
                break
            t.append(buf)
            c += len(buf)
        t = "".join(t)
        self.buf = t[size:]
        return t[:size]
# class _Stream
class _StreamProxy(object):
    """Small proxy class that enables transparent compression
    detection for the Stream interface (mode 'r|*').
    """
    def __init__(self, fileobj):
        self.fileobj = fileobj
        # Read one block up front so the magic bytes can be sniffed.
        self.buf = self.fileobj.read(BLOCKSIZE)
    def read(self, size):
        # First call returns the sniffed block, and rebinds self.read so
        # all subsequent calls go straight to the underlying fileobj.
        self.read = self.fileobj.read
        return self.buf
    def getcomptype(self):
        # Identify the compression from the magic bytes of the first block.
        if self.buf.startswith("\037\213\010"):
            return "gz"
        if self.buf[0:3] == "BZh" and self.buf[4:10] == "1AY&SY":
            return "bz2"
        return "tar"
    def close(self):
        self.fileobj.close()
# class StreamProxy
class _BZ2Proxy(object):
    """Small proxy class that enables external file object
    support for "r:bz2" and "w:bz2" modes. This is actually
    a workaround for a limitation in bz2 module's BZ2File
    class which (unlike gzip.GzipFile) has no support for
    a file object argument.
    """
    blocksize = 16 * 1024
    def __init__(self, fileobj, mode):
        self.fileobj = fileobj
        self.mode = mode
        self.name = getattr(self.fileobj, "name", None)
        self.init()
    def init(self):
        """(Re)create the (de)compressor and rewind to the start."""
        import bz2
        self.pos = 0            # uncompressed position
        if self.mode == "r":
            self.bz2obj = bz2.BZ2Decompressor()
            self.fileobj.seek(0)
            self.buf = ""       # decompressed but unconsumed data
        else:
            self.bz2obj = bz2.BZ2Compressor()
    def read(self, size):
        # Decompress raw chunks until at least size bytes are buffered.
        b = [self.buf]
        x = len(self.buf)
        while x < size:
            raw = self.fileobj.read(self.blocksize)
            if not raw:
                break
            data = self.bz2obj.decompress(raw)
            b.append(data)
            x += len(data)
        self.buf = "".join(b)
        buf = self.buf[:size]
        self.buf = self.buf[size:]
        self.pos += len(buf)
        return buf
    def seek(self, pos):
        # Backwards seek requires restarting decompression from zero;
        # forward seek just reads and discards the gap.
        if pos < self.pos:
            self.init()
        self.read(pos - self.pos)
    def tell(self):
        return self.pos
    def write(self, data):
        self.pos += len(data)
        raw = self.bz2obj.compress(data)
        self.fileobj.write(raw)
    def close(self):
        if self.mode == "w":
            # Flush whatever the compressor is still holding back.
            raw = self.bz2obj.flush()
            self.fileobj.write(raw)
# class _BZ2Proxy
#------------------------
# Extraction file object
#------------------------
class _FileInFile(object):
    """A thin wrapper around an existing file object that
    provides a part of its data as an individual file
    object.
    """
    def __init__(self, fileobj, offset, size, sparse=None):
        # offset/size delimit the member's data inside fileobj; sparse is
        # an optional structure of _data/_hole sections (GNU sparse files).
        self.fileobj = fileobj
        self.offset = offset
        self.size = size
        self.sparse = sparse
        self.position = 0
    def tell(self):
        """Return the current file position.
        """
        return self.position
    def seek(self, position):
        """Seek to a position in the file.
        """
        # Only the logical position changes here; the underlying fileobj
        # is repositioned lazily on the next read.
        self.position = position
    def read(self, size=None):
        """Read data from the file.
        """
        if size is None:
            size = self.size - self.position
        else:
            # Never read past the member's logical end.
            size = min(size, self.size - self.position)
        if self.sparse is None:
            return self.readnormal(size)
        else:
            return self.readsparse(size)
    def readnormal(self, size):
        """Read operation for regular files.
        """
        self.fileobj.seek(self.offset + self.position)
        self.position += size
        return self.fileobj.read(size)
    def readsparse(self, size):
        """Read operation for sparse files.
        """
        # Assemble the result section by section until size is satisfied.
        data = []
        while size > 0:
            buf = self.readsparsesection(size)
            if not buf:
                break
            size -= len(buf)
            data.append(buf)
        return "".join(data)
    def readsparsesection(self, size):
        """Read a single section of a sparse file.
        """
        section = self.sparse.find(self.position)
        if section is None:
            return ""
        # Clamp to the end of the current section.
        size = min(size, section.offset + section.size - self.position)
        if isinstance(section, _data):
            # Stored data: map the logical position to its real offset.
            realpos = section.realpos + self.position - section.offset
            self.fileobj.seek(self.offset + realpos)
            self.position += size
            return self.fileobj.read(size)
        else:
            # Hole: synthesize NUL bytes, nothing is stored on disk.
            self.position += size
            return NUL * size
#class _FileInFile
class ExFileObject(object):
    """File-like object for reading an archive member.
    Is returned by TarFile.extractfile().
    """
    blocksize = 1024
    def __init__(self, tarfile, tarinfo):
        self.fileobj = _FileInFile(tarfile.fileobj,
                                   tarinfo.offset_data,
                                   tarinfo.size,
                                   getattr(tarinfo, "sparse", None))
        self.name = tarinfo.name
        self.mode = "r"
        self.closed = False
        self.size = tarinfo.size
        self.position = 0
        self.buffer = ""     # read-ahead buffer used by readline()
    def read(self, size=None):
        """Read at most size bytes from the file. If size is not
        present or None, read all data until EOF is reached.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")
        buf = ""
        # Serve from the readline() buffer first, then from the file.
        if self.buffer:
            if size is None:
                buf = self.buffer
                self.buffer = ""
            else:
                buf = self.buffer[:size]
                self.buffer = self.buffer[size:]
        if size is None:
            buf += self.fileobj.read()
        else:
            buf += self.fileobj.read(size - len(buf))
        self.position += len(buf)
        return buf
    def readline(self, size=-1):
        """Read one entire line from the file. If size is present
        and non-negative, return a string with at most that
        size, which may be an incomplete line.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")
        if "\n" in self.buffer:
            pos = self.buffer.find("\n") + 1
        else:
            # Fill the buffer blockwise until a newline or EOF shows up.
            buffers = [self.buffer]
            while True:
                buf = self.fileobj.read(self.blocksize)
                buffers.append(buf)
                if not buf or "\n" in buf:
                    self.buffer = "".join(buffers)
                    pos = self.buffer.find("\n") + 1
                    if pos == 0:
                        # no newline found.
                        pos = len(self.buffer)
                    break
        if size != -1:
            pos = min(size, pos)
        buf = self.buffer[:pos]
        self.buffer = self.buffer[pos:]
        self.position += len(buf)
        return buf
    def readlines(self):
        """Return a list with all remaining lines.
        """
        result = []
        while True:
            line = self.readline()
            if not line: break
            result.append(line)
        return result
    def tell(self):
        """Return the current file position.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")
        return self.position
    def seek(self, pos, whence=os.SEEK_SET):
        """Seek to a position in the file.
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")
        # Clamp the resulting position to [0, self.size] in every mode.
        if whence == os.SEEK_SET:
            self.position = min(max(pos, 0), self.size)
        elif whence == os.SEEK_CUR:
            if pos < 0:
                self.position = max(self.position + pos, 0)
            else:
                self.position = min(self.position + pos, self.size)
        elif whence == os.SEEK_END:
            self.position = max(min(self.size + pos, self.size), 0)
        else:
            raise ValueError("Invalid argument")
        # Any seek invalidates the readline() buffer.
        self.buffer = ""
        self.fileobj.seek(self.position)
    def close(self):
        """Close the file object.
        """
        self.closed = True
    def __iter__(self):
        """Get an iterator over the file's lines.
        """
        while True:
            line = self.readline()
            if not line:
                break
            yield line
#class ExFileObject
#------------------
# Exported Classes
#------------------
class TarInfo(object):
    """Informational class which holds the details about an
    archive member given by a tar header block.
    TarInfo objects are returned by TarFile.getmember(),
    TarFile.getmembers() and TarFile.gettarinfo() and are
    usually created internally.
    """
    def __init__(self, name=""):
        """Construct a TarInfo object. name is the optional name
        of the member.
        """
        self.name = name        # member name
        self.mode = 0644        # file permissions
        self.uid = 0            # user id
        self.gid = 0            # group id
        self.size = 0           # file size
        self.mtime = 0          # modification time
        self.chksum = 0         # header checksum
        self.type = REGTYPE     # member type
        self.linkname = ""      # link name
        self.uname = ""         # user name
        self.gname = ""         # group name
        self.devmajor = 0       # device major number
        self.devminor = 0       # device minor number
        self.offset = 0         # the tar header starts here
        self.offset_data = 0    # the file's data starts here
        self.pax_headers = {}   # pax header information
    # In pax headers the "name" and "linkname" field are called
    # "path" and "linkpath".
    def _getpath(self):
        return self.name
    def _setpath(self, name):
        self.name = name
    path = property(_getpath, _setpath)
    def _getlinkpath(self):
        return self.linkname
    def _setlinkpath(self, linkname):
        self.linkname = linkname
    linkpath = property(_getlinkpath, _setlinkpath)
    def __repr__(self):
        return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))
    def get_info(self, encoding, errors):
        """Return the TarInfo's attributes as a dictionary.
        """
        info = {
            "name":     self.name,
            "mode":     self.mode & 07777,
            "uid":      self.uid,
            "gid":      self.gid,
            "size":     self.size,
            "mtime":    self.mtime,
            "chksum":   self.chksum,
            "type":     self.type,
            "linkname": self.linkname,
            "uname":    self.uname,
            "gname":    self.gname,
            "devmajor": self.devmajor,
            "devminor": self.devminor
        }
        # Directories are stored with a trailing slash by convention.
        if info["type"] == DIRTYPE and not info["name"].endswith("/"):
            info["name"] += "/"
        for key in ("name", "linkname", "uname", "gname"):
            if type(info[key]) is unicode:
                info[key] = info[key].encode(encoding, errors)
        return info
    def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="strict"):
        """Return a tar header as a string of 512 byte blocks.
        """
        info = self.get_info(encoding, errors)
        if format == USTAR_FORMAT:
            return self.create_ustar_header(info)
        elif format == GNU_FORMAT:
            return self.create_gnu_header(info)
        elif format == PAX_FORMAT:
            return self.create_pax_header(info, encoding, errors)
        else:
            raise ValueError("invalid format")
    def create_ustar_header(self, info):
        """Return the object as a ustar header block.
        """
        info["magic"] = POSIX_MAGIC
        # ustar has hard field limits; long names may be split into
        # a prefix part, long linknames cannot be represented at all.
        if len(info["linkname"]) > LENGTH_LINK:
            raise ValueError("linkname is too long")
        if len(info["name"]) > LENGTH_NAME:
            info["prefix"], info["name"] = self._posix_split_name(info["name"])
        return self._create_header(info, USTAR_FORMAT)
    def create_gnu_header(self, info):
        """Return the object as a GNU header block sequence.
        """
        info["magic"] = GNU_MAGIC
        buf = ""
        # Overlong names/linknames get their own pseudo-member headers
        # (GNUTYPE_LONGNAME / GNUTYPE_LONGLINK) prepended.
        if len(info["linkname"]) > LENGTH_LINK:
            buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK)
        if len(info["name"]) > LENGTH_NAME:
            buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME)
        return buf + self._create_header(info, GNU_FORMAT)
    def create_pax_header(self, info, encoding, errors):
        """Return the object as a ustar header block. If it cannot be
        represented this way, prepend a pax extended header sequence
        with supplement information.
        """
        info["magic"] = POSIX_MAGIC
        pax_headers = self.pax_headers.copy()
        # Test string fields for values that exceed the field length or cannot
        # be represented in ASCII encoding.
        for name, hname, length in (
                ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
                ("uname", "uname", 32), ("gname", "gname", 32)):
            if hname in pax_headers:
                # The pax header has priority.
                continue
            val = info[name].decode(encoding, errors)
            # Try to encode the string as ASCII.
            try:
                val.encode("ascii")
            except UnicodeEncodeError:
                pax_headers[hname] = val
                continue
            if len(info[name]) > length:
                pax_headers[hname] = val
        # Test number fields for values that exceed the field limit or values
        # that like to be stored as float.
        for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
            if name in pax_headers:
                # The pax header has priority. Avoid overflow.
                info[name] = 0
                continue
            val = info[name]
            if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float):
                pax_headers[name] = unicode(val)
                info[name] = 0
        # Create a pax extended header if necessary.
        if pax_headers:
            buf = self._create_pax_generic_header(pax_headers)
        else:
            buf = ""
        return buf + self._create_header(info, USTAR_FORMAT)
    @classmethod
    def create_pax_global_header(cls, pax_headers):
        """Return the object as a pax global header block sequence.
        """
        return cls._create_pax_generic_header(pax_headers, type=XGLTYPE)
    def _posix_split_name(self, name):
        """Split a name longer than 100 chars into a prefix
        and a name part.
        """
        # Cut at the last "/" that still fits into the prefix field.
        prefix = name[:LENGTH_PREFIX + 1]
        while prefix and prefix[-1] != "/":
            prefix = prefix[:-1]
        name = name[len(prefix):]
        prefix = prefix[:-1]
        if not prefix or len(name) > LENGTH_NAME:
            raise ValueError("name is too long")
        return prefix, name
    @staticmethod
    def _create_header(info, format):
        """Return a header block. info is a dictionary with file
        information, format must be one of the *_FORMAT constants.
        """
        parts = [
            stn(info.get("name", ""), 100),
            itn(info.get("mode", 0) & 07777, 8, format),
            itn(info.get("uid", 0), 8, format),
            itn(info.get("gid", 0), 8, format),
            itn(info.get("size", 0), 12, format),
            itn(info.get("mtime", 0), 12, format),
            "        ", # checksum field
            info.get("type", REGTYPE),
            stn(info.get("linkname", ""), 100),
            stn(info.get("magic", POSIX_MAGIC), 8),
            stn(info.get("uname", ""), 32),
            stn(info.get("gname", ""), 32),
            itn(info.get("devmajor", 0), 8, format),
            itn(info.get("devminor", 0), 8, format),
            stn(info.get("prefix", ""), 155)
        ]
        buf = struct.pack("%ds" % BLOCKSIZE, "".join(parts))
        # Patch the checksum back into bytes 148..155 of the block
        # (512-364 == 148, 512-357 == 155): 6 octal digits + NUL + space.
        chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
        buf = buf[:-364] + "%06o\0" % chksum + buf[-357:]
        return buf
    @staticmethod
    def _create_payload(payload):
        """Return the string payload filled with zero bytes
        up to the next 512 byte border.
        """
        blocks, remainder = divmod(len(payload), BLOCKSIZE)
        if remainder > 0:
            payload += (BLOCKSIZE - remainder) * NUL
        return payload
    @classmethod
    def _create_gnu_long_header(cls, name, type):
        """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
        for name.
        """
        name += NUL
        info = {}
        info["name"] = "././@LongLink"
        info["type"] = type
        info["size"] = len(name)
        info["magic"] = GNU_MAGIC
        # create extended header + name blocks.
        return cls._create_header(info, USTAR_FORMAT) + \
                cls._create_payload(name)
    @classmethod
    def _create_pax_generic_header(cls, pax_headers, type=XHDTYPE):
        """Return a POSIX.1-2001 extended or global header sequence
        that contains a list of keyword, value pairs. The values
        must be unicode objects.
        """
        records = []
        for keyword, value in pax_headers.iteritems():
            keyword = keyword.encode("utf8")
            value = value.encode("utf8")
            l = len(keyword) + len(value) + 3   # ' ' + '=' + '\n'
            # The length field counts itself, so iterate until the digit
            # count of the total record length stabilizes.
            n = p = 0
            while True:
                n = l + len(str(p))
                if n == p:
                    break
                p = n
            records.append("%d %s=%s\n" % (p, keyword, value))
        records = "".join(records)
        # We use a hardcoded "././@PaxHeader" name like star does
        # instead of the one that POSIX recommends.
        info = {}
        info["name"] = "././@PaxHeader"
        info["type"] = type
        info["size"] = len(records)
        info["magic"] = POSIX_MAGIC
        # Create pax header + record blocks.
        return cls._create_header(info, USTAR_FORMAT) + \
                cls._create_payload(records)
    @classmethod
    def frombuf(cls, buf):
        """Construct a TarInfo object from a 512 byte string buffer.
        """
        if len(buf) == 0:
            raise EmptyHeaderError("empty header")
        if len(buf) != BLOCKSIZE:
            raise TruncatedHeaderError("truncated header")
        if buf.count(NUL) == BLOCKSIZE:
            raise EOFHeaderError("end of file header")
        chksum = nti(buf[148:156])
        # Accept either the unsigned or the signed checksum variant.
        if chksum not in calc_chksums(buf):
            raise InvalidHeaderError("bad checksum")
        # Slice the fixed ustar field layout out of the block.
        obj = cls()
        obj.buf = buf
        obj.name = nts(buf[0:100])
        obj.mode = nti(buf[100:108])
        obj.uid = nti(buf[108:116])
        obj.gid = nti(buf[116:124])
        obj.size = nti(buf[124:136])
        obj.mtime = nti(buf[136:148])
        obj.chksum = chksum
        obj.type = buf[156:157]
        obj.linkname = nts(buf[157:257])
        obj.uname = nts(buf[265:297])
        obj.gname = nts(buf[297:329])
        obj.devmajor = nti(buf[329:337])
        obj.devminor = nti(buf[337:345])
        prefix = nts(buf[345:500])
        # Old V7 tar format represents a directory as a regular
        # file with a trailing slash.
        if obj.type == AREGTYPE and obj.name.endswith("/"):
            obj.type = DIRTYPE
        # Remove redundant slashes from directories.
        if obj.isdir():
            obj.name = obj.name.rstrip("/")
        # Reconstruct a ustar longname.
        if prefix and obj.type not in GNU_TYPES:
            obj.name = prefix + "/" + obj.name
        return obj
    @classmethod
    def fromtarfile(cls, tarfile):
        """Return the next TarInfo object from TarFile object
        tarfile.
        """
        buf = tarfile.fileobj.read(BLOCKSIZE)
        obj = cls.frombuf(buf)
        obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
        return obj._proc_member(tarfile)
    #--------------------------------------------------------------------------
    # The following are methods that are called depending on the type of a
    # member. The entry point is _proc_member() which can be overridden in a
    # subclass to add custom _proc_*() methods. A _proc_*() method MUST
    # implement the following
    # operations:
    # 1. Set self.offset_data to the position where the data blocks begin,
    #    if there is data that follows.
    # 2. Set tarfile.offset to the position where the next member's header will
    #    begin.
    # 3. Return self or another valid TarInfo object.
    def _proc_member(self, tarfile):
        """Choose the right processing method depending on
        the type and call it.
        """
        if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
            return self._proc_gnulong(tarfile)
        elif self.type == GNUTYPE_SPARSE:
            return self._proc_sparse(tarfile)
        elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
            return self._proc_pax(tarfile)
        else:
            return self._proc_builtin(tarfile)
    def _proc_builtin(self, tarfile):
        """Process a builtin type or an unknown type which
        will be treated as a regular file.
        """
        self.offset_data = tarfile.fileobj.tell()
        offset = self.offset_data
        if self.isreg() or self.type not in SUPPORTED_TYPES:
            # Skip the following data blocks.
            offset += self._block(self.size)
        tarfile.offset = offset
        # Patch the TarInfo object with saved global
        # header information.
        self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)
        return self
    def _proc_gnulong(self, tarfile):
        """Process the blocks that hold a GNU longname
        or longlink member.
        """
        buf = tarfile.fileobj.read(self._block(self.size))
        # Fetch the next header and process it.
        try:
            next = self.fromtarfile(tarfile)
        except HeaderError:
            raise SubsequentHeaderError("missing or bad subsequent header")
        # Patch the TarInfo object from the next header with
        # the longname information.
        next.offset = self.offset
        if self.type == GNUTYPE_LONGNAME:
            next.name = nts(buf)
        elif self.type == GNUTYPE_LONGLINK:
            next.linkname = nts(buf)
        return next
    def _proc_sparse(self, tarfile):
        """Process a GNU sparse header plus extra headers.
        """
        buf = self.buf
        sp = _ringbuffer()
        pos = 386
        lastpos = 0L
        realpos = 0L
        # There are 4 possible sparse structs in the
        # first header.
        for i in xrange(4):
            try:
                offset = nti(buf[pos:pos + 12])
                numbytes = nti(buf[pos + 12:pos + 24])
            except ValueError:
                break
            # A gap between sections is a hole filled with NULs on read.
            if offset > lastpos:
                sp.append(_hole(lastpos, offset - lastpos))
            sp.append(_data(offset, numbytes, realpos))
            realpos += numbytes
            lastpos = offset + numbytes
            pos += 24
        isextended = ord(buf[482])
        origsize = nti(buf[483:495])
        # If the isextended flag is given,
        # there are extra headers to process.
        while isextended == 1:
            buf = tarfile.fileobj.read(BLOCKSIZE)
            pos = 0
            # Each continuation block holds up to 21 sparse structs.
            for i in xrange(21):
                try:
                    offset = nti(buf[pos:pos + 12])
                    numbytes = nti(buf[pos + 12:pos + 24])
                except ValueError:
                    break
                if offset > lastpos:
                    sp.append(_hole(lastpos, offset - lastpos))
                sp.append(_data(offset, numbytes, realpos))
                realpos += numbytes
                lastpos = offset + numbytes
                pos += 24
            isextended = ord(buf[504])
        # A trailing hole pads the file out to its original size.
        if lastpos < origsize:
            sp.append(_hole(lastpos, origsize - lastpos))
        self.sparse = sp
        self.offset_data = tarfile.fileobj.tell()
        tarfile.offset = self.offset_data + self._block(self.size)
        self.size = origsize
        return self
    def _proc_pax(self, tarfile):
        """Process an extended or global header as described in
        POSIX.1-2001.
        """
        # Read the header information.
        buf = tarfile.fileobj.read(self._block(self.size))
        # A pax header stores supplemental information for either
        # the following file (extended) or all following files
        # (global).
        if self.type == XGLTYPE:
            pax_headers = tarfile.pax_headers
        else:
            pax_headers = tarfile.pax_headers.copy()
        # Parse pax header information. A record looks like that:
        # "%d %s=%s\n" % (length, keyword, value). length is the size
        # of the complete record including the length field itself and
        # the newline. keyword and value are both UTF-8 encoded strings.
        regex = re.compile(r"(\d+) ([^=]+)=", re.U)
        pos = 0
        while True:
            match = regex.match(buf, pos)
            if not match:
                break
            length, keyword = match.groups()
            length = int(length)
            value = buf[match.end(2) + 1:match.start(1) + length - 1]
            keyword = keyword.decode("utf8")
            value = value.decode("utf8")
            pax_headers[keyword] = value
            pos += length
        # Fetch the next header.
        try:
            next = self.fromtarfile(tarfile)
        except HeaderError:
            raise SubsequentHeaderError("missing or bad subsequent header")
        if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
            # Patch the TarInfo object with the extended header info.
            next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
            next.offset = self.offset
            if "size" in pax_headers:
                # If the extended header replaces the size field,
                # we need to recalculate the offset where the next
                # header starts.
                offset = next.offset_data
                if next.isreg() or next.type not in SUPPORTED_TYPES:
                    offset += next._block(next.size)
                tarfile.offset = offset
        return next
    def _apply_pax_info(self, pax_headers, encoding, errors):
        """Replace fields with supplemental information from a previous
        pax extended or global header.
        """
        for keyword, value in pax_headers.iteritems():
            if keyword not in PAX_FIELDS:
                continue
            if keyword == "path":
                value = value.rstrip("/")
            if keyword in PAX_NUMBER_FIELDS:
                try:
                    value = PAX_NUMBER_FIELDS[keyword](value)
                except ValueError:
                    value = 0
            else:
                value = uts(value, encoding, errors)
            setattr(self, keyword, value)
        self.pax_headers = pax_headers.copy()
    def _block(self, count):
        """Round up a byte count by BLOCKSIZE and return it,
        e.g. _block(834) => 1024.
        """
        blocks, remainder = divmod(count, BLOCKSIZE)
        if remainder:
            blocks += 1
        return blocks * BLOCKSIZE
    # Type predicates; REGULAR_TYPES etc. are module-level constants.
    def isreg(self):
        return self.type in REGULAR_TYPES
    def isfile(self):
        return self.isreg()
    def isdir(self):
        return self.type == DIRTYPE
    def issym(self):
        return self.type == SYMTYPE
    def islnk(self):
        return self.type == LNKTYPE
    def ischr(self):
        return self.type == CHRTYPE
    def isblk(self):
        return self.type == BLKTYPE
    def isfifo(self):
        return self.type == FIFOTYPE
    def issparse(self):
        return self.type == GNUTYPE_SPARSE
    def isdev(self):
        return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
# class TarInfo
class TarFile(object):
"""The TarFile Class provides an interface to tar archives.
"""
debug = 0 # May be set from 0 (no msgs) to 3 (all msgs)
dereference = False # If true, add content of linked file to the
# tar file, else the link.
ignore_zeros = False # If true, skips empty or invalid blocks and
# continues processing.
errorlevel = 1 # If 0, fatal errors only appear in debug
# messages (if debug >= 0). If > 0, errors
# are passed to the caller as exceptions.
format = DEFAULT_FORMAT # The format to use when creating an archive.
encoding = ENCODING # Encoding for 8-bit character strings.
errors = None # Error handler for unicode conversion.
tarinfo = TarInfo # The default TarInfo class to use.
fileobject = ExFileObject # The default ExFileObject class to use.
    def __init__(self, name=None, mode="r", fileobj=None, format=None,
            tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
            errors=None, pax_headers=None, debug=None, errorlevel=None):
        """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
        read from an existing archive, 'a' to append data to an existing
        file or 'w' to create a new file overwriting an existing one. `mode'
        defaults to 'r'.
        If `fileobj' is given, it is used for reading or writing data. If it
        can be determined, `mode' is overridden by `fileobj's mode.
        `fileobj' is not closed, when TarFile is closed.
        """
        # len(mode) > 1 rejects multi-char strings that would otherwise
        # pass the substring test below (e.g. "ra" in "raw").
        if len(mode) > 1 or mode not in "raw":
            raise ValueError("mode must be 'r', 'a' or 'w'")
        self.mode = mode
        self._mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode]
        if not fileobj:
            if self.mode == "a" and not os.path.exists(name):
                # Create nonexistent files in append mode.
                self.mode = "w"
                self._mode = "wb"
            fileobj = bltn_open(name, self._mode)
            self._extfileobj = False
        else:
            if name is None and hasattr(fileobj, "name"):
                name = fileobj.name
            if hasattr(fileobj, "mode"):
                self._mode = fileobj.mode
            # Caller-supplied fileobj is never closed by close().
            self._extfileobj = True
        self.name = os.path.abspath(name) if name else None
        self.fileobj = fileobj
        # Init attributes.
        if format is not None:
            self.format = format
        if tarinfo is not None:
            self.tarinfo = tarinfo
        if dereference is not None:
            self.dereference = dereference
        if ignore_zeros is not None:
            self.ignore_zeros = ignore_zeros
        if encoding is not None:
            self.encoding = encoding
        if errors is not None:
            self.errors = errors
        elif mode == "r":
            self.errors = "utf-8"
        else:
            self.errors = "strict"
        if pax_headers is not None and self.format == PAX_FORMAT:
            self.pax_headers = pax_headers
        else:
            self.pax_headers = {}
        if debug is not None:
            self.debug = debug
        if errorlevel is not None:
            self.errorlevel = errorlevel
        # Init datastructures.
        self.closed = False
        self.members = []       # list of members as TarInfo objects
        self._loaded = False    # flag if all members have been read
        self.offset = self.fileobj.tell()
                                # current position in the archive file
        self.inodes = {}        # dictionary caching the inodes of
                                # archive members already added
        try:
            if self.mode == "r":
                self.firstmember = None
                self.firstmember = self.next()
            if self.mode == "a":
                # Move to the end of the archive,
                # before the first empty block.
                while True:
                    self.fileobj.seek(self.offset)
                    try:
                        tarinfo = self.tarinfo.fromtarfile(self)
                        self.members.append(tarinfo)
                    except EOFHeaderError:
                        self.fileobj.seek(self.offset)
                        break
                    except HeaderError, e:
                        raise ReadError(str(e))
            if self.mode in "aw":
                self._loaded = True
                if self.pax_headers:
                    buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())
                    self.fileobj.write(buf)
                    self.offset += len(buf)
        except:
            # Undo partial construction: close what we opened, re-raise.
            if not self._extfileobj:
                self.fileobj.close()
            self.closed = True
            raise
    # Deprecated "posix" boolean property; a thin shim over self.format.
    def _getposix(self):
        return self.format == USTAR_FORMAT
    def _setposix(self, value):
        import warnings
        warnings.warn("use the format attribute instead", DeprecationWarning,
                      2)
        if value:
            self.format = USTAR_FORMAT
        else:
            self.format = GNU_FORMAT
    posix = property(_getposix, _setposix)
#--------------------------------------------------------------------------
# Below are the classmethods which act as alternate constructors to the
# TarFile class. The open() method is the only one that is needed for
# public use; it is the "super"-constructor and is able to select an
# adequate "sub"-constructor for a particular compression using the mapping
# from OPEN_METH.
#
# This concept allows one to subclass TarFile without losing the comfort of
# the super-constructor. A sub-constructor is registered and made available
# by adding it to the mapping in OPEN_METH.
    @classmethod
    def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
        """Open a tar archive for reading, writing or appending. Return
           an appropriate TarFile class.

           mode:
           'r' or 'r:*' open for reading with transparent compression
           'r:'         open for reading exclusively uncompressed
           'r:gz'       open for reading with gzip compression
           'r:bz2'      open for reading with bzip2 compression
           'a' or 'a:'  open for appending, creating the file if necessary
           'w' or 'w:'  open for writing without compression
           'w:gz'       open for writing with gzip compression
           'w:bz2'      open for writing with bzip2 compression

           'r|*'        open a stream of tar blocks with transparent compression
           'r|'         open an uncompressed stream of tar blocks for reading
           'r|gz'       open a gzip compressed stream of tar blocks
           'r|bz2'      open a bzip2 compressed stream of tar blocks
           'w|'         open an uncompressed stream for writing
           'w|gz'       open a gzip compressed stream for writing
           'w|bz2'      open a bzip2 compressed stream for writing
        """
        if not name and not fileobj:
            raise ValueError("nothing to open")
        if mode in ("r", "r:*"):
            # Find out which *open() is appropriate for opening the file.
            # Each registered opener is tried in turn; a seekable fileobj is
            # rewound after every failed attempt.
            for comptype in cls.OPEN_METH:
                func = getattr(cls, cls.OPEN_METH[comptype])
                if fileobj is not None:
                    saved_pos = fileobj.tell()
                try:
                    return func(name, "r", fileobj, **kwargs)
                except (ReadError, CompressionError), e:
                    if fileobj is not None:
                        fileobj.seek(saved_pos)
                    continue
            raise ReadError("file could not be opened successfully")
        elif ":" in mode:
            filemode, comptype = mode.split(":", 1)
            filemode = filemode or "r"
            comptype = comptype or "tar"
            # Select the *open() function according to
            # given compression.
            if comptype in cls.OPEN_METH:
                func = getattr(cls, cls.OPEN_METH[comptype])
            else:
                raise CompressionError("unknown compression type %r" % comptype)
            return func(name, filemode, fileobj, **kwargs)
        elif "|" in mode:
            filemode, comptype = mode.split("|", 1)
            filemode = filemode or "r"
            comptype = comptype or "tar"
            if filemode not in "rw":
                raise ValueError("mode must be 'r' or 'w'")
            # Stream mode: wrap the file object in a _Stream that performs
            # the (de)compression block-wise; the TarFile owns the stream.
            t = cls(name, filemode,
                    _Stream(name, filemode, comptype, fileobj, bufsize),
                    **kwargs)
            t._extfileobj = False
            return t
        elif mode in "aw":
            return cls.taropen(name, mode, fileobj, **kwargs)
        raise ValueError("undiscernible mode")
@classmethod
def taropen(cls, name, mode="r", fileobj=None, **kwargs):
"""Open uncompressed tar archive name for reading or writing.
"""
if len(mode) > 1 or mode not in "raw":
raise ValueError("mode must be 'r', 'a' or 'w'")
return cls(name, mode, fileobj, **kwargs)
@classmethod
def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
"""Open gzip compressed tar archive name for reading or writing.
Appending is not allowed.
"""
if len(mode) > 1 or mode not in "rw":
raise ValueError("mode must be 'r' or 'w'")
try:
import gzip
gzip.GzipFile
except (ImportError, AttributeError):
raise CompressionError("gzip module is not available")
if fileobj is None:
fileobj = bltn_open(name, mode + "b")
try:
t = cls.taropen(name, mode,
gzip.GzipFile(name, mode, compresslevel, fileobj),
**kwargs)
except IOError:
raise ReadError("not a gzip file")
t._extfileobj = False
return t
@classmethod
def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
"""Open bzip2 compressed tar archive name for reading or writing.
Appending is not allowed.
"""
if len(mode) > 1 or mode not in "rw":
raise ValueError("mode must be 'r' or 'w'.")
try:
import bz2
except ImportError:
raise CompressionError("bz2 module is not available")
if fileobj is not None:
fileobj = _BZ2Proxy(fileobj, mode)
else:
fileobj = bz2.BZ2File(name, mode, compresslevel=compresslevel)
try:
t = cls.taropen(name, mode, fileobj, **kwargs)
except (IOError, EOFError):
raise ReadError("not a bzip2 file")
t._extfileobj = False
return t
# All *open() methods are registered here.
OPEN_METH = {
"tar": "taropen", # uncompressed tar
"gz": "gzopen", # gzip compressed tar
"bz2": "bz2open" # bzip2 compressed tar
}
#--------------------------------------------------------------------------
# The public methods which TarFile provides:
def close(self):
"""Close the TarFile. In write-mode, two finishing zero blocks are
appended to the archive.
"""
if self.closed:
return
if self.mode in "aw":
self.fileobj.write(NUL * (BLOCKSIZE * 2))
self.offset += (BLOCKSIZE * 2)
# fill up the end with zero-blocks
# (like option -b20 for tar does)
blocks, remainder = divmod(self.offset, RECORDSIZE)
if remainder > 0:
self.fileobj.write(NUL * (RECORDSIZE - remainder))
if not self._extfileobj:
self.fileobj.close()
self.closed = True
def getmember(self, name):
"""Return a TarInfo object for member `name'. If `name' can not be
found in the archive, KeyError is raised. If a member occurs more
than once in the archive, its last occurrence is assumed to be the
most up-to-date version.
"""
tarinfo = self._getmember(name)
if tarinfo is None:
raise KeyError("filename %r not found" % name)
return tarinfo
def getmembers(self):
"""Return the members of the archive as a list of TarInfo objects. The
list has the same order as the members in the archive.
"""
self._check()
if not self._loaded: # if we want to obtain a list of
self._load() # all members, we first have to
# scan the whole archive.
return self.members
def getnames(self):
"""Return the members of the archive as a list of their names. It has
the same order as the list returned by getmembers().
"""
return [tarinfo.name for tarinfo in self.getmembers()]
    def gettarinfo(self, name=None, arcname=None, fileobj=None):
        """Create a TarInfo object for either the file `name' or the file
           object `fileobj' (using os.fstat on its file descriptor). You can
           modify some of the TarInfo's attributes before you add it using
           addfile(). If given, `arcname' specifies an alternative name for the
           file in the archive.

           Returns None for file types that cannot be archived (e.g. sockets).
        """
        self._check("aw")
        # When fileobj is given, replace name by
        # fileobj's real name.
        if fileobj is not None:
            name = fileobj.name
        # Building the name of the member in the archive.
        # Backward slashes are converted to forward slashes,
        # Absolute paths are turned to relative paths.
        if arcname is None:
            arcname = name
        drv, arcname = os.path.splitdrive(arcname)
        arcname = arcname.replace(os.sep, "/")
        arcname = arcname.lstrip("/")
        # Now, fill the TarInfo object with
        # information specific for the file.
        tarinfo = self.tarinfo()
        tarinfo.tarfile = self
        # Use os.stat or os.lstat, depending on platform
        # and if symlinks shall be resolved.
        if fileobj is None:
            if hasattr(os, "lstat") and not self.dereference:
                statres = os.lstat(name)
            else:
                statres = os.stat(name)
        else:
            statres = os.fstat(fileobj.fileno())
        linkname = ""
        stmd = statres.st_mode
        if stat.S_ISREG(stmd):
            inode = (statres.st_ino, statres.st_dev)
            if not self.dereference and statres.st_nlink > 1 and \
                    inode in self.inodes and arcname != self.inodes[inode]:
                # Is it a hardlink to an already
                # archived file?
                type = LNKTYPE
                linkname = self.inodes[inode]
            else:
                # The inode is added only if its valid.
                # For win32 it is always 0.
                type = REGTYPE
                if inode[0]:
                    self.inodes[inode] = arcname
        elif stat.S_ISDIR(stmd):
            type = DIRTYPE
        elif stat.S_ISFIFO(stmd):
            type = FIFOTYPE
        elif stat.S_ISLNK(stmd):
            type = SYMTYPE
            linkname = os.readlink(name)
        elif stat.S_ISCHR(stmd):
            type = CHRTYPE
        elif stat.S_ISBLK(stmd):
            type = BLKTYPE
        else:
            # Unsupported file type (e.g. socket): signal by returning None.
            return None
        # Fill the TarInfo object with all
        # information we can get.
        tarinfo.name = arcname
        tarinfo.mode = stmd
        tarinfo.uid = statres.st_uid
        tarinfo.gid = statres.st_gid
        if type == REGTYPE:
            tarinfo.size = statres.st_size
        else:
            tarinfo.size = 0L
        tarinfo.mtime = statres.st_mtime
        tarinfo.type = type
        tarinfo.linkname = linkname
        # Best-effort symbolic owner/group names; numeric ids remain the
        # fallback when the lookup fails.
        if pwd:
            try:
                tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
            except KeyError:
                pass
        if grp:
            try:
                tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
            except KeyError:
                pass
        if type in (CHRTYPE, BLKTYPE):
            if hasattr(os, "major") and hasattr(os, "minor"):
                tarinfo.devmajor = os.major(statres.st_rdev)
                tarinfo.devminor = os.minor(statres.st_rdev)
        return tarinfo
    def list(self, verbose=True):
        """Print a table of contents to sys.stdout. If `verbose' is False, only
           the names of the members are printed. If it is True, an `ls -l'-like
           output is produced.
        """
        self._check()
        for tarinfo in self:
            if verbose:
                # Permission bits, owner/group, size (or device numbers
                # for char/block devices), and mtime -- like `ls -l'.
                print filemode(tarinfo.mode),
                print "%s/%s" % (tarinfo.uname or tarinfo.uid,
                                 tarinfo.gname or tarinfo.gid),
                if tarinfo.ischr() or tarinfo.isblk():
                    print "%10s" % ("%d,%d" \
                                    % (tarinfo.devmajor, tarinfo.devminor)),
                else:
                    print "%10d" % tarinfo.size,
                print "%d-%02d-%02d %02d:%02d:%02d" \
                      % time.localtime(tarinfo.mtime)[:6],
            # Directories get a trailing slash, matching ls output.
            print tarinfo.name + ("/" if tarinfo.isdir() else ""),
            if verbose:
                if tarinfo.issym():
                    print "->", tarinfo.linkname,
                if tarinfo.islnk():
                    print "link to", tarinfo.linkname,
            print
    def add(self, name, arcname=None, recursive=True, exclude=None, filter=None):
        """Add the file `name' to the archive. `name' may be any type of file
           (directory, fifo, symbolic link, etc.). If given, `arcname'
           specifies an alternative name for the file in the archive.
           Directories are added recursively by default. This can be avoided by
           setting `recursive' to False. `exclude' is a function that should
           return True for each filename to be excluded. `filter' is a function
           that expects a TarInfo object argument and returns the changed
           TarInfo object, if it returns None the TarInfo object will be
           excluded from the archive.
        """
        self._check("aw")
        if arcname is None:
            arcname = name
        # Exclude pathnames.
        if exclude is not None:
            import warnings
            warnings.warn("use the filter argument instead",
                    DeprecationWarning, 2)
            if exclude(name):
                self._dbg(2, "tarfile: Excluded %r" % name)
                return
        # Skip if somebody tries to archive the archive...
        if self.name is not None and os.path.abspath(name) == self.name:
            self._dbg(2, "tarfile: Skipped %r" % name)
            return
        self._dbg(1, name)
        # Create a TarInfo object from the file.
        tarinfo = self.gettarinfo(name, arcname)
        if tarinfo is None:
            self._dbg(1, "tarfile: Unsupported type %r" % name)
            return
        # Change or exclude the TarInfo object.
        if filter is not None:
            tarinfo = filter(tarinfo)
            if tarinfo is None:
                self._dbg(2, "tarfile: Excluded %r" % name)
                return
        # Append the tar header and data to the archive.
        if tarinfo.isreg():
            f = bltn_open(name, "rb")
            self.addfile(tarinfo, f)
            f.close()
        elif tarinfo.isdir():
            self.addfile(tarinfo)
            if recursive:
                # Descend into the directory, forwarding all options.
                for f in os.listdir(name):
                    self.add(os.path.join(name, f), os.path.join(arcname, f),
                            recursive, exclude, filter)
        else:
            # Links, fifos and devices carry no data payload.
            self.addfile(tarinfo)
    def addfile(self, tarinfo, fileobj=None):
        """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is
           given, tarinfo.size bytes are read from it and added to the archive.
           You can create TarInfo objects using gettarinfo().
           On Windows platforms, `fileobj' should always be opened with mode
           'rb' to avoid irritation about the file size.
        """
        self._check("aw")
        # Work on a copy so the caller's TarInfo object is not mutated.
        tarinfo = copy.copy(tarinfo)
        buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
        self.fileobj.write(buf)
        self.offset += len(buf)
        # If there's data to follow, append it.
        if fileobj is not None:
            copyfileobj(fileobj, self.fileobj, tarinfo.size)
            blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
            if remainder > 0:
                # Pad the final partial block with NULs up to BLOCKSIZE.
                self.fileobj.write(NUL * (BLOCKSIZE - remainder))
                blocks += 1
            self.offset += blocks * BLOCKSIZE
        self.members.append(tarinfo)
    def extractall(self, path=".", members=None):
        """Extract all members from the archive to the current working
           directory and set owner, modification time and permissions on
           directories afterwards. `path' specifies a different directory
           to extract to. `members' is optional and must be a subset of the
           list returned by getmembers().
        """
        directories = []
        if members is None:
            members = self
        for tarinfo in members:
            if tarinfo.isdir():
                # Extract directories with a safe mode.
                directories.append(tarinfo)
                tarinfo = copy.copy(tarinfo)
                tarinfo.mode = 0700
            self.extract(tarinfo, path)
        # Reverse sort directories.
        # Children must be processed before their parents so that a
        # parent's restored mtime is not clobbered afterwards.
        directories.sort(key=operator.attrgetter('name'))
        directories.reverse()
        # Set correct owner, mtime and filemode on directories.
        for tarinfo in directories:
            dirpath = os.path.join(path, tarinfo.name)
            try:
                self.chown(tarinfo, dirpath)
                self.utime(tarinfo, dirpath)
                self.chmod(tarinfo, dirpath)
            except ExtractError, e:
                if self.errorlevel > 1:
                    raise
                else:
                    self._dbg(1, "tarfile: %s" % e)
    def extract(self, member, path=""):
        """Extract a member from the archive to the current working directory,
           using its full name. Its file information is extracted as accurately
           as possible. `member' may be a filename or a TarInfo object. You can
           specify a different directory using `path'.
        """
        self._check("r")
        if isinstance(member, basestring):
            tarinfo = self.getmember(member)
        else:
            tarinfo = member
        # Prepare the link target for makelink().
        if tarinfo.islnk():
            tarinfo._link_target = os.path.join(path, tarinfo.linkname)
        try:
            self._extract_member(tarinfo, os.path.join(path, tarinfo.name))
        except EnvironmentError, e:
            # errorlevel 0 downgrades OS errors to debug messages.
            if self.errorlevel > 0:
                raise
            else:
                if e.filename is None:
                    self._dbg(1, "tarfile: %s" % e.strerror)
                else:
                    self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
        except ExtractError, e:
            # errorlevel <= 1 downgrades extraction errors as well.
            if self.errorlevel > 1:
                raise
            else:
                self._dbg(1, "tarfile: %s" % e)
    def extractfile(self, member):
        """Extract a member from the archive as a file object. `member' may be
           a filename or a TarInfo object. If `member' is a regular file, a
           file-like object is returned. If `member' is a link, a file-like
           object is constructed from the link's target. If `member' is none of
           the above, None is returned.
           The file-like object is read-only and provides the following
           methods: read(), readline(), readlines(), seek() and tell()
        """
        self._check("r")
        if isinstance(member, basestring):
            tarinfo = self.getmember(member)
        else:
            tarinfo = member
        if tarinfo.isreg():
            return self.fileobject(self, tarinfo)
        elif tarinfo.type not in SUPPORTED_TYPES:
            # If a member's type is unknown, it is treated as a
            # regular file.
            return self.fileobject(self, tarinfo)
        elif tarinfo.islnk() or tarinfo.issym():
            if isinstance(self.fileobj, _Stream):
                # A small but ugly workaround for the case that someone tries
                # to extract a (sym)link as a file-object from a non-seekable
                # stream of tar blocks.
                raise StreamError("cannot extract (sym)link as file object")
            else:
                # A (sym)link's file object is its target's file object.
                return self.extractfile(self._find_link_target(tarinfo))
        else:
            # If there's no data associated with the member (directory, chrdev,
            # blkdev, etc.), return None instead of a file object.
            return None
    def _extract_member(self, tarinfo, targetpath):
        """Extract the TarInfo object tarinfo to a physical
           file called targetpath.
        """
        # Fetch the TarInfo object for the given name
        # and build the destination pathname, replacing
        # forward slashes to platform specific separators.
        targetpath = targetpath.rstrip("/")
        targetpath = targetpath.replace("/", os.sep)
        # Create all upper directories.
        upperdirs = os.path.dirname(targetpath)
        if upperdirs and not os.path.exists(upperdirs):
            # Create directories that are not part of the archive with
            # default permissions.
            os.makedirs(upperdirs)
        if tarinfo.islnk() or tarinfo.issym():
            self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
        else:
            self._dbg(1, tarinfo.name)
        # Dispatch on the member type; unknown types fall back to
        # regular-file extraction via makeunknown().
        if tarinfo.isreg():
            self.makefile(tarinfo, targetpath)
        elif tarinfo.isdir():
            self.makedir(tarinfo, targetpath)
        elif tarinfo.isfifo():
            self.makefifo(tarinfo, targetpath)
        elif tarinfo.ischr() or tarinfo.isblk():
            self.makedev(tarinfo, targetpath)
        elif tarinfo.islnk() or tarinfo.issym():
            self.makelink(tarinfo, targetpath)
        elif tarinfo.type not in SUPPORTED_TYPES:
            self.makeunknown(tarinfo, targetpath)
        else:
            self.makefile(tarinfo, targetpath)
        # Restore metadata; mode/mtime are skipped for symlinks, which
        # cannot carry them portably.
        self.chown(tarinfo, targetpath)
        if not tarinfo.issym():
            self.chmod(tarinfo, targetpath)
            self.utime(tarinfo, targetpath)
#--------------------------------------------------------------------------
# Below are the different file methods. They are called via
# _extract_member() when extract() is called. They can be replaced in a
# subclass to implement other functionality.
    def makedir(self, tarinfo, targetpath):
        """Make a directory called targetpath.
        """
        try:
            # Use a safe mode for the directory, the real mode is set
            # later in _extract_member().
            os.mkdir(targetpath, 0700)
        except EnvironmentError, e:
            # A pre-existing directory is not an error.
            if e.errno != errno.EEXIST:
                raise
def makefile(self, tarinfo, targetpath):
"""Make a file called targetpath.
"""
source = self.extractfile(tarinfo)
target = bltn_open(targetpath, "wb")
copyfileobj(source, target)
source.close()
target.close()
    def makeunknown(self, tarinfo, targetpath):
        """Make a file from a TarInfo object with an unknown type
           at targetpath.
        """
        # Unknown member types are extracted as if they were regular files.
        self.makefile(tarinfo, targetpath)
        self._dbg(1, "tarfile: Unknown file type %r, " \
                     "extracted as regular file." % tarinfo.type)
def makefifo(self, tarinfo, targetpath):
"""Make a fifo called targetpath.
"""
if hasattr(os, "mkfifo"):
os.mkfifo(targetpath)
else:
raise ExtractError("fifo not supported by system")
def makedev(self, tarinfo, targetpath):
"""Make a character or block device called targetpath.
"""
if not hasattr(os, "mknod") or not hasattr(os, "makedev"):
raise ExtractError("special devices not supported by system")
mode = tarinfo.mode
if tarinfo.isblk():
mode |= stat.S_IFBLK
else:
mode |= stat.S_IFCHR
os.mknod(targetpath, mode,
os.makedev(tarinfo.devmajor, tarinfo.devminor))
    def makelink(self, tarinfo, targetpath):
        """Make a (symbolic) link called targetpath. If it cannot be created
           (platform limitation), we try to make a copy of the referenced file
           instead of a link.
        """
        if hasattr(os, "symlink") and hasattr(os, "link"):
            # For systems that support symbolic and hard links.
            if tarinfo.issym():
                if os.path.lexists(targetpath):
                    # Remove an existing entry so os.symlink cannot fail.
                    os.unlink(targetpath)
                os.symlink(tarinfo.linkname, targetpath)
            else:
                # See extract().
                if os.path.exists(tarinfo._link_target):
                    if os.path.lexists(targetpath):
                        os.unlink(targetpath)
                    os.link(tarinfo._link_target, targetpath)
                else:
                    # Hard-link target missing on disk: extract a copy of
                    # the referenced archive member instead.
                    self._extract_member(self._find_link_target(tarinfo), targetpath)
        else:
            try:
                self._extract_member(self._find_link_target(tarinfo), targetpath)
            except KeyError:
                raise ExtractError("unable to resolve link inside archive")
    def chown(self, tarinfo, targetpath):
        """Set owner of targetpath according to tarinfo.
        """
        if pwd and hasattr(os, "geteuid") and os.geteuid() == 0:
            # We have to be root to do so.
            # Prefer symbolic owner/group names; fall back to numeric ids.
            try:
                g = grp.getgrnam(tarinfo.gname)[2]
            except KeyError:
                g = tarinfo.gid
            try:
                u = pwd.getpwnam(tarinfo.uname)[2]
            except KeyError:
                u = tarinfo.uid
            try:
                # lchown, if available, avoids dereferencing symlinks.
                if tarinfo.issym() and hasattr(os, "lchown"):
                    os.lchown(targetpath, u, g)
                else:
                    if sys.platform != "os2emx":
                        os.chown(targetpath, u, g)
            except EnvironmentError, e:
                raise ExtractError("could not change owner")
def chmod(self, tarinfo, targetpath):
"""Set file permissions of targetpath according to tarinfo.
"""
if hasattr(os, 'chmod'):
try:
os.chmod(targetpath, tarinfo.mode)
except EnvironmentError, e:
raise ExtractError("could not change mode")
def utime(self, tarinfo, targetpath):
"""Set modification time of targetpath according to tarinfo.
"""
if not hasattr(os, 'utime'):
return
try:
os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime))
except EnvironmentError, e:
raise ExtractError("could not change modification time")
#--------------------------------------------------------------------------
    def next(self):
        """Return the next member of the archive as a TarInfo object, when
           TarFile is opened for reading. Return None if there is no more
           available.
        """
        self._check("ra")
        if self.firstmember is not None:
            # Serve the member that was read ahead during construction.
            m = self.firstmember
            self.firstmember = None
            return m
        # Read the next block.
        self.fileobj.seek(self.offset)
        tarinfo = None
        while True:
            try:
                tarinfo = self.tarinfo.fromtarfile(self)
            except EOFHeaderError, e:
                # ignore_zeros: skip over stray zero blocks between members.
                if self.ignore_zeros:
                    self._dbg(2, "0x%X: %s" % (self.offset, e))
                    self.offset += BLOCKSIZE
                    continue
            except InvalidHeaderError, e:
                if self.ignore_zeros:
                    self._dbg(2, "0x%X: %s" % (self.offset, e))
                    self.offset += BLOCKSIZE
                    continue
                elif self.offset == 0:
                    # A bad first header means this is not a tar file.
                    raise ReadError(str(e))
            except EmptyHeaderError:
                if self.offset == 0:
                    raise ReadError("empty file")
            except TruncatedHeaderError, e:
                if self.offset == 0:
                    raise ReadError(str(e))
            except SubsequentHeaderError, e:
                raise ReadError(str(e))
            break
        if tarinfo is not None:
            self.members.append(tarinfo)
        else:
            # End of archive: mark the member list as complete.
            self._loaded = True
        return tarinfo
#--------------------------------------------------------------------------
# Little helper methods:
def _getmember(self, name, tarinfo=None, normalize=False):
"""Find an archive member by name from bottom to top.
If tarinfo is given, it is used as the starting point.
"""
# Ensure that all members have been loaded.
members = self.getmembers()
# Limit the member search list up to tarinfo.
if tarinfo is not None:
members = members[:members.index(tarinfo)]
if normalize:
name = os.path.normpath(name)
for member in reversed(members):
if normalize:
member_name = os.path.normpath(member.name)
else:
member_name = member.name
if name == member_name:
return member
def _load(self):
"""Read through the entire archive file and look for readable
members.
"""
while True:
tarinfo = self.next()
if tarinfo is None:
break
self._loaded = True
    def _check(self, mode=None):
        """Check if TarFile is still open, and if the operation's mode
           corresponds to TarFile's mode.

           `mode' is a string of acceptable mode characters, e.g. "aw".
        """
        if self.closed:
            raise IOError("%s is closed" % self.__class__.__name__)
        if mode is not None and self.mode not in mode:
            raise IOError("bad operation for mode %r" % self.mode)
def _find_link_target(self, tarinfo):
"""Find the target member of a symlink or hardlink member in the
archive.
"""
if tarinfo.issym():
# Always search the entire archive.
linkname = os.path.dirname(tarinfo.name) + "/" + tarinfo.linkname
limit = None
else:
# Search the archive before the link, because a hard link is
# just a reference to an already archived file.
linkname = tarinfo.linkname
limit = tarinfo
member = self._getmember(linkname, tarinfo=limit, normalize=True)
if member is None:
raise KeyError("linkname %r not found" % linkname)
return member
def __iter__(self):
"""Provide an iterator object.
"""
if self._loaded:
return iter(self.members)
else:
return TarIter(self)
def _dbg(self, level, msg):
"""Write debugging output to sys.stderr.
"""
if level <= self.debug:
print >> sys.stderr, msg
    def __enter__(self):
        # Context-manager entry: verify the archive has not been closed.
        self._check()
        return self
def __exit__(self, type, value, traceback):
if type is None:
self.close()
else:
# An exception occurred. We must not call close() because
# it would try to write end-of-archive blocks and padding.
if not self._extfileobj:
self.fileobj.close()
self.closed = True
# class TarFile
class TarIter:
    """Iterator Class.

       for tarinfo in TarFile(...):
           suite...
    """
    def __init__(self, tarfile):
        """Construct a TarIter object.
        """
        self.tarfile = tarfile
        # Position within tarfile.members for the fully-loaded case.
        self.index = 0
    def __iter__(self):
        """Return iterator object.
        """
        return self
    def next(self):
        """Return the next item using TarFile's next() method.
           When all members have been read, set TarFile as _loaded.
        """
        # Fix for SF #1100429: Under rare circumstances it can
        # happen that getmembers() is called during iteration,
        # which will cause TarIter to stop prematurely.
        if not self.tarfile._loaded:
            tarinfo = self.tarfile.next()
            if not tarinfo:
                self.tarfile._loaded = True
                raise StopIteration
        else:
            # Archive fully loaded: serve members from the cached list.
            try:
                tarinfo = self.tarfile.members[self.index]
            except IndexError:
                raise StopIteration
        self.index += 1
        return tarinfo
# Helper classes for sparse file support
class _section:
    """Base class for _data and _hole.
    """
    def __init__(self, offset, size):
        # Half-open interval [offset, offset + size) in the sparse file.
        self.offset = offset
        self.size = size
    def __contains__(self, offset):
        return self.offset <= offset < self.offset + self.size
class _data(_section):
    """Represent a data section in a sparse file.
    """
    def __init__(self, offset, size, realpos):
        _section.__init__(self, offset, size)
        # Position of this section's payload inside the archive member.
        self.realpos = realpos
class _hole(_section):
    """Represent a hole section in a sparse file.
    """
    # Holes carry no payload; reads inside them yield NUL bytes.
    pass
class _ringbuffer(list):
    """Ringbuffer class which increases performance
       over a regular list.
    """
    def __init__(self):
        # Index of the last section that matched; lookups resume here
        # because sparse-file data is typically accessed sequentially.
        self.idx = 0
    def find(self, offset):
        # Scan forward (wrapping around the list) from the last hit until
        # a section containing `offset' is found or we return to the start.
        idx = self.idx
        while True:
            item = self[idx]
            if offset in item:
                break
            idx += 1
            if idx == len(self):
                idx = 0
            if idx == self.idx:
                # End of File
                return None
        self.idx = idx
        return item
#---------------------------------------------
# zipfile compatible TarFile class
#---------------------------------------------
# Compression constants mirroring the zipfile module's values.
TAR_PLAIN = 0           # zipfile.ZIP_STORED
TAR_GZIPPED = 8         # zipfile.ZIP_DEFLATED
class TarFileCompat:
    """TarFile class compatible with standard module zipfile's
       ZipFile class.
    """
    def __init__(self, file, mode="r", compression=TAR_PLAIN):
        from warnings import warnpy3k
        warnpy3k("the TarFileCompat class has been removed in Python 3.0",
                 stacklevel=2)
        if compression == TAR_PLAIN:
            self.tarfile = TarFile.taropen(file, mode)
        elif compression == TAR_GZIPPED:
            self.tarfile = TarFile.gzopen(file, mode)
        else:
            raise ValueError("unknown compression constant")
        if mode[0:1] == "r":
            # Mirror zipfile's ZipInfo attributes onto each member.
            members = self.tarfile.getmembers()
            for m in members:
                m.filename = m.name
                m.file_size = m.size
                m.date_time = time.gmtime(m.mtime)[:6]
    def namelist(self):
        return map(lambda m: m.name, self.infolist())
    def infolist(self):
        # Only regular-file members correspond to zipfile entries.
        return filter(lambda m: m.type in REGULAR_TYPES,
                      self.tarfile.getmembers())
    def printdir(self):
        self.tarfile.list()
    def testzip(self):
        # Tar archives carry no per-member CRC; nothing to verify.
        return
    def getinfo(self, name):
        return self.tarfile.getmember(name)
    def read(self, name):
        return self.tarfile.extractfile(self.tarfile.getmember(name)).read()
    def write(self, filename, arcname=None, compress_type=None):
        # `compress_type' is accepted for zipfile API compatibility only.
        self.tarfile.add(filename, arcname)
    def writestr(self, zinfo, bytes):
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        import calendar
        # Build a TarInfo from the zipfile-style ZipInfo object.
        tinfo = TarInfo(zinfo.filename)
        tinfo.size = len(bytes)
        tinfo.mtime = calendar.timegm(zinfo.date_time)
        self.tarfile.addfile(tinfo, StringIO(bytes))
    def close(self):
        self.tarfile.close()
#class TarFileCompat
#--------------------
# exported functions
#--------------------
def is_tarfile(name):
    """Return True if name points to a tar archive that we
       are able to handle, else return False.
    """
    # Opening the archive performs all format detection; any TarError
    # means the file is not a readable tar archive.
    try:
        archive = open(name)
        archive.close()
    except TarError:
        return False
    return True
# Preserve the builtin open() under a private name, then expose
# TarFile.open() as the module-level open() for backward compatibility.
bltn_open = open
open = TarFile.open
|
gpl-2.0
|
dhermes/gcloud-python
|
bigtable/tests/unit/test_app_profile.py
|
4
|
26669
|
# Copyright 2018 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import mock
from ._testing import _make_credentials
class MultiCallableStub(object):
    """Stub for the grpc.UnaryUnaryMultiCallable interface."""
    def __init__(self, method, channel_stub):
        # `method' is the gRPC method name; `channel_stub' is the
        # ChannelStub that records requests and holds canned responses.
        self.method = method
        self.channel_stub = channel_stub
    def __call__(self, request, timeout=None, metadata=None, credentials=None):
        # Record the call on the owning channel, then replay the next
        # canned response (popped from the end of the response list).
        self.channel_stub.requests.append((self.method, request))
        return self.channel_stub.responses.pop()
class ChannelStub(object):
    """Stub for the grpc.Channel interface."""
    def __init__(self, responses=None):
        # Avoid a mutable default argument: a shared `[]` default would
        # leak popped/pushed state between test cases. Callers may still
        # pass their own list of canned responses.
        self.responses = [] if responses is None else responses
        self.requests = []
    def unary_unary(self, method, request_serializer=None, response_deserializer=None):
        # Every method stub reports back to this channel instance.
        return MultiCallableStub(method, self)
class TestAppProfile(unittest.TestCase):
PROJECT = "project"
INSTANCE_ID = "instance-id"
APP_PROFILE_ID = "app-profile-id"
APP_PROFILE_NAME = "projects/{}/instances/{}/appProfiles/{}".format(
PROJECT, INSTANCE_ID, APP_PROFILE_ID
)
CLUSTER_ID = "cluster-id"
OP_ID = 8765
OP_NAME = "operations/projects/{}/instances/{}/appProfiles/{}/operations/{}".format(
PROJECT, INSTANCE_ID, APP_PROFILE_ID, OP_ID
)
    @staticmethod
    def _get_target_class():
        # Imported lazily so import failures surface inside the test run.
        from google.cloud.bigtable.app_profile import AppProfile
        return AppProfile
    def _make_one(self, *args, **kwargs):
        # Instantiate the class under test (AppProfile).
        return self._get_target_class()(*args, **kwargs)
    @staticmethod
    def _get_target_client_class():
        from google.cloud.bigtable.client import Client
        return Client
    def _make_client(self, *args, **kwargs):
        # Instantiate a Bigtable Client for tests needing a real client.
        return self._get_target_client_class()(*args, **kwargs)
    def test_constructor_defaults(self):
        # With no keyword arguments every optional attribute stays None.
        client = _Client(self.PROJECT)
        instance = _Instance(self.INSTANCE_ID, client)
        app_profile = self._make_one(self.APP_PROFILE_ID, instance)
        self.assertIsInstance(app_profile, self._get_target_class())
        self.assertEqual(app_profile._instance, instance)
        self.assertIsNone(app_profile.routing_policy_type)
        self.assertIsNone(app_profile.description)
        self.assertIsNone(app_profile.cluster_id)
        self.assertIsNone(app_profile.allow_transactional_writes)
    def test_constructor_non_defaults(self):
        from google.cloud.bigtable.enums import RoutingPolicyType
        ANY = RoutingPolicyType.ANY
        DESCRIPTION_1 = "routing policy any"
        APP_PROFILE_ID_2 = "app-profile-id-2"
        SINGLE = RoutingPolicyType.SINGLE
        DESCRIPTION_2 = "routing policy single"
        ALLOW_WRITES = True
        client = _Client(self.PROJECT)
        instance = _Instance(self.INSTANCE_ID, client)
        # ANY routing: no cluster pinning, write flag unspecified.
        app_profile1 = self._make_one(
            self.APP_PROFILE_ID,
            instance,
            routing_policy_type=ANY,
            description=DESCRIPTION_1,
        )
        # SINGLE routing: pinned cluster plus transactional-write flag.
        app_profile2 = self._make_one(
            APP_PROFILE_ID_2,
            instance,
            routing_policy_type=SINGLE,
            description=DESCRIPTION_2,
            cluster_id=self.CLUSTER_ID,
            allow_transactional_writes=ALLOW_WRITES,
        )
        self.assertEqual(app_profile1.app_profile_id, self.APP_PROFILE_ID)
        self.assertIs(app_profile1._instance, instance)
        self.assertEqual(app_profile1.routing_policy_type, ANY)
        self.assertEqual(app_profile1.description, DESCRIPTION_1)
        self.assertEqual(app_profile2.app_profile_id, APP_PROFILE_ID_2)
        self.assertIs(app_profile2._instance, instance)
        self.assertEqual(app_profile2.routing_policy_type, SINGLE)
        self.assertEqual(app_profile2.description, DESCRIPTION_2)
        self.assertEqual(app_profile2.cluster_id, self.CLUSTER_ID)
        self.assertEqual(app_profile2.allow_transactional_writes, ALLOW_WRITES)
    def test_name_property(self):
        # name must compose the full resource path from the client's
        # project plus the instance and app-profile ids.
        credentials = _make_credentials()
        client = self._make_client(
            project=self.PROJECT, credentials=credentials, admin=True
        )
        instance = _Instance(self.INSTANCE_ID, client)
        app_profile = self._make_one(self.APP_PROFILE_ID, instance)
        self.assertEqual(app_profile.name, self.APP_PROFILE_NAME)
    def test___eq__(self):
        # Same id and same instance compare equal via the == operator.
        client = _Client(self.PROJECT)
        instance = _Instance(self.INSTANCE_ID, client)
        app_profile1 = self._make_one(self.APP_PROFILE_ID, instance)
        app_profile2 = self._make_one(self.APP_PROFILE_ID, instance)
        self.assertTrue(app_profile1 == app_profile2)
    def test___eq__type_instance_differ(self):
        # A foreign type or a differing instance must break equality.
        client = _Client(self.PROJECT)
        instance = _Instance(self.INSTANCE_ID, client)
        alt_instance = _Instance("other-instance", client)
        other_object = _Other(self.APP_PROFILE_ID, instance)
        app_profile1 = self._make_one(self.APP_PROFILE_ID, instance)
        app_profile2 = self._make_one(self.APP_PROFILE_ID, alt_instance)
        self.assertFalse(app_profile1 == other_object)
        self.assertFalse(app_profile1 == app_profile2)
    def test___ne__same_value(self):
        # != must be the exact negation of == for equal profiles.
        client = _Client(self.PROJECT)
        instance = _Instance(self.INSTANCE_ID, client)
        app_profile1 = self._make_one(self.APP_PROFILE_ID, instance)
        app_profile2 = self._make_one(self.APP_PROFILE_ID, instance)
        self.assertFalse(app_profile1 != app_profile2)
    def test___ne__(self):
        # Different app-profile ids compare unequal.
        client = _Client(self.PROJECT)
        instance = _Instance(self.INSTANCE_ID, client)
        app_profile1 = self._make_one("app_profile_id1", instance)
        app_profile2 = self._make_one("app_profile_id2", instance)
        self.assertTrue(app_profile1 != app_profile2)
def test_from_pb_success_routing_any(self):
from google.cloud.bigtable_admin_v2.types import instance_pb2 as data_v2_pb2
from google.cloud.bigtable.enums import RoutingPolicyType
client = _Client(self.PROJECT)
instance = _Instance(self.INSTANCE_ID, client)
desctiption = "routing any"
routing = RoutingPolicyType.ANY
multi_cluster_routing_use_any = (
data_v2_pb2.AppProfile.MultiClusterRoutingUseAny()
)
app_profile_pb = data_v2_pb2.AppProfile(
name=self.APP_PROFILE_NAME,
description=desctiption,
multi_cluster_routing_use_any=multi_cluster_routing_use_any,
)
klass = self._get_target_class()
app_profile = klass.from_pb(app_profile_pb, instance)
self.assertIsInstance(app_profile, klass)
self.assertIs(app_profile._instance, instance)
self.assertEqual(app_profile.app_profile_id, self.APP_PROFILE_ID)
self.assertEqual(app_profile.description, desctiption)
self.assertEqual(app_profile.routing_policy_type, routing)
self.assertIsNone(app_profile.cluster_id)
self.assertEqual(app_profile.allow_transactional_writes, False)
def test_from_pb_success_routing_single(self):
    """from_pb mirrors a single-cluster routing protobuf."""
    from google.cloud.bigtable_admin_v2.types import instance_pb2 as data_v2_pb2
    from google.cloud.bigtable.enums import RoutingPolicyType

    client = _Client(self.PROJECT)
    instance = _Instance(self.INSTANCE_ID, client)
    description = "routing single"
    writes_allowed = True
    expected_routing = RoutingPolicyType.SINGLE
    single_routing = data_v2_pb2.AppProfile.SingleClusterRouting(
        cluster_id=self.CLUSTER_ID,
        allow_transactional_writes=writes_allowed,
    )
    pb = data_v2_pb2.AppProfile(
        name=self.APP_PROFILE_NAME,
        description=description,
        single_cluster_routing=single_routing,
    )

    klass = self._get_target_class()
    profile = klass.from_pb(pb, instance)

    self.assertIsInstance(profile, klass)
    self.assertIs(profile._instance, instance)
    self.assertEqual(profile.app_profile_id, self.APP_PROFILE_ID)
    self.assertEqual(profile.description, description)
    self.assertEqual(profile.routing_policy_type, expected_routing)
    self.assertEqual(profile.cluster_id, self.CLUSTER_ID)
    self.assertEqual(profile.allow_transactional_writes, writes_allowed)
def test_from_pb_bad_app_profile_name(self):
    """from_pb rejects a name that does not match the resource template."""
    from google.cloud.bigtable_admin_v2.proto import instance_pb2 as data_v2_pb2

    bogus_name = "BAD_NAME"
    pb = data_v2_pb2.AppProfile(name=bogus_name)
    with self.assertRaises(ValueError):
        self._get_target_class().from_pb(pb, None)
def test_from_pb_instance_id_mistmatch(self):
    """from_pb rejects a pb whose name targets a different instance."""
    from google.cloud.bigtable_admin_v2.proto import instance_pb2 as data_v2_pb2

    ALT_INSTANCE_ID = "ALT_INSTANCE_ID"
    wrong_instance = _Instance(ALT_INSTANCE_ID, _Client(self.PROJECT))
    self.assertEqual(wrong_instance.instance_id, ALT_INSTANCE_ID)
    pb = data_v2_pb2.AppProfile(name=self.APP_PROFILE_NAME)
    with self.assertRaises(ValueError):
        self._get_target_class().from_pb(pb, wrong_instance)
def test_from_pb_project_mistmatch(self):
    """from_pb rejects a pb whose name targets a different project."""
    from google.cloud.bigtable_admin_v2.proto import instance_pb2 as data_v2_pb2

    ALT_PROJECT = "ALT_PROJECT"
    alt_client = _Client(project=ALT_PROJECT)
    instance = _Instance(self.INSTANCE_ID, alt_client)
    self.assertEqual(alt_client.project, ALT_PROJECT)
    pb = data_v2_pb2.AppProfile(name=self.APP_PROFILE_NAME)
    with self.assertRaises(ValueError):
        self._get_target_class().from_pb(pb, instance)
def test_reload_routing_any(self):
    """reload() replaces local config with the server-returned profile.

    The profile starts with ANY routing; the mocked ``get_app_profile``
    returns a SINGLE-routing profile, and every local field must be
    refreshed from that response.
    """
    from google.cloud.bigtable_admin_v2.gapic import bigtable_instance_admin_client
    from google.cloud.bigtable_admin_v2.proto import instance_pb2 as data_v2_pb2
    from google.cloud.bigtable.enums import RoutingPolicyType

    api = bigtable_instance_admin_client.BigtableInstanceAdminClient(mock.Mock())
    credentials = _make_credentials()
    client = self._make_client(
        project=self.PROJECT, credentials=credentials, admin=True
    )
    instance = _Instance(self.INSTANCE_ID, client)
    routing = RoutingPolicyType.ANY
    description = "routing policy any"
    app_profile = self._make_one(
        self.APP_PROFILE_ID,
        instance,
        routing_policy_type=routing,
        description=description,
    )
    # Create response_pb: the server reports SINGLE-cluster routing with
    # different description/cluster/transactional-writes settings.
    description_from_server = "routing policy switched to single"
    cluster_id_from_server = self.CLUSTER_ID
    allow_transactional_writes = True
    single_cluster_routing = data_v2_pb2.AppProfile.SingleClusterRouting(
        cluster_id=cluster_id_from_server,
        allow_transactional_writes=allow_transactional_writes,
    )
    response_pb = data_v2_pb2.AppProfile(
        name=app_profile.name,
        single_cluster_routing=single_cluster_routing,
        description=description_from_server,
    )
    # Patch the stub used by the API method.
    client._instance_admin_client = api
    instance_stub = client._instance_admin_client.transport
    instance_stub.get_app_profile.side_effect = [response_pb]
    # Create expected_result.
    expected_result = None  # reload() has no return value.
    # Check app_profile config values before.
    self.assertEqual(app_profile.routing_policy_type, routing)
    self.assertEqual(app_profile.description, description)
    self.assertIsNone(app_profile.cluster_id)
    self.assertIsNone(app_profile.allow_transactional_writes)
    # Perform the method and check the result: every field now mirrors the
    # server response.
    result = app_profile.reload()
    self.assertEqual(result, expected_result)
    self.assertEqual(app_profile.routing_policy_type, RoutingPolicyType.SINGLE)
    self.assertEqual(app_profile.description, description_from_server)
    self.assertEqual(app_profile.cluster_id, cluster_id_from_server)
    self.assertEqual(
        app_profile.allow_transactional_writes, allow_transactional_writes
    )
def test_exists(self):
    """exists() maps get_app_profile outcomes to True/False/raise.

    A successful fetch yields True, NotFound yields False, and any other
    API error (here BadRequest) propagates to the caller.
    """
    from google.cloud.bigtable_admin_v2.gapic import bigtable_instance_admin_client
    from google.cloud.bigtable_admin_v2.proto import instance_pb2 as data_v2_pb2
    from google.api_core import exceptions

    instance_api = bigtable_instance_admin_client.BigtableInstanceAdminClient(
        mock.Mock()
    )
    credentials = _make_credentials()
    client = self._make_client(
        project=self.PROJECT, credentials=credentials, admin=True
    )
    instance = client.instance(self.INSTANCE_ID)
    # Create response_pb
    response_pb = data_v2_pb2.AppProfile(name=self.APP_PROFILE_NAME)
    # Patch the stub used by the API method (the original assigned
    # _instance_admin_client twice; once is enough).
    client._instance_admin_client = instance_api
    instance_stub = client._instance_admin_client.transport
    # Three queued outcomes: found, not found, unexpected API error.
    instance_stub.get_app_profile.side_effect = [
        response_pb,
        exceptions.NotFound("testing"),
        exceptions.BadRequest("testing"),
    ]
    # Perform the method and check the result.
    non_existing_app_profile_id = "other-app-profile-id"
    app_profile = self._make_one(self.APP_PROFILE_ID, instance)
    alt_app_profile = self._make_one(non_existing_app_profile_id, instance)
    self.assertTrue(app_profile.exists())
    self.assertFalse(alt_app_profile.exists())
    with self.assertRaises(exceptions.BadRequest):
        alt_app_profile.exists()
def test_create_routing_any(self):
    """create() sends a CreateAppProfileRequest and returns the new profile.

    Uses multi-cluster (ANY) routing; the returned profile must mirror the
    request and carry no cluster-specific settings.
    """
    from google.cloud.bigtable_admin_v2.proto import (
        bigtable_instance_admin_pb2 as messages_v2_pb2,
    )
    from google.cloud.bigtable.enums import RoutingPolicyType
    from google.cloud.bigtable_admin_v2.gapic import bigtable_instance_admin_client

    credentials = _make_credentials()
    client = self._make_client(
        project=self.PROJECT, credentials=credentials, admin=True
    )
    instance = client.instance(self.INSTANCE_ID)
    routing = RoutingPolicyType.ANY
    description = "routing policy any"
    ignore_warnings = True
    app_profile = self._make_one(
        self.APP_PROFILE_ID,
        instance,
        routing_policy_type=routing,
        description=description,
    )
    # The request body is the profile's own protobuf representation.
    expected_request_app_profile = app_profile._to_pb()
    expected_request = messages_v2_pb2.CreateAppProfileRequest(
        parent=instance.name,
        app_profile_id=self.APP_PROFILE_ID,
        app_profile=expected_request_app_profile,
        ignore_warnings=ignore_warnings,
    )
    # Patch the stub used by the API method.
    channel = ChannelStub(responses=[expected_request_app_profile])
    instance_api = bigtable_instance_admin_client.BigtableInstanceAdminClient(
        channel=channel
    )
    client._instance_admin_client = instance_api
    # Perform the method and check the result.
    result = app_profile.create(ignore_warnings)
    actual_request = channel.requests[0][1]
    self.assertEqual(actual_request, expected_request)
    self.assertIsInstance(result, self._get_target_class())
    self.assertEqual(result.app_profile_id, self.APP_PROFILE_ID)
    self.assertIs(result._instance, instance)
    self.assertEqual(result.routing_policy_type, routing)
    self.assertEqual(result.description, description)
    self.assertEqual(result.allow_transactional_writes, False)
    self.assertIsNone(result.cluster_id)
def test_create_routing_single(self):
    """create() with SINGLE routing carries cluster and transactional flags."""
    from google.cloud.bigtable_admin_v2.proto import (
        bigtable_instance_admin_pb2 as messages_v2_pb2,
    )
    from google.cloud.bigtable.enums import RoutingPolicyType
    from google.cloud.bigtable_admin_v2.gapic import bigtable_instance_admin_client

    credentials = _make_credentials()
    client = self._make_client(
        project=self.PROJECT, credentials=credentials, admin=True
    )
    instance = client.instance(self.INSTANCE_ID)
    routing = RoutingPolicyType.SINGLE
    description = "routing policy single"
    allow_writes = False
    ignore_warnings = True
    app_profile = self._make_one(
        self.APP_PROFILE_ID,
        instance,
        routing_policy_type=routing,
        description=description,
        cluster_id=self.CLUSTER_ID,
        allow_transactional_writes=allow_writes,
    )
    # The request body is the profile's own protobuf representation.
    expected_request_app_profile = app_profile._to_pb()
    expected_request = messages_v2_pb2.CreateAppProfileRequest(
        parent=instance.name,
        app_profile_id=self.APP_PROFILE_ID,
        app_profile=expected_request_app_profile,
        ignore_warnings=ignore_warnings,
    )
    # Patch the stub used by the API method.
    channel = ChannelStub(responses=[expected_request_app_profile])
    instance_api = bigtable_instance_admin_client.BigtableInstanceAdminClient(
        channel=channel
    )
    client._instance_admin_client = instance_api
    # Perform the method and check the result.
    result = app_profile.create(ignore_warnings)
    actual_request = channel.requests[0][1]
    self.assertEqual(actual_request, expected_request)
    self.assertIsInstance(result, self._get_target_class())
    self.assertEqual(result.app_profile_id, self.APP_PROFILE_ID)
    self.assertIs(result._instance, instance)
    self.assertEqual(result.routing_policy_type, routing)
    self.assertEqual(result.description, description)
    self.assertEqual(result.allow_transactional_writes, allow_writes)
    self.assertEqual(result.cluster_id, self.CLUSTER_ID)
def test_create_app_profile_with_wrong_routing_policy(self):
    """create() refuses a profile built without a routing policy."""
    credentials = _make_credentials()
    client = self._make_client(
        project=self.PROJECT, credentials=credentials, admin=True
    )
    instance = client.instance(self.INSTANCE_ID)
    no_policy_profile = self._make_one(
        self.APP_PROFILE_ID, instance, routing_policy_type=None
    )
    with self.assertRaises(ValueError):
        no_policy_profile.create()
def test_update_app_profile_routing_any(self):
    """update() sends an UpdateAppProfileRequest and returns the LRO.

    NOTE(review): despite the ``routing_any`` name, this test configures
    SINGLE-cluster routing (and the ``routing_single`` sibling uses ANY);
    the two names look swapped -- confirm against upstream intent.
    """
    from google.api_core import operation
    from google.longrunning import operations_pb2
    from google.protobuf.any_pb2 import Any
    from google.cloud.bigtable_admin_v2.proto import (
        bigtable_instance_admin_pb2 as messages_v2_pb2,
    )
    from google.cloud.bigtable.enums import RoutingPolicyType
    from google.cloud.bigtable_admin_v2.gapic import bigtable_instance_admin_client
    from google.protobuf import field_mask_pb2

    credentials = _make_credentials()
    client = self._make_client(
        project=self.PROJECT, credentials=credentials, admin=True
    )
    instance = client.instance(self.INSTANCE_ID)
    routing = RoutingPolicyType.SINGLE
    description = "to routing policy single"
    allow_writes = True
    app_profile = self._make_one(
        self.APP_PROFILE_ID,
        instance,
        routing_policy_type=routing,
        description=description,
        cluster_id=self.CLUSTER_ID,
        allow_transactional_writes=allow_writes,
    )
    # Create response_pb: a long-running Operation carrying
    # UpdateAppProfileMetadata.
    metadata = messages_v2_pb2.UpdateAppProfileMetadata()
    type_url = "type.googleapis.com/{}".format(
        messages_v2_pb2.UpdateAppProfileMetadata.DESCRIPTOR.full_name
    )
    response_pb = operations_pb2.Operation(
        name=self.OP_NAME,
        metadata=Any(type_url=type_url, value=metadata.SerializeToString()),
    )
    # Patch the stub used by the API method.
    channel = ChannelStub(responses=[response_pb])
    instance_api = bigtable_instance_admin_client.BigtableInstanceAdminClient(
        channel=channel
    )
    # Mock api calls
    client._instance_admin_client = instance_api
    # Perform the method and check the result.
    ignore_warnings = True
    # The update mask must list exactly the fields being changed.
    expected_request_update_mask = field_mask_pb2.FieldMask(
        paths=["description", "single_cluster_routing"]
    )
    expected_request = messages_v2_pb2.UpdateAppProfileRequest(
        app_profile=app_profile._to_pb(),
        update_mask=expected_request_update_mask,
        ignore_warnings=ignore_warnings,
    )
    result = app_profile.update(ignore_warnings=ignore_warnings)
    actual_request = channel.requests[0][1]
    self.assertEqual(actual_request, expected_request)
    self.assertIsInstance(result, operation.Operation)
    self.assertEqual(result.operation.name, self.OP_NAME)
    self.assertIsInstance(result.metadata, messages_v2_pb2.UpdateAppProfileMetadata)
def test_update_app_profile_routing_single(self):
    """update() with ANY routing masks only multi_cluster_routing_use_any.

    NOTE(review): despite the ``routing_single`` name, this test uses ANY
    routing (and the ``routing_any`` sibling uses SINGLE); the two names
    look swapped -- confirm against upstream intent.
    """
    from google.api_core import operation
    from google.longrunning import operations_pb2
    from google.protobuf.any_pb2 import Any
    from google.cloud.bigtable_admin_v2.proto import (
        bigtable_instance_admin_pb2 as messages_v2_pb2,
    )
    from google.cloud.bigtable.enums import RoutingPolicyType
    from google.cloud.bigtable_admin_v2.gapic import bigtable_instance_admin_client
    from google.protobuf import field_mask_pb2

    credentials = _make_credentials()
    client = self._make_client(
        project=self.PROJECT, credentials=credentials, admin=True
    )
    instance = client.instance(self.INSTANCE_ID)
    routing = RoutingPolicyType.ANY
    app_profile = self._make_one(
        self.APP_PROFILE_ID, instance, routing_policy_type=routing
    )
    # Create response_pb: a long-running Operation carrying
    # UpdateAppProfileMetadata.
    metadata = messages_v2_pb2.UpdateAppProfileMetadata()
    type_url = "type.googleapis.com/{}".format(
        messages_v2_pb2.UpdateAppProfileMetadata.DESCRIPTOR.full_name
    )
    response_pb = operations_pb2.Operation(
        name=self.OP_NAME,
        metadata=Any(type_url=type_url, value=metadata.SerializeToString()),
    )
    # Patch the stub used by the API method.
    channel = ChannelStub(responses=[response_pb])
    instance_api = bigtable_instance_admin_client.BigtableInstanceAdminClient(
        channel=channel
    )
    # Mock api calls
    client._instance_admin_client = instance_api
    # Perform the method and check the result.
    ignore_warnings = True
    # Only the ANY-routing field may appear in the update mask.
    expected_request_update_mask = field_mask_pb2.FieldMask(
        paths=["multi_cluster_routing_use_any"]
    )
    expected_request = messages_v2_pb2.UpdateAppProfileRequest(
        app_profile=app_profile._to_pb(),
        update_mask=expected_request_update_mask,
        ignore_warnings=ignore_warnings,
    )
    result = app_profile.update(ignore_warnings=ignore_warnings)
    actual_request = channel.requests[0][1]
    self.assertEqual(actual_request, expected_request)
    self.assertIsInstance(result, operation.Operation)
    self.assertEqual(result.operation.name, self.OP_NAME)
    self.assertIsInstance(result.metadata, messages_v2_pb2.UpdateAppProfileMetadata)
def test_update_app_profile_with_wrong_routing_policy(self):
    """update() refuses a profile that has no routing policy set."""
    credentials = _make_credentials()
    client = self._make_client(
        project=self.PROJECT, credentials=credentials, admin=True
    )
    instance = client.instance(self.INSTANCE_ID)
    no_policy_profile = self._make_one(
        self.APP_PROFILE_ID, instance, routing_policy_type=None
    )
    with self.assertRaises(ValueError):
        no_policy_profile.update()
def test_delete(self):
    """delete() returns None after a successful RPC.

    The original test stubbed ``delete_cluster``, which the app-profile
    delete path does not call, so the queued response was never consumed;
    the stub now targets ``delete_app_profile``.
    """
    from google.protobuf import empty_pb2
    from google.cloud.bigtable_admin_v2.gapic import bigtable_instance_admin_client

    instance_api = bigtable_instance_admin_client.BigtableInstanceAdminClient(
        mock.Mock()
    )
    credentials = _make_credentials()
    client = self._make_client(
        project=self.PROJECT, credentials=credentials, admin=True
    )
    instance = client.instance(self.INSTANCE_ID)
    app_profile = self._make_one(self.APP_PROFILE_ID, instance)
    # Create response_pb
    response_pb = empty_pb2.Empty()
    # Patch the stub used by the API method.
    client._instance_admin_client = instance_api
    instance_stub = client._instance_admin_client.transport
    instance_stub.delete_app_profile.side_effect = [response_pb]
    # Create expected_result.
    expected_result = None  # delete() has no return value.
    # Perform the method and check the result.
    result = app_profile.delete()
    self.assertEqual(result, expected_result)
class _Client(object):
def __init__(self, project):
self.project = project
self.project_name = "projects/" + self.project
self._operations_stub = mock.sentinel.operations_stub
def __eq__(self, other):
return other.project == self.project and other.project_name == self.project_name
class _Instance(object):
def __init__(self, instance_id, client):
self.instance_id = instance_id
self._client = client
def __eq__(self, other):
return other.instance_id == self.instance_id and other._client == self._client
class _Other(object):
def __init__(self, app_profile_id, instance):
self.app_profile_id = app_profile_id
self._instance = instance
|
apache-2.0
|
kenshay/ImageScripter
|
ProgramData/Android/ADB/platform-tools/systrace/catapult/telemetry/telemetry/value/trace.py
|
5
|
5019
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import datetime
import logging
import os
import random
import shutil
import sys
import tempfile
from py_utils import cloud_storage # pylint: disable=import-error
from telemetry.internal.util import file_handle
from telemetry.timeline import trace_data as trace_data_module
from telemetry import value as value_module
from tracing.trace_data import trace_data as trace_data_module
class TraceValue(value_module.Value):
    def __init__(self, page, trace_data, important=False, description=None):
        """A value that contains a TraceData object and knows how to
        output it.

        Adding TraceValues and outputting as JSON will produce a directory full of
        HTML files called trace_files. Outputting as chart JSON will also produce
        an index, files.html, linking to each of these files.
        """
        super(TraceValue, self).__init__(
            page, name='trace', units='', important=important,
            description=description, tir_label=None, grouping_keys=None)
        # The trace is serialized to a temp .html file immediately, so the
        # TraceData object itself does not need to stay alive.
        self._temp_file = self._GetTempFileHandle(trace_data)
        self._cloud_url = None
        self._serialized_file_handle = None

    @property
    def value(self):
        # Prefer the cloud URL once uploaded, then the locally serialized
        # copy; implicitly None before either exists.
        if self._cloud_url:
            return self._cloud_url
        elif self._serialized_file_handle:
            return self._serialized_file_handle.GetAbsPath()

    def _GetTraceParts(self, trace_data):
        # (traces, part) pairs for every trace part present in trace_data.
        # NOTE(review): ``trace_data_module`` resolves to the later of the
        # two file-level imports that share this alias -- confirm intended.
        return [(trace_data.GetTracesFor(p), p)
                for p in trace_data_module.ALL_TRACE_PARTS
                if trace_data.HasTracesFor(p)]

    def _GetTempFileHandle(self, trace_data):
        # Serialize the trace into a closed NamedTemporaryFile so other code
        # can reopen it by path; caller owns deletion via CleanUp().
        tf = tempfile.NamedTemporaryFile(delete=False, suffix='.html')
        tf.close()
        title = ''
        if self.page:
            title = self.page.display_name
        trace_data.Serialize(tf.name, trace_title=title)
        return file_handle.FromFilePath(tf.name)

    def __repr__(self):
        if self.page:
            page_name = self.page.display_name
        else:
            page_name = 'None'
        return 'TraceValue(%s, %s)' % (page_name, self.name)

    def CleanUp(self):
        """Cleans up tempfile after it is no longer needed.

        A cleaned up TraceValue cannot be used for further operations. CleanUp()
        may be called more than once without error.
        """
        if self._temp_file is None:
            return
        os.remove(self._temp_file.GetAbsPath())
        self._temp_file = None

    def __enter__(self):
        return self

    def __exit__(self, _, __, ___):
        # Context-manager exit always releases the temp file.
        self.CleanUp()

    @property
    def cleaned_up(self):
        # True once CleanUp() has removed the temp file.
        return self._temp_file is None

    @property
    def filename(self):
        return self._temp_file.GetAbsPath()

    def GetBuildbotDataType(self, output_context):
        # Traces are not representable as buildbot chart data.
        return None

    def GetBuildbotValue(self):
        return None

    def GetRepresentativeNumber(self):
        return None

    def GetRepresentativeString(self):
        return None

    @staticmethod
    def GetJSONTypeName():
        return 'trace'

    @classmethod
    def MergeLikeValuesFromSamePage(cls, values):
        # Multiple traces from one page collapse to the first one recorded.
        assert len(values) > 0
        return values[0]

    @classmethod
    def MergeLikeValuesFromDifferentPages(cls, values):
        # Traces from different pages are never merged.
        return None

    def AsDict(self):
        # Dictionary form for JSON output; includes the file id and/or cloud
        # URL when they exist. Raises ValueError after CleanUp().
        if self._temp_file is None:
            raise ValueError('Tried to serialize TraceValue without tempfile.')
        d = super(TraceValue, self).AsDict()
        if self._serialized_file_handle:
            d['file_id'] = self._serialized_file_handle.id
        if self._cloud_url:
            d['cloud_url'] = self._cloud_url
        return d

    def Serialize(self, dir_path):
        # Copy the temp trace into dir_path under a unique, page-derived name
        # and remember the resulting file handle.
        if self._temp_file is None:
            raise ValueError('Tried to serialize nonexistent trace.')
        if self.page:
            file_name = self.page.file_safe_name
        else:
            file_name = ''
        file_name += str(self._temp_file.id)
        file_name += datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
        file_name += self._temp_file.extension
        file_path = os.path.abspath(os.path.join(dir_path, file_name))
        shutil.copy(self._temp_file.GetAbsPath(), file_path)
        self._serialized_file_handle = file_handle.FromFilePath(file_path)
        return self._serialized_file_handle

    def UploadToCloud(self, bucket):
        # Upload the serialized trace (or the raw temp file) to cloud storage
        # under a randomized unique name and remember the public URL.
        if self._temp_file is None:
            raise ValueError('Tried to upload nonexistent trace to Cloud Storage.')
        try:
            if self._serialized_file_handle:
                fh = self._serialized_file_handle
            else:
                fh = self._temp_file
            remote_path = ('trace-file-id_%s-%s-%d%s' % (
                fh.id,
                datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S'),
                random.randint(1, 100000),
                fh.extension))
            self._cloud_url = cloud_storage.Insert(
                bucket, remote_path, fh.GetAbsPath())
            sys.stderr.write(
                'View generated trace files online at %s for page %s\n' %
                (self._cloud_url, self.page.url if self.page else 'unknown'))
            return self._cloud_url
        except cloud_storage.PermissionError as e:
            # NOTE(review): ``e.message`` is Python 2 only; under Python 3
            # this would raise AttributeError -- confirm intended runtime.
            logging.error('Cannot upload trace files to cloud storage due to '
                          ' permission error: %s' % e.message)
|
gpl-3.0
|
simbs/edx-platform
|
common/test/acceptance/tests/studio/test_studio_asset.py
|
37
|
1708
|
"""
Acceptance tests for Studio related to the asset index page.
"""
from ...pages.studio.asset_index import AssetIndexPage
from .base_studio_test import StudioCourseTest
from ...fixtures.base import StudioApiLoginError
class AssetIndexTest(StudioCourseTest):
    """
    Tests for the Asset index page.
    """

    def setUp(self, is_staff=False):
        """Build the page object pointing at this course's asset index."""
        super(AssetIndexTest, self).setUp()
        course = self.course_info
        self.asset_page = AssetIndexPage(
            self.browser,
            course['org'],
            course['number'],
            course['run']
        )

    def populate_course_fixture(self, course_fixture):
        """
        Populate the children of the test course fixture.
        """
        self.course_fixture.add_asset(['image.jpg', 'textbook.pdf'])

    def test_page_existence(self):
        """
        The asset index page can be visited.
        """
        self.asset_page.visit()

    def test_type_filter_exists(self):
        """
        The type filter is present on the page.
        """
        self.asset_page.visit()
        filter_present = self.asset_page.type_filter_on_page()
        assert filter_present is True

    def test_filter_results(self):
        """
        Selecting a type filter narrows the displayed results.
        """
        self.asset_page.visit()
        unfiltered_count = len(self.asset_page.return_results_set())
        if not self.asset_page.select_type_filter(1):
            raise StudioApiLoginError("Could not open select Type filter")
        filtered_count = len(self.asset_page.return_results_set())
        assert self.asset_page.type_filter_header_label_visible()
        assert unfiltered_count > filtered_count
|
agpl-3.0
|
LeShadow/MTGProject
|
data_process/set.py
|
1
|
1308
|
class Set:
def __init__(self):
self.name = ""
self.code = ""
self.code_magiccards = ""
self.is_promo = False
self.date = ""
def setName(self, name):
self.name = name
def getName(self, name):
return self.name
def setCode(self, code):
self.code = code
def getCode(self):
return self.code
def setCodeMC(self, CodeMC):
self.code_magiccards = CodeMC
def getCodeMC(self):
return self.code_magiccards
def setIsPromo(self, promo=False):
self.is_promo = promo
def getIsPromo(self):
return self.is_promo
def setDate(self, date):
self.date = date
def getDate(self):
return self.date
def __str__(self):
return_string = ""
#return_string = "\"\": {\n"
name = self.name.replace("\"","²")
return_string = "{\n"
return_string +="\"Name\": \"" + name + "\",\n"
return_string += "\"Code\": \"" + self.code + "\",\n"
return_string += "\"Code_Magiccards\": \"" + self.code_magiccards + "\",\n"
return_string += "\"Is_Promo\": \"" + str(self.is_promo) + "\",\n"
return_string += "\"Date\": \"" + self.date + "\"\n"
return_string += "},"
return return_string
|
gpl-3.0
|
mozilla-b2g/external_skia
|
gm/rebaseline_server/imagediffdb.py
|
65
|
16019
|
#!/usr/bin/python
"""
Copyright 2013 Google Inc.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
Calulate differences between image pairs, and store them in a database.
"""
import contextlib
import csv
import logging
import os
import re
import shutil
import sys
import tempfile
import urllib
try:
from PIL import Image, ImageChops
except ImportError:
raise ImportError('Requires PIL to be installed; see '
+ 'http://www.pythonware.com/products/pil/')
# Set the PYTHONPATH to include the tools directory.
sys.path.append(
os.path.join(
os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir,
'tools'))
import find_run_binary
# Path to the compiled skpdiff tool, located via the tools directory.
SKPDIFF_BINARY = find_run_binary.find_path_to_program('skpdiff')

DEFAULT_IMAGE_SUFFIX = '.png'
DEFAULT_IMAGES_SUBDIR = 'images'
# Characters not allowed in locator strings (anything outside [A-Za-z0-9_-]).
DISALLOWED_FILEPATH_CHAR_REGEX = re.compile('[^\w\-]')

# Subdirectories (under storage_root) holding generated diff images.
DIFFS_SUBDIR = 'diffs'
WHITEDIFFS_SUBDIR = 'whitediffs'

# Number of histogram buckets per color channel (8-bit images).
VALUES_PER_BAND = 256

# Keys used within DiffRecord dictionary representations.
# NOTE: Keep these in sync with static/constants.js
KEY__DIFFERENCES__MAX_DIFF_PER_CHANNEL = 'maxDiffPerChannel'
KEY__DIFFERENCES__NUM_DIFF_PIXELS = 'numDifferingPixels'
KEY__DIFFERENCES__PERCENT_DIFF_PIXELS = 'percentDifferingPixels'
KEY__DIFFERENCES__PERCEPTUAL_DIFF = 'perceptualDifference'
class DiffRecord(object):
    """ Record of differences between two images. """

    def __init__(self, storage_root,
                 expected_image_url, expected_image_locator,
                 actual_image_url, actual_image_locator,
                 expected_images_subdir=DEFAULT_IMAGES_SUBDIR,
                 actual_images_subdir=DEFAULT_IMAGES_SUBDIR,
                 image_suffix=DEFAULT_IMAGE_SUFFIX):
        """Download this pair of images (unless we already have them on local disk),
        and prepare a DiffRecord for them.

        TODO(epoger): Make this asynchronously download images, rather than blocking
        until the images have been downloaded and processed.

        Args:
          storage_root: root directory on local disk within which we store all
              images
          expected_image_url: file or HTTP url from which we will download the
              expected image
          expected_image_locator: a unique ID string under which we will store the
              expected image within storage_root (probably including a checksum to
              guarantee uniqueness)
          actual_image_url: file or HTTP url from which we will download the
              actual image
          actual_image_locator: a unique ID string under which we will store the
              actual image within storage_root (probably including a checksum to
              guarantee uniqueness)
          expected_images_subdir: the subdirectory expected images are stored in.
          actual_images_subdir: the subdirectory actual images are stored in.
          image_suffix: the suffix of images.
        """
        expected_image_locator = _sanitize_locator(expected_image_locator)
        actual_image_locator = _sanitize_locator(actual_image_locator)

        # Download the expected/actual images, if we don't have them already.
        # TODO(rmistry): Add a parameter that makes _download_and_open_image raise
        # an exception if images are not found locally (instead of trying to
        # download them).
        expected_image_file = os.path.join(
            storage_root, expected_images_subdir,
            str(expected_image_locator) + image_suffix)
        actual_image_file = os.path.join(
            storage_root, actual_images_subdir,
            str(actual_image_locator) + image_suffix)
        try:
            expected_image = _download_and_open_image(
                expected_image_file, expected_image_url)
        except Exception:
            logging.exception('unable to download expected_image_url %s to file %s' %
                              (expected_image_url, expected_image_file))
            raise
        try:
            actual_image = _download_and_open_image(
                actual_image_file, actual_image_url)
        except Exception:
            logging.exception('unable to download actual_image_url %s to file %s' %
                              (actual_image_url, actual_image_file))
            raise

        # Generate the diff image (absolute diff at each pixel) and
        # max_diff_per_channel.
        diff_image = _generate_image_diff(actual_image, expected_image)
        diff_histogram = diff_image.histogram()
        (diff_width, diff_height) = diff_image.size
        self._max_diff_per_channel = _max_per_band(diff_histogram)

        # Generate the whitediff image (any differing pixels show as white).
        # This is tricky, because when you convert color images to grayscale or
        # black & white in PIL, it has its own ideas about thresholds.
        # We have to force it: if a pixel has any color at all, it's a '1'.
        bands = diff_image.split()
        graydiff_image = ImageChops.lighter(ImageChops.lighter(
            bands[0], bands[1]), bands[2])
        whitediff_image = (graydiff_image.point(lambda p: p > 0 and VALUES_PER_BAND)
                           .convert('1', dither=Image.NONE))

        # Calculate the perceptual difference percentage.
        # NOTE(review): if the skpdiff CSV yields no rows,
        # self._perceptual_difference is never assigned and the accessors
        # below would raise AttributeError -- confirm skpdiff always emits
        # one row.
        skpdiff_csv_dir = tempfile.mkdtemp()
        try:
            skpdiff_csv_output = os.path.join(skpdiff_csv_dir, 'skpdiff-output.csv')
            expected_img = os.path.join(storage_root, expected_images_subdir,
                                        str(expected_image_locator) + image_suffix)
            actual_img = os.path.join(storage_root, actual_images_subdir,
                                      str(actual_image_locator) + image_suffix)
            find_run_binary.run_command(
                [SKPDIFF_BINARY, '-p', expected_img, actual_img,
                 '--csv', skpdiff_csv_output, '-d', 'perceptual'])
            with contextlib.closing(open(skpdiff_csv_output)) as csv_file:
                for row in csv.DictReader(csv_file):
                    perceptual_similarity = float(row[' perceptual'].strip())
                    if not 0 <= perceptual_similarity <= 1:
                        # skpdiff outputs -1 if the images are different sizes. Treat any
                        # output that does not lie in [0, 1] as having 0% perceptual
                        # similarity.
                        perceptual_similarity = 0
                    # skpdiff returns the perceptual similarity, convert it to get the
                    # perceptual difference percentage.
                    self._perceptual_difference = 100 - (perceptual_similarity * 100)
        finally:
            shutil.rmtree(skpdiff_csv_dir)

        # Final touches on diff_image: use whitediff_image as an alpha mask.
        # Unchanged pixels are transparent; differing pixels are opaque.
        diff_image.putalpha(whitediff_image)

        # Store the diff and whitediff images generated above.
        diff_image_locator = _get_difference_locator(
            expected_image_locator=expected_image_locator,
            actual_image_locator=actual_image_locator)
        basename = str(diff_image_locator) + image_suffix
        _save_image(diff_image, os.path.join(
            storage_root, DIFFS_SUBDIR, basename))
        _save_image(whitediff_image, os.path.join(
            storage_root, WHITEDIFFS_SUBDIR, basename))

        # Calculate difference metrics.
        (self._width, self._height) = diff_image.size
        self._num_pixels_differing = (
            whitediff_image.histogram()[VALUES_PER_BAND - 1])

    def get_num_pixels_differing(self):
        """Returns the absolute number of pixels that differ."""
        return self._num_pixels_differing

    def get_percent_pixels_differing(self):
        """Returns the percentage of pixels that differ, as a float between
        0 and 100 (inclusive)."""
        return ((float(self._num_pixels_differing) * 100) /
                (self._width * self._height))

    def get_perceptual_difference(self):
        """Returns the perceptual difference percentage."""
        return self._perceptual_difference

    def get_max_diff_per_channel(self):
        """Returns the maximum difference between the expected and actual images
        for each R/G/B channel, as a list."""
        return self._max_diff_per_channel

    def as_dict(self):
        """Returns a dictionary representation of this DiffRecord, as needed when
        constructing the JSON representation."""
        return {
            KEY__DIFFERENCES__NUM_DIFF_PIXELS: self._num_pixels_differing,
            KEY__DIFFERENCES__PERCENT_DIFF_PIXELS:
                self.get_percent_pixels_differing(),
            KEY__DIFFERENCES__MAX_DIFF_PER_CHANNEL: self._max_diff_per_channel,
            KEY__DIFFERENCES__PERCEPTUAL_DIFF: self._perceptual_difference,
        }
class ImageDiffDB(object):
    """Maintains a collection of DiffRecords for image pairs.

    Records are computed on demand by add_image_pair() and kept in an
    in-memory dictionary for later retrieval with get_diff_record().
    """

    def __init__(self, storage_root):
        """
        Args:
          storage_root: string; root path within the DB will store all of its stuff
        """
        self._storage_root = storage_root
        # Maps (expected_image_locator, actual_image_locator) to a DiffRecord,
        # or to None when diff generation failed.
        self._diff_dict = {}

    def add_image_pair(self,
                       expected_image_url, expected_image_locator,
                       actual_image_url, actual_image_locator):
        """Download this pair of images if needed and record their diff.

        TODO(epoger): Make this asynchronously download images, rather than blocking
        until the images have been downloaded and processed.
        When we do that, we should probably add a new method that will block
        until all of the images have been downloaded and processed. Otherwise,
        we won't know when it's safe to start calling get_diff_record().
        jcgregorio notes: maybe just make ImageDiffDB thread-safe and create a
        thread-pool/worker queue at a higher level that just uses ImageDiffDB?

        Args:
          expected_image_url: file or HTTP url from which we will download the
              expected image
          expected_image_locator: a unique ID string under which we will store the
              expected image within storage_root (probably including a checksum to
              guarantee uniqueness)
          actual_image_url: file or HTTP url from which we will download the
              actual image
          actual_image_locator: a unique ID string under which we will store the
              actual image within storage_root (probably including a checksum to
              guarantee uniqueness)
        """
        expected_image_locator = _sanitize_locator(expected_image_locator)
        actual_image_locator = _sanitize_locator(actual_image_locator)
        key = (expected_image_locator, actual_image_locator)
        if key in self._diff_dict:
            return
        try:
            record = DiffRecord(
                self._storage_root,
                expected_image_url=expected_image_url,
                expected_image_locator=expected_image_locator,
                actual_image_url=actual_image_url,
                actual_image_locator=actual_image_locator)
        except Exception:
            # If we can't create a real DiffRecord for this (expected, actual) pair,
            # store None and the UI will show whatever information we DO have.
            # Fixes http://skbug.com/2368 .
            logging.exception(
                'got exception while creating a DiffRecord for '
                'expected_image_url=%s , actual_image_url=%s; returning None' % (
                    expected_image_url, actual_image_url))
            record = None
        self._diff_dict[key] = record

    def get_diff_record(self, expected_image_locator, actual_image_locator):
        """Returns the DiffRecord for this image pair.

        Raises a KeyError if we don't have a DiffRecord for this image pair.
        """
        sanitized_key = (_sanitize_locator(expected_image_locator),
                         _sanitize_locator(actual_image_locator))
        return self._diff_dict[sanitized_key]
# Utility functions
def _max_per_band(histogram):
  """Given the histogram of an image, return the maximum value of each band
  (a.k.a. "color channel", such as R/G/B) across the entire image.

  Args:
    histogram: PIL histogram

  Returns the maximum value of each band within the image histogram, as a list.
  """
  assert(len(histogram) % VALUES_PER_BAND == 0)
  num_bands = len(histogram) / VALUES_PER_BAND
  maxima = []
  for band in xrange(num_bands):
    # Band N occupies histogram indices
    # [N*VALUES_PER_BAND, (N+1)*VALUES_PER_BAND); scan it from the top down
    # and record the highest value with a nonzero count.
    base = band * VALUES_PER_BAND
    for offset in xrange(VALUES_PER_BAND - 1, -1, -1):
      if histogram[base + offset] > 0:
        maxima.append(offset)
        break
  return maxima
def _generate_image_diff(image1, image2):
  """Wrapper for ImageChops.difference(image1, image2) that will handle some
  errors automatically, or at least yield more useful error messages.

  TODO(epoger): Currently, some of the images generated by the bots are RGBA
  and others are RGB.  I'm not sure why that is.  For now, to avoid confusion
  within the UI, convert all to RGB when diffing.

  Args:
    image1: a PIL image object
    image2: a PIL image object

  Returns: per-pixel diffs between image1 and image2, as a PIL image object
  """
  try:
    # Conversion stays inside the try so that a failure there is logged too.
    return ImageChops.difference(
        image1.convert('RGB'), image2.convert('RGB'))
  except ValueError:
    logging.error('Error diffing image1 [%s] and image2 [%s].' % (
        repr(image1), repr(image2)))
    raise
def _download_and_open_image(local_filepath, url):
  """Return the image at local_filepath, downloading it first if necessary.

  Args:
    local_filepath: path on local disk where the image should be stored
    url: URL from which we can download the image if we don't have it yet

  Returns: a PIL image object
  """
  if not os.path.exists(local_filepath):
    _mkdir_unless_exists(os.path.dirname(local_filepath))
    with contextlib.closing(urllib.urlopen(url)) as source:
      with open(local_filepath, 'wb') as destination:
        shutil.copyfileobj(fsrc=source, fdst=destination)
  return _open_image(local_filepath)
def _open_image(filepath):
  """Wrapper for Image.open(filepath) that yields more useful error messages.

  Args:
    filepath: path on local disk to load image from

  Returns: a PIL image object
  """
  try:
    return Image.open(filepath)
  except IOError:
    # If the file cannot be parsed as an image, remove it from disk so it
    # will be re-fetched on the next run.  Fixes http://skbug.com/2247
    logging.error('IOError loading image file %s ; deleting it.' % filepath)
    os.remove(filepath)
    raise
def _save_image(image, filepath, format='PNG'):
  """Write an image to disk, creating any intermediate directories as needed.

  Args:
    image: a PIL image object
    filepath: path on local disk to write image to
    format: one of the PIL image formats, listed at
        http://effbot.org/imagingbook/formats.htm
  """
  _mkdir_unless_exists(os.path.dirname(filepath))
  image.save(filepath, format)
def _mkdir_unless_exists(path):
  """Unless path refers to an already-existing directory, create it.

  Args:
    path: path on local disk

  Concurrent callers can race between the isdir() check and makedirs();
  treat "directory already exists" as success instead of letting the
  resulting OSError propagate.
  """
  import errno
  if not os.path.isdir(path):
    try:
      os.makedirs(path)
    except OSError as e:
      if e.errno != errno.EEXIST:
        raise
def _sanitize_locator(locator):
  """Return locator as a string with filename-unsafe characters replaced
  by '_', so the result can be used safely within filepaths.

  Args:
    locator: string, or something that can be represented as a string
  """
  as_string = str(locator)
  return DISALLOWED_FILEPATH_CHAR_REGEX.sub('_', as_string)
def _get_difference_locator(expected_image_locator, actual_image_locator):
  """Returns the locator string used to look up the diffs between
  expected_image and actual_image.

  We must keep this function in sync with getImageDiffRelativeUrl() in
  static/loader.js

  Args:
    expected_image_locator: locator string pointing at expected image
    actual_image_locator: locator string pointing at actual image

  Returns: already-sanitized locator where the diffs between expected and
      actual images can be found
  """
  sanitized_expected = _sanitize_locator(expected_image_locator)
  sanitized_actual = _sanitize_locator(actual_image_locator)
  return "%s-vs-%s" % (sanitized_expected, sanitized_actual)
|
bsd-3-clause
|
blueboxgroup/nova
|
nova/objects/migration.py
|
7
|
4655
|
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import db
from nova import exception
from nova import objects
from nova.objects import base
from nova.objects import fields
# TODO(berrange): Remove NovaObjectDictCompat
class Migration(base.NovaPersistentObject, base.NovaObject,
                base.NovaObjectDictCompat):
    """Database-backed record of a single instance migration/resize."""

    # Version 1.0: Initial version
    # Version 1.1: String attributes updated to support unicode
    VERSION = '1.1'

    fields = {
        'id': fields.IntegerField(),
        'source_compute': fields.StringField(nullable=True),
        'dest_compute': fields.StringField(nullable=True),
        'source_node': fields.StringField(nullable=True),
        'dest_node': fields.StringField(nullable=True),
        'dest_host': fields.StringField(nullable=True),
        'old_instance_type_id': fields.IntegerField(nullable=True),
        'new_instance_type_id': fields.IntegerField(nullable=True),
        'instance_uuid': fields.StringField(nullable=True),
        'status': fields.StringField(nullable=True),
    }

    @staticmethod
    def _from_db_object(context, migration, db_migration):
        # Copy every declared field from the DB row onto the object, then
        # clear change tracking so the object starts out "clean".
        for field_name in migration.fields:
            migration[field_name] = db_migration[field_name]
        migration._context = context
        migration.obj_reset_changes()
        return migration

    @base.remotable_classmethod
    def get_by_id(cls, context, migration_id):
        """Fetch a single migration by its primary key."""
        db_migration = db.migration_get(context, migration_id)
        return cls._from_db_object(context, cls(), db_migration)

    @base.remotable_classmethod
    def get_by_instance_and_status(cls, context, instance_uuid, status):
        """Fetch the migration for instance_uuid that has the given status."""
        db_migration = db.migration_get_by_instance_and_status(
            context, instance_uuid, status)
        return cls._from_db_object(context, cls(), db_migration)

    @base.remotable
    def create(self):
        """Insert this migration into the database; it must not exist yet."""
        if self.obj_attr_is_set('id'):
            raise exception.ObjectActionError(action='create',
                                              reason='already created')
        changes = self.obj_get_changes()
        db_row = db.migration_create(self._context, changes)
        self._from_db_object(self._context, self, db_row)

    @base.remotable
    def save(self):
        """Push dirty fields to the database and refresh from the result."""
        changes = self.obj_get_changes()
        changes.pop('id', None)  # the primary key is never updatable
        db_row = db.migration_update(self._context, self.id, changes)
        self._from_db_object(self._context, self, db_row)
        self.obj_reset_changes()

    @property
    def instance(self):
        """Lazily load the Instance this migration belongs to."""
        return objects.Instance.get_by_uuid(self._context, self.instance_uuid)
class MigrationList(base.ObjectListBase, base.NovaObject):
    """A list of Migration objects with remotable bulk queries."""

    # Version 1.0: Initial version
    #              Migration <= 1.1
    # Version 1.1: Added use_slave to get_unconfirmed_by_dest_compute
    VERSION = '1.1'

    fields = {
        'objects': fields.ListOfObjectsField('Migration'),
    }

    child_versions = {
        '1.0': '1.1',
        # NOTE(danms): Migration was at 1.1 before we added this
        '1.1': '1.1',
    }

    @base.remotable_classmethod
    def get_unconfirmed_by_dest_compute(cls, context, confirm_window,
                                        dest_compute, use_slave=False):
        """List migrations to dest_compute still unconfirmed after
        confirm_window seconds."""
        db_rows = db.migration_get_unconfirmed_by_dest_compute(
            context, confirm_window, dest_compute, use_slave=use_slave)
        return base.obj_make_list(context, cls(context), objects.Migration,
                                  db_rows)

    @base.remotable_classmethod
    def get_in_progress_by_host_and_node(cls, context, host, node):
        """List migrations currently in progress on the given host/node."""
        db_rows = db.migration_get_in_progress_by_host_and_node(
            context, host, node)
        return base.obj_make_list(context, cls(context), objects.Migration,
                                  db_rows)

    @base.remotable_classmethod
    def get_by_filters(cls, context, filters):
        """List migrations matching an arbitrary filter dict."""
        db_rows = db.migration_get_all_by_filters(context, filters)
        return base.obj_make_list(context, cls(context), objects.Migration,
                                  db_rows)
|
apache-2.0
|
TangHao1987/intellij-community
|
python/lib/Lib/email/quopriMIME.py
|
93
|
10839
|
# Copyright (C) 2001-2006 Python Software Foundation
# Author: Ben Gertzfield
# Contact: [email protected]
"""Quoted-printable content transfer encoding per RFCs 2045-2047.
This module handles the content transfer encoding method defined in RFC 2045
to encode US ASCII-like 8-bit data called `quoted-printable'. It is used to
safely encode text that is in a character set similar to the 7-bit US ASCII
character set, but that includes some 8-bit characters that are normally not
allowed in email bodies or headers.
Quoted-printable is very space-inefficient for encoding binary files; use the
email.base64MIME module for that instead.
This module provides an interface to encode and decode both headers and bodies
with quoted-printable encoding.
RFC 2045 defines a method for including character set information in an
`encoded-word' in a header. This method is commonly used for 8-bit real names
in To:/From:/Cc: etc. fields, as well as Subject: lines.
This module does not do the line wrapping or end-of-line character
conversion necessary for proper internationalized headers; it only
does dumb encoding and decoding. To deal with the various line
wrapping issues, use the email.Header module.
"""
__all__ = [
'body_decode',
'body_encode',
'body_quopri_check',
'body_quopri_len',
'decode',
'decodestring',
'encode',
'encodestring',
'header_decode',
'header_encode',
'header_quopri_check',
'header_quopri_len',
'quote',
'unquote',
]
import re
from string import hexdigits
from email.utils import fix_eols
CRLF = '\r\n'
NL = '\n'

# See also Charset.py
# Worst-case RFC 2047 chrome added around each encoded chunk ("=?", "?q?",
# "?=" -- 7 characters, excluding the charset name itself).
MISC_LEN = 7

# Characters that must be escaped under header ('Q') encoding vs. body
# quoted-printable encoding.
hqre = re.compile(r'[^-a-zA-Z0-9!*+/ ]')
bqre = re.compile(r'[^ !-<>-~\t]')



# Helpers
def header_quopri_check(c):
    """Return True if the character should be escaped with header quopri."""
    return bool(hqre.match(c))


def body_quopri_check(c):
    """Return True if the character should be escaped with body quopri."""
    return bool(bqre.match(c))
def header_quopri_len(s):
    """Return the length of s when it is encoded with header quopri.

    Each escaped character costs 3 output characters (=XX); everything
    else costs 1.
    """
    return sum(3 if hqre.match(c) else 1 for c in s)
def body_quopri_len(str):
    """Return the length of str when it is encoded with body quopri.

    Each escaped character costs 3 output characters (=XX); everything
    else costs 1.  (Parameter name kept for interface compatibility.)
    """
    return sum(3 if bqre.match(c) else 1 for c in str)
def _max_append(L, s, maxlen, extra=''):
    """Append s (joined by extra) onto the last chunk in L unless that would
    exceed maxlen characters, in which case start a new chunk with s lstripped.
    """
    if L and len(L[-1]) + len(s) <= maxlen:
        L[-1] += extra + s
    else:
        L.append(s.lstrip())
def unquote(s):
    """Turn a string in the form =AB to the ASCII character with value 0xab"""
    hex_digits = s[1:3]
    return chr(int(hex_digits, 16))
def quote(c):
    """Return the =XX quoted-printable escape for the single character c."""
    return "=%02X" % ord(c)
def header_encode(header, charset="iso-8859-1", keep_eols=False,
                  maxlinelen=76, eol=NL):
    """Encode a single header line with quoted-printable (like) encoding.
    Defined in RFC 2045, this `Q' encoding is similar to quoted-printable, but
    used specifically for email header fields to allow charsets with mostly 7
    bit characters (and some 8 bit) to remain more or less readable in non-RFC
    2045 aware mail clients.
    charset names the character set to use to encode the header.  It defaults
    to iso-8859-1.
    The resulting string will be in the form:
    "=?charset?q?I_f=E2rt_in_your_g=E8n=E8ral_dire=E7tion?\\n
      =?charset?q?Silly_=C8nglish_Kn=EEghts?="
    with each line wrapped safely at, at most, maxlinelen characters (defaults
    to 76 characters).  If maxlinelen is None, the entire string is encoded in
    one chunk with no splitting.
    End-of-line characters (\\r, \\n, \\r\\n) will be automatically converted
    to the canonical email line separator \\r\\n unless the keep_eols
    parameter is True (the default is False).
    Each line of the header will be terminated in the value of eol, which
    defaults to "\\n".  Set this to "\\r\\n" if you are using the result of
    this function directly in email.
    """
    # Return empty headers unchanged
    if not header:
        return header
    if not keep_eols:
        header = fix_eols(header)
    # Quopri encode each line, in encoded chunks no greater than maxlinelen in
    # length, after the RFC chrome is added in.
    quoted = []
    if maxlinelen is None:
        # An obnoxiously large number that's good enough
        max_encoded = 100000
    else:
        # Budget per chunk: subtract the "=?charset?q?...?=" wrapper added
        # below (MISC_LEN) plus one character of slack.
        max_encoded = maxlinelen - len(charset) - MISC_LEN - 1
    for c in header:
        # Space may be represented as _ instead of =20 for readability
        if c == ' ':
            _max_append(quoted, '_', max_encoded)
        # These characters can be included verbatim
        elif not hqre.match(c):
            _max_append(quoted, c, max_encoded)
        # Otherwise, replace with hex value like =E2
        else:
            _max_append(quoted, "=%02X" % ord(c), max_encoded)
    # Now add the RFC chrome to each encoded chunk and glue the chunks
    # together.  BAW: should we be able to specify the leading whitespace in
    # the joiner?
    joiner = eol + ' '
    return joiner.join(['=?%s?q?%s?=' % (charset, line) for line in quoted])
def encode(body, binary=False, maxlinelen=76, eol=NL):
    """Encode with quoted-printable, wrapping at maxlinelen characters.
    If binary is False (the default), end-of-line characters will be converted
    to the canonical email end-of-line sequence \\r\\n.  Otherwise they will
    be left verbatim.
    Each line of encoded text will end with eol, which defaults to "\\n".  Set
    this to "\\r\\n" if you will be using the result of this function directly
    in an email.
    Each line will be wrapped at, at most, maxlinelen characters (defaults to
    76 characters).  Long lines will have the `soft linefeed' quoted-printable
    character "=" appended to them, so the decoded text will be identical to
    the original text.
    """
    if not body:
        return body
    if not binary:
        body = fix_eols(body)
    # BAW: We're accumulating the body text by string concatenation.  That
    # can't be very efficient, but I don't have time now to rewrite it.  It
    # just feels like this algorithm could be more efficient.
    encoded_body = ''
    lineno = -1
    # Preserve line endings here so we can check later to see an eol needs to
    # be added to the output later.
    lines = body.splitlines(1)
    for line in lines:
        # But strip off line-endings for processing this line.
        if line.endswith(CRLF):
            line = line[:-2]
        elif line[-1] in CRLF:
            line = line[:-1]
        lineno += 1
        encoded_line = ''
        # prev tracks the most recent (unquoted) character, so trailing
        # whitespace can be special-cased after the loop.
        prev = None
        linelen = len(line)
        # Now we need to examine every character to see if it needs to be
        # quopri encoded.  BAW: again, string concatenation is inefficient.
        for j in range(linelen):
            c = line[j]
            prev = c
            if bqre.match(c):
                c = quote(c)
            elif j+1 == linelen:
                # Check for whitespace at end of line; special case
                if c not in ' \t':
                    encoded_line += c
                prev = c
                continue
            # Check to see to see if the line has reached its maximum length
            # and emit a soft line break ("=" + eol) so decoding reproduces
            # the original text exactly.
            if len(encoded_line) + len(c) >= maxlinelen:
                encoded_body += encoded_line + '=' + eol
                encoded_line = ''
            encoded_line += c
        # Now at end of line..
        if prev and prev in ' \t':
            # Special case for whitespace at end of file
            if lineno + 1 == len(lines):
                prev = quote(prev)
                if len(encoded_line) + len(prev) > maxlinelen:
                    encoded_body += encoded_line + '=' + eol + prev
                else:
                    encoded_body += encoded_line + prev
            # Just normal whitespace at end of line
            else:
                encoded_body += encoded_line + prev + '=' + eol
            encoded_line = ''
        # Now look at the line we just finished and it has a line ending, we
        # need to add eol to the end of the line.
        if lines[lineno].endswith(CRLF) or lines[lineno][-1] in CRLF:
            encoded_body += encoded_line + eol
        else:
            encoded_body += encoded_line
        encoded_line = ''
    return encoded_body
# For convenience and backwards compatibility w/ standard base64 module:
# these two names are straight aliases of encode().
body_encode = encode
encodestring = encode
# BAW: I'm not sure if the intent was for the signature of this function to be
# the same as base64MIME.decode() or not...
def decode(encoded, eol=NL):
    """Decode a quoted-printable string.

    Lines are separated with eol, which defaults to \\n.
    """
    if not encoded:
        return encoded
    # BAW: see comment in encode() above.  Again, we're building up the
    # decoded string with string concatenation, which could be done much more
    # efficiently.
    decoded = ''
    for line in encoded.splitlines():
        line = line.rstrip()
        if not line:
            # A blank input line decodes to a bare line separator.
            decoded += eol
            continue
        i = 0
        n = len(line)
        while i < n:
            c = line[i]
            # NOTE: the original used the Python-2-only `<>` operator here;
            # `!=` behaves identically and also parses under Python 3.
            if c != '=':
                decoded += c
                i += 1
            # Otherwise, c == "=".  Are we at the end of the line?  If so, add
            # a soft line break.
            elif i+1 == n:
                i += 1
                continue
            # Decode if in form =AB
            elif i+2 < n and line[i+1] in hexdigits and line[i+2] in hexdigits:
                decoded += unquote(line[i:i+3])
                i += 3
            # Otherwise, not in form =AB, pass literally
            else:
                decoded += c
                i += 1
            if i == n:
                decoded += eol
    # Special case if original string did not end with eol
    if not encoded.endswith(eol) and decoded.endswith(eol):
        # Strip the whole eol sequence: eol may be multi-character (e.g.
        # '\r\n'), so stripping a single character would leave a stray '\r'.
        decoded = decoded[:-len(eol)]
    return decoded
# For convenience and backwards compatibility w/ standard base64 module:
# these two names are straight aliases of decode().
body_decode = decode
decodestring = decode
def _unquote_match(match):
    """re.sub() callback: turn a match of the form =AB into chr(0xab)."""
    return unquote(match.group(0))
# Header decoding is done a bit differently
def header_decode(s):
    """Decode a string encoded with RFC 2045 MIME header `Q' encoding.

    This function does not parse a full MIME header value encoded with
    quoted-printable (like =?iso-8895-1?q?Hello_World?=) -- please use
    the high level email.Header class for that functionality.
    """
    # In 'Q' encoding an underscore stands for a space; expand those first,
    # then expand every =XX escape via _unquote_match.
    return re.sub(r'=\w{2}', _unquote_match, s.replace('_', ' '))
|
apache-2.0
|
hkchenhongyi/django
|
django/core/management/utils.py
|
405
|
2590
|
from __future__ import unicode_literals
import os
import sys
from subprocess import PIPE, Popen
from django.utils import six
from django.utils.encoding import DEFAULT_LOCALE_ENCODING, force_text
from .base import CommandError
def popen_wrapper(args, os_err_exc_type=CommandError, universal_newlines=True):
    """
    Friendly wrapper around Popen.

    Returns stdout output, stderr output and OS status code.
    """
    try:
        process = Popen(
            args, shell=False, stdout=PIPE, stderr=PIPE,
            close_fds=os.name != 'nt', universal_newlines=universal_newlines)
    except OSError as e:
        # Re-raise as the caller-chosen exception type, preserving the
        # original traceback and decoding strerror for the message.
        strerror = force_text(e.strerror, DEFAULT_LOCALE_ENCODING,
                              strings_only=True)
        six.reraise(os_err_exc_type, os_err_exc_type('Error executing %s: %s' %
                    (args[0], strerror)), sys.exc_info()[2])
    output, errors = process.communicate()
    return (
        output,
        force_text(errors, DEFAULT_LOCALE_ENCODING, strings_only=True),
        process.returncode,
    )
def handle_extensions(extensions):
    """
    Organizes multiple extensions that are separated with commas or passed by
    using --extension/-e multiple times.

    For example: running 'django-admin makemessages -e js,txt -e xhtml -a'
    would result in an extension list: ['.js', '.txt', '.xhtml']

    >>> handle_extensions(['.html', 'html,js,py,py,py,.py', 'py,.py'])
    {'.html', '.js', '.py'}
    >>> handle_extensions(['.html, txt,.tpl'])
    {'.html', '.tpl', '.txt'}
    """
    cleaned = []
    for chunk in extensions:
        cleaned.extend(chunk.replace(' ', '').split(','))
    # Normalize every extension to start with a dot, deduplicating via a set.
    return {ext if ext.startswith('.') else '.%s' % ext for ext in cleaned}
def find_command(cmd, path=None, pathext=None):
    """Return the full path to cmd on path (honouring PATHEXT-style
    executable extensions, e.g. on Windows), or None if it is not found."""
    if path is None:
        path = os.environ.get('PATH', '').split(os.pathsep)
    if isinstance(path, six.string_types):
        path = [path]
    # check if there are funny path extensions for executables, e.g. Windows
    if pathext is None:
        pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD').split(os.pathsep)
    # don't use extensions if the command ends with one of them
    if any(cmd.endswith(ext) for ext in pathext):
        pathext = ['']
    # check if we find the command on PATH
    for directory in path:
        candidate = os.path.join(directory, cmd)
        if os.path.isfile(candidate):
            return candidate
        for ext in pathext:
            extended = candidate + ext
            if os.path.isfile(extended):
                return extended
    return None
|
bsd-3-clause
|
Genomon-Project/paplot
|
scripts/paplot/convert.py
|
1
|
5764
|
# -*- coding: utf-8 -*-
"""
Created on Thu May 12 12:34:57 2016
@author: okada
$Id: convert.py 208 2017-08-16 06:16:25Z aokada $
"""
import paplot.subcode.tools as tools
def prohibition(text):
    """Replace characters that are unsafe in identifiers/filepaths with '_',
    and prefix '_' when the result would start with a digit."""
    import re
    safe = re.sub(r'[\'"/;:\[\] ]', "_", text)
    if re.match(r'^[0-9]', safe):
        safe = "_" + safe
    return safe
def list_prohibition(li):
    """Apply prohibition() to every item of li, returning a new list."""
    return [prohibition(item) for item in li]
def value_to_index(li, value, default):
    """Return the index of the first occurrence of value in li, or default
    when value is not present."""
    for index, item in enumerate(li):
        if item == value:
            return index
    return default
def list_to_text(li):
    """Render li as a run of single-quoted, comma-terminated items,
    e.g. [1, 'a'] -> "'1','a',"."""
    return "".join("'" + str(item) + "'," for item in li)
def text_to_list(text, sep):
    """Split text on sep (treat the whole text as one piece when sep is ""),
    strip each piece, and return the non-empty results as a list."""
    if sep == "":
        pieces = [text]
    else:
        pieces = text.split(sep)
    result = []
    for piece in pieces:
        stripped = piece.strip().rstrip("\r\n")
        if stripped != "":
            result.append(stripped)
    return result
def fnmatch_list(target, li):
    """Return True if target matches any fnmatch-style pattern in li."""
    import fnmatch
    return any(fnmatch.fnmatch(target, pattern) for pattern in li)
def group_list(colmun, mode, name, config):
    """Collect the distinct values of a result column and assign each a color.

    Args:
        colmun: list of raw column values (each row may hold several values
            joined by a configurable separator)
        mode: 'mutation' or 'ca'; any other mode returns []
        name: logical column name, used to build config option keys
        config: parsed config object handed to paplot.subcode.tools helpers

    Returns [sorted_unique_values, colors] (parallel lists), or [] for an
    unknown mode.
    """
    import paplot.color as color
    option_input = ""
    if mode == "mutation":
        option_input = "result_format_mutation"
    elif mode == "ca":
        option_input = "result_format_ca"
    else:
        return []
    # Per-column separator plus include/exclude pattern lists from the config.
    sept = tools.config_getstr(config, option_input, "sept_%s" % name)
    limited_list = text_to_list(tools.config_getstr(config, mode, "limited_%s" % name), ",")
    nouse_list = text_to_list(tools.config_getstr(config, mode, "nouse_%s" % name), ",")
    funcs = []
    for row in colmun:
        splt = []
        if sept == "": splt.append(row)
        else: splt = row.split(sept)
        for func in splt:
            func = func.strip()
            if func == "":
                continue
            # Keep only values matching limited_list (when one is given)...
            if len(limited_list) > 0 and fnmatch_list(func, limited_list) == False:
                continue
            #if func in nouse_list:
            # ...and drop values matching any nouse_list pattern.
            if fnmatch_list(func, nouse_list):
                continue
            funcs.append(func)
    # sort list
    funcs = list(set(funcs))
    funcs.sort()
    color_list = {};
    # Explicit "value:colorname" overrides from the config, comma-separated.
    for f in tools.config_getstr(config, mode, "%s_color" % name).split(","):
        if len(f) == 0: continue
        cols = text_to_list(f, ":")
        if len(cols) >= 2:
            color_list[cols[0]] = color.name_to_value(cols[1])
    color_list = color.create_color_dict(funcs, color_list, color.metro_colors)
    # dict to value
    colors = []
    for key in funcs:
        colors.append(color_list[key])
    return [funcs, colors]
def pyformat_to_jstooltip_text(positions, config, section_fmt, section_col, item_startwith):
    """Translate python-style format strings from the config into a JS tooltip
    descriptor literal of the form {format:[...], keys: '...'}.

    Args:
        positions: dict whose keys are the known data-column names; format
            keys not found here (and not starting with '#') are warned about
            and their format chunk is dropped
        config: parsed config object
        section_fmt: config section holding the format-string options
        section_col: not referenced in this body; kept for interface
            compatibility
        item_startwith: prefix selecting which options of section_fmt to read
    """
    tooltip_templete = "{{format:[{formats}], keys: '{keys}'}}"
    tooltip_detail_templete = "{{label:'{label}',type:'{type}',keys:[{keys}],ext:'{ext}'}},"
    import re
    # Matches one {key}, {key1+key2}, {key:ext} ... block in a format string.
    re_compile=re.compile(r"\{[0-9a-zA-Z\+\-\*\/\#\:\,\.\_\ ]+\}")
    # Splits a block into sub-keys on arithmetic/extension separators.
    re_compile2=re.compile(r"[\+\-\*\/\:]")
    keys_list = []
    tooltip_fomat_text = ""
    for option in tools.config_getoptions(config, section_fmt, item_startwith):
        formt = tools.config_getstr(config, section_fmt, option)
        key_text_list = re_compile.findall(formt)
        tooltip_detail_text = ""
        for key_text in key_text_list:
            start = formt.find(key_text)
            # write fix area
            if start > 0:
                tooltip_detail_text += tooltip_detail_templete.format(label = formt[0:start], type="fix", keys="", ext="")
            key_text = key_text.lower()
            formt = formt[start+len(key_text):]
            label_text = key_text.replace(" ", "").replace("{", "").replace("}", "")
            sub_keys = re_compile2.split(label_text)
            ttype = "numeric"
            ext = ""
            # case str
            if len(sub_keys) == 1:
                ttype = "str"
            # case with-extention
            if label_text.find(":") > 0:
                ext_start = label_text.index(":")
                ext=label_text[ext_start+1:]
                label_text = label_text[0:ext_start]
                sub_keys = re_compile2.split(label_text)
            for sub_key in sub_keys:
                # remove numeric block
                try:
                    float(sub_key)
                    sub_keys.remove(sub_key)
                except Exception:
                    pass
            check = True
            for sub_key in list(set(sub_keys)):
                if not sub_key in positions.keys() and not sub_key.startswith("#"):
                    print("[WARNING] key:{key} is not defined.".format(key = sub_key))
                    check = False
                    break
                label_text = label_text.replace(sub_key, "{" + sub_key +"}")
            if check == True:
                tooltip_detail_text += tooltip_detail_templete.format(label= label_text, type=ttype, keys=list_to_text(sub_keys), ext=ext)
                keys_list.extend(sub_keys)
        # Any text remaining after the last key block is a trailing fix area.
        if len(formt) > 0:
            tooltip_detail_text += tooltip_detail_templete.format(label=formt, type="fix", keys="", ext="")
        tooltip_fomat_text += "[" + tooltip_detail_text + "],"
    # Build the sorted, de-duplicated key list referenced by all formats.
    key_text = ""
    keys_dup = list(set(keys_list))
    keys_dup.sort()
    for key in keys_dup:
        key_text += "{" + key.lower() + "} "
    return tooltip_templete.format(formats = tooltip_fomat_text, keys = key_text)
|
mit
|
iulian787/spack
|
var/spack/repos/builtin/packages/candle-benchmarks/package.py
|
2
|
1543
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class CandleBenchmarks(Package):
    """ECP-CANDLE Benchmarks"""

    homepage = "https://github.com/ECP-CANDLE/Benchmarks"
    url = "https://github.com/ECP-CANDLE/Benchmarks/archive/v0.1.tar.gz"
    tags = ['proxy-app', 'ecp-proxy-app']
    version('0.1', sha256='767f74f43ee3a5d4e0f26750f2a96b8433e25a9cd4f2d29938ac8acf263ab58d')
    variant('mpi', default=True, description='Build with MPI support')
    # Installs into the python extension's site hierarchy.
    extends('python')
    depends_on('[email protected]:')
    depends_on('py-theano +gpu', type=('build', 'run'))
    depends_on('py-keras', type=('build', 'run'))
    depends_on('py-matplotlib +image@:2.2.3', type=('build', 'run'))
    depends_on('py-tqdm', type=('build', 'run'))
    depends_on('py-scikit-learn', type=('build', 'run'))
    depends_on('[email protected]: +core +highgui +imgproc +jpeg +png +tiff +zlib +python -dnn ~eigen ~gtk')
    depends_on('py-mdanalysis', type=('build', 'run'))
    depends_on('py-mpi4py', when='+mpi', type=('build', 'run'))
    # h5py's MPI support must track the package's own mpi variant.
    depends_on('py-h5py~mpi', when='~mpi', type=('build', 'run'))
    depends_on('py-h5py+mpi', when='+mpi', type=('build', 'run'))
    depends_on('py-requests', type=('build', 'run'))
    # see #3244, but use external for now
    # depends_on('tensorflow')
    def install(self, spec, prefix):
        # The benchmarks are plain Python scripts; just copy the whole
        # source tree into the install prefix.
        install_tree(self.stage.source_path, prefix.bin)
|
lgpl-2.1
|
wfxiang08/ansible
|
test/integration/setup_gce.py
|
131
|
1388
|
'''
Create GCE resources for use in integration tests.
Takes a prefix as a command-line argument and creates two persistent disks named
${prefix}-base and ${prefix}-extra and a snapshot of the base disk named
${prefix}-snapshot. prefix will be forced to lowercase, to ensure the names are
legal GCE resource names.
'''
import sys
import optparse
import gce_credentials
def parse_args():
    """Parse command-line options; exit with a usage error when a required
    credential option or the name-prefix argument is missing."""
    parser = optparse.OptionParser(
        usage="%s [options] <prefix>" % (sys.argv[0],), description=__doc__)
    gce_credentials.add_credentials_options(parser)
    parser.add_option("--prefix",
                      action="store", dest="prefix",
                      help="String used to prefix GCE resource names (default: %default)")
    options, arguments = parser.parse_args()
    gce_credentials.check_required(options, parser)
    if not arguments:
        parser.error("Missing required argument: name prefix")
    return (options, arguments)
if __name__ == '__main__':
    (opts, args) = parse_args()
    gce = gce_credentials.get_gce_driver(opts)
    # GCE resource names must be lowercase, so force the prefix down.
    prefix = args[0].lower()
    try:
        # Two 10 GB persistent disks plus a snapshot of the base disk.
        base_volume = gce.create_volume(
            size=10, name=prefix+'-base', location='us-central1-a')
        gce.create_volume_snapshot(base_volume, name=prefix+'-snapshot')
        gce.create_volume(
            size=10, name=prefix+'-extra', location='us-central1-a')
    except KeyboardInterrupt, e:
        print "\nExiting on user command."
|
gpl-3.0
|
nestle1998/srs
|
trunk/3rdparty/gprof/gprof2dot.py
|
46
|
72884
|
#!/usr/bin/env python
#
# Copyright 2008-2009 Jose Fonseca
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""Generate a dot graph from the output of several profilers."""
__author__ = "Jose Fonseca"
__version__ = "1.0"
import sys
import math
import os.path
import re
import textwrap
import optparse
import xml.parsers.expat
try:
# Debugging helper module
import debug
except ImportError:
pass
def percentage(p):
    """Format the ratio p as a percentage string with two decimal places."""
    return "%.02f%%" % (p*100.0,)
def add(a, b):
    """Event aggregator: sum two event values."""
    return a + b
def equal(a, b):
    """Event aggregator: keep the value when both sides agree, else None."""
    return a if a == b else None
def fail(a, b):
    """Event aggregator for events that must never be aggregated."""
    assert False
# Tolerance for float noise when clamping ratios (one single-precision ulp).
tol = 2 ** -23

def ratio(numerator, denominator):
    """Return numerator/denominator clamped into [0, 1].

    0/0 yields 1.0; values outside [0, 1] by more than tol also emit a
    warning on stderr before being clamped.
    """
    try:
        value = float(numerator)/float(denominator)
    except ZeroDivisionError:
        # 0/0 is undefined, but 1.0 yields more useful results
        return 1.0
    if value < 0.0:
        if value < -tol:
            sys.stderr.write('warning: negative ratio (%s/%s)\n' % (numerator, denominator))
        return 0.0
    if value > 1.0:
        if value > 1.0 + tol:
            sys.stderr.write('warning: ratio greater than one (%s/%s)\n' % (numerator, denominator))
        return 1.0
    return value
class UndefinedEvent(Exception):
    """Raised when attempting to get an event which is undefined."""

    def __init__(self, event):
        Exception.__init__(self)
        # The Event instance that was requested but never stored.
        self.event = event

    def __str__(self):
        return 'unspecified event %s' % self.event.name
class Event(object):
    """Describe a kind of event, and its basic operations."""

    def __init__(self, name, null, aggregator, formatter = str):
        self.name = name
        self._null = null
        self._aggregator = aggregator
        self._formatter = formatter

    def __eq__(self, other):
        # Events compare (and hash) strictly by identity.
        return self is other

    def __hash__(self):
        return id(self)

    def null(self):
        """Return the neutral ("zero") value for this event."""
        return self._null

    def aggregate(self, val1, val2):
        """Aggregate two event values."""
        assert val1 is not None
        assert val2 is not None
        return self._aggregator(val1, val2)

    def format(self, val):
        """Format an event value."""
        assert val is not None
        return self._formatter(val)
# Pre-defined event kinds shared by all profile formats.
MODULE = Event("Module", None, equal)
PROCESS = Event("Process", None, equal)
CALLS = Event("Calls", 0, add)
SAMPLES = Event("Samples", 0, add)
SAMPLES2 = Event("Samples", 0, add)
TIME = Event("Time", 0.0, add, lambda x: '(' + str(x) + ')')
TIME_RATIO = Event("Time ratio", 0.0, add, lambda x: '(' + percentage(x) + ')')
# Total (inclusive) values use the `fail` aggregator: they must never need
# aggregation.
TOTAL_TIME = Event("Total time", 0.0, fail)
TOTAL_TIME_RATIO = Event("Total time ratio", 0.0, fail, percentage)
CALL_RATIO = Event("Call ratio", 0.0, add, percentage)
PRUNE_RATIO = Event("Prune ratio", 0.0, add, percentage)
class Object(object):
    """Base class for all objects in profile which can store events."""

    def __init__(self, events=None):
        # A None sentinel avoids the shared-mutable-default pitfall.
        self.events = {} if events is None else events

    def __hash__(self):
        return id(self)

    def __eq__(self, other):
        # Identity comparison, consistent with __hash__.
        return self is other

    def __contains__(self, event):
        return event in self.events

    def __getitem__(self, event):
        try:
            return self.events[event]
        except KeyError:
            raise UndefinedEvent(event)

    def __setitem__(self, event, value):
        # Assigning None removes the event entirely (a no-op if absent).
        if value is None:
            self.events.pop(event, None)
        else:
            self.events[event] = value
class Call(Object):
    """A call between functions.

    There should be at most one call object for every pair of functions.
    """

    def __init__(self, callee_id):
        Object.__init__(self)
        # Identifier of the called function (matches Function.id).
        self.callee_id = callee_id
class Function(Object):
    """A function."""

    def __init__(self, id, name):
        Object.__init__(self)
        self.id = id
        self.name = name
        self.calls = {}    # callee_id -> Call
        self.cycle = None  # the Cycle this function belongs to, if any

    def add_call(self, call):
        """Register an outgoing call, warning when an earlier one is replaced."""
        if call.callee_id in self.calls:
            sys.stderr.write('warning: overwriting call from function %s to %s\n' % (str(self.id), str(call.callee_id)))
        self.calls[call.callee_id] = call

    # TODO: write utility functions

    def __repr__(self):
        return self.name
class Cycle(Object):
    """A cycle made from recursive function calls."""

    def __init__(self):
        Object.__init__(self)
        # XXX: Do cycles need an id?
        self.functions = set()

    def add_function(self, function):
        """Add function to this cycle, merging in its previous cycle (if any)."""
        assert function not in self.functions
        self.functions.add(function)
        # XXX: Aggregate events?
        if function.cycle is not None:
            # Merge the members of the function's old cycle into this one.
            # BUG FIX: the original tested `function not in self.functions`,
            # which is always False right after the add above, so the old
            # cycle's members were never merged; test `other` instead.
            for other in function.cycle.functions:
                if other not in self.functions:
                    self.add_function(other)
        function.cycle = self
class Profile(Object):
    """The whole profile.

    Holds every Function keyed by id plus the list of recursion Cycles,
    and derives aggregate events (ratios, integrated totals, pruning).
    """
    def __init__(self):
        Object.__init__(self)
        self.functions = {}  # function id -> Function
        self.cycles = []     # list of Cycle objects
    def add_function(self, function):
        """Register a function by its id, warning on duplicate ids."""
        if function.id in self.functions:
            sys.stderr.write('warning: overwriting function %s (id %s)\n' % (function.name, str(function.id)))
        self.functions[function.id] = function
    def add_cycle(self, cycle):
        """Register a cycle of mutually recursive functions."""
        self.cycles.append(cycle)
    def validate(self):
        """Validate the edges."""
        for function in self.functions.itervalues():
            for callee_id in function.calls.keys():
                assert function.calls[callee_id].callee_id == callee_id
                if callee_id not in self.functions:
                    # Drop dangling edges to functions absent from the profile.
                    sys.stderr.write('warning: call to undefined function %s from function %s\n' % (str(callee_id), function.name))
                    del function.calls[callee_id]
    def find_cycles(self):
        """Find cycles using Tarjan's strongly connected components algorithm."""
        # Apply the Tarjan's algorithm successively until all functions are visited
        visited = set()
        for function in self.functions.itervalues():
            if function not in visited:
                self._tarjan(function, 0, [], {}, {}, visited)
        cycles = []
        for function in self.functions.itervalues():
            if function.cycle is not None and function.cycle not in cycles:
                cycles.append(function.cycle)
        self.cycles = cycles
        if 0:
            # Debugging aid (disabled): dump the detected cycles.
            for cycle in cycles:
                sys.stderr.write("Cycle:\n")
                for member in cycle.functions:
                    sys.stderr.write("\tFunction %s\n" % member.name)
    def _tarjan(self, function, order, stack, orders, lowlinks, visited):
        """Tarjan's strongly connected components algorithm.
        See also:
        - http://en.wikipedia.org/wiki/Tarjan's_strongly_connected_components_algorithm
        """
        visited.add(function)
        orders[function] = order
        lowlinks[function] = order
        order += 1
        pos = len(stack)
        stack.append(function)
        for call in function.calls.itervalues():
            callee = self.functions[call.callee_id]
            # TODO: use a set to optimize lookup
            if callee not in orders:
                # Unvisited callee: recurse and propagate its lowlink.
                order = self._tarjan(callee, order, stack, orders, lowlinks, visited)
                lowlinks[function] = min(lowlinks[function], lowlinks[callee])
            elif callee in stack:
                # Callee is on the stack, i.e. in the current component.
                lowlinks[function] = min(lowlinks[function], orders[callee])
        if lowlinks[function] == orders[function]:
            # Strongly connected component found
            members = stack[pos:]
            del stack[pos:]
            if len(members) > 1:
                # Only a component with more than one member forms a cycle.
                cycle = Cycle()
                for member in members:
                    cycle.add_function(member)
        return order
    def call_ratios(self, event):
        """Store on every call edge, as CALL_RATIO, the edge's share of the
        callee's total incoming `event` value."""
        # Aggregate for incoming calls
        cycle_totals = {}
        for cycle in self.cycles:
            cycle_totals[cycle] = 0.0
        function_totals = {}
        for function in self.functions.itervalues():
            function_totals[function] = 0.0
        for function in self.functions.itervalues():
            for call in function.calls.itervalues():
                if call.callee_id != function.id:
                    callee = self.functions[call.callee_id]
                    function_totals[callee] += call[event]
                    if callee.cycle is not None and callee.cycle is not function.cycle:
                        cycle_totals[callee.cycle] += call[event]
        # Compute the ratios
        for function in self.functions.itervalues():
            for call in function.calls.itervalues():
                assert CALL_RATIO not in call
                if call.callee_id != function.id:
                    callee = self.functions[call.callee_id]
                    if callee.cycle is not None and callee.cycle is not function.cycle:
                        # Calls entering a cycle are measured against the whole
                        # cycle's total, not the single member's.
                        total = cycle_totals[callee.cycle]
                    else:
                        total = function_totals[callee]
                    call[CALL_RATIO] = ratio(call[event], total)
    def integrate(self, outevent, inevent):
        """Propagate function time ratio along the function calls.
        Must be called after finding the cycles.
        See also:
        - http://citeseer.ist.psu.edu/graham82gprof.html
        """
        # Sanity checking
        assert outevent not in self
        for function in self.functions.itervalues():
            assert outevent not in function
            assert inevent in function
            for call in function.calls.itervalues():
                assert outevent not in call
                if call.callee_id != function.id:
                    assert CALL_RATIO in call
        # Aggregate the input for each cycle
        for cycle in self.cycles:
            total = inevent.null()
            for function in self.functions.itervalues():
                total = inevent.aggregate(total, function[inevent])
            self[inevent] = total
        # Integrate along the edges
        total = inevent.null()
        for function in self.functions.itervalues():
            total = inevent.aggregate(total, function[inevent])
            self._integrate_function(function, outevent, inevent)
        self[outevent] = total
    def _integrate_function(self, function, outevent, inevent):
        # Cycle members are integrated via their whole cycle; plain functions
        # are memoized: own inevent plus everything propagated from callees.
        if function.cycle is not None:
            return self._integrate_cycle(function.cycle, outevent, inevent)
        else:
            if outevent not in function:
                total = function[inevent]
                for call in function.calls.itervalues():
                    if call.callee_id != function.id:
                        total += self._integrate_call(call, outevent, inevent)
                function[outevent] = total
            return function[outevent]
    def _integrate_call(self, call, outevent, inevent):
        # The caller is attributed its CALL_RATIO share of the callee's total.
        assert outevent not in call
        assert CALL_RATIO in call
        callee = self.functions[call.callee_id]
        subtotal = call[CALL_RATIO]*self._integrate_function(callee, outevent, inevent)
        call[outevent] = subtotal
        return subtotal
    def _integrate_cycle(self, cycle, outevent, inevent):
        """Integrate a whole cycle, then distribute its total over members."""
        if outevent not in cycle:
            # Compute the outevent for the whole cycle
            total = inevent.null()
            for member in cycle.functions:
                subtotal = member[inevent]
                for call in member.calls.itervalues():
                    callee = self.functions[call.callee_id]
                    if callee.cycle is not cycle:
                        subtotal += self._integrate_call(call, outevent, inevent)
                total += subtotal
            cycle[outevent] = total
            # Compute the time propagated to callers of this cycle
            callees = {}
            for function in self.functions.itervalues():
                if function.cycle is not cycle:
                    for call in function.calls.itervalues():
                        callee = self.functions[call.callee_id]
                        if callee.cycle is cycle:
                            try:
                                callees[callee] += call[CALL_RATIO]
                            except KeyError:
                                callees[callee] = call[CALL_RATIO]
            for member in cycle.functions:
                member[outevent] = outevent.null()
            # For each entry point into the cycle, distribute its share.
            for callee, call_ratio in callees.iteritems():
                ranks = {}
                call_ratios = {}
                partials = {}
                self._rank_cycle_function(cycle, callee, 0, ranks)
                self._call_ratios_cycle(cycle, callee, ranks, call_ratios, set())
                partial = self._integrate_cycle_function(cycle, callee, call_ratio, partials, ranks, call_ratios, outevent, inevent)
                assert partial == max(partials.values())
                assert not total or abs(1.0 - partial/(call_ratio*total)) <= 0.001
        return cycle[outevent]
    def _rank_cycle_function(self, cycle, function, rank, ranks):
        # Each member keeps the smallest depth at which it is reachable from
        # the cycle's entry point.
        if function not in ranks or ranks[function] > rank:
            ranks[function] = rank
            for call in function.calls.itervalues():
                if call.callee_id != function.id:
                    callee = self.functions[call.callee_id]
                    if callee.cycle is cycle:
                        self._rank_cycle_function(cycle, callee, rank + 1, ranks)
    def _call_ratios_cycle(self, cycle, function, ranks, call_ratios, visited):
        # Accumulate CALL_RATIO along rank-increasing intra-cycle edges.
        if function not in visited:
            visited.add(function)
            for call in function.calls.itervalues():
                if call.callee_id != function.id:
                    callee = self.functions[call.callee_id]
                    if callee.cycle is cycle:
                        if ranks[callee] > ranks[function]:
                            call_ratios[callee] = call_ratios.get(callee, 0.0) + call[CALL_RATIO]
                        self._call_ratios_cycle(cycle, callee, ranks, call_ratios, visited)
    def _integrate_cycle_function(self, cycle, function, partial_ratio, partials, ranks, call_ratios, outevent, inevent):
        # Distribute one entry point's share of the cycle total over members.
        if function not in partials:
            partial = partial_ratio*function[inevent]
            for call in function.calls.itervalues():
                if call.callee_id != function.id:
                    callee = self.functions[call.callee_id]
                    if callee.cycle is not cycle:
                        assert outevent in call
                        partial += partial_ratio*call[outevent]
                    else:
                        if ranks[callee] > ranks[function]:
                            callee_partial = self._integrate_cycle_function(cycle, callee, partial_ratio, partials, ranks, call_ratios, outevent, inevent)
                            call_ratio = ratio(call[CALL_RATIO], call_ratios[callee])
                            call_partial = call_ratio*callee_partial
                            try:
                                call[outevent] += call_partial
                            except UndefinedEvent:
                                call[outevent] = call_partial
                            partial += call_partial
            partials[function] = partial
            try:
                function[outevent] += partial
            except UndefinedEvent:
                function[outevent] = partial
        return partials[function]
    def aggregate(self, event):
        """Aggregate an event for the whole profile."""
        total = event.null()
        for function in self.functions.itervalues():
            try:
                total = event.aggregate(total, function[event])
            except UndefinedEvent:
                # Not every function defines the event; give up silently.
                return
        self[event] = total
    def ratio(self, outevent, inevent):
        """Derive outevent as each inevent value over the profile-wide total."""
        assert outevent not in self
        assert inevent in self
        for function in self.functions.itervalues():
            assert outevent not in function
            assert inevent in function
            function[outevent] = ratio(function[inevent], self[inevent])
            for call in function.calls.itervalues():
                assert outevent not in call
                if inevent in call:
                    call[outevent] = ratio(call[inevent], self[inevent])
        self[outevent] = 1.0
    def prune(self, node_thres, edge_thres):
        """Prune the profile"""
        # compute the prune ratios
        for function in self.functions.itervalues():
            try:
                function[PRUNE_RATIO] = function[TOTAL_TIME_RATIO]
            except UndefinedEvent:
                pass
            for call in function.calls.itervalues():
                callee = self.functions[call.callee_id]
                if TOTAL_TIME_RATIO in call:
                    # handle exact cases first
                    call[PRUNE_RATIO] = call[TOTAL_TIME_RATIO]
                else:
                    try:
                        # make a safe estimate
                        call[PRUNE_RATIO] = min(function[TOTAL_TIME_RATIO], callee[TOTAL_TIME_RATIO])
                    except UndefinedEvent:
                        pass
        # prune the nodes
        for function_id in self.functions.keys():
            function = self.functions[function_id]
            try:
                if function[PRUNE_RATIO] < node_thres:
                    del self.functions[function_id]
            except UndefinedEvent:
                pass
        # prune the egdes
        for function in self.functions.itervalues():
            for callee_id in function.calls.keys():
                call = function.calls[callee_id]
                try:
                    if callee_id not in self.functions or call[PRUNE_RATIO] < edge_thres:
                        del function.calls[callee_id]
                except UndefinedEvent:
                    pass
    def dump(self):
        """Write a human-readable dump of the whole profile to stderr."""
        for function in self.functions.itervalues():
            sys.stderr.write('Function %s:\n' % (function.name,))
            self._dump_events(function.events)
            for call in function.calls.itervalues():
                callee = self.functions[call.callee_id]
                sys.stderr.write('  Call %s:\n' % (callee.name,))
                self._dump_events(call.events)
        for cycle in self.cycles:
            sys.stderr.write('Cycle:\n')
            self._dump_events(cycle.events)
            for function in cycle.functions:
                sys.stderr.write('  Function %s\n' % (function.name,))
    def _dump_events(self, events):
        # Print each event as "name: formatted value".
        for event, value in events.iteritems():
            sys.stderr.write('    %s: %s\n' % (event.name, event.format(value)))
class Struct:
    """Masquerade a dictionary with a structure-like behavior.

    All attribute reads and writes are forwarded to an internal dict,
    which is stashed directly in __dict__ to dodge __setattr__ recursion.
    """
    def __init__(self, attrs = None):
        self.__dict__['_attrs'] = {} if attrs is None else attrs
    def __getattr__(self, name):
        attrs = self.__dict__['_attrs']
        if name in attrs:
            return attrs[name]
        raise AttributeError(name)
    def __setattr__(self, name, value):
        self._attrs[name] = value
    def __str__(self):
        return str(self._attrs)
    def __repr__(self):
        return repr(self._attrs)
class ParseError(Exception):
    """Raised when parsing to signal mismatches."""
    def __init__(self, msg, line):
        self.msg = msg
        # TODO: store more source line information
        self.line = line
    def __str__(self):
        return '{0}: {1!r}'.format(self.msg, self.line)
class Parser:
    """Abstract interface implemented by every profile-format parser."""
    def __init__(self):
        pass
    def parse(self):
        """Subclasses must return a populated Profile object."""
        raise NotImplementedError
class LineParser(Parser):
    """Base class for parsers that read line-based formats.

    Keeps a one-line lookahead: readline() advances it, lookahead() peeks,
    consume() returns it and advances.
    """
    def __init__(self, file):
        Parser.__init__(self)
        self._file = file
        self.__line = None  # current lookahead line; None until first readline()
        self.__eof = False  # latched once the underlying file is exhausted
    def readline(self):
        raw = self._file.readline()
        self.__eof = self.__eof or not raw
        self.__line = raw.rstrip('\r\n')
    def lookahead(self):
        assert self.__line is not None
        return self.__line
    def consume(self):
        result = self.lookahead()
        self.readline()
        return result
    def eof(self):
        assert self.__line is not None
        return self.__eof
# Token types emitted by XmlTokenizer / consumed by XmlParser.
XML_ELEMENT_START, XML_ELEMENT_END, XML_CHARACTER_DATA, XML_EOF = range(4)
class XmlToken:
    """One token from the XML stream: element start/end, text, or EOF."""
    def __init__(self, type, name_or_data, attrs = None, line = None, column = None):
        assert type in (XML_ELEMENT_START, XML_ELEMENT_END, XML_CHARACTER_DATA, XML_EOF)
        self.type = type
        self.name_or_data = name_or_data
        self.attrs = attrs
        self.line = line
        self.column = column
    def __str__(self):
        # Render a short human-readable form, mainly for error messages.
        if self.type == XML_EOF:
            return 'end of file'
        if self.type == XML_CHARACTER_DATA:
            return self.name_or_data
        if self.type == XML_ELEMENT_START:
            return '<' + self.name_or_data + ' ...>'
        if self.type == XML_ELEMENT_END:
            return '</' + self.name_or_data + '>'
        assert 0
class XmlTokenizer:
    """Expat based XML tokenizer.

    Feeds the input file to expat in chunks and buffers the resulting
    tokens; next() hands them out one at a time, ending with XML_EOF.
    """
    def __init__(self, fp, skip_ws = True):
        self.fp = fp
        self.tokens = []          # buffered tokens not yet handed out
        self.index = 0            # next token to return from the buffer
        self.final = False        # true once the last chunk was parsed
        self.skip_ws = skip_ws    # drop whitespace-only character data
        self.character_pos = 0, 0
        self.character_data = ''  # accumulates adjacent character-data events
        self.parser = xml.parsers.expat.ParserCreate()
        self.parser.StartElementHandler = self.handle_element_start
        self.parser.EndElementHandler = self.handle_element_end
        self.parser.CharacterDataHandler = self.handle_character_data
    def handle_element_start(self, name, attributes):
        self.finish_character_data()
        line, column = self.pos()
        self.tokens.append(XmlToken(XML_ELEMENT_START, name, attributes, line, column))
    def handle_element_end(self, name):
        self.finish_character_data()
        line, column = self.pos()
        self.tokens.append(XmlToken(XML_ELEMENT_END, name, None, line, column))
    def handle_character_data(self, data):
        # Remember where the run of text started, then accumulate.
        if not self.character_data:
            self.character_pos = self.pos()
        self.character_data += data
    def finish_character_data(self):
        # Flush accumulated text as a single token (unless whitespace-only
        # and skip_ws is set).
        if self.character_data:
            if not self.skip_ws or not self.character_data.isspace():
                line, column = self.character_pos
                self.tokens.append(XmlToken(XML_CHARACTER_DATA, self.character_data, None, line, column))
            self.character_data = ''
    def next(self):
        size = 16*1024
        while self.index >= len(self.tokens) and not self.final:
            self.tokens = []
            self.index = 0
            data = self.fp.read(size)
            self.final = len(data) < size
            try:
                self.parser.Parse(data, self.final)
            except xml.parsers.expat.ExpatError as e:
                # Error code 3 is XML_ERROR_NO_ELEMENTS, harmless at EOF.
                if e.code != 3:
                    raise e
        if self.index >= len(self.tokens):
            line, column = self.pos()
            return XmlToken(XML_EOF, None, None, line, column)
        token = self.tokens[self.index]
        self.index += 1
        return token
    def pos(self):
        return self.parser.CurrentLineNumber, self.parser.CurrentColumnNumber
class XmlTokenMismatch(Exception):
    """Raised when the next XML token is not the one the parser expected."""
    def __init__(self, expected, found):
        self.expected = expected
        self.found = found
    def __str__(self):
        location = '%u:%u' % (self.found.line, self.found.column)
        return '%s: %s expected, %s found' % (location, self.expected, self.found)
class XmlParser(Parser):
    """Base XML document parser built on top of XmlTokenizer.

    Maintains a one-token lookahead in self.token; element_start/element_end
    assert the expected structure and raise XmlTokenMismatch otherwise.
    """
    def __init__(self, fp):
        Parser.__init__(self)
        self.tokenizer = XmlTokenizer(fp)
        self.consume()
    def consume(self):
        self.token = self.tokenizer.next()
    def match_element_start(self, name):
        token = self.token
        return token.type == XML_ELEMENT_START and token.name_or_data == name
    def match_element_end(self, name):
        token = self.token
        return token.type == XML_ELEMENT_END and token.name_or_data == name
    def _skip_character_data(self):
        # Ignore any text sitting between the structural tokens.
        while self.token.type == XML_CHARACTER_DATA:
            self.consume()
    def element_start(self, name):
        """Require a <name> start tag; return its attribute dict."""
        self._skip_character_data()
        if self.token.type != XML_ELEMENT_START or self.token.name_or_data != name:
            raise XmlTokenMismatch(XmlToken(XML_ELEMENT_START, name), self.token)
        attrs = self.token.attrs
        self.consume()
        return attrs
    def element_end(self, name):
        """Require a </name> end tag."""
        self._skip_character_data()
        if self.token.type != XML_ELEMENT_END or self.token.name_or_data != name:
            raise XmlTokenMismatch(XmlToken(XML_ELEMENT_END, name), self.token)
        self.consume()
    def character_data(self, strip = True):
        """Collect consecutive character data, optionally stripped."""
        pieces = []
        while self.token.type == XML_CHARACTER_DATA:
            pieces.append(self.token.name_or_data)
            self.consume()
        data = ''.join(pieces)
        return data.strip() if strip else data
class GprofParser(Parser):
    """Parser for GNU gprof output.
    See also:
    - Chapter "Interpreting gprof's Output" from the GNU gprof manual
      http://sourceware.org/binutils/docs-2.18/gprof/Call-Graph.html#Call-Graph
    - File "cg_print.c" from the GNU gprof source code
      http://sourceware.org/cgi-bin/cvsweb.cgi/~checkout~/src/gprof/cg_print.c?rev=1.12&cvsroot=src
    """
    def __init__(self, fp):
        Parser.__init__(self)
        self.fp = fp
        self.functions = {}  # index -> Struct for each call-graph entry
        self.cycles = {}     # cycle number -> Struct for "<cycle as a whole>"
    def readline(self):
        """Read one line, aborting the program on unexpected EOF."""
        line = self.fp.readline()
        if not line:
            sys.stderr.write('error: unexpected end of file\n')
            sys.exit(1)
        line = line.rstrip('\r\n')
        return line
    _int_re = re.compile(r'^\d+$')
    _float_re = re.compile(r'^\d+\.\d+$')
    def translate(self, mo):
        """Extract a structure from a match object, while translating the types in the process."""
        attrs = {}
        groupdict = mo.groupdict()
        for name, value in groupdict.iteritems():
            if value is None:
                value = None
            elif self._int_re.match(value):
                value = int(value)
            elif self._float_re.match(value):
                value = float(value)
            attrs[name] = (value)
        return Struct(attrs)
    _cg_header_re = re.compile(
        # original gprof header
        r'^\s+called/total\s+parents\s*$|' +
        r'^index\s+%time\s+self\s+descendents\s+called\+self\s+name\s+index\s*$|' +
        r'^\s+called/total\s+children\s*$|' +
        # GNU gprof header
        r'^index\s+%\s+time\s+self\s+children\s+called\s+name\s*$'
    )
    _cg_ignore_re = re.compile(
        # spontaneous
        r'^\s+<spontaneous>\s*$|'
        # internal calls (such as "mcount")
        r'^.*\((\d+)\)$'
    )
    _cg_primary_re = re.compile(
        r'^\[(?P<index>\d+)\]?' +
        r'\s+(?P<percentage_time>\d+\.\d+)' +
        r'\s+(?P<self>\d+\.\d+)' +
        r'\s+(?P<descendants>\d+\.\d+)' +
        r'\s+(?:(?P<called>\d+)(?:\+(?P<called_self>\d+))?)?' +
        r'\s+(?P<name>\S.*?)' +
        r'(?:\s+<cycle\s(?P<cycle>\d+)>)?' +
        r'\s\[(\d+)\]$'
    )
    _cg_parent_re = re.compile(
        r'^\s+(?P<self>\d+\.\d+)?' +
        r'\s+(?P<descendants>\d+\.\d+)?' +
        r'\s+(?P<called>\d+)(?:/(?P<called_total>\d+))?' +
        r'\s+(?P<name>\S.*?)' +
        r'(?:\s+<cycle\s(?P<cycle>\d+)>)?' +
        r'\s\[(?P<index>\d+)\]$'
    )
    _cg_child_re = _cg_parent_re
    _cg_cycle_header_re = re.compile(
        r'^\[(?P<index>\d+)\]?' +
        r'\s+(?P<percentage_time>\d+\.\d+)' +
        r'\s+(?P<self>\d+\.\d+)' +
        r'\s+(?P<descendants>\d+\.\d+)' +
        r'\s+(?:(?P<called>\d+)(?:\+(?P<called_self>\d+))?)?' +
        r'\s+<cycle\s(?P<cycle>\d+)\sas\sa\swhole>' +
        r'\s\[(\d+)\]$'
    )
    _cg_cycle_member_re = re.compile(
        r'^\s+(?P<self>\d+\.\d+)?' +
        r'\s+(?P<descendants>\d+\.\d+)?' +
        r'\s+(?P<called>\d+)(?:\+(?P<called_self>\d+))?' +
        r'\s+(?P<name>\S.*?)' +
        r'(?:\s+<cycle\s(?P<cycle>\d+)>)?' +
        r'\s\[(?P<index>\d+)\]$'
    )
    _cg_sep_re = re.compile(r'^--+$')
    def parse_function_entry(self, lines):
        """Parse one call-graph entry: parent lines, primary line, children."""
        parents = []
        children = []
        while True:
            if not lines:
                # BUG FIX: bail out of the malformed entry instead of popping
                # from the empty list (which raised IndexError right after
                # the warning).
                sys.stderr.write('warning: unexpected end of entry\n')
                return
            line = lines.pop(0)
            if line.startswith('['):
                break
            # read function parent line
            mo = self._cg_parent_re.match(line)
            if not mo:
                if self._cg_ignore_re.match(line):
                    continue
                sys.stderr.write('warning: unrecognized call graph entry: %r\n' % line)
            else:
                parent = self.translate(mo)
                parents.append(parent)
        # read primary line
        mo = self._cg_primary_re.match(line)
        if not mo:
            sys.stderr.write('warning: unrecognized call graph entry: %r\n' % line)
            return
        else:
            function = self.translate(mo)
        while lines:
            line = lines.pop(0)
            # read function subroutine line
            mo = self._cg_child_re.match(line)
            if not mo:
                if self._cg_ignore_re.match(line):
                    continue
                sys.stderr.write('warning: unrecognized call graph entry: %r\n' % line)
            else:
                child = self.translate(mo)
                children.append(child)
        function.parents = parents
        function.children = children
        self.functions[function.index] = function
    def parse_cycle_entry(self, lines):
        """Parse a '<cycle N as a whole>' entry and its member lines."""
        # read cycle header line
        line = lines[0]
        mo = self._cg_cycle_header_re.match(line)
        if not mo:
            sys.stderr.write('warning: unrecognized call graph entry: %r\n' % line)
            return
        cycle = self.translate(mo)
        # read cycle member lines
        cycle.functions = []
        for line in lines[1:]:
            mo = self._cg_cycle_member_re.match(line)
            if not mo:
                sys.stderr.write('warning: unrecognized call graph entry: %r\n' % line)
                continue
            call = self.translate(mo)
            cycle.functions.append(call)
        self.cycles[cycle.cycle] = cycle
    def parse_cg_entry(self, lines):
        # Cycle entries start with the primary '[N]' line; function entries
        # start with parent lines.
        if lines[0].startswith("["):
            self.parse_cycle_entry(lines)
        else:
            self.parse_function_entry(lines)
    def parse_cg(self):
        """Parse the call graph."""
        # skip call graph header
        while not self._cg_header_re.match(self.readline()):
            pass
        line = self.readline()
        while self._cg_header_re.match(line):
            line = self.readline()
        # process call graph entries
        entry_lines = []
        while line != '\014': # form feed
            if line and not line.isspace():
                if self._cg_sep_re.match(line):
                    self.parse_cg_entry(entry_lines)
                    entry_lines = []
                else:
                    entry_lines.append(line)
            line = self.readline()
    def parse(self):
        """Build and return a Profile from the parsed call-graph entries."""
        self.parse_cg()
        self.fp.close()
        profile = Profile()
        profile[TIME] = 0.0
        cycles = {}
        for index in self.cycles.iterkeys():
            cycles[index] = Cycle()
        for entry in self.functions.itervalues():
            # populate the function
            function = Function(entry.index, entry.name)
            function[TIME] = entry.self
            if entry.called is not None:
                function[CALLS] = entry.called
            if entry.called_self is not None:
                # Self-recursive calls are counted but not added as edges.
                call = Call(entry.index)
                call[CALLS] = entry.called_self
                function[CALLS] += entry.called_self
            # populate the function calls
            for child in entry.children:
                call = Call(child.index)
                assert child.called is not None
                call[CALLS] = child.called
                if child.index not in self.functions:
                    # NOTE: functions that were never called but were discovered by gprof's
                    # static call graph analysis dont have a call graph entry so we need
                    # to add them here
                    missing = Function(child.index, child.name)
                    # BUG FIX: initialize the *missing* function's events; the
                    # old code assigned to `function` here, zeroing the
                    # parent's own time and call count.
                    missing[TIME] = 0.0
                    missing[CALLS] = 0
                    profile.add_function(missing)
                function.add_call(call)
            profile.add_function(function)
            if entry.cycle is not None:
                try:
                    cycle = cycles[entry.cycle]
                except KeyError:
                    sys.stderr.write('warning: <cycle %u as a whole> entry missing\n' % entry.cycle)
                    cycle = Cycle()
                    cycles[entry.cycle] = cycle
                cycle.add_function(function)
            profile[TIME] = profile[TIME] + function[TIME]
        for cycle in cycles.itervalues():
            profile.add_cycle(cycle)
        # Compute derived events
        profile.validate()
        profile.ratio(TIME_RATIO, TIME)
        profile.call_ratios(CALLS)
        profile.integrate(TOTAL_TIME, TIME)
        profile.ratio(TOTAL_TIME_RATIO, TOTAL_TIME)
        return profile
class OprofileParser(LineParser):
    """Parser for oprofile callgraph output.
    See also:
    - http://oprofile.sourceforge.net/doc/opreport.html#opreport-callgraph
    """
    # Regex fragment for each possible column in the opreport header; the
    # actual entry regex is assembled in parse_header() from the columns
    # present in this particular report.
    _fields_re = {
        'samples': r'(?P<samples>\d+)',
        '%': r'(?P<percentage>\S+)',
        'linenr info': r'(?P<source>\(no location information\)|\S+:\d+)',
        'image name': r'(?P<image>\S+(?:\s\(tgid:[^)]*\))?)',
        'app name': r'(?P<application>\S+)',
        'symbol name': r'(?P<symbol>\(no symbols\)|.+?)',
    }
    def __init__(self, infile):
        LineParser.__init__(self, infile)
        self.entries = {}    # function id -> (callers, function, callees)
        self.entry_re = None # compiled per-report entry regex
    def add_entry(self, callers, function, callees):
        """Record one primary entry, merging with any prior one for the id."""
        try:
            entry = self.entries[function.id]
        except KeyError:
            self.entries[function.id] = (callers, function, callees)
        else:
            callers_total, function_total, callees_total = entry
            self.update_subentries_dict(callers_total, callers)
            function_total.samples += function.samples
            self.update_subentries_dict(callees_total, callees)
    def update_subentries_dict(self, totals, partials):
        # Merge caller/callee sub-entries by id, summing their samples.
        for partial in partials.itervalues():
            try:
                total = totals[partial.id]
            except KeyError:
                totals[partial.id] = partial
            else:
                total.samples += partial.samples
    def parse(self):
        """Read the whole report and build a Profile from the entries."""
        # read lookahead
        self.readline()
        self.parse_header()
        while self.lookahead():
            self.parse_entry()
        profile = Profile()
        reverse_call_samples = {}
        # populate the profile
        profile[SAMPLES] = 0
        for _callers, _function, _callees in self.entries.itervalues():
            function = Function(_function.id, _function.name)
            function[SAMPLES] = _function.samples
            profile.add_function(function)
            profile[SAMPLES] += _function.samples
            if _function.application:
                function[PROCESS] = os.path.basename(_function.application)
            if _function.image:
                function[MODULE] = os.path.basename(_function.image)
            total_callee_samples = 0
            for _callee in _callees.itervalues():
                total_callee_samples += _callee.samples
            for _callee in _callees.itervalues():
                # The "[self]" sub-entry is the function itself, not an edge.
                if not _callee.self:
                    call = Call(_callee.id)
                    call[SAMPLES2] = _callee.samples
                    function.add_call(call)
        # compute derived data
        profile.validate()
        profile.find_cycles()
        profile.ratio(TIME_RATIO, SAMPLES)
        profile.call_ratios(SAMPLES2)
        profile.integrate(TOTAL_TIME_RATIO, TIME_RATIO)
        return profile
    def parse_header(self):
        """Locate the column header and compile the per-report entry regex."""
        while not self.match_header():
            self.consume()
        line = self.lookahead()
        fields = re.split(r'\s\s+', line)
        entry_re = r'^\s*' + r'\s+'.join([self._fields_re[field] for field in fields]) + r'(?P<self>\s+\[self\])?$'
        self.entry_re = re.compile(entry_re)
        self.skip_separator()
    def parse_entry(self):
        """Parse one callers / primary / callees block."""
        callers = self.parse_subentries()
        if self.match_primary():
            function = self.parse_subentry()
            if function is not None:
                callees = self.parse_subentries()
                self.add_entry(callers, function, callees)
        self.skip_separator()
    def parse_subentries(self):
        # Collect consecutive indented (secondary) lines keyed by id.
        subentries = {}
        while self.match_secondary():
            subentry = self.parse_subentry()
            subentries[subentry.id] = subentry
        return subentries
    def parse_subentry(self):
        """Parse one report line into a Struct with a synthesized id."""
        entry = Struct()
        line = self.consume()
        mo = self.entry_re.match(line)
        if not mo:
            raise ParseError('failed to parse', line)
        fields = mo.groupdict()
        entry.samples = int(fields.get('samples', 0))
        entry.percentage = float(fields.get('percentage', 0.0))
        if 'source' in fields and fields['source'] != '(no location information)':
            source = fields['source']
            filename, lineno = source.split(':')
            entry.filename = filename
            entry.lineno = int(lineno)
        else:
            source = ''
            entry.filename = None
            entry.lineno = None
        entry.image = fields.get('image', '')
        entry.application = fields.get('application', '')
        if 'symbol' in fields and fields['symbol'] != '(no symbols)':
            entry.symbol = fields['symbol']
        else:
            entry.symbol = ''
        if entry.symbol.startswith('"') and entry.symbol.endswith('"'):
            entry.symbol = entry.symbol[1:-1]
        # The id combines application, image, source and symbol so that
        # identically-named symbols from different images stay distinct.
        entry.id = ':'.join((entry.application, entry.image, source, entry.symbol))
        entry.self = fields.get('self', None) != None
        if entry.self:
            entry.id += ':self'
        if entry.symbol:
            entry.name = entry.symbol
        else:
            entry.name = entry.image
        return entry
    def skip_separator(self):
        # Skip up to and including the next dashed separator line.
        while not self.match_separator():
            self.consume()
        self.consume()
    def match_header(self):
        line = self.lookahead()
        return line.startswith('samples')
    def match_separator(self):
        # A separator is a line made up entirely of dashes.
        line = self.lookahead()
        return line == '-'*len(line)
    def match_primary(self):
        line = self.lookahead()
        return not line[:1].isspace()
    def match_secondary(self):
        line = self.lookahead()
        return line[:1].isspace()
class SysprofParser(XmlParser):
    """Parser for sysprof saved-profile XML files."""
    def __init__(self, stream):
        XmlParser.__init__(self, stream)
    def parse(self):
        """Parse the <profile> element into objects/nodes and build a Profile."""
        objects = {}
        nodes = {}
        self.element_start('profile')
        while self.token.type == XML_ELEMENT_START:
            if self.token.name_or_data == 'objects':
                assert not objects
                objects = self.parse_items('objects')
            elif self.token.name_or_data == 'nodes':
                assert not nodes
                nodes = self.parse_items('nodes')
            else:
                # Scalar children of <profile> are parsed and discarded.
                self.parse_value(self.token.name_or_data)
        self.element_end('profile')
        return self.build_profile(objects, nodes)
    def parse_items(self, name):
        """Parse a plural container element into an {id: values} dict."""
        assert name[-1] == 's'
        items = {}
        self.element_start(name)
        while self.token.type == XML_ELEMENT_START:
            id, values = self.parse_item(name[:-1])
            assert id not in items
            items[id] = values
        self.element_end(name)
        return items
    def parse_item(self, name):
        """Parse one child element; returns (numeric id, values dict)."""
        attrs = self.element_start(name)
        id = int(attrs['id'])
        values = self.parse_values()
        self.element_end(name)
        return id, values
    def parse_values(self):
        # Collect all scalar child elements into a name -> value dict.
        values = {}
        while self.token.type == XML_ELEMENT_START:
            name = self.token.name_or_data
            value = self.parse_value(name)
            assert name not in values
            values[name] = value
        return values
    def parse_value(self, tag):
        """Parse one scalar element, converting digits to int and
        stripping surrounding double quotes from quoted strings."""
        self.element_start(tag)
        value = self.character_data()
        self.element_end(tag)
        if value.isdigit():
            return int(value)
        if value.startswith('"') and value.endswith('"'):
            return value[1:-1]
        return value
    def build_profile(self, objects, nodes):
        """Convert the raw objects/nodes dicts into a Profile."""
        profile = Profile()
        profile[SAMPLES] = 0
        for id, object in objects.iteritems():
            # Ignore fake objects (process names, modules, "Everything", "kernel", etc.)
            if object['self'] == 0:
                continue
            function = Function(id, object['name'])
            function[SAMPLES] = object['self']
            profile.add_function(function)
            profile[SAMPLES] += function[SAMPLES]
        for id, node in nodes.iteritems():
            # Ignore fake calls
            if node['self'] == 0:
                continue
            # Find a non-ignored parent
            parent_id = node['parent']
            while parent_id != 0:
                parent = nodes[parent_id]
                caller_id = parent['object']
                if objects[caller_id]['self'] != 0:
                    break
                parent_id = parent['parent']
            if parent_id == 0:
                continue
            callee_id = node['object']
            assert objects[caller_id]['self']
            assert objects[callee_id]['self']
            function = profile.functions[caller_id]
            samples = node['self']
            try:
                call = function.calls[callee_id]
            except KeyError:
                call = Call(callee_id)
                call[SAMPLES2] = samples
                function.add_call(call)
            else:
                call[SAMPLES2] += samples
        # Compute derived events
        profile.validate()
        profile.find_cycles()
        profile.ratio(TIME_RATIO, SAMPLES)
        profile.call_ratios(SAMPLES2)
        profile.integrate(TOTAL_TIME_RATIO, TIME_RATIO)
        return profile
class SharkParser(LineParser):
    """Parser for MacOSX Shark output.
    Author: [email protected]
    """
    def __init__(self, infile):
        LineParser.__init__(self, infile)
        self.stack = []    # current call stack, one entry per indent level
        self.entries = {}  # entry id -> (function, callees dict)
    def add_entry(self, function):
        """Record a function occurrence, merging samples for repeats."""
        try:
            entry = self.entries[function.id]
        except KeyError:
            self.entries[function.id] = (function, { })
        else:
            function_total, callees_total = entry
            function_total.samples += function.samples
    def add_callee(self, function, callee):
        """Record a caller->callee occurrence, merging repeated callees."""
        func, callees = self.entries[function.id]
        try:
            entry = callees[callee.id]
        except KeyError:
            callees[callee.id] = callee
        else:
            entry.samples += callee.samples
    def parse(self):
        """Parse the indented Shark tree and build a Profile."""
        # Skip the four header lines before the tree starts.
        self.readline()
        self.readline()
        self.readline()
        self.readline()
        match = re.compile(r'(?P<prefix>[|+ ]*)(?P<samples>\d+), (?P<symbol>[^,]+), (?P<image>.*)')
        while self.lookahead():
            line = self.consume()
            mo = match.match(line)
            if not mo:
                raise ParseError('failed to parse', line)
            fields = mo.groupdict()
            # NOTE: '/' here is integer division under Python 2; the prefix
            # width (2 chars per level) encodes the stack depth.
            prefix = len(fields.get('prefix', 0)) / 2 - 1
            symbol = str(fields.get('symbol', 0))
            image = str(fields.get('image', 0))
            entry = Struct()
            entry.id = ':'.join([symbol, image])
            entry.samples = int(fields.get('samples', 0))
            entry.name = symbol
            entry.image = image
            # adjust the callstack
            if prefix < len(self.stack):
                del self.stack[prefix:]
            if prefix == len(self.stack):
                self.stack.append(entry)
            # if the callstack has had an entry, it's this functions caller
            if prefix > 0:
                self.add_callee(self.stack[prefix - 1], entry)
            self.add_entry(entry)
        profile = Profile()
        profile[SAMPLES] = 0
        for _function, _callees in self.entries.itervalues():
            function = Function(_function.id, _function.name)
            function[SAMPLES] = _function.samples
            profile.add_function(function)
            profile[SAMPLES] += _function.samples
            if _function.image:
                function[MODULE] = os.path.basename(_function.image)
            for _callee in _callees.itervalues():
                call = Call(_callee.id)
                call[SAMPLES] = _callee.samples
                function.add_call(call)
        # compute derived data
        profile.validate()
        profile.find_cycles()
        profile.ratio(TIME_RATIO, SAMPLES)
        profile.call_ratios(SAMPLES)
        profile.integrate(TOTAL_TIME_RATIO, TIME_RATIO)
        return profile
class SleepyParser(Parser):
    """Parser for Very Sleepy output (a zip archive with symbols.txt and
    callstacks.txt).
    See also:
    - http://www.codersnotes.com/sleepy/
    - http://sleepygraph.sourceforge.net/
    """
    def __init__(self, filename):
        Parser.__init__(self)
        from zipfile import ZipFile
        self.database = ZipFile(filename)
        self.symbols = {}  # symbol id (from symbols.txt) -> Function
        self.calls = {}
        self.profile = Profile()
    # One line of symbols.txt: id, then quoted module/procname/sourcefile,
    # then a source line number.
    _symbol_re = re.compile(
        r'^(?P<id>\w+)' +
        r'\s+"(?P<module>[^"]*)"' +
        r'\s+"(?P<procname>[^"]*)"' +
        r'\s+"(?P<sourcefile>[^"]*)"' +
        r'\s+(?P<sourceline>\d+)$'
    )
    def parse_symbols(self):
        """Build the symbol-id -> Function map from symbols.txt."""
        lines = self.database.read('symbols.txt').splitlines()
        for line in lines:
            mo = self._symbol_re.match(line)
            if mo:
                symbol_id, module, procname, sourcefile, sourceline = mo.groups()
                function_id = ':'.join([module, procname])
                try:
                    function = self.profile.functions[function_id]
                except KeyError:
                    function = Function(function_id, procname)
                    function[SAMPLES] = 0
                    self.profile.add_function(function)
                self.symbols[symbol_id] = function
    def parse_callstacks(self):
        """Accumulate samples and call edges from callstacks.txt."""
        lines = self.database.read("callstacks.txt").splitlines()
        for line in lines:
            fields = line.split()
            samples = int(fields[0])
            callstack = fields[1:]
            callstack = [self.symbols[symbol_id] for symbol_id in callstack]
            # The first entry is the innermost frame; walk outwards,
            # attributing the samples to every caller->callee edge.
            callee = callstack[0]
            callee[SAMPLES] += samples
            self.profile[SAMPLES] += samples
            for caller in callstack[1:]:
                try:
                    call = caller.calls[callee.id]
                except KeyError:
                    call = Call(callee.id)
                    call[SAMPLES2] = samples
                    caller.add_call(call)
                else:
                    call[SAMPLES2] += samples
                callee = caller
    def parse(self):
        """Parse both archive members and return the derived Profile."""
        profile = self.profile
        profile[SAMPLES] = 0
        self.parse_symbols()
        self.parse_callstacks()
        # Compute derived events
        profile.validate()
        profile.find_cycles()
        profile.ratio(TIME_RATIO, SAMPLES)
        profile.call_ratios(SAMPLES2)
        profile.integrate(TOTAL_TIME_RATIO, TIME_RATIO)
        return profile
class AQtimeTable:
    """One named table of AQtime results.

    Rows are stored as ``(values, children)`` pairs, where *values* line up
    with *fields* and *children* is a sequence of nested AQtimeTable objects.
    Iterating yields ``(field_dict, children_by_name)`` per row.
    """
    def __init__(self, name, fields):
        self.name = name
        self.fields = fields
        # Map field name -> column index for O(1) lookups.
        self.field_column = {}
        for column in range(len(fields)):
            self.field_column[fields[column]] = column
        self.rows = []
    def __len__(self):
        return len(self.rows)
    def __iter__(self):
        for values, children in self.rows:
            fields = {}
            for name, value in zip(self.fields, values):
                fields[name] = value
            children = dict([(child.name, child) for child in children])
            yield fields, children
        # BUG FIX: the original ended with `raise StopIteration`, which is
        # redundant in a generator and, under PEP 479 (Python 3.7+), is
        # converted into a RuntimeError when iteration finishes.  Falling
        # off the end terminates the generator correctly on all versions.
    def add_row(self, values, children=()):
        self.rows.append((values, children))
class AQtimeParser(XmlParser):
    """Parser for AQtime XML result exports.

    Reads the HEADERS section to learn each table's field layout, then
    parses RESULTS into nested AQtimeTable objects and converts the
    top-level 'Routines' table into a Profile.
    """
    def __init__(self, stream):
        XmlParser.__init__(self, stream)
        # Table id -> (name, field_types, field_names), filled by
        # parse_headers().
        self.tables = {}
    def parse(self):
        self.element_start('AQtime_Results')
        self.parse_headers()
        results = self.parse_results()
        self.element_end('AQtime_Results')
        return self.build_profile(results)
    def parse_headers(self):
        self.element_start('HEADERS')
        while self.token.type == XML_ELEMENT_START:
            self.parse_table_header()
        self.element_end('HEADERS')
    def parse_table_header(self):
        attrs = self.element_start('TABLE_HEADER')
        name = attrs['NAME']
        id = int(attrs['ID'])
        field_types = []
        field_names = []
        while self.token.type == XML_ELEMENT_START:
            field_type, field_name = self.parse_table_field()
            field_types.append(field_type)
            field_names.append(field_name)
        self.element_end('TABLE_HEADER')
        self.tables[id] = name, field_types, field_names
    def parse_table_field(self):
        attrs = self.element_start('TABLE_FIELD')
        type = attrs['TYPE']
        name = self.character_data()
        self.element_end('TABLE_FIELD')
        return type, name
    def parse_results(self):
        self.element_start('RESULTS')
        table = self.parse_data()
        self.element_end('RESULTS')
        return table
    def parse_data(self):
        # Parse one <DATA> element into an AQtimeTable, using the header
        # registered for its TABLE_ID.
        rows = []
        attrs = self.element_start('DATA')
        table_id = int(attrs['TABLE_ID'])
        table_name, field_types, field_names = self.tables[table_id]
        table = AQtimeTable(table_name, field_names)
        while self.token.type == XML_ELEMENT_START:
            row, children = self.parse_row(field_types)
            table.add_row(row, children)
        self.element_end('DATA')
        return table
    def parse_row(self, field_types):
        row = [None]*len(field_types)
        children = []
        self.element_start('ROW')
        while self.token.type == XML_ELEMENT_START:
            if self.token.name_or_data == 'FIELD':
                field_id, field_value = self.parse_field(field_types)
                row[field_id] = field_value
            elif self.token.name_or_data == 'CHILDREN':
                children = self.parse_children()
            else:
                raise XmlTokenMismatch("<FIELD ...> or <CHILDREN ...>", self.token)
        self.element_end('ROW')
        return row, children
    def parse_field(self, field_types):
        attrs = self.element_start('FIELD')
        id = int(attrs['ID'])
        type = field_types[id]
        # Convert character data per the declared field type.
        value = self.character_data()
        if type == 'Integer':
            value = int(value)
        elif type == 'Float':
            value = float(value)
        elif type == 'Address':
            value = int(value)
        elif type == 'String':
            pass
        else:
            assert False
        self.element_end('FIELD')
        return id, value
    def parse_children(self):
        children = []
        self.element_start('CHILDREN')
        while self.token.type == XML_ELEMENT_START:
            table = self.parse_data()
            # NOTE(review): this compares a table's *name* against a list of
            # AQtimeTable *objects*, so it can never fire; likely intended
            # `[child.name for child in children]` — confirm before changing.
            assert table.name not in children
            children.append(table)
        self.element_end('CHILDREN')
        return children
    def build_profile(self, results):
        # Turn the 'Routines' table into a Profile: one Function per row,
        # one Call per row of that routine's 'Children' child table.
        assert results.name == 'Routines'
        profile = Profile()
        profile[TIME] = 0.0
        for fields, tables in results:
            function = self.build_function(fields)
            children = tables['Children']
            for fields, _ in children:
                call = self.build_call(fields)
                function.add_call(call)
            profile.add_function(function)
            profile[TIME] = profile[TIME] + function[TIME]
        profile[TOTAL_TIME] = profile[TIME]
        profile.ratio(TOTAL_TIME_RATIO, TOTAL_TIME)
        return profile
    def build_function(self, fields):
        function = Function(self.build_id(fields), self.build_name(fields))
        function[TIME] = fields['Time']
        function[TOTAL_TIME] = fields['Time with Children']
        #function[TIME_RATIO] = fields['% Time']/100.0
        #function[TOTAL_TIME_RATIO] = fields['% with Children']/100.0
        return function
    def build_call(self, fields):
        call = Call(self.build_id(fields))
        call[TIME] = fields['Time']
        call[TOTAL_TIME] = fields['Time with Children']
        #call[TIME_RATIO] = fields['% Time']/100.0
        #call[TOTAL_TIME_RATIO] = fields['% with Children']/100.0
        return call
    def build_id(self, fields):
        return ':'.join([fields['Module Name'], fields['Unit Name'], fields['Routine Name']])
    def build_name(self, fields):
        # TODO: use more fields
        return fields['Routine Name']
class PstatsParser:
    """Parser for python profiling statistics saved with the pstats module."""
    def __init__(self, *filename):
        import pstats
        try:
            self.stats = pstats.Stats(*filename)
        except ValueError:
            # Fall back to hotshot data when pstats cannot read the file.
            # NOTE: hotshot only loads a single file, hence filename[0].
            import hotshot.stats
            self.stats = hotshot.stats.load(filename[0])
        self.profile = Profile()
        # pstats key triple -> integer function id.
        self.function_ids = {}
    def get_function_name(self, (filename, line, name)):
        # The pstats key is the (filename, line, name) triple.
        module = os.path.splitext(filename)[0]
        module = os.path.basename(module)
        return "%s:%d:%s" % (module, line, name)
    def get_function(self, key):
        # Return the Function for this pstats key, creating it on first use.
        try:
            id = self.function_ids[key]
        except KeyError:
            id = len(self.function_ids)
            name = self.get_function_name(key)
            function = Function(id, name)
            self.profile.functions[id] = function
            self.function_ids[key] = id
        else:
            function = self.profile.functions[id]
        return function
    def parse(self):
        self.profile[TIME] = 0.0
        self.profile[TOTAL_TIME] = self.stats.total_tt
        # stats entries: key -> (call count, non-recursive calls,
        # total time, cumulative time, callers dict).
        for fn, (cc, nc, tt, ct, callers) in self.stats.stats.iteritems():
            callee = self.get_function(fn)
            callee[CALLS] = nc
            callee[TOTAL_TIME] = ct
            callee[TIME] = tt
            self.profile[TIME] += tt
            self.profile[TOTAL_TIME] = max(self.profile[TOTAL_TIME], ct)
            for fn, value in callers.iteritems():
                caller = self.get_function(fn)
                call = Call(callee.id)
                if isinstance(value, tuple):
                    # Per-caller stats as flat 4-tuples; accumulate each.
                    for i in xrange(0, len(value), 4):
                        nc, cc, tt, ct = value[i:i+4]
                        if CALLS in call:
                            call[CALLS] += cc
                        else:
                            call[CALLS] = cc
                        if TOTAL_TIME in call:
                            call[TOTAL_TIME] += ct
                        else:
                            call[TOTAL_TIME] = ct
                else:
                    # Older format: just the call count; apportion time.
                    call[CALLS] = value
                    call[TOTAL_TIME] = ratio(value, nc)*ct
                caller.add_call(call)
        #self.stats.print_stats()
        #self.stats.print_callees()
        # Compute derived events
        self.profile.validate()
        self.profile.ratio(TIME_RATIO, TIME)
        self.profile.ratio(TOTAL_TIME_RATIO, TOTAL_TIME)
        return self.profile
class Theme:
    """Color and sizing scheme used when rendering a profile graph.

    Colors are stored as HSL triples; ``color`` interpolates between
    ``mincolor`` and ``maxcolor`` (optionally skewed) and converts the
    result to gamma-corrected RGB.  Weights are expected in [0, 1].
    """

    def __init__(self,
            bgcolor = (0.0, 0.0, 1.0),
            mincolor = (0.0, 0.0, 0.0),
            maxcolor = (0.0, 0.0, 1.0),
            fontname = "Arial",
            minfontsize = 10.0,
            maxfontsize = 10.0,
            minpenwidth = 0.5,
            maxpenwidth = 4.0,
            gamma = 2.2,
            skew = 1.0):
        self.bgcolor = bgcolor
        self.mincolor = mincolor
        self.maxcolor = maxcolor
        self.fontname = fontname
        self.minfontsize = minfontsize
        self.maxfontsize = maxfontsize
        self.minpenwidth = minpenwidth
        self.maxpenwidth = maxpenwidth
        self.gamma = gamma
        self.skew = skew

    def graph_bgcolor(self):
        hue, saturation, lightness = self.bgcolor
        return self.hsl_to_rgb(hue, saturation, lightness)

    def graph_fontname(self):
        return self.fontname

    def graph_fontsize(self):
        # Graph-level labels always use the smallest font.
        return self.minfontsize

    def node_bgcolor(self, weight):
        return self.color(weight)

    def node_fgcolor(self, weight):
        # Node text is drawn in the graph background color for contrast.
        return self.graph_bgcolor()

    def node_fontsize(self, weight):
        return self.fontsize(weight)

    def edge_color(self, weight):
        return self.color(weight)

    def edge_fontsize(self, weight):
        return self.fontsize(weight)

    def edge_penwidth(self, weight):
        return max(weight*self.maxpenwidth, self.minpenwidth)

    def edge_arrowsize(self, weight):
        return 0.5 * math.sqrt(self.edge_penwidth(weight))

    def fontsize(self, weight):
        # Quadratic ramp keeps low-weight labels at the minimum size longer.
        return max(weight**2 * self.maxfontsize, self.minfontsize)

    def color(self, weight):
        weight = min(max(weight, 0.0), 1.0)
        if self.skew < 0:
            raise ValueError("Skew must be greater than 0")
        if self.skew == 1.0:
            # Plain linear interpolation per HSL channel.
            h, s, l = [lo + weight*(hi - lo)
                       for lo, hi in zip(self.mincolor, self.maxcolor)]
        else:
            # Exponential interpolation: skew < 1 spreads out low weights,
            # skew > 1 compresses them.
            base = self.skew
            h, s, l = [lo + ((hi-lo)*(-1.0 + (base ** weight)) / (base - 1.0))
                       for lo, hi in zip(self.mincolor, self.maxcolor)]
        return self.hsl_to_rgb(h, s, l)

    def hsl_to_rgb(self, h, s, l):
        """Convert a color from the HSL color model to gamma-corrected RGB.

        See also:
        - http://www.w3.org/TR/css3-color/#hsl-color
        """
        h = h % 1.0
        s = min(max(s, 0.0), 1.0)
        l = min(max(l, 0.0), 1.0)
        m2 = l*(s + 1.0) if l <= 0.5 else l + s - l*s
        m1 = l*2.0 - m2
        channels = (
            self._hue_to_rgb(m1, m2, h + 1.0/3.0),
            self._hue_to_rgb(m1, m2, h),
            self._hue_to_rgb(m1, m2, h - 1.0/3.0),
        )
        r, g, b = [channel ** self.gamma for channel in channels]
        return (r, g, b)

    def _hue_to_rgb(self, m1, m2, h):
        # Wrap the hue into [0, 1] before piecewise interpolation.
        if h < 0.0:
            h += 1.0
        elif h > 1.0:
            h -= 1.0
        if h*6 < 1.0:
            return m1 + (m2 - m1)*h*6.0
        if h*2 < 1.0:
            return m2
        if h*3 < 2.0:
            return m1 + (m2 - m1)*(2.0/3.0 - h)*6.0
        return m1
# Predefined color schemes, selectable via the --colormap option.
TEMPERATURE_COLORMAP = Theme(
    mincolor = (2.0/3.0, 0.80, 0.25), # dark blue
    maxcolor = (0.0, 1.0, 0.5), # saturated red
    gamma = 1.0
)
PINK_COLORMAP = Theme(
    mincolor = (0.0, 1.0, 0.90), # pink
    maxcolor = (0.0, 1.0, 0.5), # saturated red
)
GRAY_COLORMAP = Theme(
    mincolor = (0.0, 0.0, 0.85), # light gray
    maxcolor = (0.0, 0.0, 0.0), # black
)
# Pure black-and-white: weight is conveyed through font size and pen
# width instead of color.
BW_COLORMAP = Theme(
    minfontsize = 8.0,
    maxfontsize = 24.0,
    mincolor = (0.0, 0.0, 0.0), # black
    maxcolor = (0.0, 0.0, 0.0), # black
    minpenwidth = 0.1,
    maxpenwidth = 8.0,
)
class DotWriter:
    """Writer for the DOT language.
    See also:
    - "The DOT Language" specification
      http://www.graphviz.org/doc/info/lang.html
    """
    def __init__(self, fp):
        # fp: writable file-like object receiving the DOT text.
        self.fp = fp
    def graph(self, profile, theme):
        # Emit the whole profile as a DOT digraph styled by *theme*.
        self.begin_graph()
        fontname = theme.graph_fontname()
        self.attr('graph', fontname=fontname, ranksep=0.25, nodesep=0.125)
        self.attr('node', fontname=fontname, shape="box", style="filled", fontcolor="white", width=0, height=0)
        self.attr('edge', fontname=fontname)
        for function in profile.functions.itervalues():
            labels = []
            for event in PROCESS, MODULE:
                if event in function.events:
                    label = event.format(function[event])
                    labels.append(label)
            labels.append(function.name)
            for event in TOTAL_TIME_RATIO, TIME_RATIO, CALLS:
                if event in function.events:
                    label = event.format(function[event])
                    labels.append(label)
            # Nodes without a prune ratio get minimum emphasis.
            try:
                weight = function[PRUNE_RATIO]
            except UndefinedEvent:
                weight = 0.0
            label = '\n'.join(labels)
            self.node(function.id,
                label = label,
                color = self.color(theme.node_bgcolor(weight)),
                fontcolor = self.color(theme.node_fgcolor(weight)),
                fontsize = "%.2f" % theme.node_fontsize(weight),
            )
            for call in function.calls.itervalues():
                callee = profile.functions[call.callee_id]
                labels = []
                for event in TOTAL_TIME_RATIO, CALLS:
                    if event in call.events:
                        label = event.format(call[event])
                        labels.append(label)
                # Fall back to the callee's weight when the call has none.
                try:
                    weight = call[PRUNE_RATIO]
                except UndefinedEvent:
                    try:
                        weight = callee[PRUNE_RATIO]
                    except UndefinedEvent:
                        weight = 0.0
                label = '\n'.join(labels)
                self.edge(function.id, call.callee_id,
                    label = label,
                    color = self.color(theme.edge_color(weight)),
                    fontcolor = self.color(theme.edge_color(weight)),
                    fontsize = "%.2f" % theme.edge_fontsize(weight),
                    penwidth = "%.2f" % theme.edge_penwidth(weight),
                    labeldistance = "%.2f" % theme.edge_penwidth(weight),
                    arrowsize = "%.2f" % theme.edge_arrowsize(weight),
                )
        self.end_graph()
    def begin_graph(self):
        self.write('digraph {\n')
    def end_graph(self):
        self.write('}\n')
    def attr(self, what, **attrs):
        # Emit a default-attribute statement, e.g. `node [shape=box];`.
        self.write("\t")
        self.write(what)
        self.attr_list(attrs)
        self.write(";\n")
    def node(self, node, **attrs):
        self.write("\t")
        self.id(node)
        self.attr_list(attrs)
        self.write(";\n")
    def edge(self, src, dst, **attrs):
        self.write("\t")
        self.id(src)
        self.write(" -> ")
        self.id(dst)
        self.attr_list(attrs)
        self.write(";\n")
    def attr_list(self, attrs):
        # Emit `[name=value, ...]`; nothing at all when attrs is empty.
        if not attrs:
            return
        self.write(' [')
        first = True
        for name, value in attrs.iteritems():
            if first:
                first = False
            else:
                self.write(", ")
            self.id(name)
            self.write('=')
            self.id(value)
        self.write(']')
    def id(self, id):
        # Write a DOT identifier, quoting/escaping when it is not a plain
        # alphanumeric token.
        if isinstance(id, (int, float)):
            s = str(id)
        elif isinstance(id, basestring):
            if id.isalnum():
                s = id
            else:
                s = self.escape(id)
        else:
            raise TypeError
        self.write(s)
    def color(self, (r, g, b)):
        # Format an RGB triple of floats in [0, 1] as a DOT hex color.
        def float2int(f):
            if f <= 0.0:
                return 0
            if f >= 1.0:
                return 255
            return int(255.0*f + 0.5)
        return "#" + "".join(["%02x" % float2int(c) for c in (r, g, b)])
    def escape(self, s):
        # Produce a double-quoted DOT string with backslash escapes.
        s = s.encode('utf-8')
        s = s.replace('\\', r'\\')
        s = s.replace('\n', r'\n')
        s = s.replace('\t', r'\t')
        s = s.replace('"', r'\"')
        return '"' + s + '"'
    def write(self, s):
        self.fp.write(s)
class Main:
    """Main program: parse options, run the chosen parser, write DOT."""
    # Colormap name (as given to --colormap) -> Theme instance.
    themes = {
            "color": TEMPERATURE_COLORMAP,
            "pink": PINK_COLORMAP,
            "gray": GRAY_COLORMAP,
            "bw": BW_COLORMAP,
    }
    def main(self):
        """Main program."""
        parser = optparse.OptionParser(
            usage="\n\t%prog [options] [file] ...",
            version="%%prog %s" % __version__)
        parser.add_option(
            '-o', '--output', metavar='FILE',
            type="string", dest="output",
            help="output filename [stdout]")
        parser.add_option(
            '-n', '--node-thres', metavar='PERCENTAGE',
            type="float", dest="node_thres", default=0.5,
            help="eliminate nodes below this threshold [default: %default]")
        parser.add_option(
            '-e', '--edge-thres', metavar='PERCENTAGE',
            type="float", dest="edge_thres", default=0.1,
            help="eliminate edges below this threshold [default: %default]")
        parser.add_option(
            '-f', '--format',
            type="choice", choices=('prof', 'oprofile', 'sysprof', 'pstats', 'shark', 'sleepy', 'aqtime'),
            dest="format", default="prof",
            help="profile format: prof, oprofile, sysprof, shark, sleepy, aqtime, or pstats [default: %default]")
        parser.add_option(
            '-c', '--colormap',
            type="choice", choices=('color', 'pink', 'gray', 'bw'),
            dest="theme", default="color",
            help="color map: color, pink, gray, or bw [default: %default]")
        parser.add_option(
            '-s', '--strip',
            action="store_true",
            dest="strip", default=False,
            help="strip function parameters, template parameters, and const modifiers from demangled C++ function names")
        parser.add_option(
            '-w', '--wrap',
            action="store_true",
            dest="wrap", default=False,
            help="wrap function names")
        # add a new option to control skew of the colorization curve
        parser.add_option(
            '--skew',
            type="float", dest="theme_skew", default=1.0,
            help="skew the colorization curve. Values < 1.0 give more variety to lower percentages. Value > 1.0 give less variety to lower percentages")
        (self.options, self.args) = parser.parse_args(sys.argv[1:])
        # Only the pstats parser accepts multiple input files.
        if len(self.args) > 1 and self.options.format != 'pstats':
            parser.error('incorrect number of arguments')
        try:
            self.theme = self.themes[self.options.theme]
        except KeyError:
            parser.error('invalid colormap \'%s\'' % self.options.theme)
        # set skew on the theme now that it has been picked.
        if self.options.theme_skew:
            self.theme.skew = self.options.theme_skew
        # Instantiate the parser for the requested input format; all
        # text-based formats read from stdin when no file is given.
        if self.options.format == 'prof':
            if not self.args:
                fp = sys.stdin
            else:
                fp = open(self.args[0], 'rt')
            parser = GprofParser(fp)
        elif self.options.format == 'oprofile':
            if not self.args:
                fp = sys.stdin
            else:
                fp = open(self.args[0], 'rt')
            parser = OprofileParser(fp)
        elif self.options.format == 'sysprof':
            if not self.args:
                fp = sys.stdin
            else:
                fp = open(self.args[0], 'rt')
            parser = SysprofParser(fp)
        elif self.options.format == 'pstats':
            if not self.args:
                parser.error('at least a file must be specified for pstats input')
            parser = PstatsParser(*self.args)
        elif self.options.format == 'shark':
            if not self.args:
                fp = sys.stdin
            else:
                fp = open(self.args[0], 'rt')
            parser = SharkParser(fp)
        elif self.options.format == 'sleepy':
            if len(self.args) != 1:
                parser.error('exactly one file must be specified for sleepy input')
            parser = SleepyParser(self.args[0])
        elif self.options.format == 'aqtime':
            if not self.args:
                fp = sys.stdin
            else:
                fp = open(self.args[0], 'rt')
            parser = AQtimeParser(fp)
        else:
            parser.error('invalid format \'%s\'' % self.options.format)
        self.profile = parser.parse()
        if self.options.output is None:
            self.output = sys.stdout
        else:
            self.output = open(self.options.output, 'wt')
        self.write_graph()
    # Regexes used by strip_function_name().
    _parenthesis_re = re.compile(r'\([^()]*\)')
    _angles_re = re.compile(r'<[^<>]*>')
    _const_re = re.compile(r'\s+const$')
    def strip_function_name(self, name):
        """Remove extraneous information from C++ demangled function names."""
        # Strip function parameters from name by recursively removing paired parenthesis
        while True:
            name, n = self._parenthesis_re.subn('', name)
            if not n:
                break
        # Strip const qualifier
        name = self._const_re.sub('', name)
        # Strip template parameters from name by recursively removing paired angles
        while True:
            name, n = self._angles_re.subn('', name)
            if not n:
                break
        return name
    def wrap_function_name(self, name):
        """Split the function name on multiple lines."""
        if len(name) > 32:
            ratio = 2.0/3.0
            height = max(int(len(name)/(1.0 - ratio) + 0.5), 1)
            width = max(len(name)/height, 32)
            # TODO: break lines in symbols
            name = textwrap.fill(name, width, break_long_words=False)
        # Take away spaces
        name = name.replace(", ", ",")
        name = name.replace("> >", ">>")
        name = name.replace("> >", ">>") # catch consecutive
        return name
    def compress_function_name(self, name):
        """Compress function name according to the user preferences."""
        if self.options.strip:
            name = self.strip_function_name(name)
        if self.options.wrap:
            name = self.wrap_function_name(name)
        # TODO: merge functions with same resulting name
        return name
    def write_graph(self):
        # Prune by the user thresholds (given as percentages), shorten the
        # function names, then emit the DOT graph.
        dot = DotWriter(self.output)
        profile = self.profile
        profile.prune(self.options.node_thres/100.0, self.options.edge_thres/100.0)
        for function in profile.functions.itervalues():
            function.name = self.compress_function_name(function.name)
        dot.graph(profile, self.theme)
# Script entry point.
if __name__ == '__main__':
    Main().main()
|
mit
|
uw-it-aca/spotseeker_server
|
spotseeker_server/test/search/uw_noise_level.py
|
1
|
4334
|
# Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
from django.test import TestCase
from django.test.client import Client
from django.test.utils import override_settings
import simplejson as json
from spotseeker_server.models import Spot, SpotExtendedInfo
from spotseeker_server.org_filters import SearchFilterChain
def spot_with_noise_level(name, noise_level):
    """Create and return a Spot whose extended info records *noise_level*."""
    new_spot = Spot.objects.create(name=name)
    new_spot.spotextendedinfo_set.create(
        key='noise_level', value=noise_level)
    return new_spot
@override_settings(SPOTSEEKER_AUTH_MODULE='spotseeker_server.auth.all_ok',
                   SPOTSEEKER_SEARCH_FILTERS=(
                       'spotseeker_server.org_filters.uw_search.Filter',))
class UWNoiseLevelTestCase(TestCase):
    """Exercise the UW search filter's expansion of noise-level queries.

    Fixtures: one spot each at 'silent', 'quiet', 'moderate' and
    'variable'.  The UW filter is expected to fold 'variable' spots into
    quiet and moderate searches.
    """
    @classmethod
    def setUpClass(cls):
        # BUG FIX: the original override did not call super().setUpClass(),
        # which skips django.test.TestCase's class-level setup (including
        # the class-wide atomic transaction) and can leak fixture data
        # across test classes.
        super().setUpClass()
        cls.silent_spot = spot_with_noise_level('Silent Spot', 'silent')
        cls.quiet_spot = spot_with_noise_level('Quiet Spot', 'quiet')
        cls.moderate_spot = spot_with_noise_level('Moderate', 'moderate')
        cls.variable_spot = spot_with_noise_level('Var Spot', 'variable')
    @classmethod
    def tearDownClass(cls):
        Spot.objects.all().delete()
        # BUG FIX: matching super() call so TestCase can roll back its
        # class-level atomics.
        super().tearDownClass()
    def get_spots_for_noise_levels(self, levels):
        """Do a search for spots with particular noise levels."""
        c = self.client
        response = c.get('/api/v1/spot',
                         {'extended_info:noise_level': levels},
                         content_type='application/json')
        return json.loads(response.content)
    def assertResponseSpaces(self, res_json, spaces):
        """
        Assert that a particular decoded response contains exactly the same
        spaces as 'spaces'.
        """
        # Compare order-insensitively by sorting both sides on spot id.
        def sortfunc(spot_dict):
            return spot_dict['id']
        expected_json = [spot.json_data_structure() for spot in spaces]
        expected_json.sort(key=sortfunc)
        res_json.sort(key=sortfunc)
        self.assertEqual(expected_json, res_json)
    def test_only_silent(self):
        """Searching for silent should return only silent"""
        SearchFilterChain._load_filters()  # make sure the uw filters is loaded
        res_json = self.get_spots_for_noise_levels(['silent'])
        self.assertResponseSpaces(res_json, [self.silent_spot])
    def test_uw_only_quiet(self):
        """Quiet should return both a quiet spot and variable"""
        SearchFilterChain._load_filters()  # make sure the uw filters is loaded
        res_json = self.get_spots_for_noise_levels(['quiet'])
        expected = [self.quiet_spot, self.variable_spot]
        self.assertResponseSpaces(res_json, expected)
    def test_uw_only_moderate(self):
        """Moderate should return moderate and variable"""
        SearchFilterChain._load_filters()  # make sure the uw filters is loaded
        res_json = self.get_spots_for_noise_levels(['moderate'])
        expected = [self.moderate_spot, self.variable_spot]
        self.assertResponseSpaces(res_json, expected)
    def test_uw_silent_and_quiet(self):
        """Silent+quiet should give everything but moderate"""
        SearchFilterChain._load_filters()  # make sure the uw filters is loaded
        res_json = self.get_spots_for_noise_levels(['silent', 'quiet'])
        expected = [self.quiet_spot, self.silent_spot, self.variable_spot]
        self.assertResponseSpaces(res_json, expected)
    def test_uw_silent_and_moderate(self):
        """Silent+moderate should give everything but quiet"""
        SearchFilterChain._load_filters()  # make sure the uw filters is loaded
        res_json = self.get_spots_for_noise_levels(['silent', 'moderate'])
        expected = [self.silent_spot, self.moderate_spot, self.variable_spot]
        self.assertResponseSpaces(res_json, expected)
    def test_uw_all_three(self):
        """All 3 should give everything"""
        SearchFilterChain._load_filters()  # make sure the uw filters is loaded
        query = ['silent', 'quiet', 'moderate']
        res_json = self.get_spots_for_noise_levels(query)
        expected = [self.silent_spot,
                    self.quiet_spot,
                    self.moderate_spot,
                    self.variable_spot]
        self.assertResponseSpaces(res_json, expected)
|
apache-2.0
|
wanghaven/readthedocs.org
|
readthedocs/restapi/views/core_views.py
|
25
|
3594
|
from rest_framework import decorators, permissions, status
from rest_framework.renderers import JSONPRenderer, JSONRenderer, BrowsableAPIRenderer
from rest_framework.response import Response
import json
import requests
from django.conf import settings
from django.core.cache import cache
from django.shortcuts import get_object_or_404
from readthedocs.core.utils import clean_url, cname_to_slug
from readthedocs.builds.constants import LATEST
from readthedocs.builds.models import Version
from readthedocs.projects.models import Project
from readthedocs.core.templatetags.core_tags import make_document_url
@decorators.api_view(['GET'])
@decorators.permission_classes((permissions.AllowAny,))
@decorators.renderer_classes((JSONRenderer, JSONPRenderer, BrowsableAPIRenderer))
def cname(request):
    """
    Get the slug that a particular hostname resolves to.
    This is useful for debugging your DNS settings,
    or for getting the backing project name on Read the Docs for a URL.
    Example::
        GET https://readthedocs.org/api/v2/cname/?host=docs.python-requests.org
    This will return information about ``docs.python-requests.org``
    """
    host = request.GET.get('host')
    # The host query argument is mandatory.
    if not host:
        error_body = {'error': 'host GET arg required'}
        return Response(error_body, status=status.HTTP_400_BAD_REQUEST)
    cleaned_host = clean_url(host)
    payload = {
        'host': cleaned_host,
        'slug': cname_to_slug(cleaned_host),
    }
    return Response(payload)
@decorators.api_view(['GET'])
@decorators.permission_classes((permissions.AllowAny,))
@decorators.renderer_classes((JSONRenderer, JSONPRenderer, BrowsableAPIRenderer))
def docurl(request):
    """
    Get the url that a slug resolves to.
    Example::
        GET https://readthedocs.org/api/v2/docurl/?project=requests&version=latest&doc=index
    """
    # Version defaults to the special LATEST slug, doc to the index page.
    project_slug = request.GET.get('project')
    version_slug = request.GET.get('version', LATEST)
    doc = request.GET.get('doc', 'index')
    project = get_object_or_404(Project, slug=project_slug)
    # Restrict the lookup to versions this user may see.
    visible_versions = Version.objects.public(
        request.user, project=project, only_active=False)
    version = get_object_or_404(visible_versions, slug=version_slug)
    url = make_document_url(project=project, version=version.slug, page=doc)
    return Response({'url': url})
@decorators.api_view(['GET'])
@decorators.permission_classes((permissions.AllowAny,))
@decorators.renderer_classes((JSONRenderer, JSONPRenderer, BrowsableAPIRenderer))
def embed(request):
    """
    Embed a section of content from any Read the Docs page.
    Returns headers and content that matches the queried section.
    ### Arguments
    * project (required)
    * doc (required)
    * version (default latest)
    * section
    ### Example
    GET https://readthedocs.org/api/v2/embed/?project=requests&doc=index&section=User%20Guide
    # Current Request
    """
    project = request.GET.get('project')
    version = request.GET.get('version', LATEST)
    doc = request.GET.get('doc')
    section = request.GET.get('section')
    # Serve from the per-project cache when possible (30 minute TTL).
    # NOTE(review): the cache key only includes the project, not
    # version/doc/section — confirm whether that collision is intended.
    embed_cache = cache.get('embed:%s' % project)
    if embed_cache:
        embed = json.loads(embed_cache)
    else:
        try:
            resp = requests.get(
                '{host}/api/v1/embed/'.format(host=settings.GROK_API_HOST),
                params={'project': project, 'version': version, 'doc': doc, 'section': section}
            )
            embed = resp.json()
            cache.set('embed:%s' % project, resp.content, 1800)
        except Exception as e:
            # BUG FIX: the original formatted `e.msg`, but generic exceptions
            # have no `.msg` attribute, so the handler itself raised an
            # AttributeError and masked the real failure.  Format the
            # exception object directly instead.
            return Response({'error': '%s' % e},
                            status=status.HTTP_400_BAD_REQUEST)
    return Response(embed)
|
mit
|
SivilTaram/edx-platform
|
cms/djangoapps/contentstore/push_notification.py
|
154
|
2772
|
"""
Helper methods for push notifications from Studio.
"""
from uuid import uuid4
from django.conf import settings
from logging import exception as log_exception
from contentstore.tasks import push_course_update_task
from contentstore.models import PushNotificationConfig
from xmodule.modulestore.django import modulestore
from parse_rest.installation import Push
from parse_rest.connection import register
from parse_rest.core import ParseError
def push_notification_enabled():
    """Return whether the Studio push-notification feature is switched on."""
    enabled = PushNotificationConfig.is_enabled()
    return enabled
def enqueue_push_course_update(update, course_key):
    """
    Enqueue a push-notification task for *update* on *course_key*.

    The task is scheduled only when (1) the push-notification feature is
    enabled, (2) the update opted in via "push_notification_selected",
    and (3) the course can be loaded from the modulestore.
    """
    # Guard clauses replace the original nested conditionals; the
    # evaluation order (feature flag first, then the opt-in) is preserved.
    if not push_notification_enabled():
        return
    if not update.get("push_notification_selected"):
        return
    course = modulestore().get_course(course_key)
    if not course:
        return
    push_course_update_task.delay(
        unicode(course_key),
        course.clean_id(padding_char='_'),
        course.display_name
    )
def send_push_course_update(course_key_string, course_subscription_id, course_display_name):
    """
    Sends a push notification for a course update, given the course's
    subscription_id and display_name.  No-op when PARSE_KEYS is unset.
    """
    if not settings.PARSE_KEYS:
        return
    try:
        register(
            settings.PARSE_KEYS["APPLICATION_ID"],
            settings.PARSE_KEYS["REST_API_KEY"],
        )
        payload = {
            "action": "course.announcement",
            "notification-id": unicode(uuid4()),
            "course-id": course_key_string,
            "course-name": course_display_name,
        }
        channels = [course_subscription_id]
        # Push to all Android devices
        Push.alert(
            data=payload,
            channels={"$in": channels},
            where={"deviceType": "android"},
        )
        # Push to all iOS devices
        # With additional payload so that
        # 1. The push is displayed automatically
        # 2. The app gets it even in the background.
        # See http://stackoverflow.com/questions/19239737/silent-push-notification-in-ios-7-does-not-work
        payload.update({
            "alert": "",
            "content-available": 1
        })
        Push.alert(
            data=payload,
            channels={"$in": channels},
            where={"deviceType": "ios"},
        )
    except ParseError as error:
        log_exception(error.message)
|
agpl-3.0
|
rmsr/misc
|
coursera/algorithmic-toolbox/week5/primitive_calculator.py
|
1
|
2044
|
#!/bin/python3
"""
Primitive calculator
Given ops *3, *2, +1, what is the fewest ops to reach n from 1?
"""
import os
import sys
def main():
    """Read n from stdin, print the op count and the optimal sequence."""
    target = int(input())
    seq = optimal_sequence_linear(target)
    print(len(seq) - 1)
    print(*seq)
def optimal_sequence_linear(n):
    """
    Solve by calculating min-steps for each i in 1..n (bottom-up dynamic
    programming), then backtracking through the table to recover one
    optimal sequence from 1 to n.
    """
    steps = [0] * (n + 1)
    for i in range(1, n + 1):
        # Predecessors of i under the inverse ops: i-1, i/3, i/2.
        best = steps[i - 1]
        if i % 3 == 0 and steps[i // 3] < best:
            best = steps[i // 3]
        if i % 2 == 0 and steps[i // 2] < best:
            best = steps[i // 2]
        steps[i] = best + 1
    # Walk backwards from n, always moving to the predecessor with the
    # smallest step count (ties broken by the smaller value, matching the
    # tuple ordering of (steps, value)).
    seq = []
    current = n
    while current:
        seq.append(current)
        candidates = [(steps[current - 1], current - 1)]
        if current % 3 == 0:
            candidates.append((steps[current // 3], current // 3))
        if current % 2 == 0:
            candidates.append((steps[current // 2], current // 2))
        current = min(candidates)[1]
    seq.reverse()
    return seq
# Sentinel exception used by the BFS variant below to break out of the
# nested search loops as soon as a path down to 1 is found.
class SolutionFound(Exception): pass
def optimal_sequence_bfs_nested(n):
    """
    Solving this as BFS of a math ops DAG, each vertex with 3 edges. Whichever
    branch gets n to 1 first is the keeper. TOO SLOW.

    Paths are kept as nested tuples (value, parent_path), which keeps
    memory shared between branches but still explodes for large n.
    """
    if n == 1:
        return [1]
    # Inverse operations: /3 and /2 yield 0 (a dead end) when not divisible.
    inverse_ops = [
        lambda v: 0 if v % 3 else v // 3,
        lambda v: 0 if v % 2 else v // 2,
        lambda v: v - 1
    ]
    winner = None
    frontier = [(n,)]
    try:
        while True:
            # Advance one whole BFS level at a time.
            current_level, frontier = frontier, []
            for path in current_level:
                for nxt in [op(path[0]) for op in inverse_ops]:
                    if nxt == 1:
                        winner = path
                        raise SolutionFound
                    if nxt > 1:
                        frontier.append((nxt, path))
    except SolutionFound:
        pass
    # Unwind the nested-tuple linked list into a flat ascending sequence.
    sequence = [1]
    while winner:
        sequence.append(winner[0])
        winner = winner[1] if len(winner) > 1 else ()
    return sequence
# Script entry point.
if __name__ == "__main__":
    main()
|
isc
|
zhimin711/nova
|
nova/cells/manager.py
|
9
|
26574
|
# Copyright (c) 2012 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Cells Service Manager
"""
import datetime
import time
from oslo_log import log as logging
import oslo_messaging
from oslo_service import periodic_task
from oslo_utils import importutils
from oslo_utils import timeutils
import six
from six.moves import range
from nova.cells import messaging
from nova.cells import state as cells_state
from nova.cells import utils as cells_utils
import nova.conf
from nova import context
from nova import exception
from nova.i18n import _LW
from nova import manager
from nova import objects
from nova.objects import base as base_obj
from nova.objects import instance as instance_obj
CONF = nova.conf.CONF
LOG = logging.getLogger(__name__)
class CellsManager(manager.Manager):
"""The nova-cells manager class. This class defines RPC
methods that the local cell may call. This class is NOT used for
messages coming from other cells. That communication is
driver-specific.
Communication to other cells happens via the nova.cells.messaging module.
The MessageRunner from that module will handle routing the message to
the correct cell via the communications driver. Most methods below
create 'targeted' (where we want to route a message to a specific cell)
or 'broadcast' (where we want a message to go to multiple cells)
messages.
Scheduling requests get passed to the scheduler class.
"""
target = oslo_messaging.Target(version='1.37')
    def __init__(self, *args, **kwargs):
        # Warn on every start-up that cells is an experimental feature.
        LOG.warning(_LW('The cells feature of Nova is considered experimental '
                        'by the OpenStack project because it receives much '
                        'less testing than the rest of Nova. This may change '
                        'in the future, but current deployers should be aware '
                        'that the use of it in production right now may be '
                        'risky. Also note that cells does not currently '
                        'support rolling upgrades, it is assumed that cells '
                        'deployments are upgraded lockstep so n-1 cells '
                        'compatibility does not work.'))
        # Mostly for tests.
        cell_state_manager = kwargs.pop('cell_state_manager', None)
        super(CellsManager, self).__init__(service_name='cells',
                                           *args, **kwargs)
        if cell_state_manager is None:
            cell_state_manager = cells_state.CellStateManager
        self.state_manager = cell_state_manager()
        self.msg_runner = messaging.MessageRunner(self.state_manager)
        # The inter-cell communications driver is configurable
        # (CONF.cells.driver).
        cells_driver_cls = importutils.import_class(
            CONF.cells.driver)
        self.driver = cells_driver_cls()
        # Iterator of instance UUIDs pending sync; refilled lazily by the
        # _heal_instances periodic task.
        self.instances_to_heal = iter([])
    def post_start_hook(self):
        """Have the driver start its servers for inter-cell communication.
        Also ask our child cells for their capacities and capabilities so
        we get them more quickly than just waiting for the next periodic
        update.  Receiving the updates from the children will cause us to
        update our parents.  If we don't have any children, just update
        our parents immediately.
        """
        # FIXME(comstud): There's currently no hooks when services are
        # stopping, so we have no way to stop servers cleanly.
        self.driver.start_servers(self.msg_runner)
        ctxt = context.get_admin_context()
        if self.state_manager.get_child_cells():
            self.msg_runner.ask_children_for_capabilities(ctxt)
            self.msg_runner.ask_children_for_capacities(ctxt)
        else:
            # Leaf cell: nothing to ask below us, so push upward directly.
            self._update_our_parents(ctxt)
@periodic_task.periodic_task
def _update_our_parents(self, ctxt):
        """Update our parent cells with our capabilities and capacity
        if we're at the bottom of the tree.

        Runs as a periodic task, and is also invoked directly from
        post_start_hook() when this cell has no children.
        """
        self.msg_runner.tell_parents_our_capabilities(ctxt)
        self.msg_runner.tell_parents_our_capacities(ctxt)
@periodic_task.periodic_task
def _heal_instances(self, ctxt):
        """Periodic task to send updates for a number of instances to
        parent cells.

        On every run of the periodic task, we will attempt to sync
        'CONF.cells.instance_update_num_instances' number of instances.
        When we get the list of instances, we shuffle them so that multiple
        nova-cells services aren't attempting to sync the same instances
        in lockstep.

        If CONF.cells.instance_update_at_threshold is set, only attempt
        to sync instances that have been updated recently.  The CONF
        setting defines the maximum number of seconds old the updated_at
        can be.  Ie, a threshold of 3600 means to only update instances
        that have modified in the last hour.
        """
        if not self.state_manager.get_parent_cells():
            # No need to sync up if we have no parents.
            return
        # Mutable flag shared with the closure below: ensures the uuid
        # list is refreshed at most once per run.
        info = {'updated_list': False}

        def _next_instance():
            # Return the next instance uuid to heal, refilling the
            # iterator (once per run) when it is exhausted.
            try:
                instance = next(self.instances_to_heal)
            except StopIteration:
                if info['updated_list']:
                    return
                threshold = CONF.cells.instance_updated_at_threshold
                updated_since = None
                if threshold > 0:
                    updated_since = timeutils.utcnow() - datetime.timedelta(
                            seconds=threshold)
                self.instances_to_heal = cells_utils.get_instances_to_sync(
                        ctxt, updated_since=updated_since, shuffle=True,
                        uuids_only=True)
                info['updated_list'] = True
                try:
                    instance = next(self.instances_to_heal)
                except StopIteration:
                    return
            return instance

        # Include deleted records so deletions are propagated upward.
        rd_context = ctxt.elevated(read_deleted='yes')

        for i in range(CONF.cells.instance_update_num_instances):
            while True:
                # Yield to other greenthreads
                time.sleep(0)
                instance_uuid = _next_instance()
                if not instance_uuid:
                    return
                try:
                    instance = objects.Instance.get_by_uuid(rd_context,
                            instance_uuid)
                except exception.InstanceNotFound:
                    # Instance disappeared meanwhile; try the next uuid.
                    continue
                self._sync_instance(ctxt, instance)
                break
def _sync_instance(self, ctxt, instance):
        """Broadcast an instance_update or instance_destroy message up to
        parent cells, depending on whether the instance is deleted.
        """
        if instance.deleted:
            self.instance_destroy_at_top(ctxt, instance)
        else:
            self.instance_update_at_top(ctxt, instance)
def build_instances(self, ctxt, build_inst_kwargs):
        """Pick a cell (possibly ourselves) to build new instance(s) and
        forward the request accordingly.

        Legacy (pre-1.30 / pre-1.32 RPC) dict payloads for
        'instance_type' and 'instances' are converted to objects before
        the request is forwarded.
        """
        # Target is ourselves first.
        filter_properties = build_inst_kwargs.get('filter_properties')
        if (filter_properties is not None and
            not isinstance(filter_properties['instance_type'],
                           objects.Flavor)):
            # NOTE(danms): Handle pre-1.30 build_instances() call. Remove me
            # when we bump the RPC API version to 2.0.
            flavor = objects.Flavor(**filter_properties['instance_type'])
            build_inst_kwargs['filter_properties'] = dict(
                filter_properties, instance_type=flavor)
        instances = build_inst_kwargs['instances']
        if not isinstance(instances[0], objects.Instance):
            # NOTE(danms): Handle pre-1.32 build_instances() call. Remove me
            # when we bump the RPC API version to 2.0
            build_inst_kwargs['instances'] = instance_obj._make_instance_list(
                ctxt, objects.InstanceList(), instances, ['system_metadata',
                                                          'metadata'])
        our_cell = self.state_manager.get_my_state()
        self.msg_runner.build_instances(ctxt, our_cell, build_inst_kwargs)
def get_cell_info_for_neighbors(self, _ctxt):
"""Return cell information for our neighbor cells."""
return self.state_manager.get_cell_info_for_neighbors()
def run_compute_api_method(self, ctxt, cell_name, method_info, call):
"""Call a compute API method in a specific cell."""
response = self.msg_runner.run_compute_api_method(ctxt,
cell_name,
method_info,
call)
if call:
return response.value_or_raise()
def instance_update_at_top(self, ctxt, instance):
"""Update an instance at the top level cell."""
self.msg_runner.instance_update_at_top(ctxt, instance)
def instance_destroy_at_top(self, ctxt, instance):
"""Destroy an instance at the top level cell."""
self.msg_runner.instance_destroy_at_top(ctxt, instance)
def instance_delete_everywhere(self, ctxt, instance, delete_type):
"""This is used by API cell when it didn't know what cell
an instance was in, but the instance was requested to be
deleted or soft_deleted. So, we'll broadcast this everywhere.
"""
if isinstance(instance, dict):
instance = objects.Instance._from_db_object(ctxt,
objects.Instance(), instance)
self.msg_runner.instance_delete_everywhere(ctxt, instance,
delete_type)
def instance_fault_create_at_top(self, ctxt, instance_fault):
"""Create an instance fault at the top level cell."""
self.msg_runner.instance_fault_create_at_top(ctxt, instance_fault)
def bw_usage_update_at_top(self, ctxt, bw_update_info):
"""Update bandwidth usage at top level cell."""
self.msg_runner.bw_usage_update_at_top(ctxt, bw_update_info)
def sync_instances(self, ctxt, project_id, updated_since, deleted):
"""Force a sync of all instances, potentially by project_id,
and potentially since a certain date/time.
"""
self.msg_runner.sync_instances(ctxt, project_id, updated_since,
deleted)
def service_get_all(self, ctxt, filters):
        """Return services in this cell and in all child cells."""
        responses = self.msg_runner.service_get_all(ctxt, filters)
        ret_services = []
        # 1 response per cell. Each response is a list of services.
        for response in responses:
            services = response.value_or_raise()
            for service in services:
                # Tag each entry with the cell it came from so the caller
                # can tell services from different cells apart.
                service = cells_utils.add_cell_to_service(
                    service, response.cell_name)
                ret_services.append(service)
        return ret_services
@oslo_messaging.expected_exceptions(exception.CellRoutingInconsistency)
def service_get_by_compute_host(self, ctxt, host_name):
"""Return a service entry for a compute host in a certain cell."""
cell_name, host_name = cells_utils.split_cell_and_item(host_name)
response = self.msg_runner.service_get_by_compute_host(ctxt,
cell_name,
host_name)
service = response.value_or_raise()
service = cells_utils.add_cell_to_service(service, response.cell_name)
return service
def get_host_uptime(self, ctxt, host_name):
"""Return host uptime for a compute host in a certain cell
:param host_name: fully qualified hostname. It should be in format of
parent!child@host_id
"""
cell_name, host_name = cells_utils.split_cell_and_item(host_name)
response = self.msg_runner.get_host_uptime(ctxt, cell_name,
host_name)
return response.value_or_raise()
def service_update(self, ctxt, host_name, binary, params_to_update):
"""Used to enable/disable a service. For compute services, setting to
disabled stops new builds arriving on that host.
:param host_name: the name of the host machine that the service is
running
:param binary: The name of the executable that the service runs as
:param params_to_update: eg. {'disabled': True}
:returns: the service reference
"""
cell_name, host_name = cells_utils.split_cell_and_item(host_name)
response = self.msg_runner.service_update(
ctxt, cell_name, host_name, binary, params_to_update)
service = response.value_or_raise()
service = cells_utils.add_cell_to_service(service, response.cell_name)
return service
def service_delete(self, ctxt, cell_service_id):
"""Deletes the specified service."""
cell_name, service_id = cells_utils.split_cell_and_item(
cell_service_id)
self.msg_runner.service_delete(ctxt, cell_name, service_id)
@oslo_messaging.expected_exceptions(exception.CellRoutingInconsistency)
def proxy_rpc_to_manager(self, ctxt, topic, rpc_message, call, timeout):
"""Proxy an RPC message as-is to a manager."""
compute_topic = CONF.compute_topic
cell_and_host = topic[len(compute_topic) + 1:]
cell_name, host_name = cells_utils.split_cell_and_item(cell_and_host)
response = self.msg_runner.proxy_rpc_to_manager(ctxt, cell_name,
host_name, topic, rpc_message, call, timeout)
return response.value_or_raise()
def task_log_get_all(self, ctxt, task_name, period_beginning,
                     period_ending, host=None, state=None):
        """Get task logs from the DB from all cells or a particular
        cell.

        If 'host' is not None, host will be of the format 'cell!name@host',
        with '@host' being optional.  The query will be directed to the
        appropriate cell and return all task logs, or task logs matching
        the host if specified.

        'state' also may be None.  If it's not, filter by the state as well.

        :returns: list of task log entries, each tagged with the name of
            the cell it came from
        """
        if host is None:
            cell_name = None
        else:
            cell_name, host = cells_utils.split_cell_and_item(host)
            # If no cell name was given, assume that the host name is the
            # cell_name and that the target is all hosts
            if cell_name is None:
                cell_name, host = host, cell_name
        responses = self.msg_runner.task_log_get_all(ctxt, cell_name,
                task_name, period_beginning, period_ending,
                host=host, state=state)
        # 1 response per cell. Each response is a list of task log
        # entries.
        ret_task_logs = []
        for response in responses:
            task_logs = response.value_or_raise()
            for task_log in task_logs:
                # Tag each entry with its originating cell.
                cells_utils.add_cell_to_task_log(task_log,
                                                 response.cell_name)
                ret_task_logs.append(task_log)
        return ret_task_logs
@oslo_messaging.expected_exceptions(exception.CellRoutingInconsistency)
def compute_node_get(self, ctxt, compute_id):
"""Get a compute node by ID in a specific cell."""
cell_name, compute_id = cells_utils.split_cell_and_item(
compute_id)
response = self.msg_runner.compute_node_get(ctxt, cell_name,
compute_id)
node = response.value_or_raise()
node = cells_utils.add_cell_to_compute_node(node, cell_name)
return node
def compute_node_get_all(self, ctxt, hypervisor_match=None):
"""Return list of compute nodes in all cells."""
responses = self.msg_runner.compute_node_get_all(ctxt,
hypervisor_match=hypervisor_match)
# 1 response per cell. Each response is a list of compute_node
# entries.
ret_nodes = []
for response in responses:
nodes = response.value_or_raise()
for node in nodes:
node = cells_utils.add_cell_to_compute_node(node,
response.cell_name)
ret_nodes.append(node)
return ret_nodes
def compute_node_stats(self, ctxt):
        """Return compute node stats totals summed across all cells.

        Each cell replies with a mapping of stat name to a numeric value;
        the per-cell values are accumulated key-by-key into one dict.
        """
        responses = self.msg_runner.compute_node_stats(ctxt)
        totals = {}
        for response in responses:
            data = response.value_or_raise()
            for key, val in six.iteritems(data):
                totals.setdefault(key, 0)
                totals[key] += val
        return totals
def actions_get(self, ctxt, cell_name, instance_uuid):
response = self.msg_runner.actions_get(ctxt, cell_name, instance_uuid)
return response.value_or_raise()
def action_get_by_request_id(self, ctxt, cell_name, instance_uuid,
request_id):
response = self.msg_runner.action_get_by_request_id(ctxt, cell_name,
instance_uuid,
request_id)
return response.value_or_raise()
def action_events_get(self, ctxt, cell_name, action_id):
response = self.msg_runner.action_events_get(ctxt, cell_name,
action_id)
return response.value_or_raise()
def consoleauth_delete_tokens(self, ctxt, instance_uuid):
"""Delete consoleauth tokens for an instance in API cells."""
self.msg_runner.consoleauth_delete_tokens(ctxt, instance_uuid)
def validate_console_port(self, ctxt, instance_uuid, console_port,
console_type):
"""Validate console port with child cell compute node."""
instance = objects.Instance.get_by_uuid(ctxt, instance_uuid)
if not instance.cell_name:
raise exception.InstanceUnknownCell(instance_uuid=instance_uuid)
response = self.msg_runner.validate_console_port(ctxt,
instance.cell_name, instance_uuid, console_port,
console_type)
return response.value_or_raise()
def get_capacities(self, ctxt, cell_name):
return self.state_manager.get_capacities(cell_name)
def bdm_update_or_create_at_top(self, ctxt, bdm, create=None):
"""BDM was created/updated in this cell. Tell the API cells."""
# TODO(ndipanov): Move inter-cell RPC to use objects
bdm = base_obj.obj_to_primitive(bdm)
self.msg_runner.bdm_update_or_create_at_top(ctxt, bdm, create=create)
def bdm_destroy_at_top(self, ctxt, instance_uuid, device_name=None,
volume_id=None):
"""BDM was destroyed for instance in this cell. Tell the API cells."""
self.msg_runner.bdm_destroy_at_top(ctxt, instance_uuid,
device_name=device_name,
volume_id=volume_id)
def get_migrations(self, ctxt, filters):
        """Fetch migrations from cells, applying the given filters.

        When 'cell_name' is present in *filters*, the query is targeted
        at that child cell (addressed relative to this cell via the path
        separator); otherwise all cells are queried.
        """
        target_cell = None
        if "cell_name" in filters:
            _path_cell_sep = cells_utils.PATH_CELL_SEP
            target_cell = '%s%s%s' % (CONF.cells.name, _path_cell_sep,
                                      filters['cell_name'])
        responses = self.msg_runner.get_migrations(ctxt, target_cell,
                                                   False, filters)
        migrations = []
        for response in responses:
            # value_or_raise() surfaces any remote error from that cell.
            migrations += response.value_or_raise()
        return migrations
def instance_update_from_api(self, ctxt, instance, expected_vm_state,
expected_task_state, admin_state_reset):
"""Update an instance in its cell."""
self.msg_runner.instance_update_from_api(ctxt, instance,
expected_vm_state,
expected_task_state,
admin_state_reset)
def start_instance(self, ctxt, instance):
"""Start an instance in its cell."""
self.msg_runner.start_instance(ctxt, instance)
def stop_instance(self, ctxt, instance, do_cast=True,
clean_shutdown=True):
"""Stop an instance in its cell."""
response = self.msg_runner.stop_instance(ctxt, instance,
do_cast=do_cast,
clean_shutdown=clean_shutdown)
if not do_cast:
return response.value_or_raise()
def cell_create(self, ctxt, values):
return self.state_manager.cell_create(ctxt, values)
def cell_update(self, ctxt, cell_name, values):
return self.state_manager.cell_update(ctxt, cell_name, values)
def cell_delete(self, ctxt, cell_name):
return self.state_manager.cell_delete(ctxt, cell_name)
def cell_get(self, ctxt, cell_name):
return self.state_manager.cell_get(ctxt, cell_name)
def reboot_instance(self, ctxt, instance, reboot_type):
"""Reboot an instance in its cell."""
self.msg_runner.reboot_instance(ctxt, instance, reboot_type)
def pause_instance(self, ctxt, instance):
"""Pause an instance in its cell."""
self.msg_runner.pause_instance(ctxt, instance)
def unpause_instance(self, ctxt, instance):
"""Unpause an instance in its cell."""
self.msg_runner.unpause_instance(ctxt, instance)
def suspend_instance(self, ctxt, instance):
"""Suspend an instance in its cell."""
self.msg_runner.suspend_instance(ctxt, instance)
def resume_instance(self, ctxt, instance):
"""Resume an instance in its cell."""
self.msg_runner.resume_instance(ctxt, instance)
def terminate_instance(self, ctxt, instance, delete_type='delete'):
"""Delete an instance in its cell."""
# NOTE(rajesht): The `delete_type` parameter is passed so that it will
# be routed to destination cell, where instance deletion will happen.
self.msg_runner.terminate_instance(ctxt, instance,
delete_type=delete_type)
def soft_delete_instance(self, ctxt, instance):
"""Soft-delete an instance in its cell."""
self.msg_runner.soft_delete_instance(ctxt, instance)
def resize_instance(self, ctxt, instance, flavor,
extra_instance_updates,
clean_shutdown=True):
"""Resize an instance in its cell."""
self.msg_runner.resize_instance(ctxt, instance,
flavor, extra_instance_updates,
clean_shutdown=clean_shutdown)
def live_migrate_instance(self, ctxt, instance, block_migration,
disk_over_commit, host_name):
"""Live migrate an instance in its cell."""
self.msg_runner.live_migrate_instance(ctxt, instance,
block_migration,
disk_over_commit,
host_name)
def revert_resize(self, ctxt, instance):
"""Revert a resize for an instance in its cell."""
self.msg_runner.revert_resize(ctxt, instance)
def confirm_resize(self, ctxt, instance):
"""Confirm a resize for an instance in its cell."""
self.msg_runner.confirm_resize(ctxt, instance)
def reset_network(self, ctxt, instance):
"""Reset networking for an instance in its cell."""
self.msg_runner.reset_network(ctxt, instance)
def inject_network_info(self, ctxt, instance):
"""Inject networking for an instance in its cell."""
self.msg_runner.inject_network_info(ctxt, instance)
def snapshot_instance(self, ctxt, instance, image_id):
"""Snapshot an instance in its cell."""
self.msg_runner.snapshot_instance(ctxt, instance, image_id)
def backup_instance(self, ctxt, instance, image_id, backup_type, rotation):
"""Backup an instance in its cell."""
self.msg_runner.backup_instance(ctxt, instance, image_id,
backup_type, rotation)
def rebuild_instance(self, ctxt, instance, image_href, admin_password,
files_to_inject, preserve_ephemeral, kwargs):
self.msg_runner.rebuild_instance(ctxt, instance, image_href,
admin_password, files_to_inject,
preserve_ephemeral, kwargs)
def set_admin_password(self, ctxt, instance, new_pass):
self.msg_runner.set_admin_password(ctxt, instance, new_pass)
def get_keypair_at_top(self, ctxt, user_id, name):
        """Find keypair *name* for *user_id* by querying cells.

        :returns: the first non-None keypair found, or None when no cell
            has it.  If the same keypair name exists in several cells, a
            warning is logged and the first cell's value is returned.
        """
        responses = self.msg_runner.get_keypair_at_top(ctxt, user_id, name)
        keypairs = [resp.value for resp in responses if resp.value is not None]
        if len(keypairs) == 0:
            return None
        elif len(keypairs) > 1:
            cell_names = ', '.join([resp.cell_name for resp in responses
                                    if resp.value is not None])
            LOG.warning(_LW("The same keypair name '%(name)s' exists in the "
                            "following cells: %(cell_names)s. The keypair "
                            "value from the first cell is returned."),
                        {'name': name, 'cell_names': cell_names})
        return keypairs[0]
|
apache-2.0
|
zahodi/ansible
|
lib/ansible/modules/web_infrastructure/htpasswd.py
|
47
|
9177
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Nimbis Services, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = """
module: htpasswd
version_added: "1.3"
short_description: manage user files for basic authentication
description:
- Add and remove username/password entries in a password file using htpasswd.
- This is used by web servers such as Apache and Nginx for basic authentication.
options:
path:
required: true
aliases: [ dest, destfile ]
description:
- Path to the file that contains the usernames and passwords
name:
required: true
aliases: [ username ]
description:
- User name to add or remove
password:
required: false
description:
- Password associated with user.
- Must be specified if user does not exist yet.
crypt_scheme:
required: false
choices: ["apr_md5_crypt", "des_crypt", "ldap_sha1", "plaintext"]
default: "apr_md5_crypt"
description:
- Encryption scheme to be used. As well as the four choices listed
here, you can also use any other hash supported by passlib, such as
md5_crypt and sha256_crypt, which are linux passwd hashes. If you
do so the password file will not be compatible with Apache or Nginx
state:
required: false
choices: [ present, absent ]
default: "present"
description:
- Whether the user entry should be present or not
create:
required: false
choices: [ "yes", "no" ]
default: "yes"
description:
- Used with C(state=present). If specified, the file will be created
if it does not already exist. If set to "no", will fail if the
file does not exist
notes:
- "This module depends on the I(passlib) Python library, which needs to be installed on all target systems."
- "On Debian, Ubuntu, or Fedora: install I(python-passlib)."
- "On RHEL or CentOS: Enable EPEL, then install I(python-passlib)."
requires: [ passlib>=1.6 ]
author: "Ansible Core Team"
"""
EXAMPLES = """
# Add a user to a password file and ensure permissions are set
- htpasswd:
path: /etc/nginx/passwdfile
name: janedoe
password: '9s36?;fyNp'
owner: root
group: www-data
mode: 0640
# Remove a user from a password file
- htpasswd:
path: /etc/apache2/passwdfile
name: foobar
state: absent
# Add a user to a password file suitable for use by libpam-pwdfile
- htpasswd:
path: /etc/mail/passwords
name: alex
password: oedu2eGh
crypt_scheme: md5_crypt
"""
import os
import tempfile
from distutils.version import StrictVersion
try:
from passlib.apache import HtpasswdFile, htpasswd_context
from passlib.context import CryptContext
import passlib
except ImportError:
passlib_installed = False
else:
passlib_installed = True
apache_hashes = ["apr_md5_crypt", "des_crypt", "ldap_sha1", "plaintext"]
def create_missing_directories(dest):
    """Ensure the parent directory of *dest* exists, creating it if needed.

    :param dest: path of the target file (the file itself is not created)

    Fixes two defects in the original:
    - *dest* with no directory component made os.path.dirname() return ''
      and os.makedirs('') raise OSError; now a no-op.
    - TOCTOU race: another process could create the directory between the
      exists() check and makedirs(); that OSError is now tolerated.
    """
    destpath = os.path.dirname(dest)
    if destpath and not os.path.exists(destpath):
        try:
            os.makedirs(destpath)
        except OSError:
            # Only re-raise if the directory still doesn't exist (i.e. a
            # real failure, not a benign creation race).
            if not os.path.isdir(destpath):
                raise
def present(dest, username, password, crypt_scheme, create, check_mode):
    """Ensure *username* is present in the password file *dest*.

    :param dest: path of the htpasswd file
    :param username: user name to add or update
    :param password: plaintext password to hash and store
    :param crypt_scheme: passlib hash scheme; non-Apache schemes get a
        custom CryptContext so existing Apache-style entries still verify
    :param create: allow creating *dest* when it does not exist
    :param check_mode: if True, report what would change without writing
    :returns: tuple ``(msg, changed)``
    :raises ValueError: if *dest* is missing and *create* is false
    """
    if crypt_scheme in apache_hashes:
        context = htpasswd_context
    else:
        context = CryptContext(schemes = [ crypt_scheme ] + apache_hashes)
    if not os.path.exists(dest):
        if not create:
            raise ValueError('Destination %s does not exist' % dest)
        if check_mode:
            return ("Create %s" % dest, True)
        create_missing_directories(dest)
        # passlib 1.6 renamed the HtpasswdFile constructor arguments.
        if StrictVersion(passlib.__version__) >= StrictVersion('1.6'):
            ht = HtpasswdFile(dest, new=True, default_scheme=crypt_scheme, context=context)
        else:
            ht = HtpasswdFile(dest, autoload=False, default=crypt_scheme, context=context)
        # passlib 1.6 renamed update() to set_password().
        if getattr(ht, 'set_password', None):
            ht.set_password(username, password)
        else:
            ht.update(username, password)
        ht.save()
        return ("Created %s and added %s" % (dest, username), True)
    else:
        if StrictVersion(passlib.__version__) >= StrictVersion('1.6'):
            ht = HtpasswdFile(dest, new=False, default_scheme=crypt_scheme, context=context)
        else:
            ht = HtpasswdFile(dest, default=crypt_scheme, context=context)
        found = None
        # check_password() (passlib >= 1.6) / verify() (older) return True
        # when the user already exists with this exact password.
        if getattr(ht, 'check_password', None):
            found = ht.check_password(username, password)
        else:
            found = ht.verify(username, password)
        if found:
            return ("%s already present" % username, False)
        else:
            if not check_mode:
                if getattr(ht, 'set_password', None):
                    ht.set_password(username, password)
                else:
                    ht.update(username, password)
                ht.save()
            return ("Add/update %s" % username, True)
def absent(dest, username, check_mode):
    """Ensure *username* is absent from the password file *dest*.

    :param dest: path of the htpasswd file (existence checked by caller)
    :param username: user entry to remove
    :param check_mode: if True, report what would change without writing
    :returns: tuple ``(msg, changed)``
    """
    # passlib 1.6 renamed the HtpasswdFile constructor arguments.
    if StrictVersion(passlib.__version__) >= StrictVersion('1.6'):
        ht = HtpasswdFile(dest, new=False)
    else:
        ht = HtpasswdFile(dest)
    if username not in ht.users():
        return ("%s not present" % username, False)
    else:
        if not check_mode:
            ht.delete(username)
            ht.save()
        return ("Remove %s" % username, True)
def check_file_attrs(module, changed, message):
    """Apply the common file-attribute arguments (owner/group/mode/SELinux)
    and fold the result into the (message, changed) pair being reported.

    :returns: tuple ``(message, changed)`` updated to reflect any
        attribute change made by the module.
    """
    file_args = module.load_file_common_arguments(module.params)
    attrs_changed = module.set_fs_attributes_if_different(file_args, False)
    if attrs_changed:
        # Join with " and " only when something was already reported.
        separator = " and " if changed else ""
        message += separator + "ownership, perms or SE linux context changed"
        changed = True
    return message, changed
def main():
    """Module entry point: ensure a htpasswd entry is present or absent.

    Reads module params, sanitizes the target file (passlib < 1.6 chokes
    on blank lines), dispatches to present()/absent(), then applies file
    attribute arguments before reporting.
    """
    arg_spec = dict(
        path=dict(required=True, aliases=["dest", "destfile"]),
        name=dict(required=True, aliases=["username"]),
        password=dict(required=False, default=None, no_log=True),
        crypt_scheme=dict(required=False, default="apr_md5_crypt"),
        state=dict(required=False, default="present"),
        create=dict(type='bool', default='yes'),
    )
    module = AnsibleModule(argument_spec=arg_spec,
                           add_file_common_args=True,
                           supports_check_mode=True)
    path = module.params['path']
    username = module.params['name']
    password = module.params['password']
    crypt_scheme = module.params['crypt_scheme']
    state = module.params['state']
    create = module.params['create']
    check_mode = module.check_mode

    if not passlib_installed:
        module.fail_json(msg="This module requires the passlib Python library")

    # Check file for blank lines in effort to avoid "need more than 1 value to unpack" error.
    try:
        f = open(path, "r")
    except IOError:
        # No preexisting file to remove blank lines from
        f = None
    else:
        try:
            lines = f.readlines()
        finally:
            f.close()

        # Only rewrite the file when it actually contains blank lines.
        strip = False
        for line in lines:
            if not line.strip():
                strip = True
                break

        if strip:
            # In check mode, redirect the rewrite to a temporary file so
            # the real file is left untouched.
            if check_mode:
                temp = tempfile.NamedTemporaryFile()
                path = temp.name
            f = open(path, "w")
            try:
                # Write back only non-blank lines.  (Was a list
                # comprehension used for its side effects.)
                for line in lines:
                    if line.strip():
                        f.write(line)
            finally:
                f.close()

    try:
        if state == 'present':
            (msg, changed) = present(path, username, password, crypt_scheme, create, check_mode)
        elif state == 'absent':
            if not os.path.exists(path):
                module.exit_json(msg="%s not present" % username,
                                 warnings="%s does not exist" % path, changed=False)
            (msg, changed) = absent(path, username, check_mode)
        else:
            module.fail_json(msg="Invalid state: %s" % state)

        # BUG FIX: the return value was previously discarded, so ownership/
        # permission changes were never reflected in 'msg' or 'changed'.
        (msg, changed) = check_file_attrs(module, changed, msg)
        module.exit_json(msg=msg, changed=changed)
    except Exception:
        e = get_exception()
        module.fail_json(msg=str(e))
# import module snippets
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
if __name__ == '__main__':
main()
|
gpl-3.0
|
argentumproject/electrum-arg
|
lib/plugins.py
|
1
|
18775
|
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2015 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from collections import namedtuple
import traceback
import sys
import os
import imp
import pkgutil
import time
from util import *
from i18n import _
from util import profiler, PrintError, DaemonThread, UserCancelled
plugin_loaders = {}
hook_names = set()
hooks = {}
class Plugins(DaemonThread):
    """Discovers, registers and manages Electrum plugins.

    Runs as a daemon thread so plugin jobs (and the device manager's
    jobs) are serviced periodically via run_jobs().
    """

    @profiler
    def __init__(self, config, is_local, gui_name):
        """Scan the plugin package and load the enabled plugins.

        :param config: client config; 'use_<name>' keys decide which
            plugins are enabled
        :param is_local: True when running from a source checkout, in
            which case the local 'plugins' package is used
        :param gui_name: active GUI name; only plugins listing it in
            'available_for' are considered
        """
        DaemonThread.__init__(self)
        if is_local:
            find = imp.find_module('plugins')
            plugins = imp.load_module('electrum_arg_plugins', *find)
        else:
            plugins = __import__('electrum_arg_plugins')
        self.pkgpath = os.path.dirname(plugins.__file__)
        self.config = config
        self.hw_wallets = {}    # name -> (gui_good, details) for hardware keystores
        self.plugins = {}       # name -> loaded Plugin instance
        self.gui_name = gui_name
        self.descriptions = {}  # name -> plugin module __dict__
        self.device_manager = DeviceMgr(config)
        self.load_plugins()
        self.add_jobs(self.device_manager.thread_jobs())
        self.start()

    def load_plugins(self):
        """Register all available plugins; load those that are enabled."""
        for loader, name, ispkg in pkgutil.iter_modules([self.pkgpath]):
            # do not load deprecated plugins
            if name in ['plot', 'exchange_rate']:
                continue
            m = loader.find_module(name).load_module(name)
            d = m.__dict__
            gui_good = self.gui_name in d.get('available_for', [])
            if not gui_good:
                continue
            details = d.get('registers_wallet_type')
            if details:
                self.register_wallet_type(name, gui_good, details)
            details = d.get('registers_keystore')
            if details:
                self.register_keystore(name, gui_good, details)
            self.descriptions[name] = d
            if not d.get('requires_wallet_type') and self.config.get('use_' + name):
                try:
                    self.load_plugin(name)
                except BaseException as e:
                    traceback.print_exc(file=sys.stdout)
                    self.print_error("cannot initialize plugin %s:" % name, str(e))

    def get(self, name):
        """Return the loaded plugin instance for *name*, or None."""
        return self.plugins.get(name)

    def count(self):
        """Return the number of loaded plugins."""
        return len(self.plugins)

    def load_plugin(self, name):
        """Load and instantiate plugin *name* for the active GUI.

        :returns: the Plugin instance (the existing one if already loaded
            -- previously this returned None in that case)
        :raises RuntimeError: when the plugin has no implementation for
            the active GUI
        """
        if name in self.plugins:
            return self.plugins[name]
        full_name = 'electrum_arg_plugins.' + name + '.' + self.gui_name
        loader = pkgutil.find_loader(full_name)
        if not loader:
            raise RuntimeError("%s implementation for %s plugin not found"
                               % (self.gui_name, name))
        p = loader.load_module(full_name)
        plugin = p.Plugin(self, self.config, name)
        self.add_jobs(plugin.thread_jobs())
        self.plugins[name] = plugin
        self.print_error("loaded", name)
        return plugin

    def close_plugin(self, plugin):
        """Remove a closing plugin's jobs from this thread."""
        self.remove_jobs(plugin.thread_jobs())

    def enable(self, name):
        """Persistently enable plugin *name* and return its instance."""
        self.config.set_key('use_' + name, True, True)
        p = self.get(name)
        if p:
            return p
        return self.load_plugin(name)

    def disable(self, name):
        """Persistently disable plugin *name*, closing it if loaded."""
        self.config.set_key('use_' + name, False, True)
        p = self.get(name)
        if not p:
            return
        self.plugins.pop(name)
        p.close()
        self.print_error("closed", name)

    def toggle(self, name):
        """Flip the enabled state; returns the plugin when enabling."""
        p = self.get(name)
        return self.disable(name) if p else self.enable(name)

    def is_available(self, name, w):
        """Whether plugin *name* can be used with wallet *w*.

        Requires that every python package listed under 'requires' is
        importable and that the wallet type satisfies
        'requires_wallet_type' (if declared).
        """
        d = self.descriptions.get(name)
        if not d:
            return False
        deps = d.get('requires', [])
        for dep, s in deps:
            try:
                __import__(dep)
            except ImportError:
                return False
        requires = d.get('requires_wallet_type', [])
        return not requires or w.wallet_type in requires

    def get_hardware_support(self):
        """Return [name, description, plugin] for enabled hardware plugins."""
        out = []
        for name, (gui_good, details) in self.hw_wallets.items():
            if gui_good:
                try:
                    p = self.get_plugin(name)
                    if p.is_enabled():
                        out.append([name, details[2], p])
                # BUG FIX: was a bare 'except:', which would also swallow
                # KeyboardInterrupt/SystemExit.
                except Exception:
                    traceback.print_exc()
                    self.print_error("cannot load plugin for:", name)
        return out

    def register_wallet_type(self, name, gui_good, wallet_type):
        """Register *wallet_type* with a constructor that imports the
        plugin lazily the first time such a wallet is built."""
        from wallet import register_wallet_type, register_constructor
        self.print_error("registering wallet type", (wallet_type, name))

        def loader():
            plugin = self.get_plugin(name)
            register_constructor(wallet_type, plugin.wallet_class)

        register_wallet_type(wallet_type)
        plugin_loaders[wallet_type] = loader

    def register_keystore(self, name, gui_good, details):
        """Register a hardware keystore constructor that loads the plugin
        lazily on first use."""
        from keystore import register_keystore

        def dynamic_constructor(d):
            return self.get_plugin(name).keystore_class(d)

        if details[0] == 'hardware':
            self.hw_wallets[name] = (gui_good, details)
            self.print_error("registering hardware %s: %s" % (name, details))
            register_keystore(details[1], dynamic_constructor)

    def get_plugin(self, name):
        """Return plugin *name*, loading it on first use."""
        if name not in self.plugins:
            self.load_plugin(name)
        return self.plugins[name]

    def run(self):
        """Daemon thread main loop: service jobs until stopped."""
        while self.is_running():
            time.sleep(0.1)
            self.run_jobs()
        self.on_stop()
def hook(func):
    """Decorator: register *func*'s name as a known hook point.

    Plugins that define a method with a registered name are wired into
    that hook by BasePlugin.__init__.
    """
    # BUG FIX: func.func_name is Python-2-only; func.__name__ exists on
    # both Python 2 and 3.
    hook_names.add(func.__name__)
    return func
def run_hook(name, *args):
    """Invoke every enabled plugin's implementation of hook *name*.

    Falsy results are discarded.  At most one plugin may return a truthy
    result (asserted below); that single result is returned, otherwise
    None.
    """
    results = []
    f_list = hooks.get(name, [])
    for p, f in f_list:
        if p.is_enabled():
            try:
                r = f(*args)
            except Exception:
                # A misbehaving plugin must not break the caller.
                print_error("Plugin error")
                traceback.print_exc(file=sys.stdout)
                r = False
            if r:
                results.append(r)
    if results:
        assert len(results) == 1, results
        return results[0]
class BasePlugin(PrintError):
    """Base class for all Electrum plugins.

    On construction, every method whose name matches a registered hook
    name (see the @hook decorator) is appended to the global 'hooks'
    registry; close() removes those entries again.
    """

    def __init__(self, parent, config, name):
        self.parent = parent  # The plugins object
        self.name = name
        self.config = config
        self.wallet = None
        # add self to hooks
        for k in dir(self):
            if k in hook_names:
                l = hooks.get(k, [])
                l.append((self, getattr(self, k)))
                hooks[k] = l

    def diagnostic_name(self):
        # Name used by PrintError for log prefixes.
        return self.name

    def __str__(self):
        return self.name

    def close(self):
        """Unregister this plugin's hooks and notify the plugin manager."""
        # remove self from hooks
        for k in dir(self):
            if k in hook_names:
                l = hooks.get(k, [])
                l.remove((self, getattr(self, k)))
                hooks[k] = l
        self.parent.close_plugin(self)
        self.on_close()

    def on_close(self):
        # Subclass hook; called after the plugin has been closed.
        pass

    def requires_settings(self):
        # Whether the GUI should show a settings button for this plugin.
        return False

    def thread_jobs(self):
        # Jobs to be serviced by the Plugins daemon thread.
        return []

    def is_enabled(self):
        return self.is_available() and self.config.get('use_'+self.name) is True

    def is_available(self):
        return True

    def settings_dialog(self):
        # Subclass hook; open a settings dialog if the plugin has one.
        pass
class DeviceNotFoundError(Exception):
    """Raised when a requested hardware device cannot be found."""
    pass
class DeviceUnpairableError(Exception):
    """Raised when a hardware device cannot be paired with a wallet."""
    pass
# A connected hardware device.  id_ is the HID ID (serial number) used by
# DeviceMgr for tracking/pairing; product_key is presumably the
# (vendor_id, product_id) pair used for recognition -- TODO confirm.
Device = namedtuple("Device", "path interface_number id_ product_key")
# Info about a device: 'device' is a Device; 'label' looks like the
# user-visible name and 'initialized' whether the hardware wallet has
# been set up -- NOTE(review): semantics inferred from field names, confirm.
DeviceInfo = namedtuple("DeviceInfo", "device label initialized")
class DeviceMgr(ThreadJob, PrintError):
    '''Manages hardware clients. A client communicates over a hardware
    channel with the device.

    In addition to tracking device HID IDs, the device manager tracks
    hardware wallets and manages wallet pairing. A HID ID may be
    paired with a wallet when it is confirmed that the hardware device
    matches the wallet, i.e. they have the same master public key. A
    HID ID can be unpaired if e.g. it is wiped.

    Because of hotplugging, a wallet must request its client
    dynamically each time it is required, rather than caching it
    itself.

    The device manager is shared across plugins, so just one place
    does hardware scans when needed. By tracking HID IDs, if a device
    is plugged into a different port the wallet is automatically
    re-paired.

    Wallets are informed on connect / disconnect events. It must
    implement connected(), disconnected() callbacks. Being connected
    implies a pairing. Callbacks can happen in any thread context,
    and we do them without holding the lock.

    Confusingly, the HID ID (serial number) reported by the HID system
    doesn't match the device ID reported by the device itself. We use
    the HID IDs.

    This plugin is thread-safe. Currently only devices supported by
    hidapi are implemented.'''

    def __init__(self, config):
        super(DeviceMgr, self).__init__()
        # Keyed by xpub. The value is the device id if the device
        # has been paired, and None otherwise.
        self.xpub_ids = {}
        # A list of clients. The key is the client, the value is
        # a (path, id_) pair.
        self.clients = {}
        # What we recognise. Each entry is a (vendor_id, product_id)
        # pair.
        self.recognised_hardware = set()
        # For synchronization
        self.lock = threading.RLock()
        self.hid_lock = threading.RLock()
        self.config = config

    def thread_jobs(self):
        # Thread job to handle device timeouts
        return [self]

    def run(self):
        '''Handle device timeouts. Runs in the context of the Plugins
        thread.'''
        with self.lock:
            clients = list(self.clients.keys())
        cutoff = time.time() - self.config.get_session_timeout()
        for client in clients:
            client.timeout(cutoff)

    def register_devices(self, device_pairs):
        # Declare (vendor_id, product_id) pairs this manager recognises.
        for pair in device_pairs:
            self.recognised_hardware.add(pair)

    def create_client(self, device, handler, plugin):
        # Get from cache first
        client = self.client_lookup(device.id_)
        if client:
            return client
        client = plugin.create_client(device, handler)
        if client:
            self.print_error("Registering", client)
            with self.lock:
                self.clients[client] = (device.path, device.id_)
        return client

    def xpub_id(self, xpub):
        # Device id paired with xpub, or None.
        with self.lock:
            return self.xpub_ids.get(xpub)

    def xpub_by_id(self, id_):
        # Reverse lookup: the xpub paired with device id_, or None.
        with self.lock:
            for xpub, xpub_id in self.xpub_ids.items():
                if xpub_id == id_:
                    return xpub
            return None

    def unpair_xpub(self, xpub):
        # Forget the pairing for xpub; close its client (outside the lock).
        with self.lock:
            if not xpub in self.xpub_ids:
                return
            _id = self.xpub_ids.pop(xpub)
            client = self.client_lookup(_id)
            self.clients.pop(client, None)
        if client:
            client.close()

    def unpair_id(self, id_):
        # Unpair by device id instead of xpub.
        xpub = self.xpub_by_id(id_)
        if xpub:
            self.unpair_xpub(xpub)

    def pair_xpub(self, xpub, id_):
        with self.lock:
            self.xpub_ids[xpub] = id_

    def client_lookup(self, id_):
        # Registered client for device id_, or None.
        with self.lock:
            for client, (path, client_id) in self.clients.items():
                if client_id == id_:
                    return client
            return None

    def client_by_id(self, id_):
        '''Returns a client for the device ID if one is registered. If
        a device is wiped or in bootloader mode pairing is impossible;
        in such cases we communicate by device ID and not wallet.'''
        self.scan_devices()
        return self.client_lookup(id_)

    def client_for_keystore(self, plugin, handler, keystore, force_pair):
        # Get (or, if force_pair, establish) a client for the device
        # backing *keystore*; updates the handler's status display.
        self.print_error("getting client for keystore")
        handler.update_status(False)
        devices = self.scan_devices()
        xpub = keystore.xpub
        derivation = keystore.get_derivation()
        client = self.client_by_xpub(plugin, xpub, handler, devices)
        if client is None and force_pair:
            info = self.select_device(plugin, handler, keystore, devices)
            client = self.force_pair_xpub(plugin, handler, info, xpub, derivation, devices)
        if client:
            handler.update_status(True)
        self.print_error("end client for keystore")
        return client

    def client_by_xpub(self, plugin, xpub, handler, devices):
        # Client already paired with *xpub*, creating it from a scanned
        # device if necessary; None when the device is not present.
        _id = self.xpub_id(xpub)
        client = self.client_lookup(_id)
        if client:
            # An unpaired client might have another wallet's handler
            # from a prior scan. Replace to fix dialog parenting.
            client.handler = handler
            return client

        for device in devices:
            if device.id_ == _id:
                return self.create_client(device, handler, plugin)

    def force_pair_xpub(self, plugin, handler, info, xpub, derivation, devices):
        # The wallet has not been previously paired, so let the user
        # choose an unpaired device and compare its first address.
        client = self.client_lookup(info.device.id_)
        if client and client.is_pairable():
            # See comment above for same code
            client.handler = handler
            # This will trigger a PIN/passphrase entry request
            try:
                client_xpub = client.get_xpub(derivation)
            except (UserCancelled, RuntimeError):
                # Bad / cancelled PIN / passphrase
                client_xpub = None
            if client_xpub == xpub:
                self.pair_xpub(xpub, info.device.id_)
                return client

        # The user input has wrong PIN or passphrase, or cancelled input,
        # or it is not pairable
        raise DeviceUnpairableError(
            _('Electrum cannot pair with your %s.\n\n'
              'Before you request argentums to be sent to addresses in this '
              'wallet, ensure you can pair with your device, or that you have '
              'its seed (and passphrase, if any). Otherwise all argentums you '
              'receive will be unspendable.') % plugin.device)

    def unpaired_device_infos(self, handler, plugin, devices=None):
        '''Returns a list of DeviceInfo objects: one for each connected,
        unpaired device accepted by the plugin.'''
        if devices is None:
            devices = self.scan_devices()
        devices = [dev for dev in devices if not self.xpub_by_id(dev.id_)]

        infos = []
        for device in devices:
            if not device.product_key in plugin.DEVICE_IDS:
                continue
            client = self.create_client(device, handler, plugin)
            if not client:
                continue
            infos.append(DeviceInfo(device, client.label(), client.is_initialized()))

        return infos

    def select_device(self, plugin, handler, keystore, devices=None):
        '''Ask the user to select a device to use if there is more than one,
        and return the DeviceInfo for the device.'''
        # Keep re-scanning until at least one unpaired device shows up,
        # or the user gives up.
        while True:
            infos = self.unpaired_device_infos(handler, plugin, devices)
            if infos:
                break
            msg = _('Could not connect to your %s. Verify the cable is '
                    'connected and that no other application is using it.\n\n'
                    'Try to connect again?') % plugin.device
            if not handler.yes_no_question(msg):
                raise UserCancelled()
            devices = None

        if len(infos) == 1:
            return infos[0]
        # select device by label
        for info in infos:
            if info.label == keystore.label:
                return info
        msg = _("Please select which %s device to use:") % plugin.device
        descriptions = [info.label + ' (%s)'%(_("initialized") if info.initialized else _("wiped")) for info in infos]
        c = handler.query_choice(msg, descriptions)
        if c is None:
            # User cancelled the selection dialog.
            raise UserCancelled()
        info = infos[c]
        # save new label
        keystore.set_label(info.label)
        handler.win.wallet.save_keystore()
        return info

    def scan_devices(self):
        # All currently supported hardware libraries use hid, so we
        # assume it here. This can be easily abstracted if necessary.
        # Note this import must be local so those without hardware
        # wallet libraries are not affected.
        import hid

        self.print_error("scanning devices...")
        with self.hid_lock:
            hid_list = hid.enumerate(0, 0)

        # First see what's connected that we know about
        devices = []
        for d in hid_list:
            product_key = (d['vendor_id'], d['product_id'])
            if product_key in self.recognised_hardware:
                # Older versions of hid don't provide interface_number
                interface_number = d.get('interface_number', 0)
                serial = d['serial_number']
                if len(serial) == 0:
                    serial = d['path']
                devices.append(Device(d['path'], interface_number,
                                      serial, product_key))

        # Now find out what was disconnected
        pairs = [(dev.path, dev.id_) for dev in devices]
        disconnected_ids = []
        with self.lock:
            connected = {}
            for client, pair in self.clients.items():
                if pair in pairs:
                    connected[client] = pair
                else:
                    disconnected_ids.append(pair[1])
            self.clients = connected

        # Unpair disconnected devices
        for id_ in disconnected_ids:
            self.unpair_id(id_)

        return devices
|
mit
|
ajhager/copycat
|
lib/pyglet/extlibs/future/py2_3/future/builtins/newnext.py
|
70
|
2014
|
'''
This module provides a newnext() function in Python 2 that mimics the
behaviour of ``next()`` in Python 3, falling back to Python 2's behaviour for
compatibility if this fails.
``newnext(iterator)`` calls the iterator's ``__next__()`` method if it exists. If this
doesn't exist, it falls back to calling a ``next()`` method.
For example:
>>> class Odds(object):
... def __init__(self, start=1):
... self.value = start - 2
... def __next__(self): # note the Py3 interface
... self.value += 2
... return self.value
... def __iter__(self):
... return self
...
>>> iterator = Odds()
>>> next(iterator)
1
>>> next(iterator)
3
If you are defining your own custom iterator class as above, it is preferable
to explicitly decorate the class with the @implements_iterator decorator from
``future.utils`` as follows:
>>> @implements_iterator
... class Odds(object):
... # etc
... pass
This next() function is primarily for consuming iterators defined in Python 3
code elsewhere that we would like to run on Python 2 or 3.
'''
# Keep a reference to the builtin next(); this module's newnext may be
# installed in its place.
_builtin_next = next

# Private sentinel: distinguishes "no default supplied" from default=None.
_SENTINEL = object()


def newnext(iterator, default=_SENTINEL):
    """
    next(iterator[, default])

    Return the next item from the iterator. If default is given and the iterator
    is exhausted, it is returned instead of raising StopIteration.

    Mimics Python 3 ``next()``: the iterator's ``__next__()`` method is
    preferred, with a fallback to the Python 2 style ``next()`` method.
    Raises TypeError when the object supports neither protocol.
    """
    try:
        try:
            return iterator.__next__()
        except AttributeError:
            # Python 2 style iterator: fall back to the old protocol.
            try:
                return iterator.next()
            except AttributeError:
                raise TypeError("'{0}' object is not an iterator".format(
                    iterator.__class__.__name__))
    except StopIteration as e:
        # Exhausted: honour the optional default.
        if default is _SENTINEL:
            raise e
        return default


__all__ = ['newnext']
|
gpl-2.0
|
miguelparaiso/OdooAccessible
|
addons/base_iban/__init__.py
|
447
|
1073
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import base_iban
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
jk1/intellij-community
|
python/helpers/pydev/_pydevd_bundle/pydevd_xml.py
|
2
|
12915
|
from _pydev_bundle import pydev_log
import traceback
from _pydevd_bundle import pydevd_extension_utils
from _pydevd_bundle import pydevd_resolver
import sys
from _pydevd_bundle.pydevd_constants import dict_iter_items, dict_keys, IS_PY3K, \
BUILTINS_MODULE_NAME, MAXIMUM_VARIABLE_REPRESENTATION_SIZE, RETURN_VALUES_DICT, LOAD_VALUES_POLICY, ValuesPolicy, DEFAULT_VALUES_DICT
from _pydev_bundle.pydev_imports import quote
from _pydevd_bundle.pydevd_extension_api import TypeResolveProvider, StrPresentationProvider
try:
    import types

    # Frame objects are compared against this in var_to_xml; some runtimes
    # may not expose types.FrameType, hence the guard.
    frame_type = types.FrameType
except:
    frame_type = None
try:
    from xml.sax.saxutils import escape

    def make_valid_xml_value(s):
        """Escape &, <, > and double quotes so *s* can be embedded in an
        XML attribute value."""
        # BUGFIX: the replacement entities had been HTML-decoded, turning
        # both branches into no-ops; restore the proper XML entities.
        return escape(s, {'"': '&quot;'})
except:
    # Simple replacement if it's not there.
    def make_valid_xml_value(s):
        return s.replace('<', '&lt;').replace('>', '&gt;').replace('"', '&quot;')
class ExceptionOnEvaluate:
    """Marker wrapping the outcome of an expression evaluation that raised."""

    def __init__(self, result):
        # The value produced by the failed evaluation (the exception itself
        # or its representation); unwrapped again in var_to_xml.
        self.result = result
# Jython reports sys.platform values like "java1.8"; used to pick type maps.
_IS_JYTHON = sys.platform.startswith("java")
def _create_default_type_map():
    """Build the default list of (type, resolver) pairs used to decide how a
    value is presented; a resolver of None means "not a compound variable".
    A separate map is built for Jython, using org.python.core types."""
    if not _IS_JYTHON:
        default_type_map = [
            # None means that it should not be treated as a compound variable
            # isintance does not accept a tuple on some versions of python, so, we must declare it expanded
            (type(None), None,),
            (int, None),
            (float, None),
            (complex, None),
            (str, None),
            (tuple, pydevd_resolver.tupleResolver),
            (list, pydevd_resolver.tupleResolver),
            (dict, pydevd_resolver.dictResolver),
        ]
        try:
            default_type_map.append((long, None))  # @UndefinedVariable
        except:
            pass  # not available on all python versions

        try:
            default_type_map.append((unicode, None))  # @UndefinedVariable
        except:
            pass  # not available on all python versions

        try:
            default_type_map.append((set, pydevd_resolver.setResolver))
        except:
            pass  # not available on all python versions

        try:
            default_type_map.append((frozenset, pydevd_resolver.setResolver))
        except:
            pass  # not available on all python versions

        try:
            from django.utils.datastructures import MultiValueDict
            default_type_map.insert(0, (MultiValueDict, pydevd_resolver.multiValueDictResolver))
            # we should put it before dict
        except:
            pass  # django may not be installed

        try:
            from django.forms import BaseForm
            default_type_map.insert(0, (BaseForm, pydevd_resolver.djangoFormResolver))
            # we should put it before instance resolver
        except:
            pass  # django may not be installed

        try:
            from collections import deque
            default_type_map.append((deque, pydevd_resolver.dequeResolver))
        except:
            pass

        if frame_type is not None:
            default_type_map.append((frame_type, pydevd_resolver.frameResolver))
    else:
        from org.python import core  # @UnresolvedImport
        default_type_map = [
            (core.PyNone, None),
            (core.PyInteger, None),
            (core.PyLong, None),
            (core.PyFloat, None),
            (core.PyComplex, None),
            (core.PyString, None),
            (core.PyTuple, pydevd_resolver.tupleResolver),
            (core.PyList, pydevd_resolver.tupleResolver),
            (core.PyDictionary, pydevd_resolver.dictResolver),
            (core.PyStringMap, pydevd_resolver.dictResolver),
        ]
        if hasattr(core, 'PyJavaInstance'):
            # Jython 2.5b3 removed it.
            default_type_map.append((core.PyJavaInstance, pydevd_resolver.instanceResolver))

    return default_type_map
class TypeResolveHandler(object):
    """Resolves the (type_object, type_name, resolver) triple for a value,
    consulting extension-provided resolvers first and caching results per
    type object."""

    NO_PROVIDER = []  # Sentinel value (any mutable object to be used as a constant would be valid).

    def __init__(self):
        # Note: don't initialize with the types we already know about so that the extensions can override
        # the default resolvers that are already available if they want.
        self._type_to_resolver_cache = {}
        self._type_to_str_provider_cache = {}
        self._initialized = False

    def _initialize(self):
        # Deferred so extensions registered after import time are seen.
        self._default_type_map = _create_default_type_map()
        self._resolve_providers = pydevd_extension_utils.extensions_of_type(TypeResolveProvider)
        self._str_providers = pydevd_extension_utils.extensions_of_type(StrPresentationProvider)
        self._initialized = True

    def get_type(self, o):
        """Return (type_object, type_name, resolver) for *o*; never raises."""
        try:
            try:
                # Faster than type(o) as we don't need the function call.
                type_object = o.__class__
            except:
                # Not all objects have __class__ (i.e.: there are bad bindings around).
                type_object = type(o)

            type_name = type_object.__name__
        except:
            # This happens for org.python.core.InitModule
            return 'Unable to get Type', 'Unable to get Type', None

        return self._get_type(o, type_object, type_name)

    def _get_type(self, o, type_object, type_name):
        resolver = self._type_to_resolver_cache.get(type_object)
        if resolver is not None:
            return type_object, type_name, resolver

        if not self._initialized:
            self._initialize()

        try:
            for resolver in self._resolve_providers:
                if resolver.can_provide(type_object, type_name):
                    # Cache it
                    self._type_to_resolver_cache[type_object] = resolver
                    return type_object, type_name, resolver

            for t in self._default_type_map:
                if isinstance(o, t[0]):
                    # Cache it
                    resolver = t[1]
                    self._type_to_resolver_cache[type_object] = resolver
                    return (type_object, type_name, resolver)
        except:
            traceback.print_exc()

        # No match return default (and cache it).
        resolver = pydevd_resolver.defaultResolver
        self._type_to_resolver_cache[type_object] = resolver
        return type_object, type_name, resolver

    if _IS_JYTHON:
        _base_get_type = _get_type

        def _get_type(self, o, type_object, type_name):
            if type_name == 'org.python.core.PyJavaInstance':
                return type_object, type_name, pydevd_resolver.instanceResolver

            if type_name == 'org.python.core.PyArray':
                return type_object, type_name, pydevd_resolver.jyArrayResolver

            # BUGFIX: the original passed type_name twice, so the base
            # implementation received the name string where the type object
            # belongs and cached resolvers under the wrong key.
            return self._base_get_type(o, type_object, type_name)

    def str_from_providers(self, o, type_object, type_name):
        """Return a custom string for *o* from a StrPresentationProvider,
        or None when no provider claims the type (result is cached)."""
        provider = self._type_to_str_provider_cache.get(type_object)

        if provider is self.NO_PROVIDER:
            return None

        if provider is not None:
            return provider.get_str(o)

        if not self._initialized:
            self._initialize()

        for provider in self._str_providers:
            if provider.can_provide(type_object, type_name):
                self._type_to_str_provider_cache[type_object] = provider
                return provider.get_str(o)

        self._type_to_str_provider_cache[type_object] = self.NO_PROVIDER
        return None
_TYPE_RESOLVE_HANDLER = TypeResolveHandler()
"""
def get_type(o):
Receives object and returns a triple (typeObject, typeString, resolver).
resolver != None means that variable is a container, and should be displayed as a hierarchy.
Use the resolver to get its attributes.
All container objects should have a resolver.
"""
get_type = _TYPE_RESOLVE_HANDLER.get_type
_str_from_providers = _TYPE_RESOLVE_HANDLER.str_from_providers
def is_builtin(x):
    """Return True when *x* (typically a type) was defined in the builtins module."""
    module_name = getattr(x, '__module__', None)
    return module_name == BUILTINS_MODULE_NAME
def should_evaluate_full_value(val):
    """Decide whether *val*'s full representation should be computed now:
    always under the SYNC loading policy, otherwise only for non-container
    builtin values."""
    if LOAD_VALUES_POLICY == ValuesPolicy.SYNC:
        return True
    return is_builtin(type(val)) and not isinstance(val, (list, tuple, dict))
def frame_vars_to_xml(frame_f_locals, hidden_ns=None):
    """ dumps frame variables to XML
    <var name="var_name" scope="local" type="type" value="value"/>

    :param frame_f_locals: mapping of local variable names to values.
    :param hidden_ns: optional namespace; names found in it are flagged
        with isIPythonHidden.
    """
    xml = ""

    keys = dict_keys(frame_f_locals)
    if hasattr(keys, 'sort'):
        keys.sort()  # Python 3.0 does not have it
    else:
        keys = sorted(keys)  # Jython 2.1 does not have it

    return_values_xml = ''

    for k in keys:
        try:
            v = frame_f_locals[k]
            eval_full_val = should_evaluate_full_value(v)

            if k == RETURN_VALUES_DICT:
                # Pseudo-variable holding function return values; each entry
                # is rendered separately and flagged with isRetVal.
                for name, val in dict_iter_items(v):
                    return_values_xml += var_to_xml(val, name, additional_in_xml=' isRetVal="True"')
            else:
                if hidden_ns is not None and k in hidden_ns:
                    xml += var_to_xml(v, str(k), additional_in_xml=' isIPythonHidden="True"',
                                      evaluate_full_value=eval_full_val)
                else:
                    xml += var_to_xml(v, str(k), evaluate_full_value=eval_full_val)
        except Exception:
            traceback.print_exc()
            pydev_log.error("Unexpected error, recovered safely.\n")

    # Show return values as the first entry.
    return return_values_xml + xml
def var_to_xml(val, name, doTrim=True, additional_in_xml='', evaluate_full_value=True):
    """Return the XML representation (<var ... />) of a single variable.

    :param val: the value to render (may be an ExceptionOnEvaluate wrapper).
    :param name: the variable name; quoted for safe embedding.
    :param doTrim: when True, trim representations longer than
        MAXIMUM_VARIABLE_REPRESENTATION_SIZE.
    :param additional_in_xml: extra attribute text appended verbatim.
    :param evaluate_full_value: when False a policy placeholder is used
        instead of computing str()/repr() of the object.
    """
    try:
        # This should be faster than isinstance (but we have to protect against not having a '__class__' attribute).
        is_exception_on_eval = val.__class__ == ExceptionOnEvaluate
    except:
        is_exception_on_eval = False

    if is_exception_on_eval:
        v = val.result
    else:
        v = val

    _type, typeName, resolver = get_type(v)
    type_qualifier = getattr(_type, "__module__", "")

    if not evaluate_full_value:
        value = DEFAULT_VALUES_DICT[LOAD_VALUES_POLICY]
    else:
        try:
            str_from_provider = _str_from_providers(v, _type, typeName)
            if str_from_provider is not None:
                value = str_from_provider
            elif hasattr(v, '__class__'):
                if v.__class__ == frame_type:
                    value = pydevd_resolver.frameResolver.get_frame_name(v)
                elif v.__class__ in (list, tuple):
                    if len(v) > 300:
                        value = '%s: %s' % (str(v.__class__), '<Too big to print. Len: %s>' % (len(v),))
                    else:
                        value = '%s: %s' % (str(v.__class__), v)
                else:
                    try:
                        cName = str(v.__class__)
                        if cName.find('.') != -1:
                            cName = cName.split('.')[-1]
                        elif cName.find("'") != -1:  # does not have '.' (could be something like <type 'int'>)
                            cName = cName[cName.index("'") + 1:]
                        if cName.endswith("'>"):
                            cName = cName[:-2]
                    except:
                        cName = str(v.__class__)
                    value = '%s: %s' % (cName, v)
            else:
                value = str(v)
        except:
            try:
                value = repr(v)
            except:
                value = 'Unable to get repr for %s' % v.__class__

    try:
        name = quote(name, '/>_= ')  # TODO: Fix PY-5834 without using quote
    except:
        pass

    xml = '<var name="%s" type="%s" ' % (make_valid_xml_value(name), make_valid_xml_value(typeName))

    if type_qualifier:
        xml_qualifier = 'qualifier="%s"' % make_valid_xml_value(type_qualifier)
    else:
        xml_qualifier = ''

    if value:
        # cannot be too big... communication may not handle it.
        if len(value) > MAXIMUM_VARIABLE_REPRESENTATION_SIZE and doTrim:
            value = value[0:MAXIMUM_VARIABLE_REPRESENTATION_SIZE]
            value += '...'

        # fix to work with unicode values
        try:
            if not IS_PY3K:
                if value.__class__ == unicode:  # @UndefinedVariable
                    value = value.encode('utf-8')
            else:
                if value.__class__ == bytes:
                    # BUGFIX: bytes has no encode() in Python 3 (the original
                    # call raised AttributeError, which the except below does
                    # not catch); decode so quoting/escaping receive text.
                    value = value.decode('utf-8', 'replace')
        except TypeError:  # in java, unicode is a function
            pass

        xml_value = ' value="%s"' % (make_valid_xml_value(quote(value, '/>_= ')))
    else:
        xml_value = ''

    if is_exception_on_eval:
        xml_container = ' isErrorOnEval="True"'
    else:
        if resolver is not None:
            xml_container = ' isContainer="True"'
        else:
            xml_container = ''

    return ''.join((xml, xml_qualifier, xml_value, xml_container, additional_in_xml, ' />\n'))
|
apache-2.0
|
dubourg/openturns
|
python/test/t_MonteCarlo_draw.py
|
2
|
1515
|
#! /usr/bin/env python
# Regression test: Monte Carlo probability estimation for a beam-deviation
# model y = -F*L^3/(3*E*I), followed by drawing the convergence graph.
from __future__ import print_function
from openturns import *

TESTPREAMBLE()
RandomGenerator.SetSeed(0)

try:
    # We create a numerical math function
    myFunction = NumericalMathFunction(
        ('E', 'F', 'L', 'I'), ('y',), ('-F*L^3/(3.*E*I)',))

    dim = myFunction.getInputDimension()

    # We create a normal distribution point of dimension 1
    mean = NumericalPoint(dim, 0.0)
    # E
    mean[0] = 50.0
    # F
    mean[1] = 1.0
    # L
    mean[2] = 10.0
    # I
    mean[3] = 5.0
    sigma = NumericalPoint(dim, 1.0)
    R = IdentityMatrix(dim)
    myDistribution = Normal(mean, sigma, R)

    # We create a 'usual' RandomVector from the Distribution
    vect = RandomVector(myDistribution)

    # We create a composite random vector
    output = RandomVector(myFunction, vect)

    # We create an Event from this RandomVector
    myEvent = Event(output, Less(), -3.0)

    # We create a Monte Carlo algorithm
    myAlgo = MonteCarlo(myEvent)
    myAlgo.setMaximumOuterSampling(500)
    myAlgo.setBlockSize(10)
    myAlgo.setMaximumCoefficientOfVariation(0.05)
    print("MonteCarlo=", myAlgo)

    # Perform the simulation
    myAlgo.run()

    # Stream out the result
    print("MonteCarlo result=", myAlgo.getResult())

    # Draw the convergence graph
    convergenceGraph = myAlgo.drawProbabilityConvergence()
    convergenceGraph.draw("convergenceMonteCarlo", 640, 480)

except:
    # Intentional broad except: the test harness reports any failure on stdout.
    import sys
    print("t_MonteCarlo_draw.py", sys.exc_info()[0], sys.exc_info()[1])
|
gpl-3.0
|
googleapis/googleapis-gen
|
google/ads/googleads/v8/googleads-py/google/ads/googleads/v8/services/services/campaign_bid_modifier_service/client.py
|
1
|
23610
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from distutils import util
import os
import re
from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions as core_exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
from google.ads.googleads.v8.common.types import criteria
from google.ads.googleads.v8.resources.types import campaign_bid_modifier
from google.ads.googleads.v8.services.types import campaign_bid_modifier_service
from google.rpc import status_pb2 # type: ignore
from .transports.base import CampaignBidModifierServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import CampaignBidModifierServiceGrpcTransport
class CampaignBidModifierServiceClientMeta(type):
    """Metaclass for the CampaignBidModifierService client.

    This provides class-level methods for building and retrieving
    support objects (e.g. transport) without polluting the client instance
    objects.
    """
    # Maps transport name -> transport class; insertion order matters
    # because the first registered entry is the default.
    _transport_registry = OrderedDict()  # type: Dict[str, Type[CampaignBidModifierServiceTransport]]
    _transport_registry['grpc'] = CampaignBidModifierServiceGrpcTransport

    def get_transport_class(cls,
            label: str = None,
            ) -> Type[CampaignBidModifierServiceTransport]:
        """Return an appropriate transport class.

        Args:
            label: The name of the desired transport. If none is
                provided, then the first transport in the registry is used.

        Returns:
            The transport class to use.
        """
        # If a specific transport is requested, return that one.
        if label:
            return cls._transport_registry[label]

        # No transport is requested; return the default (that is, the first one
        # in the dictionary).
        return next(iter(cls._transport_registry.values()))
class CampaignBidModifierServiceClient(metaclass=CampaignBidModifierServiceClientMeta):
"""Service to manage campaign bid modifiers."""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Convert api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
DEFAULT_ENDPOINT = 'googleads.googleapis.com'
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
CampaignBidModifierServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(info)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
CampaignBidModifierServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(
filename)
kwargs['credentials'] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
    @property
    def transport(self) -> CampaignBidModifierServiceTransport:
        """Return the transport used by the client instance.

        Returns:
            CampaignBidModifierServiceTransport: The transport used by the client instance.
        """
        # Set during __init__; shared by all API calls made through this client.
        return self._transport
@staticmethod
def campaign_path(customer_id: str,campaign_id: str,) -> str:
"""Return a fully-qualified campaign string."""
return "customers/{customer_id}/campaigns/{campaign_id}".format(customer_id=customer_id, campaign_id=campaign_id, )
@staticmethod
def parse_campaign_path(path: str) -> Dict[str,str]:
"""Parse a campaign path into its component segments."""
m = re.match(r"^customers/(?P<customer_id>.+?)/campaigns/(?P<campaign_id>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def campaign_bid_modifier_path(customer_id: str,campaign_id: str,criterion_id: str,) -> str:
"""Return a fully-qualified campaign_bid_modifier string."""
return "customers/{customer_id}/campaignBidModifiers/{campaign_id}~{criterion_id}".format(customer_id=customer_id, campaign_id=campaign_id, criterion_id=criterion_id, )
@staticmethod
def parse_campaign_bid_modifier_path(path: str) -> Dict[str,str]:
"""Parse a campaign_bid_modifier path into its component segments."""
m = re.match(r"^customers/(?P<customer_id>.+?)/campaignBidModifiers/(?P<campaign_id>.+?)~(?P<criterion_id>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_billing_account_path(billing_account: str, ) -> str:
"""Return a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str,str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str, ) -> str:
"""Return a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder, )
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str,str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str, ) -> str:
"""Return a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization, )
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str,str]:
"""Parse a organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(project: str, ) -> str:
"""Return a fully-qualified project string."""
return "projects/{project}".format(project=project, )
@staticmethod
def parse_common_project_path(path: str) -> Dict[str,str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(project: str, location: str, ) -> str:
"""Return a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(project=project, location=location, )
@staticmethod
def parse_common_location_path(path: str) -> Dict[str,str]:
"""Parse a location path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
return m.groupdict() if m else {}
def __init__(self, *,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[str, CampaignBidModifierServiceTransport, None] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the campaign bid modifier service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.CampaignBidModifierServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
client_options = client_options_lib.from_dict(client_options)
if client_options is None:
client_options = client_options_lib.ClientOptions()
# Create SSL credentials for mutual TLS if needed.
use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")))
ssl_credentials = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
import grpc # type: ignore
cert, key = client_options.client_cert_source()
ssl_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
is_mtls = True
else:
creds = SslCredentials()
is_mtls = creds.is_mtls
ssl_credentials = creds.ssl_credentials if is_mtls else None
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
api_endpoint = client_options.api_endpoint
else:
use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
api_endpoint = self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT
else:
raise MutualTLSChannelError(
"Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
)
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
if isinstance(transport, CampaignBidModifierServiceTransport):
# transport is a CampaignBidModifierServiceTransport instance.
if credentials:
raise ValueError('When providing a transport instance, '
'provide its credentials directly.')
self._transport = transport
elif isinstance(transport, str):
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials, host=self.DEFAULT_ENDPOINT
)
else:
self._transport = CampaignBidModifierServiceGrpcTransport(
credentials=credentials,
host=api_endpoint,
ssl_channel_credentials=ssl_credentials,
client_info=client_info,
)
def get_campaign_bid_modifier(self,
request: campaign_bid_modifier_service.GetCampaignBidModifierRequest = None,
*,
resource_name: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> campaign_bid_modifier.CampaignBidModifier:
r"""Returns the requested campaign bid modifier in full detail.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `HeaderError <>`__
`InternalError <>`__ `QuotaError <>`__ `RequestError <>`__
Args:
request (:class:`google.ads.googleads.v8.services.types.GetCampaignBidModifierRequest`):
The request object. Request message for
[CampaignBidModifierService.GetCampaignBidModifier][google.ads.googleads.v8.services.CampaignBidModifierService.GetCampaignBidModifier].
resource_name (:class:`str`):
Required. The resource name of the
campaign bid modifier to fetch.
This corresponds to the ``resource_name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ads.googleads.v8.resources.types.CampaignBidModifier:
Represents a bid-modifiable only
criterion at the campaign level.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
if request is not None and any([resource_name]):
raise ValueError('If the `request` argument is set, then none of '
'the individual field arguments should be set.')
# Minor optimization to avoid making a copy if the user passes
# in a campaign_bid_modifier_service.GetCampaignBidModifierRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, campaign_bid_modifier_service.GetCampaignBidModifierRequest):
request = campaign_bid_modifier_service.GetCampaignBidModifierRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if resource_name is not None:
request.resource_name = resource_name
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.get_campaign_bid_modifier]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
('resource_name', request.resource_name),
)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
def mutate_campaign_bid_modifiers(self,
request: campaign_bid_modifier_service.MutateCampaignBidModifiersRequest = None,
*,
customer_id: str = None,
operations: Sequence[campaign_bid_modifier_service.CampaignBidModifierOperation] = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> campaign_bid_modifier_service.MutateCampaignBidModifiersResponse:
r"""Creates, updates, or removes campaign bid modifiers. Operation
statuses are returned.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `ContextError <>`__
`CriterionError <>`__ `DatabaseError <>`__ `DateError <>`__
`DistinctError <>`__ `FieldError <>`__ `HeaderError <>`__
`IdError <>`__ `InternalError <>`__ `MutateError <>`__
`NewResourceCreationError <>`__ `NotEmptyError <>`__
`NullError <>`__ `OperatorError <>`__ `QuotaError <>`__
`RangeError <>`__ `RequestError <>`__ `SizeLimitError <>`__
`StringFormatError <>`__ `StringLengthError <>`__
Args:
request (:class:`google.ads.googleads.v8.services.types.MutateCampaignBidModifiersRequest`):
The request object. Request message for
[CampaignBidModifierService.MutateCampaignBidModifiers][google.ads.googleads.v8.services.CampaignBidModifierService.MutateCampaignBidModifiers].
customer_id (:class:`str`):
Required. ID of the customer whose
campaign bid modifiers are being
modified.
This corresponds to the ``customer_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
operations (:class:`Sequence[google.ads.googleads.v8.services.types.CampaignBidModifierOperation]`):
Required. The list of operations to
perform on individual campaign bid
modifiers.
This corresponds to the ``operations`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ads.googleads.v8.services.types.MutateCampaignBidModifiersResponse:
Response message for campaign bid
modifiers mutate.
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
if request is not None and any([customer_id, operations]):
raise ValueError('If the `request` argument is set, then none of '
'the individual field arguments should be set.')
# Minor optimization to avoid making a copy if the user passes
# in a campaign_bid_modifier_service.MutateCampaignBidModifiersRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, campaign_bid_modifier_service.MutateCampaignBidModifiersRequest):
request = campaign_bid_modifier_service.MutateCampaignBidModifiersRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if customer_id is not None:
request.customer_id = customer_id
if operations is not None:
request.operations = operations
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.mutate_campaign_bid_modifiers]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
('customer_id', request.customer_id),
)),
)
# Send the request.
response = rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
__all__ = (
'CampaignBidModifierServiceClient',
)
|
apache-2.0
|
linuxdeepin/deepin-media-player
|
src/format_conv/new_progressbar.py
|
2
|
1350
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2011 ~ 2012 Deepin, Inc.
# 2011 ~ 2012 Hai longqiu.
#
# Author: Hai longqiu <[email protected]>
# Maintainer: Hai longqiu <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from skin import app_theme
from dtk.ui.progressbar import ProgressBar
class NewProgressBar(ProgressBar):
def __init__(self):
ProgressBar.__init__(self)
def set_text(self, text):
pass
def set_fraction(self, value):
if 0.0 <= value <= 1.0:
print ":value:", value
# 0.5 * 100 = 50
# 1.0 * 100 = 100
# 0.1 * 100 = 10
self.progress_buffer.progress = int(float(value) * 100.0)
|
gpl-3.0
|
dllsf/odootest
|
addons/account/wizard/account_open_closed_fiscalyear.py
|
104
|
2341
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class account_open_closed_fiscalyear(osv.osv_memory):
_name = "account.open.closed.fiscalyear"
_description = "Choose Fiscal Year"
_columns = {
'fyear_id': fields.many2one('account.fiscalyear', \
'Fiscal Year', required=True, help='Select Fiscal Year which you want to remove entries for its End of year entries journal'),
}
def remove_entries(self, cr, uid, ids, context=None):
move_obj = self.pool.get('account.move')
data = self.browse(cr, uid, ids, context=context)[0]
period_journal = data.fyear_id.end_journal_period_id or False
if not period_journal:
raise osv.except_osv(_('Error!'), _("You have to set the 'End of Year Entries Journal' for this Fiscal Year which is set after generating opening entries from 'Generate Opening Entries'."))
ids_move = move_obj.search(cr, uid, [('journal_id','=',period_journal.journal_id.id),('period_id','=',period_journal.period_id.id)])
if ids_move:
cr.execute('delete from account_move where id IN %s', (tuple(ids_move),))
self.invalidate_cache(cr, uid, context=context)
return {'type': 'ir.actions.act_window_close'}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
seanwestfall/django
|
setup.py
|
123
|
3257
|
import os
import sys
from distutils.sysconfig import get_python_lib
from setuptools import find_packages, setup
# Warn if we are installing over top of an existing installation. This can
# cause issues where files that were deleted from a more recent Django are
# still present in site-packages. See #18115.
overlay_warning = False
if "install" in sys.argv:
lib_paths = [get_python_lib()]
if lib_paths[0].startswith("/usr/lib/"):
# We have to try also with an explicit prefix of /usr/local in order to
# catch Debian's custom user site-packages directory.
lib_paths.append(get_python_lib(prefix="/usr/local"))
for lib_path in lib_paths:
existing_path = os.path.abspath(os.path.join(lib_path, "django"))
if os.path.exists(existing_path):
# We note the need for the warning here, but present it after the
# command is run, so it's more likely to be seen.
overlay_warning = True
break
EXCLUDE_FROM_PACKAGES = ['django.conf.project_template',
'django.conf.app_template',
'django.bin']
# Dynamically calculate the version based on django.VERSION.
version = __import__('django').get_version()
setup(
name='Django',
version=version,
url='http://www.djangoproject.com/',
author='Django Software Foundation',
author_email='[email protected]',
description=('A high-level Python Web framework that encourages '
'rapid development and clean, pragmatic design.'),
license='BSD',
packages=find_packages(exclude=EXCLUDE_FROM_PACKAGES),
include_package_data=True,
scripts=['django/bin/django-admin.py'],
entry_points={'console_scripts': [
'django-admin = django.core.management:execute_from_command_line',
]},
extras_require={
"bcrypt": ["bcrypt"],
},
zip_safe=False,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
if overlay_warning:
sys.stderr.write("""
========
WARNING!
========
You have just installed Django over top of an existing
installation, without removing it first. Because of this,
your install may now include extraneous files from a
previous version that have since been removed from
Django. This is known to cause a variety of problems. You
should manually remove the
%(existing_path)s
directory and re-install Django.
""" % {"existing_path": existing_path})
|
bsd-3-clause
|
Tejal011089/digitales_frappe
|
frappe/core/report/permitted_documents_for_user/permitted_documents_for_user.py
|
32
|
1678
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _, throw
import frappe.utils.user
from frappe.permissions import check_admin_or_system_manager
from frappe.model.db_schema import type_map
def execute(filters=None):
user, doctype = filters.get("user"), filters.get("doctype")
validate(user, doctype)
columns, fields = get_columns_and_fields(doctype)
data = frappe.get_list(doctype, fields=fields, as_list=True, user=user)
return columns, data
def validate(user, doctype):
# check if current user is System Manager
check_admin_or_system_manager()
if not user:
throw(_("Please specify user"))
if not doctype:
throw(_("Please specify doctype"))
def get_columns_and_fields(doctype):
columns = ["Name:Link/{}:200".format(doctype)]
fields = ["name"]
for df in frappe.get_meta(doctype).fields:
if df.in_list_view and df.fieldtype in type_map:
fields.append(df.fieldname)
fieldtype = "Link/{}".format(df.options) if df.fieldtype=="Link" else df.fieldtype
columns.append("{label}:{fieldtype}:{width}".format(label=df.label, fieldtype=fieldtype, width=df.width or 100))
return columns, fields
def query_doctypes(doctype, txt, searchfield, start, page_len, filters):
user = filters.get("user")
user_obj = frappe.utils.user.User(user)
user_obj.build_permissions()
can_read = user_obj.can_read
single_doctypes = [d[0] for d in frappe.db.get_values("DocType", {"issingle": 1})]
out = []
for dt in can_read:
if txt.lower().replace("%", "") in dt.lower() and dt not in single_doctypes:
out.append([dt])
return out
|
mit
|
Bysmyyr/chromium-crosswalk
|
third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/port/mock_drt_unittest.py
|
26
|
7929
|
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit tests for MockDRT."""
import io
import sys
import unittest
from webkitpy.common.system.systemhost_mock import MockSystemHost
from webkitpy.layout_tests.port import mock_drt
from webkitpy.layout_tests.port import port_testcase
from webkitpy.layout_tests.port import test
from webkitpy.layout_tests.port.factory import PortFactory
from webkitpy.tool import mocktool
mock_options = mocktool.MockOptions(configuration='Release')
class MockDRTPortTest(port_testcase.PortTestCase):
def make_port(self, host=None, options=mock_options):
host = host or MockSystemHost()
test.add_unit_tests_to_mock_filesystem(host.filesystem)
return mock_drt.MockDRTPort(host, port_name='mock-mac', options=options)
def make_wdiff_available(self, port):
port._make_wdiff_available()
def test_port_name_in_constructor(self):
self.assertTrue(mock_drt.MockDRTPort(MockSystemHost(), port_name='mock-test'))
def test_check_sys_deps(self):
pass
def test_default_max_locked_shards(self):
pass
def test_diff_image(self):
pass
def test_diff_image_crashed(self):
pass
def test_uses_apache(self):
pass
def test_get_crash_log(self):
pass
def test_check_build(self):
pass
def test_virtual_test_suites(self):
pass
class MockDRTTest(unittest.TestCase):
def input_line(self, port, test_name, pixel_tests, checksum=None):
url = port.create_driver(0).test_to_uri(test_name)
if url.startswith('file://'):
url = url[len('file://'):]
if pixel_tests:
url += "'--pixel-test"
if checksum:
url += "'" + checksum
return url + '\n'
def make_drt(self, options, args, host, stdin, stdout, stderr):
return mock_drt.MockDRT(options, args, host, stdin, stdout, stderr)
def make_input_output(self, port, test_name, pixel_tests,
expected_checksum, drt_output, drt_input=None, expected_text=None):
if pixel_tests:
if not expected_checksum:
expected_checksum = port.expected_checksum(test_name)
if not drt_input:
drt_input = self.input_line(port, test_name, pixel_tests, expected_checksum)
text_output = expected_text or port.expected_text(test_name) or ''
if not drt_output:
drt_output = self.expected_output(port, test_name, pixel_tests,
text_output, expected_checksum)
return (drt_input, drt_output)
def expected_output(self, port, test_name, pixel_tests, text_output, expected_checksum):
output = ['Content-Type: text/plain\n']
if text_output:
output.append(text_output)
output.append('#EOF\n')
if pixel_tests and expected_checksum:
output.extend(['\n',
'ActualHash: %s\n' % expected_checksum,
'ExpectedHash: %s\n' % expected_checksum])
output.append('#EOF\n')
return output
def assertTest(self, test_name, pixel_tests, expected_checksum=None, drt_output=None, host=None, expected_text=None):
port_name = 'test'
host = host or MockSystemHost()
test.add_unit_tests_to_mock_filesystem(host.filesystem)
port = PortFactory(host).get(port_name)
drt_input, drt_output = self.make_input_output(port, test_name,
pixel_tests, expected_checksum, drt_output, drt_input=None, expected_text=expected_text)
args = ['--run-layout-test', '--platform', port_name, '-']
stdin = io.BytesIO(drt_input)
stdout = io.BytesIO()
stderr = io.BytesIO()
options, args = mock_drt.parse_options(args)
drt = self.make_drt(options, args, host, stdin, stdout, stderr)
res = drt.run()
self.assertEqual(res, 0)
self.assertEqual(stdout.getvalue(), ''.join(drt_output))
self.assertEqual(stderr.getvalue(), '#EOF\n')
def test_main(self):
host = MockSystemHost()
test.add_unit_tests_to_mock_filesystem(host.filesystem)
stdin = io.BytesIO()
stdout = io.BytesIO()
stderr = io.BytesIO()
res = mock_drt.main(['--run-layout-test', '--platform', 'test', '-'],
host, stdin, stdout, stderr)
self.assertEqual(res, 0)
self.assertEqual(stdout.getvalue(), '')
self.assertEqual(stderr.getvalue(), '')
self.assertEqual(host.filesystem.written_files, {})
def test_pixeltest_passes(self):
# This also tests that we handle HTTP: test URLs properly.
self.assertTest('http/tests/passes/text.html', True)
def test_pixeltest__fails(self):
self.assertTest('failures/expected/image_checksum.html', pixel_tests=True,
expected_checksum='image_checksum-checksum',
drt_output=['Content-Type: text/plain\n',
'image_checksum-txt',
'#EOF\n',
'\n',
'ActualHash: image_checksum-checksum\n',
'ExpectedHash: image_checksum-checksum\n',
'#EOF\n'])
def test_textonly(self):
self.assertTest('passes/image.html', False)
def test_checksum_in_png(self):
self.assertTest('passes/checksum_in_image.html', True)
def test_missing_image(self):
self.assertTest('failures/expected/missing_image.html', True)
def test_missing_text(self):
self.assertTest('failures/expected/missing_text.html', True)
def test_reftest_match(self):
self.assertTest('passes/reftest.html', True, expected_checksum='mock-checksum', expected_text='reference text\n')
def test_reftest_mismatch(self):
self.assertTest('passes/mismatch.html', True, expected_checksum='mock-checksum', expected_text='reference text\n')
def test_audio(self):
self.assertTest('passes/audio.html', pixel_tests=True,
drt_output=['Content-Type: audio/wav\n',
'Content-Transfer-Encoding: base64\n',
'YXVkaW8td2F2',
'\n',
'#EOF\n',
'#EOF\n'])
def test_virtual(self):
self.assertTest('virtual/passes/text.html', True)
|
bsd-3-clause
|
open-craft/opencraft
|
instance/migrations/0079_openstack_settings_per_appserver.py
|
1
|
2903
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-03-09 16:05
from __future__ import unicode_literals
from django.db import migrations, models
import django_extensions.db.fields.json
import functools
import instance.models.utils
class Migration(migrations.Migration):
dependencies = [
('instance', '0078_field_defaults'),
]
operations = [
migrations.AddField(
model_name='openedxappserver',
name='openstack_server_base_image',
field=django_extensions.db.fields.json.JSONField(blank=True, default=None, help_text='JSON openstack base image selector, e.g. {"name": "ubuntu-12.04-ref-ul"} Defaults to settings.OPENSTACK_SANDBOX_BASE_IMAGE on server creation.', null=True),
),
migrations.AddField(
model_name='openedxappserver',
name='openstack_server_flavor',
field=django_extensions.db.fields.json.JSONField(blank=True, default=None, help_text='JSON openstack flavor selector, e.g. {"name": "vps-ssd-1"}. Defaults to settings.OPENSTACK_SANDBOX_FLAVOR on server creation.', null=True),
),
migrations.AddField(
model_name='openedxappserver',
name='openstack_server_ssh_keyname',
field=models.CharField(blank=True, default=None, help_text='SSH key name used when setting up access to the openstack project. Defaults to settings.OPENSTACK_SANDBOX_SSH_KEYNAME on server creation.', max_length=256, null=True),
),
migrations.AlterField(
model_name='openedxappserver',
name='openstack_server_base_image',
field=django_extensions.db.fields.json.JSONField(blank=True, default=functools.partial(instance.models.utils._get_setting, *('OPENSTACK_SANDBOX_BASE_IMAGE',), **{}), help_text='JSON openstack base image selector, e.g. {"name": "ubuntu-12.04-ref-ul"} Defaults to settings.OPENSTACK_SANDBOX_BASE_IMAGE on server creation.', null=True),
),
migrations.AlterField(
model_name='openedxappserver',
name='openstack_server_flavor',
field=django_extensions.db.fields.json.JSONField(blank=True, default=functools.partial(instance.models.utils._get_setting, *('OPENSTACK_SANDBOX_FLAVOR',), **{}), help_text='JSON openstack flavor selector, e.g. {"name": "vps-ssd-1"}. Defaults to settings.OPENSTACK_SANDBOX_FLAVOR on server creation.', null=True),
),
migrations.AlterField(
model_name='openedxappserver',
name='openstack_server_ssh_keyname',
field=models.CharField(blank=True, default=functools.partial(instance.models.utils._get_setting, *('OPENSTACK_SANDBOX_SSH_KEYNAME',), **{}), help_text='SSH key name used when setting up access to the openstack project. Defaults to settings.OPENSTACK_SANDBOX_SSH_KEYNAME on server creation.', max_length=256, null=True),
),
]
|
agpl-3.0
|
b0ttl3z/SickRage
|
SickBeard.py
|
5
|
21755
|
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*
# Author: Nic Wolfe <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# Rewrite Author: miigotu <[email protected]>
# URL: https://sickrage.github.io
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function, unicode_literals
import codecs
import datetime
import io
import locale
import platform
import os
import shutil
import signal
import subprocess
import sys
import threading
import time
import traceback
codecs.register(lambda name: codecs.lookup('utf-8') if name == 'cp65001' else None)
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), 'lib')))
if (2, 7, 99) < sys.version_info < (2, 7):
print('Sorry, requires Python 2.7')
sys.exit(1)
# https://mail.python.org/pipermail/python-dev/2014-September/136300.html
if sys.version_info >= (2, 7, 9):
import ssl
ssl._create_default_https_context = ssl._create_unverified_context # pylint: disable=protected-access
# Fix mimetypes on misconfigured systems
import mimetypes
mimetypes.add_type("text/css", ".css")
mimetypes.add_type("application/sfont", ".otf")
mimetypes.add_type("application/sfont", ".ttf")
mimetypes.add_type("application/javascript", ".js")
mimetypes.add_type("application/font-woff", ".woff")
# Not sure about this one, but we also have halflings in .woff so I think it wont matter
# mimetypes.add_type("application/font-woff2", ".woff2")
# Do this before importing sickbeard, to prevent locked files and incorrect import
OLD_TORNADO = os.path.abspath(os.path.join(os.path.dirname(__file__), 'tornado'))
if os.path.isdir(OLD_TORNADO):
shutil.move(OLD_TORNADO, OLD_TORNADO + '_kill')
shutil.rmtree(OLD_TORNADO + '_kill')
import sickbeard
from sickbeard import db, logger, network_timezones, failed_history, name_cache
from sickbeard.tv import TVShow
from sickbeard.webserveInit import SRWebServer
from sickbeard.event_queue import Events
from sickbeard.versionChecker import SourceUpdateManager, GitUpdateManager
from configobj import ConfigObj # pylint: disable=import-error
from sickrage.helper.encoding import ek
from sickrage.helper.argument_parser import SickRageArgumentParser
# noinspection PyUnresolvedReferences
from six.moves import reload_module
# http://bugs.python.org/issue7980#msg221094
THROWAWAY = datetime.datetime.strptime('20110101', '%Y%m%d')
signal.signal(signal.SIGINT, sickbeard.sig_handler)
signal.signal(signal.SIGTERM, sickbeard.sig_handler)
class SickRage(object):
    # pylint: disable=too-many-instance-attributes
    """
    Main SickRage module.

    Owns application startup (`start`), daemonization, DB-backed show
    loading, backup restore, and shutdown/restart handling.
    """

    def __init__(self):
        """Set run-mode defaults and register the shutdown/restart callback."""
        # system event callback for shutdown/restart
        sickbeard.events = Events(self.shutdown)
        # daemon constants
        self.run_as_daemon = False
        self.create_pid = False
        self.pid_file = ''
        # web server constants
        self.web_server = None
        self.forced_port = None
        self.no_launch = False
        self.web_host = '0.0.0.0'
        self.start_port = sickbeard.WEB_PORT
        self.web_options = {}
        self.log_dir = None
        self.console_logging = True

    @staticmethod
    def clear_cache():
        """
        Remove the Mako cache directory (best effort; failures only warn).
        """
        try:
            cache_folder = ek(os.path.join, sickbeard.CACHE_DIR, 'mako')
            if os.path.isdir(cache_folder):
                shutil.rmtree(cache_folder)
        except Exception:  # pylint: disable=broad-except
            logger.log('Unable to remove the cache/mako directory!', logger.WARNING)

    @staticmethod
    def help_message():
        """
        Return the help message for commandline options, with the module
        docstring's placeholders substituted with the real paths.
        """
        help_msg = __doc__
        help_msg = help_msg.replace('SickBeard.py', sickbeard.MY_FULLNAME)
        help_msg = help_msg.replace('SickRage directory', sickbeard.PROG_DIR)
        return help_msg

    def start(self):  # pylint: disable=too-many-branches,too-many-statements
        """
        Start SickRage: resolve paths/encoding, parse arguments, validate
        the data dir, config and PID file, load config and shows, start the
        web server and worker threads, then block in the main loop.
        """
        # do some preliminary stuff
        sickbeard.MY_FULLNAME = ek(os.path.normpath, ek(os.path.abspath, __file__))
        sickbeard.MY_NAME = ek(os.path.basename, sickbeard.MY_FULLNAME)
        sickbeard.PROG_DIR = ek(os.path.dirname, sickbeard.MY_FULLNAME)
        sickbeard.LOCALE_DIR = ek(os.path.join, sickbeard.PROG_DIR, 'locale')
        sickbeard.DATA_DIR = sickbeard.PROG_DIR
        sickbeard.MY_ARGS = sys.argv[1:]

        # Determine a usable system encoding; fall back to UTF-8 when the
        # locale is broken or reports an encoding we cannot work with.
        try:
            locale.setlocale(locale.LC_ALL, '')
            sickbeard.SYS_ENCODING = locale.getpreferredencoding()
        except (locale.Error, IOError):
            sickbeard.SYS_ENCODING = 'UTF-8'

        # pylint: disable=no-member
        if not sickbeard.SYS_ENCODING or sickbeard.SYS_ENCODING.lower() in ('ansi_x3.4-1968', 'us-ascii', 'ascii', 'charmap') or \
                (sys.platform.startswith('win') and sys.getwindowsversion()[0] >= 6 and
                 str(getattr(sys.stdout, 'device', sys.stdout).encoding).lower() in ('cp65001', 'charmap')):
            sickbeard.SYS_ENCODING = 'UTF-8'

        # TODO: Continue working on making this unnecessary, this hack creates all sorts of hellish problems
        if not hasattr(sys, 'setdefaultencoding'):
            reload_module(sys)

        try:
            # On non-unicode builds this will raise an AttributeError, if encoding type is not valid it throws a LookupError
            sys.setdefaultencoding(sickbeard.SYS_ENCODING)  # pylint: disable=no-member
        except (AttributeError, LookupError):
            sys.exit('Sorry, you MUST add the SickRage folder to the PYTHONPATH environment variable\n'
                     'or find another way to force Python to use {} for string encoding.'.format(sickbeard.SYS_ENCODING))

        # Rename the main thread
        threading.currentThread().name = 'MAIN'

        args = SickRageArgumentParser(sickbeard.PROG_DIR).parse_args()

        if args.force_update:
            result = self.force_update()
            sys.exit(int(not result))  # Ok -> 0 , Error -> 1

        # Need console logging for SickBeard.py and SickBeard-console.exe
        sickbeard.NO_RESIZE = args.noresize
        self.console_logging = (not hasattr(sys, 'frozen')) or (sickbeard.MY_NAME.lower().find('-console') > 0) and not args.quiet
        self.no_launch = args.nolaunch
        self.forced_port = args.port
        if args.daemon:
            # Daemon mode is not supported on Windows; it implies quiet,
            # non-launching behavior either way.
            self.run_as_daemon = platform.system() != 'Windows'
            self.console_logging = False
            self.no_launch = True

        self.create_pid = bool(args.pidfile)
        self.pid_file = args.pidfile
        if self.pid_file and ek(os.path.exists, self.pid_file):
            # If the pid file already exists, SickRage may still be running, so exit
            raise SystemExit('PID file: {0} already exists. Exiting.'.format(self.pid_file))

        sickbeard.DATA_DIR = ek(os.path.abspath, args.datadir) if args.datadir else sickbeard.DATA_DIR
        sickbeard.CONFIG_FILE = ek(os.path.abspath, args.config) if args.config else ek(os.path.join, sickbeard.DATA_DIR, 'config.ini')

        # The pid file is only useful in daemon mode, make sure we can write the file properly
        if self.create_pid:
            if self.run_as_daemon:
                pid_dir = ek(os.path.dirname, self.pid_file)
                if not ek(os.access, pid_dir, os.F_OK):
                    sys.exit('PID dir: {0} doesn\'t exist. Exiting.'.format(pid_dir))
                if not ek(os.access, pid_dir, os.W_OK):
                    raise SystemExit('PID dir: {0} must be writable (write permissions). Exiting.'.format(pid_dir))
            else:
                if self.console_logging:
                    sys.stdout.write('Not running in daemon mode. PID file creation disabled.\n')
                self.create_pid = False

        # Make sure that we can create the data dir
        if not ek(os.access, sickbeard.DATA_DIR, os.F_OK):
            try:
                ek(os.makedirs, sickbeard.DATA_DIR, 0o744)
            except os.error:
                raise SystemExit('Unable to create data directory: {0}'.format(sickbeard.DATA_DIR))

        # Make sure we can write to the data dir
        if not ek(os.access, sickbeard.DATA_DIR, os.W_OK):
            raise SystemExit('Data directory must be writeable: {0}'.format(sickbeard.DATA_DIR))

        # Make sure we can write to the config file
        if not ek(os.access, sickbeard.CONFIG_FILE, os.W_OK):
            if ek(os.path.isfile, sickbeard.CONFIG_FILE):
                raise SystemExit('Config file must be writeable: {0}'.format(sickbeard.CONFIG_FILE))
            elif not ek(os.access, ek(os.path.dirname, sickbeard.CONFIG_FILE), os.W_OK):
                raise SystemExit('Config file root dir must be writeable: {0}'.format(ek(os.path.dirname, sickbeard.CONFIG_FILE)))

        ek(os.chdir, sickbeard.DATA_DIR)

        # Check if we need to perform a restore first
        restore_dir = ek(os.path.join, sickbeard.DATA_DIR, 'restore')
        if ek(os.path.exists, restore_dir):
            success = self.restore_db(restore_dir, sickbeard.DATA_DIR)
            if self.console_logging:
                sys.stdout.write('Restore: restoring DB and config.ini {0}!\n'.format(('FAILED', 'SUCCESSFUL')[success]))

        # Load the config and publish it to the sickbeard package
        if self.console_logging and not ek(os.path.isfile, sickbeard.CONFIG_FILE):
            sys.stdout.write('Unable to find {0}, all settings will be default!\n'.format(sickbeard.CONFIG_FILE))
        sickbeard.CFG = ConfigObj(sickbeard.CONFIG_FILE, encoding='UTF-8')

        # Initialize the config and our threads
        sickbeard.initialize(consoleLogging=self.console_logging)

        if self.run_as_daemon:
            self.daemonize()

        # Get PID
        sickbeard.PID = os.getpid()

        # Build from the DB to start with
        self.load_shows_from_db()

        logger.log('Starting SickRage [{branch}] using \'{config}\''.format
                   (branch=sickbeard.BRANCH, config=sickbeard.CONFIG_FILE))

        self.clear_cache()

        if self.forced_port:
            logger.log('Forcing web server to port {port}'.format(port=self.forced_port))
            self.start_port = self.forced_port
        else:
            self.start_port = sickbeard.WEB_PORT

        if sickbeard.WEB_LOG:
            self.log_dir = sickbeard.LOG_DIR
        else:
            self.log_dir = None

        # sickbeard.WEB_HOST is available as a configuration value in various
        # places but is not configurable. It is supported here for historic reasons.
        if sickbeard.WEB_HOST and sickbeard.WEB_HOST != '0.0.0.0':
            self.web_host = sickbeard.WEB_HOST
        else:
            self.web_host = '' if sickbeard.WEB_IPV6 else '0.0.0.0'

        # web server options
        self.web_options = {
            'port': int(self.start_port),
            'host': self.web_host,
            'data_root': ek(os.path.join, sickbeard.PROG_DIR, 'gui', sickbeard.GUI_NAME),
            'web_root': sickbeard.WEB_ROOT,
            'log_dir': self.log_dir,
            'username': sickbeard.WEB_USERNAME,
            'password': sickbeard.WEB_PASSWORD,
            'enable_https': sickbeard.ENABLE_HTTPS,
            'handle_reverse_proxy': sickbeard.HANDLE_REVERSE_PROXY,
            'https_cert': ek(os.path.join, sickbeard.PROG_DIR, sickbeard.HTTPS_CERT),
            'https_key': ek(os.path.join, sickbeard.PROG_DIR, sickbeard.HTTPS_KEY),
        }

        # start web server
        self.web_server = SRWebServer(self.web_options)
        self.web_server.start()

        # Fire up all our threads
        sickbeard.start()

        # Build internal name cache
        name_cache.buildNameCache()

        # Pre-populate network timezones, it isn't thread safe
        network_timezones.update_network_dict()

        # sure, why not?
        if sickbeard.USE_FAILED_DOWNLOADS:
            failed_history.trimHistory()

        # Check for metadata indexer updates for shows (sets the next aired ep!)
        # sickbeard.showUpdateScheduler.forceRun()

        # Launch browser
        if sickbeard.LAUNCH_BROWSER and not (self.no_launch or self.run_as_daemon):
            sickbeard.launchBrowser('https' if sickbeard.ENABLE_HTTPS else 'http', self.start_port, sickbeard.WEB_ROOT)

        # main loop
        while True:
            time.sleep(1)

    def daemonize(self):
        """
        Fork off as a daemon (classic UNIX double-fork).
        """
        # pylint: disable=protected-access
        # An object is accessed for a non-existent member.
        # Access to a protected member of a client class
        # Make a non-session-leader child process
        try:
            pid = os.fork()  # @UndefinedVariable - only available in UNIX
            if pid != 0:
                os._exit(0)
        except OSError as error:
            sys.stderr.write('fork #1 failed: {error_num}: {error_message}\n'.format
                             (error_num=error.errno, error_message=error.strerror))
            sys.exit(1)

        os.setsid()  # @UndefinedVariable - only available in UNIX

        # https://github.com/SickRage/SickRage/issues/2969
        # http://www.microhowto.info/howto/cause_a_process_to_become_a_daemon_in_c.html#idp23920
        # https://www.safaribooksonline.com/library/view/python-cookbook/0596001673/ch06s08.html
        # Previous code simply set the umask to whatever it was because it was ANDing instead of OR-ing
        # Daemons traditionally run with umask 0 anyways and this should not have repercussions
        os.umask(0)

        # Make the child a session-leader by detaching from the terminal
        try:
            pid = os.fork()  # @UndefinedVariable - only available in UNIX
            if pid != 0:
                os._exit(0)
        except OSError as error:
            sys.stderr.write('fork #2 failed: Error {error_num}: {error_message}\n'.format
                             (error_num=error.errno, error_message=error.strerror))
            sys.exit(1)

        # Write pid
        if self.create_pid:
            pid = os.getpid()
            # FIX: the template was missing the {filename} placeholder even
            # though 'filename' was passed to format(); restore it so the
            # PID-file path is actually logged.
            logger.log('Writing PID: {pid} to {filename}'.format(pid=pid, filename=self.pid_file))
            try:
                with io.open(self.pid_file, 'w') as f_pid:
                    f_pid.write('{0}\n'.format(pid))
            except EnvironmentError as error:
                # FIX: same missing-{filename} defect as above.
                logger.log_error_and_exit('Unable to write PID file: {filename} Error {error_num}: {error_message}'.format
                                          (filename=self.pid_file, error_num=error.errno, error_message=error.strerror))

        # Redirect all output
        sys.stdout.flush()
        sys.stderr.flush()
        devnull = getattr(os, 'devnull', '/dev/null')
        stdin = open(devnull)
        stdout = open(devnull, 'a+')
        stderr = open(devnull, 'a+')
        os.dup2(stdin.fileno(), getattr(sys.stdin, 'device', sys.stdin).fileno())
        os.dup2(stdout.fileno(), getattr(sys.stdout, 'device', sys.stdout).fileno())
        os.dup2(stderr.fileno(), getattr(sys.stderr, 'device', sys.stderr).fileno())

    @staticmethod
    def remove_pid_file(pid_file):
        """
        Remove the pid file.

        :param pid_file: path of the file to remove
        :return: True on success (or if the file was absent), False on error
        """
        try:
            if ek(os.path.exists, pid_file):
                ek(os.remove, pid_file)
        except EnvironmentError:
            return False
        return True

    @staticmethod
    def load_shows_from_db():
        """
        Populates sickbeard.showList with shows from the database.
        A failure on one show is logged and does not abort the others.
        """
        logger.log('Loading initial show list', logger.DEBUG)
        main_db_con = db.DBConnection()
        sql_results = main_db_con.select('SELECT indexer, indexer_id, location FROM tv_shows;')
        sickbeard.showList = []
        for sql_show in sql_results:
            try:
                cur_show = TVShow(sql_show[b'indexer'], sql_show[b'indexer_id'])
                cur_show.nextEpisode()
                sickbeard.showList.append(cur_show)
            except Exception as error:  # pylint: disable=broad-except
                logger.log('There was an error creating the show in {0}: Error {1}'.format
                           (sql_show[b'location'], error), logger.ERROR)
                logger.log(traceback.format_exc(), logger.DEBUG)

    @staticmethod
    def restore_db(src_dir, dst_dir):
        """
        Restore the databases and config from a backup.

        :param src_dir: Directory containing backup
        :param dst_dir: Directory to restore to
        :return: True on success, False otherwise
        """
        try:
            files_list = ['sickbeard.db', 'config.ini', 'failed.db', 'cache.db']
            for filename in files_list:
                src_file = ek(os.path.join, src_dir, filename)
                dst_file = ek(os.path.join, dst_dir, filename)
                bak_file = ek(os.path.join, dst_dir, '{0}.bak-{1}'.format(filename, datetime.datetime.now().strftime('%Y%m%d_%H%M%S')))
                # Keep a timestamped backup of any file we are about to replace.
                if ek(os.path.isfile, dst_file):
                    shutil.move(dst_file, bak_file)
                shutil.move(src_file, dst_file)
            return True
        except Exception:  # pylint: disable=broad-except
            return False

    def shutdown(self, event):
        """
        Shut down SickRage.

        :param event: Type of shutdown event, used to see if restart required
        """
        if sickbeard.started:
            sickbeard.halt()  # stop all tasks
            sickbeard.saveAll()  # save all shows to DB

            # shutdown web server
            if self.web_server:
                logger.log('Shutting down Tornado')
                self.web_server.shutdown()
                try:
                    self.web_server.join(10)
                except Exception:  # pylint: disable=broad-except
                    pass

            self.clear_cache()  # Clean cache

            # if run as daemon delete the pid file
            if self.run_as_daemon and self.create_pid:
                self.remove_pid_file(self.pid_file)

            if event == sickbeard.event_queue.Events.SystemEvent.RESTART:
                install_type = sickbeard.versionCheckScheduler.action.install_type
                popen_list = []
                if install_type in ('git', 'source'):
                    popen_list = [sys.executable, sickbeard.MY_FULLNAME]
                elif install_type == 'win':
                    logger.log('You are using a binary Windows build of SickRage. '
                               'Please switch to using git.', logger.ERROR)

                if popen_list and not sickbeard.NO_RESTART:
                    popen_list += sickbeard.MY_ARGS
                    if '--nolaunch' not in popen_list:
                        popen_list += ['--nolaunch']
                    logger.log('Restarting SickRage with {options}'.format(options=popen_list))
                    # shutdown the logger to make sure it's released the logfile BEFORE it restarts SR.
                    logger.shutdown()
                    subprocess.Popen(popen_list, cwd=os.getcwd())

        # Make sure the logger has stopped, just in case
        logger.shutdown()
        os._exit(0)  # pylint: disable=protected-access

    @staticmethod
    def force_update():
        """
        Forces SickRage to update to the latest version and exit.

        :return: True if successful, False otherwise
        """
        def update_with_git():
            # Reset the local checkout hard onto upstream master.
            def run_git(updater, cmd):
                stdout_, stderr_, exit_status = updater._run_git(updater._git_path, cmd)
                if not exit_status == 0:
                    print('Failed to run command: {0} {1}'.format(updater._git_path, cmd))
                    return False
                else:
                    return True

            updater = GitUpdateManager()
            if not run_git(updater, 'config remote.origin.url https://github.com/SickRage/SickRage.git'):
                return False
            if not run_git(updater, 'fetch origin --prune'):
                return False
            if not run_git(updater, 'checkout master'):
                return False
            if not run_git(updater, 'reset --hard origin/master'):
                return False
            return True

        if ek(os.path.isdir, ek(os.path.join, sickbeard.PROG_DIR, '.git')):  # update with git
            print('Forcing SickRage to update using git...')
            result = update_with_git()
            if result:
                print('Successfully updated to latest commit. You may now run SickRage normally.')
                return True
            else:
                print('Error while trying to force an update using git.')

        # Fall back to the source tarball updater when git is unavailable
        # or the git update failed.
        print('Forcing SickRage to update using source...')
        if not SourceUpdateManager().update():
            print('Failed to force an update.')
            return False
        print('Successfully updated to latest commit. You may now run SickRage normally.')
        return True
if __name__ == '__main__':
    # start SickRage (blocks in SickRage.start()'s main loop)
    SickRage().start()
|
gpl-3.0
|
hynnet/hiwifi-openwrt-HC5661-HC5761
|
staging_dir/host/lib64/scons-2.1.0/SCons/Tool/suncc.py
|
21
|
1986
|
"""SCons.Tool.suncc
Tool-specific initialization for Sun Solaris (Forte) CC and cc.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/suncc.py 5357 2011/09/09 21:31:03 bdeegan"
import SCons.Util
import cc
def generate(env):
    """Add Builders and construction variables for the Sun Forte
    C and C++ compilers to an Environment.
    """
    # Start from the generic C-compiler setup, then overlay the
    # Forte-specific construction variables.
    cc.generate(env)
    forte_settings = {
        'CXX': 'CC',
        'SHCCFLAGS': SCons.Util.CLVar('$CCFLAGS -KPIC'),
        'SHOBJPREFIX': 'so_',
        'SHOBJSUFFIX': '.o',
    }
    for variable, value in forte_settings.items():
        env[variable] = value
def exists(env):
    """Return a truthy value when the Sun 'CC' compiler can be detected."""
    return env.Detect('CC')
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
gpl-2.0
|
bjzhang/xen
|
tools/xm-test/tests/sched-credit/01_sched_credit_weight_cap_pos.py
|
42
|
1858
|
#!/usr/bin/python
#
# Sched-credit tests modified from SEDF tests
#
import re
from XmTestLib import *
# Matches a line of `xm sched-credit` output: skip the first two
# whitespace-separated fields, capture the last two (weight and cap).
paramsRE = re.compile(r'^[^ ]* *[^ ]* *([^ ]*) *([^ ]*)$')
def get_sched_credit_params(domain):
    """Query the credit-scheduler parameters of *domain* via `xm sched-credit`
    and return them as an (int weight, int cap) tuple. FAILs the test on a
    non-zero exit status or unparseable output.
    """
    status, output = traceCommand("xm sched-credit -d %s | tail -1" %
                                  domain.getName())
    if status != 0:
        FAIL("Getting sched-credit parameters return non-zero rv (%d)",
             status)
    m = paramsRE.match(output)
    if not m:
        FAIL("xm sched-credit gave bad output")
    weight = int(m.group(1))
    cap = int(m.group(2))
    return (weight, cap)
def set_sched_credit_weight(domain, weight):
    """Assign a new credit-scheduler weight to *domain*.

    Returns the exit status of the `xm sched-credit` command.
    """
    command = "xm sched-credit -d %s -w %d" % (domain.getName(), weight)
    status, output = traceCommand(command)
    return status
def set_sched_credit_cap(domain, cap):
    """Assign a new credit-scheduler cap to *domain*.

    Returns the exit status of the `xm sched-credit` command.
    """
    command = "xm sched-credit -d %s -c %d" % (domain.getName(), cap)
    status, output = traceCommand(command)
    return status
# Test body: boot a domain, verify the scheduler defaults, change the
# weight and cap, and verify the new values stick.
# NOTE: Python 2 syntax (`except X, e`, print statements) — this script
# targets the Python 2 XmTestLib harness.
domain = XmTestDomain()
try:
    domain.start(noConsole=True)
except DomainError, e:
    if verbose:
        print "Failed to create test domain because:"
        print e.extra
    FAIL(str(e))
# check default param values
(weight, cap) = get_sched_credit_params(domain)
if weight != 256:
    FAIL("default weight is 256 (got %d)", weight)
if cap != 0:
    FAIL("default cap is 0 (got %d)", cap)
# set new parameters
status = set_sched_credit_weight(domain, 512)
if status != 0:
    FAIL("Setting sched-credit weight return non-zero rv (%d)", status)
status = set_sched_credit_cap(domain, 100)
if status != 0:
    FAIL("Setting sched-credit cap return non-zero rv (%d)", status)
# check new param values
(weight, cap) = get_sched_credit_params(domain)
if weight != 512:
    FAIL("expected weight of 512 (got %d)", weight)
if cap != 100:
    FAIL("expected cap of 100 (got %d)", cap)
# Stop the domain (nice shutdown)
domain.stop()
|
gpl-2.0
|
AEHO/collect
|
collect/src/endpoints_proto_datastore/utils_test.py
|
26
|
2058
|
# Copyright 2013 Google Inc. All Rights Reserved.
"""Tests for utils.py."""
import unittest
from protorpc import messages
from . import utils
class UtilsTests(unittest.TestCase):
    """Comprehensive test for the endpoints_proto_datastore.utils module."""

    def testIsSubclass(self):
        """Tests the utils.IsSubclass method."""
        self.assertTrue(utils.IsSubclass(int, int))
        self.assertTrue(utils.IsSubclass(bool, int))
        # NOTE: `basestring` means this suite targets Python 2.
        self.assertTrue(utils.IsSubclass(str, (str, basestring)))
        self.assertFalse(utils.IsSubclass(int, bool))
        # Make sure this does not fail
        self.assertFalse(utils.IsSubclass(int, None))

    def testDictToTuple(self):
        """Tests the utils._DictToTuple method."""
        # pylint:disable-msg=W0212
        # None has no .items attribute at all.
        self.assertRaises(AttributeError, utils._DictToTuple, None)

        class Simple(object):
            items = None  # Not callable

        self.assertRaises(TypeError, utils._DictToTuple, Simple)
        single_value_dictionary = {1: 2}
        self.assertEqual((1,), utils._DictToTuple(single_value_dictionary))
        # Keys are expected back ordered by their associated values.
        multiple_value_dictionary = {-5: 3, 1: 1, 3: 2}
        self.assertEqual((1, 3, -5), utils._DictToTuple(multiple_value_dictionary))
        # pylint:enable-msg=W0212

    def testGeoPtMessage(self):
        """Tests the utils.GeoPtMessage protorpc message class."""
        geo_pt_message = utils.GeoPtMessage(lat=1.0)
        self.assertEqual(geo_pt_message.lat, 1.0)
        self.assertEqual(geo_pt_message.lon, None)
        # Both lat and lon are required for the message to be initialized.
        self.assertFalse(geo_pt_message.is_initialized())
        geo_pt_message.lon = 2.0
        self.assertEqual(geo_pt_message.lon, 2.0)
        self.assertTrue(geo_pt_message.is_initialized())
        self.assertRaises(messages.ValidationError,
                          utils.GeoPtMessage, lat='1', lon=2)
        # Positional arguments are not accepted.
        self.assertRaises(TypeError, utils.GeoPtMessage, 1.0, 2.0)
        # Unknown field names are rejected.
        self.assertRaises(AttributeError, utils.GeoPtMessage,
                          lat=1.0, lon=2.0, other=3.0)
        geo_pt_message = utils.GeoPtMessage(lat=1.0, lon=2.0)
        self.assertTrue(geo_pt_message.is_initialized())
if __name__ == '__main__':
    # Run the full suite when executed directly.
    unittest.main()
|
mit
|
TheProjecter/kassie
|
src/primaires/log/message.py
|
1
|
2331
|
# -*-coding:Utf-8 -*
# Copyright (c) 2010 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Ce fichier contient la classe Message, définie plus bas."""
class Message:
    """A single log message held in the Logger's queue.

    It keeps the severity level, the raw informative text, and the
    already-formatted line. The formatted line is stored up front so the
    recorded timestamp is the time the message was queued, not the (later)
    time it is actually written out.
    """
    def __init__(self, niveau, message, formate):
        """Build a queued log entry.

        :param niveau: severity level (int)
        :param message: informative text (str)
        :param formate: pre-formatted line carrying the enqueue-time date
        """
        self.niveau = niveau
        self.message = message
        self.message_formate = formate
|
bsd-3-clause
|
ntt-sic/python-cinderclient
|
cinderclient/exceptions.py
|
1
|
4571
|
# Copyright 2010 Jacob Kaplan-Moss
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Exception definitions.
"""
class UnsupportedVersion(Exception):
    """Indicates that the user is trying to use an unsupported
    version of the API.
    """
    pass
class InvalidAPIVersion(Exception):
    """Raised for an invalid API version value."""
    pass
class CommandError(Exception):
    """Raised for errors in CLI command handling."""
    pass
class AuthorizationFailure(Exception):
    """Raised when authorization with the service fails."""
    pass
class NoUniqueMatch(Exception):
    """Raised when a lookup matches more than one resource."""
    pass
class NoTokenLookupException(Exception):
    """This form of authentication does not support looking up
    endpoints from an existing token.
    """
    pass
class EndpointNotFound(Exception):
    """Could not find Service or Region in Service Catalog."""
    pass
class ConnectionError(Exception):
    """Could not open a connection to the API service."""
    pass
class AmbiguousEndpoints(Exception):
    """Found more than one matching endpoint in Service Catalog."""
    def __init__(self, endpoints=None):
        # Keep the full list of conflicting endpoints for the caller.
        self.endpoints = endpoints
    def __str__(self):
        return "AmbiguousEndpoints: %s" % repr(self.endpoints)
class ClientException(Exception):
    """
    The base exception class for all exceptions this library raises.
    """
    # Default message used when the caller supplies none. Without this
    # class attribute, instantiating the base class directly (e.g. for an
    # HTTP status with no dedicated subclass) raised AttributeError on
    # `self.__class__.message`; subclasses override it.
    message = "Unknown Error"

    def __init__(self, code, message=None, details=None, request_id=None):
        """
        :param code: HTTP status code of the failed request.
        :param message: optional human-readable message; falls back to the
            class-level default.
        :param details: optional extra detail text from the server.
        :param request_id: optional x-compute-request-id for correlation.
        """
        self.code = code
        self.message = message or self.__class__.message
        self.details = details
        self.request_id = request_id

    def __str__(self):
        formatted_string = "%s (HTTP %s)" % (self.message, self.code)
        if self.request_id:
            formatted_string += " (Request-ID: %s)" % self.request_id
        return formatted_string
# Concrete per-status exceptions. `http_status` is the key used to build
# _code_map below; `message` is the default text shown by __str__.
class BadRequest(ClientException):
    """
    HTTP 400 - Bad request: you sent some malformed data.
    """
    http_status = 400
    message = "Bad request"
class Unauthorized(ClientException):
    """
    HTTP 401 - Unauthorized: bad credentials.
    """
    http_status = 401
    message = "Unauthorized"
class Forbidden(ClientException):
    """
    HTTP 403 - Forbidden: your credentials don't give you access to this
    resource.
    """
    http_status = 403
    message = "Forbidden"
class NotFound(ClientException):
    """
    HTTP 404 - Not found
    """
    http_status = 404
    message = "Not found"
class OverLimit(ClientException):
    """
    HTTP 413 - Over limit: you're over the API limits for this time period.
    """
    http_status = 413
    message = "Over limit"
# NotImplemented is a python keyword.
class HTTPNotImplemented(ClientException):
    """
    HTTP 501 - Not Implemented: the server does not support this operation.
    """
    http_status = 501
    message = "Not Implemented"
# In Python 2.4 Exception is old-style and thus doesn't have a __subclasses__()
# so we can do this:
#     _code_map = dict((c.http_status, c)
#                      for c in ClientException.__subclasses__())
#
# Instead, we have to hardcode it:
# Maps HTTP status code -> exception class; used by from_response().
_code_map = dict((c.http_status, c) for c in [BadRequest, Unauthorized,
                                              Forbidden, NotFound,
                                              OverLimit, HTTPNotImplemented])
def from_response(response, body):
    """
    Return an instance of a ClientException or subclass
    based on a requests response.

    Usage::

        resp, body = requests.request(...)
        if resp.status_code != 200:
            raise exception_from_response(resp, rest.text)
    """
    # Unmapped status codes fall back to the generic base class.
    exc_class = _code_map.get(response.status_code, ClientException)

    request_id = None
    if response.headers:
        request_id = response.headers.get('x-compute-request-id')

    if not body:
        return exc_class(code=response.status_code, request_id=request_id)

    message = "n/a"
    details = "n/a"
    # Dict-like bodies carry a single top-level error object.
    if hasattr(body, 'keys'):
        error = body[list(body)[0]]
        message = error.get('message', None)
        details = error.get('details', None)
    return exc_class(code=response.status_code, message=message,
                     details=details, request_id=request_id)
|
apache-2.0
|
rahulunair/nova
|
nova/virt/powervm/media.py
|
4
|
9988
|
# Copyright 2015, 2017 IBM Corp.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import os
import tempfile
from oslo_log import log as logging
from oslo_utils import excutils
from pypowervm import const as pvm_const
from pypowervm.tasks import scsi_mapper as tsk_map
from pypowervm.tasks import storage as tsk_stg
from pypowervm.tasks import vopt as tsk_vopt
from pypowervm import util as pvm_util
from pypowervm.wrappers import storage as pvm_stg
from pypowervm.wrappers import virtual_io_server as pvm_vios
import retrying
from taskflow import task
from nova.api.metadata import base as instance_metadata
from nova.network import model as network_model
from nova.virt import configdrive
from nova.virt.powervm import vm
LOG = logging.getLogger(__name__)
# IPv6 link-local subnet, used for the management VIF's fixed IP.
_LLA_SUBNET = "fe80::/64"
# TODO(efried): CONF these (maybe)
# Volume group and size used for the virtual optical repository.
_VOPT_VG = 'rootvg'
_VOPT_SIZE_GB = 1
class ConfigDrivePowerVM(object):
def __init__(self, adapter):
"""Creates the config drive manager for PowerVM.
:param adapter: The pypowervm adapter to communicate with the system.
"""
self.adapter = adapter
# Validate that the virtual optical exists
self.vios_uuid, self.vg_uuid = tsk_vopt.validate_vopt_repo_exists(
self.adapter, vopt_media_volume_group=_VOPT_VG,
vopt_media_rep_size=_VOPT_SIZE_GB)
@staticmethod
def _sanitize_network_info(network_info):
"""Will sanitize the network info for the config drive.
Newer versions of cloud-init look at the vif type information in
the network info and utilize it to determine what to do. There are
a limited number of vif types, and it seems to be built on the idea
that the neutron vif type is the cloud init vif type (which is not
quite right).
This sanitizes the network info that gets passed into the config
drive to work properly with cloud-inits.
"""
network_info = copy.deepcopy(network_info)
# OVS is the only supported vif type. All others (SEA, PowerVM SR-IOV)
# will default to generic vif.
for vif in network_info:
if vif.get('type') != 'ovs':
LOG.debug('Changing vif type from %(type)s to vif for vif '
'%(id)s.', {'type': vif.get('type'),
'id': vif.get('id')})
vif['type'] = 'vif'
return network_info
def _create_cfg_dr_iso(self, instance, injected_files, network_info,
iso_path, admin_pass=None):
"""Creates an ISO file that contains the injected files.
Used for config drive.
:param instance: The VM instance from OpenStack.
:param injected_files: A list of file paths that will be injected into
the ISO.
:param network_info: The network_info from the nova spawn method.
:param iso_path: The absolute file path for the new ISO
:param admin_pass: Optional password to inject for the VM.
"""
LOG.info("Creating config drive.", instance=instance)
extra_md = {}
if admin_pass is not None:
extra_md['admin_pass'] = admin_pass
# Sanitize the vifs for the network config
network_info = self._sanitize_network_info(network_info)
inst_md = instance_metadata.InstanceMetadata(instance,
content=injected_files,
extra_md=extra_md,
network_info=network_info)
with configdrive.ConfigDriveBuilder(instance_md=inst_md) as cdb:
LOG.info("Config drive ISO being built in %s.", iso_path,
instance=instance)
# There may be an OSError exception when create the config drive.
# If so, retry the operation before raising.
@retrying.retry(retry_on_exception=lambda exc: isinstance(
exc, OSError), stop_max_attempt_number=2)
def _make_cfg_drive(iso_path):
cdb.make_drive(iso_path)
try:
_make_cfg_drive(iso_path)
except OSError:
with excutils.save_and_reraise_exception(logger=LOG):
LOG.exception("Config drive ISO could not be built",
instance=instance)
def create_cfg_drv_vopt(self, instance, injected_files, network_info,
stg_ftsk, admin_pass=None, mgmt_cna=None):
"""Create the config drive virtual optical and attach to VM.
:param instance: The VM instance from OpenStack.
:param injected_files: A list of file paths that will be injected into
the ISO.
:param network_info: The network_info from the nova spawn method.
:param stg_ftsk: FeedTask to defer storage connectivity operations.
:param admin_pass: (Optional) password to inject for the VM.
:param mgmt_cna: (Optional) The management (RMC) CNA wrapper.
"""
# If there is a management client network adapter, then we should
# convert that to a VIF and add it to the network info
if mgmt_cna is not None:
network_info = copy.deepcopy(network_info)
network_info.append(self._mgmt_cna_to_vif(mgmt_cna))
# Pick a file name for when we upload the media to VIOS
file_name = pvm_util.sanitize_file_name_for_api(
instance.uuid.replace('-', ''), prefix='cfg_', suffix='.iso',
max_len=pvm_const.MaxLen.VOPT_NAME)
# Create and upload the media
with tempfile.NamedTemporaryFile(mode='rb') as fh:
self._create_cfg_dr_iso(instance, injected_files, network_info,
fh.name, admin_pass=admin_pass)
vopt, f_uuid = tsk_stg.upload_vopt(
self.adapter, self.vios_uuid, fh, file_name,
os.path.getsize(fh.name))
# Define the function to build and add the mapping
def add_func(vios_w):
LOG.info("Adding cfg drive mapping to Virtual I/O Server %s.",
vios_w.name, instance=instance)
mapping = tsk_map.build_vscsi_mapping(
None, vios_w, vm.get_pvm_uuid(instance), vopt)
return tsk_map.add_map(vios_w, mapping)
# Add the subtask to create the mapping when the FeedTask runs
stg_ftsk.wrapper_tasks[self.vios_uuid].add_functor_subtask(add_func)
def _mgmt_cna_to_vif(self, cna):
"""Converts the mgmt CNA to VIF format for network injection."""
mac = vm.norm_mac(cna.mac)
ipv6_link_local = self._mac_to_link_local(mac)
subnet = network_model.Subnet(
version=6, cidr=_LLA_SUBNET,
ips=[network_model.FixedIP(address=ipv6_link_local)])
network = network_model.Network(id='mgmt', subnets=[subnet],
injected='yes')
return network_model.VIF(id='mgmt_vif', address=mac,
network=network)
@staticmethod
def _mac_to_link_local(mac):
# Convert the address to IPv6. The first step is to separate out the
# mac address
splits = mac.split(':')
# Create EUI-64 id per RFC 4291 Appendix A
splits.insert(3, 'ff')
splits.insert(4, 'fe')
# Create modified EUI-64 id via bit flip per RFC 4291 Appendix A
splits[0] = "%.2x" % (int(splits[0], 16) ^ 0b00000010)
# Convert to the IPv6 link local format. The prefix is fe80::. Join
# the hexes together at every other digit.
ll = ['fe80:']
ll.extend([splits[x] + splits[x + 1]
for x in range(0, len(splits), 2)])
return ':'.join(ll)
    def dlt_vopt(self, instance, stg_ftsk):
        """Deletes the virtual optical and scsi mappings for a VM.

        The mapping removal is queued as a FeedTask subtask; the media
        deletion itself is queued as a post-execute task so it runs only
        after the mappings have been removed.

        :param instance: The nova instance whose VOpt(s) are to be removed.
        :param stg_ftsk: A FeedTask. The actions to modify the storage will be
                         added as batched functions onto the FeedTask.
        """
        lpar_uuid = vm.get_pvm_uuid(instance)
        # The matching function for find_maps, remove_maps.  Restricts both
        # operations to mappings backed by virtual optical media only.
        match_func = tsk_map.gen_match_func(pvm_stg.VOptMedia)
        # Add a function to remove the mappings (deferred until the FeedTask
        # executes).
        stg_ftsk.wrapper_tasks[self.vios_uuid].add_functor_subtask(
            tsk_map.remove_maps, lpar_uuid, match_func=match_func)
        # Find the VOpt device based from the mappings.  This runs immediately
        # (not as a deferred subtask), so the backing media elements are
        # captured before the removal above actually happens.
        media_mappings = tsk_map.find_maps(
            stg_ftsk.get_wrapper(self.vios_uuid).scsi_mappings,
            client_lpar_id=lpar_uuid, match_func=match_func)
        media_elems = [x.backing_storage for x in media_mappings]
        def rm_vopt():
            # Removes the backing media from the volume group.  Scheduled via
            # add_post_execute, so it runs after the FeedTask's subtasks
            # (including the mapping removal) have completed.
            LOG.info("Removing virtual optical storage.",
                     instance=instance)
            vg_wrap = pvm_stg.VG.get(self.adapter, uuid=self.vg_uuid,
                                     parent_type=pvm_vios.VIOS,
                                     parent_uuid=self.vios_uuid)
            tsk_stg.rm_vg_storage(vg_wrap, vopts=media_elems)
        # Add task to remove the media if it exists
        if media_elems:
            stg_ftsk.add_post_execute(task.FunctorTask(rm_vopt))
|
apache-2.0
|
consulo/consulo-python
|
plugin/src/main/dist/helpers/coveragepy/coverage/backunittest.py
|
95
|
1515
|
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://bitbucket.org/ned/coveragepy/src/default/NOTICE.txt
"""Implementations of unittest features from the future."""
# Use unittest2 if it's available, otherwise unittest. This gives us
# back-ported features for 2.6.
try:
import unittest2 as unittest
except ImportError:
import unittest
def unittest_has(method):
    """Tell whether `unittest.TestCase` already provides `method`.

    Used to decide which back-ported assert helpers need defining.
    """
    return method in dir(unittest.TestCase)
class TestCase(unittest.TestCase):
    """`unittest.TestCase` plus assert methods back-ported for older Pythons.

    Designed to be compatible with 3.1 unittest.  Each alias is only
    defined when the underlying `unittest` lacks it.
    """
    # pylint: disable=missing-docstring

    # Many Pythons define assertCountEqual themselves, but PyPy3's is buggy
    # (https://bitbucket.org/pypy/pypy/issues/2092), so unconditionally
    # provide an implementation that works everywhere, at least for the
    # ways we're calling it.
    def assertCountEqual(self, s1, s2):
        """Assert these have the same elements, regardless of order."""
        self.assertEqual(sorted(s1), sorted(s2))

    if not hasattr(unittest.TestCase, 'assertRaisesRegex'):
        def assertRaisesRegex(self, *args, **kwargs):
            return self.assertRaisesRegexp(*args, **kwargs)

    if not hasattr(unittest.TestCase, 'assertRegex'):
        def assertRegex(self, *args, **kwargs):
            return self.assertRegexpMatches(*args, **kwargs)
|
apache-2.0
|
FCP-INDI/nipype
|
nipype/interfaces/semtools/diffusion/gtract.py
|
10
|
68939
|
# -*- coding: utf8 -*-
"""Autogenerated file - DO NOT EDIT
If you spot a bug, please report it on the mailing list and/or change the generator."""
import os
from ...base import (CommandLine, CommandLineInputSpec, SEMLikeCommandLine,
TraitedSpec, File, Directory, traits, isdefined,
InputMultiPath, OutputMultiPath)
# Command-line input traits for the 'gtractTransformToDisplacementField'
# GTRACT tool; each trait's ``argstr`` is the flag template for the CLI.
# NOTE(review): this module is autogenerated (see module header) -- fixes
# belong in the generator, not here.
class gtractTransformToDisplacementFieldInputSpec(CommandLineInputSpec):
    inputTransform = File(desc="Input Transform File Name", exists=True, argstr="--inputTransform %s")
    inputReferenceVolume = File(desc="Required: input image file name to exemplify the anatomical space over which to vcl_express the transform as a displacement field.", exists=True, argstr="--inputReferenceVolume %s")
    outputDeformationFieldVolume = traits.Either(traits.Bool, File(), hash_files=False, desc="Output deformation field", argstr="--outputDeformationFieldVolume %s")
    numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
# Output traits produced by the tool.
class gtractTransformToDisplacementFieldOutputSpec(TraitedSpec):
    outputDeformationFieldVolume = File(desc="Output deformation field", exists=True)
# nipype interface for the executable named in `_cmd`.
class gtractTransformToDisplacementField(SEMLikeCommandLine):
    """title: Create Displacement Field
category: Diffusion.GTRACT
description: This program will compute forward deformation from the given Transform. The size of the DF is equal to MNI space
version: 4.0.0
documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT
license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt
contributor: This tool was developed by Vincent Magnotta, Madhura Ingalhalikar, and Greg Harris
acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1
"""
    input_spec = gtractTransformToDisplacementFieldInputSpec
    output_spec = gtractTransformToDisplacementFieldOutputSpec
    _cmd = " gtractTransformToDisplacementField "
    _outputs_filenames = {'outputDeformationFieldVolume': 'outputDeformationFieldVolume.nii'}
    _redirect_x = False
# Command-line input traits for the 'gtractInvertBSplineTransform' GTRACT
# tool; each trait's ``argstr`` is the flag template for the CLI.
# NOTE(review): autogenerated module (see header) -- do not edit by hand.
class gtractInvertBSplineTransformInputSpec(CommandLineInputSpec):
    inputReferenceVolume = File(desc="Required: input image file name to exemplify the anatomical space to interpolate over.", exists=True, argstr="--inputReferenceVolume %s")
    inputTransform = File(desc="Required: input B-Spline transform file name", exists=True, argstr="--inputTransform %s")
    outputTransform = traits.Either(traits.Bool, File(), hash_files=False, desc="Required: output transform file name", argstr="--outputTransform %s")
    landmarkDensity = InputMultiPath(traits.Int, desc="Number of landmark subdivisions in all 3 directions", sep=",", argstr="--landmarkDensity %s")
    numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
# Output traits produced by the tool.
class gtractInvertBSplineTransformOutputSpec(TraitedSpec):
    outputTransform = File(desc="Required: output transform file name", exists=True)
# nipype interface for the executable named in `_cmd`.
class gtractInvertBSplineTransform(SEMLikeCommandLine):
    """title: B-Spline Transform Inversion
category: Diffusion.GTRACT
description: This program will invert a B-Spline transform using a thin-plate spline approximation.
version: 4.0.0
documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT
license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt
contributor: This tool was developed by Vincent Magnotta and Greg Harris.
acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1
"""
    input_spec = gtractInvertBSplineTransformInputSpec
    output_spec = gtractInvertBSplineTransformOutputSpec
    _cmd = " gtractInvertBSplineTransform "
    _outputs_filenames = {'outputTransform': 'outputTransform.h5'}
    _redirect_x = False
# Command-line input traits for the 'gtractConcatDwi' GTRACT tool; each
# trait's ``argstr`` is the flag template for the CLI (the '%s...' form on
# inputVolume repeats the flag per file).
# NOTE(review): autogenerated module (see header) -- do not edit by hand.
class gtractConcatDwiInputSpec(CommandLineInputSpec):
    inputVolume = InputMultiPath(File(exists=True), desc="Required: input file containing the first diffusion weighted image", argstr="--inputVolume %s...")
    ignoreOrigins = traits.Bool(desc="If image origins are different force all images to origin of first image", argstr="--ignoreOrigins ")
    outputVolume = traits.Either(traits.Bool, File(), hash_files=False, desc="Required: name of output NRRD file containing the combined diffusion weighted images.", argstr="--outputVolume %s")
    numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
# Output traits produced by the tool.
class gtractConcatDwiOutputSpec(TraitedSpec):
    outputVolume = File(desc="Required: name of output NRRD file containing the combined diffusion weighted images.", exists=True)
# nipype interface for the executable named in `_cmd`.
class gtractConcatDwi(SEMLikeCommandLine):
    """title: Concat DWI Images
category: Diffusion.GTRACT
description: This program will concatenate two DTI runs together.
version: 4.0.0
documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT
license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt
contributor: This tool was developed by Vincent Magnotta and Greg Harris.
acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1
"""
    input_spec = gtractConcatDwiInputSpec
    output_spec = gtractConcatDwiOutputSpec
    _cmd = " gtractConcatDwi "
    _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'}
    _redirect_x = False
# Command-line input traits for the 'gtractAverageBvalues' GTRACT tool; each
# trait's ``argstr`` is the flag template for the CLI.
# NOTE(review): autogenerated module (see header) -- do not edit by hand.
class gtractAverageBvaluesInputSpec(CommandLineInputSpec):
    inputVolume = File(desc="Required: input image file name containing multiple baseline gradients to average", exists=True, argstr="--inputVolume %s")
    outputVolume = traits.Either(traits.Bool, File(), hash_files=False, desc="Required: name of output NRRD file containing directly averaged baseline images", argstr="--outputVolume %s")
    directionsTolerance = traits.Float(desc="Tolerance for matching identical gradient direction pairs", argstr="--directionsTolerance %f")
    averageB0only = traits.Bool(desc="Average only baseline gradients. All other gradient directions are not averaged, but retained in the outputVolume", argstr="--averageB0only ")
    numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
# Output traits produced by the tool.
class gtractAverageBvaluesOutputSpec(TraitedSpec):
    outputVolume = File(desc="Required: name of output NRRD file containing directly averaged baseline images", exists=True)
# nipype interface for the executable named in `_cmd`.
class gtractAverageBvalues(SEMLikeCommandLine):
    """title: Average B-Values
category: Diffusion.GTRACT
description: This program will directly average together the baseline gradients (b value equals 0) within a DWI scan. This is usually used after gtractCoregBvalues.
version: 4.0.0
documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT
license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt
contributor: This tool was developed by Vincent Magnotta and Greg Harris.
acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1
"""
    input_spec = gtractAverageBvaluesInputSpec
    output_spec = gtractAverageBvaluesOutputSpec
    _cmd = " gtractAverageBvalues "
    _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'}
    _redirect_x = False
# Command-line input traits for the 'gtractCoregBvalues' GTRACT tool; each
# trait's ``argstr`` is the flag template for the CLI.
# NOTE(review): autogenerated module (see header) -- do not edit by hand.
# The 'corection' typo in the eddyCurrentCorrection desc below comes from
# the generator; fix it upstream if at all.
class gtractCoregBvaluesInputSpec(CommandLineInputSpec):
    movingVolume = File(desc="Required: input moving image file name. In order to register gradients within a scan to its first gradient, set the movingVolume and fixedVolume as the same image.", exists=True, argstr="--movingVolume %s")
    fixedVolume = File(desc="Required: input fixed image file name. It is recommended that this image should either contain or be a b0 image.", exists=True, argstr="--fixedVolume %s")
    fixedVolumeIndex = traits.Int(desc="Index in the fixed image for registration. It is recommended that this image should be a b0 image.", argstr="--fixedVolumeIndex %d")
    outputVolume = traits.Either(traits.Bool, File(), hash_files=False, desc="Required: name of output NRRD file containing moving images individually resampled and fit to the specified fixed image index.", argstr="--outputVolume %s")
    outputTransform = traits.Either(traits.Bool, File(), hash_files=False, desc="Registration 3D transforms concatenated in a single output file. There are no tools that can use this, but can be used for debugging purposes.", argstr="--outputTransform %s")
    eddyCurrentCorrection = traits.Bool(desc="Flag to perform eddy current corection in addition to motion correction (recommended)", argstr="--eddyCurrentCorrection ")
    numberOfIterations = traits.Int(desc="Number of iterations in each 3D fit", argstr="--numberOfIterations %d")
    numberOfSpatialSamples = traits.Int(
        desc="The number of voxels sampled for mutual information computation. Increase this for a slower, more careful fit. NOTE that it is suggested to use samplingPercentage instead of this option. However, if set, it overwrites the samplingPercentage option. ", argstr="--numberOfSpatialSamples %d")
    samplingPercentage = traits.Float(
        desc="This is a number in (0.0,1.0] interval that shows the percentage of the input fixed image voxels that are sampled for mutual information computation. Increase this for a slower, more careful fit. You can also limit the sampling focus with ROI masks and ROIAUTO mask generation. The default is to use approximately 5% of voxels (for backwards compatibility 5% ~= 500000/(256*256*256)). Typical values range from 1% for low detail images to 20% for high detail images.", argstr="--samplingPercentage %f")
    relaxationFactor = traits.Float(desc="Fraction of gradient from Jacobian to attempt to move in each 3D fit step (adjust when eddyCurrentCorrection is enabled; suggested value = 0.25)", argstr="--relaxationFactor %f")
    maximumStepSize = traits.Float(desc="Maximum permitted step size to move in each 3D fit step (adjust when eddyCurrentCorrection is enabled; suggested value = 0.1)", argstr="--maximumStepSize %f")
    minimumStepSize = traits.Float(desc="Minimum required step size to move in each 3D fit step without converging -- decrease this to make the fit more exacting", argstr="--minimumStepSize %f")
    spatialScale = traits.Float(desc="How much to scale up changes in position compared to unit rotational changes in radians -- decrease this to put more rotation in the fit", argstr="--spatialScale %f")
    registerB0Only = traits.Bool(desc="Register the B0 images only", argstr="--registerB0Only ")
    debugLevel = traits.Int(desc="Display debug messages, and produce debug intermediate results. 0=OFF, 1=Minimal, 10=Maximum debugging.", argstr="--debugLevel %d")
    numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
# Output traits produced by the tool.
class gtractCoregBvaluesOutputSpec(TraitedSpec):
    outputVolume = File(desc="Required: name of output NRRD file containing moving images individually resampled and fit to the specified fixed image index.", exists=True)
    outputTransform = File(desc="Registration 3D transforms concatenated in a single output file. There are no tools that can use this, but can be used for debugging purposes.", exists=True)
# nipype interface for the executable named in `_cmd`.
class gtractCoregBvalues(SEMLikeCommandLine):
    """title: Coregister B-Values
category: Diffusion.GTRACT
description: This step should be performed after converting DWI scans from DICOM to NRRD format. This program will register all gradients in a NRRD diffusion weighted 4D vector image (moving image) to a specified index in a fixed image. It also supports co-registration with a T2 weighted image or field map in the same plane as the DWI data. The fixed image for the registration should be a b0 image. A mutual information metric cost function is used for the registration because of the differences in signal intensity as a result of the diffusion gradients. The full affine allows the registration procedure to correct for eddy current distortions that may exist in the data. If the eddyCurrentCorrection is enabled, relaxationFactor (0.25) and maximumStepSize (0.1) should be adjusted.
version: 4.0.0
documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT
license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt
contributor: This tool was developed by Vincent Magnotta and Greg Harris.
acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1
"""
    input_spec = gtractCoregBvaluesInputSpec
    output_spec = gtractCoregBvaluesOutputSpec
    _cmd = " gtractCoregBvalues "
    _outputs_filenames = {'outputVolume': 'outputVolume.nrrd', 'outputTransform': 'outputTransform.h5'}
    _redirect_x = False
# Command-line input traits for the 'gtractResampleAnisotropy' GTRACT tool;
# each trait's ``argstr`` is the flag template for the CLI.
# NOTE(review): autogenerated module (see header) -- do not edit by hand.
class gtractResampleAnisotropyInputSpec(CommandLineInputSpec):
    inputAnisotropyVolume = File(desc="Required: input file containing the anisotropy image", exists=True, argstr="--inputAnisotropyVolume %s")
    inputAnatomicalVolume = File(desc="Required: input file containing the anatomical image whose characteristics will be cloned.", exists=True, argstr="--inputAnatomicalVolume %s")
    inputTransform = File(desc="Required: input Rigid OR Bspline transform file name", exists=True, argstr="--inputTransform %s")
    transformType = traits.Enum("Rigid", "B-Spline", desc="Transform type: Rigid, B-Spline", argstr="--transformType %s")
    outputVolume = traits.Either(traits.Bool, File(), hash_files=False, desc="Required: name of output NRRD file containing the resampled transformed anisotropy image.", argstr="--outputVolume %s")
    numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
# Output traits produced by the tool.
class gtractResampleAnisotropyOutputSpec(TraitedSpec):
    outputVolume = File(desc="Required: name of output NRRD file containing the resampled transformed anisotropy image.", exists=True)
# nipype interface for the executable named in `_cmd`.
class gtractResampleAnisotropy(SEMLikeCommandLine):
    """title: Resample Anisotropy
category: Diffusion.GTRACT
description: This program will resample a floating point image using either the Rigid or B-Spline transform. You may want to save the aligned B0 image after each of the anisotropy map co-registration steps with the anatomical image to check the registration quality with another tool.
version: 4.0.0
documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT
license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt
contributor: This tool was developed by Vincent Magnotta and Greg Harris.
acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1
"""
    input_spec = gtractResampleAnisotropyInputSpec
    output_spec = gtractResampleAnisotropyOutputSpec
    _cmd = " gtractResampleAnisotropy "
    _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'}
    _redirect_x = False
# Command-line input traits for the 'gtractResampleCodeImage' GTRACT tool;
# each trait's ``argstr`` is the flag template for the CLI.
# NOTE(review): autogenerated module (see header) -- do not edit by hand.
class gtractResampleCodeImageInputSpec(CommandLineInputSpec):
    inputCodeVolume = File(desc="Required: input file containing the code image", exists=True, argstr="--inputCodeVolume %s")
    inputReferenceVolume = File(desc="Required: input file containing the standard image to clone the characteristics of.", exists=True, argstr="--inputReferenceVolume %s")
    inputTransform = File(desc="Required: input Rigid or Inverse-B-Spline transform file name", exists=True, argstr="--inputTransform %s")
    transformType = traits.Enum("Rigid", "Affine", "B-Spline", "Inverse-B-Spline", "None", desc="Transform type: Rigid or Inverse-B-Spline", argstr="--transformType %s")
    outputVolume = traits.Either(traits.Bool, File(), hash_files=False, desc="Required: name of output NRRD file containing the resampled code image in acquisition space.", argstr="--outputVolume %s")
    numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
# Output traits produced by the tool.
class gtractResampleCodeImageOutputSpec(TraitedSpec):
    outputVolume = File(desc="Required: name of output NRRD file containing the resampled code image in acquisition space.", exists=True)
# nipype interface for the executable named in `_cmd`.
class gtractResampleCodeImage(SEMLikeCommandLine):
    """title: Resample Code Image
category: Diffusion.GTRACT
description: This program will resample a short integer code image using either the Rigid or Inverse-B-Spline transform. The reference image is the DTI tensor anisotropy image space, and the input code image is in anatomical space.
version: 4.0.0
documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT
license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt
contributor: This tool was developed by Vincent Magnotta and Greg Harris.
acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1
"""
    input_spec = gtractResampleCodeImageInputSpec
    output_spec = gtractResampleCodeImageOutputSpec
    _cmd = " gtractResampleCodeImage "
    _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'}
    _redirect_x = False
# Command-line input traits for the 'gtractCopyImageOrientation' GTRACT
# tool; each trait's ``argstr`` is the flag template for the CLI.
# NOTE(review): autogenerated module (see header) -- do not edit by hand.
# The 'orietation' typo in the inputReferenceVolume desc below comes from
# the generator; fix it upstream if at all.
class gtractCopyImageOrientationInputSpec(CommandLineInputSpec):
    inputVolume = File(desc="Required: input file containing the signed short image to reorient without resampling.", exists=True, argstr="--inputVolume %s")
    inputReferenceVolume = File(desc="Required: input file containing orietation that will be cloned.", exists=True, argstr="--inputReferenceVolume %s")
    outputVolume = traits.Either(traits.Bool, File(), hash_files=False, desc="Required: name of output NRRD or Nifti file containing the reoriented image in reference image space.", argstr="--outputVolume %s")
    numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
# Output traits produced by the tool.
class gtractCopyImageOrientationOutputSpec(TraitedSpec):
    outputVolume = File(desc="Required: name of output NRRD or Nifti file containing the reoriented image in reference image space.", exists=True)
# nipype interface for the executable named in `_cmd`.
class gtractCopyImageOrientation(SEMLikeCommandLine):
    """title: Copy Image Orientation
category: Diffusion.GTRACT
description: This program will copy the orientation from the reference image into the moving image. Currently, the registration process requires that the diffusion weighted images and the anatomical images have the same image orientation (i.e. Axial, Coronal, Sagittal). It is suggested that you copy the image orientation from the diffusion weighted images and apply this to the anatomical image. This image can be subsequently removed after the registration step is complete. We anticipate that this limitation will be removed in future versions of the registration programs.
version: 4.0.0
documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT
license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt
contributor: This tool was developed by Vincent Magnotta and Greg Harris.
acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1
"""
    input_spec = gtractCopyImageOrientationInputSpec
    output_spec = gtractCopyImageOrientationOutputSpec
    _cmd = " gtractCopyImageOrientation "
    _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'}
    _redirect_x = False
# Command-line input traits for the 'gtractCreateGuideFiber' GTRACT tool;
# each trait's ``argstr`` is the flag template for the CLI.
# NOTE(review): autogenerated module (see header) -- do not edit by hand.
class gtractCreateGuideFiberInputSpec(CommandLineInputSpec):
    inputFiber = File(desc="Required: input fiber tract file name", exists=True, argstr="--inputFiber %s")
    numberOfPoints = traits.Int(desc="Number of points in output guide fiber", argstr="--numberOfPoints %d")
    outputFiber = traits.Either(traits.Bool, File(), hash_files=False, desc="Required: output guide fiber file name", argstr="--outputFiber %s")
    writeXMLPolyDataFile = traits.Bool(desc="Flag to make use of XML files when reading and writing vtkPolyData.", argstr="--writeXMLPolyDataFile ")
    numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
# Output traits produced by the tool.
class gtractCreateGuideFiberOutputSpec(TraitedSpec):
    outputFiber = File(desc="Required: output guide fiber file name", exists=True)
# nipype interface for the executable named in `_cmd`.
class gtractCreateGuideFiber(SEMLikeCommandLine):
    """title: Create Guide Fiber
category: Diffusion.GTRACT
description: This program will create a guide fiber by averaging fibers from a previously generated tract.
version: 4.0.0
documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT
license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt
contributor: This tool was developed by Vincent Magnotta and Greg Harris.
acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1
"""
    input_spec = gtractCreateGuideFiberInputSpec
    output_spec = gtractCreateGuideFiberOutputSpec
    _cmd = " gtractCreateGuideFiber "
    _outputs_filenames = {'outputFiber': 'outputFiber.vtk'}
    _redirect_x = False
# Command-line input traits for the 'gtractAnisotropyMap' GTRACT tool; each
# trait's ``argstr`` is the flag template for the CLI.
# NOTE(review): autogenerated module (see header) -- do not edit by hand.
class gtractAnisotropyMapInputSpec(CommandLineInputSpec):
    inputTensorVolume = File(desc="Required: input file containing the diffusion tensor image", exists=True, argstr="--inputTensorVolume %s")
    anisotropyType = traits.Enum("ADC", "FA", "RA", "VR", "AD", "RD", "LI", desc="Anisotropy Mapping Type: ADC, FA, RA, VR, AD, RD, LI", argstr="--anisotropyType %s")
    outputVolume = traits.Either(traits.Bool, File(), hash_files=False, desc="Required: name of output NRRD file containing the selected kind of anisotropy scalar.", argstr="--outputVolume %s")
    numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
# Output traits produced by the tool.
class gtractAnisotropyMapOutputSpec(TraitedSpec):
    outputVolume = File(desc="Required: name of output NRRD file containing the selected kind of anisotropy scalar.", exists=True)
# nipype interface for the executable named in `_cmd`.
class gtractAnisotropyMap(SEMLikeCommandLine):
    """title: Anisotropy Map
category: Diffusion.GTRACT
description: This program will generate a scalar map of anisotropy, given a tensor representation. Anisotropy images are used for fiber tracking, but the anisotropy scalars are not defined along the path. Instead, the tensor representation is included as point data allowing all of these metrics to be computed using only the fiber tract point data. The images can be saved in any ITK supported format, but it is suggested that you use an image format that supports the definition of the image origin. This includes NRRD, NifTI, and Meta formats. These images can also be used for scalar analysis including regional anisotropy measures or VBM style analysis.
version: 4.0.0
documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT
license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt
contributor: This tool was developed by Vincent Magnotta and Greg Harris.
acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1
"""
    input_spec = gtractAnisotropyMapInputSpec
    output_spec = gtractAnisotropyMapOutputSpec
    _cmd = " gtractAnisotropyMap "
    _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'}
    _redirect_x = False
# Command-line input traits for the 'gtractClipAnisotropy' GTRACT tool; each
# trait's ``argstr`` is the flag template for the CLI.
# NOTE(review): autogenerated module (see header) -- do not edit by hand.
class gtractClipAnisotropyInputSpec(CommandLineInputSpec):
    inputVolume = File(desc="Required: input image file name", exists=True, argstr="--inputVolume %s")
    outputVolume = traits.Either(traits.Bool, File(), hash_files=False, desc="Required: name of output NRRD file containing the clipped anisotropy image", argstr="--outputVolume %s")
    clipFirstSlice = traits.Bool(desc="Clip the first slice of the anisotropy image", argstr="--clipFirstSlice ")
    clipLastSlice = traits.Bool(desc="Clip the last slice of the anisotropy image", argstr="--clipLastSlice ")
    numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
# Output traits produced by the tool.
class gtractClipAnisotropyOutputSpec(TraitedSpec):
    outputVolume = File(desc="Required: name of output NRRD file containing the clipped anisotropy image", exists=True)
# nipype interface for the executable named in `_cmd`.
class gtractClipAnisotropy(SEMLikeCommandLine):
    """title: Clip Anisotropy
category: Diffusion.GTRACT
description: This program will zero the first and/or last slice of an anisotropy image, creating a clipped anisotropy image.
version: 4.0.0
documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT
license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt
contributor: This tool was developed by Vincent Magnotta and Greg Harris.
acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1
"""
    input_spec = gtractClipAnisotropyInputSpec
    output_spec = gtractClipAnisotropyOutputSpec
    _cmd = " gtractClipAnisotropy "
    _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'}
    _redirect_x = False
# Command-line input traits for the 'gtractResampleB0' GTRACT tool; each
# trait's ``argstr`` is the flag template for the CLI.
# NOTE(review): autogenerated module (see header) -- do not edit by hand.
class gtractResampleB0InputSpec(CommandLineInputSpec):
    inputVolume = File(desc="Required: input file containing the 4D image", exists=True, argstr="--inputVolume %s")
    inputAnatomicalVolume = File(desc="Required: input file containing the anatomical image defining the origin, spacing and size of the resampled image (template)", exists=True, argstr="--inputAnatomicalVolume %s")
    inputTransform = File(desc="Required: input Rigid OR Bspline transform file name", exists=True, argstr="--inputTransform %s")
    vectorIndex = traits.Int(desc="Index in the diffusion weighted image set for the B0 image", argstr="--vectorIndex %d")
    transformType = traits.Enum("Rigid", "B-Spline", desc="Transform type: Rigid, B-Spline", argstr="--transformType %s")
    outputVolume = traits.Either(traits.Bool, File(), hash_files=False, desc="Required: name of output NRRD file containing the resampled input image.", argstr="--outputVolume %s")
    numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
# Output traits produced by the tool.
class gtractResampleB0OutputSpec(TraitedSpec):
    outputVolume = File(desc="Required: name of output NRRD file containing the resampled input image.", exists=True)
# nipype interface for the executable named in `_cmd`.
class gtractResampleB0(SEMLikeCommandLine):
    """title: Resample B0
category: Diffusion.GTRACT
description: This program will resample a signed short image using either a Rigid or B-Spline transform. The user must specify a template image that will be used to define the origin, orientation, spacing, and size of the resampled image.
version: 4.0.0
documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT
license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt
contributor: This tool was developed by Vincent Magnotta and Greg Harris.
acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1
"""
    input_spec = gtractResampleB0InputSpec
    output_spec = gtractResampleB0OutputSpec
    _cmd = " gtractResampleB0 "
    _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'}
    _redirect_x = False
# Command-line input traits for the 'gtractInvertRigidTransform' GTRACT
# tool; each trait's ``argstr`` is the flag template for the CLI.
# NOTE(review): autogenerated module (see header) -- do not edit by hand.
class gtractInvertRigidTransformInputSpec(CommandLineInputSpec):
    inputTransform = File(desc="Required: input rigid transform file name", exists=True, argstr="--inputTransform %s")
    outputTransform = traits.Either(traits.Bool, File(), hash_files=False, desc="Required: output transform file name", argstr="--outputTransform %s")
    numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
# Output traits produced by the tool.
class gtractInvertRigidTransformOutputSpec(TraitedSpec):
    outputTransform = File(desc="Required: output transform file name", exists=True)
# nipype interface for the executable named in `_cmd`.
class gtractInvertRigidTransform(SEMLikeCommandLine):
    """title: Rigid Transform Inversion
category: Diffusion.GTRACT
description: This program will invert a Rigid transform.
version: 4.0.0
documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT
license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt
contributor: This tool was developed by Vincent Magnotta and Greg Harris.
acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1
"""
    input_spec = gtractInvertRigidTransformInputSpec
    output_spec = gtractInvertRigidTransformOutputSpec
    _cmd = " gtractInvertRigidTransform "
    _outputs_filenames = {'outputTransform': 'outputTransform.h5'}
    _redirect_x = False
class gtractImageConformityInputSpec(CommandLineInputSpec):
inputVolume = File(desc="Required: input file containing the signed short image to reorient without resampling.", exists=True, argstr="--inputVolume %s")
inputReferenceVolume = File(desc="Required: input file containing the standard image to clone the characteristics of.", exists=True, argstr="--inputReferenceVolume %s")
outputVolume = traits.Either(traits.Bool, File(), hash_files=False, desc="Required: name of output Nrrd or Nifti file containing the reoriented image in reference image space.", argstr="--outputVolume %s")
numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
class gtractImageConformityOutputSpec(TraitedSpec):
outputVolume = File(desc="Required: name of output Nrrd or Nifti file containing the reoriented image in reference image space.", exists=True)
class gtractImageConformity(SEMLikeCommandLine):
    """title: Image Conformity

    category: Diffusion.GTRACT

    description: This program will straighten out the Direction and Origin to match the Reference Image.

    version: 4.0.0

    documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT

    license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt

    contributor: This tool was developed by Vincent Magnotta and Greg Harris.

    acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1
    """

    # Trait specifications wired to the underlying command-line executable.
    input_spec = gtractImageConformityInputSpec
    output_spec = gtractImageConformityOutputSpec
    # Executable name (surrounding spaces are emitted by the SEM code generator).
    _cmd = " gtractImageConformity "
    # Default filename used when the output trait is requested as a Bool.
    _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'}
    # No X server / virtual display is needed for this tool.
    _redirect_x = False
class compareTractInclusionInputSpec(CommandLineInputSpec):
    """Input spec for :class:`compareTractInclusion`; each trait maps to one CLI flag via ``argstr``."""
    testFiber = File(desc="Required: test fiber tract file name", exists=True, argstr="--testFiber %s")
    standardFiber = File(desc="Required: standard fiber tract file name", exists=True, argstr="--standardFiber %s")
    closeness = traits.Float(desc="Closeness of every test fiber to some fiber in the standard tract, computed as a sum of squares of spatial differences of standard points", argstr="--closeness %f")
    numberOfPoints = traits.Int(desc="Number of points in comparison fiber pairs", argstr="--numberOfPoints %d")
    testForBijection = traits.Bool(desc="Flag to apply the closeness criterion both ways", argstr="--testForBijection ")
    testForFiberCardinality = traits.Bool(desc="Flag to require the same number of fibers in both tracts", argstr="--testForFiberCardinality ")
    writeXMLPolyDataFile = traits.Bool(desc="Flag to make use of XML files when reading and writing vtkPolyData.", argstr="--writeXMLPolyDataFile ")
    numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
class compareTractInclusionOutputSpec(TraitedSpec):
    """Output spec for :class:`compareTractInclusion` -- empty because the tool reports its result via exit status only."""
    pass
class compareTractInclusion(SEMLikeCommandLine):
    """title: Compare Tracts

    category: Diffusion.GTRACT

    description: This program will halt with a status code indicating whether a test tract is nearly enough included in a standard tract in the sense that every fiber in the test tract has a low enough sum of squares distance to some fiber in the standard tract modulo spline resampling of every fiber to a fixed number of points.

    version: 4.0.0

    documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT

    license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt

    contributor: This tool was developed by Vincent Magnotta and Greg Harris.

    acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1
    """

    # Trait specifications wired to the underlying command-line executable.
    input_spec = compareTractInclusionInputSpec
    output_spec = compareTractInclusionOutputSpec
    # Executable name (surrounding spaces are emitted by the SEM code generator).
    _cmd = " compareTractInclusion "
    # No file outputs -- the result is conveyed through the exit status.
    _outputs_filenames = {}
    # No X server / virtual display is needed for this tool.
    _redirect_x = False
class gtractFastMarchingTrackingInputSpec(CommandLineInputSpec):
    """Input spec for :class:`gtractFastMarchingTracking`; each trait maps to one CLI flag via ``argstr``."""
    inputTensorVolume = File(desc="Required: input tensor image file name", exists=True, argstr="--inputTensorVolume %s")
    inputAnisotropyVolume = File(desc="Required: input anisotropy image file name", exists=True, argstr="--inputAnisotropyVolume %s")
    inputCostVolume = File(desc="Required: input vcl_cost image file name", exists=True, argstr="--inputCostVolume %s")
    inputStartingSeedsLabelMapVolume = File(desc="Required: input starting seeds LabelMap image file name", exists=True, argstr="--inputStartingSeedsLabelMapVolume %s")
    startingSeedsLabel = traits.Int(desc="Label value for Starting Seeds", argstr="--startingSeedsLabel %d")
    outputTract = traits.Either(traits.Bool, File(), hash_files=False, desc="Required: name of output vtkPolydata file containing tract lines and the point data collected along them.", argstr="--outputTract %s")
    writeXMLPolyDataFile = traits.Bool(desc="Flag to make use of the XML format for vtkPolyData fiber tracts.", argstr="--writeXMLPolyDataFile ")
    numberOfIterations = traits.Int(desc="Number of iterations used for the optimization", argstr="--numberOfIterations %d")
    seedThreshold = traits.Float(desc="Anisotropy threshold used for seed selection", argstr="--seedThreshold %f")
    trackingThreshold = traits.Float(desc="Anisotropy threshold used for fiber tracking", argstr="--trackingThreshold %f")
    costStepSize = traits.Float(desc="Cost image sub-voxel sampling", argstr="--costStepSize %f")
    maximumStepSize = traits.Float(desc="Maximum step size to move when tracking", argstr="--maximumStepSize %f")
    minimumStepSize = traits.Float(desc="Minimum step size to move when tracking", argstr="--minimumStepSize %f")
    numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
class gtractFastMarchingTrackingOutputSpec(TraitedSpec):
    """Output spec for :class:`gtractFastMarchingTracking` (auto-generated from the tool's SEM XML)."""
    outputTract = File(desc="Required: name of output vtkPolydata file containing tract lines and the point data collected along them.", exists=True)
class gtractFastMarchingTracking(SEMLikeCommandLine):
    """title: Fast Marching Tracking

    category: Diffusion.GTRACT

    description: This program will use a fast marching fiber tracking algorithm to identify fiber tracts from a tensor image. This program is the second portion of the algorithm. The user must first run gtractCostFastMarching to generate the vcl_cost image. The second step of the algorithm implemented here is a gradient descent solution from the defined ending region back to the seed points specified in gtractCostFastMarching. This algorithm is roughly based on the work by G. Parker et al. from IEEE Transactions On Medical Imaging, 21(5): 505-512, 2002. An additional feature of including anisotropy into the vcl_cost function calculation is included.

    version: 4.0.0

    documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT

    license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt

    contributor: This tool was developed by Vincent Magnotta and Greg Harris. The original code here was developed by Daisy Espino.

    acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1
    """

    # Trait specifications wired to the underlying command-line executable.
    input_spec = gtractFastMarchingTrackingInputSpec
    output_spec = gtractFastMarchingTrackingOutputSpec
    # Executable name (surrounding spaces are emitted by the SEM code generator).
    _cmd = " gtractFastMarchingTracking "
    # Default filename used when the output trait is requested as a Bool.
    _outputs_filenames = {'outputTract': 'outputTract.vtk'}
    # No X server / virtual display is needed for this tool.
    _redirect_x = False
class gtractInvertDisplacementFieldInputSpec(CommandLineInputSpec):
    """Input spec for :class:`gtractInvertDisplacementField`; each trait maps to one CLI flag via ``argstr``."""
    baseImage = File(desc="Required: base image used to define the size of the inverse field", exists=True, argstr="--baseImage %s")
    deformationImage = File(desc="Required: Displacement field image", exists=True, argstr="--deformationImage %s")
    outputVolume = traits.Either(traits.Bool, File(), hash_files=False, desc="Required: Output deformation field", argstr="--outputVolume %s")
    subsamplingFactor = traits.Int(desc="Subsampling factor for the deformation field", argstr="--subsamplingFactor %d")
    numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
class gtractInvertDisplacementFieldOutputSpec(TraitedSpec):
    """Output spec for :class:`gtractInvertDisplacementField` (auto-generated from the tool's SEM XML)."""
    outputVolume = File(desc="Required: Output deformation field", exists=True)
class gtractInvertDisplacementField(SEMLikeCommandLine):
    """title: Invert Displacement Field

    category: Diffusion.GTRACT

    description: This program will invert a deformation field. The size of the deformation field is defined by an example image provided by the user

    version: 4.0.0

    documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT

    license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt

    contributor: This tool was developed by Vincent Magnotta.

    acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1
    """

    # Trait specifications wired to the underlying command-line executable.
    input_spec = gtractInvertDisplacementFieldInputSpec
    output_spec = gtractInvertDisplacementFieldOutputSpec
    # Executable name (surrounding spaces are emitted by the SEM code generator).
    _cmd = " gtractInvertDisplacementField "
    # Default filename used when the output trait is requested as a Bool.
    _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'}
    # No X server / virtual display is needed for this tool.
    _redirect_x = False
class gtractCoRegAnatomyInputSpec(CommandLineInputSpec):
    """Input spec for :class:`gtractCoRegAnatomy`; each trait maps to one CLI flag via ``argstr``.

    NOTE(review): the useMomentsAlign/useGeometryAlign/useCenterOfHeadAlign flags are
    mutually exclusive per their descriptions, but no traits-level exclusion is enforced
    here -- the underlying tool is expected to validate the combination.
    """
    inputVolume = File(desc="Required: input vector image file name. It is recommended that the input volume is the skull stripped baseline image of the DWI scan.", exists=True, argstr="--inputVolume %s")
    inputAnatomicalVolume = File(desc="Required: input anatomical image file name. It is recommended that that the input anatomical image has been skull stripped and has the same orientation as the DWI scan.", exists=True, argstr="--inputAnatomicalVolume %s")
    vectorIndex = traits.Int(desc="Vector image index in the moving image (within the DWI) to be used for registration.", argstr="--vectorIndex %d")
    inputRigidTransform = File(desc="Required (for B-Spline type co-registration): input rigid transform file name. Used as a starting point for the anatomical B-Spline registration.", exists=True, argstr="--inputRigidTransform %s")
    outputTransformName = traits.Either(traits.Bool, File(), hash_files=False, desc="Required: filename for the fit transform.", argstr="--outputTransformName %s")
    transformType = traits.Enum("Rigid", "Bspline", desc="Transform Type: Rigid|Bspline", argstr="--transformType %s")
    numberOfIterations = traits.Int(desc="Number of iterations in the selected 3D fit", argstr="--numberOfIterations %d")
    gridSize = InputMultiPath(traits.Int, desc="Number of grid subdivisions in all 3 directions", sep=",", argstr="--gridSize %s")
    borderSize = traits.Int(desc="Size of border", argstr="--borderSize %d")
    numberOfHistogramBins = traits.Int(desc="Number of histogram bins", argstr="--numberOfHistogramBins %d")
    spatialScale = traits.Int(desc="Scales the number of voxels in the image by this value to specify the number of voxels used in the registration", argstr="--spatialScale %d")
    convergence = traits.Float(desc="Convergence Factor", argstr="--convergence %f")
    gradientTolerance = traits.Float(desc="Gradient Tolerance", argstr="--gradientTolerance %f")
    maxBSplineDisplacement = traits.Float(
        desc=" Sets the maximum allowed displacements in image physical coordinates for BSpline control grid along each axis. A value of 0.0 indicates that the problem should be unbounded. NOTE: This only constrains the BSpline portion, and does not limit the displacement from the associated bulk transform. This can lead to a substantial reduction in computation time in the BSpline optimizer., ", argstr="--maxBSplineDisplacement %f")
    maximumStepSize = traits.Float(desc="Maximum permitted step size to move in the selected 3D fit", argstr="--maximumStepSize %f")
    minimumStepSize = traits.Float(desc="Minimum required step size to move in the selected 3D fit without converging -- decrease this to make the fit more exacting", argstr="--minimumStepSize %f")
    translationScale = traits.Float(desc="How much to scale up changes in position compared to unit rotational changes in radians -- decrease this to put more translation in the fit", argstr="--translationScale %f")
    relaxationFactor = traits.Float(desc="Fraction of gradient from Jacobian to attempt to move in the selected 3D fit", argstr="--relaxationFactor %f")
    numberOfSamples = traits.Int(
        desc="The number of voxels sampled for mutual information computation. Increase this for a slower, more careful fit. NOTE that it is suggested to use samplingPercentage instead of this option. However, if set, it overwrites the samplingPercentage option. ", argstr="--numberOfSamples %d")
    samplingPercentage = traits.Float(
        desc="This is a number in (0.0,1.0] interval that shows the percentage of the input fixed image voxels that are sampled for mutual information computation. Increase this for a slower, more careful fit. You can also limit the sampling focus with ROI masks and ROIAUTO mask generation. The default is to use approximately 5% of voxels (for backwards compatibility 5% ~= 500000/(256*256*256)). Typical values range from 1% for low detail images to 20% for high detail images.", argstr="--samplingPercentage %f")
    useMomentsAlign = traits.Bool(
        desc="MomentsAlign assumes that the center of mass of the images represent similar structures. Perform a MomentsAlign registration as part of the sequential registration steps. This option MUST come first, and CAN NOT be used with either CenterOfHeadLAlign, GeometryAlign, or initialTransform file. This family of options superceeds the use of transformType if any of them are set.", argstr="--useMomentsAlign ")
    useGeometryAlign = traits.Bool(
        desc="GeometryAlign on assumes that the center of the voxel lattice of the images represent similar structures. Perform a GeometryCenterAlign registration as part of the sequential registration steps. This option MUST come first, and CAN NOT be used with either MomentsAlign, CenterOfHeadAlign, or initialTransform file. This family of options superceeds the use of transformType if any of them are set.", argstr="--useGeometryAlign ")
    useCenterOfHeadAlign = traits.Bool(
        desc="CenterOfHeadAlign attempts to find a hemisphere full of foreground voxels from the superior direction as an estimate of where the center of a head shape would be to drive a center of mass estimate. Perform a CenterOfHeadAlign registration as part of the sequential registration steps. This option MUST come first, and CAN NOT be used with either MomentsAlign, GeometryAlign, or initialTransform file. This family of options superceeds the use of transformType if any of them are set.", argstr="--useCenterOfHeadAlign ")
    numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
class gtractCoRegAnatomyOutputSpec(TraitedSpec):
    """Output spec for :class:`gtractCoRegAnatomy` (auto-generated from the tool's SEM XML)."""
    outputTransformName = File(desc="Required: filename for the fit transform.", exists=True)
class gtractCoRegAnatomy(SEMLikeCommandLine):
    """title: Coregister B0 to Anatomy B-Spline

    category: Diffusion.GTRACT

    description: This program will register a Nrrd diffusion weighted 4D vector image to a fixed anatomical image. Two registration methods are supported for alignment with anatomical images: Rigid and B-Spline. The rigid registration performs a rigid body registration with the anatomical images and should be done as well to initialize the B-Spline transform. The B-Spline transform is the deformable transform, where the user can control the amount of deformation based on the number of control points as well as the maximum distance that these points can move. The B-Spline registration places a low dimensional grid in the image, which is deformed. This allows for some susceptibility related distortions to be removed from the diffusion weighted images. In general the amount of motion in the slice selection and read-out directions direction should be kept low. The distortion is in the phase encoding direction in the images. It is recommended that skull stripped (i.e. image containing only brain with skull removed) images should be used for image co-registration with the B-Spline transform.

    version: 4.0.0

    documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT

    license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt

    contributor: This tool was developed by Vincent Magnotta and Greg Harris.

    acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1
    """

    # Trait specifications wired to the underlying command-line executable.
    input_spec = gtractCoRegAnatomyInputSpec
    output_spec = gtractCoRegAnatomyOutputSpec
    # Executable name (surrounding spaces are emitted by the SEM code generator).
    _cmd = " gtractCoRegAnatomy "
    # Default filename used when the output trait is requested as a Bool.
    _outputs_filenames = {'outputTransformName': 'outputTransformName.h5'}
    # No X server / virtual display is needed for this tool.
    _redirect_x = False
class gtractResampleDWIInPlaceInputSpec(CommandLineInputSpec):
    """Input spec for :class:`gtractResampleDWIInPlace`; each trait maps to one CLI flag via ``argstr``."""
    inputVolume = File(desc="Required: input image is a 4D NRRD image.", exists=True, argstr="--inputVolume %s")
    referenceVolume = File(desc="If provided, resample to the final space of the referenceVolume 3D data set.", exists=True, argstr="--referenceVolume %s")
    outputResampledB0 = traits.Either(traits.Bool, File(), hash_files=False, desc="Convenience function for extracting the first index location (assumed to be the B0)", argstr="--outputResampledB0 %s")
    inputTransform = File(desc="Required: transform file derived from rigid registration of b0 image to reference structural image.", exists=True, argstr="--inputTransform %s")
    warpDWITransform = File(desc="Optional: transform file to warp gradient volumes.", exists=True, argstr="--warpDWITransform %s")
    debugLevel = traits.Int(desc="Display debug messages, and produce debug intermediate results. 0=OFF, 1=Minimal, 10=Maximum debugging.", argstr="--debugLevel %d")
    imageOutputSize = InputMultiPath(traits.Int, desc="The voxel lattice for the output image, padding is added if necessary. NOTE: if 0,0,0, then the inputVolume size is used.", sep=",", argstr="--imageOutputSize %s")
    outputVolume = traits.Either(traits.Bool, File(), hash_files=False, desc="Required: output image (NRRD file) that has been rigidly transformed into the space of the structural image and padded if image padding was changed from 0,0,0 default.", argstr="--outputVolume %s")
    numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
class gtractResampleDWIInPlaceOutputSpec(TraitedSpec):
    """Output spec for :class:`gtractResampleDWIInPlace` (auto-generated from the tool's SEM XML)."""
    outputResampledB0 = File(desc="Convenience function for extracting the first index location (assumed to be the B0)", exists=True)
    outputVolume = File(desc="Required: output image (NRRD file) that has been rigidly transformed into the space of the structural image and padded if image padding was changed from 0,0,0 default.", exists=True)
class gtractResampleDWIInPlace(SEMLikeCommandLine):
    """title: Resample DWI In Place

    category: Diffusion.GTRACT

    description: Resamples DWI image to structural image.

    version: 4.0.0

    documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT

    license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt

    contributor: This tool was developed by Vincent Magnotta, Greg Harris, Hans Johnson, and Joy Matsui.

    acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1
    """

    # Trait specifications wired to the underlying command-line executable.
    input_spec = gtractResampleDWIInPlaceInputSpec
    output_spec = gtractResampleDWIInPlaceOutputSpec
    # Executable name (surrounding spaces are emitted by the SEM code generator).
    _cmd = " gtractResampleDWIInPlace "
    # Default filenames used when the output traits are requested as Bools.
    _outputs_filenames = {'outputResampledB0': 'outputResampledB0.nii', 'outputVolume': 'outputVolume.nii'}
    # No X server / virtual display is needed for this tool.
    _redirect_x = False
class gtractCostFastMarchingInputSpec(CommandLineInputSpec):
    """Input spec for :class:`gtractCostFastMarching`; each trait maps to one CLI flag via ``argstr``."""
    inputTensorVolume = File(desc="Required: input tensor image file name", exists=True, argstr="--inputTensorVolume %s")
    inputAnisotropyVolume = File(desc="Required: input anisotropy image file name", exists=True, argstr="--inputAnisotropyVolume %s")
    inputStartingSeedsLabelMapVolume = File(desc="Required: input starting seeds LabelMap image file name", exists=True, argstr="--inputStartingSeedsLabelMapVolume %s")
    startingSeedsLabel = traits.Int(desc="Label value for Starting Seeds", argstr="--startingSeedsLabel %d")
    outputCostVolume = traits.Either(traits.Bool, File(), hash_files=False, desc="Output vcl_cost image", argstr="--outputCostVolume %s")
    outputSpeedVolume = traits.Either(traits.Bool, File(), hash_files=False, desc="Output speed image", argstr="--outputSpeedVolume %s")
    anisotropyWeight = traits.Float(desc="Anisotropy weight used for vcl_cost function calculations", argstr="--anisotropyWeight %f")
    stoppingValue = traits.Float(desc="Terminiating value for vcl_cost function estimation", argstr="--stoppingValue %f")
    seedThreshold = traits.Float(desc="Anisotropy threshold used for seed selection", argstr="--seedThreshold %f")
    numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
class gtractCostFastMarchingOutputSpec(TraitedSpec):
    """Output spec for :class:`gtractCostFastMarching` (auto-generated from the tool's SEM XML)."""
    outputCostVolume = File(desc="Output vcl_cost image", exists=True)
    outputSpeedVolume = File(desc="Output speed image", exists=True)
class gtractCostFastMarching(SEMLikeCommandLine):
    """title: Cost Fast Marching

    category: Diffusion.GTRACT

    description: This program will use a fast marching fiber tracking algorithm to identify fiber tracts from a tensor image. This program is the first portion of the algorithm. The user must first run gtractFastMarchingTracking to generate the actual fiber tracts. This algorithm is roughly based on the work by G. Parker et al. from IEEE Transactions On Medical Imaging, 21(5): 505-512, 2002. An additional feature of including anisotropy into the vcl_cost function calculation is included.

    version: 4.0.0

    documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT

    license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt

    contributor: This tool was developed by Vincent Magnotta and Greg Harris. The original code here was developed by Daisy Espino.

    acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1
    """

    # Trait specifications wired to the underlying command-line executable.
    input_spec = gtractCostFastMarchingInputSpec
    output_spec = gtractCostFastMarchingOutputSpec
    # Executable name (surrounding spaces are emitted by the SEM code generator).
    _cmd = " gtractCostFastMarching "
    # Default filenames used when the output traits are requested as Bools.
    _outputs_filenames = {'outputCostVolume': 'outputCostVolume.nrrd', 'outputSpeedVolume': 'outputSpeedVolume.nrrd'}
    # No X server / virtual display is needed for this tool.
    _redirect_x = False
class gtractFiberTrackingInputSpec(CommandLineInputSpec):
    """Input spec for :class:`gtractFiberTracking`; each trait maps to one CLI flag via ``argstr``.

    Which traits are required depends on the selected ``trackingMethod``
    (Guided | Free | Streamline | GraphSearch) -- see the individual ``desc`` strings.
    """
    inputTensorVolume = File(desc="Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): input tensor image file name", exists=True, argstr="--inputTensorVolume %s")
    inputAnisotropyVolume = File(desc="Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): input anisotropy image file name", exists=True, argstr="--inputAnisotropyVolume %s")
    inputStartingSeedsLabelMapVolume = File(desc="Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): input starting seeds LabelMap image file name", exists=True, argstr="--inputStartingSeedsLabelMapVolume %s")
    startingSeedsLabel = traits.Int(desc="Label value for Starting Seeds (required if Label number used to create seed point in Slicer was not 1)", argstr="--startingSeedsLabel %d")
    inputEndingSeedsLabelMapVolume = File(desc="Required (for Streamline, GraphSearch, and Guided fiber tracking methods): input ending seeds LabelMap image file name", exists=True, argstr="--inputEndingSeedsLabelMapVolume %s")
    endingSeedsLabel = traits.Int(desc="Label value for Ending Seeds (required if Label number used to create seed point in Slicer was not 1)", argstr="--endingSeedsLabel %d")
    inputTract = File(desc="Required (for Guided fiber tracking method): guide fiber in vtkPolydata file containing one tract line.", exists=True, argstr="--inputTract %s")
    outputTract = traits.Either(traits.Bool, File(), hash_files=False, desc="Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): name of output vtkPolydata file containing tract lines and the point data collected along them.", argstr="--outputTract %s")
    writeXMLPolyDataFile = traits.Bool(desc="Flag to make use of the XML format for vtkPolyData fiber tracts.", argstr="--writeXMLPolyDataFile ")
    trackingMethod = traits.Enum("Guided", "Free", "Streamline", "GraphSearch", desc="Fiber tracking Filter Type: Guided|Free|Streamline|GraphSearch", argstr="--trackingMethod %s")
    guidedCurvatureThreshold = traits.Float(desc="Guided Curvature Threshold (Degrees)", argstr="--guidedCurvatureThreshold %f")
    maximumGuideDistance = traits.Float(desc="Maximum distance for using the guide fiber direction", argstr="--maximumGuideDistance %f")
    seedThreshold = traits.Float(desc="Anisotropy threshold for seed selection (recommended for Free fiber tracking)", argstr="--seedThreshold %f")
    trackingThreshold = traits.Float(desc="Anisotropy threshold for fiber tracking (anisotropy values of the next point along the path)", argstr="--trackingThreshold %f")
    curvatureThreshold = traits.Float(desc="Curvature threshold in degrees (recommended for Free fiber tracking)", argstr="--curvatureThreshold %f")
    branchingThreshold = traits.Float(desc="Anisotropy Branching threshold (recommended for GraphSearch fiber tracking method)", argstr="--branchingThreshold %f")
    maximumBranchPoints = traits.Int(desc="Maximum branch points (recommended for GraphSearch fiber tracking method)", argstr="--maximumBranchPoints %d")
    useRandomWalk = traits.Bool(desc="Flag to use random walk.", argstr="--useRandomWalk ")
    randomSeed = traits.Int(desc="Random number generator seed", argstr="--randomSeed %d")
    branchingAngle = traits.Float(desc="Branching angle in degrees (recommended for GraphSearch fiber tracking method)", argstr="--branchingAngle %f")
    minimumLength = traits.Float(desc="Minimum fiber length. Helpful for filtering invalid tracts.", argstr="--minimumLength %f")
    maximumLength = traits.Float(desc="Maximum fiber length (voxels)", argstr="--maximumLength %f")
    stepSize = traits.Float(desc="Fiber tracking step size", argstr="--stepSize %f")
    useLoopDetection = traits.Bool(desc="Flag to make use of loop detection.", argstr="--useLoopDetection ")
    useTend = traits.Bool(desc="Flag to make use of Tend F and Tend G parameters.", argstr="--useTend ")
    tendF = traits.Float(desc="Tend F parameter", argstr="--tendF %f")
    tendG = traits.Float(desc="Tend G parameter", argstr="--tendG %f")
    numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
class gtractFiberTrackingOutputSpec(TraitedSpec):
    """Output spec for :class:`gtractFiberTracking` (auto-generated from the tool's SEM XML)."""
    outputTract = File(desc="Required (for Free, Streamline, GraphSearch, and Guided fiber tracking methods): name of output vtkPolydata file containing tract lines and the point data collected along them.", exists=True)
class gtractFiberTracking(SEMLikeCommandLine):
    """title: Fiber Tracking

    category: Diffusion.GTRACT

    description: This program implements four fiber tracking methods (Free, Streamline, GraphSearch, Guided). The output of the fiber tracking is vtkPolyData (i.e. Polylines) that can be loaded into Slicer3 for visualization. The poly data can be saved in either old VTK format files (.vtk) or in the new VTK XML format (.xml). The polylines contain point data that defines the Tensor at each point along the fiber tract. This can then be used to rendered as glyphs in Slicer3 and can be used to define several scalar measures without referencing back to the anisotropy images. (1) Free tracking is a basic streamlines algorithm. This is a direct implementation of the method original proposed by Basser et al. The tracking follows the primary eigenvector. The tracking begins with seed points in the starting region. Only those voxels above the specified anisotropy threshold in the starting region are used as seed points. Tracking terminates either as a result of maximum fiber length, low anisotropy, or large curvature. This is a great way to explore your data. (2) The streamlines algorithm is a direct implementation of the method originally proposed by Basser et al. The tracking follows the primary eigenvector. The tracking begins with seed points in the starting region. Only those voxels above the specified anisotropy threshold in the starting region are used as seed points. Tracking terminates either by reaching the ending region or reaching some stopping criteria. Stopping criteria are specified using the following parameters: tracking threshold, curvature threshold, and max length. Only paths terminating in the ending region are kept in this method. The TEND algorithm proposed by Lazar et al. (Human Brain Mapping 18:306-321, 2003) has been instrumented. This can be enabled using the --useTend option while performing Streamlines tracking. This utilizes the entire diffusion tensor to deflect the incoming vector instead of simply following the primary eigenvector.
    The TEND parameters are set using the --tendF and --tendG options. (3) Graph Search tracking is the first step in the full GTRACT algorithm developed by Cheng et al. (NeuroImage 31(3): 1075-1085, 2006) for finding the tracks in a tensor image. This method was developed to generate fibers in a Tensor representation where crossing fibers occur. The graph search algorithm follows the primary eigenvector in non-ambiguous regions and utilizes branching and a graph search algorithm in ambiguous regions. Ambiguous tracking regions are defined based on two criteria: Branching Al Threshold (anisotropy values below this value and above the tracking threshold) and Curvature Major Eigen (angles of the primary eigenvector direction and the current tracking direction). In regions that meet this criteria, two or three tracking paths are considered. The first is the standard primary eigenvector direction. The second is the secondary eigenvector direction. This is based on the assumption that these regions may be prolate regions. If the Random Walk option is selected then a third direction is also considered. This direction is defined by a cone pointing from the current position to the centroid of the ending region. The interior angle of the cone is specified by the user with the Branch/Guide Angle parameter. A vector contained inside of the cone is selected at random and used as the third direction. This method can also utilize the TEND option where the primary tracking direction is that specified by the TEND method instead of the primary eigenvector. The parameter '--maximumBranchPoints' allows the tracking to have this number of branches being considered at a time. If this number of branch points is exceeded at any time, then the algorithm will revert back to a streamline algorithm until the number of branches is reduced. This allows the user to constrain the computational complexity of the algorithm. (4) The second phase of the GTRACT algorithm is Guided Tracking.
    This method incorporates anatomical information about the track orientation using an initial guess of the fiber track. In the originally proposed GTRACT method, this would be created from the fibers resulting from the Graph Search tracking. However, in practice this can be created using any method and could be defined manually. To create the guide fiber the program gtractCreateGuideFiber can be used. This program will load a fiber tract that has been generated and create a centerline representation of the fiber tract (i.e. a single fiber). In this method, the fiber tracking follows the primary eigenvector direction unless it deviates from the guide fiber track by a angle greater than that specified by the '--guidedCurvatureThreshold' parameter. The user must specify the guide fiber when running this program.

    version: 4.0.0

    documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT

    license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt

    contributor: This tool was developed by Vincent Magnotta, Greg Harris and Yongqiang Zhao.

    acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1
    """

    # Trait specifications wired to the underlying command-line executable.
    input_spec = gtractFiberTrackingInputSpec
    output_spec = gtractFiberTrackingOutputSpec
    # Executable name (surrounding spaces are emitted by the SEM code generator).
    _cmd = " gtractFiberTracking "
    # Default filename used when the output trait is requested as a Bool.
    _outputs_filenames = {'outputTract': 'outputTract.vtk'}
    # No X server / virtual display is needed for this tool.
    _redirect_x = False
class extractNrrdVectorIndexInputSpec(CommandLineInputSpec):
    """Input spec for :class:`extractNrrdVectorIndex`; each trait maps to one CLI flag via ``argstr``."""
    inputVolume = File(desc="Required: input file containing the vector that will be extracted", exists=True, argstr="--inputVolume %s")
    vectorIndex = traits.Int(desc="Index in the vector image to extract", argstr="--vectorIndex %d")
    setImageOrientation = traits.Enum("AsAcquired", "Axial", "Coronal", "Sagittal", desc="Sets the image orientation of the extracted vector (Axial, Coronal, Sagittal)", argstr="--setImageOrientation %s")
    outputVolume = traits.Either(traits.Bool, File(), hash_files=False, desc="Required: name of output NRRD file containing the vector image at the given index", argstr="--outputVolume %s")
    numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
class extractNrrdVectorIndexOutputSpec(TraitedSpec):
    """Output spec for :class:`extractNrrdVectorIndex` (auto-generated from the tool's SEM XML)."""
    outputVolume = File(desc="Required: name of output NRRD file containing the vector image at the given index", exists=True)
class extractNrrdVectorIndex(SEMLikeCommandLine):
    """title: Extract Nrrd Index

category: Diffusion.GTRACT

description: This program will extract a 3D image (single vector) from a vector 3D image at a given vector index.

version: 4.0.0

documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT

license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt

contributor: This tool was developed by Vincent Magnotta and Greg Harris.

acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1

"""

    input_spec = extractNrrdVectorIndexInputSpec
    output_spec = extractNrrdVectorIndexOutputSpec
    _cmd = " extractNrrdVectorIndex "  # executable invoked by SEMLikeCommandLine
    _outputs_filenames = {'outputVolume': 'outputVolume.nii'}  # default output filename when a bare True is given
    _redirect_x = False  # command does not need X11 output redirection
class gtractResampleFibersInputSpec(CommandLineInputSpec):
    """Declarative command-line inputs for ``gtractResampleFibers``.

    Each trait's ``argstr`` maps it onto a CLI flag of the wrapped executable.
    """
    inputForwardDeformationFieldVolume = File(desc="Required: input forward deformation field image file name", exists=True, argstr="--inputForwardDeformationFieldVolume %s")
    inputReverseDeformationFieldVolume = File(desc="Required: input reverse deformation field image file name", exists=True, argstr="--inputReverseDeformationFieldVolume %s")
    inputTract = File(desc="Required: name of input vtkPolydata file containing tract lines.", exists=True, argstr="--inputTract %s")
    outputTract = traits.Either(traits.Bool, File(), hash_files=False, desc="Required: name of output vtkPolydata file containing tract lines and the point data collected along them.", argstr="--outputTract %s")
    writeXMLPolyDataFile = traits.Bool(desc="Flag to make use of the XML format for vtkPolyData fiber tracts.", argstr="--writeXMLPolyDataFile ")
    numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
class gtractResampleFibersOutputSpec(TraitedSpec):
    """Outputs produced by ``gtractResampleFibers`` (checked for existence after the run)."""
    outputTract = File(desc="Required: name of output vtkPolydata file containing tract lines and the point data collected along them.", exists=True)
class gtractResampleFibers(SEMLikeCommandLine):
    """title: Resample Fibers

category: Diffusion.GTRACT

description: This program will resample a fiber tract with respect to a pair of deformation fields that represent the forward and reverse deformation fields.

version: 4.0.0

documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT

license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt

contributor: This tool was developed by Vincent Magnotta and Greg Harris.

acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1

"""

    input_spec = gtractResampleFibersInputSpec
    output_spec = gtractResampleFibersOutputSpec
    _cmd = " gtractResampleFibers "  # executable invoked by SEMLikeCommandLine
    _outputs_filenames = {'outputTract': 'outputTract.vtk'}  # default output filename when a bare True is given
    _redirect_x = False  # command does not need X11 output redirection
class gtractTensorInputSpec(CommandLineInputSpec):
    """Declarative command-line inputs for ``gtractTensor``.

    Each trait's ``argstr`` maps it onto a CLI flag of the wrapped executable.
    """
    inputVolume = File(desc="Required: input image 4D NRRD image. Must contain data based on at least 6 distinct diffusion directions. The inputVolume is allowed to have multiple b0 and gradient direction images. Averaging of the b0 image is done internally in this step. Prior averaging of the DWIs is not required.",
                       exists=True, argstr="--inputVolume %s")
    outputVolume = traits.Either(traits.Bool, File(), hash_files=False, desc="Required: name of output NRRD file containing the Tensor vector image", argstr="--outputVolume %s")
    medianFilterSize = InputMultiPath(traits.Int, desc="Median filter radius in all 3 directions", sep=",", argstr="--medianFilterSize %s")
    maskProcessingMode = traits.Enum(
        "NOMASK", "ROIAUTO", "ROI", desc="ROIAUTO: mask is implicitly defined using a otsu forground and hole filling algorithm. ROI: Uses the masks to define what parts of the image should be used for computing the transform. NOMASK: no mask used", argstr="--maskProcessingMode %s")
    maskVolume = File(desc="Mask Image, if maskProcessingMode is ROI", exists=True, argstr="--maskVolume %s")
    backgroundSuppressingThreshold = traits.Int(
        desc="Image threshold to suppress background. This sets a threshold used on the b0 image to remove background voxels from processing. Typically, values of 100 and 500 work well for Siemens and GE DTI data, respectively. Check your data particularly in the globus pallidus to make sure the brain tissue is not being eliminated with this threshold.", argstr="--backgroundSuppressingThreshold %d")
    resampleIsotropic = traits.Bool(desc="Flag to resample to isotropic voxels. Enabling this feature is recommended if fiber tracking will be performed.", argstr="--resampleIsotropic ")
    size = traits.Float(desc="Isotropic voxel size to resample to", argstr="--size %f")
    b0Index = traits.Int(desc="Index in input vector index to extract", argstr="--b0Index %d")
    applyMeasurementFrame = traits.Bool(desc="Flag to apply the measurement frame to the gradient directions", argstr="--applyMeasurementFrame ")
    ignoreIndex = InputMultiPath(traits.Int, desc="Ignore diffusion gradient index. Used to remove specific gradient directions with artifacts.", sep=",", argstr="--ignoreIndex %s")
    numberOfThreads = traits.Int(desc="Explicitly specify the maximum number of threads to use.", argstr="--numberOfThreads %d")
class gtractTensorOutputSpec(TraitedSpec):
    """Outputs produced by ``gtractTensor`` (checked for existence after the run)."""
    outputVolume = File(desc="Required: name of output NRRD file containing the Tensor vector image", exists=True)
class gtractTensor(SEMLikeCommandLine):
    """title: Tensor Estimation

category: Diffusion.GTRACT

description: This step will convert a b-value averaged diffusion tensor image to a 3x3 tensor voxel image. This step takes the diffusion tensor image data and generates a tensor representation of the data based on the signal intensity decay, b values applied, and the diffusion directions. The apparent diffusion coefficient for a given orientation is computed on a pixel-by-pixel basis by fitting the image data (voxel intensities) to the Stejskal-Tanner equation. If at least 6 diffusion directions are used, then the diffusion tensor can be computed. This program uses itk::DiffusionTensor3DReconstructionImageFilter. The user can adjust background threshold, median filter, and isotropic resampling.

version: 4.0.0

documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:GTRACT

license: http://mri.radiology.uiowa.edu/copyright/GTRACT-Copyright.txt

contributor: This tool was developed by Vincent Magnotta and Greg Harris.

acknowledgements: Funding for this version of the GTRACT program was provided by NIH/NINDS R01NS050568-01A2S1

"""

    input_spec = gtractTensorInputSpec
    output_spec = gtractTensorOutputSpec
    _cmd = " gtractTensor "  # executable invoked by SEMLikeCommandLine
    _outputs_filenames = {'outputVolume': 'outputVolume.nrrd'}  # default output filename when a bare True is given
    _redirect_x = False  # command does not need X11 output redirection
|
bsd-3-clause
|
kanaka/spacewar
|
python/txt.py
|
1
|
4781
|
"""text and font classes, helps everyone to text"""
import pygame, pygame.font, gfx
# Old versions of pygame shipped a buggy SysFont; fall back to a local
# reimplementation on anything at or below 1.6.1.
if pygame.ver <= '1.6.1':
    from mysysfont import SysFont
else:
    SysFont = pygame.font.SysFont

# Cache of already-created pygame font objects, shared by every Font instance.
FontPool = {}


def initialize():
    """Start pygame's font subsystem. Always returns 1."""
    pygame.font.init()
    return 1
class Font:
    """A pooled wrapper around a pygame font with text-rendering helpers.

    Font objects sharing the same (name, size, bold, italic) combination share
    one underlying pygame font via the module-level ``FontPool`` cache.

    The rendering helpers (``text``, ``textlined``, ``textshadowed``) return a
    ``[surface, rect]`` pair; ``textbox`` returns a word-wrapped surface.
    Fonts of size >= 20 automatically render with a drop shadow.
    """

    def __init__(self, name, size, bold=0, italic=0):
        # BUGFIX: the pool key previously omitted bold/italic, so requesting a
        # bold (or italic) variant could be served a cached plain font.
        val = name, size, bold, italic
        if val in FontPool:
            font = FontPool[val]
        else:
            font = SysFont(name, size, bold, italic)
            FontPool[val] = font
        self.font = font
        # Large text looks better with a drop shadow, so reroute text() there.
        if size >= 20:
            self.text = self.textshadowed

    def render(self, *args):
        """Delegate straight to pygame's Font.render."""
        return self.font.render(*args)

    def set_underline(self, *args):
        """Delegate straight to pygame's Font.set_underline."""
        return self.font.set_underline(*args)

    def set_italic(self, *args):
        """Delegate straight to pygame's Font.set_italic."""
        return self.font.set_italic(*args)

    def set_bold(self, *args):
        """Delegate straight to pygame's Font.set_bold."""
        return self.font.set_bold(*args)

    def _positionrect(self, img, center, pos):
        """Return img's rect, anchored so that attribute *pos* sits at *center* (if given)."""
        r = img.get_rect()
        if center:
            setattr(r, pos, center)
        return r

    def _render(self, text, color, bgd=(0,0,0)):
        """Render *text* to a colorkeyed surface and return it.

        BUGFIX: this previously returned the undefined name ``img`` and would
        raise NameError if called; it now mirrors the rendering logic of
        ``text()`` without the rect positioning.
        """
        if text is None: text = ' '
        try:
            if gfx.surface.get_bytesize() > 1:
                # On high-color displays render antialiased onto bgd, then key
                # bgd out so the surface blits with transparency.
                img = self.font.render(text, 1, color, bgd)
                img.set_colorkey(bgd, pygame.RLEACCEL)
            else:
                img = self.font.render(text, 0, color)
        except (pygame.error, TypeError):
            # Rendering can fail (e.g. zero-size text); fall back to a stub.
            img = pygame.Surface((10, 10))
        return img

    def get_height(self):
        """Height in pixels of this font."""
        return self.font.get_height()

    def get_linesize(self):
        """Recommended line spacing in pixels for this font."""
        return self.font.get_linesize()

    def text(self, color, text, center=None, pos='center', bgd=(0,0,0)):
        """Render *text* and return [surface, rect], optionally positioned at *center*."""
        if text is None: text = ' '
        try:
            if gfx.surface.get_bytesize() > 1:
                img = self.font.render(text, 1, color, bgd)
                img.set_colorkey(bgd, pygame.RLEACCEL)
            else:
                img = self.font.render(text, 0, color)
        except (pygame.error, TypeError):
            img = pygame.Surface((10, 10))
        img = img.convert()
        r = self._positionrect(img, center, pos)
        return [img, r]

    def textlined(self, color, text, center=None, pos='center'):
        """Render *text* with a dark outline; returns [surface, rect]."""
        darkcolor = [int(c/2) for c in color]
        if text is None: text = ' '
        try:
            if gfx.surface.get_bytesize() > 1:
                img1 = self.font.render(text, 1, color)
                img2 = self.font.render(text, 1, darkcolor)
            else:
                img1 = img2 = self.font.render(text, 0, color)
                img2 = self.font.render(text, 0, darkcolor)
        except (pygame.error, TypeError):
            img1 = img2 = pygame.Surface((10, 10))
        # Blit the dark copy at the four corners, then the bright copy on top.
        newsize = img1.get_width()+4, img1.get_height()+4
        img = pygame.Surface(newsize)
        img.blit(img2, (0, 0))
        img.blit(img2, (0, 4))
        img.blit(img2, (4, 0))
        img.blit(img2, (4, 4))
        img.blit(img1, (2, 2))
        img = img.convert()
        img.set_colorkey((0,0,0), pygame.RLEACCEL)
        r = self._positionrect(img, center, pos)
        return [img, r]

    def textshadowed(self, color, text, center=None, pos='center'):
        """Render *text* with a drop shadow offset (2, 2); returns [surface, rect]."""
        darkcolor = [int(c/2) for c in color]
        if text is None: text = ' '
        try:
            if gfx.surface.get_bytesize() > 1:
                img1 = self.font.render(text, 1, color)
                img2 = self.font.render(text, 1, darkcolor)
            else:
                img1 = img2 = self.font.render(text, 0, color)
                img2 = self.font.render(text, 0, darkcolor)
        except (pygame.error, TypeError):
            img1 = img2 = pygame.Surface((10, 10))
        # Shadow first, bright text on top, offset by two pixels.
        newsize = img1.get_width()+2, img1.get_height()+2
        img = pygame.Surface(newsize)
        img.blit(img2, (2, 2))
        img.blit(img1, (0, 0))
        img = img.convert()
        img.set_colorkey((0,0,0), pygame.RLEACCEL)
        r = self._positionrect(img, center, pos)
        return [img, r]

    def textbox(self, color, text, width, bgcolor, topmargin=6):
        """Word-wrap *text* into a surface of the given pixel *width*.

        Respects explicit newlines; returns the filled surface.
        """
        sidemargin = 6
        lines = []
        for line in text.splitlines():
            cursize = 0
            build = ''
            for word in line.split():
                wordspace = word + ' '
                size = self.font.size(wordspace)[0]
                if size + cursize >= width:
                    # Current word would overflow; flush the line and start anew.
                    lines.append(build)
                    cursize = size
                    build = wordspace
                else:
                    build += wordspace
                    cursize += size
            lines.append(build)
        lineheight = self.font.get_linesize()
        height = len(lines) * lineheight + topmargin + sidemargin
        width += sidemargin * 2
        surf = pygame.Surface((width, height))
        surf.fill(bgcolor)
        pos = topmargin
        for line in lines:
            if line:
                img = self.font.render(line, 1, color, bgcolor)
                img.set_colorkey(bgcolor)
                surf.blit(img, (sidemargin, pos))
            pos += lineheight
        return surf
|
lgpl-2.1
|
florian-dacosta/OpenUpgrade
|
addons/website_customer/__openerp__.py
|
52
|
1511
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today OpenERP S.A. (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Addon manifest dictionary read by the OpenERP module loader.
{
    'name': 'Customer References',
    'category': 'Website',
    'summary': 'Publish Your Customer References',
    'version': '1.0',
    'description': """
OpenERP Customer References
===========================
""",
    'author': 'OpenERP SA',
    # Modules that must be installed before this one.
    'depends': [
        'crm_partner_assign',
        'website_partner',
        'website_google_map',
    ],
    # Demonstration data, loaded only into demo databases.
    'demo': [
        'website_customer_demo.xml',
    ],
    # Data files loaded on install/update.
    'data': [
        'views/website_customer.xml',
    ],
    'qweb': [],
    'installable': True,
}
|
agpl-3.0
|
alextremblay/snmp-cmds
|
tests/conftest.py
|
1
|
1071
|
"""
We're using snmpsim to simulate various target devices for our tests.
Before we do anything, we should probably make sure it's actually running
"""
import pytest
from subprocess import run, PIPE
# host:port where the snmpsim simulator is expected to be listening
SNMP_TEST_SRV_HOST = '127.0.0.1:10000'
# MIB-II sysDescr.0 OID, used as a cheap liveness probe
SYSDESCR_OID = '.1.3.6.1.2.1.1.1.0'
@pytest.fixture(scope="session", autouse=True)
def execute_before_any_test():
    """
    We're using snmpsim to simulate various target devices for our tests.
    Before we do anything, we should probably make sure it's actually running
    """
    # One quick GET against sysDescr.0 (1s timeout, no retries); a non-zero
    # exit status means the simulator is unreachable or misconfigured.
    test = run(['snmpget', '-v', '2c', '-c', 'public', '-t', '1', '-r', '0',
                SNMP_TEST_SRV_HOST, SYSDESCR_OID], stdout=PIPE, stderr=PIPE)
    # BUGFIX: this previously used "is not 0", an identity comparison that only
    # works by accident of CPython's small-int caching; compare values instead.
    if test.returncode != 0:
        raise Exception("the SNMP test server is either not running, or not "
                        "configured properly. Please be sure to run the "
                        "following command from within the tests directory "
                        "of this project:\nsnmpsimd.py --agent-udpv4-endpoint"
                        "=127.0.0.1:10000 --data-dir=. ")
|
mit
|
kwailamchan/programming-languages
|
javascript/backbone/backbone-templates/backbone-fileupload/venvs/lib/python2.7/site-packages/django/contrib/sessions/backends/file.py
|
79
|
5340
|
import errno
import os
import tempfile
from django.conf import settings
from django.contrib.sessions.backends.base import SessionBase, CreateError
from django.core.exceptions import SuspiciousOperation, ImproperlyConfigured
class SessionStore(SessionBase):
    """
    Implements a file based session store.

    Each session is a single file named ``<cookie-name><session-key>`` inside
    ``SESSION_FILE_PATH`` (or the system temp directory). Writes go through an
    atomic temp-file-plus-rename sequence so concurrent readers never see a
    partially written session.
    """
    def __init__(self, session_key=None):
        # Resolve the directory session files live in; fall back to the
        # system temp directory when SESSION_FILE_PATH is unset/empty.
        self.storage_path = getattr(settings, "SESSION_FILE_PATH", None)
        if not self.storage_path:
            self.storage_path = tempfile.gettempdir()

        # Make sure the storage path is valid.
        if not os.path.isdir(self.storage_path):
            raise ImproperlyConfigured(
                "The session storage path %r doesn't exist. Please set your"
                " SESSION_FILE_PATH setting to an existing directory in which"
                " Django can store session data." % self.storage_path)

        self.file_prefix = settings.SESSION_COOKIE_NAME
        super(SessionStore, self).__init__(session_key)

    # Only lowercase hex digits are legal in a session key (md5 hexdigest).
    VALID_KEY_CHARS = set("abcdef0123456789")

    def _key_to_file(self, session_key=None):
        """
        Get the file associated with this session key.
        """
        if session_key is None:
            session_key = self._get_or_create_session_key()

        # Make sure we're not vulnerable to directory traversal. Session keys
        # should always be md5s, so they should never contain directory
        # components.
        if not set(session_key).issubset(self.VALID_KEY_CHARS):
            raise SuspiciousOperation(
                "Invalid characters in session key")

        return os.path.join(self.storage_path, self.file_prefix + session_key)

    def load(self):
        """Read and decode this session's file; on failure fall back to a fresh session."""
        session_data = {}
        try:
            session_file = open(self._key_to_file(), "rb")
            try:
                file_data = session_file.read()
                # Don't fail if there is no data in the session file.
                # We may have opened the empty placeholder file.
                if file_data:
                    try:
                        session_data = self.decode(file_data)
                    except (EOFError, SuspiciousOperation):
                        # Corrupt/tampered data: discard it and start over.
                        self.create()
            finally:
                session_file.close()
        except IOError:
            # No file for this key yet; allocate a new session.
            self.create()
        return session_data

    def create(self):
        """Allocate a brand-new session key, retrying until an unused one is found."""
        while True:
            self._session_key = self._get_new_session_key()
            try:
                self.save(must_create=True)
            except CreateError:
                # Key collided with an existing session file; try another key.
                continue
            self.modified = True
            self._session_cache = {}
            return

    def save(self, must_create=False):
        """Persist the session to disk atomically.

        With must_create=True, raises CreateError if a file for this key
        already exists (used by create() to guarantee key uniqueness).
        """
        # Get the session data now, before we start messing
        # with the file it is stored within.
        session_data = self._get_session(no_load=must_create)
        session_file_name = self._key_to_file()

        try:
            # Make sure the file exists.  If it does not already exist, an
            # empty placeholder file is created.
            flags = os.O_WRONLY | os.O_CREAT | getattr(os, 'O_BINARY', 0)
            if must_create:
                flags |= os.O_EXCL
            fd = os.open(session_file_name, flags)
            os.close(fd)
        except OSError, e:
            if must_create and e.errno == errno.EEXIST:
                raise CreateError
            raise

        # Write the session file without interfering with other threads
        # or processes.  By writing to an atomically generated temporary
        # file and then using the atomic os.rename() to make the complete
        # file visible, we avoid having to lock the session file, while
        # still maintaining its integrity.
        #
        # Note: Locking the session file was explored, but rejected in part
        # because in order to be atomic and cross-platform, it required a
        # long-lived lock file for each session, doubling the number of
        # files in the session storage directory at any given time.  This
        # rename solution is cleaner and avoids any additional overhead
        # when reading the session data, which is the more common case
        # unless SESSION_SAVE_EVERY_REQUEST = True.
        #
        # See ticket #8616.
        dir, prefix = os.path.split(session_file_name)

        try:
            output_file_fd, output_file_name = tempfile.mkstemp(dir=dir,
                prefix=prefix + '_out_')
            renamed = False
            try:
                try:
                    os.write(output_file_fd, self.encode(session_data))
                finally:
                    os.close(output_file_fd)
                os.rename(output_file_name, session_file_name)
                renamed = True
            finally:
                # Never leave the temp file behind if the rename didn't happen.
                if not renamed:
                    os.unlink(output_file_name)
        except (OSError, IOError, EOFError):
            # Best-effort save: failures are deliberately swallowed here.
            pass

    def exists(self, session_key):
        """Return True if a session file exists for *session_key*."""
        return os.path.exists(self._key_to_file(session_key))

    def delete(self, session_key=None):
        """Remove the session file; a missing file is silently ignored."""
        if session_key is None:
            if self.session_key is None:
                return
            session_key = self.session_key
        try:
            os.unlink(self._key_to_file(session_key))
        except OSError:
            pass

    def clean(self):
        # Expired-session cleanup is not implemented for the file backend.
        pass
|
mit
|
spitfire88/upm
|
examples/python/grovewater.py
|
7
|
2028
|
#!/usr/bin/python
# Author: Zion Orent <[email protected]>
# Copyright (c) 2015 Intel Corporation.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import print_function
import time, sys, signal, atexit
from upm import pyupm_grovewater as upmGrovewater
def main():
    """Poll a Grove water sensor once per second, printing wet/dry status until interrupted."""
    # Instantiate a Grove Water sensor on digital pin D2
    myWaterSensor = upmGrovewater.GroveWater(2)

    ## Exit handlers ##
    # This function stops python from printing a stacktrace when you hit control-C
    def SIGINTHandler(signum, frame):
        raise SystemExit

    # This function lets you run code on exit, including functions from myWaterSensor
    def exitHandler():
        print("Exiting")
        sys.exit(0)

    # Register exit handlers
    atexit.register(exitHandler)
    signal.signal(signal.SIGINT, SIGINTHandler)

    # Poll forever; SIGINT (via SIGINTHandler) is the intended way out.
    while(1):
        if (myWaterSensor.isWet()):
            print("Sensor is wet")
        else:
            print("Sensor is dry")
        time.sleep(1)
# Run the polling loop only when executed as a script, not on import.
if __name__ == '__main__':
    main()
|
mit
|
Distrotech/antlr4
|
runtime/Python3/src/antlr4/atn/ATNDeserializer.py
|
17
|
22901
|
# [The "BSD license"]
# Copyright (c) 2013 Terence Parr
# Copyright (c) 2013 Sam Harwell
# Copyright (c) 2014 Eric Vergnaud
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#/
from uuid import UUID
from io import StringIO
from antlr4.Token import Token
from antlr4.atn.ATN import ATN
from antlr4.atn.ATNType import ATNType
from antlr4.atn.ATNState import *
from antlr4.atn.Transition import *
from antlr4.atn.LexerAction import *
from antlr4.atn.ATNDeserializationOptions import ATNDeserializationOptions
# This is the earliest supported serialized UUID.
BASE_SERIALIZED_UUID = UUID("AADB8D7E-AEEF-4415-AD2B-8204D6CF042E")

# This list contains all of the currently supported UUIDs, ordered by when
# the feature first appeared in this branch.
SUPPORTED_UUIDS = [ BASE_SERIALIZED_UUID ]

# Version number that must appear first in the serialized data stream
# (validated by ATNDeserializer.checkVersion).
SERIALIZED_VERSION = 3

# This is the current serialized UUID.
SERIALIZED_UUID = BASE_SERIALIZED_UUID
class ATNDeserializer (object):
def __init__(self, options : ATNDeserializationOptions = None):
if options is None:
options = ATNDeserializationOptions.defaultOptions
self.deserializationOptions = options
# Determines if a particular serialized representation of an ATN supports
# a particular feature, identified by the {@link UUID} used for serializing
# the ATN at the time the feature was first introduced.
#
# @param feature The {@link UUID} marking the first time the feature was
# supported in the serialized ATN.
# @param actualUuid The {@link UUID} of the actual serialized ATN which is
# currently being deserialized.
# @return {@code true} if the {@code actualUuid} value represents a
# serialized ATN at or after the feature identified by {@code feature} was
# introduced; otherwise, {@code false}.
def isFeatureSupported(self, feature : UUID , actualUuid : UUID ):
idx1 = SUPPORTED_UUIDS.index(feature)
if idx1<0:
return False
idx2 = SUPPORTED_UUIDS.index(actualUuid)
return idx2 >= idx1
    def deserialize(self, data : str):
        """Decode a serialized ATN string into a fully wired ATN object.

        Sections are consumed in the fixed order they were serialized:
        version, UUID, ATN header, states, rules, modes, sets, edges,
        decisions, lexer actions — then the result is verified.
        """
        self.reset(data)
        self.checkVersion()
        self.checkUUID()
        atn = self.readATN()
        self.readStates(atn)
        self.readRules(atn)
        self.readModes(atn)
        sets = self.readSets(atn)
        self.readEdges(atn, sets)
        self.readDecisions(atn)
        self.readLexerActions(atn)
        self.markPrecedenceDecisions(atn)
        self.verifyATN(atn)
        if self.deserializationOptions.generateRuleBypassTransitions \
                and atn.grammarType == ATNType.PARSER:
            self.generateRuleBypassTransitions(atn)
            # re-verify after modification
            self.verifyATN(atn)
        return atn
def reset(self, data:str):
def adjust(c):
v = ord(c)
return v-2 if v>1 else -1
temp = [ adjust(c) for c in data ]
# don't adjust the first value since that's the version number
temp[0] = ord(data[0])
self.data = temp
self.pos = 0
def checkVersion(self):
version = self.readInt()
if version != SERIALIZED_VERSION:
raise Exception("Could not deserialize ATN with version " + str(version) + " (expected " + str(SERIALIZED_VERSION) + ").")
def checkUUID(self):
uuid = self.readUUID()
if not uuid in SUPPORTED_UUIDS:
raise Exception("Could not deserialize ATN with UUID: " + str(uuid) + \
" (expected " + str(SERIALIZED_UUID) + " or a legacy UUID).", uuid, SERIALIZED_UUID)
self.uuid = uuid
def readATN(self):
idx = self.readInt()
grammarType = ATNType.fromOrdinal(idx)
maxTokenType = self.readInt()
return ATN(grammarType, maxTokenType)
    def readStates(self, atn:ATN):
        """Materialize every ATN state, then patch up cross-state references.

        Loop-end and block-start states refer to other states by number, so
        those links are resolved only after all states exist. Non-greedy and
        precedence flags are stored as trailing lists of state numbers.
        """
        loopBackStateNumbers = []
        endStateNumbers = []
        nstates = self.readInt()
        for i in range(0, nstates):
            stype = self.readInt()
            # ignore bad type of states
            if stype==ATNState.INVALID_TYPE:
                atn.addState(None)
                continue
            ruleIndex = self.readInt()
            if ruleIndex == 0xFFFF:
                ruleIndex = -1  # 0xFFFF is the serialized sentinel for "no rule"
            s = self.stateFactory(stype, ruleIndex)
            if stype == ATNState.LOOP_END: # special case
                loopBackStateNumber = self.readInt()
                loopBackStateNumbers.append((s, loopBackStateNumber))
            elif isinstance(s, BlockStartState):
                endStateNumber = self.readInt()
                endStateNumbers.append((s, endStateNumber))
            atn.addState(s)
        # delay the assignment of loop back and end states until we know all the state instances have been initialized
        for pair in loopBackStateNumbers:
            pair[0].loopBackState = atn.states[pair[1]]
        for pair in endStateNumbers:
            pair[0].endState = atn.states[pair[1]]
        numNonGreedyStates = self.readInt()
        for i in range(0, numNonGreedyStates):
            stateNumber = self.readInt()
            atn.states[stateNumber].nonGreedy = True
        numPrecedenceStates = self.readInt()
        for i in range(0, numPrecedenceStates):
            stateNumber = self.readInt()
            atn.states[stateNumber].isPrecedenceRule = True
    def readRules(self, atn:ATN):
        """Read the rule tables: start states, (for lexers) token types, and stop states."""
        nrules = self.readInt()
        if atn.grammarType == ATNType.LEXER:
            atn.ruleToTokenType = [0] * nrules
        atn.ruleToStartState = [0] * nrules
        for i in range(0, nrules):
            s = self.readInt()
            startState = atn.states[s]
            atn.ruleToStartState[i] = startState
            if atn.grammarType == ATNType.LEXER:
                tokenType = self.readInt()
                if tokenType == 0xFFFF:
                    tokenType = Token.EOF  # 0xFFFF is the serialized form of EOF
                atn.ruleToTokenType[i] = tokenType
        atn.ruleToStopState = [0] * nrules
        # Stop states are not serialized with the rule table; recover the
        # rule -> stop-state mapping by scanning all states.
        for state in atn.states:
            if not isinstance(state, RuleStopState):
                continue
            atn.ruleToStopState[state.ruleIndex] = state
            atn.ruleToStartState[state.ruleIndex].stopState = state
def readModes(self, atn:ATN):
nmodes = self.readInt()
for i in range(0, nmodes):
s = self.readInt()
atn.modeToStartState.append(atn.states[s])
    def readSets(self, atn:ATN):
        """Read the interval-set table used by set transitions; return the list of sets."""
        sets = []
        m = self.readInt()
        for i in range(0, m):
            iset = IntervalSet()
            sets.append(iset)
            n = self.readInt()
            containsEof = self.readInt()
            if containsEof!=0:
                iset.addOne(-1)  # -1 encodes EOF membership in the set
            for j in range(0, n):
                i1 = self.readInt()
                i2 = self.readInt()
                iset.addRange(range(i1, i2 + 1)) # range upper limit is exclusive
        return sets
    def readEdges(self, atn:ATN, sets:list):
        """Read all serialized transitions, then derive the edges that are not serialized.

        Rule-stop return edges, block start/end links, and loop-back pointers
        are reconstructed from the deserialized transitions rather than being
        stored explicitly in the data stream.
        """
        nedges = self.readInt()
        for i in range(0, nedges):
            src = self.readInt()
            trg = self.readInt()
            ttype = self.readInt()
            arg1 = self.readInt()
            arg2 = self.readInt()
            arg3 = self.readInt()
            trans = self.edgeFactory(atn, ttype, src, trg, arg1, arg2, arg3, sets)
            srcState = atn.states[src]
            srcState.addTransition(trans)
        # edges for rule stop states can be derived, so they aren't serialized
        for state in atn.states:
            for i in range(0, len(state.transitions)):
                t = state.transitions[i]
                if not isinstance(t, RuleTransition):
                    continue
                outermostPrecedenceReturn = -1
                if atn.ruleToStartState[t.target.ruleIndex].isPrecedenceRule:
                    if t.precedence == 0:
                        outermostPrecedenceReturn = t.target.ruleIndex
                trans = EpsilonTransition(t.followState, outermostPrecedenceReturn)
                atn.ruleToStopState[t.target.ruleIndex].addTransition(trans)
        for state in atn.states:
            if isinstance(state, BlockStartState):
                # we need to know the end state to set its start state
                if state.endState is None:
                    raise Exception("IllegalState")
                # block end states can only be associated to a single block start state
                if state.endState.startState is not None:
                    raise Exception("IllegalState")
                state.endState.startState = state
            if isinstance(state, PlusLoopbackState):
                for i in range(0, len(state.transitions)):
                    target = state.transitions[i].target
                    if isinstance(target, PlusBlockStartState):
                        target.loopBackState = state
            elif isinstance(state, StarLoopbackState):
                for i in range(0, len(state.transitions)):
                    target = state.transitions[i].target
                    if isinstance(target, StarLoopEntryState):
                        target.loopBackState = state
def readDecisions(self, atn:ATN):
ndecisions = self.readInt()
for i in range(0, ndecisions):
s = self.readInt()
decState = atn.states[s]
atn.decisionToState.append(decState)
decState.decision = i
    def readLexerActions(self, atn:ATN):
        """For lexer ATNs, read the serialized lexer-action table into atn.lexerActions."""
        if atn.grammarType == ATNType.LEXER:
            count = self.readInt()
            atn.lexerActions = [ None ] * count
            for i in range(0, count):
                actionType = self.readInt()
                data1 = self.readInt()
                if data1 == 0xFFFF:
                    data1 = -1  # 0xFFFF is the serialized form of -1
                data2 = self.readInt()
                if data2 == 0xFFFF:
                    data2 = -1
                lexerAction = self.lexerActionFactory(actionType, data1, data2)
                atn.lexerActions[i] = lexerAction
def generateRuleBypassTransitions(self, atn:ATN):
count = len(atn.ruleToStartState)
atn.ruleToTokenType = [ 0 ] * count
for i in range(0, count):
atn.ruleToTokenType[i] = atn.maxTokenType + i + 1
for i in range(0, count):
self.generateRuleBypassTransition(atn, i)
def generateRuleBypassTransition(self, atn:ATN, idx:int):
bypassStart = BasicBlockStartState()
bypassStart.ruleIndex = idx
atn.addState(bypassStart)
bypassStop = BlockEndState()
bypassStop.ruleIndex = idx
atn.addState(bypassStop)
bypassStart.endState = bypassStop
atn.defineDecisionState(bypassStart)
bypassStop.startState = bypassStart
excludeTransition = None
if atn.ruleToStartState[idx].isPrecedenceRule:
# wrap from the beginning of the rule to the StarLoopEntryState
endState = None
for state in atn.states:
if self.stateIsEndStateFor(state, idx):
endState = state
excludeTransition = state.loopBackState.transitions[0]
break
if excludeTransition is None:
raise Exception("Couldn't identify final state of the precedence rule prefix section.")
else:
endState = atn.ruleToStopState[idx]
# all non-excluded transitions that currently target end state need to target blockEnd instead
for state in atn.states:
for transition in state.transitions:
if transition == excludeTransition:
continue
if transition.target == endState:
transition.target = bypassStop
# all transitions leaving the rule start state need to leave blockStart instead
ruleToStartState = atn.ruleToStartState[idx]
count = len(ruleToStartState.transitions)
while count > 0:
bypassStart.addTransition(ruleToStartState.transitions[count-1])
del ruleToStartState.transitions[-1]
# link the new states
atn.ruleToStartState[idx].addTransition(EpsilonTransition(bypassStart))
bypassStop.addTransition(EpsilonTransition(endState))
matchState = BasicState()
atn.addState(matchState)
matchState.addTransition(AtomTransition(bypassStop, atn.ruleToTokenType[idx]))
bypassStart.addTransition(EpsilonTransition(matchState))
def stateIsEndStateFor(self, state:ATNState, idx:int):
if state.ruleIndex != idx:
return None
if not isinstance(state, StarLoopEntryState):
return None
maybeLoopEndState = state.transitions[len(state.transitions) - 1].target
if not isinstance(maybeLoopEndState, LoopEndState):
return None
if maybeLoopEndState.epsilonOnlyTransitions and \
isinstance(maybeLoopEndState.transitions[0].target, RuleStopState):
return state
else:
return None
    #
    # Analyze the {@link StarLoopEntryState} states in the specified ATN to set
    # the {@link StarLoopEntryState#precedenceRuleDecision} field to the
    # correct value.
    #
    # @param atn The ATN.
    #
    def markPrecedenceDecisions(self, atn:ATN):
        """Flag the StarLoopEntryState that controls each precedence rule's closure loop."""
        for state in atn.states:
            if not isinstance(state, StarLoopEntryState):
                continue
            # We analyze the ATN to determine if this ATN decision state is the
            # decision for the closure block that determines whether a
            # precedence rule should continue or complete.
            #
            if atn.ruleToStartState[state.ruleIndex].isPrecedenceRule:
                maybeLoopEndState = state.transitions[len(state.transitions) - 1].target
                if isinstance(maybeLoopEndState, LoopEndState):
                    if maybeLoopEndState.epsilonOnlyTransitions and \
                            isinstance(maybeLoopEndState.transitions[0].target, RuleStopState):
                        state.precedenceRuleDecision = True
    def verifyATN(self, atn:ATN):
        """Sanity-check structural invariants of a freshly deserialized ATN.

        Raises (via checkCondition) on the first violated invariant.
        No-op unless deserializationOptions.verifyATN is enabled.
        """
        if not self.deserializationOptions.verifyATN:
            return
        # verify assumptions
        for state in atn.states:
            if state is None:
                continue
            # A state with more than one outgoing edge must be epsilon-only.
            self.checkCondition(state.epsilonOnlyTransitions or len(state.transitions) <= 1)
            if isinstance(state, PlusBlockStartState):
                self.checkCondition(state.loopBackState is not None)
            if isinstance(state, StarLoopEntryState):
                self.checkCondition(state.loopBackState is not None)
                self.checkCondition(len(state.transitions) == 2)
                # The entry's two edges point at the block start and the loop
                # end; which one comes first must agree with the greedy flag.
                if isinstance(state.transitions[0].target, StarBlockStartState):
                    self.checkCondition(isinstance(state.transitions[1].target, LoopEndState))
                    self.checkCondition(not state.nonGreedy)
                elif isinstance(state.transitions[0].target, LoopEndState):
                    self.checkCondition(isinstance(state.transitions[1].target, StarBlockStartState))
                    self.checkCondition(state.nonGreedy)
                else:
                    raise Exception("IllegalState")
            if isinstance(state, StarLoopbackState):
                self.checkCondition(len(state.transitions) == 1)
                self.checkCondition(isinstance(state.transitions[0].target, StarLoopEntryState))
            if isinstance(state, LoopEndState):
                self.checkCondition(state.loopBackState is not None)
            if isinstance(state, RuleStartState):
                self.checkCondition(state.stopState is not None)
            if isinstance(state, BlockStartState):
                self.checkCondition(state.endState is not None)
            if isinstance(state, BlockEndState):
                self.checkCondition(state.startState is not None)
            # Decision states must have been assigned a decision number if they
            # actually branch; non-decision states may only branch at rule stops.
            if isinstance(state, DecisionState):
                self.checkCondition(len(state.transitions) <= 1 or state.decision >= 0)
            else:
                self.checkCondition(len(state.transitions) <= 1 or isinstance(state, RuleStopState))
def checkCondition(self, condition:bool, message=None):
if not condition:
if message is None:
message = "IllegalState"
raise Exception(message)
def readInt(self):
i = self.data[self.pos]
self.pos += 1
return i
def readInt32(self):
low = self.readInt()
high = self.readInt()
return low | (high << 16)
def readLong(self):
low = self.readInt32()
high = self.readInt32()
return (low & 0x00000000FFFFFFFF) | (high << 32)
def readUUID(self):
low = self.readLong()
high = self.readLong()
allBits = (low & 0xFFFFFFFFFFFFFFFF) | (high << 64)
return UUID(int=allBits)
edgeFactories = [ lambda args : None,
lambda atn, src, trg, arg1, arg2, arg3, sets, target : EpsilonTransition(target),
lambda atn, src, trg, arg1, arg2, arg3, sets, target : \
RangeTransition(target, Token.EOF, arg2) if arg3 != 0 else RangeTransition(target, arg1, arg2),
lambda atn, src, trg, arg1, arg2, arg3, sets, target : \
RuleTransition(atn.states[arg1], arg2, arg3, target),
lambda atn, src, trg, arg1, arg2, arg3, sets, target : \
PredicateTransition(target, arg1, arg2, arg3 != 0),
lambda atn, src, trg, arg1, arg2, arg3, sets, target : \
AtomTransition(target, Token.EOF) if arg3 != 0 else AtomTransition(target, arg1),
lambda atn, src, trg, arg1, arg2, arg3, sets, target : \
ActionTransition(target, arg1, arg2, arg3 != 0),
lambda atn, src, trg, arg1, arg2, arg3, sets, target : \
SetTransition(target, sets[arg1]),
lambda atn, src, trg, arg1, arg2, arg3, sets, target : \
NotSetTransition(target, sets[arg1]),
lambda atn, src, trg, arg1, arg2, arg3, sets, target : \
WildcardTransition(target),
lambda atn, src, trg, arg1, arg2, arg3, sets, target : \
PrecedencePredicateTransition(target, arg1)
]
def edgeFactory(self, atn:ATN, type:int, src:int, trg:int, arg1:int, arg2:int, arg3:int, sets:list):
target = atn.states[trg]
if type > len(self.edgeFactories) or self.edgeFactories[type] is None:
raise Exception("The specified transition type: " + str(type) + " is not valid.")
else:
return self.edgeFactories[type](atn, src, trg, arg1, arg2, arg3, sets, target)
stateFactories = [ lambda : None,
lambda : BasicState(),
lambda : RuleStartState(),
lambda : BasicBlockStartState(),
lambda : PlusBlockStartState(),
lambda : StarBlockStartState(),
lambda : TokensStartState(),
lambda : RuleStopState(),
lambda : BlockEndState(),
lambda : StarLoopbackState(),
lambda : StarLoopEntryState(),
lambda : PlusLoopbackState(),
lambda : LoopEndState()
]
def stateFactory(self, type:int, ruleIndex:int):
if type> len(self.stateFactories) or self.stateFactories[type] is None:
raise Exception("The specified state type " + str(type) + " is not valid.")
else:
s = self.stateFactories[type]()
if s is not None:
s.ruleIndex = ruleIndex
return s
CHANNEL = 0 #The type of a {@link LexerChannelAction} action.
CUSTOM = 1 #The type of a {@link LexerCustomAction} action.
MODE = 2 #The type of a {@link LexerModeAction} action.
MORE = 3 #The type of a {@link LexerMoreAction} action.
POP_MODE = 4 #The type of a {@link LexerPopModeAction} action.
PUSH_MODE = 5 #The type of a {@link LexerPushModeAction} action.
SKIP = 6 #The type of a {@link LexerSkipAction} action.
TYPE = 7 #The type of a {@link LexerTypeAction} action.
actionFactories = [ lambda data1, data2: LexerChannelAction(data1),
lambda data1, data2: LexerCustomAction(data1, data2),
lambda data1, data2: LexerModeAction(data1),
lambda data1, data2: LexerMoreAction.INSTANCE,
lambda data1, data2: LexerPopModeAction.INSTANCE,
lambda data1, data2: LexerPushModeAction(data1),
lambda data1, data2: LexerSkipAction.INSTANCE,
lambda data1, data2: LexerTypeAction(data1)
]
def lexerActionFactory(self, type:int, data1:int, data2:int):
if type > len(self.actionFactories) or self.actionFactories[type] is None:
raise Exception("The specified lexer action type " + str(type) + " is not valid.")
else:
return self.actionFactories[type](data1, data2)
|
bsd-3-clause
|
evernote/evernote-sdk-python3
|
lib/evernote/edam/notestore/NoteStore.py
|
11
|
791799
|
#
# Autogenerated by Thrift Compiler (0.9.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py:new_style,utf8strings
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
from .ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
  from thrift.protocol import fastbinary
except ImportError:
  # The C-accelerated serializer is an optional extension; fall back to the
  # pure-Python protocol implementation when it is not built.  Catch only
  # ImportError so real failures (SystemExit, KeyboardInterrupt, bugs raised
  # during import) are not silently swallowed by a bare except.
  fastbinary = None
class Iface(object):
def getSyncState(self, authenticationToken):
"""
Asks the NoteStore to provide information about the status of the user
account corresponding to the provided authentication token.
Parameters:
- authenticationToken
"""
pass
def getSyncStateWithMetrics(self, authenticationToken, clientMetrics):
"""
Asks the NoteStore to provide information about the status of the user
account corresponding to the provided authentication token.
This version of 'getSyncState' allows the client to upload coarse-
grained usage metrics to the service.
@param clientMetrics see the documentation of the ClientUsageMetrics
structure for an explanation of the fields that clients can pass to
the service.
Parameters:
- authenticationToken
- clientMetrics
"""
pass
def getSyncChunk(self, authenticationToken, afterUSN, maxEntries, fullSyncOnly):
"""
DEPRECATED - use getFilteredSyncChunk.
Parameters:
- authenticationToken
- afterUSN
- maxEntries
- fullSyncOnly
"""
pass
def getFilteredSyncChunk(self, authenticationToken, afterUSN, maxEntries, filter):
"""
Asks the NoteStore to provide the state of the account in order of
last modification. This request retrieves one block of the server's
state so that a client can make several small requests against a large
account rather than getting the entire state in one big message.
This call gives fine-grained control of the data that will
be received by a client by omitting data elements that a client doesn't
need. This may reduce network traffic and sync times.
@param afterUSN
The client can pass this value to ask only for objects that
have been updated after a certain point. This allows the client to
receive updates after its last checkpoint rather than doing a full
synchronization on every pass. The default value of "0" indicates
that the client wants to get objects from the start of the account.
@param maxEntries
The maximum number of modified objects that should be
returned in the result SyncChunk. This can be used to limit the size
of each individual message to be friendly for network transfer.
@param filter
The caller must set some of the flags in this structure to specify which
data types should be returned during the synchronization. See
the SyncChunkFilter structure for information on each flag.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "afterUSN" - if negative
</li>
<li> BAD_DATA_FORMAT "maxEntries" - if less than 1
</li>
</ul>
Parameters:
- authenticationToken
- afterUSN
- maxEntries
- filter
"""
pass
def getLinkedNotebookSyncState(self, authenticationToken, linkedNotebook):
"""
Asks the NoteStore to provide information about the status of a linked
notebook that has been shared with the caller, or that is public to the
world.
This will return a result that is similar to getSyncState, but may omit
SyncState.uploaded if the caller doesn't have permission to write to
the linked notebook.
This function must be called on the shard that owns the referenced
notebook. (I.e. the shardId in /shard/shardId/edam/note must be the
same as LinkedNotebook.shardId.)
@param authenticationToken
This should be an authenticationToken for the guest who has received
the invitation to the share. (I.e. this should not be the result of
NoteStore.authenticateToSharedNotebook)
@param linkedNotebook
This structure should contain identifying information and permissions
to access the notebook in question.
Parameters:
- authenticationToken
- linkedNotebook
"""
pass
def getLinkedNotebookSyncChunk(self, authenticationToken, linkedNotebook, afterUSN, maxEntries, fullSyncOnly):
"""
Asks the NoteStore to provide information about the contents of a linked
notebook that has been shared with the caller, or that is public to the
world.
This will return a result that is similar to getSyncChunk, but will only
contain entries that are visible to the caller. I.e. only that particular
Notebook will be visible, along with its Notes, and Tags on those Notes.
This function must be called on the shard that owns the referenced
notebook. (I.e. the shardId in /shard/shardId/edam/note must be the
same as LinkedNotebook.shardId.)
@param authenticationToken
This should be an authenticationToken for the guest who has received
the invitation to the share. (I.e. this should not be the result of
NoteStore.authenticateToSharedNotebook)
@param linkedNotebook
This structure should contain identifying information and permissions
to access the notebook in question. This must contain the valid fields
for either a shared notebook (e.g. shareKey)
or a public notebook (e.g. username, uri)
@param afterUSN
The client can pass this value to ask only for objects that
have been updated after a certain point. This allows the client to
receive updates after its last checkpoint rather than doing a full
synchronization on every pass. The default value of "0" indicates
that the client wants to get objects from the start of the account.
@param maxEntries
The maximum number of modified objects that should be
returned in the result SyncChunk. This can be used to limit the size
of each individual message to be friendly for network transfer.
Applications should not request more than 256 objects at a time,
and must handle the case where the service returns less than the
requested number of objects in a given request even though more
objects are available on the service.
@param fullSyncOnly
If true, then the client only wants initial data for a full sync.
In this case, the service will not return any expunged objects,
and will not return any Resources, since these are also provided
in their corresponding Notes.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "afterUSN" - if negative
</li>
<li> BAD_DATA_FORMAT "maxEntries" - if less than 1
</li>
</ul>
@throws EDAMNotFoundException <ul>
<li> "LinkedNotebook" - if the provided information doesn't match any
valid notebook
</li>
<li> "LinkedNotebook.uri" - if the provided public URI doesn't match any
valid notebook
</li>
<li> "SharedNotebook.id" - if the provided information indicates a
shared notebook that no longer exists
</li>
</ul>
Parameters:
- authenticationToken
- linkedNotebook
- afterUSN
- maxEntries
- fullSyncOnly
"""
pass
def listNotebooks(self, authenticationToken):
"""
Returns a list of all of the notebooks in the account.
Parameters:
- authenticationToken
"""
pass
  def getNotebook(self, authenticationToken, guid):
    """
    Returns the current state of the notebook with the provided GUID.
    The notebook may be active or deleted (but not expunged).

    @param guid
      The GUID of the notebook to be retrieved.

    @throws EDAMUserException <ul>
      <li> BAD_DATA_FORMAT "Notebook.guid" - if the parameter is missing
      </li>
      <li> PERMISSION_DENIED "Notebook" - private notebook, user doesn't own
      </li>
    </ul>

    @throws EDAMNotFoundException <ul>
      <li> "Notebook.guid" - notebook not found, by GUID
      </li>
    </ul>

    Parameters:
     - authenticationToken
     - guid
    """
    pass
def getDefaultNotebook(self, authenticationToken):
"""
Returns the notebook that should be used to store new notes in the
user's account when no other notebooks are specified.
Parameters:
- authenticationToken
"""
pass
def createNotebook(self, authenticationToken, notebook):
"""
Asks the service to make a notebook with the provided name.
@param notebook
The desired fields for the notebook must be provided on this
object. The name of the notebook must be set, and either the 'active'
or 'defaultNotebook' fields may be set by the client at creation.
If a notebook exists in the account with the same name (via
case-insensitive compare), this will throw an EDAMUserException.
@return
The newly created Notebook. The server-side GUID will be
saved in this object's 'guid' field.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "Notebook.name" - invalid length or pattern
</li>
<li> BAD_DATA_FORMAT "Notebook.stack" - invalid length or pattern
</li>
<li> BAD_DATA_FORMAT "Publishing.uri" - if publishing set but bad uri
</li>
<li> BAD_DATA_FORMAT "Publishing.publicDescription" - if too long
</li>
<li> DATA_CONFLICT "Notebook.name" - name already in use
</li>
<li> DATA_CONFLICT "Publishing.uri" - if URI already in use
</li>
<li> DATA_REQUIRED "Publishing.uri" - if publishing set but uri missing
</li>
<li> LIMIT_REACHED "Notebook" - at max number of notebooks
</li>
</ul>
Parameters:
- authenticationToken
- notebook
"""
pass
def updateNotebook(self, authenticationToken, notebook):
"""
Submits notebook changes to the service. The provided data must include
the notebook's guid field for identification.
@param notebook
The notebook object containing the requested changes.
@return
The Update Sequence Number for this change within the account.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "Notebook.name" - invalid length or pattern
</li>
<li> BAD_DATA_FORMAT "Notebook.stack" - invalid length or pattern
</li>
<li> BAD_DATA_FORMAT "Publishing.uri" - if publishing set but bad uri
</li>
<li> BAD_DATA_FORMAT "Publishing.publicDescription" - if too long
</li>
<li> DATA_CONFLICT "Notebook.name" - name already in use
</li>
<li> DATA_CONFLICT "Publishing.uri" - if URI already in use
</li>
<li> DATA_REQUIRED "Publishing.uri" - if publishing set but uri missing
</li>
</ul>
@throws EDAMNotFoundException <ul>
<li> "Notebook.guid" - not found, by GUID
</li>
</ul>
Parameters:
- authenticationToken
- notebook
"""
pass
def expungeNotebook(self, authenticationToken, guid):
"""
Permanently removes the notebook from the user's account.
After this action, the notebook is no longer available for undeletion, etc.
If the notebook contains any Notes, they will be moved to the current
default notebook and moved into the trash (i.e. Note.active=false).
<p/>
NOTE: This function is generally not available to third party applications.
Calls will result in an EDAMUserException with the error code
PERMISSION_DENIED.
@param guid
The GUID of the notebook to delete.
@return
The Update Sequence Number for this change within the account.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "Notebook.guid" - if the parameter is missing
</li>
<li> LIMIT_REACHED "Notebook" - trying to expunge the last Notebook
</li>
<li> PERMISSION_DENIED "Notebook" - private notebook, user doesn't own
</li>
</ul>
Parameters:
- authenticationToken
- guid
"""
pass
def listTags(self, authenticationToken):
"""
Returns a list of the tags in the account. Evernote does not support
the undeletion of tags, so this will only include active tags.
Parameters:
- authenticationToken
"""
pass
def listTagsByNotebook(self, authenticationToken, notebookGuid):
"""
Returns a list of the tags that are applied to at least one note within
the provided notebook. If the notebook is public, the authenticationToken
may be ignored.
@param notebookGuid
the GUID of the notebook to use to find tags
@throws EDAMNotFoundException <ul>
<li> "Notebook.guid" - notebook not found by GUID
</li>
</ul>
Parameters:
- authenticationToken
- notebookGuid
"""
pass
def getTag(self, authenticationToken, guid):
"""
Returns the current state of the Tag with the provided GUID.
@param guid
The GUID of the tag to be retrieved.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "Tag.guid" - if the parameter is missing
</li>
<li> PERMISSION_DENIED "Tag" - private Tag, user doesn't own
</li>
</ul>
@throws EDAMNotFoundException <ul>
<li> "Tag.guid" - tag not found, by GUID
</li>
</ul>
Parameters:
- authenticationToken
- guid
"""
pass
def createTag(self, authenticationToken, tag):
"""
Asks the service to make a tag with a set of information.
@param tag
The desired list of fields for the tag are specified in this
object. The caller must specify the tag name, and may provide
the parentGUID.
@return
The newly created Tag. The server-side GUID will be
saved in this object.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "Tag.name" - invalid length or pattern
</li>
<li> BAD_DATA_FORMAT "Tag.parentGuid" - malformed GUID
</li>
<li> DATA_CONFLICT "Tag.name" - name already in use
</li>
<li> LIMIT_REACHED "Tag" - at max number of tags
</li>
</ul>
@throws EDAMNotFoundException <ul>
<li> "Tag.parentGuid" - not found, by GUID
</li>
</ul>
Parameters:
- authenticationToken
- tag
"""
pass
def updateTag(self, authenticationToken, tag):
"""
Submits tag changes to the service. The provided data must include
the tag's guid field for identification. The service will apply
updates to the following tag fields: name, parentGuid
@param tag
The tag object containing the requested changes.
@return
The Update Sequence Number for this change within the account.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "Tag.name" - invalid length or pattern
</li>
<li> BAD_DATA_FORMAT "Tag.parentGuid" - malformed GUID
</li>
<li> DATA_CONFLICT "Tag.name" - name already in use
</li>
<li> DATA_CONFLICT "Tag.parentGuid" - can't set parent: circular
</li>
<li> PERMISSION_DENIED "Tag" - user doesn't own tag
</li>
</ul>
@throws EDAMNotFoundException <ul>
<li> "Tag.guid" - tag not found, by GUID
</li>
<li> "Tag.parentGuid" - parent not found, by GUID
</li>
</ul>
Parameters:
- authenticationToken
- tag
"""
pass
  def untagAll(self, authenticationToken, guid):
    """
    Removes the provided tag from every note that is currently tagged with
    this tag. If this operation is successful, the tag will still be in
    the account, but it will not be tagged on any notes.

    This function is not intended for use by full synchronizing clients, since
    it does not provide enough result information to the client to reconcile
    the local state without performing a follow-up sync from the service. This
    is intended for "thin clients" that need to efficiently support this as
    a UI operation.

    @param guid
      The GUID of the tag to remove from all notes.

    @throws EDAMUserException <ul>
      <li> BAD_DATA_FORMAT "Tag.guid" - if the guid parameter is missing
      </li>
      <li> PERMISSION_DENIED "Tag" - user doesn't own tag
      </li>
    </ul>

    @throws EDAMNotFoundException <ul>
      <li> "Tag.guid" - tag not found, by GUID
      </li>
    </ul>

    Parameters:
     - authenticationToken
     - guid
    """
    pass
def expungeTag(self, authenticationToken, guid):
"""
Permanently deletes the tag with the provided GUID, if present.
<p/>
NOTE: This function is generally not available to third party applications.
Calls will result in an EDAMUserException with the error code
PERMISSION_DENIED.
@param guid
The GUID of the tag to delete.
@return
The Update Sequence Number for this change within the account.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "Tag.guid" - if the guid parameter is missing
</li>
<li> PERMISSION_DENIED "Tag" - user doesn't own tag
</li>
</ul>
@throws EDAMNotFoundException <ul>
<li> "Tag.guid" - tag not found, by GUID
</li>
</ul>
Parameters:
- authenticationToken
- guid
"""
pass
def listSearches(self, authenticationToken):
"""
Returns a list of the searches in the account. Evernote does not support
the undeletion of searches, so this will only include active searches.
Parameters:
- authenticationToken
"""
pass
  def getSearch(self, authenticationToken, guid):
    """
    Returns the current state of the search with the provided GUID.

    @param guid
      The GUID of the search to be retrieved.

    @throws EDAMUserException <ul>
      <li> BAD_DATA_FORMAT "SavedSearch.guid" - if the parameter is missing
      </li>
      <li> PERMISSION_DENIED "SavedSearch" - private saved search, user doesn't own
      </li>
    </ul>

    @throws EDAMNotFoundException <ul>
      <li> "SavedSearch.guid" - not found, by GUID
      </li>
    </ul>

    Parameters:
     - authenticationToken
     - guid
    """
    pass
def createSearch(self, authenticationToken, search):
"""
Asks the service to make a saved search with a set of information.
@param search
The desired list of fields for the search are specified in this
object. The caller must specify the name and query for the
search, and may optionally specify a search scope.
The SavedSearch.format field is ignored by the service.
@return
The newly created SavedSearch. The server-side GUID will be
saved in this object.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "SavedSearch.name" - invalid length or pattern
</li>
<li> BAD_DATA_FORMAT "SavedSearch.query" - invalid length
</li>
<li> DATA_CONFLICT "SavedSearch.name" - name already in use
</li>
<li> LIMIT_REACHED "SavedSearch" - at max number of searches
</li>
</ul>
Parameters:
- authenticationToken
- search
"""
pass
  def updateSearch(self, authenticationToken, search):
    """
    Submits search changes to the service. The provided data must include
    the search's guid field for identification. The service will apply
    updates to the following search fields: name, query, and scope.

    @param search
      The search object containing the requested changes.

    @return
      The Update Sequence Number for this change within the account.

    @throws EDAMUserException <ul>
      <li> BAD_DATA_FORMAT "SavedSearch.name" - invalid length or pattern
      </li>
      <li> BAD_DATA_FORMAT "SavedSearch.query" - invalid length
      </li>
      <li> DATA_CONFLICT "SavedSearch.name" - name already in use
      </li>
      <li> PERMISSION_DENIED "SavedSearch" - user doesn't own saved search
      </li>
    </ul>

    @throws EDAMNotFoundException <ul>
      <li> "SavedSearch.guid" - not found, by GUID
      </li>
    </ul>

    Parameters:
     - authenticationToken
     - search
    """
    pass
def expungeSearch(self, authenticationToken, guid):
"""
Permanently deletes the saved search with the provided GUID, if present.
<p/>
NOTE: This function is generally not available to third party applications.
Calls will result in an EDAMUserException with the error code
PERMISSION_DENIED.
@param guid
The GUID of the search to delete.
@return
The Update Sequence Number for this change within the account.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "SavedSearch.guid" - if the guid parameter is empty
</li>
<li> PERMISSION_DENIED "SavedSearch" - user doesn't own
</li>
</ul>
@throws EDAMNotFoundException <ul>
<li> "SavedSearch.guid" - not found, by GUID
</li>
</ul>
Parameters:
- authenticationToken
- guid
"""
pass
def findNotes(self, authenticationToken, filter, offset, maxNotes):
"""
DEPRECATED. Use findNotesMetadata.
Parameters:
- authenticationToken
- filter
- offset
- maxNotes
"""
pass
def findNoteOffset(self, authenticationToken, filter, guid):
"""
Finds the position of a note within a sorted subset of all of the user's
notes. This may be useful for thin clients that are displaying a paginated
listing of a large account, which need to know where a particular note
sits in the list without retrieving all notes first.
@param authenticationToken
Must be a valid token for the user's account unless the NoteFilter
'notebookGuid' is the GUID of a public notebook.
@param filter
The list of criteria that will constrain the notes to be returned.
@param guid
The GUID of the note to be retrieved.
@return
If the note with the provided GUID is found within the matching note
list, this will return the offset of that note within that list (where
the first offset is 0). If the note is not found within the set of
notes, this will return -1.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "offset" - not between 0 and EDAM_USER_NOTES_MAX
</li>
<li> BAD_DATA_FORMAT "maxNotes" - not between 0 and EDAM_USER_NOTES_MAX
</li>
<li> BAD_DATA_FORMAT "NoteFilter.notebookGuid" - if malformed
</li>
<li> BAD_DATA_FORMAT "NoteFilter.tagGuids" - if any are malformed
</li>
<li> BAD_DATA_FORMAT "NoteFilter.words" - if search string too long
</li>
@throws EDAMNotFoundException <ul>
<li> "Notebook.guid" - not found, by GUID
</li>
<li> "Note.guid" - not found, by GUID
</li>
</ul>
Parameters:
- authenticationToken
- filter
- guid
"""
pass
  def findNotesMetadata(self, authenticationToken, filter, offset, maxNotes, resultSpec):
    """
    Used to find the high-level information about a set of the notes from a
    user's account based on various criteria specified via a NoteFilter object.
    <p/>
    Web applications that wish to periodically check for new content in a user's
    Evernote account should consider using webhooks instead of polling this API.
    See http://dev.evernote.com/documentation/cloud/chapters/polling_notification.php
    for more information.

    @param authenticationToken
      Must be a valid token for the user's account unless the NoteFilter
      'notebookGuid' is the GUID of a public notebook.

    @param filter
      The list of criteria that will constrain the notes to be returned.

    @param offset
      The numeric index of the first note to show within the sorted
      results. The numbering scheme starts with "0". This can be used for
      pagination.

    @param maxNotes
      The maximum number of notes to return in this query. The service will
      return a set of notes that is no larger than this number, but may return
      fewer notes if needed. The NoteList.totalNotes field in the return value
      will indicate whether there are more values available after the
      returned set.

    @param resultSpec
      This specifies which information should be returned for each matching
      Note. The fields on this structure can be used to eliminate data that
      the client doesn't need, which will reduce the time and bandwidth
      to receive and process the reply.

    @return
      The list of notes that match the criteria.

    @throws EDAMUserException <ul>
      <li> BAD_DATA_FORMAT "offset" - not between 0 and EDAM_USER_NOTES_MAX
      </li>
      <li> BAD_DATA_FORMAT "maxNotes" - not between 0 and EDAM_USER_NOTES_MAX
      </li>
      <li> BAD_DATA_FORMAT "NoteFilter.notebookGuid" - if malformed
      </li>
      <li> BAD_DATA_FORMAT "NoteFilter.tagGuids" - if any are malformed
      </li>
      <li> BAD_DATA_FORMAT "NoteFilter.words" - if search string too long
      </li>
    </ul>

    @throws EDAMNotFoundException <ul>
      <li> "Notebook.guid" - not found, by GUID
      </li>
    </ul>

    Parameters:
     - authenticationToken
     - filter
     - offset
     - maxNotes
     - resultSpec
    """
    pass
def findNoteCounts(self, authenticationToken, filter, withTrash):
"""
This function is used to determine how many notes are found for each
notebook and tag in the user's account, given a current set of filter
parameters that determine the current selection. This function will
return a structure that gives the note count for each notebook and tag
that has at least one note under the requested filter. Any notebook or
tag that has zero notes in the filtered set will not be listed in the
reply to this function (so they can be assumed to be 0).
@param authenticationToken
Must be a valid token for the user's account unless the NoteFilter
'notebookGuid' is the GUID of a public notebook.
@param filter
The note selection filter that is currently being applied. The note
counts are to be calculated with this filter applied to the total set
of notes in the user's account.
@param withTrash
If true, then the NoteCollectionCounts.trashCount will be calculated
and supplied in the reply. Otherwise, the trash value will be omitted.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "NoteFilter.notebookGuid" - if malformed
</li>
<li> BAD_DATA_FORMAT "NoteFilter.notebookGuids" - if any are malformed
</li>
<li> BAD_DATA_FORMAT "NoteFilter.words" - if search string too long
</li>
@throws EDAMNotFoundException <ul>
<li> "Notebook.guid" - not found, by GUID
</li>
</ul>
Parameters:
- authenticationToken
- filter
- withTrash
"""
pass
def getNote(self, authenticationToken, guid, withContent, withResourcesData, withResourcesRecognition, withResourcesAlternateData):
"""
Returns the current state of the note in the service with the provided
GUID. The ENML contents of the note will only be provided if the
'withContent' parameter is true. The service will include the meta-data
for each resource in the note, but the binary contents of the resources
and their recognition data will be omitted.
If the Note is found in a public notebook, the authenticationToken
will be ignored (so it could be an empty string). The applicationData
fields are returned as keysOnly.
@param guid
The GUID of the note to be retrieved.
@param withContent
If true, the note will include the ENML contents of its
'content' field.
@param withResourcesData
If true, any Resource elements in this Note will include the binary
contents of their 'data' field's body.
@param withResourcesRecognition
If true, any Resource elements will include the binary contents of the
'recognition' field's body if recognition data is present.
@param withResourcesAlternateData
If true, any Resource elements in this Note will include the binary
contents of their 'alternateData' fields' body, if an alternate form
is present.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "Note.guid" - if the parameter is missing
</li>
<li> PERMISSION_DENIED "Note" - private note, user doesn't own
</li>
</ul>
@throws EDAMNotFoundException <ul>
<li> "Note.guid" - not found, by GUID
</li>
</ul>
Parameters:
- authenticationToken
- guid
- withContent
- withResourcesData
- withResourcesRecognition
- withResourcesAlternateData
"""
pass
def getNoteApplicationData(self, authenticationToken, guid):
"""
Get all of the application data for the note identified by GUID,
with values returned within the LazyMap fullMap field.
If there are no applicationData entries, then a LazyMap
with an empty fullMap will be returned. If your application
only needs to fetch its own applicationData entry, use
getNoteApplicationDataEntry instead.
Parameters:
- authenticationToken
- guid
"""
pass
def getNoteApplicationDataEntry(self, authenticationToken, guid, key):
"""
Get the value of a single entry in the applicationData map
for the note identified by GUID.
@throws EDAMNotFoundException <ul>
<li> "Note.guid" - note not found, by GUID</li>
<li> "NoteAttributes.applicationData.key" - note not found, by key</li>
</ul>
Parameters:
- authenticationToken
- guid
- key
"""
pass
def setNoteApplicationDataEntry(self, authenticationToken, guid, key, value):
"""
Update, or create, an entry in the applicationData map for
the note identified by guid.
Parameters:
- authenticationToken
- guid
- key
- value
"""
pass
def unsetNoteApplicationDataEntry(self, authenticationToken, guid, key):
"""
Remove an entry identified by 'key' from the applicationData map for
the note identified by 'guid'. Silently ignores an unset of a
non-existing key.
Parameters:
- authenticationToken
- guid
- key
"""
pass
def getNoteContent(self, authenticationToken, guid):
    """Return the XHTML contents of the note with the provided GUID.

    If the Note is found in a public notebook, the authenticationToken
    is ignored (so it may be an empty string).

    Args:
        authenticationToken: the caller's authentication token.
        guid: GUID of the note to be retrieved.

    Raises:
        EDAMUserException: BAD_DATA_FORMAT "Note.guid" if the parameter
            is missing; PERMISSION_DENIED "Note" for a private note the
            user doesn't own.
        EDAMNotFoundException: "Note.guid" - not found, by GUID.
    """
    pass
def getNoteSearchText(self, authenticationToken, guid, noteOnly, tokenizeForIndexing):
    """Return a block of the extracted plain-text contents of a note.

    This text can be indexed for search purposes by a light client that
    doesn't have capabilities to extract all of the searchable text
    content from the note and its resources.  If the Note is found in a
    public notebook, the authenticationToken is ignored (so it may be an
    empty string).

    Args:
        authenticationToken: the caller's authentication token.
        guid: GUID of the note to be retrieved.
        noteOnly: if True, only return the text extracted from the ENML
            contents of the note itself; if False, also include text
            extracted from any text-bearing resources (PDF, recognized
            images).
        tokenizeForIndexing: if True, break the text into cleanly
            separated and sanitized tokens; if False, return the rawer
            extraction with its original punctuation, capitalization,
            spacing, etc.

    Raises:
        EDAMUserException: BAD_DATA_FORMAT "Note.guid" if the parameter
            is missing; PERMISSION_DENIED "Note" for a private note the
            user doesn't own.
        EDAMNotFoundException: "Note.guid" - not found, by GUID.
    """
    pass
def getResourceSearchText(self, authenticationToken, guid):
    """Return a block of the extracted plain-text contents of a resource.

    This text can be indexed for search purposes by a light client that
    doesn't have capability to extract all of the searchable text
    content from a resource.  If the Resource is found in a public
    notebook, the authenticationToken is ignored (so it may be an empty
    string).

    Args:
        authenticationToken: the caller's authentication token.
        guid: GUID of the resource to be retrieved.

    Raises:
        EDAMUserException: BAD_DATA_FORMAT "Resource.guid" if the
            parameter is missing; PERMISSION_DENIED "Resource" for a
            private resource the user doesn't own.
        EDAMNotFoundException: "Resource.guid" - not found, by GUID.
    """
    pass
def getNoteTagNames(self, authenticationToken, guid):
    """Return the list of tag names for the note with the provided GUID.

    Can be used with authentication to get the tags for a user's own
    note, or without valid authentication to retrieve the tag names for
    a note in a public notebook.

    Args:
        authenticationToken: the caller's authentication token.
        guid: GUID of the note whose tag names are requested.

    Raises:
        EDAMUserException: BAD_DATA_FORMAT "Note.guid" if the parameter
            is missing; PERMISSION_DENIED "Note" for a private note the
            user doesn't own.
        EDAMNotFoundException: "Note.guid" - not found, by GUID.
    """
    pass
def createNote(self, authenticationToken, note):
    """Ask the service to make a note with the provided set of information.

    Args:
        authenticationToken: the caller's authentication token.
        note: a Note object containing the desired fields to be
            populated on the service.

    Returns:
        The newly created Note from the service.  The server-side GUIDs
        for the Note and any Resources will be saved in this object.

    Raises:
        EDAMUserException:
            BAD_DATA_FORMAT "Note.title" - invalid length or pattern;
            BAD_DATA_FORMAT "Note.content" - invalid length for ENML content;
            BAD_DATA_FORMAT "Resource.mime" - invalid resource MIME type;
            BAD_DATA_FORMAT "NoteAttributes.*" / "ResourceAttributes.*" -
                bad resource string;
            DATA_CONFLICT "Note.deleted" - deleted time set on active note;
            DATA_REQUIRED "Resource.data" - resource data body missing;
            ENML_VALIDATION "*" - note content doesn't validate against DTD;
            LIMIT_REACHED "Note" - at max number per account;
            LIMIT_REACHED "Note.size" - total note size too large;
            LIMIT_REACHED "Note.resources" - too many resources on Note;
            LIMIT_REACHED "Note.tagGuids" - too many Tags on Note;
            LIMIT_REACHED "Resource.data.size" - resource too large;
            LIMIT_REACHED "NoteAttribute.*" / "ResourceAttribute.*" -
                attribute string too long;
            PERMISSION_DENIED "Note.notebookGuid" - notebook not owned by
                user;
            QUOTA_REACHED "Accounting.uploadLimit" - note exceeds upload
                quota;
            BAD_DATA_FORMAT "Tag.name" - Note.tagNames was provided, and
                one of the specified tags had an invalid length or pattern;
            LIMIT_REACHED "Tag" - Note.tagNames was provided, and the
                required new tags would exceed the maximum number per
                account.
        EDAMNotFoundException: "Note.notebookGuid" - not found, by GUID.
    """
    pass
def updateNote(self, authenticationToken, note):
    """Submit a set of changes to a note to the service.

    The provided data must include the note's guid field for
    identification.  The note's title must also be set.

    Args:
        authenticationToken: the caller's authentication token.
        note: a Note object containing the desired fields to be
            populated on the service.  With the exception of the note's
            title and guid, fields that are not being changed do not
            need to be set.  If the content is not being modified,
            note.content should be left unset.  If the list of resources
            is not being modified, note.resources should be left unset.

    Returns:
        The metadata (no contents) for the Note on the server after the
        update.

    Raises:
        EDAMUserException:
            BAD_DATA_FORMAT "Note.title" - invalid length or pattern;
            BAD_DATA_FORMAT "Note.content" - invalid length for ENML body;
            BAD_DATA_FORMAT "NoteAttributes.*" / "ResourceAttributes.*" -
                bad resource string;
            BAD_DATA_FORMAT "Resource.mime" - invalid resource MIME type;
            DATA_CONFLICT "Note.deleted" - deleted time set on active note;
            DATA_REQUIRED "Resource.data" - resource data body missing;
            ENML_VALIDATION "*" - note content doesn't validate against DTD;
            LIMIT_REACHED "Note.tagGuids" - too many Tags on Note;
            LIMIT_REACHED "Note.resources" - too many resources on Note;
            LIMIT_REACHED "Note.size" - total note size too large;
            LIMIT_REACHED "Resource.data.size" - resource too large;
            LIMIT_REACHED "NoteAttribute.*" / "ResourceAttribute.*" -
                attribute string too long;
            PERMISSION_DENIED "Note" - user doesn't own;
            PERMISSION_DENIED "Note.notebookGuid" - user doesn't own
                destination;
            QUOTA_REACHED "Accounting.uploadLimit" - note exceeds upload
                quota;
            BAD_DATA_FORMAT "Tag.name" - Note.tagNames was provided, and
                one of the specified tags had an invalid length or pattern;
            LIMIT_REACHED "Tag" - Note.tagNames was provided, and the
                required new tags would exceed the maximum number per
                account.
        EDAMNotFoundException: "Note.guid" - note not found, by GUID;
            "Note.notebookGuid" - if notebookGuid provided, but not found.
    """
    pass
def deleteNote(self, authenticationToken, guid):
    """Move the note into the trash.

    The note may still be undeleted, unless it is expunged.  This is
    equivalent to calling updateNote() after setting Note.active = False.

    Args:
        authenticationToken: the caller's authentication token.
        guid: GUID of the note to delete.

    Returns:
        The Update Sequence Number for this change within the account.

    Raises:
        EDAMUserException: PERMISSION_DENIED "Note" - user doesn't have
            permission to update the note; DATA_CONFLICT "Note.guid" -
            the note is already deleted.
        EDAMNotFoundException: "Note.guid" - not found, by GUID.
    """
    pass
def expungeNote(self, authenticationToken, guid):
    """Permanently remove a Note, and all of its Resources, from the service.

    NOTE: This function is not available to third party applications.
    Calls will result in an EDAMUserException with the error code
    PERMISSION_DENIED.

    Args:
        authenticationToken: the caller's authentication token.
        guid: GUID of the note to delete.

    Returns:
        The Update Sequence Number for this change within the account.

    Raises:
        EDAMUserException: PERMISSION_DENIED "Note" - user doesn't own.
        EDAMNotFoundException: "Note.guid" - not found, by GUID.
    """
    pass
def expungeNotes(self, authenticationToken, noteGuids):
    """Permanently remove a list of Notes, and all of their Resources.

    This should be invoked with a small number of Note GUIDs (e.g. 100
    or less) on each call; to expunge a larger number of notes, call
    this method multiple times.  It should also be used to reduce the
    number of Notes in a notebook before calling expungeNotebook(), or
    in the trash before calling expungeInactiveNotes(), since those
    calls may be prohibitively slow with more than a few hundred notes.
    If an exception is thrown for any of the GUIDs, none of the notes
    are deleted - the call can be treated as an atomic transaction.

    NOTE: This function is not available to third party applications.
    Calls will result in an EDAMUserException with the error code
    PERMISSION_DENIED.

    Args:
        authenticationToken: the caller's authentication token.
        noteGuids: list of GUIDs for the Notes to remove.

    Returns:
        The account's updateCount at the end of this operation.

    Raises:
        EDAMUserException: PERMISSION_DENIED "Note" - user doesn't own.
        EDAMNotFoundException: "Note.guid" - not found, by GUID.
    """
    pass
def expungeInactiveNotes(self, authenticationToken):
    """Permanently remove all Notes currently marked as inactive.

    This is equivalent to "emptying the trash"; these Notes will be gone
    permanently.  The operation may be relatively slow if the account
    contains a large number of inactive Notes.

    NOTE: This function is not available to third party applications.
    Calls will result in an EDAMUserException with the error code
    PERMISSION_DENIED.

    Args:
        authenticationToken: the caller's authentication token.

    Returns:
        The number of notes that were expunged.
    """
    pass
def copyNote(self, authenticationToken, noteGuid, toNotebookGuid):
    """Perform a deep copy of a Note into another Notebook.

    Copies the Note with GUID ``noteGuid`` into the Notebook with GUID
    ``toNotebookGuid``.  The caller must be the owner of both the Note
    and the Notebook.  This creates a new Note in the destination
    Notebook with new content and Resources that match all of the
    content and Resources from the original Note, but with new GUID
    identifiers.  The original Note is not modified by this operation.
    The copied note is considered an "upload" for the purpose of upload
    transfer limit calculation, so its size is added to the owner's
    upload count.

    Args:
        authenticationToken: the caller's authentication token.
        noteGuid: GUID of the Note to copy.
        toNotebookGuid: GUID of the Notebook that should receive the new
            Note.

    Returns:
        The metadata for the new Note that was created.  This includes
        the new GUID for this Note (and any copied Resources), but not
        the content body or the binary bodies of any Resources.

    Raises:
        EDAMUserException:
            LIMIT_REACHED "Note" - at max number per account;
            PERMISSION_DENIED "Notebook.guid" - destination not owned by
                user;
            PERMISSION_DENIED "Note" - user doesn't own;
            QUOTA_REACHED "Accounting.uploadLimit" - note exceeds upload
                quota.
        EDAMNotFoundException: "Notebook.guid" - not found, by GUID.
    """
    pass
def listNoteVersions(self, authenticationToken, noteGuid):
    """Return a list of the prior versions of a note saved on the service.

    These prior versions are stored to provide recovery from
    unintentional removal of content from a note.  The identifiers that
    are returned can be used with getNoteVersion to retrieve a previous
    note.  Identifiers are listed from the most recent version to the
    oldest.

    Args:
        authenticationToken: the caller's authentication token.
        noteGuid: GUID of the note whose versions are requested.

    Raises:
        EDAMUserException: BAD_DATA_FORMAT "Note.guid" if the parameter
            is missing; PERMISSION_DENIED "Note" for a private note the
            user doesn't own.
        EDAMNotFoundException: "Note.guid" - not found, by GUID.
    """
    pass
def getNoteVersion(self, authenticationToken, noteGuid, updateSequenceNum, withResourcesData, withResourcesRecognition, withResourcesAlternateData):
    """Retrieve a previous version of a Note after it has been updated.

    The caller must identify the note (via its guid) and the version
    (via the updateSequenceNumber of that version).  To find a listing
    of the stored version USNs for a note, call listNoteVersions.
    This call is only available for notes in Premium accounts (access
    to past versions of Notes is a Premium-only feature).

    Args:
        authenticationToken: the caller's authentication token.
        noteGuid: GUID of the note to be retrieved.
        updateSequenceNum: USN of the version of the note being
            retrieved.
        withResourcesData: if True, any Resource elements in this Note
            will include the binary contents of their 'data' field's
            body.
        withResourcesRecognition: if True, any Resource elements will
            include the binary contents of the 'recognition' field's
            body if recognition data is present.
        withResourcesAlternateData: if True, any Resource elements in
            this Note will include the binary contents of their
            'alternateData' fields' body, if an alternate form is
            present.

    Raises:
        EDAMUserException: BAD_DATA_FORMAT "Note.guid" if the parameter
            is missing; PERMISSION_DENIED "Note" for a private note the
            user doesn't own; PERMISSION_DENIED "updateSequenceNum" if
            the account isn't permitted to access previous versions of
            notes (i.e. this is a Free account).
        EDAMNotFoundException: "Note.guid" - not found, by GUID;
            "Note.updateSequenceNumber" - the Note doesn't have a
            version with the corresponding USN.
    """
    pass
def getResource(self, authenticationToken, guid, withData, withRecognition, withAttributes, withAlternateData):
    """Return the current state of the resource with the provided GUID.

    If the Resource is found in a public notebook, the
    authenticationToken is ignored (so it may be an empty string).
    Only the keys for the applicationData will be returned.

    Args:
        authenticationToken: the caller's authentication token.
        guid: GUID of the resource to be retrieved.
        withData: if True, the Resource will include the binary contents
            of the 'data' field's body.
        withRecognition: if True, the Resource will include the binary
            contents of the 'recognition' field's body if recognition
            data is present.
        withAttributes: if True, the Resource will include the
            attributes.
        withAlternateData: if True, the Resource will include the binary
            contents of the 'alternateData' field's body, if an
            alternate form is present.

    Raises:
        EDAMUserException: BAD_DATA_FORMAT "Resource.guid" if the
            parameter is missing; PERMISSION_DENIED "Resource" for a
            private resource the user doesn't own.
        EDAMNotFoundException: "Resource.guid" - not found, by GUID.
    """
    pass
def getResourceApplicationData(self, authenticationToken, guid):
    """Get all of the application data for the Resource identified by GUID.

    Values are returned within the LazyMap ``fullMap`` field.  If there
    are no applicationData entries, a LazyMap with an empty ``fullMap``
    is returned.  If your application only needs to fetch its own
    applicationData entry, use getResourceApplicationDataEntry instead.

    Args:
        authenticationToken: the caller's authentication token.
        guid: GUID of the Resource whose application data is requested.
    """
    pass
def getResourceApplicationDataEntry(self, authenticationToken, guid, key):
    """Get the value of a single entry in a Resource's applicationData map.

    Args:
        authenticationToken: the caller's authentication token.
        guid: GUID of the Resource.
        key: applicationData map key to look up.

    Raises:
        EDAMNotFoundException: "Resource.guid" - Resource not found, by
            GUID; "ResourceAttributes.applicationData.key" - entry not
            found, by key.
    """
    pass
def setResourceApplicationDataEntry(self, authenticationToken, guid, key, value):
    """Update, or create, an entry in a Resource's applicationData map.

    Args:
        authenticationToken: the caller's authentication token.
        guid: GUID of the Resource to modify.
        key: applicationData map key to set.
        value: value to store under ``key``.
    """
    pass
def unsetResourceApplicationDataEntry(self, authenticationToken, guid, key):
    """Remove the entry identified by ``key`` from a Resource's applicationData map.

    Args:
        authenticationToken: the caller's authentication token.
        guid: GUID of the Resource to modify.
        key: applicationData map key to remove.
    """
    pass
def updateResource(self, authenticationToken, resource):
    """Submit a set of changes to a resource to the service.

    This can be used to update the meta-data about the resource, but
    cannot be used to change the binary contents of the resource
    (including the length and hash).  Those cannot be changed directly
    without creating a new resource and removing the old one via
    updateNote.

    Args:
        authenticationToken: the caller's authentication token.
        resource: a Resource object containing the desired fields to be
            populated on the service.  The service will attempt to
            update the resource with the following fields from the
            client: guid (must be provided to identify the resource),
            mime, width, height, duration, and attributes (optional; if
            present, the set of attributes will be replaced).

    Returns:
        The Update Sequence Number of the resource after the changes
        have been applied.

    Raises:
        EDAMUserException:
            BAD_DATA_FORMAT "Resource.guid" - if the parameter is missing;
            BAD_DATA_FORMAT "Resource.mime" - invalid resource MIME type;
            BAD_DATA_FORMAT "ResourceAttributes.*" - bad resource string;
            LIMIT_REACHED "ResourceAttribute.*" - attribute string too
                long;
            PERMISSION_DENIED "Resource" - private resource, user doesn't
                own.
        EDAMNotFoundException: "Resource.guid" - not found, by GUID.
    """
    pass
def getResourceData(self, authenticationToken, guid):
    """Return the binary data of the resource with the provided GUID.

    For example, if this were an image resource, this would contain the
    raw bits of the image.  If the Resource is found in a public
    notebook, the authenticationToken is ignored (so it may be an empty
    string).

    Args:
        authenticationToken: the caller's authentication token.
        guid: GUID of the resource to be retrieved.

    Raises:
        EDAMUserException: BAD_DATA_FORMAT "Resource.guid" if the
            parameter is missing; PERMISSION_DENIED "Resource" for a
            private resource the user doesn't own.
        EDAMNotFoundException: "Resource.guid" - not found, by GUID.
    """
    pass
def getResourceByHash(self, authenticationToken, noteGuid, contentHash, withData, withRecognition, withAlternateData):
    """Return a resource referenced by containing note GUID and content hash.

    Args:
        authenticationToken: the caller's authentication token.
        noteGuid: GUID of the note that holds the resource to be
            retrieved.
        contentHash: MD5 checksum of the resource within that note.
            Note that this is the binary checksum, for example from
            Resource.data.bodyHash, and not the hex-encoded checksum
            that is used within an en-media tag in a note body.
        withData: if True, the Resource will include the binary contents
            of the 'data' field's body.
        withRecognition: if True, the Resource will include the binary
            contents of the 'recognition' field's body.
        withAlternateData: if True, the Resource will include the binary
            contents of the 'alternateData' field's body, if an
            alternate form is present.

    Raises:
        EDAMUserException: DATA_REQUIRED "Note.guid" - noteGuid param
            missing; DATA_REQUIRED "Note.contentHash" - contentHash
            param missing; PERMISSION_DENIED "Resource" - private
            resource, user doesn't own.
        EDAMNotFoundException: "Note" - not found, by guid; "Resource" -
            not found, by hash.
    """
    pass
def getResourceRecognition(self, authenticationToken, guid):
    """Return the binary contents of the recognition index for a resource.

    If the caller asks about a resource that has no recognition data,
    this will throw EDAMNotFoundException.  If the Resource is found in
    a public notebook, the authenticationToken is ignored (so it may be
    an empty string).

    Args:
        authenticationToken: the caller's authentication token.
        guid: GUID of the resource whose recognition data should be
            retrieved.

    Raises:
        EDAMUserException: BAD_DATA_FORMAT "Resource.guid" if the
            parameter is missing; PERMISSION_DENIED "Resource" for a
            private resource the user doesn't own.
        EDAMNotFoundException: "Resource.guid" - not found, by GUID;
            "Resource.recognition" - resource has no recognition.
    """
    pass
def getResourceAlternateData(self, authenticationToken, guid):
    """Return the binary contents of a resource's alternate data file.

    If the Resource with the provided GUID has an alternate data
    representation (indicated via the Resource.alternateData field),
    this request retrieves the binary contents of that alternate data
    file.  If the caller asks about a resource that has no alternate
    data form, this will throw EDAMNotFoundException.

    Args:
        authenticationToken: the caller's authentication token.
        guid: GUID of the resource whose alternate data should be
            retrieved.

    Raises:
        EDAMUserException: BAD_DATA_FORMAT "Resource.guid" if the
            parameter is missing; PERMISSION_DENIED "Resource" for a
            private resource the user doesn't own.
        EDAMNotFoundException: "Resource.guid" - not found, by GUID;
            "Resource.alternateData" - resource has no alternate data.
    """
    pass
def getResourceAttributes(self, authenticationToken, guid):
    """Return the set of attributes for the Resource with the provided GUID.

    If the Resource is found in a public notebook, the
    authenticationToken is ignored (so it may be an empty string).

    Args:
        authenticationToken: the caller's authentication token.
        guid: GUID of the resource whose attributes should be retrieved.

    Raises:
        EDAMUserException: BAD_DATA_FORMAT "Resource.guid" if the
            parameter is missing; PERMISSION_DENIED "Resource" for a
            private resource the user doesn't own.
        EDAMNotFoundException: "Resource.guid" - not found, by GUID.
    """
    pass
def getPublicNotebook(self, userId, publicUri):
    """Look up a public notebook by owner userId and public URI.

    Looks for a user account with the provided userId on this NoteStore
    shard and determines whether that account contains a public
    notebook with the given URI.  If the account is not found, or no
    public notebook exists with this URI, this will throw an
    EDAMNotFoundException; otherwise the information for that Notebook
    is returned.

    If a notebook is visible on the web with a full URL like
    http://www.evernote.com/pub/sethdemo/api
    then 'sethdemo' is the username that can be used to look up the
    userId, and 'api' is the publicUri.

    Args:
        userId: numeric identifier for the user who owns the public
            notebook.  To find this value based on a username string,
            invoke UserStore.getPublicUserInfo.
        publicUri: uri string for the public notebook, from
            Notebook.publishing.uri.

    Raises:
        EDAMNotFoundException: "Publishing.uri" - not found, by URI.
        EDAMSystemException: TAKEN_DOWN "PublicNotebook" - the specified
            public notebook is taken down (for all requesters);
            TAKEN_DOWN "Country" - the specified public notebook is
            taken down for the requester because of an IP-based country
            lookup.
    """
    pass
def createSharedNotebook(self, authenticationToken, sharedNotebook):
    """Construct a shared notebook object.

    The constructed notebook will contain a "share key" which serves as
    a unique identifier and access token for a user to access the
    notebook of the shared notebook owner.

    Args:
        authenticationToken: the caller's authentication token.
        sharedNotebook: a shared notebook object populated with the
            email address of the share recipient, the notebook guid and
            the access permissions.  All other attributes of the shared
            object are ignored.  The SharedNotebook.allowPreview field
            must be explicitly set with either a True or False value.

    Returns:
        The fully populated SharedNotebook object including the server
        assigned share id and shareKey which can both be used to
        uniquely identify the SharedNotebook.

    Raises:
        EDAMUserException: BAD_DATA_FORMAT "SharedNotebook.email" - if
            the email was not valid; BAD_DATA_FORMAT "requireLogin" -
            if the SharedNotebook.allowPreview field was not set, and
            the SharedNotebook.requireLogin was also not set or was set
            to False; PERMISSION_DENIED
            "SharedNotebook.recipientSettings" - if recipientSettings is
            set in the sharedNotebook (only the recipient can set these
            values via the setSharedNotebookRecipientSettings method).
        EDAMNotFoundException: Notebook.guid - if the notebookGuid is
            not a valid GUID for the user.
    """
    pass
def updateSharedNotebook(self, authenticationToken, sharedNotebook):
    """Update a SharedNotebook object.

    Args:
        authenticationToken: must be an authentication token from the
            owner or a shared notebook authentication token or business
            authentication token with sufficient permissions to change
            invitations for a notebook.
        sharedNotebook: the SharedNotebook object containing the
            requested changes.  The "id" of the shared notebook must be
            set to allow the service to identify the SharedNotebook to
            be updated.  In addition, you MUST set the email,
            permission, and allowPreview fields to the desired values.
            All other fields will be ignored if set.

    Returns:
        The Update Serial Number for this change within the account.

    Raises:
        EDAMUserException: UNSUPPORTED_OPERATION "updateSharedNotebook" -
            if this service instance does not support shared notebooks;
            BAD_DATA_FORMAT "SharedNotebook.email" - if the email was
            not valid; DATA_REQUIRED "SharedNotebook.id" - if the id
            field was not set; DATA_REQUIRED "SharedNotebook.privilege" -
            if the privilege field was not set; DATA_REQUIRED
            "SharedNotebook.allowPreview" - if the allowPreview field
            was not set.
        EDAMNotFoundException: SharedNotebook.id - if no shared notebook
            with the specified ID was found.
    """
    pass
def setSharedNotebookRecipientSettings(self, authenticationToken, sharedNotebookId, recipientSettings):
    """Set values for the recipient settings of a shared notebook.

    Having update rights to the shared notebook record itself has no
    effect on this call; only the recipient of the shared notebook can
    set the recipient settings.  If you do *not* wish to, or cannot,
    change one of the reminderNotifyEmail or reminderNotifyInApp
    fields, you must leave that field unset in recipientSettings - this
    method will skip that field for updates and leave the existing
    state as it is.

    Args:
        authenticationToken: the recipient's authentication token.
        sharedNotebookId: identifier of the shared notebook record.
        recipientSettings: the settings values to apply.

    Returns:
        The update sequence number of the account to which the shared
        notebook belongs, which is the account from which we are
        sharing a notebook.

    Raises:
        EDAMNotFoundException: "sharedNotebookId" - thrown if the
            service does not have a shared notebook record for the
            sharedNotebookId on the given shard.  If you receive this
            exception, it is probable that the shared notebook record
            has been revoked or expired, or that you accessed the wrong
            shard.
        EDAMUserException: PERMISSION_DENIED "authenticationToken" - if
            you do not have permission to set the recipient settings
            for the shared notebook (only the recipient has permission
            to do this); DATA_CONFLICT
            "recipientSettings.reminderNotifyEmail" - setting whether
            or not you want to receive reminder e-mail notifications is
            possible on a business notebook in the business to which
            the user belongs.  All others can safely unset the
            reminderNotifyEmail field from the recipientSettings
            parameter.
    """
    pass
def sendMessageToSharedNotebookMembers(self, authenticationToken, notebookGuid, messageText, recipients):
    """Send a reminder message to members a notebook has been shared with.

    The message includes the current link to view the notebook.

    Args:
        authenticationToken: the auth token of the user with permissions
            to share the notebook.
        notebookGuid: guid of the shared notebook.
        messageText: user provided text to include in the email.
        recipients: the email addresses of the recipients.  If this
            list is empty then all of the users that the notebook has
            been shared with are emailed.  If an email address doesn't
            correspond to share invite members then that address is
            ignored.

    Returns:
        The number of messages sent.

    Raises:
        EDAMUserException: LIMIT_REACHED "(recipients)" - the email
            can't be sent because this would exceed the user's daily
            email limit; PERMISSION_DENIED "Notebook.guid" - the user
            doesn't have permission to send a message for the specified
            notebook.
        EDAMNotFoundException: "Notebook.guid" - not found, by GUID.
    """
    pass
def listSharedNotebooks(self, authenticationToken):
    """List the shared notebooks for all notebooks in the user's account.

    Args:
        authenticationToken: the caller's authentication token.

    Returns:
        The list of all SharedNotebooks for the user.
    """
    pass
def expungeSharedNotebooks(self, authenticationToken, sharedNotebookIds):
    """Expunge SharedNotebooks from the user's account by SharedNotebook.id.

    NOTE: This function is generally not available to third party
    applications.  Calls will result in an EDAMUserException with the
    error code PERMISSION_DENIED.

    Args:
        authenticationToken: the caller's authentication token.
        sharedNotebookIds: a list of SharedNotebook.id longs identifying
            the objects to delete permanently.

    Returns:
        The account's update sequence number.
    """
    pass
def createLinkedNotebook(self, authenticationToken, linkedNotebook):
    """Ask the service to make a linked notebook.

    Creates a linked notebook with the provided name, username of the
    owner and identifiers provided.  A linked notebook can be either a
    link to a public notebook or to a private shared notebook.

    Args:
        authenticationToken: the caller's authentication token.
        linkedNotebook: the desired fields for the linked notebook must
            be provided on this object.  The name of the linked
            notebook must be set.  Either a username uri or a shard id
            and share key must be provided, otherwise an
            EDAMUserException is thrown.

    Returns:
        The newly created LinkedNotebook.  The server-side id will be
        saved in this object's 'id' field.

    Raises:
        EDAMUserException:
            BAD_DATA_FORMAT "LinkedNotebook.name" - invalid length or
                pattern;
            BAD_DATA_FORMAT "LinkedNotebook.username" - bad username
                format;
            BAD_DATA_FORMAT "LinkedNotebook.uri" - if public notebook
                set but bad uri;
            BAD_DATA_FORMAT "LinkedNotebook.shareKey" - if private
                notebook set but bad shareKey;
            DATA_REQUIRED "LinkedNotebook.shardId" - if private
                notebook but shard id not provided.
    """
    pass
def updateLinkedNotebook(self, authenticationToken, linkedNotebook):
    """Update the name of a linked notebook.

    :param linkedNotebook: object carrying the requested name change.
    :returns: the Update Sequence Number for this change within the account.
    :raises EDAMUserException:
        BAD_DATA_FORMAT "LinkedNotebook.name" - invalid length or pattern.
    """
    pass
def listLinkedNotebooks(self, authenticationToken):
    """Return the list of linked notebooks in the account.

    :param authenticationToken: caller's session token.
    """
    pass
def expungeLinkedNotebook(self, authenticationToken, guid):
    """Permanently remove a linked notebook from the account.

    Generally not available to third-party applications; such calls raise
    EDAMUserException with error code PERMISSION_DENIED.

    :param guid: the LinkedNotebook.guid of the object to remove permanently.
    """
    pass
def authenticateToSharedNotebook(self, shareKey, authenticationToken):
    """Produce an auth token granting access to someone else's shared notebook.

    The returned token can be used with other NoteStore calls to find and
    retrieve notes and, permissions allowing, modify the notebook's contents.

    :param shareKey: the 'shareKey' from the SharedNotebook granted to a
        recipient; it internally encodes the notebook identifier plus a
        security signature.
    :param authenticationToken: if non-empty, the full user-based token of
        the currently-logged-in user (may be required when the notebook was
        created with 'requireLogin'); if empty, the service attempts access
        without any logged-in user.
    :raises EDAMSystemException:
        BAD_DATA_FORMAT "shareKey" - invalid shareKey string;
        INVALID_AUTH "shareKey" - bad signature on shareKey string.
    :raises EDAMNotFoundException:
        "SharedNotebook.id" - the shared notebook no longer exists.
    :raises EDAMUserException:
        DATA_REQUIRED "authenticationToken" - share requires login and no
        valid authentication token was provided;
        PERMISSION_DENIED "SharedNotebook.username" - share requires login
        and another username is already bound to this notebook.
    """
    pass
def getSharedNotebookByAuth(self, authenticationToken):
    """Return extended info about a shared notebook for an authenticated guest.

    The token here must be the result of authenticateToSharedNotebook(...) -
    i.e. the token granting access to that particular shared notebook, not
    the notebook owner's own authentication token.

    :param authenticationToken: token from authenticateToSharedNotebook(),
        proving access to a particular shared notebook.
    :raises EDAMUserException:
        PERMISSION_DENIED "authenticationToken" - token does not correspond
        to a valid shared notebook.
    :raises EDAMNotFoundException:
        "SharedNotebook.id" - the shared notebook no longer exists.
    """
    pass
def emailNote(self, authenticationToken, parameters):
    """Send a single note to one or more email recipients.

    Generally not available to third-party applications; such calls raise
    EDAMUserException with error code PERMISSION_DENIED.

    :param authenticationToken: the note is sent as this user, from the
        user's registered email address; emailing fails if the user cannot
        read the note.
    :param parameters: identifies the note either by GUID (existing service
        data is used) or as a full Note object, plus the extra email fields.
    :raises EDAMUserException:
        LIMIT_REACHED "NoteEmailParameters.toAddresses" - sending would
        exceed the user's daily email limit;
        BAD_DATA_FORMAT "(email address)" - email address malformed;
        DATA_REQUIRED "NoteEmailParameters.toAddresses" - no To: or Cc:
        addresses provided;
        DATA_REQUIRED "Note.title" - Note parameter given with no title;
        DATA_REQUIRED "Note.content" - Note parameter given with no content;
        ENML_VALIDATION "*" - note content doesn't validate against DTD;
        DATA_REQUIRED "NoteEmailParameters.note" - no guid or note provided;
        PERMISSION_DENIED "Note" - private note, user doesn't own.
    :raises EDAMNotFoundException:
        "Note.guid" - not found, by GUID.
    """
    pass
def shareNote(self, authenticationToken, guid):
    """Start sharing a note via its own direct URL and return its "Note Key".

    The secret Note Key, combined with the note's GUID, gives direct
    read-only access to the note. If the note is already shared, nothing
    changes and the existing key is returned; the only way to rotate the
    key is stopSharingNote followed by another call to this function.

    :param guid: the GUID of the note to be shared.
    :raises EDAMUserException:
        BAD_DATA_FORMAT "Note.guid" - if the parameter is missing;
        PERMISSION_DENIED "Note" - private note, user doesn't own.
    :raises EDAMNotFoundException:
        "Note.guid" - not found, by GUID.
    """
    pass
def stopSharingNote(self, authenticationToken, guid):
    """Stop sharing a note and invalidate its "Note Key".

    Existing direct URLs to the note stop working. If the note is not
    currently shared, this is a no-op.
    (NOTE(review): the upstream generated docstring said "If this note is
    not already shared then this will stop sharing" - the behavior described
    by the rest of that text, reflected here, is the inverse.)

    :param guid: the GUID of the note to be un-shared.
    :raises EDAMUserException:
        BAD_DATA_FORMAT "Note.guid" - if the parameter is missing;
        PERMISSION_DENIED "Note" - private note, user doesn't own.
    :raises EDAMNotFoundException:
        "Note.guid" - not found, by GUID.
    """
    pass
def authenticateToSharedNote(self, guid, noteKey, authenticationToken):
    """Produce an auth token for a single individually-shared Note.

    The returned token can be used with other NoteStore calls to retrieve
    the Note and its directly-referenced children.

    :param guid: the GUID identifying this Note on this shard.
    :param noteKey: the 'noteKey' originally created via shareNote() and
        given to a recipient.
    :param authenticationToken: optional token identifying the accessing
        user; may be required for some shared notes.
    :raises EDAMUserException:
        PERMISSION_DENIED "Note" - the Note with that GUID is not shared,
        or the noteKey doesn't match the note's current key;
        PERMISSION_DENIED "authenticationToken" - an authentication token
        is required but none (or a "non-owner" token) was provided.
    :raises EDAMNotFoundException:
        "guid" - the note with that GUID is not found.
    :raises EDAMSystemException:
        TAKEN_DOWN "Note" - the shared note is taken down for all requesters;
        TAKEN_DOWN "Country" - taken down for this requester due to an
        IP-based country lookup.
    """
    pass
def findRelated(self, authenticationToken, query, resultSpec):
    """Identify notes, notebooks and tags related to notes or content.

    :param query: describes the entity for which related items are sought.
    :param resultSpec: selects the type and quantity of information to
        return, saving time and bandwidth.
    :returns: the query result, containing information judged likely to be
        relevantly related to the queried information.
    :raises EDAMUserException:
        BAD_DATA_FORMAT "RelatedQuery.plainText" - zero-length plain text
        value provided;
        BAD_DATA_FORMAT "RelatedQuery.noteGuid" - GUID violates
        EDAM_GUID_LEN_MIN / EDAM_GUID_LEN_MAX / EDAM_GUID_REGEX;
        BAD_DATA_FORMAT "NoteFilter.notebookGuid" - if malformed;
        BAD_DATA_FORMAT "NoteFilter.tagGuids" - if any are malformed;
        BAD_DATA_FORMAT "NoteFilter.words" - if search string too long;
        PERMISSION_DENIED "Note" - caller lacks access to the note named
        by RelatedQuery.noteGuid;
        DATA_REQUIRED "RelatedResultSpec" - no values set in the result spec.
    :raises EDAMNotFoundException:
        "RelatedQuery.noteGuid" - note with that GUID not found, when that
        field is set in the query.
    """
    pass
class Client(Iface):
def __init__(self, iprot, oprot=None):
    """Bind the input protocol, and the output protocol when given.

    When no output protocol is supplied the input protocol is reused for
    both directions. The message sequence id starts at zero.
    """
    self._iprot = iprot
    self._oprot = iprot if oprot is None else oprot
    self._seqid = 0
def getSyncState(self, authenticationToken):
    """Return status information for the account behind the given token.

    :param authenticationToken: caller's session token.
    """
    # Fire the request, then block on the reply.
    self.send_getSyncState(authenticationToken)
    return self.recv_getSyncState()
def send_getSyncState(self, authenticationToken):
    """Serialize and flush a getSyncState CALL message."""
    out = self._oprot
    out.writeMessageBegin('getSyncState', TMessageType.CALL, self._seqid)
    args = getSyncState_args()
    args.authenticationToken = authenticationToken
    args.write(out)
    out.writeMessageEnd()
    out.trans.flush()
def recv_getSyncState(self):
    """Read the getSyncState reply; return success or raise the error."""
    fname, mtype, rseqid = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        err = TApplicationException()
        err.read(self._iprot)
        self._iprot.readMessageEnd()
        raise err
    reply = getSyncState_result()
    reply.read(self._iprot)
    self._iprot.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.userException is not None:
        raise reply.userException
    if reply.systemException is not None:
        raise reply.systemException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getSyncState failed: unknown result")
def getSyncStateWithMetrics(self, authenticationToken, clientMetrics):
    """Return account status, uploading coarse client usage metrics too.

    Variant of getSyncState that lets the client report usage metrics.

    :param clientMetrics: see the ClientUsageMetrics structure docs for the
        fields clients can pass to the service.
    """
    self.send_getSyncStateWithMetrics(authenticationToken, clientMetrics)
    return self.recv_getSyncStateWithMetrics()
def send_getSyncStateWithMetrics(self, authenticationToken, clientMetrics):
    """Serialize and flush a getSyncStateWithMetrics CALL message."""
    out = self._oprot
    out.writeMessageBegin('getSyncStateWithMetrics', TMessageType.CALL, self._seqid)
    args = getSyncStateWithMetrics_args()
    args.authenticationToken = authenticationToken
    args.clientMetrics = clientMetrics
    args.write(out)
    out.writeMessageEnd()
    out.trans.flush()
def recv_getSyncStateWithMetrics(self):
    """Read the getSyncStateWithMetrics reply; return success or raise."""
    fname, mtype, rseqid = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        err = TApplicationException()
        err.read(self._iprot)
        self._iprot.readMessageEnd()
        raise err
    reply = getSyncStateWithMetrics_result()
    reply.read(self._iprot)
    self._iprot.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.userException is not None:
        raise reply.userException
    if reply.systemException is not None:
        raise reply.systemException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getSyncStateWithMetrics failed: unknown result")
def getSyncChunk(self, authenticationToken, afterUSN, maxEntries, fullSyncOnly):
    """DEPRECATED - use getFilteredSyncChunk.

    :param afterUSN: only return objects updated after this point.
    :param maxEntries: cap on the number of modified objects returned.
    :param fullSyncOnly: request initial full-sync data only.
    """
    self.send_getSyncChunk(authenticationToken, afterUSN, maxEntries, fullSyncOnly)
    return self.recv_getSyncChunk()
def send_getSyncChunk(self, authenticationToken, afterUSN, maxEntries, fullSyncOnly):
    """Serialize and flush a getSyncChunk CALL message."""
    out = self._oprot
    out.writeMessageBegin('getSyncChunk', TMessageType.CALL, self._seqid)
    args = getSyncChunk_args()
    args.authenticationToken = authenticationToken
    args.afterUSN = afterUSN
    args.maxEntries = maxEntries
    args.fullSyncOnly = fullSyncOnly
    args.write(out)
    out.writeMessageEnd()
    out.trans.flush()
def recv_getSyncChunk(self):
    """Read the getSyncChunk reply; return success or raise the error."""
    fname, mtype, rseqid = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        err = TApplicationException()
        err.read(self._iprot)
        self._iprot.readMessageEnd()
        raise err
    reply = getSyncChunk_result()
    reply.read(self._iprot)
    self._iprot.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.userException is not None:
        raise reply.userException
    if reply.systemException is not None:
        raise reply.systemException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getSyncChunk failed: unknown result")
def getFilteredSyncChunk(self, authenticationToken, afterUSN, maxEntries, filter):
    """Return one block of account state, ordered by last modification.

    Lets a client make several small requests against a large account
    instead of fetching the whole state in one message, with fine-grained
    control over which data elements are included (reducing network
    traffic and sync time).

    :param afterUSN: only return objects updated after this point, so the
        client can resume from its last checkpoint; 0 means start from the
        beginning of the account.
    :param maxEntries: maximum number of modified objects in the returned
        SyncChunk, to keep individual messages network-friendly.
    :param filter: flags selecting which data types to return during the
        synchronization; see the SyncChunkFilter structure for each flag.
    :raises EDAMUserException:
        BAD_DATA_FORMAT "afterUSN" - if negative;
        BAD_DATA_FORMAT "maxEntries" - if less than 1.
    """
    self.send_getFilteredSyncChunk(authenticationToken, afterUSN, maxEntries, filter)
    return self.recv_getFilteredSyncChunk()
def send_getFilteredSyncChunk(self, authenticationToken, afterUSN, maxEntries, filter):
    """Serialize and flush a getFilteredSyncChunk CALL message."""
    out = self._oprot
    out.writeMessageBegin('getFilteredSyncChunk', TMessageType.CALL, self._seqid)
    args = getFilteredSyncChunk_args()
    args.authenticationToken = authenticationToken
    args.afterUSN = afterUSN
    args.maxEntries = maxEntries
    args.filter = filter
    args.write(out)
    out.writeMessageEnd()
    out.trans.flush()
def recv_getFilteredSyncChunk(self):
    """Read the getFilteredSyncChunk reply; return success or raise."""
    fname, mtype, rseqid = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        err = TApplicationException()
        err.read(self._iprot)
        self._iprot.readMessageEnd()
        raise err
    reply = getFilteredSyncChunk_result()
    reply.read(self._iprot)
    self._iprot.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.userException is not None:
        raise reply.userException
    if reply.systemException is not None:
        raise reply.systemException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getFilteredSyncChunk failed: unknown result")
def getLinkedNotebookSyncState(self, authenticationToken, linkedNotebook):
    """Return status of a linked notebook shared with the caller or public.

    Similar to getSyncState, but SyncState.uploaded may be omitted if the
    caller lacks write permission on the linked notebook. Must be called
    on the shard owning the referenced notebook (the shardId in
    /shard/shardId/edam/note must equal LinkedNotebook.shardId).

    :param authenticationToken: token of the guest who received the share
        invitation (NOT the result of NoteStore.authenticateToSharedNotebook).
    :param linkedNotebook: identifying information and permissions for the
        notebook in question.
    """
    self.send_getLinkedNotebookSyncState(authenticationToken, linkedNotebook)
    return self.recv_getLinkedNotebookSyncState()
def send_getLinkedNotebookSyncState(self, authenticationToken, linkedNotebook):
    """Serialize and flush a getLinkedNotebookSyncState CALL message."""
    out = self._oprot
    out.writeMessageBegin('getLinkedNotebookSyncState', TMessageType.CALL, self._seqid)
    args = getLinkedNotebookSyncState_args()
    args.authenticationToken = authenticationToken
    args.linkedNotebook = linkedNotebook
    args.write(out)
    out.writeMessageEnd()
    out.trans.flush()
def recv_getLinkedNotebookSyncState(self):
    """Read the getLinkedNotebookSyncState reply; return success or raise."""
    fname, mtype, rseqid = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        err = TApplicationException()
        err.read(self._iprot)
        self._iprot.readMessageEnd()
        raise err
    reply = getLinkedNotebookSyncState_result()
    reply.read(self._iprot)
    self._iprot.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.userException is not None:
        raise reply.userException
    if reply.systemException is not None:
        raise reply.systemException
    if reply.notFoundException is not None:
        raise reply.notFoundException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getLinkedNotebookSyncState failed: unknown result")
def getLinkedNotebookSyncChunk(self, authenticationToken, linkedNotebook, afterUSN, maxEntries, fullSyncOnly):
    """Return contents of a linked notebook shared with the caller or public.

    Similar to getSyncChunk, but only caller-visible entries are included:
    that particular Notebook plus its Notes and the Tags on those Notes.
    Must be called on the shard owning the referenced notebook (the shardId
    in /shard/shardId/edam/note must equal LinkedNotebook.shardId).

    :param authenticationToken: token of the guest who received the share
        invitation (NOT the result of NoteStore.authenticateToSharedNotebook).
    :param linkedNotebook: identifying information and permissions for the
        notebook; must contain valid fields for either a shared notebook
        (e.g. shareKey) or a public notebook (e.g. username, uri).
    :param afterUSN: only return objects updated after this point, so the
        client can resume from its last checkpoint; 0 means start from the
        beginning of the account.
    :param maxEntries: maximum number of modified objects in the returned
        SyncChunk. Applications should not request more than 256 objects at
        a time, and must handle the service returning fewer than requested
        even when more objects remain available.
    :param fullSyncOnly: if true, only initial full-sync data is wanted; the
        service returns no expunged objects and no Resources (those are
        carried in their corresponding Notes).
    :raises EDAMUserException:
        BAD_DATA_FORMAT "afterUSN" - if negative;
        BAD_DATA_FORMAT "maxEntries" - if less than 1.
    :raises EDAMNotFoundException:
        "LinkedNotebook" - provided information matches no valid notebook;
        "LinkedNotebook.uri" - public URI matches no valid notebook;
        "SharedNotebook.id" - indicated shared notebook no longer exists.
    """
    self.send_getLinkedNotebookSyncChunk(authenticationToken, linkedNotebook, afterUSN, maxEntries, fullSyncOnly)
    return self.recv_getLinkedNotebookSyncChunk()
def send_getLinkedNotebookSyncChunk(self, authenticationToken, linkedNotebook, afterUSN, maxEntries, fullSyncOnly):
    """Serialize and flush a getLinkedNotebookSyncChunk CALL message."""
    out = self._oprot
    out.writeMessageBegin('getLinkedNotebookSyncChunk', TMessageType.CALL, self._seqid)
    args = getLinkedNotebookSyncChunk_args()
    args.authenticationToken = authenticationToken
    args.linkedNotebook = linkedNotebook
    args.afterUSN = afterUSN
    args.maxEntries = maxEntries
    args.fullSyncOnly = fullSyncOnly
    args.write(out)
    out.writeMessageEnd()
    out.trans.flush()
def recv_getLinkedNotebookSyncChunk(self):
    """Read the getLinkedNotebookSyncChunk reply; return success or raise."""
    fname, mtype, rseqid = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        err = TApplicationException()
        err.read(self._iprot)
        self._iprot.readMessageEnd()
        raise err
    reply = getLinkedNotebookSyncChunk_result()
    reply.read(self._iprot)
    self._iprot.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.userException is not None:
        raise reply.userException
    if reply.systemException is not None:
        raise reply.systemException
    if reply.notFoundException is not None:
        raise reply.notFoundException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getLinkedNotebookSyncChunk failed: unknown result")
def listNotebooks(self, authenticationToken):
    """Return a list of all notebooks in the account.

    :param authenticationToken: caller's session token.
    """
    self.send_listNotebooks(authenticationToken)
    return self.recv_listNotebooks()
def send_listNotebooks(self, authenticationToken):
    """Serialize and flush a listNotebooks CALL message."""
    out = self._oprot
    out.writeMessageBegin('listNotebooks', TMessageType.CALL, self._seqid)
    args = listNotebooks_args()
    args.authenticationToken = authenticationToken
    args.write(out)
    out.writeMessageEnd()
    out.trans.flush()
def recv_listNotebooks(self):
    """Read the listNotebooks reply; return success or raise the error."""
    fname, mtype, rseqid = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        err = TApplicationException()
        err.read(self._iprot)
        self._iprot.readMessageEnd()
        raise err
    reply = listNotebooks_result()
    reply.read(self._iprot)
    self._iprot.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.userException is not None:
        raise reply.userException
    if reply.systemException is not None:
        raise reply.systemException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "listNotebooks failed: unknown result")
def getNotebook(self, authenticationToken, guid):
    """Return the current state of the notebook with the given GUID.

    The notebook may be active or deleted (but not expunged).

    :param guid: the GUID of the notebook to be retrieved.
    :raises EDAMUserException:
        BAD_DATA_FORMAT "Notebook.guid" - if the parameter is missing;
        PERMISSION_DENIED "Notebook" - private notebook, user doesn't own.
    :raises EDAMNotFoundException:
        "Notebook.guid" - not found, by GUID.
    """
    self.send_getNotebook(authenticationToken, guid)
    return self.recv_getNotebook()
def send_getNotebook(self, authenticationToken, guid):
    """Serialize and flush a getNotebook CALL message."""
    out = self._oprot
    out.writeMessageBegin('getNotebook', TMessageType.CALL, self._seqid)
    args = getNotebook_args()
    args.authenticationToken = authenticationToken
    args.guid = guid
    args.write(out)
    out.writeMessageEnd()
    out.trans.flush()
def recv_getNotebook(self):
    """Read the getNotebook reply; return success or raise the error."""
    fname, mtype, rseqid = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        err = TApplicationException()
        err.read(self._iprot)
        self._iprot.readMessageEnd()
        raise err
    reply = getNotebook_result()
    reply.read(self._iprot)
    self._iprot.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.userException is not None:
        raise reply.userException
    if reply.systemException is not None:
        raise reply.systemException
    if reply.notFoundException is not None:
        raise reply.notFoundException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getNotebook failed: unknown result")
def getDefaultNotebook(self, authenticationToken):
    """Return the notebook used for new notes when none is specified.

    :param authenticationToken: caller's session token.
    """
    self.send_getDefaultNotebook(authenticationToken)
    return self.recv_getDefaultNotebook()
def send_getDefaultNotebook(self, authenticationToken):
    """Serialize and flush a getDefaultNotebook CALL message."""
    out = self._oprot
    out.writeMessageBegin('getDefaultNotebook', TMessageType.CALL, self._seqid)
    args = getDefaultNotebook_args()
    args.authenticationToken = authenticationToken
    args.write(out)
    out.writeMessageEnd()
    out.trans.flush()
def recv_getDefaultNotebook(self):
    """Read the getDefaultNotebook reply; return success or raise."""
    fname, mtype, rseqid = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        err = TApplicationException()
        err.read(self._iprot)
        self._iprot.readMessageEnd()
        raise err
    reply = getDefaultNotebook_result()
    reply.read(self._iprot)
    self._iprot.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.userException is not None:
        raise reply.userException
    if reply.systemException is not None:
        raise reply.systemException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getDefaultNotebook failed: unknown result")
def createNotebook(self, authenticationToken, notebook):
    """Create a notebook with the provided name.

    :param notebook: desired fields for the new notebook. The name must be
        set; 'active' or 'defaultNotebook' may also be set at creation. If
        a notebook with the same name exists in the account
        (case-insensitive compare), EDAMUserException is raised.
    :returns: the newly created Notebook with its server-side GUID set.
    :raises EDAMUserException:
        BAD_DATA_FORMAT "Notebook.name" - invalid length or pattern;
        BAD_DATA_FORMAT "Notebook.stack" - invalid length or pattern;
        BAD_DATA_FORMAT "Publishing.uri" - if publishing set but bad uri;
        BAD_DATA_FORMAT "Publishing.publicDescription" - if too long;
        DATA_CONFLICT "Notebook.name" - name already in use;
        DATA_CONFLICT "Publishing.uri" - if URI already in use;
        DATA_REQUIRED "Publishing.uri" - if publishing set but uri missing;
        LIMIT_REACHED "Notebook" - at max number of notebooks.
    """
    self.send_createNotebook(authenticationToken, notebook)
    return self.recv_createNotebook()
def send_createNotebook(self, authenticationToken, notebook):
    """Serialize and flush a createNotebook CALL message."""
    out = self._oprot
    out.writeMessageBegin('createNotebook', TMessageType.CALL, self._seqid)
    args = createNotebook_args()
    args.authenticationToken = authenticationToken
    args.notebook = notebook
    args.write(out)
    out.writeMessageEnd()
    out.trans.flush()
def recv_createNotebook(self):
    """Read the createNotebook reply; return success or raise the error."""
    fname, mtype, rseqid = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        err = TApplicationException()
        err.read(self._iprot)
        self._iprot.readMessageEnd()
        raise err
    reply = createNotebook_result()
    reply.read(self._iprot)
    self._iprot.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.userException is not None:
        raise reply.userException
    if reply.systemException is not None:
        raise reply.systemException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "createNotebook failed: unknown result")
def updateNotebook(self, authenticationToken, notebook):
    """Submit notebook changes to the service.

    The provided data must include the notebook's guid field for
    identification.

    :param notebook: the notebook object containing the requested changes.
    :returns: the Update Sequence Number for this change within the account.
    :raises EDAMUserException:
        BAD_DATA_FORMAT "Notebook.name" - invalid length or pattern;
        BAD_DATA_FORMAT "Notebook.stack" - invalid length or pattern;
        BAD_DATA_FORMAT "Publishing.uri" - if publishing set but bad uri;
        BAD_DATA_FORMAT "Publishing.publicDescription" - if too long;
        DATA_CONFLICT "Notebook.name" - name already in use;
        DATA_CONFLICT "Publishing.uri" - if URI already in use;
        DATA_REQUIRED "Publishing.uri" - if publishing set but uri missing.
    :raises EDAMNotFoundException:
        "Notebook.guid" - not found, by GUID.
    """
    self.send_updateNotebook(authenticationToken, notebook)
    return self.recv_updateNotebook()
def send_updateNotebook(self, authenticationToken, notebook):
    """Serialize and flush an updateNotebook CALL message."""
    out = self._oprot
    out.writeMessageBegin('updateNotebook', TMessageType.CALL, self._seqid)
    args = updateNotebook_args()
    args.authenticationToken = authenticationToken
    args.notebook = notebook
    args.write(out)
    out.writeMessageEnd()
    out.trans.flush()
def recv_updateNotebook(self):
    """Read the updateNotebook reply; return success or raise the error."""
    fname, mtype, rseqid = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        err = TApplicationException()
        err.read(self._iprot)
        self._iprot.readMessageEnd()
        raise err
    reply = updateNotebook_result()
    reply.read(self._iprot)
    self._iprot.readMessageEnd()
    if reply.success is not None:
        return reply.success
    if reply.userException is not None:
        raise reply.userException
    if reply.systemException is not None:
        raise reply.systemException
    if reply.notFoundException is not None:
        raise reply.notFoundException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "updateNotebook failed: unknown result")
def expungeNotebook(self, authenticationToken, guid):
    """Permanently remove a notebook from the user's account.

    Afterwards the notebook is no longer available for undeletion, etc.
    Any Notes it contains are moved to the current default notebook and
    placed in the trash (i.e. Note.active=false).

    Generally not available to third-party applications; such calls raise
    EDAMUserException with error code PERMISSION_DENIED.

    :param guid: the GUID of the notebook to delete.
    :returns: the Update Sequence Number for this change within the account.
    :raises EDAMUserException:
        BAD_DATA_FORMAT "Notebook.guid" - if the parameter is missing;
        LIMIT_REACHED "Notebook" - trying to expunge the last Notebook;
        PERMISSION_DENIED "Notebook" - private notebook, user doesn't own.
    """
    self.send_expungeNotebook(authenticationToken, guid)
    return self.recv_expungeNotebook()
def send_expungeNotebook(self, authenticationToken, guid):
self._oprot.writeMessageBegin('expungeNotebook', TMessageType.CALL, self._seqid)
args = expungeNotebook_args()
args.authenticationToken = authenticationToken
args.guid = guid
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_expungeNotebook(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = expungeNotebook_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
raise TApplicationException(TApplicationException.MISSING_RESULT, "expungeNotebook failed: unknown result");
def listTags(self, authenticationToken):
"""
Returns a list of the tags in the account. Evernote does not support
the undeletion of tags, so this will only include active tags.
Parameters:
- authenticationToken
"""
self.send_listTags(authenticationToken)
return self.recv_listTags()
def send_listTags(self, authenticationToken):
self._oprot.writeMessageBegin('listTags', TMessageType.CALL, self._seqid)
args = listTags_args()
args.authenticationToken = authenticationToken
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_listTags(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = listTags_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
raise TApplicationException(TApplicationException.MISSING_RESULT, "listTags failed: unknown result");
def listTagsByNotebook(self, authenticationToken, notebookGuid):
"""
Returns a list of the tags that are applied to at least one note within
the provided notebook. If the notebook is public, the authenticationToken
may be ignored.
@param notebookGuid
the GUID of the notebook to use to find tags
@throws EDAMNotFoundException <ul>
<li> "Notebook.guid" - notebook not found by GUID
</li>
</ul>
Parameters:
- authenticationToken
- notebookGuid
"""
self.send_listTagsByNotebook(authenticationToken, notebookGuid)
return self.recv_listTagsByNotebook()
def send_listTagsByNotebook(self, authenticationToken, notebookGuid):
self._oprot.writeMessageBegin('listTagsByNotebook', TMessageType.CALL, self._seqid)
args = listTagsByNotebook_args()
args.authenticationToken = authenticationToken
args.notebookGuid = notebookGuid
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_listTagsByNotebook(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = listTagsByNotebook_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
raise TApplicationException(TApplicationException.MISSING_RESULT, "listTagsByNotebook failed: unknown result");
def getTag(self, authenticationToken, guid):
"""
Returns the current state of the Tag with the provided GUID.
@param guid
The GUID of the tag to be retrieved.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "Tag.guid" - if the parameter is missing
</li>
<li> PERMISSION_DENIED "Tag" - private Tag, user doesn't own
</li>
</ul>
@throws EDAMNotFoundException <ul>
<li> "Tag.guid" - tag not found, by GUID
</li>
</ul>
Parameters:
- authenticationToken
- guid
"""
self.send_getTag(authenticationToken, guid)
return self.recv_getTag()
def send_getTag(self, authenticationToken, guid):
self._oprot.writeMessageBegin('getTag', TMessageType.CALL, self._seqid)
args = getTag_args()
args.authenticationToken = authenticationToken
args.guid = guid
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getTag(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = getTag_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
raise TApplicationException(TApplicationException.MISSING_RESULT, "getTag failed: unknown result");
def createTag(self, authenticationToken, tag):
"""
Asks the service to make a tag with a set of information.
@param tag
The desired list of fields for the tag are specified in this
object. The caller must specify the tag name, and may provide
the parentGUID.
@return
The newly created Tag. The server-side GUID will be
saved in this object.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "Tag.name" - invalid length or pattern
</li>
<li> BAD_DATA_FORMAT "Tag.parentGuid" - malformed GUID
</li>
<li> DATA_CONFLICT "Tag.name" - name already in use
</li>
<li> LIMIT_REACHED "Tag" - at max number of tags
</li>
</ul>
@throws EDAMNotFoundException <ul>
<li> "Tag.parentGuid" - not found, by GUID
</li>
</ul>
Parameters:
- authenticationToken
- tag
"""
self.send_createTag(authenticationToken, tag)
return self.recv_createTag()
def send_createTag(self, authenticationToken, tag):
self._oprot.writeMessageBegin('createTag', TMessageType.CALL, self._seqid)
args = createTag_args()
args.authenticationToken = authenticationToken
args.tag = tag
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_createTag(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = createTag_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
raise TApplicationException(TApplicationException.MISSING_RESULT, "createTag failed: unknown result");
def updateTag(self, authenticationToken, tag):
"""
Submits tag changes to the service. The provided data must include
the tag's guid field for identification. The service will apply
updates to the following tag fields: name, parentGuid
@param tag
The tag object containing the requested changes.
@return
The Update Sequence Number for this change within the account.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "Tag.name" - invalid length or pattern
</li>
<li> BAD_DATA_FORMAT "Tag.parentGuid" - malformed GUID
</li>
<li> DATA_CONFLICT "Tag.name" - name already in use
</li>
<li> DATA_CONFLICT "Tag.parentGuid" - can't set parent: circular
</li>
<li> PERMISSION_DENIED "Tag" - user doesn't own tag
</li>
</ul>
@throws EDAMNotFoundException <ul>
<li> "Tag.guid" - tag not found, by GUID
</li>
<li> "Tag.parentGuid" - parent not found, by GUID
</li>
</ul>
Parameters:
- authenticationToken
- tag
"""
self.send_updateTag(authenticationToken, tag)
return self.recv_updateTag()
def send_updateTag(self, authenticationToken, tag):
self._oprot.writeMessageBegin('updateTag', TMessageType.CALL, self._seqid)
args = updateTag_args()
args.authenticationToken = authenticationToken
args.tag = tag
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_updateTag(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = updateTag_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
raise TApplicationException(TApplicationException.MISSING_RESULT, "updateTag failed: unknown result");
def untagAll(self, authenticationToken, guid):
"""
Removes the provided tag from every note that is currently tagged with
this tag. If this operation is successful, the tag will still be in
the account, but it will not be tagged on any notes.
This function is not indended for use by full synchronizing clients, since
it does not provide enough result information to the client to reconcile
the local state without performing a follow-up sync from the service. This
is intended for "thin clients" that need to efficiently support this as
a UI operation.
@param guid
The GUID of the tag to remove from all notes.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "Tag.guid" - if the guid parameter is missing
</li>
<li> PERMISSION_DENIED "Tag" - user doesn't own tag
</li>
</ul>
@throws EDAMNotFoundException <ul>
<li> "Tag.guid" - tag not found, by GUID
</li>
</ul>
Parameters:
- authenticationToken
- guid
"""
self.send_untagAll(authenticationToken, guid)
self.recv_untagAll()
def send_untagAll(self, authenticationToken, guid):
self._oprot.writeMessageBegin('untagAll', TMessageType.CALL, self._seqid)
args = untagAll_args()
args.authenticationToken = authenticationToken
args.guid = guid
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_untagAll(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = untagAll_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
return
def expungeTag(self, authenticationToken, guid):
"""
Permanently deletes the tag with the provided GUID, if present.
<p/>
NOTE: This function is generally not available to third party applications.
Calls will result in an EDAMUserException with the error code
PERMISSION_DENIED.
@param guid
The GUID of the tag to delete.
@return
The Update Sequence Number for this change within the account.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "Tag.guid" - if the guid parameter is missing
</li>
<li> PERMISSION_DENIED "Tag" - user doesn't own tag
</li>
</ul>
@throws EDAMNotFoundException <ul>
<li> "Tag.guid" - tag not found, by GUID
</li>
</ul>
Parameters:
- authenticationToken
- guid
"""
self.send_expungeTag(authenticationToken, guid)
return self.recv_expungeTag()
def send_expungeTag(self, authenticationToken, guid):
self._oprot.writeMessageBegin('expungeTag', TMessageType.CALL, self._seqid)
args = expungeTag_args()
args.authenticationToken = authenticationToken
args.guid = guid
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_expungeTag(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = expungeTag_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
raise TApplicationException(TApplicationException.MISSING_RESULT, "expungeTag failed: unknown result");
def listSearches(self, authenticationToken):
"""
Returns a list of the searches in the account. Evernote does not support
the undeletion of searches, so this will only include active searches.
Parameters:
- authenticationToken
"""
self.send_listSearches(authenticationToken)
return self.recv_listSearches()
def send_listSearches(self, authenticationToken):
self._oprot.writeMessageBegin('listSearches', TMessageType.CALL, self._seqid)
args = listSearches_args()
args.authenticationToken = authenticationToken
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_listSearches(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = listSearches_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
raise TApplicationException(TApplicationException.MISSING_RESULT, "listSearches failed: unknown result");
def getSearch(self, authenticationToken, guid):
"""
Returns the current state of the search with the provided GUID.
@param guid
The GUID of the search to be retrieved.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "SavedSearch.guid" - if the parameter is missing
</li>
<li> PERMISSION_DENIED "SavedSearch" - private Tag, user doesn't own
</li>
@throws EDAMNotFoundException <ul>
<li> "SavedSearch.guid" - not found, by GUID
</li>
</ul>
Parameters:
- authenticationToken
- guid
"""
self.send_getSearch(authenticationToken, guid)
return self.recv_getSearch()
def send_getSearch(self, authenticationToken, guid):
self._oprot.writeMessageBegin('getSearch', TMessageType.CALL, self._seqid)
args = getSearch_args()
args.authenticationToken = authenticationToken
args.guid = guid
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getSearch(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = getSearch_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
raise TApplicationException(TApplicationException.MISSING_RESULT, "getSearch failed: unknown result");
def createSearch(self, authenticationToken, search):
"""
Asks the service to make a saved search with a set of information.
@param search
The desired list of fields for the search are specified in this
object. The caller must specify the name and query for the
search, and may optionally specify a search scope.
The SavedSearch.format field is ignored by the service.
@return
The newly created SavedSearch. The server-side GUID will be
saved in this object.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "SavedSearch.name" - invalid length or pattern
</li>
<li> BAD_DATA_FORMAT "SavedSearch.query" - invalid length
</li>
<li> DATA_CONFLICT "SavedSearch.name" - name already in use
</li>
<li> LIMIT_REACHED "SavedSearch" - at max number of searches
</li>
</ul>
Parameters:
- authenticationToken
- search
"""
self.send_createSearch(authenticationToken, search)
return self.recv_createSearch()
def send_createSearch(self, authenticationToken, search):
self._oprot.writeMessageBegin('createSearch', TMessageType.CALL, self._seqid)
args = createSearch_args()
args.authenticationToken = authenticationToken
args.search = search
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_createSearch(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = createSearch_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
raise TApplicationException(TApplicationException.MISSING_RESULT, "createSearch failed: unknown result");
def updateSearch(self, authenticationToken, search):
"""
Submits search changes to the service. The provided data must include
the search's guid field for identification. The service will apply
updates to the following search fields: name, query, and scope.
@param search
The search object containing the requested changes.
@return
The Update Sequence Number for this change within the account.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "SavedSearch.name" - invalid length or pattern
</li>
<li> BAD_DATA_FORMAT "SavedSearch.query" - invalid length
</li>
<li> DATA_CONFLICT "SavedSearch.name" - name already in use
</li>
<li> PERMISSION_DENIED "SavedSearch" - user doesn't own tag
</li>
</ul>
@throws EDAMNotFoundException <ul>
<li> "SavedSearch.guid" - not found, by GUID
</li>
</ul>
Parameters:
- authenticationToken
- search
"""
self.send_updateSearch(authenticationToken, search)
return self.recv_updateSearch()
def send_updateSearch(self, authenticationToken, search):
self._oprot.writeMessageBegin('updateSearch', TMessageType.CALL, self._seqid)
args = updateSearch_args()
args.authenticationToken = authenticationToken
args.search = search
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_updateSearch(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = updateSearch_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
raise TApplicationException(TApplicationException.MISSING_RESULT, "updateSearch failed: unknown result");
def expungeSearch(self, authenticationToken, guid):
"""
Permanently deletes the saved search with the provided GUID, if present.
<p/>
NOTE: This function is generally not available to third party applications.
Calls will result in an EDAMUserException with the error code
PERMISSION_DENIED.
@param guid
The GUID of the search to delete.
@return
The Update Sequence Number for this change within the account.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "SavedSearch.guid" - if the guid parameter is empty
</li>
<li> PERMISSION_DENIED "SavedSearch" - user doesn't own
</li>
</ul>
@throws EDAMNotFoundException <ul>
<li> "SavedSearch.guid" - not found, by GUID
</li>
</ul>
Parameters:
- authenticationToken
- guid
"""
self.send_expungeSearch(authenticationToken, guid)
return self.recv_expungeSearch()
def send_expungeSearch(self, authenticationToken, guid):
self._oprot.writeMessageBegin('expungeSearch', TMessageType.CALL, self._seqid)
args = expungeSearch_args()
args.authenticationToken = authenticationToken
args.guid = guid
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_expungeSearch(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = expungeSearch_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
raise TApplicationException(TApplicationException.MISSING_RESULT, "expungeSearch failed: unknown result");
def findNotes(self, authenticationToken, filter, offset, maxNotes):
"""
DEPRECATED. Use findNotesMetadata.
Parameters:
- authenticationToken
- filter
- offset
- maxNotes
"""
self.send_findNotes(authenticationToken, filter, offset, maxNotes)
return self.recv_findNotes()
def send_findNotes(self, authenticationToken, filter, offset, maxNotes):
self._oprot.writeMessageBegin('findNotes', TMessageType.CALL, self._seqid)
args = findNotes_args()
args.authenticationToken = authenticationToken
args.filter = filter
args.offset = offset
args.maxNotes = maxNotes
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_findNotes(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = findNotes_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
raise TApplicationException(TApplicationException.MISSING_RESULT, "findNotes failed: unknown result");
def findNoteOffset(self, authenticationToken, filter, guid):
"""
Finds the position of a note within a sorted subset of all of the user's
notes. This may be useful for thin clients that are displaying a paginated
listing of a large account, which need to know where a particular note
sits in the list without retrieving all notes first.
@param authenticationToken
Must be a valid token for the user's account unless the NoteFilter
'notebookGuid' is the GUID of a public notebook.
@param filter
The list of criteria that will constrain the notes to be returned.
@param guid
The GUID of the note to be retrieved.
@return
If the note with the provided GUID is found within the matching note
list, this will return the offset of that note within that list (where
the first offset is 0). If the note is not found within the set of
notes, this will return -1.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "offset" - not between 0 and EDAM_USER_NOTES_MAX
</li>
<li> BAD_DATA_FORMAT "maxNotes" - not between 0 and EDAM_USER_NOTES_MAX
</li>
<li> BAD_DATA_FORMAT "NoteFilter.notebookGuid" - if malformed
</li>
<li> BAD_DATA_FORMAT "NoteFilter.tagGuids" - if any are malformed
</li>
<li> BAD_DATA_FORMAT "NoteFilter.words" - if search string too long
</li>
@throws EDAMNotFoundException <ul>
<li> "Notebook.guid" - not found, by GUID
</li>
<li> "Note.guid" - not found, by GUID
</li>
</ul>
Parameters:
- authenticationToken
- filter
- guid
"""
self.send_findNoteOffset(authenticationToken, filter, guid)
return self.recv_findNoteOffset()
def send_findNoteOffset(self, authenticationToken, filter, guid):
self._oprot.writeMessageBegin('findNoteOffset', TMessageType.CALL, self._seqid)
args = findNoteOffset_args()
args.authenticationToken = authenticationToken
args.filter = filter
args.guid = guid
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_findNoteOffset(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = findNoteOffset_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
raise TApplicationException(TApplicationException.MISSING_RESULT, "findNoteOffset failed: unknown result");
def findNotesMetadata(self, authenticationToken, filter, offset, maxNotes, resultSpec):
"""
Used to find the high-level information about a set of the notes from a
user's account based on various criteria specified via a NoteFilter object.
<p/>
Web applications that wish to periodically check for new content in a user's
Evernote account should consider using webhooks instead of polling this API.
See http://dev.evernote.com/documentation/cloud/chapters/polling_notification.php
for more information.
@param authenticationToken
Must be a valid token for the user's account unless the NoteFilter
'notebookGuid' is the GUID of a public notebook.
@param filter
The list of criteria that will constrain the notes to be returned.
@param offset
The numeric index of the first note to show within the sorted
results. The numbering scheme starts with "0". This can be used for
pagination.
@param maxNotes
The mximum notes to return in this query. The service will return a set
of notes that is no larger than this number, but may return fewer notes
if needed. The NoteList.totalNotes field in the return value will
indicate whether there are more values available after the returned set.
@param resultSpec
This specifies which information should be returned for each matching
Note. The fields on this structure can be used to eliminate data that
the client doesn't need, which will reduce the time and bandwidth
to receive and process the reply.
@return
The list of notes that match the criteria.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "offset" - not between 0 and EDAM_USER_NOTES_MAX
</li>
<li> BAD_DATA_FORMAT "maxNotes" - not between 0 and EDAM_USER_NOTES_MAX
</li>
<li> BAD_DATA_FORMAT "NoteFilter.notebookGuid" - if malformed
</li>
<li> BAD_DATA_FORMAT "NoteFilter.tagGuids" - if any are malformed
</li>
<li> BAD_DATA_FORMAT "NoteFilter.words" - if search string too long
</li>
</ul>
@throws EDAMNotFoundException <ul>
<li> "Notebook.guid" - not found, by GUID
</li>
</ul>
Parameters:
- authenticationToken
- filter
- offset
- maxNotes
- resultSpec
"""
self.send_findNotesMetadata(authenticationToken, filter, offset, maxNotes, resultSpec)
return self.recv_findNotesMetadata()
def send_findNotesMetadata(self, authenticationToken, filter, offset, maxNotes, resultSpec):
self._oprot.writeMessageBegin('findNotesMetadata', TMessageType.CALL, self._seqid)
args = findNotesMetadata_args()
args.authenticationToken = authenticationToken
args.filter = filter
args.offset = offset
args.maxNotes = maxNotes
args.resultSpec = resultSpec
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_findNotesMetadata(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = findNotesMetadata_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
raise TApplicationException(TApplicationException.MISSING_RESULT, "findNotesMetadata failed: unknown result");
def findNoteCounts(self, authenticationToken, filter, withTrash):
"""
This function is used to determine how many notes are found for each
notebook and tag in the user's account, given a current set of filter
parameters that determine the current selection. This function will
return a structure that gives the note count for each notebook and tag
that has at least one note under the requested filter. Any notebook or
tag that has zero notes in the filtered set will not be listed in the
reply to this function (so they can be assumed to be 0).
@param authenticationToken
Must be a valid token for the user's account unless the NoteFilter
'notebookGuid' is the GUID of a public notebook.
@param filter
The note selection filter that is currently being applied. The note
counts are to be calculated with this filter applied to the total set
of notes in the user's account.
@param withTrash
If true, then the NoteCollectionCounts.trashCount will be calculated
and supplied in the reply. Otherwise, the trash value will be omitted.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "NoteFilter.notebookGuid" - if malformed
</li>
<li> BAD_DATA_FORMAT "NoteFilter.notebookGuids" - if any are malformed
</li>
<li> BAD_DATA_FORMAT "NoteFilter.words" - if search string too long
</li>
@throws EDAMNotFoundException <ul>
<li> "Notebook.guid" - not found, by GUID
</li>
</ul>
Parameters:
- authenticationToken
- filter
- withTrash
"""
self.send_findNoteCounts(authenticationToken, filter, withTrash)
return self.recv_findNoteCounts()
def send_findNoteCounts(self, authenticationToken, filter, withTrash):
self._oprot.writeMessageBegin('findNoteCounts', TMessageType.CALL, self._seqid)
args = findNoteCounts_args()
args.authenticationToken = authenticationToken
args.filter = filter
args.withTrash = withTrash
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_findNoteCounts(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = findNoteCounts_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
raise TApplicationException(TApplicationException.MISSING_RESULT, "findNoteCounts failed: unknown result");
  def getNote(self, authenticationToken, guid, withContent, withResourcesData, withResourcesRecognition, withResourcesAlternateData):
    """
    Returns the current state of the note in the service with the provided
    GUID. The ENML contents of the note will only be provided if the
    'withContent' parameter is true. The service will include the meta-data
    for each resource in the note, but the binary contents of the resources
    and their recognition data will be omitted.
    If the Note is found in a public notebook, the authenticationToken
    will be ignored (so it could be an empty string). The applicationData
    fields are returned as keysOnly.
    @param guid
      The GUID of the note to be retrieved.
    @param withContent
      If true, the note will include the ENML contents of its
      'content' field.
    @param withResourcesData
      If true, any Resource elements in this Note will include the binary
      contents of their 'data' field's body.
    @param withResourcesRecognition
      If true, any Resource elements will include the binary contents of the
      'recognition' field's body if recognition data is present.
    @param withResourcesAlternateData
      If true, any Resource elements in this Note will include the binary
      contents of their 'alternateData' fields' body, if an alternate form
      is present.
    @throws EDAMUserException <ul>
      <li> BAD_DATA_FORMAT "Note.guid" - if the parameter is missing
      </li>
      <li> PERMISSION_DENIED "Note" - private note, user doesn't own
      </li>
    </ul>
    @throws EDAMNotFoundException <ul>
      <li> "Note.guid" - not found, by GUID
      </li>
    </ul>
    Parameters:
     - authenticationToken
     - guid
     - withContent
     - withResourcesData
     - withResourcesRecognition
     - withResourcesAlternateData
    """
    # Synchronous RPC: serialize the request, then block reading the reply.
    self.send_getNote(authenticationToken, guid, withContent, withResourcesData, withResourcesRecognition, withResourcesAlternateData)
    return self.recv_getNote()
  def send_getNote(self, authenticationToken, guid, withContent, withResourcesData, withResourcesRecognition, withResourcesAlternateData):
    """Serialize the getNote call arguments and flush them to the transport."""
    self._oprot.writeMessageBegin('getNote', TMessageType.CALL, self._seqid)
    args = getNote_args()
    args.authenticationToken = authenticationToken
    args.guid = guid
    args.withContent = withContent
    args.withResourcesData = withResourcesData
    args.withResourcesRecognition = withResourcesRecognition
    args.withResourcesAlternateData = withResourcesAlternateData
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_getNote(self, ):
    """Read the getNote reply; return its result or raise the carried exception."""
    # rseqid is read but not validated: replies are assumed to arrive in order.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      # Server-side framework failure (not a declared Thrift exception).
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = getNote_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Declared exceptions travel inside the result struct; re-raise locally.
    if result.userException is not None:
      raise result.userException
    if result.systemException is not None:
      raise result.systemException
    if result.notFoundException is not None:
      raise result.notFoundException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getNote failed: unknown result");
  def getNoteApplicationData(self, authenticationToken, guid):
    """
    Get all of the application data for the note identified by GUID,
    with values returned within the LazyMap fullMap field.
    If there are no applicationData entries, then a LazyMap
    with an empty fullMap will be returned. If your application
    only needs to fetch its own applicationData entry, use
    getNoteApplicationDataEntry instead.
    Parameters:
     - authenticationToken
     - guid
    """
    # Synchronous RPC: serialize the request, then block reading the reply.
    self.send_getNoteApplicationData(authenticationToken, guid)
    return self.recv_getNoteApplicationData()
  def send_getNoteApplicationData(self, authenticationToken, guid):
    """Serialize the getNoteApplicationData call arguments and flush them."""
    self._oprot.writeMessageBegin('getNoteApplicationData', TMessageType.CALL, self._seqid)
    args = getNoteApplicationData_args()
    args.authenticationToken = authenticationToken
    args.guid = guid
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_getNoteApplicationData(self, ):
    """Read the getNoteApplicationData reply; return result or raise."""
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = getNoteApplicationData_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Declared exceptions travel inside the result struct; re-raise locally.
    if result.userException is not None:
      raise result.userException
    if result.systemException is not None:
      raise result.systemException
    if result.notFoundException is not None:
      raise result.notFoundException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getNoteApplicationData failed: unknown result");
  def getNoteApplicationDataEntry(self, authenticationToken, guid, key):
    """
    Get the value of a single entry in the applicationData map
    for the note identified by GUID.
    @throws EDAMNotFoundException <ul>
      <li> "Note.guid" - note not found, by GUID</li>
      <li> "NoteAttributes.applicationData.key" - note not found, by key</li>
    </ul>
    Parameters:
     - authenticationToken
     - guid
     - key
    """
    # Synchronous RPC: serialize the request, then block reading the reply.
    self.send_getNoteApplicationDataEntry(authenticationToken, guid, key)
    return self.recv_getNoteApplicationDataEntry()
  def send_getNoteApplicationDataEntry(self, authenticationToken, guid, key):
    """Serialize the getNoteApplicationDataEntry call arguments and flush them."""
    self._oprot.writeMessageBegin('getNoteApplicationDataEntry', TMessageType.CALL, self._seqid)
    args = getNoteApplicationDataEntry_args()
    args.authenticationToken = authenticationToken
    args.guid = guid
    args.key = key
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_getNoteApplicationDataEntry(self, ):
    """Read the getNoteApplicationDataEntry reply; return result or raise."""
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = getNoteApplicationDataEntry_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Declared exceptions travel inside the result struct; re-raise locally.
    if result.userException is not None:
      raise result.userException
    if result.systemException is not None:
      raise result.systemException
    if result.notFoundException is not None:
      raise result.notFoundException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getNoteApplicationDataEntry failed: unknown result");
  def setNoteApplicationDataEntry(self, authenticationToken, guid, key, value):
    """
    Update, or create, an entry in the applicationData map for
    the note identified by guid.
    Parameters:
     - authenticationToken
     - guid
     - key
     - value
    """
    # Synchronous RPC: serialize the request, then block reading the reply.
    self.send_setNoteApplicationDataEntry(authenticationToken, guid, key, value)
    return self.recv_setNoteApplicationDataEntry()
  def send_setNoteApplicationDataEntry(self, authenticationToken, guid, key, value):
    """Serialize the setNoteApplicationDataEntry call arguments and flush them."""
    self._oprot.writeMessageBegin('setNoteApplicationDataEntry', TMessageType.CALL, self._seqid)
    args = setNoteApplicationDataEntry_args()
    args.authenticationToken = authenticationToken
    args.guid = guid
    args.key = key
    args.value = value
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_setNoteApplicationDataEntry(self, ):
    """Read the setNoteApplicationDataEntry reply; return result or raise."""
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = setNoteApplicationDataEntry_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Declared exceptions travel inside the result struct; re-raise locally.
    if result.userException is not None:
      raise result.userException
    if result.systemException is not None:
      raise result.systemException
    if result.notFoundException is not None:
      raise result.notFoundException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "setNoteApplicationDataEntry failed: unknown result");
  def unsetNoteApplicationDataEntry(self, authenticationToken, guid, key):
    """
    Remove an entry identified by 'key' from the applicationData map for
    the note identified by 'guid'. Silently ignores an unset of a
    non-existing key.
    Parameters:
     - authenticationToken
     - guid
     - key
    """
    # Synchronous RPC: serialize the request, then block reading the reply.
    self.send_unsetNoteApplicationDataEntry(authenticationToken, guid, key)
    return self.recv_unsetNoteApplicationDataEntry()
  def send_unsetNoteApplicationDataEntry(self, authenticationToken, guid, key):
    """Serialize the unsetNoteApplicationDataEntry call arguments and flush them."""
    self._oprot.writeMessageBegin('unsetNoteApplicationDataEntry', TMessageType.CALL, self._seqid)
    args = unsetNoteApplicationDataEntry_args()
    args.authenticationToken = authenticationToken
    args.guid = guid
    args.key = key
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_unsetNoteApplicationDataEntry(self, ):
    """Read the unsetNoteApplicationDataEntry reply; return result or raise."""
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = unsetNoteApplicationDataEntry_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Declared exceptions travel inside the result struct; re-raise locally.
    if result.userException is not None:
      raise result.userException
    if result.systemException is not None:
      raise result.systemException
    if result.notFoundException is not None:
      raise result.notFoundException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "unsetNoteApplicationDataEntry failed: unknown result");
  def getNoteContent(self, authenticationToken, guid):
    """
    Returns XHTML contents of the note with the provided GUID.
    If the Note is found in a public notebook, the authenticationToken
    will be ignored (so it could be an empty string).
    @param guid
      The GUID of the note to be retrieved.
    @throws EDAMUserException <ul>
      <li> BAD_DATA_FORMAT "Note.guid" - if the parameter is missing
      </li>
      <li> PERMISSION_DENIED "Note" - private note, user doesn't own
      </li>
    </ul>
    @throws EDAMNotFoundException <ul>
      <li> "Note.guid" - not found, by GUID
      </li>
    </ul>
    Parameters:
     - authenticationToken
     - guid
    """
    # Synchronous RPC: serialize the request, then block reading the reply.
    self.send_getNoteContent(authenticationToken, guid)
    return self.recv_getNoteContent()
  def send_getNoteContent(self, authenticationToken, guid):
    """Serialize the getNoteContent call arguments and flush them."""
    self._oprot.writeMessageBegin('getNoteContent', TMessageType.CALL, self._seqid)
    args = getNoteContent_args()
    args.authenticationToken = authenticationToken
    args.guid = guid
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_getNoteContent(self, ):
    """Read the getNoteContent reply; return result or raise."""
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = getNoteContent_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Declared exceptions travel inside the result struct; re-raise locally.
    if result.userException is not None:
      raise result.userException
    if result.systemException is not None:
      raise result.systemException
    if result.notFoundException is not None:
      raise result.notFoundException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getNoteContent failed: unknown result");
  def getNoteSearchText(self, authenticationToken, guid, noteOnly, tokenizeForIndexing):
    """
    Returns a block of the extracted plain text contents of the note with the
    provided GUID. This text can be indexed for search purposes by a light
    client that doesn't have capabilities to extract all of the searchable
    text content from the note and its resources.
    If the Note is found in a public notebook, the authenticationToken
    will be ignored (so it could be an empty string).
    @param guid
      The GUID of the note to be retrieved.
    @param noteOnly
      If true, this will only return the text extracted from the ENML contents
      of the note itself. If false, this will also include the extracted text
      from any text-bearing resources (PDF, recognized images)
    @param tokenizeForIndexing
      If true, this will break the text into cleanly separated and sanitized
      tokens. If false, this will return the more raw text extraction, with
      its original punctuation, capitalization, spacing, etc.
    @throws EDAMUserException <ul>
      <li> BAD_DATA_FORMAT "Note.guid" - if the parameter is missing
      </li>
      <li> PERMISSION_DENIED "Note" - private note, user doesn't own
      </li>
    </ul>
    @throws EDAMNotFoundException <ul>
      <li> "Note.guid" - not found, by GUID
      </li>
    </ul>
    Parameters:
     - authenticationToken
     - guid
     - noteOnly
     - tokenizeForIndexing
    """
    # Synchronous RPC: serialize the request, then block reading the reply.
    self.send_getNoteSearchText(authenticationToken, guid, noteOnly, tokenizeForIndexing)
    return self.recv_getNoteSearchText()
  def send_getNoteSearchText(self, authenticationToken, guid, noteOnly, tokenizeForIndexing):
    """Serialize the getNoteSearchText call arguments and flush them."""
    self._oprot.writeMessageBegin('getNoteSearchText', TMessageType.CALL, self._seqid)
    args = getNoteSearchText_args()
    args.authenticationToken = authenticationToken
    args.guid = guid
    args.noteOnly = noteOnly
    args.tokenizeForIndexing = tokenizeForIndexing
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_getNoteSearchText(self, ):
    """Read the getNoteSearchText reply; return result or raise."""
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = getNoteSearchText_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Declared exceptions travel inside the result struct; re-raise locally.
    if result.userException is not None:
      raise result.userException
    if result.systemException is not None:
      raise result.systemException
    if result.notFoundException is not None:
      raise result.notFoundException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getNoteSearchText failed: unknown result");
  def getResourceSearchText(self, authenticationToken, guid):
    """
    Returns a block of the extracted plain text contents of the resource with
    the provided GUID. This text can be indexed for search purposes by a light
    client that doesn't have capability to extract all of the searchable
    text content from a resource.
    If the Resource is found in a public notebook, the authenticationToken
    will be ignored (so it could be an empty string).
    @param guid
      The GUID of the resource to be retrieved.
    @throws EDAMUserException <ul>
      <li> BAD_DATA_FORMAT "Resource.guid" - if the parameter is missing
      </li>
      <li> PERMISSION_DENIED "Resource" - private resource, user doesn't own
      </li>
    </ul>
    @throws EDAMNotFoundException <ul>
      <li> "Resource.guid" - not found, by GUID
      </li>
    </ul>
    Parameters:
     - authenticationToken
     - guid
    """
    # Synchronous RPC: serialize the request, then block reading the reply.
    self.send_getResourceSearchText(authenticationToken, guid)
    return self.recv_getResourceSearchText()
  def send_getResourceSearchText(self, authenticationToken, guid):
    """Serialize the getResourceSearchText call arguments and flush them."""
    self._oprot.writeMessageBegin('getResourceSearchText', TMessageType.CALL, self._seqid)
    args = getResourceSearchText_args()
    args.authenticationToken = authenticationToken
    args.guid = guid
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_getResourceSearchText(self, ):
    """Read the getResourceSearchText reply; return result or raise."""
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = getResourceSearchText_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Declared exceptions travel inside the result struct; re-raise locally.
    if result.userException is not None:
      raise result.userException
    if result.systemException is not None:
      raise result.systemException
    if result.notFoundException is not None:
      raise result.notFoundException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getResourceSearchText failed: unknown result");
  def getNoteTagNames(self, authenticationToken, guid):
    """
    Returns a list of the names of the tags for the note with the provided
    guid. This can be used with authentication to get the tags for a
    user's own note, or can be used without valid authentication to retrieve
    the names of the tags for a note in a public notebook.
    @throws EDAMUserException <ul>
      <li> BAD_DATA_FORMAT "Note.guid" - if the parameter is missing
      </li>
      <li> PERMISSION_DENIED "Note" - private note, user doesn't own
      </li>
    </ul>
    @throws EDAMNotFoundException <ul>
      <li> "Note.guid" - not found, by GUID
      </li>
    </ul>
    Parameters:
     - authenticationToken
     - guid
    """
    # Synchronous RPC: serialize the request, then block reading the reply.
    self.send_getNoteTagNames(authenticationToken, guid)
    return self.recv_getNoteTagNames()
  def send_getNoteTagNames(self, authenticationToken, guid):
    """Serialize the getNoteTagNames call arguments and flush them."""
    self._oprot.writeMessageBegin('getNoteTagNames', TMessageType.CALL, self._seqid)
    args = getNoteTagNames_args()
    args.authenticationToken = authenticationToken
    args.guid = guid
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_getNoteTagNames(self, ):
    """Read the getNoteTagNames reply; return result or raise."""
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = getNoteTagNames_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Declared exceptions travel inside the result struct; re-raise locally.
    if result.userException is not None:
      raise result.userException
    if result.systemException is not None:
      raise result.systemException
    if result.notFoundException is not None:
      raise result.notFoundException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getNoteTagNames failed: unknown result");
  def createNote(self, authenticationToken, note):
    """
    Asks the service to make a note with the provided set of information.
    @param note
      A Note object containing the desired fields to be populated on
      the service.
    @return
      The newly created Note from the service. The server-side
      GUIDs for the Note and any Resources will be saved in this object.
    @throws EDAMUserException <ul>
      <li> BAD_DATA_FORMAT "Note.title" - invalid length or pattern
      </li>
      <li> BAD_DATA_FORMAT "Note.content" - invalid length for ENML content
      </li>
      <li> BAD_DATA_FORMAT "Resource.mime" - invalid resource MIME type
      </li>
      <li> BAD_DATA_FORMAT "NoteAttributes.*" - bad resource string
      </li>
      <li> BAD_DATA_FORMAT "ResourceAttributes.*" - bad resource string
      </li>
      <li> DATA_CONFLICT "Note.deleted" - deleted time set on active note
      </li>
      <li> DATA_REQUIRED "Resource.data" - resource data body missing
      </li>
      <li> ENML_VALIDATION "*" - note content doesn't validate against DTD
      </li>
      <li> LIMIT_REACHED "Note" - at max number per account
      </li>
      <li> LIMIT_REACHED "Note.size" - total note size too large
      </li>
      <li> LIMIT_REACHED "Note.resources" - too many resources on Note
      </li>
      <li> LIMIT_REACHED "Note.tagGuids" - too many Tags on Note
      </li>
      <li> LIMIT_REACHED "Resource.data.size" - resource too large
      </li>
      <li> LIMIT_REACHED "NoteAttribute.*" - attribute string too long
      </li>
      <li> LIMIT_REACHED "ResourceAttribute.*" - attribute string too long
      </li>
      <li> PERMISSION_DENIED "Note.notebookGuid" - NB not owned by user
      </li>
      <li> QUOTA_REACHED "Accounting.uploadLimit" - note exceeds upload quota
      </li>
      <li> BAD_DATA_FORMAT "Tag.name" - Note.tagNames was provided, and one
        of the specified tags had an invalid length or pattern
      </li>
      <li> LIMIT_REACHED "Tag" - Note.tagNames was provided, and the required
        new tags would exceed the maximum number per account
      </li>
    </ul>
    @throws EDAMNotFoundException <ul>
      <li> "Note.notebookGuid" - not found, by GUID
      </li>
    </ul>
    Parameters:
     - authenticationToken
     - note
    """
    # Synchronous RPC: serialize the request, then block reading the reply.
    self.send_createNote(authenticationToken, note)
    return self.recv_createNote()
  def send_createNote(self, authenticationToken, note):
    """Serialize the createNote call arguments and flush them."""
    self._oprot.writeMessageBegin('createNote', TMessageType.CALL, self._seqid)
    args = createNote_args()
    args.authenticationToken = authenticationToken
    args.note = note
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_createNote(self, ):
    """Read the createNote reply; return the created Note or raise."""
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = createNote_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Declared exceptions travel inside the result struct; re-raise locally.
    if result.userException is not None:
      raise result.userException
    if result.systemException is not None:
      raise result.systemException
    if result.notFoundException is not None:
      raise result.notFoundException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "createNote failed: unknown result");
  def updateNote(self, authenticationToken, note):
    """
    Submit a set of changes to a note to the service. The provided data
    must include the note's guid field for identification. The note's
    title must also be set.
    @param note
      A Note object containing the desired fields to be populated on
      the service. With the exception of the note's title and guid, fields
      that are not being changed do not need to be set. If the content is not
      being modified, note.content should be left unset. If the list of
      resources is not being modified, note.resources should be left unset.
    @return
      The metadata (no contents) for the Note on the server after the update
    @throws EDAMUserException <ul>
      <li> BAD_DATA_FORMAT "Note.title" - invalid length or pattern
      </li>
      <li> BAD_DATA_FORMAT "Note.content" - invalid length for ENML body
      </li>
      <li> BAD_DATA_FORMAT "NoteAttributes.*" - bad resource string
      </li>
      <li> BAD_DATA_FORMAT "ResourceAttributes.*" - bad resource string
      </li>
      <li> BAD_DATA_FORMAT "Resource.mime" - invalid resource MIME type
      </li>
      <li> DATA_CONFLICT "Note.deleted" - deleted time set on active note
      </li>
      <li> DATA_REQUIRED "Resource.data" - resource data body missing
      </li>
      <li> ENML_VALIDATION "*" - note content doesn't validate against DTD
      </li>
      <li> LIMIT_REACHED "Note.tagGuids" - too many Tags on Note
      </li>
      <li> LIMIT_REACHED "Note.resources" - too many resources on Note
      </li>
      <li> LIMIT_REACHED "Note.size" - total note size too large
      </li>
      <li> LIMIT_REACHED "Resource.data.size" - resource too large
      </li>
      <li> LIMIT_REACHED "NoteAttribute.*" - attribute string too long
      </li>
      <li> LIMIT_REACHED "ResourceAttribute.*" - attribute string too long
      </li>
      <li> PERMISSION_DENIED "Note" - user doesn't own
      </li>
      <li> PERMISSION_DENIED "Note.notebookGuid" - user doesn't own destination
      </li>
      <li> QUOTA_REACHED "Accounting.uploadLimit" - note exceeds upload quota
      </li>
      <li> BAD_DATA_FORMAT "Tag.name" - Note.tagNames was provided, and one
        of the specified tags had an invalid length or pattern
      </li>
      <li> LIMIT_REACHED "Tag" - Note.tagNames was provided, and the required
        new tags would exceed the maximum number per account
      </li>
    </ul>
    @throws EDAMNotFoundException <ul>
      <li> "Note.guid" - note not found, by GUID
      </li>
      <li> "Note.notebookGuid" - if notebookGuid provided, but not found
      </li>
    </ul>
    Parameters:
     - authenticationToken
     - note
    """
    # Synchronous RPC: serialize the request, then block reading the reply.
    self.send_updateNote(authenticationToken, note)
    return self.recv_updateNote()
  def send_updateNote(self, authenticationToken, note):
    """Serialize the updateNote call arguments and flush them."""
    self._oprot.writeMessageBegin('updateNote', TMessageType.CALL, self._seqid)
    args = updateNote_args()
    args.authenticationToken = authenticationToken
    args.note = note
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_updateNote(self, ):
    """Read the updateNote reply; return the updated Note metadata or raise."""
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = updateNote_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Declared exceptions travel inside the result struct; re-raise locally.
    if result.userException is not None:
      raise result.userException
    if result.systemException is not None:
      raise result.systemException
    if result.notFoundException is not None:
      raise result.notFoundException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "updateNote failed: unknown result");
  def deleteNote(self, authenticationToken, guid):
    """
    Moves the note into the trash. The note may still be undeleted, unless it
    is expunged. This is equivalent to calling updateNote() after setting
    Note.active = false
    @param guid
      The GUID of the note to delete.
    @return
      The Update Sequence Number for this change within the account.
    @throws EDAMUserException <ul>
      <li> PERMISSION_DENIED "Note" - user doesn't have permission to
        update the note.
      </li>
    </ul>
    @throws EDAMUserException <ul>
      <li> DATA_CONFLICT "Note.guid" - the note is already deleted
      </li>
    </ul>
    @throws EDAMNotFoundException <ul>
      <li> "Note.guid" - not found, by GUID
      </li>
    </ul>
    Parameters:
     - authenticationToken
     - guid
    """
    # Synchronous RPC: serialize the request, then block reading the reply.
    self.send_deleteNote(authenticationToken, guid)
    return self.recv_deleteNote()
  def send_deleteNote(self, authenticationToken, guid):
    """Serialize the deleteNote call arguments and flush them."""
    self._oprot.writeMessageBegin('deleteNote', TMessageType.CALL, self._seqid)
    args = deleteNote_args()
    args.authenticationToken = authenticationToken
    args.guid = guid
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_deleteNote(self, ):
    """Read the deleteNote reply; return the update sequence number or raise."""
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = deleteNote_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Declared exceptions travel inside the result struct; re-raise locally.
    if result.userException is not None:
      raise result.userException
    if result.systemException is not None:
      raise result.systemException
    if result.notFoundException is not None:
      raise result.notFoundException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "deleteNote failed: unknown result");
  def expungeNote(self, authenticationToken, guid):
    """
    Permanently removes a Note, and all of its Resources,
    from the service.
    <p/>
    NOTE: This function is not available to third party applications.
    Calls will result in an EDAMUserException with the error code
    PERMISSION_DENIED.
    @param guid
      The GUID of the note to delete.
    @return
      The Update Sequence Number for this change within the account.
    @throws EDAMUserException <ul>
      <li> PERMISSION_DENIED "Note" - user doesn't own
      </li>
    </ul>
    @throws EDAMNotFoundException <ul>
      <li> "Note.guid" - not found, by GUID
      </li>
    </ul>
    Parameters:
     - authenticationToken
     - guid
    """
    # Synchronous RPC: serialize the request, then block reading the reply.
    self.send_expungeNote(authenticationToken, guid)
    return self.recv_expungeNote()
  def send_expungeNote(self, authenticationToken, guid):
    """Serialize the expungeNote call arguments and flush them."""
    self._oprot.writeMessageBegin('expungeNote', TMessageType.CALL, self._seqid)
    args = expungeNote_args()
    args.authenticationToken = authenticationToken
    args.guid = guid
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_expungeNote(self, ):
    """Read the expungeNote reply; return the update sequence number or raise."""
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = expungeNote_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Declared exceptions travel inside the result struct; re-raise locally.
    if result.userException is not None:
      raise result.userException
    if result.systemException is not None:
      raise result.systemException
    if result.notFoundException is not None:
      raise result.notFoundException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "expungeNote failed: unknown result");
  def expungeNotes(self, authenticationToken, noteGuids):
    """
    Permanently removes a list of Notes, and all of their Resources, from
    the service. This should be invoked with a small number of Note GUIDs
    (e.g. 100 or less) on each call. To expunge a larger number of notes,
    call this method multiple times. This should also be used to reduce the
    number of Notes in a notebook before calling expungeNotebook() or
    in the trash before calling expungeInactiveNotes(), since these calls may
    be prohibitively slow if there are more than a few hundred notes.
    If an exception is thrown for any of the GUIDs, then none of the notes
    will be deleted. I.e. this call can be treated as an atomic transaction.
    <p/>
    NOTE: This function is not available to third party applications.
    Calls will result in an EDAMUserException with the error code
    PERMISSION_DENIED.
    @param noteGuids
      The list of GUIDs for the Notes to remove.
    @return
      The account's updateCount at the end of this operation
    @throws EDAMUserException <ul>
      <li> PERMISSION_DENIED "Note" - user doesn't own
      </li>
    </ul>
    @throws EDAMNotFoundException <ul>
      <li> "Note.guid" - not found, by GUID
      </li>
    </ul>
    Parameters:
     - authenticationToken
     - noteGuids
    """
    # Synchronous RPC: serialize the request, then block reading the reply.
    self.send_expungeNotes(authenticationToken, noteGuids)
    return self.recv_expungeNotes()
  def send_expungeNotes(self, authenticationToken, noteGuids):
    """Serialize the expungeNotes call arguments and flush them."""
    self._oprot.writeMessageBegin('expungeNotes', TMessageType.CALL, self._seqid)
    args = expungeNotes_args()
    args.authenticationToken = authenticationToken
    args.noteGuids = noteGuids
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_expungeNotes(self, ):
    """Read the expungeNotes reply; return the account updateCount or raise."""
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = expungeNotes_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Declared exceptions travel inside the result struct; re-raise locally.
    if result.userException is not None:
      raise result.userException
    if result.systemException is not None:
      raise result.systemException
    if result.notFoundException is not None:
      raise result.notFoundException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "expungeNotes failed: unknown result");
  def expungeInactiveNotes(self, authenticationToken):
    """
    Permanently removes all of the Notes that are currently marked as
    inactive. This is equivalent to "emptying the trash", and these Notes
    will be gone permanently.
    <p/>
    This operation may be relatively slow if the account contains a large
    number of inactive Notes.
    <p/>
    NOTE: This function is not available to third party applications.
    Calls will result in an EDAMUserException with the error code
    PERMISSION_DENIED.
    @return
      The number of notes that were expunged.
    Parameters:
     - authenticationToken
    """
    # Synchronous RPC: serialize the request, then block reading the reply.
    self.send_expungeInactiveNotes(authenticationToken)
    return self.recv_expungeInactiveNotes()
  def send_expungeInactiveNotes(self, authenticationToken):
    """Serialize the expungeInactiveNotes call arguments and flush them."""
    self._oprot.writeMessageBegin('expungeInactiveNotes', TMessageType.CALL, self._seqid)
    args = expungeInactiveNotes_args()
    args.authenticationToken = authenticationToken
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_expungeInactiveNotes(self, ):
    """Read the expungeInactiveNotes reply; return the expunged count or raise."""
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = expungeInactiveNotes_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # This RPC declares no notFoundException, so only two exception slots exist.
    if result.userException is not None:
      raise result.userException
    if result.systemException is not None:
      raise result.systemException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "expungeInactiveNotes failed: unknown result");
  def copyNote(self, authenticationToken, noteGuid, toNotebookGuid):
    """
    Performs a deep copy of the Note with the provided GUID 'noteGuid' into
    the Notebook with the provided GUID 'toNotebookGuid'.
    The caller must be the owner of both the Note and the Notebook.
    This creates a new Note in the destination Notebook with new content and
    Resources that match all of the content and Resources from the original
    Note, but with new GUID identifiers.
    The original Note is not modified by this operation.
    The copied note is considered as an "upload" for the purpose of upload
    transfer limit calculation, so its size is added to the upload count for
    the owner.
    @param noteGuid
      The GUID of the Note to copy.
    @param toNotebookGuid
      The GUID of the Notebook that should receive the new Note.
    @return
      The metadata for the new Note that was created. This will include the
      new GUID for this Note (and any copied Resources), but will not include
      the content body or the binary bodies of any Resources.
    @throws EDAMUserException <ul>
      <li> LIMIT_REACHED "Note" - at max number per account
      </li>
      <li> PERMISSION_DENIED "Notebook.guid" - destination not owned by user
      </li>
      <li> PERMISSION_DENIED "Note" - user doesn't own
      </li>
      <li> QUOTA_REACHED "Accounting.uploadLimit" - note exceeds upload quota
      </li>
    </ul>
    @throws EDAMNotFoundException <ul>
      <li> "Notebook.guid" - not found, by GUID
      </li>
    </ul>
    Parameters:
     - authenticationToken
     - noteGuid
     - toNotebookGuid
    """
    # Synchronous RPC: serialize the request, then block reading the reply.
    self.send_copyNote(authenticationToken, noteGuid, toNotebookGuid)
    return self.recv_copyNote()
  def send_copyNote(self, authenticationToken, noteGuid, toNotebookGuid):
    """Serialize the copyNote call arguments and flush them."""
    self._oprot.writeMessageBegin('copyNote', TMessageType.CALL, self._seqid)
    args = copyNote_args()
    args.authenticationToken = authenticationToken
    args.noteGuid = noteGuid
    args.toNotebookGuid = toNotebookGuid
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_copyNote(self, ):
    """Read the copyNote reply; return the new Note's metadata or raise."""
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = copyNote_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Declared exceptions travel inside the result struct; re-raise locally.
    if result.userException is not None:
      raise result.userException
    if result.systemException is not None:
      raise result.systemException
    if result.notFoundException is not None:
      raise result.notFoundException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "copyNote failed: unknown result");
  def listNoteVersions(self, authenticationToken, noteGuid):
    """
    Returns a list of the prior versions of a particular note that are
    saved within the service. These prior versions are stored to provide a
    recovery from unintentional removal of content from a note. The identifiers
    that are returned by this call can be used with getNoteVersion to retrieve
    the previous note.
    The identifiers will be listed from the most recent versions to the oldest.
    @throws EDAMUserException <ul>
      <li> BAD_DATA_FORMAT "Note.guid" - if the parameter is missing
      </li>
      <li> PERMISSION_DENIED "Note" - private note, user doesn't own
      </li>
    </ul>
    @throws EDAMNotFoundException <ul>
      <li> "Note.guid" - not found, by GUID
      </li>
    </ul>
    Parameters:
     - authenticationToken
     - noteGuid
    """
    # Synchronous RPC: serialize the request, then block reading the reply.
    self.send_listNoteVersions(authenticationToken, noteGuid)
    return self.recv_listNoteVersions()
  def send_listNoteVersions(self, authenticationToken, noteGuid):
    """Serialize the listNoteVersions call arguments and flush them."""
    self._oprot.writeMessageBegin('listNoteVersions', TMessageType.CALL, self._seqid)
    args = listNoteVersions_args()
    args.authenticationToken = authenticationToken
    args.noteGuid = noteGuid
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
  def recv_listNoteVersions(self, ):
    """Read the listNoteVersions reply; return the version list or raise."""
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = listNoteVersions_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    # Declared exceptions travel inside the result struct; re-raise locally.
    if result.userException is not None:
      raise result.userException
    if result.systemException is not None:
      raise result.systemException
    if result.notFoundException is not None:
      raise result.notFoundException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "listNoteVersions failed: unknown result");
def getNoteVersion(self, authenticationToken, noteGuid, updateSequenceNum, withResourcesData, withResourcesRecognition, withResourcesAlternateData):
"""
This can be used to retrieve a previous version of a Note after it has been
updated within the service. The caller must identify the note (via its
guid) and the version (via the updateSequenceNumber of that version).
to find a listing of the stored version USNs for a note, call
listNoteVersions.
This call is only available for notes in Premium accounts. (I.e. access
to past versions of Notes is a Premium-only feature.)
@param noteGuid
The GUID of the note to be retrieved.
@param updateSequenceNum
The USN of the version of the note that is being retrieved
@param withResourcesData
If true, any Resource elements in this Note will include the binary
contents of their 'data' field's body.
@param withResourcesRecognition
If true, any Resource elements will include the binary contents of the
'recognition' field's body if recognition data is present.
@param withResourcesAlternateData
If true, any Resource elements in this Note will include the binary
contents of their 'alternateData' fields' body, if an alternate form
is present.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "Note.guid" - if the parameter is missing
</li>
<li> PERMISSION_DENIED "Note" - private note, user doesn't own
</li>
<li> PERMISSION_DENIED "updateSequenceNum" -
The account isn't permitted to access previous versions of notes.
(i.e. this is a Free account.)
</li>
</ul>
@throws EDAMNotFoundException <ul>
<li> "Note.guid" - not found, by GUID
</li>
<li> "Note.updateSequenceNumber" - the Note doesn't have a version with
the corresponding USN.
</li>
</ul>
Parameters:
- authenticationToken
- noteGuid
- updateSequenceNum
- withResourcesData
- withResourcesRecognition
- withResourcesAlternateData
"""
self.send_getNoteVersion(authenticationToken, noteGuid, updateSequenceNum, withResourcesData, withResourcesRecognition, withResourcesAlternateData)
return self.recv_getNoteVersion()
def send_getNoteVersion(self, authenticationToken, noteGuid, updateSequenceNum, withResourcesData, withResourcesRecognition, withResourcesAlternateData):
self._oprot.writeMessageBegin('getNoteVersion', TMessageType.CALL, self._seqid)
args = getNoteVersion_args()
args.authenticationToken = authenticationToken
args.noteGuid = noteGuid
args.updateSequenceNum = updateSequenceNum
args.withResourcesData = withResourcesData
args.withResourcesRecognition = withResourcesRecognition
args.withResourcesAlternateData = withResourcesAlternateData
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getNoteVersion(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = getNoteVersion_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
raise TApplicationException(TApplicationException.MISSING_RESULT, "getNoteVersion failed: unknown result");
def getResource(self, authenticationToken, guid, withData, withRecognition, withAttributes, withAlternateData):
"""
Returns the current state of the resource in the service with the
provided GUID.
If the Resource is found in a public notebook, the authenticationToken
will be ignored (so it could be an empty string). Only the
keys for the applicationData will be returned.
@param guid
The GUID of the resource to be retrieved.
@param withData
If true, the Resource will include the binary contents of the
'data' field's body.
@param withRecognition
If true, the Resource will include the binary contents of the
'recognition' field's body if recognition data is present.
@param withAttributes
If true, the Resource will include the attributes
@param withAlternateData
If true, the Resource will include the binary contents of the
'alternateData' field's body, if an alternate form is present.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "Resource.guid" - if the parameter is missing
</li>
<li> PERMISSION_DENIED "Resource" - private resource, user doesn't own
</li>
</ul>
@throws EDAMNotFoundException <ul>
<li> "Resource.guid" - not found, by GUID
</li>
</ul>
Parameters:
- authenticationToken
- guid
- withData
- withRecognition
- withAttributes
- withAlternateData
"""
self.send_getResource(authenticationToken, guid, withData, withRecognition, withAttributes, withAlternateData)
return self.recv_getResource()
def send_getResource(self, authenticationToken, guid, withData, withRecognition, withAttributes, withAlternateData):
self._oprot.writeMessageBegin('getResource', TMessageType.CALL, self._seqid)
args = getResource_args()
args.authenticationToken = authenticationToken
args.guid = guid
args.withData = withData
args.withRecognition = withRecognition
args.withAttributes = withAttributes
args.withAlternateData = withAlternateData
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getResource(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = getResource_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
raise TApplicationException(TApplicationException.MISSING_RESULT, "getResource failed: unknown result");
def getResourceApplicationData(self, authenticationToken, guid):
"""
Get all of the application data for the Resource identified by GUID,
with values returned within the LazyMap fullMap field.
If there are no applicationData entries, then a LazyMap
with an empty fullMap will be returned. If your application
only needs to fetch its own applicationData entry, use
getResourceApplicationDataEntry instead.
Parameters:
- authenticationToken
- guid
"""
self.send_getResourceApplicationData(authenticationToken, guid)
return self.recv_getResourceApplicationData()
def send_getResourceApplicationData(self, authenticationToken, guid):
self._oprot.writeMessageBegin('getResourceApplicationData', TMessageType.CALL, self._seqid)
args = getResourceApplicationData_args()
args.authenticationToken = authenticationToken
args.guid = guid
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getResourceApplicationData(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = getResourceApplicationData_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
raise TApplicationException(TApplicationException.MISSING_RESULT, "getResourceApplicationData failed: unknown result");
def getResourceApplicationDataEntry(self, authenticationToken, guid, key):
"""
Get the value of a single entry in the applicationData map
for the Resource identified by GUID.
@throws EDAMNotFoundException <ul>
<li> "Resource.guid" - Resource not found, by GUID</li>
<li> "ResourceAttributes.applicationData.key" - Resource not found, by key</li>
</ul>
Parameters:
- authenticationToken
- guid
- key
"""
self.send_getResourceApplicationDataEntry(authenticationToken, guid, key)
return self.recv_getResourceApplicationDataEntry()
def send_getResourceApplicationDataEntry(self, authenticationToken, guid, key):
self._oprot.writeMessageBegin('getResourceApplicationDataEntry', TMessageType.CALL, self._seqid)
args = getResourceApplicationDataEntry_args()
args.authenticationToken = authenticationToken
args.guid = guid
args.key = key
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getResourceApplicationDataEntry(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = getResourceApplicationDataEntry_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
raise TApplicationException(TApplicationException.MISSING_RESULT, "getResourceApplicationDataEntry failed: unknown result");
def setResourceApplicationDataEntry(self, authenticationToken, guid, key, value):
"""
Update, or create, an entry in the applicationData map for
the Resource identified by guid.
Parameters:
- authenticationToken
- guid
- key
- value
"""
self.send_setResourceApplicationDataEntry(authenticationToken, guid, key, value)
return self.recv_setResourceApplicationDataEntry()
def send_setResourceApplicationDataEntry(self, authenticationToken, guid, key, value):
self._oprot.writeMessageBegin('setResourceApplicationDataEntry', TMessageType.CALL, self._seqid)
args = setResourceApplicationDataEntry_args()
args.authenticationToken = authenticationToken
args.guid = guid
args.key = key
args.value = value
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_setResourceApplicationDataEntry(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = setResourceApplicationDataEntry_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
raise TApplicationException(TApplicationException.MISSING_RESULT, "setResourceApplicationDataEntry failed: unknown result");
def unsetResourceApplicationDataEntry(self, authenticationToken, guid, key):
"""
Remove an entry identified by 'key' from the applicationData map for
the Resource identified by 'guid'.
Parameters:
- authenticationToken
- guid
- key
"""
self.send_unsetResourceApplicationDataEntry(authenticationToken, guid, key)
return self.recv_unsetResourceApplicationDataEntry()
def send_unsetResourceApplicationDataEntry(self, authenticationToken, guid, key):
self._oprot.writeMessageBegin('unsetResourceApplicationDataEntry', TMessageType.CALL, self._seqid)
args = unsetResourceApplicationDataEntry_args()
args.authenticationToken = authenticationToken
args.guid = guid
args.key = key
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_unsetResourceApplicationDataEntry(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = unsetResourceApplicationDataEntry_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
raise TApplicationException(TApplicationException.MISSING_RESULT, "unsetResourceApplicationDataEntry failed: unknown result");
def updateResource(self, authenticationToken, resource):
"""
Submit a set of changes to a resource to the service. This can be used
to update the meta-data about the resource, but cannot be used to change
the binary contents of the resource (including the length and hash). These
cannot be changed directly without creating a new resource and removing the
old one via updateNote.
@param resource
A Resource object containing the desired fields to be populated on
the service. The service will attempt to update the resource with the
following fields from the client:
<ul>
<li>guid: must be provided to identify the resource
</li>
<li>mime
</li>
<li>width
</li>
<li>height
</li>
<li>duration
</li>
<li>attributes: optional. if present, the set of attributes will
be replaced.
</li>
</ul>
@return
The Update Sequence Number of the resource after the changes have been
applied.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "Resource.guid" - if the parameter is missing
</li>
<li> BAD_DATA_FORMAT "Resource.mime" - invalid resource MIME type
</li>
<li> BAD_DATA_FORMAT "ResourceAttributes.*" - bad resource string
</li>
<li> LIMIT_REACHED "ResourceAttribute.*" - attribute string too long
</li>
<li> PERMISSION_DENIED "Resource" - private resource, user doesn't own
</li>
</ul>
@throws EDAMNotFoundException <ul>
<li> "Resource.guid" - not found, by GUID
</li>
</ul>
Parameters:
- authenticationToken
- resource
"""
self.send_updateResource(authenticationToken, resource)
return self.recv_updateResource()
def send_updateResource(self, authenticationToken, resource):
self._oprot.writeMessageBegin('updateResource', TMessageType.CALL, self._seqid)
args = updateResource_args()
args.authenticationToken = authenticationToken
args.resource = resource
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_updateResource(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = updateResource_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
raise TApplicationException(TApplicationException.MISSING_RESULT, "updateResource failed: unknown result");
def getResourceData(self, authenticationToken, guid):
"""
Returns binary data of the resource with the provided GUID. For
example, if this were an image resource, this would contain the
raw bits of the image.
If the Resource is found in a public notebook, the authenticationToken
will be ignored (so it could be an empty string).
@param guid
The GUID of the resource to be retrieved.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "Resource.guid" - if the parameter is missing
</li>
<li> PERMISSION_DENIED "Resource" - private resource, user doesn't own
</li>
</ul>
@throws EDAMNotFoundException <ul>
<li> "Resource.guid" - not found, by GUID
</li>
</ul>
Parameters:
- authenticationToken
- guid
"""
self.send_getResourceData(authenticationToken, guid)
return self.recv_getResourceData()
def send_getResourceData(self, authenticationToken, guid):
self._oprot.writeMessageBegin('getResourceData', TMessageType.CALL, self._seqid)
args = getResourceData_args()
args.authenticationToken = authenticationToken
args.guid = guid
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getResourceData(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = getResourceData_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
raise TApplicationException(TApplicationException.MISSING_RESULT, "getResourceData failed: unknown result");
def getResourceByHash(self, authenticationToken, noteGuid, contentHash, withData, withRecognition, withAlternateData):
"""
Returns the current state of a resource, referenced by containing
note GUID and resource content hash.
@param noteGuid
The GUID of the note that holds the resource to be retrieved.
@param contentHash
The MD5 checksum of the resource within that note. Note that
this is the binary checksum, for example from Resource.data.bodyHash,
and not the hex-encoded checksum that is used within an en-media
tag in a note body.
@param withData
If true, the Resource will include the binary contents of the
'data' field's body.
@param withRecognition
If true, the Resource will include the binary contents of the
'recognition' field's body.
@param withAlternateData
If true, the Resource will include the binary contents of the
'alternateData' field's body, if an alternate form is present.
@throws EDAMUserException <ul>
<li> DATA_REQUIRED "Note.guid" - noteGuid param missing
</li>
<li> DATA_REQUIRED "Note.contentHash" - contentHash param missing
</li>
<li> PERMISSION_DENIED "Resource" - private resource, user doesn't own
</li>
</ul>
@throws EDAMNotFoundException <ul>
<li> "Note" - not found, by guid
</li>
<li> "Resource" - not found, by hash
</li>
</ul>
Parameters:
- authenticationToken
- noteGuid
- contentHash
- withData
- withRecognition
- withAlternateData
"""
self.send_getResourceByHash(authenticationToken, noteGuid, contentHash, withData, withRecognition, withAlternateData)
return self.recv_getResourceByHash()
def send_getResourceByHash(self, authenticationToken, noteGuid, contentHash, withData, withRecognition, withAlternateData):
self._oprot.writeMessageBegin('getResourceByHash', TMessageType.CALL, self._seqid)
args = getResourceByHash_args()
args.authenticationToken = authenticationToken
args.noteGuid = noteGuid
args.contentHash = contentHash
args.withData = withData
args.withRecognition = withRecognition
args.withAlternateData = withAlternateData
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getResourceByHash(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = getResourceByHash_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
raise TApplicationException(TApplicationException.MISSING_RESULT, "getResourceByHash failed: unknown result");
def getResourceRecognition(self, authenticationToken, guid):
"""
Returns the binary contents of the recognition index for the resource
with the provided GUID. If the caller asks about a resource that has
no recognition data, this will throw EDAMNotFoundException.
If the Resource is found in a public notebook, the authenticationToken
will be ignored (so it could be an empty string).
@param guid
The GUID of the resource whose recognition data should be retrieved.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "Resource.guid" - if the parameter is missing
</li>
<li> PERMISSION_DENIED "Resource" - private resource, user doesn't own
</li>
</ul>
@throws EDAMNotFoundException <ul>
<li> "Resource.guid" - not found, by GUID
</li>
<li> "Resource.recognition" - resource has no recognition
</li>
</ul>
Parameters:
- authenticationToken
- guid
"""
self.send_getResourceRecognition(authenticationToken, guid)
return self.recv_getResourceRecognition()
def send_getResourceRecognition(self, authenticationToken, guid):
self._oprot.writeMessageBegin('getResourceRecognition', TMessageType.CALL, self._seqid)
args = getResourceRecognition_args()
args.authenticationToken = authenticationToken
args.guid = guid
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getResourceRecognition(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = getResourceRecognition_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
raise TApplicationException(TApplicationException.MISSING_RESULT, "getResourceRecognition failed: unknown result");
def getResourceAlternateData(self, authenticationToken, guid):
"""
If the Resource with the provided GUID has an alternate data representation
(indicated via the Resource.alternateData field), then this request can
be used to retrieve the binary contents of that alternate data file.
If the caller asks about a resource that has no alternate data form, this
will throw EDAMNotFoundException.
@param guid
The GUID of the resource whose recognition data should be retrieved.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "Resource.guid" - if the parameter is missing
</li>
<li> PERMISSION_DENIED "Resource" - private resource, user doesn't own
</li>
</ul>
@throws EDAMNotFoundException <ul>
<li> "Resource.guid" - not found, by GUID
</li>
<li> "Resource.alternateData" - resource has no recognition
</li>
</ul>
Parameters:
- authenticationToken
- guid
"""
self.send_getResourceAlternateData(authenticationToken, guid)
return self.recv_getResourceAlternateData()
def send_getResourceAlternateData(self, authenticationToken, guid):
self._oprot.writeMessageBegin('getResourceAlternateData', TMessageType.CALL, self._seqid)
args = getResourceAlternateData_args()
args.authenticationToken = authenticationToken
args.guid = guid
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getResourceAlternateData(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = getResourceAlternateData_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
raise TApplicationException(TApplicationException.MISSING_RESULT, "getResourceAlternateData failed: unknown result");
def getResourceAttributes(self, authenticationToken, guid):
"""
Returns the set of attributes for the Resource with the provided GUID.
If the Resource is found in a public notebook, the authenticationToken
will be ignored (so it could be an empty string).
@param guid
The GUID of the resource whose attributes should be retrieved.
@throws EDAMUserException <ul>
<li> BAD_DATA_FORMAT "Resource.guid" - if the parameter is missing
</li>
<li> PERMISSION_DENIED "Resource" - private resource, user doesn't own
</li>
</ul>
@throws EDAMNotFoundException <ul>
<li> "Resource.guid" - not found, by GUID
</li>
</ul>
Parameters:
- authenticationToken
- guid
"""
self.send_getResourceAttributes(authenticationToken, guid)
return self.recv_getResourceAttributes()
def send_getResourceAttributes(self, authenticationToken, guid):
self._oprot.writeMessageBegin('getResourceAttributes', TMessageType.CALL, self._seqid)
args = getResourceAttributes_args()
args.authenticationToken = authenticationToken
args.guid = guid
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getResourceAttributes(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = getResourceAttributes_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
raise TApplicationException(TApplicationException.MISSING_RESULT, "getResourceAttributes failed: unknown result");
def getPublicNotebook(self, userId, publicUri):
"""
<p>
Looks for a user account with the provided userId on this NoteStore
shard and determines whether that account contains a public notebook
with the given URI. If the account is not found, or no public notebook
exists with this URI, this will throw an EDAMNotFoundException,
otherwise this will return the information for that Notebook.
</p>
<p>
If a notebook is visible on the web with a full URL like
http://www.evernote.com/pub/sethdemo/api
Then 'sethdemo' is the username that can be used to look up the userId,
and 'api' is the publicUri.
</p>
@param userId
The numeric identifier for the user who owns the public notebook.
To find this value based on a username string, you can invoke
UserStore.getPublicUserInfo
@param publicUri
The uri string for the public notebook, from Notebook.publishing.uri.
@throws EDAMNotFoundException <ul>
<li>"Publishing.uri" - not found, by URI</li>
</ul>
@throws EDAMSystemException <ul>
<li> TAKEN_DOWN "PublicNotebook" - The specified public notebook is
taken down (for all requesters).</li>
<li> TAKEN_DOWN "Country" - The specified public notebook is taken
down for the requester because of an IP-based country lookup.</li>
</ul>
Parameters:
- userId
- publicUri
"""
self.send_getPublicNotebook(userId, publicUri)
return self.recv_getPublicNotebook()
def send_getPublicNotebook(self, userId, publicUri):
self._oprot.writeMessageBegin('getPublicNotebook', TMessageType.CALL, self._seqid)
args = getPublicNotebook_args()
args.userId = userId
args.publicUri = publicUri
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getPublicNotebook(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = getPublicNotebook_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.systemException is not None:
raise result.systemException
if result.notFoundException is not None:
raise result.notFoundException
raise TApplicationException(TApplicationException.MISSING_RESULT, "getPublicNotebook failed: unknown result");
def createSharedNotebook(self, authenticationToken, sharedNotebook):
"""
Used to construct a shared notebook object. The constructed notebook will
contain a "share key" which serve as a unique identifer and access token
for a user to access the notebook of the shared notebook owner.
@param sharedNotebook
A shared notebook object populated with the email address of the share
recipient, the notebook guid and the access permissions. All other
attributes of the shared object are ignored. The SharedNotebook.allowPreview
field must be explicitly set with either a true or false value.
@return
The fully populated SharedNotebook object including the server assigned
share id and shareKey which can both be used to uniquely identify the
SharedNotebook.
@throws EDAMUserException <ul>
<li>BAD_DATA_FORMAT "SharedNotebook.email" - if the email was not valid</li>
<li>BAD_DATA_FORMAT "requireLogin" - if the SharedNotebook.allowPreview field was
not set, and the SharedNotebook.requireLogin was also not set or was set to
false.</li>
<li>PERMISSION_DENIED "SharedNotebook.recipientSettings" - if
recipientSettings is set in the sharedNotebook. Only the recipient
can set these values via the setSharedNotebookRecipientSettings
method.
</li>
</ul>
@throws EDAMNotFoundException <ul>
<li>Notebook.guid - if the notebookGuid is not a valid GUID for the user.
</li>
</ul>
Parameters:
- authenticationToken
- sharedNotebook
"""
self.send_createSharedNotebook(authenticationToken, sharedNotebook)
return self.recv_createSharedNotebook()
def send_createSharedNotebook(self, authenticationToken, sharedNotebook):
self._oprot.writeMessageBegin('createSharedNotebook', TMessageType.CALL, self._seqid)
args = createSharedNotebook_args()
args.authenticationToken = authenticationToken
args.sharedNotebook = sharedNotebook
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_createSharedNotebook(self, ):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = createSharedNotebook_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.userException is not None:
raise result.userException
if result.notFoundException is not None:
raise result.notFoundException
if result.systemException is not None:
raise result.systemException
raise TApplicationException(TApplicationException.MISSING_RESULT, "createSharedNotebook failed: unknown result");
def updateSharedNotebook(self, authenticationToken, sharedNotebook):
    """
    Update a SharedNotebook object.
    @param authenticationToken
    Must be an authentication token from the owner or a shared notebook
    authentication token or business authentication token with sufficient
    permissions to change invitations for a notebook.
    @param sharedNotebook
    The SharedNotebook object containing the requested changes.
    The "id" of the shared notebook must be set to allow the service
    to identify the SharedNotebook to be updated. In addition, you MUST set
    the email, permission, and allowPreview fields to the desired values.
    All other fields will be ignored if set.
    @return
    The Update Serial Number for this change within the account.
    @throws EDAMUserException <ul>
    <li>UNSUPPORTED_OPERATION "updateSharedNotebook" - if this service instance does not support shared notebooks.</li>
    <li>BAD_DATA_FORMAT "SharedNotebook.email" - if the email was not valid.</li>
    <li>DATA_REQUIRED "SharedNotebook.id" - if the id field was not set.</li>
    <li>DATA_REQUIRED "SharedNotebook.privilege" - if the privilege field was not set.</li>
    <li>DATA_REQUIRED "SharedNotebook.allowPreview" - if the allowPreview field was not set.</li>
    </ul>
    @throws EDAMNotFoundException <ul>
    <li>SharedNotebook.id - if no shared notebook with the specified ID was found.
    </ul>
    Parameters:
    - authenticationToken
    - sharedNotebook
    """
    # Synchronous RPC: serialize the request, then block on the reply.
    self.send_updateSharedNotebook(authenticationToken, sharedNotebook)
    return self.recv_updateSharedNotebook()
def send_updateSharedNotebook(self, authenticationToken, sharedNotebook):
    # Serialize an 'updateSharedNotebook' CALL frame onto the output protocol.
    self._oprot.writeMessageBegin('updateSharedNotebook', TMessageType.CALL, self._seqid)
    args = updateSharedNotebook_args()
    args.authenticationToken = authenticationToken
    args.sharedNotebook = sharedNotebook
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    # Flush so the request actually goes out on the wire.
    self._oprot.trans.flush()
def recv_updateSharedNotebook(self, ):
    # Read one reply frame for updateSharedNotebook from the input protocol.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        # RPC-framework-level failure: decode and re-raise it.
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = updateSharedNotebook_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    # Declared (application-level) exceptions travel inside the result struct.
    if result.userException is not None:
        raise result.userException
    if result.notFoundException is not None:
        raise result.notFoundException
    if result.systemException is not None:
        raise result.systemException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "updateSharedNotebook failed: unknown result");
def setSharedNotebookRecipientSettings(self, authenticationToken, sharedNotebookId, recipientSettings):
    """
    Set values for the recipient settings associated with a shared notebook. Having
    update rights to the shared notebook record itself has no effect on this call;
    only the recipient of the shared notebook can set the recipient settings.
    If you do <i>not</i> wish to, or cannot, change one of the reminderNotifyEmail or
    reminderNotifyInApp fields, you must leave that field unset in recipientSettings.
    This method will skip that field for updates and leave the existing state as
    it is.
    @return The update sequence number of the account to which the shared notebook
    belongs, which is the account from which we are sharing a notebook.
    @throws EDAMNotFoundException "sharedNotebookId" - Thrown if the service does not
    have a shared notebook record for the sharedNotebookId on the given shard. If you
    receive this exception, it is probable that the shared notebook record has
    been revoked or expired, or that you accessed the wrong shard.
    @throws EDAMUserException <ul>
    <li>PEMISSION_DENIED "authenticationToken" - If you do not have permission to set
    the recipient settings for the shared notebook. Only the recipient has
    permission to do this.
    <li>DATA_CONFLICT "recipientSettings.reminderNotifyEmail" - Setting whether
    or not you want to receive reminder e-mail notifications is possible on
    a business notebook in the business to which the user belongs. All
    others can safely unset the reminderNotifyEmail field from the
    recipientSettings parameter.
    </ul>
    Parameters:
    - authenticationToken
    - sharedNotebookId
    - recipientSettings
    """
    # Synchronous RPC: serialize the request, then block on the reply.
    self.send_setSharedNotebookRecipientSettings(authenticationToken, sharedNotebookId, recipientSettings)
    return self.recv_setSharedNotebookRecipientSettings()
def send_setSharedNotebookRecipientSettings(self, authenticationToken, sharedNotebookId, recipientSettings):
    # Serialize a 'setSharedNotebookRecipientSettings' CALL frame onto the output protocol.
    self._oprot.writeMessageBegin('setSharedNotebookRecipientSettings', TMessageType.CALL, self._seqid)
    args = setSharedNotebookRecipientSettings_args()
    args.authenticationToken = authenticationToken
    args.sharedNotebookId = sharedNotebookId
    args.recipientSettings = recipientSettings
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    # Flush so the request actually goes out on the wire.
    self._oprot.trans.flush()
def recv_setSharedNotebookRecipientSettings(self, ):
    # Read one reply frame for setSharedNotebookRecipientSettings.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        # RPC-framework-level failure: decode and re-raise it.
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = setSharedNotebookRecipientSettings_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    # Declared (application-level) exceptions travel inside the result struct.
    if result.userException is not None:
        raise result.userException
    if result.notFoundException is not None:
        raise result.notFoundException
    if result.systemException is not None:
        raise result.systemException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "setSharedNotebookRecipientSettings failed: unknown result");
def sendMessageToSharedNotebookMembers(self, authenticationToken, notebookGuid, messageText, recipients):
    """
    Send a reminder message to some or all of the email addresses that a notebook has been
    shared with. The message includes the current link to view the notebook.
    @param authenticationToken
    The auth token of the user with permissions to share the notebook
    @param notebookGuid
    The guid of the shared notebook
    @param messageText
    User provided text to include in the email
    @param recipients
    The email addresses of the recipients. If this list is empty then all of the
    users that the notebook has been shared with are emailed.
    If an email address doesn't correspond to share invite members then that address
    is ignored.
    @return
    The number of messages sent
    @throws EDAMUserException <ul>
    <li> LIMIT_REACHED "(recipients)" -
    The email can't be sent because this would exceed the user's daily
    email limit.
    </li>
    <li> PERMISSION_DENIED "Notebook.guid" - The user doesn't have permission to
    send a message for the specified notebook.
    </li>
    </ul>
    @throws EDAMNotFoundException <ul>
    <li> "Notebook.guid" - not found, by GUID
    </li>
    </ul>
    Parameters:
    - authenticationToken
    - notebookGuid
    - messageText
    - recipients
    """
    # Synchronous RPC: serialize the request, then block on the reply.
    self.send_sendMessageToSharedNotebookMembers(authenticationToken, notebookGuid, messageText, recipients)
    return self.recv_sendMessageToSharedNotebookMembers()
def send_sendMessageToSharedNotebookMembers(self, authenticationToken, notebookGuid, messageText, recipients):
    # Serialize a 'sendMessageToSharedNotebookMembers' CALL frame onto the output protocol.
    self._oprot.writeMessageBegin('sendMessageToSharedNotebookMembers', TMessageType.CALL, self._seqid)
    args = sendMessageToSharedNotebookMembers_args()
    args.authenticationToken = authenticationToken
    args.notebookGuid = notebookGuid
    args.messageText = messageText
    args.recipients = recipients
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    # Flush so the request actually goes out on the wire.
    self._oprot.trans.flush()
def recv_sendMessageToSharedNotebookMembers(self, ):
    # Read one reply frame for sendMessageToSharedNotebookMembers.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        # RPC-framework-level failure: decode and re-raise it.
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = sendMessageToSharedNotebookMembers_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    # Declared (application-level) exceptions travel inside the result struct.
    if result.userException is not None:
        raise result.userException
    if result.notFoundException is not None:
        raise result.notFoundException
    if result.systemException is not None:
        raise result.systemException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "sendMessageToSharedNotebookMembers failed: unknown result");
def listSharedNotebooks(self, authenticationToken):
    """
    Lists the collection of shared notebooks for all notebooks in the
    users account.
    @return
    The list of all SharedNotebooks for the user
    Parameters:
    - authenticationToken
    """
    # Synchronous RPC: serialize the request, then block on the reply.
    self.send_listSharedNotebooks(authenticationToken)
    return self.recv_listSharedNotebooks()
def send_listSharedNotebooks(self, authenticationToken):
    # Serialize a 'listSharedNotebooks' CALL frame onto the output protocol.
    self._oprot.writeMessageBegin('listSharedNotebooks', TMessageType.CALL, self._seqid)
    args = listSharedNotebooks_args()
    args.authenticationToken = authenticationToken
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    # Flush so the request actually goes out on the wire.
    self._oprot.trans.flush()
def recv_listSharedNotebooks(self, ):
    # Read one reply frame for listSharedNotebooks.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        # RPC-framework-level failure: decode and re-raise it.
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = listSharedNotebooks_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    # Declared (application-level) exceptions travel inside the result struct.
    if result.userException is not None:
        raise result.userException
    if result.notFoundException is not None:
        raise result.notFoundException
    if result.systemException is not None:
        raise result.systemException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "listSharedNotebooks failed: unknown result");
def expungeSharedNotebooks(self, authenticationToken, sharedNotebookIds):
    """
    Expunges the SharedNotebooks in the user's account using the
    SharedNotebook.id as the identifier.
    <p/>
    NOTE: This function is generally not available to third party applications.
    Calls will result in an EDAMUserException with the error code
    PERMISSION_DENIED.
    @param
    sharedNotebookIds - a list of ShardNotebook.id longs identifying the
    objects to delete permanently.
    @return
    The account's update sequence number.
    Parameters:
    - authenticationToken
    - sharedNotebookIds
    """
    # Synchronous RPC: serialize the request, then block on the reply.
    self.send_expungeSharedNotebooks(authenticationToken, sharedNotebookIds)
    return self.recv_expungeSharedNotebooks()
def send_expungeSharedNotebooks(self, authenticationToken, sharedNotebookIds):
    # Serialize an 'expungeSharedNotebooks' CALL frame onto the output protocol.
    self._oprot.writeMessageBegin('expungeSharedNotebooks', TMessageType.CALL, self._seqid)
    args = expungeSharedNotebooks_args()
    args.authenticationToken = authenticationToken
    args.sharedNotebookIds = sharedNotebookIds
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    # Flush so the request actually goes out on the wire.
    self._oprot.trans.flush()
def recv_expungeSharedNotebooks(self, ):
    # Read one reply frame for expungeSharedNotebooks.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        # RPC-framework-level failure: decode and re-raise it.
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = expungeSharedNotebooks_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    # Declared (application-level) exceptions travel inside the result struct.
    if result.userException is not None:
        raise result.userException
    if result.notFoundException is not None:
        raise result.notFoundException
    if result.systemException is not None:
        raise result.systemException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "expungeSharedNotebooks failed: unknown result");
def createLinkedNotebook(self, authenticationToken, linkedNotebook):
    """
    Asks the service to make a linked notebook with the provided name, username
    of the owner and identifiers provided. A linked notebook can be either a
    link to a public notebook or to a private shared notebook.
    @param linkedNotebook
    The desired fields for the linked notebook must be provided on this
    object. The name of the linked notebook must be set. Either a username
    uri or a shard id and share key must be provided otherwise a
    EDAMUserException is thrown.
    @return
    The newly created LinkedNotebook. The server-side id will be
    saved in this object's 'id' field.
    @throws EDAMUserException <ul>
    <li> BAD_DATA_FORMAT "LinkedNotebook.name" - invalid length or pattern
    </li>
    <li> BAD_DATA_FORMAT "LinkedNotebook.username" - bad username format
    </li>
    <li> BAD_DATA_FORMAT "LinkedNotebook.uri" -
    if public notebook set but bad uri
    </li>
    <li> BAD_DATA_FORMAT "LinkedNotebook.shareKey" -
    if private notebook set but bad shareKey
    </li>
    <li> DATA_REQUIRED "LinkedNotebook.shardId" -
    if private notebook but shard id not provided
    </li>
    </ul>
    Parameters:
    - authenticationToken
    - linkedNotebook
    """
    # Synchronous RPC: serialize the request, then block on the reply.
    self.send_createLinkedNotebook(authenticationToken, linkedNotebook)
    return self.recv_createLinkedNotebook()
def send_createLinkedNotebook(self, authenticationToken, linkedNotebook):
    # Serialize a 'createLinkedNotebook' CALL frame onto the output protocol.
    self._oprot.writeMessageBegin('createLinkedNotebook', TMessageType.CALL, self._seqid)
    args = createLinkedNotebook_args()
    args.authenticationToken = authenticationToken
    args.linkedNotebook = linkedNotebook
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    # Flush so the request actually goes out on the wire.
    self._oprot.trans.flush()
def recv_createLinkedNotebook(self, ):
    # Read one reply frame for createLinkedNotebook.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        # RPC-framework-level failure: decode and re-raise it.
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = createLinkedNotebook_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    # Declared (application-level) exceptions travel inside the result struct.
    if result.userException is not None:
        raise result.userException
    if result.notFoundException is not None:
        raise result.notFoundException
    if result.systemException is not None:
        raise result.systemException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "createLinkedNotebook failed: unknown result");
def updateLinkedNotebook(self, authenticationToken, linkedNotebook):
    """
    Updates the name of a linked notebook.
    @param linkedNotebook
    The LinkedNotebook object carrying the new name.
    @return
    The Update Sequence Number for this change within the account.
    @throws EDAMUserException <ul>
    <li> BAD_DATA_FORMAT "LinkedNotebook.name" - invalid length or pattern
    </li>
    </ul>
    Parameters:
    - authenticationToken
    - linkedNotebook
    """
    # Synchronous RPC: serialize the request, then block on the reply.
    self.send_updateLinkedNotebook(authenticationToken, linkedNotebook)
    return self.recv_updateLinkedNotebook()
def send_updateLinkedNotebook(self, authenticationToken, linkedNotebook):
    # Serialize an 'updateLinkedNotebook' CALL frame onto the output protocol.
    self._oprot.writeMessageBegin('updateLinkedNotebook', TMessageType.CALL, self._seqid)
    args = updateLinkedNotebook_args()
    args.authenticationToken = authenticationToken
    args.linkedNotebook = linkedNotebook
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    # Flush so the request actually goes out on the wire.
    self._oprot.trans.flush()
def recv_updateLinkedNotebook(self, ):
    # Read one reply frame for updateLinkedNotebook.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        # RPC-framework-level failure: decode and re-raise it.
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = updateLinkedNotebook_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    # Declared (application-level) exceptions travel inside the result struct.
    if result.userException is not None:
        raise result.userException
    if result.notFoundException is not None:
        raise result.notFoundException
    if result.systemException is not None:
        raise result.systemException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "updateLinkedNotebook failed: unknown result");
def listLinkedNotebooks(self, authenticationToken):
    """
    Returns a list of linked notebooks
    Parameters:
    - authenticationToken
    """
    # Synchronous RPC: serialize the request, then block on the reply.
    self.send_listLinkedNotebooks(authenticationToken)
    return self.recv_listLinkedNotebooks()
def send_listLinkedNotebooks(self, authenticationToken):
    # Serialize a 'listLinkedNotebooks' CALL frame onto the output protocol.
    self._oprot.writeMessageBegin('listLinkedNotebooks', TMessageType.CALL, self._seqid)
    args = listLinkedNotebooks_args()
    args.authenticationToken = authenticationToken
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    # Flush so the request actually goes out on the wire.
    self._oprot.trans.flush()
def recv_listLinkedNotebooks(self, ):
    # Read one reply frame for listLinkedNotebooks.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        # RPC-framework-level failure: decode and re-raise it.
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = listLinkedNotebooks_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    # Declared (application-level) exceptions travel inside the result struct.
    if result.userException is not None:
        raise result.userException
    if result.notFoundException is not None:
        raise result.notFoundException
    if result.systemException is not None:
        raise result.systemException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "listLinkedNotebooks failed: unknown result");
def expungeLinkedNotebook(self, authenticationToken, guid):
    """
    Permanently expunges the linked notebook from the account.
    <p/>
    NOTE: This function is generally not available to third party applications.
    Calls will result in an EDAMUserException with the error code
    PERMISSION_DENIED.
    @param guid
    The LinkedNotebook.guid field of the LinkedNotebook to permanently remove
    from the account.
    Parameters:
    - authenticationToken
    - guid
    """
    # Synchronous RPC: serialize the request, then block on the reply.
    self.send_expungeLinkedNotebook(authenticationToken, guid)
    return self.recv_expungeLinkedNotebook()
def send_expungeLinkedNotebook(self, authenticationToken, guid):
    # Serialize an 'expungeLinkedNotebook' CALL frame onto the output protocol.
    self._oprot.writeMessageBegin('expungeLinkedNotebook', TMessageType.CALL, self._seqid)
    args = expungeLinkedNotebook_args()
    args.authenticationToken = authenticationToken
    args.guid = guid
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    # Flush so the request actually goes out on the wire.
    self._oprot.trans.flush()
def recv_expungeLinkedNotebook(self, ):
    # Read one reply frame for expungeLinkedNotebook.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        # RPC-framework-level failure: decode and re-raise it.
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = expungeLinkedNotebook_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    # Declared (application-level) exceptions travel inside the result struct.
    if result.userException is not None:
        raise result.userException
    if result.notFoundException is not None:
        raise result.notFoundException
    if result.systemException is not None:
        raise result.systemException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "expungeLinkedNotebook failed: unknown result");
def authenticateToSharedNotebook(self, shareKey, authenticationToken):
    """
    Asks the service to produce an authentication token that can be used to
    access the contents of a shared notebook from someone else's account.
    This authenticationToken can be used with the various other NoteStore
    calls to find and retrieve notes, and if the permissions in the shared
    notebook are sufficient, to make changes to the contents of the notebook.
    @param shareKey
    The 'shareKey' identifier from the SharedNotebook that was granted to
    some recipient. This string internally encodes the notebook identifier
    and a security signature.
    @param authenticationToken
    If a non-empty string is provided, this is the full user-based
    authentication token that identifies the user who is currently logged in
    and trying to access the shared notebook. This may be required if the
    notebook was created with 'requireLogin'.
    If this string is empty, the service will attempt to authenticate to the
    shared notebook without any logged in user.
    @throws EDAMSystemException <ul>
    <li> BAD_DATA_FORMAT "shareKey" - invalid shareKey string
    </li>
    <li> INVALID_AUTH "shareKey" - bad signature on shareKey string
    </li>
    </ul>
    @throws EDAMNotFoundException <ul>
    <li> "SharedNotebook.id" - the shared notebook no longer exists
    </li>
    </ul>
    @throws EDAMUserException <ul>
    <li> DATA_REQUIRED "authenticationToken" - the share requires login, and
    no valid authentication token was provided.
    </li>
    <li> PERMISSION_DENIED "SharedNotebook.username" - share requires login,
    and another username has already been bound to this notebook.
    </li>
    </ul>
    Parameters:
    - shareKey
    - authenticationToken
    """
    # Synchronous RPC: serialize the request, then block on the reply.
    self.send_authenticateToSharedNotebook(shareKey, authenticationToken)
    return self.recv_authenticateToSharedNotebook()
def send_authenticateToSharedNotebook(self, shareKey, authenticationToken):
    # Serialize an 'authenticateToSharedNotebook' CALL frame onto the output protocol.
    self._oprot.writeMessageBegin('authenticateToSharedNotebook', TMessageType.CALL, self._seqid)
    args = authenticateToSharedNotebook_args()
    args.shareKey = shareKey
    args.authenticationToken = authenticationToken
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    # Flush so the request actually goes out on the wire.
    self._oprot.trans.flush()
def recv_authenticateToSharedNotebook(self, ):
    # Read one reply frame for authenticateToSharedNotebook.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        # RPC-framework-level failure: decode and re-raise it.
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = authenticateToSharedNotebook_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    # Declared (application-level) exceptions travel inside the result struct.
    if result.userException is not None:
        raise result.userException
    if result.notFoundException is not None:
        raise result.notFoundException
    if result.systemException is not None:
        raise result.systemException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "authenticateToSharedNotebook failed: unknown result");
def getSharedNotebookByAuth(self, authenticationToken):
    """
    This function is used to retrieve extended information about a shared
    notebook by a guest who has already authenticated to access that notebook.
    This requires an 'authenticationToken' parameter which should be the
    result of a call to authenticateToSharedNotebook(...).
    I.e. this is the token that gives access to the particular shared notebook
    in someone else's account -- it's not the authenticationToken for the
    owner of the notebook itself.
    @param authenticationToken
    Should be the authentication token retrieved from the reply of
    authenticateToSharedNotebook(), proving access to a particular shared
    notebook.
    @throws EDAMUserException <ul>
    <li> PERMISSION_DENIED "authenticationToken" -
    authentication token doesn't correspond to a valid shared notebook
    </li>
    </ul>
    @throws EDAMNotFoundException <ul>
    <li> "SharedNotebook.id" - the shared notebook no longer exists
    </li>
    </ul>
    Parameters:
    - authenticationToken
    """
    # Synchronous RPC: serialize the request, then block on the reply.
    self.send_getSharedNotebookByAuth(authenticationToken)
    return self.recv_getSharedNotebookByAuth()
def send_getSharedNotebookByAuth(self, authenticationToken):
    # Serialize a 'getSharedNotebookByAuth' CALL frame onto the output protocol.
    self._oprot.writeMessageBegin('getSharedNotebookByAuth', TMessageType.CALL, self._seqid)
    args = getSharedNotebookByAuth_args()
    args.authenticationToken = authenticationToken
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    # Flush so the request actually goes out on the wire.
    self._oprot.trans.flush()
def recv_getSharedNotebookByAuth(self, ):
    # Read one reply frame for getSharedNotebookByAuth.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        # RPC-framework-level failure: decode and re-raise it.
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = getSharedNotebookByAuth_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    # Declared (application-level) exceptions travel inside the result struct.
    if result.userException is not None:
        raise result.userException
    if result.notFoundException is not None:
        raise result.notFoundException
    if result.systemException is not None:
        raise result.systemException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "getSharedNotebookByAuth failed: unknown result");
def emailNote(self, authenticationToken, parameters):
    """
    Attempts to send a single note to one or more email recipients.
    <p/>
    NOTE: This function is generally not available to third party applications.
    Calls will result in an EDAMUserException with the error code
    PERMISSION_DENIED.
    @param authenticationToken
    The note will be sent as the user logged in via this token, using that
    user's registered email address. If the authenticated user doesn't
    have permission to read that note, the emailing will fail.
    @param parameters
    The note must be specified either by GUID (in which case it will be
    sent using the existing data in the service), or else the full Note
    must be passed to this call. This also specifies the additional
    email fields that will be used in the email.
    @throws EDAMUserException <ul>
    <li> LIMIT_REACHED "NoteEmailParameters.toAddresses" -
    The email can't be sent because this would exceed the user's daily
    email limit.
    </li>
    <li> BAD_DATA_FORMAT "(email address)" -
    email address malformed
    </li>
    <li> DATA_REQUIRED "NoteEmailParameters.toAddresses" -
    if there are no To: or Cc: addresses provided.
    </li>
    <li> DATA_REQUIRED "Note.title" -
    if the caller provides a Note parameter with no title
    </li>
    <li> DATA_REQUIRED "Note.content" -
    if the caller provides a Note parameter with no content
    </li>
    <li> ENML_VALIDATION "*" - note content doesn't validate against DTD
    </li>
    <li> DATA_REQUIRED "NoteEmailParameters.note" -
    if no guid or note provided
    </li>
    <li> PERMISSION_DENIED "Note" - private note, user doesn't own
    </li>
    </ul>
    @throws EDAMNotFoundException <ul>
    <li> "Note.guid" - not found, by GUID
    </li>
    </ul>
    Parameters:
    - authenticationToken
    - parameters
    """
    # Void RPC: still waits for the reply so server-side exceptions propagate.
    self.send_emailNote(authenticationToken, parameters)
    self.recv_emailNote()
def send_emailNote(self, authenticationToken, parameters):
    # Serialize an 'emailNote' CALL frame onto the output protocol.
    self._oprot.writeMessageBegin('emailNote', TMessageType.CALL, self._seqid)
    args = emailNote_args()
    args.authenticationToken = authenticationToken
    args.parameters = parameters
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    # Flush so the request actually goes out on the wire.
    self._oprot.trans.flush()
def recv_emailNote(self, ):
    # Read one reply frame for emailNote (void method: no success field).
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        # RPC-framework-level failure: decode and re-raise it.
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = emailNote_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    # Declared (application-level) exceptions travel inside the result struct.
    if result.userException is not None:
        raise result.userException
    if result.notFoundException is not None:
        raise result.notFoundException
    if result.systemException is not None:
        raise result.systemException
    # Void return: nothing to hand back when no exception was set.
    return
def shareNote(self, authenticationToken, guid):
    """
    If this note is not already shared (via its own direct URL), then this
    will start sharing that note.
    This will return the secret "Note Key" for this note that
    can currently be used in conjunction with the Note's GUID to gain direct
    read-only access to the Note.
    If the note is already shared, then this won't make any changes to the
    note, and the existing "Note Key" will be returned. The only way to change
    the Note Key for an existing note is to stopSharingNote first, and then
    call this function.
    @param guid
    The GUID of the note to be shared.
    @throws EDAMUserException <ul>
    <li> BAD_DATA_FORMAT "Note.guid" - if the parameter is missing
    </li>
    <li> PERMISSION_DENIED "Note" - private note, user doesn't own
    </li>
    </ul>
    @throws EDAMNotFoundException <ul>
    <li> "Note.guid" - not found, by GUID
    </li>
    </ul>
    Parameters:
    - authenticationToken
    - guid
    """
    # Synchronous RPC: serialize the request, then block on the reply.
    self.send_shareNote(authenticationToken, guid)
    return self.recv_shareNote()
def send_shareNote(self, authenticationToken, guid):
    # Serialize a 'shareNote' CALL frame onto the output protocol.
    self._oprot.writeMessageBegin('shareNote', TMessageType.CALL, self._seqid)
    args = shareNote_args()
    args.authenticationToken = authenticationToken
    args.guid = guid
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    # Flush so the request actually goes out on the wire.
    self._oprot.trans.flush()
def recv_shareNote(self, ):
    # Read one reply frame for shareNote.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        # RPC-framework-level failure: decode and re-raise it.
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = shareNote_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    # Declared (application-level) exceptions travel inside the result struct.
    if result.userException is not None:
        raise result.userException
    if result.notFoundException is not None:
        raise result.notFoundException
    if result.systemException is not None:
        raise result.systemException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "shareNote failed: unknown result");
def stopSharingNote(self, authenticationToken, guid):
    """
    If this note is not already shared then this will stop sharing that note
    and invalidate its "Note Key", so any existing URLs to access that Note
    will stop working.
    If the Note is not shared, then this function will do nothing.
    @param guid
    The GUID of the note to be un-shared.
    @throws EDAMUserException <ul>
    <li> BAD_DATA_FORMAT "Note.guid" - if the parameter is missing
    </li>
    <li> PERMISSION_DENIED "Note" - private note, user doesn't own
    </li>
    </ul>
    @throws EDAMNotFoundException <ul>
    <li> "Note.guid" - not found, by GUID
    </li>
    </ul>
    Parameters:
    - authenticationToken
    - guid
    """
    # Void RPC: still waits for the reply so server-side exceptions propagate.
    self.send_stopSharingNote(authenticationToken, guid)
    self.recv_stopSharingNote()
def send_stopSharingNote(self, authenticationToken, guid):
    # Serialize a 'stopSharingNote' CALL frame onto the output protocol.
    self._oprot.writeMessageBegin('stopSharingNote', TMessageType.CALL, self._seqid)
    args = stopSharingNote_args()
    args.authenticationToken = authenticationToken
    args.guid = guid
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    # Flush so the request actually goes out on the wire.
    self._oprot.trans.flush()
def recv_stopSharingNote(self, ):
    # Read one reply frame for stopSharingNote (void method: no success field).
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        # RPC-framework-level failure: decode and re-raise it.
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = stopSharingNote_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    # Declared (application-level) exceptions travel inside the result struct.
    if result.userException is not None:
        raise result.userException
    if result.notFoundException is not None:
        raise result.notFoundException
    if result.systemException is not None:
        raise result.systemException
    # Void return: nothing to hand back when no exception was set.
    return
def authenticateToSharedNote(self, guid, noteKey, authenticationToken):
    """
    Asks the service to produce an authentication token that can be used to
    access the contents of a single Note which was individually shared
    from someone's account.
    This authenticationToken can be used with the various other NoteStore
    calls to find and retrieve the Note and its directly-referenced children.
    @param guid
    The GUID identifying this Note on this shard.
    @param noteKey
    The 'noteKey' identifier from the Note that was originally created via
    a call to shareNote() and then given to a recipient to access.
    @param authenticationToken
    An optional authenticationToken that identifies the user accessing the
    shared note. This parameter may be required to access some shared notes.
    @throws EDAMUserException <ul>
    <li> PERMISSION_DENIED "Note" - the Note with that GUID is either not
    shared, or the noteKey doesn't match the current key for this note
    </li>
    <li> PERMISSION_DENIED "authenticationToken" - an authentication token is
    required to access this Note, but either no authentication token or a
    "non-owner" authentication token was provided.
    </li>
    </ul>
    @throws EDAMNotFoundException <ul>
    <li> "guid" - the note with that GUID is not found
    </li>
    </ul>
    @throws EDAMSystemException <ul>
    <li> TAKEN_DOWN "Note" - The specified shared note is taken down (for
    all requesters).
    </li>
    <li> TAKEN_DOWN "Country" - The specified shared note is taken down
    for the requester because of an IP-based country lookup.
    </li>
    </ul>
    Parameters:
    - guid
    - noteKey
    - authenticationToken
    """
    # Synchronous RPC: serialize the request, then block on the reply.
    self.send_authenticateToSharedNote(guid, noteKey, authenticationToken)
    return self.recv_authenticateToSharedNote()
def send_authenticateToSharedNote(self, guid, noteKey, authenticationToken):
    # Serialize an 'authenticateToSharedNote' CALL frame onto the output protocol.
    self._oprot.writeMessageBegin('authenticateToSharedNote', TMessageType.CALL, self._seqid)
    args = authenticateToSharedNote_args()
    args.guid = guid
    args.noteKey = noteKey
    args.authenticationToken = authenticationToken
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    # Flush so the request actually goes out on the wire.
    self._oprot.trans.flush()
def recv_authenticateToSharedNote(self, ):
    # Read one reply frame for authenticateToSharedNote.
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        # RPC-framework-level failure: decode and re-raise it.
        x = TApplicationException()
        x.read(self._iprot)
        self._iprot.readMessageEnd()
        raise x
    result = authenticateToSharedNote_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    # Declared (application-level) exceptions travel inside the result struct.
    if result.userException is not None:
        raise result.userException
    if result.notFoundException is not None:
        raise result.notFoundException
    if result.systemException is not None:
        raise result.systemException
    raise TApplicationException(TApplicationException.MISSING_RESULT, "authenticateToSharedNote failed: unknown result");
def findRelated(self, authenticationToken, query, resultSpec):
    """
    Identify related entities on the service, such as notes,
    notebooks, and tags related to notes or content.
    @param query
    The information about which we are finding related entities.
    @param resultSpec
    Allows the client to indicate the type and quantity of
    information to be returned, allowing a saving of time and
    bandwidth.
    @return
    The result of the query, with information considered
    to likely be relevantly related to the information
    described by the query.
    @throws EDAMUserException <ul>
    <li>BAD_DATA_FORMAT "RelatedQuery.plainText" - If you provided a
    a zero-length plain text value.
    </li>
    <li>BAD_DATA_FORMAT "RelatedQuery.noteGuid" - If you provided an
    invalid Note GUID, that is, one that does not match the constraints
    defined by EDAM_GUID_LEN_MIN, EDAM_GUID_LEN_MAX, EDAM_GUID_REGEX.
    </li>
    <li> BAD_DATA_FORMAT "NoteFilter.notebookGuid" - if malformed
    </li>
    <li> BAD_DATA_FORMAT "NoteFilter.tagGuids" - if any are malformed
    </li>
    <li> BAD_DATA_FORMAT "NoteFilter.words" - if search string too long
    </li>
    <li>PERMISSION_DENIED "Note" - If the caller does not have access to
    the note identified by RelatedQuery.noteGuid.
    </li>
    <li>DATA_REQUIRED "RelatedResultSpec" - If you did not set any values
    in the result spec.
    </li>
    </ul>
    @throws EDAMNotFoundException <ul>
    <li>"RelatedQuery.noteGuid" - the note with that GUID is not
    found, if that field has been set in the query.
    </li>
    </ul>
    Parameters:
    - authenticationToken
    - query
    - resultSpec
    """
    # Synchronous RPC: serialize the request, then block on the reply.
    self.send_findRelated(authenticationToken, query, resultSpec)
    return self.recv_findRelated()
def send_findRelated(self, authenticationToken, query, resultSpec):
    # Serialize a 'findRelated' CALL frame onto the output protocol.
    self._oprot.writeMessageBegin('findRelated', TMessageType.CALL, self._seqid)
    args = findRelated_args()
    args.authenticationToken = authenticationToken
    args.query = query
    args.resultSpec = resultSpec
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    # Flush so the request actually goes out on the wire.
    self._oprot.trans.flush()
def recv_findRelated(self, ):
    """Read the findRelated reply and return its payload.

    Raises the transported TApplicationException on a protocol-level
    EXCEPTION frame, re-raises any declared service exception carried in
    the result struct, and raises MISSING_RESULT when no field is set.
    """
    iprot = self._iprot
    (fname, mtype, rseqid) = iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
        app_exc = TApplicationException()
        app_exc.read(iprot)
        iprot.readMessageEnd()
        raise app_exc
    result = findRelated_result()
    result.read(iprot)
    iprot.readMessageEnd()
    if result.success is not None:
        return result.success
    # At most one exception slot is populated; preserve the original
    # precedence: user, then system, then notFound.
    for carried in (result.userException, result.systemException, result.notFoundException):
        if carried is not None:
            raise carried
    raise TApplicationException(TApplicationException.MISSING_RESULT, "findRelated failed: unknown result")
class Processor(Iface, TProcessor):
def __init__(self, handler):
    """Bind the service handler and build the wire-name -> method dispatch table.

    Every service method name maps to the Processor.process_<name> stub,
    which decodes arguments, invokes the handler, and writes the reply.
    """
    self._handler = handler
    self._processMap = {}
    # Registration order matches the service IDL declaration order.
    method_names = (
        "getSyncState", "getSyncStateWithMetrics", "getSyncChunk",
        "getFilteredSyncChunk", "getLinkedNotebookSyncState",
        "getLinkedNotebookSyncChunk", "listNotebooks", "getNotebook",
        "getDefaultNotebook", "createNotebook", "updateNotebook",
        "expungeNotebook", "listTags", "listTagsByNotebook", "getTag",
        "createTag", "updateTag", "untagAll", "expungeTag", "listSearches",
        "getSearch", "createSearch", "updateSearch", "expungeSearch",
        "findNotes", "findNoteOffset", "findNotesMetadata", "findNoteCounts",
        "getNote", "getNoteApplicationData", "getNoteApplicationDataEntry",
        "setNoteApplicationDataEntry", "unsetNoteApplicationDataEntry",
        "getNoteContent", "getNoteSearchText", "getResourceSearchText",
        "getNoteTagNames", "createNote", "updateNote", "deleteNote",
        "expungeNote", "expungeNotes", "expungeInactiveNotes", "copyNote",
        "listNoteVersions", "getNoteVersion", "getResource",
        "getResourceApplicationData", "getResourceApplicationDataEntry",
        "setResourceApplicationDataEntry", "unsetResourceApplicationDataEntry",
        "updateResource", "getResourceData", "getResourceByHash",
        "getResourceRecognition", "getResourceAlternateData",
        "getResourceAttributes", "getPublicNotebook", "createSharedNotebook",
        "updateSharedNotebook", "setSharedNotebookRecipientSettings",
        "sendMessageToSharedNotebookMembers", "listSharedNotebooks",
        "expungeSharedNotebooks", "createLinkedNotebook",
        "updateLinkedNotebook", "listLinkedNotebooks",
        "expungeLinkedNotebook", "authenticateToSharedNotebook",
        "getSharedNotebookByAuth", "emailNote", "shareNote",
        "stopSharingNote", "authenticateToSharedNote", "findRelated",
    )
    for name in method_names:
        self._processMap[name] = getattr(Processor, "process_" + name)
def process(self, iprot, oprot):
    """Read one incoming message from iprot and dispatch it by method name.

    Unknown methods are skipped on the wire and answered with an
    UNKNOWN_METHOD TApplicationException so the client is not left
    waiting; in that case the method returns None (as before).
    Returns True after a successful dispatch.
    """
    # Fix: the generated code named this tuple element `type`, shadowing
    # the builtin; renamed to `mtype` (the value is otherwise unused here).
    (name, mtype, seqid) = iprot.readMessageBegin()
    if name not in self._processMap:
        # Drain the unknown request body before replying with an exception.
        iprot.skip(TType.STRUCT)
        iprot.readMessageEnd()
        x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
        oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
        x.write(oprot)
        oprot.writeMessageEnd()
        oprot.trans.flush()
        return
    else:
        self._processMap[name](self, seqid, iprot, oprot)
    return True
def process_getSyncState(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.getSyncState, map declared
    # EDAM exceptions into the result struct, and write the REPLY frame.
    args = getSyncState_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getSyncState_result()
    try:
        result.success = self._handler.getSyncState(args.authenticationToken)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("getSyncState", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getSyncStateWithMetrics(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.getSyncStateWithMetrics,
    # capture declared EDAM exceptions, write the REPLY frame.
    args = getSyncStateWithMetrics_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getSyncStateWithMetrics_result()
    try:
        result.success = self._handler.getSyncStateWithMetrics(args.authenticationToken, args.clientMetrics)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("getSyncStateWithMetrics", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getSyncChunk(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.getSyncChunk, capture
    # declared EDAM exceptions, write the REPLY frame.
    args = getSyncChunk_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getSyncChunk_result()
    try:
        result.success = self._handler.getSyncChunk(args.authenticationToken, args.afterUSN, args.maxEntries, args.fullSyncOnly)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("getSyncChunk", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getFilteredSyncChunk(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.getFilteredSyncChunk,
    # capture declared EDAM exceptions, write the REPLY frame.
    args = getFilteredSyncChunk_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getFilteredSyncChunk_result()
    try:
        result.success = self._handler.getFilteredSyncChunk(args.authenticationToken, args.afterUSN, args.maxEntries, args.filter)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("getFilteredSyncChunk", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getLinkedNotebookSyncState(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.getLinkedNotebookSyncState,
    # capture declared EDAM exceptions (incl. notFound), write the REPLY frame.
    args = getLinkedNotebookSyncState_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getLinkedNotebookSyncState_result()
    try:
        result.success = self._handler.getLinkedNotebookSyncState(args.authenticationToken, args.linkedNotebook)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("getLinkedNotebookSyncState", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getLinkedNotebookSyncChunk(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.getLinkedNotebookSyncChunk,
    # capture declared EDAM exceptions (incl. notFound), write the REPLY frame.
    args = getLinkedNotebookSyncChunk_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getLinkedNotebookSyncChunk_result()
    try:
        result.success = self._handler.getLinkedNotebookSyncChunk(args.authenticationToken, args.linkedNotebook, args.afterUSN, args.maxEntries, args.fullSyncOnly)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("getLinkedNotebookSyncChunk", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_listNotebooks(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.listNotebooks, capture
    # declared EDAM exceptions, write the REPLY frame.
    args = listNotebooks_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = listNotebooks_result()
    try:
        result.success = self._handler.listNotebooks(args.authenticationToken)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("listNotebooks", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getNotebook(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.getNotebook, capture
    # declared EDAM exceptions (incl. notFound), write the REPLY frame.
    args = getNotebook_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getNotebook_result()
    try:
        result.success = self._handler.getNotebook(args.authenticationToken, args.guid)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("getNotebook", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getDefaultNotebook(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.getDefaultNotebook, capture
    # declared EDAM exceptions, write the REPLY frame.
    args = getDefaultNotebook_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getDefaultNotebook_result()
    try:
        result.success = self._handler.getDefaultNotebook(args.authenticationToken)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("getDefaultNotebook", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_createNotebook(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.createNotebook, capture
    # declared EDAM exceptions, write the REPLY frame.
    args = createNotebook_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = createNotebook_result()
    try:
        result.success = self._handler.createNotebook(args.authenticationToken, args.notebook)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("createNotebook", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_updateNotebook(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.updateNotebook, capture
    # declared EDAM exceptions (incl. notFound), write the REPLY frame.
    args = updateNotebook_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = updateNotebook_result()
    try:
        result.success = self._handler.updateNotebook(args.authenticationToken, args.notebook)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("updateNotebook", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_expungeNotebook(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.expungeNotebook, capture
    # declared EDAM exceptions (incl. notFound), write the REPLY frame.
    args = expungeNotebook_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = expungeNotebook_result()
    try:
        result.success = self._handler.expungeNotebook(args.authenticationToken, args.guid)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("expungeNotebook", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_listTags(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.listTags, capture declared
    # EDAM exceptions, write the REPLY frame.
    args = listTags_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = listTags_result()
    try:
        result.success = self._handler.listTags(args.authenticationToken)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("listTags", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_listTagsByNotebook(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.listTagsByNotebook, capture
    # declared EDAM exceptions (incl. notFound), write the REPLY frame.
    args = listTagsByNotebook_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = listTagsByNotebook_result()
    try:
        result.success = self._handler.listTagsByNotebook(args.authenticationToken, args.notebookGuid)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("listTagsByNotebook", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getTag(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.getTag, capture declared
    # EDAM exceptions (incl. notFound), write the REPLY frame.
    args = getTag_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getTag_result()
    try:
        result.success = self._handler.getTag(args.authenticationToken, args.guid)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("getTag", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_createTag(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.createTag, capture declared
    # EDAM exceptions (incl. notFound), write the REPLY frame.
    args = createTag_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = createTag_result()
    try:
        result.success = self._handler.createTag(args.authenticationToken, args.tag)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("createTag", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_updateTag(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.updateTag, capture declared
    # EDAM exceptions (incl. notFound), write the REPLY frame.
    args = updateTag_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = updateTag_result()
    try:
        result.success = self._handler.updateTag(args.authenticationToken, args.tag)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("updateTag", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_untagAll(self, seqid, iprot, oprot):
    # Server stub for a void method: invoke handler.untagAll without
    # assigning result.success; only exceptions populate the result struct.
    args = untagAll_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = untagAll_result()
    try:
        self._handler.untagAll(args.authenticationToken, args.guid)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("untagAll", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_expungeTag(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.expungeTag, capture declared
    # EDAM exceptions (incl. notFound), write the REPLY frame.
    args = expungeTag_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = expungeTag_result()
    try:
        result.success = self._handler.expungeTag(args.authenticationToken, args.guid)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("expungeTag", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_listSearches(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.listSearches, capture
    # declared EDAM exceptions, write the REPLY frame.
    args = listSearches_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = listSearches_result()
    try:
        result.success = self._handler.listSearches(args.authenticationToken)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("listSearches", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getSearch(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.getSearch, capture declared
    # EDAM exceptions (incl. notFound), write the REPLY frame.
    args = getSearch_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getSearch_result()
    try:
        result.success = self._handler.getSearch(args.authenticationToken, args.guid)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("getSearch", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_createSearch(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.createSearch, capture
    # declared EDAM exceptions, write the REPLY frame.
    args = createSearch_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = createSearch_result()
    try:
        result.success = self._handler.createSearch(args.authenticationToken, args.search)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("createSearch", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_updateSearch(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.updateSearch, capture
    # declared EDAM exceptions (incl. notFound), write the REPLY frame.
    args = updateSearch_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = updateSearch_result()
    try:
        result.success = self._handler.updateSearch(args.authenticationToken, args.search)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("updateSearch", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_expungeSearch(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.expungeSearch, capture
    # declared EDAM exceptions (incl. notFound), write the REPLY frame.
    args = expungeSearch_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = expungeSearch_result()
    try:
        result.success = self._handler.expungeSearch(args.authenticationToken, args.guid)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("expungeSearch", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_findNotes(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.findNotes, capture declared
    # EDAM exceptions (incl. notFound), write the REPLY frame.
    args = findNotes_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = findNotes_result()
    try:
        result.success = self._handler.findNotes(args.authenticationToken, args.filter, args.offset, args.maxNotes)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("findNotes", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_findNoteOffset(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.findNoteOffset, capture
    # declared EDAM exceptions (incl. notFound), write the REPLY frame.
    args = findNoteOffset_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = findNoteOffset_result()
    try:
        result.success = self._handler.findNoteOffset(args.authenticationToken, args.filter, args.guid)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("findNoteOffset", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_findNotesMetadata(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.findNotesMetadata, capture
    # declared EDAM exceptions (incl. notFound), write the REPLY frame.
    args = findNotesMetadata_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = findNotesMetadata_result()
    try:
        result.success = self._handler.findNotesMetadata(args.authenticationToken, args.filter, args.offset, args.maxNotes, args.resultSpec)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("findNotesMetadata", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_findNoteCounts(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.findNoteCounts, capture
    # declared EDAM exceptions (incl. notFound), write the REPLY frame.
    args = findNoteCounts_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = findNoteCounts_result()
    try:
        result.success = self._handler.findNoteCounts(args.authenticationToken, args.filter, args.withTrash)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("findNoteCounts", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getNote(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.getNote, capture declared
    # EDAM exceptions (incl. notFound), write the REPLY frame.
    args = getNote_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getNote_result()
    try:
        result.success = self._handler.getNote(args.authenticationToken, args.guid, args.withContent, args.withResourcesData, args.withResourcesRecognition, args.withResourcesAlternateData)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("getNote", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getNoteApplicationData(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.getNoteApplicationData,
    # capture declared EDAM exceptions (incl. notFound), write the REPLY frame.
    args = getNoteApplicationData_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getNoteApplicationData_result()
    try:
        result.success = self._handler.getNoteApplicationData(args.authenticationToken, args.guid)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("getNoteApplicationData", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getNoteApplicationDataEntry(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.getNoteApplicationDataEntry,
    # capture declared EDAM exceptions (incl. notFound), write the REPLY frame.
    args = getNoteApplicationDataEntry_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getNoteApplicationDataEntry_result()
    try:
        result.success = self._handler.getNoteApplicationDataEntry(args.authenticationToken, args.guid, args.key)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("getNoteApplicationDataEntry", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_setNoteApplicationDataEntry(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.setNoteApplicationDataEntry,
    # capture declared EDAM exceptions (incl. notFound), write the REPLY frame.
    args = setNoteApplicationDataEntry_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = setNoteApplicationDataEntry_result()
    try:
        result.success = self._handler.setNoteApplicationDataEntry(args.authenticationToken, args.guid, args.key, args.value)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("setNoteApplicationDataEntry", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_unsetNoteApplicationDataEntry(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.unsetNoteApplicationDataEntry,
    # capture declared EDAM exceptions (incl. notFound), write the REPLY frame.
    args = unsetNoteApplicationDataEntry_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = unsetNoteApplicationDataEntry_result()
    try:
        result.success = self._handler.unsetNoteApplicationDataEntry(args.authenticationToken, args.guid, args.key)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("unsetNoteApplicationDataEntry", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getNoteContent(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.getNoteContent, capture
    # declared EDAM exceptions (incl. notFound), write the REPLY frame.
    args = getNoteContent_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getNoteContent_result()
    try:
        result.success = self._handler.getNoteContent(args.authenticationToken, args.guid)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("getNoteContent", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getNoteSearchText(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.getNoteSearchText, capture
    # declared EDAM exceptions (incl. notFound), write the REPLY frame.
    args = getNoteSearchText_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getNoteSearchText_result()
    try:
        result.success = self._handler.getNoteSearchText(args.authenticationToken, args.guid, args.noteOnly, args.tokenizeForIndexing)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("getNoteSearchText", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getResourceSearchText(self, seqid, iprot, oprot):
    # Server stub: decode args, invoke handler.getResourceSearchText,
    # capture declared EDAM exceptions (incl. notFound), write the REPLY frame.
    args = getResourceSearchText_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getResourceSearchText_result()
    try:
        result.success = self._handler.getResourceSearchText(args.authenticationToken, args.guid)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("getResourceSearchText", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getNoteTagNames(self, seqid, iprot, oprot):
    """Serve one getNoteTagNames RPC: decode args, call the handler, write a REPLY."""
    args = getNoteTagNames_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getNoteTagNames_result()
    try:
        result.success = self._handler.getNoteTagNames(args.authenticationToken, args.guid)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("getNoteTagNames", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_createNote(self, seqid, iprot, oprot):
    """Serve one createNote RPC: decode args, call the handler, write a REPLY."""
    args = createNote_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = createNote_result()
    try:
        result.success = self._handler.createNote(args.authenticationToken, args.note)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("createNote", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_updateNote(self, seqid, iprot, oprot):
    """Serve one updateNote RPC: decode args, call the handler, write a REPLY."""
    args = updateNote_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = updateNote_result()
    try:
        result.success = self._handler.updateNote(args.authenticationToken, args.note)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("updateNote", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_deleteNote(self, seqid, iprot, oprot):
    """Serve one deleteNote RPC: decode args, call the handler, write a REPLY."""
    args = deleteNote_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = deleteNote_result()
    try:
        result.success = self._handler.deleteNote(args.authenticationToken, args.guid)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("deleteNote", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_expungeNote(self, seqid, iprot, oprot):
    """Serve one expungeNote RPC: decode args, call the handler, write a REPLY."""
    args = expungeNote_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = expungeNote_result()
    try:
        result.success = self._handler.expungeNote(args.authenticationToken, args.guid)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("expungeNote", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_expungeNotes(self, seqid, iprot, oprot):
    """Serve one expungeNotes RPC: decode args, call the handler, write a REPLY."""
    args = expungeNotes_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = expungeNotes_result()
    try:
        result.success = self._handler.expungeNotes(args.authenticationToken, args.noteGuids)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("expungeNotes", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_expungeInactiveNotes(self, seqid, iprot, oprot):
    """Serve one expungeInactiveNotes RPC: decode args, call the handler, write a REPLY.

    Note: this method declares only user/system exceptions (no not-found case).
    """
    args = expungeInactiveNotes_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = expungeInactiveNotes_result()
    try:
        result.success = self._handler.expungeInactiveNotes(args.authenticationToken)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("expungeInactiveNotes", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_copyNote(self, seqid, iprot, oprot):
    """Serve one copyNote RPC: decode args, call the handler, write a REPLY."""
    args = copyNote_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = copyNote_result()
    try:
        result.success = self._handler.copyNote(args.authenticationToken, args.noteGuid, args.toNotebookGuid)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("copyNote", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_listNoteVersions(self, seqid, iprot, oprot):
    """Serve one listNoteVersions RPC: decode args, call the handler, write a REPLY."""
    args = listNoteVersions_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = listNoteVersions_result()
    try:
        result.success = self._handler.listNoteVersions(args.authenticationToken, args.noteGuid)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("listNoteVersions", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getNoteVersion(self, seqid, iprot, oprot):
    """Serve one getNoteVersion RPC: decode args, call the handler, write a REPLY."""
    args = getNoteVersion_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getNoteVersion_result()
    try:
        result.success = self._handler.getNoteVersion(args.authenticationToken, args.noteGuid, args.updateSequenceNum, args.withResourcesData, args.withResourcesRecognition, args.withResourcesAlternateData)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("getNoteVersion", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getResource(self, seqid, iprot, oprot):
    """Serve one getResource RPC: decode args, call the handler, write a REPLY."""
    args = getResource_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getResource_result()
    try:
        result.success = self._handler.getResource(args.authenticationToken, args.guid, args.withData, args.withRecognition, args.withAttributes, args.withAlternateData)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("getResource", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getResourceApplicationData(self, seqid, iprot, oprot):
    """Serve one getResourceApplicationData RPC: decode args, call the handler, write a REPLY."""
    args = getResourceApplicationData_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getResourceApplicationData_result()
    try:
        result.success = self._handler.getResourceApplicationData(args.authenticationToken, args.guid)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("getResourceApplicationData", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getResourceApplicationDataEntry(self, seqid, iprot, oprot):
    """Serve one getResourceApplicationDataEntry RPC: decode args, call the handler, write a REPLY."""
    args = getResourceApplicationDataEntry_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getResourceApplicationDataEntry_result()
    try:
        result.success = self._handler.getResourceApplicationDataEntry(args.authenticationToken, args.guid, args.key)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("getResourceApplicationDataEntry", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_setResourceApplicationDataEntry(self, seqid, iprot, oprot):
    """Serve one setResourceApplicationDataEntry RPC: decode args, call the handler, write a REPLY."""
    args = setResourceApplicationDataEntry_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = setResourceApplicationDataEntry_result()
    try:
        result.success = self._handler.setResourceApplicationDataEntry(args.authenticationToken, args.guid, args.key, args.value)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("setResourceApplicationDataEntry", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_unsetResourceApplicationDataEntry(self, seqid, iprot, oprot):
    """Serve one unsetResourceApplicationDataEntry RPC: decode args, call the handler, write a REPLY."""
    args = unsetResourceApplicationDataEntry_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = unsetResourceApplicationDataEntry_result()
    try:
        result.success = self._handler.unsetResourceApplicationDataEntry(args.authenticationToken, args.guid, args.key)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("unsetResourceApplicationDataEntry", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_updateResource(self, seqid, iprot, oprot):
    """Serve one updateResource RPC: decode args, call the handler, write a REPLY."""
    args = updateResource_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = updateResource_result()
    try:
        result.success = self._handler.updateResource(args.authenticationToken, args.resource)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("updateResource", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getResourceData(self, seqid, iprot, oprot):
    """Serve one getResourceData RPC: decode args, call the handler, write a REPLY."""
    args = getResourceData_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getResourceData_result()
    try:
        result.success = self._handler.getResourceData(args.authenticationToken, args.guid)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("getResourceData", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getResourceByHash(self, seqid, iprot, oprot):
    """Serve one getResourceByHash RPC: decode args, call the handler, write a REPLY."""
    args = getResourceByHash_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getResourceByHash_result()
    try:
        result.success = self._handler.getResourceByHash(args.authenticationToken, args.noteGuid, args.contentHash, args.withData, args.withRecognition, args.withAlternateData)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("getResourceByHash", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getResourceRecognition(self, seqid, iprot, oprot):
    """Serve one getResourceRecognition RPC: decode args, call the handler, write a REPLY."""
    args = getResourceRecognition_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getResourceRecognition_result()
    try:
        result.success = self._handler.getResourceRecognition(args.authenticationToken, args.guid)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("getResourceRecognition", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getResourceAlternateData(self, seqid, iprot, oprot):
    """Serve one getResourceAlternateData RPC: decode args, call the handler, write a REPLY."""
    args = getResourceAlternateData_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getResourceAlternateData_result()
    try:
        result.success = self._handler.getResourceAlternateData(args.authenticationToken, args.guid)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("getResourceAlternateData", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getResourceAttributes(self, seqid, iprot, oprot):
    """Serve one getResourceAttributes RPC: decode args, call the handler, write a REPLY."""
    args = getResourceAttributes_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getResourceAttributes_result()
    try:
        result.success = self._handler.getResourceAttributes(args.authenticationToken, args.guid)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("getResourceAttributes", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getPublicNotebook(self, seqid, iprot, oprot):
    """Serve one getPublicNotebook RPC: decode args, call the handler, write a REPLY.

    Note: takes a userId/publicUri pair (no auth token) and declares only
    system/not-found exceptions.
    """
    args = getPublicNotebook_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getPublicNotebook_result()
    try:
        result.success = self._handler.getPublicNotebook(args.userId, args.publicUri)
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("getPublicNotebook", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_createSharedNotebook(self, seqid, iprot, oprot):
    """Serve one createSharedNotebook RPC: decode args, call the handler, write a REPLY."""
    args = createSharedNotebook_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = createSharedNotebook_result()
    try:
        result.success = self._handler.createSharedNotebook(args.authenticationToken, args.sharedNotebook)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("createSharedNotebook", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_updateSharedNotebook(self, seqid, iprot, oprot):
    """Serve one updateSharedNotebook RPC: decode args, call the handler, write a REPLY."""
    args = updateSharedNotebook_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = updateSharedNotebook_result()
    try:
        result.success = self._handler.updateSharedNotebook(args.authenticationToken, args.sharedNotebook)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("updateSharedNotebook", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_setSharedNotebookRecipientSettings(self, seqid, iprot, oprot):
    """Serve one setSharedNotebookRecipientSettings RPC: decode args, call the handler, write a REPLY."""
    args = setSharedNotebookRecipientSettings_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = setSharedNotebookRecipientSettings_result()
    try:
        result.success = self._handler.setSharedNotebookRecipientSettings(args.authenticationToken, args.sharedNotebookId, args.recipientSettings)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("setSharedNotebookRecipientSettings", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_sendMessageToSharedNotebookMembers(self, seqid, iprot, oprot):
    """Serve one sendMessageToSharedNotebookMembers RPC: decode args, call the handler, write a REPLY."""
    args = sendMessageToSharedNotebookMembers_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = sendMessageToSharedNotebookMembers_result()
    try:
        result.success = self._handler.sendMessageToSharedNotebookMembers(args.authenticationToken, args.notebookGuid, args.messageText, args.recipients)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("sendMessageToSharedNotebookMembers", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_listSharedNotebooks(self, seqid, iprot, oprot):
    """Serve one listSharedNotebooks RPC: decode args, call the handler, write a REPLY."""
    args = listSharedNotebooks_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = listSharedNotebooks_result()
    try:
        result.success = self._handler.listSharedNotebooks(args.authenticationToken)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("listSharedNotebooks", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_expungeSharedNotebooks(self, seqid, iprot, oprot):
    """Serve one expungeSharedNotebooks RPC: decode args, call the handler, write a REPLY."""
    args = expungeSharedNotebooks_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = expungeSharedNotebooks_result()
    try:
        result.success = self._handler.expungeSharedNotebooks(args.authenticationToken, args.sharedNotebookIds)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("expungeSharedNotebooks", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_createLinkedNotebook(self, seqid, iprot, oprot):
    """Serve one createLinkedNotebook RPC: decode args, call the handler, write a REPLY."""
    args = createLinkedNotebook_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = createLinkedNotebook_result()
    try:
        result.success = self._handler.createLinkedNotebook(args.authenticationToken, args.linkedNotebook)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("createLinkedNotebook", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_updateLinkedNotebook(self, seqid, iprot, oprot):
    """Serve one updateLinkedNotebook RPC: decode args, call the handler, write a REPLY."""
    args = updateLinkedNotebook_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = updateLinkedNotebook_result()
    try:
        result.success = self._handler.updateLinkedNotebook(args.authenticationToken, args.linkedNotebook)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("updateLinkedNotebook", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_listLinkedNotebooks(self, seqid, iprot, oprot):
    """Serve one listLinkedNotebooks RPC: decode args, call the handler, write a REPLY."""
    args = listLinkedNotebooks_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = listLinkedNotebooks_result()
    try:
        result.success = self._handler.listLinkedNotebooks(args.authenticationToken)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("listLinkedNotebooks", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_expungeLinkedNotebook(self, seqid, iprot, oprot):
    """Serve one expungeLinkedNotebook RPC: decode args, call the handler, write a REPLY."""
    args = expungeLinkedNotebook_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = expungeLinkedNotebook_result()
    try:
        result.success = self._handler.expungeLinkedNotebook(args.authenticationToken, args.guid)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("expungeLinkedNotebook", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_authenticateToSharedNotebook(self, seqid, iprot, oprot):
    """Serve one authenticateToSharedNotebook RPC: decode args, call the handler, write a REPLY."""
    args = authenticateToSharedNotebook_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = authenticateToSharedNotebook_result()
    try:
        # Note the argument order: shareKey first, then the auth token.
        result.success = self._handler.authenticateToSharedNotebook(args.shareKey, args.authenticationToken)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("authenticateToSharedNotebook", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_getSharedNotebookByAuth(self, seqid, iprot, oprot):
    """Serve one getSharedNotebookByAuth RPC: decode args, call the handler, write a REPLY."""
    args = getSharedNotebookByAuth_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = getSharedNotebookByAuth_result()
    try:
        result.success = self._handler.getSharedNotebookByAuth(args.authenticationToken)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("getSharedNotebookByAuth", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_emailNote(self, seqid, iprot, oprot):
    """Serve one emailNote RPC (void return): decode args, call the handler, write a REPLY.

    The handler returns nothing, so no ``result.success`` is assigned; the
    empty result struct still acknowledges completion to the client.
    """
    args = emailNote_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = emailNote_result()
    try:
        self._handler.emailNote(args.authenticationToken, args.parameters)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("emailNote", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_shareNote(self, seqid, iprot, oprot):
    """Serve one shareNote RPC: decode args, call the handler, write a REPLY."""
    args = shareNote_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = shareNote_result()
    try:
        result.success = self._handler.shareNote(args.authenticationToken, args.guid)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("shareNote", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_stopSharingNote(self, seqid, iprot, oprot):
    """Serve one stopSharingNote RPC (void return): decode args, call the handler, write a REPLY.

    The handler returns nothing, so no ``result.success`` is assigned.
    """
    args = stopSharingNote_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = stopSharingNote_result()
    try:
        self._handler.stopSharingNote(args.authenticationToken, args.guid)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("stopSharingNote", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_authenticateToSharedNote(self, seqid, iprot, oprot):
    """Serve one authenticateToSharedNote RPC: decode args, call the handler, write a REPLY."""
    args = authenticateToSharedNote_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = authenticateToSharedNote_result()
    try:
        # Note the argument order: guid and noteKey precede the auth token.
        result.success = self._handler.authenticateToSharedNote(args.guid, args.noteKey, args.authenticationToken)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    oprot.writeMessageBegin("authenticateToSharedNote", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
def process_findRelated(self, seqid, iprot, oprot):
    """Serve one findRelated RPC: decode args, call the handler, write a REPLY."""
    args = findRelated_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = findRelated_result()
    try:
        result.success = self._handler.findRelated(args.authenticationToken, args.query, args.resultSpec)
    except evernote.edam.error.ttypes.EDAMUserException as userException:
        result.userException = userException
    except evernote.edam.error.ttypes.EDAMSystemException as systemException:
        result.systemException = systemException
    except evernote.edam.error.ttypes.EDAMNotFoundException as notFoundException:
        result.notFoundException = notFoundException
    oprot.writeMessageBegin("findRelated", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class getSyncState_args(object):
    """
    Thrift argument struct for the getSyncState RPC.

    Attributes:
     - authenticationToken
    """

    # Generated field metadata: index 1 is the auth token string.
    thrift_spec = (
        None, # 0
        (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    )

    def __init__(self, authenticationToken=None,):
        self.authenticationToken = authenticationToken

    def read(self, iprot):
        """Populate this struct from the protocol reader *iprot*."""
        # Fast path: the accelerated C decoder handles the whole struct.
        accelerated = (
            iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
            and isinstance(iprot.trans, TTransport.CReadableTransport)
            and self.thrift_spec is not None
            and fastbinary is not None
        )
        if accelerated:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: walk the fields one at a time until STOP.
        iprot.readStructBegin()
        while True:
            _, field_type, field_id = iprot.readFieldBegin()
            if field_type == TType.STOP:
                break
            if field_id == 1 and field_type == TType.STRING:
                self.authenticationToken = iprot.readString().decode('utf-8')
            else:
                # Unknown field id or unexpected wire type: skip it.
                iprot.skip(field_type)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the protocol writer *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getSyncState_args')
        token = self.authenticationToken
        if token is not None:
            oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
            oprot.writeString(token.encode('utf-8'))
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields; always valid."""
        return

    def __repr__(self):
        parts = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(parts))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class getSyncState_result(object):
    """
    Thrift-generated result struct for the getSyncState RPC: carries the call's
    return value in 'success' or one of its declared exception fields.

    Attributes:
     - success
     - userException
     - systemException
    """
    # Per-field wire metadata consumed by the pure-Python codec below and by
    # the optional C 'fastbinary' accelerator:
    # (field id, wire type, name, nested type spec, default).
    thrift_spec = (
        (0, TType.STRUCT, 'success', (SyncState, SyncState.thrift_spec), None, ), # 0
        (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
        (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    )

    def __init__(self, success=None, userException=None, systemException=None,):
        self.success = success
        self.userException = userException
        self.systemException = systemException

    def read(self, iprot):
        """Populate this struct from protocol *iprot*; unknown/mistyped fields are skipped."""
        # Fast path: delegate to the C 'fastbinary' codec when the accelerated
        # binary protocol and a compatible transport are in use.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = SyncState()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.userException = evernote.edam.error.ttypes.EDAMUserException()
                    self.userException.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
                    self.systemException.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to protocol *oprot*; None fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getSyncState_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.userException is not None:
            oprot.writeFieldBegin('userException', TType.STRUCT, 1)
            self.userException.write(oprot)
            oprot.writeFieldEnd()
        if self.systemException is not None:
            oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
            self.systemException.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Validation hook; this struct has no required-field checks."""
        return

    def __repr__(self):
        """Debug representation: class name with all attribute=value pairs."""
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        """Equal iff *other* is the same class with an identical attribute dict."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not (self == other)
class getSyncStateWithMetrics_args(object):
    """
    Thrift-generated argument struct for the getSyncStateWithMetrics RPC.

    Attributes:
     - authenticationToken
     - clientMetrics
    """
    # Per-field wire metadata: (field id, wire type, name, nested spec, default).
    thrift_spec = (
        None, # 0
        (1, TType.STRING, 'authenticationToken', None, None, ), # 1
        (2, TType.STRUCT, 'clientMetrics', (ClientUsageMetrics, ClientUsageMetrics.thrift_spec), None, ), # 2
    )

    def __init__(self, authenticationToken=None, clientMetrics=None,):
        self.authenticationToken = authenticationToken
        self.clientMetrics = clientMetrics

    def read(self, iprot):
        """Populate this struct from protocol *iprot*; unknown/mistyped fields are skipped."""
        # Fast path via the C 'fastbinary' codec when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.authenticationToken = iprot.readString().decode('utf-8')
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.clientMetrics = ClientUsageMetrics()
                    self.clientMetrics.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to protocol *oprot*; None fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getSyncStateWithMetrics_args')
        if self.authenticationToken is not None:
            oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
            oprot.writeString(self.authenticationToken.encode('utf-8'))
            oprot.writeFieldEnd()
        if self.clientMetrics is not None:
            oprot.writeFieldBegin('clientMetrics', TType.STRUCT, 2)
            self.clientMetrics.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Validation hook; this struct has no required-field checks."""
        return

    def __repr__(self):
        """Debug representation: class name with all attribute=value pairs."""
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        """Equal iff *other* is the same class with an identical attribute dict."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not (self == other)
class getSyncStateWithMetrics_result(object):
    """
    Thrift-generated result struct for the getSyncStateWithMetrics RPC: carries
    the call's return value in 'success' or one of its declared exception fields.

    Attributes:
     - success
     - userException
     - systemException
    """
    # Per-field wire metadata: (field id, wire type, name, nested spec, default).
    thrift_spec = (
        (0, TType.STRUCT, 'success', (SyncState, SyncState.thrift_spec), None, ), # 0
        (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
        (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    )

    def __init__(self, success=None, userException=None, systemException=None,):
        self.success = success
        self.userException = userException
        self.systemException = systemException

    def read(self, iprot):
        """Populate this struct from protocol *iprot*; unknown/mistyped fields are skipped."""
        # Fast path via the C 'fastbinary' codec when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = SyncState()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.userException = evernote.edam.error.ttypes.EDAMUserException()
                    self.userException.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
                    self.systemException.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to protocol *oprot*; None fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getSyncStateWithMetrics_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.userException is not None:
            oprot.writeFieldBegin('userException', TType.STRUCT, 1)
            self.userException.write(oprot)
            oprot.writeFieldEnd()
        if self.systemException is not None:
            oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
            self.systemException.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Validation hook; this struct has no required-field checks."""
        return

    def __repr__(self):
        """Debug representation: class name with all attribute=value pairs."""
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        """Equal iff *other* is the same class with an identical attribute dict."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not (self == other)
class getSyncChunk_args(object):
    """
    Thrift-generated argument struct for the getSyncChunk RPC.

    Attributes:
     - authenticationToken
     - afterUSN
     - maxEntries
     - fullSyncOnly
    """
    # Per-field wire metadata: (field id, wire type, name, nested spec, default).
    thrift_spec = (
        None, # 0
        (1, TType.STRING, 'authenticationToken', None, None, ), # 1
        (2, TType.I32, 'afterUSN', None, None, ), # 2
        (3, TType.I32, 'maxEntries', None, None, ), # 3
        (4, TType.BOOL, 'fullSyncOnly', None, None, ), # 4
    )

    def __init__(self, authenticationToken=None, afterUSN=None, maxEntries=None, fullSyncOnly=None,):
        self.authenticationToken = authenticationToken
        self.afterUSN = afterUSN
        self.maxEntries = maxEntries
        self.fullSyncOnly = fullSyncOnly

    def read(self, iprot):
        """Populate this struct from protocol *iprot*; unknown/mistyped fields are skipped."""
        # Fast path via the C 'fastbinary' codec when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.authenticationToken = iprot.readString().decode('utf-8')
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.afterUSN = iprot.readI32();
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I32:
                    self.maxEntries = iprot.readI32();
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.BOOL:
                    self.fullSyncOnly = iprot.readBool();
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to protocol *oprot*; None fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getSyncChunk_args')
        if self.authenticationToken is not None:
            oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
            oprot.writeString(self.authenticationToken.encode('utf-8'))
            oprot.writeFieldEnd()
        if self.afterUSN is not None:
            oprot.writeFieldBegin('afterUSN', TType.I32, 2)
            oprot.writeI32(self.afterUSN)
            oprot.writeFieldEnd()
        if self.maxEntries is not None:
            oprot.writeFieldBegin('maxEntries', TType.I32, 3)
            oprot.writeI32(self.maxEntries)
            oprot.writeFieldEnd()
        if self.fullSyncOnly is not None:
            oprot.writeFieldBegin('fullSyncOnly', TType.BOOL, 4)
            oprot.writeBool(self.fullSyncOnly)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Validation hook; this struct has no required-field checks."""
        return

    def __repr__(self):
        """Debug representation: class name with all attribute=value pairs."""
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        """Equal iff *other* is the same class with an identical attribute dict."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not (self == other)
class getSyncChunk_result(object):
    """
    Thrift-generated result struct for the getSyncChunk RPC: carries the call's
    return value in 'success' or one of its declared exception fields.

    Attributes:
     - success
     - userException
     - systemException
    """
    # Per-field wire metadata: (field id, wire type, name, nested spec, default).
    thrift_spec = (
        (0, TType.STRUCT, 'success', (SyncChunk, SyncChunk.thrift_spec), None, ), # 0
        (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
        (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    )

    def __init__(self, success=None, userException=None, systemException=None,):
        self.success = success
        self.userException = userException
        self.systemException = systemException

    def read(self, iprot):
        """Populate this struct from protocol *iprot*; unknown/mistyped fields are skipped."""
        # Fast path via the C 'fastbinary' codec when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = SyncChunk()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.userException = evernote.edam.error.ttypes.EDAMUserException()
                    self.userException.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
                    self.systemException.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to protocol *oprot*; None fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getSyncChunk_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.userException is not None:
            oprot.writeFieldBegin('userException', TType.STRUCT, 1)
            self.userException.write(oprot)
            oprot.writeFieldEnd()
        if self.systemException is not None:
            oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
            self.systemException.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Validation hook; this struct has no required-field checks."""
        return

    def __repr__(self):
        """Debug representation: class name with all attribute=value pairs."""
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        """Equal iff *other* is the same class with an identical attribute dict."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not (self == other)
class getFilteredSyncChunk_args(object):
    """
    Thrift-generated argument struct for the getFilteredSyncChunk RPC.

    Attributes:
     - authenticationToken
     - afterUSN
     - maxEntries
     - filter
    """
    # Per-field wire metadata: (field id, wire type, name, nested spec, default).
    thrift_spec = (
        None, # 0
        (1, TType.STRING, 'authenticationToken', None, None, ), # 1
        (2, TType.I32, 'afterUSN', None, None, ), # 2
        (3, TType.I32, 'maxEntries', None, None, ), # 3
        (4, TType.STRUCT, 'filter', (SyncChunkFilter, SyncChunkFilter.thrift_spec), None, ), # 4
    )

    def __init__(self, authenticationToken=None, afterUSN=None, maxEntries=None, filter=None,):
        self.authenticationToken = authenticationToken
        self.afterUSN = afterUSN
        self.maxEntries = maxEntries
        self.filter = filter

    def read(self, iprot):
        """Populate this struct from protocol *iprot*; unknown/mistyped fields are skipped."""
        # Fast path via the C 'fastbinary' codec when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.authenticationToken = iprot.readString().decode('utf-8')
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.afterUSN = iprot.readI32();
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I32:
                    self.maxEntries = iprot.readI32();
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.STRUCT:
                    self.filter = SyncChunkFilter()
                    self.filter.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to protocol *oprot*; None fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getFilteredSyncChunk_args')
        if self.authenticationToken is not None:
            oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
            oprot.writeString(self.authenticationToken.encode('utf-8'))
            oprot.writeFieldEnd()
        if self.afterUSN is not None:
            oprot.writeFieldBegin('afterUSN', TType.I32, 2)
            oprot.writeI32(self.afterUSN)
            oprot.writeFieldEnd()
        if self.maxEntries is not None:
            oprot.writeFieldBegin('maxEntries', TType.I32, 3)
            oprot.writeI32(self.maxEntries)
            oprot.writeFieldEnd()
        if self.filter is not None:
            oprot.writeFieldBegin('filter', TType.STRUCT, 4)
            self.filter.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Validation hook; this struct has no required-field checks."""
        return

    def __repr__(self):
        """Debug representation: class name with all attribute=value pairs."""
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        """Equal iff *other* is the same class with an identical attribute dict."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not (self == other)
class getFilteredSyncChunk_result(object):
    """
    Thrift-generated result struct for the getFilteredSyncChunk RPC: carries
    the call's return value in 'success' or one of its declared exception fields.

    Attributes:
     - success
     - userException
     - systemException
    """
    # Per-field wire metadata: (field id, wire type, name, nested spec, default).
    thrift_spec = (
        (0, TType.STRUCT, 'success', (SyncChunk, SyncChunk.thrift_spec), None, ), # 0
        (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
        (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    )

    def __init__(self, success=None, userException=None, systemException=None,):
        self.success = success
        self.userException = userException
        self.systemException = systemException

    def read(self, iprot):
        """Populate this struct from protocol *iprot*; unknown/mistyped fields are skipped."""
        # Fast path via the C 'fastbinary' codec when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = SyncChunk()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.userException = evernote.edam.error.ttypes.EDAMUserException()
                    self.userException.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
                    self.systemException.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to protocol *oprot*; None fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getFilteredSyncChunk_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.userException is not None:
            oprot.writeFieldBegin('userException', TType.STRUCT, 1)
            self.userException.write(oprot)
            oprot.writeFieldEnd()
        if self.systemException is not None:
            oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
            self.systemException.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Validation hook; this struct has no required-field checks."""
        return

    def __repr__(self):
        """Debug representation: class name with all attribute=value pairs."""
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        """Equal iff *other* is the same class with an identical attribute dict."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not (self == other)
class getLinkedNotebookSyncState_args(object):
    """
    Thrift-generated argument struct for the getLinkedNotebookSyncState RPC.

    Attributes:
     - authenticationToken
     - linkedNotebook
    """
    # Per-field wire metadata: (field id, wire type, name, nested spec, default).
    thrift_spec = (
        None, # 0
        (1, TType.STRING, 'authenticationToken', None, None, ), # 1
        (2, TType.STRUCT, 'linkedNotebook', (evernote.edam.type.ttypes.LinkedNotebook, evernote.edam.type.ttypes.LinkedNotebook.thrift_spec), None, ), # 2
    )

    def __init__(self, authenticationToken=None, linkedNotebook=None,):
        self.authenticationToken = authenticationToken
        self.linkedNotebook = linkedNotebook

    def read(self, iprot):
        """Populate this struct from protocol *iprot*; unknown/mistyped fields are skipped."""
        # Fast path via the C 'fastbinary' codec when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.authenticationToken = iprot.readString().decode('utf-8')
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.linkedNotebook = evernote.edam.type.ttypes.LinkedNotebook()
                    self.linkedNotebook.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to protocol *oprot*; None fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getLinkedNotebookSyncState_args')
        if self.authenticationToken is not None:
            oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
            oprot.writeString(self.authenticationToken.encode('utf-8'))
            oprot.writeFieldEnd()
        if self.linkedNotebook is not None:
            oprot.writeFieldBegin('linkedNotebook', TType.STRUCT, 2)
            self.linkedNotebook.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Validation hook; this struct has no required-field checks."""
        return

    def __repr__(self):
        """Debug representation: class name with all attribute=value pairs."""
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        """Equal iff *other* is the same class with an identical attribute dict."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not (self == other)
class getLinkedNotebookSyncState_result(object):
    """
    Thrift-generated result struct for the getLinkedNotebookSyncState RPC:
    carries the call's return value in 'success' or one of its declared
    exception fields.

    Attributes:
     - success
     - userException
     - systemException
     - notFoundException
    """
    # Per-field wire metadata: (field id, wire type, name, nested spec, default).
    thrift_spec = (
        (0, TType.STRUCT, 'success', (SyncState, SyncState.thrift_spec), None, ), # 0
        (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
        (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
        (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
    )

    def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
        self.success = success
        self.userException = userException
        self.systemException = systemException
        self.notFoundException = notFoundException

    def read(self, iprot):
        """Populate this struct from protocol *iprot*; unknown/mistyped fields are skipped."""
        # Fast path via the C 'fastbinary' codec when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = SyncState()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.userException = evernote.edam.error.ttypes.EDAMUserException()
                    self.userException.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
                    self.systemException.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
                    self.notFoundException.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to protocol *oprot*; None fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getLinkedNotebookSyncState_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.userException is not None:
            oprot.writeFieldBegin('userException', TType.STRUCT, 1)
            self.userException.write(oprot)
            oprot.writeFieldEnd()
        if self.systemException is not None:
            oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
            self.systemException.write(oprot)
            oprot.writeFieldEnd()
        if self.notFoundException is not None:
            oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
            self.notFoundException.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Validation hook; this struct has no required-field checks."""
        return

    def __repr__(self):
        """Debug representation: class name with all attribute=value pairs."""
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        """Equal iff *other* is the same class with an identical attribute dict."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not (self == other)
class getLinkedNotebookSyncChunk_args(object):
    """
    Thrift-generated argument struct for the getLinkedNotebookSyncChunk RPC.

    Attributes:
     - authenticationToken
     - linkedNotebook
     - afterUSN
     - maxEntries
     - fullSyncOnly
    """
    # Per-field wire metadata: (field id, wire type, name, nested spec, default).
    thrift_spec = (
        None, # 0
        (1, TType.STRING, 'authenticationToken', None, None, ), # 1
        (2, TType.STRUCT, 'linkedNotebook', (evernote.edam.type.ttypes.LinkedNotebook, evernote.edam.type.ttypes.LinkedNotebook.thrift_spec), None, ), # 2
        (3, TType.I32, 'afterUSN', None, None, ), # 3
        (4, TType.I32, 'maxEntries', None, None, ), # 4
        (5, TType.BOOL, 'fullSyncOnly', None, None, ), # 5
    )

    def __init__(self, authenticationToken=None, linkedNotebook=None, afterUSN=None, maxEntries=None, fullSyncOnly=None,):
        self.authenticationToken = authenticationToken
        self.linkedNotebook = linkedNotebook
        self.afterUSN = afterUSN
        self.maxEntries = maxEntries
        self.fullSyncOnly = fullSyncOnly

    def read(self, iprot):
        """Populate this struct from protocol *iprot*; unknown/mistyped fields are skipped."""
        # Fast path via the C 'fastbinary' codec when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.authenticationToken = iprot.readString().decode('utf-8')
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.linkedNotebook = evernote.edam.type.ttypes.LinkedNotebook()
                    self.linkedNotebook.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I32:
                    self.afterUSN = iprot.readI32();
                else:
                    iprot.skip(ftype)
            elif fid == 4:
                if ftype == TType.I32:
                    self.maxEntries = iprot.readI32();
                else:
                    iprot.skip(ftype)
            elif fid == 5:
                if ftype == TType.BOOL:
                    self.fullSyncOnly = iprot.readBool();
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to protocol *oprot*; None fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getLinkedNotebookSyncChunk_args')
        if self.authenticationToken is not None:
            oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
            oprot.writeString(self.authenticationToken.encode('utf-8'))
            oprot.writeFieldEnd()
        if self.linkedNotebook is not None:
            oprot.writeFieldBegin('linkedNotebook', TType.STRUCT, 2)
            self.linkedNotebook.write(oprot)
            oprot.writeFieldEnd()
        if self.afterUSN is not None:
            oprot.writeFieldBegin('afterUSN', TType.I32, 3)
            oprot.writeI32(self.afterUSN)
            oprot.writeFieldEnd()
        if self.maxEntries is not None:
            oprot.writeFieldBegin('maxEntries', TType.I32, 4)
            oprot.writeI32(self.maxEntries)
            oprot.writeFieldEnd()
        if self.fullSyncOnly is not None:
            oprot.writeFieldBegin('fullSyncOnly', TType.BOOL, 5)
            oprot.writeBool(self.fullSyncOnly)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Validation hook; this struct has no required-field checks."""
        return

    def __repr__(self):
        """Debug representation: class name with all attribute=value pairs."""
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        """Equal iff *other* is the same class with an identical attribute dict."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not (self == other)
class getLinkedNotebookSyncChunk_result(object):
    """
    Thrift-generated result struct for the getLinkedNotebookSyncChunk RPC:
    carries the call's return value in 'success' or one of its declared
    exception fields.

    Attributes:
     - success
     - userException
     - systemException
     - notFoundException
    """
    # Per-field wire metadata: (field id, wire type, name, nested spec, default).
    thrift_spec = (
        (0, TType.STRUCT, 'success', (SyncChunk, SyncChunk.thrift_spec), None, ), # 0
        (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
        (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
        (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
    )

    def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
        self.success = success
        self.userException = userException
        self.systemException = systemException
        self.notFoundException = notFoundException

    def read(self, iprot):
        """Populate this struct from protocol *iprot*; unknown/mistyped fields are skipped."""
        # Fast path via the C 'fastbinary' codec when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.STRUCT:
                    self.success = SyncChunk()
                    self.success.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 1:
                if ftype == TType.STRUCT:
                    self.userException = evernote.edam.error.ttypes.EDAMUserException()
                    self.userException.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
                    self.systemException.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
                    self.notFoundException.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to protocol *oprot*; None fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getLinkedNotebookSyncChunk_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.STRUCT, 0)
            self.success.write(oprot)
            oprot.writeFieldEnd()
        if self.userException is not None:
            oprot.writeFieldBegin('userException', TType.STRUCT, 1)
            self.userException.write(oprot)
            oprot.writeFieldEnd()
        if self.systemException is not None:
            oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
            self.systemException.write(oprot)
            oprot.writeFieldEnd()
        if self.notFoundException is not None:
            oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
            self.notFoundException.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Validation hook; this struct has no required-field checks."""
        return

    def __repr__(self):
        """Debug representation: class name with all attribute=value pairs."""
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        """Equal iff *other* is the same class with an identical attribute dict."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not (self == other)
class listNotebooks_args(object):
    """
    Thrift-generated argument struct for the listNotebooks RPC.

    Attributes:
     - authenticationToken
    """
    # Per-field wire metadata: (field id, wire type, name, nested spec, default).
    thrift_spec = (
        None, # 0
        (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    )

    def __init__(self, authenticationToken=None,):
        self.authenticationToken = authenticationToken

    def read(self, iprot):
        """Populate this struct from protocol *iprot*; unknown/mistyped fields are skipped."""
        # Fast path via the C 'fastbinary' codec when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.authenticationToken = iprot.readString().decode('utf-8')
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to protocol *oprot*; None fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('listNotebooks_args')
        if self.authenticationToken is not None:
            oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
            oprot.writeString(self.authenticationToken.encode('utf-8'))
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """Validation hook; this struct has no required-field checks."""
        return

    def __repr__(self):
        """Debug representation: class name with all attribute=value pairs."""
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        """Equal iff *other* is the same class with an identical attribute dict."""
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not (self == other)
class listNotebooks_result(object):
  """
  Thrift result struct for the listNotebooks RPC.

  Attributes:
   - success
   - userException
   - systemException
  """
  # Field metadata, indexed by field id:
  # (id, wire type, name, type args, default value).
  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(evernote.edam.type.ttypes.Notebook, evernote.edam.type.ttypes.Notebook.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
  )
  def __init__(self, success=None, userException=None, systemException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
  # Deserialize from iprot: C fastbinary fast path when available, else a
  # generic field loop. Unknown fields are skipped (forward compatibility).
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          # success is a list of Notebook structs read element by element.
          self.success = []
          (_etype196, _size193) = iprot.readListBegin()
          for _i197 in range(_size193):
            _elem198 = evernote.edam.type.ttypes.Notebook()
            _elem198.read(iprot)
            self.success.append(_elem198)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  # Serialize to oprot; only non-None fields are written.
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('listNotebooks_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for iter199 in self.success:
        iter199.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  # No required-field constraints for this generated struct.
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getNotebook_args(object):
  """
  Thrift argument struct for the getNotebook RPC.

  Attributes:
   - authenticationToken
   - guid
  """
  # Field metadata, indexed by field id:
  # (id, wire type, name, type args, default value).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
  )
  def __init__(self, authenticationToken=None, guid=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid
  # Deserialize from iprot: C fastbinary fast path when available, else a
  # generic field loop. Unknown fields are skipped (forward compatibility).
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.guid = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  # Serialize to oprot; only non-None fields are written.
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getNotebook_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 2)
      oprot.writeString(self.guid.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  # No required-field constraints for this generated struct.
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getNotebook_result(object):
  """
  Thrift result struct for the getNotebook RPC.

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """
  # Field metadata, indexed by field id:
  # (id, wire type, name, type args, default value).
  thrift_spec = (
    (0, TType.STRUCT, 'success', (evernote.edam.type.ttypes.Notebook, evernote.edam.type.ttypes.Notebook.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  # Deserialize from iprot: C fastbinary fast path when available, else a
  # generic field loop. Unknown fields are skipped (forward compatibility).
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = evernote.edam.type.ttypes.Notebook()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  # Serialize to oprot; only non-None fields are written.
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getNotebook_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  # No required-field constraints for this generated struct.
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getDefaultNotebook_args(object):
  """
  Thrift argument struct for the getDefaultNotebook RPC.

  Attributes:
   - authenticationToken
  """
  # Field metadata, indexed by field id:
  # (id, wire type, name, type args, default value).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
  )
  def __init__(self, authenticationToken=None,):
    self.authenticationToken = authenticationToken
  # Deserialize from iprot: C fastbinary fast path when available, else a
  # generic field loop. Unknown fields are skipped (forward compatibility).
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  # Serialize to oprot; only non-None fields are written.
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getDefaultNotebook_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  # No required-field constraints for this generated struct.
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getDefaultNotebook_result(object):
  """
  Thrift result struct for the getDefaultNotebook RPC.

  Attributes:
   - success
   - userException
   - systemException
  """
  # Field metadata, indexed by field id:
  # (id, wire type, name, type args, default value).
  thrift_spec = (
    (0, TType.STRUCT, 'success', (evernote.edam.type.ttypes.Notebook, evernote.edam.type.ttypes.Notebook.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
  )
  def __init__(self, success=None, userException=None, systemException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
  # Deserialize from iprot: C fastbinary fast path when available, else a
  # generic field loop. Unknown fields are skipped (forward compatibility).
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = evernote.edam.type.ttypes.Notebook()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  # Serialize to oprot; only non-None fields are written.
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getDefaultNotebook_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  # No required-field constraints for this generated struct.
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class createNotebook_args(object):
  """
  Thrift argument struct for the createNotebook RPC.

  Attributes:
   - authenticationToken
   - notebook
  """
  # Field metadata, indexed by field id:
  # (id, wire type, name, type args, default value).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRUCT, 'notebook', (evernote.edam.type.ttypes.Notebook, evernote.edam.type.ttypes.Notebook.thrift_spec), None, ), # 2
  )
  def __init__(self, authenticationToken=None, notebook=None,):
    self.authenticationToken = authenticationToken
    self.notebook = notebook
  # Deserialize from iprot: C fastbinary fast path when available, else a
  # generic field loop. Unknown fields are skipped (forward compatibility).
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.notebook = evernote.edam.type.ttypes.Notebook()
          self.notebook.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  # Serialize to oprot; only non-None fields are written.
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('createNotebook_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.notebook is not None:
      oprot.writeFieldBegin('notebook', TType.STRUCT, 2)
      self.notebook.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  # No required-field constraints for this generated struct.
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class createNotebook_result(object):
  """
  Thrift result struct for the createNotebook RPC.

  Attributes:
   - success
   - userException
   - systemException
  """
  # Field metadata, indexed by field id:
  # (id, wire type, name, type args, default value).
  thrift_spec = (
    (0, TType.STRUCT, 'success', (evernote.edam.type.ttypes.Notebook, evernote.edam.type.ttypes.Notebook.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
  )
  def __init__(self, success=None, userException=None, systemException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
  # Deserialize from iprot: C fastbinary fast path when available, else a
  # generic field loop. Unknown fields are skipped (forward compatibility).
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = evernote.edam.type.ttypes.Notebook()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  # Serialize to oprot; only non-None fields are written.
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('createNotebook_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  # No required-field constraints for this generated struct.
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class updateNotebook_args(object):
  """
  Thrift argument struct for the updateNotebook RPC.

  Attributes:
   - authenticationToken
   - notebook
  """
  # Field metadata, indexed by field id:
  # (id, wire type, name, type args, default value).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRUCT, 'notebook', (evernote.edam.type.ttypes.Notebook, evernote.edam.type.ttypes.Notebook.thrift_spec), None, ), # 2
  )
  def __init__(self, authenticationToken=None, notebook=None,):
    self.authenticationToken = authenticationToken
    self.notebook = notebook
  # Deserialize from iprot: C fastbinary fast path when available, else a
  # generic field loop. Unknown fields are skipped (forward compatibility).
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.notebook = evernote.edam.type.ttypes.Notebook()
          self.notebook.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  # Serialize to oprot; only non-None fields are written.
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('updateNotebook_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.notebook is not None:
      oprot.writeFieldBegin('notebook', TType.STRUCT, 2)
      self.notebook.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  # No required-field constraints for this generated struct.
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class updateNotebook_result(object):
  """
  Thrift result struct for the updateNotebook RPC.

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """
  # Field metadata, indexed by field id:
  # (id, wire type, name, type args, default value). success is an i32 here.
  thrift_spec = (
    (0, TType.I32, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  # Deserialize from iprot: C fastbinary fast path when available, else a
  # generic field loop. Unknown fields are skipped (forward compatibility).
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.I32:
          self.success = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  # Serialize to oprot; only non-None fields are written.
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('updateNotebook_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I32, 0)
      oprot.writeI32(self.success)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  # No required-field constraints for this generated struct.
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class expungeNotebook_args(object):
  """
  Thrift argument struct for the expungeNotebook RPC.

  Attributes:
   - authenticationToken
   - guid
  """
  # Field metadata, indexed by field id:
  # (id, wire type, name, type args, default value).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
  )
  def __init__(self, authenticationToken=None, guid=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid
  # Deserialize from iprot: C fastbinary fast path when available, else a
  # generic field loop. Unknown fields are skipped (forward compatibility).
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.guid = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  # Serialize to oprot; only non-None fields are written.
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('expungeNotebook_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 2)
      oprot.writeString(self.guid.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  # No required-field constraints for this generated struct.
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class expungeNotebook_result(object):
  """
  Thrift result struct for the expungeNotebook RPC.

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """
  # Field metadata, indexed by field id:
  # (id, wire type, name, type args, default value). success is an i32 here.
  thrift_spec = (
    (0, TType.I32, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  # Deserialize from iprot: C fastbinary fast path when available, else a
  # generic field loop. Unknown fields are skipped (forward compatibility).
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.I32:
          self.success = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  # Serialize to oprot; only non-None fields are written.
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('expungeNotebook_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I32, 0)
      oprot.writeI32(self.success)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  # No required-field constraints for this generated struct.
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class listTags_args(object):
  """
  Thrift argument struct for the listTags RPC.

  Attributes:
   - authenticationToken
  """
  # Field metadata, indexed by field id:
  # (id, wire type, name, type args, default value).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
  )
  def __init__(self, authenticationToken=None,):
    self.authenticationToken = authenticationToken
  # Deserialize from iprot: C fastbinary fast path when available, else a
  # generic field loop. Unknown fields are skipped (forward compatibility).
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  # Serialize to oprot; only non-None fields are written.
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('listTags_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  # No required-field constraints for this generated struct.
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class listTags_result(object):
"""
Attributes:
- success
- userException
- systemException
"""
thrift_spec = (
(0, TType.LIST, 'success', (TType.STRUCT,(evernote.edam.type.ttypes.Tag, evernote.edam.type.ttypes.Tag.thrift_spec)), None, ), # 0
(1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
)
def __init__(self, success=None, userException=None, systemException=None,):
self.success = success
self.userException = userException
self.systemException = systemException
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.LIST:
self.success = []
(_etype203, _size200) = iprot.readListBegin()
for _i204 in range(_size200):
_elem205 = evernote.edam.type.ttypes.Tag()
_elem205.read(iprot)
self.success.append(_elem205)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.userException = evernote.edam.error.ttypes.EDAMUserException()
self.userException.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
self.systemException.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('listTags_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.STRUCT, len(self.success))
for iter206 in self.success:
iter206.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.userException is not None:
oprot.writeFieldBegin('userException', TType.STRUCT, 1)
self.userException.write(oprot)
oprot.writeFieldEnd()
if self.systemException is not None:
oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
self.systemException.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
  def validate(self):
    """No validation constraints are generated for this struct."""
    return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class listTagsByNotebook_args(object):
  """Thrift-generated parameter struct for the listTagsByNotebook RPC.

  Attributes:
   - authenticationToken: string field (id 1)
   - notebookGuid: string field (id 2)
  """
  # Per-field wire metadata used by the fastbinary codec and the generic
  # (de)serializer: (field id, wire type, field name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'notebookGuid', None, None, ), # 2
  )
  def __init__(self, authenticationToken=None, notebookGuid=None,):
    self.authenticationToken = authenticationToken
    self.notebookGuid = notebookGuid
  def read(self, iprot):
    """Populate this struct from the Thrift protocol reader *iprot*."""
    # Fast path: C-accelerated decode when protocol/transport/codec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: generic decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.notebookGuid = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None-valued fields are omitted."""
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('listTagsByNotebook_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.notebookGuid is not None:
      oprot.writeFieldBegin('notebookGuid', TType.STRING, 2)
      oprot.writeString(self.notebookGuid.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No validation constraints are generated for this struct."""
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    # Structs compare by full attribute dict, not identity.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class listTagsByNotebook_result(object):
  """Thrift-generated result struct for the listTagsByNotebook RPC.

  Attributes:
   - success: list of Tag structs (field id 0)
   - userException: EDAMUserException (field id 1)
   - systemException: EDAMSystemException (field id 2)
   - notFoundException: EDAMNotFoundException (field id 3)
  """
  # Per-field wire metadata used by the fastbinary codec and the generic
  # (de)serializer: (field id, wire type, field name, type args, default).
  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(evernote.edam.type.ttypes.Tag, evernote.edam.type.ttypes.Tag.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    """Populate this struct from the Thrift protocol reader *iprot*."""
    # Fast path: C-accelerated decode when protocol/transport/codec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: generic decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        # Field 0: success value -- a list of Tag structs.
        if ftype == TType.LIST:
          self.success = []
          (_etype210, _size207) = iprot.readListBegin()
          for _i211 in range(_size207):
            _elem212 = evernote.edam.type.ttypes.Tag()
            _elem212.read(iprot)
            self.success.append(_elem212)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None-valued fields are omitted."""
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('listTagsByNotebook_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for iter213 in self.success:
        iter213.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No validation constraints are generated for this struct."""
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    # Structs compare by full attribute dict, not identity.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getTag_args(object):
  """Thrift-generated parameter struct for the getTag RPC.

  Attributes:
   - authenticationToken: string field (id 1)
   - guid: string field (id 2)
  """
  # Per-field wire metadata used by the fastbinary codec and the generic
  # (de)serializer: (field id, wire type, field name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
  )
  def __init__(self, authenticationToken=None, guid=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid
  def read(self, iprot):
    """Populate this struct from the Thrift protocol reader *iprot*."""
    # Fast path: C-accelerated decode when protocol/transport/codec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: generic decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.guid = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None-valued fields are omitted."""
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getTag_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 2)
      oprot.writeString(self.guid.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No validation constraints are generated for this struct."""
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    # Structs compare by full attribute dict, not identity.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getTag_result(object):
  """Thrift-generated result struct for the getTag RPC.

  Attributes:
   - success: Tag struct (field id 0)
   - userException: EDAMUserException (field id 1)
   - systemException: EDAMSystemException (field id 2)
   - notFoundException: EDAMNotFoundException (field id 3)
  """
  # Per-field wire metadata used by the fastbinary codec and the generic
  # (de)serializer: (field id, wire type, field name, type args, default).
  thrift_spec = (
    (0, TType.STRUCT, 'success', (evernote.edam.type.ttypes.Tag, evernote.edam.type.ttypes.Tag.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    """Populate this struct from the Thrift protocol reader *iprot*."""
    # Fast path: C-accelerated decode when protocol/transport/codec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: generic decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        # Field 0: success value -- a single Tag struct.
        if ftype == TType.STRUCT:
          self.success = evernote.edam.type.ttypes.Tag()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None-valued fields are omitted."""
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getTag_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No validation constraints are generated for this struct."""
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    # Structs compare by full attribute dict, not identity.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class createTag_args(object):
  """Thrift-generated parameter struct for the createTag RPC.

  Attributes:
   - authenticationToken: string field (id 1)
   - tag: Tag struct field (id 2)
  """
  # Per-field wire metadata used by the fastbinary codec and the generic
  # (de)serializer: (field id, wire type, field name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRUCT, 'tag', (evernote.edam.type.ttypes.Tag, evernote.edam.type.ttypes.Tag.thrift_spec), None, ), # 2
  )
  def __init__(self, authenticationToken=None, tag=None,):
    self.authenticationToken = authenticationToken
    self.tag = tag
  def read(self, iprot):
    """Populate this struct from the Thrift protocol reader *iprot*."""
    # Fast path: C-accelerated decode when protocol/transport/codec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: generic decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.tag = evernote.edam.type.ttypes.Tag()
          self.tag.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None-valued fields are omitted."""
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('createTag_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.tag is not None:
      oprot.writeFieldBegin('tag', TType.STRUCT, 2)
      self.tag.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No validation constraints are generated for this struct."""
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    # Structs compare by full attribute dict, not identity.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class createTag_result(object):
  """Thrift-generated result struct for the createTag RPC.

  Attributes:
   - success: Tag struct (field id 0)
   - userException: EDAMUserException (field id 1)
   - systemException: EDAMSystemException (field id 2)
   - notFoundException: EDAMNotFoundException (field id 3)
  """
  # Per-field wire metadata used by the fastbinary codec and the generic
  # (de)serializer: (field id, wire type, field name, type args, default).
  thrift_spec = (
    (0, TType.STRUCT, 'success', (evernote.edam.type.ttypes.Tag, evernote.edam.type.ttypes.Tag.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    """Populate this struct from the Thrift protocol reader *iprot*."""
    # Fast path: C-accelerated decode when protocol/transport/codec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: generic decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        # Field 0: success value -- a single Tag struct.
        if ftype == TType.STRUCT:
          self.success = evernote.edam.type.ttypes.Tag()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None-valued fields are omitted."""
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('createTag_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No validation constraints are generated for this struct."""
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    # Structs compare by full attribute dict, not identity.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class updateTag_args(object):
  """Thrift-generated parameter struct for the updateTag RPC.

  Attributes:
   - authenticationToken: string field (id 1)
   - tag: Tag struct field (id 2)
  """
  # Per-field wire metadata used by the fastbinary codec and the generic
  # (de)serializer: (field id, wire type, field name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRUCT, 'tag', (evernote.edam.type.ttypes.Tag, evernote.edam.type.ttypes.Tag.thrift_spec), None, ), # 2
  )
  def __init__(self, authenticationToken=None, tag=None,):
    self.authenticationToken = authenticationToken
    self.tag = tag
  def read(self, iprot):
    """Populate this struct from the Thrift protocol reader *iprot*."""
    # Fast path: C-accelerated decode when protocol/transport/codec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: generic decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.tag = evernote.edam.type.ttypes.Tag()
          self.tag.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None-valued fields are omitted."""
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('updateTag_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.tag is not None:
      oprot.writeFieldBegin('tag', TType.STRUCT, 2)
      self.tag.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No validation constraints are generated for this struct."""
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    # Structs compare by full attribute dict, not identity.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class updateTag_result(object):
  """Thrift-generated result struct for the updateTag RPC.

  Attributes:
   - success: i32 (field id 0)
   - userException: EDAMUserException (field id 1)
   - systemException: EDAMSystemException (field id 2)
   - notFoundException: EDAMNotFoundException (field id 3)
  """
  # Per-field wire metadata used by the fastbinary codec and the generic
  # (de)serializer: (field id, wire type, field name, type args, default).
  thrift_spec = (
    (0, TType.I32, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    """Populate this struct from the Thrift protocol reader *iprot*."""
    # Fast path: C-accelerated decode when protocol/transport/codec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: generic decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        # Field 0: success value -- a 32-bit integer.
        if ftype == TType.I32:
          self.success = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None-valued fields are omitted."""
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('updateTag_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I32, 0)
      oprot.writeI32(self.success)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No validation constraints are generated for this struct."""
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    # Structs compare by full attribute dict, not identity.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class untagAll_args(object):
  """Thrift-generated parameter struct for the untagAll RPC.

  Attributes:
   - authenticationToken: string field (id 1)
   - guid: string field (id 2)
  """
  # Per-field wire metadata used by the fastbinary codec and the generic
  # (de)serializer: (field id, wire type, field name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
  )
  def __init__(self, authenticationToken=None, guid=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid
  def read(self, iprot):
    """Populate this struct from the Thrift protocol reader *iprot*."""
    # Fast path: C-accelerated decode when protocol/transport/codec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: generic decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.guid = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None-valued fields are omitted."""
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('untagAll_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 2)
      oprot.writeString(self.guid.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No validation constraints are generated for this struct."""
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    # Structs compare by full attribute dict, not identity.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class untagAll_result(object):
  """Thrift-generated result struct for the untagAll RPC (void return).

  Attributes:
   - userException: EDAMUserException (field id 1)
   - systemException: EDAMSystemException (field id 2)
   - notFoundException: EDAMNotFoundException (field id 3)
  """
  # Per-field wire metadata used by the fastbinary codec and the generic
  # (de)serializer: (field id, wire type, field name, type args, default).
  # No field 0: this RPC has no success value.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, userException=None, systemException=None, notFoundException=None,):
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    """Populate this struct from the Thrift protocol reader *iprot*."""
    # Fast path: C-accelerated decode when protocol/transport/codec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: generic decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None-valued fields are omitted."""
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('untagAll_result')
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No validation constraints are generated for this struct."""
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    # Structs compare by full attribute dict, not identity.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class expungeTag_args(object):
  """Thrift-generated parameter struct for the expungeTag RPC.

  Attributes:
   - authenticationToken: string field (id 1)
   - guid: string field (id 2)
  """
  # Per-field wire metadata used by the fastbinary codec and the generic
  # (de)serializer: (field id, wire type, field name, type args, default).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
  )
  def __init__(self, authenticationToken=None, guid=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid
  def read(self, iprot):
    """Populate this struct from the Thrift protocol reader *iprot*."""
    # Fast path: C-accelerated decode when protocol/transport/codec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: generic decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.guid = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None-valued fields are omitted."""
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('expungeTag_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 2)
      oprot.writeString(self.guid.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No validation constraints are generated for this struct."""
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    # Structs compare by full attribute dict, not identity.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class expungeTag_result(object):
"""
Attributes:
- success
- userException
- systemException
- notFoundException
"""
thrift_spec = (
(0, TType.I32, 'success', None, None, ), # 0
(1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
(3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
)
def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
self.success = success
self.userException = userException
self.systemException = systemException
self.notFoundException = notFoundException
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.I32:
self.success = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.userException = evernote.edam.error.ttypes.EDAMUserException()
self.userException.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
self.systemException.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
self.notFoundException.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('expungeTag_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.I32, 0)
oprot.writeI32(self.success)
oprot.writeFieldEnd()
if self.userException is not None:
oprot.writeFieldBegin('userException', TType.STRUCT, 1)
self.userException.write(oprot)
oprot.writeFieldEnd()
if self.systemException is not None:
oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
self.systemException.write(oprot)
oprot.writeFieldEnd()
if self.notFoundException is not None:
oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
self.notFoundException.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
  def __repr__(self):
    """Debug representation listing every instance attribute and its value."""
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    """Structural equality: same class and identical attribute dicts."""
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    """Negation of __eq__ (needed explicitly on Python 2)."""
    return not (self == other)
class listSearches_args(object):
  """
  Thrift-generated argument struct for the listSearches service call.

  Attributes:
   - authenticationToken
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
  )
  def __init__(self, authenticationToken=None,):
    self.authenticationToken = authenticationToken
  def read(self, iprot):
    # Fast path: C-accelerated decode when protocol/transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk wire fields until STOP, skipping unknown ids.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: emit each field that is set (non-None).
    oprot.writeStructBegin('listSearches_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated structs carry no required-field checks.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class listSearches_result(object):
  """
  Thrift-generated result struct for the listSearches service call.
  Field 0 (`success`) is the return value; the remaining fields are the
  call's declared exceptions.

  Attributes:
   - success
   - userException
   - systemException
  """
  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(evernote.edam.type.ttypes.SavedSearch, evernote.edam.type.ttypes.SavedSearch.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
  )
  def __init__(self, success=None, userException=None, systemException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
  def read(self, iprot):
    # Fast path: C-accelerated decode when protocol/transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk wire fields until STOP, skipping unknown ids.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        # success: list<SavedSearch>, read element by element.
        if ftype == TType.LIST:
          self.success = []
          (_etype217, _size214) = iprot.readListBegin()
          for _i218 in range(_size214):
            _elem219 = evernote.edam.type.ttypes.SavedSearch()
            _elem219.read(iprot)
            self.success.append(_elem219)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: emit each field that is set (non-None).
    oprot.writeStructBegin('listSearches_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for iter220 in self.success:
        iter220.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated structs carry no required-field checks.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getSearch_args(object):
  """
  Thrift-generated argument struct for the getSearch service call.

  Attributes:
   - authenticationToken
   - guid
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
  )
  def __init__(self, authenticationToken=None, guid=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid
  def read(self, iprot):
    # Fast path: C-accelerated decode when protocol/transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk wire fields until STOP, skipping unknown ids.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.guid = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: emit each field that is set (non-None).
    oprot.writeStructBegin('getSearch_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 2)
      oprot.writeString(self.guid.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated structs carry no required-field checks.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getSearch_result(object):
  """
  Thrift-generated result struct for the getSearch service call.
  Field 0 (`success`) is the return value; the remaining fields are the
  call's declared exceptions.

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """
  thrift_spec = (
    (0, TType.STRUCT, 'success', (evernote.edam.type.ttypes.SavedSearch, evernote.edam.type.ttypes.SavedSearch.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    # Fast path: C-accelerated decode when protocol/transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk wire fields until STOP, skipping unknown ids.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = evernote.edam.type.ttypes.SavedSearch()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: emit each field that is set (non-None).
    oprot.writeStructBegin('getSearch_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated structs carry no required-field checks.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class createSearch_args(object):
  """
  Thrift-generated argument struct for the createSearch service call.

  Attributes:
   - authenticationToken
   - search
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRUCT, 'search', (evernote.edam.type.ttypes.SavedSearch, evernote.edam.type.ttypes.SavedSearch.thrift_spec), None, ), # 2
  )
  def __init__(self, authenticationToken=None, search=None,):
    self.authenticationToken = authenticationToken
    self.search = search
  def read(self, iprot):
    # Fast path: C-accelerated decode when protocol/transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk wire fields until STOP, skipping unknown ids.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.search = evernote.edam.type.ttypes.SavedSearch()
          self.search.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: emit each field that is set (non-None).
    oprot.writeStructBegin('createSearch_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.search is not None:
      oprot.writeFieldBegin('search', TType.STRUCT, 2)
      self.search.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated structs carry no required-field checks.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class createSearch_result(object):
  """
  Thrift-generated result struct for the createSearch service call.
  Field 0 (`success`) is the return value; the remaining fields are the
  call's declared exceptions.

  Attributes:
   - success
   - userException
   - systemException
  """
  thrift_spec = (
    (0, TType.STRUCT, 'success', (evernote.edam.type.ttypes.SavedSearch, evernote.edam.type.ttypes.SavedSearch.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
  )
  def __init__(self, success=None, userException=None, systemException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
  def read(self, iprot):
    # Fast path: C-accelerated decode when protocol/transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk wire fields until STOP, skipping unknown ids.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = evernote.edam.type.ttypes.SavedSearch()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: emit each field that is set (non-None).
    oprot.writeStructBegin('createSearch_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated structs carry no required-field checks.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class updateSearch_args(object):
  """
  Thrift-generated argument struct for the updateSearch service call.

  Attributes:
   - authenticationToken
   - search
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRUCT, 'search', (evernote.edam.type.ttypes.SavedSearch, evernote.edam.type.ttypes.SavedSearch.thrift_spec), None, ), # 2
  )
  def __init__(self, authenticationToken=None, search=None,):
    self.authenticationToken = authenticationToken
    self.search = search
  def read(self, iprot):
    # Fast path: C-accelerated decode when protocol/transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk wire fields until STOP, skipping unknown ids.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.search = evernote.edam.type.ttypes.SavedSearch()
          self.search.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: emit each field that is set (non-None).
    oprot.writeStructBegin('updateSearch_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.search is not None:
      oprot.writeFieldBegin('search', TType.STRUCT, 2)
      self.search.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated structs carry no required-field checks.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class updateSearch_result(object):
  """
  Thrift-generated result struct for the updateSearch service call.
  Field 0 (`success`) is the i32 return value; the remaining fields are
  the call's declared exceptions.

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """
  thrift_spec = (
    (0, TType.I32, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    # Fast path: C-accelerated decode when protocol/transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk wire fields until STOP, skipping unknown ids.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.I32:
          self.success = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: emit each field that is set (non-None).
    oprot.writeStructBegin('updateSearch_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I32, 0)
      oprot.writeI32(self.success)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated structs carry no required-field checks.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class expungeSearch_args(object):
  """
  Thrift-generated argument struct for the expungeSearch service call.

  Attributes:
   - authenticationToken
   - guid
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
  )
  def __init__(self, authenticationToken=None, guid=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid
  def read(self, iprot):
    # Fast path: C-accelerated decode when protocol/transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk wire fields until STOP, skipping unknown ids.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.guid = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: emit each field that is set (non-None).
    oprot.writeStructBegin('expungeSearch_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 2)
      oprot.writeString(self.guid.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated structs carry no required-field checks.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class expungeSearch_result(object):
  """
  Thrift-generated result struct for the expungeSearch service call.
  Field 0 (`success`) is the i32 return value; the remaining fields are
  the call's declared exceptions.

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """
  thrift_spec = (
    (0, TType.I32, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    # Fast path: C-accelerated decode when protocol/transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk wire fields until STOP, skipping unknown ids.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.I32:
          self.success = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: emit each field that is set (non-None).
    oprot.writeStructBegin('expungeSearch_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I32, 0)
      oprot.writeI32(self.success)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated structs carry no required-field checks.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class findNotes_args(object):
  """
  Thrift-generated argument struct for the findNotes service call.

  Attributes:
   - authenticationToken
   - filter
   - offset
   - maxNotes
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRUCT, 'filter', (NoteFilter, NoteFilter.thrift_spec), None, ), # 2
    (3, TType.I32, 'offset', None, None, ), # 3
    (4, TType.I32, 'maxNotes', None, None, ), # 4
  )
  def __init__(self, authenticationToken=None, filter=None, offset=None, maxNotes=None,):
    self.authenticationToken = authenticationToken
    self.filter = filter
    self.offset = offset
    self.maxNotes = maxNotes
  def read(self, iprot):
    # Fast path: C-accelerated decode when protocol/transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk wire fields until STOP, skipping unknown ids.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.filter = NoteFilter()
          self.filter.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I32:
          self.offset = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I32:
          self.maxNotes = iprot.readI32();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: emit each field that is set (non-None).
    oprot.writeStructBegin('findNotes_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.filter is not None:
      oprot.writeFieldBegin('filter', TType.STRUCT, 2)
      self.filter.write(oprot)
      oprot.writeFieldEnd()
    if self.offset is not None:
      oprot.writeFieldBegin('offset', TType.I32, 3)
      oprot.writeI32(self.offset)
      oprot.writeFieldEnd()
    if self.maxNotes is not None:
      oprot.writeFieldBegin('maxNotes', TType.I32, 4)
      oprot.writeI32(self.maxNotes)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated structs carry no required-field checks.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class findNotes_result(object):
  """
  Thrift-generated result struct for the findNotes service call.
  Field 0 (`success`) is the NoteList return value; the remaining fields
  are the call's declared exceptions.

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """
  thrift_spec = (
    (0, TType.STRUCT, 'success', (NoteList, NoteList.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    # Fast path: C-accelerated decode when protocol/transport allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk wire fields until STOP, skipping unknown ids.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = NoteList()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: emit each field that is set (non-None).
    oprot.writeStructBegin('findNotes_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated structs carry no required-field checks.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class findNoteOffset_args(object):
  """
  Attributes:
   - authenticationToken
   - filter
   - guid
  """
  # Generated Thrift field metadata: one slot per field id —
  # (id, wire type, name, nested type args, default). Slot 0 is unused for
  # argument structs, hence the leading None.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRUCT, 'filter', (NoteFilter, NoteFilter.thrift_spec), None, ), # 2
    (3, TType.STRING, 'guid', None, None, ), # 3
  )
  def __init__(self, authenticationToken=None, filter=None, guid=None,):
    self.authenticationToken = authenticationToken
    self.filter = filter
    self.guid = guid
  def read(self, iprot):
    """Deserialize this struct from iprot.

    Takes the C fastbinary fast path when the accelerated binary protocol
    is in use; otherwise walks fields generically, skipping unknown ids
    and fields whose wire type does not match thrift_spec.
    """
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.filter = NoteFilter()
          self.filter.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.guid = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to oprot; attributes left as None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('findNoteOffset_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.filter is not None:
      oprot.writeFieldBegin('filter', TType.STRUCT, 2)
      self.filter.write(oprot)
      oprot.writeFieldEnd()
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 3)
      oprot.writeString(self.guid.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated stub: no required-field constraints for this struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class findNoteOffset_result(object):
  """
  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """
  # Generated Thrift field metadata: slot 0 carries the call's return value
  # (an i32 offset); slots 1-3 are the declared service exceptions.
  thrift_spec = (
    (0, TType.I32, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    """Deserialize this result struct from iprot (fastbinary path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.I32:
          self.success = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this result struct to oprot; None attributes are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('findNoteOffset_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I32, 0)
      oprot.writeI32(self.success)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated stub: no required-field constraints for this struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class findNotesMetadata_args(object):
  """
  Attributes:
   - authenticationToken
   - filter
   - offset
   - maxNotes
   - resultSpec
  """
  # Generated Thrift field metadata: (id, wire type, name, nested type args,
  # default) per slot; slot 0 is unused for argument structs.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRUCT, 'filter', (NoteFilter, NoteFilter.thrift_spec), None, ), # 2
    (3, TType.I32, 'offset', None, None, ), # 3
    (4, TType.I32, 'maxNotes', None, None, ), # 4
    (5, TType.STRUCT, 'resultSpec', (NotesMetadataResultSpec, NotesMetadataResultSpec.thrift_spec), None, ), # 5
  )
  def __init__(self, authenticationToken=None, filter=None, offset=None, maxNotes=None, resultSpec=None,):
    self.authenticationToken = authenticationToken
    self.filter = filter
    self.offset = offset
    self.maxNotes = maxNotes
    self.resultSpec = resultSpec
  def read(self, iprot):
    """Deserialize this struct from iprot (fastbinary path when available);
    unknown field ids and mismatched wire types are skipped."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.filter = NoteFilter()
          self.filter.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I32:
          self.offset = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I32:
          self.maxNotes = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.STRUCT:
          self.resultSpec = NotesMetadataResultSpec()
          self.resultSpec.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to oprot; attributes left as None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('findNotesMetadata_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.filter is not None:
      oprot.writeFieldBegin('filter', TType.STRUCT, 2)
      self.filter.write(oprot)
      oprot.writeFieldEnd()
    if self.offset is not None:
      oprot.writeFieldBegin('offset', TType.I32, 3)
      oprot.writeI32(self.offset)
      oprot.writeFieldEnd()
    if self.maxNotes is not None:
      oprot.writeFieldBegin('maxNotes', TType.I32, 4)
      oprot.writeI32(self.maxNotes)
      oprot.writeFieldEnd()
    if self.resultSpec is not None:
      oprot.writeFieldBegin('resultSpec', TType.STRUCT, 5)
      self.resultSpec.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated stub: no required-field constraints for this struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class findNotesMetadata_result(object):
  """
  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """
  # Generated Thrift field metadata: slot 0 is the call's return value
  # (a NotesMetadataList); slots 1-3 are the declared service exceptions.
  thrift_spec = (
    (0, TType.STRUCT, 'success', (NotesMetadataList, NotesMetadataList.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    """Deserialize this result struct from iprot (fastbinary path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = NotesMetadataList()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this result struct to oprot; None attributes are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('findNotesMetadata_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated stub: no required-field constraints for this struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class findNoteCounts_args(object):
  """
  Attributes:
   - authenticationToken
   - filter
   - withTrash
  """
  # Generated Thrift field metadata: (id, wire type, name, nested type args,
  # default) per slot; slot 0 is unused for argument structs.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRUCT, 'filter', (NoteFilter, NoteFilter.thrift_spec), None, ), # 2
    (3, TType.BOOL, 'withTrash', None, None, ), # 3
  )
  def __init__(self, authenticationToken=None, filter=None, withTrash=None,):
    self.authenticationToken = authenticationToken
    self.filter = filter
    self.withTrash = withTrash
  def read(self, iprot):
    """Deserialize this struct from iprot (fastbinary path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.filter = NoteFilter()
          self.filter.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.BOOL:
          self.withTrash = iprot.readBool();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to oprot; attributes left as None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('findNoteCounts_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.filter is not None:
      oprot.writeFieldBegin('filter', TType.STRUCT, 2)
      self.filter.write(oprot)
      oprot.writeFieldEnd()
    if self.withTrash is not None:
      oprot.writeFieldBegin('withTrash', TType.BOOL, 3)
      oprot.writeBool(self.withTrash)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated stub: no required-field constraints for this struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class findNoteCounts_result(object):
  """
  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """
  # Generated Thrift field metadata: slot 0 is the call's return value
  # (a NoteCollectionCounts); slots 1-3 are the declared service exceptions.
  thrift_spec = (
    (0, TType.STRUCT, 'success', (NoteCollectionCounts, NoteCollectionCounts.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    """Deserialize this result struct from iprot (fastbinary path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = NoteCollectionCounts()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this result struct to oprot; None attributes are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('findNoteCounts_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated stub: no required-field constraints for this struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getNote_args(object):
  """
  Attributes:
   - authenticationToken
   - guid
   - withContent
   - withResourcesData
   - withResourcesRecognition
   - withResourcesAlternateData
  """
  # Generated Thrift field metadata: (id, wire type, name, nested type args,
  # default) per slot; slot 0 is unused for argument structs.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
    (3, TType.BOOL, 'withContent', None, None, ), # 3
    (4, TType.BOOL, 'withResourcesData', None, None, ), # 4
    (5, TType.BOOL, 'withResourcesRecognition', None, None, ), # 5
    (6, TType.BOOL, 'withResourcesAlternateData', None, None, ), # 6
  )
  def __init__(self, authenticationToken=None, guid=None, withContent=None, withResourcesData=None, withResourcesRecognition=None, withResourcesAlternateData=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid
    self.withContent = withContent
    self.withResourcesData = withResourcesData
    self.withResourcesRecognition = withResourcesRecognition
    self.withResourcesAlternateData = withResourcesAlternateData
  def read(self, iprot):
    """Deserialize this struct from iprot (fastbinary path when available);
    unknown field ids and mismatched wire types are skipped."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.guid = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.BOOL:
          self.withContent = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.BOOL:
          self.withResourcesData = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.BOOL:
          self.withResourcesRecognition = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 6:
        if ftype == TType.BOOL:
          self.withResourcesAlternateData = iprot.readBool();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to oprot; attributes left as None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getNote_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 2)
      oprot.writeString(self.guid.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.withContent is not None:
      oprot.writeFieldBegin('withContent', TType.BOOL, 3)
      oprot.writeBool(self.withContent)
      oprot.writeFieldEnd()
    if self.withResourcesData is not None:
      oprot.writeFieldBegin('withResourcesData', TType.BOOL, 4)
      oprot.writeBool(self.withResourcesData)
      oprot.writeFieldEnd()
    if self.withResourcesRecognition is not None:
      oprot.writeFieldBegin('withResourcesRecognition', TType.BOOL, 5)
      oprot.writeBool(self.withResourcesRecognition)
      oprot.writeFieldEnd()
    if self.withResourcesAlternateData is not None:
      oprot.writeFieldBegin('withResourcesAlternateData', TType.BOOL, 6)
      oprot.writeBool(self.withResourcesAlternateData)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated stub: no required-field constraints for this struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getNote_result(object):
  """
  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """
  # Generated Thrift field metadata: slot 0 is the call's return value
  # (a Note); slots 1-3 are the declared service exceptions.
  thrift_spec = (
    (0, TType.STRUCT, 'success', (evernote.edam.type.ttypes.Note, evernote.edam.type.ttypes.Note.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    """Deserialize this result struct from iprot (fastbinary path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = evernote.edam.type.ttypes.Note()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this result struct to oprot; None attributes are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getNote_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated stub: no required-field constraints for this struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getNoteApplicationData_args(object):
  """
  Attributes:
   - authenticationToken
   - guid
  """
  # Generated Thrift field metadata: (id, wire type, name, nested type args,
  # default) per slot; slot 0 is unused for argument structs.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
  )
  def __init__(self, authenticationToken=None, guid=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid
  def read(self, iprot):
    """Deserialize this struct from iprot (fastbinary path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.guid = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to oprot; attributes left as None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getNoteApplicationData_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 2)
      oprot.writeString(self.guid.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated stub: no required-field constraints for this struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getNoteApplicationData_result(object):
  """
  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """
  # Generated Thrift field metadata: slot 0 is the call's return value
  # (a LazyMap); slots 1-3 are the declared service exceptions.
  thrift_spec = (
    (0, TType.STRUCT, 'success', (evernote.edam.type.ttypes.LazyMap, evernote.edam.type.ttypes.LazyMap.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    """Deserialize this result struct from iprot (fastbinary path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = evernote.edam.type.ttypes.LazyMap()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this result struct to oprot; None attributes are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getNoteApplicationData_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated stub: no required-field constraints for this struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getNoteApplicationDataEntry_args(object):
  """
  Attributes:
   - authenticationToken
   - guid
   - key
  """
  # Generated Thrift field metadata: (id, wire type, name, nested type args,
  # default) per slot; slot 0 is unused for argument structs.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
    (3, TType.STRING, 'key', None, None, ), # 3
  )
  def __init__(self, authenticationToken=None, guid=None, key=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid
    self.key = key
  def read(self, iprot):
    """Deserialize this struct from iprot (fastbinary path when available)."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.guid = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.key = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to oprot; attributes left as None are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getNoteApplicationDataEntry_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 2)
      oprot.writeString(self.guid.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.key is not None:
      oprot.writeFieldBegin('key', TType.STRING, 3)
      oprot.writeString(self.key.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated stub: no required-field constraints for this struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getNoteApplicationDataEntry_result(object):
  """
  Thrift result struct for the getNoteApplicationDataEntry RPC.

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """

  # Field metadata consumed by the accelerated (C) binary codec.
  thrift_spec = (
    (0, TType.STRING, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )

  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException

  def read(self, iprot):
    """Populate this struct from *iprot*, a Thrift input protocol."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk the encoded fields one at a time.
    iprot.readStructBegin()
    while True:
      _, ftype, fid = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0 and ftype == TType.STRING:
        self.success = iprot.readString().decode('utf-8')
      elif fid == 1 and ftype == TType.STRUCT:
        self.userException = evernote.edam.error.ttypes.EDAMUserException()
        self.userException.read(iprot)
      elif fid == 2 and ftype == TType.STRUCT:
        self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
        self.systemException.read(iprot)
      elif fid == 3 and ftype == TType.STRUCT:
        self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
        self.notFoundException.read(iprot)
      else:
        # Unknown field id or unexpected wire type: ignore it.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*, a Thrift output protocol."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getNoteApplicationDataEntry_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRING, 0)
      oprot.writeString(self.success.encode('utf-8'))
      oprot.writeFieldEnd()
    # The three exception slots are all structs; emit whichever are set.
    for fid, fname, exc in ((1, 'userException', self.userException),
                            (2, 'systemException', self.systemException),
                            (3, 'notFoundException', self.notFoundException)):
      if exc is not None:
        oprot.writeFieldBegin(fname, TType.STRUCT, fid)
        exc.write(oprot)
        oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    attrs = ', '.join('%s=%r' % item for item in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class setNoteApplicationDataEntry_args(object):
  """
  Thrift argument struct for the setNoteApplicationDataEntry RPC.

  Attributes:
   - authenticationToken
   - guid
   - key
   - value
  """

  # Field metadata consumed by the accelerated (C) binary codec.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
    (3, TType.STRING, 'key', None, None, ), # 3
    (4, TType.STRING, 'value', None, None, ), # 4
  )

  def __init__(self, authenticationToken=None, guid=None, key=None, value=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid
    self.key = key
    self.value = value

  def read(self, iprot):
    """Populate this struct from *iprot*, a Thrift input protocol."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      _, ftype, fid = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if ftype == TType.STRING and fid == 1:
        self.authenticationToken = iprot.readString().decode('utf-8')
      elif ftype == TType.STRING and fid == 2:
        self.guid = iprot.readString().decode('utf-8')
      elif ftype == TType.STRING and fid == 3:
        self.key = iprot.readString().decode('utf-8')
      elif ftype == TType.STRING and fid == 4:
        self.value = iprot.readString().decode('utf-8')
      else:
        # Unknown field id or unexpected wire type: ignore it.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*, a Thrift output protocol."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('setNoteApplicationDataEntry_args')
    # Every field is an optional UTF-8 string; emit only the ones that are set.
    for fid, fname, val in ((1, 'authenticationToken', self.authenticationToken),
                            (2, 'guid', self.guid),
                            (3, 'key', self.key),
                            (4, 'value', self.value)):
      if val is not None:
        oprot.writeFieldBegin(fname, TType.STRING, fid)
        oprot.writeString(val.encode('utf-8'))
        oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    attrs = ', '.join('%s=%r' % item for item in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class setNoteApplicationDataEntry_result(object):
  """
  Thrift result struct for the setNoteApplicationDataEntry RPC.

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """

  # Field metadata consumed by the accelerated (C) binary codec.
  thrift_spec = (
    (0, TType.I32, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )

  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException

  def read(self, iprot):
    """Populate this struct from *iprot*, a Thrift input protocol."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      _, ftype, fid = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0 and ftype == TType.I32:
        self.success = iprot.readI32()
      elif fid == 1 and ftype == TType.STRUCT:
        self.userException = evernote.edam.error.ttypes.EDAMUserException()
        self.userException.read(iprot)
      elif fid == 2 and ftype == TType.STRUCT:
        self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
        self.systemException.read(iprot)
      elif fid == 3 and ftype == TType.STRUCT:
        self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
        self.notFoundException.read(iprot)
      else:
        # Unknown field id or unexpected wire type: ignore it.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*, a Thrift output protocol."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('setNoteApplicationDataEntry_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I32, 0)
      oprot.writeI32(self.success)
      oprot.writeFieldEnd()
    # The three exception slots are all structs; emit whichever are set.
    for fid, fname, exc in ((1, 'userException', self.userException),
                            (2, 'systemException', self.systemException),
                            (3, 'notFoundException', self.notFoundException)):
      if exc is not None:
        oprot.writeFieldBegin(fname, TType.STRUCT, fid)
        exc.write(oprot)
        oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    attrs = ', '.join('%s=%r' % item for item in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class unsetNoteApplicationDataEntry_args(object):
  """
  Thrift argument struct for the unsetNoteApplicationDataEntry RPC.

  Attributes:
   - authenticationToken
   - guid
   - key
  """

  # Field metadata consumed by the accelerated (C) binary codec.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
    (3, TType.STRING, 'key', None, None, ), # 3
  )

  def __init__(self, authenticationToken=None, guid=None, key=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid
    self.key = key

  def read(self, iprot):
    """Populate this struct from *iprot*, a Thrift input protocol."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      _, ftype, fid = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if ftype == TType.STRING and fid == 1:
        self.authenticationToken = iprot.readString().decode('utf-8')
      elif ftype == TType.STRING and fid == 2:
        self.guid = iprot.readString().decode('utf-8')
      elif ftype == TType.STRING and fid == 3:
        self.key = iprot.readString().decode('utf-8')
      else:
        # Unknown field id or unexpected wire type: ignore it.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*, a Thrift output protocol."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('unsetNoteApplicationDataEntry_args')
    # Every field is an optional UTF-8 string; emit only the ones that are set.
    for fid, fname, val in ((1, 'authenticationToken', self.authenticationToken),
                            (2, 'guid', self.guid),
                            (3, 'key', self.key)):
      if val is not None:
        oprot.writeFieldBegin(fname, TType.STRING, fid)
        oprot.writeString(val.encode('utf-8'))
        oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    attrs = ', '.join('%s=%r' % item for item in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class unsetNoteApplicationDataEntry_result(object):
  """
  Thrift result struct for the unsetNoteApplicationDataEntry RPC.

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """

  # Field metadata consumed by the accelerated (C) binary codec.
  thrift_spec = (
    (0, TType.I32, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )

  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException

  def read(self, iprot):
    """Populate this struct from *iprot*, a Thrift input protocol."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      _, ftype, fid = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0 and ftype == TType.I32:
        self.success = iprot.readI32()
      elif fid == 1 and ftype == TType.STRUCT:
        self.userException = evernote.edam.error.ttypes.EDAMUserException()
        self.userException.read(iprot)
      elif fid == 2 and ftype == TType.STRUCT:
        self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
        self.systemException.read(iprot)
      elif fid == 3 and ftype == TType.STRUCT:
        self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
        self.notFoundException.read(iprot)
      else:
        # Unknown field id or unexpected wire type: ignore it.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*, a Thrift output protocol."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('unsetNoteApplicationDataEntry_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I32, 0)
      oprot.writeI32(self.success)
      oprot.writeFieldEnd()
    # The three exception slots are all structs; emit whichever are set.
    for fid, fname, exc in ((1, 'userException', self.userException),
                            (2, 'systemException', self.systemException),
                            (3, 'notFoundException', self.notFoundException)):
      if exc is not None:
        oprot.writeFieldBegin(fname, TType.STRUCT, fid)
        exc.write(oprot)
        oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    attrs = ', '.join('%s=%r' % item for item in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class getNoteContent_args(object):
  """
  Thrift argument struct for the getNoteContent RPC.

  Attributes:
   - authenticationToken
   - guid
  """

  # Field metadata consumed by the accelerated (C) binary codec.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
  )

  def __init__(self, authenticationToken=None, guid=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid

  def read(self, iprot):
    """Populate this struct from *iprot*, a Thrift input protocol."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      _, ftype, fid = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if ftype == TType.STRING and fid == 1:
        self.authenticationToken = iprot.readString().decode('utf-8')
      elif ftype == TType.STRING and fid == 2:
        self.guid = iprot.readString().decode('utf-8')
      else:
        # Unknown field id or unexpected wire type: ignore it.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*, a Thrift output protocol."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getNoteContent_args')
    # Every field is an optional UTF-8 string; emit only the ones that are set.
    for fid, fname, val in ((1, 'authenticationToken', self.authenticationToken),
                            (2, 'guid', self.guid)):
      if val is not None:
        oprot.writeFieldBegin(fname, TType.STRING, fid)
        oprot.writeString(val.encode('utf-8'))
        oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    attrs = ', '.join('%s=%r' % item for item in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class getNoteContent_result(object):
  """
  Thrift result struct for the getNoteContent RPC.

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """

  # Field metadata consumed by the accelerated (C) binary codec.
  thrift_spec = (
    (0, TType.STRING, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )

  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException

  def read(self, iprot):
    """Populate this struct from *iprot*, a Thrift input protocol."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      _, ftype, fid = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0 and ftype == TType.STRING:
        self.success = iprot.readString().decode('utf-8')
      elif fid == 1 and ftype == TType.STRUCT:
        self.userException = evernote.edam.error.ttypes.EDAMUserException()
        self.userException.read(iprot)
      elif fid == 2 and ftype == TType.STRUCT:
        self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
        self.systemException.read(iprot)
      elif fid == 3 and ftype == TType.STRUCT:
        self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
        self.notFoundException.read(iprot)
      else:
        # Unknown field id or unexpected wire type: ignore it.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*, a Thrift output protocol."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getNoteContent_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRING, 0)
      oprot.writeString(self.success.encode('utf-8'))
      oprot.writeFieldEnd()
    # The three exception slots are all structs; emit whichever are set.
    for fid, fname, exc in ((1, 'userException', self.userException),
                            (2, 'systemException', self.systemException),
                            (3, 'notFoundException', self.notFoundException)):
      if exc is not None:
        oprot.writeFieldBegin(fname, TType.STRUCT, fid)
        exc.write(oprot)
        oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    attrs = ', '.join('%s=%r' % item for item in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class getNoteSearchText_args(object):
  """
  Thrift argument struct for the getNoteSearchText RPC.

  Attributes:
   - authenticationToken
   - guid
   - noteOnly
   - tokenizeForIndexing
  """

  # Field metadata consumed by the accelerated (C) binary codec.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
    (3, TType.BOOL, 'noteOnly', None, None, ), # 3
    (4, TType.BOOL, 'tokenizeForIndexing', None, None, ), # 4
  )

  def __init__(self, authenticationToken=None, guid=None, noteOnly=None, tokenizeForIndexing=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid
    self.noteOnly = noteOnly
    self.tokenizeForIndexing = tokenizeForIndexing

  def read(self, iprot):
    """Populate this struct from *iprot*, a Thrift input protocol."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      _, ftype, fid = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRING:
        self.authenticationToken = iprot.readString().decode('utf-8')
      elif fid == 2 and ftype == TType.STRING:
        self.guid = iprot.readString().decode('utf-8')
      elif fid == 3 and ftype == TType.BOOL:
        self.noteOnly = iprot.readBool()
      elif fid == 4 and ftype == TType.BOOL:
        self.tokenizeForIndexing = iprot.readBool()
      else:
        # Unknown field id or unexpected wire type: ignore it.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*, a Thrift output protocol."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getNoteSearchText_args')
    # Two optional UTF-8 strings followed by two optional booleans;
    # emit only the fields that are set, in field-id order.
    for fid, fname, wire_type, val in ((1, 'authenticationToken', TType.STRING, self.authenticationToken),
                                       (2, 'guid', TType.STRING, self.guid),
                                       (3, 'noteOnly', TType.BOOL, self.noteOnly),
                                       (4, 'tokenizeForIndexing', TType.BOOL, self.tokenizeForIndexing)):
      if val is None:
        continue
      oprot.writeFieldBegin(fname, wire_type, fid)
      if wire_type == TType.STRING:
        oprot.writeString(val.encode('utf-8'))
      else:
        oprot.writeBool(val)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    attrs = ', '.join('%s=%r' % item for item in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class getNoteSearchText_result(object):
  """
  Thrift result struct for the getNoteSearchText RPC.

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """

  # Field metadata consumed by the accelerated (C) binary codec.
  thrift_spec = (
    (0, TType.STRING, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )

  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException

  def read(self, iprot):
    """Populate this struct from *iprot*, a Thrift input protocol."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      _, ftype, fid = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0 and ftype == TType.STRING:
        self.success = iprot.readString().decode('utf-8')
      elif fid == 1 and ftype == TType.STRUCT:
        self.userException = evernote.edam.error.ttypes.EDAMUserException()
        self.userException.read(iprot)
      elif fid == 2 and ftype == TType.STRUCT:
        self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
        self.systemException.read(iprot)
      elif fid == 3 and ftype == TType.STRUCT:
        self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
        self.notFoundException.read(iprot)
      else:
        # Unknown field id or unexpected wire type: ignore it.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*, a Thrift output protocol."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getNoteSearchText_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRING, 0)
      oprot.writeString(self.success.encode('utf-8'))
      oprot.writeFieldEnd()
    # The three exception slots are all structs; emit whichever are set.
    for fid, fname, exc in ((1, 'userException', self.userException),
                            (2, 'systemException', self.systemException),
                            (3, 'notFoundException', self.notFoundException)):
      if exc is not None:
        oprot.writeFieldBegin(fname, TType.STRUCT, fid)
        exc.write(oprot)
        oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    attrs = ', '.join('%s=%r' % item for item in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class getResourceSearchText_args(object):
  """
  Thrift argument struct for the getResourceSearchText RPC.

  Attributes:
   - authenticationToken
   - guid
  """

  # Field metadata consumed by the accelerated (C) binary codec.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
  )

  def __init__(self, authenticationToken=None, guid=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid

  def read(self, iprot):
    """Populate this struct from *iprot*, a Thrift input protocol."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      _, ftype, fid = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if ftype == TType.STRING and fid == 1:
        self.authenticationToken = iprot.readString().decode('utf-8')
      elif ftype == TType.STRING and fid == 2:
        self.guid = iprot.readString().decode('utf-8')
      else:
        # Unknown field id or unexpected wire type: ignore it.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*, a Thrift output protocol."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getResourceSearchText_args')
    # Every field is an optional UTF-8 string; emit only the ones that are set.
    for fid, fname, val in ((1, 'authenticationToken', self.authenticationToken),
                            (2, 'guid', self.guid)):
      if val is not None:
        oprot.writeFieldBegin(fname, TType.STRING, fid)
        oprot.writeString(val.encode('utf-8'))
        oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    attrs = ', '.join('%s=%r' % item for item in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class getResourceSearchText_result(object):
  """
  Thrift result struct for the getResourceSearchText RPC.

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """

  # Field metadata consumed by the accelerated (C) binary codec.
  thrift_spec = (
    (0, TType.STRING, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )

  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException

  def read(self, iprot):
    """Populate this struct from *iprot*, a Thrift input protocol."""
    # Fast path: let the C extension decode the whole struct at once.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      _, ftype, fid = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0 and ftype == TType.STRING:
        self.success = iprot.readString().decode('utf-8')
      elif fid == 1 and ftype == TType.STRUCT:
        self.userException = evernote.edam.error.ttypes.EDAMUserException()
        self.userException.read(iprot)
      elif fid == 2 and ftype == TType.STRUCT:
        self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
        self.systemException.read(iprot)
      elif fid == 3 and ftype == TType.STRUCT:
        self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
        self.notFoundException.read(iprot)
      else:
        # Unknown field id or unexpected wire type: ignore it.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    """Serialize this struct to *oprot*, a Thrift output protocol."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getResourceSearchText_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRING, 0)
      oprot.writeString(self.success.encode('utf-8'))
      oprot.writeFieldEnd()
    # The three exception slots are all structs; emit whichever are set.
    for fid, fname, exc in ((1, 'userException', self.userException),
                            (2, 'systemException', self.systemException),
                            (3, 'notFoundException', self.notFoundException)):
      if exc is not None:
        oprot.writeFieldBegin(fname, TType.STRUCT, fid)
        exc.write(oprot)
        oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    attrs = ', '.join('%s=%r' % item for item in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class getNoteTagNames_args(object):
  """
  Argument container for NoteStore.getNoteTagNames.

  Attributes:
   - authenticationToken
   - guid
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
  )

  def __init__(self, authenticationToken=None, guid=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid

  def read(self, iprot):
    # Fast path: the accelerated binary protocol decodes via fastbinary.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk the encoded fields one at a time.
    iprot.readStructBegin()
    while True:
      fname, ftype, fid = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRING:
        self.authenticationToken = iprot.readString().decode('utf-8')
      elif fid == 2 and ftype == TType.STRING:
        self.guid = iprot.readString().decode('utf-8')
      else:
        # Unknown field id or unexpected wire type: ignore the value.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path via fastbinary when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: only fields that are set get written.
    oprot.writeStructBegin('getNoteTagNames_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 2)
      oprot.writeString(self.guid.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class getNoteTagNames_result(object):
  """
  Result container for NoteStore.getNoteTagNames.

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """

  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRING,None), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )

  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException

  def read(self, iprot):
    # Fast path: the accelerated binary protocol decodes via fastbinary.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field 0 is the tag-name list, 1-3 are declared exceptions.
    iprot.readStructBegin()
    while True:
      fname, ftype, fid = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0 and ftype == TType.LIST:
        self.success = []
        _etype, _size = iprot.readListBegin()
        for _ in range(_size):
          self.success.append(iprot.readString().decode('utf-8'))
        iprot.readListEnd()
      elif fid == 1 and ftype == TType.STRUCT:
        self.userException = evernote.edam.error.ttypes.EDAMUserException()
        self.userException.read(iprot)
      elif fid == 2 and ftype == TType.STRUCT:
        self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
        self.systemException.read(iprot)
      elif fid == 3 and ftype == TType.STRUCT:
        self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
        self.notFoundException.read(iprot)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path via fastbinary when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getNoteTagNames_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRING, len(self.success))
      for tag_name in self.success:
        oprot.writeString(tag_name.encode('utf-8'))
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class createNote_args(object):
  """
  Argument container for NoteStore.createNote.

  Attributes:
   - authenticationToken
   - note
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRUCT, 'note', (evernote.edam.type.ttypes.Note, evernote.edam.type.ttypes.Note.thrift_spec), None, ), # 2
  )

  def __init__(self, authenticationToken=None, note=None,):
    self.authenticationToken = authenticationToken
    self.note = note

  def read(self, iprot):
    # Fast path: the accelerated binary protocol decodes via fastbinary.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk the encoded fields one at a time.
    iprot.readStructBegin()
    while True:
      fname, ftype, fid = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRING:
        self.authenticationToken = iprot.readString().decode('utf-8')
      elif fid == 2 and ftype == TType.STRUCT:
        # Nested Note struct deserializes itself from the same protocol.
        self.note = evernote.edam.type.ttypes.Note()
        self.note.read(iprot)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path via fastbinary when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('createNote_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.note is not None:
      oprot.writeFieldBegin('note', TType.STRUCT, 2)
      self.note.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class createNote_result(object):
  """
  Result container for NoteStore.createNote.

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (evernote.edam.type.ttypes.Note, evernote.edam.type.ttypes.Note.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )

  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException

  def read(self, iprot):
    # Fast path: the accelerated binary protocol decodes via fastbinary.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field 0 is the created Note, 1-3 are declared exceptions.
    iprot.readStructBegin()
    while True:
      fname, ftype, fid = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0 and ftype == TType.STRUCT:
        self.success = evernote.edam.type.ttypes.Note()
        self.success.read(iprot)
      elif fid == 1 and ftype == TType.STRUCT:
        self.userException = evernote.edam.error.ttypes.EDAMUserException()
        self.userException.read(iprot)
      elif fid == 2 and ftype == TType.STRUCT:
        self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
        self.systemException.read(iprot)
      elif fid == 3 and ftype == TType.STRUCT:
        self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
        self.notFoundException.read(iprot)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path via fastbinary when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('createNote_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class updateNote_args(object):
  """
  Argument container for NoteStore.updateNote.

  Attributes:
   - authenticationToken
   - note
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRUCT, 'note', (evernote.edam.type.ttypes.Note, evernote.edam.type.ttypes.Note.thrift_spec), None, ), # 2
  )

  def __init__(self, authenticationToken=None, note=None,):
    self.authenticationToken = authenticationToken
    self.note = note

  def read(self, iprot):
    # Fast path: the accelerated binary protocol decodes via fastbinary.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk the encoded fields one at a time.
    iprot.readStructBegin()
    while True:
      fname, ftype, fid = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRING:
        self.authenticationToken = iprot.readString().decode('utf-8')
      elif fid == 2 and ftype == TType.STRUCT:
        # Nested Note struct deserializes itself from the same protocol.
        self.note = evernote.edam.type.ttypes.Note()
        self.note.read(iprot)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path via fastbinary when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('updateNote_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.note is not None:
      oprot.writeFieldBegin('note', TType.STRUCT, 2)
      self.note.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class updateNote_result(object):
  """
  Result container for NoteStore.updateNote.

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """

  thrift_spec = (
    (0, TType.STRUCT, 'success', (evernote.edam.type.ttypes.Note, evernote.edam.type.ttypes.Note.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )

  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException

  def read(self, iprot):
    # Fast path: the accelerated binary protocol decodes via fastbinary.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field 0 is the updated Note, 1-3 are declared exceptions.
    iprot.readStructBegin()
    while True:
      fname, ftype, fid = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0 and ftype == TType.STRUCT:
        self.success = evernote.edam.type.ttypes.Note()
        self.success.read(iprot)
      elif fid == 1 and ftype == TType.STRUCT:
        self.userException = evernote.edam.error.ttypes.EDAMUserException()
        self.userException.read(iprot)
      elif fid == 2 and ftype == TType.STRUCT:
        self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
        self.systemException.read(iprot)
      elif fid == 3 and ftype == TType.STRUCT:
        self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
        self.notFoundException.read(iprot)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path via fastbinary when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('updateNote_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class deleteNote_args(object):
  """
  Argument container for NoteStore.deleteNote.

  Attributes:
   - authenticationToken
   - guid
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
  )

  def __init__(self, authenticationToken=None, guid=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid

  def read(self, iprot):
    # Fast path: the accelerated binary protocol decodes via fastbinary.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk the encoded fields one at a time.
    iprot.readStructBegin()
    while True:
      fname, ftype, fid = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRING:
        self.authenticationToken = iprot.readString().decode('utf-8')
      elif fid == 2 and ftype == TType.STRING:
        self.guid = iprot.readString().decode('utf-8')
      else:
        # Unknown field id or unexpected wire type: ignore the value.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path via fastbinary when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('deleteNote_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 2)
      oprot.writeString(self.guid.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class deleteNote_result(object):
  """
  Result container for NoteStore.deleteNote.

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """

  thrift_spec = (
    (0, TType.I32, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )

  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException

  def read(self, iprot):
    # Fast path: the accelerated binary protocol decodes via fastbinary.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field 0 is an i32 result, 1-3 are declared exceptions.
    iprot.readStructBegin()
    while True:
      fname, ftype, fid = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0 and ftype == TType.I32:
        self.success = iprot.readI32()
      elif fid == 1 and ftype == TType.STRUCT:
        self.userException = evernote.edam.error.ttypes.EDAMUserException()
        self.userException.read(iprot)
      elif fid == 2 and ftype == TType.STRUCT:
        self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
        self.systemException.read(iprot)
      elif fid == 3 and ftype == TType.STRUCT:
        self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
        self.notFoundException.read(iprot)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path via fastbinary when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('deleteNote_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I32, 0)
      oprot.writeI32(self.success)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class expungeNote_args(object):
  """
  Argument container for NoteStore.expungeNote.

  Attributes:
   - authenticationToken
   - guid
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
  )

  def __init__(self, authenticationToken=None, guid=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid

  def read(self, iprot):
    # Fast path: the accelerated binary protocol decodes via fastbinary.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk the encoded fields one at a time.
    iprot.readStructBegin()
    while True:
      fname, ftype, fid = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRING:
        self.authenticationToken = iprot.readString().decode('utf-8')
      elif fid == 2 and ftype == TType.STRING:
        self.guid = iprot.readString().decode('utf-8')
      else:
        # Unknown field id or unexpected wire type: ignore the value.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path via fastbinary when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('expungeNote_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 2)
      oprot.writeString(self.guid.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class expungeNote_result(object):
  """
  Result container for NoteStore.expungeNote.

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """

  thrift_spec = (
    (0, TType.I32, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )

  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException

  def read(self, iprot):
    # Fast path: the accelerated binary protocol decodes via fastbinary.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field 0 is an i32 result, 1-3 are declared exceptions.
    iprot.readStructBegin()
    while True:
      fname, ftype, fid = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0 and ftype == TType.I32:
        self.success = iprot.readI32()
      elif fid == 1 and ftype == TType.STRUCT:
        self.userException = evernote.edam.error.ttypes.EDAMUserException()
        self.userException.read(iprot)
      elif fid == 2 and ftype == TType.STRUCT:
        self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
        self.systemException.read(iprot)
      elif fid == 3 and ftype == TType.STRUCT:
        self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
        self.notFoundException.read(iprot)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path via fastbinary when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('expungeNote_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I32, 0)
      oprot.writeI32(self.success)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class expungeNotes_args(object):
  """
  Argument container for NoteStore.expungeNotes.

  Attributes:
   - authenticationToken
   - noteGuids
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.LIST, 'noteGuids', (TType.STRING,None), None, ), # 2
  )

  def __init__(self, authenticationToken=None, noteGuids=None,):
    self.authenticationToken = authenticationToken
    self.noteGuids = noteGuids

  def read(self, iprot):
    # Fast path: the accelerated binary protocol decodes via fastbinary.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: walk the encoded fields one at a time.
    iprot.readStructBegin()
    while True:
      fname, ftype, fid = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRING:
        self.authenticationToken = iprot.readString().decode('utf-8')
      elif fid == 2 and ftype == TType.LIST:
        self.noteGuids = []
        _etype, _size = iprot.readListBegin()
        for _ in range(_size):
          self.noteGuids.append(iprot.readString().decode('utf-8'))
        iprot.readListEnd()
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path via fastbinary when the accelerated protocol is in use.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('expungeNotes_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.noteGuids is not None:
      oprot.writeFieldBegin('noteGuids', TType.LIST, 2)
      oprot.writeListBegin(TType.STRING, len(self.noteGuids))
      for note_guid in self.noteGuids:
        oprot.writeString(note_guid.encode('utf-8'))
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    attrs = ', '.join('%s=%r' % kv for kv in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, attrs)

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class expungeNotes_result(object):
"""
Attributes:
- success
- userException
- systemException
- notFoundException
"""
thrift_spec = (
(0, TType.I32, 'success', None, None, ), # 0
(1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
(3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
)
def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
self.success = success
self.userException = userException
self.systemException = systemException
self.notFoundException = notFoundException
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.I32:
self.success = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.userException = evernote.edam.error.ttypes.EDAMUserException()
self.userException.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
self.systemException.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
self.notFoundException.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('expungeNotes_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.I32, 0)
oprot.writeI32(self.success)
oprot.writeFieldEnd()
if self.userException is not None:
oprot.writeFieldBegin('userException', TType.STRUCT, 1)
self.userException.write(oprot)
oprot.writeFieldEnd()
if self.systemException is not None:
oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
self.systemException.write(oprot)
oprot.writeFieldEnd()
if self.notFoundException is not None:
oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
self.notFoundException.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
  def validate(self):
    """Generated structs carry no validation rules; always succeeds."""
    return
  def __repr__(self):
    """Debug representation: ``ClassName(attr=value, ...)`` over all attributes."""
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    """Equal when *other* is the same class with an identical attribute dict."""
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    """Inverse of ``__eq__`` (explicit for Python 2 compatibility)."""
    return not (self == other)
class expungeInactiveNotes_args(object):
  """Thrift argument struct for the ``expungeInactiveNotes`` RPC.

  Attributes:
   - authenticationToken
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
  )
  def __init__(self, authenticationToken=None,):
    self.authenticationToken = authenticationToken
  def read(self, iprot):
    """Populate this struct from *iprot*; unrecognized fields are skipped."""
    # C-accelerated decode path, when protocol/transport/spec permit.
    fast_ok = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
               and isinstance(iprot.trans, TTransport.CReadableTransport)
               and self.thrift_spec is not None
               and fastbinary is not None)
    if fast_ok:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      _fname, _ftype, _fid = iprot.readFieldBegin()
      if _ftype == TType.STOP:
        break
      if _fid == 1 and _ftype == TType.STRING:
        self.authenticationToken = iprot.readString().decode('utf-8')
      else:
        self.skip_helper = None  # no-op placeholder removed; see skip below
      if not (_fid == 1 and _ftype == TType.STRING):
        iprot.skip(_ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; only set (non-None) fields are written."""
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('expungeInactiveNotes_args')
    token = self.authenticationToken
    if token is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(token.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Generated structs carry no validation rules."""
    return
  def __repr__(self):
    parts = ('%s=%r' % kv for kv in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(parts))
  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class expungeInactiveNotes_result(object):
  """
  Result struct for the ``expungeInactiveNotes`` RPC; fields left ``None``
  were absent on the wire.

  Attributes:
   - success
   - userException
   - systemException
  """
  thrift_spec = (
    (0, TType.I32, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
  )
  def __init__(self, success=None, userException=None, systemException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
  def read(self, iprot):
    """Deserialize this struct from *iprot*; unrecognized fields are skipped."""
    # Fast path: C-accelerated decode when protocol/transport/spec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.I32:
          self.success = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; only non-None fields are written."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('expungeInactiveNotes_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I32, 0)
      oprot.writeI32(self.success)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Generated structs carry no validation rules."""
    return
  def __repr__(self):
    """Debug representation: ``ClassName(attr=value, ...)``."""
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    """Equal when same class with identical attribute dicts."""
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class copyNote_args(object):
  """Thrift argument struct for the ``copyNote`` RPC.

  Attributes:
   - authenticationToken
   - noteGuid
   - toNotebookGuid
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'noteGuid', None, None, ), # 2
    (3, TType.STRING, 'toNotebookGuid', None, None, ), # 3
  )
  def __init__(self, authenticationToken=None, noteGuid=None, toNotebookGuid=None,):
    self.authenticationToken = authenticationToken
    self.noteGuid = noteGuid
    self.toNotebookGuid = toNotebookGuid
  def read(self, iprot):
    """Populate this struct from *iprot*; unrecognized fields are skipped."""
    # C-accelerated decode path, when protocol/transport/spec permit.
    fast_ok = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
               and isinstance(iprot.trans, TTransport.CReadableTransport)
               and self.thrift_spec is not None
               and fastbinary is not None)
    if fast_ok:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      _fname, _ftype, _fid = iprot.readFieldBegin()
      if _ftype == TType.STOP:
        break
      if _fid == 1 and _ftype == TType.STRING:
        self.authenticationToken = iprot.readString().decode('utf-8')
      elif _fid == 2 and _ftype == TType.STRING:
        self.noteGuid = iprot.readString().decode('utf-8')
      elif _fid == 3 and _ftype == TType.STRING:
        self.toNotebookGuid = iprot.readString().decode('utf-8')
      else:
        iprot.skip(_ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; only set (non-None) fields are written."""
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('copyNote_args')
    # (field name, wire type, field id, value) — order matches thrift_spec.
    for name, wire_type, field_id, value in (
        ('authenticationToken', TType.STRING, 1, self.authenticationToken),
        ('noteGuid', TType.STRING, 2, self.noteGuid),
        ('toNotebookGuid', TType.STRING, 3, self.toNotebookGuid),
    ):
      if value is not None:
        oprot.writeFieldBegin(name, wire_type, field_id)
        oprot.writeString(value.encode('utf-8'))
        oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Generated structs carry no validation rules."""
    return
  def __repr__(self):
    parts = ('%s=%r' % kv for kv in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(parts))
  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class copyNote_result(object):
  """
  Result struct for the ``copyNote`` RPC: a copied ``Note`` on success, or
  one of the EDAM exception fields; fields left ``None`` were absent.

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """
  thrift_spec = (
    (0, TType.STRUCT, 'success', (evernote.edam.type.ttypes.Note, evernote.edam.type.ttypes.Note.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    """Deserialize this struct from *iprot*; unrecognized fields are skipped."""
    # Fast path: C-accelerated decode when protocol/transport/spec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = evernote.edam.type.ttypes.Note()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; only non-None fields are written."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('copyNote_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Generated structs carry no validation rules."""
    return
  def __repr__(self):
    """Debug representation: ``ClassName(attr=value, ...)``."""
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    """Equal when same class with identical attribute dicts."""
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class listNoteVersions_args(object):
  """Thrift argument struct for the ``listNoteVersions`` RPC.

  Attributes:
   - authenticationToken
   - noteGuid
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'noteGuid', None, None, ), # 2
  )
  def __init__(self, authenticationToken=None, noteGuid=None,):
    self.authenticationToken = authenticationToken
    self.noteGuid = noteGuid
  def read(self, iprot):
    """Populate this struct from *iprot*; unrecognized fields are skipped."""
    # C-accelerated decode path, when protocol/transport/spec permit.
    fast_ok = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
               and isinstance(iprot.trans, TTransport.CReadableTransport)
               and self.thrift_spec is not None
               and fastbinary is not None)
    if fast_ok:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      _fname, _ftype, _fid = iprot.readFieldBegin()
      if _ftype == TType.STOP:
        break
      if _fid == 1 and _ftype == TType.STRING:
        self.authenticationToken = iprot.readString().decode('utf-8')
      elif _fid == 2 and _ftype == TType.STRING:
        self.noteGuid = iprot.readString().decode('utf-8')
      else:
        iprot.skip(_ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; only set (non-None) fields are written."""
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('listNoteVersions_args')
    # (field name, wire type, field id, value) — order matches thrift_spec.
    for name, wire_type, field_id, value in (
        ('authenticationToken', TType.STRING, 1, self.authenticationToken),
        ('noteGuid', TType.STRING, 2, self.noteGuid),
    ):
      if value is not None:
        oprot.writeFieldBegin(name, wire_type, field_id)
        oprot.writeString(value.encode('utf-8'))
        oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Generated structs carry no validation rules."""
    return
  def __repr__(self):
    parts = ('%s=%r' % kv for kv in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(parts))
  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class listNoteVersions_result(object):
  """
  Result struct for the ``listNoteVersions`` RPC: a list of ``NoteVersionId``
  structs on success, or one of the EDAM exception fields.

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """
  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(NoteVersionId, NoteVersionId.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    """Deserialize this struct from *iprot*; unrecognized fields are skipped."""
    # Fast path: C-accelerated decode when protocol/transport/spec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          # Decode a length-prefixed list of NoteVersionId structs.
          # (_etype238/_size235/_i239/_elem240 are generator-numbered temporaries.)
          self.success = []
          (_etype238, _size235) = iprot.readListBegin()
          for _i239 in range(_size235):
            _elem240 = NoteVersionId()
            _elem240.read(iprot)
            self.success.append(_elem240)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; only non-None fields are written."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('listNoteVersions_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for iter241 in self.success:
        iter241.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Generated structs carry no validation rules."""
    return
  def __repr__(self):
    """Debug representation: ``ClassName(attr=value, ...)``."""
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    """Equal when same class with identical attribute dicts."""
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getNoteVersion_args(object):
  """
  Argument struct for the ``getNoteVersion`` RPC; the ``withResources*``
  flags select which resource payloads the server should include.

  Attributes:
   - authenticationToken
   - noteGuid
   - updateSequenceNum
   - withResourcesData
   - withResourcesRecognition
   - withResourcesAlternateData
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'noteGuid', None, None, ), # 2
    (3, TType.I32, 'updateSequenceNum', None, None, ), # 3
    (4, TType.BOOL, 'withResourcesData', None, None, ), # 4
    (5, TType.BOOL, 'withResourcesRecognition', None, None, ), # 5
    (6, TType.BOOL, 'withResourcesAlternateData', None, None, ), # 6
  )
  def __init__(self, authenticationToken=None, noteGuid=None, updateSequenceNum=None, withResourcesData=None, withResourcesRecognition=None, withResourcesAlternateData=None,):
    self.authenticationToken = authenticationToken
    self.noteGuid = noteGuid
    self.updateSequenceNum = updateSequenceNum
    self.withResourcesData = withResourcesData
    self.withResourcesRecognition = withResourcesRecognition
    self.withResourcesAlternateData = withResourcesAlternateData
  def read(self, iprot):
    """Deserialize this struct from *iprot*; unrecognized fields are skipped."""
    # Fast path: C-accelerated decode when protocol/transport/spec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.noteGuid = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I32:
          self.updateSequenceNum = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.BOOL:
          self.withResourcesData = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.BOOL:
          self.withResourcesRecognition = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 6:
        if ftype == TType.BOOL:
          self.withResourcesAlternateData = iprot.readBool();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; only non-None fields are written."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getNoteVersion_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.noteGuid is not None:
      oprot.writeFieldBegin('noteGuid', TType.STRING, 2)
      oprot.writeString(self.noteGuid.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.updateSequenceNum is not None:
      oprot.writeFieldBegin('updateSequenceNum', TType.I32, 3)
      oprot.writeI32(self.updateSequenceNum)
      oprot.writeFieldEnd()
    if self.withResourcesData is not None:
      oprot.writeFieldBegin('withResourcesData', TType.BOOL, 4)
      oprot.writeBool(self.withResourcesData)
      oprot.writeFieldEnd()
    if self.withResourcesRecognition is not None:
      oprot.writeFieldBegin('withResourcesRecognition', TType.BOOL, 5)
      oprot.writeBool(self.withResourcesRecognition)
      oprot.writeFieldEnd()
    if self.withResourcesAlternateData is not None:
      oprot.writeFieldBegin('withResourcesAlternateData', TType.BOOL, 6)
      oprot.writeBool(self.withResourcesAlternateData)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Generated structs carry no validation rules."""
    return
  def __repr__(self):
    """Debug representation: ``ClassName(attr=value, ...)``."""
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    """Equal when same class with identical attribute dicts."""
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getNoteVersion_result(object):
  """
  Result struct for the ``getNoteVersion`` RPC: a ``Note`` on success, or
  one of the EDAM exception fields; fields left ``None`` were absent.

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """
  thrift_spec = (
    (0, TType.STRUCT, 'success', (evernote.edam.type.ttypes.Note, evernote.edam.type.ttypes.Note.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    """Deserialize this struct from *iprot*; unrecognized fields are skipped."""
    # Fast path: C-accelerated decode when protocol/transport/spec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = evernote.edam.type.ttypes.Note()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; only non-None fields are written."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getNoteVersion_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Generated structs carry no validation rules."""
    return
  def __repr__(self):
    """Debug representation: ``ClassName(attr=value, ...)``."""
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    """Equal when same class with identical attribute dicts."""
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getResource_args(object):
  """
  Argument struct for the ``getResource`` RPC; the ``with*`` flags select
  which parts of the resource the server should include.

  Attributes:
   - authenticationToken
   - guid
   - withData
   - withRecognition
   - withAttributes
   - withAlternateData
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
    (3, TType.BOOL, 'withData', None, None, ), # 3
    (4, TType.BOOL, 'withRecognition', None, None, ), # 4
    (5, TType.BOOL, 'withAttributes', None, None, ), # 5
    (6, TType.BOOL, 'withAlternateData', None, None, ), # 6
  )
  def __init__(self, authenticationToken=None, guid=None, withData=None, withRecognition=None, withAttributes=None, withAlternateData=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid
    self.withData = withData
    self.withRecognition = withRecognition
    self.withAttributes = withAttributes
    self.withAlternateData = withAlternateData
  def read(self, iprot):
    """Deserialize this struct from *iprot*; unrecognized fields are skipped."""
    # Fast path: C-accelerated decode when protocol/transport/spec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.guid = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.BOOL:
          self.withData = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.BOOL:
          self.withRecognition = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.BOOL:
          self.withAttributes = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 6:
        if ftype == TType.BOOL:
          self.withAlternateData = iprot.readBool();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; only non-None fields are written."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getResource_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 2)
      oprot.writeString(self.guid.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.withData is not None:
      oprot.writeFieldBegin('withData', TType.BOOL, 3)
      oprot.writeBool(self.withData)
      oprot.writeFieldEnd()
    if self.withRecognition is not None:
      oprot.writeFieldBegin('withRecognition', TType.BOOL, 4)
      oprot.writeBool(self.withRecognition)
      oprot.writeFieldEnd()
    if self.withAttributes is not None:
      oprot.writeFieldBegin('withAttributes', TType.BOOL, 5)
      oprot.writeBool(self.withAttributes)
      oprot.writeFieldEnd()
    if self.withAlternateData is not None:
      oprot.writeFieldBegin('withAlternateData', TType.BOOL, 6)
      oprot.writeBool(self.withAlternateData)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Generated structs carry no validation rules."""
    return
  def __repr__(self):
    """Debug representation: ``ClassName(attr=value, ...)``."""
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    """Equal when same class with identical attribute dicts."""
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getResource_result(object):
  """
  Result struct for the ``getResource`` RPC: a ``Resource`` on success, or
  one of the EDAM exception fields; fields left ``None`` were absent.

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """
  thrift_spec = (
    (0, TType.STRUCT, 'success', (evernote.edam.type.ttypes.Resource, evernote.edam.type.ttypes.Resource.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    """Deserialize this struct from *iprot*; unrecognized fields are skipped."""
    # Fast path: C-accelerated decode when protocol/transport/spec allow it.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = evernote.edam.type.ttypes.Resource()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; only non-None fields are written."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getResource_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Generated structs carry no validation rules."""
    return
  def __repr__(self):
    """Debug representation: ``ClassName(attr=value, ...)``."""
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    """Equal when same class with identical attribute dicts."""
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getResourceApplicationData_args(object):
  """Thrift argument struct for the ``getResourceApplicationData`` RPC.

  Attributes:
   - authenticationToken
   - guid
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
  )
  def __init__(self, authenticationToken=None, guid=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid
  def read(self, iprot):
    """Populate this struct from *iprot*; unrecognized fields are skipped."""
    # C-accelerated decode path, when protocol/transport/spec permit.
    fast_ok = (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
               and isinstance(iprot.trans, TTransport.CReadableTransport)
               and self.thrift_spec is not None
               and fastbinary is not None)
    if fast_ok:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      _fname, _ftype, _fid = iprot.readFieldBegin()
      if _ftype == TType.STOP:
        break
      if _fid == 1 and _ftype == TType.STRING:
        self.authenticationToken = iprot.readString().decode('utf-8')
      elif _fid == 2 and _ftype == TType.STRING:
        self.guid = iprot.readString().decode('utf-8')
      else:
        iprot.skip(_ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; only set (non-None) fields are written."""
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getResourceApplicationData_args')
    # (field name, wire type, field id, value) — order matches thrift_spec.
    for name, wire_type, field_id, value in (
        ('authenticationToken', TType.STRING, 1, self.authenticationToken),
        ('guid', TType.STRING, 2, self.guid),
    ):
      if value is not None:
        oprot.writeFieldBegin(name, wire_type, field_id)
        oprot.writeString(value.encode('utf-8'))
        oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Generated structs carry no validation rules."""
    return
  def __repr__(self):
    parts = ('%s=%r' % kv for kv in self.__dict__.items())
    return '%s(%s)' % (self.__class__.__name__, ', '.join(parts))
  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getResourceApplicationData_result(object):
  """
  Generated Thrift result struct for NoteStore.getResourceApplicationData.
  Holds either the return value (field 0) or one of the declared EDAM
  exceptions (fields 1-3).

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """
  # Thrift field metadata: (field id, wire type, name, type args, default), indexed by id.
  thrift_spec = (
    (0, TType.STRUCT, 'success', (evernote.edam.type.ttypes.LazyMap, evernote.edam.type.ttypes.LazyMap.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    # Fast path: decode the whole struct in C when the accelerated binary
    # protocol and the fastbinary extension are available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped for
    # forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = evernote.edam.type.ttypes.LazyMap()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path mirror of read(): C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: only fields that are set (not None) are written.
    oprot.writeStructBegin('getResourceApplicationData_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated structs perform no client-side validation.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getResourceApplicationDataEntry_args(object):
  """
  Generated Thrift argument struct for the NoteStore.getResourceApplicationDataEntry RPC.

  Attributes:
   - authenticationToken
   - guid
   - key
  """
  # Thrift field metadata: (field id, wire type, name, type args, default), indexed by id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
    (3, TType.STRING, 'key', None, None, ), # 3
  )
  def __init__(self, authenticationToken=None, guid=None, key=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid
    self.key = key
  def read(self, iprot):
    # Fast path: decode the whole struct in C when the accelerated binary
    # protocol and the fastbinary extension are available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped for
    # forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.guid = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.key = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path mirror of read(): C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: only fields that are set (not None) are written.
    oprot.writeStructBegin('getResourceApplicationDataEntry_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 2)
      oprot.writeString(self.guid.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.key is not None:
      oprot.writeFieldBegin('key', TType.STRING, 3)
      oprot.writeString(self.key.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated structs perform no client-side validation.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getResourceApplicationDataEntry_result(object):
  """
  Generated Thrift result struct for NoteStore.getResourceApplicationDataEntry.
  Holds either the string return value (field 0) or one of the declared EDAM
  exceptions (fields 1-3).

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """
  # Thrift field metadata: (field id, wire type, name, type args, default), indexed by id.
  thrift_spec = (
    (0, TType.STRING, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    # Fast path: decode the whole struct in C when the accelerated binary
    # protocol and the fastbinary extension are available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped for
    # forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRING:
          self.success = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path mirror of read(): C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: only fields that are set (not None) are written.
    oprot.writeStructBegin('getResourceApplicationDataEntry_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRING, 0)
      oprot.writeString(self.success.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated structs perform no client-side validation.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class setResourceApplicationDataEntry_args(object):
  """
  Generated Thrift argument struct for the NoteStore.setResourceApplicationDataEntry RPC.

  Attributes:
   - authenticationToken
   - guid
   - key
   - value
  """
  # Thrift field metadata: (field id, wire type, name, type args, default), indexed by id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
    (3, TType.STRING, 'key', None, None, ), # 3
    (4, TType.STRING, 'value', None, None, ), # 4
  )
  def __init__(self, authenticationToken=None, guid=None, key=None, value=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid
    self.key = key
    self.value = value
  def read(self, iprot):
    # Fast path: decode the whole struct in C when the accelerated binary
    # protocol and the fastbinary extension are available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped for
    # forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.guid = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.key = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRING:
          self.value = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path mirror of read(): C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: only fields that are set (not None) are written.
    oprot.writeStructBegin('setResourceApplicationDataEntry_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 2)
      oprot.writeString(self.guid.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.key is not None:
      oprot.writeFieldBegin('key', TType.STRING, 3)
      oprot.writeString(self.key.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.value is not None:
      oprot.writeFieldBegin('value', TType.STRING, 4)
      oprot.writeString(self.value.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated structs perform no client-side validation.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class setResourceApplicationDataEntry_result(object):
  """
  Generated Thrift result struct for NoteStore.setResourceApplicationDataEntry.
  Holds either the i32 return value (field 0) or one of the declared EDAM
  exceptions (fields 1-3).

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """
  # Thrift field metadata: (field id, wire type, name, type args, default), indexed by id.
  thrift_spec = (
    (0, TType.I32, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    # Fast path: decode the whole struct in C when the accelerated binary
    # protocol and the fastbinary extension are available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped for
    # forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.I32:
          self.success = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path mirror of read(): C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: only fields that are set (not None) are written.
    oprot.writeStructBegin('setResourceApplicationDataEntry_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I32, 0)
      oprot.writeI32(self.success)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated structs perform no client-side validation.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class unsetResourceApplicationDataEntry_args(object):
  """
  Generated Thrift argument struct for the NoteStore.unsetResourceApplicationDataEntry RPC.

  Attributes:
   - authenticationToken
   - guid
   - key
  """
  # Thrift field metadata: (field id, wire type, name, type args, default), indexed by id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
    (3, TType.STRING, 'key', None, None, ), # 3
  )
  def __init__(self, authenticationToken=None, guid=None, key=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid
    self.key = key
  def read(self, iprot):
    # Fast path: decode the whole struct in C when the accelerated binary
    # protocol and the fastbinary extension are available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped for
    # forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.guid = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.key = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path mirror of read(): C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: only fields that are set (not None) are written.
    oprot.writeStructBegin('unsetResourceApplicationDataEntry_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 2)
      oprot.writeString(self.guid.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.key is not None:
      oprot.writeFieldBegin('key', TType.STRING, 3)
      oprot.writeString(self.key.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated structs perform no client-side validation.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class unsetResourceApplicationDataEntry_result(object):
  """
  Generated Thrift result struct for NoteStore.unsetResourceApplicationDataEntry.
  Holds either the i32 return value (field 0) or one of the declared EDAM
  exceptions (fields 1-3).

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """
  # Thrift field metadata: (field id, wire type, name, type args, default), indexed by id.
  thrift_spec = (
    (0, TType.I32, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    # Fast path: decode the whole struct in C when the accelerated binary
    # protocol and the fastbinary extension are available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped for
    # forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.I32:
          self.success = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path mirror of read(): C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: only fields that are set (not None) are written.
    oprot.writeStructBegin('unsetResourceApplicationDataEntry_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I32, 0)
      oprot.writeI32(self.success)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated structs perform no client-side validation.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class updateResource_args(object):
  """
  Generated Thrift argument struct for the NoteStore.updateResource RPC.

  Attributes:
   - authenticationToken
   - resource
  """
  # Thrift field metadata: (field id, wire type, name, type args, default), indexed by id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRUCT, 'resource', (evernote.edam.type.ttypes.Resource, evernote.edam.type.ttypes.Resource.thrift_spec), None, ), # 2
  )
  def __init__(self, authenticationToken=None, resource=None,):
    self.authenticationToken = authenticationToken
    self.resource = resource
  def read(self, iprot):
    # Fast path: decode the whole struct in C when the accelerated binary
    # protocol and the fastbinary extension are available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped for
    # forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.resource = evernote.edam.type.ttypes.Resource()
          self.resource.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path mirror of read(): C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: only fields that are set (not None) are written.
    oprot.writeStructBegin('updateResource_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.resource is not None:
      oprot.writeFieldBegin('resource', TType.STRUCT, 2)
      self.resource.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated structs perform no client-side validation.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class updateResource_result(object):
  """
  Generated Thrift result struct for NoteStore.updateResource.
  Holds either the i32 return value (field 0) or one of the declared EDAM
  exceptions (fields 1-3).

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """
  # Thrift field metadata: (field id, wire type, name, type args, default), indexed by id.
  thrift_spec = (
    (0, TType.I32, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    # Fast path: decode the whole struct in C when the accelerated binary
    # protocol and the fastbinary extension are available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped for
    # forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.I32:
          self.success = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path mirror of read(): C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: only fields that are set (not None) are written.
    oprot.writeStructBegin('updateResource_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I32, 0)
      oprot.writeI32(self.success)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated structs perform no client-side validation.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getResourceData_args(object):
  """
  Generated Thrift argument struct for the NoteStore.getResourceData RPC.

  Attributes:
   - authenticationToken
   - guid
  """
  # Thrift field metadata: (field id, wire type, name, type args, default), indexed by id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
  )
  def __init__(self, authenticationToken=None, guid=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid
  def read(self, iprot):
    # Fast path: decode the whole struct in C when the accelerated binary
    # protocol and the fastbinary extension are available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped for
    # forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.guid = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path mirror of read(): C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: only fields that are set (not None) are written.
    oprot.writeStructBegin('getResourceData_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 2)
      oprot.writeString(self.guid.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated structs perform no client-side validation.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getResourceData_result(object):
  """
  Generated Thrift result struct for NoteStore.getResourceData.
  Holds either the binary return value (field 0) or one of the declared EDAM
  exceptions (fields 1-3).

  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """
  # Thrift field metadata: (field id, wire type, name, type args, default), indexed by id.
  thrift_spec = (
    (0, TType.STRING, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    # Fast path: decode the whole struct in C when the accelerated binary
    # protocol and the fastbinary extension are available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped for
    # forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRING:
          # success carries raw resource bytes: unlike the text fields in
          # sibling structs, it is deliberately not utf-8 decoded here.
          self.success = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fast path mirror of read(): C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: only fields that are set (not None) are written.
    oprot.writeStructBegin('getResourceData_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRING, 0)
      # Raw bytes: written without utf-8 encoding, mirroring read().
      oprot.writeString(self.success)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated structs perform no client-side validation.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getResourceByHash_args(object):
  """
  Thrift-generated argument wrapper for the NoteStore.getResourceByHash RPC.

  Attributes:
   - authenticationToken: session token string (decoded from UTF-8 on read)
   - noteGuid: GUID of the note that owns the resource
   - contentHash: raw binary hash of the resource body (NOT UTF-8 decoded)
   - withData: whether to include the resource body in the response
   - withRecognition: whether to include recognition data
   - withAlternateData: whether to include alternate data
  """
  # Per-field wire metadata, indexed by field id:
  # (field id, wire type, field name, nested type spec, default value).
  # Consumed by the C-accelerated fastbinary codec.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'noteGuid', None, None, ), # 2
    (3, TType.STRING, 'contentHash', None, None, ), # 3
    (4, TType.BOOL, 'withData', None, None, ), # 4
    (5, TType.BOOL, 'withRecognition', None, None, ), # 5
    (6, TType.BOOL, 'withAlternateData', None, None, ), # 6
  )
  def __init__(self, authenticationToken=None, noteGuid=None, contentHash=None, withData=None, withRecognition=None, withAlternateData=None,):
    self.authenticationToken = authenticationToken
    self.noteGuid = noteGuid
    self.contentHash = contentHash
    self.withData = withData
    self.withRecognition = withRecognition
    self.withAlternateData = withAlternateData
  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: delegate to the C-accelerated binary codec when the
    # accelerated protocol/transport and fastbinary module are available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: generic field-by-field decode; fields with unknown ids or
    # unexpected wire types are skipped for forward compatibility.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.noteGuid = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.contentHash = iprot.readString();  # binary payload: left as raw bytes
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.BOOL:
          self.withData = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.BOOL:
          self.withRecognition = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 6:
        if ftype == TType.BOOL:
          self.withAlternateData = iprot.readBool();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None-valued fields are omitted."""
    # Fast path: C-accelerated binary encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    # Slow path: emit each non-None field in field-id order.
    oprot.writeStructBegin('getResourceByHash_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.noteGuid is not None:
      oprot.writeFieldBegin('noteGuid', TType.STRING, 2)
      oprot.writeString(self.noteGuid.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.contentHash is not None:
      oprot.writeFieldBegin('contentHash', TType.STRING, 3)
      oprot.writeString(self.contentHash)  # written as raw bytes, no encoding
      oprot.writeFieldEnd()
    if self.withData is not None:
      oprot.writeFieldBegin('withData', TType.BOOL, 4)
      oprot.writeBool(self.withData)
      oprot.writeFieldEnd()
    if self.withRecognition is not None:
      oprot.writeFieldBegin('withRecognition', TType.BOOL, 5)
      oprot.writeBool(self.withRecognition)
      oprot.writeFieldEnd()
    if self.withAlternateData is not None:
      oprot.writeFieldBegin('withAlternateData', TType.BOOL, 6)
      oprot.writeBool(self.withAlternateData)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated code: no required-field validation for this struct.
    return
  def __repr__(self):
    # Debug representation listing every instance attribute.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    # Value equality: same class and identical attribute dicts.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getResourceByHash_result(object):
  """
  Thrift-generated result wrapper for NoteStore.getResourceByHash.

  Exactly one field is expected to be set by the server: the Resource on
  success, or one of the EDAM exception structs on failure.

  Attributes:
   - success: evernote.edam.type.ttypes.Resource on success
   - userException
   - systemException
   - notFoundException
  """
  # Per-field wire metadata, indexed by field id (field 0 is the return value).
  thrift_spec = (
    (0, TType.STRUCT, 'success', (evernote.edam.type.ttypes.Resource, evernote.edam.type.ttypes.Resource.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = evernote.edam.type.ttypes.Resource()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None-valued fields are omitted."""
    # Fast path: C-accelerated encode when available.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getResourceByHash_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated code: no required-field validation for this struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getResourceRecognition_args(object):
  """
  Thrift-generated argument wrapper for NoteStore.getResourceRecognition.

  Attributes:
   - authenticationToken: session token string (UTF-8 on the wire)
   - guid: GUID string identifying the resource
  """
  # Per-field wire metadata, indexed by field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
  )
  def __init__(self, authenticationToken=None, guid=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid
  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.guid = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None-valued fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getResourceRecognition_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 2)
      oprot.writeString(self.guid.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated code: no required-field validation for this struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getResourceRecognition_result(object):
  """
  Thrift-generated result wrapper for NoteStore.getResourceRecognition.

  Attributes:
   - success: binary recognition payload (raw bytes, NOT UTF-8 decoded)
   - userException
   - systemException
   - notFoundException
  """
  # Per-field wire metadata, indexed by field id (field 0 is the return value).
  thrift_spec = (
    (0, TType.STRING, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRING:
          self.success = iprot.readString();  # binary payload: left as raw bytes
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None-valued fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getResourceRecognition_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRING, 0)
      oprot.writeString(self.success)  # written as raw bytes, no encoding
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated code: no required-field validation for this struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getResourceAlternateData_args(object):
  """
  Thrift-generated argument wrapper for NoteStore.getResourceAlternateData.

  Attributes:
   - authenticationToken: session token string (UTF-8 on the wire)
   - guid: GUID string identifying the resource
  """
  # Per-field wire metadata, indexed by field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
  )
  def __init__(self, authenticationToken=None, guid=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid
  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.guid = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None-valued fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getResourceAlternateData_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 2)
      oprot.writeString(self.guid.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated code: no required-field validation for this struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getResourceAlternateData_result(object):
  """
  Thrift-generated result wrapper for NoteStore.getResourceAlternateData.

  Attributes:
   - success: binary alternate-data payload (raw bytes, NOT UTF-8 decoded)
   - userException
   - systemException
   - notFoundException
  """
  # Per-field wire metadata, indexed by field id (field 0 is the return value).
  thrift_spec = (
    (0, TType.STRING, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRING:
          self.success = iprot.readString();  # binary payload: left as raw bytes
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None-valued fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getResourceAlternateData_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRING, 0)
      oprot.writeString(self.success)  # written as raw bytes, no encoding
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated code: no required-field validation for this struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getResourceAttributes_args(object):
  """
  Thrift-generated argument wrapper for NoteStore.getResourceAttributes.

  Attributes:
   - authenticationToken: session token string (UTF-8 on the wire)
   - guid: GUID string identifying the resource
  """
  # Per-field wire metadata, indexed by field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
  )
  def __init__(self, authenticationToken=None, guid=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid
  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.guid = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None-valued fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getResourceAttributes_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 2)
      oprot.writeString(self.guid.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated code: no required-field validation for this struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getResourceAttributes_result(object):
  """
  Thrift-generated result wrapper for NoteStore.getResourceAttributes.

  Attributes:
   - success: evernote.edam.type.ttypes.ResourceAttributes on success
   - userException
   - systemException
   - notFoundException
  """
  # Per-field wire metadata, indexed by field id (field 0 is the return value).
  thrift_spec = (
    (0, TType.STRUCT, 'success', (evernote.edam.type.ttypes.ResourceAttributes, evernote.edam.type.ttypes.ResourceAttributes.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = evernote.edam.type.ttypes.ResourceAttributes()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None-valued fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getResourceAttributes_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated code: no required-field validation for this struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getPublicNotebook_args(object):
  """
  Thrift-generated argument wrapper for NoteStore.getPublicNotebook.

  Note: this call takes no authenticationToken — it addresses a publicly
  shared notebook by owner id and public URI.

  Attributes:
   - userId: i32 id of the notebook owner
   - publicUri: public URI string of the notebook (UTF-8 on the wire)
  """
  # Per-field wire metadata, indexed by field id.
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'userId', None, None, ), # 1
    (2, TType.STRING, 'publicUri', None, None, ), # 2
  )
  def __init__(self, userId=None, publicUri=None,):
    self.userId = userId
    self.publicUri = publicUri
  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.userId = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.publicUri = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None-valued fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getPublicNotebook_args')
    if self.userId is not None:
      oprot.writeFieldBegin('userId', TType.I32, 1)
      oprot.writeI32(self.userId)
      oprot.writeFieldEnd()
    if self.publicUri is not None:
      oprot.writeFieldBegin('publicUri', TType.STRING, 2)
      oprot.writeString(self.publicUri.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated code: no required-field validation for this struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class getPublicNotebook_result(object):
  """
  Thrift-generated result wrapper for NoteStore.getPublicNotebook.

  Unlike most NoteStore results, this one declares no userException field —
  only systemException (fid 1) and notFoundException (fid 2).

  Attributes:
   - success: evernote.edam.type.ttypes.Notebook on success
   - systemException
   - notFoundException
  """
  # Per-field wire metadata, indexed by field id (field 0 is the return value).
  thrift_spec = (
    (0, TType.STRUCT, 'success', (evernote.edam.type.ttypes.Notebook, evernote.edam.type.ttypes.Notebook.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 2
  )
  def __init__(self, success=None, systemException=None, notFoundException=None,):
    self.success = success
    self.systemException = systemException
    self.notFoundException = notFoundException
  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = evernote.edam.type.ttypes.Notebook()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None-valued fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('getPublicNotebook_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 1)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 2)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated code: no required-field validation for this struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class createSharedNotebook_args(object):
  """
  Thrift-generated argument wrapper for NoteStore.createSharedNotebook.

  Attributes:
   - authenticationToken: session token string (UTF-8 on the wire)
   - sharedNotebook: evernote.edam.type.ttypes.SharedNotebook to create
  """
  # Per-field wire metadata, indexed by field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRUCT, 'sharedNotebook', (evernote.edam.type.ttypes.SharedNotebook, evernote.edam.type.ttypes.SharedNotebook.thrift_spec), None, ), # 2
  )
  def __init__(self, authenticationToken=None, sharedNotebook=None,):
    self.authenticationToken = authenticationToken
    self.sharedNotebook = sharedNotebook
  def read(self, iprot):
    """Deserialize this struct from *iprot*."""
    # Fast path: C-accelerated decode when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Slow path: field-by-field decode; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.sharedNotebook = evernote.edam.type.ttypes.SharedNotebook()
          self.sharedNotebook.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None-valued fields are omitted."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('createSharedNotebook_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.sharedNotebook is not None:
      oprot.writeFieldBegin('sharedNotebook', TType.STRUCT, 2)
      self.sharedNotebook.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    # Generated code: no required-field validation for this struct.
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class createSharedNotebook_result(object):
"""
Attributes:
- success
- userException
- notFoundException
- systemException
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (evernote.edam.type.ttypes.SharedNotebook, evernote.edam.type.ttypes.SharedNotebook.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 2
(3, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 3
)
def __init__(self, success=None, userException=None, notFoundException=None, systemException=None,):
self.success = success
self.userException = userException
self.notFoundException = notFoundException
self.systemException = systemException
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = evernote.edam.type.ttypes.SharedNotebook()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.userException = evernote.edam.error.ttypes.EDAMUserException()
self.userException.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
self.notFoundException.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
self.systemException.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this result struct to the given output protocol."""
    # Fast path mirrors read(); otherwise emit fields in field-id order,
    # omitting None-valued (unset) fields.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('createSharedNotebook_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 2)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 3)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class updateSharedNotebook_args(object):
  """
  Thrift argument struct for NoteStore.updateSharedNotebook.

  Attributes:
   - authenticationToken: caller's auth token (string, field 1)
   - sharedNotebook: SharedNotebook record to update (struct, field 2)
  """
  # Positional field spec consumed by the fastbinary C extension:
  # (field id, wire type, name, nested type info, default value).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRUCT, 'sharedNotebook', (evernote.edam.type.ttypes.SharedNotebook, evernote.edam.type.ttypes.SharedNotebook.thrift_spec), None, ), # 2
  )
  def __init__(self, authenticationToken=None, sharedNotebook=None,):
    self.authenticationToken = authenticationToken
    self.sharedNotebook = sharedNotebook
  def read(self, iprot):
    # Fast path: C-accelerated decode when available; slow path skips unknown
    # fields for forward compatibility.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.sharedNotebook = evernote.edam.type.ttypes.SharedNotebook()
          self.sharedNotebook.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fields are written in field-id order; None-valued (unset) fields are omitted.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('updateSharedNotebook_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.sharedNotebook is not None:
      oprot.writeFieldBegin('sharedNotebook', TType.STRUCT, 2)
      self.sharedNotebook.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class updateSharedNotebook_result(object):
  """
  Thrift result struct for NoteStore.updateSharedNotebook: carries either the
  I32 success value or one of the declared exceptions.

  Attributes:
   - success
   - userException
   - notFoundException
   - systemException
  """
  # Positional field spec consumed by the fastbinary C extension:
  # (field id, wire type, name, nested type info, default value).
  thrift_spec = (
    (0, TType.I32, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, notFoundException=None, systemException=None,):
    self.success = success
    self.userException = userException
    self.notFoundException = notFoundException
    self.systemException = systemException
  def read(self, iprot):
    # Fast path: C-accelerated decode when available; slow path skips unknown
    # fields for forward compatibility.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.I32:
          self.success = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fields are written in field-id order; None-valued (unset) fields are omitted.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('updateSharedNotebook_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I32, 0)
      oprot.writeI32(self.success)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 2)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 3)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class setSharedNotebookRecipientSettings_args(object):
  """
  Thrift argument struct for NoteStore.setSharedNotebookRecipientSettings.

  Attributes:
   - authenticationToken: caller's auth token (string, field 1)
   - sharedNotebookId: id of the shared notebook to modify (i64, field 2)
   - recipientSettings: settings to apply (struct, field 3)
  """
  # Positional field spec consumed by the fastbinary C extension:
  # (field id, wire type, name, nested type info, default value).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.I64, 'sharedNotebookId', None, None, ), # 2
    (3, TType.STRUCT, 'recipientSettings', (evernote.edam.type.ttypes.SharedNotebookRecipientSettings, evernote.edam.type.ttypes.SharedNotebookRecipientSettings.thrift_spec), None, ), # 3
  )
  def __init__(self, authenticationToken=None, sharedNotebookId=None, recipientSettings=None,):
    self.authenticationToken = authenticationToken
    self.sharedNotebookId = sharedNotebookId
    self.recipientSettings = recipientSettings
  def read(self, iprot):
    # Fast path: C-accelerated decode when available; slow path skips unknown
    # fields for forward compatibility.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.I64:
          self.sharedNotebookId = iprot.readI64();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.recipientSettings = evernote.edam.type.ttypes.SharedNotebookRecipientSettings()
          self.recipientSettings.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fields are written in field-id order; None-valued (unset) fields are omitted.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('setSharedNotebookRecipientSettings_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.sharedNotebookId is not None:
      oprot.writeFieldBegin('sharedNotebookId', TType.I64, 2)
      oprot.writeI64(self.sharedNotebookId)
      oprot.writeFieldEnd()
    if self.recipientSettings is not None:
      oprot.writeFieldBegin('recipientSettings', TType.STRUCT, 3)
      self.recipientSettings.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class setSharedNotebookRecipientSettings_result(object):
  """
  Thrift result struct for NoteStore.setSharedNotebookRecipientSettings:
  carries either the I32 success value or one of the declared exceptions.

  Attributes:
   - success
   - userException
   - notFoundException
   - systemException
  """
  # Positional field spec consumed by the fastbinary C extension:
  # (field id, wire type, name, nested type info, default value).
  thrift_spec = (
    (0, TType.I32, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, notFoundException=None, systemException=None,):
    self.success = success
    self.userException = userException
    self.notFoundException = notFoundException
    self.systemException = systemException
  def read(self, iprot):
    # Fast path: C-accelerated decode when available; slow path skips unknown
    # fields for forward compatibility.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.I32:
          self.success = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fields are written in field-id order; None-valued (unset) fields are omitted.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('setSharedNotebookRecipientSettings_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I32, 0)
      oprot.writeI32(self.success)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 2)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 3)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class sendMessageToSharedNotebookMembers_args(object):
  """
  Thrift argument struct for NoteStore.sendMessageToSharedNotebookMembers.

  Attributes:
   - authenticationToken: caller's auth token (string, field 1)
   - notebookGuid: GUID of the shared notebook (string, field 2)
   - messageText: message body to send (string, field 3)
   - recipients: list of recipient identifiers (list<string>, field 4)
  """
  # Positional field spec consumed by the fastbinary C extension:
  # (field id, wire type, name, nested type info, default value).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'notebookGuid', None, None, ), # 2
    (3, TType.STRING, 'messageText', None, None, ), # 3
    (4, TType.LIST, 'recipients', (TType.STRING,None), None, ), # 4
  )
  def __init__(self, authenticationToken=None, notebookGuid=None, messageText=None, recipients=None,):
    self.authenticationToken = authenticationToken
    self.notebookGuid = notebookGuid
    self.messageText = messageText
    self.recipients = recipients
  def read(self, iprot):
    # Fast path: C-accelerated decode when available; slow path skips unknown
    # fields for forward compatibility.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.notebookGuid = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.messageText = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.LIST:
          self.recipients = []
          (_etype245, _size242) = iprot.readListBegin()
          for _i246 in range(_size242):
            _elem247 = iprot.readString().decode('utf-8')
            self.recipients.append(_elem247)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fields are written in field-id order; None-valued (unset) fields are omitted.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('sendMessageToSharedNotebookMembers_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.notebookGuid is not None:
      oprot.writeFieldBegin('notebookGuid', TType.STRING, 2)
      oprot.writeString(self.notebookGuid.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.messageText is not None:
      oprot.writeFieldBegin('messageText', TType.STRING, 3)
      oprot.writeString(self.messageText.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.recipients is not None:
      oprot.writeFieldBegin('recipients', TType.LIST, 4)
      oprot.writeListBegin(TType.STRING, len(self.recipients))
      for iter248 in self.recipients:
        oprot.writeString(iter248.encode('utf-8'))
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class sendMessageToSharedNotebookMembers_result(object):
  """
  Thrift result struct for NoteStore.sendMessageToSharedNotebookMembers:
  carries either the I32 success value or one of the declared exceptions.

  Attributes:
   - success
   - userException
   - notFoundException
   - systemException
  """
  # Positional field spec consumed by the fastbinary C extension:
  # (field id, wire type, name, nested type info, default value).
  thrift_spec = (
    (0, TType.I32, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, notFoundException=None, systemException=None,):
    self.success = success
    self.userException = userException
    self.notFoundException = notFoundException
    self.systemException = systemException
  def read(self, iprot):
    # Fast path: C-accelerated decode when available; slow path skips unknown
    # fields for forward compatibility.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.I32:
          self.success = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fields are written in field-id order; None-valued (unset) fields are omitted.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('sendMessageToSharedNotebookMembers_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I32, 0)
      oprot.writeI32(self.success)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 2)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 3)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class listSharedNotebooks_args(object):
  """
  Thrift argument struct for NoteStore.listSharedNotebooks.

  Attributes:
   - authenticationToken: caller's auth token (string, field 1)
  """
  # Positional field spec consumed by the fastbinary C extension:
  # (field id, wire type, name, nested type info, default value).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
  )
  def __init__(self, authenticationToken=None,):
    self.authenticationToken = authenticationToken
  def read(self, iprot):
    # Fast path: C-accelerated decode when available; slow path skips unknown
    # fields for forward compatibility.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fields are written in field-id order; None-valued (unset) fields are omitted.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('listSharedNotebooks_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class listSharedNotebooks_result(object):
  """
  Thrift result struct for NoteStore.listSharedNotebooks: carries either a
  list of SharedNotebook structs or one of the declared exceptions.

  Attributes:
   - success
   - userException
   - notFoundException
   - systemException
  """
  # Positional field spec consumed by the fastbinary C extension:
  # (field id, wire type, name, nested type info, default value).
  thrift_spec = (
    (0, TType.LIST, 'success', (TType.STRUCT,(evernote.edam.type.ttypes.SharedNotebook, evernote.edam.type.ttypes.SharedNotebook.thrift_spec)), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, notFoundException=None, systemException=None,):
    self.success = success
    self.userException = userException
    self.notFoundException = notFoundException
    self.systemException = systemException
  def read(self, iprot):
    # Fast path: C-accelerated decode when available; slow path skips unknown
    # fields for forward compatibility.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.LIST:
          self.success = []
          (_etype252, _size249) = iprot.readListBegin()
          for _i253 in range(_size249):
            _elem254 = evernote.edam.type.ttypes.SharedNotebook()
            _elem254.read(iprot)
            self.success.append(_elem254)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fields are written in field-id order; None-valued (unset) fields are omitted.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('listSharedNotebooks_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.LIST, 0)
      oprot.writeListBegin(TType.STRUCT, len(self.success))
      for iter255 in self.success:
        iter255.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 2)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 3)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class expungeSharedNotebooks_args(object):
  """
  Thrift argument struct for NoteStore.expungeSharedNotebooks.

  Attributes:
   - authenticationToken: caller's auth token (string, field 1)
   - sharedNotebookIds: ids of the shared notebooks to expunge (list<i64>, field 2)
  """
  # Positional field spec consumed by the fastbinary C extension:
  # (field id, wire type, name, nested type info, default value).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.LIST, 'sharedNotebookIds', (TType.I64,None), None, ), # 2
  )
  def __init__(self, authenticationToken=None, sharedNotebookIds=None,):
    self.authenticationToken = authenticationToken
    self.sharedNotebookIds = sharedNotebookIds
  def read(self, iprot):
    # Fast path: C-accelerated decode when available; slow path skips unknown
    # fields for forward compatibility.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.LIST:
          self.sharedNotebookIds = []
          (_etype259, _size256) = iprot.readListBegin()
          for _i260 in range(_size256):
            _elem261 = iprot.readI64();
            self.sharedNotebookIds.append(_elem261)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fields are written in field-id order; None-valued (unset) fields are omitted.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('expungeSharedNotebooks_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.sharedNotebookIds is not None:
      oprot.writeFieldBegin('sharedNotebookIds', TType.LIST, 2)
      oprot.writeListBegin(TType.I64, len(self.sharedNotebookIds))
      for iter262 in self.sharedNotebookIds:
        oprot.writeI64(iter262)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class expungeSharedNotebooks_result(object):
  """
  Thrift result struct for NoteStore.expungeSharedNotebooks: carries either
  the I32 success value or one of the declared exceptions.

  Attributes:
   - success
   - userException
   - notFoundException
   - systemException
  """
  # Positional field spec consumed by the fastbinary C extension:
  # (field id, wire type, name, nested type info, default value).
  thrift_spec = (
    (0, TType.I32, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, notFoundException=None, systemException=None,):
    self.success = success
    self.userException = userException
    self.notFoundException = notFoundException
    self.systemException = systemException
  def read(self, iprot):
    # Fast path: C-accelerated decode when available; slow path skips unknown
    # fields for forward compatibility.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.I32:
          self.success = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fields are written in field-id order; None-valued (unset) fields are omitted.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('expungeSharedNotebooks_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I32, 0)
      oprot.writeI32(self.success)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 2)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 3)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class createLinkedNotebook_args(object):
  """
  Thrift argument struct for NoteStore.createLinkedNotebook.

  Attributes:
   - authenticationToken: caller's auth token (string, field 1)
   - linkedNotebook: LinkedNotebook record to create (struct, field 2)
  """
  # Positional field spec consumed by the fastbinary C extension:
  # (field id, wire type, name, nested type info, default value).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRUCT, 'linkedNotebook', (evernote.edam.type.ttypes.LinkedNotebook, evernote.edam.type.ttypes.LinkedNotebook.thrift_spec), None, ), # 2
  )
  def __init__(self, authenticationToken=None, linkedNotebook=None,):
    self.authenticationToken = authenticationToken
    self.linkedNotebook = linkedNotebook
  def read(self, iprot):
    # Fast path: C-accelerated decode when available; slow path skips unknown
    # fields for forward compatibility.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.linkedNotebook = evernote.edam.type.ttypes.LinkedNotebook()
          self.linkedNotebook.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    # Fields are written in field-id order; None-valued (unset) fields are omitted.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('createLinkedNotebook_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.linkedNotebook is not None:
      oprot.writeFieldBegin('linkedNotebook', TType.STRUCT, 2)
      self.linkedNotebook.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class createLinkedNotebook_result(object):
"""
Attributes:
- success
- userException
- notFoundException
- systemException
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (evernote.edam.type.ttypes.LinkedNotebook, evernote.edam.type.ttypes.LinkedNotebook.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 2
(3, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 3
)
def __init__(self, success=None, userException=None, notFoundException=None, systemException=None,):
self.success = success
self.userException = userException
self.notFoundException = notFoundException
self.systemException = systemException
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = evernote.edam.type.ttypes.LinkedNotebook()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.userException = evernote.edam.error.ttypes.EDAMUserException()
self.userException.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
self.notFoundException.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
self.systemException.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('createLinkedNotebook_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.userException is not None:
oprot.writeFieldBegin('userException', TType.STRUCT, 1)
self.userException.write(oprot)
oprot.writeFieldEnd()
if self.notFoundException is not None:
oprot.writeFieldBegin('notFoundException', TType.STRUCT, 2)
self.notFoundException.write(oprot)
oprot.writeFieldEnd()
if self.systemException is not None:
oprot.writeFieldBegin('systemException', TType.STRUCT, 3)
self.systemException.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class updateLinkedNotebook_args(object):
"""
Attributes:
- authenticationToken
- linkedNotebook
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'authenticationToken', None, None, ), # 1
(2, TType.STRUCT, 'linkedNotebook', (evernote.edam.type.ttypes.LinkedNotebook, evernote.edam.type.ttypes.LinkedNotebook.thrift_spec), None, ), # 2
)
def __init__(self, authenticationToken=None, linkedNotebook=None,):
self.authenticationToken = authenticationToken
self.linkedNotebook = linkedNotebook
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.authenticationToken = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.linkedNotebook = evernote.edam.type.ttypes.LinkedNotebook()
self.linkedNotebook.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('updateLinkedNotebook_args')
if self.authenticationToken is not None:
oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
oprot.writeString(self.authenticationToken.encode('utf-8'))
oprot.writeFieldEnd()
if self.linkedNotebook is not None:
oprot.writeFieldBegin('linkedNotebook', TType.STRUCT, 2)
self.linkedNotebook.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class updateLinkedNotebook_result(object):
"""
Attributes:
- success
- userException
- notFoundException
- systemException
"""
thrift_spec = (
(0, TType.I32, 'success', None, None, ), # 0
(1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 2
(3, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 3
)
def __init__(self, success=None, userException=None, notFoundException=None, systemException=None,):
self.success = success
self.userException = userException
self.notFoundException = notFoundException
self.systemException = systemException
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.I32:
self.success = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.userException = evernote.edam.error.ttypes.EDAMUserException()
self.userException.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
self.notFoundException.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
self.systemException.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('updateLinkedNotebook_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.I32, 0)
oprot.writeI32(self.success)
oprot.writeFieldEnd()
if self.userException is not None:
oprot.writeFieldBegin('userException', TType.STRUCT, 1)
self.userException.write(oprot)
oprot.writeFieldEnd()
if self.notFoundException is not None:
oprot.writeFieldBegin('notFoundException', TType.STRUCT, 2)
self.notFoundException.write(oprot)
oprot.writeFieldEnd()
if self.systemException is not None:
oprot.writeFieldBegin('systemException', TType.STRUCT, 3)
self.systemException.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class listLinkedNotebooks_args(object):
"""
Attributes:
- authenticationToken
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'authenticationToken', None, None, ), # 1
)
def __init__(self, authenticationToken=None,):
self.authenticationToken = authenticationToken
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.authenticationToken = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('listLinkedNotebooks_args')
if self.authenticationToken is not None:
oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
oprot.writeString(self.authenticationToken.encode('utf-8'))
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class listLinkedNotebooks_result(object):
"""
Attributes:
- success
- userException
- notFoundException
- systemException
"""
thrift_spec = (
(0, TType.LIST, 'success', (TType.STRUCT,(evernote.edam.type.ttypes.LinkedNotebook, evernote.edam.type.ttypes.LinkedNotebook.thrift_spec)), None, ), # 0
(1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 2
(3, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 3
)
def __init__(self, success=None, userException=None, notFoundException=None, systemException=None,):
self.success = success
self.userException = userException
self.notFoundException = notFoundException
self.systemException = systemException
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.LIST:
self.success = []
(_etype266, _size263) = iprot.readListBegin()
for _i267 in range(_size263):
_elem268 = evernote.edam.type.ttypes.LinkedNotebook()
_elem268.read(iprot)
self.success.append(_elem268)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.userException = evernote.edam.error.ttypes.EDAMUserException()
self.userException.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
self.notFoundException.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
self.systemException.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('listLinkedNotebooks_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.STRUCT, len(self.success))
for iter269 in self.success:
iter269.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.userException is not None:
oprot.writeFieldBegin('userException', TType.STRUCT, 1)
self.userException.write(oprot)
oprot.writeFieldEnd()
if self.notFoundException is not None:
oprot.writeFieldBegin('notFoundException', TType.STRUCT, 2)
self.notFoundException.write(oprot)
oprot.writeFieldEnd()
if self.systemException is not None:
oprot.writeFieldBegin('systemException', TType.STRUCT, 3)
self.systemException.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class expungeLinkedNotebook_args(object):
"""
Attributes:
- authenticationToken
- guid
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'authenticationToken', None, None, ), # 1
(2, TType.STRING, 'guid', None, None, ), # 2
)
def __init__(self, authenticationToken=None, guid=None,):
self.authenticationToken = authenticationToken
self.guid = guid
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.authenticationToken = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.guid = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('expungeLinkedNotebook_args')
if self.authenticationToken is not None:
oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
oprot.writeString(self.authenticationToken.encode('utf-8'))
oprot.writeFieldEnd()
if self.guid is not None:
oprot.writeFieldBegin('guid', TType.STRING, 2)
oprot.writeString(self.guid.encode('utf-8'))
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class expungeLinkedNotebook_result(object):
"""
Attributes:
- success
- userException
- notFoundException
- systemException
"""
thrift_spec = (
(0, TType.I32, 'success', None, None, ), # 0
(1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 2
(3, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 3
)
def __init__(self, success=None, userException=None, notFoundException=None, systemException=None,):
self.success = success
self.userException = userException
self.notFoundException = notFoundException
self.systemException = systemException
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.I32:
self.success = iprot.readI32();
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.userException = evernote.edam.error.ttypes.EDAMUserException()
self.userException.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
self.notFoundException.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
self.systemException.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('expungeLinkedNotebook_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.I32, 0)
oprot.writeI32(self.success)
oprot.writeFieldEnd()
if self.userException is not None:
oprot.writeFieldBegin('userException', TType.STRUCT, 1)
self.userException.write(oprot)
oprot.writeFieldEnd()
if self.notFoundException is not None:
oprot.writeFieldBegin('notFoundException', TType.STRUCT, 2)
self.notFoundException.write(oprot)
oprot.writeFieldEnd()
if self.systemException is not None:
oprot.writeFieldBegin('systemException', TType.STRUCT, 3)
self.systemException.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class authenticateToSharedNotebook_args(object):
"""
Attributes:
- shareKey
- authenticationToken
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'shareKey', None, None, ), # 1
(2, TType.STRING, 'authenticationToken', None, None, ), # 2
)
def __init__(self, shareKey=None, authenticationToken=None,):
self.shareKey = shareKey
self.authenticationToken = authenticationToken
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.shareKey = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.authenticationToken = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('authenticateToSharedNotebook_args')
if self.shareKey is not None:
oprot.writeFieldBegin('shareKey', TType.STRING, 1)
oprot.writeString(self.shareKey.encode('utf-8'))
oprot.writeFieldEnd()
if self.authenticationToken is not None:
oprot.writeFieldBegin('authenticationToken', TType.STRING, 2)
oprot.writeString(self.authenticationToken.encode('utf-8'))
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class authenticateToSharedNotebook_result(object):
"""
Attributes:
- success
- userException
- notFoundException
- systemException
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (evernote.edam.userstore.ttypes.AuthenticationResult, evernote.edam.userstore.ttypes.AuthenticationResult.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 2
(3, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 3
)
def __init__(self, success=None, userException=None, notFoundException=None, systemException=None,):
self.success = success
self.userException = userException
self.notFoundException = notFoundException
self.systemException = systemException
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = evernote.edam.userstore.ttypes.AuthenticationResult()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.userException = evernote.edam.error.ttypes.EDAMUserException()
self.userException.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
self.notFoundException.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
self.systemException.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('authenticateToSharedNotebook_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.userException is not None:
oprot.writeFieldBegin('userException', TType.STRUCT, 1)
self.userException.write(oprot)
oprot.writeFieldEnd()
if self.notFoundException is not None:
oprot.writeFieldBegin('notFoundException', TType.STRUCT, 2)
self.notFoundException.write(oprot)
oprot.writeFieldEnd()
if self.systemException is not None:
oprot.writeFieldBegin('systemException', TType.STRUCT, 3)
self.systemException.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getSharedNotebookByAuth_args(object):
"""
Attributes:
- authenticationToken
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'authenticationToken', None, None, ), # 1
)
def __init__(self, authenticationToken=None,):
self.authenticationToken = authenticationToken
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.authenticationToken = iprot.readString().decode('utf-8')
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getSharedNotebookByAuth_args')
if self.authenticationToken is not None:
oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
oprot.writeString(self.authenticationToken.encode('utf-8'))
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getSharedNotebookByAuth_result(object):
"""
Attributes:
- success
- userException
- notFoundException
- systemException
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (evernote.edam.type.ttypes.SharedNotebook, evernote.edam.type.ttypes.SharedNotebook.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 2
(3, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 3
)
def __init__(self, success=None, userException=None, notFoundException=None, systemException=None,):
self.success = success
self.userException = userException
self.notFoundException = notFoundException
self.systemException = systemException
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = evernote.edam.type.ttypes.SharedNotebook()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.userException = evernote.edam.error.ttypes.EDAMUserException()
self.userException.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
self.notFoundException.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
self.systemException.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getSharedNotebookByAuth_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.userException is not None:
oprot.writeFieldBegin('userException', TType.STRUCT, 1)
self.userException.write(oprot)
oprot.writeFieldEnd()
if self.notFoundException is not None:
oprot.writeFieldBegin('notFoundException', TType.STRUCT, 2)
self.notFoundException.write(oprot)
oprot.writeFieldEnd()
if self.systemException is not None:
oprot.writeFieldBegin('systemException', TType.STRUCT, 3)
self.systemException.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class emailNote_args(object):
  """
  Attributes:
   - authenticationToken
   - parameters
  """
  # thrift_spec: tuple indexed by field id; each entry is
  # (field id, wire type, field name, nested type args, default value).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRUCT, 'parameters', (NoteEmailParameters, NoteEmailParameters.thrift_spec), None, ), # 2
  )
  def __init__(self, authenticationToken=None, parameters=None,):
    self.authenticationToken = authenticationToken
    self.parameters = parameters
  # Deserialize this struct from iprot; delegates to the C fastbinary codec
  # when the accelerated binary protocol and a C-readable transport are in use.
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.parameters = NoteEmailParameters()
          self.parameters.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  # Serialize to oprot; None fields are omitted and fields are emitted in
  # thrift_spec order (wire-significant -- do not reorder).
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('emailNote_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.parameters is not None:
      oprot.writeFieldBegin('parameters', TType.STRUCT, 2)
      self.parameters.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class emailNote_result(object):
  """
  Attributes:
   - userException
   - notFoundException
   - systemException
  """
  # thrift_spec: tuple indexed by field id; each entry is
  # (field id, wire type, field name, nested type args, default value).
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 3
  )
  def __init__(self, userException=None, notFoundException=None, systemException=None,):
    self.userException = userException
    self.notFoundException = notFoundException
    self.systemException = systemException
  # Deserialize this struct from iprot; delegates to the C fastbinary codec
  # when the accelerated binary protocol and a C-readable transport are in use.
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  # Serialize to oprot; None fields are omitted and fields are emitted in
  # thrift_spec order (wire-significant -- do not reorder).
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('emailNote_result')
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 2)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 3)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class shareNote_args(object):
  """
  Attributes:
   - authenticationToken
   - guid
  """
  # thrift_spec: tuple indexed by field id; each entry is
  # (field id, wire type, field name, nested type args, default value).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
  )
  def __init__(self, authenticationToken=None, guid=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid
  # Deserialize this struct from iprot; delegates to the C fastbinary codec
  # when the accelerated binary protocol and a C-readable transport are in use.
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.guid = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  # Serialize to oprot; None fields are omitted and fields are emitted in
  # thrift_spec order (wire-significant -- do not reorder).
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('shareNote_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 2)
      oprot.writeString(self.guid.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class shareNote_result(object):
  """
  Attributes:
   - success
   - userException
   - notFoundException
   - systemException
  """
  # thrift_spec: tuple indexed by field id; each entry is
  # (field id, wire type, field name, nested type args, default value).
  # Field 0 ('success') carries the RPC return value (the share key string).
  thrift_spec = (
    (0, TType.STRING, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, notFoundException=None, systemException=None,):
    self.success = success
    self.userException = userException
    self.notFoundException = notFoundException
    self.systemException = systemException
  # Deserialize this struct from iprot; delegates to the C fastbinary codec
  # when the accelerated binary protocol and a C-readable transport are in use.
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRING:
          self.success = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  # Serialize to oprot; None fields are omitted and fields are emitted in
  # thrift_spec order (wire-significant -- do not reorder).
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('shareNote_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRING, 0)
      oprot.writeString(self.success.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 2)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 3)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class stopSharingNote_args(object):
  """
  Attributes:
   - authenticationToken
   - guid
  """
  # thrift_spec: tuple indexed by field id; each entry is
  # (field id, wire type, field name, nested type args, default value).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRING, 'guid', None, None, ), # 2
  )
  def __init__(self, authenticationToken=None, guid=None,):
    self.authenticationToken = authenticationToken
    self.guid = guid
  # Deserialize this struct from iprot; delegates to the C fastbinary codec
  # when the accelerated binary protocol and a C-readable transport are in use.
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.guid = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  # Serialize to oprot; None fields are omitted and fields are emitted in
  # thrift_spec order (wire-significant -- do not reorder).
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('stopSharingNote_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 2)
      oprot.writeString(self.guid.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class stopSharingNote_result(object):
  """
  Attributes:
   - userException
   - notFoundException
   - systemException
  """
  # thrift_spec: tuple indexed by field id; each entry is
  # (field id, wire type, field name, nested type args, default value).
  # No field 0: the underlying RPC returns void, so only exceptions appear.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 3
  )
  def __init__(self, userException=None, notFoundException=None, systemException=None,):
    self.userException = userException
    self.notFoundException = notFoundException
    self.systemException = systemException
  # Deserialize this struct from iprot; delegates to the C fastbinary codec
  # when the accelerated binary protocol and a C-readable transport are in use.
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  # Serialize to oprot; None fields are omitted and fields are emitted in
  # thrift_spec order (wire-significant -- do not reorder).
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('stopSharingNote_result')
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 2)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 3)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class authenticateToSharedNote_args(object):
  """
  Attributes:
   - guid
   - noteKey
   - authenticationToken
  """
  # thrift_spec: tuple indexed by field id; each entry is
  # (field id, wire type, field name, nested type args, default value).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'guid', None, None, ), # 1
    (2, TType.STRING, 'noteKey', None, None, ), # 2
    (3, TType.STRING, 'authenticationToken', None, None, ), # 3
  )
  def __init__(self, guid=None, noteKey=None, authenticationToken=None,):
    self.guid = guid
    self.noteKey = noteKey
    self.authenticationToken = authenticationToken
  # Deserialize this struct from iprot; delegates to the C fastbinary codec
  # when the accelerated binary protocol and a C-readable transport are in use.
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.guid = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.noteKey = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  # Serialize to oprot; None fields are omitted and fields are emitted in
  # thrift_spec order (wire-significant -- do not reorder).
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('authenticateToSharedNote_args')
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 1)
      oprot.writeString(self.guid.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.noteKey is not None:
      oprot.writeFieldBegin('noteKey', TType.STRING, 2)
      oprot.writeString(self.noteKey.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 3)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class authenticateToSharedNote_result(object):
  """
  Attributes:
   - success
   - userException
   - notFoundException
   - systemException
  """
  # thrift_spec: tuple indexed by field id; each entry is
  # (field id, wire type, field name, nested type args, default value).
  # Field 0 ('success') carries the RPC return value (an AuthenticationResult).
  thrift_spec = (
    (0, TType.STRUCT, 'success', (evernote.edam.userstore.ttypes.AuthenticationResult, evernote.edam.userstore.ttypes.AuthenticationResult.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, notFoundException=None, systemException=None,):
    self.success = success
    self.userException = userException
    self.notFoundException = notFoundException
    self.systemException = systemException
  # Deserialize this struct from iprot; delegates to the C fastbinary codec
  # when the accelerated binary protocol and a C-readable transport are in use.
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = evernote.edam.userstore.ttypes.AuthenticationResult()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  # Serialize to oprot; None fields are omitted and fields are emitted in
  # thrift_spec order (wire-significant -- do not reorder).
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('authenticateToSharedNote_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 2)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 3)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class findRelated_args(object):
  """
  Attributes:
   - authenticationToken
   - query
   - resultSpec
  """
  # thrift_spec: tuple indexed by field id; each entry is
  # (field id, wire type, field name, nested type args, default value).
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'authenticationToken', None, None, ), # 1
    (2, TType.STRUCT, 'query', (RelatedQuery, RelatedQuery.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'resultSpec', (RelatedResultSpec, RelatedResultSpec.thrift_spec), None, ), # 3
  )
  def __init__(self, authenticationToken=None, query=None, resultSpec=None,):
    self.authenticationToken = authenticationToken
    self.query = query
    self.resultSpec = resultSpec
  # Deserialize this struct from iprot; delegates to the C fastbinary codec
  # when the accelerated binary protocol and a C-readable transport are in use.
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.authenticationToken = iprot.readString().decode('utf-8')
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.query = RelatedQuery()
          self.query.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.resultSpec = RelatedResultSpec()
          self.resultSpec.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  # Serialize to oprot; None fields are omitted and fields are emitted in
  # thrift_spec order (wire-significant -- do not reorder).
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('findRelated_args')
    if self.authenticationToken is not None:
      oprot.writeFieldBegin('authenticationToken', TType.STRING, 1)
      oprot.writeString(self.authenticationToken.encode('utf-8'))
      oprot.writeFieldEnd()
    if self.query is not None:
      oprot.writeFieldBegin('query', TType.STRUCT, 2)
      self.query.write(oprot)
      oprot.writeFieldEnd()
    if self.resultSpec is not None:
      oprot.writeFieldBegin('resultSpec', TType.STRUCT, 3)
      self.resultSpec.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class findRelated_result(object):
  """
  Attributes:
   - success
   - userException
   - systemException
   - notFoundException
  """
  # thrift_spec: tuple indexed by field id; each entry is
  # (field id, wire type, field name, nested type args, default value).
  # NOTE: unlike the sibling *_result structs, here field 2 is systemException
  # and field 3 is notFoundException -- this ordering comes from the IDL and
  # must be preserved.
  thrift_spec = (
    (0, TType.STRUCT, 'success', (RelatedResult, RelatedResult.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'userException', (evernote.edam.error.ttypes.EDAMUserException, evernote.edam.error.ttypes.EDAMUserException.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'systemException', (evernote.edam.error.ttypes.EDAMSystemException, evernote.edam.error.ttypes.EDAMSystemException.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'notFoundException', (evernote.edam.error.ttypes.EDAMNotFoundException, evernote.edam.error.ttypes.EDAMNotFoundException.thrift_spec), None, ), # 3
  )
  def __init__(self, success=None, userException=None, systemException=None, notFoundException=None,):
    self.success = success
    self.userException = userException
    self.systemException = systemException
    self.notFoundException = notFoundException
  # Deserialize this struct from iprot; delegates to the C fastbinary codec
  # when the accelerated binary protocol and a C-readable transport are in use.
  def read(self, iprot):
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = RelatedResult()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.userException = evernote.edam.error.ttypes.EDAMUserException()
          self.userException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.systemException = evernote.edam.error.ttypes.EDAMSystemException()
          self.systemException.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.notFoundException = evernote.edam.error.ttypes.EDAMNotFoundException()
          self.notFoundException.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip it for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  # Serialize to oprot; None fields are omitted and fields are emitted in
  # thrift_spec order (wire-significant -- do not reorder).
  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('findRelated_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.userException is not None:
      oprot.writeFieldBegin('userException', TType.STRUCT, 1)
      self.userException.write(oprot)
      oprot.writeFieldEnd()
    if self.systemException is not None:
      oprot.writeFieldBegin('systemException', TType.STRUCT, 2)
      self.systemException.write(oprot)
      oprot.writeFieldEnd()
    if self.notFoundException is not None:
      oprot.writeFieldBegin('notFoundException', TType.STRUCT, 3)
      self.notFoundException.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.items()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
|
bsd-2-clause
|
jhawkesworth/ansible
|
lib/ansible/modules/network/aci/aci_firmware_policy.py
|
16
|
6423
|
#!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Standard Ansible module metadata: maturity ('preview') and support channel.
ANSIBLE_METADATA = {
    'metadata_version': '1.1',
    'status': ['preview'],
    'supported_by': 'community'
}
# User-facing module documentation (rendered by ansible-doc).
# Fixes in this revision: "is create first" -> "is created first",
# "assoicated" -> "associated", "very import" -> "very important".
DOCUMENTATION = '''
---
module: aci_firmware_policy
short_description: This creates a firmware policy
version_added: "2.8"
description:
    - This module creates a firmware policy for firmware groups. The firmware policy is created first and then
    - referenced by the firmware group. You will assign the firmware and specify if you want to ignore the compatibility
    - check
options:
    name:
        description:
            - Name of the firmware policy
        required: true
    version:
        description:
            - The version of the firmware associated with this policy. This value is very important as well as constructing
            - it correctly. The syntax for this field is n9000-xx.x. If you look at the firmware repository using the UI
            - each version will have a "Full Version" column, this is the value you need to use. So, if the Full Version
            - is 13.1(1i), the value for this field would be n9000-13.1(1i)
        required: true
    ignoreCompat:
        description:
            - Check if compatibility checks should be ignored
        required: false
    state:
        description:
            - Use C(present) or C(absent) for adding or removing.
            - Use C(query) for listing an object or multiple objects.
        default: present
        choices: ['absent', 'present', 'query']
extends_documentation_fragment:
    - ACI
author:
    - Steven Gerhart (@sgerhart)
'''
EXAMPLES = '''
- name: firmware policy
aci_firmware_policy:
host: "{{ inventory_hostname }}"
username: "{{ user }}"
password: "{{ pass }}"
validate_certs: no
name: test2FrmPol
version: n9000-13.2(1m)
ignoreCompat: False
state: present
'''
RETURN = '''
current:
description: The existing configuration from the APIC after the module has finished
returned: success
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
error:
description: The error information as returned from the APIC
returned: failure
type: dict
sample:
{
"code": "122",
"text": "unknown managed object class foo"
}
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: str
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
description: The actual/minimal configuration pushed to the APIC
returned: info
type: list
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment"
}
}
}
previous:
description: The original configuration from the APIC before the module has started
returned: info
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
proposed:
description: The assembled configuration from the user-provided parameters
returned: info
type: dict
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"name": "production"
}
}
}
filter_string:
description: The filter string used for the request
returned: failure or debug
type: str
sample: ?rsp-prop-include=config-only
method:
description: The HTTP method used for the request to the APIC
returned: failure or debug
type: str
sample: POST
response:
description: The HTTP response from the APIC
returned: failure or debug
type: str
sample: OK (30 bytes)
status:
description: The HTTP status from the APIC
returned: failure or debug
type: int
sample: 200
url:
description: The HTTP url used for the request to the APIC
returned: failure or debug
type: str
sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
def main():
    """Create, delete, or query an ACI firmware policy (firmwareFwP object)."""
    argument_spec = aci_argument_spec()
    argument_spec.update(
        # 'name' is not required here so that state=query can list all objects.
        # The redundant aliases (alias identical to the option name) were removed;
        # they were no-ops and option names are unchanged, so callers are unaffected.
        name=dict(type='str'),
        version=dict(type='str'),
        # BUG FIX: was type=bool (the Python builtin used as a coercion callable),
        # which turns any non-empty string -- including 'false' and 'no' -- into
        # True.  The string 'bool' selects Ansible's proper boolean conversion.
        ignoreCompat=dict(type='bool'),
        state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        required_if=[
            ['state', 'absent', ['name']],
            ['state', 'present', ['name', 'version']],
        ],
    )
    state = module.params['state']
    name = module.params['name']
    version = module.params['version']
    # The APIC attribute expects the strings 'yes'/'no', not a boolean.
    ignore = 'yes' if module.params['ignoreCompat'] else 'no'
    aci = ACIModule(module)
    aci.construct_url(
        root_class=dict(
            aci_class='firmwareFwP',
            aci_rn='fabric/fwpol-{0}'.format(name),
            target_filter={'name': name},
            module_object=name,
        ),
    )
    aci.get_existing()
    if state == 'present':
        aci.payload(
            aci_class='firmwareFwP',
            class_config=dict(
                name=name,
                version=version,
                ignoreCompat=ignore,
            ),
        )
        # Only POST when the desired config differs from what is on the APIC.
        aci.get_diff(aci_class='firmwareFwP')
        aci.post_config()
    elif state == 'absent':
        aci.delete_config()
    aci.exit_json()


if __name__ == "__main__":
    main()
|
gpl-3.0
|
pyIMS/pyimzML
|
pyimzml/metadata.py
|
2
|
11625
|
from warnings import warn
from pyimzml.ontology.ontology import lookup_and_convert_cv_param, convert_xml_value, convert_term_name
XMLNS_PREFIX = "{http://psi.hupo.org/ms/mzml}"
def _deep_pretty(obj):
if isinstance(obj, list):
return [_deep_pretty(item) for item in obj]
if isinstance(obj, dict):
return {k: _deep_pretty(v) for k, v in obj.items()}
if hasattr(obj, 'pretty'):
return obj.pretty()
return obj
class _ParseUtils:
    """
    Helper encapsulating the recurring XML parsing patterns.  Every ParamGroup
    it constructs is recorded in ``created_param_groups`` so that refs between
    param groups can be linked up after parsing completes.
    """
    def __init__(self):
        # All ParamGroup instances created through this helper, in creation order.
        self.created_param_groups = []
    def param_group(self, node, **extra_fields):
        """Build a ParamGroup from *node*, record it, and return it."""
        group = ParamGroup(node, **extra_fields)
        self.created_param_groups.append(group)
        return group
    def optional_param_group(self, parent_node, xpath, **extra_fields):
        """Return a ParamGroup for the first *xpath* match, or None if absent."""
        child = parent_node.find(xpath.format(XMLNS_PREFIX))
        if child is None:
            return None
        return self.param_group(child, **extra_fields)
    def param_groups_by_id(self, parent_node, xpath):
        """Map each matching node's 'id' attribute (or its index, when the
        attribute is missing) to a ParamGroup built from it."""
        groups = {}
        for position, child in enumerate(parent_node.findall(xpath.format(XMLNS_PREFIX))):
            groups[child.get('id', position)] = self.param_group(child)
        return groups
    def param_groups_list(self, parent_node, xpath):
        """Return a ParamGroup for every node matching *xpath*."""
        return [self.param_group(child) for child in parent_node.findall(xpath.format(XMLNS_PREFIX))]
    def refs_list(self, parent_node, xpath):
        """Return the 'ref' attribute (or None) of every node matching *xpath*."""
        return [child.attrib.get('ref') for child in parent_node.findall(xpath.format(XMLNS_PREFIX))]
class Metadata:
    """Structured, spectra-free view of the metadata sections of an imzML file."""
    def __init__(self, root):
        """
        Parse metadata headers from an imzML file into a structured format for easy access in Python code.
        This class deliberately excludes spectra, as they account for significantly more memory use
        and parsing time, and typically should be treated separately.
        """
        # root: the parsed imzML document root; assumed to expose an
        # ElementTree-style find/findall API — TODO confirm at call sites.
        pu = _ParseUtils()
        # <fileDescription>: file content description plus source files and contacts.
        fd_node = root.find('{0}fileDescription'.format(XMLNS_PREFIX))
        self.file_description = pu.param_group(
            fd_node.find('{0}fileContent'.format(XMLNS_PREFIX)),
            source_files=pu.param_groups_by_id(fd_node, '{0}sourceFileList/{0}sourceFile'),
            contacts=pu.param_groups_list(fd_node, '{0}contact'),
        )
        # Shared parameter groups that other sections reference by id.
        self.referenceable_param_groups = pu.param_groups_by_id(
            root,
            '{0}referenceableParamGroupList/{0}referenceableParamGroup'
        )
        self.samples = pu.param_groups_by_id(root, '{0}sampleList/{0}sample')
        self.softwares = pu.param_groups_by_id(root, '{0}softwareList/{0}software')
        # Scan settings, keyed by their XML id attribute.
        self.scan_settings = {}
        for node in root.findall('{0}scanSettingsList/{0}scanSettings'.format(XMLNS_PREFIX)):
            self.scan_settings[node.get('id')] = pu.param_group(
                node,
                source_file_refs=pu.refs_list(node, '{0}sourceFileRefList/{0}sourceFileRef'),
                targets=pu.param_groups_by_id(node, '{0}targetList/{0}target'),
            )
        # Instrument configurations, keyed by id; software_ref keeps only the
        # first <softwareRef> (or None when the list is empty).
        self.instrument_configurations = {}
        for node in root.findall('{0}instrumentConfigurationList/{0}instrumentConfiguration'.format(XMLNS_PREFIX)):
            self.instrument_configurations[node.get('id')] = pu.param_group(
                node,
                components=pu.param_groups_list(node, '{0}componentList/*'),
                software_ref=next(iter(pu.refs_list(node, '{0}softwareRef')), None),
            )
        self.data_processings = {}
        for node in root.findall('{0}dataProcessingList/{0}dataProcessing'.format(XMLNS_PREFIX)):
            self.data_processings[node.get('id')] = pu.param_group(
                node,
                methods=pu.param_groups_list(node, '{0}processingMethod')
            )
        # Apply referenceable_param_groups to every ParamGroup created above,
        # so inherited parameters become visible on each group.
        for pg in pu.created_param_groups:
            pg.apply_referenceable_param_groups(self.referenceable_param_groups)
    def pretty(self):
        """
        Returns a nested dict summarizing all contained sections, intended to help human inspection.
        """
        return {
            'file_description': self.file_description.pretty(),
            'referenceable_param_groups': _deep_pretty(self.referenceable_param_groups),
            'samples': _deep_pretty(self.samples),
            'softwares': _deep_pretty(self.softwares),
            'scan_settings': _deep_pretty(self.scan_settings),
            'instrument_configurations': _deep_pretty(self.instrument_configurations),
            'data_processings': _deep_pretty(self.data_processings),
        }
class ParamGroup:
    """
    This class exposes a group of imzML parameters at two layers of abstraction:
    High-level examples:
    `param_group['MS:0000000']`
        Access a controlled vocabulary parameter by accession ID or name, or a user-defined
        parameter by name. Controlled vocabulary parameters will take priority.
        This also inherits values from referenced referenceable param groups.
    `'particle beam' in param_group`
        Check if a parameter exists by name / accession ID.
    `param_group.targets`
        Access a subelement directly by name.
    Low-level examples:
    `param_group.cv_params` - A list of all cvParams defined in this group. Includes raw values,
        units, and multiple items if one accession is used multiple times.
        Does not include values inherited from referenceable param groups.
    `param_group.user_params` - A list of all userParams.
    `param_group.attrs` - A dict of all XML attributes.
    `param_group.subelements` - A dict of all subelements.
    """
    def __init__(self, elem, **extra_data):
        """
        Parses an XML element representing a group of controlled vocabulary parameters.
        :param elem: an XML element containing cvParam children
        :param extra_data: extra attributes to assign to the class instance
        """
        # IDs of referenceableParamGroups whose parameters are merged in later
        # via apply_referenceable_param_groups().
        self.param_group_refs = [
            ref.get('ref')
            for ref in elem.findall('{0}referenceableParamGroupRef'.format(XMLNS_PREFIX))
        ]
        # Element tag with the XML namespace prefix stripped.
        self.type = elem.tag.replace(XMLNS_PREFIX, '')
        # Tuples of (name, accession, parsed_value, raw_name, raw_value, unit_name, unit_accession)
        # These are kept in an array as the imzML spec allows multiple uses of accession numbers
        # in the same block
        self.cv_params = []
        for node in elem.findall('{0}cvParam'.format(XMLNS_PREFIX)):
            accession = node.get('accession')
            raw_name = node.get('name')
            raw_value = node.get('value')
            unit_accession = node.get('unitAccession')
            # Normalize against the ontology: may rewrite the accession/name and
            # parse the raw string value into a typed value.
            accession, name, parsed_value, unit_name = lookup_and_convert_cv_param(
                accession, raw_name, raw_value, unit_accession
            )
            self.cv_params.append(
                (name, accession, parsed_value, raw_name, raw_value, unit_name, unit_accession)
            )
        # Tuples of (name, type, parsed_value, raw_value, unit_name, unit_accession)
        self.user_params = []
        for node in elem.findall('{0}userParam'.format(XMLNS_PREFIX)):
            name = node.get('name')
            dtype = node.get('dtype')
            raw_value = node.get('value')
            parsed_value = convert_xml_value(raw_value, dtype)
            unit_accession = node.get('unitAccession')
            unit_name = convert_term_name(unit_accession)
            self.user_params.append(
                (name, dtype, parsed_value, raw_value, unit_name, unit_accession)
            )
        # Mapping of CV param name to parsed value. userParams are added first,
        # then cvParams, so a CV param overrides a user param of the same name.
        self.param_by_name = {}
        self.param_by_name.update((param[0], param[2]) for param in self.user_params)
        self.param_by_name.update((param[0], param[2]) for param in self.cv_params)
        # Mapping of CV param accession to parsed value
        self.param_by_accession = {
            param[1]: param[2] for param in self.cv_params
        }
        self.attrs = elem.attrib
        # extra_data entries become both subelements and instance attributes.
        self.subelements = extra_data
        for k, v in extra_data.items():
            setattr(self, k, v)
    def __getitem__(self, key):
        # Accession lookup takes priority; fall back to name lookup
        # (raises KeyError if neither matches).
        try:
            return self.param_by_accession[key]
        except KeyError:
            return self.param_by_name[key]
    def __contains__(self, key):
        # True if *key* matches a known accession ID or parameter name.
        return key in self.param_by_accession or key in self.param_by_name
    def apply_referenceable_param_groups(self, rpgs):
        """Merge inherited parameters from the referenced param groups in *rpgs*
        (a mapping of group id -> ParamGroup) into the name/accession lookups."""
        # Reversed iteration + setdefault: values already set on this group
        # always win, and among the referenced groups the last-listed ref takes
        # precedence because its setdefault runs first.
        for ref in self.param_group_refs[::-1]:
            rpg = rpgs.get(ref)
            if rpg:
                for name, accession, parsed_value, *_ in rpg.cv_params:
                    # Skip the by-name entry when the name is just a copy of the
                    # accession (i.e. no real name was resolved).
                    if name is not None and name != accession:
                        self.param_by_name.setdefault(name, parsed_value)
                    self.param_by_accession.setdefault(accession, parsed_value)
                for name, _, parsed_value, *_ in rpg.user_params:
                    self.param_by_name.setdefault(name, parsed_value)
            else:
                warn('ReferenceableParamGroup "%s" not found' % ref)
    def pretty(self):
        """
        Flattens attributes, params and extra fields into a single dict keyed by name.
        This function is intended to help human inspection. For programmatic access to specific fields,
        always use the `attrs`, `param_by_name`, `param_by_accession`, etc. instance attributes instead.
        """
        result = {
            'type': self.type,
        }
        result.update(self.attrs)
        result.update(self.param_by_name)
        result.update(_deep_pretty(self.subelements))
        return result
class SpectrumData(ParamGroup):
    """ParamGroup for a single spectrum element, including its scan list,
    precursors, products and binary data arrays as nested ParamGroups."""
    def __init__(self, root, referenceable_param_groups):
        # root: the spectrum XML element.
        # referenceable_param_groups: mapping of group id -> ParamGroup used to
        # resolve inherited parameters for this spectrum and all sub-groups.
        pu = _ParseUtils()
        # Params attached to <scanList> itself (may be absent).
        scan_list_params = pu.optional_param_group(root, '{0}scanList')
        scans = []
        for node in root.findall('{0}scanList/{0}scan'.format(XMLNS_PREFIX)):
            scans.append(
                pu.param_group(
                    node,
                    scan_windows=pu.param_groups_list(node, '{0}scanWindowList/{0}scanWindow')
                )
            )
        # MS/MS precursor descriptions, each with isolation window,
        # selected ions and activation details.
        precursors = []
        for node in root.findall('{0}precursorList/{0}precursor'.format(XMLNS_PREFIX)):
            precursors.append(
                pu.param_group(
                    node,
                    isolation_window=pu.optional_param_group(node, '{0}isolationWindow'),
                    selected_ions=pu.param_groups_list(node, '{0}selectedIonList/{0}selectedIon'),
                    activation=pu.optional_param_group(node, '{0}activation'),
                )
            )
        products = []
        for node in root.findall('{0}productList/{0}product'.format(XMLNS_PREFIX)):
            products.append(
                pu.param_group(
                    node,
                    isolation_window=pu.optional_param_group(node, '{0}isolationWindow'),
                )
            )
        binary_data_arrays = pu.param_groups_list(root, '{0}binaryDataArrayList/{0}binaryDataArray')
        # Initialize self as the ParamGroup of the spectrum element itself, with
        # all parsed sections attached as subelements.
        super().__init__(
            root,
            scan_list_params=scan_list_params,
            scans=scans,
            precursors=precursors,
            products=products,
            binary_data_arrays=binary_data_arrays,
        )
        # Resolve inherited params for the sub-groups and then for self.
        for pg in pu.created_param_groups:
            pg.apply_referenceable_param_groups(referenceable_param_groups)
        self.apply_referenceable_param_groups(referenceable_param_groups)
|
apache-2.0
|
caesar2164/edx-platform
|
common/djangoapps/student/management/commands/manage_group.py
|
52
|
4853
|
"""
Management command `manage_group` is used to idempotently create Django groups
and set their permissions by name.
"""
from django.apps import apps
from django.contrib.auth.models import Group, Permission
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from django.utils.translation import gettext as _
class Command(BaseCommand):
    """
    Idempotently create (or remove) a Django auth group and synchronize its
    permission set with the permissions named on the command line.
    """
    help = 'Creates the specified group, if it does not exist, and sets its permissions.'
    def add_arguments(self, parser):
        parser.add_argument('group_name')
        parser.add_argument('--remove', dest='is_remove', action='store_true')
        parser.add_argument('-p', '--permissions', nargs='*', default=[])
    def _handle_remove(self, group_name):
        """Delete the named group if it exists, logging the outcome either way."""
        try:
            Group.objects.get(name=group_name).delete()  # pylint: disable=no-member
            self.stderr.write(_('Removed group: "{}"').format(group_name))
        except Group.DoesNotExist:
            self.stderr.write(_('Did not find a group with name "{}" - skipping.').format(group_name))
    @transaction.atomic
    def handle(self, group_name, is_remove, permissions=None, *args, **options):
        """
        Create or find the group, then reconcile its permissions.
        Runs inside one transaction so partial updates are never committed.
        """
        if is_remove:
            self._handle_remove(group_name)
            return
        old_permissions = set()
        group, created = Group.objects.get_or_create(name=group_name)  # pylint: disable=no-member
        if created:
            try:
                # Needed for sqlite backend (i.e. in tests) because
                # name.max_length won't be enforced by the db.
                # See also http://www.sqlite.org/faq.html#q9
                group.full_clean()
            except ValidationError as exc:
                # give a more helpful error
                raise CommandError(
                    _(
                        'Invalid group name: "{group_name}". {messages}'
                    ).format(
                        group_name=group_name,
                        messages=exc.messages[0]
                    )
                ) from exc
            self.stderr.write(_('Created new group: "{}"').format(group_name))
        else:
            self.stderr.write(_('Found existing group: "{}"').format(group_name))
            old_permissions = set(group.permissions.all())
        new_permissions = self._resolve_permissions(permissions or set())
        add_permissions = new_permissions - old_permissions
        remove_permissions = old_permissions - new_permissions
        self.stderr.write(
            _(
                'Adding {codenames} permissions to group "{group}"'
            ).format(
                codenames=[ap.name for ap in add_permissions],
                group=group.name
            )
        )
        self.stderr.write(
            _(
                'Removing {codenames} permissions from group "{group}"'
            ).format(
                codenames=[rp.codename for rp in remove_permissions],
                group=group.name
            )
        )
        # Bug fix: direct assignment to a many-to-many manager
        # (`group.permissions = ...`) was removed in Django 2.0, and this file
        # already targets modern Django (it imports `gettext`, not `ugettext`).
        # Use .set() to replace the group's permissions.
        group.permissions.set(new_permissions)
        group.save()
    def _resolve_permissions(self, permissions):
        """
        Map 'app_label:model_name:codename' specifiers to Permission objects.
        Raises CommandError with a helpful message for any malformed or
        unknown specifier.
        """
        new_permissions = set()
        for permission in permissions:
            try:
                app_label, model_name, codename = permission.split(':')
            except ValueError:
                # give a more helpful error
                raise CommandError(_(
                    'Invalid permission option: "{}". Please specify permissions '
                    'using the format: app_label:model_name:permission_codename.'
                ).format(permission))
            # this will raise a LookupError if it fails.
            try:
                model_class = apps.get_model(app_label, model_name)
            except LookupError as exc:
                raise CommandError(str(exc)) from exc
            content_type = ContentType.objects.get_for_model(model_class)
            try:
                new_permission = Permission.objects.get(  # pylint: disable=no-member
                    content_type=content_type,
                    codename=codename,
                )
            except Permission.DoesNotExist:
                # give a more helpful error
                raise CommandError(
                    _(
                        'Invalid permission codename: "{codename}". No such permission exists '
                        'for the model {module}.{model_name}.'
                    ).format(
                        codename=codename,
                        module=model_class.__module__,
                        model_name=model_class.__name__,
                    )
                )
            new_permissions.add(new_permission)
        return new_permissions
|
agpl-3.0
|
nagaozen/my-os-customizations
|
home/nagaozen/gedit-2.30.4/plugins/externaltools/tools/linkparsing.py
|
6
|
7117
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2009-2010 Per Arneng <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import re
class Link:
    """
    A file reference parsed out of some tool's text output.

    Stores the referenced file path, the line number inside that file, and
    the character range of the output string that should be rendered as a
    clickable link.
    """
    def __init__(self, path, line_nr, start, end):
        """
        path -- the file path that was extracted from the output
        line_nr -- the line number within that file (coerced to int)
        start -- index in the output string where the link begins
        end -- index in the output string where the link ends
        """
        self.path = path
        self.line_nr = int(line_nr)
        self.start = start
        self.end = end
    def __repr__(self):
        return "%s[%s](%s:%s)" % (self.path, self.line_nr, self.start, self.end)
class LinkParser:
    """
    Scans a piece of text for file links using a set of pluggable providers.

    A typical input is compiler output that names a file and a line number;
    the parsed links allow navigating from the error output to the file.
    Providers are either AbstractLinkParser subclasses registered through
    add_parser, or regular expressions registered through add_regexp (the
    constructor pre-registers the module's built-in expressions).
    """
    def __init__(self):
        self._providers = []
        # Register the built-in patterns in the same order as before.
        for regexp in (REGEXP_STANDARD,
                       REGEXP_PYTHON,
                       REGEXP_VALAC,
                       REGEXP_BASH,
                       REGEXP_RUBY,
                       REGEXP_PERL,
                       REGEXP_MCS):
            self.add_regexp(regexp)
    def add_parser(self, parser):
        """Register a parser provider (an AbstractLinkParser instance)."""
        self._providers.append(parser)
    def add_regexp(self, regexp):
        """
        Register a regular expression string that matches a link. The pattern
        is compiled with re.MULTILINE | re.VERBOSE and must define the named
        groups 'lnk' (the clickable span), 'pth' (the file path) and 'ln'
        (the line number); see RegexpLinkParser for details.
        """
        self.add_parser(RegexpLinkParser(regexp))
    def parse(self, text):
        """
        Return the list of Link objects found in *text* by any provider;
        the list is empty when nothing matches. *text* must not be None.
        """
        if text is None:
            raise ValueError("text can not be None")
        found = []
        for provider in self._providers:
            found.extend(provider.parse(text))
        return found
class AbstractLinkParser(object):
    """Base class for link parsers; subclasses must override parse()."""
    def parse(self, text):
        """
        Parse *text* (never None) and return a list of Link objects, or an
        empty list when nothing matches. This base implementation always
        raises NotImplementedError; subclasses provide the real logic.
        """
        raise NotImplementedError("need to implement a parse method")
class RegexpLinkParser(AbstractLinkParser):
    """
    A link parser driven by a single regular expression. Usable directly or
    as a base class; see the constructor for the pattern requirements.
    """
    def __init__(self, regex):
        """
        regex -- a pattern string compiled with re.MULTILINE | re.VERBOSE.
        It must define three named groups: 'lnk' (the span to mark as a
        link), 'pth' (the file path) and 'ln' (the line number).
        """
        self.re = re.compile(regex, re.MULTILINE | re.VERBOSE)
    def parse(self, text):
        """Return a Link for every match of the pattern in *text*."""
        return [
            Link(match.group("pth"),
                 match.group("ln"),
                 match.start("lnk"),
                 match.end("lnk"))
            for match in self.re.finditer(text)
        ]
# Built-in link patterns. Each is a verbose, multiline regexp with the named
# groups required by RegexpLinkParser: 'lnk', 'pth' and 'ln'.
# gcc 'test.c:13: warning: ...'
# javac 'Test.java:13: ...'
# ruby 'test.rb:5: ...'
# scalac 'Test.scala:5: ...'
# 6g (go) 'test.go:9: ...'
REGEXP_STANDARD = r"""
^
(?P<lnk>
(?P<pth> .*[a-z0-9] )
\:
(?P<ln> \d+)
)
\:\s"""
# python ' File "test.py", line 13'
REGEXP_PYTHON = r"""
^\s\sFile\s
(?P<lnk>
\"
(?P<pth> [^\"]+ )
\",\sline\s
(?P<ln> \d+ )
),"""
# bash 'test.sh: line 5:'
REGEXP_BASH = r"""
^(?P<lnk>
(?P<pth> .* )
\:\sline\s
(?P<ln> \d+ )
)\:"""
# valac 'Test.vala:13.1-13.3: ...'
REGEXP_VALAC = r"""
^(?P<lnk>
(?P<pth>
.*vala
)
\:
(?P<ln>
\d+
)
\.\d+-\d+\.\d+
)\: """
#ruby
#test.rb:5: ...
# from test.rb:3:in `each'
# first line parsed by REGEXP_STANDARD
REGEXP_RUBY = r"""
^\s+from\s
(?P<lnk>
(?P<pth>
.*
)
\:
(?P<ln>
\d+
)
)"""
# perl 'syntax error at test.pl line 88, near "$fake_var'
REGEXP_PERL = r"""
\sat\s
(?P<lnk>
(?P<pth> .* )
\sline\s
(?P<ln> \d+ )
)"""
# mcs (C#) 'Test.cs(12,7): error CS0103: The name `fakeMethod'
REGEXP_MCS = r"""
^
(?P<lnk>
(?P<pth> .*\.[cC][sS] )
\(
(?P<ln> \d+ )
,\d+\)
)
\:\s
"""
# ex:ts=4:et:
|
gpl-3.0
|
DCGenomics/multiple_myeloma_rnaseq_drug_response_hackathon_v002
|
src/reader.py
|
4
|
2559
|
import os
import csv
class Reader(object):
    """
    Abstract base class for reading delimited text files; not intended to be
    used directly. The input file is expected to start with a header row.
    """
    # Field separators for the input file and for the extracted output.
    INPUT_DELIMITER='\t'
    OUTPUT_DELIMITER='\t'
    def __init__(self, filename=None):
        self._filename=filename
        self._filehandle=None
        self._header=None
    @property
    def filename(self):
        return self._filename
    @filename.setter
    def filename(self, path):
        # Validate eagerly so the lazy open in `filehandle` cannot fail later
        # on a path that never existed.
        if not os.path.exists(path):
            raise IOError("{} does not exists.".format(path))
        self._filename=path
    @property
    def filehandle(self):
        """Lazily open and cache the input file handle."""
        if self._filehandle is not None: return self._filehandle
        if self._filename is None:
            # NOTE(review): raising a Warning subclass is unusual, but the
            # exception type is kept for backward compatibility with callers.
            raise RuntimeWarning("Input file not provided.")
        else:
            self._filehandle = open(self._filename)
        return self._filehandle
    @property
    def header(self):
        """Column names parsed from the first line of the file (read once)."""
        if self._header is None:
            # Bug fix: file objects have no .next() method on Python 3; the
            # next() builtin works on both Python 2 and Python 3 iterators.
            self._header = next(self.filehandle).strip().split(self.INPUT_DELIMITER)
        return self._header
    def is_valid_fieldname(self, feild):
        """Return True if the given name is one of the header columns.
        (The misspelled parameter name is kept for API compatibility.)"""
        return feild in self.header
    def get_dictreader(self):
        """Return a csv.DictReader over the remaining (non-header) rows."""
        return csv.DictReader(self.filehandle,
                              delimiter=self.INPUT_DELIMITER,
                              fieldnames=self.header)
    def extract_data(self, fieldnames):
        """Yield, for each row, the values of the requested columns in order."""
        for row in self.get_dictreader():
            yield [row[f] for f in fieldnames]
    def extract_to_file(self, fieldnames, output_file):
        """Write the selected columns to *output_file*, one row per line."""
        with open(output_file,'w') as fh:
            for r in self.extract_data(fieldnames):
                fh.write("{}\n".format(self.OUTPUT_DELIMITER.join(r)))
    def extract_to_stdout(self, fieldnames):
        """Print the selected columns, one row per line."""
        for r in self.extract_data(fieldnames):
            print("{}".format(self.OUTPUT_DELIMITER.join(r)))
class RnaSeqExpressionReader(Reader):
    """Reader for comma-delimited RNA-seq expression files."""
    INPUT_DELIMITER=','
class EnsemblToGOMappings(Reader):
    """Reader for tab-delimited Ensembl-to-GO mapping files."""
    INPUT_DELIMITER='\t'
class ExomeVariantReader(Reader):
    """Reader for tab-delimited exome variant files."""
    INPUT_DELIMITER='\t'
# Usage (Python 3 syntax):
#FILENAME='/data/datasets/raw/rnaseq_expression/HMCL_ensembl74_Counts.csv'
#r = RnaSeqExpressionReader()
#r.filename=FILENAME
#print(r.header)
#r.extract_to_stdout(fieldnames=['GENE_ID','SKMM2_DSMZ'])
|
cc0-1.0
|
richjoyce/pandas_vectors
|
test_pv.py
|
1
|
1154
|
import pandas_vectors as pv
import pandas as pd
import numpy as np
import unittest
class PvTest(unittest.TestCase):
    """Unit tests for the pandas_vectors (pv) helper module."""
    def test_indexer(self):
        """indexer() should expand each base name with the x/y/z suffixes,
        accepting a single string or a list of strings."""
        self.assertListEqual(pv.indexer('a'), ['a_x', 'a_y', 'a_z'])
        self.assertListEqual(pv.indexer(['a']), ['a_x', 'a_y', 'a_z'])
        self.assertListEqual(pv.indexer('abc'), ['abc_x', 'abc_y', 'abc_z'])
        self.assertListEqual(pv.indexer(['abc']), ['abc_x', 'abc_y', 'abc_z'])
        self.assertListEqual(pv.indexer(['abc','def']), ['abc_x', 'abc_y', 'abc_z', 'def_x', 'def_y', 'def_z'])
    def test_vectornames(self):
        """set_vectornames() changes the global suffix set; the vectornames()
        context manager applies a suffix set temporarily and restores the
        previous one on exit (note: assertions below are order-dependent)."""
        # 'pyr' shorthand expands to pitch/yaw/roll-style suffixes.
        self.assertListEqual(pv.indexer('a'), ['a_p', 'a_y', 'a_r'])
        pv.set_vectornames(['_l', '_m', '_n', '_o'])
        self.assertListEqual(pv.indexer('a'), ['a_l', 'a_m', 'a_n', 'a_o'])
        with pv.vectornames('xyz'):
            self.assertListEqual(pv.indexer('a'), ['a_x', 'a_y', 'a_z'])
        with pv.vectornames('xy'):
            self.assertListEqual(pv.indexer('a'), ['a_x', 'a_y'])
        # After leaving the context managers the custom suffixes are restored.
        self.assertListEqual(pv.indexer('a'), ['a_l', 'a_m', 'a_n', 'a_o'])
if __name__ == '__main__':
unittest.main()
|
mit
|
bruce2728/android_kernel_htc_pyramid
|
tools/perf/scripts/python/sctop.py
|
11180
|
1924
|
# system call top
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# Periodically displays system-wide system call totals, broken down by
# syscall. If a [comm] arg is specified, only syscalls called by
# [comm] are displayed. If an [interval] arg is specified, the display
# will be refreshed every [interval] seconds. The default interval is
# 3 seconds.
import os, sys, thread, time
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
# NOTE: this is a Python 2 perf script (print statements, `thread` module).
usage = "perf script -s sctop.py [comm] [interval]\n";
# Optional command-name filter; None means count syscalls from all processes.
for_comm = None
# Refresh period for the on-screen totals, in seconds.
default_interval = 3
interval = default_interval
if len(sys.argv) > 3:
    sys.exit(usage)
if len(sys.argv) > 2:
    # Both [comm] and [interval] were given.
    for_comm = sys.argv[1]
    interval = int(sys.argv[2])
elif len(sys.argv) > 1:
    # A single argument is an interval if numeric, otherwise a comm filter.
    try:
        interval = int(sys.argv[1])
    except ValueError:
        for_comm = sys.argv[1]
        interval = default_interval
# Per-syscall-id event counters (autodict comes from the perf Core helpers).
syscalls = autodict()
def trace_begin():
    # perf callback invoked once at trace start: spawn the background thread
    # that periodically prints and resets the syscall totals.
    thread.start_new_thread(print_syscall_totals, (interval,))
    pass
def raw_syscalls__sys_enter(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    id, args):
    # perf callback for each syscall entry: count it, optionally filtered by
    # the command name given on the command line.
    if for_comm is not None:
        if common_comm != for_comm:
            return
    try:
        syscalls[id] += 1
    except TypeError:
        # First hit for this id: the autodict slot is not an int yet.
        syscalls[id] = 1
def print_syscall_totals(interval):
    # Runs forever on a background thread: every *interval* seconds, clear the
    # terminal, print the per-syscall counts sorted by count (descending),
    # then reset the counters for the next window.
    while 1:
        clear_term()
        if for_comm is not None:
            print "\nsyscall events for %s:\n\n" % (for_comm),
        else:
            print "\nsyscall events:\n\n",
        print "%-40s %10s\n" % ("event", "count"),
        print "%-40s %10s\n" % ("----------------------------------------", \
                                 "----------"),
        for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
                              reverse = True):
            try:
                print "%-40s %10d\n" % (syscall_name(id), val),
            except TypeError:
                # Skip ids whose name lookup fails.
                pass
        syscalls.clear()
        time.sleep(interval)
|
gpl-2.0
|
eviljeff/olympia
|
src/olympia/scanners/migrations/0021_auto_20200122_1347.py
|
6
|
1152
|
# Generated by Django 2.2.9 on 2020-01-22 13:47
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: widens the `scanner` field choices on all four
    # scanner models to include the new (4, 'ml_api') option.
    dependencies = [
        ('scanners', '0020_auto_20200116_1250'),
    ]
    operations = [
        migrations.AlterField(
            model_name='scannerqueryresult',
            name='scanner',
            field=models.PositiveSmallIntegerField(choices=[(1, 'customs'), (2, 'wat'), (3, 'yara'), (4, 'ml_api')]),
        ),
        migrations.AlterField(
            model_name='scannerqueryrule',
            name='scanner',
            field=models.PositiveSmallIntegerField(choices=[(1, 'customs'), (2, 'wat'), (3, 'yara'), (4, 'ml_api')]),
        ),
        migrations.AlterField(
            model_name='scannerresult',
            name='scanner',
            field=models.PositiveSmallIntegerField(choices=[(1, 'customs'), (2, 'wat'), (3, 'yara'), (4, 'ml_api')]),
        ),
        migrations.AlterField(
            model_name='scannerrule',
            name='scanner',
            field=models.PositiveSmallIntegerField(choices=[(1, 'customs'), (2, 'wat'), (3, 'yara'), (4, 'ml_api')]),
        ),
    ]
|
bsd-3-clause
|
whip112/Whip112
|
kuma/wiki/forms.py
|
7
|
20369
|
import re
from tower import ugettext_lazy as _lazy
from tower import ugettext as _
from django import forms
from django.conf import settings
from django.forms.widgets import CheckboxSelectMultiple
from kuma.contentflagging.forms import ContentFlagForm
import kuma.wiki.content
from kuma.core.form_fields import StrippedCharField
from .constants import (SLUG_CLEANSING_REGEX, REVIEW_FLAG_TAGS,
LOCALIZATION_FLAG_TAGS, RESERVED_SLUGS)
from .models import (Document, Revision,
valid_slug_parent)
# Lazily-translated validation and error messages shared by the forms below.
# Title field messages.
TITLE_REQUIRED = _lazy(u'Please provide a title.')
TITLE_SHORT = _lazy(u'The title is too short (%(show_value)s characters). '
                    u'It must be at least %(limit_value)s characters.')
TITLE_LONG = _lazy(u'Please keep the length of the title to %(limit_value)s '
                   u'characters or less. It is currently %(show_value)s '
                   u'characters.')
TITLE_PLACEHOLDER = _lazy(u'Name Your Article')
# Slug field messages.
SLUG_REQUIRED = _lazy(u'Please provide a slug.')
SLUG_INVALID = _lazy(u'The slug provided is not valid.')
SLUG_SHORT = _lazy(u'The slug is too short (%(show_value)s characters). '
                   u'It must be at least %(limit_value)s characters.')
SLUG_LONG = _lazy(u'Please keep the length of the slug to %(limit_value)s '
                  u'characters or less. It is currently %(show_value)s '
                  u'characters.')
# Summary field messages.
SUMMARY_REQUIRED = _lazy(u'Please provide a summary.')
SUMMARY_SHORT = _lazy(u'The summary is too short (%(show_value)s characters). '
                      u'It must be at least %(limit_value)s characters.')
SUMMARY_LONG = _lazy(u'Please keep the length of the summary to '
                     u'%(limit_value)s characters or less. It is currently '
                     u'%(show_value)s characters.')
# Content and comment field messages.
CONTENT_REQUIRED = _lazy(u'Please provide content.')
CONTENT_SHORT = _lazy(u'The content is too short (%(show_value)s characters). '
                      u'It must be at least %(limit_value)s characters.')
CONTENT_LONG = _lazy(u'Please keep the length of the content to '
                     u'%(limit_value)s characters or less. It is currently '
                     u'%(show_value)s characters.')
COMMENT_LONG = _lazy(u'Please keep the length of the comment to '
                     u'%(limit_value)s characters or less. It is currently '
                     u'%(show_value)s characters.')
# Collision / move errors raised during save.
SLUG_COLLIDES = _lazy(u'Another document with this slug already exists.')
OTHER_COLLIDES = _lazy(u'Another document with this metadata already exists.')
MIDAIR_COLLISION = _lazy(u'This document was modified while you were '
                         'editing it.')
MOVE_REQUIRED = _lazy(u"Changing this document's slug requires "
                      u"moving it and its children.")
class DocumentForm(forms.ModelForm):
    """Form to create/edit a document."""
    title = StrippedCharField(min_length=1, max_length=255,
                              widget=forms.TextInput(
                                  attrs={'placeholder': TITLE_PLACEHOLDER}),
                              label=_lazy(u'Title:'),
                              help_text=_lazy(u'Title of article'),
                              error_messages={'required': TITLE_REQUIRED,
                                              'min_length': TITLE_SHORT,
                                              'max_length': TITLE_LONG})
    slug = StrippedCharField(min_length=1, max_length=255,
                             widget=forms.TextInput(),
                             label=_lazy(u'Slug:'),
                             help_text=_lazy(u'Article URL'),
                             error_messages={'required': SLUG_REQUIRED,
                                             'min_length': SLUG_SHORT,
                                             'max_length': SLUG_LONG})
    category = forms.ChoiceField(choices=Document.CATEGORIES,
                                 initial=10,
                                 # Required for non-translations, which is
                                 # enforced in Document.clean().
                                 required=False,
                                 label=_lazy(u'Category:'),
                                 help_text=_lazy(u'Type of article'),
                                 widget=forms.HiddenInput())
    parent_topic = forms.ModelChoiceField(queryset=Document.objects.all(),
                                          required=False,
                                          label=_lazy(u'Parent:'))
    locale = forms.CharField(widget=forms.HiddenInput())
    def clean_slug(self):
        """Validate the slug, defaulting it to the title when empty and
        rejecting characters or patterns that would break URL routing."""
        slug = self.cleaned_data['slug']
        if slug == '':
            # Default to the title, if missing.
            slug = self.cleaned_data['title']
        # "?", " ", quote disallowed in slugs altogether
        if '?' in slug or ' ' in slug or '"' in slug or "'" in slug:
            raise forms.ValidationError(SLUG_INVALID)
        # Pattern copied from urls.py
        if not re.compile(r'^[^\$]+$').match(slug):
            raise forms.ValidationError(SLUG_INVALID)
        # Guard against slugs that match urlpatterns
        for pat in RESERVED_SLUGS:
            if re.compile(pat).match(slug):
                raise forms.ValidationError(SLUG_INVALID)
        return slug
    class Meta:
        model = Document
        fields = ('title', 'slug', 'category', 'locale')
    def save(self, parent_doc, **kwargs):
        """Persist the Document form, and return the saved Document."""
        # Save with commit=False so the parent links can be set before the
        # actual database write.
        doc = super(DocumentForm, self).save(commit=False, **kwargs)
        doc.parent = parent_doc
        if 'parent_topic' in self.cleaned_data:
            doc.parent_topic = self.cleaned_data['parent_topic']
        doc.save()
        # not strictly necessary since we didn't change
        # any m2m data since we instantiated the doc
        self.save_m2m()
        return doc
class RevisionForm(forms.ModelForm):
"""Form to create new revisions."""
title = StrippedCharField(min_length=1, max_length=255,
required=False,
widget=forms.TextInput(
attrs={'placeholder': TITLE_PLACEHOLDER}),
label=_lazy(u'Title:'),
help_text=_lazy(u'Title of article'),
error_messages={'required': TITLE_REQUIRED,
'min_length': TITLE_SHORT,
'max_length': TITLE_LONG})
slug = StrippedCharField(min_length=1, max_length=255,
required=False,
widget=forms.TextInput(),
label=_lazy(u'Slug:'),
help_text=_lazy(u'Article URL'),
error_messages={'required': SLUG_REQUIRED,
'min_length': SLUG_SHORT,
'max_length': SLUG_LONG})
tags = StrippedCharField(required=False,
label=_lazy(u'Tags:'))
keywords = StrippedCharField(required=False,
label=_lazy(u'Keywords:'),
help_text=_lazy(u'Affects search results'))
summary = StrippedCharField(
required=False,
min_length=5, max_length=1000,
widget=forms.Textarea(),
label=_lazy(u'Search result summary:'),
help_text=_lazy(u'Only displayed on search results page'),
error_messages={'required': SUMMARY_REQUIRED,
'min_length': SUMMARY_SHORT,
'max_length': SUMMARY_LONG})
content = StrippedCharField(
min_length=5, max_length=300000,
label=_lazy(u'Content:'),
widget=forms.Textarea(),
error_messages={'required': CONTENT_REQUIRED,
'min_length': CONTENT_SHORT,
'max_length': CONTENT_LONG})
comment = StrippedCharField(required=False, label=_lazy(u'Comment:'))
review_tags = forms.MultipleChoiceField(
label=_("Tag this revision for review?"),
widget=CheckboxSelectMultiple, required=False,
choices=REVIEW_FLAG_TAGS)
localization_tags = forms.MultipleChoiceField(
label=_("Tag this revision for localization?"),
widget=CheckboxSelectMultiple, required=False,
choices=LOCALIZATION_FLAG_TAGS)
current_rev = forms.CharField(required=False,
widget=forms.HiddenInput())
class Meta(object):
model = Revision
fields = ('title', 'slug', 'tags', 'keywords', 'summary', 'content',
'comment', 'based_on', 'toc_depth',
'render_max_age')
def __init__(self, *args, **kwargs):
# Snag some optional kwargs and delete them before calling
# super-constructor.
for n in ('section_id', 'is_iframe_target'):
if n not in kwargs:
setattr(self, n, None)
else:
setattr(self, n, kwargs[n])
del kwargs[n]
super(RevisionForm, self).__init__(*args, **kwargs)
self.fields['based_on'].widget = forms.HiddenInput()
if self.instance and self.instance.pk:
# Ensure both title and slug are populated from parent document, if
# last revision didn't have them
if not self.instance.title:
self.initial['title'] = self.instance.document.title
if not self.instance.slug:
self.initial['slug'] = self.instance.document.slug
content = self.instance.content
if not self.instance.document.is_template:
tool = kuma.wiki.content.parse(content)
tool.injectSectionIDs()
if self.section_id:
tool.extractSection(self.section_id)
tool.filterEditorSafety()
content = tool.serialize()
self.initial['content'] = content
self.initial['review_tags'] = list(self.instance.review_tags
.values_list('name',
flat=True))
self.initial['localization_tags'] = list(self.instance
.localization_tags
.values_list('name',
flat=True))
if self.section_id:
self.fields['toc_depth'].required = False
    def _clean_collidable(self, name):
        """Validate a URL-affecting field (e.g. slug) against collisions
        with an existing, non-redirect document in the same locale.

        Returns the cleaned value, or raises ``forms.ValidationError``
        when another document already uses it.
        """
        value = self.cleaned_data[name]

        if self.is_iframe_target:
            # Since these collidables can change the URL of the page, changes
            # to them are ignored for an iframe submission
            return getattr(self.instance.document, name)

        error_message = {'slug': SLUG_COLLIDES}.get(name, OTHER_COLLIDES)
        try:
            # NOTE(review): Document.MultipleObjectsReturned from this get()
            # is not handled here -- presumably (locale, slug) is unique;
            # confirm against the model's constraints.
            existing_doc = Document.objects.get(
                locale=self.instance.document.locale,
                **{name: value})
            if self.instance and self.instance.document:
                if (not existing_doc.redirect_url() and
                        existing_doc.pk != self.instance.document.pk):
                    # There's another document with this value,
                    # and we're not a revision of it.
                    raise forms.ValidationError(error_message)
            else:
                # This document-and-revision doesn't exist yet, so there
                # shouldn't be any collisions at all.
                raise forms.ValidationError(error_message)
        except Document.DoesNotExist:
            # No existing document for this value, so we're good here.
            pass

        return value
    def clean_slug(self):
        """Default a missing slug to the document's current slug, then
        run the collision check on it."""
        # TODO: move this check somewhere else?
        # edits can come in without a slug, so default to the current doc slug
        if not self.cleaned_data['slug']:
            existing_slug = self.instance.document.slug
            self.cleaned_data['slug'] = self.instance.slug = existing_slug

        cleaned_slug = self._clean_collidable('slug')
        return cleaned_slug
    def clean_content(self):
        """Validate the content, performing any section editing if necessary"""
        content = self.cleaned_data['content']

        # If we're editing a section, we need to replace the section content
        # from the current revision.
        if self.section_id and self.instance and self.instance.document:
            # Make sure we start with content from the latest revision.
            full_content = self.instance.document.current_revision.content
            # Replace the section content with the form content.
            tool = kuma.wiki.content.parse(full_content)
            tool.replaceSection(self.section_id, content)
            content = tool.serialize()

        return content
    def clean_current_rev(self):
        """If a current revision is supplied in the form, compare it against
        what the document claims is the current revision. If there's a
        difference, then an edit has occurred since the form was constructed
        and we treat it as a mid-air collision."""
        current_rev = self.cleaned_data.get('current_rev', None)
        if not current_rev:
            # If there's no current_rev, just bail.
            return current_rev
        try:
            doc_current_rev = self.instance.document.current_revision.id
            if unicode(current_rev) != unicode(doc_current_rev):
                if (self.section_id and self.instance and
                        self.instance.document):
                    # This is a section edit. So, even though the revision has
                    # changed, it still might not be a collision if the section
                    # in particular hasn't changed.
                    # NOTE(review): Revision.DoesNotExist from this lookup is
                    # not caught below (only Document.DoesNotExist is) --
                    # confirm a stale pk can't reach this point.
                    orig_ct = (Revision.objects.get(pk=current_rev)
                               .get_section_content(self.section_id))
                    curr_ct = (self.instance.document.current_revision
                               .get_section_content(self.section_id))
                    if orig_ct != curr_ct:
                        # Oops. Looks like the section did actually get
                        # changed, so yeah this is a collision.
                        raise forms.ValidationError(MIDAIR_COLLISION)
                    return current_rev
                else:
                    # No section edit, so this is a flat-out collision.
                    raise forms.ValidationError(MIDAIR_COLLISION)
            # NOTE(review): when the revisions match, control falls off the
            # end of the try block and the method returns None rather than
            # current_rev -- apparently harmless since the value is only
            # used for this collision check, but worth confirming.
        except Document.DoesNotExist:
            # If there's no document yet, just bail.
            return current_rev
    def save_section(self, creator, document, **kwargs):
        """Save a section edit.

        :param creator: user recorded as the new revision's creator
        :param document: the Document the new revision belongs to
        :returns: the saved Revision
        """
        # This is separate because the logic is slightly different and
        # may need to evolve over time; a section edit doesn't submit
        # all the fields, and we need to account for that when we
        # construct the new Revision.
        old_rev = Document.objects.get(pk=self.instance.document.id).current_revision
        new_rev = super(RevisionForm, self).save(commit=False, **kwargs)
        new_rev.document = document
        new_rev.creator = creator
        # Section edits don't submit a TOC depth; carry the old one over.
        new_rev.toc_depth = old_rev.toc_depth
        new_rev.save()
        # Likewise preserve the previous revision's review tags.
        new_rev.review_tags.set(*list(old_rev.review_tags
                                      .values_list('name', flat=True)))
        return new_rev
    def save(self, creator, document, **kwargs):
        """Persist me, and return the saved Revision.

        Take several other necessary pieces of data that aren't from the
        form.

        :param creator: user recorded as the new revision's creator
        :param document: the Document the new revision belongs to
        :returns: the saved Revision
        """
        if (self.section_id and self.instance and
                self.instance.document):
            # Section edits take a different path (fewer submitted fields).
            return self.save_section(creator, document, **kwargs)
        # Throws a TypeError if somebody passes in a commit kwarg:
        new_rev = super(RevisionForm, self).save(commit=False, **kwargs)

        new_rev.document = document
        new_rev.creator = creator
        new_rev.toc_depth = self.cleaned_data['toc_depth']
        new_rev.save()
        new_rev.review_tags.set(*self.cleaned_data['review_tags'])
        new_rev.localization_tags.set(*self.cleaned_data['localization_tags'])
        return new_rev
class RevisionValidationForm(RevisionForm):
    """Created primarily to disallow slashes in slugs during validation"""

    def clean_slug(self):
        """Reject slugs containing separator characters, then run the
        superclass collision check on the parent-prefixed slug.

        Returns the original (un-prefixed) slug on success.

        Cleanup: the original carried a dead ``is_valid`` flag (set and
        combined with the super call but never used for control flow --
        both failure paths raise directly) plus commented-out code; both
        removed with no behavior change.
        """
        original = self.cleaned_data['slug']
        # "/", "?", and " " disallowed in form input
        if (u'' == original or
                '/' in original or
                '?' in original or
                ' ' in original):
            raise forms.ValidationError(SLUG_INVALID)

        # Append parent slug data, call super, ensure still valid
        self.cleaned_data['slug'] = self.data['slug'] = (self.parent_slug +
                                                         '/' +
                                                         original)
        # The superclass raises on a collision; its return value isn't
        # needed because the original slug is restored below either way.
        super(RevisionValidationForm, self).clean_slug()

        # Set the slug back to original
        self.cleaned_data['slug'] = self.data['slug'] = original
        return self.cleaned_data['slug']
class TreeMoveForm(forms.Form):
    """Form for moving a document tree to a new slug within a locale."""
    # Optional new title for the moved article.
    title = StrippedCharField(min_length=1, max_length=255,
                              required=False,
                              widget=forms.TextInput(
                                  attrs={'placeholder': TITLE_PLACEHOLDER}),
                              label=_lazy(u'Title:'),
                              help_text=_lazy(u'Title of article'),
                              error_messages={'required': TITLE_REQUIRED,
                                              'min_length': TITLE_SHORT,
                                              'max_length': TITLE_LONG})
    # Destination slug; cleaned below to strip URL/prefix artifacts.
    slug = StrippedCharField(min_length=1, max_length=255,
                             widget=forms.TextInput(),
                             label=_lazy(u'New slug:'),
                             help_text=_lazy(u'New article URL'),
                             error_messages={'required': SLUG_REQUIRED,
                                             'min_length': SLUG_SHORT,
                                             'max_length': SLUG_LONG})
    # Locale of the tree being moved; hidden, supplied by the page.
    locale = StrippedCharField(min_length=2, max_length=5,
                               widget=forms.HiddenInput())

    def clean_slug(self):
        """Normalize the destination slug: reject full URLs, strip the
        locale/docs prefix and any trailing slash."""
        # We only want the slug here; inputting a full URL would lead
        # to disaster.
        if '://' in self.cleaned_data['slug']:
            raise forms.ValidationError('Please enter only the slug to move '
                                        'to, not the full URL.')

        # Removes leading slash and {locale/docs/} if necessary
        # IMPORTANT: This exact same regex is used on the client side, so
        # update both if doing so
        self.cleaned_data['slug'] = re.sub(re.compile(SLUG_CLEANSING_REGEX),
                                           '', self.cleaned_data['slug'])

        # Remove the trailing slash if one is present, because it
        # will screw up the page move, which doesn't expect one.
        self.cleaned_data['slug'] = self.cleaned_data['slug'].rstrip('/')

        return self.cleaned_data['slug']

    def clean(self):
        """Cross-field check: the new slug must have a valid parent
        document in the given locale."""
        cleaned_data = super(TreeMoveForm, self).clean()
        if set(['slug', 'locale']).issubset(cleaned_data):
            slug, locale = cleaned_data['slug'], cleaned_data['locale']
            try:
                valid_slug_parent(slug, locale)
            except Exception, e:
                raise forms.ValidationError(e.args[0])
        return cleaned_data
class DocumentDeletionForm(forms.Form):
    """Ask for a free-text reason when deleting a document."""
    # Autofocused textarea; the reason is recorded with the deletion.
    reason = forms.CharField(widget=forms.Textarea(attrs={'autofocus': 'true'}))
class DocumentContentFlagForm(ContentFlagForm):
    """Flag (report) a document, offering the site-configured reasons."""
    # Radio-button choice drawn from settings.WIKI_FLAG_REASONS.
    flag_type = forms.ChoiceField(
        choices=settings.WIKI_FLAG_REASONS,
        widget=forms.RadioSelect)
|
mpl-2.0
|
thalamus/Flexget
|
flexget/utils/search.py
|
18
|
1283
|
""" Common tools used by plugins implementing search plugin api """
from __future__ import unicode_literals, division, absolute_import
import re
from unicodedata import normalize
from flexget.utils.titles.parser import TitleParser
def clean_symbols(text):
    """Replaces common symbols with spaces. Also normalize unicode strings in decomposed form.

    :param text: title string (``str`` or ``unicode``)
    :return: lowercased string with runs of separator symbols collapsed
        to single spaces
    """
    result = text
    if isinstance(result, unicode):
        # Decompose accented characters so comparisons ignore accents.
        result = normalize('NFKD', result)
    # Fix: use a raw string for the pattern -- the original relied on
    # '\(' etc. surviving as literal backslashes, which is an invalid
    # escape sequence in a plain string (a warning/error in later Pythons).
    return re.sub(r'[ \(\)\-_\[\]\.]+', ' ', result).lower()
def clean_title(title):
    """Removes common codec, sound keywords, and special characters info from titles to facilitate
    loose title comparison.
    """
    # Drop the noise words first, then normalize symbols/case.
    noise_words = TitleParser.sounds + TitleParser.codecs
    stripped = TitleParser.remove_words(title, noise_words)
    return clean_symbols(stripped)
def normalize_unicode(text):
    """Return *text* recomposed to NFC form when it is a unicode string.

    Byte strings are passed through untouched.
    """
    if not isinstance(text, unicode):
        return text
    # Convert to combined form for better search results
    return normalize('NFC', text)
def torrent_availability(seeds, leeches):
    """Returns a rating based on seeds and leeches for a given torrent.

    Seeds are weighted twice as heavily as leeches.

    :param seeds: Number of seeds on the torrent
    :param leeches: Number of leeches on the torrent
    :return: A numeric rating
    """
    weighted_seeds = 2 * seeds
    return weighted_seeds + leeches
|
mit
|
mjuric/duplicity
|
duplicity/globals.py
|
1
|
9094
|
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright 2002 Ben Escoto <[email protected]>
# Copyright 2007 Kenneth Loafman <[email protected]>
#
# This file is part of duplicity.
#
# Duplicity is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# Duplicity is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Store global configuration information"""
import os
import sys
import socket
# The current version of duplicity
version = "0.7.17"
# Prefix for all files (appended before type-specific prefixes)
file_prefix = ""
# Prefix for manifest files only
file_prefix_manifest = ""
# Prefix for archive files only
file_prefix_archive = ""
# Prefix for sig files only
file_prefix_signature = ""
# The name of the current host, or None if it cannot be set
hostname = socket.getfqdn()
# The main local path. For backing up, this is the path to be backed
# up. For restoring, this is the destination of the restored files.
local_path = None
# The symbolic name of the backup being operated upon.
backup_name = None
# For testing -- set current time
current_time = None
# Set to the Path of the archive directory (the directory which
# contains the signatures and manifests of the relevant backup
# collection), and for checkpoint state between volumes.
# NOTE: this gets expanded in duplicity.commandline
os.environ["XDG_CACHE_HOME"] = os.getenv("XDG_CACHE_HOME", os.path.expanduser("~/.cache"))
archive_dir = os.path.expandvars("$XDG_CACHE_HOME/duplicity")
# config dir for future use
os.environ["XDG_CONFIG_HOME"] = os.getenv("XDG_CONFIG_HOME", os.path.expanduser("~/.config"))
config_dir = os.path.expandvars("$XDG_CONFIG_HOME/duplicity")
# Restores will try to bring back the state as of the following time.
# If it is None, default to current time.
restore_time = None
# If set, restore only the subdirectory or file specified, not the
# whole root.
restore_dir = None
# The backend representing the remote side
backend = None
# If set, the Select object which iterates paths in the local
# source directory.
select = None
# Path to the GnuPG binary to use; None selects the default.
gpg_binary = None
# Set to GPGProfile that will be used to compress/uncompress encrypted
# files. Replaces encryption_keys, sign_key, and passphrase settings.
gpg_profile = None
# Options to pass to gpg
gpg_options = ''
# Maximum file blocksize
max_blocksize = 2048
# If true, filelists and directory statistics will be split on
# nulls instead of newlines.
null_separator = None
# number of retries on network operations
num_retries = 5
# True if Pydev debugger should be activated
pydevd = False
# Character used like the ":" in time strings like
# 2002-08-06T04:22:00-07:00. The colon isn't good for filenames on
# windows machines.
time_separator = ":"
# Global lockfile used to manage concurrency
lockpath = ""
lockfile = None
# If this is true, only warn and don't raise fatal error when backup
# source directory doesn't match previous backup source directory.
allow_source_mismatch = None
# If set, abort if cannot do an incremental backup. Otherwise if
# signatures not found, default to full.
incremental = None
# If set, print the statistics after every backup session
print_statistics = True
# If set, use short (< 30 char) filenames for all the remote files.
short_filenames = False
# If set, forces a full backup if the last full backup is older than
# the time specified
full_force_time = None
# Used to confirm certain destructive operations like deleting old files.
force = None
# If set, signifies time in seconds before which backup files should
# be deleted.
remove_time = None
# If set, signifies the number of backups chains to keep when performing
# a remove-all-but-n-full.
keep_chains = None
# If set, signifies that remove-all-but-n-full in progress
remove_all_but_n_full_mode = None
# If set, signifies that remove-all-inc-of-but-n-full in progress (variant of remove-all-but-n-full)
remove_all_inc_of_but_n_full_mode = None
# Don't actually do anything, but still report what would be done
dry_run = False
# If set to false, then do not encrypt files on remote system
encryption = True
# If set to false, then do not compress files on remote system
compression = True
# volume size. default 200M
volsize = 200 * 1024 * 1024
# Working directory for the tempfile module. Defaults to /tmp on most systems.
temproot = None
# network timeout value
timeout = 30
# FTP data connection type
ftp_connection = 'passive'
# Protocol for webdav
webdav_proto = 'http'
# Asynchronous put/get concurrency limit
# (default of 0 disables asynchronicity).
async_concurrency = 0
# Whether to use "new-style" subdomain addressing for S3 buckets. Such
# use is not backwards-compatible with upper-case buckets, or buckets
# that are otherwise not expressible in a valid hostname.
s3_use_new_style = False
# Whether to create European buckets (sorry, hard-coded to only
# support european for now).
s3_european_buckets = False
# File owner uid keeps number from tar file. Like same option in GNU tar.
numeric_owner = False
# Whether to use plain HTTP (without SSL) to send data to S3
# See <https://bugs.launchpad.net/duplicity/+bug/433970>.
s3_unencrypted_connection = False
# Whether to use S3 Reduced Redundancy Storage
s3_use_rrs = False
# Whether to use S3 Infrequent Access Storage
s3_use_ia = False
# True if we should use boto multiprocessing version
s3_use_multiprocessing = False
# Chunk size used for S3 multipart uploads.The number of parallel uploads to
# S3 be given by chunk size / volume size. Use this to maximize the use of
# your bandwidth. Defaults to 25MB
s3_multipart_chunk_size = 25 * 1024 * 1024
# Minimum chunk size accepted by S3
s3_multipart_minimum_chunk_size = 5 * 1024 * 1024
# Maximum number of processes to use while doing a multipart upload to S3
s3_multipart_max_procs = None
# Maximum time to wait for a part to finish when doing a multipart upload to S3
s3_multipart_max_timeout = None
# Use server side encryption in s3
s3_use_sse = False
# Whether to use the full email address as the user name when
# logging into an imap server. If false just the user name
# part of the email address is used.
imap_full_address = False
# Name of the imap folder where we want to store backups.
# Can be changed with a command line argument.
imap_mailbox = "INBOX"
# Whether the old filename format is in effect.
old_filenames = False
# Whether to specify --use-agent in GnuPG options
use_agent = False
# ssh commands to use, used by ssh_pexpect (defaults to sftp, scp)
scp_command = None
sftp_command = None
# default to batch mode using public-key encryption
ssh_askpass = False
# user added ssh options
ssh_options = ""
# default cf backend is pyrax
cf_backend = "pyrax"
# HTTPS ssl options (currently only webdav, lftp)
ssl_cacert_file = None
ssl_cacert_path = None
ssl_no_check_certificate = False
# user added rsync options
rsync_options = ""
# will be a Restart object if restarting
restart = None
# used in testing only - raises exception after volume
fail_on_volume = 0
# used in testing only - skips uploading a particular volume
skip_volume = 0
# ignore (some) errors during operations; supposed to make it more
# likely that you are able to restore data under problematic
# circumstances. the default should absolutely always be True unless
# you know what you are doing.
ignore_errors = False
# If we should be particularly aggressive when cleaning up
extra_clean = False
# Renames (--rename)
rename = {}
# enable data comparison on verify runs
compare_data = False
# When symlinks are encountered, the item they point to is copied rather than
# the symlink.
copy_links = False
# When selected, triggers a dry-run before a full or incremental to compute
# changes, then runs the real operation and keeps track of the real progress
progress = False
# Controls the upload progress messages refresh rate. Default: update each
# 3 seconds
progress_rate = 3
# Level of Redundancy in % for Par2 files
par2_redundancy = 10
# Verbatim par2 other options
par2_options = ""
# Whether to enable gio backend
use_gio = False
# delay (in seconds) before next operation after failure
backend_retry_delay = 30
# default filesystem encoding
# In Python 2 it seems that sys.getfilesystemencoding() will normally return
# 'utf-8' or some other sane encoding, but will sometimes fail and return
# either 'ascii' or None. Both are bogus, so default to 'utf-8' if it does.
fsencoding = sys.getfilesystemencoding()
fsencoding = fsencoding if fsencoding not in ['ascii', None] else 'utf-8'
|
gpl-2.0
|
efiring/scipy
|
scipy/io/matlab/tests/test_mio5_utils.py
|
106
|
5604
|
""" Testing mio5_utils Cython module
"""
from __future__ import division, print_function, absolute_import
import sys
from io import BytesIO
cStringIO = BytesIO
import numpy as np
from nose.tools import (assert_true, assert_equal, assert_raises)
from numpy.testing import (assert_array_equal, run_module_suite)
from scipy._lib.six import u
import scipy.io.matlab.byteordercodes as boc
import scipy.io.matlab.streams as streams
import scipy.io.matlab.mio5_params as mio5p
import scipy.io.matlab.mio5_utils as m5u
def test_byteswap():
    """byteswap_u4 must match numpy's byteswap and be its own inverse."""
    for val in (1, 0x100, 0x10000):
        original = np.array(val, dtype=np.uint32)
        swapped = m5u.byteswap_u4(original)
        # Agrees with numpy's reference implementation.
        assert_equal(original.byteswap().item(), swapped)
        # Swapping twice restores the original value.
        restored = m5u.byteswap_u4(swapped)
        assert_equal(original.item(), restored)
def _make_tag(base_dt, val, mdtype, sde=False):
    ''' Makes a simple matlab tag, full or sde '''
    base_dt = np.dtype(base_dt)
    bo = boc.to_numpy_code(base_dt.byteorder)
    byte_count = base_dt.itemsize
    if not sde:
        # Full tag: two u4 fields (mdtype, byte_count), then the value,
        # padded so the whole element is a multiple of 8 bytes.
        udt = bo + 'u4'
        padding = 8 - (byte_count % 8)
        all_dt = [('mdtype', udt),
                  ('byte_count', udt),
                  ('val', base_dt)]
        if padding:
            all_dt.append(('padding', 'u1', padding))
    else:  # is sde
        # Small data element: mdtype and byte_count share one u4 as two
        # u2 fields; their order depends on endianness.
        udt = bo + 'u2'
        padding = 4-byte_count
        if bo == '<':  # little endian
            all_dt = [('mdtype', udt),
                      ('byte_count', udt),
                      ('val', base_dt)]
        else:  # big endian
            all_dt = [('byte_count', udt),
                      ('mdtype', udt),
                      ('val', base_dt)]
        if padding:
            all_dt.append(('padding', 'u1', padding))
    tag = np.zeros((1,), dtype=all_dt)
    tag['mdtype'] = mdtype
    tag['byte_count'] = byte_count
    tag['val'] = val
    return tag
def _write_stream(stream, *strings):
stream.truncate(0)
stream.seek(0)
for s in strings:
stream.write(s)
stream.seek(0)
def _make_readerlike(stream, byte_order=boc.native_code):
    """Build a minimal object carrying the attributes VarReader5 expects."""
    class R(object):
        pass
    attrs = dict(mat_stream=stream,
                 byte_order=byte_order,
                 struct_as_record=True,
                 uint16_codec=sys.getdefaultencoding(),
                 chars_as_strings=False,
                 mat_dtype=False,
                 squeeze_me=False)
    reader = R()
    for name, value in attrs.items():
        setattr(reader, name, value)
    return reader
def test_read_tag():
    # mainly to test errors
    # make reader-like thing
    str_io = BytesIO()
    r = _make_readerlike(str_io)
    c_reader = m5u.VarReader5(r)
    # Reading a tag from an empty stream must fail.
    # This works for StringIO but _not_ cStringIO
    assert_raises(IOError, c_reader.read_tag)
    # bad SDE: a small data element can hold at most 4 bytes, so a
    # byte_count of 5 is invalid and must be rejected.
    tag = _make_tag('i4', 1, mio5p.miINT32, sde=True)
    tag['byte_count'] = 5
    _write_stream(str_io, tag.tostring())
    assert_raises(ValueError, c_reader.read_tag)
def test_read_stream():
    # Reading raw bytes through the stream wrapper returns them verbatim.
    tag = _make_tag('i4', 1, mio5p.miINT32, sde=True)
    tag_str = tag.tostring()
    str_io = cStringIO(tag_str)
    st = streams.make_stream(str_io)
    s = streams._read_into(st, tag.itemsize)
    assert_equal(s, tag.tostring())
def test_read_numeric():
    # make reader-like thing
    str_io = cStringIO()
    r = _make_readerlike(str_io)
    # check simplest of tags: each dtype/value pair in both byte orders,
    # as full tags and as small data elements.
    for base_dt, val, mdtype in (('u2', 30, mio5p.miUINT16),
                                 ('i4', 1, mio5p.miINT32),
                                 ('i2', -1, mio5p.miINT16)):
        for byte_code in ('<', '>'):
            r.byte_order = byte_code
            c_reader = m5u.VarReader5(r)
            assert_equal(c_reader.little_endian, byte_code == '<')
            assert_equal(c_reader.is_swapped, byte_code != boc.native_code)
            for sde_f in (False, True):
                dt = np.dtype(base_dt).newbyteorder(byte_code)
                a = _make_tag(dt, val, mdtype, sde_f)
                a_str = a.tostring()
                _write_stream(str_io, a_str)
                el = c_reader.read_numeric()
                assert_equal(el, val)
                # two sequential reads from the same stream must both work
                _write_stream(str_io, a_str, a_str)
                el = c_reader.read_numeric()
                assert_equal(el, val)
                el = c_reader.read_numeric()
                assert_equal(el, val)
def test_read_numeric_writeable():
    # Arrays returned by read_numeric must be writeable by the caller.
    # make reader-like thing
    str_io = cStringIO()
    r = _make_readerlike(str_io, '<')
    c_reader = m5u.VarReader5(r)
    dt = np.dtype('<u2')
    a = _make_tag(dt, 30, mio5p.miUINT16, 0)
    a_str = a.tostring()
    _write_stream(str_io, a_str)
    el = c_reader.read_numeric()
    assert_true(el.flags.writeable)
def test_zero_byte_string():
    # Tests hack to allow chars of non-zero length, but 0 bytes
    # make reader-like thing
    str_io = cStringIO()
    r = _make_readerlike(str_io, boc.native_code)
    c_reader = m5u.VarReader5(r)
    tag_dt = np.dtype([('mdtype', 'u4'), ('byte_count', 'u4')])
    tag = np.zeros((1,), dtype=tag_dt)
    tag['mdtype'] = mio5p.miINT8
    tag['byte_count'] = 1
    hdr = m5u.VarHeader5()
    # Try when string is 1 length
    hdr.set_dims([1,])
    _write_stream(str_io, tag.tostring() + b'        ')
    str_io.seek(0)
    val = c_reader.read_char(hdr)
    assert_equal(val, u(' '))
    # Now when string has 0 bytes 1 length: should still yield one space
    tag['byte_count'] = 0
    _write_stream(str_io, tag.tostring())
    str_io.seek(0)
    val = c_reader.read_char(hdr)
    assert_equal(val, u(' '))
    # Now when string has 0 bytes 4 length: four spaces expected
    str_io.seek(0)
    hdr.set_dims([4,])
    val = c_reader.read_char(hdr)
    assert_array_equal(val, [u(' ')] * 4)
if __name__ == "__main__":
    # Allow running this test module directly.
    run_module_suite()
|
bsd-3-clause
|
applicationdevm/XlsxWriter
|
xlsxwriter/test/workbook/test_workbook02.py
|
8
|
1742
|
###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2015, John McNamara, [email protected]
#
import unittest
from ...compatibility import StringIO
from ..helperfunctions import _xml_to_list
from ...workbook import Workbook
class TestAssembleWorkbook(unittest.TestCase):
    """
    Test assembling a complete Workbook file.

    """
    def test_assemble_xml_file(self):
        """Test writing a workbook with 2 worksheets."""
        self.maxDiff = None

        # Write the workbook XML into an in-memory handle instead of disk.
        fh = StringIO()
        workbook = Workbook()
        workbook._set_filehandle(fh)

        workbook.add_worksheet()
        workbook.add_worksheet()
        workbook._assemble_xml_file()

        # Mark the workbook as closed so teardown doesn't complain --
        # presumably suppresses the "file not closed" warning; confirm
        # against Workbook.__del__.
        workbook.fileclosed = 1

        exp = _xml_to_list("""
                <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
                <workbook xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main" xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships">
                  <fileVersion appName="xl" lastEdited="4" lowestEdited="4" rupBuild="4505"/>
                  <workbookPr defaultThemeVersion="124226"/>
                  <bookViews>
                    <workbookView xWindow="240" yWindow="15" windowWidth="16095" windowHeight="9660"/>
                  </bookViews>
                  <sheets>
                    <sheet name="Sheet1" sheetId="1" r:id="rId1"/>
                    <sheet name="Sheet2" sheetId="2" r:id="rId2"/>
                  </sheets>
                  <calcPr calcId="124519" fullCalcOnLoad="1"/>
                </workbook>
                """)

        got = _xml_to_list(fh.getvalue())

        self.assertEqual(got, exp)
|
bsd-2-clause
|
FireWRT/OpenWrt-Firefly-Libraries
|
staging_dir/target-mipsel_1004kc+dsp_uClibc-0.9.33.2/usr/lib/python3.4/test/test_binhex.py
|
96
|
1508
|
"""Test script for the binhex C module
Uses the mechanism of the python binhex module
Based on an original test by Roger E. Masse.
"""
import binhex
import os
import unittest
from test import support
class BinHexTestCase(unittest.TestCase):
    """Round-trip and error tests for the binhex module."""

    def setUp(self):
        # Scratch filenames under the standard test prefix; fname3 is
        # deliberately longer than binhex accepts.
        self.fname1 = support.TESTFN + "1"
        self.fname2 = support.TESTFN + "2"
        self.fname3 = support.TESTFN + "very_long_filename__very_long_filename__very_long_filename__very_long_filename__"

    def tearDown(self):
        support.unlink(self.fname1)
        support.unlink(self.fname2)
        support.unlink(self.fname3)

    # Payload used by test_binhex for the encode/decode round trip.
    DATA = b'Jack is my hero'

    def test_binhex(self):
        """Encode then decode a small file and check the data survives."""
        f = open(self.fname1, 'wb')
        f.write(self.DATA)
        f.close()

        binhex.binhex(self.fname1, self.fname2)
        binhex.hexbin(self.fname2, self.fname1)

        f = open(self.fname1, 'rb')
        finish = f.readline()
        f.close()

        self.assertEqual(self.DATA, finish)

    def test_binhex_error_on_long_filename(self):
        """
        The testcase fails if no exception is raised when a filename parameter provided to binhex.binhex()
        is too long, or if the exception raised in binhex.binhex() is not an instance of binhex.Error.
        """
        f3 = open(self.fname3, 'wb')
        f3.close()

        self.assertRaises(binhex.Error, binhex.binhex, self.fname3, self.fname2)
def test_main():
    # Drive the test case through the regrtest helper.
    support.run_unittest(BinHexTestCase)


if __name__ == "__main__":
    test_main()
|
gpl-2.0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.